repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
yunhaowang/IDP-APA | utilities/py_idpapa_sam2gpd_for_sr_polya.py | 1 | 4906 | #!/usr/bin/env python
import sys,re,time,argparse
def main(args):
    """Drive the SAM-to-GPD conversion with the parsed CLI arguments."""
    # Start/finish timestamp prints from the original pipeline remain disabled.
    # print >>sys.stdout, "Start analysis: " + time.strftime("%a,%d %b %Y %H:%M:%S")
    output_gpd(args.input, args.output, args.length, args.ratio)
    # print >>sys.stdout, "Finish analysis: " + time.strftime("%a,%d %b %Y %H:%M:%S")
def extract_exon_length_from_cigar(cigar):
    """Return the reference-consuming length of a CIGAR fragment.

    Only M (alignment match) and D (deletion from the read) operations
    consume reference bases here; insertions, soft clips and skips are
    ignored, matching the original accumulation loops.

    :param cigar: CIGAR string (or an N-terminated fragment of one)
    :return: total M + D length as an int (0 when neither op is present)
    """
    matched = sum(int(n) for n in re.findall(r"(\d+)M", cigar))
    deleted = sum(int(n) for n in re.findall(r"(\d+)D", cigar))
    return matched + deleted
def extract_soft_clip_from_cigar(cigar):
    """Return the 5' and 3' soft-clip lengths of a CIGAR string.

    A leading "<n>S" is the 5' clip, a trailing "<n>S" the 3' clip;
    a missing clip contributes 0. Note that for a degenerate CIGAR such
    as "5S" both anchors match the same token, giving (5, 5) — identical
    to the original accumulation behavior.

    :param cigar: full CIGAR string from a SAM record
    :return: (five_prime_clip, three_prime_clip) as ints
    """
    m5 = re.search(r"^(\d+)S", cigar)
    m3 = re.search(r"(\d+)S$", cigar)
    cigar_5_s = int(m5.group(1)) if m5 else 0
    cigar_3_s = int(m3.group(1)) if m3 else 0
    return cigar_5_s, cigar_3_s
def sam2gpd(sam):
    """Convert one SAM alignment line into a GPD-style record.

    Returns a list of strings:
    [read_id, read_id, chrom, strand, start, end, MAPQ,
     "<5'clip>_<3'clip>", exon_count, exon_starts, exon_ends]
    with 0-based start coordinates derived from POS and the CIGAR string.

    NOTE(review): assumes the optional fields contain an XS:A strand tag;
    a line without it raises AttributeError — confirm upstream aligner
    always emits it.
    """
    qname,flag,rname,pos,mapq,cigar,rnext,pnext,tlen,seq = sam.split("\t")[:10]
    # Optional tag fields (column 12 onward), searched for the strand tag below.
    tag = "\t".join(sam.strip().split("\t")[11:])
    # Encode soft-clip lengths at both ends as "n5_n3".
    s5,s3 = extract_soft_clip_from_cigar(cigar)
    sf = str(s5)+"_"+str(s3)
    strand = (re.search(r"XS:A:(\S)",tag)).group(1)
    cigar_n_l = 0            # length of the intron (N op) preceding the current exon
    exon_length = 0
    exon_start = int(pos)-1  # SAM POS is 1-based; GPD uses 0-based starts
    exon_end = 0
    exon_start_list = []
    exon_end_list = []
    if "N" in cigar:
        # Spliced alignment: each N-separated CIGAR chunk is one exon.
        for exon in cigar.split("N"):
            # Re-append "N" so the intron length regex below can find it.
            exon = exon + "N"
            # Advance past the previous exon plus the intron that followed it.
            exon_start = exon_start + exon_length + cigar_n_l
            exon_length = extract_exon_length_from_cigar(exon)
            exon_end = exon_start + exon_length
            if re.search(r"(\d+)N",exon):
                cigar_n_l = int((re.search(r"(\d+)N",exon)).group(1))
            exon_start_list.append(str(exon_start))
            exon_end_list.append(str(exon_end))
    else:
        # Unspliced alignment: one exon covering the whole reference hit.
        exon_start = exon_start
        exon_length = extract_exon_length_from_cigar(cigar)
        exon_end = exon_start + exon_length
        exon_start_list.append(str(exon_start))
        exon_end_list.append(str(exon_end))
    # Trailing empty element produces GPD's trailing-comma convention on join.
    exon_start_list.append("")
    exon_end_list.append("")
    gpd = [qname,qname,rname,strand,str(int(pos)-1),str(exon_end),mapq,sf,str(len(exon_start_list)-1),",".join(exon_start_list),",".join(exon_end_list)]
    return gpd
# Module-level state: records whether the most recent first-in-pair read
# carried a poly(A)-like soft clip ("83"/"99"), or not ("no_polya").
read1_flag = "no_polya"
def output_gpd(sam_file,gpd_file,polya_len,a_ratio):
    """Stream a paired-end SAM file and emit GPD records for fragments
    whose read1 shows poly(A)/poly(T) evidence in its soft clip.

    NOTE(review): the pairing logic relies on the mate line being seen
    after its first-in-pair read within the same stream (read1_info is
    reused when a non-83/99 flag arrives) — confirm the input is sorted
    so mates are adjacent.

    :param sam_file: open input SAM file object
    :param gpd_file: open output GPD file object
    :param polya_len: minimum soft-clip length to count as a poly(A) tail
    :param a_ratio: minimum fraction of A (or T) bases in the clip
    """
    global read1_flag
    for line in sam_file:
        if line[0] != "@":  # skip SAM header lines
            qname,flag,rname,pos,mapq,cigar,rnext,pnext,tlen,seq = line.strip().split("\t")[:10]
            if rname != "*":  # skip unmapped records
                if flag == "83":
                    # First-in-pair on the reverse strand: poly(A) evidence
                    # is an A-rich soft clip at the 3' (right) end.
                    if re.search(r"(\d+)S$",cigar):
                        sf_n = int((re.search(r"(\d+)S$",cigar)).group(1))
                        sf_seq = seq[-sf_n:]
                        if sf_n >= polya_len and float(sf_seq.count("A"))/float(sf_n) >= a_ratio:
                            read1_info = sam2gpd(line.strip())
                            read1_flag = "83"
                        else:
                            read1_flag = "no_polya"
                    else:
                        read1_flag = "no_polya"
                elif flag == "99":
                    # First-in-pair on the forward strand: poly(A) shows up
                    # as a T-rich soft clip at the 5' (left) end.
                    if re.search(r"^(\d+)S",cigar):
                        sf_n = int((re.search(r"^(\d+)S",cigar)).group(1))
                        sf_seq = seq[:sf_n]
                        if sf_n >= polya_len and float(sf_seq.count("T"))/float(sf_n) >= a_ratio:
                            read1_info = sam2gpd(line.strip())
                            read1_flag = "99"
                        else:
                            read1_flag = "no_polya"
                    else:
                        read1_flag = "no_polya"
                else:
                    # Any other flag: treated as the mate of the read1 seen above.
                    if read1_flag == "83":
                        read2_info = sam2gpd(line.strip())
                        tts_both = read1_info[5]   # NOTE(review): assigned but unused
                        info_first = read1_info[2:]
                        # Fragment start comes from the mate's alignment start.
                        tss_both = read2_info[4]
                        info_first[2] = tss_both
                        info_second = read2_info[-5:]
                        info_both = info_first + info_second
                        print >>gpd_file,qname + "\t" + "\t".join(info_both)
                    elif read1_flag == "99":
                        read2_info = sam2gpd(line.strip())
                        tss_both = read1_info[4]   # NOTE(review): assigned but unused
                        info_first = read1_info[2:]
                        # Fragment end comes from the mate's alignment end.
                        tts_both = read2_info[5]
                        info_first[3] = tts_both
                        info_second = read2_info[-5:]
                        info_both = info_first + info_second
                        print >>gpd_file,qname + "\t" + "\t".join(info_both)
                    else:
                        pass
    sam_file.close()
    gpd_file.close()
def do_inputs():
    """Build the command-line interface and return the parsed arguments."""
    # Description of the 15-column GPD output layout (kept for reference).
    output_gpd_format = '''
1. read id
2. chromosome
3. strand
4. start site of alignment of fragment
5. end site of alignment of fragment
6. MAPQ of read1 (mate1)
7. Number of nucleotides that are softly-clipped by aligner (mate1)
8. exon number (mate1)
9. exon start set (mate1)
10. exon end set (mate1)
11. MAPQ of read1 (mate2)
12. Number of nucleotides that are softly-clipped by aligner (mate2)
13. exon number (mate2)
14. exon start set (mate2)
15. exon end set (mate2)'''
    cli = argparse.ArgumentParser(
        description="Function: convert sam to gpd.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    cli.add_argument('-l', '--length', type=int, default=5,
                     help="PolyA tail length")
    cli.add_argument('-r', '--ratio', type=float, default=0.8,
                     help="Ratio of A in polyA tail")
    cli.add_argument('-i', '--input', type=argparse.FileType('r'),
                     required=True, help="Input: sam file")
    cli.add_argument('-o', '--output', type=argparse.FileType('w'),
                     required=True, help="Output: gpd file")
    return cli.parse_args()
if __name__=="__main__":
    # Script entry point: parse CLI arguments, then run the conversion.
    args = do_inputs()
    main(args)
| apache-2.0 |
GovReady/govready-q | tools/simple_sar_server/sar_etl.py | 1 | 3675 | #!/usr/bin/env python3
########################################################
#
# A simple middleware to submit scan report
# assessment results
#
# Usage:
# python sar_etl.py <apikey> <sar_service_url> [-s <system_id>] [-d <deployment_uuid>]
#
# Example:
# python sar_etl.py eCXZbZwmBrtD5hgrJ8ptmJfvDA5vlDcc http://localhost:8888/ -s 132 -d f7b0d84e-397c-43de-bb1f-421afa467993
# Accessing:
# curl localhost:8888
#
#
# Optional arguments:
# -h, --help show this help message and exit
# -d deployment uuid
# -s system id
# -v, --verbose output more information
#
################################################################
# Parse command-line arguments
import click
# Web stuff
from http.server import HTTPServer, BaseHTTPRequestHandler
from urllib import request, parse
# System stuff
import os
import platform
import re
import signal
import sys
# JSON and other data handling
import json
import random
import uuid
# Default constants
GOVREADYHOST = "http://localhost:8000"  # base URL of the GovReady-Q API
SPACER = "\n====\n"                     # visual separator for console output
# Gracefully exit on control-C instead of dumping a KeyboardInterrupt traceback
signal.signal(signal.SIGINT, lambda signal_number, current_stack_frame: sys.exit(0))
# Define a fatal error handler
class FatalError(Exception):
    """Raised for unrecoverable errors that should abort the run."""
    pass
# Define a halted error handler
class HaltedError(Exception):
    """Raised when processing is deliberately halted before completion."""
    pass
# Define a non-zero return code error handler
class ReturncodeNonZeroError(Exception):
    """Raised when an external program or script exits with a non-zero code.

    Keeps a reference to the completed process object so callers can
    inspect its output and return code.
    """

    _DEFAULT_MSG = "An external program or script returned an error."

    def __init__(self, completed_process, msg=None):
        # Fall back to the generic message when the caller gave none.
        super(ReturncodeNonZeroError, self).__init__(
            self._DEFAULT_MSG if msg is None else msg)
        self.completed_process = completed_process
# Commandline arguments
@click.command()
@click.argument('apikey', default=None)
@click.argument('sar_url', default=None)
@click.option('-s', default=None)
@click.option('-d', default=None)
def main(apikey, sar_url, s, d):
    """Fetch a SAR from the SAR service and post it to the GovReady-Q API.

    APIKEY is the GovReady-Q API key; SAR_URL is the SAR service endpoint.
    -s passes a system id and -d a deployment UUID to the SAR service.
    """
    # Set system_id, deployment_uuid from the short options
    system_id = s
    deployment_uuid = d
    # deployment_id = 226

    # Build query string for the SAR service
    url_query = f"?system_id={system_id}&deployment_uuid={deployment_uuid}"

    # Get SAR from SAR Service
    print(SPACER)
    print(f"Retrieving SAR from service: {sar_url}{url_query}")
    handler = request.urlopen(f"{sar_url}{url_query}")
    sar = json.loads(handler.read().decode('utf-8'))
    # print(sar)

    # Prefer the system id reported inside the SAR's own metadata
    system_id = sar["metadata"]["system_id"]

    if deployment_uuid is not None:
        d_uuid_uuid = uuid.UUID(f'urn:uuid:{deployment_uuid}')
    else:
        d_uuid_uuid = None

    # Submit sar data to GovReady-q API
    data = {
        "system_id": system_id,
        "deployment_uuid": d_uuid_uuid,
        "sar_json": json.dumps(sar)
    }
    data = bytes(parse.urlencode(data).encode())

    # POST retrieved SAR data to GovReady-Q via API
    """
    curl --header "Authorization: <api_key>" \
    -F "name=test_sar_api" \
    -F "system_id=86" \
    -F "deployment_id=23" \
    -F "data=@controls/data/test_data/test_sar1.json" \
    localhost:8000/api/v1/systems/86/assessment/new
    """
    # Prepare headers
    headers = {
        "Authorization": f"{apikey}"
    }
    # Set GovReady URL
    gr_api_url = f"{GOVREADYHOST}/api/v1/systems/{system_id}/assessment/new"
    print(SPACER)
    print(f"Posting retrieved SAR to: {gr_api_url}")
    # Post to GovReady
    req = request.Request(gr_api_url, data=data, headers=headers, method="POST")
    response = request.urlopen(req)
    # BUG FIX: the response body was previously read twice; HTTP response
    # streams are single-use, so the second read() always returned b'' and
    # the printed body was empty. Read once and print that.
    body = response.read()
    print(SPACER)
    print(body)
if __name__ == "__main__":
    # Entry point: click handles argument parsing and invokes main().
    main()
| gpl-3.0 |
huggingface/transformers | scripts/fsmt/fsmt-make-tiny-model.py | 2 | 2179 | #!/usr/bin/env python
# coding: utf-8
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script creates a super tiny model that is useful inside tests, when we just want to test that
# the machinery works, without needing to the check the quality of the outcomes.
#
# This version creates a tiny model through reduction of a normal pre-trained model, but keeping the
# full vocab, merges file, and thus also resulting in a larger model due to a large vocab size.
# This gives ~3MB in total for all files.
#
# If you want a 50 times smaller than this see `fsmt-make-super-tiny-model.py`, which is slightly more complicated
#
#
# It will be used then as "stas/tiny-wmt19-en-de"
# Build
from transformers import FSMTTokenizer, FSMTConfig, FSMTForConditionalGeneration

# Full-size checkpoint used only as the source of config/vocab files.
mname = "facebook/wmt19-en-de"

tokenizer = FSMTTokenizer.from_pretrained(mname)
# get the correct vocab sizes, etc. from the master model
config = FSMTConfig.from_pretrained(mname)
# Shrink every architectural dimension to the minimum so the resulting
# model is tiny while the vocab/merges files stay complete.
config.update(dict(
    d_model=4,
    encoder_layers=1, decoder_layers=1,
    encoder_ffn_dim=4, decoder_ffn_dim=4,
    encoder_attention_heads=1, decoder_attention_heads=1))

tiny_model = FSMTForConditionalGeneration(config)
print(f"num of params {tiny_model.num_parameters()}")

# Test: a single forward pass to confirm the reduced model runs end to end.
batch = tokenizer(["Making tiny model"], return_tensors="pt")
outputs = tiny_model(**batch)
print("test output:", len(outputs.logits[0]))

# Save model and tokenizer files to a local directory.
mname_tiny = "tiny-wmt19-en-de"
tiny_model.half()  # makes it smaller
tiny_model.save_pretrained(mname_tiny)
tokenizer.save_pretrained(mname_tiny)

print(f"Generated {mname_tiny}")

# Upload
# transformers-cli upload tiny-wmt19-en-de
| apache-2.0 |
alkyl1978/gnuradio | gr-blocks/python/blocks/qa_throttle.py | 57 | 1201 | #!/usr/bin/env python
#
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, blocks
class test_throttle(gr_unittest.TestCase):
    """Smoke test for the GNU Radio throttle block."""

    def setUp(self):
        # Fresh flowgraph container for each test.
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def test_01(self):
        # Test that we can make the block
        op = blocks.throttle(gr.sizeof_gr_complex, 1)

if __name__ == '__main__':
    gr_unittest.run(test_throttle, "test_throttle.xml")
| gpl-3.0 |
mread/buck | third-party/py/pathlib/test_pathlib.py | 1 | 77169 | import collections
import io
import os
import errno
import pathlib
import pickle
import shutil
import socket
import stat
import sys
import tempfile
import unittest
from contextlib import contextmanager
if sys.version_info < (2, 7):
try:
import unittest2 as unittest
except ImportError:
raise ImportError("unittest2 is required for tests on pre-2.7")
try:
from test import support
except ImportError:
from test import test_support as support
TESTFN = support.TESTFN
try:
import grp, pwd
except ImportError:
grp = pwd = None
# Backported from 3.4
def fs_is_case_insensitive(directory):
    """Detects if the file system for the specified directory is case-insensitive.

    Creates a temporary file in *directory*, then checks whether the
    same file is reachable under the opposite-case name.

    :param directory: directory on the filesystem to probe
    :return: True if case-insensitive, False otherwise
    """
    base_fp, base_path = tempfile.mkstemp(dir=directory)
    case_path = base_path.upper()
    if case_path == base_path:
        case_path = base_path.lower()
    try:
        return os.path.samefile(base_path, case_path)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
        return False
    finally:
        # BUG FIX: mkstemp returns an *open* file descriptor which was
        # previously leaked; close it along with removing the probe file.
        os.close(base_fp)
        os.unlink(base_path)
support.fs_is_case_insensitive = fs_is_case_insensitive
class _BaseFlavourTest(object):
    """Shared parse_parts checks for both POSIX and Windows path flavours.

    Subclasses set a ``flavour`` class attribute; ``_check_parse_parts``
    verifies a parse against an expected (drive, root, parts) triple.
    """

    def _check_parse_parts(self, arg, expected):
        # Run parse_parts with '/' mapped to the flavour's separator(s)
        # and compare against the expected (drive, root, parts) triple.
        f = self.flavour.parse_parts
        sep = self.flavour.sep
        altsep = self.flavour.altsep
        actual = f([x.replace('/', sep) for x in arg])
        self.assertEqual(actual, expected)
        if altsep:
            # Same input with the alternate separator must parse identically.
            actual = f([x.replace('/', altsep) for x in arg])
            self.assertEqual(actual, expected)
        drv, root, parts = actual
        # neither bytes (py3) nor unicode (py2)
        self.assertIsInstance(drv, str)
        self.assertIsInstance(root, str)
        for p in parts:
            self.assertIsInstance(p, str)

    def test_parse_parts_common(self):
        """Parsing rules shared by every flavour."""
        check = self._check_parse_parts
        sep = self.flavour.sep
        # Unanchored parts
        check([], ('', '', []))
        check(['a'], ('', '', ['a']))
        check(['a/'], ('', '', ['a']))
        check(['a', 'b'], ('', '', ['a', 'b']))
        # Expansion
        check(['a/b'], ('', '', ['a', 'b']))
        check(['a/b/'], ('', '', ['a', 'b']))
        check(['a', 'b/c', 'd'], ('', '', ['a', 'b', 'c', 'd']))
        # Collapsing and stripping excess slashes
        check(['a', 'b//c', 'd'], ('', '', ['a', 'b', 'c', 'd']))
        check(['a', 'b/c/', 'd'], ('', '', ['a', 'b', 'c', 'd']))
        # Eliminating standalone dots
        check(['.'], ('', '', []))
        check(['.', '.', 'b'], ('', '', ['b']))
        check(['a', '.', 'b'], ('', '', ['a', 'b']))
        check(['a', '.', '.'], ('', '', ['a']))
        # The first part is anchored
        check(['/a/b'], ('', sep, [sep, 'a', 'b']))
        check(['/a', 'b'], ('', sep, [sep, 'a', 'b']))
        check(['/a/', 'b'], ('', sep, [sep, 'a', 'b']))
        # Ignoring parts before an anchored part
        check(['a', '/b', 'c'], ('', sep, [sep, 'b', 'c']))
        check(['a', '/b', '/c'], ('', sep, [sep, 'c']))
class PosixFlavourTest(_BaseFlavourTest, unittest.TestCase):
    """Parsing tests specific to the POSIX path flavour."""
    flavour = pathlib._posix_flavour

    def test_parse_parts(self):
        check = self._check_parse_parts
        # Collapsing of excess leading slashes, except for the double-slash
        # special case.
        check(['//a', 'b'], ('', '//', ['//', 'a', 'b']))
        check(['///a', 'b'], ('', '/', ['/', 'a', 'b']))
        check(['////a', 'b'], ('', '/', ['/', 'a', 'b']))
        # Paths which look like NT paths aren't treated specially
        check(['c:a'], ('', '', ['c:a']))
        check(['c:\\a'], ('', '', ['c:\\a']))
        check(['\\a'], ('', '', ['\\a']))

    def test_splitroot(self):
        """splitroot() returns a (drive, root, rest) triple."""
        f = self.flavour.splitroot
        self.assertEqual(f(''), ('', '', ''))
        self.assertEqual(f('a'), ('', '', 'a'))
        self.assertEqual(f('a/b'), ('', '', 'a/b'))
        self.assertEqual(f('a/b/'), ('', '', 'a/b/'))
        self.assertEqual(f('/a'), ('', '/', 'a'))
        self.assertEqual(f('/a/b'), ('', '/', 'a/b'))
        self.assertEqual(f('/a/b/'), ('', '/', 'a/b/'))
        # The root is collapsed when there are redundant slashes
        # except when there are exactly two leading slashes, which
        # is a special case in POSIX.
        self.assertEqual(f('//a'), ('', '//', 'a'))
        self.assertEqual(f('///a'), ('', '/', 'a'))
        self.assertEqual(f('///a/b'), ('', '/', 'a/b'))
        # Paths which look like NT paths aren't treated specially
        self.assertEqual(f('c:/a/b'), ('', '', 'c:/a/b'))
        self.assertEqual(f('\\/a/b'), ('', '', '\\/a/b'))
        self.assertEqual(f('\\a\\b'), ('', '', '\\a\\b'))
class NTFlavourTest(_BaseFlavourTest, unittest.TestCase):
    """Parsing tests specific to the Windows (NT) path flavour."""
    flavour = pathlib._windows_flavour

    def test_parse_parts(self):
        check = self._check_parse_parts
        # First part is anchored
        check(['c:'], ('c:', '', ['c:']))
        check(['c:\\'], ('c:', '\\', ['c:\\']))
        check(['\\'], ('', '\\', ['\\']))
        check(['c:a'], ('c:', '', ['c:', 'a']))
        check(['c:\\a'], ('c:', '\\', ['c:\\', 'a']))
        check(['\\a'], ('', '\\', ['\\', 'a']))
        # UNC paths
        check(['\\\\a\\b'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
        check(['\\\\a\\b\\'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
        check(['\\\\a\\b\\c'], ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c']))
        # Second part is anchored, so that the first part is ignored
        check(['a', 'Z:b', 'c'], ('Z:', '', ['Z:', 'b', 'c']))
        check(['a', 'Z:\\b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
        check(['a', '\\b', 'c'], ('', '\\', ['\\', 'b', 'c']))
        # UNC paths
        check(['a', '\\\\b\\c', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
        # Collapsing and stripping excess slashes
        check(['a', 'Z:\\\\b\\\\c\\', 'd\\'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd']))
        # UNC paths
        check(['a', '\\\\b\\c\\\\', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
        # Extended paths
        check(['\\\\?\\c:\\'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\']))
        check(['\\\\?\\c:\\a'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a']))
        # Extended UNC paths (format is "\\?\UNC\server\share")
        check(['\\\\?\\UNC\\b\\c'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\']))
        check(['\\\\?\\UNC\\b\\c\\d'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd']))

    def test_splitroot(self):
        """splitroot() returns a (drive, root, rest) triple."""
        f = self.flavour.splitroot
        self.assertEqual(f(''), ('', '', ''))
        self.assertEqual(f('a'), ('', '', 'a'))
        self.assertEqual(f('a\\b'), ('', '', 'a\\b'))
        self.assertEqual(f('\\a'), ('', '\\', 'a'))
        self.assertEqual(f('\\a\\b'), ('', '\\', 'a\\b'))
        self.assertEqual(f('c:a\\b'), ('c:', '', 'a\\b'))
        self.assertEqual(f('c:\\a\\b'), ('c:', '\\', 'a\\b'))
        # Redundant slashes in the root are collapsed
        self.assertEqual(f('\\\\a'), ('', '\\', 'a'))
        self.assertEqual(f('\\\\\\a/b'), ('', '\\', 'a/b'))
        self.assertEqual(f('c:\\\\a'), ('c:', '\\', 'a'))
        self.assertEqual(f('c:\\\\\\a/b'), ('c:', '\\', 'a/b'))
        # Valid UNC paths
        self.assertEqual(f('\\\\a\\b'), ('\\\\a\\b', '\\', ''))
        self.assertEqual(f('\\\\a\\b\\'), ('\\\\a\\b', '\\', ''))
        self.assertEqual(f('\\\\a\\b\\c\\d'), ('\\\\a\\b', '\\', 'c\\d'))
        # These are non-UNC paths (according to ntpath.py and test_ntpath)
        # However, command.com says such paths are invalid, so it's
        # difficult to know what the right semantics are
        self.assertEqual(f('\\\\\\a\\b'), ('', '\\', 'a\\b'))
        self.assertEqual(f('\\\\a'), ('', '\\', 'a'))
#
# Tests for the pure classes
#
# Decorator skipping tests that require os.fsencode (added in Python 3.2).
with_fsencode = unittest.skipIf(sys.version_info < (3, 2),
                                'os.fsencode has been introduced in version 3.2')
class _BasePurePathTest(object):
# keys are canonical paths, values are list of tuples of arguments
# supposed to produce equal paths
equivalences = {
'a/b': [
('a', 'b'), ('a/', 'b'), ('a', 'b/'), ('a/', 'b/'),
('a/b/',), ('a//b',), ('a//b//',),
# empty components get removed
('', 'a', 'b'), ('a', '', 'b'), ('a', 'b', ''),
],
'/b/c/d': [
('a', '/b/c', 'd'), ('a', '///b//c', 'd/'),
('/a', '/b/c', 'd'),
# empty components get removed
('/', 'b', '', 'c/d'), ('/', '', 'b/c/d'), ('', '/b/c/d'),
],
}
def setUp(self):
p = self.cls('a')
self.flavour = p._flavour
self.sep = self.flavour.sep
self.altsep = self.flavour.altsep
def test_constructor_common(self):
P = self.cls
p = P('a')
self.assertIsInstance(p, P)
P('a', 'b', 'c')
P('/a', 'b', 'c')
P('a/b/c')
P('/a/b/c')
self.assertEqual(P(P('a')), P('a'))
self.assertEqual(P(P('a'), 'b'), P('a/b'))
self.assertEqual(P(P('a'), P('b')), P('a/b'))
def test_join_common(self):
P = self.cls
p = P('a/b')
pp = p.joinpath('c')
self.assertEqual(pp, P('a/b/c'))
self.assertIs(type(pp), type(p))
pp = p.joinpath('c', 'd')
self.assertEqual(pp, P('a/b/c/d'))
pp = p.joinpath(P('c'))
self.assertEqual(pp, P('a/b/c'))
pp = p.joinpath('/c')
self.assertEqual(pp, P('/c'))
def test_div_common(self):
# Basically the same as joinpath()
P = self.cls
p = P('a/b')
pp = p / 'c'
self.assertEqual(pp, P('a/b/c'))
self.assertIs(type(pp), type(p))
pp = p / 'c/d'
self.assertEqual(pp, P('a/b/c/d'))
pp = p / 'c' / 'd'
self.assertEqual(pp, P('a/b/c/d'))
pp = 'c' / p / 'd'
self.assertEqual(pp, P('c/a/b/d'))
pp = p / P('c')
self.assertEqual(pp, P('a/b/c'))
pp = p/ '/c'
self.assertEqual(pp, P('/c'))
def _check_str(self, expected, args):
p = self.cls(*args)
s = str(p)
self.assertEqual(s, expected.replace('/', self.sep))
self.assertIsInstance(s, str)
def test_str_common(self):
# Canonicalized paths roundtrip
for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
self._check_str(pathstr, (pathstr,))
# Special case for the empty path
self._check_str('.', ('',))
# Other tests for str() are in test_equivalences()
def test_as_posix_common(self):
P = self.cls
for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
self.assertEqual(P(pathstr).as_posix(), pathstr)
# Other tests for as_posix() are in test_equivalences()
@with_fsencode
def test_as_bytes_common(self):
sep = os.fsencode(self.sep)
P = self.cls
self.assertEqual(bytes(P('a/b')), b'a' + sep + b'b')
def test_as_uri_common(self):
P = self.cls
with self.assertRaises(ValueError):
P('a').as_uri()
with self.assertRaises(ValueError):
P().as_uri()
def test_repr_common(self):
for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
p = self.cls(pathstr)
clsname = p.__class__.__name__
r = repr(p)
self.assertIsInstance(r, str)
# The repr() is in the form ClassName("forward-slashes path")
self.assertTrue(r.startswith(clsname + '('), r)
self.assertTrue(r.endswith(')'), r)
inner = r[len(clsname) + 1 : -1]
self.assertEqual(eval(inner), p.as_posix())
# The repr() roundtrips
q = eval(r, pathlib.__dict__)
self.assertIs(q.__class__, p.__class__)
self.assertEqual(q, p)
self.assertEqual(repr(q), r)
def test_eq_common(self):
P = self.cls
self.assertEqual(P('a/b'), P('a/b'))
self.assertEqual(P('a/b'), P('a', 'b'))
self.assertNotEqual(P('a/b'), P('a'))
self.assertNotEqual(P('a/b'), P('/a/b'))
self.assertNotEqual(P('a/b'), P())
self.assertNotEqual(P('/a/b'), P('/'))
self.assertNotEqual(P(), P('/'))
self.assertNotEqual(P(), "")
self.assertNotEqual(P(), {})
self.assertNotEqual(P(), int)
def test_match_common(self):
P = self.cls
self.assertRaises(ValueError, P('a').match, '')
self.assertRaises(ValueError, P('a').match, '.')
# Simple relative pattern
self.assertTrue(P('b.py').match('b.py'))
self.assertTrue(P('a/b.py').match('b.py'))
self.assertTrue(P('/a/b.py').match('b.py'))
self.assertFalse(P('a.py').match('b.py'))
self.assertFalse(P('b/py').match('b.py'))
self.assertFalse(P('/a.py').match('b.py'))
self.assertFalse(P('b.py/c').match('b.py'))
# Wilcard relative pattern
self.assertTrue(P('b.py').match('*.py'))
self.assertTrue(P('a/b.py').match('*.py'))
self.assertTrue(P('/a/b.py').match('*.py'))
self.assertFalse(P('b.pyc').match('*.py'))
self.assertFalse(P('b./py').match('*.py'))
self.assertFalse(P('b.py/c').match('*.py'))
# Multi-part relative pattern
self.assertTrue(P('ab/c.py').match('a*/*.py'))
self.assertTrue(P('/d/ab/c.py').match('a*/*.py'))
self.assertFalse(P('a.py').match('a*/*.py'))
self.assertFalse(P('/dab/c.py').match('a*/*.py'))
self.assertFalse(P('ab/c.py/d').match('a*/*.py'))
# Absolute pattern
self.assertTrue(P('/b.py').match('/*.py'))
self.assertFalse(P('b.py').match('/*.py'))
self.assertFalse(P('a/b.py').match('/*.py'))
self.assertFalse(P('/a/b.py').match('/*.py'))
# Multi-part absolute pattern
self.assertTrue(P('/a/b.py').match('/a/*.py'))
self.assertFalse(P('/ab.py').match('/a/*.py'))
self.assertFalse(P('/a/b/c.py').match('/a/*.py'))
# Double-star wildcard absolute pattern
self.assertTrue(P('/a.py').match('**/*.py'))
self.assertTrue(P('/a/b.py').match('**'))
self.assertTrue(P('/a/b.py').match('**/*'))
self.assertTrue(P('/a/b.py').match('**/*.py'))
self.assertTrue(P('/a/b/c.py').match('**/*.py'))
self.assertTrue(P('/a/b/c/d.py').match('**/*.py'))
self.assertFalse(P('/a/b/c/d.spam').match('**/*.py'))
# Double-star wildcard relative pattern
self.assertTrue(P('a.py').match('**/*.py'))
self.assertTrue(P('a/b.py').match('**'))
self.assertTrue(P('a/b.py').match('**/*'))
self.assertTrue(P('a/b.py').match('**/*.py'))
self.assertTrue(P('a/b/c.py').match('**/*py'))
self.assertTrue(P('a/b/c/d.py').match('**/*py'))
self.assertFalse(P('a/b/c/d.spam').match('**/*.py'))
# Double-star wildcard absolute pattern with prefix
self.assertTrue(P('/a/b.py').match('/a/**'))
self.assertTrue(P('/a/b.py').match('/a/**/*'))
self.assertTrue(P('/a/b.py').match('/a/**/*.py'))
self.assertTrue(P('/a/b/c.py').match('/a/**/*py'))
self.assertTrue(P('/a/b/c/d.py').match('/a/**/*py'))
# Failed lookahead absolute pattern with prefix
self.assertTrue(P('/a/b/c/b/c').match('/a/b/**'))
self.assertFalse(P('/a/spam/c/b/c').match('/a/b/**'))
# Double-star wildcard relative pattern with prefix
self.assertTrue(P('a/b.py').match('a/**'))
self.assertTrue(P('a/b.py').match('a/**/*'))
self.assertTrue(P('a/b.py').match('a/**/*.py'))
self.assertTrue(P('a/b/c.py').match('a/**/*py'))
self.assertTrue(P('a/b/c/d.py').match('a/**/*py'))
self.assertFalse(P('a/b/c/d.spam').match('a/**/*py'))
self.assertFalse(P('a/b/c/d.py').match('e/**'))
# Failed lookahead relative pattern with prefix
self.assertTrue(P('a/b/c/b/c').match('a/b/**'))
self.assertFalse(P('a/spam/c/b/c').match('a/b/**'))
# Double-star wildcard pattern with suffix
self.assertTrue(P('/c/a/c/a/b').match('**/a/b'))
self.assertTrue(P('c/a/c/a/b').match('**/a/b'))
self.assertFalse(P('c/a/c/spam/b').match('**/a/b'))
# Double-star with multiple path components
self.assertTrue(P('a/b/c/food/e.py').match('**/b/*/foo*/*.py'))
self.assertTrue(P('a/b/c/d.py').match('**/b/**/*.py'))
# Double-star with single path component
self.assertTrue(P('foo').match('**/*'))
self.assertTrue(P('foo').match('**/**'))
self.assertTrue(P('foo').match('**/**/**'))
# Match entire relative path
self.assertTrue(P('foo/a.py').match('foo/*.py', match_entire=True))
self.assertFalse(P('bar/foo/a.py').match('foo/*.py', match_entire=True))
def test_ordering_common(self):
# Ordering is tuple-alike
def assertLess(a, b):
self.assertLess(a, b)
self.assertGreater(b, a)
P = self.cls
a = P('a')
b = P('a/b')
c = P('abc')
d = P('b')
assertLess(a, b)
assertLess(a, c)
assertLess(a, d)
assertLess(b, c)
assertLess(c, d)
P = self.cls
a = P('/a')
b = P('/a/b')
c = P('/abc')
d = P('/b')
assertLess(a, b)
assertLess(a, c)
assertLess(a, d)
assertLess(b, c)
assertLess(c, d)
if sys.version_info > (3,):
with self.assertRaises(TypeError):
P() < {}
else:
P() < {}
def test_parts_common(self):
# `parts` returns a tuple
sep = self.sep
P = self.cls
p = P('a/b')
parts = p.parts
self.assertEqual(parts, ('a', 'b'))
for part in parts:
self.assertIsInstance(part, str)
# The object gets reused
self.assertIs(parts, p.parts)
# When the path is absolute, the anchor is a separate part
p = P('/a/b')
parts = p.parts
self.assertEqual(parts, (sep, 'a', 'b'))
def test_equivalences(self):
for k, tuples in self.equivalences.items():
canon = k.replace('/', self.sep)
posix = k.replace(self.sep, '/')
if canon != posix:
tuples = tuples + [
tuple(part.replace('/', self.sep) for part in t)
for t in tuples
]
tuples.append((posix, ))
pcanon = self.cls(canon)
for t in tuples:
p = self.cls(*t)
self.assertEqual(p, pcanon, "failed with args {0}".format(t))
self.assertEqual(hash(p), hash(pcanon))
self.assertEqual(str(p), canon)
self.assertEqual(p.as_posix(), posix)
def test_parent_common(self):
# Relative
P = self.cls
p = P('a/b/c')
self.assertEqual(p.parent, P('a/b'))
self.assertEqual(p.parent.parent, P('a'))
self.assertEqual(p.parent.parent.parent, P())
self.assertEqual(p.parent.parent.parent.parent, P())
# Anchored
p = P('/a/b/c')
self.assertEqual(p.parent, P('/a/b'))
self.assertEqual(p.parent.parent, P('/a'))
self.assertEqual(p.parent.parent.parent, P('/'))
self.assertEqual(p.parent.parent.parent.parent, P('/'))
def test_parents_common(self):
# Relative
P = self.cls
p = P('a/b/c')
par = p.parents
self.assertEqual(len(par), 3)
self.assertEqual(par[0], P('a/b'))
self.assertEqual(par[1], P('a'))
self.assertEqual(par[2], P('.'))
self.assertEqual(list(par), [P('a/b'), P('a'), P('.')])
with self.assertRaises(IndexError):
par[-1]
with self.assertRaises(IndexError):
par[3]
with self.assertRaises(TypeError):
par[0] = p
# Anchored
p = P('/a/b/c')
par = p.parents
self.assertEqual(len(par), 3)
self.assertEqual(par[0], P('/a/b'))
self.assertEqual(par[1], P('/a'))
self.assertEqual(par[2], P('/'))
self.assertEqual(list(par), [P('/a/b'), P('/a'), P('/')])
with self.assertRaises(IndexError):
par[3]
def test_drive_common(self):
P = self.cls
self.assertEqual(P('a/b').drive, '')
self.assertEqual(P('/a/b').drive, '')
self.assertEqual(P('').drive, '')
def test_root_common(self):
P = self.cls
sep = self.sep
self.assertEqual(P('').root, '')
self.assertEqual(P('a/b').root, '')
self.assertEqual(P('/').root, sep)
self.assertEqual(P('/a/b').root, sep)
def test_anchor_common(self):
P = self.cls
sep = self.sep
self.assertEqual(P('').anchor, '')
self.assertEqual(P('a/b').anchor, '')
self.assertEqual(P('/').anchor, sep)
self.assertEqual(P('/a/b').anchor, sep)
def test_name_common(self):
P = self.cls
self.assertEqual(P('').name, '')
self.assertEqual(P('.').name, '')
self.assertEqual(P('/').name, '')
self.assertEqual(P('a/b').name, 'b')
self.assertEqual(P('/a/b').name, 'b')
self.assertEqual(P('/a/b/.').name, 'b')
self.assertEqual(P('a/b.py').name, 'b.py')
self.assertEqual(P('/a/b.py').name, 'b.py')
def test_suffix_common(self):
P = self.cls
self.assertEqual(P('').suffix, '')
self.assertEqual(P('.').suffix, '')
self.assertEqual(P('..').suffix, '')
self.assertEqual(P('/').suffix, '')
self.assertEqual(P('a/b').suffix, '')
self.assertEqual(P('/a/b').suffix, '')
self.assertEqual(P('/a/b/.').suffix, '')
self.assertEqual(P('a/b.py').suffix, '.py')
self.assertEqual(P('/a/b.py').suffix, '.py')
self.assertEqual(P('a/.hgrc').suffix, '')
self.assertEqual(P('/a/.hgrc').suffix, '')
self.assertEqual(P('a/.hg.rc').suffix, '.rc')
self.assertEqual(P('/a/.hg.rc').suffix, '.rc')
self.assertEqual(P('a/b.tar.gz').suffix, '.gz')
self.assertEqual(P('/a/b.tar.gz').suffix, '.gz')
self.assertEqual(P('a/Some name. Ending with a dot.').suffix, '')
self.assertEqual(P('/a/Some name. Ending with a dot.').suffix, '')
def test_suffixes_common(self):
P = self.cls
self.assertEqual(P('').suffixes, [])
self.assertEqual(P('.').suffixes, [])
self.assertEqual(P('/').suffixes, [])
self.assertEqual(P('a/b').suffixes, [])
self.assertEqual(P('/a/b').suffixes, [])
self.assertEqual(P('/a/b/.').suffixes, [])
self.assertEqual(P('a/b.py').suffixes, ['.py'])
self.assertEqual(P('/a/b.py').suffixes, ['.py'])
self.assertEqual(P('a/.hgrc').suffixes, [])
self.assertEqual(P('/a/.hgrc').suffixes, [])
self.assertEqual(P('a/.hg.rc').suffixes, ['.rc'])
self.assertEqual(P('/a/.hg.rc').suffixes, ['.rc'])
self.assertEqual(P('a/b.tar.gz').suffixes, ['.tar', '.gz'])
self.assertEqual(P('/a/b.tar.gz').suffixes, ['.tar', '.gz'])
self.assertEqual(P('a/Some name. Ending with a dot.').suffixes, [])
self.assertEqual(P('/a/Some name. Ending with a dot.').suffixes, [])
def test_stem_common(self):
P = self.cls
self.assertEqual(P('').stem, '')
self.assertEqual(P('.').stem, '')
self.assertEqual(P('..').stem, '..')
self.assertEqual(P('/').stem, '')
self.assertEqual(P('a/b').stem, 'b')
self.assertEqual(P('a/b.py').stem, 'b')
self.assertEqual(P('a/.hgrc').stem, '.hgrc')
self.assertEqual(P('a/.hg.rc').stem, '.hg')
self.assertEqual(P('a/b.tar.gz').stem, 'b.tar')
self.assertEqual(P('a/Some name. Ending with a dot.').stem,
'Some name. Ending with a dot.')
def test_with_name_common(self):
P = self.cls
self.assertEqual(P('a/b').with_name('d.xml'), P('a/d.xml'))
self.assertEqual(P('/a/b').with_name('d.xml'), P('/a/d.xml'))
self.assertEqual(P('a/b.py').with_name('d.xml'), P('a/d.xml'))
self.assertEqual(P('/a/b.py').with_name('d.xml'), P('/a/d.xml'))
self.assertEqual(P('a/Dot ending.').with_name('d.xml'), P('a/d.xml'))
self.assertEqual(P('/a/Dot ending.').with_name('d.xml'), P('/a/d.xml'))
self.assertRaises(ValueError, P('').with_name, 'd.xml')
self.assertRaises(ValueError, P('.').with_name, 'd.xml')
self.assertRaises(ValueError, P('/').with_name, 'd.xml')
def test_with_suffix_common(self):
P = self.cls
self.assertEqual(P('a/b').with_suffix('.gz'), P('a/b.gz'))
self.assertEqual(P('/a/b').with_suffix('.gz'), P('/a/b.gz'))
self.assertEqual(P('a/b.py').with_suffix('.gz'), P('a/b.gz'))
self.assertEqual(P('/a/b.py').with_suffix('.gz'), P('/a/b.gz'))
# Path doesn't have a "filename" component
self.assertRaises(ValueError, P('').with_suffix, '.gz')
self.assertRaises(ValueError, P('.').with_suffix, '.gz')
self.assertRaises(ValueError, P('/').with_suffix, '.gz')
# Invalid suffix
self.assertRaises(ValueError, P('a/b').with_suffix, 'gz')
self.assertRaises(ValueError, P('a/b').with_suffix, '/')
self.assertRaises(ValueError, P('a/b').with_suffix, '/.gz')
self.assertRaises(ValueError, P('a/b').with_suffix, 'c/d')
self.assertRaises(ValueError, P('a/b').with_suffix, '.c/.d')
    def test_relative_to_common(self):
        """relative_to() strips an ancestor prefix from the path.

        Both Path objects and plain strings are accepted (including several
        positional segments); paths that are not a prefix raise ValueError.
        """
        P = self.cls
        p = P('a/b')
        self.assertRaises(TypeError, p.relative_to)
        if sys.version_info > (3,):
            # bytes arguments are only rejected on Python 3.
            self.assertRaises(TypeError, p.relative_to, b'a')
        self.assertEqual(p.relative_to(P()), P('a/b'))
        self.assertEqual(p.relative_to(''), P('a/b'))
        self.assertEqual(p.relative_to(P('a')), P('b'))
        self.assertEqual(p.relative_to('a'), P('b'))
        self.assertEqual(p.relative_to('a/'), P('b'))
        self.assertEqual(p.relative_to(P('a/b')), P())
        self.assertEqual(p.relative_to('a/b'), P())
        # With several args
        self.assertEqual(p.relative_to('a', 'b'), P())
        # Unrelated paths
        self.assertRaises(ValueError, p.relative_to, P('c'))
        self.assertRaises(ValueError, p.relative_to, P('a/b/c'))
        self.assertRaises(ValueError, p.relative_to, P('a/c'))
        self.assertRaises(ValueError, p.relative_to, P('/a'))
        # Same expectations for an anchored path.
        p = P('/a/b')
        self.assertEqual(p.relative_to(P('/')), P('a/b'))
        self.assertEqual(p.relative_to('/'), P('a/b'))
        self.assertEqual(p.relative_to(P('/a')), P('b'))
        self.assertEqual(p.relative_to('/a'), P('b'))
        self.assertEqual(p.relative_to('/a/'), P('b'))
        self.assertEqual(p.relative_to(P('/a/b')), P())
        self.assertEqual(p.relative_to('/a/b'), P())
        # Unrelated paths
        self.assertRaises(ValueError, p.relative_to, P('/c'))
        self.assertRaises(ValueError, p.relative_to, P('/a/b/c'))
        self.assertRaises(ValueError, p.relative_to, P('/a/c'))
        self.assertRaises(ValueError, p.relative_to, P())
        self.assertRaises(ValueError, p.relative_to, '')
        self.assertRaises(ValueError, p.relative_to, P('a'))
def test_pickling_common(self):
P = self.cls
p = P('/a/b')
for proto in range(0, pickle.HIGHEST_PROTOCOL + 1):
dumped = pickle.dumps(p, proto)
pp = pickle.loads(dumped)
self.assertIs(pp.__class__, p.__class__)
self.assertEqual(pp, p)
self.assertEqual(hash(pp), hash(p))
self.assertEqual(str(pp), str(p))
class PurePosixPathTest(_BasePurePathTest, unittest.TestCase):
    """POSIX-flavour specialisations and extras for the pure-path tests."""
    cls = pathlib.PurePosixPath
    def test_root(self):
        P = self.cls
        self.assertEqual(P('/a/b').root, '/')
        self.assertEqual(P('///a/b').root, '/')
        # POSIX special case for two leading slashes
        self.assertEqual(P('//a/b').root, '//')
    def test_eq(self):
        # POSIX comparisons are case-sensitive; '//a' keeps its double root.
        P = self.cls
        self.assertNotEqual(P('a/b'), P('A/b'))
        self.assertEqual(P('/a'), P('///a'))
        self.assertNotEqual(P('/a'), P('//a'))
    def test_as_uri(self):
        P = self.cls
        self.assertEqual(P('/').as_uri(), 'file:///')
        self.assertEqual(P('/a/b.c').as_uri(), 'file:///a/b.c')
        # Reserved characters are percent-quoted.
        self.assertEqual(P('/a/b%#c').as_uri(), 'file:///a/b%25%23c')
    @with_fsencode
    def test_as_uri_non_ascii(self):
        from urllib.parse import quote_from_bytes
        P = self.cls
        try:
            os.fsencode('\xe9')
        except UnicodeEncodeError:
            self.skipTest("\\xe9 cannot be encoded to the filesystem encoding")
        self.assertEqual(P('/a/b\xe9').as_uri(),
                         'file:///a/b' + quote_from_bytes(os.fsencode('\xe9')))
    def test_match(self):
        # Matching is case-sensitive on POSIX.
        P = self.cls
        self.assertFalse(P('A.py').match('a.PY'))
    def test_is_absolute(self):
        P = self.cls
        self.assertFalse(P().is_absolute())
        self.assertFalse(P('a').is_absolute())
        self.assertFalse(P('a/b/').is_absolute())
        self.assertTrue(P('/').is_absolute())
        self.assertTrue(P('/a').is_absolute())
        self.assertTrue(P('/a/b/').is_absolute())
        self.assertTrue(P('//a').is_absolute())
        self.assertTrue(P('//a/b').is_absolute())
    def test_is_reserved(self):
        # There are no reserved filenames on POSIX.
        P = self.cls
        self.assertIs(False, P('').is_reserved())
        self.assertIs(False, P('/').is_reserved())
        self.assertIs(False, P('/foo/bar').is_reserved())
        self.assertIs(False, P('/dev/con/PRN/NUL').is_reserved())
    def test_join(self):
        P = self.cls
        p = P('//a')
        pp = p.joinpath('b')
        self.assertEqual(pp, P('//a/b'))
        # An anchored right-hand operand replaces the left path entirely.
        pp = P('/a').joinpath('//c')
        self.assertEqual(pp, P('//c'))
        pp = P('//a').joinpath('/c')
        self.assertEqual(pp, P('/c'))
    def test_div(self):
        # Basically the same as joinpath()
        P = self.cls
        p = P('//a')
        pp = p / 'b'
        self.assertEqual(pp, P('//a/b'))
        pp = P('/a') / '//c'
        self.assertEqual(pp, P('//c'))
        pp = P('//a') / '/c'
        self.assertEqual(pp, P('/c'))
class PureWindowsPathTest(_BasePurePathTest, unittest.TestCase):
    """Windows-flavour pure-path tests: drive letters, UNC shares,
    backslash separators and case-insensitive comparisons."""
    cls = pathlib.PureWindowsPath
    # Extra constructor-argument spellings that must compare equal
    # (extends the table inherited from _BasePurePathTest).
    equivalences = _BasePurePathTest.equivalences.copy()
    equivalences.update({
        'c:a': [ ('c:', 'a'), ('c:', 'a/'), ('/', 'c:', 'a') ],
        'c:/a': [
            ('c:/', 'a'), ('c:', '/', 'a'), ('c:', '/a'),
            ('/z', 'c:/', 'a'), ('//x/y', 'c:/', 'a'),
            ],
        '//a/b/': [ ('//a/b',) ],
        '//a/b/c': [
            ('//a/b', 'c'), ('//a/b/', 'c'),
            ],
        })
    def test_str(self):
        # str() renders with backslashes, whatever the input separators.
        p = self.cls('a/b/c')
        self.assertEqual(str(p), 'a\\b\\c')
        p = self.cls('c:/a/b/c')
        self.assertEqual(str(p), 'c:\\a\\b\\c')
        p = self.cls('//a/b')
        self.assertEqual(str(p), '\\\\a\\b\\')
        p = self.cls('//a/b/c')
        self.assertEqual(str(p), '\\\\a\\b\\c')
        p = self.cls('//a/b/c/d')
        self.assertEqual(str(p), '\\\\a\\b\\c\\d')
    def test_eq(self):
        P = self.cls
        self.assertEqual(P('c:a/b'), P('c:a/b'))
        self.assertEqual(P('c:a/b'), P('c:', 'a', 'b'))
        self.assertNotEqual(P('c:a/b'), P('d:a/b'))
        self.assertNotEqual(P('c:a/b'), P('c:/a/b'))
        self.assertNotEqual(P('/a/b'), P('c:/a/b'))
        # Case-insensitivity
        self.assertEqual(P('a/B'), P('A/b'))
        self.assertEqual(P('C:a/B'), P('c:A/b'))
        self.assertEqual(P('//Some/SHARE/a/B'), P('//somE/share/A/b'))
    @with_fsencode
    def test_as_uri(self):
        P = self.cls
        with self.assertRaises(ValueError):
            P('/a/b').as_uri()
        with self.assertRaises(ValueError):
            P('c:a/b').as_uri()
        self.assertEqual(P('c:/').as_uri(), 'file:///c:/')
        self.assertEqual(P('c:/a/b.c').as_uri(), 'file:///c:/a/b.c')
        self.assertEqual(P('c:/a/b%#c').as_uri(), 'file:///c:/a/b%25%23c')
        self.assertEqual(P('c:/a/b\xe9').as_uri(), 'file:///c:/a/b%C3%A9')
        self.assertEqual(P('//some/share/').as_uri(), 'file://some/share/')
        self.assertEqual(P('//some/share/a/b.c').as_uri(),
                         'file://some/share/a/b.c')
        self.assertEqual(P('//some/share/a/b%#c\xe9').as_uri(),
                         'file://some/share/a/b%25%23c%C3%A9')
    def test_match_common(self):
        P = self.cls
        # Absolute patterns
        self.assertTrue(P('c:/').match('/'))
        self.assertTrue(P('c:/b.py').match('/*.py'))
        self.assertTrue(P('c:/b.py').match('c:*.py'))
        self.assertTrue(P('c:/b.py').match('c:/*.py'))
        self.assertFalse(P('d:/b.py').match('c:/*.py'))  # wrong drive
        self.assertFalse(P('b.py').match('/*.py'))
        self.assertFalse(P('b.py').match('c:*.py'))
        self.assertFalse(P('b.py').match('c:/*.py'))
        self.assertFalse(P('c:b.py').match('/*.py'))
        self.assertFalse(P('c:b.py').match('c:/*.py'))
        self.assertFalse(P('/b.py').match('c:*.py'))
        self.assertFalse(P('/b.py').match('c:/*.py'))
        # UNC patterns
        self.assertTrue(P('//some/share/a.py').match('/*.py'))
        self.assertTrue(P('//some/share/a.py').match('//some/share/*.py'))
        self.assertFalse(P('//other/share/a.py').match('//some/share/*.py'))
        self.assertFalse(P('//some/share/a/b.py').match('//some/share/*.py'))
        # Case-insensitivity
        self.assertTrue(P('B.py').match('b.PY'))
        self.assertTrue(P('c:/a/B.Py').match('C:/A/*.pY'))
        self.assertTrue(P('//Some/Share/B.Py').match('//somE/sharE/*.pY'))
    def test_ordering_common(self):
        # Case-insensitivity
        def assertOrderedEqual(a, b):
            self.assertLessEqual(a, b)
            self.assertGreaterEqual(b, a)
        P = self.cls
        p = P('c:A/b')
        q = P('C:a/B')
        assertOrderedEqual(p, q)
        self.assertFalse(p < q)
        self.assertFalse(p > q)
        p = P('//some/Share/A/b')
        q = P('//Some/SHARE/a/B')
        assertOrderedEqual(p, q)
        self.assertFalse(p < q)
        self.assertFalse(p > q)
    def test_parts(self):
        P = self.cls
        p = P('c:a/b')
        parts = p.parts
        self.assertEqual(parts, ('c:', 'a', 'b'))
        p = P('c:/a/b')
        parts = p.parts
        self.assertEqual(parts, ('c:\\', 'a', 'b'))
        p = P('//a/b/c/d')
        parts = p.parts
        self.assertEqual(parts, ('\\\\a\\b\\', 'c', 'd'))
    def test_parent(self):
        # Anchored
        P = self.cls
        p = P('z:a/b/c')
        self.assertEqual(p.parent, P('z:a/b'))
        self.assertEqual(p.parent.parent, P('z:a'))
        self.assertEqual(p.parent.parent.parent, P('z:'))
        self.assertEqual(p.parent.parent.parent.parent, P('z:'))
        p = P('z:/a/b/c')
        self.assertEqual(p.parent, P('z:/a/b'))
        self.assertEqual(p.parent.parent, P('z:/a'))
        self.assertEqual(p.parent.parent.parent, P('z:/'))
        self.assertEqual(p.parent.parent.parent.parent, P('z:/'))
        p = P('//a/b/c/d')
        self.assertEqual(p.parent, P('//a/b/c'))
        self.assertEqual(p.parent.parent, P('//a/b'))
        self.assertEqual(p.parent.parent.parent, P('//a/b'))
    def test_parents(self):
        # Anchored
        P = self.cls
        p = P('z:a/b/')
        par = p.parents
        self.assertEqual(len(par), 2)
        self.assertEqual(par[0], P('z:a'))
        self.assertEqual(par[1], P('z:'))
        self.assertEqual(list(par), [P('z:a'), P('z:')])
        with self.assertRaises(IndexError):
            par[2]
        p = P('z:/a/b/')
        par = p.parents
        self.assertEqual(len(par), 2)
        self.assertEqual(par[0], P('z:/a'))
        self.assertEqual(par[1], P('z:/'))
        self.assertEqual(list(par), [P('z:/a'), P('z:/')])
        with self.assertRaises(IndexError):
            par[2]
        p = P('//a/b/c/d')
        par = p.parents
        self.assertEqual(len(par), 2)
        self.assertEqual(par[0], P('//a/b/c'))
        self.assertEqual(par[1], P('//a/b'))
        self.assertEqual(list(par), [P('//a/b/c'), P('//a/b')])
        with self.assertRaises(IndexError):
            par[2]
    def test_drive(self):
        # A UNC \\server\share prefix counts as the drive.
        P = self.cls
        self.assertEqual(P('c:').drive, 'c:')
        self.assertEqual(P('c:a/b').drive, 'c:')
        self.assertEqual(P('c:/').drive, 'c:')
        self.assertEqual(P('c:/a/b/').drive, 'c:')
        self.assertEqual(P('//a/b').drive, '\\\\a\\b')
        self.assertEqual(P('//a/b/').drive, '\\\\a\\b')
        self.assertEqual(P('//a/b/c/d').drive, '\\\\a\\b')
    def test_root(self):
        P = self.cls
        self.assertEqual(P('c:').root, '')
        self.assertEqual(P('c:a/b').root, '')
        self.assertEqual(P('c:/').root, '\\')
        self.assertEqual(P('c:/a/b/').root, '\\')
        self.assertEqual(P('//a/b').root, '\\')
        self.assertEqual(P('//a/b/').root, '\\')
        self.assertEqual(P('//a/b/c/d').root, '\\')
    def test_anchor(self):
        # anchor is the concatenation of drive and root.
        P = self.cls
        self.assertEqual(P('c:').anchor, 'c:')
        self.assertEqual(P('c:a/b').anchor, 'c:')
        self.assertEqual(P('c:/').anchor, 'c:\\')
        self.assertEqual(P('c:/a/b/').anchor, 'c:\\')
        self.assertEqual(P('//a/b').anchor, '\\\\a\\b\\')
        self.assertEqual(P('//a/b/').anchor, '\\\\a\\b\\')
        self.assertEqual(P('//a/b/c/d').anchor, '\\\\a\\b\\')
    def test_name(self):
        P = self.cls
        self.assertEqual(P('c:').name, '')
        self.assertEqual(P('c:/').name, '')
        self.assertEqual(P('c:a/b').name, 'b')
        self.assertEqual(P('c:/a/b').name, 'b')
        self.assertEqual(P('c:a/b.py').name, 'b.py')
        self.assertEqual(P('c:/a/b.py').name, 'b.py')
        self.assertEqual(P('//My.py/Share.php').name, '')
        self.assertEqual(P('//My.py/Share.php/a/b').name, 'b')
    def test_suffix(self):
        P = self.cls
        self.assertEqual(P('c:').suffix, '')
        self.assertEqual(P('c:/').suffix, '')
        self.assertEqual(P('c:a/b').suffix, '')
        self.assertEqual(P('c:/a/b').suffix, '')
        self.assertEqual(P('c:a/b.py').suffix, '.py')
        self.assertEqual(P('c:/a/b.py').suffix, '.py')
        self.assertEqual(P('c:a/.hgrc').suffix, '')
        self.assertEqual(P('c:/a/.hgrc').suffix, '')
        self.assertEqual(P('c:a/.hg.rc').suffix, '.rc')
        self.assertEqual(P('c:/a/.hg.rc').suffix, '.rc')
        self.assertEqual(P('c:a/b.tar.gz').suffix, '.gz')
        self.assertEqual(P('c:/a/b.tar.gz').suffix, '.gz')
        self.assertEqual(P('c:a/Some name. Ending with a dot.').suffix, '')
        self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffix, '')
        self.assertEqual(P('//My.py/Share.php').suffix, '')
        self.assertEqual(P('//My.py/Share.php/a/b').suffix, '')
    def test_suffixes(self):
        P = self.cls
        self.assertEqual(P('c:').suffixes, [])
        self.assertEqual(P('c:/').suffixes, [])
        self.assertEqual(P('c:a/b').suffixes, [])
        self.assertEqual(P('c:/a/b').suffixes, [])
        self.assertEqual(P('c:a/b.py').suffixes, ['.py'])
        self.assertEqual(P('c:/a/b.py').suffixes, ['.py'])
        self.assertEqual(P('c:a/.hgrc').suffixes, [])
        self.assertEqual(P('c:/a/.hgrc').suffixes, [])
        self.assertEqual(P('c:a/.hg.rc').suffixes, ['.rc'])
        self.assertEqual(P('c:/a/.hg.rc').suffixes, ['.rc'])
        self.assertEqual(P('c:a/b.tar.gz').suffixes, ['.tar', '.gz'])
        self.assertEqual(P('c:/a/b.tar.gz').suffixes, ['.tar', '.gz'])
        self.assertEqual(P('//My.py/Share.php').suffixes, [])
        self.assertEqual(P('//My.py/Share.php/a/b').suffixes, [])
        self.assertEqual(P('c:a/Some name. Ending with a dot.').suffixes, [])
        self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffixes, [])
    def test_stem(self):
        P = self.cls
        self.assertEqual(P('c:').stem, '')
        self.assertEqual(P('c:.').stem, '')
        self.assertEqual(P('c:..').stem, '..')
        self.assertEqual(P('c:/').stem, '')
        self.assertEqual(P('c:a/b').stem, 'b')
        self.assertEqual(P('c:a/b.py').stem, 'b')
        self.assertEqual(P('c:a/.hgrc').stem, '.hgrc')
        self.assertEqual(P('c:a/.hg.rc').stem, '.hg')
        self.assertEqual(P('c:a/b.tar.gz').stem, 'b.tar')
        self.assertEqual(P('c:a/Some name. Ending with a dot.').stem,
                         'Some name. Ending with a dot.')
    def test_with_name(self):
        P = self.cls
        self.assertEqual(P('c:a/b').with_name('d.xml'), P('c:a/d.xml'))
        self.assertEqual(P('c:/a/b').with_name('d.xml'), P('c:/a/d.xml'))
        self.assertEqual(P('c:a/Dot ending.').with_name('d.xml'), P('c:a/d.xml'))
        self.assertEqual(P('c:/a/Dot ending.').with_name('d.xml'), P('c:/a/d.xml'))
        self.assertRaises(ValueError, P('c:').with_name, 'd.xml')
        self.assertRaises(ValueError, P('c:/').with_name, 'd.xml')
        self.assertRaises(ValueError, P('//My/Share').with_name, 'd.xml')
    def test_with_suffix(self):
        P = self.cls
        self.assertEqual(P('c:a/b').with_suffix('.gz'), P('c:a/b.gz'))
        self.assertEqual(P('c:/a/b').with_suffix('.gz'), P('c:/a/b.gz'))
        self.assertEqual(P('c:a/b.py').with_suffix('.gz'), P('c:a/b.gz'))
        self.assertEqual(P('c:/a/b.py').with_suffix('.gz'), P('c:/a/b.gz'))
        # Path doesn't have a "filename" component
        self.assertRaises(ValueError, P('').with_suffix, '.gz')
        self.assertRaises(ValueError, P('.').with_suffix, '.gz')
        self.assertRaises(ValueError, P('/').with_suffix, '.gz')
        self.assertRaises(ValueError, P('//My/Share').with_suffix, '.gz')
        # Invalid suffix
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'gz')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '/')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '/.gz')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\.gz')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:.gz')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c/d')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c\\d')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c/d')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c\\d')
    def test_relative_to(self):
        # Drive comparison is case-insensitive, like the rest of the path.
        P = self.cls
        p = P('C:Foo/Bar')
        self.assertEqual(p.relative_to(P('c:')), P('Foo/Bar'))
        self.assertEqual(p.relative_to('c:'), P('Foo/Bar'))
        self.assertEqual(p.relative_to(P('c:foO')), P('Bar'))
        self.assertEqual(p.relative_to('c:foO'), P('Bar'))
        self.assertEqual(p.relative_to('c:foO/'), P('Bar'))
        self.assertEqual(p.relative_to(P('c:foO/baR')), P())
        self.assertEqual(p.relative_to('c:foO/baR'), P())
        # Unrelated paths
        self.assertRaises(ValueError, p.relative_to, P())
        self.assertRaises(ValueError, p.relative_to, '')
        self.assertRaises(ValueError, p.relative_to, P('d:'))
        self.assertRaises(ValueError, p.relative_to, P('/'))
        self.assertRaises(ValueError, p.relative_to, P('Foo'))
        self.assertRaises(ValueError, p.relative_to, P('/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('C:/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('C:Foo/Bar/Baz'))
        self.assertRaises(ValueError, p.relative_to, P('C:Foo/Baz'))
        p = P('C:/Foo/Bar')
        self.assertEqual(p.relative_to(P('c:')), P('/Foo/Bar'))
        self.assertEqual(p.relative_to('c:'), P('/Foo/Bar'))
        self.assertEqual(str(p.relative_to(P('c:'))), '\\Foo\\Bar')
        self.assertEqual(str(p.relative_to('c:')), '\\Foo\\Bar')
        self.assertEqual(p.relative_to(P('c:/')), P('Foo/Bar'))
        self.assertEqual(p.relative_to('c:/'), P('Foo/Bar'))
        self.assertEqual(p.relative_to(P('c:/foO')), P('Bar'))
        self.assertEqual(p.relative_to('c:/foO'), P('Bar'))
        self.assertEqual(p.relative_to('c:/foO/'), P('Bar'))
        self.assertEqual(p.relative_to(P('c:/foO/baR')), P())
        self.assertEqual(p.relative_to('c:/foO/baR'), P())
        # Unrelated paths
        self.assertRaises(ValueError, p.relative_to, P('C:/Baz'))
        self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Bar/Baz'))
        self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Baz'))
        self.assertRaises(ValueError, p.relative_to, P('C:Foo'))
        self.assertRaises(ValueError, p.relative_to, P('d:'))
        self.assertRaises(ValueError, p.relative_to, P('d:/'))
        self.assertRaises(ValueError, p.relative_to, P('/'))
        self.assertRaises(ValueError, p.relative_to, P('/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('//C/Foo'))
        # UNC paths
        p = P('//Server/Share/Foo/Bar')
        self.assertEqual(p.relative_to(P('//sErver/sHare')), P('Foo/Bar'))
        self.assertEqual(p.relative_to('//sErver/sHare'), P('Foo/Bar'))
        self.assertEqual(p.relative_to('//sErver/sHare/'), P('Foo/Bar'))
        self.assertEqual(p.relative_to(P('//sErver/sHare/Foo')), P('Bar'))
        self.assertEqual(p.relative_to('//sErver/sHare/Foo'), P('Bar'))
        self.assertEqual(p.relative_to('//sErver/sHare/Foo/'), P('Bar'))
        self.assertEqual(p.relative_to(P('//sErver/sHare/Foo/Bar')), P())
        self.assertEqual(p.relative_to('//sErver/sHare/Foo/Bar'), P())
        # Unrelated paths
        self.assertRaises(ValueError, p.relative_to, P('/Server/Share/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('c:/Server/Share/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('//z/Share/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('//Server/z/Foo'))
    def test_is_absolute(self):
        P = self.cls
        # Under NT, only paths with both a drive and a root are absolute
        self.assertFalse(P().is_absolute())
        self.assertFalse(P('a').is_absolute())
        self.assertFalse(P('a/b/').is_absolute())
        self.assertFalse(P('/').is_absolute())
        self.assertFalse(P('/a').is_absolute())
        self.assertFalse(P('/a/b/').is_absolute())
        self.assertFalse(P('c:').is_absolute())
        self.assertFalse(P('c:a').is_absolute())
        self.assertFalse(P('c:a/b/').is_absolute())
        self.assertTrue(P('c:/').is_absolute())
        self.assertTrue(P('c:/a').is_absolute())
        self.assertTrue(P('c:/a/b/').is_absolute())
        # UNC paths are absolute by definition
        self.assertTrue(P('//a/b').is_absolute())
        self.assertTrue(P('//a/b/').is_absolute())
        self.assertTrue(P('//a/b/c').is_absolute())
        self.assertTrue(P('//a/b/c/d').is_absolute())
    def test_join(self):
        P = self.cls
        p = P('C:/a/b')
        pp = p.joinpath('x/y')
        self.assertEqual(pp, P('C:/a/b/x/y'))
        pp = p.joinpath('/x/y')
        self.assertEqual(pp, P('C:/x/y'))
        # Joining with a different drive => the first path is ignored, even
        # if the second path is relative.
        pp = p.joinpath('D:x/y')
        self.assertEqual(pp, P('D:x/y'))
        pp = p.joinpath('D:/x/y')
        self.assertEqual(pp, P('D:/x/y'))
        pp = p.joinpath('//host/share/x/y')
        self.assertEqual(pp, P('//host/share/x/y'))
        # Joining with the same drive => the first path is appended to if
        # the second path is relative.
        pp = p.joinpath('c:x/y')
        self.assertEqual(pp, P('C:/a/b/x/y'))
        pp = p.joinpath('c:/x/y')
        self.assertEqual(pp, P('C:/x/y'))
    def test_div(self):
        # Basically the same as joinpath()
        P = self.cls
        p = P('C:/a/b')
        self.assertEqual(p / 'x/y', P('C:/a/b/x/y'))
        self.assertEqual(p / 'x' / 'y', P('C:/a/b/x/y'))
        self.assertEqual(p / '/x/y', P('C:/x/y'))
        self.assertEqual(p / '/x' / 'y', P('C:/x/y'))
        # Joining with a different drive => the first path is ignored, even
        # if the second path is relative.
        self.assertEqual(p / 'D:x/y', P('D:x/y'))
        self.assertEqual(p / 'D:' / 'x/y', P('D:x/y'))
        self.assertEqual(p / 'D:/x/y', P('D:/x/y'))
        self.assertEqual(p / 'D:' / '/x/y', P('D:/x/y'))
        self.assertEqual(p / '//host/share/x/y', P('//host/share/x/y'))
        # Joining with the same drive => the first path is appended to if
        # the second path is relative.
        self.assertEqual(p / 'c:x/y', P('C:/a/b/x/y'))
        self.assertEqual(p / 'c:/x/y', P('C:/x/y'))
    def test_is_reserved(self):
        P = self.cls
        self.assertIs(False, P('').is_reserved())
        self.assertIs(False, P('/').is_reserved())
        self.assertIs(False, P('/foo/bar').is_reserved())
        self.assertIs(True, P('con').is_reserved())
        self.assertIs(True, P('NUL').is_reserved())
        self.assertIs(True, P('NUL.txt').is_reserved())
        self.assertIs(True, P('com1').is_reserved())
        self.assertIs(True, P('com9.bar').is_reserved())
        self.assertIs(False, P('bar.com9').is_reserved())
        self.assertIs(True, P('lpt1').is_reserved())
        self.assertIs(True, P('lpt9.bar').is_reserved())
        self.assertIs(False, P('bar.lpt9').is_reserved())
        # Only the last component matters
        self.assertIs(False, P('c:/NUL/con/baz').is_reserved())
        # UNC paths are never reserved
        self.assertIs(False, P('//my/share/nul/con/aux').is_reserved())
class PurePathTest(_BasePurePathTest, unittest.TestCase):
    """Tests for the generic PurePath class, which dispatches to the
    flavour matching the running platform."""
    cls = pathlib.PurePath
    def test_concrete_class(self):
        # Instantiating PurePath yields the platform-appropriate subclass.
        p = self.cls('a')
        self.assertIs(type(p),
            pathlib.PureWindowsPath if os.name == 'nt' else pathlib.PurePosixPath)
    def test_different_flavours_unequal(self):
        p = pathlib.PurePosixPath('a')
        q = pathlib.PureWindowsPath('a')
        self.assertNotEqual(p, q)
    @unittest.skipIf(sys.version_info < (3, 0),
                     'Most types are orderable in Python 2')
    def test_different_flavours_unordered(self):
        # Cross-flavour ordering comparisons must raise on Python 3.
        p = pathlib.PurePosixPath('a')
        q = pathlib.PureWindowsPath('a')
        with self.assertRaises(TypeError):
            p < q
        with self.assertRaises(TypeError):
            p <= q
        with self.assertRaises(TypeError):
            p > q
        with self.assertRaises(TypeError):
            p >= q
#
# Tests for the concrete classes
#
# Make sure any symbolic links in the base test path are resolved
# BASE is the fixture directory every concrete-path test works under.
BASE = os.path.realpath(TESTFN)
join = lambda *x: os.path.join(BASE, *x)          # absolute path under BASE
rel_join = lambda *x: os.path.join(TESTFN, *x)    # relative path under TESTFN
def symlink_skip_reason():
    """Probe whether this platform/user can create symlinks.

    Returns a human-readable skip reason, or None when symlinks work.
    """
    if not pathlib.supports_symlinks:
        return "no system support for symlinks"
    try:
        os.symlink(__file__, BASE)
    except OSError as e:
        return str(e)
    else:
        support.unlink(BASE)
    return None
# Evaluate the probe once at import time and keep only its result.
symlink_skip_reason = symlink_skip_reason()
only_nt = unittest.skipIf(os.name != 'nt',
                          'test requires a Windows-compatible system')
only_posix = unittest.skipIf(os.name == 'nt',
                             'test requires a POSIX-compatible system')
with_symlinks = unittest.skipIf(symlink_skip_reason, symlink_skip_reason)
@only_posix
class PosixPathAsPureTest(PurePosixPathTest):
    """Re-run the pure POSIX tests against the concrete PosixPath class."""
    cls = pathlib.PosixPath
@only_nt
class WindowsPathAsPureTest(PureWindowsPathTest):
    """Re-run the pure Windows tests against the concrete WindowsPath class."""
    cls = pathlib.WindowsPath
class _BasePathTest(object):
"""Tests for the FS-accessing functionalities of the Path classes."""
# (BASE)
# |
# |-- dirA/
# |-- linkC -> "../dirB"
# |-- dirB/
# | |-- fileB
# |-- linkD -> "../dirB"
# |-- dirC/
# | |-- fileC
# | |-- fileD
# |-- fileA
# |-- linkA -> "fileA"
# |-- linkB -> "dirB"
#
    def setUp(self):
        # Build the directory tree pictured in the class comment under BASE,
        # and register removal of the whole tree as cleanup.
        os.mkdir(BASE)
        self.addCleanup(shutil.rmtree, BASE)
        os.mkdir(join('dirA'))
        os.mkdir(join('dirB'))
        os.mkdir(join('dirC'))
        os.mkdir(join('dirC', 'dirD'))
        with open(join('fileA'), 'wb') as f:
            f.write(b"this is file A\n")
        with open(join('dirB', 'fileB'), 'wb') as f:
            f.write(b"this is file B\n")
        with open(join('dirC', 'fileC'), 'wb') as f:
            f.write(b"this is file C\n")
        with open(join('dirC', 'dirD', 'fileD'), 'wb') as f:
            f.write(b"this is file D\n")
        if not symlink_skip_reason:
            # Relative symlinks
            os.symlink('fileA', join('linkA'))
            os.symlink('non-existing', join('brokenLink'))
            self.dirlink('dirB', join('linkB'))
            self.dirlink(os.path.join('..', 'dirB'), join('dirA', 'linkC'))
            # This one goes upwards but doesn't create a loop
            self.dirlink(os.path.join('..', 'dirB'), join('dirB', 'linkD'))
    if os.name == 'nt':
        # Workaround for http://bugs.python.org/issue13772
        def dirlink(self, src, dest):
            # Windows must be told explicitly that the link targets a directory.
            os.symlink(src, dest, target_is_directory=True)
    else:
        def dirlink(self, src, dest):
            # POSIX symlinks carry no file-type distinction.
            os.symlink(src, dest)
def assertSame(self, path_a, path_b):
self.assertTrue(os.path.samefile(str(path_a), str(path_b)),
"%r and %r don't point to the same file" %
(path_a, path_b))
    def assertFileNotFound(self, func, *args, **kwargs):
        """Assert that func(*args, **kwargs) fails with errno ENOENT."""
        # Python < 3.3 has no FileNotFoundError; fall back to EnvironmentError.
        exc = FileNotFoundError if sys.version_info >= (3, 3) else EnvironmentError
        with self.assertRaises(exc) as cm:
            # Python 2.6 kludge for http://bugs.python.org/issue7853
            try:
                func(*args, **kwargs)
            except:
                raise
        self.assertEqual(cm.exception.errno, errno.ENOENT)
def _test_cwd(self, p):
q = self.cls(os.getcwd())
self.assertEqual(p, q)
self.assertEqual(str(p), str(q))
self.assertIs(type(p), type(q))
self.assertTrue(p.is_absolute())
def test_cwd(self):
p = self.cls.cwd()
self._test_cwd(p)
def test_empty_path(self):
# The empty path points to '.'
p = self.cls('')
self.assertEqual(p.stat(), os.stat('.'))
    def test_exists(self):
        """exists() is True for files, dirs and valid symlinks under BASE."""
        P = self.cls
        p = P(BASE)
        self.assertIs(True, p.exists())
        self.assertIs(True, (p / 'dirA').exists())
        self.assertIs(True, (p / 'fileA').exists())
        if not symlink_skip_reason:
            # Symlinks are followed: both resolve to existing targets.
            self.assertIs(True, (p / 'linkA').exists())
            self.assertIs(True, (p / 'linkB').exists())
        self.assertIs(False, (p / 'foo').exists())
        self.assertIs(False, P('/xyzzy').exists())
    def test_open_common(self):
        """open() returns the stream type matching mode and buffering."""
        p = self.cls(BASE)
        with (p / 'fileA').open('r') as f:
            self.assertIsInstance(f, io.TextIOBase)
            self.assertEqual(f.read(), "this is file A\n")
        with (p / 'fileA').open('rb') as f:
            self.assertIsInstance(f, io.BufferedIOBase)
            self.assertEqual(f.read().strip(), b"this is file A")
        # buffering=0 is only valid in binary mode and yields a raw stream.
        with (p / 'fileA').open('rb', buffering=0) as f:
            self.assertIsInstance(f, io.RawIOBase)
            self.assertEqual(f.read().strip(), b"this is file A")
    def test_iterdir(self):
        """iterdir() yields every direct child of BASE, links included."""
        P = self.cls
        p = P(BASE)
        it = p.iterdir()
        paths = set(it)
        expected = ['dirA', 'dirB', 'dirC', 'fileA']
        if not symlink_skip_reason:
            # The symlink entries only exist when setUp could create them.
            expected += ['linkA', 'linkB', 'brokenLink']
        self.assertEqual(paths, set( P(BASE, q) for q in expected ))
    @with_symlinks
    def test_iterdir_symlink(self):
        # __iter__ on a symlink to a directory
        P = self.cls
        p = P(BASE, 'linkB')
        paths = set(p.iterdir())
        # Entries are reported under the link path, not the target path.
        expected = set( P(BASE, 'linkB', q) for q in ['fileB', 'linkD'] )
        self.assertEqual(paths, expected)
    def test_iterdir_nodir(self):
        # __iter__ on something that is not a directory
        p = self.cls(BASE, 'fileA')
        with self.assertRaises(OSError) as cm:
            # Python 2.6 kludge for http://bugs.python.org/issue7853
            try:
                next(p.iterdir())
            except:
                raise
        # ENOENT or EINVAL under Windows, ENOTDIR otherwise
        # (see issue #12802)
        self.assertIn(cm.exception.errno, (errno.ENOTDIR,
                                           errno.ENOENT, errno.EINVAL))
def test_glob_common(self):
def _check(glob, expected):
self.assertEqual(set(glob), set( P(BASE, q) for q in expected ))
P = self.cls
p = P(BASE)
it = p.glob("fileA")
self.assertIsInstance(it, collections.Iterator)
_check(it, ["fileA"])
_check(p.glob("fileB"), [])
_check(p.glob("dir*/file*"), ["dirB/fileB", "dirC/fileC"])
if symlink_skip_reason:
_check(p.glob("*A"), ['dirA', 'fileA'])
else:
_check(p.glob("*A"), ['dirA', 'fileA', 'linkA'])
if symlink_skip_reason:
_check(p.glob("*B/*"), ['dirB/fileB'])
else:
_check(p.glob("*B/*"), ['dirB/fileB', 'dirB/linkD',
'linkB/fileB', 'linkB/linkD'])
if symlink_skip_reason:
_check(p.glob("*/fileB"), ['dirB/fileB'])
else:
_check(p.glob("*/fileB"), ['dirB/fileB', 'linkB/fileB'])
_check(p.glob("dirC/**"), ['dirC/fileC', 'dirC/dirD', 'dirC/dirD/fileD'])
def test_rglob_common(self):
def _check(glob, expected):
self.assertEqual(set(glob), set( P(BASE, q) for q in expected ))
P = self.cls
p = P(BASE)
it = p.rglob("fileA")
self.assertIsInstance(it, collections.Iterator)
# XXX cannot test because of symlink loops in the test setup
#_check(it, ["fileA"])
#_check(p.rglob("fileB"), ["dirB/fileB"])
#_check(p.rglob("*/fileA"), [""])
#_check(p.rglob("*/fileB"), ["dirB/fileB"])
#_check(p.rglob("file*"), ["fileA", "dirB/fileB"])
# No symlink loops here
p = P(BASE, "dirC")
_check(p.rglob("file*"), ["dirC/fileC", "dirC/dirD/fileD"])
_check(p.rglob("*/*"), ["dirC/dirD/fileD"])
def test_glob_dotdot(self):
# ".." is not special in globs
P = self.cls
p = P(BASE)
self.assertEqual(set(p.glob("..")), set([ P(BASE, "..") ]))
self.assertEqual(set(p.glob("dirA/../file*")), set([ P(BASE, "dirA/../fileA") ]))
self.assertEqual(set(p.glob("../xyzzy")), set())
    def _check_resolve_relative(self, p, expected):
        # Helper: resolve() a path built from relative symlinks.
        q = p.resolve()
        self.assertEqual(q, expected)
    def _check_resolve_absolute(self, p, expected):
        # Helper: resolve() a path that traverses absolute symlinks.
        q = p.resolve()
        self.assertEqual(q, expected)
    @with_symlinks
    def test_resolve_common(self):
        """resolve() follows symlinks and fails with ENOENT on missing paths."""
        P = self.cls
        p = P(BASE, 'foo')
        with self.assertRaises(OSError) as cm:
            p.resolve()
        self.assertEqual(cm.exception.errno, errno.ENOENT)
        # These are all relative symlinks
        p = P(BASE, 'dirB', 'fileB')
        self._check_resolve_relative(p, p)
        p = P(BASE, 'linkA')
        self._check_resolve_relative(p, P(BASE, 'fileA'))
        p = P(BASE, 'dirA', 'linkC', 'fileB')
        self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB'))
        p = P(BASE, 'dirB', 'linkD', 'fileB')
        self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB'))
        # Now create absolute symlinks
        d = tempfile.mkdtemp(suffix='-dirD')
        self.addCleanup(shutil.rmtree, d)
        os.symlink(os.path.join(d), join('dirA', 'linkX'))
        os.symlink(join('dirB'), os.path.join(d, 'linkY'))
        # Chained absolute links: linkX -> d, d/linkY -> BASE/dirB.
        p = P(BASE, 'dirA', 'linkX', 'linkY', 'fileB')
        self._check_resolve_absolute(p, P(BASE, 'dirB', 'fileB'))
    @with_symlinks
    def test_resolve_dot(self):
        # See https://bitbucket.org/pitrou/pathlib/issue/9/pathresolve-fails-on-complex-symlinks
        p = self.cls(BASE)
        # Each link routes through '.'-style indirection of the previous one.
        self.dirlink('.', join('0'))
        self.dirlink(os.path.join('0', '0'), join('1'))
        self.dirlink(os.path.join('1', '1'), join('2'))
        q = p / '2'
        self.assertEqual(q.resolve(), p)
    def test_chmod(self):
        """chmod() changes the file mode reported by stat()."""
        p = self.cls(BASE) / 'fileA'
        mode = p.stat().st_mode
        # Clear writable bit
        new_mode = mode & ~0o222
        p.chmod(new_mode)
        self.assertEqual(p.stat().st_mode, new_mode)
        # Set writable bit
        new_mode = mode | 0o222
        p.chmod(new_mode)
        self.assertEqual(p.stat().st_mode, new_mode)
    # XXX also need a test for lchmod
    def test_stat(self):
        """stat() reflects the file's current metadata on each call."""
        p = self.cls(BASE) / 'fileA'
        st = p.stat()
        self.assertEqual(p.stat(), st)
        # Change file mode by flipping write bit
        p.chmod(st.st_mode ^ 0o222)
        self.addCleanup(p.chmod, st.st_mode)
        self.assertNotEqual(p.stat(), st)
    @with_symlinks
    def test_lstat(self):
        # lstat() describes the link itself; stat() follows it to the target.
        p = self.cls(BASE)/ 'linkA'
        st = p.stat()
        self.assertNotEqual(st, p.lstat())
    def test_lstat_nosymlink(self):
        # On a regular file, lstat() and stat() agree.
        p = self.cls(BASE) / 'fileA'
        st = p.stat()
        self.assertEqual(st, p.lstat())
    @unittest.skipUnless(pwd, "the pwd module is needed for this test")
    def test_owner(self):
        """owner() maps st_uid to a user name via the pwd database."""
        p = self.cls(BASE) / 'fileA'
        uid = p.stat().st_uid
        try:
            name = pwd.getpwuid(uid).pw_name
        except KeyError:
            # The uid may have no passwd entry (e.g. inside containers).
            self.skipTest(
                "user %d doesn't have an entry in the system database" % uid)
        self.assertEqual(name, p.owner())
    @unittest.skipUnless(grp, "the grp module is needed for this test")
    def test_group(self):
        """group() maps st_gid to a group name via the grp database."""
        p = self.cls(BASE) / 'fileA'
        gid = p.stat().st_gid
        try:
            name = grp.getgrgid(gid).gr_name
        except KeyError:
            # The gid may have no group entry (e.g. inside containers).
            self.skipTest(
                "group %d doesn't have an entry in the system database" % gid)
        self.assertEqual(name, p.group())
def test_unlink(self):
p = self.cls(BASE) / 'fileA'
p.unlink()
self.assertFileNotFound(p.stat)
self.assertFileNotFound(p.unlink)
    def test_rmdir(self):
        """rmdir() removes an (emptied) directory."""
        p = self.cls(BASE) / 'dirA'
        # rmdir() requires an empty directory, so clear it out first.
        for q in p.iterdir():
            q.unlink()
        p.rmdir()
        self.assertFileNotFound(p.stat)
        self.assertFileNotFound(p.unlink)
    def test_rename(self):
        """rename() accepts both Path targets and plain string targets."""
        P = self.cls(BASE)
        p = P / 'fileA'
        size = p.stat().st_size
        # Renaming to another path
        q = P / 'dirA' / 'fileAA'
        p.rename(q)
        self.assertEqual(q.stat().st_size, size)
        self.assertFileNotFound(p.stat)
        # Renaming to a str of a relative path
        r = rel_join('fileAAA')
        q.rename(r)
        self.assertEqual(os.stat(r).st_size, size)
        self.assertFileNotFound(q.stat)
    def test_replace(self):
        """replace() moves a file, overwriting an existing destination."""
        P = self.cls(BASE)
        p = P / 'fileA'
        if sys.version_info < (3, 3):
            # os.replace() only exists since Python 3.3.
            self.assertRaises(NotImplementedError, p.replace, p)
            return
        size = p.stat().st_size
        # Replacing a non-existing path
        q = P / 'dirA' / 'fileAA'
        p.replace(q)
        self.assertEqual(q.stat().st_size, size)
        self.assertFileNotFound(p.stat)
        # Replacing another (existing) path
        r = rel_join('dirB', 'fileB')
        q.replace(r)
        self.assertEqual(os.stat(r).st_size, size)
        self.assertFileNotFound(q.stat)
    def test_touch_common(self):
        """touch() creates files, refreshes mtime, honours exist_ok=False."""
        P = self.cls(BASE)
        p = P / 'newfileA'
        self.assertFalse(p.exists())
        p.touch()
        self.assertTrue(p.exists())
        old_mtime = p.stat().st_mtime
        # Rewind the mtime sufficiently far in the past to work around
        # filesystem-specific timestamp granularity.
        os.utime(str(p), (old_mtime - 10, old_mtime - 10))
        # The file mtime is refreshed by calling touch() again
        p.touch()
        self.assertGreaterEqual(p.stat().st_mtime, old_mtime)
        p = P / 'newfileB'
        self.assertFalse(p.exists())
        p.touch(mode=0o700, exist_ok=False)
        self.assertTrue(p.exists())
        # exist_ok=False on an existing file must raise.
        self.assertRaises(OSError, p.touch, exist_ok=False)
    def test_touch_nochange(self):
        """touch() on an existing file must not alter its contents."""
        P = self.cls(BASE)
        p = P / 'fileA'
        p.touch()
        with p.open('rb') as f:
            self.assertEqual(f.read().strip(), b"this is file A")
    def test_mkdir(self):
        """mkdir() creates a directory and raises EEXIST when repeated."""
        P = self.cls(BASE)
        p = P / 'newdirA'
        self.assertFalse(p.exists())
        p.mkdir()
        self.assertTrue(p.exists())
        self.assertTrue(p.is_dir())
        with self.assertRaises(OSError) as cm:
            # Python 2.6 kludge for http://bugs.python.org/issue7853
            try:
                p.mkdir()
            except:
                raise
        self.assertEqual(cm.exception.errno, errno.EEXIST)
    def test_mkdir_parents(self):
        """mkdir(parents=True) creates intermediate dirs; mode applies to leaf."""
        # Creating a chain of directories
        p = self.cls(BASE, 'newdirB', 'newdirC')
        self.assertFalse(p.exists())
        with self.assertRaises(OSError) as cm:
            # Python 2.6 kludge for http://bugs.python.org/issue7853
            try:
                p.mkdir()
            except:
                raise
        self.assertEqual(cm.exception.errno, errno.ENOENT)
        p.mkdir(parents=True)
        self.assertTrue(p.exists())
        self.assertTrue(p.is_dir())
        with self.assertRaises(OSError) as cm:
            try:
                p.mkdir(parents=True)
            except:
                raise
        # parents=True still fails if the leaf itself already exists.
        self.assertEqual(cm.exception.errno, errno.EEXIST)
        # test `mode` arg
        mode = stat.S_IMODE(p.stat().st_mode) # default mode
        p = self.cls(BASE, 'newdirD', 'newdirE')
        p.mkdir(0o555, parents=True)
        self.assertTrue(p.exists())
        self.assertTrue(p.is_dir())
        if os.name != 'nt':
            # the directory's permissions follow the mode argument
            self.assertEqual(stat.S_IMODE(p.stat().st_mode), 0o7555 & mode)
            # the parent's permissions follow the default process settings
            self.assertEqual(stat.S_IMODE(p.parent.stat().st_mode), mode)
    @with_symlinks
    def test_symlink_to(self):
        """symlink_to() accepts Path/str targets and directory targets."""
        P = self.cls(BASE)
        target = P / 'fileA'
        # Symlinking a path target
        link = P / 'dirA' / 'linkAA'
        link.symlink_to(target)
        # stat() follows the link, lstat() does not.
        self.assertEqual(link.stat(), target.stat())
        self.assertNotEqual(link.lstat(), target.stat())
        # Symlinking a str target
        link = P / 'dirA' / 'linkAAA'
        link.symlink_to(str(target))
        self.assertEqual(link.stat(), target.stat())
        self.assertNotEqual(link.lstat(), target.stat())
        self.assertFalse(link.is_dir())
        # Symlinking to a directory
        target = P / 'dirB'
        link = P / 'dirA' / 'linkAAAA'
        link.symlink_to(target, target_is_directory=True)
        self.assertEqual(link.stat(), target.stat())
        self.assertNotEqual(link.lstat(), target.stat())
        self.assertTrue(link.is_dir())
        self.assertTrue(list(link.iterdir()))
def test_is_dir(self):
P = self.cls(BASE)
self.assertTrue((P / 'dirA').is_dir())
self.assertFalse((P / 'fileA').is_dir())
self.assertFalse((P / 'non-existing').is_dir())
if not symlink_skip_reason:
self.assertFalse((P / 'linkA').is_dir())
self.assertTrue((P / 'linkB').is_dir())
self.assertFalse((P/ 'brokenLink').is_dir())
def test_is_file(self):
P = self.cls(BASE)
self.assertTrue((P / 'fileA').is_file())
self.assertFalse((P / 'dirA').is_file())
self.assertFalse((P / 'non-existing').is_file())
if not symlink_skip_reason:
self.assertTrue((P / 'linkA').is_file())
self.assertFalse((P / 'linkB').is_file())
self.assertFalse((P/ 'brokenLink').is_file())
    def test_is_symlink(self):
        """is_symlink() is True for any link, even a broken one."""
        P = self.cls(BASE)
        self.assertFalse((P / 'fileA').is_symlink())
        self.assertFalse((P / 'dirA').is_symlink())
        self.assertFalse((P / 'non-existing').is_symlink())
        if not symlink_skip_reason:
            self.assertTrue((P / 'linkA').is_symlink())
            self.assertTrue((P / 'linkB').is_symlink())
            self.assertTrue((P/ 'brokenLink').is_symlink())
    def test_is_fifo_false(self):
        """is_fifo() is False for regular files, dirs and missing paths."""
        P = self.cls(BASE)
        self.assertFalse((P / 'fileA').is_fifo())
        self.assertFalse((P / 'dirA').is_fifo())
        self.assertFalse((P / 'non-existing').is_fifo())
    @unittest.skipUnless(hasattr(os, "mkfifo"), "os.mkfifo() required")
    def test_is_fifo_true(self):
        """A freshly created FIFO is a fifo, not a socket nor a file."""
        P = self.cls(BASE, 'myfifo')
        os.mkfifo(str(P))
        self.assertTrue(P.is_fifo())
        self.assertFalse(P.is_socket())
        self.assertFalse(P.is_file())
    def test_is_socket_false(self):
        """is_socket() is False for regular files, dirs and missing paths."""
        P = self.cls(BASE)
        self.assertFalse((P / 'fileA').is_socket())
        self.assertFalse((P / 'dirA').is_socket())
        self.assertFalse((P / 'non-existing').is_socket())
@unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required")
def test_is_socket_true(self):
P = self.cls(BASE, 'mysock')
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.addCleanup(sock.close)
try:
sock.bind(str(P))
except OSError as e:
if "AF_UNIX path too long" in str(e):
self.skipTest("cannot bind Unix socket: " + str(e))
self.assertTrue(P.is_socket())
self.assertFalse(P.is_fifo())
self.assertFalse(P.is_file())
    def test_is_block_device_false(self):
        """is_block_device() is False for ordinary filesystem entries."""
        P = self.cls(BASE)
        self.assertFalse((P / 'fileA').is_block_device())
        self.assertFalse((P / 'dirA').is_block_device())
        self.assertFalse((P / 'non-existing').is_block_device())
    def test_is_char_device_false(self):
        """is_char_device() is False for ordinary filesystem entries."""
        P = self.cls(BASE)
        self.assertFalse((P / 'fileA').is_char_device())
        self.assertFalse((P / 'dirA').is_char_device())
        self.assertFalse((P / 'non-existing').is_char_device())
    def test_is_char_device_true(self):
        """/dev/null is a character device (skipped if it does not exist)."""
        # Under Unix, /dev/null should generally be a char device
        P = self.cls('/dev/null')
        if not P.exists():
            self.skipTest("/dev/null required")
        self.assertTrue(P.is_char_device())
        self.assertFalse(P.is_block_device())
        self.assertFalse(P.is_file())
    def test_pickling_common(self):
        """Concrete paths survive a pickle round-trip at every protocol."""
        p = self.cls(BASE, 'fileA')
        for proto in range(0, pickle.HIGHEST_PROTOCOL + 1):
            dumped = pickle.dumps(p, proto)
            pp = pickle.loads(dumped)
            self.assertEqual(pp.stat(), p.stat())
    def test_parts_interning(self):
        """Equal path components share a single interned str object."""
        P = self.cls
        p = P('/usr/bin/foo')
        q = P('/usr/local/bin')
        # 'usr'
        self.assertIs(p.parts[1], q.parts[1])
        # 'bin'
        self.assertIs(p.parts[2], q.parts[3])
    def _check_complex_symlinks(self, link0_target):
        """Resolve a chain of nested symlinks rooted at *link0_target*.

        Builds link0 -> link0_target and link{n} -> link{n-1}/link{n-1},
        then checks that resolve() collapses every link back to BASE for
        both absolute and relative (cwd-based) inputs.
        """
        # Test solving a non-looping chain of symlinks (issue #19887)
        P = self.cls(BASE)
        self.dirlink(os.path.join('link0', 'link0'), join('link1'))
        self.dirlink(os.path.join('link1', 'link1'), join('link2'))
        self.dirlink(os.path.join('link2', 'link2'), join('link3'))
        self.dirlink(link0_target, join('link0'))
        # Resolve absolute paths
        p = (P / 'link0').resolve()
        self.assertEqual(p, P)
        self.assertEqual(str(p), BASE)
        p = (P / 'link1').resolve()
        self.assertEqual(p, P)
        self.assertEqual(str(p), BASE)
        p = (P / 'link2').resolve()
        self.assertEqual(p, P)
        self.assertEqual(str(p), BASE)
        p = (P / 'link3').resolve()
        self.assertEqual(p, P)
        self.assertEqual(str(p), BASE)
        # Resolve relative paths
        old_path = os.getcwd()
        os.chdir(BASE)
        try:
            p = self.cls('link0').resolve()
            self.assertEqual(p, P)
            self.assertEqual(str(p), BASE)
            p = self.cls('link1').resolve()
            self.assertEqual(p, P)
            self.assertEqual(str(p), BASE)
            p = self.cls('link2').resolve()
            self.assertEqual(p, P)
            self.assertEqual(str(p), BASE)
            p = self.cls('link3').resolve()
            self.assertEqual(p, P)
            self.assertEqual(str(p), BASE)
        finally:
            # Always restore the original working directory.
            os.chdir(old_path)
    @with_symlinks
    def test_complex_symlinks_absolute(self):
        """Symlink chain whose root points at an absolute path."""
        self._check_complex_symlinks(BASE)
    @with_symlinks
    def test_complex_symlinks_relative(self):
        """Symlink chain whose root points at '.'."""
        self._check_complex_symlinks('.')
    @with_symlinks
    def test_complex_symlinks_relative_dot_dot(self):
        """Symlink chain whose root contains a '..' component."""
        self._check_complex_symlinks(os.path.join('dirA', '..'))
class PathTest(_BasePathTest, unittest.TestCase):
    """Tests for the concrete, OS-dispatching pathlib.Path class."""

    cls = pathlib.Path

    def test_concrete_class(self):
        """Instantiating Path yields the flavour matching the host OS."""
        if os.name == 'nt':
            expected = pathlib.WindowsPath
        else:
            expected = pathlib.PosixPath
        self.assertIs(type(self.cls('a')), expected)

    def test_unsupported_flavour(self):
        """The flavour for the *other* OS cannot be instantiated."""
        wrong = pathlib.PosixPath if os.name == 'nt' else pathlib.WindowsPath
        self.assertRaises(NotImplementedError, wrong)
@only_posix
class PosixPathTest(_BasePathTest, unittest.TestCase):
    """POSIX-specific behaviour of pathlib.PosixPath."""

    cls = pathlib.PosixPath

    def _check_symlink_loop(self, *args):
        """Assert that resolving the path built from *args* detects a loop."""
        path = self.cls(*args)
        with self.assertRaises(RuntimeError):
            print(path.resolve())

    def test_open_mode(self):
        """Files created via open() honour the process umask (base 0o666)."""
        old_mask = os.umask(0)
        self.addCleanup(os.umask, old_mask)
        p = self.cls(BASE)
        with (p / 'new_file').open('wb'):
            pass
        st = os.stat(join('new_file'))
        self.assertEqual(stat.S_IMODE(st.st_mode), 0o666)
        os.umask(0o022)
        with (p / 'other_new_file').open('wb'):
            pass
        st = os.stat(join('other_new_file'))
        self.assertEqual(stat.S_IMODE(st.st_mode), 0o644)

    def test_touch_mode(self):
        """touch() honours the umask and an explicit mode argument."""
        old_mask = os.umask(0)
        self.addCleanup(os.umask, old_mask)
        p = self.cls(BASE)
        (p / 'new_file').touch()
        st = os.stat(join('new_file'))
        self.assertEqual(stat.S_IMODE(st.st_mode), 0o666)
        os.umask(0o022)
        (p / 'other_new_file').touch()
        st = os.stat(join('other_new_file'))
        self.assertEqual(stat.S_IMODE(st.st_mode), 0o644)
        (p / 'masked_new_file').touch(mode=0o750)
        st = os.stat(join('masked_new_file'))
        self.assertEqual(stat.S_IMODE(st.st_mode), 0o750)

    @with_symlinks
    def test_resolve_loop(self):
        """resolve() raises on relative and absolute symlink loops."""
        # Loop detection for broken symlinks under POSIX
        P = self.cls
        # Loops with relative symlinks
        os.symlink('linkX/inside', join('linkX'))
        self._check_symlink_loop(BASE, 'linkX')
        os.symlink('linkY', join('linkY'))
        self._check_symlink_loop(BASE, 'linkY')
        os.symlink('linkZ/../linkZ', join('linkZ'))
        self._check_symlink_loop(BASE, 'linkZ')
        # Loops with absolute symlinks
        os.symlink(join('linkU/inside'), join('linkU'))
        self._check_symlink_loop(BASE, 'linkU')
        os.symlink(join('linkV'), join('linkV'))
        self._check_symlink_loop(BASE, 'linkV')
        os.symlink(join('linkW/../linkW'), join('linkW'))
        self._check_symlink_loop(BASE, 'linkW')

    def test_glob(self):
        """Globbing matches case-sensitively unless the filesystem is not."""
        P = self.cls
        p = P(BASE)
        given = set(p.glob("FILEa"))
        expect = set() if not support.fs_is_case_insensitive(BASE) else given
        self.assertEqual(given, expect)
        self.assertEqual(set(p.glob("FILEa*")), set())

    def test_rglob(self):
        """Recursive globbing matches case-sensitively as well."""
        P = self.cls
        p = P(BASE, "dirC")
        given = set(p.rglob("FILEd"))
        expect = set() if not support.fs_is_case_insensitive(BASE) else given
        self.assertEqual(given, expect)
        self.assertEqual(set(p.rglob("FILEd*")), set())
@only_nt
class WindowsPathTest(_BasePathTest, unittest.TestCase):
    """Windows-specific behaviour of pathlib.WindowsPath."""

    cls = pathlib.WindowsPath

    def test_glob(self):
        """Globbing is case-insensitive on Windows."""
        P = self.cls
        p = P(BASE)
        # A Path instance is not iterable, so the original
        # ``set(P(BASE, "fileA"))`` raised TypeError; the expected value is a
        # singleton set containing the path, built with a set literal.
        self.assertEqual(set(p.glob("FILEa")), {P(BASE, "fileA")})

    def test_rglob(self):
        """Recursive globbing is case-insensitive on Windows."""
        P = self.cls
        p = P(BASE, "dirC")
        self.assertEqual(set(p.rglob("FILEd")), {P(BASE, "dirC/dirD/fileD")})
def main():
    """Run every test case defined in this module via unittest's CLI."""
    unittest.main(__name__)

if __name__ == "__main__":
    main()
| apache-2.0 |
mitocw/edx-platform | common/djangoapps/course_action_state/tests/test_rerun_manager.py | 4 | 3853 | """
Tests specific to the CourseRerunState Model and Manager.
"""
from django.test import TestCase
from opaque_keys.edx.locations import CourseLocator
from six import text_type
from course_action_state.managers import CourseRerunUIStateManager
from course_action_state.models import CourseRerunState
from student.tests.factories import UserFactory
class TestCourseRerunStateManager(TestCase):
    """
    Test class for testing the CourseRerunUIStateManager.
    """
    def setUp(self):
        """Prepare source/destination course keys and the expected state dict."""
        super(TestCourseRerunStateManager, self).setUp()
        self.source_course_key = CourseLocator("source_org", "source_course_num", "source_run")
        self.course_key = CourseLocator("test_org", "test_course_num", "test_run")
        self.created_user = UserFactory()
        self.display_name = "destination course name"
        # Baseline field values expected on a freshly initiated rerun state;
        # individual tests update this dict before verifying.
        self.expected_rerun_state = {
            'created_user': self.created_user,
            'updated_user': self.created_user,
            'course_key': self.course_key,
            'source_course_key': self.source_course_key,
            "display_name": self.display_name,
            'action': CourseRerunUIStateManager.ACTION,
            'should_display': True,
            'message': "",
        }

    def verify_rerun_state(self):
        """
        Gets the rerun state object for self.course_key and verifies that the values
        of its fields equal self.expected_rerun_state.
        """
        found_rerun = CourseRerunState.objects.find_first(course_key=self.course_key)
        found_rerun_state = {key: getattr(found_rerun, key) for key in self.expected_rerun_state}
        self.assertDictEqual(found_rerun_state, self.expected_rerun_state)
        return found_rerun

    def dismiss_ui_and_verify(self, rerun):
        """
        Updates the should_display field of the rerun state object for self.course_key
        and verifies its new state.
        """
        user_who_dismisses_ui = UserFactory()
        CourseRerunState.objects.update_should_display(
            entry_id=rerun.id,
            user=user_who_dismisses_ui,
            should_display=False,
        )
        self.expected_rerun_state.update({
            'updated_user': user_who_dismisses_ui,
            'should_display': False,
        })
        self.verify_rerun_state()

    def initiate_rerun(self):
        """Create the rerun state entry for the source/destination pair."""
        CourseRerunState.objects.initiated(
            source_course_key=self.source_course_key,
            destination_course_key=self.course_key,
            user=self.created_user,
            display_name=self.display_name,
        )

    def test_rerun_initiated(self):
        """A newly initiated rerun is in the IN_PROGRESS state."""
        self.initiate_rerun()
        self.expected_rerun_state.update(
            {'state': CourseRerunUIStateManager.State.IN_PROGRESS}
        )
        self.verify_rerun_state()

    def test_rerun_succeeded(self):
        """succeeded() transitions the state and the UI can be dismissed."""
        # initiate
        self.initiate_rerun()
        # set state to succeed
        CourseRerunState.objects.succeeded(course_key=self.course_key)
        self.expected_rerun_state.update({
            'state': CourseRerunUIStateManager.State.SUCCEEDED,
        })
        rerun = self.verify_rerun_state()
        # dismiss ui and verify
        self.dismiss_ui_and_verify(rerun)

    def test_rerun_failed(self):
        """failed() records the FAILED state and the raising exception's text."""
        # initiate
        self.initiate_rerun()
        # set state to fail
        exception = Exception("failure in rerunning")
        try:
            raise exception
        except Exception:
            # Narrowed from a bare ``except:`` — we only ever raise the
            # Exception built above, and a bare clause would also swallow
            # KeyboardInterrupt/SystemExit.
            CourseRerunState.objects.failed(course_key=self.course_key)
        self.expected_rerun_state.update(
            {'state': CourseRerunUIStateManager.State.FAILED}
        )
        # The failure message is checked separately below (it contains the
        # exception text rather than equaling the baseline empty string).
        self.expected_rerun_state.pop('message')
        rerun = self.verify_rerun_state()
        self.assertIn(text_type(exception), rerun.message)
        # dismiss ui and verify
        self.dismiss_ui_and_verify(rerun)
| agpl-3.0 |
tonnrueter/pymca_devel | PyMca/Median2DBrowser.py | 1 | 7064 | #/*##########################################################################
# Copyright (C) 2004-2012 European Synchrotron Radiation Facility
#
# This file is part of the PyMca X-ray Fluorescence Toolkit developed at
# the ESRF by the Software group.
#
# This toolkit is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# PyMca is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# PyMca; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# PyMca follows the dual licensing model of Riverbank's PyQt and cannot be
# used as a free plugin for a non-free program.
#
# Please contact the ESRF industrial unit (industry@esrf.fr) if this license
# is a problem for you.
#############################################################################*/
__author__ = "V.A. Sole - ESRF Software Group"
import numpy
try:
from PyMca import StackBrowser
from PyMca.PyMcaSciPy.signal import median
except ImportError:
print("Median2DBrowser importing directly!")
import StackBrowser
from PyMcaSciPy.signal import median
medfilt2d = median.medfilt2d
qt = StackBrowser.qt
DEBUG = 0
class MedianParameters(qt.QWidget):
    """Horizontal strip of widgets exposing the median-filter settings.

    Publishes ``widthSpin`` (odd kernel width, 1-99) and, when
    *use_conditional* is true, ``conditionalSpin`` (0/1 flag) so the
    owning browser can connect to their valueChanged signals.
    """

    def __init__(self, parent=None, use_conditional=False):
        qt.QWidget.__init__(self, parent)
        layout = qt.QHBoxLayout(self)
        layout.setMargin(0)
        layout.setSpacing(2)
        self.mainLayout = layout

        self.label = qt.QLabel(self)
        self.label.setText("Median filter width: ")

        width = qt.QSpinBox(self)
        width.setMinimum(1)
        width.setMaximum(99)
        width.setValue(1)
        # Step by two so the spin box walks through odd widths only.
        width.setSingleStep(2)
        self.widthSpin = width

        if use_conditional:
            self.conditionalLabel = qt.QLabel(self)
            self.conditionalLabel.setText("Conditional:")
            flag = qt.QSpinBox(self)
            flag.setMinimum(0)
            flag.setMaximum(1)
            flag.setValue(0)
            self.conditionalSpin = flag

        layout.addWidget(self.label)
        layout.addWidget(self.widthSpin)
        if use_conditional:
            layout.addWidget(self.conditionalLabel)
            layout.addWidget(self.conditionalSpin)
class Median2DBrowser(StackBrowser.StackBrowser):
    """StackBrowser variant that displays each image through a 2D median filter.

    A MedianParameters widget is appended below the browser; changing the
    kernel width or the conditional flag re-filters and re-renders the
    currently selected image.
    """
    def __init__(self, *var, **kw):
        StackBrowser.StackBrowser.__init__(self, *var, **kw)
        self.setWindowTitle("Image Browser with Median Filter")
        # Filter state consumed by setImageData() and _buildTitle().
        self._medianParameters = {'use': True,
                                  'row_width': 5,
                                  'column_width': 5,
                                  'conditional': 0}
        self._medianParametersWidget = MedianParameters(self,
                                                        use_conditional=1)
        self._medianParametersWidget.widthSpin.setValue(5)
        self.layout().addWidget(self._medianParametersWidget)
        self.connect(self._medianParametersWidget.widthSpin,
                     qt.SIGNAL('valueChanged(int)'),
                     self.setKernelWidth)
        self.connect(self._medianParametersWidget.conditionalSpin,
                     qt.SIGNAL('valueChanged(int)'),
                     self.setConditionalFlag)

    def setKernelWidth(self, value):
        """Set the (odd) median kernel width and redraw the current image.

        Even values are rejected with an error dialog because a median
        window needs a well-defined center pixel.
        """
        kernelSize = numpy.asarray(value)
        if not (int(value) % 2):
            msg = qt.QMessageBox(self)
            msg.setIcon(qt.QMessageBox.Critical)
            msg.setWindowTitle("Median filter error")
            # Fixed wording (was the garbled "One odd values accepted").
            msg.setText("Only odd values are accepted")
            msg.exec_()
            return
        if len(kernelSize.shape) == 0:
            # Scalar input: apply the same width to rows and columns.
            kernelSize = [kernelSize.item()] * 2
        self._medianParameters['row_width'] = kernelSize[0]
        self._medianParameters['column_width'] = kernelSize[1]
        self._medianParametersWidget.widthSpin.setValue(int(kernelSize[0]))
        current = self.slider.value()
        self.showImage(current, moveslider=False)

    def setConditionalFlag(self, value):
        """Toggle the conditional median variant (0/1) and redraw."""
        self._medianParameters['conditional'] = int(value)
        self._medianParametersWidget.conditionalSpin.setValue(int(value))
        current = self.slider.value()
        self.showImage(current, moveslider=False)

    def _buildTitle(self, legend, index):
        """Return the plot title, prefixed with the filter description."""
        a = self._medianParameters['row_width']
        b = self._medianParameters['column_width']
        title = StackBrowser.StackBrowser._buildTitle(self, legend, index)
        if max(a, b) > 1:
            if self._medianParameters['conditional'] == 0:
                return "Median Filter (%d,%d) of %s" % (a, b, title)
            else:
                return "Conditional Median Filter (%d,%d) of %s" % (a, b, title)
        else:
            # 1x1 kernel is a no-op: keep the plain title.
            return title

    def showImage(self, index=0, moveslider=True):
        """Display image *index* of the current stack through the filter."""
        if not len(self.dataObjectsList):
            return
        # NOTE(review): dataObjectsList/dataObjectsDict and the background
        # attributes are inherited from StackBrowser — confirm semantics there.
        legend = self.dataObjectsList[0]
        dataObject = self.dataObjectsDict[legend]
        data = self._getImageDataFromSingleIndex(index)
        if self._backgroundSubtraction and (self._backgroundImage is not None):
            self.setImageData(data - self._backgroundImage)
        else:
            self.setImageData(data, clearmask=False)
        txt = self._buildTitle(legend, index)
        self.graphWidget.graph.setTitle(txt)
        self.name.setText(txt)
        if moveslider:
            self.slider.setValue(index)

    def setImageData(self, data, **kw):
        """Apply the configured 2D median filter, then delegate to the base."""
        if self._medianParameters['use']:
            if max(self._medianParameters['row_width'],
                   self._medianParameters['column_width']) > 1:
                conditional = self._medianParameters['conditional']
                data = medfilt2d(data, [self._medianParameters['row_width'],
                                        self._medianParameters['column_width']],
                                 conditional=conditional)
        # this method is in fact of MaskImageWidget
        StackBrowser.StackBrowser.setImageData(self, data, **kw)
if __name__ == "__main__":
    # Create a dummy stack: every channel is a constant image, with every
    # tenth channel boosted by a factor of ten.
    nrows = 100
    ncols = 200
    nchannels = 1024
    # ``numpy.float`` was a deprecated alias of the builtin ``float``
    # (removed in NumPy 1.24); using ``float`` is behaviorally identical.
    a = numpy.ones((nrows, ncols), float)
    stackData = numpy.zeros((nrows, ncols, nchannels), float)
    for i in range(nchannels):
        if i % 10:
            stackData[:, :, i] = a * i
        else:
            stackData[:, :, i] = 10 * a * i
    app = qt.QApplication([])
    qt.QObject.connect(app, qt.SIGNAL("lastWindowClosed()"),
                       app, qt.SLOT("quit()"))
    w = Median2DBrowser()
    w.setStackDataObject(stackData, index=0)
    w.show()
    app.exec_()
| gpl-2.0 |
opendot/dotManager | main.py | 1 | 23979 | import sys
import random
from PyQt4 import QtGui, QtCore
from RFID import *
from DrupalConnector import *
class MainWindow(QtGui.QMainWindow):
    """Main window offering the five dot-manager operations as buttons.

    Each button opens the corresponding modal dialog; the card reader
    port is selected once at startup.
    """
    def __init__(self):
        super(MainWindow, self).__init__()
        self.initUI()
        self.checkCardReader()

    def initUI(self):
        """Build the vertical column of action buttons and center the window."""
        cWidget = QtGui.QWidget(self)
        vBox = QtGui.QVBoxLayout()
        vBox.setSpacing(2)
        self.createBtn = QtGui.QPushButton('Create account', cWidget)
        self.connect(self.createBtn, QtCore.SIGNAL('clicked()'), self.createBtnPressed);
        self.activateBtn = QtGui.QPushButton('Activate account', cWidget)
        self.connect(self.activateBtn, QtCore.SIGNAL('clicked()'), self.activateBtnPressed);
        self.infoBtn = QtGui.QPushButton('Get information', cWidget)
        self.connect(self.infoBtn, QtCore.SIGNAL('clicked()'), self.infoBtnPressed);
        self.increaseBtn = QtGui.QPushButton('Increase dots', cWidget)
        self.connect(self.increaseBtn, QtCore.SIGNAL('clicked()'), self.increaseBtnPressed);
        self.decreaseBtn = QtGui.QPushButton('Decrease dots', cWidget)
        self.connect(self.decreaseBtn, QtCore.SIGNAL('clicked()'), self.decreaseBtnPressed);
        vBox.addWidget(self.createBtn)
        vBox.addWidget(self.activateBtn)
        vBox.addWidget(self.infoBtn)
        vBox.addWidget(self.increaseBtn)
        vBox.addWidget(self.decreaseBtn)
        cWidget.setLayout(vBox)
        self.setCentralWidget(cWidget)
        self.resize(200, 100)
        # center() is a module-level helper defined elsewhere in this file.
        center(self)
        self.setWindowTitle('opendot manager')

    def checkCardReader(self):
        """Ask the operator for the serial port and connect the RFID reader.

        Exits the application if no port is chosen or the connection fails.
        """
        ports = list(rfid_reader.get_ports())
        text, ok = QtGui.QInputDialog.getItem(self, "Select port", "Please choose the port connected to the card reader:", ports, 0, False)
        if not ok or not text:
            QtGui.QMessageBox.critical(self, 'opendot manager', "Please connect the card reader")
            sys.exit()
        try:
            rfid_reader.connect(str(text))
        except serial.SerialException, e:
            QtGui.QMessageBox.critical(self, 'opendot manager', str(e))
            sys.exit()

    def infoBtnPressed(self):
        """Open the user-information dialog."""
        print ">>> Get user info"
        infoDialog = InfoDialog(self)
        infoDialog.deleteLater()

    def createBtnPressed(self):
        """Open the account-creation dialog."""
        print ">>> Create user"
        createDiag = CreateDialog(self)
        createDiag.exec_()
        createDiag.deleteLater()

    def activateBtnPressed(self):
        """Open the account-activation dialog."""
        print ">>> Activate user"
        activateDiag = ActivateDialog(self)
        activateDiag.exec_()
        activateDiag.deleteLater()

    def increaseBtnPressed(self):
        """Open the dot-increase (top-up) dialog."""
        print ">>> Increase dots"
        increaseDiag = IncreaseDialog(self)
        increaseDiag.deleteLater()

    def decreaseBtnPressed(self):
        """Open the dot-decrease dialog."""
        print ">>> Decrease dots"
        decreaseDiag = DecreaseDialog(self)
        decreaseDiag.deleteLater()
####
##
## LOGIN WINDOW
##
####
class LoginWindow(QtGui.QWidget):
    """Login form shown before the main window.

    Verifies the Drupal server is reachable at startup, then shows the
    main window once ``connector.login`` succeeds.
    """
    def __init__(self, main):
        super(LoginWindow, self).__init__()
        # Reference to the (hidden) MainWindow, shown after a valid login.
        self.MainWindow = main
        self.initUI()
        self.checkServer()

    def initUI(self):
        """Build the username/password grid and the login button."""
        grid = QtGui.QGridLayout()
        grid.setSpacing(10)
        self.usernameLabel = QtGui.QLabel("Username")
        self.usernameLine = QtGui.QLineEdit()
        self.passwordLabel = QtGui.QLabel("Password")
        self.passwordLine = QtGui.QLineEdit()
        self.passwordLine.setEchoMode(QtGui.QLineEdit.Password)
        self.loginBtn = QtGui.QPushButton('Login')
        self.connect(self.loginBtn, QtCore.SIGNAL('clicked()'), self.doLogin);
        grid.addWidget(self.usernameLabel,0,0)
        grid.addWidget(self.usernameLine,0,1)
        grid.addWidget(self.passwordLabel,1,0)
        grid.addWidget(self.passwordLine,1,1)
        grid.addWidget(self.loginBtn,2,1)
        self.setLayout(grid)
        self.setGeometry(500, 300, 200, 100)
        center(self)
        self.setWindowTitle('opendot manager - login')

    def checkServer(self):
        """Exit the application if the backend server cannot be reached."""
        try:
            result = connector.connect()
        except Exception, e:
            QtGui.QMessageBox.critical(self, 'Error', str(e))
            sys.exit()

    def doLogin(self):
        """Attempt a login with the typed credentials; open main on success."""
        username = str(self.usernameLine.text())
        password = str(self.passwordLine.text())
        if (len(username) == 0 or len(password) == 0):
            QtGui.QMessageBox.critical(self, 'Error', 'Wrong credentials')
        else:
            # Disable the button while the request is in flight to avoid
            # double submissions.
            self.loginBtn.setDisabled(True)
            waitingCursor(self)
            try:
                result = connector.login(username, password)
            except Exception, e:
                QtGui.QMessageBox.critical(self, 'Error', str(e))
                return
            finally:
                self.loginBtn.setDisabled(False)
                normalCursor(self)
            self.MainWindow.show()
            self.close()

    def keyPressEvent(self, e):
        """Submit the form when Return is pressed."""
        if e.key() == QtCore.Qt.Key_Return:
            self.doLogin()
####
##
## CREATE DIALOG
##
####
class CreateDialog(QtGui.QDialog):
    """Dialog that reads an RFID card and creates a new Drupal account.

    The card id is read immediately on construction; the operator then
    fills in username, name, surname and email before confirming.
    """
    def __init__(self, parent=None):
        super(CreateDialog, self).__init__()
        self.initUI()
        self.readCard()

    def initUI(self):
        """Build the account form (card id label + editable fields)."""
        grid = QtGui.QGridLayout()
        grid.setSpacing(10)
        self.cardIdLabel = QtGui.QLabel('Card number:');
        self.cardId = QtGui.QLabel();
        self.usernameLabel = QtGui.QLabel('Username:');
        self.username = QtGui.QLineEdit();
        self.nameLabel = QtGui.QLabel('Name:');
        self.name = QtGui.QLineEdit();
        self.surnameLabel = QtGui.QLabel('Surname:');
        self.surname = QtGui.QLineEdit();
        self.emailLabel = QtGui.QLabel('Email:');
        self.email = QtGui.QLineEdit();
        self.confirmBtn = QtGui.QPushButton('Confirm')
        self.connect(self.confirmBtn, QtCore.SIGNAL('clicked()'), self.confirm)
        grid.addWidget(self.cardIdLabel,0,0)
        grid.addWidget(self.cardId,0,1)
        grid.addWidget(self.usernameLabel,1,0)
        grid.addWidget(self.username,1,1)
        grid.addWidget(self.nameLabel,2,0)
        grid.addWidget(self.name,2,1)
        grid.addWidget(self.surnameLabel,3,0)
        grid.addWidget(self.surname,3,1)
        grid.addWidget(self.emailLabel,4,0)
        grid.addWidget(self.email,4,1)
        grid.addWidget(self.confirmBtn,5,1)
        self.setLayout(grid)
        self.setGeometry(500, 300, 200, 100)
        center(self)
        self.setWindowTitle('opendot manager - activate user')

    def readCard(self):
        """Block until a card is read; show its id and copy it to the clipboard."""
        QtGui.QMessageBox.question(self, 'Reading', 'Pass the card over the card reader')
        cid = rfid_reader.read_from_serial()
        cb = QtGui.QApplication.clipboard()
        cb.clear(mode=cb.Clipboard)
        cb.setText(cid, mode=cb.Clipboard)
        self.cardId.setText(cid)

    def confirm(self):
        """Create the account from the form fields and bind it to the card."""
        cid = str(self.cardId.text())
        user = str(self.username.text())
        name = str(self.name.text())
        surname = str(self.surname.text())
        email = str(self.email.text())
        if (len(user) > 0 and len(name) > 0 and len(surname) > 0 and len(email) > 0):
            # Drupal user-service payload; field_* entries use the
            # 'und' (undefined language) convention.
            u = {}
            u['name'] = user
            u['mail'] = email
            u['field_nome'] = {'und': [{'value': name}]}
            u['field_cognome'] = {'und': [{'value': surname}]}
            u['notify'] = '1'
            u['status'] = '1'
            # NOTE(review): random is not cryptographically secure; presumably
            # acceptable only because notify=1 emails the user a reset link —
            # confirm, otherwise switch to a secure generator.
            u['pass'] = ''.join( [chr(random.randint(97,122)) for i in xrange(0,10)] )
            waitingCursor(self)
            try:
                connector.create(u)
                connector.activate(user, cid, False)
            except Exception, e:
                QtGui.QMessageBox.critical(self, 'Error', str(e))
                return
            finally:
                normalCursor(self)
            QtGui.QMessageBox.information(self, 'Success', 'New user \"{}\" created!'.format(user))
            self.close()
        else:
            QtGui.QMessageBox.critical(self, 'Error', 'Missing fields!')
####
##
## INFORMATION DIALOG
##
####
class InfoDialog(QtGui.QDialog):
    """Read a card and display the matching account's details (read-only)."""
    def __init__(self, parent=None):
        super(InfoDialog, self).__init__()
        self.initUI()
        self.readCard()

    def initUI(self):
        """Build the read-only grid of account fields."""
        grid = QtGui.QGridLayout()
        grid.setSpacing(10)
        self.cardIdLabel = QtGui.QLabel('Card number:');
        self.cardId = QtGui.QLabel();
        self.usernameLabel = QtGui.QLabel('Username:');
        self.username = QtGui.QLabel();
        self.nameLabel = QtGui.QLabel('Name:');
        self.name = QtGui.QLabel();
        self.surnameLabel = QtGui.QLabel('Surname:');
        self.surname = QtGui.QLabel();
        self.dotsLabel = QtGui.QLabel('Dots:');
        self.dots = QtGui.QLabel();
        grid.addWidget(self.cardIdLabel,0,0)
        grid.addWidget(self.cardId,0,1)
        grid.addWidget(self.usernameLabel,1,0)
        grid.addWidget(self.username,1,1)
        grid.addWidget(self.nameLabel,2,0)
        grid.addWidget(self.name,2,1)
        grid.addWidget(self.surnameLabel,3,0)
        grid.addWidget(self.surname,3,1)
        grid.addWidget(self.dotsLabel,4,0)
        grid.addWidget(self.dots,4,1)
        self.setLayout(grid)
        self.setGeometry(500, 300, 200, 100)
        center(self)
        self.setWindowTitle('opendot manager - activate user')

    def readCard(self):
        """Read a card, fetch the user from the server and show the dialog."""
        QtGui.QMessageBox.question(self, 'Reading', 'Pass the card over the card reader')
        cid = rfid_reader.read_from_serial()
        cb = QtGui.QApplication.clipboard()
        cb.clear(mode=cb.Clipboard)
        cb.setText(cid, mode=cb.Clipboard)
        self.cardId.setText(cid)
        # NOTE(review): ``main`` here is a module-level window instance defined
        # elsewhere in this file — confirm; the cursor is set on it, not on self.
        waitingCursor(main)
        try:
            user = connector.get_user(cid)
        except Exception, e:
            QtGui.QMessageBox.critical(self, 'Error', str(e))
            self.close()
            return
        finally:
            normalCursor(main)
        self.currUser = User(user)
        self.username.setText(self.currUser.getUsername())
        self.name.setText(self.currUser.getName())
        self.surname.setText(self.currUser.getSurname())
        self.dots.setText(str(self.currUser.getDots()))
        self.exec_()
####
##
## ACTIVATION DIALOG
##
####
class ActivateDialog(QtGui.QDialog):
    """Read a card and bind it to an existing account by username."""
    def __init__(self, parent=None):
        super(ActivateDialog, self).__init__()
        self.initUI()
        self.readCard()

    def initUI(self):
        """Build the card id / username form with an Activate button."""
        grid = QtGui.QGridLayout()
        grid.setSpacing(10)
        self.cardIdLabel = QtGui.QLabel('Card number:');
        self.cardId = QtGui.QLabel();
        self.usernameLabel = QtGui.QLabel('Username:');
        self.usernameLine = QtGui.QLineEdit();
        self.activateBtn = QtGui.QPushButton('Activate')
        self.connect(self.activateBtn, QtCore.SIGNAL('clicked()'), self.doActivation)
        grid.addWidget(self.cardIdLabel,0,0)
        grid.addWidget(self.cardId,0,1)
        grid.addWidget(self.usernameLabel,1,0)
        grid.addWidget(self.usernameLine,1,1)
        grid.addWidget(self.activateBtn,2,1)
        self.setLayout(grid)
        self.resize(200, 100)
        center(self)
        self.setWindowTitle('opendot manager - activate user')

    def readCard(self):
        """Block until a card is read; show its id and copy it to the clipboard."""
        QtGui.QMessageBox.question(self, 'Reading', 'Pass the card over the card reader')
        cId = rfid_reader.read_from_serial()
        self.cardId.setText(cId)
        cb = QtGui.QApplication.clipboard()
        cb.clear(mode=cb.Clipboard )
        cb.setText(cId, mode=cb.Clipboard)

    def doActivation(self):
        """Bind the read card to the typed username on the server."""
        user = str(self.usernameLine.text())
        cid = str(self.cardId.text())
        if (len(user) > 0):
            waitingCursor(self)
            try:
                result = connector.activate(user, cid, True)
            except Exception, e:
                QtGui.QMessageBox.critical(self, 'Error', str(e))
                return
            finally:
                normalCursor(self)
            QtGui.QMessageBox.information(self, 'Success', 'User "{}" activated!'.format(user))
            self.close()
        else:
            QtGui.QMessageBox.critical(self, 'Error', 'Wrong username')
####
##
## INCREASE DOTS DIALOG
##
####
class IncreaseDialog(QtGui.QDialog):
def __init__(self, parent=None):
super(IncreaseDialog, self).__init__()
self.initUI()
self.readCard()
def loadButtons(self):
self.font = QtGui.QFont()
self.font.setFamily("Open Sans")
self.font.setBold(True)
self.smallBtn = QtGui.QPushButton()
self.font.setPointSize(11)
self.smallBtn.setFont(self.font)
self.smallBtn.setText("SMALL")
self.smallBtn.resize(100, 50)
self.connect(self.smallBtn, QtCore.SIGNAL('clicked()'), lambda plan="small": self.doIncrease(plan))
self.mediumBtn = QtGui.QPushButton()
self.font.setPointSize(13)
self.mediumBtn.setFont(self.font)
self.mediumBtn.setText("MEDIUM")
self.mediumBtn.resize(100, 50)
self.connect(self.mediumBtn, QtCore.SIGNAL('clicked()'), lambda plan="medium": self.doIncrease(plan))
self.largeBtn = QtGui.QPushButton()
self.font.setPointSize(15)
self.largeBtn.setFont(self.font)
self.largeBtn.setText("LARGE")
self.largeBtn.resize(100, 50)
self.connect(self.largeBtn, QtCore.SIGNAL('clicked()'), lambda plan="large": self.doIncrease(plan))
self.manualBtn = QtGui.QPushButton()
self.manualBtn.setText("Pay Per Use")
self.manualBtn.resize(100, 50)
self.connect(self.manualBtn, QtCore.SIGNAL('clicked()'), lambda plan="ppu": self.doIncrease(plan))
self.confirmBtn = QtGui.QPushButton('Confirm')
self.connect(self.confirmBtn, QtCore.SIGNAL('clicked()'), self.confirm)
def initUI(self):
self.loadButtons()
grid = QtGui.QGridLayout()
grid.setSpacing(10)
self.userLabel = QtGui.QLabel("Username")
self.username = QtGui.QLabel()
self.currBalanceLabel = QtGui.QLabel("Current Balance")
self.currBalance = QtGui.QLabel()
self.newBalanceLabel = QtGui.QLabel("New Balance")
self.newBalance = QtGui.QLabel()
# labels
grid.addWidget(self.userLabel,0,0)
grid.addWidget(self.username,0,1)
grid.addWidget(self.currBalanceLabel,1,0)
grid.addWidget(self.currBalance,1,1)
grid.addWidget(self.newBalanceLabel,2,0)
grid.addWidget(self.newBalance,2,1)
# buttons
grid.addWidget(self.smallBtn,3,0)
grid.addWidget(self.mediumBtn,3,1)
grid.addWidget(self.largeBtn,3,2)
grid.addWidget(self.manualBtn,3,3)
grid.addWidget(self.confirmBtn,4,3)
self.setLayout(grid)
#self.resize(500, 460)
center(self)
self.setWindowTitle('opendot manager - increase dots')
def readCard(self):
QtGui.QMessageBox.question(self, 'Reading', 'Pass the card over the card reader')
cId = rfid_reader.read_from_serial()
cb = QtGui.QApplication.clipboard()
cb.clear(mode=cb.Clipboard )
cb.setText(cId, mode=cb.Clipboard)
waitingCursor(main)
try:
user = connector.get_user(cId)
except Exception, e:
QtGui.QMessageBox.critical(self, 'Error', str(e))
self.close()
return
finally:
normalCursor(main)
self.currUser = User(user)
self.balance = self.currUser.getDots()
self.username.setText(self.currUser.getUsername())
self.currBalance.setText(str(self.currUser.getDots()))
self.newBalance.setText(str(self.balance))
self.exec_()
def doIncrease(self, plan):
    """Add dots to the pending balance for the chosen top-up plan.

    plan -- one of "small" (50), "medium" (100), "large" (250) or
    "ppu" (pay-per-use: the operator types the purchased amount).
    Updates the "New Balance" label; nothing is persisted until confirm().
    """
    if (plan == "small"):
        self.balance += 50
    elif (plan == "medium"):
        self.balance += 100
    elif (plan == "large"):
        self.balance += 250
    elif (plan == "ppu"):
        dots, ok = QtGui.QInputDialog.getText(self, 'Purchased dots', 'Number of dots purchased:')
        if (ok and len(dots) > 0):
            # BUG FIX: float() on free-form operator input used to raise an
            # unhandled ValueError on non-numeric text. Guard it the same way
            # DecreaseDialog.doDecrease guards the "materials" branch.
            try:
                self.balance += float(dots)
            except ValueError:
                QtGui.QMessageBox.critical(self, 'Error', "Valore non corretto")
                return
    self.newBalance.setText(str(self.balance))
def confirm(self):
if (self.balance == float(self.currBalance.text())):
self.close()
else:
reply = QtGui.QMessageBox.question(self, 'Confirmation', "New balance will be {} dots, confirm?".format(self.balance), QtGui.QMessageBox.Yes|QtGui.QMessageBox.No)
if (reply == QtGui.QMessageBox.Yes):
self.currUser.setDots(self.balance)
waitingCursor(self)
print self.currUser.getDots()
try:
connector.update(self.currUser)
except Exception, e:
QtGui.QMessageBox.critical(self, 'Error', str(e))
return
finally:
normalCursor(self)
QtGui.QMessageBox.information(self, 'Success', 'New balance confirmed!')
self.close()
####
##
## DECREASE DOTS DIALOG
##
####
class DecreaseDialog(QtGui.QDialog):
    """Modal dialog that charges a user's card for machine usage/materials.

    Flow: __init__ builds the UI and immediately reads a card; the operator
    presses one or more machine buttons (each subtracts dots from the
    pending balance) and then Confirm to persist the new balance.
    """
    def __init__(self, parent=None):
        super(DecreaseDialog, self).__init__()
        # Icons live next to the executable on Windows, in ./icons elsewhere.
        if sys.platform.startswith('win'):
            self.icons_dir = os.path.dirname(os.path.abspath(sys.argv[0])) + "\icons"
        elif sys.platform.startswith('linux') or sys.platform.startswith('cygwin'):
            self.icons_dir = "icons"
        self.initUI()
        self.readCard()
    def loadIcons(self):
        # One icon per machine button, files 01.png .. 08.png in icons_dir.
        self.icon = QtGui.QIcon()
        self.icon1 = QtGui.QIcon()
        self.icon2 = QtGui.QIcon()
        self.icon3 = QtGui.QIcon()
        self.icon4 = QtGui.QIcon()
        self.icon5 = QtGui.QIcon()
        self.icon6 = QtGui.QIcon()
        self.icon7 = QtGui.QIcon()
        self.icon.addFile(os.path.join(self.icons_dir, "01.png"))
        self.icon1.addFile(os.path.join(self.icons_dir, "02.png"))
        self.icon2.addFile(os.path.join(self.icons_dir, "03.png"))
        self.icon3.addFile(os.path.join(self.icons_dir, "04.png"))
        self.icon4.addFile(os.path.join(self.icons_dir, "05.png"))
        self.icon5.addFile(os.path.join(self.icons_dir, "06.png"))
        self.icon6.addFile(os.path.join(self.icons_dir, "07.png"))
        self.icon7.addFile(os.path.join(self.icons_dir, "08.png"))
    def loadButtons(self):
        # Each machine button routes to doDecrease() with a machine key;
        # the key (default-argument lambda) selects the pricing rule.
        self.printer1Btn = QtGui.QPushButton()
        self.printer1Btn.setIcon(self.icon)
        self.printer1Btn.setIconSize(QtCore.QSize(100, 100))
        self.printer1Btn.setText("3D Printer\nFDM")
        self.connect(self.printer1Btn, QtCore.SIGNAL('clicked()'), lambda machine="printer1": self.doDecrease(machine))
        self.printer2Btn = QtGui.QPushButton()
        self.printer2Btn.setIcon(self.icon1)
        self.printer2Btn.setIconSize(QtCore.QSize(100, 100))
        self.printer2Btn.setText("3D Printer\nSTL")
        self.connect(self.printer2Btn, QtCore.SIGNAL('clicked()'), lambda machine="printer2": self.doDecrease(machine))
        self.printer3Btn = QtGui.QPushButton()
        self.printer3Btn.setIcon(self.icon2)
        self.printer3Btn.setIconSize(QtCore.QSize(100, 100))
        self.printer3Btn.setText("3D Printer Hi-Res\nSTL")
        self.connect(self.printer3Btn, QtCore.SIGNAL('clicked()'), lambda machine="printer3": self.doDecrease(machine))
        self.cncBtn = QtGui.QPushButton()
        self.cncBtn.setIcon(self.icon3)
        self.cncBtn.setIconSize(QtCore.QSize(100, 100))
        self.cncBtn.setText("Fresatrice CNC")
        self.connect(self.cncBtn, QtCore.SIGNAL('clicked()'), lambda machine="cnc": self.doDecrease(machine))
        self.festBtn = QtGui.QPushButton()
        self.festBtn.setIcon(self.icon4)
        self.festBtn.setIconSize(QtCore.QSize(100, 100))
        self.festBtn.setText("Sega ad\naffondamento")
        self.connect(self.festBtn, QtCore.SIGNAL('clicked()'), lambda machine="festool": self.doDecrease(machine))
        self.cutterBtn = QtGui.QPushButton()
        self.cutterBtn.setIcon(self.icon5)
        self.cutterBtn.setIconSize(QtCore.QSize(100, 100))
        self.cutterBtn.setText("Vinyl Cutter")
        self.connect(self.cutterBtn, QtCore.SIGNAL('clicked()'), lambda machine="cutter": self.doDecrease(machine))
        self.laserBtn = QtGui.QPushButton()
        self.laserBtn.setIcon(self.icon6)
        self.laserBtn.setIconSize(QtCore.QSize(100, 100))
        self.laserBtn.setText("Laser Cutter Pro")
        self.connect(self.laserBtn, QtCore.SIGNAL('clicked()'), lambda machine="laser": self.doDecrease(machine))
        self.materialsBtn = QtGui.QPushButton()
        self.materialsBtn.setIcon(self.icon7)
        self.materialsBtn.setIconSize(QtCore.QSize(100, 100))
        self.materialsBtn.setText("Materials")
        self.connect(self.materialsBtn, QtCore.SIGNAL('clicked()'), lambda machine="materials": self.doDecrease(machine))
        self.confirmBtn = QtGui.QPushButton('Confirm')
        self.connect(self.confirmBtn, QtCore.SIGNAL('clicked()'), self.confirm)
    def initUI(self):
        """Build the decrease-dots dialog: balance labels plus a 2-column
        grid of machine buttons and a confirm button."""
        self.loadIcons()
        self.loadButtons()
        grid = QtGui.QGridLayout()
        grid.setSpacing(10)
        self.userLabel = QtGui.QLabel("Username")
        self.username = QtGui.QLabel()
        self.currBalanceLabel = QtGui.QLabel("Current Balance")
        self.currBalance = QtGui.QLabel()
        self.newBalanceLabel = QtGui.QLabel("New Balance")
        self.newBalance = QtGui.QLabel()
        # labels
        grid.addWidget(self.userLabel,0,0)
        grid.addWidget(self.username,0,1)
        grid.addWidget(self.currBalanceLabel,1,0)
        grid.addWidget(self.currBalance,1,1)
        grid.addWidget(self.newBalanceLabel,2,0)
        grid.addWidget(self.newBalance,2,1)
        # buttons
        grid.addWidget(self.printer1Btn,3,0)
        grid.addWidget(self.laserBtn,3,1)
        grid.addWidget(self.cutterBtn,4,0)
        grid.addWidget(self.cncBtn,4,1)
        grid.addWidget(self.printer2Btn,5,0)
        grid.addWidget(self.printer3Btn,5,1)
        grid.addWidget(self.festBtn,6,0)
        grid.addWidget(self.materialsBtn,6,1)
        grid.addWidget(self.confirmBtn,7,1)
        self.setLayout(grid)
        #self.resize(500, 460)
        center(self)
        self.setWindowTitle('opendot manager - Decrease dots')
    def readCard(self):
        """Prompt for a card swipe, load the user, populate the labels and
        show the dialog modally. Mirrors IncreaseDialog.readCard."""
        QtGui.QMessageBox.question(self, 'Reading', 'Pass the card over the card reader')
        cId = rfid_reader.read_from_serial()
        # Copy the card id to the system clipboard (handy for manual lookups).
        cb = QtGui.QApplication.clipboard()
        cb.clear(mode=cb.Clipboard )
        cb.setText(cId, mode=cb.Clipboard)
        waitingCursor(main)  # `main` is the module-global MainWindow
        try:
            user = connector.get_user(cId)
        except Exception, e:
            QtGui.QMessageBox.critical(self, 'Error', str(e))
            self.close()
            return
        finally:
            normalCursor(main)
        self.currUser = User(user)
        self.balance = self.currUser.getDots()
        self.username.setText(self.currUser.getUsername())
        self.currBalance.setText(str(self.currUser.getDots()))
        self.newBalance.setText(str(self.balance))
        self.exec_()
    def confirm(self):
        """Persist the new balance; refuse to commit a negative balance."""
        if (self.balance == float(self.currBalance.text())):
            self.close()
        elif (self.balance < 0):
            QtGui.QMessageBox.critical(self, 'opendot manager', "Insufficient dots, recharge needed!")
            self.close()
        else:
            reply = QtGui.QMessageBox.question(self, 'Confirmation', "New balance will be {} dots, confirm?".format(self.balance), QtGui.QMessageBox.Yes|QtGui.QMessageBox.No)
            if (reply == QtGui.QMessageBox.Yes):
                self.currUser.setDots(self.balance)
                waitingCursor(self)
                try:
                    connector.update(self.currUser)
                except Exception, e:
                    QtGui.QMessageBox.critical(self, 'Error', str(e))
                    return
                finally:
                    normalCursor(self)
                QtGui.QMessageBox.information(self, 'Success', 'New balance confirmed!')
                self.close()
    def doDecrease(self, machine):
        """Subtract dots from the pending balance for one machine usage.

        Pricing (as implemented below): materials cost euro*1.25 dots;
        FDM printer 0.25 dots/min for the first hour then 0.1 dots/min;
        STL printers a 15-dot setup fee plus 0.25 dots/min; all other
        machines 1 dot/min. Nothing is persisted until confirm().
        """
        if (machine == "materials"):
            eur, ok = QtGui.QInputDialog.getText(self, 'Used material', 'Value of used materials (in euro):')
            if (ok and len(eur) > 0):
                try:
                    dots = float(eur) * 1.25
                except Exception, e:
                    QtGui.QMessageBox.critical(self, 'Error', "Valore non corretto")
                    return
                self.balance -= dots
                self.newBalance.setText(str(self.balance))
        else:
            minutes, ok = QtGui.QInputDialog.getText(self, 'Usage time', 'Usage time (in minutes):')
            if ok:
                # NOTE(review): float(minutes) is unguarded here, unlike the
                # materials branch -- non-numeric input raises ValueError.
                minutes = float(minutes)
                if (machine == "printer1"):
                    if (minutes <= 60):
                        self.balance -= minutes*0.25
                    else:
                        self.balance -= (15 + ((minutes-60) * 0.1))
                elif (machine == "printer2" or machine == "printer3"):
                    self.balance -= (15 + minutes*0.25)
                elif (machine == "cnc" or machine == "laser" or machine == "festool" or machine == "cutter"):
                    self.balance -= minutes # 1 dot/min
                self.newBalance.setText(str(self.balance))
def waitingCursor(window):
    # Show the busy (hourglass) cursor on `window` while a slow backend
    # call is in flight; paired with normalCursor().
    window.setCursor(QtCore.Qt.WaitCursor)
def normalCursor(window):
    # Restore the default cursor after waitingCursor().
    window.unsetCursor()
def center(window):
    # Center `window` on the available desktop geometry.
    qr = window.frameGeometry()
    cp = QtGui.QDesktopWidget().availableGeometry().center()
    qr.moveCenter(cp)
    window.move(qr.topLeft())
if __name__ == '__main__':
    # Application entry point: styled Qt app + RFID reader + Drupal backend.
    app = QtGui.QApplication(sys.argv)
    app.setStyle("plastique")
    # Icon/stylesheet paths: absolute next to the executable on Windows,
    # relative to the working directory on Linux/Cygwin.
    if sys.platform.startswith('win'):
        icon = QtGui.QIcon(os.path.dirname(os.path.abspath(sys.argv[0])) + "\icon.ico")
        sshFile = os.path.dirname(os.path.abspath(sys.argv[0])) + "\darkorange.stylesheet"
    elif sys.platform.startswith('linux') or sys.platform.startswith('cygwin'):
        icon = QtGui.QIcon("icon.ico")
        sshFile = "darkorange.stylesheet"
    app.setWindowIcon(icon)
    with open(sshFile,"r") as fh:
        app.setStyleSheet(fh.read())
    # Module-level globals: the dialogs above reference rfid_reader,
    # connector and main directly.
    rfid_reader = RFID()
    connector = DrupalConnector()
    main = MainWindow()
    login = LoginWindow(main)
    login.show()
    sys.exit(app.exec_())
| gpl-2.0 |
cgourlay/readthedocs.org | readthedocs/projects/migrations/0002_add_vcs_type.py | 5 | 7574 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add Project.repo_type (default 'git')."""

    def forwards(self, orm):
        # Adding field 'Project.repo_type'
        db.add_column('projects_project', 'repo_type', self.gf('django.db.models.fields.CharField')(default='git', max_length=10), keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Project.repo_type'
        db.delete_column('projects_project', 'repo_type')

    # Frozen ORM snapshot generated by South at migration time.
    # Do not edit by hand; it is only used to build the `orm` object
    # passed to forwards()/backwards().
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'projects.file': {
            'Meta': {'ordering': "('denormalized_path',)", 'object_name': 'File'},
            'content': ('django.db.models.fields.TextField', [], {}),
            'denormalized_path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'heading': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ordering': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['projects.File']"}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'files'", 'to': "orm['projects.Project']"}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
            'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'})
        },
        'projects.filerevision': {
            'Meta': {'ordering': "('-revision_number',)", 'object_name': 'FileRevision'},
            'comment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'diff': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'file': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['projects.File']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_reverted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'revision_number': ('django.db.models.fields.IntegerField', [], {})
        },
        'projects.project': {
            'Meta': {'ordering': "('-modified_date', 'name')", 'object_name': 'Project'},
            'copyright': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'docs_directory': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'extensions': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'project_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'pub_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'repo': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'repo_type': ('django.db.models.fields.CharField', [], {'default': "'git'", 'max_length': '10'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
            'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
            'suffix': ('django.db.models.fields.CharField', [], {'default': "'.rst'", 'max_length': '10'}),
            'theme': ('django.db.models.fields.CharField', [], {'default': "'default'", 'max_length': '20'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'projects'", 'to': "orm['auth.User']"}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        }
    }

    complete_apps = ['projects']
| mit |
eudicots/Cactus | cactus/skeleton/plugins/sprites.disabled.py | 10 | 1164 | import os
import sys
import pipes
import shutil
import subprocess
"""
This plugin uses glue to sprite images:
http://glue.readthedocs.org/en/latest/quickstart.html
Install:
(Only if you want to sprite jpg too)
brew install libjpeg
sudo easy_install pip
sudo pip uninstall pil
sudo pip install pil
sudo pip install glue
"""
try:
import glue
except Exception as e:
sys.exit('Could not use glue: %s\nMaybe install: sudo easy_install glue' % e)
IMG_PATH = 'static/img/sprites'
CSS_PATH = 'static/css/sprites'
KEY = '_PREV_CHECKSUM'
def checksum(path):
    """Return a combined md5 digest of every file under `path`.

    BUG FIX: the function accepted `path` but hashed the global IMG_PATH,
    ignoring its argument. Behavior is unchanged for the existing caller
    (preBuild passes IMG_PATH).
    NOTE(review): the backtick `find` substitution still breaks on file
    names containing whitespace; `md5` is the BSD/macOS tool.
    """
    command = 'md5 `find %s -type f`' % pipes.quote(path)
    return subprocess.check_output(command, shell=True)
def preBuild(site):
    """Cactus pre-build hook: regenerate CSS sprites with glue, but only
    when something under IMG_PATH changed since the last build."""
    if not os.path.isdir(IMG_PATH):
        return
    currChecksum = checksum(IMG_PATH)
    # The previous checksum is cached on the site object under KEY,
    # so the skip only works within one long-running process.
    prevChecksum = getattr(site, KEY, None)
    # Don't run if none of the images has changed
    if currChecksum == prevChecksum:
        return
    # Rebuild the output directory from scratch so sprites/CSS for
    # removed images do not linger.
    if os.path.isdir(CSS_PATH):
        shutil.rmtree(CSS_PATH)
    os.mkdir(CSS_PATH)
    os.system('glue --cachebuster --crop --optipng "%s" "%s" --project' % (IMG_PATH, CSS_PATH))
    setattr(site, KEY, currChecksum)
| bsd-3-clause |
gregdek/ansible | lib/ansible/modules/network/junos/junos_config.py | 4 | 14862 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: junos_config
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Manage configuration on devices running Juniper JUNOS
description:
- This module provides an implementation for working with the active
configuration running on Juniper JUNOS devices. It provides a set
of arguments for loading configuration, performing rollback operations
and zeroing the active configuration on the device.
extends_documentation_fragment: junos
options:
lines:
description:
- This argument takes a list of C(set) or C(delete) configuration
lines to push into the remote device. Each line must start with
either C(set) or C(delete). This argument is mutually exclusive
with the I(src) argument.
src:
description:
- The I(src) argument provides a path to the configuration file
to load into the remote system. The path can either be a full
system path to the configuration file if the value starts with /
or relative to the root of the implemented role or playbook.
This argument is mutually exclusive with the I(lines) argument.
version_added: "2.2"
src_format:
description:
- The I(src_format) argument specifies the format of the configuration
found in I(src). If the I(src_format) argument is not provided,
the module will attempt to determine the format of the configuration
file specified in I(src).
choices: ['xml', 'set', 'text', 'json']
version_added: "2.2"
rollback:
description:
- The C(rollback) argument instructs the module to rollback the
current configuration to the identifier specified in the
argument. If the specified rollback identifier does not
exist on the remote device, the module will fail. To rollback
to the most recent commit, set the C(rollback) argument to 0.
zeroize:
description:
- The C(zeroize) argument is used to completely sanitize the
remote device configuration back to initial defaults. This
argument will effectively remove all current configuration
statements on the remote device.
type: bool
confirm:
description:
- The C(confirm) argument will configure a time out value in minutes
for the commit to be confirmed before it is automatically
rolled back. If the C(confirm) argument is set to False, this
argument is silently ignored. If the value for this argument
is set to 0, the commit is confirmed immediately.
default: 0
comment:
description:
- The C(comment) argument specifies a text string to be used
when committing the configuration. If the C(confirm) argument
is set to False, this argument is silently ignored.
default: configured by junos_config
replace:
description:
- The C(replace) argument will instruct the remote device to
replace the current configuration hierarchy with the one specified
in the corresponding hierarchy of the source configuration loaded
from this module.
- Note this argument should be considered deprecated. To achieve
the equivalent, set the I(update) argument to C(replace). This argument
will be removed in a future release. The C(replace) and C(update) argument
is mutually exclusive.
type: bool
default: 'no'
backup:
description:
- This argument will cause the module to create a full backup of
the current C(running-config) from the remote device before any
changes are made. The backup file is written to the C(backup)
folder in the playbook root directory or role root directory, if
playbook is part of an ansible role. If the directory does not exist,
it is created.
type: bool
default: 'no'
version_added: "2.2"
update:
description:
- This argument will decide how to load the configuration
data particularly when the candidate configuration and loaded
configuration contain conflicting statements. Following are
accepted values.
C(merge) combines the data in the loaded configuration with the
candidate configuration. If statements in the loaded configuration
conflict with statements in the candidate configuration, the loaded
statements replace the candidate ones.
C(override) discards the entire candidate configuration and replaces
it with the loaded configuration.
C(replace) substitutes each hierarchy level in the loaded configuration
for the corresponding level.
default: merge
choices: ['merge', 'override', 'replace']
version_added: "2.3"
confirm_commit:
description:
- This argument will execute commit operation on remote device.
It can be used to confirm a previous commit.
type: bool
default: 'no'
version_added: "2.4"
check_commit:
description:
- This argument will check correctness of syntax; do not apply changes.
- Note that this argument can be used to confirm verified configuration done via commit confirmed operation
type: bool
default: 'no'
version_added: "2.8"
requirements:
- ncclient (>=v0.5.2)
notes:
- This module requires the netconf system service be enabled on
the remote device being managed.
- Abbreviated commands are NOT idempotent, see
L(Network FAQ,../network/user_guide/faq.html#why-do-the-config-modules-always-return-changed-true-with-abbreviated-commands).
- Loading JSON-formatted configuration I(json) is supported
starting in Junos OS Release 16.1 onwards.
- Update C(override) not currently compatible with C(set) notation.
- Tested against vSRX JUNOS version 15.1X49-D15.4, vqfx-10000 JUNOS Version 15.1X53-D60.4.
- Recommended connection is C(netconf). See L(the Junos OS Platform Options,../network/user_guide/platform_junos.html).
- This module also works with C(local) connections for legacy playbooks.
"""
EXAMPLES = """
- name: load configure file into device
junos_config:
src: srx.cfg
comment: update config
- name: load configure lines into device
junos_config:
lines:
- set interfaces ge-0/0/1 unit 0 description "Test interface"
- set vlans vlan01 description "Test vlan"
comment: update config
- name: Set routed VLAN interface (RVI) IPv4 address
junos_config:
lines:
- set vlans vlan01 vlan-id 1
- set interfaces irb unit 10 family inet address 10.0.0.1/24
- set vlans vlan01 l3-interface irb.10
- name: Check correctness of commit configuration
junos_config:
check_commit: yes
- name: rollback the configuration to id 10
junos_config:
rollback: 10
- name: zero out the current configuration
junos_config:
zeroize: yes
- name: Set VLAN access and trunking
junos_config:
lines:
- set vlans vlan02 vlan-id 6
- set interfaces ge-0/0/6.0 family ethernet-switching interface-mode access vlan members vlan02
- set interfaces ge-0/0/6.0 family ethernet-switching interface-mode trunk vlan members vlan02
- name: confirm a previous commit
junos_config:
confirm_commit: yes
- name: for idempotency, use full-form commands
junos_config:
lines:
# - set int ge-0/0/1 unit 0 desc "Test interface"
- set interfaces ge-0/0/1 unit 0 description "Test interface"
"""
RETURN = """
backup_path:
description: The full path to the backup file
returned: when backup is yes
type: str
sample: /playbooks/ansible/backup/config.2016-07-16@22:28:34
"""
import re
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.netconf import exec_rpc
from ansible.module_utils.network.junos.junos import get_diff, load_config, get_configuration
from ansible.module_utils.network.junos.junos import commit_configuration, discard_changes, locked_config
from ansible.module_utils.network.junos.junos import junos_argument_spec, load_configuration, tostring
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_native, to_text
try:
from lxml.etree import Element, fromstring
except ImportError:
from xml.etree.ElementTree import Element, fromstring
try:
from lxml.etree import ParseError
except ImportError:
try:
from xml.etree.ElementTree import ParseError
except ImportError:
# for Python < 2.7
from xml.parsers.expat import ExpatError
ParseError = ExpatError
USE_PERSISTENT_CONNECTION = True
DEFAULT_COMMENT = 'configured by junos_config'
def check_args(module, warnings):
    """Reject the deprecated 'replace' argument (use 'update' instead)."""
    replace = module.params['replace']
    if replace is not None:
        module.fail_json(msg='argument replace is deprecated, use update')
def zeroize(module):
    # Issue the <request-system-zeroize/> RPC, which wipes the device back
    # to factory defaults. ignore_warning=False so RPC warnings are raised.
    return exec_rpc(module, tostring(Element('request-system-zeroize')), ignore_warning=False)
def rollback(ele, id='0'):
    # Return the config diff against rollback checkpoint `id`.
    # NOTE(review): despite the name, `ele` is the module object --
    # main() calls rollback(module, rollback_id).
    return get_diff(ele, id)
def guess_format(config):
    """Best-effort detection of a configuration payload's format.

    Returns 'json' if it parses as JSON, else 'xml' if it parses as XML,
    else 'set' if it starts with a set/delete command, else 'text'.
    """
    try:
        json.loads(config)
    except ValueError:
        pass
    else:
        return 'json'

    try:
        fromstring(config)
    except ParseError:
        pass
    else:
        return 'xml'

    if config.startswith(('set', 'delete')):
        return 'set'

    return 'text'
def filter_delete_statements(module, candidate):
    """Drop 'delete ...' lines that do not match any active config line.

    Loading a delete for a nonexistent statement makes the device raise,
    so compare each delete (rewritten as its 'set' form) against the
    device's set-format configuration and keep only the ones that apply.
    """
    reply = get_configuration(module, format='set')
    node = reply.find('.//configuration-set')
    if node is None:
        # Device did not return set-format config; nothing we can filter.
        return candidate
    active = to_native(node.text, encoding='latin-1')
    kept = candidate[:]
    # Walk backwards so deleting from `kept` keeps earlier indices valid.
    for idx in range(len(candidate) - 1, -1, -1):
        line = candidate[idx]
        if line.startswith('delete'):
            as_set = re.sub('^delete', 'set', line)
            if as_set not in active:
                del kept[idx]
    return kept
def configure_device(module, warnings, candidate):
    """Load `candidate` (a string or list of config lines) onto the device
    and return the resulting diff from load_config."""
    kwargs = {}
    config_format = None
    if module.params['src']:
        # File-based input: honor an explicit src_format, else sniff it.
        config_format = module.params['src_format'] or guess_format(str(candidate))
        if config_format == 'set':
            kwargs.update({'format': 'text', 'action': 'set'})
        else:
            kwargs.update({'format': config_format, 'action': module.params['update']})
    if isinstance(candidate, string_types):
        candidate = candidate.split('\n')
    # this is done to filter out `delete ...` statements which map to
    # nothing in the config as that will cause an exception to be raised
    if any((module.params['lines'], config_format == 'set')):
        candidate = filter_delete_statements(module, candidate)
        kwargs['format'] = 'text'
        kwargs['action'] = 'set'
    return load_config(module, candidate, warnings, **kwargs)
def main():
    """ main entry point for module execution

    Dispatch order: optional backup, then exactly one of rollback /
    zeroize / load-candidate / check-commit / confirm-commit.
    """
    argument_spec = dict(
        lines=dict(type='list'),
        src=dict(type='path'),
        src_format=dict(choices=['xml', 'text', 'set', 'json']),
        # update operations
        update=dict(default='merge', choices=['merge', 'override', 'replace', 'update']),
        # deprecated replace in Ansible 2.3
        replace=dict(type='bool'),
        confirm=dict(default=0, type='int'),
        comment=dict(default=DEFAULT_COMMENT),
        confirm_commit=dict(type='bool', default=False),
        check_commit=dict(type='bool', default=False),
        # config operations
        backup=dict(type='bool', default=False),
        rollback=dict(type='int'),
        zeroize=dict(default=False, type='bool'),
    )
    argument_spec.update(junos_argument_spec)
    mutually_exclusive = [('lines', 'src', 'rollback', 'zeroize')]
    module = AnsibleModule(argument_spec=argument_spec,
                           mutually_exclusive=mutually_exclusive,
                           supports_check_mode=True)
    warnings = list()
    check_args(module, warnings)
    candidate = module.params['lines'] or module.params['src']
    # In check mode nothing is committed to the device.
    commit = not module.check_mode
    result = {'changed': False, 'warnings': warnings}
    if module.params['backup']:
        # Prefer set-format backup; fall back to text if unsupported.
        for conf_format in ['set', 'text']:
            reply = get_configuration(module, format=conf_format)
            match = reply.find('.//configuration-%s' % conf_format)
            if match is not None:
                break
        else:
            module.fail_json(msg='unable to retrieve device configuration')
        result['__backup__'] = match.text.strip()
    rollback_id = module.params['rollback']
    if rollback_id:
        diff = rollback(module, rollback_id)
        if commit:
            kwargs = {
                'comment': module.params['comment']
            }
            # Lock the candidate config for the load + commit sequence.
            with locked_config(module):
                load_configuration(module, rollback=rollback_id)
                commit_configuration(module, **kwargs)
            if module._diff:
                result['diff'] = {'prepared': diff}
        result['changed'] = True
    elif module.params['zeroize']:
        if commit:
            zeroize(module)
        result['changed'] = True
    else:
        if candidate:
            with locked_config(module):
                diff = configure_device(module, warnings, candidate)
                if diff:
                    if commit:
                        kwargs = {
                            'comment': module.params['comment']
                        }
                        # confirm > 0 arms an auto-rollback unless the
                        # commit is re-confirmed within the timeout.
                        confirm = module.params['confirm']
                        if confirm > 0:
                            kwargs.update({
                                'confirm': True,
                                'confirm_timeout': to_text(confirm, errors='surrogate_then_replace')
                            })
                        commit_configuration(module, **kwargs)
                    else:
                        # Check mode: throw the loaded candidate away.
                        discard_changes(module)
                    result['changed'] = True
                    if module._diff:
                        result['diff'] = {'prepared': diff}
        elif module.params['check_commit']:
            # Syntax check only ("commit check"); no change applied.
            commit_configuration(module, check=True)
        elif module.params['confirm_commit']:
            with locked_config(module):
                # confirm a previous commit
                commit_configuration(module)
            result['changed'] = True
    module.exit_json(**result)

if __name__ == '__main__':
    main()
| gpl-3.0 |
neilLasrado/erpnext | erpnext/stock/stock_balance.py | 1 | 9558 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import print_function, unicode_literals
import frappe
from frappe.utils import flt, cstr, nowdate, nowtime
from erpnext.stock.utils import update_bin
from erpnext.stock.stock_ledger import update_entries_after
from erpnext.controllers.stock_controller import update_gl_entries_after
def repost(only_actual=False, allow_negative_stock=False, allow_zero_rate=False, only_bin=False):
    """
    Repost stock for every (item, warehouse) pair found in Bin or in the
    Stock Ledger, committing after each pair. Long-running maintenance job.
    """
    frappe.db.auto_commit_on_many_writes = 1
    if allow_negative_stock:
        # Temporarily force negative stock on; the original setting is
        # restored at the end.
        existing_allow_negative_stock = frappe.db.get_value("Stock Settings", None, "allow_negative_stock")
        frappe.db.set_value("Stock Settings", None, "allow_negative_stock", 1)
    item_warehouses = frappe.db.sql("""
        select distinct item_code, warehouse
        from
        (select item_code, warehouse from tabBin
        union
        select item_code, warehouse from `tabStock Ledger Entry`) a
    """)
    for d in item_warehouses:
        try:
            repost_stock(d[0], d[1], allow_zero_rate, only_actual, only_bin, allow_negative_stock)
            frappe.db.commit()
        except:
            # NOTE(review): bare except silently rolls back and continues
            # with the next pair -- failures are not logged anywhere.
            frappe.db.rollback()
    if allow_negative_stock:
        frappe.db.set_value("Stock Settings", None, "allow_negative_stock", existing_allow_negative_stock)
    frappe.db.auto_commit_on_many_writes = 0
def repost_stock(item_code, warehouse, allow_zero_rate=False,
    only_actual=False, only_bin=False, allow_negative_stock=False):
    """Repost one (item, warehouse): rebuild ledger valuation (unless
    only_bin) and refresh the Bin quantity fields (unless only_actual)."""
    if not only_bin:
        repost_actual_qty(item_code, warehouse, allow_zero_rate, allow_negative_stock)
    if item_code and warehouse and not only_actual:
        # Recompute the derived quantities from their source documents.
        qty_dict = {
            "reserved_qty": get_reserved_qty(item_code, warehouse),
            "indented_qty": get_indented_qty(item_code, warehouse),
            "ordered_qty": get_ordered_qty(item_code, warehouse),
            "planned_qty": get_planned_qty(item_code, warehouse)
        }
        if only_bin:
            # Ledger was not rebuilt, so take actual qty straight from SLE.
            qty_dict.update({
                "actual_qty": get_balance_qty_from_sle(item_code, warehouse)
            })
        update_bin_qty(item_code, warehouse, qty_dict)
def repost_actual_qty(item_code, warehouse, allow_zero_rate=False, allow_negative_stock=False):
    """Rebuild stock ledger entries (valuation/balance) for one
    item/warehouse pair from the beginning of its ledger.

    Formatting fix only: the call was jammed onto the `def` line and split
    across a continuation; behavior is unchanged.
    """
    update_entries_after(
        {"item_code": item_code, "warehouse": warehouse},
        allow_zero_rate=allow_zero_rate,
        allow_negative_stock=allow_negative_stock)
def get_balance_qty_from_sle(item_code, warehouse):
    """Return the latest qty_after_transaction from the Stock Ledger for
    this item/warehouse (0.0 when no non-cancelled entries exist)."""
    balance_qty = frappe.db.sql("""select qty_after_transaction from `tabStock Ledger Entry`
        where item_code=%s and warehouse=%s and is_cancelled='No'
        order by posting_date desc, posting_time desc, creation desc
        limit 1""", (item_code, warehouse))
    return flt(balance_qty[0][0]) if balance_qty else 0.0
def get_reserved_qty(item_code, warehouse, batch_no=None):
    """Return qty reserved against open (submitted, not Closed) Sales Orders.

    The union covers two legs:
      * Packed Items (product bundles), scaled by the undelivered fraction of
        the parent Sales Order Item;
      * plain Sales Order Items (this leg alone honours ``batch_no``).

    Rows delivered by the supplier (drop-ship) are excluded in both legs.

    Returns:
        float: reserved quantity, 0 when nothing is reserved.
    """
    batch_no_query = ""
    if batch_no:
        # frappe.db.escape returns an already-quoted SQL literal, which is why
        # plain string interpolation is acceptable for this one value.
        batch_no_query = " and batch_no = {0}".format(frappe.db.escape(batch_no))
    reserved_qty = frappe.db.sql("""
        select
            sum(dnpi_qty * ((so_item_qty - so_item_delivered_qty) / so_item_qty))
        from
            (
                (select
                    qty as dnpi_qty,
                    (
                        select qty from `tabSales Order Item`
                        where name = dnpi.parent_detail_docname
                        and (delivered_by_supplier is null or delivered_by_supplier = 0)
                    ) as so_item_qty,
                    (
                        select delivered_qty from `tabSales Order Item`
                        where name = dnpi.parent_detail_docname
                        and delivered_by_supplier = 0
                    ) as so_item_delivered_qty,
                    parent, name
                from
                (
                    select qty, parent_detail_docname, parent, name
                    from `tabPacked Item` dnpi_in
                    where item_code = %s and warehouse = %s
                    and parenttype="Sales Order"
                    and item_code != parent_item
                    and exists (select * from `tabSales Order` so
                    where name = dnpi_in.parent and docstatus = 1 and status != 'Closed')
                ) dnpi)
            union
                (select stock_qty as dnpi_qty, qty as so_item_qty,
                    delivered_qty as so_item_delivered_qty, parent, name
                from `tabSales Order Item` so_item
                where item_code = %s and warehouse = %s {0}
                and (so_item.delivered_by_supplier is null or so_item.delivered_by_supplier = 0)
                and exists(select * from `tabSales Order` so
                    where so.name = so_item.parent and so.docstatus = 1
                    and so.status != 'Closed'))
            ) tab
        where
            so_item_qty >= so_item_delivered_qty""".format(batch_no_query), (item_code, warehouse, item_code, warehouse))
    # NOTE(review): rows where so_item_qty is 0 would divide by zero in the
    # outer sum; presumably submitted SO Items always have qty > 0 -- confirm.
    return flt(reserved_qty[0][0]) if reserved_qty else 0
def get_indented_qty(item_code, warehouse):
    """Return the pending requested qty from open Material Requests.

    Inward request types (Purchase/Manufacture/Customer Provided/Material
    Transfer) add to the figure; 'Material Issue' requests subtract from it.
    """
    # Ordered Qty is always maintained in stock UOM
    rows = frappe.db.sql("""
        select sum(mr_item.stock_qty - mr_item.ordered_qty)
        from `tabMaterial Request Item` mr_item, `tabMaterial Request` mr
        where mr_item.item_code=%s and mr_item.warehouse=%s
        and mr.material_request_type in ('Purchase', 'Manufacture', 'Customer Provided', 'Material Transfer')
        and mr_item.stock_qty > mr_item.ordered_qty and mr_item.parent=mr.name
        and mr.status!='Stopped' and mr.docstatus=1
        """, (item_code, warehouse))
    incoming = flt(rows[0][0]) if rows else 0

    rows = frappe.db.sql("""
        select sum(mr_item.stock_qty - mr_item.ordered_qty)
        from `tabMaterial Request Item` mr_item, `tabMaterial Request` mr
        where mr_item.item_code=%s and mr_item.warehouse=%s
        and mr.material_request_type = 'Material Issue'
        and mr_item.stock_qty > mr_item.ordered_qty and mr_item.parent=mr.name
        and mr.status!='Stopped' and mr.docstatus=1
        """, (item_code, warehouse))
    outgoing = flt(rows[0][0]) if rows else 0

    return incoming - outgoing
def get_ordered_qty(item_code, warehouse):
    """Return the qty still expected from open Purchase Orders (in stock UOM).

    Drop-shipped lines (delivered_by_supplier) are excluded; the remaining
    (qty - received_qty) is converted via the line's conversion factor.
    """
    rows = frappe.db.sql("""
        select sum((po_item.qty - po_item.received_qty)*po_item.conversion_factor)
        from `tabPurchase Order Item` po_item, `tabPurchase Order` po
        where po_item.item_code=%s and po_item.warehouse=%s
        and po_item.qty > po_item.received_qty and po_item.parent=po.name
        and po.status not in ('Closed', 'Delivered') and po.docstatus=1
        and po_item.delivered_by_supplier = 0""", (item_code, warehouse))
    if not rows:
        return 0
    return flt(rows[0][0])
def get_planned_qty(item_code, warehouse):
    """Return the qty still to be produced by open Work Orders for this item
    into the given finished-goods warehouse."""
    rows = frappe.db.sql("""
        select sum(qty - produced_qty) from `tabWork Order`
        where production_item = %s and fg_warehouse = %s and status not in ("Stopped", "Completed")
        and docstatus=1 and qty > produced_qty""", (item_code, warehouse))
    if not rows:
        return 0
    return flt(rows[0][0])
def update_bin_qty(item_code, warehouse, qty_dict=None):
    """Write changed quantity fields onto the Bin of the item/warehouse.

    Only fields whose value actually differs are written; when any changed,
    the projected qty is recomputed and the Bin cache cleared.

    Fixes: the local variable no longer shadows the ``bin`` builtin, and the
    documented default ``qty_dict=None`` no longer raises AttributeError.

    Args:
        item_code: Item whose Bin is updated.
        warehouse: Warehouse whose Bin is updated.
        qty_dict: Mapping of Bin fieldname -> new quantity (may be None/empty).
    """
    from erpnext.stock.utils import get_bin
    bin_doc = get_bin(item_code, warehouse)
    mismatch = False
    for field, value in (qty_dict or {}).items():
        if flt(bin_doc.get(field)) != flt(value):
            bin_doc.set(field, flt(value))
            mismatch = True
    if mismatch:
        bin_doc.set_projected_qty()
        bin_doc.db_update()
        bin_doc.clear_cache()
def set_stock_balance_as_per_serial_no(item_code=None, posting_date=None, posting_time=None,
        fiscal_year=None):
    """Reconcile Bin.actual_qty of serialized items with the count of their
    active Serial No records.

    For each mismatching (item, warehouse) a manual Stock Ledger Entry is
    inserted for the difference, the Bin is updated and the ledger replayed.

    Fix: the item filter is now a parameterized query. The original built
    ``item.name='%s'`` with ``item_code.replace("'", "\\'")``, but that
    replacement is a no-op (``"\\'" == "'"``), so quotes were never escaped.

    Args:
        item_code: Optional single-item filter.
        posting_date / posting_time: Timestamp for the correcting entries;
            default to now.
        fiscal_year: Unused; kept for backward compatibility of the signature.
    """
    if not posting_date: posting_date = nowdate()
    if not posting_time: posting_time = nowtime()
    condition = ""
    query_args = []
    if item_code:
        condition = " and item.name=%s"
        query_args.append(item_code)
    bin_rows = frappe.db.sql("""select bin.item_code, bin.warehouse, bin.actual_qty, item.stock_uom
        from `tabBin` bin, tabItem item
        where bin.item_code = item.name and item.has_serial_no = 1 {0}""".format(condition),
        tuple(query_args))
    for d in bin_rows:
        serial_nos = frappe.db.sql("""select count(name) from `tabSerial No`
            where item_code=%s and warehouse=%s and docstatus < 2""", (d[0], d[1]))
        if serial_nos and flt(serial_nos[0][0]) != flt(d[2]):
            # Report the discrepancy before correcting it.
            print(d[0], d[1], d[2], serial_nos[0][0])
        # Latest non-cancelled ledger entry supplies valuation rate and company.
        sle = frappe.db.sql("""select valuation_rate, company from `tabStock Ledger Entry`
            where item_code = %s and warehouse = %s and ifnull(is_cancelled, 'No') = 'No'
            order by posting_date desc limit 1""", (d[0], d[1]))
        sle_dict = {
            'doctype'           : 'Stock Ledger Entry',
            'item_code'         : d[0],
            'warehouse'         : d[1],
            'transaction_date'  : nowdate(),
            'posting_date'      : posting_date,
            'posting_time'      : posting_time,
            'voucher_type'      : 'Stock Reconciliation (Manual)',
            'voucher_no'        : '',
            'voucher_detail_no' : '',
            # Difference between serial-number count and current Bin qty.
            'actual_qty'        : flt(serial_nos[0][0]) - flt(d[2]),
            'stock_uom'         : d[3],
            # Incoming rate only matters when stock increases.
            'incoming_rate'     : sle and flt(serial_nos[0][0]) > flt(d[2]) and flt(sle[0][0]) or 0,
            'company'           : sle and cstr(sle[0][1]) or 0,
            'is_cancelled'      : 'No',
            'batch_no'          : '',
            'serial_no'         : ''
        }
        sle_doc = frappe.get_doc(sle_dict)
        # Manual corrective entry: bypass validations and link checks.
        sle_doc.flags.ignore_validate = True
        sle_doc.flags.ignore_links = True
        sle_doc.insert()
        args = sle_dict.copy()
        args.update({
            "sle_id": sle_doc.name,
            "is_amended": 'No'
        })
        update_bin(args)
        # Replay the ledger from this posting point so later entries stay consistent.
        update_entries_after({
            "item_code": d[0],
            "warehouse": d[1],
            "posting_date": posting_date,
            "posting_time": posting_time
        })
def reset_serial_no_status_and_warehouse(serial_nos=None):
    """Re-sync warehouse/status on draft Serial No documents from their last
    Stock Ledger Entry.

    Fix: the original used a bare ``except:``, which also swallows
    SystemExit/KeyboardInterrupt; it is narrowed to ``Exception`` while
    keeping the best-effort "skip failures" semantics.

    Args:
        serial_nos: Optional iterable of serial numbers; defaults to all
            draft (docstatus 0) Serial No records.
    """
    if not serial_nos:
        serial_nos = frappe.db.sql_list("""select name from `tabSerial No` where docstatus = 0""")
    for serial_no in serial_nos:
        try:
            sr = frappe.get_doc("Serial No", serial_no)
            last_sle = sr.get_last_sle()
            if flt(last_sle.actual_qty) > 0:
                sr.warehouse = last_sle.warehouse
                sr.via_stock_ledger = True
                sr.save()
        except Exception:
            # Best-effort pass: skip serial numbers that fail to load/save.
            continue
def repost_gle_for_stock_transactions(posting_date=None, posting_time=None, for_warehouses=None):
    """Repost GL entries for stock transactions from the given posting point.

    Fix: ``frappe.db.auto_commit_on_many_writes`` is now reset in a
    ``finally`` block; previously an exception inside the repost left the
    flag stuck at 1 for the rest of the process.

    Args:
        posting_date / posting_time: Starting point; default to the epoch of
            "1900-01-01" / "00:00" (i.e. everything).
        for_warehouses: Optional warehouse filter passed through.
    """
    frappe.db.auto_commit_on_many_writes = 1
    if not posting_date:
        posting_date = "1900-01-01"
    if not posting_time:
        posting_time = "00:00"
    try:
        update_gl_entries_after(posting_date, posting_time, for_warehouses=for_warehouses)
    finally:
        frappe.db.auto_commit_on_many_writes = 0
| gpl-3.0 |
dajohnso/cfme_tests | cfme/infrastructure/host.py | 1 | 22049 | # -*- coding: utf-8 -*-
"""A model of an Infrastructure Host in CFME."""
from functools import partial
from navmazing import NavigateToSibling, NavigateToAttribute
from manageiq_client.api import APIException
from selenium.common.exceptions import NoSuchElementException
from cfme.base.credential import Credential as BaseCredential
from cfme.common import PolicyProfileAssignable
from cfme.common.host_views import (
HostAddView,
HostDetailsView,
HostDiscoverView,
HostDriftAnalysis,
HostDriftHistory,
HostEditView,
HostManagePoliciesView,
HostsView,
HostTimelinesView
)
from cfme.exceptions import HostNotFound, ItemNotFound
from cfme.infrastructure.datastore import HostAllDatastoresView
from cfme.web_ui import mixins
from utils import conf
from utils.appliance import Navigatable
from utils.appliance.implementations.ui import CFMENavigateStep, navigate_to, navigator
from utils.ipmi import IPMI
from utils.log import logger
from utils.pretty import Pretty
from utils.update import Updateable
from utils.wait import wait_for
class Host(Updateable, Pretty, Navigatable, PolicyProfileAssignable):
    """Model of an infrastructure host in cfme.

    Args:
        name: Name of the host.
        hostname: Hostname of the host.
        ip_address: The IP address as a string.
        custom_ident: The custom identifier.
        host_platform: Included but appears unused in CFME at the moment.
        ipmi_address: The IPMI address.
        mac_address: The mac address of the system.
        credentials (:py:class:`Credential`): see Credential inner class.
        ipmi_credentials (:py:class:`Credential`): see Credential inner class.

    Usage:
        myhost = Host(name='vmware',
                      credentials=Provider.Credential(principal='admin', secret='foobar'))
        myhost.create()
    """
    pretty_attrs = ['name', 'hostname', 'ip_address', 'custom_ident']

    def __init__(self, name=None, hostname=None, ip_address=None, custom_ident=None,
                 host_platform=None, ipmi_address=None, mac_address=None, credentials=None,
                 ipmi_credentials=None, interface_type='lan', provider=None, appliance=None):
        Navigatable.__init__(self, appliance=appliance)
        self.name = name
        self.quad_name = 'host'
        self.hostname = hostname
        self.ip_address = ip_address
        self.custom_ident = custom_ident
        self.host_platform = host_platform
        self.ipmi_address = ipmi_address
        self.mac_address = mac_address
        self.credentials = credentials
        self.ipmi_credentials = ipmi_credentials
        self.interface_type = interface_type
        self.db_id = None  # lazily resolved via get_db_id
        self.provider = provider

    class Credential(BaseCredential, Updateable):
        """Provider credentials

        Args:
            **kwargs: If using IPMI type credential, ipmi = True"""
        def __init__(self, **kwargs):
            super(Host.Credential, self).__init__(**kwargs)
            self.ipmi = kwargs.get('ipmi')

    def create(self, cancel=False, validate_credentials=False):
        """Creates a host in the UI.

        Args:
            cancel (bool): Whether to cancel out of the creation. The cancel is done after all the
                information present in the Host has been filled in the UI.
            validate_credentials (bool): Whether to validate credentials - if True and the
                credentials are invalid, an error will be raised.
        """
        view = navigate_to(self, "Add")
        view.fill({
            "name": self.name,
            "hostname": self.hostname or self.ip_address,
            "host_platform": self.host_platform,
            "custom_ident": self.custom_ident,
            "ipmi_address": self.ipmi_address,
            "mac_address": self.mac_address
        })
        if self.credentials is not None:
            view.endpoints.default.fill(self.credentials.view_value_mapping)
            if validate_credentials:
                view.endpoints.default.validate_button.click()
        if self.ipmi_credentials is not None:
            view.endpoints.ipmi.fill(self.ipmi_credentials.view_value_mapping)
            if validate_credentials:
                view.endpoints.ipmi.validate_button.click()
        if not cancel:
            view.add_button.click()
            # NOTE(review): stray space before {} mirrors the CFME flash text.
            flash_message = 'Host / Node " {}" was added'.format(self.name)
        else:
            view.cancel_button.click()
            flash_message = "Add of new Host / Node was cancelled by the user"
        view = self.create_view(HostsView)
        assert view.is_displayed
        view.flash.assert_success_message(flash_message)

    def update(self, updates, cancel=False, validate_credentials=False):
        """Updates a host in the UI. Better to use utils.update.update context manager than call
        this directly.

        Args:
            updates (dict): fields that are changing.
            cancel (bool): whether to cancel out of the update.
            validate_credentials (bool): whether to validate the new credentials in the UI.
        """
        view = navigate_to(self, "Edit")
        changed = view.fill({
            "name": updates.get("name"),
            "hostname": updates.get("hostname") or updates.get("ip_address"),
            "custom_ident": updates.get("custom_ident"),
            "ipmi_address": updates.get("ipmi_address"),
            "mac_address": updates.get("mac_address")
        })
        credentials = updates.get("credentials")
        ipmi_credentials = updates.get("ipmi_credentials")
        credentials_changed = False
        ipmi_credentials_changed = False
        if credentials is not None:
            if view.change_stored_password.is_displayed:
                view.change_stored_password.click()
            credentials_changed = view.endpoints.default.fill(credentials.view_value_mapping)
            if validate_credentials:
                view.endpoints.default.validate_button.click()
        if ipmi_credentials is not None:
            if view.change_stored_password.is_displayed:
                view.change_stored_password.click()
            ipmi_credentials_changed = view.endpoints.ipmi.fill(ipmi_credentials.view_value_mapping)
            if validate_credentials:
                view.endpoints.ipmi.validate_button.click()
        changed = any([changed, credentials_changed, ipmi_credentials_changed])
        if changed:
            view.save_button.click()
            logger.debug("Trying to save update for host with id: %s", str(self.get_db_id))
            view = self.create_view(HostDetailsView)
            view.flash.assert_success_message(
                'Host / Node "{}" was saved'.format(updates.get("name", self.name)))
        else:
            view.cancel_button.click()
            view.flash.assert_success_message(
                'Edit of Host / Node "{}" was cancelled by the user'.format(
                    updates.get("name", self.name)))

    def delete(self, cancel=True):
        """Deletes this host from CFME.

        Args:
            cancel (bool): Whether to cancel the deletion, defaults to True
        """
        view = navigate_to(self, "Details")
        view.toolbar.configuration.item_select("Remove item", handle_alert=cancel)
        if not cancel:
            view = self.create_view(HostsView)
            assert view.is_displayed
            view.flash.assert_success_message("The selected Hosts / Nodes was deleted")

    def load_details(self, refresh=False):
        """To be compatible with the Taggable and PolicyProfileAssignable mixins.

        Args:
            refresh (bool): Whether to perform the page refresh, defaults to False
        """
        view = navigate_to(self, "Details")
        if refresh:
            view.browser.refresh()
            view.flush_widget_cache()

    def execute_button(self, button_group, button, cancel=True):
        # TODO this method should be converted to widgetastic. A toolbar with parametrized view
        # will be probably required.
        from cfme.web_ui import form_buttons
        import cfme.fixtures.pytest_selenium as sel
        import cfme.web_ui.flash as flash
        import cfme.web_ui.toolbar as tb
        navigate_to(self, 'Details')
        host_btn = partial(tb.select, button_group)
        host_btn(button, invokes_alert=True)
        sel.click(form_buttons.submit)
        flash.assert_success_message("Order Request was Submitted")
        host_btn(button, invokes_alert=True)
        sel.click(form_buttons.cancel)
        flash.assert_success_message("Service Order was cancelled by the user")

    def power_on(self):
        """Powers the host on via the Power toolbar."""
        view = navigate_to(self, "Details")
        view.toolbar.power.item_select("Power On", handle_alert=True)

    def power_off(self):
        """Powers the host off via the Power toolbar."""
        view = navigate_to(self, "Details")
        view.toolbar.power.item_select("Power Off", handle_alert=True)

    def get_power_state(self):
        """Returns the host's power state as shown on the details page."""
        return self.get_detail("Properties", "Power State")

    def refresh(self, cancel=False):
        """Perform 'Refresh Relationships and Power States' for the host.

        Args:
            cancel (bool): Whether the action should be cancelled, default to False
        """
        view = navigate_to(self, "Details")
        view.toolbar.configuration.item_select("Refresh Relationships and Power States",
                                               handle_alert=cancel)

    def wait_for_host_state_change(self, desired_state, timeout=300):
        """Wait for Host to come to desired state. This function waits just the needed amount of
        time thanks to wait_for.

        Args:
            desired_state (str): 'on' or 'off'
            timeout (int): Specify amount of time (in seconds) to wait until TimedOutError is raised
        """
        view = navigate_to(self, "All")

        def _looking_for_state_change():
            entity = view.entities.get_entity(by_name=self.name)
            return "currentstate-{}".format(desired_state) in entity.status

        return wait_for(
            _looking_for_state_change,
            fail_func=view.browser.refresh,
            num_sec=timeout
        )

    def get_ipmi(self):
        """Builds an IPMI client from the host's IPMI address/credentials."""
        return IPMI(
            hostname=self.ipmi_address,
            username=self.ipmi_credentials.principal,
            password=self.ipmi_credentials.secret,
            interface_type=self.interface_type
        )

    def get_detail(self, title, field):
        """Gets details from the details summary tables.

        Args:
            title (str): Summary Table title
            field (str): Summary table field name

        Returns: A string representing the entities of the SummaryTable's value.
        """
        view = navigate_to(self, "Details")
        return getattr(view.entities, title.lower().replace(" ", "_")).get_text_of(field)

    @property
    def exists(self):
        """Checks if the host exists in the UI.

        Returns: :py:class:`bool`
        """
        view = navigate_to(self, "All")
        try:
            view.entities.get_first_entity(by_name=self.name)
        except ItemNotFound:
            return False
        else:
            return True

    @property
    def has_valid_credentials(self):
        """Checks if host has valid credentials save.

        Returns: :py:class:`bool`
        """
        view = navigate_to(self, "All")
        entity = view.entities.get_first_entity(by_name=self.name)
        return entity.creds.strip().lower() == "checkmark"

    def update_credentials_rest(self, credentials):
        """ Updates host's credentials via rest api

        Args:
            credentials (dict) : credentials from yaml file

        Returns: ``True`` if credentials are saved and valid; ``False`` otherwise
        """
        # TODO: Move to Sentaku
        try:
            host = self.appliance.rest_api.collections.hosts.get(name=self.name)
            host.action.edit(credentials={"userid": credentials.principal,
                                          "password": credentials.secret})
        except APIException:
            return False
        else:
            # Fix: the original fell off the end and returned None on success,
            # contradicting the documented True/False contract.
            return True

    def get_datastores(self):
        """Gets list of all datastores used by this host.

        Returns: :py:class:`list` of datastores names
        """
        host_details_view = navigate_to(self, "Details")
        host_details_view.entities.relationships.click_at("Datastores")
        datastores_view = self.create_view(HostAllDatastoresView)
        assert datastores_view.is_displayed
        # Fix: the original read ``datastores_view.entites.get_all_()`` --
        # typos for the ``entities.get_all()`` API used elsewhere in this file.
        return [entity.name for entity in datastores_view.entities.get_all()]

    @property
    def get_db_id(self):
        """Lazily resolves and caches the appliance DB id of this host."""
        if self.db_id is None:
            self.db_id = self.appliance.host_id(self.name)
        return self.db_id

    def run_smartstate_analysis(self):
        """Runs smartstate analysis on this host.

        Note:
            The host must have valid credentials already set up for this to work.
        """
        view = navigate_to(self, "Details")
        view.toolbar.configuration.item_select("Perform SmartState Analysis", handle_alert=True)
        view.flash.assert_success_message('"{}": Analysis successfully initiated'.format(self.name))

    def check_compliance(self, timeout=240):
        """Initiates compliance check and waits for it to finish."""
        view = navigate_to(self, "Details")
        original_state = self.compliance_status
        view.toolbar.policy.item_select("Check Compliance of Last Known Configuration",
                                        handle_alert=True)
        view.flash.assert_no_errors()
        wait_for(
            lambda: self.compliance_status != original_state,
            num_sec=timeout, delay=5, message="compliance of {} checked".format(self.name)
        )

    @property
    def compliance_status(self):
        """Returns the title of the compliance SummaryTable. The title contains datetime so it can
        be compared.

        Returns:
            :py:class:`NoneType` if no title is present (no compliance checks before), otherwise str
        """
        view = navigate_to(self, "Details")
        view.browser.refresh()
        return self.get_detail("Compliance", "Status")

    @property
    def is_compliant(self):
        """Check if the Host is compliant.

        Returns:
            :py:class:`bool`
        """
        text = self.compliance_status.strip().lower()
        if text.startswith("non-compliant"):
            return False
        elif text.startswith("compliant"):
            return True
        else:
            raise ValueError("{} is not a known state for compliance".format(text))

    def equal_drift_results(self, drift_section, section, *indexes):
        """Compares drift analysis results of a row specified by it's title text.

        Args:
            drift_section (str): Title text of the row to compare
            section (str): Accordion section where the change happened
            indexes: Indexes of results to compare starting with 0 for first row (latest result).
                Compares all available drifts, if left empty (default)

        Note:
            There have to be at least 2 drift results available for this to work.

        Returns:
            :py:class:`bool`
        """
        def _select_rows(indexes):
            for i in indexes:
                drift_history_view.history_table[i][0].click()

        # mark by indexes or mark all
        details_view = navigate_to(self, "Details")
        details_view.entities.relationships.click_at("Drift History")
        drift_history_view = self.create_view(HostDriftHistory)
        assert drift_history_view.is_displayed
        if indexes:
            _select_rows(indexes)
        else:
            # We can't compare more than 10 drift results at once
            # so when selecting all, we have to limit it to the latest 10
            rows_number = len(list(drift_history_view.history_table.rows()))
            if rows_number > 10:
                _select_rows(range(10))
            else:
                _select_rows(range(rows_number))
        drift_history_view.analyze_button.click()
        drift_analysis_view = self.create_view(HostDriftAnalysis)
        assert drift_analysis_view.is_displayed
        drift_analysis_view.drift_sections.check(section)
        if not drift_analysis_view.toolbar.all_attributes.active:
            drift_analysis_view.toolbar.all_attributes.click()
        return drift_analysis_view.drift_analysis(drift_section).is_changed

    def tag(self, tag, **kwargs):
        """Tags the system by given tag"""
        navigate_to(self, 'Details')
        mixins.add_tag(tag, **kwargs)

    def untag(self, tag):
        """Removes the selected tag off the system"""
        navigate_to(self, 'Details')
        mixins.remove_tag(tag)
@navigator.register(Host)
class All(CFMENavigateStep):
    # Navigate to the list of all infrastructure hosts/nodes.
    VIEW = HostsView
    prerequisite = NavigateToAttribute("appliance.server", "LoggedIn")

    def step(self):
        # CFME labels the menu "Hosts" or "Nodes" depending on provider type,
        # so fall back to the alternate entry when the first is missing.
        try:
            self.prerequisite_view.navigation.select("Compute", "Infrastructure", "Hosts")
        except NoSuchElementException:
            self.prerequisite_view.navigation.select("Compute", "Infrastructure", "Nodes")
@navigator.register(Host)
class Details(CFMENavigateStep):
    # Open a single host's details page by clicking its entity in the list.
    VIEW = HostDetailsView
    prerequisite = NavigateToSibling("All")

    def step(self):
        self.prerequisite_view.entities.get_first_entity(by_name=self.obj.name).click()
@navigator.register(Host)
class Edit(CFMENavigateStep):
    # Open the edit form for the host from its details page.
    VIEW = HostEditView
    prerequisite = NavigateToSibling("Details")

    def step(self):
        self.prerequisite_view.toolbar.configuration.item_select("Edit this item")
@navigator.register(Host)
class Add(CFMENavigateStep):
    # Open the "add new host" form from the host list.
    VIEW = HostAddView
    prerequisite = NavigateToSibling("All")

    def step(self):
        self.prerequisite_view.toolbar.configuration.item_select("Add a New item")
@navigator.register(Host)
class Discover(CFMENavigateStep):
    # Open the host discovery form from the host list.
    VIEW = HostDiscoverView
    prerequisite = NavigateToSibling("All")

    def step(self):
        self.prerequisite_view.toolbar.configuration.item_select("Discover items")
@navigator.register(Host)
class PolicyAssignment(CFMENavigateStep):
    # Open the "Manage Policies" screen for the host.
    VIEW = HostManagePoliciesView
    prerequisite = NavigateToSibling("Details")

    def step(self):
        self.prerequisite_view.toolbar.policy.item_select("Manage Policies")
@navigator.register(Host)
class Provision(CFMENavigateStep):
    # Kick off host provisioning; no VIEW is declared so the destination
    # page is not validated by the navigator.
    prerequisite = NavigateToSibling("Details")

    def step(self):
        self.prerequisite_view.toolbar.lifecycle.item_select("Provision this item")
@navigator.register(Host)
class Timelines(CFMENavigateStep):
    # Open the monitoring timelines page for the host.
    VIEW = HostTimelinesView
    prerequisite = NavigateToSibling("Details")

    def step(self):
        self.prerequisite_view.toolbar.monitoring.item_select("Timelines")
def get_credentials_from_config(credential_config_name):
    """Build a :py:class:`Host.Credential` from an entry in the credentials YAML.

    Args:
        credential_config_name: Key into ``conf.credentials``.
    """
    entry = conf.credentials[credential_config_name]
    return Host.Credential(principal=entry["username"], secret=entry["password"])
def get_from_config(provider_config_name):
    """Creates a Host object given a yaml entry in cfme_data.

    Usage:
        get_from_config('esx')

    Returns: A Host object that has methods that operate on CFME
    """
    # TODO: Include provider key in YAML and include provider object when creating
    host_config = conf.cfme_data.get('management_hosts', {})[provider_config_name]
    host_credentials = get_credentials_from_config(host_config['credentials'])
    ipmi_credentials = get_credentials_from_config(host_config['ipmi_credentials'])
    ipmi_credentials.ipmi = True
    return Host(
        name=host_config['name'],
        hostname=host_config['hostname'],
        ip_address=host_config['ipaddress'],
        custom_ident=host_config.get('custom_ident'),
        host_platform=host_config.get('host_platform'),
        ipmi_address=host_config['ipmi_address'],
        mac_address=host_config['mac_address'],
        interface_type=host_config.get('interface_type', 'lan'),
        credentials=host_credentials,
        ipmi_credentials=ipmi_credentials
    )
def wait_for_a_host():
    """Waits for any host to appear in the UI."""
    hosts_view = navigate_to(Host, "All")
    logger.info("Waiting for a host to appear...")

    def _host_count():
        return int(hosts_view.paginator.items_amount)

    wait_for(
        _host_count,
        fail_condition=0,
        message="Wait for any host to appear",
        num_sec=1000,
        fail_func=hosts_view.browser.refresh
    )
def wait_for_host_delete(host):
    """Waits for the host to remove from the UI.

    Args:
        host (Host): host object
    """
    hosts_view = navigate_to(Host, "All")
    logger.info("Waiting for a host to delete...")

    def _host_gone():
        return not host.exists

    wait_for(
        _host_gone,
        message="Wait for the host to disappear",
        num_sec=500,
        fail_func=hosts_view.browser.refresh
    )
def wait_for_host_to_appear(host):
    """Waits for the host to appear in the UI.

    Args:
        host (Host): host object
    """
    hosts_view = navigate_to(Host, "All")
    logger.info("Waiting for the host to appear...")

    def _host_present():
        return host.exists

    wait_for(
        _host_present,
        message="Wait for the host to appear",
        num_sec=1000,
        fail_func=hosts_view.browser.refresh
    )
def get_all_hosts():
    """Returns names list of all hosts.

    Returns:
        list: names list of all hosts
    """
    hosts_view = navigate_to(Host, "All")
    all_entities = hosts_view.entities.get_all(surf_pages=True)
    return [host_entity.name for host_entity in all_entities]
def find_quadicon(host_name):
    """Find and return a quadicon belonging to a specific host.

    Args:
        host_name (str): A host name as displayed at the quadicon

    Returns: :py:class:`cfme.common.host_views.HostQuadIconItem` instance

    Raises:
        HostNotFound: when no quadicon with that name is present.
    """
    view = navigate_to(Host, "All")
    # Quadicons only exist in grid view; switch if needed.
    if view.toolbar.view_selector.selected != "Grid View":
        view.toolbar.view_selector.select("Grid View")
    try:
        return view.entities.get_first_entity(by_name=host_name)
    except ItemNotFound:
        raise HostNotFound("Host '{}' not found in UI!".format(host_name))
| gpl-2.0 |
manhong2112/CodeColle | Python/Pygame/pygame~/examples/macosx/macfont.py | 17 | 4486 | """
EXPERIMENTAL CODE!
Here we load a .TTF font file, and display it in
a basic pygame window. It demonstrates several of the
Font object attributes. Nothing exciting in here, but
it makes a great example for basic window, event, and
font management.
"""
import pygame
import math
from pygame.locals import *
from pygame import Surface
from pygame.surfarray import blit_array, make_surface, pixels3d, pixels2d
import Numeric
from Foundation import *
from AppKit import *
def _getColor(color=None):
    """Convert a pygame-style RGB(A) tuple (0-255 per channel) to an NSColor.

    ``None`` maps to a fully transparent color; a 3-tuple gets an opaque
    alpha of 255 appended.

    Fix: channel values are now divided by 255.0. The original multiplied by
    1/256 (0.00390625), which mapped a full channel of 255 to ~0.996 instead
    of the 1.0 NSColor expects for full intensity.
    """
    if color is None:
        return NSColor.clearColor()
    if len(color) == 3:
        color = tuple(color) + (255.0,)
    return NSColor.colorWithDeviceRed_green_blue_alpha_(*[c / 255.0 for c in color])
def _getBitmapImageRep(size, hasAlpha=True):
    """Allocate an empty 32-bit RGBA NSBitmapImageRep of the given pixel size.

    8 bits per sample, 4 samples per pixel, non-planar, device RGB color
    space, row stride of width*4 bytes.
    """
    width, height = map(int, size)
    return NSBitmapImageRep.alloc().initWithBitmapDataPlanes_pixelsWide_pixelsHigh_bitsPerSample_samplesPerPixel_hasAlpha_isPlanar_colorSpaceName_bytesPerRow_bitsPerPixel_(None, width, height, 8, 4, hasAlpha, False, NSDeviceRGBColorSpace, width*4, 32)
class SysFont(object):
    """Minimal pygame-Font-like wrapper around an AppKit ``NSFont``.

    Mirrors a subset of ``pygame.font.Font``: bold/italic/underline toggles,
    basic metrics and :meth:`render` producing a pygame ``Surface``.
    """

    def __init__(self, name, size):
        # Base (plain) font; _setupFont() rebuilds self._font whenever a
        # style flag changes.
        self._font = NSFont.fontWithName_size_(name, size)
        self._isBold = False
        self._isOblique = False
        self._isUnderline = False
        self._family = name
        self._size = size
        self._setupFont()

    def _setupFont(self):
        # Mac PostScript naming convention: "Family-Bold", "Family-Oblique",
        # "Family-BoldOblique".
        name = self._family
        if self._isBold or self._isOblique:
            name = '%s-%s%s' % (
                name,
                self._isBold and 'Bold' or '',
                self._isOblique and 'Oblique' or '')
        self._font = NSFont.fontWithName_size_(name, self._size)
        # Debug output left in by the original author.
        print (name, self._font)
        if self._font is None:
            # Fall back to the system font when the styled face doesn't exist.
            if self._isBold:
                # NOTE(review): PyObjC maps selector colons to trailing
                # underscores; this is likely meant to be
                # NSFont.boldSystemFontOfSize_(self._size) -- confirm, the
                # current spelling would raise AttributeError when hit.
                self._font = NSFont.boldSystemFontOfSize(self._size)
            else:
                self._font = NSFont.systemFontOfSize_(self._size)

    def get_ascent(self):
        # Distance from baseline to top of glyphs (points).
        return self._font.ascender()

    def get_descent(self):
        # NSFont.descender() is negative; negate to match pygame's positive value.
        return -self._font.descender()

    def get_bold(self):
        return self._isBold

    def get_height(self):
        return self._font.defaultLineHeightForFont()

    def get_italic(self):
        return self._isOblique

    def get_linesize(self):
        # NOTE(review): unimplemented -- returns None, unlike pygame's Font.
        pass

    def get_underline(self):
        return self._isUnderline

    def set_bold(self, isBold):
        if isBold != self._isBold:
            self._isBold = isBold
            self._setupFont()

    def set_italic(self, isOblique):
        if isOblique != self._isOblique:
            self._isOblique = isOblique
            self._setupFont()

    def set_underline(self, isUnderline):
        # Underline is applied at render time, so no font rebuild is needed.
        self._isUnderline = isUnderline

    def size(self, text):
        # Measured size of the rendered text, rounded up to whole pixels.
        return tuple(map(int,map(math.ceil, NSString.sizeWithAttributes_(text, {
            NSFontAttributeName: self._font,
            NSUnderlineStyleAttributeName: self._isUnderline and 1.0 or None,
        }))))

    def render(self, text, antialias, forecolor, backcolor=(0,0,0,255)):
        """Draw *text* into an offscreen NSImage and convert it to a pygame
        Surface.

        NOTE(review): the ``antialias`` flag is accepted but ignored, and the
        method implicitly returns None when the captured bitmap does not have
        4 samples per pixel (no alpha channel).
        """
        size = self.size(text)
        img = NSImage.alloc().initWithSize_(size)
        img.lockFocus()
        NSString.drawAtPoint_withAttributes_(text, (0.0, 0.0), {
            NSFontAttributeName: self._font,
            NSUnderlineStyleAttributeName: self._isUnderline and 1.0 or None,
            NSBackgroundColorAttributeName: backcolor and _getColor(backcolor) or None,
            NSForegroundColorAttributeName: _getColor(forecolor),
        })
        rep = NSBitmapImageRep.alloc().initWithFocusedViewRect_(((0.0, 0.0), size))
        img.unlockFocus()
        if rep.samplesPerPixel() == 4:
            # Wrap the raw ARGB bytes in a Numeric array, swap to pygame's
            # (width, height) axis order, and blit into an alpha surface.
            s = Surface(size, SRCALPHA|SWSURFACE, 32, [-1<<24,0xff<<16,0xff<<8,0xff])
            a = Numeric.reshape(Numeric.fromstring(rep.bitmapData(), typecode=Numeric.Int32), (size[1], size[0]))
            blit_array(s, Numeric.swapaxes(a,0,1))
            return s.convert_alpha()
if __name__=='__main__':
    # Demo: render red, italic, underlined text with the AppKit-backed
    # SysFont into a pygame window.
    pygame.init()
    screen = pygame.display.set_mode((600, 600))
    s = SysFont('Gill Sans', 36)
    s.set_italic(1)
    s.set_underline(1)
    done = False
    # Transparent background; blitting twice with a 2px offset gives a
    # cheap shadow/bold effect.
    surf = s.render('OS X Fonts!', True, (255,0,0,255), (0,0,0,0))
    screen.blit(surf, (0,0))
    screen.blit(surf, (2, 2))
    pygame.display.update()
    # Event loop: exit on window close or ESC.
    while not done:
        for e in pygame.event.get():
            if e.type == QUIT or (e.type == KEYUP and e.key == K_ESCAPE):
                done = True
                break
| mit |
googleads/google-ads-python | google/ads/googleads/v6/enums/types/app_payment_model_type.py | 1 | 1113 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v6.enums",
marshal="google.ads.googleads.v6",
manifest={"AppPaymentModelTypeEnum",},
)
class AppPaymentModelTypeEnum(proto.Message):
    r"""Represents a criterion for targeting paid apps."""

    # NOTE: auto-generated protobuf wrapper -- do not hand-edit values;
    # regenerate from the google-ads proto definitions instead.
    class AppPaymentModelType(proto.Enum):
        r"""Enum describing possible app payment models."""
        UNSPECIFIED = 0  # Not specified.
        UNKNOWN = 1      # Value unknown in this API version.
        PAID = 30        # Represents paid-for apps.
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 |
ElDeveloper/scikit-learn | sklearn/datasets/tests/test_svmlight_format.py | 228 | 11221 | from bz2 import BZ2File
import gzip
from io import BytesIO
import numpy as np
import os
import shutil
from tempfile import NamedTemporaryFile
from sklearn.externals.six import b
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import raises
from sklearn.utils.testing import assert_in
import sklearn
from sklearn.datasets import (load_svmlight_file, load_svmlight_files,
dump_svmlight_file)
currdir = os.path.dirname(os.path.abspath(__file__))
datafile = os.path.join(currdir, "data", "svmlight_classification.txt")
multifile = os.path.join(currdir, "data", "svmlight_multilabel.txt")
invalidfile = os.path.join(currdir, "data", "svmlight_invalid.txt")
invalidfile2 = os.path.join(currdir, "data", "svmlight_invalid_order.txt")
def test_load_svmlight_file():
    # Smoke test: parse the reference classification file and check shape,
    # individual sparse entries, mutability and labels.
    X, y = load_svmlight_file(datafile)

    # test X's shape
    assert_equal(X.indptr.shape[0], 7)
    assert_equal(X.shape[0], 6)
    assert_equal(X.shape[1], 21)
    assert_equal(y.shape[0], 6)

    # test X's non-zero values
    for i, j, val in ((0, 2, 2.5), (0, 10, -5.2), (0, 15, 1.5),
                      (1, 5, 1.0), (1, 12, -3),
                      (2, 20, 27)):
        assert_equal(X[i, j], val)

    # tests X's zero values
    assert_equal(X[0, 3], 0)
    assert_equal(X[0, 5], 0)
    assert_equal(X[1, 8], 0)
    assert_equal(X[1, 16], 0)
    assert_equal(X[2, 18], 0)

    # test can change X's values
    X[0, 2] *= 2
    assert_equal(X[0, 2], 5)

    # test y
    assert_array_equal(y, [1, 2, 3, 4, 1, 2])
def test_load_svmlight_file_fd():
    # test loading from file descriptor
    X1, y1 = load_svmlight_file(datafile)

    fd = os.open(datafile, os.O_RDONLY)
    try:
        # Loading via a raw fd must match loading via path.
        X2, y2 = load_svmlight_file(fd)
        assert_array_equal(X1.data, X2.data)
        assert_array_equal(y1, y2)
    finally:
        os.close(fd)
def test_load_svmlight_file_multilabel():
    # Multilabel mode returns one label tuple per sample (possibly empty).
    X, y = load_svmlight_file(multifile, multilabel=True)
    assert_equal(y, [(0, 1), (2,), (), (1, 2)])
def test_load_svmlight_files():
    # Loading the same file twice must yield identical data, and the dtype
    # argument must be honoured across all returned matrices.
    X_train, y_train, X_test, y_test = load_svmlight_files([datafile] * 2,
                                                           dtype=np.float32)
    assert_array_equal(X_train.toarray(), X_test.toarray())
    assert_array_equal(y_train, y_test)
    assert_equal(X_train.dtype, np.float32)
    assert_equal(X_test.dtype, np.float32)

    X1, y1, X2, y2, X3, y3 = load_svmlight_files([datafile] * 3,
                                                 dtype=np.float64)
    assert_equal(X1.dtype, X2.dtype)
    assert_equal(X2.dtype, X3.dtype)
    assert_equal(X3.dtype, np.float64)
def test_load_svmlight_file_n_features():
    """n_features pads the matrix; too small a value raises ValueError."""
    X, y = load_svmlight_file(datafile, n_features=22)

    # One extra (empty) feature column was requested.
    assert_equal(X.indptr.shape[0], 7)
    assert_equal(X.shape[0], 6)
    assert_equal(X.shape[1], 22)

    # Spot-check stored entries.
    for row, col, expected in ((0, 2, 2.5), (0, 10, -5.2),
                               (1, 5, 1.0), (1, 12, -3)):
        assert_equal(X[row, col], expected)

    # The fixture holds 21 features, so asking for fewer must fail.
    assert_raises(ValueError, load_svmlight_file, datafile, n_features=20)
def test_load_compressed():
    """Gzip- and bzip2-compressed files load identically to the plain file.

    The two compression formats previously had copy-pasted, near-identical
    bodies; they now share one helper.  The helper also closes the
    compressed writer explicitly: the original code dropped the object
    returned by ``gzip.open``/``BZ2File`` without closing it, relying on
    refcounting GC to flush the stream before the reload.
    """
    X, y = load_svmlight_file(datafile)

    def _check_compressed(suffix, opener):
        # Compress the fixture into a temp file, reload it, compare.
        with NamedTemporaryFile(prefix="sklearn-test", suffix=suffix) as tmp:
            tmp.close()  # necessary under windows
            fout = opener(tmp.name, "wb")
            try:
                with open(datafile, "rb") as fin:
                    shutil.copyfileobj(fin, fout)
            finally:
                # Flush/finalise the compressed stream before reading it.
                fout.close()
            Xc, yc = load_svmlight_file(tmp.name)
            # because we "close" it manually and write to it,
            # we need to remove it manually.
            os.remove(tmp.name)
        assert_array_equal(X.toarray(), Xc.toarray())
        assert_array_equal(y, yc)

    _check_compressed(".gz", gzip.open)
    _check_compressed(".bz2", BZ2File)
def test_load_invalid_file():
    """A malformed svmlight file is rejected with ValueError."""
    assert_raises(ValueError, load_svmlight_file, invalidfile)
def test_load_invalid_order_file():
    """Out-of-order feature indices are rejected with ValueError."""
    assert_raises(ValueError, load_svmlight_file, invalidfile2)
def test_load_zero_based():
    """zero_based=False must reject input containing a 0 feature index."""
    f = BytesIO(b("-1 4:1.\n1 0:1\n"))
    assert_raises(ValueError, load_svmlight_file, f, zero_based=False)
def test_load_zero_based_auto():
    """zero_based="auto" infers the indexing convention from the data."""
    one_based = b("-1 1:1 2:2 3:3\n")
    zero_based = b("-1 0:0 1:1\n")

    # On its own, the one-based file is detected as such: 3 features.
    X, y = load_svmlight_file(BytesIO(one_based), zero_based="auto")
    assert_equal(X.shape, (1, 3))

    # Loaded together with a zero-based file, both are parsed zero-based,
    # so the one-based file gains a (sparse) column 0: 4 features.
    X1, y1, X2, y2 = load_svmlight_files(
        [BytesIO(one_based), BytesIO(zero_based)], zero_based="auto")
    assert_equal(X1.shape, (1, 4))
    assert_equal(X2.shape, (1, 4))
def test_load_with_qid():
    """qid annotations are skipped or returned depending on query_id."""
    data = b("""
3 qid:1 1:0.53 2:0.12
2 qid:1 1:0.13 2:0.1
7 qid:2 1:0.87 2:0.12""")

    # query_id=False: the qid column is silently dropped.
    X, y = load_svmlight_file(BytesIO(data), query_id=False)
    assert_array_equal(y, [3, 2, 7])
    assert_array_equal(X.toarray(), [[.53, .12], [.13, .1], [.87, .12]])

    # query_id=True: both loaders also return the qid column.
    for X, y, qid in (load_svmlight_files([BytesIO(data)], query_id=True),
                      load_svmlight_file(BytesIO(data), query_id=True)):
        assert_array_equal(y, [3, 2, 7])
        assert_array_equal(qid, [1, 1, 2])
        assert_array_equal(X.toarray(), [[.53, .12], [.13, .1], [.87, .12]])
def test_load_invalid_file2():
    """One malformed file in a multi-file load fails the whole call."""
    assert_raises(ValueError, load_svmlight_files,
                  [datafile, invalidfile, datafile])
def test_not_a_filename():
    """A float is neither a path nor a file descriptor."""
    # in python 3 integers are valid file opening arguments (taken as unix
    # file descriptors)
    assert_raises(TypeError, load_svmlight_file, .42)
def test_invalid_filename():
    """A non-existent path raises IOError."""
    assert_raises(IOError, load_svmlight_file, "trou pic nic douille")
def test_dump():
    """Round-trip dump_svmlight_file/load_svmlight_file over dense, sparse
    and sliced inputs, for both indexing conventions and several dtypes,
    checking the emitted header comments along the way."""
    Xs, y = load_svmlight_file(datafile)
    Xd = Xs.toarray()

    # slicing a csr_matrix can unsort its .indices, so test that we sort
    # those correctly
    Xsliced = Xs[np.arange(Xs.shape[0])]

    for X in (Xs, Xd, Xsliced):
        for zero_based in (True, False):
            for dtype in [np.float32, np.float64, np.int32]:
                f = BytesIO()
                # we need to pass a comment to get the version info in;
                # LibSVM doesn't grok comments so they're not put in by
                # default anymore.
                dump_svmlight_file(X.astype(dtype), y, f, comment="test",
                                   zero_based=zero_based)
                f.seek(0)

                # First header line carries the scikit-learn version.
                comment = f.readline()
                try:
                    comment = str(comment, "utf-8")
                except TypeError:  # fails in Python 2.x
                    pass
                assert_in("scikit-learn %s" % sklearn.__version__, comment)

                # Second header line states the indexing convention.
                comment = f.readline()
                try:
                    comment = str(comment, "utf-8")
                except TypeError:  # fails in Python 2.x
                    pass
                assert_in(["one", "zero"][zero_based] + "-based", comment)

                X2, y2 = load_svmlight_file(f, dtype=dtype,
                                            zero_based=zero_based)
                assert_equal(X2.dtype, dtype)
                # Reloaded indices must already be sorted.
                assert_array_equal(X2.sorted_indices().indices, X2.indices)
                if dtype == np.float32:
                    assert_array_almost_equal(
                        # allow a rounding error at the last decimal place
                        Xd.astype(dtype), X2.toarray(), 4)
                else:
                    assert_array_almost_equal(
                        # allow a rounding error at the last decimal place
                        Xd.astype(dtype), X2.toarray(), 15)
                assert_array_equal(y, y2)
def test_dump_multilabel():
    """dump_svmlight_file writes comma-separated multilabel targets."""
    X = [[1, 0, 3, 0, 5],
         [0, 0, 0, 0, 0],
         [0, 5, 0, 1, 0]]
    y = [[0, 1, 0], [1, 0, 1], [1, 1, 0]]

    f = BytesIO()
    dump_svmlight_file(X, y, f, multilabel=True)
    f.seek(0)

    # make sure it dumps multilabel correctly
    for expected in (b("1 0:1 2:3 4:5\n"),
                     b("0,2 \n"),
                     b("0,1 1:5 3:1\n")):
        assert_equal(f.readline(), expected)
def test_dump_concise():
    """The dumper emits the shortest representation that round-trips."""
    one = 1
    two = 2.1
    three = 3.01
    exact = 1.000000000000001
    # loses the last decimal place
    almost = 1.0000000000000001
    X = [[one, two, three, exact, almost],
         [1e9, 2e18, 3e27, 0, 0],
         [0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0],
         [0, 0, 0, 0, 0]]
    y = [one, two, three, exact, almost]

    f = BytesIO()
    dump_svmlight_file(X, y, f)
    f.seek(0)

    # make sure it's using the most concise format possible
    for expected in ("1 0:1 1:2.1 2:3.01 3:1.000000000000001 4:1\n",
                     "2.1 0:1000000000 1:2e+18 2:3e+27\n",
                     "3.01 \n",
                     "1.000000000000001 \n",
                     "1 \n"):
        assert_equal(f.readline(), b(expected))

    # make sure it's correct too :)
    f.seek(0)
    X2, y2 = load_svmlight_file(f)
    assert_array_almost_equal(X, X2.toarray())
    assert_array_equal(y, y2)
def test_dump_comment():
    """Comments round-trip; undecodable or NUL-bearing comments fail."""
    X, y = load_svmlight_file(datafile)
    X = X.toarray()

    def _roundtrip(comment):
        # Dump with the comment, reload, and check the data survived.
        buf = BytesIO()
        dump_svmlight_file(X, y, buf, comment=comment, zero_based=False)
        buf.seek(0)
        X2, y2 = load_svmlight_file(buf, zero_based=False)
        assert_array_almost_equal(X, X2.toarray())
        assert_array_equal(y, y2)

    _roundtrip("This is a comment\nspanning multiple lines.")

    # XXX we have to update this to support Python 3.x
    utf8_comment = b("It is true that\n\xc2\xbd\xc2\xb2 = \xc2\xbc")
    assert_raises(UnicodeDecodeError,
                  dump_svmlight_file, X, y, BytesIO(), comment=utf8_comment)

    # The same text is fine once decoded to unicode.
    _roundtrip(utf8_comment.decode("utf-8"))

    # Embedded NUL bytes are rejected outright.
    assert_raises(ValueError,
                  dump_svmlight_file, X, y, BytesIO(),
                  comment="I've got a \0.")
def test_dump_invalid():
    """Mis-shaped or mis-sized targets are rejected by the dumper."""
    X, y = load_svmlight_file(datafile)

    # y wrapped in an extra dimension.
    assert_raises(ValueError, dump_svmlight_file, X, [y], BytesIO())

    # y one element shorter than X has rows.
    assert_raises(ValueError, dump_svmlight_file, X, y[:-1], BytesIO())
def test_dump_query_id():
    """A dump that includes a query_id column round-trips it."""
    X, y = load_svmlight_file(datafile)
    X = X.toarray()
    # Two consecutive rows share each query id.
    query_id = np.arange(X.shape[0]) // 2

    buf = BytesIO()
    dump_svmlight_file(X, y, buf, query_id=query_id, zero_based=True)
    buf.seek(0)

    X1, y1, qid1 = load_svmlight_file(buf, query_id=True, zero_based=True)
    assert_array_almost_equal(X, X1.toarray())
    assert_array_almost_equal(y, y1)
    assert_array_almost_equal(query_id, qid1)
| bsd-3-clause |
bnprk/django-oscar | src/oscar/apps/customer/app.py | 48 | 10703 | from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.views import generic
from oscar.core.application import Application
from oscar.core.loading import get_class
class CustomerApplication(Application):
    """URL/view wiring for the customer-facing account area.

    Every view class is resolved through ``get_class`` so that projects
    can override an individual view without re-declaring this whole
    application.
    """
    name = 'customer'
    # Account dashboard and order history.
    summary_view = get_class('customer.views', 'AccountSummaryView')
    order_history_view = get_class('customer.views', 'OrderHistoryView')
    order_detail_view = get_class('customer.views', 'OrderDetailView')
    anon_order_detail_view = get_class('customer.views',
                                       'AnonymousOrderDetailView')
    order_line_view = get_class('customer.views', 'OrderLineView')

    # Address book.
    address_list_view = get_class('customer.views', 'AddressListView')
    address_create_view = get_class('customer.views', 'AddressCreateView')
    address_update_view = get_class('customer.views', 'AddressUpdateView')
    address_delete_view = get_class('customer.views', 'AddressDeleteView')
    address_change_status_view = get_class('customer.views',
                                           'AddressChangeStatusView')

    # Email history.
    email_list_view = get_class('customer.views', 'EmailHistoryView')
    email_detail_view = get_class('customer.views', 'EmailDetailView')

    # Authentication and profile management.
    login_view = get_class('customer.views', 'AccountAuthView')
    logout_view = get_class('customer.views', 'LogoutView')
    register_view = get_class('customer.views', 'AccountRegistrationView')
    profile_view = get_class('customer.views', 'ProfileView')
    profile_update_view = get_class('customer.views', 'ProfileUpdateView')
    profile_delete_view = get_class('customer.views', 'ProfileDeleteView')
    change_password_view = get_class('customer.views', 'ChangePasswordView')

    # In-site notifications.
    notification_inbox_view = get_class('customer.notifications.views',
                                        'InboxView')
    notification_archive_view = get_class('customer.notifications.views',
                                          'ArchiveView')
    notification_update_view = get_class('customer.notifications.views',
                                         'UpdateView')
    notification_detail_view = get_class('customer.notifications.views',
                                         'DetailView')

    # Product availability alerts.
    alert_list_view = get_class('customer.alerts.views',
                                'ProductAlertListView')
    alert_create_view = get_class('customer.alerts.views',
                                  'ProductAlertCreateView')
    alert_confirm_view = get_class('customer.alerts.views',
                                   'ProductAlertConfirmView')
    alert_cancel_view = get_class('customer.alerts.views',
                                  'ProductAlertCancelView')

    # Wish lists.
    wishlists_add_product_view = get_class('customer.wishlists.views',
                                           'WishListAddProduct')
    wishlists_list_view = get_class('customer.wishlists.views',
                                    'WishListListView')
    wishlists_detail_view = get_class('customer.wishlists.views',
                                      'WishListDetailView')
    wishlists_create_view = get_class('customer.wishlists.views',
                                      'WishListCreateView')
    # NOTE(review): defined but get_urls below routes
    # 'wishlists-create-with-product' through wishlists_create_view —
    # confirm this attribute is intentionally kept as an override hook.
    wishlists_create_with_product_view = get_class('customer.wishlists.views',
                                                   'WishListCreateView')
    wishlists_update_view = get_class('customer.wishlists.views',
                                      'WishListUpdateView')
    wishlists_delete_view = get_class('customer.wishlists.views',
                                      'WishListDeleteView')
    wishlists_remove_product_view = get_class('customer.wishlists.views',
                                              'WishListRemoveProduct')
    wishlists_move_product_to_another_view = get_class(
        'customer.wishlists.views', 'WishListMoveProductToAnotherWishList')

    def get_urls(self):
        """Build the customer-area url patterns.

        Most views are wrapped in ``login_required``; the exceptions are
        commented inline.  The list is passed through
        ``post_process_urls`` before being returned.
        """
        urls = [
            # Login, logout and register doesn't require login
            url(r'^login/$', self.login_view.as_view(), name='login'),
            url(r'^logout/$', self.logout_view.as_view(), name='logout'),
            url(r'^register/$', self.register_view.as_view(), name='register'),
            url(r'^$', login_required(self.summary_view.as_view()),
                name='summary'),
            url(r'^change-password/$',
                login_required(self.change_password_view.as_view()),
                name='change-password'),

            # Profile
            url(r'^profile/$',
                login_required(self.profile_view.as_view()),
                name='profile-view'),
            url(r'^profile/edit/$',
                login_required(self.profile_update_view.as_view()),
                name='profile-update'),
            url(r'^profile/delete/$',
                login_required(self.profile_delete_view.as_view()),
                name='profile-delete'),

            # Order history
            url(r'^orders/$',
                login_required(self.order_history_view.as_view()),
                name='order-list'),
            # Anonymous access is allowed via a hash in the URL.
            url(r'^order-status/(?P<order_number>[\w-]*)/(?P<hash>\w+)/$',
                self.anon_order_detail_view.as_view(), name='anon-order'),
            url(r'^orders/(?P<order_number>[\w-]*)/$',
                login_required(self.order_detail_view.as_view()),
                name='order'),
            url(r'^orders/(?P<order_number>[\w-]*)/(?P<line_id>\d+)$',
                login_required(self.order_line_view.as_view()),
                name='order-line'),

            # Address book
            url(r'^addresses/$',
                login_required(self.address_list_view.as_view()),
                name='address-list'),
            url(r'^addresses/add/$',
                login_required(self.address_create_view.as_view()),
                name='address-create'),
            url(r'^addresses/(?P<pk>\d+)/$',
                login_required(self.address_update_view.as_view()),
                name='address-detail'),
            url(r'^addresses/(?P<pk>\d+)/delete/$',
                login_required(self.address_delete_view.as_view()),
                name='address-delete'),
            url(r'^addresses/(?P<pk>\d+)/'
                r'(?P<action>default_for_(billing|shipping))/$',
                login_required(self.address_change_status_view.as_view()),
                name='address-change-status'),

            # Email history
            url(r'^emails/$',
                login_required(self.email_list_view.as_view()),
                name='email-list'),
            url(r'^emails/(?P<email_id>\d+)/$',
                login_required(self.email_detail_view.as_view()),
                name='email-detail'),

            # Notifications
            # Redirect to notification inbox
            url(r'^notifications/$', generic.RedirectView.as_view(
                url='/accounts/notifications/inbox/')),
            url(r'^notifications/inbox/$',
                login_required(self.notification_inbox_view.as_view()),
                name='notifications-inbox'),
            url(r'^notifications/archive/$',
                login_required(self.notification_archive_view.as_view()),
                name='notifications-archive'),
            url(r'^notifications/update/$',
                login_required(self.notification_update_view.as_view()),
                name='notifications-update'),
            url(r'^notifications/(?P<pk>\d+)/$',
                login_required(self.notification_detail_view.as_view()),
                name='notifications-detail'),

            # Alerts
            # Alerts can be setup by anonymous users: some views do not
            # require login
            url(r'^alerts/$',
                login_required(self.alert_list_view.as_view()),
                name='alerts-list'),
            url(r'^alerts/create/(?P<pk>\d+)/$',
                self.alert_create_view.as_view(),
                name='alert-create'),
            url(r'^alerts/confirm/(?P<key>[a-z0-9]+)/$',
                self.alert_confirm_view.as_view(),
                name='alerts-confirm'),
            url(r'^alerts/cancel/key/(?P<key>[a-z0-9]+)/$',
                self.alert_cancel_view.as_view(),
                name='alerts-cancel-by-key'),
            url(r'^alerts/cancel/(?P<pk>[a-z0-9]+)/$',
                login_required(self.alert_cancel_view.as_view()),
                name='alerts-cancel-by-pk'),

            # Wishlists
            url(r'wishlists/$',
                login_required(self.wishlists_list_view.as_view()),
                name='wishlists-list'),
            url(r'wishlists/add/(?P<product_pk>\d+)/$',
                login_required(self.wishlists_add_product_view.as_view()),
                name='wishlists-add-product'),
            url(r'wishlists/(?P<key>[a-z0-9]+)/add/(?P<product_pk>\d+)/',
                login_required(self.wishlists_add_product_view.as_view()),
                name='wishlists-add-product'),
            url(r'wishlists/create/$',
                login_required(self.wishlists_create_view.as_view()),
                name='wishlists-create'),
            url(r'wishlists/create/with-product/(?P<product_pk>\d+)/$',
                login_required(self.wishlists_create_view.as_view()),
                name='wishlists-create-with-product'),
            # Wishlists can be publicly shared, no login required
            url(r'wishlists/(?P<key>[a-z0-9]+)/$',
                self.wishlists_detail_view.as_view(), name='wishlists-detail'),
            url(r'wishlists/(?P<key>[a-z0-9]+)/update/$',
                login_required(self.wishlists_update_view.as_view()),
                name='wishlists-update'),
            url(r'wishlists/(?P<key>[a-z0-9]+)/delete/$',
                login_required(self.wishlists_delete_view.as_view()),
                name='wishlists-delete'),
            url(r'wishlists/(?P<key>[a-z0-9]+)/lines/(?P<line_pk>\d+)/delete/',
                login_required(self.wishlists_remove_product_view.as_view()),
                name='wishlists-remove-product'),
            url(r'wishlists/(?P<key>[a-z0-9]+)/products/(?P<product_pk>\d+)/'
                r'delete/',
                login_required(self.wishlists_remove_product_view.as_view()),
                name='wishlists-remove-product'),
            url(r'wishlists/(?P<key>[a-z0-9]+)/lines/(?P<line_pk>\d+)/move-to/'
                r'(?P<to_key>[a-z0-9]+)/$',
                login_required(self.wishlists_move_product_to_another_view
                               .as_view()),
                name='wishlists-move-product-to-another')]
        return self.post_process_urls(urls)
# Module-level singleton picked up by the project's root URLconf.
application = CustomerApplication()
| bsd-3-clause |
sergey-shandar/autorest | src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/Lro/fixtures/acceptancetestslro/operations/lro_retrys_operations.py | 14 | 25071 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
import uuid
from .. import models
class LRORetrysOperations(object):
"""LRORetrysOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def put201_creating_succeeded200(
        self, product=None, custom_headers=None, raw=False, **operation_config):
    """Long running put request, service returns a 500, then a 201 to the
    initial request, with an entity that contains
    ProvisioningState=’Creating’. Polls return this value until the last
    poll returns a ‘200’ with ProvisioningState=’Succeeded’.

    :param product: Product to put
    :type product: :class:`Product
     <fixtures.acceptancetestslro.models.Product>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`Product
     <fixtures.acceptancetestslro.models.Product>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/lro/retryerror/put/201/creating/succeeded/200'

    # Construct parameters
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Per-call correlation id so retried requests can be traced.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    if product is not None:
        body_content = self._serialize.body(product, 'Product')
    else:
        body_content = None

    # Construct and send request
    def long_running_send():
        # Issue the initial PUT that starts the long-running operation.
        request = self._client.put(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the status link handed back by the service.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # Map the terminal HTTP response to a Product (or raw response).
        if response.status_code not in [200, 201]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('Product', response)
        if response.status_code == 201:
            deserialized = self._deserialize('Product', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    if raw:
        # raw=True bypasses polling: return the first response directly.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def put_async_relative_retry_succeeded(
        self, product=None, custom_headers=None, raw=False, **operation_config):
    """Long running put request, service returns a 500, then a 200 to the
    initial request, with an entity that contains
    ProvisioningState=’Creating’. Poll the endpoint indicated in the
    Azure-AsyncOperation header for operation status.

    :param product: Product to put
    :type product: :class:`Product
     <fixtures.acceptancetestslro.models.Product>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`Product
     <fixtures.acceptancetestslro.models.Product>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/lro/retryerror/putasync/retry/succeeded'

    # Construct parameters
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Per-call correlation id so retried requests can be traced.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    if product is not None:
        body_content = self._serialize.body(product, 'Product')
    else:
        body_content = None

    # Construct and send request
    def long_running_send():
        # Issue the initial PUT that starts the long-running operation.
        request = self._client.put(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the Azure-AsyncOperation status link.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # Only 200 is terminal; anything else is surfaced as CloudError.
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None
        header_dict = {}

        if response.status_code == 200:
            deserialized = self._deserialize('Product', response)
            header_dict = {
                'Azure-AsyncOperation': 'str',
                'Location': 'str',
                'Retry-After': 'int',
            }

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            client_raw_response.add_headers(header_dict)
            return client_raw_response

        return deserialized

    if raw:
        # raw=True bypasses polling: return the first response directly.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def delete_provisioning202_accepted200_succeeded(
        self, custom_headers=None, raw=False, **operation_config):
    """Long running delete request, service returns a 500, then a 202 to the
    initial request, with an entity that contains
    ProvisioningState=’Accepted’. Polls return this value until the last
    poll returns a ‘200’ with ProvisioningState=’Succeeded’.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns :class:`Product
     <fixtures.acceptancetestslro.models.Product>`
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/lro/retryerror/delete/provisioning/202/accepted/200/succeeded'

    # Construct parameters
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Per-call correlation id so retried requests can be traced.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    def long_running_send():
        # Issue the initial DELETE (no request body).
        request = self._client.delete(url, query_parameters)
        return self._client.send(request, header_parameters, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the status link handed back by the service.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # Both 200 and 202 carry a Product body plus polling headers.
        if response.status_code not in [200, 202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None
        header_dict = {}

        if response.status_code == 200:
            deserialized = self._deserialize('Product', response)
            header_dict = {
                'Location': 'str',
                'Retry-After': 'int',
            }
        if response.status_code == 202:
            deserialized = self._deserialize('Product', response)
            header_dict = {
                'Location': 'str',
                'Retry-After': 'int',
            }

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            client_raw_response.add_headers(header_dict)
            return client_raw_response

        return deserialized

    if raw:
        # raw=True bypasses polling: return the first response directly.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def delete202_retry200(
        self, custom_headers=None, raw=False, **operation_config):
    """Long running delete request, service returns a 500, then a 202 to the
    initial request. Polls return this value until the last poll returns a
    ‘200’ with ProvisioningState=’Succeeded’.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/lro/retryerror/delete/202/retry/200'

    # Construct parameters
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Per-call correlation id so retried requests can be traced.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    def long_running_send():
        # Issue the initial DELETE (no request body).
        request = self._client.delete(url, query_parameters)
        return self._client.send(request, header_parameters, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the status link handed back by the service.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 202 is the only accepted status; no body is deserialized,
        # so the non-raw result is an implicit None.
        if response.status_code not in [202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            client_raw_response.add_headers({
                'Location': 'str',
                'Retry-After': 'int',
            })
            return client_raw_response

    if raw:
        # raw=True bypasses polling: return the first response directly.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def delete_async_relative_retry_succeeded(
        self, custom_headers=None, raw=False, **operation_config):
    """Long running delete request, service returns a 500, then a 202 to the
    initial request. Poll the endpoint indicated in the
    Azure-AsyncOperation header for operation status.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/lro/retryerror/deleteasync/retry/succeeded'

    # Construct parameters
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Per-call correlation id so retried requests can be traced.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    def long_running_send():
        # Issue the initial DELETE (no request body).
        request = self._client.delete(url, query_parameters)
        return self._client.send(request, header_parameters, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the Azure-AsyncOperation status link.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 202 is the only accepted status; no body is deserialized,
        # so the non-raw result is an implicit None.
        if response.status_code not in [202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            client_raw_response.add_headers({
                'Azure-AsyncOperation': 'str',
                'Location': 'str',
                'Retry-After': 'int',
            })
            return client_raw_response

    if raw:
        # raw=True bypasses polling: return the first response directly.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def post202_retry200(
        self, product=None, custom_headers=None, raw=False, **operation_config):
    """Long running post request, service returns a 500, then a 202 to the
    initial request, with 'Location' and 'Retry-After' headers, Polls
    return a 200 with a response body after success.

    :param product: Product to put
    :type product: :class:`Product
     <fixtures.acceptancetestslro.models.Product>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/lro/retryerror/post/202/retry/200'

    # Construct parameters
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        # Per-call correlation id so retried requests can be traced.
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    if product is not None:
        body_content = self._serialize.body(product, 'Product')
    else:
        body_content = None

    # Construct and send request
    def long_running_send():
        # Issue the initial POST that starts the long-running operation.
        request = self._client.post(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Poll the Location status link handed back by the service.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # 202 is the only accepted status; no body is deserialized,
        # so the non-raw result is an implicit None.
        if response.status_code not in [202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            client_raw_response.add_headers({
                'Location': 'str',
                'Retry-After': 'int',
            })
            return client_raw_response

    if raw:
        # raw=True bypasses polling: return the first response directly.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
def post_async_relative_retry_succeeded(
        self, product=None, custom_headers=None, raw=False, **operation_config):
    """Long running post request, service returns a 500, then a 202 to the
    initial request, with an entity that contains
    ProvisioningState='Creating'. Poll the endpoint indicated in the
    Azure-AsyncOperation header for operation status.

    :param product: Product to put
    :type product: :class:`Product
     <fixtures.acceptancetestslro.models.Product>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :rtype:
     :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
     instance that returns None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/lro/retryerror/postasync/retry/succeeded'
    # Construct parameters
    query_parameters = {}
    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
    # Construct body
    if product is not None:
        body_content = self._serialize.body(product, 'Product')
    else:
        body_content = None

    # Construct and send request.  The three closures below capture the
    # request parameters so the poller can (re)issue calls as needed.
    def long_running_send():
        # Issues the initial POST that starts the long-running operation.
        request = self._client.post(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        # Polls the Azure-AsyncOperation status endpoint.
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(
            request, header_parameters, **operation_config)

    def get_long_running_output(response):
        # Anything other than 202 from the initial request is an error.
        if response.status_code not in [202]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            client_raw_response.add_headers({
                'Azure-AsyncOperation': 'str',
                'Location': 'str',
                'Retry-After': 'int',
            })
            return client_raw_response

    if raw:
        # Caller wants the raw initial response: send synchronously,
        # no polling.
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
| mit |
cristian99garcia/pilas-activity | pilas/actores/mapa.py | 1 | 4881 | # -*- encoding: utf-8 -*-
# Pilas engine - A video game framework.
#
# Copyright 2010 - Hugo Ruscitti
# License: LGPLv3 (see http://www.gnu.org/licenses/lgpl.html)
#
# Website - http://www.pilas-engine.com.ar
import pilas
from pilas.actores import Actor
class Mapa(Actor):
    """Represents maps created from smaller images (tiles).

    This actor lets you build 'tile'-style scenery, a very popular
    technique for constructing videogame levels.

    You can create the actor from a grid, specifying each of the
    blocks yourself, or simply by using an external program called
    **tiled** (see http://www.mapeditor.org).

    For example, to create a map from a **tiled** file you can write:

        >>> mapa = pilas.actores.Mapa('untitled2.tmx')
    """

    def __init__(self, grilla_o_mapa=None, x=0, y=0, restitucion=0.56):
        """grilla_o_mapa: a tile grid object, or a path to a .tmx map file.

        restitucion: bounciness applied to the solid blocks' physics bodies.
        """
        Actor.__init__(self, 'invisible.png', x, y)
        self.restitucion = restitucion
        self.figuras = []  # physics bodies created for solid blocks
        self.bloques = []  # visible tile actors
        if not grilla_o_mapa:
            # Default: the bundled 10x10 platform tile sheet.
            grilla_o_mapa = grilla = pilas.imagenes.cargar_grilla("grillas/plataformas_10_10.png", 10, 10)
        self.grilla_o_mapa = grilla_o_mapa
        if isinstance(grilla_o_mapa, str):
            # A string argument is interpreted as a tiled .tmx file path.
            self._cargar_mapa(grilla_o_mapa)
        else:
            self.grilla = grilla_o_mapa
            self._ancho_cuadro = grilla_o_mapa.cuadro_ancho
            self._alto_cuadro = grilla_o_mapa.cuadro_alto

    def _cargar_mapa(self, archivo):
        """Load the scenery from a .tmx file (from the tiled editor)."""
        archivo = pilas.utils.obtener_ruta_al_recurso(archivo)
        # Load the main XML nodes.
        nodo = pilas.utils.xmlreader.makeRootNode(archivo)
        nodo_mapa = nodo.getChild('map')
        nodo_tileset = nodo_mapa.getChild('tileset')
        # Number of blocks in the map.
        self.columnas = int(nodo_mapa.getAttributeValue('width'))
        self.filas = int(nodo_mapa.getAttributeValue('height'))
        # Attributes of the image associated with the map.
        self._ruta = nodo_tileset.getChild('image').getAttributeValue('source')
        self._ruta = pilas.utils.obtener_ruta_al_recurso(self._ruta)
        self._ancho_imagen = int(nodo_tileset.getChild('image').getAttributeValue('width'))
        self._alto_imagen = int(nodo_tileset.getChild('image').getAttributeValue('height'))
        self._ancho_cuadro = int(nodo_tileset.getAttributeValue('tilewidth'))
        self._alto_cuadro = int(nodo_tileset.getAttributeValue('tileheight'))
        # Load the image grid referenced by the map.
        self.grilla = pilas.imagenes.cargar_grilla(self._ruta,
            self._ancho_imagen / self._ancho_cuadro,
            self._alto_imagen / self._alto_cuadro)
        # Load the map layers.
        layers = nodo.getChild('map').getChildren('layer')
        if len(layers) == 0:
            raise Exception("Debe tener al menos una capa (layer).")
        # Layer 0 (bottom) defines the non-solid blocks.
        self._crear_bloques(layers[0], solidos=False)
        # The remaining layers define solid blocks.
        for layer in layers[1:]:
            self._crear_bloques(layer, solidos=True)

    def _crear_bloques(self, capa, solidos):
        """Create the actors that represent the scenery blocks."""
        datos = capa.getChild('data').getData()
        # Turn the whole map into a matrix of numbers.
        bloques = [[int(x) for x in x.split(',') if x] for x in datos.split()]
        for (y, fila) in enumerate(bloques):
            for (x, bloque) in enumerate(fila):
                if bloque:
                    # tiled stores tile ids 1-based; 0 means an empty cell.
                    self.pintar_bloque(y, x, bloque -1, solidos)

    def pintar_bloque(self, fila, columna, indice, es_bloque_solido=False):
        """Place a single tile; optionally attach a static physics body."""
        nuevo_bloque = pilas.actores.Actor('invisible.png')
        nuevo_bloque.imagen = self.grilla
        nuevo_bloque.imagen.definir_cuadro(indice)
        # NOTE(review): the 320/240 offsets assume a 640x480 viewport —
        # confirm against the engine's window size.
        nuevo_bloque.izquierda = columna * self._ancho_cuadro - 320
        nuevo_bloque.arriba = -fila * self._alto_cuadro + 240
        self.bloques.append(nuevo_bloque)
        if es_bloque_solido:
            # Static (non-dynamic) rectangle centered on the tile.
            figura = pilas.fisica.Rectangulo(nuevo_bloque.izquierda + self._ancho_cuadro / 2,
                nuevo_bloque.arriba - self._alto_cuadro / 2,
                self._ancho_cuadro, self._alto_cuadro, dinamica=False,
                restitucion=self.restitucion)
            self.figuras.append(figura)

    def reiniciar(self):
        """Destroy all blocks and rebuild the map from its source file."""
        self._eliminar_bloques()
        if isinstance(self.grilla_o_mapa, str):
            self._cargar_mapa(self.grilla_o_mapa)

    def eliminar(self):
        """Remove the map and all of its blocks."""
        self._eliminar_bloques()

    def _eliminar_bloques(self):
        """Delete every tile actor and physics body created so far."""
        for b in self.bloques:
            b.eliminar()
        for f in self.figuras:
            f.eliminar()
| gpl-3.0 |
pinkavaj/rstt | gr-rstt/python/nle2.py | 1 | 4939 | #!/usr/bin/python3
import numpy as np
import matplotlib.pyplot as plt
import sys
class Src:
    """Reads raw float32 samples from a file and serves them as
    fixed-size blocks, averaging navg consecutive blocks per read."""

    def __init__(self, fname, block_size, navg=1):
        """fname: raw float32 file; block_size: samples per block;
        navg: number of consecutive blocks averaged by each read()."""
        self.offs = 0
        self.navg = navg
        self.block_size = block_size
        samples = np.fromfile(fname, dtype=np.float32)
        # Drop any trailing partial block, then view as rows of block_size.
        nblocks = len(samples) // block_size
        self.data = samples[:nblocks * block_size].reshape(nblocks, block_size)

    def read(self):
        """Return the average of the next navg blocks, or None when
        fewer than navg blocks remain."""
        acc = np.zeros(self.block_size)
        remaining = self.navg
        while remaining > 0 and self.offs < len(self.data):
            acc += self.data[self.offs]
            self.offs += 1
            remaining -= 1
        if remaining:
            return None
        return acc / self.navg
class Extreme:
    """Locates the largest absolute deviation of integrated power data
    from a straight line with slope mean_rel anchored at data[0]."""

    def __init__(self, data, mean_rel):
        """Data should be integral of signal power value,
        mean_rel should be relative mean value, see Split.set_mean for details."""
        self.idx = 0
        self.val = 0
        for i, sample in enumerate(data):
            # Deviation of this point from the anchored trend line.
            deviation = sample - data[0] - mean_rel * i
            if abs(deviation) > self.val:
                self.val = abs(deviation)
                self.idx = i
class Split:
    """A contiguous span [start, start+len) of an integrated power
    spectrum, carrying a per-bin relative mean (slope) estimate."""

    def __init__(self, start, _len):
        self.start = start
        self.len = _len

    def __str__(self):
        return "start = %f; len = %f; mean_rel = %f;" % (self.start, self.len, self.mean_rel)

    def get_mean(self, data):
        """Return the absolute mean over the span, derived from the
        relative mean and the span's first sample."""
        return (self.mean_rel * (self.len - 1) + data[self.start]) / self.len

    def set_mean(self, data):
        """Set relative mean value for data in range defined by Split.
        Data should be integrated power value of signal."""
        if self.len <= 1:
            self.mean_rel = 0.
        else:
            span = self.len - 1
            self.mean_rel = (data[self.start + span] - data[self.start]) / span

    def set_extreme(self, data):
        """Find and store the extreme deviation within this span."""
        self.extreme = Extreme(self.data(data), self.mean_rel)

    def data(self, data):
        """Return the slice of *data* covered by this span."""
        return data[self.start:self.start + self.len]
class Show:
    """Noise level estimation. Input is vector of FFT(1024) series."""

    def __init__(self, src):
        """src: a Src instance yielding averaged power-spectrum blocks."""
        self.src = src

    def run2(self, noise_pct = 0.33, noise_w = 0.05, threshold = 3):
        """Estimate and plot the noise floor for each block from the source.

        Args:
            noise_pct: fraction of the block (the quietest windows)
                assumed to be pure noise.
            noise_w: fraction of the block used as moving-window length.
            threshold: multiplier applied to the noise sigma when
                drawing the upper/lower band.
        """
        d = self.src.read()
        # Convert fractional parameters into bin counts.
        noise_pct = int(self.src.block_size * noise_pct)
        noise_w = int(self.src.block_size * noise_w)
        # BUGFIX: the original loop never re-read from the source, so it
        # looped forever over the first block (and raised TypeError if
        # the first read() returned None).  Now each iteration consumes
        # one block and the loop stops when the source is exhausted.
        while d is not None and len(d):
            # plot: original signal, x axis centered on bin 0
            offs = int(len(d) / 2)
            x = range(0 - offs, len(d) - offs)
            plt.plot(x, d)
            # plot: ln(original signal); replace -inf (log of zero bins)
            # with the smallest finite value so arithmetic stays finite
            d_log = [np.log(p) for p in d]
            min_ = max(d_log)
            for p in d_log:
                if p < min_ and np.isfinite(p):
                    min_ = p
            d_log = [p if np.isfinite(p) else min_ for p in d_log]
            self.write_signal('out', d_log)
            # moving average and moving second moment over noise_w bins
            mean = [sum(d_log[0:noise_w]), ]
            d_log2 = [v * v for v in d_log]
            mean2 = [sum(d_log2[0:noise_w]), ]
            for i in range(noise_w, len(d_log)):
                ii = i - noise_w
                mean.append(mean[ii] - d_log[ii] + d_log[i])
                mean2.append(mean2[ii] - d_log2[ii] + d_log2[i])
            mean = [m / noise_w for m in mean]
            mean2 = [m / noise_w for m in mean2]
            # signal dispersion (std dev) around the moving average
            s = []
            for i in range(0, len(mean)):
                s.append(np.sqrt(mean2[i] - mean[i]**2))
            # keep only the quietest noise_pct windows as the noise estimate
            s.sort()
            s = s[:noise_pct]
            s = sum(s) / len(s) * threshold
            mean.sort()
            mean = mean[:noise_pct]
            mean = sum(mean) / len(mean)
            print(mean - s, mean, mean + s)
            # plot the estimated noise band, converted back from log domain
            s_lo = [np.exp(mean - s), ] * len(d_log)
            s_m = [np.exp(mean), ] * len(d_log)
            s_hi = [np.exp(mean + s), ] * len(d_log)
            plt.plot(x, s_lo)
            plt.plot(x, s_m)
            plt.plot(x, s_hi)
            plt.show()
            plt.close()
            d = self.src.read()

    def write_signal(self, fname, data):
        """Write data to fname as comma-separated %.4f values, 8 per line.

        BUGFIX: the original ignored fname and always wrote to 'out'.
        Uses a context manager so the file is closed even on error.
        """
        with open(fname, 'w') as f:
            i = 0
            while i < len(data):
                f.write("%.4f, " % data[i])
                i += 1
                if i % 8 == 0:
                    f.write("\n")
if __name__ == '__main__':
    # Usage: nle2.py <raw_float32_file>
    # 1024-bin FFT blocks, averaging 2**11 consecutive frames per read.
    s = Show(Src(sys.argv[1], 1024, 2**11))
    s.run2()
| apache-2.0 |
PingPesto/BitList | bitlist/views.py | 1 | 4399 | from bitlist.models.user import User
from bitlist.models.song import Song
from bitlist.models.playlist import Playlist
import jobs
import json
import player
from pyramid.security import remember
from pyramid.security import forget
from pyramid.view import view_config
from pyramid.view import forbidden_view_config
from pyramid.httpexceptions import HTTPFound
from .models.song import Song
# ===== Authentication Routes ======
@view_config(route_name='home', renderer='templates/login.jinja2')
@view_config(route_name='login', renderer='templates/login.jinja2')
@forbidden_view_config(renderer='templates/login.jinja2')
def login(request):
    """Render the login form and authenticate submitted credentials.

    Serves '/', '/login' and any forbidden request.  On a successful
    'form.submitted' POST the user is remembered in the auth session and
    redirected back to came_from (defaulting to /player).
    """
    login_url = request.route_url('login')
    home_url = request.route_url('home')
    referrer = request.url
    if referrer == login_url or referrer == home_url:
        referrer = '/player' # never use the login form itself as came_from
    came_from = request.params.get('came_from', referrer)
    message = ''
    login = ''
    password = ''
    if 'form.submitted' in request.params:
        login = request.params['login']
        password = request.params['password']
        # NOTE(review): assumes User.check_password validates the pair and
        # returns a truthy value on success — confirm in models.user.
        if User.check_password(login, password):
            headers = remember(request, login)
            return HTTPFound(location = came_from,
                             headers = headers)
        message = 'Failed login'
    # Fall through: render the form again (with a failure message if any).
    return dict(
        message = message,
        url = request.application_url + '/login',
        came_from = came_from,
        login = login,
        password = password,
        )
@view_config(route_name='logout')
def logout(request):
    """Clear the auth session and bounce the user back to the login page."""
    return HTTPFound(
        location="{}/login".format(request.application_url),
        headers=forget(request))
# ====== FRONT END ROUTES ==========
@view_config(route_name='player', renderer='templates/player.jinja2',
             permission='listen')
def player_view(request):
    """Render the player page; start playback if the daemon is idle.

    If mpd is not currently playing, enqueue a random song on the default
    playlist and start it so the page never shows an empty player.
    """
    # NOTE(review): port 8000 is presumably the audio stream endpoint on
    # the same host — confirm against the mpd/icecast configuration.
    server_path = "http://{}:8000".format(request.host.split(':')[0])
    status = request.mpd.status()
    playlist = Playlist.get('default')
    if status['state'] != 'play':
        random_song = Song.get_random()
        playlist.add(request.mpd, random_song.id)
        request.mpd.play()
        status['state'] = 'play'  # reflect the playback we just started
    return { 'playlist': playlist.songs,
             'status': status,
             'player_host': server_path,
             'library': Song.objects}
@view_config(route_name='songs', renderer='json')
def library(request):
    """Return the available music library as JSON.

    NOTE(review): redis_song_library is neither defined nor imported in
    this module — as written this view raises NameError at runtime.
    Confirm the intended helper (likely a cached song listing) and import
    it, or replace with a Song query.
    """
    available_music = redis_song_library()
    return dict(songs=available_music)
@view_config(route_name='songinfo', renderer='json')
def songinfo(request):
    """Return metadata for the song whose id appears in the URL."""
    return Song.get_by_id(request.matchdict['songid'])
# ======= MUSIC DAEMON CONTROLS =======
@view_config(route_name='play', renderer='json')
def player_play(request):
    """Tell the music daemon to start/resume playback."""
    request.mpd.play()
    return {'Status': 'Success'}
@view_config(route_name='skip', renderer='json')
def player_skip(request):
    """Skip to the next track in the music daemon's queue.

    Returns a status dict for consistency with player_play; previously
    this view returned None, which the json renderer served as null.
    """
    request.mpd.next()
    return {'Status': 'Success'}
@view_config(route_name='status', renderer='json')
def player_status(request):
    """Return the music daemon's current status dict as JSON."""
    return request.mpd.status()
@view_config(route_name='playlist', renderer='json')
def player_playlist(request):
    """Return the default playlist serialized as JSON."""
    #TODO: Use a playlist other than default
    return Playlist.objects(name='default').first().to_json()
@view_config(route_name='playlistseed', renderer='json')
def player_playlist_seed(request):
    """Kick off the async cache-warming job; return its id for polling."""
    job = jobs.warm_db_cache.delay()
    return {'JobID': job.id}
@view_config(route_name='playlistenqueue', renderer='json')
def player_playlist_enqueue(request):
    """Append the requested song to the default playlist and return its songs."""
    playlist = Playlist.get('default')
    playlist.add(request.mpd, request.matchdict['song'])
    playlist.reload()  # pick up the freshly persisted entry
    return playlist.songs
# ======== FETCH API CONTROLS =======
@view_config(route_name='fetch_youtube', renderer='json')
def fetch_youtube_url(request):
    """Queue a background transcode of a YouTube video; return the job id."""
    job = jobs.transcode_youtube_link.delay(request.matchdict['videoid'])
    return {'JobID': job.id}
@view_config(route_name='fetch_soundcloud', renderer='json')
def fetch_soundcloud_url(request):
    """Queue a background transcode of a SoundCloud track; return the job id."""
    job = jobs.transcode_soundcloud_link.delay(
        request.matchdict['user'], request.matchdict['songid'])
    return {'JobID': job.id}
@view_config(route_name='fetch_spotify', renderer='json')
def fetch_spotify_url(request):
    """Queue a background transcode of a Spotify resource; return the job id."""
    job = jobs.transcode_spotify_link.delay(request.matchdict['resource'])
    return {'JobID': job.id}
# ======== Redis API CONTROLS =======
| gpl-3.0 |
sillywilly42/simian | src/simian/settings.py | 1 | 24724 | #!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Configurable settings module."""
import ConfigParser
import importlib
import logging
import os
import re
import sys
import types
from simian.auth import x509
# If True, all domain users have (readonly) access to web UI. If False, only
# define admins, support, security, etc. users have access.
# TODO(user): move setting to Datastore.
ALLOW_ALL_DOMAIN_USERS_READ_ACCESS = False
# If True, clients may view their own host report without admin rights.
ALLOW_SELF_REPORT = True

# Automatic values:
# True if running in debug mode.
DEBUG = False
# True if running in dev_appserver.
DEV_APPSERVER = False
# True if running in Google App Engine.
GAE = False
# True if running in unit testing environment.
TESTING = False
# True if running in unit testing environment and settings_test is under test.
SETTINGS_TESTING = False

# Detect runtime from SERVER_SOFTWARE; the 'testbed' check excludes the
# GAE unit-test stub, which also reports a Development server string.
if (os.environ.get('SERVER_SOFTWARE', '').startswith('Development') and
    'testbed' not in os.environ.get('SERVER_SOFTWARE', '')):
  logging.getLogger().setLevel(logging.DEBUG)
  DEBUG = True
  DEV_APPSERVER = True
  GAE = True
elif os.environ.get('SERVER_SOFTWARE', '').startswith('Google App Engine'):
  GAE = True

# If unit tests for this module are running, set SETTINGS_TEST to True.
if os.environ.get('____TESTING_SETTINGS_MODULE'):
  SETTINGS_TESTING = True

# If unit tests are running, set TESTING to True.
if 'unittest2' in sys.modules or 'unittest' in sys.modules:
  TESTING = True

if GAE:
  # Canonical hostname of the deployed app (None outside GAE).
  SERVER_HOSTNAME = os.environ.get('DEFAULT_VERSION_HOSTNAME')

# To require a second administrator to approve changes set this to True.
APPROVAL_REQUIRED = False

# To allow a group of non-admin users to make proposals set this to True.
ENABLE_PROPOSALS_GROUP = False
class BaseSettings(types.ModuleType):
  """Base class for a settings container that acts like a module.

  An instance of this class can replace a loaded module to provide read and
  write access to settings variables via attribute access.

  Don't use this class directly, use a child class.

  Child classes should implement:
    - _Get()
    - _Set()
    - _Dir()
  Child classes can optionally implement:
    - _Initialize()
    - _PopulateGlobals()
    - _CheckValidation()
  Child classes can use these helper functions to configure themselves:
    - _SetValidation()

  This class will do a few things upon initialization:
    - Copy attributes from the module e.g. __author__
    - Run _PopulateGlobals(), which will:
      - Copy already defined CONSTANTS into the settings
      - Override this method in subclasses if this feature is undesired.
    - Call _Initialize() which provides for per-class initialization.

  Due to code style requirements where constants should be UPPERCASE,
  BaseSettings is very specific about attribute and key name case handling.
  No matter how subclass underlying code stores key/value pairs, the
  settings attributes will always be made available via "UPPER_CASE_NAME".
  The _Get() and _Set() methods will always receive the corresponding
  "upper_case_name".

  e.g.
     settings.FOO         =>  _Get('foo')
     settings.FOO = 'bar' =>  _Set('foo', 'bar')
  """

  # These constant values should match the method names that perform
  # the validation (they are dispatched via getattr in _CheckValidation).
  _VALIDATION_REGEX = '_CheckValueRegex'
  _VALIDATION_FUNC = '_CheckValueFunc'
  _VALIDATION_PEM_X509_CERT = 'CheckValuePemX509Cert'
  _VALIDATION_PEM_RSA_PRIVATE_KEY = 'CheckValuePemRsaPrivateKey'
  _VALIDATION_TYPES = [
      _VALIDATION_REGEX, _VALIDATION_FUNC,
      _VALIDATION_PEM_X509_CERT, _VALIDATION_PEM_RSA_PRIVATE_KEY]

  def __init__(self, module, *args, **kwargs):
    """Init.

    Args:
      module: module, the module that this class instance is replacing.
    """
    # pylint: disable=non-parent-init-called
    types.ModuleType.__init__(self, module.__name__, *args, **kwargs)
    self._module = module
    if hasattr(module, '__doc__'):
      self.__doc__ = module.__doc__
    if hasattr(module, '__author__'):
      self.__author__ = module.__author__
    self._is_class = 1
    self._validation = {}  # maps setting name -> {validation type: args}
    self._Initialize()
    self._PopulateGlobals()

  def _Initialize(self):
    """Initialize the class.

    Example usage: set up the storage that will be used for settings.

    DO NOT call superclass _Initialize() methods!
    This method will remain specific to each class.
    """
    pass  # intentional

  def _Globals(self):
    """Return globals() or an overridden value."""
    return globals()

  def _PopulateGlobals(self, set_func=None, globals_=None):
    """Find global VARIABLES and load them as settings, if possible.

    Args:
      set_func: function, otherwise defaults to self._Set. Replacement
          function should follow the same interface.
      globals_: function, default globals, return all global variables
          as dict.
    """
    if set_func is None:
      set_func = self._Set
    if globals_ is None:
      globals_ = self._Globals
    for k in globals_():
      # Only ALL-UPPERCASE non-callable names are treated as settings.
      if k.upper() == k and not callable(globals_()[k]):
        try:
          set_func(k.lower(), globals_()[k])
        except NotImplementedError:
          break

  def _Get(self, k):
    """Get one settings item.

    Args:
      k: str, name to get. The name will always be in lowercase.
    Returns:
      any settings value
    Raises:
      NotImplementedError: if this method is not implemented.
      AttributeError(k): if this settings item does not exist.
    """
    raise NotImplementedError

  def _Set(self, k, v):
    """Set one settings item.

    Args:
      k: str, name to set. The name will always be in lowercase.
      v: str, value to set.
    Raises:
      NotImplementedError: if this method is not implemented.
    """
    raise NotImplementedError

  def _Dir(self):
    """Returns directory of all settings names as a list.

    Raises:
      NotImplementedError: if this method is not implemented.
    """
    raise NotImplementedError

  def _CheckValueRegex(self, k, v, regex):
    """Check whether v meets regex validation for setting k.

    Args:
      k: str, settings name.
      v: any value.
      regex: str or compiled re.RegexObject.
    Returns:
      None if the value is appropriate and can be set.
    Raises:
      ValueError: if the value is not appropriately formed to be set for k.
    """
    if isinstance(regex, basestring):
      regex = re.compile(regex)
    m = regex.search(str(v))
    if m is None:
      raise ValueError('Invalid value "%s" for %s' % (v, k))

  def _CheckValueFunc(self, k, v, func):
    """Check whether v meets func validation for setting k.

    Args:
      k: str, name.
      v: any value.
      func: func, callable, call and expect True/False.
    Returns:
      None if the value is appropriate and can be set.
    Raises:
      ValueError: if the value is not appropriately formed to be set for k.
    """
    if not callable(func):
      raise TypeError('func is not callable')
    b = func(k, v)
    # Deliberately strict: only an exact True passes, not any truthy value.
    if b == True:
      return
    raise ValueError('value "%s" for %s' % (v, k))

  def CheckValuePemX509Cert(self, _, v):
    """Check whether v meets PEM cert validation for setting k.

    Args:
      _: str, name, unused.
      v: any value.
    Returns:
      None if the value is appropriate and can be set.
    Raises:
      ValueError: if the value is not appropriately formed to be set for k.
    """
    try:
      _ = x509.LoadCertificateFromPEM(v)
    except x509.Error, e:
      raise ValueError(str(e))

  def CheckValuePemRsaPrivateKey(self, _, v):
    """Check whether v meets PEM RSA priv key validation for settings k.

    Args:
      _: str, name, unused..
      v: any value.
    Returns:
      None if the value is appropriate and can be set.
    Raises:
      ValueError: if the value is not appropriately formed to be set for k.
    """
    try:
      _ = x509.LoadRSAPrivateKeyFromPEM(v)
    except x509.Error, e:
      raise ValueError(str(e))

  def _CheckValidation(self, k, v):
    """Check whether v is an appropriate value for settings k.

    Args:
      k: str, name.
      v: any value.
    Returns:
      None if the value is appropriate and can be set.
    Raises:
      ValueError: if the value is not appropriately formed to be set for k.
    """
    if k not in self._validation:
      return
    for validation_type in self._validation[k]:
      # The validation_type str is also the name of the method to call
      # to perform the value check.
      getattr(self, validation_type)(
          k, v, *self._validation[k][validation_type])

  def _SetValidation(self, k, t, *validation):
    """Set validation on setting k.

    Args:
      k: str, name.
      t: str, type of validation, in self._VALIDATION_TYPES
      *validation: data to supply as validation data to validation func.
    Raises:
      ValueError: if t is invalid.
    """
    if t not in self._VALIDATION_TYPES:
      raise ValueError(t)
    if k not in self._validation:
      self._validation[k] = {}
    self._validation[k][t] = validation

  def GetValidationRegex(self, k):
    """Get regex validation for setting k.

    Args:
      k: str, name.
    Returns:
      str regex validation if one exists, otherwise None.
    """
    if k not in self._validation:
      return None
    return self._validation[k].get(self._VALIDATION_REGEX, [None])[0]

  def CheckValidation(self, k=None):
    """Check validation for setting k, or default all.

    NOTE(review): reads self._settings, which only dict-backed
    subclasses define — confirm this method is unused on other backends.

    Args:
      k: str, optional, name.
    Returns:
      None if all settings values are OK.
    Raises:
      ValueError: if setting value is invalid.
    """
    if k is not None:
      if k not in self._settings:
        return
      settings_keys = [k]
    else:
      settings_keys = self._settings.keys()
    for k in settings_keys:
      self._CheckValidation(k, self._settings[k])

  def __getattr__(self, k):
    """Returns value for attribute with name k.

    Args:
      k: str, name.
    Returns:
      value at k.
    Raises:
      AttributeError: if this attribute does not exist.
    """
    # Underscore names are internal state; everything else is a setting.
    if k.startswith('_'):
      if k in self.__dict__:
        return self.__dict__[k]
      else:
        raise AttributeError(k)
    else:
      try:
        return self._Get(str(k).lower())
      except AttributeError, e:
        # Re-raise with the UPPERCASE public name callers used.
        if e.args[0] == k:
          raise AttributeError(str(k).upper())
        raise

  def __setattr__(self, k, v):
    """Sets attribute value at name k with value v.

    Args:
      k: str, name.
      v: any value, value.
    """
    if k.startswith('_'):
      self.__dict__[k] = v
    else:
      self._Set(str(k).lower(), v)

  def __dir__(self):
    """Returns list of all attribute names."""
    return [x.upper() for x in self._Dir()]
class ModuleSettings(BaseSettings):
  """Settings that uses another module for storage.

  Don't use this class directly, use a child class.
  """

  def _LoadSettingsModule(self):
    """Load the module used for settings storage and return its full name."""
    raise NotImplementedError

  def _Initialize(self):
    """Initialize the settings storage.

    Raises:
      NotImplementedError: if module access fails.
    """
    self._module_name = self._LoadSettingsModule()
    try:
      self._module = sys.modules[self._module_name]
    except (KeyError, AttributeError), e:
      raise NotImplementedError(
          'ModuleSettings not implemented correctly: %s' % str(e))

  def _Get(self, k):
    """Get one settings item.

    Args:
      k: str, name to get. The name will always be in lowercase.
    Returns:
      any settings value
    Raises:
      AttributeError: if this settings item does not exist.
    """
    # Settings live on the backing module as UPPERCASE attributes.
    if hasattr(self._module, k.upper()):
      return getattr(self._module, k.upper())
    else:
      raise AttributeError(k)

  def _Set(self, k, v):
    """Set one settings item.

    Args:
      k: str, name to set. The name will always be in lowercase.
      v: str, value to set.
    """
    self._CheckValidation(k, v)
    setattr(self._module, k.upper(), v)
class TestModuleSettings(ModuleSettings):  # pylint: disable=abstract-method
  """Settings that uses the test_settings module for storage."""

  def _LoadSettingsModule(self):
    """Load the test_settings module and return its name.

    The import is only a probe that the module is importable; the
    parent class resolves it via sys.modules by the returned name.

    Returns:
      str, fully qualified module name.
    Raises:
      ImportError: if the test_settings module could not be loaded.
    """
    try:
      # pylint: disable=g-importing-member
      # pylint: disable=g-import-not-at-top
      # pylint: disable=no-name-in-module
      from tests.simian import test_settings as unused_foo
    except ImportError:
      raise ImportError(
          'Missing test_settings, check dependencies')
    return 'tests.simian.test_settings'
class DictSettings(BaseSettings):
  """Settings container backed by an in-memory dictionary."""

  def _Initialize(self):
    """Create the empty dict that stores all settings."""
    self._settings = {}

  def _Get(self, k):
    """Return the value stored for lowercase name k.

    Args:
      k: str, name to get. The name will always be in lowercase.
    Returns:
      any settings value
    Raises:
      AttributeError: if this settings item does not exist.
    """
    try:
      return self._settings[k]
    except KeyError:
      raise AttributeError(k)

  def _Set(self, k, v):
    """Validate and store value v under lowercase name k.

    Args:
      k: str, name to set. The name will always be in lowercase.
      v: str, value to set.
    """
    self._CheckValidation(k, v)
    self._settings[k] = v

  def _Dir(self):
    """Return all known settings names."""
    return self._settings.keys()
class SimianDictSettings(DictSettings):  # pylint: disable=abstract-method
  """Settings stored in a dictionary for Simian."""

  def _IsCaIdValid(self, _, v):
    """Is this settings ca_id value valid?

    Args:
      _: str, key being checked. See _VALIDATION_FUNC interface. Unused.
      v: unknown type (probably None or str), check this ca_id value.
    Returns:
      True if valid, False if not.
    """
    # TODO(user): Refactor. This regex is also @ simian/auth:util.CA_ID_RE
    return v is None or re.search(r'^[A-Z][0-9A-Z]+$', str(v)) is not None

  def _Initialize(self):
    """Register validation rules for all well-known Simian settings."""
    # We do this to initialize underlying DictSettings, nothing more:
    super(SimianDictSettings, self)._Initialize()
    # Loose RFC-style email address matcher used by several rules below.
    mail_regex = (
        r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}\b')
    # Common settings
    self._SetValidation(
        'ca_public_cert_pem', self._VALIDATION_PEM_X509_CERT)
    self._SetValidation(
        'server_public_cert_pem', self._VALIDATION_PEM_X509_CERT)
    # Note: One should add other CA_ID based parameter validation here.
    # Client specific settings
    self._SetValidation('ca_id', self._VALIDATION_FUNC, self._IsCaIdValid)
    # Server specific settings
    self._SetValidation(
        'apple_auto_promote_enabled', self._VALIDATION_REGEX,
        r'^(True|False)$')
    self._SetValidation(
        'apple_auto_promote_stable_weekday', self._VALIDATION_REGEX,
        r'^[0-6]$')
    self._SetValidation(
        'apple_auto_unattended_enabled', self._VALIDATION_REGEX,
        r'^(True|False)$')
    self._SetValidation(
        'apple_unstable_grace_period_days', self._VALIDATION_REGEX,
        r'^[0-9]+$')
    self._SetValidation(
        'apple_testing_grace_period_days', self._VALIDATION_REGEX,
        r'^[0-9]+$')
    self._SetValidation(
        'email_admin_list', self._VALIDATION_REGEX,
        r'^%s' % mail_regex)
    self._SetValidation(
        'email_domain', self._VALIDATION_REGEX,
        r'^\w+(\.\w+)*(\.[a-z]{2,4})$')
    self._SetValidation(
        'email_on_every_change', self._VALIDATION_REGEX,
        r'^(True|False)$')
    self._SetValidation(
        'email_sender', self._VALIDATION_REGEX,
        r'^([\w ]+ <%s>|%s)$' % (mail_regex, mail_regex))
    self._SetValidation(
        'email_reply_to', self._VALIDATION_REGEX,
        r'^([\w ]+ <%s>|%s)$' % (mail_regex, mail_regex))
    self._SetValidation(
        'hour_start', self._VALIDATION_REGEX,
        r'^[0-9]+$')
    self._SetValidation(
        'hour_stop', self._VALIDATION_REGEX,
        r'^[0-9]+$')
    self._SetValidation(
        'uuid_lookup_url', self._VALIDATION_REGEX,
        r'^https?\:\/\/[a-zA-Z0-9\-\.]+(\.[a-zA-Z]{2,3})?(\/\S*)?$')
    self._SetValidation(
        'owner_lookup_url', self._VALIDATION_REGEX,
        r'^https?\:\/\/[a-zA-Z0-9\-\.]+(\.[a-zA-Z]{2,3})?(\/\S*)?$')
    self._SetValidation(
        'server_private_key_pem', self._VALIDATION_PEM_RSA_PRIVATE_KEY)
class FilesystemSettings(SimianDictSettings):
"""Settings that uses the filesystem for read-only storage."""
_path = os.environ.get('SIMIAN_CONFIG_PATH') or '/etc/simian/'
def _PopulateGlobals(self, set_func=None, globals_=None):
"""Populate global variables into the settings dict."""
self._Set('server_port', 443)
def _TranslateValue(self, value):
"""Translate incoming str value into other types.
Args:
value: str, e.g. 'hello' or '1' or 'True'
Returns:
e.g. (str)'hello', (int)1, (bool)True
"""
try:
i = int(value)
return i
except ValueError:
pass
if value.lower() in ['true', 'false']:
return value.lower() == 'true'
try:
if value[0] == '\'' and value[-1] == '\'':
value = value[1:-1]
elif value[0] == '\"' and value[-1] == '\"':
value = value[1:-1]
elif value[0] == '[' and value[-1] == ']':
value = re.split(r'\s*,\s*', value[1:-1])
except IndexError:
pass
return value
def _GetExternalConfiguration(
self, name, default=None, path=None, as_file=False, open_=open,
isdir_=os.path.isdir, join_=os.path.join):
"""Get configuration from external config files.
Args:
name: str, name of configuration file.
default: object, default None, default value.
path: str, default self.path, path to config file.
as_file: bool, default False, if True read the entire file contents
and return the contents as a string. If False, interpret the file
as a config file per ConfigParser.
open_: func, default open, function to open files with, for tests.
isdir_: func, default os.path.isdir, for tests.
join_: func, default os.path.join, for tests.
Returns:
if as_file=True, string contents of entire file.
if as_file=False, dictionary of settings loaded from file.
"""
logging.debug('_GetExternalConfiguration(%s)', name)
if path is None:
path = self._path
config = {}
if not isdir_(path):
logging.error('Configuration directory not found: %s', path)
value = None
elif as_file:
filepath = join_(path, name)
try:
f = open_(filepath, 'r')
value = f.read()
if value:
value = value.strip()
f.close()
except IOError:
value = None
else:
filepath = '%s.cfg' % join_(path, name)
config = {}
try:
f = open_(filepath, 'r')
cp = ConfigParser.ConfigParser()
cp.readfp(f)
f.close()
for i, v in cp.items('settings'):
config[i] = self._TranslateValue(v)
value = config
except (IOError, ConfigParser.Error):
value = None
if value is None:
value = default
if value is None:
logging.error('Configuration not found: %s', name)
return value
def _GetExternalPem(self, k):
"""Get an external PEM value from config.
Args:
k: str, name to retrieve.
Returns:
value
Raises:
AttributeError: if the name does not exist in external settings.
"""
logging.debug('_GetExternalPem(%s)', k)
if k in self._settings:
return self._settings[k]
pem_file = '%s.pem' % k[:-4]
path = os.path.join(self._path, 'ssl')
pem = self._GetExternalConfiguration(pem_file, as_file=True, path=path)
if pem:
self._settings[k] = pem
else:
raise AttributeError(k)
return pem
def _GetExternalValue(self, k):
"""Get an external name/value from config.
Args:
k: str, name to retrieve.
Returns:
value
Raises:
AttributeError: if the name does not exist in external settings.
"""
logging.debug('_GetExternalValue(%s)', k)
if k in self._settings:
return self._settings[k]
config = self._GetExternalConfiguration('settings')
if config is not None:
for j in config:
self._settings[j] = config[j]
if k not in self._settings:
raise AttributeError(k)
else:
raise AttributeError(k)
return self._settings[k]
def _Get(self, k):
"""Get one settings item.
Args:
k: str, name to get. The name will always be in lowercase.
Returns:
any settings value
Raises:
AttributeError: if this settings item does not exist.
"""
# oss config have domain/subdomain instead of server_hostname
if k == 'server_hostname':
try:
return self._GetExternalValue(k)
except AttributeError:
return '%s.%s' % (
self._GetExternalValue('subdomain'),
self._GetExternalValue('domain')
)
logging.debug('_Get(%s)', k)
if k.endswith('_pem'):
v = self._GetExternalPem(k)
else:
v = self._GetExternalValue(k)
return v
  def _Dir(self):
    """Returns directory of all settings names as a list.

    Abstract here; concrete subclasses (e.g. DatastoreSettings) override
    this with a real enumeration.

    Raises:
      NotImplementedError: always, in this class.
    """
    raise NotImplementedError
class DatastoreSettings(SimianDictSettings):
  """Settings stored in GAE datastore and dictionary.

  All globals are loaded into the dictionary storage, but not _Set() into
  the datastore.
  All future _Get() operations check both the dictionary and datastore.
  All future _Set() operations only affect the datastore.
  """

  def _Initialize(self):
    """Initialize the base class, then lazily import the datastore models."""
    SimianDictSettings._Initialize(self)
    self._module.models = importlib.import_module(
        'simian.mac.models')

  def _PopulateGlobals(self, set_func=None, globals_=None):
    """Populate global variables into the settings dict.

    Args:
      set_func: ignored; a dict-only setter is always substituted (below).
      globals_: dict, optional, globals to populate from, for tests.
    """
    # Populate the global variables into the dictionary backed settings via
    # this specific set_func. Without this specific usage the global settings
    # would be populated back into datastore via _Set() calls.
    # pylint: disable=protected-access
    set_func = lambda k, v: DictSettings._Set(self, k, v)
    DictSettings._PopulateGlobals(self, set_func=set_func, globals_=globals_)

  def _Get(self, k):
    """Get one settings item.

    Args:
      k: str, name to get. The name will always be in lowercase.
    Returns:
      any settings value
    Raises:
      AttributeError: if this settings item does not exist.
    """
    # Try the dictionary of settings first.
    try:
      v = DictSettings._Get(self, k)  # pylint: disable=protected-access
      return v
    except AttributeError:
      pass  # Not a problem, keep trying.
    # Fall back to the datastore-backed Settings model.
    item, unused_mtime = self._module.models.Settings.GetItem(k)
    if item is None:
      raise AttributeError(k)
    return item

  def _Set(self, k, v):
    """Set one settings item (datastore only).

    Args:
      k: str, name to set. The name will always be in lowercase.
      v: str, value to set.
    """
    # Validate before writing; see _CheckValidation.
    self._CheckValidation(k, v)
    self._module.models.Settings.SetItem(k, v)

  def _Dir(self):
    """Returns directory of all settings names as a list.

    Returns:
      list of all settings names from both the dict backing store and the
      datastore, deduplicated via set union.
    """
    a = self._module.models.Settings.GetAll()
    # Set union deduplicates names present in both backing stores; avoids
    # the previous needless [x for x in a] list materialization.
    return list(set(DictSettings._Dir(self)) | set(a))  # pylint: disable=protected-access
def Setup():
  """Swap this module in sys.modules for a settings-class instance.

  The backend is chosen from the runtime environment flags. Nothing happens
  when running as a script, or when the module was already swapped (the
  replacement instance exposes 'is_class').
  """
  if __name__ == '__main__':
    return
  module = sys.modules[__name__]
  if hasattr(module, 'is_class'):
    return
  # GAE production and the dev appserver both use the datastore backend.
  if GAE or DEV_APPSERVER:
    settings_class = DatastoreSettings
  elif TESTING:
    settings_class = TestModuleSettings if not SETTINGS_TESTING else None
  else:
    settings_class = FilesystemSettings
  if settings_class is not None:
    sys.modules[__name__] = settings_class(module)
Setup()
| apache-2.0 |
scotthartbti/android_external_chromium_org | build/protoc_java.py | 90 | 1317 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generate java source files from protobufs
Usage:
protoc_java.py {protoc} {proto_path} {java_out} {stamp_file} {proto_files}
This is a helper file for the genproto_java action in protoc_java.gypi.
It performs the following steps:
1. Deletes all old sources (ensures deleted classes are not part of new jars).
2. Creates source directory.
3. Generates Java files using protoc.
4. Creates a new stamp file.
"""
import os
import shutil
import subprocess
import sys
def main(argv):
  """Regenerate Java protobuf sources and touch the stamp file.

  Args:
    argv: list of str; argv[1:5] are the protoc binary path, the proto
        include path, the java output directory and the stamp file path;
        argv[5:] are the .proto files to compile.
  Returns:
    int: 0 on success; protoc's exit code, or 1 for bad usage, otherwise.
  """
  if len(argv) < 5:
    usage()
    return 1

  protoc_path, proto_path, java_out, stamp_file = argv[1:5]
  proto_files = argv[5:]

  # Delete all old sources so classes removed from the .protos do not
  # linger inside newly built jars.
  if os.path.exists(java_out):
    shutil.rmtree(java_out)

  # Create source directory.
  os.makedirs(java_out)

  # Generate Java files using protoc.
  ret = subprocess.call(
      [protoc_path, '--proto_path', proto_path, '--java_out', java_out]
      + proto_files)

  if ret == 0:
    # Touch the stamp file. open() replaces the Python-2-only file()
    # builtin; behavior is identical on Python 2 and works on Python 3.
    with open(stamp_file, 'a'):
      os.utime(stamp_file, None)

  return ret
def usage():
  """Print the module docstring, which doubles as the usage text."""
  print(__doc__)  # stray trailing semicolon removed


if __name__ == '__main__':
  sys.exit(main(sys.argv))
| bsd-3-clause |
Barrog/C4-Datapack | data/jscript/quests/7_ATripBegins/__init__.py | 1 | 3040 | # Created by CubicVirtuoso
# Any problems feel free to drop by #l2j-datapack on irc.freenode.net
import sys
from net.sf.l2j.gameserver.model.quest import State
from net.sf.l2j.gameserver.model.quest import QuestState
from net.sf.l2j.gameserver.model.quest.jython import QuestJython as JQuest
# NPC template ids involved in the quest dialog chain
MIRABEL = 7146
ARIEL = 7148
ASTERIOS = 7154
# Quest item id carried from Ariel to Asterios
ARIELS_RECOMMENDATION = 7572
# Reward item ids (ADENA appears unused in this script — kept for reference)
ADENA = 57
SCROLL_OF_ESCAPE_GIRAN = 7559
MARK_OF_TRAVELER = 7570
class Quest(JQuest):
    """State machine for quest 7, 'A Trip Begins'.

    Dialog events advance cond 1..3 through Mirabel -> Ariel -> Asterios and
    back to Mirabel, who hands out the rewards.
    """

    def __init__(self, id, name, descr):
        JQuest.__init__(self, id, name, descr)

    def onEvent(self, event, st):
        """Handle a dialog event; returns the html page to display next."""
        htmltext = event
        if event == "7146-03.htm":
            # Quest accepted at Mirabel.
            st.set("cond", "1")
            st.setState(STARTED)
            st.playSound("ItemSound.quest_accept")
        elif event == "7148-02.htm":
            # Ariel hands over her recommendation.
            st.giveItems(ARIELS_RECOMMENDATION, 1)
            st.set("cond", "2")
            st.set("id", "2")
            st.playSound("ItemSound.quest_middle")
        elif event == "7154-02.htm":
            # Asterios collects the recommendation.
            st.takeItems(ARIELS_RECOMMENDATION, -1)
            st.set("cond", "3")
            st.set("id", "3")
            st.playSound("ItemSound.quest_middle")
        elif event == "7146-06.htm":
            # Back at Mirabel: grant rewards and finish the quest.
            st.giveItems(SCROLL_OF_ESCAPE_GIRAN, 1)
            st.giveItems(MARK_OF_TRAVELER, 1)
            st.set("cond", "0")
            st.setState(COMPLETED)
            st.playSound("ItemSound.quest_finish")
        return htmltext

    def onTalk(self, npc, st):
        # Fixed: first parameter was previously spelled 'Self'; renamed to
        # the conventional 'self' (binding is positional, behavior is equal).
        """Pick the html page shown when talking to npc in the current state."""
        htmltext = "<html><head><body>I have nothing to say you</body></html>"
        npcId = npc.getNpcId()
        cond = st.getInt("cond")
        id = st.getState()
        if id == CREATED:
            st.set("cond", "0")
            # Restricted to race ordinal() == 1 — TODO: confirm which race.
            if st.getPlayer().getRace().ordinal() == 1:
                if st.getPlayer().getLevel() >= 3:
                    htmltext = "7146-02.htm"
                else:
                    htmltext = "<html><head><body>Quest for characters level 3 above.</body></html>"
                    st.exitQuest(1)
            else:
                htmltext = "7146-01.htm"
                st.exitQuest(1)
        elif npcId == MIRABEL and id == COMPLETED:
            htmltext = "<html><head><body>I can't supply you with another Giran Scroll of Escape. Sorry traveller.</body></html>"
        elif npcId == MIRABEL and cond == 1:
            htmltext = "7146-04.htm"
        elif npcId == ARIEL and cond:
            if st.getQuestItemsCount(ARIELS_RECOMMENDATION) == 0:
                htmltext = "7148-01.htm"
            else:
                htmltext = "7148-03.htm"
        elif npcId == ASTERIOS and cond == 2 and st.getQuestItemsCount(ARIELS_RECOMMENDATION) > 0:
            htmltext = "7154-01.htm"
        elif npcId == MIRABEL and cond == 3:
            htmltext = "7146-05.htm"
        return htmltext
# Register the quest, its three states and the NPC dialog hooks.
QUEST = Quest(7,"7_ATripBegins","A Trip Begins")
CREATED = State('Start', QUEST)
STARTED = State('Started', QUEST)
COMPLETED = State('Completed', QUEST)
QUEST.setInitialState(CREATED)
# Mirabel starts the quest and is reachable in every state.
QUEST.addStartNpc(MIRABEL)
CREATED.addTalkId(MIRABEL)
COMPLETED.addTalkId(MIRABEL)
STARTED.addTalkId(MIRABEL)
STARTED.addTalkId(ARIEL)
STARTED.addTalkId(ASTERIOS)
# The recommendation is dropped/removed if the quest is abandoned.
STARTED.addQuestDrop(MIRABEL,ARIELS_RECOMMENDATION,1)
print "importing quests: 7: A Trip Begins"
| gpl-2.0 |
hpcloud-mon/monasca-perf | monasca_perf/influx_load.py | 1 | 2171 | from urlparse import urlparse
from threading import Thread
import httplib, sys, multiprocessing
from Queue import Queue
import simplejson
import time
import base64
# Load-test parameters from the command line:
#   processes x threads x requests x metrics_per_request = total points sent.
num_processes = int(sys.argv[1])
num_threads = int(sys.argv[2])
num_requests = int(sys.argv[3])
num_metrics_per_request = int(sys.argv[4])
series_name = sys.argv[5]
username = sys.argv[6]
password = sys.argv[7]
# Echo the total number of data points this run will generate.
print num_processes * num_threads * num_requests * num_metrics_per_request
# HTTP Basic auth header (strip the newline base64 may append).
auth = base64.standard_b64encode('%s:%s' % (username,password)).replace('\n','')
authorization = "Basic "
authorization += auth
headers = {"Content-type": "application/json", "Authorization": authorization }
# Target InfluxDB series endpoints; round-robined across worker threads.
urls = [
    'http://localhost:8086/db/testmetrics/series'
]
def doWork(q):
    """Worker body: take one url off the queue and hit it num_requests times."""
    target_url = q.get()
    for _ in xrange(num_requests):
        status, response = getStatus(target_url)
        doSomethingWithResult(status, response)
    # One task_done() per queued url lets doProcess's q.join() return.
    q.task_done()
def getStatus(ourl):
    """POST one batch of num_metrics_per_request points to ourl.

    Returns:
        (status, ourl) where status is the int HTTP status on success, or
        the string "error" if the request failed or returned non-200.
    """
    try:
        url = urlparse(ourl)
        conn = httplib.HTTPConnection(url.netloc)
        body = []
        points = []
        for i in xrange(num_metrics_per_request):
            # Timestamp set two minutes in the past (epoch seconds).
            epoch = (int)(time.time()) - 120
            points.append([epoch,i])
        # InfluxDB 0.8-style series payload: name + columns + rows.
        body.append({"name": series_name, "columns": ["timestamp", "value"], "points": points})
        body = simplejson.dumps(body)
        #print body
        conn.request("POST", url.path, body, headers)
        res = conn.getresponse()
        if res.status != 200:
            # Treat any non-200 the same as a transport failure below.
            raise Exception(res.status)
        return res.status, ourl
    except Exception as ex:
        print ex
        return "error", ourl
def doSomethingWithResult(status, url):
    # Hook for per-response processing; intentionally a no-op for now.
    pass
def doProcess():
    """Spin up num_threads daemon workers, feed each one a url, wait for all."""
    work_queue = Queue(num_threads)
    for _ in range(num_threads):
        worker = Thread(target=doWork, args=(work_queue,))
        worker.daemon = True
        worker.start()
    try:
        # Round-robin the configured urls across the workers.
        for i in xrange(num_threads):
            work_queue.put(urls[i % len(urls)].strip())
        work_queue.join()
    except KeyboardInterrupt:
        sys.exit(1)
if __name__ == '__main__':
    jobs = []
    # Start every worker process first...
    for i in range(num_processes):
        p = multiprocessing.Process(target=doProcess)
        jobs.append(p)
        p.start()
    # ...then join all of them. Previously only one join() ran, so either
    # launches were serialized or only the last process was waited on.
    for p in jobs:
        p.join()
| apache-2.0 |
Titulacion-Sistemas/PythonTitulacion-EV | Lib/site-packages/pywin32-219-py2.7-win32.egg/Demos/security/setuserobjectsecurity.py | 40 | 3016 | import win32security,win32api,win32con, win32process
new_privs = ((win32security.LookupPrivilegeValue('',win32security.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_ENABLE_DELEGATION_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_CHANGE_NOTIFY_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_DEBUG_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_PROF_SINGLE_PROCESS_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_SYSTEM_PROFILE_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_LOCK_MEMORY_NAME),win32con.SE_PRIVILEGE_ENABLED)
)
all_info=win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \
win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION
info=win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION|win32security.DACL_SECURITY_INFORMATION
ph=win32process.GetCurrentProcess()
th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS) ##win32con.TOKEN_ADJUST_PRIVILEGES)
win32security.AdjustTokenPrivileges(th,0,new_privs)
my_sid = win32security.GetTokenInformation(th,win32security.TokenUser)[0]
pwr_sid=win32security.LookupAccountName('','Power Users')[0]
h=win32process.GetProcessWindowStation()
sd=win32security.GetUserObjectSecurity(h,info)
dacl=sd.GetSecurityDescriptorDacl()
ace_cnt=dacl.GetAceCount()
dacl.AddAccessAllowedAce(dacl.GetAclRevision(),win32con.ACCESS_SYSTEM_SECURITY|win32con.WRITE_DAC,my_sid)
sd.SetSecurityDescriptorDacl(1,dacl,0)
sd.SetSecurityDescriptorGroup(pwr_sid,0)
sd.SetSecurityDescriptorOwner(pwr_sid,0)
win32security.SetUserObjectSecurity(h,info,sd)
new_sd=win32security.GetUserObjectSecurity(h,info)
assert new_sd.GetSecurityDescriptorDacl().GetAceCount()==ace_cnt+1,'Did not add an ace to the Dacl !!!!!!'
assert win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorOwner())[0]=='Power Users','Owner not successfully set to Power Users !!!!!'
assert win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorGroup())[0]=='Power Users','Group not successfully set to Power Users !!!!!'
| mit |
torchingloom/edx-platform | common/test/acceptance/tests/test_video_module.py | 1 | 8440 | # -*- coding: utf-8 -*-
"""
Acceptance tests for Video.
"""
from .helpers import UniqueCourseTest
from ..pages.lms.video import VideoPage
from ..pages.lms.tab_nav import TabNavPage
from ..pages.studio.auto_auth import AutoAuthPage
from ..pages.lms.course_info import CourseInfoPage
from ..fixtures.course import CourseFixture, XBlockFixtureDesc
# The same clip in three containers; the browser plays whichever it supports.
HTML5_SOURCES = [
    'https://s3.amazonaws.com/edx-course-videos/edx-intro/edX-FA12-cware-1_100.mp4',
    'https://s3.amazonaws.com/edx-course-videos/edx-intro/edX-FA12-cware-1_100.webm',
    'https://s3.amazonaws.com/edx-course-videos/edx-intro/edX-FA12-cware-1_100.ogv',
]

# Deliberately bogus extension (.mp99) to exercise the unsupported-source path.
HTML5_SOURCES_INCORRECT = [
    'https://s3.amazonaws.com/edx-course-videos/edx-intro/edX-FA12-cware-1_100.mp99',
]

# HTML5-only playback: every youtube id blanked out so the HTML5 player is used.
HTML5_METADATA = {
    'youtube_id_1_0': '',
    'youtube_id_0_75': '',
    'youtube_id_1_25': '',
    'youtube_id_1_5': '',
    'html5_sources': HTML5_SOURCES
}

# Youtube ids left at component defaults, with HTML5 sources as a fallback.
YT_HTML5_METADATA = {
    'html5_sources': HTML5_SOURCES
}
class VideoBaseTest(UniqueCourseTest):
    """
    Base class for tests of the Video Player
    Sets up the course and provides helper functions for the Video tests.
    """

    def setUp(self):
        """
        Initialization of pages and course fixture for video tests
        """
        super(VideoBaseTest, self).setUp()

        self.video = VideoPage(self.browser)
        self.tab_nav = TabNavPage(self.browser)
        self.course_info_page = CourseInfoPage(self.browser, self.course_id)

        self.course_fixture = CourseFixture(
            self.course_info['org'], self.course_info['number'],
            self.course_info['run'], self.course_info['display_name']
        )

        # Video XBlock metadata for this test; subclasses override in setUp.
        self.metadata = {}
        # Optional asset filename to upload (e.g. a transcript), or None.
        self.assets = None

    def navigate_to_video(self):
        """ Prepare the course and get to the video and render it """
        self._install_course_fixture()
        self._navigate_to_courseware_video_and_render()

    def navigate_to_video_no_render(self):
        """
        Prepare the course and get to the video unit,
        but do not wait for it to render, because
        there has been an error.
        """
        self._install_course_fixture()
        self._navigate_to_courseware_video_no_render()

    def _install_course_fixture(self):
        """ Install the course fixture that has been defined """
        if self.assets:
            self.course_fixture.add_asset(self.assets)

        # If you are not sending any metadata then `None` should be send as metadata to XBlockFixtureDesc
        # instead of empty dictionary otherwise test will not produce correct results.
        _metadata = self.metadata if self.metadata else None
        self.course_fixture.add_children(
            XBlockFixtureDesc('chapter', 'Test Chapter').add_children(
                XBlockFixtureDesc('sequential', 'Test Section').add_children(
                    XBlockFixtureDesc('vertical', 'Test Vertical-0').add_children(
                        XBlockFixtureDesc('video', 'Video', metadata=_metadata)
                    )))).install()

    def _navigate_to_courseware_video(self):
        """ Register for the course and navigate to the video unit """
        AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.course_info_page.visit()
        self.tab_nav.go_to_tab('Courseware')

    def _navigate_to_courseware_video_and_render(self):
        """ Wait for the video player to render """
        self._navigate_to_courseware_video()
        self.video.wait_for_video_player_render()

    def _navigate_to_courseware_video_no_render(self):
        """ Wait for the video Xmodule but not for rendering """
        self._navigate_to_courseware_video()
        self.video.wait_for_video_class()
class YouTubeVideoTest(VideoBaseTest):
    """ Test YouTube Video Player """

    # The previous setUp() only delegated to super() and added nothing, so it
    # has been removed; VideoBaseTest.setUp() is inherited unchanged.

    def test_video_component_rendered_in_youtube_without_html5_sources(self):
        """
        Scenario: Video component is rendered in the LMS in Youtube mode without HTML5 sources
        Given the course has a Video component in "Youtube" mode
        Then the video has rendered in "Youtube" mode
        """
        self.navigate_to_video()

        # Verify that video has rendered in "Youtube" mode
        self.assertTrue(self.video.is_video_rendered('youtube'))

    def test_cc_button_without_english_transcript_youtube_mode(self):
        """
        Scenario: CC button works correctly w/o english transcript in Youtube mode of Video component
        Given the course has a Video component in "Youtube" mode
        And I have defined a non-english transcript for the video
        And I have uploaded a non-english transcript file to assets
        Then I see the correct text in the captions
        """
        self.metadata['transcripts'] = {'zh': 'chinese_transcripts.srt'}
        self.assets = 'chinese_transcripts.srt'
        self.navigate_to_video()
        self.video.show_captions()

        # Verify that we see "好 各位同学" text in the captions
        unicode_text = "好 各位同学".decode('utf-8')
        self.assertIn(unicode_text, self.video.captions_text)

    def test_cc_button_transcripts_and_sub_fields_empty(self):
        """
        Scenario: CC button works correctly if transcripts and sub fields are empty,
        but transcript file exists in assets (Youtube mode of Video component)
        Given the course has a Video component in "Youtube" mode
        And I have uploaded a .srt.sjson file to assets
        Then I see the correct english text in the captions
        """
        self.assets = 'subs_OEoXaMPEzfM.srt.sjson'
        self.navigate_to_video()
        self.video.show_captions()

        # Verify that we see "Hi, welcome to Edx." text in the captions
        self.assertIn('Hi, welcome to Edx.', self.video.captions_text)

    def test_cc_button_hidden_if_no_translations(self):
        """
        Scenario: CC button is hidden if no translations
        Given the course has a Video component in "Youtube" mode
        Then the "CC" button is hidden
        """
        self.navigate_to_video()
        self.assertFalse(self.video.is_button_shown('CC'))
class YouTubeHtml5VideoTest(VideoBaseTest):
    """Tests for the YouTube player when HTML5 sources are also configured."""

    def setUp(self):
        super(YouTubeHtml5VideoTest, self).setUp()
        self.metadata = YT_HTML5_METADATA

    def test_video_component_rendered_in_youtube_with_unsupported_html5_sources(self):
        """
        Scenario: Video component is rendered in the LMS in Youtube mode
        with HTML5 sources that the browser does not support
        Given the course has a Video component in "Youtube_HTML5_Unsupported_Video" mode
        Then the video has rendered in "Youtube" mode
        """
        self.metadata['html5_sources'] = HTML5_SOURCES_INCORRECT
        self.navigate_to_video()

        # With every HTML5 source unplayable, the player must fall back
        # to the Youtube renderer.
        rendered_in_youtube = self.video.is_video_rendered('youtube')
        self.assertTrue(rendered_in_youtube)
class Html5VideoTest(VideoBaseTest):
    """Tests for the pure-HTML5 video player."""

    def setUp(self):
        super(Html5VideoTest, self).setUp()
        self.metadata = HTML5_METADATA

    def test_autoplay_disabled_for_video_component(self):
        """
        Scenario: Autoplay is disabled in LMS for a Video component
        Given the course has a Video component in "HTML5" mode
        Then it does not have autoplay enabled
        """
        self.navigate_to_video()

        # Autoplay must stay off in the LMS player.
        self.assertFalse(self.video.is_autoplay_enabled)

    def test_video_component_rendered_in_html5_with_unsupported_html5_sources(self):
        """
        Scenario: Video component is rendered in the LMS in HTML5 mode with HTML5 sources that doesn't supported by browser
        Given the course has a Video component in "HTML5_Unsupported_Video" mode
        Then error message is shown
        And error message has correct text
        """
        self.metadata['html5_sources'] = HTML5_SOURCES_INCORRECT
        self.navigate_to_video_no_render()

        # An error notice must appear in place of the player...
        self.assertTrue(self.video.is_error_message_shown)

        # ...and carry the expected wording.
        expected_message = 'ERROR: No playable video sources found!'
        self.assertIn(expected_message, self.video.error_message_text)
| agpl-3.0 |
shahbaz17/zamboni | mkt/search/utils.py | 11 | 2062 | from math import log10
from django.core.exceptions import ObjectDoesNotExist
from elasticsearch_dsl.search import Search as dslSearch
from statsd import statsd
from mkt.constants.base import VALID_STATUSES
class Search(dslSearch):
    """elasticsearch-dsl Search subclass that reports timings to statsd."""

    def execute(self):
        # Time the whole client-side round-trip, and also record the
        # query latency that Elasticsearch itself reported (results.took).
        with statsd.timer('search.execute'):
            results = super(Search, self).execute()
            statsd.timing('search.took', results.took)
        return results
def _property_value_by_region(obj, region=None, property=None):
    """Look up the regional metric value stored on ``obj``.

    ``property`` names a related manager on obj (e.g. 'popularity' or
    'trending'). Non-adolescent regions have their own row keyed by the
    region id; adolescent or missing regions fall back to the global row
    (region 0). QA dummy content and missing rows always yield 0.
    """
    if obj.is_dummy_content_for_qa():
        # QA fixtures must never look popular or trending.
        return 0
    region_key = region.id if (region and not region.adolescent) else 0
    manager = getattr(obj, property)
    try:
        return manager.get(region=region_key).value
    except ObjectDoesNotExist:
        return 0
def get_popularity(obj, region=None):
    """Popularity value for ``obj`` as indexed into Elasticsearch.

    Uses the global figure when no region is given or the region is not
    mature; otherwise the region-specific figure.
    """
    value = _property_value_by_region(obj, region=region, property='popularity')
    return value
def get_trending(obj, region=None):
    """Trending value for ``obj`` as indexed into Elasticsearch.

    Uses the global figure when no region is given or the region is not
    mature; otherwise the region-specific figure.
    """
    value = _property_value_by_region(obj, region=region, property='trending')
    return value
def get_boost(obj):
    """Compute the Elasticsearch index-time boost for this app.

    The boost grows with log10 of popularity — so install counts never
    completely overshadow query-time scoring — and is floored at 1.0.
    Apps in an approved status get a further 4x multiplier.
    """
    popularity_boost = log10(1 + get_popularity(obj))
    boost = popularity_boost if popularity_boost > 1.0 else 1.0
    if obj.status in VALID_STATUSES:
        # Approved apps rank a little higher.
        boost = boost * 4
    return boost
| bsd-3-clause |
abhidrona/gn-osc-custom | sites/demo/apps/order/migrations/0012_auto__add_field_paymentevent_reference.py | 16 | 33636 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Adding field 'PaymentEvent.reference': a CharField(max_length=128,
        # blank=True). keep_default=False — presumably the '' default only
        # backfills existing rows and is not kept on the column; confirm
        # against South's add_column semantics.
        db.add_column('order_paymentevent', 'reference',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=128, blank=True),
                      keep_default=False)
def backwards(self, orm):
# Deleting field 'PaymentEvent.reference'
db.delete_column('order_paymentevent', 'reference')
models = {
'address.country': {
'Meta': {'ordering': "('-is_highlighted', 'name')", 'object_name': 'Country'},
'is_highlighted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}),
'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"})
},
'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
},
'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductClass']", 'null': 'True'}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'rating': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': "orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'customer.communicationeventtype': {
'Meta': {'object_name': 'CommunicationEventType'},
'category': ('django.db.models.fields.CharField', [], {'default': "u'Order related'", 'max_length': '255'}),
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'email_body_html_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_body_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_subject_template': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sms_template': ('django.db.models.fields.CharField', [], {'max_length': '170', 'blank': 'True'})
},
'order.billingaddress': {
'Meta': {'object_name': 'BillingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.communicationevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'CommunicationEvent'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['customer.CommunicationEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'communication_events'", 'to': "orm['order.Order']"})
},
'order.line': {
'Meta': {'object_name': 'Line'},
'est_dispatch_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_price_before_discounts_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_before_discounts_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lines'", 'to': "orm['order.Order']"}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['partner.Partner']"}),
'partner_line_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'partner_line_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'partner_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'unit_cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_retail_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'order.lineattribute': {
'Meta': {'object_name': 'LineAttribute'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attributes'", 'to': "orm['order.Line']"}),
'option': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_attributes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['catalogue.Option']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'order.lineprice': {
'Meta': {'ordering': "('id',)", 'object_name': 'LinePrice'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'prices'", 'to': "orm['order.Line']"}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_prices'", 'to': "orm['order.Order']"}),
'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'})
},
'order.order': {
'Meta': {'ordering': "['-date_placed']", 'object_name': 'Order'},
'basket_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.BillingAddress']", 'null': 'True', 'blank': 'True'}),
'date_placed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'guest_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingAddress']", 'null': 'True', 'blank': 'True'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_method': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'total_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'total_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders'", 'null': 'True', 'to': "orm['auth.User']"})
},
'order.orderdiscount': {
'Meta': {'object_name': 'OrderDiscount'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'category': ('django.db.models.fields.CharField', [], {'default': "'Basket'", 'max_length': '64'}),
'frequency': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'offer_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'offer_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'discounts'", 'to': "orm['order.Order']"}),
'voucher_code': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'voucher_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'order.ordernote': {
'Meta': {'object_name': 'OrderNote'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'note_type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notes'", 'to': "orm['order.Order']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'})
},
'order.paymentevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'PaymentEvent'},
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.PaymentEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['order.Line']", 'through': "orm['order.PaymentEventQuantity']", 'symmetrical': 'False'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_events'", 'to': "orm['order.Order']"}),
'reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'})
},
'order.paymenteventquantity': {
'Meta': {'object_name': 'PaymentEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.PaymentEvent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_event_quantities'", 'to': "orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'order.paymenteventtype': {
'Meta': {'ordering': "('sequence_number',)", 'object_name': 'PaymentEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'order.shippingaddress': {
'Meta': {'object_name': 'ShippingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.shippingevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'ShippingEvent'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'shipping_events'", 'symmetrical': 'False', 'through': "orm['order.ShippingEventQuantity']", 'to': "orm['order.Line']"}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_events'", 'to': "orm['order.Order']"})
},
'order.shippingeventquantity': {
'Meta': {'object_name': 'ShippingEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.ShippingEvent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_event_quantities'", 'to': "orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'order.shippingeventtype': {
'Meta': {'ordering': "('sequence_number',)", 'object_name': 'ShippingEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'partner.partner': {
'Meta': {'object_name': 'Partner'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'partners'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.User']"})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['order'] | bsd-3-clause |
oeeagle/quantum | neutron/tests/unit/nec/test_nec_agent.py | 8 | 15285 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import copy
import itertools
import time
import mock
from oslo.config import cfg
import testtools
from neutron.agent.linux import ovs_lib
from neutron.extensions import securitygroup as ext_sg
from neutron.plugins.nec.agent import nec_neutron_agent
from neutron.tests import base
# Number of daemon_loop iterations before the mocked time.sleep aborts
# the otherwise-infinite loop (see test_daemon_loop).
DAEMON_LOOP_COUNT = 10
# Datapath ID returned by the mocked OVSBridge.get_datapath_id.
OVS_DPID = '00000629355b6943'
# Hex-prefixed form of the datapath ID, as reported to the plugin.
OVS_DPID_0X = '0x' + OVS_DPID
class TestNecAgentBase(base.BaseTestCase):
    """Common fixture that builds a NECNeutronAgent with its externals mocked."""

    def setUp(self):
        """Construct the agent under test with OVS, hostname, the periodic
        looping call and the state-report RPC all patched out, so no real
        OVS bridge or RPC connection is touched.
        """
        super(TestNecAgentBase, self).setUp()
        self.addCleanup(cfg.CONF.reset)
        self.addCleanup(mock.patch.stopall)
        cfg.CONF.set_override('rpc_backend',
                              'neutron.openstack.common.rpc.impl_fake')
        cfg.CONF.set_override('host', 'dummy-host')
        # Patch everything the agent constructor reaches out to; the
        # patches only need to be active while the agent is built.
        with contextlib.nested(
            mock.patch.object(ovs_lib.OVSBridge, 'get_datapath_id',
                              return_value=OVS_DPID),
            mock.patch('socket.gethostname', return_value='dummy-host'),
            mock.patch('neutron.openstack.common.loopingcall.'
                       'FixedIntervalLoopingCall'),
            mock.patch('neutron.agent.rpc.PluginReportStateAPI')
        ) as (get_datapath_id, gethostname,
              loopingcall, state_rpc_api):
            kwargs = {'integ_br': 'integ_br',
                      'root_helper': 'dummy_wrapper',
                      'polling_interval': 1}
            self.agent = nec_neutron_agent.NECNeutronAgent(**kwargs)
            # Keep handles on the mocks that subclass tests assert against.
            self.loopingcall = loopingcall
            self.state_rpc_api = state_rpc_api
class TestNecAgent(TestNecAgentBase):
    """Tests for the agent polling loop and agent-state reporting."""

    def _setup_mock(self):
        # Two fake VIF ports on the integration bridge; the agent starts
        # each test knowing about 'id-0' and 'id-1' (see _test_single_loop),
        # so one port is added ('id-2') and one removed ('id-0').
        vif_ports = [ovs_lib.VifPort('port1', '1', 'id-1', 'mac-1',
                                     self.agent.int_br),
                     ovs_lib.VifPort('port2', '2', 'id-2', 'mac-2',
                                     self.agent.int_br)]
        self.get_vif_ports = mock.patch.object(
            ovs_lib.OVSBridge, 'get_vif_ports',
            return_value=vif_ports).start()
        self.update_ports = mock.patch.object(
            nec_neutron_agent.NECPluginApi, 'update_ports').start()
        self.prepare_devices_filter = mock.patch.object(
            self.agent.sg_agent, 'prepare_devices_filter').start()
        self.remove_devices_filter = mock.patch.object(
            self.agent.sg_agent, 'remove_devices_filter').start()

    def _test_single_loop(self, with_exc=False, need_sync=False):
        """Run loop_handler() once and verify the resulting port state.

        On failure (with_exc) the port cache must be unchanged and
        need_sync must be set so the next iteration retries.
        """
        self.agent.cur_ports = ['id-0', 'id-1']
        self.agent.need_sync = need_sync

        self.agent.loop_handler()
        if with_exc:
            self.assertEqual(self.agent.cur_ports, ['id-0', 'id-1'])
            self.assertTrue(self.agent.need_sync)
        else:
            self.assertEqual(self.agent.cur_ports, ['id-1', 'id-2'])
            self.assertFalse(self.agent.need_sync)

    def test_single_loop_normal(self):
        self._setup_mock()
        self._test_single_loop()
        agent_id = 'nec-q-agent.dummy-host'
        # Only the delta is reported: 'id-2' added, 'id-0' removed.
        self.update_ports.assert_called_once_with(
            mock.ANY, agent_id, OVS_DPID_0X,
            [{'id': 'id-2', 'mac': 'mac-2', 'port_no': '2'}],
            ['id-0'])
        self.prepare_devices_filter.assert_called_once_with(['id-2'])
        self.remove_devices_filter.assert_called_once_with(['id-0'])

    def test_single_loop_need_sync(self):
        self._setup_mock()
        self._test_single_loop(need_sync=True)
        agent_id = 'nec-q-agent.dummy-host'
        # With need_sync set, the full port list is re-sent, nothing removed.
        self.update_ports.assert_called_once_with(
            mock.ANY, agent_id, OVS_DPID_0X,
            [{'id': 'id-1', 'mac': 'mac-1', 'port_no': '1'},
             {'id': 'id-2', 'mac': 'mac-2', 'port_no': '2'}],
            [])
        self.prepare_devices_filter.assert_called_once_with(['id-1', 'id-2'])
        self.assertFalse(self.remove_devices_filter.call_count)

    def test_single_loop_with_sg_exception_remove(self):
        # NOTE(review): this mocks update_ports, while
        # test_single_loop_with_update_ports_exception mocks
        # remove_devices_filter -- the names and bodies appear swapped;
        # confirm intent before renaming.
        self._setup_mock()
        self.update_ports.side_effect = Exception()
        self._test_single_loop(with_exc=True)

    def test_single_loop_with_sg_exception_prepare(self):
        self._setup_mock()
        self.prepare_devices_filter.side_effect = Exception()
        self._test_single_loop(with_exc=True)

    def test_single_loop_with_update_ports_exception(self):
        self._setup_mock()
        self.remove_devices_filter.side_effect = Exception()
        self._test_single_loop(with_exc=True)

    def test_daemon_loop(self):

        def state_check(index):
            # Before each sleep, the agent cache must match the scenario
            # that was fed into the just-completed iteration.
            self.assertEqual(len(self.vif_ports_scenario[index]),
                             len(self.agent.cur_ports))

        # Fake time.sleep to stop the infinite loop in daemon_loop()
        self.sleep_count = 0

        def sleep_mock(*args, **kwargs):
            state_check(self.sleep_count)
            self.sleep_count += 1
            if self.sleep_count >= DAEMON_LOOP_COUNT:
                raise RuntimeError()

        vif_ports = [ovs_lib.VifPort('port1', '1', 'id-1', 'mac-1',
                                     self.agent.int_br),
                     ovs_lib.VifPort('port2', '2', 'id-2', 'mac-2',
                                     self.agent.int_br)]
        # Per-iteration snapshots of what get_vif_ports() returns:
        # empty, empty, +id-1, +id-2, -id-1, -id-2.
        self.vif_ports_scenario = [[], [], vif_ports[0:1], vif_ports[0:2],
                                   vif_ports[1:2], []]

        # Ensure vif_ports_scenario is longer than DAEMON_LOOP_COUNT
        if len(self.vif_ports_scenario) < DAEMON_LOOP_COUNT:
            self.vif_ports_scenario.extend(
                [] for _i in xrange(DAEMON_LOOP_COUNT -
                                    len(self.vif_ports_scenario)))

        with contextlib.nested(
            mock.patch.object(time, 'sleep', side_effect=sleep_mock),
            mock.patch.object(ovs_lib.OVSBridge, 'get_vif_ports'),
            mock.patch.object(nec_neutron_agent.NECPluginApi, 'update_ports'),
            mock.patch.object(self.agent.sg_agent, 'prepare_devices_filter'),
            mock.patch.object(self.agent.sg_agent, 'remove_devices_filter')
        ) as (sleep, get_vif_potrs, update_ports,
              prepare_devices_filter, remove_devices_filter):
            get_vif_potrs.side_effect = self.vif_ports_scenario
            with testtools.ExpectedException(RuntimeError):
                self.agent.daemon_loop()

            # Only the four iterations with a port delta trigger an RPC.
            self.assertEqual(update_ports.call_count, 4)
            self.assertEqual(sleep.call_count, DAEMON_LOOP_COUNT)

            agent_id = 'nec-q-agent.dummy-host'
            expected = [
                mock.call(mock.ANY, agent_id, OVS_DPID_0X,
                          [{'id': 'id-1', 'mac': 'mac-1', 'port_no': '1'}],
                          []),
                mock.call(mock.ANY, agent_id, OVS_DPID_0X,
                          [{'id': 'id-2', 'mac': 'mac-2', 'port_no': '2'}],
                          []),
                mock.call(mock.ANY, agent_id, OVS_DPID_0X,
                          [], ['id-1']),
                mock.call(mock.ANY, agent_id, OVS_DPID_0X,
                          [], ['id-2'])
            ]
            update_ports.assert_has_calls(expected)

            expected = [mock.call(['id-1']),
                        mock.call(['id-2'])]
            self.assertEqual(prepare_devices_filter.call_count, 2)
            prepare_devices_filter.assert_has_calls(expected)
            self.assertEqual(remove_devices_filter.call_count, 2)
            remove_devices_filter.assert_has_calls(expected)

            sleep.assert_called_with(self.agent.polling_interval)

    def test_report_state_installed(self):
        # The constructor must have scheduled _report_state at a 4s interval.
        self.loopingcall.assert_called_once_with(self.agent._report_state)
        instance = self.loopingcall.return_value
        instance.start.assert_called_once_with(interval=4)

    def _check_report_state(self, cur_ports, num_ports, fail_mode,
                            first=False):
        """Invoke _report_state once and check start_flag/device-count.

        start_flag must be present on the first report, and must persist
        when every report fails (fail_mode), so the server eventually
        learns about the restart.
        """
        self.assertEqual(first or fail_mode,
                         'start_flag' in self.agent.agent_state)
        self.agent.cur_ports = cur_ports

        self.agent._report_state()

        self.assertEqual(fail_mode,
                         'start_flag' in self.agent.agent_state)
        self.assertEqual(self.agent.
                         agent_state['configurations']['devices'],
                         num_ports)
        self.num_ports_hist.append(num_ports)

    def _test_report_state(self, fail_mode):
        log_mocked = mock.patch.object(nec_neutron_agent, 'LOG')
        log_patched = log_mocked.start()

        def record_state(*args, **kwargs):
            # deepcopy: agent_state is mutated between calls, so a shallow
            # reference would make every recorded call look identical.
            self.record_calls.append(copy.deepcopy(args))
            if fail_mode:
                raise Exception()

        self.record_calls = []
        self.num_ports_hist = []
        state_rpc = self.state_rpc_api.return_value
        state_rpc.report_state.side_effect = record_state
        dummy_vif = ovs_lib.VifPort('port1', '1', 'id-1', 'mac-1', None)

        self.state_rpc_api.assert_called_once_with('q-plugin')
        self.assertIn('start_flag', self.agent.agent_state)
        self._check_report_state([], 0, fail_mode, first=True)
        self._check_report_state([dummy_vif] * 2, 2, fail_mode)
        self._check_report_state([dummy_vif] * 5, 5, fail_mode)
        self._check_report_state([], 0, fail_mode)

        # Since loopingcall start is mocked, call_count is same as
        # the call count of check_report_state.
        self.assertEqual(state_rpc.report_state.call_count, 4)
        self.assertEqual(len(self.record_calls), 4)

        for i, x in enumerate(itertools.izip(self.record_calls,
                                             self.num_ports_hist)):
            rec, num_ports = x
            expected_state = {
                'binary': 'neutron-nec-agent',
                'host': 'dummy-host',
                'topic': 'N/A',
                'configurations': {'devices': 0},
                'agent_type': 'NEC plugin agent'}
            expected_state['configurations']['devices'] = num_ports
            if i == 0 or fail_mode:
                expected_state['start_flag'] = True
            self.assertEqual(expected_state, rec[1])

        # Failed reports must be logged via LOG.exception.
        self.assertEqual(fail_mode, log_patched.exception.called)

    def test_report_state(self):
        self._test_report_state(fail_mode=False)

    def test_report_state_fail(self):
        self._test_report_state(fail_mode=True)
class TestNecAgentCallback(TestNecAgentBase):
    """Tests for the agent-side RPC callback (port_update)."""

    def test_port_update(self):
        with contextlib.nested(
            mock.patch.object(ovs_lib.OVSBridge, 'get_vif_port_by_id'),
            mock.patch.object(self.agent.sg_agent, 'refresh_firewall')
        ) as (get_vif_port_by_id, refresh_firewall):
            context = mock.Mock()
            vifport = ovs_lib.VifPort('port1', '1', 'id-1', 'mac-1',
                                      self.agent.int_br)

            # The OVS port does not exist.
            get_vif_port_by_id.return_value = None
            port = {'id': 'update-port-1'}
            self.agent.callback_nec.port_update(context, port=port)
            self.assertEqual(get_vif_port_by_id.call_count, 1)
            self.assertFalse(refresh_firewall.call_count)

            # The OVS port exists but no security group is associated.
            get_vif_port_by_id.return_value = vifport
            port = {'id': 'update-port-1'}
            self.agent.callback_nec.port_update(context, port=port)
            self.assertEqual(get_vif_port_by_id.call_count, 2)
            self.assertFalse(refresh_firewall.call_count)

            # The OVS port exists but a security group is associated.
            get_vif_port_by_id.return_value = vifport
            port = {'id': 'update-port-1',
                    ext_sg.SECURITYGROUPS: ['default']}
            self.agent.callback_nec.port_update(context, port=port)
            self.assertEqual(get_vif_port_by_id.call_count, 3)
            self.assertEqual(refresh_firewall.call_count, 1)

            # Security group present but the OVS port is gone again:
            # refresh_firewall must not be called a second time.
            get_vif_port_by_id.return_value = None
            port = {'id': 'update-port-1',
                    ext_sg.SECURITYGROUPS: ['default']}
            self.agent.callback_nec.port_update(context, port=port)
            self.assertEqual(get_vif_port_by_id.call_count, 4)
            self.assertEqual(refresh_firewall.call_count, 1)
class TestNecAgentPluginApi(TestNecAgentBase):
    """Tests for the NECPluginApi.update_ports RPC wrapper."""

    def _test_plugin_api(self, expected_failure=False):
        with contextlib.nested(
            mock.patch.object(nec_neutron_agent.NECPluginApi, 'make_msg'),
            mock.patch.object(nec_neutron_agent.NECPluginApi, 'call'),
            mock.patch.object(nec_neutron_agent, 'LOG')
        ) as (make_msg, apicall, log):
            agent_id = 'nec-q-agent.dummy-host'
            if expected_failure:
                apicall.side_effect = Exception()

            self.agent.plugin_rpc.update_ports(
                mock.sentinel.ctx, agent_id, OVS_DPID_0X,
                # port_added
                [{'id': 'id-1', 'mac': 'mac-1', 'port_no': '1'},
                 {'id': 'id-2', 'mac': 'mac-2', 'port_no': '2'}],
                # port_removed
                ['id-3', 'id-4', 'id-5'])

            # Positional args must be forwarded as the named RPC fields.
            make_msg.assert_called_once_with(
                'update_ports', topic='q-agent-notifier',
                agent_id=agent_id, datapath_id=OVS_DPID_0X,
                port_added=[{'id': 'id-1', 'mac': 'mac-1', 'port_no': '1'},
                            {'id': 'id-2', 'mac': 'mac-2', 'port_no': '2'}],
                port_removed=['id-3', 'id-4', 'id-5'])

            apicall.assert_called_once_with(mock.sentinel.ctx,
                                            make_msg.return_value)

            self.assertTrue(log.info.called)
            if expected_failure:
                self.assertTrue(log.warn.called)

    # NOTE(review): no test calls _test_plugin_api(expected_failure=True);
    # the failure branch above is currently unexercised.
    def test_plugin_api(self):
        self._test_plugin_api()
class TestNecAgentMain(base.BaseTestCase):
    """Tests for the module entry point (nec_neutron_agent.main)."""

    def test_main(self):
        with contextlib.nested(
            mock.patch.object(nec_neutron_agent, 'NECNeutronAgent'),
            mock.patch('eventlet.monkey_patch'),
            mock.patch.object(nec_neutron_agent, 'logging_config'),
            mock.patch.object(nec_neutron_agent, 'config')
        ) as (agent, eventlet, logging_config, cfg):
            # Feed configuration through the mocked config module.
            cfg.OVS.integration_bridge = 'br-int-x'
            cfg.AGENT.root_helper = 'dummy-helper'
            cfg.AGENT.polling_interval = 10
            nec_neutron_agent.main()

            self.assertTrue(eventlet.called)
            self.assertTrue(logging_config.setup_logging.called)
            # The agent must be built from the config values and then looped.
            agent.assert_has_calls([
                mock.call('br-int-x', 'dummy-helper', 10),
                mock.call().daemon_loop()
            ])
| apache-2.0 |
tyll/fas | fas/openssl_fas.py | 11 | 3067 | # Pretty much all copied from pyOpenSSL's certgen.py example and func's certs.py
# func's certs.py is GPLv2+
# pyOpenSSL is LGPL (Probably v2+)
# The pyOpenSSL examples may be under the same license but I'm not certain.
from OpenSSL import crypto
# Convenience aliases for the pyOpenSSL key-type constants.
TYPE_RSA = crypto.TYPE_RSA
TYPE_DSA = crypto.TYPE_DSA
def retrieve_key_from_file(keyfile):
    """Load a PEM-encoded private key from a file.

    Arguments: keyfile - path of a file containing a PEM private key
    Returns:   the parsed key as an OpenSSL.crypto.PKey object
    """
    # Use a context manager so the descriptor is closed even if
    # load_privatekey() raises (the original left the file open).
    with open(keyfile, 'r') as fo:
        buf = fo.read()
    return crypto.load_privatekey(crypto.FILETYPE_PEM, buf)
def retrieve_cert_from_file(certfile):
    """Load a PEM-encoded X.509 certificate from a file.

    Arguments: certfile - path of a file containing a PEM certificate
    Returns:   the parsed certificate as an OpenSSL.crypto.X509 object
    """
    # Use a context manager so the descriptor is closed even if
    # load_certificate() raises (the original left the file open).
    with open(certfile, 'r') as fo:
        buf = fo.read()
    return crypto.load_certificate(crypto.FILETYPE_PEM, buf)
def createKeyPair(key_type, bits):
    """
    Create a public/private key pair.

    Arguments: key_type - Key type, must be one of TYPE_RSA and TYPE_DSA
               bits     - Number of bits to use in the key
    Returns:   The public/private key pair in a PKey object
    """
    keypair = crypto.PKey()
    keypair.generate_key(key_type, bits)
    return keypair
def createCertRequest(pkey, digest="md5", **name):
    """
    Create a certificate request.

    Arguments: pkey   - The key to associate with the request
               digest - Digestion method to use for signing, default is md5
               **name - The name of the subject of the request, possible
                        arguments are:
                          C            - Country name
                          ST           - State or province name
                          L            - Locality name
                          O            - Organization name
                          OU           - Organizational unit name
                          CN           - Common name
                          emailAddress - E-mail address
    Returns:   The certificate request in an X509Req object
    """
    request = crypto.X509Req()
    # Copy every supplied subject component onto the request's subject.
    subject = request.get_subject()
    for attr_name, attr_value in name.items():
        setattr(subject, attr_name, attr_value)
    request.set_pubkey(pkey)
    request.sign(pkey, digest)
    return request
def createCertificate(req, issuer_cert_key, serial, validity_period,
                      digest="md5"):
    """
    Generate a certificate given a certificate request.

    Arguments: req             - Certificate request to use
               issuer_cert_key - (issuerCert, issuerKey) tuple: the
                                 certificate and private key of the issuer
               serial          - Serial number for the certificate
               validity_period - (notBefore, notAfter) tuple: timestamps
                                 (relative to now, in seconds) when the
                                 certificate starts/stops being valid
               digest          - Digest method to use for signing,
                                 default is md5
    Returns:   The signed certificate in an X509 object
    """
    # Tuple parameters in a 'def' are Python-2-only syntax (removed by
    # PEP 3113); unpack explicitly so the call signature stays identical
    # for positional callers while the code runs on Python 3 as well.
    (issuerCert, issuerKey) = issuer_cert_key
    (notBefore, notAfter) = validity_period
    cert = crypto.X509()
    cert.set_serial_number(serial)
    cert.gmtime_adj_notBefore(notBefore)
    cert.gmtime_adj_notAfter(notAfter)
    cert.set_issuer(issuerCert.get_subject())
    cert.set_subject(req.get_subject())
    cert.set_pubkey(req.get_pubkey())
    cert.sign(issuerKey, digest)
    return cert
| gpl-2.0 |
omelnic/sphinxsearch | api/test.py | 53 | 3218 | #
# $Id$
#
from sphinxapi import *
import sys, time
# Python 2 command-line client: with no arguments, print usage and exit.
if not sys.argv[1:]:
    print "Usage: python test.py [OPTIONS] query words\n"
    print "Options are:"
    print "-h, --host <HOST>\tconnect to searchd at host HOST"
    print "-p, --port\t\tconnect to searchd at port PORT"
    print "-i, --index <IDX>\tsearch through index(es) specified by IDX"
    print "-s, --sortby <EXPR>\tsort matches by 'EXPR'"
    print "-a, --any\t\tuse 'match any word' matching mode"
    print "-b, --boolean\t\tuse 'boolean query' matching mode"
    print "-e, --extended\t\tuse 'extended query' matching mode"
    print "-f, --filter <ATTR>\tfilter by attribute 'ATTR' (default is 'group_id')"
    print "-v, --value <VAL>\tadd VAL to allowed 'group_id' values list"
    print "-g, --groupby <EXPR>\tgroup matches by 'EXPR'"
    print "-gs,--groupsort <EXPR>\tsort groups by 'EXPR'"
    print "-l, --limit <COUNT>\tretrieve COUNT matches (default is 20)"
    sys.exit(0)
# Defaults for the query parameters; overridden by the flags below.
q = ''
mode = SPH_MATCH_ALL
host = 'localhost'
port = 9312
index = '*'
filtercol = 'group_id'
filtervals = []
sortby = ''
groupby = ''
groupsort = '@group desc'
limit = 0

# Hand-rolled option parsing: flags consume the next argv entry where
# they take a value; anything unrecognized is appended to the query text.
i = 1
while (i<len(sys.argv)):
    arg = sys.argv[i]
    if arg=='-h' or arg=='--host':
        i += 1
        host = sys.argv[i]
    elif arg=='-p' or arg=='--port':
        i += 1
        port = int(sys.argv[i])
    elif arg=='-i':
        i += 1
        index = sys.argv[i]
    elif arg=='-s':
        i += 1
        sortby = sys.argv[i]
    elif arg=='-a' or arg=='--any':
        mode = SPH_MATCH_ANY
    elif arg=='-b' or arg=='--boolean':
        mode = SPH_MATCH_BOOLEAN
    elif arg=='-e' or arg=='--extended':
        mode = SPH_MATCH_EXTENDED
    elif arg=='-f' or arg=='--filter':
        i += 1
        filtercol = sys.argv[i]
    elif arg=='-v' or arg=='--value':
        i += 1
        filtervals.append ( int(sys.argv[i]) )
    elif arg=='-g' or arg=='--groupby':
        i += 1
        groupby = sys.argv[i]
    elif arg=='-gs' or arg=='--groupsort':
        i += 1
        groupsort = sys.argv[i]
    elif arg=='-l' or arg=='--limit':
        i += 1
        limit = int(sys.argv[i])
    else:
        # Not a flag: treat as part of the query string.
        q = '%s%s ' % ( q, arg )
    i += 1
# do query
cl = SphinxClient()
cl.SetServer ( host, port )
cl.SetMatchMode ( mode )
if filtervals:
    cl.SetFilter ( filtercol, filtervals )
if groupby:
    cl.SetGroupBy ( groupby, SPH_GROUPBY_ATTR, groupsort )
if sortby:
    cl.SetSortMode ( SPH_SORT_EXTENDED, sortby )
if limit:
    cl.SetLimits ( 0, limit, max(limit,1000) )
res = cl.Query ( q, index )

if not res:
    print 'query failed: %s' % cl.GetLastError()
    sys.exit(1)
if cl.GetLastWarning():
    print 'WARNING: %s\n' % cl.GetLastWarning()

# Summary line, then per-word statistics and the match list.
print 'Query \'%s\' retrieved %d of %d matches in %s sec' % (q, res['total'], res['total_found'], res['time'])
print 'Query stats:'

if res.has_key('words'):
    for info in res['words']:
        print '\t\'%s\' found %d times in %d documents' % (info['word'], info['hits'], info['docs'])

if res.has_key('matches'):
    n = 1
    print '\nMatches:'
    for match in res['matches']:
        attrsdump = ''
        for attr in res['attrs']:
            attrname = attr[0]
            attrtype = attr[1]
            value = match['attrs'][attrname]
            # Render timestamp attributes as human-readable local time.
            if attrtype==SPH_ATTR_TIMESTAMP:
                value = time.strftime ( '%Y-%m-%d %H:%M:%S', time.localtime(value) )
            attrsdump = '%s, %s=%s' % ( attrsdump, attrname, value )
        print '%d. doc_id=%s, weight=%d%s' % (n, match['id'], match['weight'], attrsdump)
        n += 1

#
# $Id$
#
| gpl-2.0 |
40223101/w17test | static/Brython3.1.3-20150514-095342/Lib/http/cookies.py | 735 | 20810 | #!/usr/bin/env python3
#
####
# Copyright 2000 by Timothy O'Malley <timo@alum.mit.edu>
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software
# and its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Timothy O'Malley not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR
# ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
#
####
#
# Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp
# by Timothy O'Malley <timo@alum.mit.edu>
#
# Cookie.py is a Python module for the handling of HTTP
# cookies as a Python dictionary. See RFC 2109 for more
# information on cookies.
#
# The original idea to treat Cookies as a dictionary came from
# Dave Mitchell (davem@magnet.com) in 1995, when he released the
# first version of nscookie.py.
#
####
r"""
Here's a sample session to show how to use this module.
At the moment, this is the only documentation.
The Basics
----------
Importing is easy...
>>> from http import cookies
Most of the time you start by creating a cookie.
>>> C = cookies.SimpleCookie()
Once you've created your Cookie, you can add values just as if it were
a dictionary.
>>> C = cookies.SimpleCookie()
>>> C["fig"] = "newton"
>>> C["sugar"] = "wafer"
>>> C.output()
'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer'
Notice that the printable representation of a Cookie is the
appropriate format for a Set-Cookie: header. This is the
default behavior. You can change the header and printed
attributes by using the .output() function
>>> C = cookies.SimpleCookie()
>>> C["rocky"] = "road"
>>> C["rocky"]["path"] = "/cookie"
>>> print(C.output(header="Cookie:"))
Cookie: rocky=road; Path=/cookie
>>> print(C.output(attrs=[], header="Cookie:"))
Cookie: rocky=road
The load() method of a Cookie extracts cookies from a string. In a
CGI script, you would use this method to extract the cookies from the
HTTP_COOKIE environment variable.
>>> C = cookies.SimpleCookie()
>>> C.load("chips=ahoy; vienna=finger")
>>> C.output()
'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger'
The load() method is darn-tootin smart about identifying cookies
within a string. Escaped quotation marks, nested semicolons, and other
such trickeries do not confuse it.
>>> C = cookies.SimpleCookie()
>>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";')
>>> print(C)
Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;"
Each element of the Cookie also supports all of the RFC 2109
Cookie attributes. Here's an example which sets the Path
attribute.
>>> C = cookies.SimpleCookie()
>>> C["oreo"] = "doublestuff"
>>> C["oreo"]["path"] = "/"
>>> print(C)
Set-Cookie: oreo=doublestuff; Path=/
Each dictionary element has a 'value' attribute, which gives you
back the value associated with the key.
>>> C = cookies.SimpleCookie()
>>> C["twix"] = "none for you"
>>> C["twix"].value
'none for you'
The SimpleCookie expects that all values should be standard strings.
Just to be sure, SimpleCookie invokes the str() builtin to convert
the value to a string, when the values are set dictionary-style.
>>> C = cookies.SimpleCookie()
>>> C["number"] = 7
>>> C["string"] = "seven"
>>> C["number"].value
'7'
>>> C["string"].value
'seven'
>>> C.output()
'Set-Cookie: number=7\r\nSet-Cookie: string=seven'
Finis.
"""
#
# Import our required modules
#
import re
import string
__all__ = ["CookieError", "BaseCookie", "SimpleCookie"]

# Pre-bound join helpers used throughout the module.
_nulljoin = ''.join
_semispacejoin = '; '.join
_spacejoin = ' '.join
#
# Define an exception visible to External modules
#
class CookieError(Exception):
    """Raised on illegal cookie keys or reserved attribute names."""
    pass
# These quoting routines conform to the RFC2109 specification, which in
# turn references the character definitions from RFC2068. They provide
# a two-way quoting algorithm. Any non-text character is translated
# into a 4 character sequence: a forward-slash followed by the
# three-digit octal equivalent of the character. Any '\' or '"' is
# quoted with a preceeding '\' slash.
#
# These are taken from RFC2068 and RFC2109.
# _LegalChars is the list of chars which don't require "'s
# _Translator hash-table for fast quoting
#
# Characters that may appear in a cookie name/value without quoting.
_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~:"

# Escape table for _quote(): every C0 control character (0x00-0x1f) and
# every byte from DEL upward (0x7f-0xff) maps to a backslashed 3-digit
# octal escape.  Generated programmatically instead of the original
# 60-line hand-written literal; the resulting mapping is identical.
_Translator = {chr(_n): '\\%03o' % _n
               for _n in list(range(0x20)) + list(range(0x7f, 0x100))}
_Translator.update({
    # Because of the way browsers really handle cookies (as opposed
    # to what the RFC says) we also encode , and ;
    ',': '\\054', ';': '\\073',
    '"': '\\"', '\\': '\\\\',
})
def _quote(str, LegalChars=_LegalChars):
    r"""Quote a string for use in a cookie header.

    If the string does not need to be double-quoted, then just return the
    string.  Otherwise, surround the string in doublequotes and quote
    (with a \) special characters.
    """
    for ch in str:
        if ch not in LegalChars:
            # At least one special character: emit the quoted/escaped form.
            escaped = _nulljoin(_Translator.get(c, c) for c in str)
            return '"' + escaped + '"'
    return str
# Patterns used by _unquote(): a 3-digit octal escape and a backslashed
# single character, respectively.
_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]")
_QuotePatt = re.compile(r"[\\].")
def _unquote(str):
    r"""Reverse of _quote(): strip surrounding double quotes and decode
    backslash escapes (\" and \ooo octal sequences) per RFC 2109."""
    # If there aren't any doublequotes,
    # then there can't be any special characters.  See RFC 2109.
    if len(str) < 2:
        return str
    if str[0] != '"' or str[-1] != '"':
        return str

    # We have to assume that we must decode this string.
    # Down to work.

    # Remove the "s
    str = str[1:-1]

    # Check for special sequences.  Examples:
    #    \012 --> \n
    #    \"   --> "
    #
    i = 0
    n = len(str)
    res = []
    # Repeatedly find the earlier of the next octal escape and the next
    # quoted character, copy the literal text before it, and decode it.
    while 0 <= i < n:
        o_match = _OctalPatt.search(str, i)
        q_match = _QuotePatt.search(str, i)
        if not o_match and not q_match:              # Neither matched
            res.append(str[i:])
            break
        # else:
        j = k = -1
        if o_match:
            j = o_match.start(0)
        if q_match:
            k = q_match.start(0)
        if q_match and (not o_match or k < j):       # QuotePatt matched
            res.append(str[i:k])
            res.append(str[k+1])
            i = k + 2
        else:                                        # OctalPatt matched
            res.append(str[i:j])
            res.append(chr(int(str[j+1:j+4], 8)))
            i = j + 4
    return _nulljoin(res)
# The _getdate() routine is used to set the expiration time in the cookie's HTTP
# header.  By default, _getdate() returns the current time in the appropriate
# "expires" format for a Set-Cookie header.  The one optional argument is an
# offset from now, in seconds.  For example, an offset of -3600 means "one hour
# ago".  The offset may be a floating point number.
#

# English day/month names: the HTTP date format is locale-independent,
# so time.strftime('%a'/'%b') cannot be used here.
_weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']

_monthname = [None,
              'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
              'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']

def _getdate(future=0, weekdayname=_weekdayname, monthname=_monthname):
    """Return now+*future* seconds formatted as an HTTP 'expires' date."""
    from time import gmtime, time
    now = time()
    year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future)
    return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % \
           (weekdayname[wd], day, monthname[month], year, hh, mm, ss)
class Morsel(dict):
    """A class to hold ONE (key, value) pair.

    In a cookie, each such pair may have several attributes, so this class is
    used to keep the attributes associated with the appropriate key,value pair.
    This class also includes a coded_value attribute, which is used to hold
    the network representation of the value.  This is most useful when Python
    objects are pickled for network transit.
    """
    # RFC 2109 lists these attributes as reserved:
    #   path       comment       domain
    #   max-age    secure        version
    #
    # For historical reasons, these attributes are also reserved:
    #   expires
    #
    # This is an extension from Microsoft:
    #   httponly
    #
    # This dictionary provides a mapping from the lowercase
    # variant on the left to the appropriate traditional
    # formatting on the right.
    _reserved = {
        "expires"  : "expires",
        "path"     : "Path",
        "comment"  : "Comment",
        "domain"   : "Domain",
        "max-age"  : "Max-Age",
        "secure"   : "secure",
        "httponly" : "httponly",
        "version"  : "Version",
    }

    # Boolean attributes: rendered as a bare word, no '=value' part.
    _flags = {'secure', 'httponly'}

    def __init__(self):
        # Set defaults
        self.key = self.value = self.coded_value = None

        # Set default attributes
        for key in self._reserved:
            dict.__setitem__(self, key, "")

    def __setitem__(self, K, V):
        # Only the reserved cookie attributes may be set this way.
        K = K.lower()
        if not K in self._reserved:
            raise CookieError("Invalid Attribute %s" % K)
        dict.__setitem__(self, K, V)

    def isReservedKey(self, K):
        return K.lower() in self._reserved

    def set(self, key, val, coded_val, LegalChars=_LegalChars):
        # First we verify that the key isn't a reserved word
        # Second we make sure it only contains legal characters
        if key.lower() in self._reserved:
            raise CookieError("Attempt to set a reserved key: %s" % key)
        if any(c not in LegalChars for c in key):
            raise CookieError("Illegal key value: %s" % key)

        # It's a good key, so save it.
        self.key = key
        self.value = val
        self.coded_value = coded_val

    def output(self, attrs=None, header="Set-Cookie:"):
        """Render this morsel as a complete HTTP header line."""
        return "%s %s" % (header, self.OutputString(attrs))

    __str__ = output

    def __repr__(self):
        return '<%s: %s=%s>' % (self.__class__.__name__,
                                self.key, repr(self.value))

    def js_output(self, attrs=None):
        # Print javascript
        return """
        <script type="text/javascript">
        <!-- begin hiding
        document.cookie = \"%s\";
        // end hiding -->
        </script>
        """ % (self.OutputString(attrs).replace('"', r'\"'))

    def OutputString(self, attrs=None):
        """Render 'key=value' plus any non-empty attributes, joined by '; '.

        attrs, if given, restricts which attributes are emitted.
        """
        # Build up our result
        #
        result = []
        append = result.append

        # First, the key=value pair
        append("%s=%s" % (self.key, self.coded_value))

        # Now add any defined attributes
        if attrs is None:
            attrs = self._reserved
        items = sorted(self.items())
        for key, value in items:
            if value == "":
                continue
            if key not in attrs:
                continue
            # Integer 'expires' is a relative offset: format it as a date.
            if key == "expires" and isinstance(value, int):
                append("%s=%s" % (self._reserved[key], _getdate(value)))
            elif key == "max-age" and isinstance(value, int):
                append("%s=%d" % (self._reserved[key], value))
            elif key == "secure":
                append(str(self._reserved[key]))
            elif key == "httponly":
                append(str(self._reserved[key]))
            else:
                append("%s=%s" % (self._reserved[key], value))

        # Return the result
        return _semispacejoin(result)
#
# Pattern for finding cookie
#
# This used to be strict parsing based on the RFC2109 and RFC2068
# specifications. I have since discovered that MSIE 3.0x doesn't
# follow the character rules outlined in those specs. As a
# result, the parsing rules here are less strict.
#
_LegalCharsPatt = r"[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]"
_CookiePattern = re.compile(r"""
(?x) # This is a verbose pattern
(?P<key> # Start of group 'key'
""" + _LegalCharsPatt + r"""+? # Any word of at least one letter
) # End of group 'key'
( # Optional group: there may not be a value.
\s*=\s* # Equal Sign
(?P<val> # Start of group 'val'
"(?:[^\\"]|\\.)*" # Any doublequoted string
| # or
\w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr
| # or
""" + _LegalCharsPatt + r"""* # Any word or empty string
) # End of group 'val'
)? # End of optional value group
\s* # Any number of spaces.
(\s+|;|$) # Ending either at space, semicolon, or EOS.
""", re.ASCII) # May be removed if safe.
# At long last, here is the cookie class. Using this class is almost just like
# using a dictionary. See this module's docstring for example usage.
#
class BaseCookie(dict):
    """A container class for a set of Morsels."""

    def value_decode(self, val):
        """real_value, coded_value = value_decode(STRING)
        Called prior to setting a cookie's value from the network
        representation.  The VALUE is the value read from HTTP
        header.
        Override this function to modify the behavior of cookies.
        """
        return val, val

    def value_encode(self, val):
        """real_value, coded_value = value_encode(VALUE)
        Called prior to setting a cookie's value from the dictionary
        representation.  The VALUE is the value being assigned.
        Override this function to modify the behavior of cookies.
        """
        strval = str(val)
        return strval, strval

    def __init__(self, input=None):
        if input:
            self.load(input)

    def __set(self, key, real_value, coded_value):
        """Private method for setting a cookie's value"""
        # Reuse the existing Morsel for this key if there is one.
        M = self.get(key, Morsel())
        M.set(key, real_value, coded_value)
        dict.__setitem__(self, key, M)

    def __setitem__(self, key, value):
        """Dictionary style assignment."""
        rval, cval = self.value_encode(value)
        self.__set(key, rval, cval)

    def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"):
        """Return a string suitable for HTTP."""
        result = []
        items = sorted(self.items())
        for key, value in items:
            result.append(value.output(attrs, header))
        return sep.join(result)

    __str__ = output

    def __repr__(self):
        l = []
        items = sorted(self.items())
        for key, value in items:
            l.append('%s=%s' % (key, repr(value.value)))
        return '<%s: %s>' % (self.__class__.__name__, _spacejoin(l))

    def js_output(self, attrs=None):
        """Return a string suitable for JavaScript."""
        result = []
        items = sorted(self.items())
        for key, value in items:
            result.append(value.js_output(attrs))
        return _nulljoin(result)

    def load(self, rawdata):
        """Load cookies from a string (presumably HTTP_COOKIE) or
        from a dictionary.  Loading cookies from a dictionary 'd'
        is equivalent to calling:
            map(Cookie.__setitem__, d.keys(), d.values())
        """
        if isinstance(rawdata, str):
            self.__parse_string(rawdata)
        else:
            # self.update() wouldn't call our custom __setitem__
            for key, value in rawdata.items():
                self[key] = value
        return

    def __parse_string(self, str, patt=_CookiePattern):
        # Scan the string left-to-right, one key[=value] fragment at a time.
        i = 0                 # Our starting point
        n = len(str)          # Length of string
        M = None              # current morsel

        while 0 <= i < n:
            # Start looking for a cookie
            match = patt.search(str, i)
            if not match:
                # No more cookies
                break

            key, value = match.group("key"), match.group("val")
            i = match.end(0)

            # Parse the key, value in case it's metainfo
            if key[0] == "$":
                # We ignore attributes which pertain to the cookie
                # mechanism as a whole.  See RFC 2109.
                # (Does anyone care?)
                if M:
                    M[key[1:]] = value
            elif key.lower() in Morsel._reserved:
                # Attribute of the most recent morsel, e.g. 'Path=/'.
                if M:
                    if value is None:
                        if key.lower() in Morsel._flags:
                            M[key] = True
                    else:
                        M[key] = _unquote(value)
            elif value is not None:
                # A new cookie name=value pair.
                rval, cval = self.value_decode(value)
                self.__set(key, rval, cval)
                M = self[key]
class SimpleCookie(BaseCookie):
    """
    SimpleCookie supports strings as cookie values.  When setting
    the value using the dictionary assignment notation, SimpleCookie
    calls the builtin str() to convert the value to a string.  Values
    received from HTTP are kept as strings.
    """
    def value_decode(self, val):
        # Network -> Python: strip RFC 2109 quoting; keep the raw coded form.
        return _unquote(val), val

    def value_encode(self, val):
        # Python -> network: coerce to str, then quote for header use.
        strval = str(val)
        return strval, _quote(strval)
| gpl-3.0 |
savoirfairelinux/odoo | addons/sale_mrp/__openerp__.py | 61 | 1935 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Module manifest: read by the OpenERP module loader as a plain dict.
{
    'name': 'Sales and MRP Management',
    'version': '1.0',
    'category': 'Hidden',
    # Fix: "modulesat" -> "modules at" in the user-visible description.
    'description': """
This module provides facility to the user to install mrp and sales modules at a time.
====================================================================================
It is basically used when we want to keep track of production orders generated
from sales order. It adds sales name and sales Reference on production order.
""",
    'author': 'OpenERP SA',
    'website': 'http://www.openerp.com',
    'images': ['images/SO_to_MO.jpeg'],
    'depends': ['mrp', 'sale_stock'],
    'data': [
        'security/ir.model.access.csv',
        'sale_mrp_view.xml',
    ],
    'demo': [],
    'test': [
        'test/cancellation_propagated.yml',
        'test/sale_mrp.yml',
    ],
    # Auto-installed as soon as both 'mrp' and 'sale_stock' are present.
    'installable': True,
    'auto_install': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
KanchanChauhan/erpnext | erpnext/startup/notifications.py | 4 | 1797 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
def get_notification_config():
    """Return the desktop notification configuration.

    Maps each DocType to the filter dict used to count documents that
    need attention (open issues, unpaid invoices, drafts, ...).
    """
    # Shared filter fragments.
    submittable_draft = ("<", 2)  # docstatus 0 or 1, i.e. not cancelled
    pending = ("not in", ("Completed", "Closed"))
    for_doctype = {
        "Issue": {"status": "Open"},
        "Warranty Claim": {"status": "Open"},
        "Task": {"status": ("in", ("Open", "Overdue"))},
        "Project": {"status": "Open"},
        "Item": {"total_projected_qty": ("<", 0)},
        "Lead": {"status": "Open"},
        "Contact": {"status": "Open"},
        "Opportunity": {"status": "Open"},
        "Quotation": {"docstatus": 0},
        "Sales Order": {"status": pending, "docstatus": submittable_draft},
        "Journal Entry": {"docstatus": 0},
        "Sales Invoice": {"outstanding_amount": (">", 0),
                          "docstatus": submittable_draft},
        "Purchase Invoice": {"outstanding_amount": (">", 0),
                             "docstatus": submittable_draft},
        "Payment Entry": {"docstatus": 0},
        "Leave Application": {"status": "Open"},
        "Expense Claim": {"approval_status": "Draft"},
        "Job Applicant": {"status": "Open"},
        "Delivery Note": {"status": pending, "docstatus": submittable_draft},
        "Stock Entry": {"docstatus": 0},
        "Material Request": {"docstatus": submittable_draft,
                             "status": ("not in", ("Stopped",)),
                             "per_ordered": ("<", 100)},
        "Request for Quotation": {"docstatus": 0},
        "Supplier Quotation": {"docstatus": 0},
        "Purchase Order": {"status": pending, "docstatus": submittable_draft},
        "Purchase Receipt": {"status": pending,
                             "docstatus": submittable_draft},
        "Production Order": {"status": ("in", ("Draft", "Not Started",
                                               "In Process"))},
        "BOM": {"docstatus": 0},
        "Timesheet": {"status": "Draft"},
    }
    return {"for_doctype": for_doctype}
| gpl-3.0 |
eoogbe/api-client-staging | generated/python/googleapis-common-protos/google/api/system_parameter_pb2.py | 17 | 6134 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/api/system_parameter.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# Generated protobuf plumbing: file descriptor for
# google/api/system_parameter.proto.  Do not edit by hand; regenerate
# with protoc from the .proto source instead.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='google/api/system_parameter.proto',
  package='google.api',
  syntax='proto3',
  # serialized_pb is the raw FileDescriptorProto bytes emitted by protoc.
  serialized_pb=_b('\n!google/api/system_parameter.proto\x12\ngoogle.api\"B\n\x10SystemParameters\x12.\n\x05rules\x18\x01 \x03(\x0b\x32\x1f.google.api.SystemParameterRule\"X\n\x13SystemParameterRule\x12\x10\n\x08selector\x18\x01 \x01(\t\x12/\n\nparameters\x18\x02 \x03(\x0b\x32\x1b.google.api.SystemParameter\"Q\n\x0fSystemParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0bhttp_header\x18\x02 \x01(\t\x12\x1b\n\x13url_query_parameter\x18\x03 \x01(\tB/\n\x0e\x63om.google.apiB\x14SystemParameterProtoP\x01\xa2\x02\x04GAPIb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Message descriptor: google.api.SystemParameters (one repeated field,
# 'rules').  Generated by protoc; do not edit by hand.
_SYSTEMPARAMETERS = _descriptor.Descriptor(
  name='SystemParameters',
  full_name='google.api.SystemParameters',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='rules', full_name='google.api.SystemParameters.rules', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=49,
  serialized_end=115,
)
# Message descriptor: google.api.SystemParameterRule ('selector' string
# plus repeated 'parameters').
_SYSTEMPARAMETERRULE = _descriptor.Descriptor(
  name='SystemParameterRule',
  full_name='google.api.SystemParameterRule',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='selector', full_name='google.api.SystemParameterRule.selector', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='parameters', full_name='google.api.SystemParameterRule.parameters', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=117,
  serialized_end=205,
)
# Message descriptor: google.api.SystemParameter (three string fields).
_SYSTEMPARAMETER = _descriptor.Descriptor(
  name='SystemParameter',
  full_name='google.api.SystemParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='google.api.SystemParameter.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='http_header', full_name='google.api.SystemParameter.http_header', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='url_query_parameter', full_name='google.api.SystemParameter.url_query_parameter', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=207,
  serialized_end=288,
)
# Resolve cross-references between the descriptors and register the
# message types on the file descriptor.  Generated code; do not edit.
_SYSTEMPARAMETERS.fields_by_name['rules'].message_type = _SYSTEMPARAMETERRULE
_SYSTEMPARAMETERRULE.fields_by_name['parameters'].message_type = _SYSTEMPARAMETER
DESCRIPTOR.message_types_by_name['SystemParameters'] = _SYSTEMPARAMETERS
DESCRIPTOR.message_types_by_name['SystemParameterRule'] = _SYSTEMPARAMETERRULE
DESCRIPTOR.message_types_by_name['SystemParameter'] = _SYSTEMPARAMETER
# Concrete message classes synthesized from the descriptors above.
SystemParameters = _reflection.GeneratedProtocolMessageType('SystemParameters', (_message.Message,), dict(
  DESCRIPTOR = _SYSTEMPARAMETERS,
  __module__ = 'google.api.system_parameter_pb2'
  # @@protoc_insertion_point(class_scope:google.api.SystemParameters)
  ))
_sym_db.RegisterMessage(SystemParameters)
SystemParameterRule = _reflection.GeneratedProtocolMessageType('SystemParameterRule', (_message.Message,), dict(
  DESCRIPTOR = _SYSTEMPARAMETERRULE,
  __module__ = 'google.api.system_parameter_pb2'
  # @@protoc_insertion_point(class_scope:google.api.SystemParameterRule)
  ))
_sym_db.RegisterMessage(SystemParameterRule)
SystemParameter = _reflection.GeneratedProtocolMessageType('SystemParameter', (_message.Message,), dict(
  DESCRIPTOR = _SYSTEMPARAMETER,
  __module__ = 'google.api.system_parameter_pb2'
  # @@protoc_insertion_point(class_scope:google.api.SystemParameter)
  ))
_sym_db.RegisterMessage(SystemParameter)
# File-level options: Java package/class name plus the GAPI prefix.
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\016com.google.apiB\024SystemParameterProtoP\001\242\002\004GAPI'))
# @@protoc_insertion_point(module_scope)
| bsd-3-clause |
fxia22/ASM_xf | PythonD/lib/python2.4/distutils/archive_util.py | 4 | 6235 | """distutils.archive_util
Utility functions for creating archive files (tarballs, zip files,
that sort of thing)."""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id: archive_util.py,v 1.17 2004/11/10 22:23:13 loewis Exp $"
import os
from distutils.errors import DistutilsExecError
from distutils.spawn import spawn
from distutils.dir_util import mkpath
from distutils import log
def make_tarball (base_name, base_dir, compress="gzip",
verbose=0, dry_run=0):
"""Create a (possibly compressed) tar file from all the files under
'base_dir'. 'compress' must be "gzip" (the default), "compress",
"bzip2", or None. Both "tar" and the compression utility named by
'compress' must be on the default program search path, so this is
probably Unix-specific. The output tar file will be named 'base_dir' +
".tar", possibly plus the appropriate compression extension (".gz",
".bz2" or ".Z"). Return the output filename.
"""
# XXX GNU tar 1.13 has a nifty option to add a prefix directory.
# It's pretty new, though, so we certainly can't require it --
# but it would be nice to take advantage of it to skip the
# "create a tree of hardlinks" step! (Would also be nice to
# detect GNU tar to use its 'z' option and save a step.)
compress_ext = { 'gzip': ".gz",
'bzip2': '.bz2',
'compress': ".Z" }
# flags for compression program, each element of list will be an argument
compress_flags = {'gzip': ["-f9"],
'compress': ["-f"],
'bzip2': ['-f9']}
if compress is not None and compress not in compress_ext.keys():
raise ValueError, \
"bad value for 'compress': must be None, 'gzip', or 'compress'"
archive_name = base_name + ".tar"
mkpath(os.path.dirname(archive_name), dry_run=dry_run)
cmd = ["tar", "-cf", archive_name, base_dir]
spawn(cmd, dry_run=dry_run)
if compress:
spawn([compress] + compress_flags[compress] + [archive_name],
dry_run=dry_run)
return archive_name + compress_ext[compress]
else:
return archive_name
# make_tarball ()
def make_zipfile (base_name, base_dir, verbose=0, dry_run=0):
    """Create a zip file from all the files under 'base_dir'.  The output
    zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
    Python module (if available) or the InfoZIP "zip" utility (if installed
    and found on the default search path).  If neither tool is available,
    raises DistutilsExecError.  Returns the name of the output zip file.
    """
    # Local import so the fallback below works on builds without zlib.
    try:
        import zipfile
    except ImportError:
        zipfile = None
    zip_filename = base_name + ".zip"
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    # If zipfile module is not available, try spawning an external
    # 'zip' command.
    if zipfile is None:
        if verbose:
            zipoptions = "-r"
        else:
            zipoptions = "-rq"
        try:
            spawn(["zip", zipoptions, zip_filename, base_dir],
                  dry_run=dry_run)
        except DistutilsExecError:
            # XXX really should distinguish between "couldn't find
            # external 'zip' command" and "zip failed".
            raise DistutilsExecError, \
                  ("unable to create zip file '%s': "
                   "could neither import the 'zipfile' module nor "
                   "find a standalone zip utility") % zip_filename
    else:
        log.info("creating '%s' and adding '%s' to it",
                 zip_filename, base_dir)
        # os.path.walk callback: add every regular file under base_dir.
        def visit (z, dirname, names):
            for name in names:
                path = os.path.normpath(os.path.join(dirname, name))
                if os.path.isfile(path):
                    z.write(path, path)
                    log.info("adding '%s'" % path)
        if not dry_run:
            z = zipfile.ZipFile(zip_filename, "w",
                                compression=zipfile.ZIP_DEFLATED)
            # os.path.walk: kept for Python 2.1 compatibility (see module
            # header); os.walk did not exist until 2.3.
            os.path.walk(base_dir, visit, z)
            z.close()
    return zip_filename
# make_zipfile ()
# Maps format name -> (archiving function, extra (kwarg, value) pairs,
# human-readable description).  Used by make_archive() below.
ARCHIVE_FORMATS = {
    'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'ztar':  (make_tarball, [('compress', 'compress')], "compressed tar file"),
    'tar':   (make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip':   (make_zipfile, [],"ZIP file")
    }
def check_archive_formats (formats):
    """Return the first format in 'formats' that is not a known archive
    format, or None if all of them are known."""
    for format in formats:
        # has_key() rather than 'in': this module is kept compatible with
        # Python 2.1 (see header), where 'in' on dicts is unavailable.
        if not ARCHIVE_FORMATS.has_key(format):
            return format
    else:
        # for/else: the loop finished without hitting an unknown format.
        return None
def make_archive (base_name, format,
                  root_dir=None, base_dir=None,
                  verbose=0, dry_run=0):
    """Create an archive file (eg. zip or tar).  'base_name' is the name
    of the file to create, minus any format-specific extension; 'format'
    is the archive format: one of "zip", "tar", "ztar", or "gztar".
    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.
    """
    # Remember where we are so we can chdir back after archiving.
    save_cwd = os.getcwd()
    if root_dir is not None:
        log.debug("changing into '%s'", root_dir)
        # Absolutize before chdir so the output lands where the caller
        # expects, not relative to root_dir.
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)
    if base_dir is None:
        base_dir = os.curdir
    kwargs = { 'dry_run': dry_run }
    try:
        format_info = ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError, "unknown archive format '%s'" % format
    func = format_info[0]
    # Merge the format's fixed keyword arguments (e.g. compress='gzip').
    for (arg,val) in format_info[1]:
        kwargs[arg] = val
    # apply(): kept for Python 2.1 compatibility (see module header).
    filename = apply(func, (base_name, base_dir), kwargs)
    if root_dir is not None:
        log.debug("changing back to '%s'", save_cwd)
        os.chdir(save_cwd)
    return filename
# make_archive ()
| gpl-2.0 |
mattbasta/amo-validator | validator/testcases/javascript/call_definitions.py | 1 | 37806 | import math
import re
import types
import actions
import traverser as js_traverser
import predefinedentities
from jstypes import *
from validator.constants import BUGZILLA_BUG
from validator.compat import (FX6_DEFINITION, FX7_DEFINITION, FX8_DEFINITION,
FX9_DEFINITION, FX11_DEFINITION, TB12_DEFINITION,
TB13_DEFINITION)
from validator.decorator import version_range
from validator.python import copy
# Function prototypes should implement the following:
# wrapper : The JSWrapper instace that is being called
# arguments : A list of argument nodes; untraversed
# traverser : The current traverser object
def amp_rp_bug660359(wrapper, arguments, traverser):
    """
    Flag all calls to AddonManagerPrivate.registerProvider for incompatibility
    with Gecko 6.
    """
    # Single tier-5 notice; location details come from the traverser's
    # current position in the script.
    err = traverser.err
    err.notice(
        compatibility_type="error",
        for_appversions=FX6_DEFINITION,
        tier=5,
        err_id=("testcases_javascript_calldefinitions",
                "amp_rp_bug660359"),
        notice="Custom add-on types may not work properly in Gecko 6",
        description="This add-on appears to register custom add-on types, "
                    "which are affected and may not work properly due to "
                    "changes made on Gecko 6. For more information, "
                    "please refer to "
                    "https://bugzilla.mozilla.org/show_bug.cgi?id=595848",
        context=traverser.context,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position)
def urlparser_parsepath_bug691588(wrapper, arguments, traverser):
    """
    nsIURLParser.parsePath doesn't take paramPos/paramLen in FX9.
    """
    # Only calls still passing the removed paramPos/paramLen pair (more
    # than eight arguments) are a problem; bail out early otherwise.
    if len(arguments) <= 8:
        return
    traverser.err.error(
        ("testcases_javascript_call_definititions",
         "fx9_compat",
         "urlparser_691588"),
        ("nsIURLParser.parsePath's signature has changed in Gecko 9."
         " See %s for more information.") % (BUGZILLA_BUG % 665706),
        compatibility_type="error",
        tier=5,
        for_appversions=FX9_DEFINITION,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context)
def url_param_bug691588(t):
    """
    nsIURL.param is gone in FX9.
    """
    # `t` is the traverser; emit one tier-5 compatibility error at its
    # current position.
    err = t.err
    err.error(
        err_id=("testcases_javascript_call_definititions", "fx9_compat",
                "urlparser_691588"),
        error="`nsIURL.param` has been removed in Gecko 9.",
        description="See %s for more information." % BUGZILLA_BUG % 665706,
        compatibility_type="error",
        tier=5,
        for_appversions=FX9_DEFINITION,
        context=t.context,
        filename=t.filename,
        line=t.line,
        column=t.position)
def browserhistory_removepages(wrapper, arguments, traverser):
    """
    nsIBrowserHistory.removePages takes 2 args in FX9 instead of 3.
    """
    # Calls with two or fewer arguments already match the new signature.
    if len(arguments) <= 2:
        return
    traverser.err.error(
        err_id=("testcases_javascript_call_definititions",
                "fx9_compat", "browserhistory_removepages"),
        error="nsIBrowser.removePages' signature has changed in Gecko 9.",
        description="See %s for more information." %
                        BUGZILLA_BUG % 681420,
        compatibility_type="error",
        tier=5,
        for_appversions=FX9_DEFINITION,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context)
def browserhistory_registeropenpage(t):
    """
    nsIBrowser.registerOpenPage is gone in Gecko 9.
    """
    err = t.err
    err.error(
        err_id=("testcases_javascript_call_definititions",
                "fx9_compat", "browserhistory_registeropenpage"),
        error="nsIBrowser.registerOpenPage has been removed in Gecko 9.",
        description="See %s for more information." % BUGZILLA_BUG % 681420,
        compatibility_type="error",
        tier=5,
        for_appversions=FX9_DEFINITION,
        context=t.context,
        filename=t.filename,
        line=t.line,
        column=t.position)
def browserhistory_unregisteropenpage(t):
    """
    nsIBrowser.unregisterOpenPage is gone in Gecko 9.
    """
    err = t.err
    err.error(
        err_id=("testcases_javascript_call_definititions",
                "fx9_compat", "browserhistory_unregisteropenpage"),
        error="nsIBrowser.unregisterOpenPage has been removed in Gecko 9.",
        description="See %s for more information." % BUGZILLA_BUG % 681420,
        compatibility_type="error",
        tier=5,
        for_appversions=FX9_DEFINITION,
        context=t.context,
        filename=t.filename,
        line=t.line,
        column=t.position)
def spellcheck_savedefaultdictionary(t):
    """
    nsIEditorSpellCheck.saveDefaultDictionary is gone in Gecko 9.
    """
    err = t.err
    err.error(
        ("testcases_javascript_call_definititions",
         "fx9_compat",
         "spellcheck_savedefaultdictionary"),
        ("nsIEditorSpellCheck.saveDefaultDictionary has been removed in"
         " Gecko 9. See %s for more information.") % (BUGZILLA_BUG % 678842),
        compatibility_type="error",
        tier=5,
        for_appversions=FX9_DEFINITION,
        context=t.context,
        filename=t.filename,
        line=t.line,
        column=t.position)
def spellcheck_updatecurrentdictionary(wrapper, arguments, traverser):
    """
    nsIEditorSpellCheck.UpdateCurrentDictionary takes no args in Gecko 9.
    """
    # Zero-argument calls already match the new signature.
    if not arguments:
        return
    traverser.err.error(
        ("testcases_javascript_call_definititions",
         "fx9_compat",
         "spellcheck_updatecurrentdictionary"),
        ("nsIEditorSpellCheck.UpdateCurrentDictionary takes no arguments "
         "in Gecko 9. See %s for more information."
         ) % (BUGZILLA_BUG % 678842),
        compatibility_type="error",
        tier=5,
        for_appversions=FX9_DEFINITION,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context)
def webbrowserpersist_saveuri(wrapper, arguments, traverser):
    """
    nsIWebBrowserPersist.saveURI requires a valid privacy context as
    of Firefox 19
    """
    # The privacy context is the seventh argument; shorter calls cannot
    # pass a null one, so skip them.
    if len(arguments) < 7:
        return
    load_context = traverser._traverse_node(arguments[6])
    if load_context.get_literal_value() is not None:
        return
    traverser.err.warning(
        err_id=("testcases_javascript_call_definititions",
                "webbrowserpersist_saveuri"),
        warning=("saveURI should not be called with a null load "
                 "context"),
        description=("While nsIWebBrowserPersist.saveURI accepts null "
                     "in place of a privacy context, this usage is "
                     "acceptable only when no appropriate load "
                     "context exists."),
        tier=4,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context)
def xpcom_constructor(method, extend=False, mutate=False, pretraversed=False):
    """Returns a function which wraps an XPCOM class instantiation function.

    `method` names the global entity to instantiate.  With `extend`, the
    new instance inherits the parent object's members; with `mutate`, the
    parent wrapper itself is rewritten to the new instance.
    """
    def definition(wrapper, arguments, traverser):
        """Wraps an XPCOM class instantiation function."""
        if not arguments:
            return None
        traverser._debug("(XPCOM Encountered)")
        if not pretraversed:
            arguments = [traverser._traverse_node(x) for x in arguments]
        argz = arguments[0]
        # If the first argument isn't a recognized global with an
        # xpcom_map, substitute an empty map so _build_global still works.
        if not argz.is_global or "xpcom_map" not in argz.value:
            argz = JSWrapper(traverser=traverser)
            argz.value = {"xpcom_map": lambda: {"value": {}}}
        traverser._debug("(Building XPCOM...)")
        inst = traverser._build_global(
                   method, argz.value["xpcom_map"]())
        inst.value["overwritable"] = True
        if extend or mutate:
            # FIXME: There should be a way to get this without
            # traversing the call chain twice.
            parent = actions.trace_member(traverser, wrapper["callee"]["object"])
            if mutate and not (parent.is_global and
                               isinstance(parent.value, dict) and
                               "value" in parent.value):
                # Assume that the parent object is a first class
                # wrapped native
                parent.value = inst.value
                # FIXME: Only objects marked as global are processed
                # as XPCOM instances
                parent.is_global = True
            if isinstance(parent.value, dict):
                if extend and mutate:
                    # extend+mutate: merge the new members into the
                    # parent's (possibly lazily-computed) value in place.
                    if callable(parent.value["value"]):
                        parent.value["value"] = \
                            parent.value["value"](t=traverser)
                    parent.value["value"].update(inst.value["value"])
                    return parent
                if extend:
                    inst.value["value"].update(parent.value["value"])
                if mutate:
                    parent.value = inst.value
        return inst
    definition.__name__ = "xpcom_%s" % str(method)
    return definition
# Global object function definitions:
def string_global(wrapper, arguments, traverser):
    """Emulate the String() global: coerce the first argument to a string."""
    if not arguments:
        return JSWrapper("", traverser=traverser)
    literal = traverser._traverse_node(arguments[0]).get_literal_value()
    return JSWrapper(actions._get_as_str(literal), traverser=traverser)
def array_global(wrapper, arguments, traverser):
    """Emulate the Array() global: wrap the traversed arguments."""
    output = JSArray()
    if arguments:
        output.elements = [traverser._traverse_node(arg)
                           for arg in arguments]
    return JSWrapper(output, traverser=traverser)
def number_global(wrapper, arguments, traverser):
    """Emulate the Number() global: coerce the first argument to a float,
    falling back to the NaN global entity when coercion fails."""
    if not arguments:
        return JSWrapper(0, traverser=traverser)
    literal = traverser._traverse_node(arguments[0]).get_literal_value()
    try:
        value = float(literal)
    except (ValueError, TypeError):
        return traverser._build_global(
            name="NaN",
            entity=predefinedentities.GLOBAL_ENTITIES[u"NaN"])
    return JSWrapper(value, traverser=traverser)
def boolean_global(wrapper, arguments, traverser):
    """Emulate the Boolean() global: truthiness of the first argument."""
    if not arguments:
        return JSWrapper(False, traverser=traverser)
    wrapped = traverser._traverse_node(arguments[0])
    return JSWrapper(bool(wrapped.get_literal_value()), traverser=traverser)
def python_wrap(func, args, nargs=False):
    """
    This is a helper function that wraps Python functions and exposes them to
    the JS engine. The first parameter should be the Python function to wrap.
    The second parameter should be a list of tuples. Each tuple should
    contain:
     1. The type of value to expect:
        - "string"
        - "num"
     2. A default value.

    With nargs=True, the function accepts a variable-length argument list;
    in that mode `args[0]` is the single type tag applied to every
    argument (presumably -- TODO confirm against callers).
    """
    def _process_literal(type_, literal):
        # Coerce a JS literal to the declared parameter type; anything
        # else passes through untouched.
        if type_ == "string":
            return actions._get_as_str(literal)
        elif type_ == "num":
            return actions._get_as_num(literal)
        return literal
    def wrap(wrapper, arguments, traverser):
        passed_args = [traverser._traverse_node(a) for a in arguments]
        params = []
        if not nargs:
            # Handle definite argument lists.
            for type_, def_value in args:
                if passed_args:
                    # Consume one passed argument per declared parameter;
                    # missing trailing arguments fall back to defaults.
                    parg = passed_args[0]
                    passed_args = passed_args[1:]
                    passed_literal = parg.get_literal_value()
                    passed_literal = _process_literal(type_, passed_literal)
                    params.append(passed_literal)
                else:
                    params.append(def_value)
        else:
            # Handle dynamic argument lists.
            for arg in passed_args:
                literal = arg.get_literal_value()
                params.append(_process_literal(args[0], literal))
        traverser._debug("Calling wrapped Python function with: (%s)" %
                         ", ".join(map(str, params)))
        try:
            output = func(*params)
        except (ValueError, TypeError):
            # If we cannot compute output, just return nothing.
            output = None
        return JSWrapper(output, traverser=traverser)
    return wrap
def math_log(wrapper, arguments, traverser):
    """Return a better value than the standard python log function."""
    # Traverse every argument first (for side effects), as Math.log
    # would evaluate them all.
    args = [traverser._traverse_node(a) for a in arguments]
    if not args:
        return JSWrapper(0, traverser=traverser)
    num = actions._get_as_num(args[0].get_literal_value())
    if num == 0:
        # JS: Math.log(0) === -Infinity.
        return JSWrapper(float('-inf'), traverser=traverser)
    if num < 0:
        # JS: Math.log of a negative is NaN; return an empty wrapper.
        return JSWrapper(traverser=traverser)
    return JSWrapper(math.log(num), traverser=traverser)
def math_random(wrapper, arguments, traverser):
    """Return a "random" value for Math.random().

    A fixed 0.5 keeps static analysis deterministic while remaining
    inside the [0, 1) range Math.random() guarantees.
    """
    return JSWrapper(0.5, traverser=traverser)
def math_round(wrapper, arguments, traverser):
    """Return a better value than the standard python round function.

    JavaScript defines Math.round(x) as floor(x + 0.5), which differs
    from Python's round() on negative halves: Math.round(-2.5) === -2,
    while Python 2's round(-2.5) == -3.0.
    """
    args = [traverser._traverse_node(a) for a in arguments]
    if not args:
        return JSWrapper(0, traverser=traverser)
    arg = actions._get_as_num(args[0].get_literal_value())
    # Prevent nasty infinity tracebacks.
    if abs(arg) == float("inf"):
        return args[0]
    # Bug fix: the previous epsilon nudge (arg += 1e-16) was smaller than
    # the ulp of values like -2.5, so it was lost to float rounding and
    # round() still rounded away from zero.  floor(x + 0.5) is the exact
    # ECMAScript definition and needs no hack.
    return JSWrapper(math.floor(arg + 0.5), traverser=traverser)
def nsIDOMFile_deprec(wrapper, arguments, traverser):
    """Throw a compatibility error about removed XPCOM methods."""
    err = traverser.err
    err.notice(
        err_id=("testcases_javascript_calldefinitions", "nsIDOMFile",
                "deprec"),
        notice="Deprecated nsIDOMFile methods in use.",
        description=("Your add-on uses methods that have been removed from "
                     "the nsIDOMFile interface in Gecko 7. Please refer to "
                     "%s for more information.") % (BUGZILLA_BUG % 661876),
        compatibility_type="error",
        for_appversions=FX7_DEFINITION,
        tier=5,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context)
    # Return a dirty placeholder: we can't model what the call yields.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def nsIJSON_deprec(wrapper, arguments, traverser):
    """Throw a compatibility error about removed XPCOM methods."""
    traverser.err.notice(
        err_id=("testcases_javascript_calldefinitions", "nsIJSON",
                "deprec"),
        notice="Deprecated nsIJSON methods in use.",
        description=("The encode and decode methods in nsIJSON have been "
                     "deprecated in Gecko 7. You can use the methods in the "
                     "global JSON object instead. See %s for more "
                     "information.") %
                        "https://developer.mozilla.org/En/Using_native_JSON",
        # The line below is the original bug reference, kept for history.
        #"%s for more information.") % (BUGZILLA_BUG % 645922),
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="warning",
        for_appversions=FX7_DEFINITION,
        tier=5)
    # Return a dirty placeholder object for the unmodeled return value.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def nsIImapMailFolderSink_changed(wrapper, arguments, traverser):
    """Flag calls to nsIImapMailFolderSink for possible incompatibility with Thunderbird 6"""
    err = traverser.err
    err.notice(
        err_id=("testcases_javascript_calldefinitions", "nsIImapMailFolderSink"),
        notice="Modified nsIImapMailFolderSink method in use.",
        description="This add-on appears to use nsIImapMailFolderSink.setUrlState, "
                    "which may no longer work correctly due to "
                    "changes made in Thunderbird 6. For more information, "
                    "please refer to "
                    "https://bugzilla.mozilla.org/show_bug.cgi?id=464126",
        compatibility_type="error",
        for_appversions={'{3550f703-e582-4d05-9a08-453d09bdfdc6}':
                             version_range("thunderbird", "6.0a1", "8.0a1")},
        tier=5,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context)
    # Dirty placeholder for the unmodeled return value.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def nsIImapProtocol_removed(wrapper, arguments, traverser):
    """Flag calls to nsIImapProtocol for incompatibility with Thunderbird 6"""
    err = traverser.err
    err.notice(
        err_id=("testcases_javascript_calldefinitions", "nsIImapProtocol"),
        notice="Removed nsIImapProtocol method in use.",
        description="This add-on appears to use nsIImapProtocol.NotifyHdrsToDownload, "
                    "which may no longer work correctly due to "
                    "changes made in Thunderbird 6. For more information, "
                    "please refer to "
                    "https://bugzilla.mozilla.org/show_bug.cgi?id=464126",
        compatibility_type="error",
        for_appversions={'{3550f703-e582-4d05-9a08-453d09bdfdc6}':
                             version_range("thunderbird", "6.0a1", "8.0a1")},
        tier=5,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context)
    # Dirty placeholder for the unmodeled return value.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def document_getSelection(wrapper, arguments, traverser):
    """Flag Gecko 8 calls to document.getSelection()."""
    MDN_ARTICLE = "https://developer.mozilla.org/En/Window.getSelection"
    err = traverser.err
    err.notice(
        err_id=("testcases_javascript_calldefinitions", "document_getSel"),
        notice="document.getSelection()'s return type has changed.",
        description="The return type of document.getSelection() has changed "
                    "in Gecko 8. This function is deprecated, and you "
                    "should be using window.getSelection() instead. See "
                    "%s for more information." % MDN_ARTICLE,
        compatibility_type="error",
        for_appversions=FX8_DEFINITION,
        tier=5,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context)
    # The new spec returns an object.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def nsIMsgThread_removed(wrapper, arguments, traverser):
    """Flag calls to nsIMsgThread for incompatibility with Thunderbird 7"""
    err = traverser.err
    err.notice(
        err_id=("testcases_javascript_calldefinitions", "nsIMsgThread"),
        notice="Removed nsIMsgThread method in use.",
        description="This add-on appears to use nsIMsgThread.GetChildAt, "
                    "which may no longer work correctly due to "
                    "changes made in Thunderbird 7. For more information, "
                    "please refer to "
                    "https://bugzilla.mozilla.org/show_bug.cgi?id=617839",
        compatibility_type="error",
        for_appversions={'{3550f703-e582-4d05-9a08-453d09bdfdc6}':
                             version_range("thunderbird", "7.0a1", "8.0a1")},
        tier=5,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context)
    # Dirty placeholder for the unmodeled return value.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def mail_attachment_api(wrapper, arguments, traverser):
    """Flag calls to the global attachment functions for incompatibility with Thunderbird 7"""
    traverser.err.notice(
        err_id=("testcases_javascript_calldefinitions", "AttachmentAPI"),
        notice="Removed attachment API function in use.",
        # Bug fix: a missing comma made the message read
        # "createNewAttachmentInfo detachAttachment" as one item.
        description="This add-on appears to use a global attachment function, one of: "
                    "attachmentIsEmpty, cloneAttachment, createNewAttachmentInfo, "
                    "detachAttachment, openAttachment or saveAttachment, "
                    "which were removed in Thunderbird 7. For more information, "
                    "please refer to "
                    "https://bugzilla.mozilla.org/show_bug.cgi?id=657856",
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions={'{3550f703-e582-4d05-9a08-453d09bdfdc6}':
                             version_range("thunderbird", "7.0a1", "8.0a1")},
        tier=5)
    # Dirty placeholder for the unmodeled return value.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def nsIMsgSearchScopeTerm_removed(wrapper, arguments, traverser):
    """Flag calls to nsIMsgSearchScopeTerm methods for incompatibility with Thunderbird 8"""
    traverser.err.notice(
        err_id=("testcases_javascript_calldefinitions", "nsIMsgSearchScopeTerm"),
        notice="Removed nsIMsgSearchScopeTerm method in use.",
        # Fixed misplaced comma ("mailFile or,") and a missing space that
        # rendered as "removedas part" in the user-facing description.
        description="This add-on appears to use nsIMsgSearchScopeTerm.mailFile or "
                    "nsIMsgSearchScopeTerm.inputStream, both of which have been removed "
                    "as part of changes made in Thunderbird 8. For more information, "
                    "please refer to "
                    "https://bugzilla.mozilla.org/show_bug.cgi?id=668700",
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions={'{3550f703-e582-4d05-9a08-453d09bdfdc6}':
                             version_range("thunderbird", "8.0a1", "9.0a1")},
        tier=5)
    # Return a dirty placeholder object so JS traversal can continue.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def gComposeBundle_removed(wrapper, arguments, traverser):
    """Flag uses of gComposeBundle for incompatibility with Thunderbird 9"""
    description = ("This add-on appears to use gComposeBundle which has been removed "
                   "as part of changes made in Thunderbird 9. For more information, "
                   "please refer to "
                   "https://bugzilla.mozilla.org/show_bug.cgi?id=670639")
    affected_versions = {'{3550f703-e582-4d05-9a08-453d09bdfdc6}':
                             version_range("thunderbird", "9.0a1", "10.0a1")}
    traverser.err.notice(
        err_id=("testcases_javascript_calldefinitions", "gComposeBundle"),
        notice="Removed gComposeBundle global variable in use.",
        description=description,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions=affected_versions,
        tier=5)
    # Hand back a dirty placeholder object so JS traversal can continue.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def TB9FocusFunctions_removed(wrapper, arguments, traverser):
    """
    Flag calls to WhichPaneHasFocus and FocusOnFirstAttachment
    for incompatibility with Thunderbird 9
    """
    description = ("This add-on appears to use WhichPaneHasFocus "
                   "or FocusOnFirstAttachment which have been removed "
                   "as part of changes made in Thunderbird 9. For more information, "
                   "please refer to "
                   "https://bugzilla.mozilla.org/show_bug.cgi?id=581932")
    affected_versions = {'{3550f703-e582-4d05-9a08-453d09bdfdc6}':
                             version_range("thunderbird", "9.0a1", "10.0a1")}
    traverser.err.notice(
        err_id=("testcases_javascript_calldefinitions", "WhichPaneHasFocus"),
        notice="Removed WhichPaneHasFocus or FocusOnFirstAttachment function in use.",
        description=description,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions=affected_versions,
        tier=5)
    # Hand back a dirty placeholder object so JS traversal can continue.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def TB10Function_removed(wrapper, arguments, traverser):
    """
    Flag calls to MsgDeleteMessageFromMessageWindow and
    goToggleSplitter for incompatibility with Thunderbird 10
    """
    description = ("This add-on appears to use MsgDeleteMessageFromMessageWindow "
                   "or goToggleSplitter which have been removed "
                   "as part of changes made in Thunderbird 10. For more information, "
                   "please refer to https://bugzilla.mozilla.org/show_bug.cgi?id=702201 and "
                   "https://bugzilla.mozilla.org/show_bug.cgi?id=609245")
    affected_versions = {'{3550f703-e582-4d05-9a08-453d09bdfdc6}':
                             version_range("thunderbird", "10.0a1", "11.0a1")}
    traverser.err.notice(
        err_id=("testcases_javascript_calldefinitions", "MsgDeleteMessageFromMessageWindow"),
        notice="Removed MsgDeleteMessageFromMessageWindow or goToggleSplitter function in use.",
        description=description,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions=affected_versions,
        tier=5)
    # Hand back a dirty placeholder object so JS traversal can continue.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def TB10Function_renamed(wrapper, arguments, traverser):
    """
    Flag calls to AddMessageComposeOfflineObserver and
    RemoveMessageComposeOfflineObserver for incompatibility with Thunderbird 10
    """
    traverser.err.notice(
        err_id=("testcases_javascript_calldefinitions", "AddMessageComposeOfflineObserver"),
        # Fixed notice text copy-pasted from the TB10 "removed" check: these
        # functions were renamed (not removed), and goToggleSplitter belongs
        # to a different check entirely.
        notice="Renamed AddMessageComposeOfflineObserver or "
               "RemoveMessageComposeOfflineObserver function in use.",
        description="This add-on appears to use AddMessageComposeOfflineObserver or "
                    "RemoveMessageComposeOfflineObserver which have been renamed to "
                    "AddMessageComposeOfflineQuitObserver and RemoveMessageComposeOfflineQuitObserver "
                    "respectively as part of changes made in Thunderbird 10. For more information, "
                    "please refer to https://bugzilla.mozilla.org/show_bug.cgi?id=682581",
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions={'{3550f703-e582-4d05-9a08-453d09bdfdc6}':
                             version_range("thunderbird", "10.0a1", "11.0a1")},
        tier=5)
    # Return a dirty placeholder object so JS traversal can continue.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def nsIMsgQuote_changed(wrapper, arguments, traverser):
    """
    Flag calls to nsIMsgQuote.quoteMessage for incompatibility with Thunderbird 11
    """
    traverser.err.notice(
        err_id=("testcases_javascript_calldefinitions", "nsIMsgQuote"),
        notice="Altered nsIMsgQuote.quoteMessage function in use.",
        # Fixed missing space that rendered as "aOrigHdradded" in the
        # user-facing description.
        description="This add-on appears to use nsIMsgQuote.quoteMessage which had the argument aOrigHdr "
                    "added as part of changes made in Thunderbird 11. For more information, "
                    "please refer to https://bugzilla.mozilla.org/show_bug.cgi?id=351109",
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions={'{3550f703-e582-4d05-9a08-453d09bdfdc6}':
                             version_range("thunderbird", "11.0a1", "12.0a1")},
        tier=5)
    # Return a dirty placeholder object so JS traversal can continue.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def nsIComm4xProfile_removed(wrapper, arguments, traverser):
    """
    Flag use of nsIComm4xProfile for incompatibility with Thunderbird 11
    """
    # Pre-format the bug URL so the message below stays readable.
    bug_url = BUGZILLA_BUG % 689437
    traverser.err.notice(
        err_id=("testcases_javascript_calldefinitions", "nsIComm4xProfile"),
        notice="Removed nsIComm4xProfile interface in use.",
        description="This add-on appears to use nsIComm4xProfile which was "
                    "removed as part of changes made in Thunderbird 11. For "
                    "more information, please refer to %s." % bug_url,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions={'{3550f703-e582-4d05-9a08-453d09bdfdc6}':
                             version_range("thunderbird", "11.0a1", "12.0a1")},
        tier=5)
    # Hand back a dirty placeholder object so JS traversal can continue.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def nsIMailtoUrl_changed(wrapper, arguments, traverser):
    """
    Flag calls to nsIMailtoUrl.GetMessageContents for incompatibility with
    Thunderbird 11.
    """
    traverser.err.notice(
        # Fixed err_id and notice copy-pasted from the nsIMsgQuote check:
        # this check is about nsIMailtoUrl.GetMessageContents.
        err_id=("testcases_javascript_calldefinitions", "nsIMailtoUrl"),
        notice="Altered nsIMailtoUrl.GetMessageContents function in use.",
        # Also fixed a missing space that rendered as "changed
        # tonsIMailtoUrl" in the user-facing description.
        description="This add-on appears to use nsIMailtoUrl."
                    "GetMessageContents which was changed to "
                    "nsIMailtoUrl.getMessageContents (lower case g) as part "
                    "of Thunderbird 11. For more information, please refer to "
                    "%s." % BUGZILLA_BUG % 711980,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions={'{3550f703-e582-4d05-9a08-453d09bdfdc6}':
                             version_range("thunderbird", "11.0a1", "12.0a1")},
        tier=5)
    # Return a dirty placeholder object so JS traversal can continue.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def nsIMsgFolder_changed(wrapper, arguments, traverser):
    """
    Flag use of nsIMsgFolder.offlineStoreOutputStream for incompatibility with
    Thunderbird 12.
    """
    # Pre-format the bug URL so the message below stays readable.
    bug_url = BUGZILLA_BUG % 402392
    traverser.err.notice(
        err_id=("testcases_javascript_calldefinitions", "nsIMsgFolder"),
        notice="Altered nsIMsgFolder.offlineStoreOutputStream attr in use.",
        description="This add-on appears to use nsIMsgFolder."
                    "offlineStoreOutputStream which was replaced with "
                    "method getOfflineStoreOutputStream(in nsIMsgDBHdr aHdr) "
                    "in Thunderbird 12. For more information, please refer to "
                    "%s." % bug_url,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions=TB12_DEFINITION,
        tier=5)
    # Hand back a dirty placeholder object so JS traversal can continue.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def nsIMsgDatabase_changed(wrapper, arguments, traverser):
    """
    Flag use of nsIMsgDatabase related methods for incompatibility with
    Thunderbird 12.
    """
    traverser.err.notice(
        err_id=("testcases_javascript_calldefinitions", "nsIMsgDatabase"),
        # Fixed interface-name typo in the user-facing notice
        # ("nsIDatabase" -> "nsIMsgDatabase").
        notice="Altered nsIMsgDatabase methods in use.",
        description="This add-on appears to use nsIMsgDatabase::Open, "
                    "nsIMsgDBService::openMailDBFromFile, or "
                    "nsIMsgOutputStream.folderStream which have been changed "
                    "in Thunderbird 12. For more information, please refer to "
                    "%s." % BUGZILLA_BUG % 402392,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions=TB12_DEFINITION,
        tier=5)
    # Return a dirty placeholder object so JS traversal can continue.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def TB12_nsIImapProtocol_changed(wrapper, arguments, traverser):
    """
    Flag use of nsIImapProtocol::Initialize and
    nsIImapIncomingServer::GetImapConnectionAndLoadUrl for incompatibility
    with Thunderbird 12.
    """
    # Pre-format the bug URL so the message below stays readable.
    bug_url = BUGZILLA_BUG % 704707
    traverser.err.notice(
        err_id=("testcases_javascript_calldefinitions", "nsIImapProtocol"),
        notice="Altered nsIImapProtocol or IncomingServer methods in use.",
        description="This add-on uses nsIImapProtocol::Initialize or "
                    "nsIImapIncomingServer::GetImapConnectionAndLoadUrl "
                    "which had parameters removed "
                    "in Thunderbird 12. For more information, please refer to "
                    "%s." % bug_url,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions=TB12_DEFINITION,
        tier=5)
    # Hand back a dirty placeholder object so JS traversal can continue.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def TB13_nsIMsgLocalMailFolder_changed(wrapper, arguments, traverser):
    """
    Flag use of nsIMsgLocalMailFolder::addMessage and addMessageBatch
    with Thunderbird 13.
    """
    # Pre-format the bug URL so the message below stays readable.
    bug_url = BUGZILLA_BUG % 647699
    traverser.err.warning(
        err_id=("testcases_javascript_calldefinitions", "nsIMsgLocalMailFolder"),
        warning="Altered nsIMsgLocalMailFolder methods in use.",
        description="This add-on uses nsIMsgLocalMailFolder::addMessage or "
                    "nsIMsgLocalMailFolder::addMessageBatch "
                    "which had their return values altered "
                    "in Thunderbird 13. For more information, please refer to "
                    "%s." % bug_url,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions=TB13_DEFINITION,
        tier=5)
    # Hand back a dirty placeholder object so JS traversal can continue.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def TB13_nsIMsgNewsFolder_changed(wrapper, arguments, traverser):
    """Flag use of several nsIMsgNewsFolder methods with Thunderbird 13."""
    traverser.err.warning(
        err_id=("testcases_javascript_calldefinitions", "nsIMsgNewsFolder"),
        warning="Altered nsIMsgNewsFolder methods in use.",
        # Fixed missing comma/parentheses in the user-facing method list,
        # which rendered as "getGroupPasswordWithUI getGroupUsernameWithUI()".
        description="This add-on uses nsIMsgNewsFolder::getGroupPasswordWithUI(), "
                    "getGroupUsernameWithUI(), forgetGroupUsername() "
                    "or forgetGroupPassword() which were removed "
                    "in Thunderbird 13. For more information, please refer to "
                    "%s." % BUGZILLA_BUG % 201750,
        filename=traverser.filename,
        line=traverser.line,
        column=traverser.position,
        context=traverser.context,
        compatibility_type="error",
        for_appversions=TB13_DEFINITION,
        tier=5)
    # Return a dirty placeholder object so JS traversal can continue.
    return JSWrapper(JSObject(), traverser=traverser, dirty=True)
def requestAnimationFrame(wrapper, arguments, traverser):
    """
    As of FX11, requestAnimationFrame should be called with at least one
    parameter.
    """
    # Only a zero-argument call is a compatibility problem.
    if not arguments:
        description = ("The requestAnimationFrame function now requires one "
                       "parameter and can't be called without any arguments. "
                       "See %s for more information." % BUGZILLA_BUG % 704171)
        traverser.err.warning(
            err_id=("testcases_js_actions", "requestAnimationFrame", "no_args"),
            warning="requestAnimationFrame now requires one parameter",
            description=description,
            filename=traverser.filename,
            line=traverser.line,
            column=traverser.position,
            context=traverser.context,
            compatibility_type="error",
            for_appversions=FX11_DEFINITION,
            tier=5)
def js_wrap(wrapper, arguments, traverser):
    """Return the wrapped variant of an unwrapped JSObject."""
    if not arguments:
        traverser._debug("WRAP:NO ARGS")
        return None
    traverser._debug("WRAPPING OBJECT")
    target = traverser._traverse_node(arguments[0])
    if target.value is None:
        # Nothing concrete to wrap; hand back a fresh wrapped object.
        traverser._debug("WRAPPING OBJECT>>NOTHING TO WRAP")
        return JSWrapper(JSObject(), traverser=traverser)
    # Globals keep their flags in a dict; plain objects use an attribute.
    if target.is_global:
        target.value["is_unwrapped"] = False
    else:
        target.value.is_unwrapped = False
    return target
def js_unwrap(wrapper, arguments, traverser):
    """Return the unwrapped variant of a wrapped JSObject."""
    if not arguments:
        traverser._debug("UNWRAP:NO ARGS")
        return
    traverser._debug("UNWRAPPING OBJECT")
    obj = traverser._traverse_node(arguments[0])
    if obj.value is None:
        # Nothing concrete to unwrap; return a fresh object already
        # marked as unwrapped.
        traverser._debug("UNWRAPPING OBJECT>>NOTHING TO UNWRAP")
        return JSWrapper(JSObject(unwrapped=True), traverser=traverser)
    # Globals keep their flags in a dict; plain objects use an attribute.
    if obj.is_global:
        obj.value["is_unwrapped"] = True
    else:
        obj.value.is_unwrapped = True
    return obj
def open_in_chrome_context(uri, method, traverser):
    """Warn when `method` opens a chrome dialog with a variable or
    remote/protocol-relative URI."""
    # A non-literal URI could hold anything at runtime, so flag it; we still
    # fall through and inspect whatever literal value is available.
    if not uri.is_literal():
        traverser.err.notice(
            err_id=("js", "instanceactions", "%s_nonliteral" % method),
            notice="`%s` called with non-literal parameter." % method,
            description="Calling `%s` with variable parameters can result in "
                        "potential security vulnerabilities if the variable "
                        "contains a remote URI. Consider using `window.open` "
                        "with the `chrome=no` flag." % method,
            filename=traverser.filename,
            line=traverser.line,
            column=traverser.position,
            context=traverser.context)

    remote_pattern = re.compile(r"^(https?|ftp|data):(//)?", re.I)
    uri_value = unicode(uri.get_literal_value())
    # "//" is protocol-relative and therefore also potentially remote.
    if uri_value.startswith("//") or remote_pattern.match(uri_value):
        traverser.err.warning(
            err_id=("js", "instanceactions", "%s_remote_uri" % method),
            warning="`%s` called with non-local URI." % method,
            description="Calling `%s` with a non-local URI will result in the "
                        "dialog being opened with chrome privileges." % method,
            filename=traverser.filename,
            line=traverser.line,
            column=traverser.position,
            context=traverser.context)
| bsd-3-clause |
pigeonflight/strider-plone | docker/appengine/lib/django-1.2/tests/regressiontests/model_regress/tests.py | 39 | 6781 | import datetime
from operator import attrgetter
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import DEFAULT_DB_ALIAS
from django.test import TestCase
from django.utils import tzinfo
from models import (Worker, Article, Party, Event, Department,
BrokenUnicodeMethod, NonAutoPK)
class ModelTests(TestCase):
    """Assorted regression tests for model field lookups and behavior."""
    # The bug is that the following queries would raise:
    # "TypeError: Related Field has invalid lookup: gte"
    def test_related_gte_lookup(self):
        """
        Regression test for #10153: foreign key __gte lookups.
        """
        Worker.objects.filter(department__gte=0)

    def test_related_lte_lookup(self):
        """
        Regression test for #10153: foreign key __lte lookups.
        """
        Worker.objects.filter(department__lte=0)

    def test_empty_choice(self):
        # NOTE: Part of the regression test here is merely parsing the model
        # declaration. The verbose_name, in particular, did not always work.
        a = Article.objects.create(
            headline="Look at me!", pub_date=datetime.datetime.now()
        )
        # An empty choice field should return None for the display name.
        self.assertEqual(a.get_status_display(), None)

        # Empty strings should be returned as Unicode
        a = Article.objects.get(pk=a.pk)
        self.assertEqual(a.misc_data, u'')
        self.assertEqual(type(a.misc_data), unicode)

    def test_long_textfield(self):
        # TextFields can hold more than 4000 characters (this was broken in
        # Oracle).
        a = Article.objects.create(
            headline="Really, really big",
            pub_date=datetime.datetime.now(),
            article_text = "ABCDE" * 1000
        )
        a = Article.objects.get(pk=a.pk)
        # Bug fix: this assertion was previously split into two statements
        # ("self.assertEqual" on one line and the bare argument tuple on the
        # next), so the length was never actually checked.
        self.assertEqual(len(a.article_text), 5000)

    def test_date_lookup(self):
        # Regression test for #659
        Party.objects.create(when=datetime.datetime(1999, 12, 31))
        Party.objects.create(when=datetime.datetime(1998, 12, 31))
        Party.objects.create(when=datetime.datetime(1999, 1, 1))
        self.assertQuerysetEqual(
            Party.objects.filter(when__month=2), []
        )
        self.assertQuerysetEqual(
            Party.objects.filter(when__month=1), [
                datetime.date(1999, 1, 1)
            ],
            attrgetter("when")
        )
        self.assertQuerysetEqual(
            Party.objects.filter(when__month=12), [
                datetime.date(1999, 12, 31),
                datetime.date(1998, 12, 31),
            ],
            attrgetter("when")
        )
        self.assertQuerysetEqual(
            Party.objects.filter(when__year=1998), [
                datetime.date(1998, 12, 31),
            ],
            attrgetter("when")
        )
        # Regression test for #8510: date lookups also accept string values.
        self.assertQuerysetEqual(
            Party.objects.filter(when__day="31"), [
                datetime.date(1999, 12, 31),
                datetime.date(1998, 12, 31),
            ],
            attrgetter("when")
        )
        self.assertQuerysetEqual(
            Party.objects.filter(when__month="12"), [
                datetime.date(1999, 12, 31),
                datetime.date(1998, 12, 31),
            ],
            attrgetter("when")
        )
        self.assertQuerysetEqual(
            Party.objects.filter(when__year="1998"), [
                datetime.date(1998, 12, 31),
            ],
            attrgetter("when")
        )

    def test_date_filter_null(self):
        # Date filtering was failing with NULL date values in SQLite
        # (regression test for #3501, amongst other things).
        Party.objects.create(when=datetime.datetime(1999, 1, 1))
        Party.objects.create()
        p = Party.objects.filter(when__month=1)[0]
        self.assertEqual(p.when, datetime.date(1999, 1, 1))
        self.assertQuerysetEqual(
            Party.objects.filter(pk=p.pk).dates("when", "month"), [
                1
            ],
            attrgetter("month")
        )

    def test_get_next_prev_by_field(self):
        # Check that get_next_by_FIELD and get_previous_by_FIELD don't crash
        # when we have usecs values stored on the database
        #
        # It crashed after the Field.get_db_prep_* refactor, because on most
        # backends DateTimeFields supports usecs, but DateTimeField.to_python
        # didn't recognize them. (Note that
        # Model._get_next_or_previous_by_FIELD coerces values to strings)
        Event.objects.create(when=datetime.datetime(2000, 1, 1, 16, 0, 0))
        Event.objects.create(when=datetime.datetime(2000, 1, 1, 6, 1, 1))
        Event.objects.create(when=datetime.datetime(2000, 1, 1, 13, 1, 1))
        e = Event.objects.create(when=datetime.datetime(2000, 1, 1, 12, 0, 20, 24))
        self.assertEqual(
            e.get_next_by_when().when, datetime.datetime(2000, 1, 1, 13, 1, 1)
        )
        self.assertEqual(
            e.get_previous_by_when().when, datetime.datetime(2000, 1, 1, 6, 1, 1)
        )

    def test_primary_key_foreign_key_types(self):
        # Check Department and Worker (non-default PK type)
        d = Department.objects.create(id=10, name="IT")
        w = Worker.objects.create(department=d, name="Full-time")
        self.assertEqual(unicode(w), "Full-time")

    def test_broken_unicode(self):
        # Models with broken unicode methods should still have a printable repr
        b = BrokenUnicodeMethod.objects.create(name="Jerry")
        self.assertEqual(repr(b), "<BrokenUnicodeMethod: [Bad Unicode data]>")

    # MySQL and Oracle backends do not support timezone-aware datetimes,
    # so only define this test for the other backends.
    if settings.DATABASES[DEFAULT_DB_ALIAS]["ENGINE"] not in [
        "django.db.backends.mysql",
        "django.db.backends.oracle"
    ]:
        def test_timezones(self):
            # Saving an updating with timezone-aware datetime Python objects.
            # Regression test for #10443.
            # The idea is that all these creations and saving should work
            # without crashing. It's not rocket science.
            dt1 = datetime.datetime(2008, 8, 31, 16, 20, tzinfo=tzinfo.FixedOffset(600))
            dt2 = datetime.datetime(2008, 8, 31, 17, 20, tzinfo=tzinfo.FixedOffset(600))
            obj = Article.objects.create(
                headline="A headline", pub_date=dt1, article_text="foo"
            )
            obj.pub_date = dt2
            obj.save()
            self.assertEqual(
                Article.objects.filter(headline="A headline").update(pub_date=dt1),
                1
            )
class ModelValidationTest(TestCase):
    def test_pk_validation(self):
        # Saving a second object that reuses a non-auto primary key value
        # must fail unique validation.
        original = NonAutoPK.objects.create(name="one")
        duplicate = NonAutoPK(name="one")
        self.assertRaises(ValidationError, duplicate.validate_unique)
| mit |
cmbiwer/pycbc | pycbc/events/coinc.py | 4 | 43750 | # Copyright (C) 2015 Alex Nitz
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# =============================================================================
#
# Preamble
#
# =============================================================================
#
""" This modules contains functions for calculating and manipulating
coincident triggers.
"""
import h5py, numpy, logging, pycbc.pnutils, copy, lal
from pycbc.detector import Detector
def background_bin_from_string(background_bins, data):
    """ Return template ids for each bin as defined by the format string

    Parameters
    ----------
    background_bins: list of strings
        List of strings which define how a background bin is taken from the
        list of templates. Each string has the form 'name:type:boundary'
        where boundary is 'lt' or 'gt' followed by a numerical value.
    data: dict of numpy.ndarrays
        Dict with parameter key values and numpy.ndarray values which define
        the parameters of the template bank to bin up.

    Returns
    -------
    bins: dict
        Dictionary of location indices indexed by a bin name
    """
    used = numpy.array([], dtype=numpy.uint32)
    bins = {}
    for mbin in background_bins:
        name, bin_type, boundary = tuple(mbin.split(':'))

        if boundary[0:2] == 'lt':
            member_func = lambda vals, bd=boundary : vals < float(bd[2:])
        elif boundary[0:2] == 'gt':
            member_func = lambda vals, bd=boundary : vals > float(bd[2:])
        else:
            raise RuntimeError("Can't parse boundary condition! Must begin "
                               "with 'lt' or 'gt'")

        # Map the bin type to the parameter values tested by member_func.
        # (The two 'component' cases are now part of one if/elif chain; the
        # second test was previously a separate 'if', which worked only by
        # accident of the branches being mutually exclusive.)
        if bin_type == 'component' and boundary[0:2] == 'lt':
            # maximum component mass is less than boundary value
            vals = numpy.maximum(data['mass1'], data['mass2'])
        elif bin_type == 'component' and boundary[0:2] == 'gt':
            # minimum component mass is greater than boundary value
            vals = numpy.minimum(data['mass1'], data['mass2'])
        elif bin_type == 'total':
            vals = data['mass1'] + data['mass2']
        elif bin_type == 'chirp':
            vals = pycbc.pnutils.mass1_mass2_to_mchirp_eta(
                                               data['mass1'], data['mass2'])[0]
        elif bin_type == 'SEOBNRv2Peak':
            vals = pycbc.pnutils.get_freq('fSEOBNRv2Peak',
                  data['mass1'], data['mass2'], data['spin1z'], data['spin2z'])
        elif bin_type == 'SEOBNRv4Peak':
            vals = pycbc.pnutils.get_freq('fSEOBNRv4Peak', data['mass1'],
                                          data['mass2'], data['spin1z'],
                                          data['spin2z'])
        elif bin_type == 'SEOBNRv2duration':
            vals = pycbc.pnutils.get_imr_duration(data['mass1'], data['mass2'],
                               data['spin1z'], data['spin2z'], data['f_lower'],
                               approximant='SEOBNRv2')
        else:
            raise ValueError('Invalid bin type %s' % bin_type)

        locs = member_func(vals)
        del vals

        # make sure we don't reuse anything from an earlier bin
        locs = numpy.where(locs)[0]
        locs = numpy.delete(locs, numpy.where(numpy.in1d(locs, used))[0])
        used = numpy.concatenate([used, locs])
        bins[name] = locs
    return bins
def calculate_n_louder(bstat, fstat, dec, skip_background=False):
    """ Calculate for each foreground event the number of background events
    that are louder than it.

    Parameters
    ----------
    bstat: numpy.ndarray
        Array of the background statistic values
    fstat: numpy.ndarray or scalar
        Array of the foreground statistic values or single value
    dec: numpy.ndarray
        Array of the decimation factors for the background statistics
    skip_background: optional, {boolean, False}
        Skip calculating cumulative numbers for background triggers

    Returns
    -------
    back_cum_num: numpy.ndarray
        The cumulative array of background triggers (only returned when
        skip_background is False)
    fore_n_louder: numpy.ndarray
        The number of background triggers above each foreground trigger
    """
    order = bstat.argsort()
    bstat_sorted = bstat[order]
    dec_sorted = dec[order]

    # Reverse-cumulative sum of decimation factors gives, for each sorted
    # background trigger, the weighted count of strictly louder background
    # triggers; subtracting dec removes each trigger's own contribution
    # (cumsum is inclusive of the first value).
    n_louder = dec_sorted[::-1].cumsum()[::-1] - dec_sorted

    # Find where each foreground value would sit in the sorted background;
    # subtract one so the background value at that index is included in
    # the count, consistent with the definition of n_louder above.
    pos = numpy.searchsorted(bstat_sorted, fstat, side='left') - 1

    # Foreground values quieter than (or equal to) the entire background
    # land at index -1, which does not exist; clamp back to zero.
    if isinstance(pos, numpy.ndarray):
        pos[pos < 0] = 0
    elif pos < 0:
        pos = 0

    fore_n_louder = n_louder[pos]

    if skip_background:
        return fore_n_louder

    # Map the per-trigger counts back to the original (unsorted) order.
    back_cum_num = n_louder[order.argsort()]
    return back_cum_num, fore_n_louder
def timeslide_durations(start1, start2, end1, end2, timeslide_offsets):
    """ Find the coincident time for each timeslide.

    The first detector's analyzed segments are slid to the right by each
    offset in timeslide_offsets and intersected with the second detector's
    segments.

    Parameters
    ----------
    start1: numpy.ndarray
        Array of the start of valid analyzed times for detector 1
    start2: numpy.ndarray
        Array of the start of valid analyzed times for detector 2
    end1: numpy.ndarray
        Array of the end of valid analyzed times for detector 1
    end2: numpy.ndarray
        Array of the end of valid analyzed times for detector 2
    timeslide_offsets: numpy.ndarray
        Array of offsets (in seconds) for each timeslide

    Returns
    --------
    durations: numpy.ndarray
        Array of coincident time for each timeslide in the offset array
    """
    from . import veto
    # Detector 2's segments are fixed; only detector 1 is slid.
    seg2 = veto.start_end_to_segments(start2, end2)
    durations = [
        abs((veto.start_end_to_segments(start1 + offset,
                                        end1 + offset) & seg2).coalesce())
        for offset in timeslide_offsets
    ]
    return numpy.array(durations)
def time_coincidence(t1, t2, window, slide_step=0):
    """ Find coincidences by time window

    Parameters
    ----------
    t1 : numpy.ndarray
        Array of trigger times from the first detector
    t2 : numpy.ndarray
        Array of trigger times from the second detector
    window : float
        Coincidence window maximum time difference, arbitrary units (usually s)
    slide_step : float (default 0)
        If calculating background coincidences, the interval between background
        slides, arbitrary units (usually s)

    Returns
    -------
    idx1 : numpy.ndarray
        Array of indices into the t1 array for coincident triggers
    idx2 : numpy.ndarray
        Array of indices into the t2 array
    slide : numpy.ndarray
        Array of slide ids
    """
    # When sliding, work with times folded into a single slide interval.
    if slide_step:
        folded1 = t1 % slide_step
        folded2 = t2 % slide_step
    else:
        folded1, folded2 = t1, t2

    order1 = folded1.argsort()
    order2 = folded2.argsort()
    folded1 = folded1[order1]
    folded2 = folded2[order2]

    if slide_step:
        # Pad the second detector's folded times with copies shifted one
        # slide interval down and up, so windows straddling the fold
        # boundary still find their partners.
        folded2 = numpy.concatenate(
            [folded2 - slide_step, folded2, folded2 + slide_step])
        order2 = numpy.concatenate([order2, order2, order2])

    lo = numpy.searchsorted(folded2, folded1 - window)
    hi = numpy.searchsorted(folded2, folded1 + window)

    idx1 = numpy.repeat(order1, hi - lo)
    matches = [order2[a:b] for a, b in zip(lo, hi)]
    if len(matches) > 0:
        idx2 = numpy.concatenate(matches)
    else:
        idx2 = numpy.array([], dtype=numpy.int64)

    if slide_step:
        # The (rounded) difference in slide counts identifies the slide id.
        slide = numpy.rint((t1 / slide_step)[idx1] - (t2 / slide_step)[idx2])
    else:
        slide = numpy.zeros(len(idx1))

    return (idx1.astype(numpy.uint32), idx2.astype(numpy.uint32),
            slide.astype(numpy.int32))
def time_multi_coincidence(times, slide_step=0, slop=.003,
                           pivot='H1', fixed='L1'):
    """ Find multi detector coincidences.

    Parameters
    ----------
    times: dict of numpy.ndarrays
        Dictionary keyed by ifo of single ifo trigger times
    slide_step: float
        Interval between time slides
    slop: float
        The amount of time to add to the TOF between detectors for coincidence
    pivot: str
        The ifo to which time shifts are applied in first stage coincidence
    fixed: str
        The other ifo used in first stage coincidence, subsequently used as a
        time reference for additional ifos. All other ifos are not time shifted
        relative to this ifo

    Returns
    -------
    ids: dict of arrays of int
        Dictionary keyed by ifo with ids of trigger times forming coincidences.
        Coincidence is tested for every pair of ifos that can be formed from
        the input dict: only those tuples of times passing all tests are
        recorded
    slide: array of int
        Slide ids of coincident triggers in pivot ifo
    """
    def win(ifo1, ifo2):
        # Coincidence window: light travel time between the two sites
        # plus the user-supplied 'slop'.
        d1 = Detector(ifo1)
        d2 = Detector(ifo2)
        return d1.light_travel_time_to_detector(d2) + slop

    # Find coincs between the 'pivot' and 'fixed' detectors as in 2-ifo case
    pivot_id, fix_id, slide = time_coincidence(times[pivot], times[fixed],
                                               win(pivot, fixed),
                                               slide_step=slide_step)

    # Additional detectors do not slide independently of the 'fixed' one
    # Each trigger in an additional detector must be coincident with both
    # triggers in an existing coincidence

    # Slide 'pivot' trigger times to be coincident with trigger times in
    # 'fixed' detector
    fixed_time = times[fixed][fix_id]
    pivot_time = times[pivot][pivot_id] - slide_step * slide
    ctimes = {fixed: fixed_time, pivot: pivot_time}
    ids = {fixed: fix_id, pivot: pivot_id}

    dep_ifos = [ifo for ifo in times.keys() if ifo != fixed and ifo != pivot]
    for ifo1 in dep_ifos:
        # FIXME - make this loop into a function?

        # otime is extra ifo time in original trigger order
        otime = times[ifo1]
        # tsort gives ordering from original order to time sorted order
        tsort = otime.argsort()
        time1 = otime[tsort]

        # Find coincidences between dependent ifo triggers and existing coincs
        # - Cycle over fixed and pivot
        # - At the 1st iteration, the fixed and pivot triggers are reduced to
        #   those for which the first out of fixed/pivot forms a coinc with ifo1
        # - At the 2nd iteration, we are left with triggers for which both
        #   fixed and pivot are coincident with ifo1
        # - If there is more than 1 dependent ifo, ones that were previously
        #   tested against fixed and pivot are now present for testing with new
        #   dependent ifos
        for ifo2 in ids:
            logging.info('added ifo %s, testing against %s' % (ifo1, ifo2))
            w = win(ifo1, ifo2)
            left = numpy.searchsorted(time1, ctimes[ifo2] - w)
            right = numpy.searchsorted(time1, ctimes[ifo2] + w)
            # Any times within time1 coincident with the time in ifo2 have
            # indices between 'left' and 'right'
            # 'nz' indexes into times in ifo2 which have coincidences with ifo1
            # times
            nz = (right - left).nonzero()
            if len(right - left):
                rlmax = (right - left).max()
            if len(nz[0]) and rlmax > 1:
                # We expect at most one coincident time in ifo1, assuming
                # trigger spacing in ifo1 > time window.
                # However there are rare corner cases at starts/ends of inspiral
                # jobs. For these, arbitrarily keep the first trigger and
                # discard the second (and any subsequent ones).
                where = right - left == rlmax
                logging.warning('Triggers in %s are closer than coincidence '
                                'window, 1 or more coincs will be discarded. '
                                'This is a warning, not an error.' % ifo1)
                # NOTE(review): debugging print left in library code — dumps
                # the clashing trigger times to stdout; consider replacing
                # with a logging call.
                print([float(ti) for ti in
                       time1[left[where][0]:right[where][0]]])
            # identify indices of times in ifo1 that form coincs with ifo2
            dep_ids = left[nz]
            # slide is array of slide ids attached to pivot ifo
            slide = slide[nz]
            for ifo in ctimes:
                # cycle over fixed and pivot & any previous additional ifos
                # reduce times and IDs to just those forming a coinc with ifo1
                ctimes[ifo] = ctimes[ifo][nz]
                ids[ifo] = ids[ifo][nz]

        # undo time sorting on indices of ifo1 triggers, add ifo1 ids and times
        # to dicts for testing against any additional detectors
        ids[ifo1] = tsort[dep_ids]
        ctimes[ifo1] = otime[ids[ifo1]]

    return ids, slide
def cluster_coincs(stat, time1, time2, timeslide_id, slide, window, argmax=numpy.argmax):
    """Cluster coincident events for each timeslide separately, across
    templates, based on the ranking statistic

    Parameters
    ----------
    stat: numpy.ndarray
        vector of ranking values to maximize
    time1: numpy.ndarray
        first time vector
    time2: numpy.ndarray
        second time vector
    timeslide_id: numpy.ndarray
        vector that determines the timeslide offset
    slide: float
        length of the timeslides offset interval
    window: float
        length to cluster over

    Returns
    -------
    cindex: numpy.ndarray
        The set of indices corresponding to the surviving coincidences.
    """
    logging.info('clustering coinc triggers over %ss window' % window)

    if len(time1) == 0 or len(time2) == 0:
        logging.info('No coinc triggers in one, or both, ifos.')
        return numpy.array([])

    if numpy.isfinite(slide):
        # For a time-shifted coinc, time1 exceeds time2 by approximately
        # timeslide_id * slide; adding that quantity gives a mean coinc
        # time located around time1.
        coinc_time = (time1 + time2 + timeslide_id * slide) / 2
    else:
        coinc_time = 0.5 * (time2 + time1)

    # Separate the slides along the time axis so each slide is clustered
    # independently: offset every slide by more than the full data span.
    shift = timeslide_id.astype(numpy.float128)
    coinc_time = coinc_time.astype(numpy.float128)
    span = (coinc_time.max() - coinc_time.min()) + window * 10
    coinc_time = coinc_time + span * shift

    return cluster_over_time(stat, coinc_time, window, argmax)
def cluster_coincs_multiifo(stat, time_coincs, timeslide_id, slide, window,
                            argmax=numpy.argmax):
    """Cluster coincident events for each timeslide separately, across
    templates, based on the ranking statistic

    Parameters
    ----------
    stat: numpy.ndarray
        vector of ranking values to maximize
    time_coincs: tuple of numpy.ndarrays
        trigger times for each ifo, or -1 if an ifo does not participate
        in a coinc
    timeslide_id: numpy.ndarray
        vector that determines the timeslide offset
    slide: float
        length of the timeslides offset interval
    window: float
        duration of clustering window in seconds

    Returns
    -------
    cindex: numpy.ndarray
        The set of indices corresponding to the surviving coincidences
    """
    # BUG FIX: on Python 3, zip() returns a lazy iterator with no len() and
    # which can only be consumed once; materialize it as a list so the
    # emptiness check and the loop below both work.
    time_coinc_zip = list(zip(*time_coincs))
    if len(time_coinc_zip) == 0:
        logging.info('No coincident triggers.')
        return numpy.array([])

    time_avg_num = []
    # find number of ifos and mean time over participating ifos for each coinc
    for tc in time_coinc_zip:
        time_avg_num.append(mean_if_greater_than_zero(tc))

    time_avg, num_ifos = zip(*time_avg_num)

    time_avg = numpy.array(time_avg)
    num_ifos = numpy.array(num_ifos)

    # shift all but the pivot ifo by (num_ifos - 1) * timeslide_id * slide
    # this leads to a mean coinc time located around pivot time
    if numpy.isfinite(slide):
        nifos_minusone = (num_ifos - numpy.ones_like(num_ifos))
        time_avg = time_avg + (nifos_minusone * timeslide_id * slide)/num_ifos

    tslide = timeslide_id.astype(numpy.float128)
    time_avg = time_avg.astype(numpy.float128)

    # separate each slide into its own non-overlapping time range
    span = (time_avg.max() - time_avg.min()) + window * 10
    time_avg = time_avg + span * tslide

    cidx = cluster_over_time(stat, time_avg, window, argmax)
    return cidx
def mean_if_greater_than_zero(vals):
    """Calculate the mean of the strictly positive entries of a vector.

    E.g. used for the mean time over coincident triggers when timestamps
    are set to -1 for ifos not included in the coincidence.

    Parameters
    ----------
    vals: iterator of numerical values
        values to be mean averaged

    Returns
    -------
    mean: float
        The mean of the values in the original vector which are
        greater than zero
    num_above_zero: int
        The number of entries in the vector which are above zero
    """
    arr = numpy.array(vals)
    positive = arr > 0
    return arr[positive].mean(), positive.sum()
def cluster_over_time(stat, time, window, argmax=numpy.argmax):
    """Cluster generalized transient events over time via maximum stat over a
    symmetric sliding window

    Parameters
    ----------
    stat: numpy.ndarray
        vector of ranking values to maximize
    time: numpy.ndarray
        time to use for clustering
    window: float
        length to cluster over
    argmax: function
        the function used to calculate the maximum value

    Returns
    -------
    cindex: numpy.ndarray
        The set of indices corresponding to the surviving coincidences.
    """
    logging.info('Clustering events over %s s window', window)

    # work on time-sorted copies; results are mapped back through the sort
    time_sorting = time.argsort()
    stat = stat[time_sorting]
    time = time[time_sorting]

    left = numpy.searchsorted(time, time - window)
    right = numpy.searchsorted(time, time + window)
    # (removed a dead `indices = []` that was immediately overwritten here)
    indices = numpy.zeros(len(left), dtype=numpy.uint32)

    # i is the index we are inspecting, j is the next one to save
    i = 0
    j = 0
    while i < len(left):
        l = left[i]
        r = right[i]

        # If there are no other points to compare it is obviously the max
        if (r - l) == 1:
            indices[j] = i
            j += 1
            i += 1
            continue

        # Find the location of the maximum within the time interval around i
        max_loc = argmax(stat[l:r]) + l

        # If this point is the max, we can skip to the right boundary
        if max_loc == i:
            indices[j] = i
            i = r
            j += 1

        # If the max is later than i, we can skip to it
        elif max_loc > i:
            i = max_loc

        elif max_loc < i:
            i += 1

    indices = indices[:j]

    logging.info('%d triggers remaining', len(indices))
    return time_sorting[indices]
class MultiRingBuffer(object):
    """Dynamic size n-dimensional ring buffer that can expire elements."""

    def __init__(self, num_rings, max_time, dtype):
        """
        Parameters
        ----------
        num_rings: int
            The number of ring buffers to create. They all will have the same
            intrinsic size and will expire at the same time.
        max_time: int
            The maximum "time" an element can exist in each ring.
        dtype: numpy.dtype
            The type of each element in the ring buffer.
        """
        self.max_time = max_time
        self.buffer = []
        self.buffer_expire = []
        for _ in range(num_rings):
            self.buffer.append(numpy.zeros(0, dtype=dtype))
            self.buffer_expire.append(numpy.zeros(0, dtype=int))
        self.time = 0

    @property
    def filled_time(self):
        """Return the elapsed time, saturated at the expiration horizon."""
        return min(self.time, self.max_time)

    def num_elements(self):
        """Return the total number of elements held across all rings."""
        return sum([len(a) for a in self.buffer])

    @property
    def nbytes(self):
        """Return the memory used by the ring contents, in bytes."""
        return sum([a.nbytes for a in self.buffer])

    def discard_last(self, indices):
        """Discard the triggers added in the latest update"""
        for i in indices:
            self.buffer_expire[i] = self.buffer_expire[i][:-1]
            self.buffer[i] = self.buffer[i][:-1]

    def advance_time(self):
        """Advance the internal time increment by 1.

        Elements that are now too old are not removed here; they are
        dropped lazily the next time `data` is called for their ring.
        """
        self.time += 1

    def add(self, indices, values):
        """Add triggers in 'values' to the buffers indicated by the indices
        """
        for i, v in zip(indices, values):
            self.buffer[i] = numpy.append(self.buffer[i], v)
            self.buffer_expire[i] = numpy.append(self.buffer_expire[i], self.time)
        self.advance_time()

    def expire_vector(self, buffer_index):
        """Return the expiration vector of a given ring buffer """
        return self.buffer_expire[buffer_index]

    def data(self, buffer_index):
        """Return the data vector for a given ring buffer"""
        # Check for expired elements and discard if they exist
        expired = self.time - self.max_time
        exp = self.buffer_expire[buffer_index]

        # Insertion times are non-decreasing, so bisect for the first entry
        # that is still valid and drop everything before it. This also fixes
        # the case where *all* entries have expired (keep == len(exp)): the
        # previous linear scan fell off the end without truncating, leaving
        # stale triggers in the buffer.
        keep = numpy.searchsorted(exp, expired, side='left')
        if keep > 0:
            self.buffer_expire[buffer_index] = exp[keep:].copy()
            self.buffer[buffer_index] = self.buffer[buffer_index][keep:].copy()
        return self.buffer[buffer_index]
class CoincExpireBuffer(object):
    """Unordered dynamic sized buffer that handles
    multiple expiration vectors.
    """

    def __init__(self, expiration, ifos,
                 initial_size=2**20, dtype=numpy.float32):
        """
        Parameters
        ----------
        expiration: int
            The 'time' in arbitrary integer units to allow to pass before
            removing an element.
        ifos: list of strs
            List of strings to identify the multiple data expiration times.
        initial_size: int, optional
            The initial size of the buffer.
        dtype: numpy.dtype
            The dtype of each element of the buffer.
        """
        self.expiration = expiration
        self.buffer = numpy.zeros(initial_size, dtype=dtype)
        self.index = 0
        self.ifos = ifos

        # per-ifo clock and per-element insertion timestamps
        self.time = {ifo: 0 for ifo in self.ifos}
        self.timer = {ifo: numpy.zeros(initial_size, dtype=numpy.int32)
                      for ifo in self.ifos}

    def __len__(self):
        return self.index

    @property
    def nbytes(self):
        """Memory used by the element storage, in bytes."""
        return self.buffer.nbytes

    def increment(self, ifos):
        """Increment without adding triggers"""
        self.add([], [], ifos)

    def remove(self, num):
        """Remove the the last 'num' elements from the buffer"""
        self.index -= num

    def add(self, values, times, ifos):
        """Add values to the internal buffer

        Parameters
        ----------
        values: numpy.ndarray
            Array of elements to add to the internal buffer.
        times: dict of arrays
            The current time to use for each element being added.
        ifos: list of strs
            The set of timers to be incremented.
        """
        # advance the clock of every updated detector
        for ifo in ifos:
            self.time[ifo] += 1

        nvals = len(values)

        # grow the storage geometrically when it would overflow
        if self.index + nvals >= len(self.buffer):
            newlen = len(self.buffer) * 2
            for ifo in self.ifos:
                self.timer[ifo].resize(newlen)
            self.buffer.resize(newlen)

        self.buffer[self.index:self.index + nvals] = values
        if nvals > 0:
            for ifo in self.ifos:
                self.timer[ifo][self.index:self.index + nvals] = times[ifo]
        self.index += nvals

        # compact away entries older than the expiration horizon of any
        # updated detector
        keep = None
        for ifo in ifos:
            alive = self.timer[ifo][:self.index] >= self.time[ifo] - self.expiration
            keep = alive if keep is None else numpy.logical_and(keep, alive)

        nkeep = keep.sum()
        self.buffer[:nkeep] = self.buffer[:self.index][keep]
        for ifo in self.ifos:
            self.timer[ifo][:nkeep] = self.timer[ifo][:self.index][keep]
        self.index = nkeep

    def num_greater(self, value):
        """Return the number of elements larger than 'value'"""
        return (self.buffer[:self.index] > value).sum()

    @property
    def data(self):
        """Return the array of elements"""
        return self.buffer[:self.index]
class LiveCoincTimeslideBackgroundEstimator(object):
    """Rolling buffer background estimation."""

    def __init__(self, num_templates, analysis_block, background_statistic,
                 stat_files, ifos,
                 ifar_limit=100,
                 timeslide_interval=.035,
                 coinc_threshold=.002,
                 return_background=False):
        """
        Parameters
        ----------
        num_templates: int
            The size of the template bank
        analysis_block: int
            The number of seconds in each analysis segment
        background_statistic: str
            The name of the statistic to rank coincident events.
        stat_files: list of strs
            List of filenames that contain information used to construct
            various coincident statistics.
        ifos: list of strs
            List of ifo names that are being analyzed. At the moment this must
            be two items such as ['H1', 'L1'].
        ifar_limit: float
            The largest inverse false alarm rate in years that we would like to
            calculate.
        timeslide_interval: float
            The time in seconds between consecutive timeslide offsets.
        coinc_threshold: float
            Amount of time allowed to form a coincidence in addition to the
            time of flight in seconds.
        return_background: boolean
            If true, background triggers will also be included in the file
            output.
        """
        from . import stat
        self.num_templates = num_templates
        self.analysis_block = analysis_block

        # Only pass a valid stat file for this ifo pair
        for fname in stat_files:
            f = h5py.File(fname, 'r')
            ifos_set = set([f.attrs['ifo0'], f.attrs['ifo1']])
            f.close()
            if ifos_set == set(ifos):
                stat_files = [fname]
                logging.info('Setup ifos %s-%s with file %s and stat %s',
                             ifos[0], ifos[1], fname, background_statistic)

        self.stat_calculator = stat.get_statistic(background_statistic)(stat_files)

        self.timeslide_interval = timeslide_interval
        self.return_background = return_background

        self.ifos = ifos
        if len(self.ifos) != 2:
            raise ValueError("Only a two ifo analysis is supported at this time")

        # Buffer long enough that the accumulated background time can
        # resolve the requested inverse false alarm rate limit
        self.lookback_time = (ifar_limit * lal.YRJUL_SI * timeslide_interval) ** 0.5
        self.buffer_size = int(numpy.ceil(self.lookback_time / analysis_block))

        det0, det1 = Detector(ifos[0]), Detector(ifos[1])
        self.time_window = det0.light_travel_time_to_detector(det1) + coinc_threshold
        self.coincs = CoincExpireBuffer(self.buffer_size, self.ifos)

        self.singles = {}

    @classmethod
    def pick_best_coinc(cls, coinc_results):
        """Choose the best two-ifo coinc by ifar first, then statistic if needed.

        This function picks which of the available double-ifo coincs to use.
        It chooses the best (highest) ifar. The ranking statistic is used as
        a tie-breaker.
        A trials factor is applied if multiple types of coincs are possible
        at this time given the active ifos.

        Parameters
        ----------
        coinc_results: list of coinc result dicts
            Dictionary by detector pair of coinc result dicts.

        Returns
        -------
        best: coinc results dict
            If there is a coinc, this will contain the 'best' one. Otherwise
            it will return the provided dict.
        """
        mstat = 0
        mifar = 0
        mresult = None

        # record the trials factor from the possible coincs we could
        # maximize over
        trials = 0
        for result in coinc_results:
            # Check that a coinc was possible. See the 'add_singles' method
            # to see where this flag was added into the results dict
            if 'coinc_possible' in result:
                trials += 1

                # Check that a coinc exists
                if 'foreground/ifar' in result:
                    ifar = result['foreground/ifar']
                    stat = result['foreground/stat']
                    if ifar > mifar or (ifar == mifar and stat > mstat):
                        mifar = ifar
                        mstat = stat
                        mresult = result

        # apply trials factor for the best coinc
        if mresult:
            mresult['foreground/ifar'] = mifar / float(trials)
            logging.info('Found %s coinc with ifar %s',
                         mresult['foreground/type'],
                         mresult['foreground/ifar'])
            return mresult
        # If no coinc, just return one of the results dictionaries. They will
        # all contain the same results (i.e. single triggers) in this case.
        else:
            return coinc_results[0]

    @classmethod
    def from_cli(cls, args, num_templates, analysis_chunk, ifos):
        """Alternate constructor populated from parsed command-line args."""
        return cls(num_templates, analysis_chunk,
                   args.background_statistic,
                   args.background_statistic_files,
                   return_background=args.store_background,
                   ifar_limit=args.background_ifar_limit,
                   timeslide_interval=args.timeslide_interval,
                   ifos=ifos)

    @staticmethod
    def insert_args(parser):
        """Register this estimator's command-line options on *parser*."""
        from . import stat

        group = parser.add_argument_group('Coincident Background Estimation')
        group.add_argument('--background-statistic', default='newsnr',
                           choices=sorted(stat.statistic_dict.keys()),
                           help="Ranking statistic to use for candidate coincident events")
        group.add_argument('--background-statistic-files', nargs='+',
                           help="Files containing precalculated values to calculate ranking"
                                " statistic values", default=[])
        group.add_argument('--store-background', action='store_true',
                           help="Return background triggers with zerolag coincidences")
        group.add_argument('--background-ifar-limit', type=float,
                           help="The limit on inverse false alarm rate to calculate "
                                "background in years", default=100.0)
        group.add_argument('--timeslide-interval', type=float,
                           help="The interval between timeslides in seconds", default=0.1)
        group.add_argument('--ifar-remove-threshold', type=float,
                           help="NOT YET IMPLEMENTED", default=100.0)

    @property
    def background_time(self):
        """Return the amount of background time that the buffers contain"""
        time = 1.0 / self.timeslide_interval
        for ifo in self.singles:
            time *= self.singles[ifo].filled_time * self.analysis_block
        return time

    def save_state(self, filename):
        """Save the current state of the background buffers"""
        from six.moves import cPickle
        # NOTE(review): despite the parameter name, pickle.dump requires an
        # open binary file object, not a path string — confirm callers pass
        # a file handle here.
        cPickle.dump(self, filename)

    @staticmethod
    def restore_state(filename):
        """Restore state of the background buffers from a file"""
        from six.moves import cPickle
        # NOTE(review): as in save_state, this expects an open file object.
        return cPickle.load(filename)

    def ifar(self, coinc_stat):
        """Return the inverse false alarm rate (in years) that would be
        associated with the given coincident statistic value.
        """
        n = self.coincs.num_greater(coinc_stat)
        return self.background_time / lal.YRJUL_SI / (n + 1)

    def set_singles_buffer(self, results):
        """Create the singles buffer

        This creates the singles buffer for each ifo. The dtype is determined
        by a representative sample of the single triggers in the results.

        Parameters
        ----------
        results: dict of dict
            Dict indexed by ifo and then trigger column.
        """
        # Determine the dtype from a sample of the data.
        self.singles_dtype = []
        data = False
        for ifo in self.ifos:
            if ifo in results and results[ifo] is not False:
                data = results[ifo]
                break

        if data is False:
            return

        for key in data:
            self.singles_dtype.append((key, data[key].dtype))

        if 'stat' not in data:
            self.singles_dtype.append(('stat', self.stat_calculator.single_dtype))

        # Create a ring buffer for each template ifo combination
        for ifo in self.ifos:
            self.singles[ifo] = MultiRingBuffer(self.num_templates,
                                                self.buffer_size,
                                                self.singles_dtype)

    def _add_singles_to_buffer(self, results, ifos):
        """Add single detector triggers to the internal buffer

        Parameters
        ----------
        results: dict of arrays
            Dictionary of dictionaries indexed by ifo and keys such as 'snr',
            'chisq', etc. The specific format is determined by the
            LiveBatchMatchedFilter class.
        ifos: list of strs
            The detectors with triggers in this update.

        Returns
        -------
        updated_singles: dict of numpy.ndarrays
            Array of indices that have been just updated in the internal
            buffers of single detector triggers.
        """
        if len(self.singles.keys()) == 0:
            self.set_singles_buffer(results)

        # convert to single detector trigger values
        # FIXME Currently configured to use pycbc live output
        # where chisq is the reduced chisq and chisq_dof is the actual DOF
        logging.info("adding singles to the background estimate...")
        updated_indices = {}
        for ifo in ifos:
            trigs = results[ifo]

            # BUG FIX: was `len(trigs['snr'] > 0)`, which measures the length
            # of the elementwise comparison array rather than the trigger
            # count (and raises TypeError for plain lists).
            if len(trigs['snr']) > 0:
                trigsc = copy.copy(trigs)
                trigsc['chisq'] = trigs['chisq'] * trigs['chisq_dof']
                trigsc['chisq_dof'] = (trigs['chisq_dof'] + 2) / 2
                single_stat = self.stat_calculator.single(trigsc)
            else:
                single_stat = numpy.array([], ndmin=1,
                                          dtype=self.stat_calculator.single_dtype)
            trigs['stat'] = single_stat

            # add each single detector trigger to the and advance the buffer
            data = numpy.zeros(len(single_stat), dtype=self.singles_dtype)
            for key, value in trigs.items():
                data[key] = value

            self.singles[ifo].add(trigs['template_id'], data)
            updated_indices[ifo] = trigs['template_id']
        return updated_indices

    def _find_coincs(self, results, ifos):
        """Look for coincs within the set of single triggers

        Parameters
        ----------
        results: dict of arrays
            Dictionary of dictionaries indexed by ifo and keys such as 'snr',
            'chisq', etc. The specific format is determined by the
            LiveBatchMatchedFilter class.
        ifos: list of strs
            The detectors with new triggers in this update.

        Returns
        -------
        num_background: int
            The number of background coincidences just recorded.
        coinc_results: dict of arrays
            A dictionary of arrays containing the coincident results.
        """
        # for each single detector trigger find the allowed coincidences
        # Record which template and the index of the single trigger
        # that forms each coincident trigger
        cstat = [[]]
        offsets = []
        ctimes = {self.ifos[0]: [], self.ifos[1]: []}
        single_expire = {self.ifos[0]: [], self.ifos[1]: []}
        template_ids = [[]]
        trigger_ids = {self.ifos[0]: [[]], self.ifos[1]: [[]]}

        # Calculate all the permutations of coincident triggers for each
        # new single detector trigger collected
        for ifo in ifos:
            trigs = results[ifo]
            oifo = self.ifos[1] if self.ifos[0] == ifo else self.ifos[0]

            for i in range(len(trigs['end_time'])):
                trig_stat = trigs['stat'][i]
                trig_time = trigs['end_time'][i]
                template = trigs['template_id'][i]

                times = self.singles[oifo].data(template)['end_time']
                stats = self.singles[oifo].data(template)['stat']

                i1, _, slide = time_coincidence(times,
                                                numpy.array(trig_time, ndmin=1,
                                                            dtype=numpy.float64),
                                                self.time_window,
                                                self.timeslide_interval)
                trig_stat = numpy.resize(trig_stat, len(i1))
                c = self.stat_calculator.coinc(stats[i1], trig_stat,
                                               slide, self.timeslide_interval)
                offsets.append(slide)
                cstat.append(c)
                ctimes[oifo].append(times[i1])
                ctimes[ifo].append(numpy.zeros(len(c), dtype=numpy.float64))
                ctimes[ifo][-1].fill(trig_time)

                single_expire[oifo].append(self.singles[oifo].expire_vector(template)[i1])
                single_expire[ifo].append(numpy.zeros(len(c),
                                                      dtype=numpy.int32))
                single_expire[ifo][-1].fill(self.singles[ifo].time - 1)

                # save the template and trigger ids to keep association
                # to singles. The trigger was just added so it must be in
                # the last position; we mark this with -1 so the
                # slicing picks the right point
                template_ids.append(numpy.zeros(len(c)) + template)
                trigger_ids[oifo].append(i1)
                trigger_ids[ifo].append(numpy.zeros(len(c)) - 1)

        cstat = numpy.concatenate(cstat)
        template_ids = numpy.concatenate(template_ids).astype(numpy.int32)
        for ifo in ifos:
            trigger_ids[ifo] = numpy.concatenate(trigger_ids[ifo]).astype(numpy.int32)

        # cluster the triggers we've found
        # (both zerolag and non handled together)
        num_zerolag = 0
        num_background = 0

        logging.info('%s background and zerolag coincs', len(cstat))
        if len(cstat) > 0:
            offsets = numpy.concatenate(offsets)
            ctime0 = numpy.concatenate(ctimes[self.ifos[0]]).astype(numpy.float64)
            ctime1 = numpy.concatenate(ctimes[self.ifos[1]]).astype(numpy.float64)

            cidx = cluster_coincs(cstat, ctime0, ctime1, offsets,
                                  self.timeslide_interval,
                                  self.analysis_block)
            offsets = offsets[cidx]
            zerolag_idx = (offsets == 0)
            bkg_idx = (offsets != 0)

            for ifo in self.ifos:
                single_expire[ifo] = numpy.concatenate(single_expire[ifo])
                single_expire[ifo] = single_expire[ifo][cidx][bkg_idx]

            # only the time-slid (background) coincs feed the estimate
            self.coincs.add(cstat[cidx][bkg_idx], single_expire, ifos)
            num_zerolag = zerolag_idx.sum()
            num_background = bkg_idx.sum()
        elif len(ifos) > 0:
            self.coincs.increment(ifos)

        # Collect coinc results for saving
        coinc_results = {}
        # Save information about zerolag triggers
        if num_zerolag > 0:
            zerolag_results = {}
            idx = cidx[zerolag_idx][0]
            zerolag_cstat = cstat[cidx][zerolag_idx]
            zerolag_results['foreground/ifar'] = self.ifar(zerolag_cstat)
            zerolag_results['foreground/stat'] = zerolag_cstat
            template = template_ids[idx]

            for ifo in self.ifos:
                trig_id = trigger_ids[ifo][idx]
                single_data = self.singles[ifo].data(template)[trig_id]
                for key in single_data.dtype.names:
                    path = 'foreground/%s/%s' % (ifo, key)
                    zerolag_results[path] = single_data[key]

            zerolag_results['foreground/type'] = '-'.join(self.ifos)

            coinc_results.update(zerolag_results)

        # Save some summary statistics about the background
        coinc_results['background/time'] = numpy.array([self.background_time])
        coinc_results['background/count'] = len(self.coincs.data)

        # Save all the background triggers
        if self.return_background:
            coinc_results['background/stat'] = self.coincs.data

        return num_background, coinc_results

    def backout_last(self, updated_singles, num_coincs):
        """Remove the recently added singles and coincs

        Parameters
        ----------
        updated_singles: dict of numpy.ndarrays
            Array of indices that have been just updated in the internal
            buffers of single detector triggers.
        num_coincs: int
            The number of coincs that were just added to the internal buffer
            of coincident triggers
        """
        for ifo in updated_singles:
            self.singles[ifo].discard_last(updated_singles[ifo])
        self.coincs.remove(num_coincs)

    def add_singles(self, results):
        """Add singles to the background estimate and find candidates

        Parameters
        ----------
        results: dict of arrays
            Dictionary of dictionaries indexed by ifo and keys such as 'snr',
            'chisq', etc. The specific format is determined by the
            LiveBatchMatchedFilter class.

        Returns
        -------
        coinc_results: dict of arrays
            A dictionary of arrays containing the coincident results.
        """
        # Let's see how large everything is
        logging.info('BKG Coincs %s stored %s bytes',
                     len(self.coincs), self.coincs.nbytes)

        # If there are no results just return
        valid_ifos = [k for k in results.keys() if results[k] and k in self.ifos]
        if len(valid_ifos) == 0: return {}

        # Add single triggers to the internal buffer
        self._add_singles_to_buffer(results, ifos=valid_ifos)

        # Calculate zerolag and background coincidences
        _, coinc_results = self._find_coincs(results, ifos=valid_ifos)

        # record if a coinc is possible in this chunk
        if len(valid_ifos) == 2:
            coinc_results['coinc_possible'] = True

        return coinc_results
| gpl-3.0 |
CS-SI/QGIS | tests/src/python/test_qgsvectorlayer.py | 5 | 116398 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsVectorLayer.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Tim Sutton'
__date__ = '20/08/2012'
__copyright__ = 'Copyright 2012, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis # NOQA
import os
from qgis.PyQt.QtCore import QVariant, Qt
from qgis.PyQt.QtGui import QPainter
from qgis.PyQt.QtXml import QDomDocument
from qgis.core import (QgsWkbTypes,
QgsAction,
QgsDefaultValue,
QgsEditorWidgetSetup,
QgsVectorLayer,
QgsRectangle,
QgsFeature,
QgsFeatureRequest,
QgsGeometry,
QgsPointXY,
QgsField,
QgsFieldConstraints,
QgsFields,
QgsVectorLayerJoinInfo,
QgsSymbol,
QgsSingleSymbolRenderer,
QgsCoordinateReferenceSystem,
QgsVectorLayerCache,
QgsReadWriteContext,
QgsProject,
QgsUnitTypes,
QgsAggregateCalculator,
QgsPoint,
QgsExpressionContext,
QgsExpressionContextScope,
QgsExpressionContextUtils,
QgsLineSymbol,
QgsMapLayerStyle,
QgsMapLayerDependency,
QgsPalLayerSettings,
QgsVectorLayerSimpleLabeling,
QgsSingleCategoryDiagramRenderer,
QgsDiagramLayerSettings,
QgsTextFormat,
QgsVectorLayerSelectedFeatureSource,
QgsExpression,
NULL)
from qgis.gui import (QgsAttributeTableModel,
QgsGui
)
from qgis.testing import start_app, unittest
from featuresourcetestbase import FeatureSourceTestCase
from utilities import unitTestDataPath
start_app()  # initialise the QGIS application environment once for this test module
def createEmptyLayer():
    """Build a fieldless in-memory point layer containing no features."""
    vl = QgsVectorLayer("Point", "addfeat", "memory")
    assert vl.featureCount() == 0
    return vl
def createEmptyLayerWithFields():
    """Build an empty in-memory point layer with a text and an integer field."""
    vl = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer", "addfeat", "memory")
    assert vl.featureCount() == 0
    return vl
def createLayerWithOnePoint():
    """Build a two-field in-memory point layer pre-populated with one feature."""
    vl = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer",
                        "addfeat", "memory")
    feat = QgsFeature()
    feat.setAttributes(["test", 123])
    feat.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
    assert vl.dataProvider().addFeatures([feat])
    assert vl.featureCount() == 1
    return vl
def createLayerWithTwoPoints():
    """Build a two-field in-memory point layer pre-populated with two features."""
    vl = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer",
                        "addfeat", "memory")
    feats = []
    for attrs in (["test", 123], ["test2", 457]):
        feat = QgsFeature()
        feat.setAttributes(attrs)
        feat.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
        feats.append(feat)
    assert vl.dataProvider().addFeatures(feats)
    assert vl.featureCount() == 2
    return vl
def createLayerWithFivePoints():
    """Build a two-field in-memory point layer pre-populated with five features."""
    vl = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer",
                        "addfeat", "memory")
    rows = ((["test", 123], 100, 200),
            (["test2", 457], 200, 200),
            (["test2", 888], 300, 200),
            (["test3", -1], 400, 300),
            (["test4", 0], 0, 0))
    feats = []
    for attrs, x, y in rows:
        feat = QgsFeature()
        feat.setAttributes(attrs)
        feat.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(x, y)))
        feats.append(feat)
    assert vl.dataProvider().addFeatures(feats)
    assert vl.featureCount() == 5
    return vl
def createJoinLayer():
    """Build the three-field in-memory point layer used as a join target,
    pre-populated with four features."""
    joinLayer = QgsVectorLayer(
        "Point?field=x:string&field=y:integer&field=z:integer",
        "joinlayer", "memory")
    rows = ((["foo", 123, 321], (1, 1)),
            (["bar", 456, 654], (2, 2)),
            (["qar", 457, 111], (2, 2)),
            (["a", 458, 19], (2, 2)))
    feats = []
    for attrs, (x, y) in rows:
        feat = QgsFeature()
        feat.setAttributes(attrs)
        feat.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(x, y)))
        feats.append(feat)
    assert joinLayer.dataProvider().addFeatures(feats)
    assert joinLayer.featureCount() == 4
    return joinLayer
def dumpFeature(f):
    """Print a human-readable summary of a feature to stdout."""
    print("--- FEATURE DUMP ---")
    print("valid: %d | id: %d" % (f.isValid(), f.id()))
    geom = f.geometry()
    if geom:
        print("geometry wkb: %d" % geom.wkbType())
    else:
        print("no geometry")
    print("attrs: %s" % str(f.attributes()))
def formatAttributes(attrs):
    """Return the attribute sequence rendered as the repr of its string values."""
    return repr(list(map(str, attrs)))
def dumpEditBuffer(layer):
    """Print the layer's uncommitted added features and changed geometries.

    Prints "NO EDITING!" and returns if the layer has no active edit buffer.
    """
    editBuffer = layer.editBuffer()
    if not editBuffer:
        print("NO EDITING!")
        return
    print("ADDED:")
    for fid, f in editBuffer.addedFeatures().items():
        print("%d: %s | %s" % (
            f.id(), formatAttributes(f.attributes()),
            f.geometry().asWkt()))
    print("CHANGED GEOM:")
    # BUG FIX: the original printed the stale `f` left over from the previous
    # loop for every entry; report the actual changed fid and geometry.
    for fid, geom in editBuffer.changedGeometries().items():
        print("%d | %s" % (fid, geom.asWkt()))
class TestQgsVectorLayer(unittest.TestCase, FeatureSourceTestCase):
@classmethod
def getSource(cls):
vl = QgsVectorLayer(
'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&key=pk',
'test', 'memory')
assert (vl.isValid())
f1 = QgsFeature()
f1.setAttributes([5, -200, NULL, 'NuLl', '5'])
f1.setGeometry(QgsGeometry.fromWkt('Point (-71.123 78.23)'))
f2 = QgsFeature()
f2.setAttributes([3, 300, 'Pear', 'PEaR', '3'])
f3 = QgsFeature()
f3.setAttributes([1, 100, 'Orange', 'oranGe', '1'])
f3.setGeometry(QgsGeometry.fromWkt('Point (-70.332 66.33)'))
f4 = QgsFeature()
f4.setAttributes([2, 200, 'Apple', 'Apple', '2'])
f4.setGeometry(QgsGeometry.fromWkt('Point (-68.2 70.8)'))
f5 = QgsFeature()
f5.setAttributes([4, 400, 'Honey', 'Honey', '4'])
f5.setGeometry(QgsGeometry.fromWkt('Point (-65.32 78.3)'))
vl.dataProvider().addFeatures([f1, f2, f3, f4, f5])
return vl
    @classmethod
    def setUpClass(cls):
        """Run before all tests"""
        # Editor widgets must be registered before any attribute forms are used
        QgsGui.editorWidgetRegistry().initEditors()

        # Create test layer for FeatureSourceTestCase
        cls.source = cls.getSource()
    def testGetFeaturesSubsetAttributes2(self):
        """Override and skip this QgsFeatureSource test.

        We are using a memory provider, and it's actually more efficient for
        the memory provider to return its features as direct copies (due to
        implicit sharing of QgsFeature).
        """
        pass
    def testGetFeaturesNoGeometry(self):
        """Override and skip this QgsFeatureSource test.

        We are using a memory provider, and it's actually more efficient for
        the memory provider to return its features as direct copies (due to
        implicit sharing of QgsFeature).
        """
        pass
def test_FeatureCount(self):
myPath = os.path.join(unitTestDataPath(), 'lines.shp')
myLayer = QgsVectorLayer(myPath, 'Lines', 'ogr')
myCount = myLayer.featureCount()
self.assertEqual(myCount, 6)
    # ADD FEATURE

    def test_AddFeature(self):
        """Adding a feature must require edit mode, be undoable/redoable and
        only reach the data provider on commit."""
        layer = createEmptyLayerWithFields()
        feat = QgsFeature(layer.fields())
        feat.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 2)))

        def checkAfter():
            # one (possibly uncommitted) feature visible through the layer
            self.assertEqual(layer.featureCount(), 1)

            # check select+nextFeature
            f = next(layer.getFeatures())
            self.assertEqual(f.geometry().asPoint(), QgsPointXY(1, 2))

            # check feature at id
            f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
            self.assertEqual(f2.geometry().asPoint(), QgsPointXY(1, 2))

        def checkBefore():
            self.assertEqual(layer.featureCount(), 0)

            # check select+nextFeature
            with self.assertRaises(StopIteration):
                next(layer.getFeatures())

        checkBefore()

        # try to add feature without editing mode
        self.assertFalse(layer.addFeature(feat))

        # add feature
        layer.startEditing()

        # try adding feature with incorrect number of fields
        bad_feature = QgsFeature()
        self.assertFalse(layer.addFeature(bad_feature))

        # add good feature
        self.assertTrue(layer.addFeature(feat))

        checkAfter()
        # the provider is untouched until commitChanges()
        self.assertEqual(layer.dataProvider().featureCount(), 0)

        # now try undo/redo
        layer.undoStack().undo()
        checkBefore()
        layer.undoStack().redo()
        checkAfter()

        self.assertTrue(layer.commitChanges())

        checkAfter()
        self.assertEqual(layer.dataProvider().featureCount(), 1)
    # ADD FEATURES

    def test_AddFeatures(self):
        """Batch-adding features must require edit mode, be undoable feature
        by feature and only reach the data provider on commit."""
        layer = createEmptyLayerWithFields()
        feat1 = QgsFeature(layer.fields())
        feat1.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 2)))
        feat2 = QgsFeature(layer.fields())
        feat2.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(11, 12)))

        def checkAfter():
            self.assertEqual(layer.featureCount(), 2)

            # check select+nextFeature
            it = layer.getFeatures()
            f1 = next(it)
            self.assertEqual(f1.geometry().asPoint(), QgsPointXY(1, 2))
            f2 = next(it)
            self.assertEqual(f2.geometry().asPoint(), QgsPointXY(11, 12))

            # check feature at id
            f1_1 = next(layer.getFeatures(QgsFeatureRequest(f1.id())))
            self.assertEqual(f1_1.geometry().asPoint(), QgsPointXY(1, 2))
            f2_1 = next(layer.getFeatures(QgsFeatureRequest(f2.id())))
            self.assertEqual(f2_1.geometry().asPoint(), QgsPointXY(11, 12))

        def checkBefore():
            self.assertEqual(layer.featureCount(), 0)

            # check select+nextFeature
            with self.assertRaises(StopIteration):
                next(layer.getFeatures())

        checkBefore()

        # try to add feature without editing mode
        self.assertFalse(layer.addFeatures([feat1, feat2]))

        # add feature
        layer.startEditing()

        # try adding feature with incorrect number of fields
        bad_feature = QgsFeature()
        self.assertFalse(layer.addFeatures([bad_feature]))

        # add good features
        self.assertTrue(layer.addFeatures([feat1, feat2]))

        checkAfter()
        # nothing reaches the provider before commitChanges()
        self.assertEqual(layer.dataProvider().featureCount(), 0)

        # now try undo/redo: each added feature is its own undo command
        layer.undoStack().undo()
        layer.undoStack().undo()
        checkBefore()
        layer.undoStack().redo()
        layer.undoStack().redo()
        checkAfter()

        self.assertTrue(layer.commitChanges())

        checkAfter()
        self.assertEqual(layer.dataProvider().featureCount(), 2)
    # DELETE FEATURE

    def test_DeleteFeature(self):
        """Deleting a feature must require edit mode, be undoable/redoable and
        only reach the data provider on commit."""
        layer = createLayerWithOnePoint()
        fid = 1

        def checkAfter():
            self.assertEqual(layer.featureCount(), 0)

            # check select+nextFeature
            with self.assertRaises(StopIteration):
                next(layer.getFeatures())

            # check feature at id
            with self.assertRaises(StopIteration):
                next(layer.getFeatures(QgsFeatureRequest(fid)))

        def checkBefore():
            self.assertEqual(layer.featureCount(), 1)

            # check select+nextFeature
            fi = layer.getFeatures()
            f = next(fi)
            self.assertEqual(f.geometry().asPoint(), QgsPointXY(100, 200))
            with self.assertRaises(StopIteration):
                next(fi)

            # check feature at id
            f2 = next(layer.getFeatures(QgsFeatureRequest(fid)))
            self.assertEqual(f2.id(), fid)

        checkBefore()

        # try to delete feature without editing mode
        self.assertFalse(layer.deleteFeature(fid))

        # delete feature
        layer.startEditing()
        self.assertTrue(layer.deleteFeature(fid))

        checkAfter()

        # make sure calling it twice does not work
        self.assertFalse(layer.deleteFeature(fid))

        # now try undo/redo
        layer.undoStack().undo()
        checkBefore()
        layer.undoStack().redo()
        checkAfter()

        # the provider still holds the feature until the deletion is committed
        self.assertEqual(layer.dataProvider().featureCount(), 1)

        self.assertTrue(layer.commitChanges())

        checkAfter()
        self.assertEqual(layer.dataProvider().featureCount(), 0)
def test_DeleteFeatureAfterAddFeature(self):
    """Add then delete the same (uncommitted) feature; the net result must
    be an empty layer, and undo/redo must step through both operations."""
    layer = createEmptyLayer()
    feat = QgsFeature()
    feat.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 2)))

    def checkBefore():
        self.assertEqual(layer.featureCount(), 0)
        # check select+nextFeature
        with self.assertRaises(StopIteration):
            next(layer.getFeatures())

    def checkAfter1():
        # state after the add, before the delete
        self.assertEqual(layer.featureCount(), 1)

    def checkAfter2():
        checkBefore()  # should be the same state: no features

    checkBefore()

    # add feature
    layer.startEditing()
    self.assertTrue(layer.addFeature(feat))
    checkAfter1()
    fid = feat.id()  # negative buffer id assigned by addFeature
    self.assertTrue(layer.deleteFeature(fid))
    checkAfter2()

    # now try undo/redo
    layer.undoStack().undo()
    checkAfter1()
    layer.undoStack().undo()
    checkBefore()
    layer.undoStack().redo()
    checkAfter1()
    layer.undoStack().redo()
    checkAfter2()

    self.assertTrue(layer.commitChanges())
    checkAfter2()

    self.assertEqual(layer.dataProvider().featureCount(), 0)
def test_DeleteJoinedFeature(self):
    """Deleting a feature cascades into an editable joined layer only when
    the join has cascaded delete enabled."""
    joinLayer = createJoinLayer()
    joinLayer2 = createJoinLayer()
    QgsProject.instance().addMapLayers([joinLayer, joinLayer2])

    layer = createLayerWithOnePoint()

    # editable join WITH cascaded delete
    join = QgsVectorLayerJoinInfo()
    join.setTargetFieldName("fldint")
    join.setJoinLayer(joinLayer)
    join.setJoinFieldName("y")
    join.setUsingMemoryCache(True)
    join.setEditable(True)
    join.setCascadedDelete(True)
    layer.addJoin(join)

    # editable join WITHOUT cascaded delete
    join2 = QgsVectorLayerJoinInfo()
    join2.setTargetFieldName("fldint")
    join2.setJoinLayer(joinLayer2)
    join2.setJoinFieldName("y")
    join2.setUsingMemoryCache(True)
    join2.setPrefix("custom-prefix_")
    join2.setEditable(True)
    join2.setCascadedDelete(False)
    layer.addJoin(join2)

    # check number of features
    self.assertEqual(layer.featureCount(), 1)
    self.assertEqual(joinLayer.featureCount(), 4)
    self.assertEqual(joinLayer2.featureCount(), 4)

    # delete a feature which is also in joined layers
    layer.startEditing()
    joinLayer.startEditing()
    joinLayer2.startEditing()

    # renamed from 'filter' to avoid shadowing the builtin
    exp_filter = QgsExpression.createFieldEqualityExpression('fldint', '123')
    feature = next(layer.getFeatures(QgsFeatureRequest().setFilterExpression(exp_filter)))
    layer.deleteFeature(feature.id())

    # check number of features
    self.assertEqual(layer.featureCount(), 0)
    self.assertEqual(joinLayer.featureCount(), 3)  # deleteCascade activated
    self.assertEqual(joinLayer2.featureCount(), 4)  # deleteCascade deactivated
# CHANGE ATTRIBUTE
def test_ChangeAttribute(self):
    """Change an attribute value in edit mode; verify undo/redo/commit."""
    layer = createLayerWithOnePoint()
    fid = 1  # id of the only feature created by the helper

    def checkAfter():
        # check select+nextFeature
        fi = layer.getFeatures()
        f = next(fi)
        self.assertEqual(f[0], "good")
        # check feature at id
        f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
        self.assertEqual(f2[0], "good")

    def checkBefore():
        # check select+nextFeature
        f = next(layer.getFeatures())
        self.assertEqual(f[0], "test")

    checkBefore()

    # try to change attribute without editing mode
    self.assertFalse(layer.changeAttributeValue(fid, 0, "good"))

    # change attribute
    layer.startEditing()
    self.assertTrue(layer.changeAttributeValue(fid, 0, "good"))

    checkAfter()

    # now try undo/redo
    layer.undoStack().undo()
    checkBefore()
    layer.undoStack().redo()
    checkAfter()

    self.assertTrue(layer.commitChanges())
    checkAfter()
def test_ChangeAttributeAfterAddFeature(self):
    """Add a feature and change one of its attributes inside a single edit
    command; one undo must revert both operations together."""
    layer = createLayerWithOnePoint()
    layer.dataProvider().deleteFeatures([1])  # no need for this feature
    newF = QgsFeature()
    newF.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 1)))
    newF.setAttributes(["hello", 42])

    def checkAfter():
        self.assertEqual(len(layer.fields()), 2)
        # check feature
        fi = layer.getFeatures()
        f = next(fi)
        attrs = f.attributes()
        self.assertEqual(len(attrs), 2)
        self.assertEqual(attrs[0], "hello")
        self.assertEqual(attrs[1], 12)
        with self.assertRaises(StopIteration):
            next(fi)
        # check feature at id
        f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
        self.assertEqual(f2[0], "hello")
        self.assertEqual(f2[1], 12)

    def checkBefore():
        # check feature
        with self.assertRaises(StopIteration):
            next(layer.getFeatures())

    checkBefore()

    layer.startEditing()
    # both edits grouped into one undo command
    layer.beginEditCommand("AddFeature + ChangeAttribute")
    self.assertTrue(layer.addFeature(newF))
    self.assertTrue(layer.changeAttributeValue(newF.id(), 1, 12))
    layer.endEditCommand()

    checkAfter()

    # now try undo/redo
    layer.undoStack().undo()
    checkBefore()
    layer.undoStack().redo()
    checkAfter()

    self.assertTrue(layer.commitChanges())
    checkAfter()

    # print "COMMIT ERRORS:"
    # for item in list(layer.commitErrors()): print item
# CHANGE GEOMETRY
def test_ChangeGeometry(self):
    """Change a feature's geometry in edit mode; verify undo/redo/commit."""
    layer = createLayerWithOnePoint()
    fid = 1  # id of the only feature created by the helper

    def checkAfter():
        # check select+nextFeature
        f = next(layer.getFeatures())
        self.assertEqual(f.geometry().asPoint(), QgsPointXY(300, 400))
        # check feature at id
        f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
        self.assertEqual(f2.geometry().asPoint(), QgsPointXY(300, 400))

    def checkBefore():
        # check select+nextFeature
        f = next(layer.getFeatures())
        self.assertEqual(f.geometry().asPoint(), QgsPointXY(100, 200))

    # verify the initial state first, consistently with the sibling tests
    checkBefore()
    # try to change geometry without editing mode - must fail and leave
    # the layer unchanged
    self.assertFalse(layer.changeGeometry(fid, QgsGeometry.fromPointXY(QgsPointXY(300, 400))))
    checkBefore()

    # change geometry
    layer.startEditing()
    layer.beginEditCommand("ChangeGeometry")
    self.assertTrue(layer.changeGeometry(fid, QgsGeometry.fromPointXY(QgsPointXY(300, 400))))
    layer.endEditCommand()

    checkAfter()

    # now try undo/redo
    layer.undoStack().undo()
    checkBefore()
    layer.undoStack().redo()
    checkAfter()

    self.assertTrue(layer.commitChanges())
    checkAfter()
def test_ChangeGeometryAfterChangeAttribute(self):
    """Change attribute and geometry of one feature inside a single edit
    command; one undo must revert both operations together."""
    layer = createLayerWithOnePoint()
    fid = 1  # id of the only feature created by the helper

    def checkAfter():
        # check select+nextFeature
        f = next(layer.getFeatures())
        self.assertEqual(f.geometry().asPoint(), QgsPointXY(300, 400))
        self.assertEqual(f[0], "changed")
        # check feature at id
        f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
        self.assertEqual(f2.geometry().asPoint(), QgsPointXY(300, 400))
        self.assertEqual(f2[0], "changed")

    def checkBefore():
        # check select+nextFeature
        f = next(layer.getFeatures())
        self.assertEqual(f.geometry().asPoint(), QgsPointXY(100, 200))
        self.assertEqual(f[0], "test")

    checkBefore()

    # change geometry
    layer.startEditing()
    # both edits grouped into one undo command
    layer.beginEditCommand("ChangeGeometry + ChangeAttribute")
    self.assertTrue(layer.changeAttributeValue(fid, 0, "changed"))
    self.assertTrue(layer.changeGeometry(fid, QgsGeometry.fromPointXY(QgsPointXY(300, 400))))
    layer.endEditCommand()

    checkAfter()

    # now try undo/redo
    layer.undoStack().undo()
    checkBefore()
    layer.undoStack().redo()
    checkAfter()

    self.assertTrue(layer.commitChanges())
    checkAfter()
def test_ChangeGeometryAfterAddFeature(self):
    """Add a feature then change its geometry inside a single edit
    command; one undo must revert both operations together."""
    layer = createLayerWithOnePoint()
    layer.dataProvider().deleteFeatures([1])  # no need for this feature
    newF = QgsFeature()
    newF.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 1)))
    newF.setAttributes(["hello", 42])

    def checkAfter():
        self.assertEqual(len(layer.fields()), 2)
        # check feature
        f = next(layer.getFeatures())
        self.assertEqual(f.geometry().asPoint(), QgsPointXY(2, 2))
        # check feature at id
        f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
        self.assertEqual(f2.geometry().asPoint(), QgsPointXY(2, 2))

    def checkBefore():
        # check feature
        with self.assertRaises(StopIteration):
            next(layer.getFeatures())

    checkBefore()

    layer.startEditing()
    # both edits grouped into one undo command
    layer.beginEditCommand("AddFeature+ChangeGeometry")
    self.assertTrue(layer.addFeature(newF))
    self.assertTrue(layer.changeGeometry(newF.id(), QgsGeometry.fromPointXY(QgsPointXY(2, 2))))
    layer.endEditCommand()

    checkAfter()

    # now try undo/redo
    layer.undoStack().undo()
    checkBefore()
    layer.undoStack().redo()
    checkAfter()

    self.assertTrue(layer.commitChanges())
    checkAfter()

    # print "COMMIT ERRORS:"
    # for item in list(layer.commitErrors()): print item
# updateFeature
def testUpdateFeature(self):
    """updateFeature(): rejects calls outside edit mode and unknown ids;
    updates attributes and geometry, including adding/removing geometry."""
    layer = createLayerWithFivePoints()
    features = [f for f in layer.getFeatures()]

    # try to change feature without editing mode
    self.assertFalse(layer.updateFeature(features[0]))

    layer.startEditing()

    # no matching feature
    f = QgsFeature(1123)
    self.assertFalse(layer.updateFeature(f))

    # change geometry and attributes
    f = features[0]
    f.setAttributes(['new', 321])
    f.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(-200, -200)))
    self.assertTrue(layer.updateFeature(f))

    new_feature = next(layer.getFeatures(QgsFeatureRequest(f.id())))
    self.assertEqual(new_feature.attributes(), ['new', 321])
    self.assertEqual(new_feature.geometry().asPoint(), QgsPointXY(-200, -200))

    # add feature with no geometry
    f6 = QgsFeature()
    f6.setAttributes(["test6", 555])
    self.assertTrue(layer.dataProvider().addFeatures([f6]))
    # re-fetch so the new geometry-less feature is included
    features = [f for f in layer.getFeatures()]

    # update feature with no geometry -> have geometry
    f = features[-1]
    f.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(-350, -250)))
    self.assertTrue(layer.updateFeature(f))
    new_feature = next(layer.getFeatures(QgsFeatureRequest(f.id())))
    self.assertEqual(new_feature.attributes(), ['test6', 555])
    self.assertTrue(new_feature.hasGeometry())
    self.assertEqual(new_feature.geometry().asPoint(), QgsPointXY(-350, -250))

    # update feature from geometry -> no geometry
    f = features[1]
    f.clearGeometry()
    self.assertTrue(layer.updateFeature(f))
    new_feature = next(layer.getFeatures(QgsFeatureRequest(f.id())))
    self.assertEqual(new_feature.attributes(), ['test2', 457])
    self.assertFalse(new_feature.hasGeometry())
# ADD ATTRIBUTE
def test_AddAttribute(self):
    """Add a new field in edit mode; new attribute is NULL on existing
    features; verify undo/redo/commit."""
    layer = createLayerWithOnePoint()
    fld1 = QgsField("fld1", QVariant.Int, "integer")
    # fld2 = QgsField("fld2", QVariant.Int, "integer")

    def checkBefore():
        # check fields
        flds = layer.fields()
        self.assertEqual(len(flds), 2)
        self.assertEqual(flds[0].name(), "fldtxt")
        self.assertEqual(flds[1].name(), "fldint")
        # check feature
        f = next(layer.getFeatures())
        attrs = f.attributes()
        self.assertEqual(len(attrs), 2)
        self.assertEqual(attrs[0], "test")
        self.assertEqual(attrs[1], 123)

    def checkAfter():
        # check fields
        flds = layer.fields()
        self.assertEqual(len(flds), 3)
        self.assertEqual(flds[0].name(), "fldtxt")
        self.assertEqual(flds[1].name(), "fldint")
        self.assertEqual(flds[2].name(), "fld1")
        # check feature: new attribute must be NULL (None) on existing rows
        f = next(layer.getFeatures())
        attrs = f.attributes()
        self.assertEqual(len(attrs), 3)
        self.assertEqual(attrs[0], "test")
        self.assertEqual(attrs[1], 123)
        self.assertTrue(attrs[2] is None)
        # check feature at id
        f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
        self.assertEqual(f2[0], "test")
        self.assertEqual(f2[1], 123)
        self.assertTrue(f2[2] is None)

    # for nt in layer.dataProvider().nativeTypes():
    #    print (nt.mTypeDesc, nt.mTypeName, nt.mType, nt.mMinLen,
    #           nt.mMaxLen, nt.mMinPrec, nt.mMaxPrec)
    self.assertTrue(layer.dataProvider().supportedType(fld1))

    # without editing mode
    self.assertFalse(layer.addAttribute(fld1))

    layer.startEditing()

    checkBefore()

    self.assertTrue(layer.addAttribute(fld1))
    checkAfter()

    # now try undo/redo
    layer.undoStack().undo()
    checkBefore()
    layer.undoStack().redo()
    checkAfter()

    layer.commitChanges()
    checkAfter()
def test_AddAttributeAfterAddFeature(self):
    """Add a feature and then a new field inside a single edit command;
    one undo must revert both operations together."""
    layer = createLayerWithOnePoint()
    layer.dataProvider().deleteFeatures([1])  # no need for this feature
    newF = QgsFeature()
    newF.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 1)))
    newF.setAttributes(["hello", 42])
    fld1 = QgsField("fld1", QVariant.Int, "integer")

    def checkBefore():
        self.assertEqual(len(layer.fields()), 2)
        # check feature
        with self.assertRaises(StopIteration):
            next(layer.getFeatures())

    def checkAfter():
        self.assertEqual(len(layer.fields()), 3)
        # check feature: new attribute must be NULL (None)
        f = next(layer.getFeatures())
        attrs = f.attributes()
        self.assertEqual(len(attrs), 3)
        self.assertEqual(attrs[0], "hello")
        self.assertEqual(attrs[1], 42)
        self.assertTrue(attrs[2] is None)
        # check feature at id
        f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
        self.assertEqual(f2[0], "hello")
        self.assertEqual(f2[1], 42)
        self.assertTrue(f2[2] is None)

    layer.startEditing()

    checkBefore()

    # both edits grouped into one undo command
    layer.beginEditCommand("AddFeature + AddAttribute")
    self.assertTrue(layer.addFeature(newF))
    self.assertTrue(layer.addAttribute(fld1))
    layer.endEditCommand()

    checkAfter()

    # now try undo/redo
    layer.undoStack().undo()
    checkBefore()
    layer.undoStack().redo()
    checkAfter()

    layer.commitChanges()
    checkAfter()

    # print "COMMIT ERRORS:"
    # for item in list(layer.commitErrors()): print item
def test_AddAttributeAfterChangeValue(self):
    """Placeholder: combination deemed uninteresting by the original author."""
    pass  # not interesting to test...?
def test_AddAttributeAfterDeleteAttribute(self):
    """Placeholder: combination not covered yet (TODO per original author)."""
    pass  # maybe it would be good to test
# DELETE ATTRIBUTE
def test_DeleteAttribute(self):
    """Delete fields one by one in edit mode; remaining fields/attributes
    shift left; verify undo/redo through both deletions, then commit."""
    layer = createLayerWithOnePoint()
    layer.dataProvider().addAttributes(
        [QgsField("flddouble", QVariant.Double, "double")])
    layer.dataProvider().changeAttributeValues(
        {1: {2: 5.5}})

    # without editing mode
    self.assertFalse(layer.deleteAttribute(0))

    def checkBefore():
        flds = layer.fields()
        self.assertEqual(len(flds), 3)
        self.assertEqual(flds[0].name(), "fldtxt")
        self.assertEqual(flds[1].name(), "fldint")
        self.assertEqual(flds[2].name(), "flddouble")

        f = next(layer.getFeatures())
        attrs = f.attributes()
        self.assertEqual(len(attrs), 3)
        self.assertEqual(attrs[0], "test")
        self.assertEqual(attrs[1], 123)
        self.assertEqual(attrs[2], 5.5)

    layer.startEditing()

    checkBefore()

    self.assertTrue(layer.deleteAttribute(0))

    def checkAfterOneDelete():
        flds = layer.fields()
        # for fld in flds: print "FLD", fld.name()
        self.assertEqual(len(flds), 2)
        self.assertEqual(flds[0].name(), "fldint")
        self.assertEqual(flds[1].name(), "flddouble")
        self.assertEqual(layer.attributeList(), [0, 1])

        f = next(layer.getFeatures())
        attrs = f.attributes()
        self.assertEqual(len(attrs), 2)
        self.assertEqual(attrs[0], 123)
        self.assertEqual(attrs[1], 5.5)

    checkAfterOneDelete()

    # delete the new first attribute ("fldint", index 0 after the shift)
    self.assertTrue(layer.deleteAttribute(0))

    def checkAfterTwoDeletes():
        self.assertEqual(layer.attributeList(), [0])
        flds = layer.fields()
        # for fld in flds: print "FLD", fld.name()
        self.assertEqual(len(flds), 1)
        self.assertEqual(flds[0].name(), "flddouble")

        f = next(layer.getFeatures())
        attrs = f.attributes()
        self.assertEqual(len(attrs), 1)
        self.assertEqual(attrs[0], 5.5)
        # check feature at id
        f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
        self.assertEqual(len(f2.attributes()), 1)
        self.assertEqual(f2[0], 5.5)

    checkAfterTwoDeletes()

    layer.undoStack().undo()
    checkAfterOneDelete()
    layer.undoStack().undo()
    checkBefore()
    layer.undoStack().redo()
    checkAfterOneDelete()
    layer.undoStack().redo()
    checkAfterTwoDeletes()

    self.assertTrue(layer.commitChanges())  # COMMIT!
    checkAfterTwoDeletes()
def test_DeleteAttributeAfterAddAttribute(self):
    """Add then delete the same (uncommitted) field in one edit command;
    the layer must look unchanged at every checkpoint."""
    layer = createLayerWithOnePoint()
    fld1 = QgsField("fld1", QVariant.Int, "integer")

    def checkAfter():  # layer should be unchanged
        flds = layer.fields()
        self.assertEqual(len(flds), 2)
        self.assertEqual(flds[0].name(), "fldtxt")
        self.assertEqual(flds[1].name(), "fldint")

        # check feature
        f = next(layer.getFeatures())
        attrs = f.attributes()
        self.assertEqual(len(attrs), 2)
        self.assertEqual(attrs[0], "test")
        self.assertEqual(attrs[1], 123)

        # check feature at id
        f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
        self.assertEqual(len(f2.attributes()), 2)
        self.assertEqual(f2[0], "test")
        self.assertEqual(f2[1], 123)

    checkAfter()

    layer.startEditing()

    # net no-op: the added field (index 2) is deleted again
    layer.beginEditCommand("AddAttribute + DeleteAttribute")
    self.assertTrue(layer.addAttribute(fld1))
    self.assertTrue(layer.deleteAttribute(2))
    layer.endEditCommand()

    checkAfter()

    # now try undo/redo
    layer.undoStack().undo()
    checkAfter()
    layer.undoStack().redo()
    checkAfter()

    layer.commitChanges()
    checkAfter()
def test_DeleteAttributeAfterAddFeature(self):
    """Add a feature, then delete a field; the uncommitted feature must
    lose the corresponding attribute; verify undo/redo through both steps."""
    layer = createLayerWithOnePoint()
    layer.dataProvider().deleteFeatures([1])  # no need for this feature
    newF = QgsFeature()
    newF.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(1, 1)))
    newF.setAttributes(["hello", 42])

    def checkBefore():
        self.assertEqual(len(layer.fields()), 2)
        # check feature
        with self.assertRaises(StopIteration):
            next(layer.getFeatures())

    def checkAfter1():
        # state after the add, before the field deletion
        self.assertEqual(len(layer.fields()), 2)
        # check feature
        f = next(layer.getFeatures())
        attrs = f.attributes()
        self.assertEqual(len(attrs), 2)
        self.assertEqual(attrs[0], "hello")
        self.assertEqual(attrs[1], 42)

    def checkAfter2():
        # state after field 0 was deleted: only the int attribute remains
        self.assertEqual(len(layer.fields()), 1)
        # check feature
        f = next(layer.getFeatures())
        attrs = f.attributes()
        self.assertEqual(len(attrs), 1)
        self.assertEqual(attrs[0], 42)

    layer.startEditing()

    checkBefore()

    layer.addFeature(newF)
    checkAfter1()
    layer.deleteAttribute(0)
    checkAfter2()

    # now try undo/redo
    layer.undoStack().undo()
    checkAfter1()
    layer.undoStack().undo()
    checkBefore()
    layer.undoStack().redo()
    checkAfter1()
    layer.undoStack().redo()
    checkAfter2()

    layer.commitChanges()
    checkAfter2()
def test_DeleteAttributeAfterChangeValue(self):
    """Change an attribute value, then delete that field; verify undo/redo
    steps through both states independently."""
    layer = createLayerWithOnePoint()

    def checkBefore():
        # check feature
        f = next(layer.getFeatures())
        attrs = f.attributes()
        self.assertEqual(len(attrs), 2)
        self.assertEqual(attrs[0], "test")
        self.assertEqual(attrs[1], 123)

    def checkAfter1():
        # state after the value change, before the field deletion
        f = next(layer.getFeatures())
        attrs = f.attributes()
        self.assertEqual(len(attrs), 2)
        self.assertEqual(attrs[0], "changed")
        self.assertEqual(attrs[1], 123)

    def checkAfter2():
        # state after field 0 was deleted
        f = next(layer.getFeatures())
        attrs = f.attributes()
        self.assertEqual(len(attrs), 1)
        self.assertEqual(attrs[0], 123)

    layer.startEditing()

    checkBefore()

    self.assertTrue(layer.changeAttributeValue(1, 0, "changed"))
    checkAfter1()
    self.assertTrue(layer.deleteAttribute(0))
    checkAfter2()

    # now try undo/redo
    layer.undoStack().undo()
    checkAfter1()
    layer.undoStack().undo()
    checkBefore()
    layer.undoStack().redo()
    checkAfter1()
    layer.undoStack().redo()
    checkAfter2()

    layer.commitChanges()
    checkAfter2()
# RENAME ATTRIBUTE
def test_RenameAttribute(self):
    """renameAttribute(): rejected outside edit mode, for invalid indices
    and duplicate names; renames are individually undoable/redoable."""
    layer = createLayerWithOnePoint()

    # without editing mode
    self.assertFalse(layer.renameAttribute(0, 'renamed'))

    def checkFieldNames(names):
        # field names must match both on the layer and on fetched features
        flds = layer.fields()
        f = next(layer.getFeatures())
        self.assertEqual(flds.count(), len(names))
        self.assertEqual(f.fields().count(), len(names))

        for idx, expected_name in enumerate(names):
            self.assertEqual(flds[idx].name(), expected_name)
            self.assertEqual(f.fields().at(idx).name(), expected_name)

    layer.startEditing()

    checkFieldNames(['fldtxt', 'fldint'])

    # invalid indices and duplicate names must be rejected
    self.assertFalse(layer.renameAttribute(-1, 'fldtxt2'))
    self.assertFalse(layer.renameAttribute(10, 'fldtxt2'))
    self.assertFalse(layer.renameAttribute(0, 'fldint'))  # duplicate name

    self.assertTrue(layer.renameAttribute(0, 'fldtxt2'))
    checkFieldNames(['fldtxt2', 'fldint'])

    layer.undoStack().undo()
    checkFieldNames(['fldtxt', 'fldint'])
    layer.undoStack().redo()
    checkFieldNames(['fldtxt2', 'fldint'])

    # change two fields
    self.assertTrue(layer.renameAttribute(1, 'fldint2'))
    checkFieldNames(['fldtxt2', 'fldint2'])
    layer.undoStack().undo()
    checkFieldNames(['fldtxt2', 'fldint'])
    layer.undoStack().undo()
    checkFieldNames(['fldtxt', 'fldint'])
    layer.undoStack().redo()
    checkFieldNames(['fldtxt2', 'fldint'])
    layer.undoStack().redo()
    checkFieldNames(['fldtxt2', 'fldint2'])

    # two renames of the same field stack as two separate undo entries
    self.assertTrue(layer.renameAttribute(0, 'fldtxt3'))
    checkFieldNames(['fldtxt3', 'fldint2'])
    self.assertTrue(layer.renameAttribute(0, 'fldtxt4'))
    checkFieldNames(['fldtxt4', 'fldint2'])
    layer.undoStack().undo()
    checkFieldNames(['fldtxt3', 'fldint2'])
    layer.undoStack().undo()
    checkFieldNames(['fldtxt2', 'fldint2'])
    layer.undoStack().redo()
    checkFieldNames(['fldtxt3', 'fldint2'])
    layer.undoStack().redo()
    checkFieldNames(['fldtxt4', 'fldint2'])
def test_RenameAttributeAfterAdd(self):
    """Interleave field renames with field additions; the full undo/redo
    ladder must walk every intermediate field-name state."""
    layer = createLayerWithOnePoint()

    def checkFieldNames(names):
        # field names must match both on the layer and on fetched features
        flds = layer.fields()
        f = next(layer.getFeatures())
        self.assertEqual(flds.count(), len(names))
        self.assertEqual(f.fields().count(), len(names))

        for idx, expected_name in enumerate(names):
            self.assertEqual(flds[idx].name(), expected_name)
            self.assertEqual(f.fields().at(idx).name(), expected_name)

    layer.startEditing()

    checkFieldNames(['fldtxt', 'fldint'])
    self.assertTrue(layer.renameAttribute(1, 'fldint2'))
    checkFieldNames(['fldtxt', 'fldint2'])
    # add an attribute
    self.assertTrue(layer.addAttribute(QgsField("flddouble", QVariant.Double, "double")))
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble'])
    # rename it
    self.assertTrue(layer.renameAttribute(2, 'flddouble2'))
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble2'])
    self.assertTrue(layer.addAttribute(QgsField("flddate", QVariant.Date, "date")))
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble2', 'flddate'])
    self.assertTrue(layer.renameAttribute(2, 'flddouble3'))
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble3', 'flddate'])
    self.assertTrue(layer.renameAttribute(3, 'flddate2'))
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble3', 'flddate2'])

    # walk all six operations back...
    layer.undoStack().undo()
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble3', 'flddate'])
    layer.undoStack().undo()
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble2', 'flddate'])
    layer.undoStack().undo()
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble2'])
    layer.undoStack().undo()
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble'])
    layer.undoStack().undo()
    checkFieldNames(['fldtxt', 'fldint2'])
    layer.undoStack().undo()
    checkFieldNames(['fldtxt', 'fldint'])

    # ...and forward again
    layer.undoStack().redo()
    checkFieldNames(['fldtxt', 'fldint2'])
    layer.undoStack().redo()
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble'])
    layer.undoStack().redo()
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble2'])
    layer.undoStack().redo()
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble2', 'flddate'])
    layer.undoStack().redo()
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble3', 'flddate'])
    layer.undoStack().redo()
    checkFieldNames(['fldtxt', 'fldint2', 'flddouble3', 'flddate2'])
def test_RenameAttributeAndDelete(self):
    """Interleave field renames with field deletions; undo must walk back
    through every intermediate field-name state."""
    layer = createLayerWithOnePoint()
    layer.dataProvider().addAttributes(
        [QgsField("flddouble", QVariant.Double, "double")])
    layer.updateFields()

    def checkFieldNames(names):
        # field names must match both on the layer and on fetched features
        flds = layer.fields()
        f = next(layer.getFeatures())
        self.assertEqual(flds.count(), len(names))
        self.assertEqual(f.fields().count(), len(names))

        for idx, expected_name in enumerate(names):
            self.assertEqual(flds[idx].name(), expected_name)
            self.assertEqual(f.fields().at(idx).name(), expected_name)

    layer.startEditing()

    checkFieldNames(['fldtxt', 'fldint', 'flddouble'])
    self.assertTrue(layer.renameAttribute(0, 'fldtxt2'))
    checkFieldNames(['fldtxt2', 'fldint', 'flddouble'])
    self.assertTrue(layer.renameAttribute(2, 'flddouble2'))
    checkFieldNames(['fldtxt2', 'fldint', 'flddouble2'])

    # delete an attribute
    self.assertTrue(layer.deleteAttribute(0))
    checkFieldNames(['fldint', 'flddouble2'])
    # rename remaining
    self.assertTrue(layer.renameAttribute(0, 'fldint2'))
    checkFieldNames(['fldint2', 'flddouble2'])
    self.assertTrue(layer.renameAttribute(1, 'flddouble3'))
    checkFieldNames(['fldint2', 'flddouble3'])
    # delete an attribute
    self.assertTrue(layer.deleteAttribute(0))
    checkFieldNames(['flddouble3'])
    self.assertTrue(layer.renameAttribute(0, 'flddouble4'))
    checkFieldNames(['flddouble4'])

    # walk all seven operations back to the initial state
    layer.undoStack().undo()
    checkFieldNames(['flddouble3'])
    layer.undoStack().undo()
    checkFieldNames(['fldint2', 'flddouble3'])
    layer.undoStack().undo()
    checkFieldNames(['fldint2', 'flddouble2'])
    layer.undoStack().undo()
    checkFieldNames(['fldint', 'flddouble2'])
    layer.undoStack().undo()
    checkFieldNames(['fldtxt2', 'fldint', 'flddouble2'])
    layer.undoStack().undo()
    checkFieldNames(['fldtxt2', 'fldint', 'flddouble'])
    layer.undoStack().undo()
    checkFieldNames(['fldtxt', 'fldint', 'flddouble'])

    # layer.undoStack().redo()
    # checkFieldNames(['fldtxt2', 'fldint'])
    # layer.undoStack().redo()
    # checkFieldNames(['fldint'])
def test_RenameExpressionField(self):
    """Renaming a virtual (expression) field is reflected both on the
    layer's fields and on the fields carried by fetched features."""
    layer = createLayerWithOnePoint()
    idx = layer.addExpressionField('1+1', QgsField('math_is_hard', QVariant.Int))

    # rename the virtual field, then verify it everywhere it is exposed
    self.assertTrue(layer.renameAttribute(idx, 'renamed'))

    feature = next(layer.getFeatures())
    for fields in (layer.fields(), feature.fields()):
        self.assertEqual(fields[idx].name(), 'renamed')
def test_fields(self):
    """Field lookup by name yields its index, or -1 for unknown names."""
    layer = createLayerWithOnePoint()
    fields = layer.fields()
    self.assertEqual(fields.indexFromName("fldint"), 1)
    self.assertEqual(fields.indexFromName("fldXXX"), -1)
def test_getFeatures(self):
    """Exercise the getFeatures()/getFeature() overloads: plain iterator,
    single id, expression filter, id list and bounding rectangle."""
    layer = createLayerWithOnePoint()

    feat = QgsFeature()
    iterator = layer.getFeatures()
    self.assertTrue(iterator.nextFeature(feat))
    self.assertTrue(feat.isValid())
    self.assertEqual(feat.id(), 1)
    self.assertEqual(feat.geometry().asPoint(), QgsPointXY(100, 200))
    self.assertEqual(feat["fldtxt"], "test")
    self.assertEqual(feat["fldint"], 123)
    # only one feature in the layer
    self.assertFalse(iterator.nextFeature(feat))

    layer2 = createLayerWithFivePoints()

    # getFeature(fid): valid id yields a valid feature, unknown id does not
    feature = layer2.getFeature(4)
    self.assertTrue(feature.isValid())
    self.assertEqual(feature['fldtxt'], 'test3')
    self.assertEqual(feature['fldint'], -1)
    feature = layer2.getFeature(10)
    self.assertFalse(feature.isValid())

    # the remaining overloads are checked through the ids they return:
    # (request argument, expected id set)
    overload_cases = [
        ("fldint <= 0", {4, 5}),                    # getFeatures(expression)
        ([1, 2], {1, 2}),                           # getFeatures(fids)
        (QgsRectangle(99, 99, 201, 201), {1, 2}),   # getFeatures(rect)
    ]
    for request, expected_ids in overload_cases:
        matched = {f.id() for f in layer2.getFeatures(request)}
        self.assertEqual(matched, expected_ids)
def test_join(self):
    """Two memory-cached joins on the same target field: joined fields are
    appended with the default and a custom prefix, carry the join origin,
    and joined attribute values are fetched with the feature."""
    joinLayer = createJoinLayer()
    joinLayer2 = createJoinLayer()
    QgsProject.instance().addMapLayers([joinLayer, joinLayer2])

    layer = createLayerWithOnePoint()

    # join with the default prefix (join layer name + '_')
    join = QgsVectorLayerJoinInfo()
    join.setTargetFieldName("fldint")
    join.setJoinLayer(joinLayer)
    join.setJoinFieldName("y")
    join.setUsingMemoryCache(True)
    layer.addJoin(join)

    # second join with an explicit custom prefix
    join2 = QgsVectorLayerJoinInfo()
    join2.setTargetFieldName("fldint")
    join2.setJoinLayer(joinLayer2)
    join2.setJoinFieldName("y")
    join2.setUsingMemoryCache(True)
    join2.setPrefix("custom-prefix_")
    layer.addJoin(join2)

    # 2 provider fields + 2 joined fields per join
    flds = layer.fields()
    self.assertEqual(len(flds), 6)
    self.assertEqual(flds[2].name(), "joinlayer_x")
    self.assertEqual(flds[3].name(), "joinlayer_z")
    self.assertEqual(flds[4].name(), "custom-prefix_x")
    self.assertEqual(flds[5].name(), "custom-prefix_z")
    self.assertEqual(flds.fieldOrigin(0), QgsFields.OriginProvider)
    self.assertEqual(flds.fieldOrigin(2), QgsFields.OriginJoin)
    self.assertEqual(flds.fieldOrigin(3), QgsFields.OriginJoin)
    self.assertEqual(flds.fieldOriginIndex(0), 0)
    self.assertEqual(flds.fieldOriginIndex(2), 0)
    self.assertEqual(flds.fieldOriginIndex(3), 2)

    # fetched features carry the joined attribute values
    f = QgsFeature()
    fi = layer.getFeatures()
    self.assertTrue(fi.nextFeature(f))
    attrs = f.attributes()
    self.assertEqual(len(attrs), 6)
    self.assertEqual(attrs[0], "test")
    self.assertEqual(attrs[1], 123)
    self.assertEqual(attrs[2], "foo")
    self.assertEqual(attrs[3], 321)

    self.assertFalse(fi.nextFeature(f))

    f2 = next(layer.getFeatures(QgsFeatureRequest(f.id())))
    self.assertEqual(len(f2.attributes()), 6)
    self.assertEqual(f2[2], "foo")
    self.assertEqual(f2[3], 321)
def test_JoinStats(self):
    """Min/max/unique statistics on a joined field must only include
    values actually provided through the join."""
    joined = createJoinLayer()
    layer = createLayerWithTwoPoints()
    QgsProject.instance().addMapLayers([joined, layer])

    info = QgsVectorLayerJoinInfo()
    info.setTargetFieldName("fldint")
    info.setJoinLayer(joined)
    info.setJoinFieldName("y")
    info.setUsingMemoryCache(True)
    layer.addJoin(info)

    # index 3 is a joined field; stats should only see joined values
    joined_field = 3
    self.assertEqual(layer.minimumValue(joined_field), 111)
    self.assertEqual(layer.maximumValue(joined_field), 321)
    self.assertEqual(set(layer.uniqueValues(joined_field)), {111, 321})
def test_valid_join_when_opening_project(self):
    """A join defined in a project file (joined on the "id" field) must be
    functional right after the project is read, including when accessed
    through freshly created attribute table models."""
    fid = 4
    attr_idx = 4        # joined field index on the target layer
    join_attr_idx = 1   # corresponding field index on the join layer
    new_value = 33.0

    # read project and get layers; fail fast if the project cannot be read
    # (previously the return value was stored in an unused 'rc' variable)
    myPath = os.path.join(unitTestDataPath(), 'joins.qgs')
    self.assertTrue(QgsProject.instance().read(myPath))

    layer = QgsProject.instance().mapLayersByName("polys_with_id")[0]
    join_layer = QgsProject.instance().mapLayersByName("polys_overlapping_with_id")[0]

    # create an attribute table for the main_layer and the
    # joined layer
    cache = QgsVectorLayerCache(layer, 100)
    am = QgsAttributeTableModel(cache)
    am.loadLayer()

    join_cache = QgsVectorLayerCache(join_layer, 100)
    join_am = QgsAttributeTableModel(join_cache)
    join_am.loadLayer()

    # check feature value of a joined field from the attribute model
    model_index = am.idToIndex(fid)
    feature_model = am.feature(model_index)

    join_model_index = join_am.idToIndex(fid)
    join_feature_model = join_am.feature(join_model_index)

    self.assertEqual(feature_model.attribute(attr_idx), join_feature_model.attribute(join_attr_idx))

    # change attribute value for a feature of the joined layer
    join_layer.startEditing()
    join_layer.changeAttributeValue(fid, join_attr_idx, new_value)
    join_layer.commitChanges()

    # check the feature previously modified
    join_model_index = join_am.idToIndex(fid)
    join_feature_model = join_am.feature(join_model_index)
    self.assertEqual(join_feature_model.attribute(join_attr_idx), new_value)

    # recreate a new cache and model to simulate the opening of
    # a new attribute table
    cache = QgsVectorLayerCache(layer, 100)
    am = QgsAttributeTableModel(cache)
    am.loadLayer()

    # test that the model is up to date with the joined layer
    model_index = am.idToIndex(fid)
    feature_model = am.feature(model_index)
    self.assertEqual(feature_model.attribute(attr_idx), new_value)

    # restore value so other tests see the original project data
    join_layer.startEditing()
    join_layer.changeAttributeValue(fid, join_attr_idx, 7.0)
    join_layer.commitChanges()
def testUniqueValue(self):
    """uniqueValues() must merge provider values with uncommitted edits.

    Fixes a copy-paste defect in the original test: a second feature
    ``f2`` was constructed but ``f1`` was re-added instead.
    """
    layer = createLayerWithFivePoints()

    # test layer with just provider features
    self.assertEqual(set(layer.uniqueValues(1)), set([123, 457, 888, -1, 0]))

    # add feature with new value
    layer.startEditing()
    f1 = QgsFeature()
    f1.setAttributes(["test2", 999])
    self.assertTrue(layer.addFeature(f1))
    # should be included in unique values
    self.assertEqual(set(layer.uniqueValues(1)), set([123, 457, 888, -1, 0, 999]))

    # add a second feature with the same value, should be no change
    f2 = QgsFeature()
    f2.setAttributes(["test2", 999])
    self.assertTrue(layer.addFeature(f2))
    self.assertEqual(set(layer.uniqueValues(1)), set([123, 457, 888, -1, 0, 999]))

    # add another feature
    f3 = QgsFeature()
    f3.setAttributes(["test2", 9999])
    self.assertTrue(layer.addFeature(f3))
    self.assertEqual(set(layer.uniqueValues(1)), set([123, 457, 888, -1, 0, 999, 9999]))

    # change an attribute value to a new unique value
    f1_id = next(layer.getFeatures()).id()
    self.assertTrue(layer.changeAttributeValue(f1_id, 1, 481523))
    # note - this isn't 100% accurate, since 123 no longer exists - but it avoids looping through all features
    self.assertEqual(set(layer.uniqueValues(1)), set([123, 457, 888, -1, 0, 999, 9999, 481523]))
def testUniqueStringsMatching(self):
    """uniqueStringsMatching() finds case-insensitive substring matches in
    both provider features and uncommitted edits.

    Fixes a copy-paste defect in the original test (``f2`` constructed but
    ``f1`` re-added) and removes an unused ``QgsFeature`` local.
    """
    layer = QgsVectorLayer("Point?field=fldtxt:string", "addfeat", "memory")
    pr = layer.dataProvider()
    f = QgsFeature()
    f.setAttributes(["apple"])
    f2 = QgsFeature()
    f2.setAttributes(["orange"])
    f3 = QgsFeature()
    f3.setAttributes(["pear"])
    f4 = QgsFeature()
    f4.setAttributes(["BanaNa"])
    f5 = QgsFeature()
    f5.setAttributes(["ApriCot"])
    assert pr.addFeatures([f, f2, f3, f4, f5])
    assert layer.featureCount() == 5

    # test layer with just provider features
    self.assertEqual(set(layer.uniqueStringsMatching(0, 'N')), set(['orange', 'BanaNa']))

    # add feature with new value
    layer.startEditing()
    f1 = QgsFeature()
    f1.setAttributes(["waterMelon"])
    self.assertTrue(layer.addFeature(f1))
    # should be included in unique values
    self.assertEqual(set(layer.uniqueStringsMatching(0, 'N')), set(['orange', 'BanaNa', 'waterMelon']))

    # add a second feature with the same value, should be no change
    f2 = QgsFeature()
    f2.setAttributes(["waterMelon"])
    self.assertTrue(layer.addFeature(f2))
    self.assertEqual(set(layer.uniqueStringsMatching(0, 'N')), set(['orange', 'BanaNa', 'waterMelon']))
    self.assertEqual(set(layer.uniqueStringsMatching(0, 'aN')), set(['orange', 'BanaNa']))

    # add another feature
    f3 = QgsFeature()
    f3.setAttributes(["pineapple"])
    self.assertTrue(layer.addFeature(f3))
    self.assertEqual(set(layer.uniqueStringsMatching(0, 'n')), set(['orange', 'BanaNa', 'waterMelon', 'pineapple']))

    # change an attribute value to a new unique value
    f1_id = next(layer.getFeatures()).id()
    self.assertTrue(layer.changeAttributeValue(f1_id, 0, 'coconut'))
    # note - this isn't 100% accurate, since orange no longer exists - but it avoids looping through all features
    self.assertEqual(set(layer.uniqueStringsMatching(0, 'n')),
                     set(['orange', 'BanaNa', 'waterMelon', 'pineapple', 'coconut']))
def testMinValue(self):
""" test retrieving minimum values """
layer = createLayerWithFivePoints()
# test layer with just provider features
self.assertEqual(layer.minimumValue(1), -1)
# add feature with new value
layer.startEditing()
f1 = QgsFeature()
f1.setAttributes(["test2", -999])
self.assertTrue(layer.addFeature(f1))
# should be new minimum value
self.assertEqual(layer.minimumValue(1), -999)
# add it again, should be no change
f2 = QgsFeature()
f2.setAttributes(["test2", -999])
self.assertTrue(layer.addFeature(f1))
self.assertEqual(layer.minimumValue(1), -999)
# add another feature
f3 = QgsFeature()
f3.setAttributes(["test2", -1000])
self.assertTrue(layer.addFeature(f3))
self.assertEqual(layer.minimumValue(1), -1000)
# change an attribute value to a new minimum value
f1_id = next(layer.getFeatures()).id()
self.assertTrue(layer.changeAttributeValue(f1_id, 1, -1001))
self.assertEqual(layer.minimumValue(1), -1001)
def testMaxValue(self):
""" test retrieving maximum values """
layer = createLayerWithFivePoints()
# test layer with just provider features
self.assertEqual(layer.maximumValue(1), 888)
# add feature with new value
layer.startEditing()
f1 = QgsFeature()
f1.setAttributes(["test2", 999])
self.assertTrue(layer.addFeature(f1))
# should be new maximum value
self.assertEqual(layer.maximumValue(1), 999)
# add it again, should be no change
f2 = QgsFeature()
f2.setAttributes(["test2", 999])
self.assertTrue(layer.addFeature(f1))
self.assertEqual(layer.maximumValue(1), 999)
# add another feature
f3 = QgsFeature()
f3.setAttributes(["test2", 1000])
self.assertTrue(layer.addFeature(f3))
self.assertEqual(layer.maximumValue(1), 1000)
# change an attribute value to a new maximum value
f1_id = next(layer.getFeatures()).id()
self.assertTrue(layer.changeAttributeValue(f1_id, 1, 1001))
self.assertEqual(layer.maximumValue(1), 1001)
def test_InvalidOperations(self):
layer = createLayerWithOnePoint()
layer.startEditing()
# ADD FEATURE
newF1 = QgsFeature()
self.assertFalse(layer.addFeature(newF1)) # need attributes like the layer has)
# DELETE FEATURE
self.assertFalse(layer.deleteFeature(-333))
# we do not check for existence of the feature id if it's
# not newly added feature
# self.assertFalse(layer.deleteFeature(333))
# CHANGE GEOMETRY
self.assertFalse(layer.changeGeometry(
-333, QgsGeometry.fromPointXY(QgsPointXY(1, 1))))
# CHANGE VALUE
self.assertFalse(layer.changeAttributeValue(-333, 0, 1))
self.assertFalse(layer.changeAttributeValue(1, -1, 1))
# ADD ATTRIBUTE
self.assertFalse(layer.addAttribute(QgsField()))
# DELETE ATTRIBUTE
self.assertFalse(layer.deleteAttribute(-1))
    def onBlendModeChanged(self, mode):
        # Signal-spy slot: records the mode emitted by blendModeChanged /
        # featureBlendModeChanged so tests can assert the signal fired.
        self.blendModeTest = mode
def test_setBlendMode(self):
layer = createLayerWithOnePoint()
self.blendModeTest = 0
layer.blendModeChanged.connect(self.onBlendModeChanged)
layer.setBlendMode(QPainter.CompositionMode_Screen)
self.assertEqual(self.blendModeTest, QPainter.CompositionMode_Screen)
self.assertEqual(layer.blendMode(), QPainter.CompositionMode_Screen)
def test_setFeatureBlendMode(self):
layer = createLayerWithOnePoint()
self.blendModeTest = 0
layer.featureBlendModeChanged.connect(self.onBlendModeChanged)
layer.setFeatureBlendMode(QPainter.CompositionMode_Screen)
self.assertEqual(self.blendModeTest, QPainter.CompositionMode_Screen)
self.assertEqual(layer.featureBlendMode(), QPainter.CompositionMode_Screen)
    def test_ExpressionField(self):
        """Add, evaluate, update and remove a virtual (expression) field."""
        layer = createLayerWithOnePoint()
        cnt = layer.fields().count()
        idx = layer.addExpressionField('5', QgsField('test', QVariant.LongLong))
        fet = next(layer.getFeatures())
        self.assertEqual(fet[idx], 5)
        # check fields: virtual field counts as an extra field on the layer
        self.assertEqual(layer.fields().count(), cnt + 1)
        self.assertEqual(fet.fields(), layer.fields())
        # retrieve single feature and check fields
        fet = next(layer.getFeatures(QgsFeatureRequest().setFilterFid(1)))
        self.assertEqual(fet.fields(), layer.fields())
        # updating the expression must be reflected on the next fetch
        layer.updateExpressionField(idx, '9')
        self.assertEqual(next(layer.getFeatures())[idx], 9)
        layer.removeExpressionField(idx)
        self.assertEqual(layer.fields().count(), cnt)
        # expression field which references itself; the self-referencing
        # aggregate cannot be evaluated, so the value is NULL
        idx = layer.addExpressionField('sum(test2)', QgsField('test2', QVariant.LongLong))
        fet = next(layer.getFeatures())
        self.assertEqual(fet['test2'], NULL)
    def test_ExpressionFieldEllipsoidLengthCalculation(self):
        """$length in a virtual field must honor project ellipsoid and distance units."""
        # create a temporary layer
        temp_layer = QgsVectorLayer("LineString?crs=epsg:3111&field=pk:int", "vl", "memory")
        self.assertTrue(temp_layer.isValid())
        f1 = QgsFeature(temp_layer.dataProvider().fields(), 1)
        f1.setAttribute("pk", 1)
        f1.setGeometry(QgsGeometry.fromPolylineXY([QgsPointXY(2484588, 2425722), QgsPointXY(2482767, 2398853)]))
        temp_layer.dataProvider().addFeatures([f1])
        # set project CRS and ellipsoid (NOTE: mutates global QgsProject state)
        srs = QgsCoordinateReferenceSystem(3111, QgsCoordinateReferenceSystem.EpsgCrsId)
        QgsProject.instance().setCrs(srs)
        QgsProject.instance().setEllipsoid("WGS84")
        QgsProject.instance().setDistanceUnits(QgsUnitTypes.DistanceMeters)
        idx = temp_layer.addExpressionField('$length', QgsField('length', QVariant.Double))  # NOQA
        # check value (ellipsoidal length in meters)
        f = next(temp_layer.getFeatures())
        expected = 26932.156
        self.assertAlmostEqual(f['length'], expected, 3)
        # change project length unit, check calculation respects unit
        QgsProject.instance().setDistanceUnits(QgsUnitTypes.DistanceFeet)
        f = next(temp_layer.getFeatures())
        expected = 88360.0918635
        self.assertAlmostEqual(f['length'], expected, 3)
    def test_ExpressionFieldEllipsoidAreaCalculation(self):
        """$area in a virtual field must honor project ellipsoid and area units."""
        # create a temporary layer
        temp_layer = QgsVectorLayer("Polygon?crs=epsg:3111&field=pk:int", "vl", "memory")
        self.assertTrue(temp_layer.isValid())
        f1 = QgsFeature(temp_layer.dataProvider().fields(), 1)
        f1.setAttribute("pk", 1)
        f1.setGeometry(QgsGeometry.fromPolygonXY([[QgsPointXY(2484588, 2425722), QgsPointXY(2482767, 2398853),
                                                   QgsPointXY(2520109, 2397715), QgsPointXY(2520792, 2425494),
                                                   QgsPointXY(2484588, 2425722)]]))
        temp_layer.dataProvider().addFeatures([f1])
        # set project CRS and ellipsoid (NOTE: mutates global QgsProject state)
        srs = QgsCoordinateReferenceSystem(3111, QgsCoordinateReferenceSystem.EpsgCrsId)
        QgsProject.instance().setCrs(srs)
        QgsProject.instance().setEllipsoid("WGS84")
        QgsProject.instance().setAreaUnits(QgsUnitTypes.AreaSquareMeters)
        idx = temp_layer.addExpressionField('$area', QgsField('area', QVariant.Double))  # NOQA
        # check value (ellipsoidal area in square meters)
        f = next(temp_layer.getFeatures())
        expected = 1009089817.0
        self.assertAlmostEqual(f['area'], expected, delta=1.0)
        # change project area unit, check calculation respects unit
        QgsProject.instance().setAreaUnits(QgsUnitTypes.AreaSquareMiles)
        f = next(temp_layer.getFeatures())
        expected = 389.6117565069
        self.assertAlmostEqual(f['area'], expected, 3)
def test_ExpressionFilter(self):
layer = createLayerWithOnePoint()
idx = layer.addExpressionField('5', QgsField('test', QVariant.LongLong)) # NOQA
features = layer.getFeatures(QgsFeatureRequest().setFilterExpression('"test" = 6'))
assert (len(list(features)) == 0)
features = layer.getFeatures(QgsFeatureRequest().setFilterExpression('"test" = 5'))
assert (len(list(features)) == 1)
    def testSelectByIds(self):
        """ Test selecting by ID"""
        layer = QgsVectorLayer(os.path.join(unitTestDataPath(), 'points.shp'), 'Points', 'ogr')
        # SetSelection: replaces the current selection
        layer.selectByIds([1, 3, 5, 7], QgsVectorLayer.SetSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([1, 3, 5, 7]))
        # check that existing selection is cleared
        layer.selectByIds([2, 4, 6], QgsVectorLayer.SetSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([2, 4, 6]))
        # AddToSelection: union with the current selection
        layer.selectByIds([3, 5], QgsVectorLayer.AddToSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3, 4, 5, 6]))
        layer.selectByIds([1], QgsVectorLayer.AddToSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([1, 2, 3, 4, 5, 6]))
        # IntersectSelection: keep only ids present in both sets
        layer.selectByIds([1, 3, 5, 6], QgsVectorLayer.IntersectSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([1, 3, 5, 6]))
        layer.selectByIds([1, 2, 5, 6], QgsVectorLayer.IntersectSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([1, 5, 6]))
        # RemoveFromSelection: set difference
        layer.selectByIds([2, 6, 7], QgsVectorLayer.RemoveFromSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([1, 5]))
        layer.selectByIds([1, 5], QgsVectorLayer.RemoveFromSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([]))
    def testSelectByExpression(self):
        """ Test selecting by expression """
        layer = QgsVectorLayer(os.path.join(unitTestDataPath(), 'points.shp'), 'Points', 'ogr')
        # SetSelection: replaces the current selection
        layer.selectByExpression('"Class"=\'B52\' and "Heading" > 10 and "Heading" <70', QgsVectorLayer.SetSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([10, 11]))
        # check that existing selection is cleared
        layer.selectByExpression('"Class"=\'Biplane\'', QgsVectorLayer.SetSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([1, 5, 6, 7, 8]))
        # SetSelection no matching: selection becomes empty
        layer.selectByExpression('"Class"=\'A380\'', QgsVectorLayer.SetSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([]))
        # AddToSelection: union with the current selection
        layer.selectByExpression('"Importance"=3', QgsVectorLayer.AddToSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([0, 2, 3, 4, 14]))
        layer.selectByExpression('"Importance"=4', QgsVectorLayer.AddToSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([0, 2, 3, 4, 13, 14]))
        # IntersectSelection: keep only matching ids already selected
        layer.selectByExpression('"Heading"<100', QgsVectorLayer.IntersectSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([0, 2, 3, 4]))
        layer.selectByExpression('"Cabin Crew"=1', QgsVectorLayer.IntersectSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3]))
        # RemoveFromSelection: deselect matching features
        layer.selectByExpression('"Heading"=85', QgsVectorLayer.RemoveFromSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([3]))
        layer.selectByExpression('"Heading"=95', QgsVectorLayer.RemoveFromSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([]))
    def testSelectByRect(self):
        """ Test selecting by rectangle """
        layer = QgsVectorLayer(os.path.join(unitTestDataPath(), 'points.shp'), 'Points', 'ogr')
        # SetSelection: replaces the current selection
        layer.selectByRect(QgsRectangle(-112, 30, -94, 45), QgsVectorLayer.SetSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3, 7, 10, 11, 15]))
        # check that existing selection is cleared
        layer.selectByRect(QgsRectangle(-112, 30, -94, 37), QgsVectorLayer.SetSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3, 10, 15]))
        # SetSelection no matching: selection becomes empty
        layer.selectByRect(QgsRectangle(112, 30, 115, 45), QgsVectorLayer.SetSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([]))
        # AddToSelection: union with the current selection
        layer.selectByRect(QgsRectangle(-112, 30, -94, 37), QgsVectorLayer.AddToSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3, 10, 15]))
        layer.selectByRect(QgsRectangle(-112, 37, -94, 45), QgsVectorLayer.AddToSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3, 7, 10, 11, 15]))
        # IntersectSelection: keep only selected features inside the rect
        layer.selectByRect(QgsRectangle(-112, 30, -94, 37), QgsVectorLayer.IntersectSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([2, 3, 10, 15]))
        layer.selectByIds([2, 10, 13])
        layer.selectByRect(QgsRectangle(-112, 30, -94, 37), QgsVectorLayer.IntersectSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([2, 10]))
        # RemoveFromSelection: deselect features inside the rect
        layer.selectByRect(QgsRectangle(-112, 30, -94, 45), QgsVectorLayer.SetSelection)
        layer.selectByRect(QgsRectangle(-112, 30, -94, 37), QgsVectorLayer.RemoveFromSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([7, 11]))
        layer.selectByRect(QgsRectangle(-112, 30, -94, 45), QgsVectorLayer.RemoveFromSelection)
        self.assertEqual(set(layer.selectedFeatureIds()), set([]))
    def testAggregate(self):
        """ Test aggregate calculation """
        layer = QgsVectorLayer("Point?field=fldint:integer", "layer", "memory")
        pr = layer.dataProvider()
        # includes a None value to exercise NULL handling in the aggregates
        int_values = [4, 2, 3, 2, 5, None, 8]
        features = []
        for i in int_values:
            f = QgsFeature()
            f.setFields(layer.fields())
            f.setAttributes([i])
            features.append(f)
        assert pr.addFeatures(features)
        # [aggregate type, expected result] pairs; NULLs are skipped, hence
        # e.g. Count == 6 and CountMissing == 1
        tests = [[QgsAggregateCalculator.Count, 6],
                 [QgsAggregateCalculator.Sum, 24],
                 [QgsAggregateCalculator.Mean, 4],
                 [QgsAggregateCalculator.StDev, 2.0816],
                 [QgsAggregateCalculator.StDevSample, 2.2803],
                 [QgsAggregateCalculator.Min, 2],
                 [QgsAggregateCalculator.Max, 8],
                 [QgsAggregateCalculator.Range, 6],
                 [QgsAggregateCalculator.Median, 3.5],
                 [QgsAggregateCalculator.CountDistinct, 5],
                 [QgsAggregateCalculator.CountMissing, 1],
                 [QgsAggregateCalculator.FirstQuartile, 2],
                 [QgsAggregateCalculator.ThirdQuartile, 5.0],
                 [QgsAggregateCalculator.InterQuartileRange, 3.0]
                 ]
        for t in tests:
            val, ok = layer.aggregate(t[0], 'fldint')
            self.assertTrue(ok)
            # exact match for integer expectations, approximate for floats
            if isinstance(t[1], int):
                self.assertEqual(val, t[1])
            else:
                self.assertAlmostEqual(val, t[1], 3)
        # test with parameters (delimiter for string concatenation)
        layer = QgsVectorLayer("Point?field=fldstring:string", "layer", "memory")
        pr = layer.dataProvider()
        string_values = ['this', 'is', 'a', 'test']
        features = []
        for s in string_values:
            f = QgsFeature()
            f.setFields(layer.fields())
            f.setAttributes([s])
            features.append(f)
        assert pr.addFeatures(features)
        params = QgsAggregateCalculator.AggregateParameters()
        params.delimiter = ' '
        val, ok = layer.aggregate(QgsAggregateCalculator.StringConcatenate, 'fldstring', params)
        self.assertTrue(ok)
        self.assertEqual(val, 'this is a test')
def testAggregateInVirtualField(self):
"""
Test aggregates in a virtual field
"""
layer = QgsVectorLayer("Point?field=fldint:integer", "layer", "memory")
pr = layer.dataProvider()
int_values = [4, 2, 3, 2, 5, None, 8]
features = []
for i in int_values:
f = QgsFeature()
f.setFields(layer.fields())
f.setAttributes([i])
features.append(f)
assert pr.addFeatures(features)
field = QgsField('virtual', QVariant.Double)
layer.addExpressionField('sum(fldint*2)', field)
vals = [f['virtual'] for f in layer.getFeatures()]
self.assertEqual(vals, [48, 48, 48, 48, 48, 48, 48])
    def onLayerOpacityChanged(self, tr):
        # Signal-spy slot: records the opacity emitted by opacityChanged
        # so tests can assert the signal fired.
        self.opacityTest = tr
def test_setLayerOpacity(self):
layer = createLayerWithOnePoint()
self.opacityTest = 0
layer.opacityChanged.connect(self.onLayerOpacityChanged)
layer.setOpacity(0.5)
self.assertEqual(self.opacityTest, 0.5)
self.assertEqual(layer.opacity(), 0.5)
    def onRendererChanged(self):
        # Signal-spy slot: flags that rendererChanged was emitted.
        self.rendererChanged = True
def test_setRenderer(self):
layer = createLayerWithOnePoint()
self.rendererChanged = False
layer.rendererChanged.connect(self.onRendererChanged)
r = QgsSingleSymbolRenderer(QgsSymbol.defaultSymbol(QgsWkbTypes.PointGeometry))
layer.setRenderer(r)
self.assertTrue(self.rendererChanged)
self.assertEqual(layer.renderer(), r)
    def testGetSetAliases(self):
        """ test getting and setting aliases """
        layer = createLayerWithOnePoint()
        # no aliases initially
        self.assertFalse(layer.attributeAlias(0))
        self.assertFalse(layer.attributeAlias(1))
        self.assertFalse(layer.attributeAlias(2))
        # set an alias on field 0; other fields untouched
        layer.setFieldAlias(0, "test")
        self.assertEqual(layer.attributeAlias(0), "test")
        self.assertFalse(layer.attributeAlias(1))
        self.assertFalse(layer.attributeAlias(2))
        # alias must also be visible on the field definition itself
        self.assertEqual(layer.fields().at(0).alias(), "test")
        layer.setFieldAlias(1, "test2")
        self.assertEqual(layer.attributeAlias(0), "test")
        self.assertEqual(layer.attributeAlias(1), "test2")
        self.assertFalse(layer.attributeAlias(2))
        self.assertEqual(layer.fields().at(0).alias(), "test")
        self.assertEqual(layer.fields().at(1).alias(), "test2")
        # setting a None alias clears it
        layer.setFieldAlias(1, None)
        self.assertEqual(layer.attributeAlias(0), "test")
        self.assertFalse(layer.attributeAlias(1))
        self.assertFalse(layer.attributeAlias(2))
        self.assertEqual(layer.fields().at(0).alias(), "test")
        self.assertFalse(layer.fields().at(1).alias())
        # removeFieldAlias clears it too
        layer.removeFieldAlias(0)
        self.assertFalse(layer.attributeAlias(0))
        self.assertFalse(layer.attributeAlias(1))
        self.assertFalse(layer.attributeAlias(2))
        self.assertFalse(layer.fields().at(0).alias())
        self.assertFalse(layer.fields().at(1).alias())
    def testSaveRestoreAliases(self):
        """ test saving and restoring aliases from xml"""
        layer = createLayerWithOnePoint()
        # no default expressions: round-trip of a layer without aliases
        doc = QDomDocument("testdoc")
        elem = doc.createElement("maplayer")
        self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
        layer2 = createLayerWithOnePoint()
        self.assertTrue(layer2.readXml(elem, QgsReadWriteContext()))
        self.assertFalse(layer2.attributeAlias(0))
        self.assertFalse(layer2.attributeAlias(1))
        # set some aliases and round-trip again
        layer.setFieldAlias(0, "test")
        layer.setFieldAlias(1, "test2")
        doc = QDomDocument("testdoc")
        elem = doc.createElement("maplayer")
        self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
        layer3 = createLayerWithOnePoint()
        self.assertTrue(layer3.readXml(elem, QgsReadWriteContext()))
        # aliases must survive the XML round-trip
        self.assertEqual(layer3.attributeAlias(0), "test")
        self.assertEqual(layer3.attributeAlias(1), "test2")
        self.assertEqual(layer3.fields().at(0).alias(), "test")
        self.assertEqual(layer3.fields().at(1).alias(), "test2")
    def testGetSetDefaults(self):
        """ test getting and setting default expressions """
        layer = createLayerWithOnePoint()
        # no default value definitions initially
        self.assertFalse(layer.defaultValueDefinition(0))
        self.assertFalse(layer.defaultValueDefinition(0).expression())
        self.assertFalse(layer.defaultValueDefinition(0).applyOnUpdate())
        self.assertFalse(layer.defaultValueDefinition(1))
        self.assertFalse(layer.defaultValueDefinition(2))
        # set a default on field 0; other fields untouched
        layer.setDefaultValueDefinition(0, QgsDefaultValue("'test'"))
        self.assertTrue(layer.defaultValueDefinition(0))
        self.assertEqual(layer.defaultValueDefinition(0).expression(), "'test'")
        self.assertFalse(layer.defaultValueDefinition(0).applyOnUpdate())
        self.assertFalse(layer.defaultValueDefinition(1))
        self.assertFalse(layer.defaultValueDefinition(1).applyOnUpdate())
        self.assertFalse(layer.defaultValueDefinition(2))
        self.assertFalse(layer.defaultValueDefinition(2).applyOnUpdate())
        # the default must also be visible on the field definition itself
        self.assertEqual(layer.fields().at(0).defaultValueDefinition().expression(), "'test'")
        layer.setDefaultValueDefinition(1, QgsDefaultValue("2+2"))
        self.assertEqual(layer.defaultValueDefinition(0).expression(), "'test'")
        self.assertFalse(layer.defaultValueDefinition(0).applyOnUpdate())
        self.assertEqual(layer.defaultValueDefinition(1).expression(), "2+2")
        self.assertFalse(layer.defaultValueDefinition(1).applyOnUpdate())
        self.assertFalse(layer.defaultValueDefinition(2))
        self.assertFalse(layer.defaultValueDefinition(2).applyOnUpdate())
        self.assertEqual(layer.fields().at(0).defaultValueDefinition().expression(), "'test'")
        self.assertEqual(layer.fields().at(1).defaultValueDefinition().expression(), "2+2")
        # re-set field 1 with applyOnUpdate=True
        layer.setDefaultValueDefinition(1, QgsDefaultValue("2+2", True))
        self.assertEqual(layer.defaultValueDefinition(0).expression(), "'test'")
        self.assertFalse(layer.defaultValueDefinition(0).applyOnUpdate())
        self.assertEqual(layer.defaultValueDefinition(1).expression(), "2+2")
        self.assertTrue(layer.defaultValueDefinition(1).applyOnUpdate())
        self.assertEqual(layer.fields().at(0).defaultValueDefinition().expression(), "'test'")
        self.assertEqual(layer.fields().at(1).defaultValueDefinition().expression(), "2+2")
    def testSaveRestoreDefaults(self):
        """ test saving and restoring default expressions from xml"""
        layer = createLayerWithOnePoint()
        # no default expressions: round-trip of a layer without defaults
        doc = QDomDocument("testdoc")
        elem = doc.createElement("maplayer")
        self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
        layer2 = createLayerWithOnePoint()
        self.assertTrue(layer2.readXml(elem, QgsReadWriteContext()))
        self.assertFalse(layer2.defaultValueDefinition(0))
        self.assertFalse(layer2.defaultValueDefinition(1))
        # set some default expressions and round-trip again
        layer.setDefaultValueDefinition(0, QgsDefaultValue("'test'"))
        layer.setDefaultValueDefinition(1, QgsDefaultValue("2+2"))
        doc = QDomDocument("testdoc")
        elem = doc.createElement("maplayer")
        self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
        layer3 = createLayerWithOnePoint()
        self.assertTrue(layer3.readXml(elem, QgsReadWriteContext()))
        # default expressions must survive the XML round-trip
        self.assertEqual(layer3.defaultValueDefinition(0).expression(), "'test'")
        self.assertEqual(layer3.defaultValueDefinition(1).expression(), "2+2")
        self.assertEqual(layer3.fields().at(0).defaultValueDefinition().expression(), "'test'")
        self.assertEqual(layer3.fields().at(1).defaultValueDefinition().expression(), "2+2")
    def testEvaluatingDefaultExpressions(self):
        """ tests calculation of default values"""
        layer = createLayerWithOnePoint()
        layer.setDefaultValueDefinition(0, QgsDefaultValue("'test'"))
        layer.setDefaultValueDefinition(1, QgsDefaultValue("2+2"))
        self.assertEqual(layer.defaultValue(0), 'test')
        self.assertEqual(layer.defaultValue(1), 4)
        # using feature
        layer.setDefaultValueDefinition(1, QgsDefaultValue('$id * 2'))
        feature = QgsFeature(4)
        feature.setValid(True)
        feature.setFields(layer.fields())
        # no feature: $id cannot be evaluated
        self.assertFalse(layer.defaultValue(1))
        # with feature: $id is 4, so $id * 2 == 8
        self.assertEqual(layer.defaultValue(0, feature), 'test')
        self.assertEqual(layer.defaultValue(1, feature), 8)
        # using feature geometry: $x is 6, so $x * 2 == 12
        layer.setDefaultValueDefinition(1, QgsDefaultValue('$x * 2'))
        feature.setGeometry(QgsGeometry(QgsPoint(6, 7)))
        self.assertEqual(layer.defaultValue(1, feature), 12)
        # using contexts: explicit scope supplies @var1
        scope = QgsExpressionContextScope()
        scope.setVariable('var1', 16)
        context = QgsExpressionContext()
        context.appendScope(scope)
        layer.setDefaultValueDefinition(1, QgsDefaultValue('$id + @var1'))
        self.assertEqual(layer.defaultValue(1, feature, context), 20)
        # if no scope passed, should use a default constructed one including layer variables
        QgsExpressionContextUtils.setLayerVariable(layer, 'var2', 4)
        QgsExpressionContextUtils.setProjectVariable(QgsProject.instance(), 'var3', 8)
        layer.setDefaultValueDefinition(1, QgsDefaultValue('to_int(@var2) + to_int(@var3) + $id'))
        self.assertEqual(layer.defaultValue(1, feature), 16)
        # bad expression evaluates to a falsy result rather than raising
        layer.setDefaultValueDefinition(1, QgsDefaultValue('not a valid expression'))
        self.assertFalse(layer.defaultValue(1))
    def testApplyOnUpdateDefaultExpressions(self):
        """tests apply on update of default values"""
        layer = createLayerWithOnePoint()
        # field 0 has applyOnUpdate=True, field 1 applyOnUpdate=False
        layer.setDefaultValueDefinition(0, QgsDefaultValue("CONCAT('l: ', @number, ',f: ', \"fldint\" )", True))
        layer.setDefaultValueDefinition(1, QgsDefaultValue("1 * @number", False))
        QgsExpressionContextUtils.setLayerVariable(layer, 'number', 4)
        layer.startEditing()
        feature = QgsFeature()
        feature.setFields(layer.fields())
        feature.setValid(True)
        # Both default values should be set on feature create
        feature.setAttribute(1, layer.defaultValue(1, feature))
        feature.setAttribute(0, layer.defaultValue(0, feature))
        self.assertTrue(layer.addFeature(feature))
        fid = feature.id()
        self.assertEqual(layer.getFeature(fid)['fldtxt'], 'l: 4,f: 4')
        self.assertEqual(layer.getFeature(fid)['fldint'], 4)
        # ApplyOnUpdateDefaultValue should be set on changeAttributeValue
        layer.changeAttributeValue(fid, 1, 20)
        self.assertEqual(layer.getFeature(fid)['fldtxt'], 'l: 4,f: 20')
        self.assertEqual(layer.getFeature(fid)['fldint'], 20)
        # When changing the value of the "derived" attribute, only this one
        # should be updated
        QgsExpressionContextUtils.setLayerVariable(layer, 'number', 8)
        layer.changeAttributeValue(fid, 0, 0)
        self.assertEqual(layer.getFeature(fid)['fldtxt'], 'l: 8,f: 20')
        self.assertEqual(layer.getFeature(fid)['fldint'], 20)
        # Check update on geometry change: x($geometry) re-evaluates to 300
        layer.setDefaultValueDefinition(1, QgsDefaultValue("x($geometry)", True))
        layer.changeGeometry(fid, QgsGeometry.fromPointXY(QgsPointXY(300, 200)))
        self.assertEqual(layer.getFeature(fid)['fldint'], 300)
    def testGetSetConstraints(self):
        """ test getting and setting field constraints """
        layer = createLayerWithOnePoint()
        # no constraints initially
        self.assertFalse(layer.fieldConstraints(0))
        self.assertFalse(layer.fieldConstraints(1))
        self.assertFalse(layer.fieldConstraints(2))
        # set NotNull on field 0; layer-set constraints default to hard strength
        layer.setFieldConstraint(0, QgsFieldConstraints.ConstraintNotNull)
        self.assertEqual(layer.fieldConstraints(0), QgsFieldConstraints.ConstraintNotNull)
        self.assertFalse(layer.fieldConstraints(1))
        self.assertFalse(layer.fieldConstraints(2))
        self.assertEqual(layer.fields().at(0).constraints().constraints(), QgsFieldConstraints.ConstraintNotNull)
        self.assertEqual(layer.fields().at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintOriginLayer)
        self.assertEqual(layer.fields().at(0).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintStrengthHard)
        # field 1 gets both NotNull and Unique (combined as flags)
        layer.setFieldConstraint(1, QgsFieldConstraints.ConstraintNotNull)
        layer.setFieldConstraint(1, QgsFieldConstraints.ConstraintUnique)
        self.assertEqual(layer.fieldConstraints(0), QgsFieldConstraints.ConstraintNotNull)
        self.assertEqual(layer.fieldConstraints(1),
                         QgsFieldConstraints.ConstraintNotNull | QgsFieldConstraints.ConstraintUnique)
        self.assertFalse(layer.fieldConstraints(2))
        self.assertEqual(layer.fields().at(0).constraints().constraints(), QgsFieldConstraints.ConstraintNotNull)
        self.assertEqual(layer.fields().at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintOriginLayer)
        self.assertEqual(layer.fields().at(0).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintStrengthHard)
        self.assertEqual(layer.fields().at(1).constraints().constraints(),
                         QgsFieldConstraints.ConstraintNotNull | QgsFieldConstraints.ConstraintUnique)
        self.assertEqual(layer.fields().at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintOriginLayer)
        self.assertEqual(layer.fields().at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintUnique),
                         QgsFieldConstraints.ConstraintOriginLayer)
        self.assertEqual(layer.fields().at(1).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintStrengthHard)
        self.assertEqual(layer.fields().at(1).constraints().constraintStrength(QgsFieldConstraints.ConstraintUnique),
                         QgsFieldConstraints.ConstraintStrengthHard)
        # removing both constraints resets field 1 to no constraints
        layer.removeFieldConstraint(1, QgsFieldConstraints.ConstraintNotNull)
        layer.removeFieldConstraint(1, QgsFieldConstraints.ConstraintUnique)
        self.assertEqual(layer.fieldConstraints(0), QgsFieldConstraints.ConstraintNotNull)
        self.assertFalse(layer.fieldConstraints(1))
        self.assertFalse(layer.fieldConstraints(2))
        self.assertEqual(layer.fields().at(0).constraints().constraints(), QgsFieldConstraints.ConstraintNotNull)
        self.assertEqual(layer.fields().at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintOriginLayer)
        self.assertEqual(layer.fields().at(0).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintStrengthHard)
        self.assertFalse(layer.fields().at(1).constraints().constraints())
        # origin/strength revert to the not-set sentinels after removal
        self.assertEqual(layer.fields().at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintOriginNotSet)
        self.assertEqual(layer.fields().at(1).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintStrengthNotSet)
    def testSaveRestoreConstraints(self):
        """ test saving and restoring constraints from xml"""
        layer = createLayerWithOnePoint()
        # no constraints: round-trip of a layer without constraints
        doc = QDomDocument("testdoc")
        elem = doc.createElement("maplayer")
        self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
        layer2 = createLayerWithOnePoint()
        self.assertTrue(layer2.readXml(elem, QgsReadWriteContext()))
        self.assertFalse(layer2.fieldConstraints(0))
        self.assertFalse(layer2.fieldConstraints(1))
        # set some constraints and round-trip again
        layer.setFieldConstraint(0, QgsFieldConstraints.ConstraintNotNull)
        layer.setFieldConstraint(1, QgsFieldConstraints.ConstraintNotNull, QgsFieldConstraints.ConstraintStrengthSoft)
        layer.setFieldConstraint(1, QgsFieldConstraints.ConstraintUnique)
        doc = QDomDocument("testdoc")
        elem = doc.createElement("maplayer")
        self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
        layer3 = createLayerWithOnePoint()
        self.assertTrue(layer3.readXml(elem, QgsReadWriteContext()))
        self.assertEqual(layer3.fieldConstraints(0), QgsFieldConstraints.ConstraintNotNull)
        self.assertEqual(layer3.fieldConstraints(1),
                         QgsFieldConstraints.ConstraintNotNull | QgsFieldConstraints.ConstraintUnique)
        self.assertEqual(layer3.fields().at(0).constraints().constraints(), QgsFieldConstraints.ConstraintNotNull)
        self.assertEqual(layer3.fields().at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintOriginLayer)
        # NOTE(review): the strength assertions below check `layer` (the
        # original), not the restored `layer3` — likely a copy-paste slip;
        # they should presumably verify layer3. Confirm before changing, as
        # it depends on constraint strength being serialized.
        self.assertEqual(layer.fields().at(0).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintStrengthHard)
        self.assertEqual(layer3.fields().at(1).constraints().constraints(),
                         QgsFieldConstraints.ConstraintNotNull | QgsFieldConstraints.ConstraintUnique)
        self.assertEqual(layer3.fields().at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintOriginLayer)
        self.assertEqual(layer3.fields().at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintUnique),
                         QgsFieldConstraints.ConstraintOriginLayer)
        self.assertEqual(layer.fields().at(1).constraints().constraintStrength(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintStrengthSoft)
        self.assertEqual(layer.fields().at(1).constraints().constraintStrength(QgsFieldConstraints.ConstraintUnique),
                         QgsFieldConstraints.ConstraintStrengthHard)
    def testGetSetConstraintExpressions(self):
        """ test getting and setting field constraint expressions """
        layer = createLayerWithOnePoint()
        # no constraint expressions initially
        self.assertFalse(layer.constraintExpression(0))
        self.assertFalse(layer.constraintExpression(1))
        self.assertFalse(layer.constraintExpression(2))
        # set an expression on field 0; other fields untouched
        layer.setConstraintExpression(0, '1+2')
        self.assertEqual(layer.constraintExpression(0), '1+2')
        self.assertFalse(layer.constraintExpression(1))
        self.assertFalse(layer.constraintExpression(2))
        # expression must also be visible on the field definition itself
        self.assertEqual(layer.fields().at(0).constraints().constraintExpression(), '1+2')
        # set an expression with a description on field 1
        layer.setConstraintExpression(1, '3+4', 'desc')
        self.assertEqual(layer.constraintExpression(0), '1+2')
        self.assertEqual(layer.constraintExpression(1), '3+4')
        self.assertEqual(layer.constraintDescription(1), 'desc')
        self.assertFalse(layer.constraintExpression(2))
        self.assertEqual(layer.fields().at(0).constraints().constraintExpression(), '1+2')
        self.assertEqual(layer.fields().at(1).constraints().constraintExpression(), '3+4')
        self.assertEqual(layer.fields().at(1).constraints().constraintDescription(), 'desc')
        # setting None clears the expression
        layer.setConstraintExpression(1, None)
        self.assertEqual(layer.constraintExpression(0), '1+2')
        self.assertFalse(layer.constraintExpression(1))
        self.assertFalse(layer.constraintExpression(2))
        self.assertEqual(layer.fields().at(0).constraints().constraintExpression(), '1+2')
        self.assertFalse(layer.fields().at(1).constraints().constraintExpression())
    def testSaveRestoreConstraintExpressions(self):
        """ test saving and restoring constraint expressions from xml"""
        layer = createLayerWithOnePoint()
        # no constraints: round-trip of a layer without constraint expressions
        doc = QDomDocument("testdoc")
        elem = doc.createElement("maplayer")
        self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
        layer2 = createLayerWithOnePoint()
        self.assertTrue(layer2.readXml(elem, QgsReadWriteContext()))
        self.assertFalse(layer2.constraintExpression(0))
        self.assertFalse(layer2.constraintExpression(1))
        # set some constraints and round-trip again
        layer.setConstraintExpression(0, '1+2')
        layer.setConstraintExpression(1, '3+4', 'desc')
        doc = QDomDocument("testdoc")
        elem = doc.createElement("maplayer")
        self.assertTrue(layer.writeXml(elem, doc, QgsReadWriteContext()))
        layer3 = createLayerWithOnePoint()
        self.assertTrue(layer3.readXml(elem, QgsReadWriteContext()))
        # expressions and descriptions must survive the XML round-trip
        self.assertEqual(layer3.constraintExpression(0), '1+2')
        self.assertEqual(layer3.constraintExpression(1), '3+4')
        self.assertEqual(layer3.constraintDescription(1), 'desc')
        self.assertEqual(layer3.fields().at(0).constraints().constraintExpression(), '1+2')
        self.assertEqual(layer3.fields().at(1).constraints().constraintExpression(), '3+4')
        self.assertEqual(layer3.fields().at(1).constraints().constraintDescription(), 'desc')
        # restored constraints carry the expression flag with layer origin
        self.assertEqual(layer3.fields().at(0).constraints().constraints(), QgsFieldConstraints.ConstraintExpression)
        self.assertEqual(layer3.fields().at(1).constraints().constraints(), QgsFieldConstraints.ConstraintExpression)
        self.assertEqual(layer3.fields().at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintExpression),
                         QgsFieldConstraints.ConstraintOriginLayer)
        self.assertEqual(layer3.fields().at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintExpression),
                         QgsFieldConstraints.ConstraintOriginLayer)
def testGetFeatureLimitWithEdits(self):
    """ test getting features with a limit, when edits are present """
    layer = createLayerWithOnePoint()
    # now has one feature with id 0
    pr = layer.dataProvider()
    f1 = QgsFeature(1)
    f1.setAttributes(["test", 3])
    f1.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(300, 200)))
    f2 = QgsFeature(2)
    f2.setAttributes(["test", 3])
    f2.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
    f3 = QgsFeature(3)
    f3.setAttributes(["test", 3])
    f3.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
    self.assertTrue(pr.addFeatures([f1, f2, f3]))
    req = QgsFeatureRequest().setLimit(2)
    self.assertEqual(len(list(layer.getFeatures(req))), 2)
    # now delete feature f1; the limit must still be fillable from the
    # remaining features
    layer.startEditing()
    self.assertTrue(layer.deleteFeature(1))
    req = QgsFeatureRequest().setLimit(2)
    self.assertEqual(len(list(layer.getFeatures(req))), 2)
    layer.rollBack()
    # change an attribute value required by filter: one feature drops out of
    # the filter, but enough others remain to satisfy the limit
    layer.startEditing()
    req = QgsFeatureRequest().setFilterExpression('fldint=3').setLimit(2)
    self.assertTrue(layer.changeAttributeValue(2, 1, 4))
    self.assertEqual(len(list(layer.getFeatures(req))), 2)
    layer.rollBack()
    # same idea with a spatial filter: move one feature out of the rect
    layer.startEditing()
    req = QgsFeatureRequest().setFilterRect(QgsRectangle(50, 100, 150, 300)).setLimit(2)
    self.assertTrue(layer.changeGeometry(2, QgsGeometry.fromPointXY(QgsPointXY(500, 600))))
    self.assertEqual(len(list(layer.getFeatures(req))), 2)
    layer.rollBack()
def testClone(self):
    """Configure every clonable property on a layer, clone it, and verify the
    clone serialises to identical XML (after normalising the layer id, which
    is the one expected difference)."""
    # init crs
    srs = QgsCoordinateReferenceSystem(3111, QgsCoordinateReferenceSystem.EpsgCrsId)
    # init map layer styles
    tmplayer = createLayerWithTwoPoints()
    sym1 = QgsLineSymbol()
    sym1.setColor(Qt.magenta)
    tmplayer.setRenderer(QgsSingleSymbolRenderer(sym1))
    style0 = QgsMapLayerStyle()
    style0.readFromLayer(tmplayer)
    style1 = QgsMapLayerStyle()
    style1.readFromLayer(tmplayer)
    # init dependencies layers
    ldep = createLayerWithTwoPoints()
    dep = QgsMapLayerDependency(ldep.id())
    # init layer: set every property to a non-default value so a missing
    # property in clone() shows up as an XML diff
    layer = createLayerWithTwoPoints()
    layer.setBlendMode(QPainter.CompositionMode_Screen)
    layer.styleManager().addStyle('style0', style0)
    layer.styleManager().addStyle('style1', style1)
    layer.setName('MyName')
    layer.setShortName('MyShortName')
    layer.setMaximumScale(0.5)
    layer.setMinimumScale(1.5)
    layer.setScaleBasedVisibility(True)
    layer.setTitle('MyTitle')
    layer.setAbstract('MyAbstract')
    layer.setKeywordList('MyKeywordList')
    layer.setDataUrl('MyDataUrl')
    layer.setDataUrlFormat('MyDataUrlFormat')
    layer.setAttribution('MyAttribution')
    layer.setAttributionUrl('MyAttributionUrl')
    layer.setMetadataUrl('MyMetadataUrl')
    layer.setMetadataUrlType('MyMetadataUrlType')
    layer.setMetadataUrlFormat('MyMetadataUrlFormat')
    layer.setLegendUrl('MyLegendUrl')
    layer.setLegendUrlFormat('MyLegendUrlFormat')
    layer.setDependencies([dep])
    layer.setCrs(srs)
    layer.setCustomProperty('MyKey0', 'MyValue0')
    layer.setCustomProperty('MyKey1', 'MyValue1')
    layer.setOpacity(0.66)
    layer.setProviderEncoding('latin9')
    layer.setDisplayExpression('MyDisplayExpression')
    layer.setMapTipTemplate('MyMapTipTemplate')
    layer.setExcludeAttributesWfs(['MyExcludeAttributeWFS'])
    layer.setExcludeAttributesWms(['MyExcludeAttributeWMS'])
    layer.setFeatureBlendMode(QPainter.CompositionMode_Xor)
    sym = QgsLineSymbol()
    sym.setColor(Qt.magenta)
    layer.setRenderer(QgsSingleSymbolRenderer(sym))
    simplify = layer.simplifyMethod()
    simplify.setTolerance(33.3)
    simplify.setThreshold(0.333)
    layer.setSimplifyMethod(simplify)
    layer.setFieldAlias(0, 'MyAlias0')
    layer.setFieldAlias(1, 'MyAlias1')
    jl0 = createLayerWithTwoPoints()
    j0 = QgsVectorLayerJoinInfo()
    j0.setJoinLayer(jl0)
    jl1 = createLayerWithTwoPoints()
    j1 = QgsVectorLayerJoinInfo()
    j1.setJoinLayer(jl1)
    layer.addJoin(j0)
    layer.addJoin(j1)
    fids = layer.allFeatureIds()
    selected_fids = fids[0:3]
    layer.selectByIds(selected_fids)
    cfg = layer.attributeTableConfig()
    cfg.setSortOrder(Qt.DescendingOrder)  # by default AscendingOrder
    layer.setAttributeTableConfig(cfg)
    pal = QgsPalLayerSettings()
    text_format = QgsTextFormat()
    text_format.setSize(33)
    text_format.setColor(Qt.magenta)
    pal.setFormat(text_format)
    labeling = QgsVectorLayerSimpleLabeling(pal)
    layer.setLabeling(labeling)
    diag_renderer = QgsSingleCategoryDiagramRenderer()
    diag_renderer.setAttributeLegend(False)  # true by default
    layer.setDiagramRenderer(diag_renderer)
    diag_settings = QgsDiagramLayerSettings()
    diag_settings.setPriority(3)
    diag_settings.setZIndex(0.33)
    layer.setDiagramLayerSettings(diag_settings)
    edit_form_config = layer.editFormConfig()
    edit_form_config.setUiForm("MyUiForm")
    edit_form_config.setInitFilePath("MyInitFilePath")
    layer.setEditFormConfig(edit_form_config)
    widget_setup = QgsEditorWidgetSetup("MyWidgetSetupType", {})
    layer.setEditorWidgetSetup(0, widget_setup)
    layer.setConstraintExpression(0, "MyFieldConstraintExpression")
    layer.setFieldConstraint(0, QgsFieldConstraints.ConstraintUnique, QgsFieldConstraints.ConstraintStrengthHard)
    layer.setDefaultValueDefinition(0, QgsDefaultValue("MyDefaultValueExpression"))
    action = QgsAction(QgsAction.Unix, "MyActionDescription", "MyActionCmd")
    layer.actions().addAction(action)
    # clone layer
    clone = layer.clone()
    # generate xml from layer
    layer_doc = QDomDocument("doc")
    layer_elem = layer_doc.createElement("maplayer")
    layer.writeLayerXml(layer_elem, layer_doc, QgsReadWriteContext())
    # generate xml from clone
    clone_doc = QDomDocument("doc")
    clone_elem = clone_doc.createElement("maplayer")
    clone.writeLayerXml(clone_elem, clone_doc, QgsReadWriteContext())
    # replace id within xml of clone: the clone gets a fresh unique id, so
    # patch in the original id before comparing the documents
    clone_id_elem = clone_elem.firstChildElement("id")
    clone_id_elem_patch = clone_doc.createElement("id")
    clone_id_elem_patch_value = clone_doc.createTextNode(layer.id())
    clone_id_elem_patch.appendChild(clone_id_elem_patch_value)
    clone_elem.replaceChild(clone_id_elem_patch, clone_id_elem)
    # update doc
    clone_doc.appendChild(clone_elem)
    layer_doc.appendChild(layer_elem)
    # compare xml documents
    self.assertEqual(layer_doc.toString(), clone_doc.toString())
def testQgsVectorLayerSelectedFeatureSource(self):
    """
    test QgsVectorLayerSelectedFeatureSource

    Verifies that the source mirrors the layer's selection, intersects fid
    filters with the selection, and holds a snapshot independent of later
    selection changes (and of the layer's lifetime).
    """
    layer = QgsVectorLayer("Point?crs=epsg:3111&field=fldtxt:string&field=fldint:integer",
                           "addfeat", "memory")
    pr = layer.dataProvider()
    f1 = QgsFeature(1)
    f1.setAttributes(["test", 123])
    f1.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(100, 200)))
    f2 = QgsFeature(2)
    f2.setAttributes(["test2", 457])
    f2.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(200, 200)))
    f3 = QgsFeature(3)
    f3.setAttributes(["test2", 888])
    f3.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(300, 200)))
    f4 = QgsFeature(4)
    f4.setAttributes(["test3", -1])
    f4.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(400, 300)))
    f5 = QgsFeature(5)
    f5.setAttributes(["test4", 0])
    f5.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(0, 0)))
    self.assertTrue(pr.addFeatures([f1, f2, f3, f4, f5]))
    self.assertEqual(layer.featureCount(), 5)
    source = QgsVectorLayerSelectedFeatureSource(layer)
    self.assertEqual(source.sourceCrs().authid(), 'EPSG:3111')
    self.assertEqual(source.wkbType(), QgsWkbTypes.Point)
    self.assertEqual(source.fields(), layer.fields())
    # no selection
    self.assertEqual(source.featureCount(), 0)
    it = source.getFeatures()
    f = QgsFeature()
    self.assertFalse(it.nextFeature(f))
    # with selection
    layer.selectByIds([f1.id(), f3.id(), f5.id()])
    source = QgsVectorLayerSelectedFeatureSource(layer)
    self.assertEqual(source.featureCount(), 3)
    ids = {f.id() for f in source.getFeatures()}
    self.assertEqual(ids, {f1.id(), f3.id(), f5.id()})
    # test that requesting subset of ids intersects this request with the selected ids
    ids = {f.id() for f in source.getFeatures(QgsFeatureRequest().setFilterFids([f1.id(), f2.id(), f5.id()]))}
    self.assertEqual(ids, {f1.id(), f5.id()})
    # test that requesting id works
    ids = {f.id() for f in source.getFeatures(QgsFeatureRequest().setFilterFid(f1.id()))}
    self.assertEqual(ids, {f1.id()})
    ids = {f.id() for f in source.getFeatures(QgsFeatureRequest().setFilterFid(f5.id()))}
    self.assertEqual(ids, {f5.id()})
    # test that source has stored snapshot of selected features
    layer.selectByIds([f2.id(), f4.id()])
    self.assertEqual(source.featureCount(), 3)
    ids = {f.id() for f in source.getFeatures()}
    self.assertEqual(ids, {f1.id(), f3.id(), f5.id()})
    # test that source is not dependent on layer
    del layer
    ids = {f.id() for f in source.getFeatures()}
    self.assertEqual(ids, {f1.id(), f3.id(), f5.id()})
def testFeatureRequestWithReprojectionAndVirtualFields(self):
    """Expression ('virtual') field values must be computed on the layer's
    native geometry, even when the request reprojects the geometry."""
    layer = self.getSource()
    field = QgsField('virtual', QVariant.Double)
    # $x of each feature's point, in the layer CRS
    layer.addExpressionField('$x', field)
    virtual_values = [f['virtual'] for f in layer.getFeatures()]
    self.assertAlmostEqual(virtual_values[0], -71.123, 2)
    self.assertEqual(virtual_values[1], NULL)  # feature without geometry
    self.assertAlmostEqual(virtual_values[2], -70.332, 2)
    self.assertAlmostEqual(virtual_values[3], -68.2, 2)
    self.assertAlmostEqual(virtual_values[4], -65.32, 2)
    # repeat, with reprojection on request
    request = QgsFeatureRequest().setDestinationCrs(QgsCoordinateReferenceSystem('epsg:3785'),
                                                    QgsProject.instance().transformContext())
    features = list(layer.getFeatures(request))
    # virtual field value should not change, even though geometry has
    self.assertAlmostEqual(features[0]['virtual'], -71.123, 2)
    self.assertAlmostEqual(features[0].geometry().constGet().x(), -7917376, -5)
    self.assertEqual(features[1]['virtual'], NULL)
    self.assertFalse(features[1].hasGeometry())
    self.assertAlmostEqual(features[2]['virtual'], -70.332, 2)
    self.assertAlmostEqual(features[2].geometry().constGet().x(), -7829322, -5)
    self.assertAlmostEqual(features[3]['virtual'], -68.2, 2)
    self.assertAlmostEqual(features[3].geometry().constGet().x(), -7591989, -5)
    self.assertAlmostEqual(features[4]['virtual'], -65.32, 2)
    self.assertAlmostEqual(features[4].geometry().constGet().x(), -7271389, -5)
class TestQgsVectorLayerSourceAddedFeaturesInBuffer(unittest.TestCase, FeatureSourceTestCase):
    """Runs the generic FeatureSourceTestCase suite against a layer whose
    features live only in the uncommitted edit buffer, not in the provider."""

    @classmethod
    def getSource(cls):
        vl = QgsVectorLayer(
            'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&key=pk',
            'test', 'memory')
        assert (vl.isValid())
        f1 = QgsFeature()
        f1.setAttributes([5, -200, NULL, 'NuLl', '5'])
        f1.setGeometry(QgsGeometry.fromWkt('Point (-71.123 78.23)'))
        f2 = QgsFeature()
        f2.setAttributes([3, 300, 'Pear', 'PEaR', '3'])
        f3 = QgsFeature()
        f3.setAttributes([1, 100, 'Orange', 'oranGe', '1'])
        f3.setGeometry(QgsGeometry.fromWkt('Point (-70.332 66.33)'))
        f4 = QgsFeature()
        f4.setAttributes([2, 200, 'Apple', 'Apple', '2'])
        f4.setGeometry(QgsGeometry.fromWkt('Point (-68.2 70.8)'))
        f5 = QgsFeature()
        f5.setAttributes([4, 400, 'Honey', 'Honey', '4'])
        f5.setGeometry(QgsGeometry.fromWkt('Point (-65.32 78.3)'))
        # create a layer with features only in the added features buffer - not the provider
        vl.startEditing()
        vl.addFeatures([f1, f2, f3, f4, f5])
        return vl

    @classmethod
    def setUpClass(cls):
        """Run before all tests"""
        # Create test layer for FeatureSourceTestCase
        cls.source = cls.getSource()

    def testGetFeaturesSubsetAttributes2(self):
        """ Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
        its features as direct copies (due to implicit sharing of QgsFeature)
        """
        pass

    def testGetFeaturesNoGeometry(self):
        """ Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
        its features as direct copies (due to implicit sharing of QgsFeature)
        """
        pass

    def testOrderBy(self):
        """ Skip order by tests - edited features are not sorted in iterators.
        (Maybe they should be??)
        """
        pass

    def testMinimumValue(self):
        """ Skip min values test - due to inconsistencies in how null values are treated by providers.
        They are included here, but providers don't include them.... which is right?
        """
        pass
class TestQgsVectorLayerSourceChangedGeometriesInBuffer(unittest.TestCase, FeatureSourceTestCase):
    """Runs the generic FeatureSourceTestCase suite against a layer whose
    expected geometries exist only as uncommitted geometry edits."""

    @classmethod
    def getSource(cls):
        vl = QgsVectorLayer(
            'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&key=pk',
            'test', 'memory')
        assert (vl.isValid())
        # features are committed with wrong/missing geometries on purpose
        f1 = QgsFeature()
        f1.setAttributes([5, -200, NULL, 'NuLl', '5'])
        f2 = QgsFeature()
        f2.setAttributes([3, 300, 'Pear', 'PEaR', '3'])
        f2.setGeometry(QgsGeometry.fromWkt('Point (-70.5 65.2)'))
        f3 = QgsFeature()
        f3.setAttributes([1, 100, 'Orange', 'oranGe', '1'])
        f4 = QgsFeature()
        f4.setAttributes([2, 200, 'Apple', 'Apple', '2'])
        f5 = QgsFeature()
        f5.setAttributes([4, 400, 'Honey', 'Honey', '4'])
        vl.dataProvider().addFeatures([f1, f2, f3, f4, f5])
        # map primary-key values to the feature ids assigned by the provider
        ids = {f['pk']: f.id() for f in vl.getFeatures()}
        # modify geometries in buffer
        vl.startEditing()
        vl.changeGeometry(ids[5], QgsGeometry.fromWkt('Point (-71.123 78.23)'))
        vl.changeGeometry(ids[3], QgsGeometry())
        vl.changeGeometry(ids[1], QgsGeometry.fromWkt('Point (-70.332 66.33)'))
        vl.changeGeometry(ids[2], QgsGeometry.fromWkt('Point (-68.2 70.8)'))
        vl.changeGeometry(ids[4], QgsGeometry.fromWkt('Point (-65.32 78.3)'))
        return vl

    @classmethod
    def setUpClass(cls):
        """Run before all tests"""
        # Create test layer for FeatureSourceTestCase
        cls.source = cls.getSource()

    def testGetFeaturesSubsetAttributes2(self):
        """ Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
        its features as direct copies (due to implicit sharing of QgsFeature)
        """
        pass

    def testGetFeaturesNoGeometry(self):
        """ Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
        its features as direct copies (due to implicit sharing of QgsFeature)
        """
        pass

    def testOrderBy(self):
        """ Skip order by tests - edited features are not sorted in iterators.
        (Maybe they should be??)
        """
        pass
class TestQgsVectorLayerSourceChangedAttributesInBuffer(unittest.TestCase, FeatureSourceTestCase):
    """Runs the generic FeatureSourceTestCase suite against a layer whose
    expected attribute values exist only as uncommitted attribute edits."""

    @classmethod
    def getSource(cls):
        vl = QgsVectorLayer(
            'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&key=pk',
            'test', 'memory')
        assert (vl.isValid())
        # features are committed with placeholder attribute values on purpose
        f1 = QgsFeature()
        f1.setAttributes([5, 200, 'a', 'b', 'c'])
        f1.setGeometry(QgsGeometry.fromWkt('Point (-71.123 78.23)'))
        f2 = QgsFeature()
        f2.setAttributes([3, -200, 'd', 'e', 'f'])
        f3 = QgsFeature()
        f3.setAttributes([1, -100, 'g', 'h', 'i'])
        f3.setGeometry(QgsGeometry.fromWkt('Point (-70.332 66.33)'))
        f4 = QgsFeature()
        f4.setAttributes([2, -200, 'j', 'k', 'l'])
        f4.setGeometry(QgsGeometry.fromWkt('Point (-68.2 70.8)'))
        f5 = QgsFeature()
        f5.setAttributes([4, 400, 'm', 'n', 'o'])
        f5.setGeometry(QgsGeometry.fromWkt('Point (-65.32 78.3)'))
        vl.dataProvider().addFeatures([f1, f2, f3, f4, f5])
        # map primary-key values to the feature ids assigned by the provider
        ids = {f['pk']: f.id() for f in vl.getFeatures()}
        # modify attribute values in the buffer (fields 1-4) to the values
        # the shared FeatureSourceTestCase fixtures expect
        vl.startEditing()
        vl.changeAttributeValue(ids[5], 1, -200)
        vl.changeAttributeValue(ids[5], 2, NULL)
        vl.changeAttributeValue(ids[5], 3, 'NuLl')
        vl.changeAttributeValue(ids[5], 4, '5')
        vl.changeAttributeValue(ids[3], 1, 300)
        vl.changeAttributeValue(ids[3], 2, 'Pear')
        vl.changeAttributeValue(ids[3], 3, 'PEaR')
        vl.changeAttributeValue(ids[3], 4, '3')
        vl.changeAttributeValue(ids[1], 1, 100)
        vl.changeAttributeValue(ids[1], 2, 'Orange')
        vl.changeAttributeValue(ids[1], 3, 'oranGe')
        vl.changeAttributeValue(ids[1], 4, '1')
        vl.changeAttributeValue(ids[2], 1, 200)
        vl.changeAttributeValue(ids[2], 2, 'Apple')
        vl.changeAttributeValue(ids[2], 3, 'Apple')
        vl.changeAttributeValue(ids[2], 4, '2')
        vl.changeAttributeValue(ids[4], 1, 400)
        vl.changeAttributeValue(ids[4], 2, 'Honey')
        vl.changeAttributeValue(ids[4], 3, 'Honey')
        vl.changeAttributeValue(ids[4], 4, '4')
        return vl

    @classmethod
    def setUpClass(cls):
        """Run before all tests"""
        # Create test layer for FeatureSourceTestCase
        cls.source = cls.getSource()

    def testGetFeaturesSubsetAttributes2(self):
        """ Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
        its features as direct copies (due to implicit sharing of QgsFeature)
        """
        pass

    def testGetFeaturesNoGeometry(self):
        """ Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
        its features as direct copies (due to implicit sharing of QgsFeature)
        """
        pass

    def testOrderBy(self):
        """ Skip order by tests - edited features are not sorted in iterators.
        (Maybe they should be??)
        """
        pass

    def testUniqueValues(self):
        """ Skip unique values test - as noted in the docs this is unreliable when features are in the buffer
        """
        pass

    def testMinimumValue(self):
        """ Skip min values test - as noted in the docs this is unreliable when features are in the buffer
        """
        pass

    def testMaximumValue(self):
        """ Skip max values test - as noted in the docs this is unreliable when features are in the buffer
        """
        pass
class TestQgsVectorLayerSourceDeletedFeaturesInBuffer(unittest.TestCase, FeatureSourceTestCase):
    """Runs the generic FeatureSourceTestCase suite against a layer where the
    expected features are only reachable after deleting decoy features in the
    (uncommitted) edit buffer."""

    @classmethod
    def getSource(cls):
        vl = QgsVectorLayer(
            'Point?crs=epsg:4326&field=pk:integer&field=cnt:integer&field=name:string(0)&field=name2:string(0)&field=num_char:string&key=pk',
            'test', 'memory')
        assert (vl.isValid())
        # add a bunch of similar features to the provider
        b1 = QgsFeature()
        b1.setAttributes([5, -300, 'Apple', 'PEaR', '1'])
        b1.setGeometry(QgsGeometry.fromWkt('Point (-70.332 66.33)'))
        b2 = QgsFeature()
        b2.setAttributes([3, 100, 'Orange', 'NuLl', '2'])
        b2.setGeometry(QgsGeometry.fromWkt('Point (-71.123 78.23)'))
        b3 = QgsFeature()
        b3.setAttributes([1, -200, 'Honey', 'oranGe', '5'])
        b4 = QgsFeature()
        b4.setAttributes([2, 400, 'Pear', 'Honey', '3'])
        b4.setGeometry(QgsGeometry.fromWkt('Point (-65.32 78.3)'))
        b5 = QgsFeature()
        b5.setAttributes([4, 200, NULL, 'oranGe', '3'])
        b5.setGeometry(QgsGeometry.fromWkt('Point (-68.2 70.8)'))
        vl.dataProvider().addFeatures([b1, b2, b3, b4, b5])
        # collect the decoys' feature ids for later deletion. NOTE: this
        # previously collected the 'pk' attribute values, which only worked
        # because those happened to be a permutation of the assigned feature
        # ids; deleteFeatures() expects feature ids.
        bad_ids = [f.id() for f in vl.getFeatures()]
        # here's our good features
        f1 = QgsFeature()
        f1.setAttributes([5, -200, NULL, 'NuLl', '5'])
        f1.setGeometry(QgsGeometry.fromWkt('Point (-71.123 78.23)'))
        f2 = QgsFeature()
        f2.setAttributes([3, 300, 'Pear', 'PEaR', '3'])
        f3 = QgsFeature()
        f3.setAttributes([1, 100, 'Orange', 'oranGe', '1'])
        f3.setGeometry(QgsGeometry.fromWkt('Point (-70.332 66.33)'))
        f4 = QgsFeature()
        f4.setAttributes([2, 200, 'Apple', 'Apple', '2'])
        f4.setGeometry(QgsGeometry.fromWkt('Point (-68.2 70.8)'))
        f5 = QgsFeature()
        f5.setAttributes([4, 400, 'Honey', 'Honey', '4'])
        f5.setGeometry(QgsGeometry.fromWkt('Point (-65.32 78.3)'))
        vl.dataProvider().addFeatures([f1, f2, f3, f4, f5])
        # delete the bad features, but don't commit
        vl.startEditing()
        vl.deleteFeatures(bad_ids)
        return vl

    @classmethod
    def setUpClass(cls):
        """Run before all tests"""
        # Create test layer for FeatureSourceTestCase
        cls.source = cls.getSource()

    def testGetFeaturesSubsetAttributes2(self):
        """ Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
        its features as direct copies (due to implicit sharing of QgsFeature)
        """
        pass

    def testGetFeaturesNoGeometry(self):
        """ Override and skip this QgsFeatureSource test. We are using a memory provider, and it's actually more efficient for the memory provider to return
        its features as direct copies (due to implicit sharing of QgsFeature)
        """
        pass

    def testOrderBy(self):
        """ Skip order by tests - edited features are not sorted in iterators.
        (Maybe they should be??)
        """
        pass

    def testUniqueValues(self):
        """ Skip unique values test - as noted in the docs this is unreliable when features are in the buffer
        """
        pass

    def testMinimumValue(self):
        """ Skip min values test - as noted in the docs this is unreliable when features are in the buffer
        """
        pass

    def testMaximumValue(self):
        """ Skip max values test - as noted in the docs this is unreliable when features are in the buffer
        """
        pass
# TODO:
# - fetch rect: feat with changed geometry: 1. in rect, 2. out of rect
# - more join tests
# - import
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| gpl-2.0 |
uchuutamashi/6s096-Project | third_party/gtest/test/gtest_catch_exceptions_test.py | 2139 | 9901 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests Google Test's exception catching behavior.
This script invokes gtest_catch_exceptions_test_ and
gtest_catch_exceptions_ex_test_ (programs written with
Google Test) and verifies their output.
"""
__author__ = 'vladl@google.com (Vlad Losev)'

import os

import gtest_test_utils

# Constants.
FLAG_PREFIX = '--gtest_'
LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'
NO_CATCH_EXCEPTIONS_FLAG = FLAG_PREFIX + 'catch_exceptions=0'
FILTER_FLAG = FLAG_PREFIX + 'filter'

# Path to the gtest_catch_exceptions_ex_test_ binary, compiled with
# exceptions enabled.
EX_EXE_PATH = gtest_test_utils.GetTestExecutablePath(
    'gtest_catch_exceptions_ex_test_')

# Path to the gtest_catch_exceptions_test_ binary, compiled with
# exceptions disabled.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
    'gtest_catch_exceptions_no_ex_test_')

environ = gtest_test_utils.environ
SetEnvVar = gtest_test_utils.SetEnvVar

# Tests in this file run a Google-Test-based test program and expect it
# to terminate prematurely. Therefore they are incompatible with
# the premature-exit-file protocol by design. Unset the
# premature-exit filepath to prevent Google Test from creating
# the file.
SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)

TEST_LIST = gtest_test_utils.Subprocess(
    [EXE_PATH, LIST_TESTS_FLAG], env=environ).output

# SEH tests only appear in the child binary's test list on platforms that
# support SEH (Windows); gate the output capture and test class on that.
SUPPORTS_SEH_EXCEPTIONS = 'ThrowsSehException' in TEST_LIST

if SUPPORTS_SEH_EXCEPTIONS:
  # Capture each binary's full output once; the tests below only inspect it.
  BINARY_OUTPUT = gtest_test_utils.Subprocess([EXE_PATH], env=environ).output

  EX_BINARY_OUTPUT = gtest_test_utils.Subprocess(
      [EX_EXE_PATH], env=environ).output
# The tests.
if SUPPORTS_SEH_EXCEPTIONS:
  # pylint:disable-msg=C6302
  class CatchSehExceptionsTest(gtest_test_utils.TestCase):
    """Tests exception-catching behavior."""

    def TestSehExceptions(self, test_output):
      # Helper (not itself a test): asserts that the given program output
      # reports the SEH exception (code 0x2a) from every phase of the test
      # lifecycle - fixture ctor/dtor, SetUpTestCase/TearDownTestCase,
      # SetUp/TearDown and the test body.
      self.assert_('SEH exception with code 0x2a thrown '
                   'in the test fixture\'s constructor'
                   in test_output)
      self.assert_('SEH exception with code 0x2a thrown '
                   'in the test fixture\'s destructor'
                   in test_output)
      self.assert_('SEH exception with code 0x2a thrown in SetUpTestCase()'
                   in test_output)
      self.assert_('SEH exception with code 0x2a thrown in TearDownTestCase()'
                   in test_output)
      self.assert_('SEH exception with code 0x2a thrown in SetUp()'
                   in test_output)
      self.assert_('SEH exception with code 0x2a thrown in TearDown()'
                   in test_output)
      self.assert_('SEH exception with code 0x2a thrown in the test body'
                   in test_output)

    def testCatchesSehExceptionsWithCxxExceptionsEnabled(self):
      self.TestSehExceptions(EX_BINARY_OUTPUT)

    def testCatchesSehExceptionsWithCxxExceptionsDisabled(self):
      self.TestSehExceptions(BINARY_OUTPUT)
class CatchCxxExceptionsTest(gtest_test_utils.TestCase):
  """Tests C++ exception-catching behavior.

  Tests in this test case verify that:
  * C++ exceptions are caught and logged as C++ (not SEH) exceptions
  * Exception thrown affect the remainder of the test work flow in the
    expected manner.
  """

  def testCatchesCxxExceptionsInFixtureConstructor(self):
    self.assert_('C++ exception with description '
                 '"Standard C++ exception" thrown '
                 'in the test fixture\'s constructor'
                 in EX_BINARY_OUTPUT)
    self.assert_('unexpected' not in EX_BINARY_OUTPUT,
                 'This failure belongs in this test only if '
                 '"CxxExceptionInConstructorTest" (no quotes) '
                 'appears on the same line as words "called unexpectedly"')

  # This test is only defined when the child binary actually ran the
  # destructor test (i.e. its name appears in the captured output).
  if ('CxxExceptionInDestructorTest.ThrowsExceptionInDestructor' in
      EX_BINARY_OUTPUT):

    def testCatchesCxxExceptionsInFixtureDestructor(self):
      self.assert_('C++ exception with description '
                   '"Standard C++ exception" thrown '
                   'in the test fixture\'s destructor'
                   in EX_BINARY_OUTPUT)
      self.assert_('CxxExceptionInDestructorTest::TearDownTestCase() '
                   'called as expected.'
                   in EX_BINARY_OUTPUT)

  def testCatchesCxxExceptionsInSetUpTestCase(self):
    self.assert_('C++ exception with description "Standard C++ exception"'
                 ' thrown in SetUpTestCase()'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInConstructorTest::TearDownTestCase() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTestCaseTest constructor '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTestCaseTest destructor '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTestCaseTest::SetUp() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTestCaseTest::TearDown() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTestCaseTest test body '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)

  def testCatchesCxxExceptionsInTearDownTestCase(self):
    self.assert_('C++ exception with description "Standard C++ exception"'
                 ' thrown in TearDownTestCase()'
                 in EX_BINARY_OUTPUT)

  def testCatchesCxxExceptionsInSetUp(self):
    self.assert_('C++ exception with description "Standard C++ exception"'
                 ' thrown in SetUp()'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTest::TearDownTestCase() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTest destructor '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInSetUpTest::TearDown() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('unexpected' not in EX_BINARY_OUTPUT,
                 'This failure belongs in this test only if '
                 '"CxxExceptionInSetUpTest" (no quotes) '
                 'appears on the same line as words "called unexpectedly"')

  def testCatchesCxxExceptionsInTearDown(self):
    self.assert_('C++ exception with description "Standard C++ exception"'
                 ' thrown in TearDown()'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInTearDownTest::TearDownTestCase() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInTearDownTest destructor '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)

  def testCatchesCxxExceptionsInTestBody(self):
    self.assert_('C++ exception with description "Standard C++ exception"'
                 ' thrown in the test body'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInTestBodyTest::TearDownTestCase() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInTestBodyTest destructor '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)
    self.assert_('CxxExceptionInTestBodyTest::TearDown() '
                 'called as expected.'
                 in EX_BINARY_OUTPUT)

  def testCatchesNonStdCxxExceptions(self):
    self.assert_('Unknown C++ exception thrown in the test body'
                 in EX_BINARY_OUTPUT)

  def testUnhandledCxxExceptionsAbortTheProgram(self):
    # Filters out SEH exception tests on Windows. Unhandled SEH exceptions
    # cause tests to show pop-up windows there.
    # (Renamed from the original misspelled FITLER_OUT_SEH_TESTS_FLAG.)
    FILTER_OUT_SEH_TESTS_FLAG = FILTER_FLAG + '=-*Seh*'
    # By default, Google Test doesn't catch the exceptions.
    uncaught_exceptions_ex_binary_output = gtest_test_utils.Subprocess(
        [EX_EXE_PATH,
         NO_CATCH_EXCEPTIONS_FLAG,
         FILTER_OUT_SEH_TESTS_FLAG],
        env=environ).output

    self.assert_('Unhandled C++ exception terminating the program'
                 in uncaught_exceptions_ex_binary_output)
    self.assert_('unexpected' not in uncaught_exceptions_ex_binary_output)
# Run the tests above through gtest_test_utils' unittest entry point.
if __name__ == '__main__':
  gtest_test_utils.Main()
| unlicense |
Ooblioob/collab | core/search/migrations/0001_initial.py | 5 | 1468 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: creates the search_searchabletool table
    backing the SearchableTool model."""

    def forwards(self, orm):
        # Adding model 'SearchableTool'
        db.create_table('search_searchabletool', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
            ('link', self.gf('django.db.models.fields.CharField')(max_length=2048)),
            ('date_added', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
        ))
        db.send_create_signal('search', ['SearchableTool'])

    def backwards(self, orm):
        # Deleting model 'SearchableTool'
        db.delete_table('search_searchabletool')

    # Frozen ORM description used by South when applying this migration;
    # must mirror the model definition at the time the migration was written.
    models = {
        'core.search.searchabletool': {
            'Meta': {'object_name': 'SearchableTool'},
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'link': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
        }
    }

    complete_apps = ['search']
| cc0-1.0 |
vrkansagara/YouCompleteMe | python/ycm/tests/vimsupport_test.py | 17 | 21475 | #!/usr/bin/env python
#
# Copyright (C) 2015 YouCompleteMe contributors
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
from ycm import vimsupport
from nose.tools import eq_
def ReplaceChunk_SingleLine_Repl_1_test():
  """Chained single-line replacements: each ReplaceChunk call returns the
  (line, char) deltas it introduced, and subsequent calls receive the
  accumulated deltas so their positions can still refer to the original
  text's coordinates."""
  # Replace with longer range
  # 12345678901234567
  result_buffer = [ "This is a string" ]
  start, end = _BuildLocations( 1, 1, 1, 5 )
  ( line_offset, char_offset ) = vimsupport.ReplaceChunk( start,
                                                          end,
                                                          'How long',
                                                          0,
                                                          0,
                                                          result_buffer )

  eq_( [ "How long is a string" ], result_buffer )
  eq_( line_offset, 0 )
  eq_( char_offset, 4 )  # 'How long' is 4 chars longer than 'This'

  # and replace again, using delta
  start, end = _BuildLocations( 1, 10, 1, 11 )
  ( new_line_offset, new_char_offset ) = vimsupport.ReplaceChunk(
      start,
      end,
      ' piece of ',
      line_offset,
      char_offset,
      result_buffer )

  line_offset += new_line_offset
  char_offset += new_char_offset

  eq_( [ 'How long is a piece of string' ], result_buffer )
  eq_( new_line_offset, 0 )
  eq_( new_char_offset, 9 )
  eq_( line_offset, 0 )
  eq_( char_offset, 13 )  # running total of both replacements' char deltas

  # and once more, for luck
  start, end = _BuildLocations( 1, 11, 1, 17 )
  ( new_line_offset, new_char_offset ) = vimsupport.ReplaceChunk(
      start,
      end,
      'pie',
      line_offset,
      char_offset,
      result_buffer )

  line_offset += new_line_offset
  char_offset += new_char_offset

  eq_( ['How long is a piece of pie' ], result_buffer )
  eq_( new_line_offset, 0 )
  eq_( new_char_offset, -3 )
  eq_( line_offset, 0 )
  eq_( char_offset, 10 )
def _ReplaceAndCheck( contents, location, replacement,
                      expected_contents, expected_offsets ):
  """Apply one ReplaceChunk with zero starting offsets and verify both the
  modified buffer and the returned ( line, char ) offsets."""
  start, end = _BuildLocations( *location )
  offsets = vimsupport.ReplaceChunk( start, end, replacement, 0, 0, contents )
  eq_( expected_contents, contents )
  eq_( tuple( offsets ), expected_offsets )


def ReplaceChunk_SingleLine_Repl_2_test():
  # Replace with a shorter string than the replaced range.
  _ReplaceAndCheck( [ "This is a string" ], ( 1, 11, 1, 17 ), 'test',
                    [ "This is a test" ], ( 0, -2 ) )


def ReplaceChunk_SingleLine_Repl_3_test():
  # Replace with an equal-length string.
  _ReplaceAndCheck( [ "This is a string" ], ( 1, 6, 1, 8 ), 'be',
                    [ "This be a string" ], ( 0, 0 ) )


def ReplaceChunk_SingleLine_Add_1_test():
  # Insert at the start of the line.
  _ReplaceAndCheck( [ "is a string" ], ( 1, 1, 1, 1 ), 'This ',
                    [ "This is a string" ], ( 0, 5 ) )


def ReplaceChunk_SingleLine_Add_2_test():
  # Insert at the end of the line.
  _ReplaceAndCheck( [ "This is a " ], ( 1, 11, 1, 11 ), 'string',
                    [ "This is a string" ], ( 0, 6 ) )


def ReplaceChunk_SingleLine_Add_3_test():
  # Insert in the middle of the line.
  _ReplaceAndCheck( [ "This is a string" ], ( 1, 8, 1, 8 ), ' not',
                    [ "This is not a string" ], ( 0, 4 ) )


def ReplaceChunk_SingleLine_Del_1_test():
  # Delete from the start of the line.
  _ReplaceAndCheck( [ "This is a string" ], ( 1, 1, 1, 6 ), '',
                    [ "is a string" ], ( 0, -5 ) )


def ReplaceChunk_SingleLine_Del_2_test():
  # Delete from the end of the line.
  _ReplaceAndCheck( [ "This is a string" ], ( 1, 10, 1, 18 ), '',
                    [ "This is a" ], ( 0, -8 ) )


def ReplaceChunk_SingleLine_Del_3_test():
  # Delete from the middle of the line.
  _ReplaceAndCheck( [ "This is not a string" ], ( 1, 9, 1, 13 ), '',
                    [ "This is a string" ], ( 0, -4 ) )


def ReplaceChunk_RemoveSingleLine_test():
  # Removing a whole line yields a negative line offset.
  _ReplaceAndCheck( [ "aAa", "aBa", "aCa" ], ( 2, 1, 3, 1 ), '',
                    [ "aAa", "aCa" ], ( -1, 0 ) )
eq_( char_offset, 0 )
def ReplaceChunk_SingleToMultipleLines_test():
  # Insert a newline-containing string, then edit the now-shifted line using
  # the offsets returned by the first call.
  buf = [ "aAa",
          "aBa",
          "aCa" ]
  start, end = _BuildLocations( 2, 2, 2, 2 )
  line_offset, char_offset = vimsupport.ReplaceChunk( start, end, 'Eb\nbF',
                                                      0, 0, buf )
  eq_( [ "aAa",
         "aEb",
         "bFBa",
         "aCa" ], buf )
  eq_( ( line_offset, char_offset ), ( 1, 1 ) )

  # Now make another change to the "2nd" line.
  start, end = _BuildLocations( 2, 3, 2, 4 )
  delta_line, delta_char = vimsupport.ReplaceChunk( start, end, 'cccc',
                                                    line_offset, char_offset,
                                                    buf )
  line_offset += delta_line
  char_offset += delta_char
  eq_( [ "aAa", "aEb", "bFBcccc", "aCa" ], buf )
  eq_( ( line_offset, char_offset ), ( 1, 4 ) )


def ReplaceChunk_SingleToMultipleLines2_test():
  # Zero-width range replaced with three lines; last line has no suffix.
  buf = [ "aAa", "aBa", "aCa" ]
  start, end = _BuildLocations( 2, 2, 2, 2 )
  offsets = vimsupport.ReplaceChunk( start, end, 'Eb\nbFb\nG', 0, 0, buf )
  eq_( [ "aAa", "aEb", "bFb", "GBa", "aCa" ], buf )
  eq_( tuple( offsets ), ( 2, 0 ) )


def ReplaceChunk_SingleToMultipleLines3_test():
  # As above, but the final replacement line is longer.
  buf = [ "aAa", "aBa", "aCa" ]
  start, end = _BuildLocations( 2, 2, 2, 2 )
  offsets = vimsupport.ReplaceChunk( start, end, 'Eb\nbFb\nbGb', 0, 0, buf )
  eq_( [ "aAa", "aEb", "bFb", "bGbBa", "aCa" ], buf )
  eq_( tuple( offsets ), ( 2, 2 ) )


def ReplaceChunk_SingleToMultipleLinesReplace_test():
  # A non-empty range replaced with a multi-line string.
  buf = [ "aAa", "aBa", "aCa" ]
  start, end = _BuildLocations( 1, 2, 1, 4 )
  offsets = vimsupport.ReplaceChunk( start, end, 'Eb\nbFb\nbGb', 0, 0, buf )
  eq_( [ "aEb", "bFb", "bGb", "aBa", "aCa" ], buf )
  eq_( tuple( offsets ), ( 2, 0 ) )
def ReplaceChunk_SingleToMultipleLinesReplace_2_test():
  # Replace a range with a multi-line string, then apply a follow-up edit
  # using the offsets returned by the first call.
  buf = [ "aAa",
          "aBa",
          "aCa" ]
  start, end = _BuildLocations( 1, 2, 1, 4 )
  line_offset, char_offset = vimsupport.ReplaceChunk( start, end,
                                                      'Eb\nbFb\nbGb',
                                                      0, 0, buf )
  eq_( [ "aEb",
         "bFb",
         "bGb",
         "aBa",
         "aCa" ], buf )
  eq_( ( line_offset, char_offset ), ( 2, 0 ) )

  # A subsequent insert at the end of (original) line 1 honours the deltas.
  start, end = _BuildLocations( 1, 4, 1, 4 )
  delta_line, delta_char = vimsupport.ReplaceChunk( start, end, 'cccc',
                                                    line_offset, char_offset,
                                                    buf )
  line_offset += delta_line
  char_offset += delta_char
  eq_( [ "aEb",
         "bFb",
         "bGbcccc",
         "aBa",
         "aCa" ], buf )
  eq_( ( line_offset, char_offset ), ( 2, 4 ) )


def ReplaceChunk_MultipleLinesToSingleLine_test():
  # Collapse two lines into one, then chain two further edits, accumulating
  # the returned offsets each time.
  buf = [ "aAa", "aBa", "aCaaaa" ]
  start, end = _BuildLocations( 2, 2, 3, 2 )
  line_offset, char_offset = vimsupport.ReplaceChunk( start, end, 'E',
                                                      0, 0, buf )
  eq_( [ "aAa", "aECaaaa" ], buf )
  eq_( ( line_offset, char_offset ), ( -1, 1 ) )

  # Make another modification, applying the accumulated offsets.
  start, end = _BuildLocations( 3, 3, 3, 4 )
  delta_line, delta_char = vimsupport.ReplaceChunk( start, end, 'cccc',
                                                    line_offset, char_offset,
                                                    buf )
  line_offset += delta_line
  char_offset += delta_char
  eq_( [ "aAa", "aECccccaaa" ], buf )
  eq_( ( line_offset, char_offset ), ( -1, 4 ) )

  # And another, for luck: this one introduces a new line.
  start, end = _BuildLocations( 3, 4, 3, 5 )
  delta_line, delta_char = vimsupport.ReplaceChunk( start, end, 'dd\ndd',
                                                    line_offset, char_offset,
                                                    buf )
  line_offset += delta_line
  char_offset += delta_char
  eq_( [ "aAa", "aECccccdd", "ddaa" ], buf )
  eq_( ( line_offset, char_offset ), ( 0, -2 ) )
def _ReplaceWithOffsetsAndCheck( contents, location, replacement, deltas,
                                 expected_contents, expected_offsets ):
  """Apply one ReplaceChunk with the supplied starting ( line, char ) deltas
  and verify both the modified buffer and the returned offsets."""
  start, end = _BuildLocations( *location )
  offsets = vimsupport.ReplaceChunk( start, end, replacement,
                                     deltas[ 0 ], deltas[ 1 ], contents )
  eq_( expected_contents, contents )
  eq_( tuple( offsets ), expected_offsets )


def ReplaceChunk_MultipleLinesToSameMultipleLines_test():
  _ReplaceWithOffsetsAndCheck( [ "aAa", "aBa", "aCa", "aDe" ],
                               ( 2, 2, 3, 2 ), 'Eb\nbF', ( 0, 0 ),
                               [ "aAa", "aEb", "bFCa", "aDe" ], ( 0, 1 ) )


def ReplaceChunk_MultipleLinesToMoreMultipleLines_test():
  _ReplaceWithOffsetsAndCheck( [ "aAa", "aBa", "aCa", "aDe" ],
                               ( 2, 2, 3, 2 ), 'Eb\nbFb\nbG', ( 0, 0 ),
                               [ "aAa", "aEb", "bFb", "bGCa", "aDe" ],
                               ( 1, 1 ) )


def ReplaceChunk_MultipleLinesToLessMultipleLines_test():
  _ReplaceWithOffsetsAndCheck( [ "aAa", "aBa", "aCa", "aDe" ],
                               ( 1, 2, 3, 2 ), 'Eb\nbF', ( 0, 0 ),
                               [ "aEb", "bFCa", "aDe" ], ( -1, 1 ) )


def ReplaceChunk_MultipleLinesToEvenLessMultipleLines_test():
  _ReplaceWithOffsetsAndCheck( [ "aAa", "aBa", "aCa", "aDe" ],
                               ( 1, 2, 4, 2 ), 'Eb\nbF', ( 0, 0 ),
                               [ "aEb", "bFDe" ], ( -2, 1 ) )


def ReplaceChunk_SpanBufferEdge_test():
  _ReplaceWithOffsetsAndCheck( [ "aAa", "aBa", "aCa" ],
                               ( 1, 1, 1, 3 ), 'bDb', ( 0, 0 ),
                               [ "bDba", "aBa", "aCa" ], ( 0, 1 ) )


def ReplaceChunk_DeleteTextInLine_test():
  _ReplaceWithOffsetsAndCheck( [ "aAa", "aBa", "aCa" ],
                               ( 2, 2, 2, 3 ), '', ( 0, 0 ),
                               [ "aAa", "aa", "aCa" ], ( 0, -1 ) )


def ReplaceChunk_AddTextInLine_test():
  _ReplaceWithOffsetsAndCheck( [ "aAa", "aBa", "aCa" ],
                               ( 2, 2, 2, 2 ), 'bDb', ( 0, 0 ),
                               [ "aAa", "abDbBa", "aCa" ], ( 0, 3 ) )


def ReplaceChunk_ReplaceTextInLine_test():
  _ReplaceWithOffsetsAndCheck( [ "aAa", "aBa", "aCa" ],
                               ( 2, 2, 2, 3 ), 'bDb', ( 0, 0 ),
                               [ "aAa", "abDba", "aCa" ], ( 0, 2 ) )


def ReplaceChunk_SingleLineOffsetWorks_test():
  # Non-zero starting offsets shift where the chunk is applied.
  _ReplaceWithOffsetsAndCheck( [ "aAa", "aBa", "aCa" ],
                               ( 1, 1, 1, 2 ), 'bDb', ( 1, 1 ),
                               [ "aAa", "abDba", "aCa" ], ( 0, 2 ) )


def ReplaceChunk_SingleLineToMultipleLinesOffsetWorks_test():
  _ReplaceWithOffsetsAndCheck( [ "aAa", "aBa", "aCa" ],
                               ( 1, 1, 1, 2 ), 'Db\nE', ( 1, 1 ),
                               [ "aAa", "aDb", "Ea", "aCa" ], ( 1, -1 ) )


def ReplaceChunk_MultipleLinesToSingleLineOffsetWorks_test():
  _ReplaceWithOffsetsAndCheck( [ "aAa", "aBa", "aCa" ],
                               ( 1, 1, 2, 2 ), 'bDb', ( 1, 1 ),
                               [ "aAa", "abDbCa" ], ( -1, 3 ) )


def ReplaceChunk_MultipleLineOffsetWorks_test():
  # Negative starting line offset combined with a positive char offset.
  _ReplaceWithOffsetsAndCheck( [ "aAa", "aBa", "aCa" ],
                               ( 3, 1, 4, 3 ), 'bDb\nbEb\nbFb', ( -1, 1 ),
                               [ "aAa", "abDb", "bEb", "bFba" ], ( 1, 1 ) )
def _BuildLocations( start_line, start_column, end_line, end_column ):
return {
'line_num' : start_line,
'column_num': start_column,
}, {
'line_num' : end_line,
'column_num': end_column,
}
| gpl-3.0 |
Nitaco/ansible | lib/ansible/modules/web_infrastructure/acme_account.py | 5 | 10667 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: acme_account
author: "Felix Fontein (@felixfontein)"
version_added: "2.6"
short_description: Create, modify or delete accounts with Let's Encrypt
description:
- "Allows to create, modify or delete accounts with Let's Encrypt.
Let's Encrypt is a free, automated, and open certificate authority
(CA), run for the public's benefit. For details see U(https://letsencrypt.org)."
- "This module only works with the ACME v2 protocol."
extends_documentation_fragment:
- acme
options:
state:
description:
- "The state of the account, to be identified by its account key."
- "If the state is C(absent), the account will either not exist or be
deactivated."
- "If the state is C(changed_key), the account must exist. The account
key will be changed; no other information will be touched."
required: true
choices:
- present
- absent
- changed_key
allow_creation:
description:
- "Whether account creation is allowed (when state is C(present))."
default: yes
type: bool
contact:
description:
- "A list of contact URLs."
- "Email addresses must be prefixed with C(mailto:)."
- "See https://tools.ietf.org/html/draft-ietf-acme-acme-10#section-7.1.2
for what is allowed."
- "Must be specified when state is C(present). Will be ignored
if state is C(absent) or C(changed_key)."
default: []
terms_agreed:
description:
- "Boolean indicating whether you agree to the terms of service document."
- "ACME servers can require this to be true."
default: no
type: bool
new_account_key_src:
description:
- "Path to a file containing the Let's Encrypt account RSA or Elliptic Curve
key to change to."
- "Same restrictions apply as to C(account_key_src)."
- "Mutually exclusive with C(new_account_key_content)."
- "Required if C(new_account_key_content) is not used and state is C(changed_key)."
new_account_key_content:
description:
- "Content of the Let's Encrypt account RSA or Elliptic Curve key to change to."
- "Same restrictions apply as to C(account_key_content)."
- "Mutually exclusive with C(new_account_key_src)."
- "Required if C(new_account_key_src) is not used and state is C(changed_key)."
'''
EXAMPLES = '''
- name: Make sure account exists and has given contacts. We agree to TOS.
acme_account:
account_key_src: /etc/pki/cert/private/account.key
state: present
terms_agreed: yes
contact:
- mailto:me@example.com
- mailto:myself@example.org
- name: Make sure account has given email address. Don't create account if it doesn't exist
acme_account:
account_key_src: /etc/pki/cert/private/account.key
state: present
allow_creation: no
contact:
- mailto:me@example.com
- name: Change account's key to the one stored in the variable new_account_key
acme_account:
account_key_src: /etc/pki/cert/private/account.key
new_account_key_content: '{{ new_account_key }}'
state: changed_key
- name: Delete account (we have to use the new key)
acme_account:
account_key_content: '{{ new_account_key }}'
state: absent
'''
RETURN = '''
account_uri:
description: ACME account URI, or None if account does not exist.
returned: always
type: string
'''
from ansible.module_utils.acme import (
ModuleFailException, ACMEAccount
)
import os
import tempfile
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
def main():
    """Ansible entry point: create, deactivate or roll over an ACME account.

    Dispatches on the ``state`` parameter:
      - absent: deactivate the account identified by the account key
      - present: create/update the account (contacts, terms of service)
      - changed_key: perform an account key rollover to a new key
    All failure paths funnel through ModuleFailException -> module.fail_json.
    """
    module = AnsibleModule(
        argument_spec=dict(
            account_key_src=dict(type='path', aliases=['account_key']),
            account_key_content=dict(type='str', no_log=True),
            acme_directory=dict(required=False, default='https://acme-staging.api.letsencrypt.org/directory', type='str'),
            acme_version=dict(required=False, default=1, choices=[1, 2], type='int'),
            validate_certs=dict(required=False, default=True, type='bool'),
            terms_agreed=dict(required=False, default=False, type='bool'),
            state=dict(required=True, choices=['absent', 'present', 'changed_key'], type='str'),
            allow_creation=dict(required=False, default=True, type='bool'),
            contact=dict(required=False, type='list', default=[]),
            new_account_key_src=dict(type='path'),
            new_account_key_content=dict(type='str', no_log=True),
        ),
        required_one_of=(
            ['account_key_src', 'account_key_content'],
        ),
        mutually_exclusive=(
            ['account_key_src', 'account_key_content'],
            ['new_account_key_src', 'new_account_key_content'],
        ),
        required_if=(
            # Make sure that for state == changed_key, one of
            # new_account_key_src and new_account_key_content are specified
            ['state', 'changed_key', ['new_account_key_src', 'new_account_key_content'], True],
        ),
        supports_check_mode=True,
    )
    if not module.params.get('validate_certs'):
        module.warn(warning='Disabling certificate validation for communications with ACME endpoint. ' +
                            'This should only be done for testing against a local ACME server for ' +
                            'development purposes, but *never* for production purposes.')
    # NOTE: the default acme_version is 1 (from the shared docs fragment), but
    # this module only implements the v2 protocol, so v1 is rejected here.
    if module.params.get('acme_version') < 2:
        module.fail_json(msg='The acme_account module requires the ACME v2 protocol!')
    try:
        account = ACMEAccount(module)
        state = module.params.get('state')
        if state == 'absent':
            # Look the account up without creating or modifying it.
            changed = account.init_account(
                [],
                allow_creation=False,
                update_contact=False,
            )
            if changed:
                raise AssertionError('Unwanted account change')
            if account.uri is not None:
                # Account does exist
                account_data = account.get_account_data()
                if account_data is not None:
                    # Account is not yet deactivated
                    if not module.check_mode:
                        # Deactivate it
                        payload = {
                            'status': 'deactivated'
                        }
                        result, info = account.send_signed_request(account.uri, payload)
                        if info['status'] != 200:
                            raise ModuleFailException('Error deactivating account: {0} {1}'.format(info['status'], result))
                    # In check mode we report the deactivation that would happen.
                    module.exit_json(changed=True, account_uri=account.uri)
            module.exit_json(changed=False, account_uri=account.uri)
        elif state == 'present':
            allow_creation = module.params.get('allow_creation')
            contact = module.params.get('contact')
            terms_agreed = module.params.get('terms_agreed')
            changed = account.init_account(
                contact,
                terms_agreed=terms_agreed,
                allow_creation=allow_creation,
            )
            if account.uri is None:
                raise ModuleFailException(msg='Account does not exist or is deactivated.')
            module.exit_json(changed=changed, account_uri=account.uri)
        elif state == 'changed_key':
            # Get hold of new account key
            new_key = module.params.get('new_account_key_src')
            if new_key is None:
                # Key was passed inline; persist it to a temp file so the
                # crypto backend can read it from disk.
                fd, tmpsrc = tempfile.mkstemp()
                module.add_cleanup_file(tmpsrc)  # Ansible will delete the file on exit
                f = os.fdopen(fd, 'wb')
                try:
                    f.write(module.params.get('new_account_key_content').encode('utf-8'))
                    new_key = tmpsrc
                except Exception as err:
                    try:
                        f.close()
                    except Exception as e:
                        # Best-effort close; the original error is reported below.
                        pass
                    raise ModuleFailException("failed to create temporary content file: %s" % to_native(err), exception=traceback.format_exc())
                f.close()
            # Parse new account key
            error, new_key_data = account.parse_account_key(new_key)
            if error:
                raise ModuleFailException("error while parsing account key: %s" % error)
            # Verify that the account exists and has not been deactivated
            changed = account.init_account(
                [],
                allow_creation=False,
                update_contact=False,
            )
            if changed:
                raise AssertionError('Unwanted account change')
            if account.uri is None or account.get_account_data() is None:
                raise ModuleFailException(msg='Account does not exist or is deactivated.')
            # Now we can start the account key rollover
            if not module.check_mode:
                # Compose inner signed message
                # https://tools.ietf.org/html/draft-ietf-acme-acme-12#section-7.3.6
                url = account.directory['keyChange']
                protected = {
                    "alg": new_key_data['alg'],
                    "jwk": new_key_data['jwk'],
                    "url": url,
                }
                payload = {
                    "account": account.uri,
                    "newKey": new_key_data['jwk'],  # specified in draft 12
                    "oldKey": account.jwk,  # discussed in https://github.com/ietf-wg-acme/acme/pull/425,
                                            # might be required in draft 13
                }
                # The inner message is signed with the *new* key, the outer
                # request (in send_signed_request) with the current one.
                data = account.sign_request(protected, payload, new_key_data, new_key)
                # Send request and verify result
                result, info = account.send_signed_request(url, data)
                if info['status'] != 200:
                    raise ModuleFailException('Error account key rollover: {0} {1}'.format(info['status'], result))
            module.exit_json(changed=True, account_uri=account.uri)
    except ModuleFailException as e:
        e.do_fail(module)
if __name__ == '__main__':
main()
| gpl-3.0 |
ricmoo/pycoind | pycoind/util/pyaes/__init__.py | 1 | 2000 | # The MIT License (MIT)
#
# Copyright (c) 2014 Richard Moore
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# This is a pure-Python implementation of the AES algorithm and AES common
# modes of operation.
# See: https://en.wikipedia.org/wiki/Advanced_Encryption_Standard
# See: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation
# Supported key sizes:
# 128-bit
# 192-bit
# 256-bit
# Supported modes of operation:
# ECB - Electronic Codebook
# CBC - Cipher-Block Chaining
# CFB - Cipher Feedback
# OFB - Output Feedback
# CTR - Counter
# See the README.md for API details and general information.
# Also useful, PyCrypto, a crypto library implemented in C with Python bindings:
# https://www.dlitz.net/software/pycrypto/
VERSION = [1, 1, 0]
from .aes import AES, AESModeOfOperationCTR, AESModeOfOperationCBC, AESModeOfOperationCFB, AESModeOfOperationECB, AESModeOfOperationOFB, AESModesOfOperation, Counter
from .blockfeeder import Decrypter, Encrypter
| mit |
pipermerriam/flex | tests/core/test_request_response_objects.py | 1 | 1186 | import json
from tests.factories import (
RequestFactory,
ResponseFactory,
)
def test_path_property():
    # The request exposes the URL's path component.
    req = RequestFactory(url='http://www.example.com/blog/25')
    assert '/blog/25' == req.path


def test_query_property():
    # The raw query string is available unchanged.
    req = RequestFactory(url='http://www.example.com/api/?token=1234&secret=abcd')
    assert 'token=1234&secret=abcd' == req.query


def test_query_data_for_singular_values():
    # Even single-valued parameters are parsed into lists.
    req = RequestFactory(url='http://www.example.com/api/?token=1234&secret=abcd')
    expected = {'token': ['1234'], 'secret': ['abcd']}
    assert expected == req.query_data


def test_query_data_for_multi_value_keys():
    # Repeated keys collect all of their values.
    req = RequestFactory(
        url='http://www.example.com/api/?token=1234&token=5678&secret=abcd',
    )
    expected = {'token': ['1234', '5678'], 'secret': ['abcd']}
    assert expected == req.query_data


def test_response_factory_propogates_url_to_request():
    # The response factory pushes its URL down to the generated request.
    resp = ResponseFactory(url='http://www.example.com/should-propogate-up/')
    assert resp.url == resp.request.url
def test_response_data_as_json():
expected = {'foo': '1234'}
response = ResponseFactory(content=json.dumps(expected))
assert response.data == expected
| mit |
cytec/SickRage | sickbeard/metadata/helpers.py | 8 | 1427 | # coding=utf-8
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from sickbeard import helpers
from sickbeard import logger
meta_session = helpers.make_session()
def getShowImage(url, imgNum=None):
    """Download a show image, optionally requesting a specific fanart number.

    :param url: image URL; ``None`` short-circuits to ``None``. When imgNum is
        given, the text after the first '-' is replaced with "-<imgNum>.jpg".
    :param imgNum: optional fanart index to fetch instead of the default image
    :return: raw image bytes, or ``None`` if no URL was given or the fetch failed
    """
    if url is None:
        return None

    # if they provided a fanart number try to use it instead
    if imgNum is not None:
        tempURL = url.split('-')[0] + "-" + str(imgNum) + ".jpg"
    else:
        tempURL = url

    logger.log(u"Fetching image from " + tempURL, logger.DEBUG)

    image_data = helpers.getURL(tempURL, session=meta_session, returns='content')
    if image_data is None:
        logger.log(u"There was an error trying to retrieve the image, aborting", logger.WARNING)
        # Fix: was a bare `return`; made the failure path explicitly return
        # None for consistency with the url-is-None early exit above.
        return None

    return image_data
| gpl-3.0 |
Huskerboy/startbootstrap-freelancer | freelancer_env/Lib/importlib/_bootstrap_external.py | 14 | 53530 | """Core implementation of path-based import.
This module is NOT meant to be directly imported! It has been designed such
that it can be bootstrapped into Python as the implementation of import. As
such it requires the injection of specific modules and attributes in order to
work. One should use importlib as the public-facing version of this module.
"""
#
# IMPORTANT: Whenever making changes to this module, be sure to run
# a top-level make in order to get the frozen version of the module
# updated. Not doing so will result in the Makefile to fail for
# all others who don't have a ./python around to freeze the module
# in the early stages of compilation.
#
# See importlib._setup() for what is injected into the global namespace.
# When editing this code be aware that code executed at import time CANNOT
# reference any injected objects! This includes not only global code but also
# anything specified at the class level.
# Bootstrap-related code ######################################################
_CASE_INSENSITIVE_PLATFORMS = 'win', 'cygwin', 'darwin'
def _make_relax_case():
    # Select the check once, at bootstrap time, instead of testing
    # sys.platform on every call. On case-insensitive platforms the
    # PYTHONCASEOK environment variable controls the behaviour.
    if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):
        def _relax_case():
            """True if filenames must be checked case-insensitively."""
            return b'PYTHONCASEOK' in _os.environ
        return _relax_case

    def _relax_case():
        """True if filenames must be checked case-insensitively."""
        return False
    return _relax_case
def _w_long(x):
"""Convert a 32-bit integer to little-endian."""
return (int(x) & 0xFFFFFFFF).to_bytes(4, 'little')
def _r_long(int_bytes):
"""Convert 4 bytes in little-endian to an integer."""
return int.from_bytes(int_bytes, 'little')
def _path_join(*path_parts):
    """Replacement for os.path.join()."""
    # Strip trailing separators from each non-empty part before joining.
    trimmed = [part.rstrip(path_separators) for part in path_parts if part]
    return path_sep.join(trimmed)


def _path_split(path):
    """Replacement for os.path.split()."""
    if len(path_separators) == 1:
        # Fast path: a single known separator lets us use rpartition.
        head, _, rest = path.rpartition(path_sep)
        return head, rest
    # Otherwise scan backwards for the last character that is any separator.
    for ch in reversed(path):
        if ch in path_separators:
            head, rest = path.rsplit(ch, maxsplit=1)
            return head, rest
    return '', path
def _path_stat(path):
    """Stat the path.

    Made a separate function to make it easier to override in experiments
    (e.g. cache stat results).

    """
    return _os.stat(path)


def _path_is_mode_type(path, mode):
    """Test whether the path is the specified mode type."""
    try:
        stat_result = _path_stat(path)
    except OSError:
        # Missing or inaccessible paths are simply "not that type".
        return False
    file_type = stat_result.st_mode & 0o170000
    return file_type == mode


def _path_isfile(path):
    """Replacement for os.path.isfile."""
    return _path_is_mode_type(path, 0o100000)


def _path_isdir(path):
    """Replacement for os.path.isdir."""
    # An empty path means the current working directory.
    return _path_is_mode_type(path or _os.getcwd(), 0o040000)
def _write_atomic(path, data, mode=0o666):
    """Best-effort function to write data to a path atomically.
    Be prepared to handle a FileExistsError if concurrent writing of the
    temporary file is attempted."""
    # id() is used to generate a pseudo-random filename.
    path_tmp = '{}.{}'.format(path, id(path))
    # O_EXCL makes the open raise FileExistsError if another writer has
    # already created the same temporary file; mode is clamped to the
    # permission bits only.
    fd = _os.open(path_tmp,
                  _os.O_EXCL | _os.O_CREAT | _os.O_WRONLY, mode & 0o666)
    try:
        # We first write data to a temporary file, and then use os.replace() to
        # perform an atomic rename.
        with _io.FileIO(fd, 'wb') as file:
            file.write(data)
        _os.replace(path_tmp, path)
    except OSError:
        # Clean up the temporary file on any failure (ignoring errors from
        # the unlink itself), then re-raise the original error.
        try:
            _os.unlink(path_tmp)
        except OSError:
            pass
        raise
_code_type = type(_write_atomic.__code__)
# Finder/loader utility code ###############################################
# Magic word to reject .pyc files generated by other Python versions.
# It should change for each incompatible change to the bytecode.
#
# The value of CR and LF is incorporated so if you ever read or write
# a .pyc file in text mode the magic number will be wrong; also, the
# Apple MPW compiler swaps their values, botching string constants.
#
# The magic numbers must be spaced apart at least 2 values, as the
# -U interpeter flag will cause MAGIC+1 being used. They have been
# odd numbers for some time now.
#
# There were a variety of old schemes for setting the magic number.
# The current working scheme is to increment the previous value by
# 10.
#
# Starting with the adoption of PEP 3147 in Python 3.2, every bump in magic
# number also includes a new "magic tag", i.e. a human readable string used
# to represent the magic number in __pycache__ directories. When you change
# the magic number, you must also set a new unique magic tag. Generally this
# can be named after the Python major version of the magic number bump, but
# it can really be anything, as long as it's different than anything else
# that's come before. The tags are included in the following table, starting
# with Python 3.2a0.
#
# Known values:
# Python 1.5: 20121
# Python 1.5.1: 20121
# Python 1.5.2: 20121
# Python 1.6: 50428
# Python 2.0: 50823
# Python 2.0.1: 50823
# Python 2.1: 60202
# Python 2.1.1: 60202
# Python 2.1.2: 60202
# Python 2.2: 60717
# Python 2.3a0: 62011
# Python 2.3a0: 62021
# Python 2.3a0: 62011 (!)
# Python 2.4a0: 62041
# Python 2.4a3: 62051
# Python 2.4b1: 62061
# Python 2.5a0: 62071
# Python 2.5a0: 62081 (ast-branch)
# Python 2.5a0: 62091 (with)
# Python 2.5a0: 62092 (changed WITH_CLEANUP opcode)
# Python 2.5b3: 62101 (fix wrong code: for x, in ...)
# Python 2.5b3: 62111 (fix wrong code: x += yield)
# Python 2.5c1: 62121 (fix wrong lnotab with for loops and
# storing constants that should have been removed)
# Python 2.5c2: 62131 (fix wrong code: for x, in ... in listcomp/genexp)
# Python 2.6a0: 62151 (peephole optimizations and STORE_MAP opcode)
# Python 2.6a1: 62161 (WITH_CLEANUP optimization)
# Python 2.7a0: 62171 (optimize list comprehensions/change LIST_APPEND)
# Python 2.7a0: 62181 (optimize conditional branches:
# introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE)
# Python 2.7a0 62191 (introduce SETUP_WITH)
# Python 2.7a0 62201 (introduce BUILD_SET)
# Python 2.7a0 62211 (introduce MAP_ADD and SET_ADD)
# Python 3000: 3000
# 3010 (removed UNARY_CONVERT)
# 3020 (added BUILD_SET)
# 3030 (added keyword-only parameters)
# 3040 (added signature annotations)
# 3050 (print becomes a function)
# 3060 (PEP 3115 metaclass syntax)
# 3061 (string literals become unicode)
# 3071 (PEP 3109 raise changes)
# 3081 (PEP 3137 make __file__ and __name__ unicode)
# 3091 (kill str8 interning)
# 3101 (merge from 2.6a0, see 62151)
# 3103 (__file__ points to source file)
# Python 3.0a4: 3111 (WITH_CLEANUP optimization).
# Python 3.0a5: 3131 (lexical exception stacking, including POP_EXCEPT)
# Python 3.1a0: 3141 (optimize list, set and dict comprehensions:
# change LIST_APPEND and SET_ADD, add MAP_ADD)
# Python 3.1a0: 3151 (optimize conditional branches:
# introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE)
# Python 3.2a0: 3160 (add SETUP_WITH)
# tag: cpython-32
# Python 3.2a1: 3170 (add DUP_TOP_TWO, remove DUP_TOPX and ROT_FOUR)
# tag: cpython-32
# Python 3.2a2 3180 (add DELETE_DEREF)
# Python 3.3a0 3190 __class__ super closure changed
# Python 3.3a0 3200 (__qualname__ added)
# 3210 (added size modulo 2**32 to the pyc header)
# Python 3.3a1 3220 (changed PEP 380 implementation)
# Python 3.3a4 3230 (revert changes to implicit __class__ closure)
# Python 3.4a1 3250 (evaluate positional default arguments before
# keyword-only defaults)
# Python 3.4a1 3260 (add LOAD_CLASSDEREF; allow locals of class to override
# free vars)
# Python 3.4a1 3270 (various tweaks to the __class__ closure)
# Python 3.4a1 3280 (remove implicit class argument)
# Python 3.4a4 3290 (changes to __qualname__ computation)
# Python 3.4a4 3300 (more changes to __qualname__ computation)
# Python 3.4rc2 3310 (alter __qualname__ computation)
# Python 3.5a0 3320 (matrix multiplication operator)
# Python 3.5b1 3330 (PEP 448: Additional Unpacking Generalizations)
# Python 3.5b2 3340 (fix dictionary display evaluation order #11205)
# Python 3.5b2 3350 (add GET_YIELD_FROM_ITER opcode #24400)
#
# MAGIC must change whenever the bytecode emitted by the compiler may no
# longer be understood by older implementations of the eval loop (usually
# due to the addition of new opcodes).
#
# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
# in PC/launcher.c must also be updated.
MAGIC_NUMBER = (3350).to_bytes(2, 'little') + b'\r\n'
_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c
_PYCACHE = '__pycache__'
_OPT = 'opt-'
SOURCE_SUFFIXES = ['.py'] # _setup() adds .pyw as needed.
BYTECODE_SUFFIXES = ['.pyc']
# Deprecated.
DEBUG_BYTECODE_SUFFIXES = OPTIMIZED_BYTECODE_SUFFIXES = BYTECODE_SUFFIXES
def cache_from_source(path, debug_override=None, *, optimization=None):
    """Given the path to a .py file, return the path to its .pyc file.

    The .py file does not need to exist; this simply returns the path to the
    .pyc file calculated as if the .py file were imported.

    The 'optimization' parameter controls the presumed optimization level of
    the bytecode file. If 'optimization' is not None, the string representation
    of the argument is taken and verified to be alphanumeric (else ValueError
    is raised).

    The debug_override parameter is deprecated. If debug_override is not None,
    a True value is the same as setting 'optimization' to the empty string
    while a False value is equivalent to setting 'optimization' to '1'.

    If sys.implementation.cache_tag is None then NotImplementedError is raised.
    """
    if debug_override is not None:
        _warnings.warn('the debug_override parameter is deprecated; use '
                       "'optimization' instead", DeprecationWarning)
        if optimization is not None:
            message = 'debug_override or optimization must be set to None'
            raise TypeError(message)
        # Map the legacy flag onto the new API; str() below turns 1 into '1'.
        optimization = '' if debug_override else 1
    head, tail = _path_split(path)
    base, sep, rest = tail.rpartition('.')
    tag = sys.implementation.cache_tag
    if tag is None:
        raise NotImplementedError('sys.implementation.cache_tag is None')
    # '<stem>.<cache_tag>', e.g. 'foo.cpython-35'.
    almost_filename = ''.join([(base if base else rest), sep, tag])
    if optimization is None:
        # Default to the running interpreter's optimization level (PEP 488).
        if sys.flags.optimize == 0:
            optimization = ''
        else:
            optimization = sys.flags.optimize
    optimization = str(optimization)
    if optimization != '':
        if not optimization.isalnum():
            raise ValueError('{!r} is not alphanumeric'.format(optimization))
        # Non-default levels get an '.opt-<level>' tag in the filename.
        almost_filename = '{}.{}{}'.format(almost_filename, _OPT, optimization)
    return _path_join(head, _PYCACHE, almost_filename + BYTECODE_SUFFIXES[0])
def source_from_cache(path):
    """Given the path to a .pyc. file, return the path to its .py file.

    The .pyc file does not need to exist; this simply returns the path to
    the .py file calculated to correspond to the .pyc file.  If path does
    not conform to PEP 3147/488 format, ValueError will be raised. If
    sys.implementation.cache_tag is None then NotImplementedError is raised.
    """
    if sys.implementation.cache_tag is None:
        raise NotImplementedError('sys.implementation.cache_tag is None')
    # Peel off '<dir>/__pycache__/<filename>' and validate each piece.
    directory, filename = _path_split(path)
    directory, cache_dir = _path_split(directory)
    if cache_dir != _PYCACHE:
        raise ValueError('{} not bottom-level directory in '
                         '{!r}'.format(_PYCACHE, path))
    dots = filename.count('.')
    if dots not in {2, 3}:
        raise ValueError('expected only 2 or 3 dots in '
                         '{!r}'.format(filename))
    if dots == 3:
        # Three dots means a PEP 488 'opt-<level>' tag is present.
        opt_tag = filename.rsplit('.', 2)[-2]
        if not opt_tag.startswith(_OPT):
            raise ValueError("optimization portion of filename does not start "
                             "with {!r}".format(_OPT))
        if not opt_tag[len(_OPT):].isalnum():
            raise ValueError("optimization level {!r} is not an alphanumeric "
                             "value".format(opt_tag))
    stem = filename.partition('.')[0]
    return _path_join(directory, stem + SOURCE_SUFFIXES[0])
def _get_sourcefile(bytecode_path):
    """Convert a bytecode file path to a source path (if possible).

    This function exists purely for backwards-compatibility for
    PyImport_ExecCodeModuleWithFilenames() in the C API.
    """
    if not bytecode_path:
        return None
    stem, _, extension = bytecode_path.rpartition('.')
    # Paths that do not look like bytecode ('.pyc'/'.pyo') pass through.
    if not stem or extension.lower()[-3:-1] != 'py':
        return bytecode_path
    try:
        source_path = source_from_cache(bytecode_path)
    except (NotImplementedError, ValueError):
        # Legacy layout: drop the trailing 'c'/'o' to get the .py path.
        source_path = bytecode_path[:-1]
    return source_path if _path_isfile(source_path) else bytecode_path
def _get_cached(filename):
    """Return the cached-bytecode path for *filename*, or None."""
    if filename.endswith(tuple(SOURCE_SUFFIXES)):
        # Source file: map to its PEP 3147 cache location if possible.
        try:
            return cache_from_source(filename)
        except NotImplementedError:
            pass
    elif filename.endswith(tuple(BYTECODE_SUFFIXES)):
        # Already a bytecode path; use it directly.
        return filename
    else:
        return None
def _calc_mode(path):
    """Calculate the mode permissions for a bytecode file."""
    try:
        mode = _path_stat(path).st_mode
    except OSError:
        mode = 0o666
    # We always ensure write access so we can update cached files
    # later even when the source files are read-only on Windows (#6074)
    return mode | 0o200
def _verbose_message(message, *args, verbosity=1):
"""Print the message to stderr if -v/PYTHONVERBOSE is turned on."""
if sys.flags.verbose >= verbosity:
if not message.startswith(('#', 'import ')):
message = '# ' + message
print(message.format(*args), file=sys.stderr)
def _check_name(method):
    """Decorator to verify that the module being requested matches the one the
    loader can handle.

    The first argument (self) must define 'name' which the second argument is
    compared against. If the comparison fails then ImportError is raised.
    """
    def _check_name_wrapper(self, name=None, *args, **kwargs):
        # Default to the loader's own module name when none is supplied.
        if name is None:
            name = self.name
        elif self.name != name:
            raise ImportError('loader for %s cannot handle %s' %
                              (self.name, name), name=name)
        return method(self, name, *args, **kwargs)
    try:
        _wrap = _bootstrap._wrap
    except NameError:
        # XXX yuck -- _bootstrap may not have been injected yet during early
        # bootstrap; fall back to a local functools.wraps-like attribute copy.
        def _wrap(new, old):
            for replace in ['__module__', '__name__', '__qualname__', '__doc__']:
                if hasattr(old, replace):
                    setattr(new, replace, getattr(old, replace))
            new.__dict__.update(old.__dict__)
    _wrap(_check_name_wrapper, method)
    return _check_name_wrapper
def _find_module_shim(self, fullname):
    """Try to find a loader for the specified module by delegating to
    self.find_loader().

    This method is deprecated in favor of finder.find_spec().
    """
    # find_loader() returns (loader, portions).  A portions list without a
    # loader indicates a namespace-package portion, which this legacy API
    # cannot express -- warn and fall through to returning None.
    loader, portions = self.find_loader(fullname)
    if loader is None and portions:
        msg = 'Not importing directory {}: missing __init__'
        _warnings.warn(msg.format(portions[0]), ImportWarning)
    return loader
def _validate_bytecode_header(data, source_stats=None, name=None, path=None):
    """Validate the header of the passed-in bytecode against source_stats (if
    given) and returning the bytecode that can be compiled by compile().

    All other arguments are used to enhance error reporting.

    ImportError is raised when the magic number is incorrect or the bytecode is
    found to be stale. EOFError is raised when the data is found to be
    truncated.
    """
    exc_details = {}
    if name is not None:
        exc_details['name'] = name
    else:
        # To prevent having to make all messages have a conditional name.
        name = '<bytecode>'
    if path is not None:
        exc_details['path'] = path
    # Header layout: 4-byte magic + 4-byte mtime + 4-byte source size.
    magic = data[:4]
    raw_timestamp = data[4:8]
    raw_size = data[8:12]
    if magic != MAGIC_NUMBER:
        message = 'bad magic number in {!r}: {!r}'.format(name, magic)
        _verbose_message('{}', message)
        raise ImportError(message, **exc_details)
    elif len(raw_timestamp) != 4:
        message = 'reached EOF while reading timestamp in {!r}'.format(name)
        _verbose_message('{}', message)
        raise EOFError(message)
    elif len(raw_size) != 4:
        message = 'reached EOF while reading size of source in {!r}'.format(name)
        _verbose_message('{}', message)
        raise EOFError(message)
    if source_stats is not None:
        try:
            source_mtime = int(source_stats['mtime'])
        except KeyError:
            pass
        else:
            # The cached mtime must match the source mtime exactly.
            if _r_long(raw_timestamp) != source_mtime:
                message = 'bytecode is stale for {!r}'.format(name)
                _verbose_message('{}', message)
                raise ImportError(message, **exc_details)
        try:
            # Size stored in the header is truncated to 32 bits.
            source_size = source_stats['size'] & 0xFFFFFFFF
        except KeyError:
            pass
        else:
            if _r_long(raw_size) != source_size:
                raise ImportError('bytecode is stale for {!r}'.format(name),
                                  **exc_details)
    # Strip the 12-byte header; the rest is the marshalled code object.
    return data[12:]
def _compile_bytecode(data, name=None, bytecode_path=None, source_path=None):
    """Compile bytecode as returned by _validate_bytecode_header()."""
    code = marshal.loads(data)
    if not isinstance(code, _code_type):
        raise ImportError('Non-code object in {!r}'.format(bytecode_path),
                          name=name, path=bytecode_path)
    _verbose_message('code object from {!r}', bytecode_path)
    if source_path is not None:
        # Re-point co_filename at the real source, not the .pyc path.
        _imp._fix_co_filename(code, source_path)
    return code
def _code_to_bytecode(code, mtime=0, source_size=0):
    """Compile a code object into bytecode for writing out to a byte-compiled
    file."""
    # Layout: magic (4 bytes) + mtime (4) + source size (4) + marshalled code.
    data = bytearray(MAGIC_NUMBER)
    for chunk in (_w_long(mtime), _w_long(source_size), marshal.dumps(code)):
        data.extend(chunk)
    return data
def decode_source(source_bytes):
    """Decode bytes representing source code and return the string.

    Universal newline support is used in the decoding.
    """
    import tokenize  # To avoid bootstrap issues.
    readline = _io.BytesIO(source_bytes).readline
    # detect_encoding() honors a PEP 263 coding cookie or a BOM.
    encoding, _ = tokenize.detect_encoding(readline)
    newline_decoder = _io.IncrementalNewlineDecoder(None, True)
    return newline_decoder.decode(source_bytes.decode(encoding))
# Module specifications #######################################################
# Sentinel distinguishing "argument not supplied" from an explicit None.
_POPULATE = object()


def spec_from_file_location(name, location=None, *, loader=None,
                            submodule_search_locations=_POPULATE):
    """Return a module spec based on a file location.

    To indicate that the module is a package, set
    submodule_search_locations to a list of directory paths.  An
    empty list is sufficient, though it's not otherwise useful to the
    import system.

    The loader must take a spec as its only __init__() arg.
    """
    if location is None:
        # The caller may simply want a partially populated location-
        # oriented spec.  So we set the location to a bogus value and
        # fill in as much as we can.
        location = '<unknown>'
        if hasattr(loader, 'get_filename'):
            # ExecutionLoader
            try:
                location = loader.get_filename(name)
            except ImportError:
                pass
    # If the location is on the filesystem, but doesn't actually exist,
    # we could return None here, indicating that the location is not
    # valid.  However, we don't have a good way of testing since an
    # indirect location (e.g. a zip file or URL) will look like a
    # non-existent file relative to the filesystem.
    spec = _bootstrap.ModuleSpec(name, loader, origin=location)
    spec._set_fileattr = True
    # Pick a loader if one wasn't provided.
    if loader is None:
        for loader_class, suffixes in _get_supported_file_loaders():
            if location.endswith(tuple(suffixes)):
                loader = loader_class(name, location)
                spec.loader = loader
                break
        else:
            # No loader recognizes the suffix: no usable spec.
            return None
    # Set submodule_search_paths appropriately.
    if submodule_search_locations is _POPULATE:
        # Check the loader.
        if hasattr(loader, 'is_package'):
            try:
                is_package = loader.is_package(name)
            except ImportError:
                pass
            else:
                if is_package:
                    spec.submodule_search_locations = []
    else:
        spec.submodule_search_locations = submodule_search_locations
    if spec.submodule_search_locations == []:
        # Package with no explicit search path: default to its directory.
        if location:
            dirname = _path_split(location)[0]
            spec.submodule_search_locations.append(dirname)
    return spec
# Loaders #####################################################################
class WindowsRegistryFinder:

    """Meta path finder for modules declared in the Windows registry."""

    REGISTRY_KEY = (
        'Software\\Python\\PythonCore\\{sys_version}'
        '\\Modules\\{fullname}')
    REGISTRY_KEY_DEBUG = (
        'Software\\Python\\PythonCore\\{sys_version}'
        '\\Modules\\{fullname}\\Debug')
    DEBUG_BUILD = False  # Changed in _setup()

    @classmethod
    def _open_registry(cls, key):
        # Prefer the per-user key; fall back to the machine-wide key.
        try:
            return _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, key)
        except OSError:
            return _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, key)

    @classmethod
    def _search_registry(cls, fullname):
        # Return the registered file path for 'fullname', or None.
        if cls.DEBUG_BUILD:
            registry_key = cls.REGISTRY_KEY_DEBUG
        else:
            registry_key = cls.REGISTRY_KEY
        key = registry_key.format(fullname=fullname,
                                  sys_version=sys.version[:3])
        try:
            with cls._open_registry(key) as hkey:
                filepath = _winreg.QueryValue(hkey, '')
        except OSError:
            return None
        return filepath

    @classmethod
    def find_spec(cls, fullname, path=None, target=None):
        filepath = cls._search_registry(fullname)
        if filepath is None:
            return None
        try:
            _path_stat(filepath)
        except OSError:
            # Registered file no longer exists.
            return None
        for loader, suffixes in _get_supported_file_loaders():
            if filepath.endswith(tuple(suffixes)):
                spec = _bootstrap.spec_from_loader(fullname,
                                                   loader(fullname, filepath),
                                                   origin=filepath)
                return spec
        # Falls through (returns None) when no loader handles the suffix.

    @classmethod
    def find_module(cls, fullname, path=None):
        """Find module named in the registry.

        This method is deprecated.  Use find_spec() instead.
        """
        spec = cls.find_spec(fullname, path)
        if spec is not None:
            return spec.loader
        else:
            return None
class _LoaderBasics:

    """Base class of common code needed by both SourceLoader and
    SourcelessFileLoader."""

    def is_package(self, fullname):
        """Concrete implementation of InspectLoader.is_package by checking if
        the path returned by get_filename has a filename of '__init__.py'."""
        basename = _path_split(self.get_filename(fullname))[1]
        stem = basename.rsplit('.', 1)[0]
        tail = fullname.rpartition('.')[2]
        # A module literally named '__init__' is not itself a package.
        return stem == '__init__' and tail != '__init__'

    def create_module(self, spec):
        """Use default semantics for module creation."""

    def exec_module(self, module):
        """Execute the module."""
        code = self.get_code(module.__name__)
        if code is None:
            raise ImportError('cannot load module {!r} when get_code() '
                              'returns None'.format(module.__name__))
        _bootstrap._call_with_frames_removed(exec, code, module.__dict__)

    def load_module(self, fullname):
        """Compatibility shim delegating to the spec-based load machinery."""
        return _bootstrap._load_module_shim(self, fullname)
class SourceLoader(_LoaderBasics):

    """Abstract loader for source modules with optional bytecode caching.

    Subclasses supply get_filename()/get_data(); overriding path_stats()
    and set_data() additionally enables reading/writing cached bytecode.
    """

    def path_mtime(self, path):
        """Optional method that returns the modification time (an int) for the
        specified path, where path is a str.

        Raises IOError when the path cannot be handled.
        """
        raise IOError

    def path_stats(self, path):
        """Optional method returning a metadata dict for the specified
        path (str).

        Possible keys:
        - 'mtime' (mandatory) is the numeric timestamp of last source
          code modification;
        - 'size' (optional) is the size in bytes of the source code.

        Implementing this method allows the loader to read bytecode files.
        Raises IOError when the path cannot be handled.
        """
        return {'mtime': self.path_mtime(path)}

    def _cache_bytecode(self, source_path, cache_path, data):
        """Optional method which writes data (bytes) to a file path (a str).

        Implementing this method allows for the writing of bytecode files.

        The source path is needed in order to correctly transfer permissions
        """
        # For backwards compatibility, we delegate to set_data()
        return self.set_data(cache_path, data)

    def set_data(self, path, data):
        """Optional method which writes data (bytes) to a file path (a str).

        Implementing this method allows for the writing of bytecode files.
        """
        # Intentionally a no-op here: the base class has nowhere to write.

    def get_source(self, fullname):
        """Concrete implementation of InspectLoader.get_source."""
        path = self.get_filename(fullname)
        try:
            source_bytes = self.get_data(path)
        except OSError as exc:
            raise ImportError('source not available through get_data()',
                              name=fullname) from exc
        return decode_source(source_bytes)

    def source_to_code(self, data, path, *, _optimize=-1):
        """Return the code object compiled from source.

        The 'data' argument can be any object type that compile() supports.
        """
        return _bootstrap._call_with_frames_removed(compile, data, path, 'exec',
                                                    dont_inherit=True,
                                                    optimize=_optimize)

    def get_code(self, fullname):
        """Concrete implementation of InspectLoader.get_code.

        Reading of bytecode requires path_stats to be implemented. To write
        bytecode, set_data must also be implemented.
        """
        source_path = self.get_filename(fullname)
        source_mtime = None
        try:
            bytecode_path = cache_from_source(source_path)
        except NotImplementedError:
            bytecode_path = None
        else:
            try:
                st = self.path_stats(source_path)
            except IOError:
                # Cannot stat the source; fall through to compiling it.
                pass
            else:
                source_mtime = int(st['mtime'])
                try:
                    data = self.get_data(bytecode_path)
                except OSError:
                    # No cached bytecode available.
                    pass
                else:
                    try:
                        bytes_data = _validate_bytecode_header(data,
                                                               source_stats=st,
                                                               name=fullname,
                                                               path=bytecode_path)
                    except (ImportError, EOFError):
                        # Stale/corrupt bytecode; recompile from source.
                        pass
                    else:
                        _verbose_message('{} matches {}', bytecode_path,
                                         source_path)
                        return _compile_bytecode(bytes_data, name=fullname,
                                                 bytecode_path=bytecode_path,
                                                 source_path=source_path)
        source_bytes = self.get_data(source_path)
        code_object = self.source_to_code(source_bytes, source_path)
        _verbose_message('code object from {}', source_path)
        # Opportunistically write the bytecode cache for next time.
        if (not sys.dont_write_bytecode and bytecode_path is not None and
                source_mtime is not None):
            data = _code_to_bytecode(code_object, source_mtime,
                                     len(source_bytes))
            try:
                self._cache_bytecode(source_path, bytecode_path, data)
                _verbose_message('wrote {!r}', bytecode_path)
            except NotImplementedError:
                pass
        return code_object
class FileLoader:

    """Base file loader class which implements the loader protocol methods that
    require file system usage."""

    def __init__(self, fullname, path):
        """Cache the module name and the path to the file found by the
        finder."""
        self.name = fullname  # fully-qualified module name
        self.path = path      # filesystem path found by the finder

    def __eq__(self, other):
        # Loaders are equal when they are the same class with identical state.
        return (self.__class__ == other.__class__ and
                self.__dict__ == other.__dict__)

    def __hash__(self):
        return hash(self.name) ^ hash(self.path)

    @_check_name
    def load_module(self, fullname):
        """Load a module from a file.

        This method is deprecated.  Use exec_module() instead.
        """
        # The only reason for this method is for the name check.
        # Issue #14857: Avoid the zero-argument form of super so the implementation
        # of that form can be updated without breaking the frozen module
        return super(FileLoader, self).load_module(fullname)

    @_check_name
    def get_filename(self, fullname):
        """Return the path to the source file as found by the finder."""
        return self.path

    def get_data(self, path):
        """Return the data from path as raw bytes."""
        with _io.FileIO(path, 'r') as file:
            return file.read()
class SourceFileLoader(FileLoader, SourceLoader):

    """Concrete implementation of SourceLoader using the file system."""

    def path_stats(self, path):
        """Return the metadata for the path."""
        st = _path_stat(path)
        return {'mtime': st.st_mtime, 'size': st.st_size}

    def _cache_bytecode(self, source_path, bytecode_path, data):
        # Adapt between the two APIs: SourceLoader supplies the source path
        # so permissions can be mirrored; set_data() only needs the mode.
        mode = _calc_mode(source_path)
        return self.set_data(bytecode_path, data, _mode=mode)

    def set_data(self, path, data, *, _mode=0o666):
        """Write bytes data to a file."""
        parent, filename = _path_split(path)
        path_parts = []
        # Figure out what directories are missing.
        while parent and not _path_isdir(parent):
            parent, part = _path_split(parent)
            path_parts.append(part)
        # Create needed directories.
        for part in reversed(path_parts):
            parent = _path_join(parent, part)
            try:
                _os.mkdir(parent)
            except FileExistsError:
                # Probably another Python process already created the dir.
                continue
            except OSError as exc:
                # Could be a permission error, read-only filesystem: just forget
                # about writing the data.
                _verbose_message('could not create {!r}: {!r}', parent, exc)
                return
        try:
            _write_atomic(path, data, _mode)
            _verbose_message('created {!r}', path)
        except OSError as exc:
            # Same as above: just don't write the bytecode.
            _verbose_message('could not create {!r}: {!r}', path, exc)
class SourcelessFileLoader(FileLoader, _LoaderBasics):

    """Loader which handles sourceless file imports."""

    def get_code(self, fullname):
        # Validate the .pyc header, then unmarshal the code object.
        path = self.get_filename(fullname)
        raw = self.get_data(path)
        payload = _validate_bytecode_header(raw, name=fullname, path=path)
        return _compile_bytecode(payload, name=fullname, bytecode_path=path)

    def get_source(self, fullname):
        """Return None as there is no source code."""
        return None
# Filled in by _setup().  NOTE(review): presumably populated from
# _imp.extension_suffixes() -- _setup()'s body is outside this view; confirm.
EXTENSION_SUFFIXES = []
class ExtensionFileLoader(FileLoader, _LoaderBasics):

    """Loader for extension modules.

    The constructor is designed to work with FileFinder.
    """

    def __init__(self, name, path):
        self.name = name  # fully-qualified module name
        self.path = path  # path to the extension module file

    def __eq__(self, other):
        return (self.__class__ == other.__class__ and
                self.__dict__ == other.__dict__)

    def __hash__(self):
        return hash(self.name) ^ hash(self.path)

    def create_module(self, spec):
        """Create an uninitialized extension module"""
        module = _bootstrap._call_with_frames_removed(
            _imp.create_dynamic, spec)
        _verbose_message('extension module {!r} loaded from {!r}',
                         spec.name, self.path)
        return module

    def exec_module(self, module):
        """Initialize an extension module"""
        _bootstrap._call_with_frames_removed(_imp.exec_dynamic, module)
        _verbose_message('extension module {!r} executed from {!r}',
                         self.name, self.path)

    def is_package(self, fullname):
        """Return True if the extension module is a package."""
        # Extension packages are files named '__init__' + extension suffix.
        file_name = _path_split(self.path)[1]
        return any(file_name == '__init__' + suffix
                   for suffix in EXTENSION_SUFFIXES)

    def get_code(self, fullname):
        """Return None as an extension module cannot create a code object."""
        return None

    def get_source(self, fullname):
        """Return None as extension modules have no source code."""
        return None

    @_check_name
    def get_filename(self, fullname):
        """Return the path to the source file as found by the finder."""
        return self.path
class _NamespacePath:
    """Represents a namespace package's path.  It uses the module name
    to find its parent module, and from there it looks up the parent's
    __path__.  When this changes, the module's own path is recomputed,
    using path_finder.  For top-level modules, the parent module's path
    is sys.path."""

    def __init__(self, name, path, path_finder):
        self._name = name  # dotted name of the namespace package
        self._path = path  # current list of path entries
        # Snapshot of the parent's path so later changes can be detected.
        self._last_parent_path = tuple(self._get_parent_path())
        self._path_finder = path_finder  # callable(name, parent_path) -> spec

    def _find_parent_path_names(self):
        """Returns a tuple of (parent-module-name, parent-path-attr-name)"""
        parent, dot, me = self._name.rpartition('.')
        if dot == '':
            # This is a top-level module. sys.path contains the parent path.
            return 'sys', 'path'
        # Not a top-level module. parent-module.__path__ contains the
        # parent path.
        return parent, '__path__'

    def _get_parent_path(self):
        parent_module_name, path_attr_name = self._find_parent_path_names()
        return getattr(sys.modules[parent_module_name], path_attr_name)

    def _recalculate(self):
        # If the parent's path has changed, recalculate _path
        parent_path = tuple(self._get_parent_path())  # Make a copy
        if parent_path != self._last_parent_path:
            spec = self._path_finder(self._name, parent_path)
            # Note that no changes are made if a loader is returned, but we
            # do remember the new parent path
            if spec is not None and spec.loader is None:
                if spec.submodule_search_locations:
                    self._path = spec.submodule_search_locations
            self._last_parent_path = parent_path  # Save the copy
        return self._path

    def __iter__(self):
        return iter(self._recalculate())

    def __len__(self):
        return len(self._recalculate())

    def __repr__(self):
        return '_NamespacePath({!r})'.format(self._path)

    def __contains__(self, item):
        return item in self._recalculate()

    def append(self, item):
        self._path.append(item)
# We use this exclusively in module_from_spec() for backward-compatibility.
class _NamespaceLoader:

    """Loader used for namespace packages (which have no code to execute)."""

    def __init__(self, name, path, path_finder):
        self._path = _NamespacePath(name, path, path_finder)

    @classmethod
    def module_repr(cls, module):
        """Return repr for the module.

        The method is deprecated.  The import machinery does the job itself.
        """
        return '<module {!r} (namespace)>'.format(module.__name__)

    def is_package(self, fullname):
        # A namespace package is always a package.
        return True

    def get_source(self, fullname):
        # No source; the empty string keeps InspectLoader clients happy.
        return ''

    def get_code(self, fullname):
        # An empty, valid code object (namespace packages execute no code).
        return compile('', '<string>', 'exec', dont_inherit=True)

    def create_module(self, spec):
        """Use default semantics for module creation."""

    def exec_module(self, module):
        pass

    def load_module(self, fullname):
        """Load a namespace module.

        This method is deprecated.  Use exec_module() instead.
        """
        # The import system never calls this method.
        _verbose_message('namespace module loaded with path {!r}', self._path)
        return _bootstrap._load_module_shim(self, fullname)
# Finders #####################################################################
class PathFinder:

    """Meta path finder for sys.path and package __path__ attributes."""

    @classmethod
    def invalidate_caches(cls):
        """Call the invalidate_caches() method on all path entry finders
        stored in sys.path_importer_cache (where implemented)."""
        for finder in sys.path_importer_cache.values():
            if hasattr(finder, 'invalidate_caches'):
                finder.invalidate_caches()

    @classmethod
    def _path_hooks(cls, path):
        """Search sys.path_hooks for a finder for 'path'.

        Returns None when no hook claims the path.
        """
        if sys.path_hooks is not None and not sys.path_hooks:
            _warnings.warn('sys.path_hooks is empty', ImportWarning)
        for hook in sys.path_hooks:
            try:
                return hook(path)
            except ImportError:
                # Hook declined this path entry; try the next one.
                continue
        else:
            return None

    @classmethod
    def _path_importer_cache(cls, path):
        """Get the finder for the path entry from sys.path_importer_cache.

        If the path entry is not in the cache, find the appropriate finder
        and cache it. If no finder is available, store None.
        """
        if path == '':
            # An empty path entry means the current working directory.
            try:
                path = _os.getcwd()
            except FileNotFoundError:
                # Don't cache the failure as the cwd can easily change to
                # a valid directory later on.
                return None
        try:
            finder = sys.path_importer_cache[path]
        except KeyError:
            finder = cls._path_hooks(path)
            sys.path_importer_cache[path] = finder
        return finder

    @classmethod
    def _legacy_get_spec(cls, fullname, finder):
        # This would be a good place for a DeprecationWarning if
        # we ended up going that route.
        if hasattr(finder, 'find_loader'):
            loader, portions = finder.find_loader(fullname)
        else:
            loader = finder.find_module(fullname)
            portions = []
        if loader is not None:
            return _bootstrap.spec_from_loader(fullname, loader)
        spec = _bootstrap.ModuleSpec(fullname, None)
        spec.submodule_search_locations = portions
        return spec

    @classmethod
    def _get_spec(cls, fullname, path, target=None):
        """Find the loader or namespace_path for this module/package name."""
        # If this ends up being a namespace package, namespace_path is
        # the list of paths that will become its __path__
        namespace_path = []
        for entry in path:
            if not isinstance(entry, (str, bytes)):
                continue
            finder = cls._path_importer_cache(entry)
            if finder is not None:
                if hasattr(finder, 'find_spec'):
                    spec = finder.find_spec(fullname, target)
                else:
                    spec = cls._legacy_get_spec(fullname, finder)
                if spec is None:
                    continue
                if spec.loader is not None:
                    return spec
                portions = spec.submodule_search_locations
                if portions is None:
                    raise ImportError('spec missing loader')
                # This is possibly part of a namespace package.
                # Remember these path entries (if any) for when we
                # create a namespace package, and continue iterating
                # on path.
                namespace_path.extend(portions)
        else:
            # Searched every entry without finding a concrete loader;
            # report the collected namespace portions (possibly empty).
            spec = _bootstrap.ModuleSpec(fullname, None)
            spec.submodule_search_locations = namespace_path
            return spec

    @classmethod
    def find_spec(cls, fullname, path=None, target=None):
        """find the module on sys.path or 'path' based on sys.path_hooks and
        sys.path_importer_cache."""
        if path is None:
            path = sys.path
        spec = cls._get_spec(fullname, path, target)
        if spec is None:
            return None
        elif spec.loader is None:
            namespace_path = spec.submodule_search_locations
            if namespace_path:
                # We found at least one namespace path.  Return a
                # spec which can create the namespace package.
                spec.origin = 'namespace'
                spec.submodule_search_locations = \
                    _NamespacePath(fullname, namespace_path, cls._get_spec)
                return spec
            else:
                return None
        else:
            return spec

    @classmethod
    def find_module(cls, fullname, path=None):
        """find the module on sys.path or 'path' based on sys.path_hooks and
        sys.path_importer_cache.

        This method is deprecated.  Use find_spec() instead.
        """
        spec = cls.find_spec(fullname, path)
        if spec is None:
            return None
        return spec.loader
class FileFinder:

    """File-based finder.

    Interactions with the file system are cached for performance, being
    refreshed when the directory the finder is handling has been modified.
    """

    def __init__(self, path, *loader_details):
        """Initialize with the path to search on and a variable number of
        2-tuples containing the loader and the file suffixes the loader
        recognizes."""
        loaders = []
        for loader, suffixes in loader_details:
            loaders.extend((suffix, loader) for suffix in suffixes)
        self._loaders = loaders
        # Base (directory) path
        self.path = path or '.'
        self._path_mtime = -1
        self._path_cache = set()
        self._relaxed_path_cache = set()

    def invalidate_caches(self):
        """Invalidate the directory mtime."""
        self._path_mtime = -1

    find_module = _find_module_shim

    def find_loader(self, fullname):
        """Try to find a loader for the specified module, or the namespace
        package portions. Returns (loader, list-of-portions).

        This method is deprecated.  Use find_spec() instead.
        """
        spec = self.find_spec(fullname)
        if spec is None:
            return None, []
        return spec.loader, spec.submodule_search_locations or []

    def _get_spec(self, loader_class, fullname, path, smsl, target):
        # Build a spec for a concrete file found on disk.
        loader = loader_class(fullname, path)
        return spec_from_file_location(fullname, path, loader=loader,
                                       submodule_search_locations=smsl)

    def find_spec(self, fullname, target=None):
        """Try to find a spec for the specified module.  Returns the
        matching spec, or None if not found."""
        is_namespace = False
        tail_module = fullname.rpartition('.')[2]
        try:
            mtime = _path_stat(self.path or _os.getcwd()).st_mtime
        except OSError:
            mtime = -1
        if mtime != self._path_mtime:
            # First call, or the directory changed: rebuild the listing cache.
            self._fill_cache()
            self._path_mtime = mtime
        # tail_module keeps the original casing, for __file__ and friends
        if _relax_case():
            cache = self._relaxed_path_cache
            cache_module = tail_module.lower()
        else:
            cache = self._path_cache
            cache_module = tail_module
        # Check if the module is the name of a directory (and thus a package).
        if cache_module in cache:
            base_path = _path_join(self.path, tail_module)
            for suffix, loader_class in self._loaders:
                init_filename = '__init__' + suffix
                full_path = _path_join(base_path, init_filename)
                if _path_isfile(full_path):
                    return self._get_spec(loader_class, fullname, full_path,
                                          [base_path], target)
            else:
                # If a namespace package, return the path if we don't
                # find a module in the next section.
                is_namespace = _path_isdir(base_path)
        # Check for a file w/ a proper suffix exists.
        for suffix, loader_class in self._loaders:
            full_path = _path_join(self.path, tail_module + suffix)
            # Fix: pass the path as a lazy format argument instead of
            # pre-formatting the message.  _verbose_message() applies
            # str.format() to its message, so a pre-formatted path containing
            # '{' or '}' raised during import; laziness also skips the
            # formatting work entirely when verbosity is off.
            _verbose_message('trying {}', full_path, verbosity=2)
            if cache_module + suffix in cache:
                if _path_isfile(full_path):
                    return self._get_spec(loader_class, fullname, full_path,
                                          None, target)
        if is_namespace:
            # Same fix as above: lazy argument, no eager str.format().
            _verbose_message('possible namespace for {}', base_path)
            spec = _bootstrap.ModuleSpec(fullname, None)
            spec.submodule_search_locations = [base_path]
            return spec
        return None

    def _fill_cache(self):
        """Fill the cache of potential modules and packages for this directory."""
        path = self.path
        try:
            contents = _os.listdir(path or _os.getcwd())
        except (FileNotFoundError, PermissionError, NotADirectoryError):
            # Directory has either been removed, turned into a file, or made
            # unreadable.
            contents = []
        # We store two cached versions, to handle runtime changes of the
        # PYTHONCASEOK environment variable.
        if not sys.platform.startswith('win'):
            self._path_cache = set(contents)
        else:
            # Windows users can import modules with case-insensitive file
            # suffixes (for legacy reasons). Make the suffix lowercase here
            # so it's done once instead of for every import. This is safe as
            # the specified suffixes to check against are always specified in a
            # case-sensitive manner.
            lower_suffix_contents = set()
            for item in contents:
                name, dot, suffix = item.partition('.')
                if dot:
                    new_name = '{}.{}'.format(name, suffix.lower())
                else:
                    new_name = name
                lower_suffix_contents.add(new_name)
            self._path_cache = lower_suffix_contents
        if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):
            self._relaxed_path_cache = {fn.lower() for fn in contents}

    @classmethod
    def path_hook(cls, *loader_details):
        """A class method which returns a closure to use on sys.path_hook
        which will return an instance using the specified loaders and the path
        called on the closure.

        If the path called on the closure is not a directory, ImportError is
        raised.
        """
        def path_hook_for_FileFinder(path):
            """Path hook for importlib.machinery.FileFinder."""
            if not _path_isdir(path):
                raise ImportError('only directories are supported', path=path)
            return cls(path, *loader_details)
        return path_hook_for_FileFinder

    def __repr__(self):
        return 'FileFinder({!r})'.format(self.path)
# Import setup ###############################################################
def _fix_up_module(ns, name, pathname, cpathname=None):
    """Set import-related attributes (__spec__, __loader__, __file__,
    __cached__) in the module namespace dict *ns*, synthesizing a loader
    and spec when missing.

    This function is used by PyImport_ExecCodeModuleObject().
    """
    loader = ns.get('__loader__')
    spec = ns.get('__spec__')
    if not loader:
        if spec:
            loader = spec.loader
        elif pathname == cpathname:
            # Source path equals cache path: a sourceless (bytecode) module.
            loader = SourcelessFileLoader(name, pathname)
        else:
            loader = SourceFileLoader(name, pathname)
    if not spec:
        spec = spec_from_file_location(name, pathname, loader=loader)
    try:
        ns['__spec__'] = spec
        ns['__loader__'] = loader
        ns['__file__'] = pathname
        ns['__cached__'] = cpathname
    except Exception:
        # Not important enough to report.
        pass
def _get_supported_file_loaders():
    """Returns a list of file-based module loaders.

    Each item is a tuple (loader, suffixes). Extension modules come first
    so compiled extensions shadow same-named source files.
    """
    return [
        (ExtensionFileLoader, _imp.extension_suffixes()),
        (SourceFileLoader, SOURCE_SUFFIXES),
        (SourcelessFileLoader, BYTECODE_SUFFIXES),
    ]
def _setup(_bootstrap_module):
    """Setup the path-based importers for importlib by importing needed
    built-in modules and injecting them into the global namespace.
    Other components are extracted from the core bootstrap module.
    """
    # Rebind the module-level names this module uses throughout; they come
    # from the frozen core bootstrap module handed in by the interpreter.
    global sys, _imp, _bootstrap
    _bootstrap = _bootstrap_module
    sys = _bootstrap.sys
    _imp = _bootstrap._imp
    # Directly load built-in modules needed during bootstrap.
    self_module = sys.modules[__name__]
    for builtin_name in ('_io', '_warnings', 'builtins', 'marshal'):
        if builtin_name not in sys.modules:
            builtin_module = _bootstrap._builtin_from_name(builtin_name)
        else:
            builtin_module = sys.modules[builtin_name]
        setattr(self_module, builtin_name, builtin_module)
    # Directly load the os module (needed during bootstrap).
    # Try 'posix' first, then 'nt'; whichever imports determines the
    # platform path conventions used below.
    os_details = ('posix', ['/']), ('nt', ['\\', '/'])
    for builtin_os, path_separators in os_details:
        # Assumption made in _path_join()
        assert all(len(sep) == 1 for sep in path_separators)
        path_sep = path_separators[0]
        if builtin_os in sys.modules:
            os_module = sys.modules[builtin_os]
            break
        else:
            try:
                os_module = _bootstrap._builtin_from_name(builtin_os)
                break
            except ImportError:
                continue
    else:
        # for/else: reached only when neither 'posix' nor 'nt' was loadable.
        raise ImportError('importlib requires posix or nt')
    setattr(self_module, '_os', os_module)
    setattr(self_module, 'path_sep', path_sep)
    setattr(self_module, 'path_separators', ''.join(path_separators))
    # Directly load the _thread module (needed during bootstrap).
    try:
        thread_module = _bootstrap._builtin_from_name('_thread')
    except ImportError:
        # Python was built without threads
        thread_module = None
    setattr(self_module, '_thread', thread_module)
    # Directly load the _weakref module (needed during bootstrap).
    weakref_module = _bootstrap._builtin_from_name('_weakref')
    setattr(self_module, '_weakref', weakref_module)
    # Directly load the winreg module (needed during bootstrap).
    if builtin_os == 'nt':
        winreg_module = _bootstrap._builtin_from_name('winreg')
        setattr(self_module, '_winreg', winreg_module)
    # Constants
    setattr(self_module, '_relax_case', _make_relax_case())
    EXTENSION_SUFFIXES.extend(_imp.extension_suffixes())
    if builtin_os == 'nt':
        SOURCE_SUFFIXES.append('.pyw')
        # '_d.pyd' in the extension suffixes marks a Windows debug build.
        if '_d.pyd' in EXTENSION_SUFFIXES:
            WindowsRegistryFinder.DEBUG_BUILD = True
def _install(_bootstrap_module):
    """Install the path-based import components."""
    _setup(_bootstrap_module)
    loaders = _get_supported_file_loaders()
    sys.path_hooks.append(FileFinder.path_hook(*loaders))
    if _os.__name__ == 'nt':
        sys.meta_path.append(WindowsRegistryFinder)
    sys.meta_path.append(PathFinder)
    # XXX We expose a couple of classes in _bootstrap for the sake of
    # a setuptools bug (https://bitbucket.org/pypa/setuptools/issue/378).
    _bootstrap_module.FileFinder = FileFinder
    _bootstrap_module.SourceFileLoader = SourceFileLoader
| mit |
jzmq/pinot | pinot-core/src/test/resources/generate-test-data.py | 13 | 1397 | #!/usr/bin/env python
import sys
import argparse
from fastavro import writer
parser = argparse.ArgumentParser()
parser.add_argument('output_file', help='Output Avro data file')
parser.add_argument('--num_records', dest='num_records', default=1024, type=int, help='Number of records to generate (default: 1024)')
parser.add_argument('--num_time_buckets', dest='num_time_buckets', default=16, type=int, help='Number of time buckets')
args = parser.parse_args()
print 'Generating {} records'.format(args.num_records)
schema = {
'name': 'TestRecord',
'type': 'record',
'fields': [
{ 'name': 'D0', 'type': 'string', 'pinotType': 'DIMENSION' },
{ 'name': 'D1', 'type': 'string', 'pinotType': 'DIMENSION' },
{ 'name': 'D2', 'type': 'string', 'pinotType': 'DIMENSION' },
{ 'name': 'daysSinceEpoch', 'type': 'long', 'pinotType': 'TIME' },
{ 'name': 'M0', 'type': 'long', 'pinotType': 'METRIC' },
{ 'name': 'M1', 'type': 'double', 'pinotType': 'METRIC' }
]
}
records = []
for i in xrange(args.num_records):
record = {
'D0': str(i % 2),
'D1': str(i % 4),
'D2': str(i % 8),
'daysSinceEpoch': int(i % args.num_time_buckets),
'M0': 1,
'M1': 1.0
}
records.append(record)
print 'Writing {}'.format(sys.argv[1])
with open(sys.argv[1], 'wb') as out:
writer(out, schema, records)
| apache-2.0 |
emilhetty/home-assistant | homeassistant/components/notify/googlevoice.py | 6 | 1966 | """
Google Voice SMS platform for notify component.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/notify.google_voice/
"""
import logging
from homeassistant.components.notify import (
ATTR_TARGET, DOMAIN, BaseNotificationService)
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import validate_config
_LOGGER = logging.getLogger(__name__)
REQUIREMENTS = ['https://github.com/w1ll1am23/pygooglevoice-sms/archive/'
'7c5ee9969b97a7992fc86a753fe9f20e3ffa3f7c.zip#'
'pygooglevoice-sms==0.0.1']
def get_service(hass, config):
    """Get the Google Voice SMS notification service."""
    required = {DOMAIN: [CONF_USERNAME, CONF_PASSWORD]}
    # Bail out (returning None) when username/password are missing.
    if not validate_config({DOMAIN: config}, required, _LOGGER):
        return None
    username = config[CONF_USERNAME]
    password = config[CONF_PASSWORD]
    return GoogleVoiceSMSNotificationService(username, password)
# pylint: disable=too-few-public-methods
class GoogleVoiceSMSNotificationService(BaseNotificationService):
    """Implement the notification service for the Google Voice SMS service."""

    def __init__(self, username, password):
        """Initialize the service with Google Voice credentials."""
        # Imported lazily so the dependency is only needed when configured.
        from googlevoicesms import Voice
        self.username = username
        self.password = password
        self.voice = Voice()

    def send_message(self, message="", **kwargs):
        """Send SMS to specified target user cell."""
        targets = kwargs.get(ATTR_TARGET)
        if not targets:
            _LOGGER.info('At least 1 target is required')
            return
        if not isinstance(targets, list):
            targets = [targets]
        # Log in once per batch, send to every target, then log out.
        self.voice.login(self.username, self.password)
        for number in targets:
            self.voice.send_sms(number, message)
        self.voice.logout()
| mit |
shipci/boto | boto/sdb/queryresultset.py | 153 | 3674 | from boto.compat import six
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
def query_lister(domain, query='', max_items=None, attr_names=None):
    """Yield items from a SimpleDB domain query, transparently following
    pagination tokens.

    :param domain: Domain whose connection performs the query.
    :param query: query expression to run.
    :param max_items: optional cap on the total number of items yielded.
    :param attr_names: optional list of attribute names to retrieve.
    """
    more_results = True
    num_results = 0
    next_token = None
    while more_results:
        rs = domain.connection.query_with_attributes(domain, query, attr_names,
                                                     next_token=next_token)
        for item in rs:
            if max_items and num_results == max_items:
                # 'return' ends a generator cleanly; 'raise StopIteration'
                # inside a generator is a RuntimeError under PEP 479
                # (Python 3.7+).
                return
            yield item
            num_results += 1
        next_token = rs.next_token
        more_results = next_token is not None
class QueryResultSet(object):
    """Iterable over the results of a SimpleDB query, delegating the
    pagination-aware iteration to query_lister()."""

    def __init__(self, domain=None, query='', max_items=None, attr_names=None):
        self.domain = domain
        self.query = query
        self.max_items = max_items
        self.attr_names = attr_names

    def __iter__(self):
        return query_lister(self.domain, self.query, self.max_items,
                            self.attr_names)
def select_lister(domain, query='', max_items=None):
    """Yield items from a SimpleDB select, transparently following
    pagination tokens.

    :param domain: Domain whose connection performs the select.
    :param query: select expression to run.
    :param max_items: optional cap on the total number of items yielded.
    """
    more_results = True
    num_results = 0
    next_token = None
    while more_results:
        rs = domain.connection.select(domain, query, next_token=next_token)
        for item in rs:
            if max_items and num_results == max_items:
                # 'return' ends a generator cleanly; 'raise StopIteration'
                # inside a generator is a RuntimeError under PEP 479
                # (Python 3.7+).
                return
            yield item
            num_results += 1
        next_token = rs.next_token
        more_results = next_token is not None
class SelectResultSet(object):
    """Iterable wrapper around a paginated SimpleDB select.

    Unlike QueryResultSet, the next_token is stored on the instance so
    iteration can be resumed from a saved token.
    """

    def __init__(self, domain=None, query='', max_items=None,
                 next_token=None, consistent_read=False):
        self.domain = domain
        self.query = query
        self.consistent_read = consistent_read
        self.max_items = max_items
        self.next_token = next_token

    def __iter__(self):
        more_results = True
        num_results = 0
        while more_results:
            rs = self.domain.connection.select(self.domain, self.query,
                                               next_token=self.next_token,
                                               consistent_read=self.consistent_read)
            for item in rs:
                if self.max_items and num_results >= self.max_items:
                    # 'return' ends a generator cleanly; 'raise StopIteration'
                    # inside a generator is a RuntimeError under PEP 479
                    # (Python 3.7+).
                    return
                yield item
                num_results += 1
            self.next_token = rs.next_token
            if self.max_items and num_results >= self.max_items:
                return
            more_results = self.next_token is not None

    def next(self):
        # Python 2-style convenience: returns the first item of a fresh
        # iterator on every call.
        return next(self.__iter__())
| mit |
catsop/CATMAID | django/applications/catmaid/control/tree_util.py | 6 | 8539 | # A 'tree' is a networkx.DiGraph with a single root node (a node without parents)
from operator import itemgetter
from networkx import Graph, DiGraph
from collections import defaultdict
from math import sqrt
from itertools import izip, islice
from catmaid.models import Treenode
def find_root(tree):
    """Return the first node found that has no predecessors -- the root
    of a directed tree. Avoids one database lookup."""
    for candidate in tree:
        parent = next(tree.predecessors_iter(candidate), None)
        # Truthiness check kept from the original: a falsy parent counts
        # as "no parent".
        if not parent:
            return candidate
def edge_count_to_root(tree, root_node=None):
    """Map every node ID to its depth counted from the root, where the
    root itself gets 1. If root_node is None, the root is searched for."""
    frontier = [root_node if root_node else find_root(tree)]
    depth = 1
    distances = {}
    # Breadth-first sweep: every node in `frontier` sits at `depth`.
    while frontier:
        successors = []
        for node in frontier:
            distances[node] = depth
            successors.extend(tree.successors_iter(node))
        frontier = successors
        depth += 1
    return distances
def find_common_ancestor(tree, nodes, ds=None, root_node=None):
    """ Return the node in tree that is the nearest common ancestor to all nodes.
    Assumes that nodes contains at least 1 node.
    Assumes that all nodes are present in tree.
    Returns a tuple with the ancestor node and its distance to root. """
    nodes = list(nodes)
    if 1 == len(nodes):
        return nodes[0], 0
    distances = ds if ds else edge_count_to_root(tree, root_node=root_node)

    def parent(node):
        # Every non-root node of a tree has exactly one predecessor.
        return next(tree.predecessors_iter(node))

    def pairwise(a, b):
        # Classic LCA walk: lift the deeper node to the shallower node's
        # depth, then climb both in lockstep until they meet.
        da, db = distances[a], distances[b]
        while da > db:
            a = parent(a)
            da -= 1
        while db > da:
            b = parent(b)
            db -= 1
        while a != b:
            a = parent(a)
            b = parent(b)
        return a

    # Fold the pairwise LCA over all nodes: the common ancestor of a set is
    # the repeated pairwise common ancestor. (The previous implementation
    # considered only the two shallowest nodes -- wrong when another node
    # lies in a different subtree -- indexed the distance map with call
    # syntax, distances(node), raising TypeError, and its depth-equalizing
    # loop condition was inverted, so mismatched depths could walk past the
    # root.)
    ancestor = nodes[0]
    for node in nodes[1:]:
        ancestor = pairwise(ancestor, node)
    return ancestor, distances[ancestor]
def find_common_ancestors(tree, node_groups):
    """For each group of nodes, yield its nearest common ancestor paired
    with that ancestor's distance to root; the distance map is computed
    once and shared across groups."""
    distances = edge_count_to_root(tree)
    return (find_common_ancestor(tree, group, ds=distances)
            for group in node_groups)
def reroot(tree, new_root):
    """Reverse, in place, the direction of every edge on the path from
    new_root up to the current root, making new_root the root."""
    ancestor = next(tree.predecessors_iter(new_root), None)
    if not ancestor:
        # new_root already has no parent: nothing to do.
        return
    inverted = [new_root]
    while ancestor is not None:
        tree.remove_edge(ancestor, inverted[-1])
        inverted.append(ancestor)
        ancestor = next(tree.predecessors_iter(ancestor), None)
    # Re-add the removed chain with all edges pointing the other way.
    tree.add_path(inverted)
def simplify(tree, keepers):
    """ Given a tree and a set of nodes to keep, create a new tree
    where only the nodes to keep and the branch points between them are preserved.
    WARNING: will reroot the tree at the first of the keepers.
    WARNING: keepers can't be empty. """
    # Ensure no repeats
    keepers = set(keepers)
    # Add all keeper nodes to the minified graph
    mini = Graph()
    for node in keepers:
        mini.add_node(node)
    # Pick the first to be the root node of the tree, removing it
    # NOTE(review): set.pop() removes an arbitrary element, so which keeper
    # becomes the root is not deterministic. reroot() mutates the INPUT tree.
    root = keepers.pop()
    reroot(tree, root)
    # For every keeper node, traverse towards the parent until
    # finding one that is in the minified graph, or is a branch node
    # children[b] counts how many upward walks passed through branch node b.
    children = defaultdict(int)
    seen_branch_nodes = set(keepers) # a copy
    paths = []
    # For all keeper nodes except the root
    for node in keepers:
        path = [node]
        paths.append(path)
        parent = next(tree.predecessors_iter(node), None)
        while parent is not None:
            if parent in mini:
                # Reached one of the keeper nodes
                path.append(parent)
                break
            elif len(tree.succ[parent]) > 1:
                # Reached a branch node
                children[parent] += 1
                path.append(parent)
                # A branch node already claimed by an earlier walk ends this
                # path; the remaining route to the root was covered before.
                if parent in seen_branch_nodes:
                    break
                seen_branch_nodes.add(parent)
            parent = next(tree.predecessors_iter(parent), None)
    for path in paths:
        # A path starts and ends with desired nodes for the minified tree.
        # The nodes in the middle of the path are branch nodes
        # that must be added to mini only if they have been visited more than once.
        origin = path[0]
        # NOTE: xrange is Python 2 only, matching this module's izip import.
        for i in xrange(1, len(path) -1):
            if children[path[i]] > 1:
                mini.add_edge(origin, path[i])
                origin = path[i]
        mini.add_edge(origin, path[-1])
    return mini
def partition(tree, root_node=None):
    """ Partition the tree as a list of sequences of node IDs,
    with branch nodes repeated as ends of all sequences except the longest
    one that finishes at the root.
    Each sequence runs from an end node to either the root or a branch node. """
    distances = edge_count_to_root(tree, root_node=root_node) # distance in number of edges from root
    # Nodes already claimed by a previously yielded sequence; a walk stops
    # (after appending the shared node) as soon as it reaches one of these.
    seen = set()
    # Iterate end nodes sorted from highest to lowest distance to root
    # (deepest first), so the sequence that reaches the root is the longest.
    endNodeIDs = (nID for nID in tree.nodes() if 0 == len(tree.successors(nID)))
    for nodeID in sorted(endNodeIDs, key=distances.get, reverse=True):
        # Each sequence is built child -> parent, starting at a leaf.
        sequence = [nodeID]
        parentID = next(tree.predecessors_iter(nodeID), None)
        while parentID is not None:
            sequence.append(parentID)
            if parentID in seen:
                break
            seen.add(parentID)
            parentID = next(tree.predecessors_iter(parentID), None)
        # Isolated single nodes (no parent at all) are not yielded.
        if len(sequence) > 1:
            yield sequence
def spanning_tree(tree, preserve):
    """ Return a new DiGraph with the spanning tree including the desired nodes.
    preserve: the set of nodes that delimit the spanning tree. """
    spanning = DiGraph()
    preserve = set(preserve) # duplicate, will be altered
    if 1 == len(preserve):
        # NOTE(review): iterator .next() is Python 2 only (py3: next(iter(...))).
        spanning.add_node(iter(preserve).next())
        return spanning
    # If the root is itself a branch point, work on a copy rerooted at an
    # arbitrary end node so partition() yields root-terminated sequences.
    if len(tree.successors(find_root(tree))) > 1:
        tree = tree.copy()
        # First end node found
        endNode = (node for node in tree if not next(tree.successors_iter(node), None)).next()
        reroot(tree, endNode)
    # Number of distinct preserve-nodes already placed into `spanning`.
    n_seen = 0
    # Start from shortest sequence
    for seq in sorted(partition(tree), key=len):
        # `path` collects the preserve nodes in this sequence plus every node
        # between them (intermediates are kept only once a preserve node has
        # been seen, i.e. once `path` is non-empty).
        path = []
        for node in seq:
            if node in preserve:
                path.append(node)
                if node not in spanning:
                    n_seen += 1
                if len(preserve) == n_seen:
                    break
            elif path:
                path.append(node)
        if path:
            spanning.add_path(path)
            # If the sequence's last node ended the path, later sequences may
            # attach to it, so treat it as a preserve node from now on.
            if seq[-1] == path[-1]:
                preserve.add(path[-1])
            if len(preserve) == n_seen:
                break
    return spanning
def cable_length(tree, locations):
    """ locations: a dictionary of nodeID vs iterable of node position (1d, 2d, 3d, ...)
    Returns the total cable length (sum of Euclidean edge lengths). """
    total = 0
    for a, b in tree.edges_iter():
        squared = sum((q - p) ** 2 for p, q in zip(locations[a], locations[b]))
        total += sqrt(squared)
    return total
def lazy_load_trees(skeleton_ids, node_properties):
    """ Return a lazy collection of pairs of (long, DiGraph)
    representing (skeleton_id, tree).
    The node_properties is a list of strings, each being a name of a column
    in the django model of the Treenode table that is not the treenode id, parent_id
    or skeleton_id. """
    # Always fetch id/parent_id/skeleton_id first; extra columns follow.
    values_list = ('id', 'parent_id', 'skeleton_id')
    props = tuple(set(node_properties) - set(values_list))
    values_list += props
    # Rows ordered by skeleton so each skeleton's nodes arrive contiguously,
    # allowing a single streaming pass.
    ts = Treenode.objects.filter(skeleton__in=skeleton_ids) \
        .order_by('skeleton') \
        .values_list(*values_list)
    skid = None
    tree = None
    for t in ts:
        # Row layout: t[0]=id, t[1]=parent_id, t[2]=skeleton_id, t[3:]=props.
        if t[2] != skid:
            if tree:
                yield (skid, tree)
            # Prepare for the next one
            skid = t[2]
            tree = DiGraph()
        # NOTE: izip/islice here are Python 2 idioms (izip is py2-only).
        fields = {k: v for k,v in izip(props, islice(t, 3, 3 + len(props)))}
        tree.add_node(t[0], fields)
        if t[1]:
            # From child to parent
            tree.add_edge(t[0], t[1])
    # Flush the final skeleton (no trailing sentinel row exists).
    if tree:
        yield (skid, tree)
| gpl-3.0 |
brokenjacobs/ansible | test/units/modules/network/ovs/test_openvswitch_bridge.py | 76 | 8367 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.ovs import openvswitch_bridge
from .ovs_module import TestOpenVSwitchModule, load_fixture, set_module_args
# Maps each test method name below to the ordered list of
# (rc, stdout-fixture-or-empty, stderr) triples that the mocked
# AnsibleModule.run_command must return, one triple per expected ovs-vsctl
# invocation. Non-empty second elements are fixture file names that
# load_fixtures() reads from disk; empty strings pass through unchanged.
test_name_side_effect_matrix = {
    'test_openvswitch_bridge_absent_idempotent': [
        (0, '', '')],
    'test_openvswitch_bridge_absent_removes_bridge': [
        (0, 'list_br_test_br.cfg', ''),
        (0, '', ''),
        (0, '', ''),
        (0, '', ''),
        (0, '', ''),
        (0, '', '')],
    'test_openvswitch_bridge_present_idempotent': [
        (0, 'list_br_test_br.cfg', ''),
        (0, 'br_to_parent_test_br.cfg', ''),
        (0, 'br_to_vlan_zero.cfg', ''),
        (0, 'get_fail_mode_secure.cfg', ''),
        (0, 'br_get_external_id_foo_bar.cfg', '')],
    'test_openvswitch_bridge_present_creates_bridge': [
        (0, '', ''),
        (0, '', ''),
        (0, '', ''),
        (0, '', '')],
    'test_openvswitch_bridge_present_creates_fake_bridge': [
        (0, '', ''),
        (0, '', ''),
        (0, '', ''),
        (0, '', '')],
    'test_openvswitch_bridge_present_adds_external_id': [
        (0, 'list_br_test_br.cfg', ''),
        (0, 'br_to_parent_test_br.cfg', ''),
        (0, 'br_to_vlan_zero.cfg', ''),
        (0, 'get_fail_mode_secure.cfg', ''),
        (0, 'br_get_external_id_foo_bar.cfg', ''),
        (0, '', '')],
    'test_openvswitch_bridge_present_clears_external_id': [
        (0, 'list_br_test_br.cfg', ''),
        (0, 'br_to_parent_test_br.cfg', ''),
        (0, 'br_to_vlan_zero.cfg', ''),
        (0, 'get_fail_mode_secure.cfg', ''),
        (0, 'br_get_external_id_foo_bar.cfg', ''),
        (0, '', '')],
    'test_openvswitch_bridge_present_changes_fail_mode': [
        (0, 'list_br_test_br.cfg', ''),
        (0, 'br_to_parent_test_br.cfg', ''),
        (0, 'br_to_vlan_zero.cfg', ''),
        (0, 'get_fail_mode_secure.cfg', ''),
        (0, 'br_get_external_id_foo_bar.cfg', ''),
        (0, '', '')],
    'test_openvswitch_bridge_present_runs_set_mode': [
        (0, '', ''),
        (0, '', ''),
        (0, '', ''),
        (0, '', '')],
    }
class TestOpenVSwitchBridgeModule(TestOpenVSwitchModule):
    """Unit tests for the openvswitch_bridge module.
    run_command/get_bin_path are mocked, so no real ovs-vsctl binary is
    required; the canned replies come from test_name_side_effect_matrix.
    """
    module = openvswitch_bridge
    def setUp(self):
        # Patch out AnsibleModule.run_command and get_bin_path so the module
        # under test never shells out to ovs-vsctl.
        self.mock_run_command = (
            patch('ansible.module_utils.basic.AnsibleModule.run_command'))
        self.run_command = self.mock_run_command.start()
        self.mock_get_bin_path = (
            patch('ansible.module_utils.basic.AnsibleModule.get_bin_path'))
        self.get_bin_path = self.mock_get_bin_path.start()
    def tearDown(self):
        # Undo the patches started in setUp.
        self.mock_run_command.stop()
        self.mock_get_bin_path.stop()
    def load_fixtures(self, test_name):
        # Turn the (rc, fixture-or-empty, err) triples configured for this
        # test into concrete run_command side effects, loading fixture files
        # from disk where a name is given.
        test_side_effects = []
        for s in test_name_side_effect_matrix[test_name]:
            rc = s[0]
            out = s[1] if s[1] == '' else str(load_fixture(s[1]))
            err = s[2]
            side_effect_with_fixture_loaded = (rc, out, err)
            test_side_effects.append(side_effect_with_fixture_loaded)
        self.run_command.side_effect = test_side_effects
        self.get_bin_path.return_value = '/usr/bin/ovs-vsctl'
    def test_openvswitch_bridge_absent_idempotent(self):
        set_module_args(dict(state='absent',
                             bridge='test-br'))
        self.execute_module(test_name='test_openvswitch_bridge_absent_idempotent')
    def test_openvswitch_bridge_absent_removes_bridge(self):
        set_module_args(dict(state='absent',
                             bridge='test-br'))
        commands = ['/usr/bin/ovs-vsctl -t 5 del-br test-br']
        self.execute_module(changed=True, commands=commands,
                            test_name='test_openvswitch_bridge_absent_removes_bridge')
    def test_openvswitch_bridge_present_idempotent(self):
        set_module_args(dict(state='present',
                             bridge='test-br',
                             fail_mode='secure',
                             external_ids={'foo': 'bar'}))
        self.execute_module(test_name='test_openvswitch_bridge_present_idempotent')
    def test_openvswitch_bridge_present_creates_bridge(self):
        set_module_args(dict(state='present',
                             bridge='test-br',
                             fail_mode='secure',
                             external_ids={'foo': 'bar'}))
        commands = [
            '/usr/bin/ovs-vsctl -t 5 add-br test-br',
            '/usr/bin/ovs-vsctl -t 5 set-fail-mode test-br secure',
            '/usr/bin/ovs-vsctl -t 5 br-set-external-id test-br foo bar'
        ]
        self.execute_module(changed=True, commands=commands,
                            test_name='test_openvswitch_bridge_present_creates_bridge')
    def test_openvswitch_bridge_present_creates_fake_bridge(self):
        set_module_args(dict(state='present',
                             bridge='test-br2',
                             parent='test-br',
                             vlan=10))
        commands = [
            '/usr/bin/ovs-vsctl -t 5 add-br test-br2 test-br 10',
        ]
        self.execute_module(changed=True, commands=commands,
                            test_name='test_openvswitch_bridge_present_creates_fake_bridge')
    def test_openvswitch_bridge_present_adds_external_id(self):
        set_module_args(dict(state='present',
                             bridge='test-br',
                             fail_mode='secure',
                             external_ids={'bip': 'bop'}))
        commands = [
            '/usr/bin/ovs-vsctl -t 5 br-set-external-id test-br bip bop'
        ]
        self.execute_module(changed=True, commands=commands,
                            test_name='test_openvswitch_bridge_present_adds_external_id')
    def test_openvswitch_bridge_present_clears_external_id(self):
        set_module_args(dict(state='present',
                             bridge='test-br',
                             fail_mode='secure',
                             external_ids={'foo': ''}))
        commands = [
            '/usr/bin/ovs-vsctl -t 5 br-set-external-id test-br foo '
        ]
        self.execute_module(changed=True, commands=commands,
                            test_name='test_openvswitch_bridge_present_clears_external_id')
    def test_openvswitch_bridge_present_changes_fail_mode(self):
        set_module_args(dict(state='present',
                             bridge='test-br',
                             fail_mode='standalone',
                             external_ids={'foo': 'bar'}))
        commands = [
            '/usr/bin/ovs-vsctl -t 5 set-fail-mode test-br standalone'
        ]
        self.execute_module(changed=True, commands=commands,
                            test_name='test_openvswitch_bridge_present_changes_fail_mode')
    def test_openvswitch_bridge_present_runs_set_mode(self):
        set_module_args(dict(state='present',
                             bridge='test-br',
                             fail_mode='secure',
                             external_ids={'foo': 'bar'},
                             set="bridge test-br datapath_type=netdev"))
        commands = [
            '/usr/bin/ovs-vsctl -t 5 add-br test-br -- set bridge test-br'
            ' datapath_type=netdev',
            '/usr/bin/ovs-vsctl -t 5 set-fail-mode test-br secure',
            '/usr/bin/ovs-vsctl -t 5 br-set-external-id test-br foo bar'
        ]
        self.execute_module(changed=True, commands=commands,
                            test_name='test_openvswitch_bridge_present_runs_set_mode')
| gpl-3.0 |
roopali8/keystone | keystone/contrib/federation/migrate_repo/versions/007_add_remote_id_table.py | 5 | 1590 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as orm
def upgrade(migrate_engine):
    """Move identity_provider.remote_id into the new idp_remote_ids table.
    Creates idp_remote_ids (idp_id FK -> identity_provider.id with CASCADE
    delete, remote_id as primary key), copies every non-NULL remote_id over,
    then drops the old column, allowing an IdP to have many remote IDs.
    """
    meta = orm.MetaData()
    meta.bind = migrate_engine
    # Reflect the existing table so its columns can be read and altered.
    idp_table = orm.Table('identity_provider', meta, autoload=True)
    remote_id_table = orm.Table(
        'idp_remote_ids',
        meta,
        orm.Column('idp_id',
                   orm.String(64),
                   orm.ForeignKey('identity_provider.id',
                                  ondelete='CASCADE')),
        orm.Column('remote_id',
                   orm.String(255),
                   primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8')
    remote_id_table.create(migrate_engine, checkfirst=True)
    # Only rows that actually carry a remote_id need migrating.
    select = orm.sql.select([idp_table.c.id, idp_table.c.remote_id]).where(
        idp_table.c.remote_id.isnot(None))
    for identity in migrate_engine.execute(select):
        remote_idp_entry = {'idp_id': identity.id,
                            'remote_id': identity.remote_id}
        remote_id_table.insert(remote_idp_entry).execute()
    # Drop the column only after the data has been copied.
    idp_table.drop_column('remote_id')
| apache-2.0 |
xmendez/wfuzz | tests/filters/test_filter.py | 1 | 2559 | import pytest
@pytest.mark.parametrize(
    "filter_string, expected_result",
    [
        ("h=28 or w=6 or l=2", True),
        ("r.params.get.param2='2'", True),
        ("r.headers.response.Location", "https://wfuzz.readthedocs.io/en/latest/"),
        ("r.headers.response.notthere", {}),
        ("r.params.get.notthere", {}),
        ("r.cookies.response.notthere", {}),
        ("r.cookies.response.notthere='something'", False),
        ("r.cookies.response.notthere~'something'", False),
        ("r.headers.request.Host", "www.wfuzz.org"),
        ("r.headers.request.host", "www.wfuzz.org"),
        ("r.headers.response.SeRVEr", "nginx/1.14.0 (Ubuntu)"),
        ("r.headers.response.server", "nginx/1.14.0 (Ubuntu)"),
        ("r.cookies.request.cookie1", "1"),
        ("r.cookies.request.cOOkiE1", "1"),
        ("r.cookies.response.name", "Nicholas"),
        ("r.cookies.response.nAMe", "Nicholas"),
        ("r.params.get.param1", "1"),
        ("r.params.get.pAraM1", "1"),
    ],
)
def test_filter_ret_values(
    filter_obj, example_full_fuzzres, filter_string, expected_result
):
    # filter_obj and example_full_fuzzres are pytest fixtures supplied by the
    # suite's conftest (not visible in this file). Each case checks that
    # evaluating the filter expression against a full fuzz result yields the
    # expected value (header/cookie/param lookup appears case-insensitive;
    # missing fields resolve to {}).
    assert filter_obj.is_visible(example_full_fuzzres, filter_string) == expected_result
@pytest.mark.parametrize(
    "filter_string, expected_result",
    [
        ("r.headers.response.notthere", {}),
        ("r.params.get.notthere", {}),
        ("r.cookies.response.notthere", {}),
        ("r.cookies.response.notthere='something'", False),
    ],
)
def test_filter_ret_values_no_response(
    filter_obj, example_full_fuzzres_no_response, filter_string, expected_result
):
    # Same idea as test_filter_ret_values, but against a fuzz result without
    # an HTTP response attached: missing fields must still resolve to
    # {} / False rather than raising.
    assert (
        filter_obj.is_visible(example_full_fuzzres_no_response, filter_string)
        == expected_result
    )
@pytest.mark.parametrize(
    "filter_string, expected_result",
    [
        (
            "r.cookies.response.name|diff('test')",
            "--- prev\n\n+++ current\n\n@@ -1 +1 @@\n\n-test\n+Nicholas",
        ),
        ("r.cookies.response.nAMe|upper()", "NICHOLAS"),
        ("r.cookies.response.name|upper()", "NICHOLAS"),
        ("r.cookies.response.name|lower()", "nicholas"),
        ("r.cookies.response.name|startswith('N')", True),
        ("r.cookies.response.name|replace('N','n')", "nicholas"),
        ("'%2e%2e'|unquote()", ".."),
        ("'%2e%2f'|decode('urlencode')", "./"),
        ("'%%'|encode('urlencode')", "%25%25"),
    ],
)
def test_filter_operators(
    filter_obj, example_full_fuzzres, filter_string, expected_result
):
    # Exercises the pipe ("|") operators of the filter language: string
    # transforms (upper/lower/replace/startswith), unified-diff output, and
    # URL-encoding helpers on literals.
    assert filter_obj.is_visible(example_full_fuzzres, filter_string) == expected_result
| gpl-2.0 |
chiviak/CouchPotatoServer | libs/bs4/dammit.py | 408 | 29302 | # -*- coding: utf-8 -*-
"""Beautiful Soup bonus library: Unicode, Dammit
This library converts a bytestream to Unicode through any means
necessary. It is heavily based on code from Mark Pilgrim's Universal
Feed Parser. It works best on XML and XML, but it does not rewrite the
XML or HTML to reflect a new encoding; that's the tree builder's job.
"""
import codecs
from htmlentitydefs import codepoint2name
import re
import logging
import string
# Import a library to autodetect character encodings.
# Resolve chardet_dammit(bytestring) -> encoding name or None, preferring the
# C implementation (cchardet), then pure-Python chardet, else a no-op stub.
chardet_type = None
try:
    # First try the fast C implementation.
    # PyPI package: cchardet
    import cchardet
    def chardet_dammit(s):
        return cchardet.detect(s)['encoding']
except ImportError:
    try:
        # Fall back to the pure Python implementation
        # Debian package: python-chardet
        # PyPI package: chardet
        import chardet
        def chardet_dammit(s):
            return chardet.detect(s)['encoding']
        #import chardet.constants
        #chardet.constants._debug = 1
    except ImportError:
        # No chardet available.
        def chardet_dammit(s):
            return None
# Available from http://cjkpython.i18n.org/.
try:
    import iconv_codec
except ImportError:
    pass
# Byte-level, case-insensitive patterns for in-document encoding
# declarations: the XML prolog's encoding attribute, and an HTML
# <meta ... charset=...> tag (capture group 1 is the encoding name).
xml_encoding_re = re.compile(
    '^<\?.*encoding=[\'"](.*?)[\'"].*\?>'.encode(), re.I)
html_meta_re = re.compile(
    '<\s*meta[^>]+charset\s*=\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I)
class EntitySubstitution(object):
    """Substitute XML or HTML entities for the corresponding characters."""

    def _populate_class_variables():
        """Build the character->HTML-entity lookup, its reverse, and a regex
        matching every character that has a named entity (except the double
        quote -- see the comment below)."""
        lookup = {}
        reverse_lookup = {}
        characters_for_re = []
        for codepoint, name in list(codepoint2name.items()):
            character = unichr(codepoint)
            if codepoint != 34:
                # There's no point in turning the quotation mark into
                # &quot;, unless it happens within an attribute value, which
                # is handled elsewhere.
                characters_for_re.append(character)
                lookup[character] = name
            # But we do want to turn &quot; into the quotation mark.
            reverse_lookup[name] = character
        re_definition = "[%s]" % "".join(characters_for_re)
        return lookup, reverse_lookup, re.compile(re_definition)
    (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER,
     CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables()

    # The five XML predefined entities.
    CHARACTER_TO_XML_ENTITY = {
        "'": "apos",
        '"': "quot",
        "&": "amp",
        "<": "lt",
        ">": "gt",
    }

    # Matches angle brackets, plus ampersands that do NOT look like the
    # start of a numeric/hex/named entity reference.
    BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
                                           "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
                                           ")")
    # Matches every angle bracket and every ampersand unconditionally.
    AMPERSAND_OR_BRACKET = re.compile("([<>&])")

    @classmethod
    def _substitute_html_entity(cls, matchobj):
        """re.sub callback: replace a matched character with its named
        HTML entity."""
        entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0))
        return "&%s;" % entity

    @classmethod
    def _substitute_xml_entity(cls, matchobj):
        """Used with a regular expression to substitute the
        appropriate XML entity for an XML special character."""
        entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)]
        return "&%s;" % entity

    @classmethod
    # NOTE(review): declared @classmethod but the first parameter is named
    # 'self'; it actually receives the class object.
    def quoted_attribute_value(self, value):
        """Make a value into a quoted XML attribute, possibly escaping it.

        Most strings will be quoted using double quotes.

         Bob's Bar -> "Bob's Bar"

        If a string contains double quotes, it will be quoted using
        single quotes.

         Welcome to "my bar" -> 'Welcome to "my bar"'

        If a string contains both single and double quotes, the
        double quotes will be escaped, and the string will be quoted
        using double quotes.

         Welcome to "Bob's Bar" -> "Welcome to &quot;Bob's bar&quot;"
        """
        quote_with = '"'
        if '"' in value:
            if "'" in value:
                # The string contains both single and double
                # quotes.  Turn the double quotes into
                # entities. We quote the double quotes rather than
                # the single quotes because the entity name is
                # "&quot;" whether this is HTML or XML.  If we
                # quoted the single quotes, we'd have to decide
                # between &apos; and &squot;.
                # (Restored from upstream: the HTML-entity-mangled copy had
                # a literal '"' here, i.e. replace_with = """, which broke
                # the source by opening a triple-quoted string.)
                replace_with = "&quot;"
                value = value.replace('"', replace_with)
            else:
                # There are double quotes but no single quotes.
                # We can use single quotes to quote the attribute.
                quote_with = "'"
        return quote_with + value + quote_with

    @classmethod
    def substitute_xml(cls, value, make_quoted_attribute=False):
        """Substitute XML entities for special XML characters.

        :param value: A string to be substituted. The less-than sign
          will become &lt;, the greater-than sign will become &gt;,
          and any ampersands will become &amp;. If you want ampersands
          that appear to be part of an entity definition to be left
          alone, use substitute_xml_containing_entities() instead.

        :param make_quoted_attribute: If True, then the string will be
         quoted, as befits an attribute value.
        """
        # Escape angle brackets and ampersands.
        value = cls.AMPERSAND_OR_BRACKET.sub(
            cls._substitute_xml_entity, value)
        if make_quoted_attribute:
            value = cls.quoted_attribute_value(value)
        return value

    @classmethod
    def substitute_xml_containing_entities(
        cls, value, make_quoted_attribute=False):
        """Substitute XML entities for special XML characters.

        :param value: A string to be substituted. The less-than sign will
          become &lt;, the greater-than sign will become &gt;, and any
          ampersands that are not part of an entity defition will
          become &amp;.

        :param make_quoted_attribute: If True, then the string will be
         quoted, as befits an attribute value.
        """
        # Escape angle brackets, and ampersands that aren't part of
        # entities.
        value = cls.BARE_AMPERSAND_OR_BRACKET.sub(
            cls._substitute_xml_entity, value)
        if make_quoted_attribute:
            value = cls.quoted_attribute_value(value)
        return value

    @classmethod
    def substitute_html(cls, s):
        """Replace certain Unicode characters with named HTML entities.

        This differs from data.encode(encoding, 'xmlcharrefreplace')
        in that the goal is to make the result more readable (to those
        with ASCII displays) rather than to recover from
        errors. There's absolutely nothing wrong with a UTF-8 string
        containg a LATIN SMALL LETTER E WITH ACUTE, but replacing that
        character with "&eacute;" will make it more readable to some
        people.
        """
        return cls.CHARACTER_TO_HTML_ENTITY_RE.sub(
            cls._substitute_html_entity, s)
class EncodingDetector:
    """Suggests a number of possible encodings for a bytestring.

    Order of precedence:

    1. Encodings you specifically tell EncodingDetector to try first
       (the override_encodings argument to the constructor).

    2. An encoding declared within the bytestring itself, either in an
       XML declaration (if the bytestring is to be interpreted as an XML
       document), or in a <meta> tag (if the bytestring is to be
       interpreted as an HTML document.)

    3. An encoding detected through textual analysis by chardet,
       cchardet, or a similar external library.

    4. UTF-8.

    5. Windows-1252.
    """

    def __init__(self, markup, override_encodings=None, is_html=False):
        """Set up detection for one document.

        :param markup: the (byte)string whose encoding is unknown.
        :param override_encodings: encodings to try before any that are
            sniffed or detected.
        :param is_html: whether an HTML <meta> declaration should be
            searched for in addition to an XML declaration.
        """
        self.override_encodings = override_encodings or []
        self.chardet_encoding = None
        self.is_html = is_html
        self.declared_encoding = None

        # First order of business: strip a byte-order mark.
        self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup)

    def _usable(self, encoding, tried):
        # Return True (and record the attempt) only for an encoding that
        # is non-None and has not been yielded before.
        if encoding is not None:
            encoding = encoding.lower()
            if encoding not in tried:
                tried.add(encoding)
                return True
        return False

    @property
    def encodings(self):
        """Yield a number of encodings that might work for this markup."""
        tried = set()
        for e in self.override_encodings:
            if self._usable(e, tried):
                yield e

        # Did the document originally start with a byte-order mark
        # that indicated its encoding?
        if self._usable(self.sniffed_encoding, tried):
            yield self.sniffed_encoding

        # Look within the document for an XML or HTML encoding
        # declaration.
        if self.declared_encoding is None:
            self.declared_encoding = self.find_declared_encoding(
                self.markup, self.is_html)
        if self._usable(self.declared_encoding, tried):
            yield self.declared_encoding

        # Use third-party character set detection to guess at the
        # encoding.
        if self.chardet_encoding is None:
            self.chardet_encoding = chardet_dammit(self.markup)
        if self._usable(self.chardet_encoding, tried):
            yield self.chardet_encoding

        # As a last-ditch effort, try utf-8 and windows-1252.
        for e in ('utf-8', 'windows-1252'):
            if self._usable(e, tried):
                yield e

    @classmethod
    def strip_byte_order_mark(cls, data):
        """If a byte-order mark is present, strip it and return the encoding it implies.

        :returns: a ``(data, encoding)`` tuple; ``encoding`` is None when
            no BOM was recognized.
        """
        encoding = None
        # BUG FIX: the UTF-16 branches used to compare the bytes
        # ``data[2:4]`` against the *text* string '\x00\x00'.  On
        # Python 3 a bytes/str comparison is always unequal, so a
        # UTF-32LE BOM (b'\xff\xfe\x00\x00') was misdetected as
        # UTF-16LE.  Compare against a bytes literal instead.
        if (len(data) >= 4) and (data[:2] == b'\xfe\xff') \
                and (data[2:4] != b'\x00\x00'):
            encoding = 'utf-16be'
            data = data[2:]
        elif (len(data) >= 4) and (data[:2] == b'\xff\xfe') \
                and (data[2:4] != b'\x00\x00'):
            encoding = 'utf-16le'
            data = data[2:]
        elif data[:3] == b'\xef\xbb\xbf':
            encoding = 'utf-8'
            data = data[3:]
        elif data[:4] == b'\x00\x00\xfe\xff':
            encoding = 'utf-32be'
            data = data[4:]
        elif data[:4] == b'\xff\xfe\x00\x00':
            encoding = 'utf-32le'
            data = data[4:]
        return data, encoding

    @classmethod
    def find_declared_encoding(cls, markup, is_html=False, search_entire_document=False):
        """Given a document, tries to find its declared encoding.

        An XML encoding is declared at the beginning of the document.
        An HTML encoding is declared in a <meta> tag, hopefully near the
        beginning of the document.

        :returns: the lowercased declared encoding, or None.
        """
        if search_entire_document:
            xml_endpos = html_endpos = len(markup)
        else:
            # Only scan a prefix: XML declarations must be at the very
            # start; <meta> tags should be near it.
            xml_endpos = 1024
            html_endpos = max(2048, int(len(markup) * 0.05))

        declared_encoding = None
        declared_encoding_match = xml_encoding_re.search(markup, endpos=xml_endpos)
        if not declared_encoding_match and is_html:
            declared_encoding_match = html_meta_re.search(markup, endpos=html_endpos)
        if declared_encoding_match is not None:
            declared_encoding = declared_encoding_match.groups()[0].decode(
                'ascii')
        if declared_encoding:
            return declared_encoding.lower()
        return None
class UnicodeDammit:
    """A class for detecting the encoding of a *ML document and
    converting it to a Unicode string. If the source encoding is
    windows-1252, can replace MS smart quotes with their HTML or XML
    equivalents."""

    # This dictionary maps commonly seen values for "charset" in HTML
    # meta tags to the corresponding Python codec names. It only covers
    # values that aren't in Python's aliases and can't be determined
    # by the heuristics in find_codec.
    CHARSET_ALIASES = {"macintosh": "mac-roman",
                       "x-sjis": "shift-jis"}

    ENCODINGS_WITH_SMART_QUOTES = [
        "windows-1252",
        "iso-8859-1",
        "iso-8859-2",
    ]

    def __init__(self, markup, override_encodings=None,
                 smart_quotes_to=None, is_html=False):
        """Detect the encoding of `markup` and decode it to Unicode.

        :param markup: a bytestring, or an already-decoded string.
        :param override_encodings: encodings to try first.  (Fixed: the
            default used to be a shared mutable list; ``None`` behaves
            identically because EncodingDetector normalizes it to [].)
        :param smart_quotes_to: 'xml', 'html' or 'ascii' -- how MS smart
            quotes should be rewritten; None leaves them alone.
        :param is_html: whether to look for an HTML <meta> encoding
            declaration in addition to an XML one.
        """
        if override_encodings is None:
            override_encodings = []
        self.smart_quotes_to = smart_quotes_to
        self.tried_encodings = []
        self.contains_replacement_characters = False
        self.is_html = is_html

        self.detector = EncodingDetector(markup, override_encodings, is_html)

        # Short-circuit if the data is in Unicode to begin with.
        # NOTE(review): `unicode` is the Python 2 builtin -- presumably
        # aliased for Python 3 elsewhere in this module; confirm.
        if isinstance(markup, unicode) or markup == '':
            self.markup = markup
            self.unicode_markup = unicode(markup)
            self.original_encoding = None
            return

        # The encoding detector may have stripped a byte-order mark.
        # Use the stripped markup from this point on.
        self.markup = self.detector.markup

        u = None
        for encoding in self.detector.encodings:
            markup = self.detector.markup
            u = self._convert_from(encoding)
            if u is not None:
                break

        if not u:
            # None of the encodings worked. As an absolute last resort,
            # try them again with character replacement.
            for encoding in self.detector.encodings:
                if encoding != "ascii":
                    u = self._convert_from(encoding, "replace")
                if u is not None:
                    logging.warning(
                        "Some characters could not be decoded, and were "
                        "replaced with REPLACEMENT CHARACTER.")
                    self.contains_replacement_characters = True
                    break

        # If none of that worked, we could at this point force it to
        # ASCII, but that would destroy so much data that I think
        # giving up is better.
        self.unicode_markup = u
        if not u:
            self.original_encoding = None

    def _sub_ms_char(self, match):
        """Changes a MS smart quote character to an XML or HTML
        entity, or an ASCII character."""
        orig = match.group(1)
        if self.smart_quotes_to == 'ascii':
            sub = self.MS_CHARS_TO_ASCII.get(orig).encode()
        else:
            sub = self.MS_CHARS.get(orig)
            if type(sub) == tuple:
                # (entity name, hex codepoint) -- pick by output flavor.
                if self.smart_quotes_to == 'xml':
                    sub = '&#x'.encode() + sub[1].encode() + ';'.encode()
                else:
                    sub = '&'.encode() + sub[0].encode() + ';'.encode()
            else:
                sub = sub.encode()
        return sub

    def _convert_from(self, proposed, errors="strict"):
        """Try to decode self.markup as `proposed`; return the Unicode
        string on success, None on failure.  Each (encoding, errors)
        pair is attempted at most once."""
        proposed = self.find_codec(proposed)
        if not proposed or (proposed, errors) in self.tried_encodings:
            return None
        self.tried_encodings.append((proposed, errors))
        markup = self.markup

        # Convert smart quotes to HTML if coming from an encoding
        # that might have them.
        if (self.smart_quotes_to is not None
            and proposed in self.ENCODINGS_WITH_SMART_QUOTES):
            smart_quotes_re = b"([\x80-\x9f])"
            smart_quotes_compiled = re.compile(smart_quotes_re)
            markup = smart_quotes_compiled.sub(self._sub_ms_char, markup)

        try:
            #print "Trying to convert document to %s (errors=%s)" % (
            #    proposed, errors)
            u = self._to_unicode(markup, proposed, errors)
            self.markup = u
            self.original_encoding = proposed
        except Exception as e:
            #print "That didn't work!"
            #print e
            return None
        #print "Correct encoding: %s" % proposed
        return self.markup

    def _to_unicode(self, data, encoding, errors="strict"):
        '''Given a string and its encoding, decodes the string into Unicode.
        %encoding is a string recognized by encodings.aliases'''
        return unicode(data, encoding, errors)

    @property
    def declared_html_encoding(self):
        """The encoding declared in an HTML <meta> tag, if any."""
        if not self.is_html:
            return None
        return self.detector.declared_encoding

    def find_codec(self, charset):
        """Map a "charset" value to a Python codec name, trying aliases
        and common hyphen/underscore variations."""
        value = (self._codec(self.CHARSET_ALIASES.get(charset, charset))
                 or (charset and self._codec(charset.replace("-", "")))
                 or (charset and self._codec(charset.replace("-", "_")))
                 or (charset and charset.lower())
                 or charset
                 )
        if value:
            return value.lower()
        return None

    def _codec(self, charset):
        """Return `charset` if Python has a codec for it, else None."""
        if not charset:
            return charset
        codec = None
        try:
            codecs.lookup(charset)
            codec = charset
        except (LookupError, ValueError):
            pass
        return codec

    # A partial mapping of ISO-Latin-1 to HTML entities/XML numeric entities.
    MS_CHARS = {b'\x80': ('euro', '20AC'),
                b'\x81': ' ',
                b'\x82': ('sbquo', '201A'),
                b'\x83': ('fnof', '192'),
                b'\x84': ('bdquo', '201E'),
                b'\x85': ('hellip', '2026'),
                b'\x86': ('dagger', '2020'),
                b'\x87': ('Dagger', '2021'),
                b'\x88': ('circ', '2C6'),
                b'\x89': ('permil', '2030'),
                b'\x8A': ('Scaron', '160'),
                b'\x8B': ('lsaquo', '2039'),
                b'\x8C': ('OElig', '152'),
                b'\x8D': '?',
                b'\x8E': ('#x17D', '17D'),
                b'\x8F': '?',
                b'\x90': '?',
                b'\x91': ('lsquo', '2018'),
                b'\x92': ('rsquo', '2019'),
                b'\x93': ('ldquo', '201C'),
                b'\x94': ('rdquo', '201D'),
                b'\x95': ('bull', '2022'),
                b'\x96': ('ndash', '2013'),
                b'\x97': ('mdash', '2014'),
                b'\x98': ('tilde', '2DC'),
                b'\x99': ('trade', '2122'),
                b'\x9a': ('scaron', '161'),
                b'\x9b': ('rsaquo', '203A'),
                b'\x9c': ('oelig', '153'),
                b'\x9d': '?',
                b'\x9e': ('#x17E', '17E'),
                b'\x9f': ('Yuml', ''),}

    # A parochial partial mapping of ISO-Latin-1 to ASCII. Contains
    # horrors like stripping diacritical marks to turn á into a, but also
    # contains non-horrors like turning “ into ".
    MS_CHARS_TO_ASCII = {
        b'\x80' : 'EUR',
        b'\x81' : ' ',
        b'\x82' : ',',
        b'\x83' : 'f',
        b'\x84' : ',,',
        b'\x85' : '...',
        b'\x86' : '+',
        b'\x87' : '++',
        b'\x88' : '^',
        b'\x89' : '%',
        b'\x8a' : 'S',
        b'\x8b' : '<',
        b'\x8c' : 'OE',
        b'\x8d' : '?',
        b'\x8e' : 'Z',
        b'\x8f' : '?',
        b'\x90' : '?',
        b'\x91' : "'",
        b'\x92' : "'",
        b'\x93' : '"',
        b'\x94' : '"',
        b'\x95' : '*',
        b'\x96' : '-',
        b'\x97' : '--',
        b'\x98' : '~',
        b'\x99' : '(TM)',
        b'\x9a' : 's',
        b'\x9b' : '>',
        b'\x9c' : 'oe',
        b'\x9d' : '?',
        b'\x9e' : 'z',
        b'\x9f' : 'Y',
        b'\xa0' : ' ',
        b'\xa1' : '!',
        b'\xa2' : 'c',
        b'\xa3' : 'GBP',
        b'\xa4' : '$', #This approximation is especially parochial--this is the
                       #generic currency symbol.
        b'\xa5' : 'YEN',
        b'\xa6' : '|',
        b'\xa7' : 'S',
        b'\xa8' : '..',
        b'\xa9' : '',  # NOTE(review): empty replacement for (c) -- confirm intended.
        b'\xaa' : '(th)',
        b'\xab' : '<<',
        b'\xac' : '!',
        b'\xad' : ' ',
        b'\xae' : '(R)',
        b'\xaf' : '-',
        b'\xb0' : 'o',
        b'\xb1' : '+-',
        b'\xb2' : '2',
        b'\xb3' : '3',
        b'\xb4' : ("'", 'acute'),
        b'\xb5' : 'u',
        b'\xb6' : 'P',
        b'\xb7' : '*',
        b'\xb8' : ',',
        b'\xb9' : '1',
        b'\xba' : '(th)',
        b'\xbb' : '>>',
        b'\xbc' : '1/4',
        b'\xbd' : '1/2',
        b'\xbe' : '3/4',
        b'\xbf' : '?',
        b'\xc0' : 'A',
        b'\xc1' : 'A',
        b'\xc2' : 'A',
        b'\xc3' : 'A',
        b'\xc4' : 'A',
        b'\xc5' : 'A',
        b'\xc6' : 'AE',
        b'\xc7' : 'C',
        b'\xc8' : 'E',
        b'\xc9' : 'E',
        b'\xca' : 'E',
        b'\xcb' : 'E',
        b'\xcc' : 'I',
        b'\xcd' : 'I',
        b'\xce' : 'I',
        b'\xcf' : 'I',
        b'\xd0' : 'D',
        b'\xd1' : 'N',
        b'\xd2' : 'O',
        b'\xd3' : 'O',
        b'\xd4' : 'O',
        b'\xd5' : 'O',
        b'\xd6' : 'O',
        b'\xd7' : '*',
        b'\xd8' : 'O',
        b'\xd9' : 'U',
        b'\xda' : 'U',
        b'\xdb' : 'U',
        b'\xdc' : 'U',
        b'\xdd' : 'Y',
        b'\xde' : 'b',
        b'\xdf' : 'B',
        b'\xe0' : 'a',
        b'\xe1' : 'a',
        b'\xe2' : 'a',
        b'\xe3' : 'a',
        b'\xe4' : 'a',
        b'\xe5' : 'a',
        b'\xe6' : 'ae',
        b'\xe7' : 'c',
        b'\xe8' : 'e',
        b'\xe9' : 'e',
        b'\xea' : 'e',
        b'\xeb' : 'e',
        b'\xec' : 'i',
        b'\xed' : 'i',
        b'\xee' : 'i',
        b'\xef' : 'i',
        b'\xf0' : 'o',
        b'\xf1' : 'n',
        b'\xf2' : 'o',
        b'\xf3' : 'o',
        b'\xf4' : 'o',
        b'\xf5' : 'o',
        b'\xf6' : 'o',
        b'\xf7' : '/',
        b'\xf8' : 'o',
        b'\xf9' : 'u',
        b'\xfa' : 'u',
        b'\xfb' : 'u',
        b'\xfc' : 'u',
        b'\xfd' : 'y',
        b'\xfe' : 'b',
        b'\xff' : 'y',
        }

    # A map used when removing rogue Windows-1252/ISO-8859-1
    # characters in otherwise UTF-8 documents.
    #
    # Note that \x81, \x8d, \x8f, \x90, and \x9d are undefined in
    # Windows-1252.
    WINDOWS_1252_TO_UTF8 = {
        0x80 : b'\xe2\x82\xac', # €
        0x82 : b'\xe2\x80\x9a', # ‚
        0x83 : b'\xc6\x92',     # ƒ
        0x84 : b'\xe2\x80\x9e', # „
        0x85 : b'\xe2\x80\xa6', # …
        0x86 : b'\xe2\x80\xa0', # †
        0x87 : b'\xe2\x80\xa1', # ‡
        0x88 : b'\xcb\x86',     # ˆ
        0x89 : b'\xe2\x80\xb0', # ‰
        0x8a : b'\xc5\xa0',     # Š
        0x8b : b'\xe2\x80\xb9', # ‹
        0x8c : b'\xc5\x92',     # Œ
        0x8e : b'\xc5\xbd',     # Ž
        0x91 : b'\xe2\x80\x98', # ‘
        0x92 : b'\xe2\x80\x99', # ’
        0x93 : b'\xe2\x80\x9c', # “
        0x94 : b'\xe2\x80\x9d', # ”
        0x95 : b'\xe2\x80\xa2', # •
        0x96 : b'\xe2\x80\x93', # –
        0x97 : b'\xe2\x80\x94', # —
        0x98 : b'\xcb\x9c',     # ˜
        0x99 : b'\xe2\x84\xa2', # ™
        0x9a : b'\xc5\xa1',     # š
        0x9b : b'\xe2\x80\xba', # ›
        0x9c : b'\xc5\x93',     # œ
        0x9e : b'\xc5\xbe',     # ž
        0x9f : b'\xc5\xb8',     # Ÿ
        0xa0 : b'\xc2\xa0',     #
        0xa1 : b'\xc2\xa1',     # ¡
        0xa2 : b'\xc2\xa2',     # ¢
        0xa3 : b'\xc2\xa3',     # £
        0xa4 : b'\xc2\xa4',     # ¤
        0xa5 : b'\xc2\xa5',     # ¥
        0xa6 : b'\xc2\xa6',     # ¦
        0xa7 : b'\xc2\xa7',     # §
        0xa8 : b'\xc2\xa8',     # ¨
        0xa9 : b'\xc2\xa9',     # ©
        0xaa : b'\xc2\xaa',     # ª
        0xab : b'\xc2\xab',     # «
        0xac : b'\xc2\xac',     # ¬
        0xad : b'\xc2\xad',     #
        0xae : b'\xc2\xae',     # ®
        0xaf : b'\xc2\xaf',     # ¯
        0xb0 : b'\xc2\xb0',     # °
        0xb1 : b'\xc2\xb1',     # ±
        0xb2 : b'\xc2\xb2',     # ²
        0xb3 : b'\xc2\xb3',     # ³
        0xb4 : b'\xc2\xb4',     # ´
        0xb5 : b'\xc2\xb5',     # µ
        0xb6 : b'\xc2\xb6',     # ¶
        0xb7 : b'\xc2\xb7',     # ·
        0xb8 : b'\xc2\xb8',     # ¸
        0xb9 : b'\xc2\xb9',     # ¹
        0xba : b'\xc2\xba',     # º
        0xbb : b'\xc2\xbb',     # »
        0xbc : b'\xc2\xbc',     # ¼
        0xbd : b'\xc2\xbd',     # ½
        0xbe : b'\xc2\xbe',     # ¾
        0xbf : b'\xc2\xbf',     # ¿
        0xc0 : b'\xc3\x80',     # À
        0xc1 : b'\xc3\x81',     # Á
        0xc2 : b'\xc3\x82',     # Â
        0xc3 : b'\xc3\x83',     # Ã
        0xc4 : b'\xc3\x84',     # Ä
        0xc5 : b'\xc3\x85',     # Å
        0xc6 : b'\xc3\x86',     # Æ
        0xc7 : b'\xc3\x87',     # Ç
        0xc8 : b'\xc3\x88',     # È
        0xc9 : b'\xc3\x89',     # É
        0xca : b'\xc3\x8a',     # Ê
        0xcb : b'\xc3\x8b',     # Ë
        0xcc : b'\xc3\x8c',     # Ì
        0xcd : b'\xc3\x8d',     # Í
        0xce : b'\xc3\x8e',     # Î
        0xcf : b'\xc3\x8f',     # Ï
        0xd0 : b'\xc3\x90',     # Ð
        0xd1 : b'\xc3\x91',     # Ñ
        0xd2 : b'\xc3\x92',     # Ò
        0xd3 : b'\xc3\x93',     # Ó
        0xd4 : b'\xc3\x94',     # Ô
        0xd5 : b'\xc3\x95',     # Õ
        0xd6 : b'\xc3\x96',     # Ö
        0xd7 : b'\xc3\x97',     # ×
        0xd8 : b'\xc3\x98',     # Ø
        0xd9 : b'\xc3\x99',     # Ù
        0xda : b'\xc3\x9a',     # Ú
        0xdb : b'\xc3\x9b',     # Û
        0xdc : b'\xc3\x9c',     # Ü
        0xdd : b'\xc3\x9d',     # Ý
        0xde : b'\xc3\x9e',     # Þ
        0xdf : b'\xc3\x9f',     # ß
        0xe0 : b'\xc3\xa0',     # à
        # BUG FIX: this entry was b'\xa1', which is not valid UTF-8; the
        # UTF-8 encoding of U+00E1 is b'\xc3\xa1', matching every
        # neighboring entry's \xc3-prefixed two-byte form.
        0xe1 : b'\xc3\xa1',     # á
        0xe2 : b'\xc3\xa2',     # â
        0xe3 : b'\xc3\xa3',     # ã
        0xe4 : b'\xc3\xa4',     # ä
        0xe5 : b'\xc3\xa5',     # å
        0xe6 : b'\xc3\xa6',     # æ
        0xe7 : b'\xc3\xa7',     # ç
        0xe8 : b'\xc3\xa8',     # è
        0xe9 : b'\xc3\xa9',     # é
        0xea : b'\xc3\xaa',     # ê
        0xeb : b'\xc3\xab',     # ë
        0xec : b'\xc3\xac',     # ì
        0xed : b'\xc3\xad',     # í
        0xee : b'\xc3\xae',     # î
        0xef : b'\xc3\xaf',     # ï
        0xf0 : b'\xc3\xb0',     # ð
        0xf1 : b'\xc3\xb1',     # ñ
        0xf2 : b'\xc3\xb2',     # ò
        0xf3 : b'\xc3\xb3',     # ó
        0xf4 : b'\xc3\xb4',     # ô
        0xf5 : b'\xc3\xb5',     # õ
        0xf6 : b'\xc3\xb6',     # ö
        0xf7 : b'\xc3\xb7',     # ÷
        0xf8 : b'\xc3\xb8',     # ø
        0xf9 : b'\xc3\xb9',     # ù
        0xfa : b'\xc3\xba',     # ú
        0xfb : b'\xc3\xbb',     # û
        0xfc : b'\xc3\xbc',     # ü
        0xfd : b'\xc3\xbd',     # ý
        0xfe : b'\xc3\xbe',     # þ
        }

    MULTIBYTE_MARKERS_AND_SIZES = [
        (0xc2, 0xdf, 2), # 2-byte characters start with a byte C2-DF
        (0xe0, 0xef, 3), # 3-byte characters start with E0-EF
        (0xf0, 0xf4, 4), # 4-byte characters start with F0-F4
    ]

    FIRST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[0][0]
    LAST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[-1][1]

    @classmethod
    def detwingle(cls, in_bytes, main_encoding="utf8",
                  embedded_encoding="windows-1252"):
        """Fix characters from one encoding embedded in some other encoding.

        Currently the only situation supported is Windows-1252 (or its
        subset ISO-8859-1), embedded in UTF-8.

        The input must be a bytestring. If you've already converted
        the document to Unicode, you're too late.

        The output is a bytestring in which `embedded_encoding`
        characters have been converted to their `main_encoding`
        equivalents.
        """
        if embedded_encoding.replace('_', '-').lower() not in (
            'windows-1252', 'windows_1252'):
            raise NotImplementedError(
                "Windows-1252 and ISO-8859-1 are the only currently supported "
                "embedded encodings.")

        if main_encoding.lower() not in ('utf8', 'utf-8'):
            raise NotImplementedError(
                "UTF-8 is the only currently supported main encoding.")

        byte_chunks = []

        chunk_start = 0
        pos = 0
        while pos < len(in_bytes):
            byte = in_bytes[pos]
            if not isinstance(byte, int):
                # Python 2.x
                byte = ord(byte)
            if (byte >= cls.FIRST_MULTIBYTE_MARKER
                and byte <= cls.LAST_MULTIBYTE_MARKER):
                # This is the start of a UTF-8 multibyte character. Skip
                # to the end.
                for start, end, size in cls.MULTIBYTE_MARKERS_AND_SIZES:
                    if byte >= start and byte <= end:
                        pos += size
                        break
            elif byte >= 0x80 and byte in cls.WINDOWS_1252_TO_UTF8:
                # We found a Windows-1252 character!
                # Save the string up to this point as a chunk.
                byte_chunks.append(in_bytes[chunk_start:pos])

                # Now translate the Windows-1252 character into UTF-8
                # and add it as another, one-byte chunk.
                byte_chunks.append(cls.WINDOWS_1252_TO_UTF8[byte])
                pos += 1
                chunk_start = pos
            else:
                # Go on to the next character.
                pos += 1
        if chunk_start == 0:
            # The string is unchanged.
            return in_bytes
        else:
            # Store the final chunk.
            byte_chunks.append(in_bytes[chunk_start:])
        return b''.join(byte_chunks)
| gpl-3.0 |
blindroot/django-configurations | configurations/importer.py | 6 | 7851 | import imp
import logging
import os
import sys
from optparse import OptionParser, make_option
from django import VERSION as DJANGO_VERSION
from django.conf import ENVIRONMENT_VARIABLE as SETTINGS_ENVIRONMENT_VARIABLE
from django.core.exceptions import ImproperlyConfigured
from django.core.management import base
from .utils import uppercase_attributes, reraise
from .values import Value, setup_value
installed = False
CONFIGURATION_ENVIRONMENT_VARIABLE = 'DJANGO_CONFIGURATION'
CONFIGURATION_ARGUMENT = '--configuration'
CONFIGURATION_ARGUMENT_HELP = ('The name of the configuration class to load, '
'e.g. "Development". If this isn\'t provided, '
'the DJANGO_CONFIGURATION environment '
'variable will be used.')
configuration_options = (make_option(CONFIGURATION_ARGUMENT,
help=CONFIGURATION_ARGUMENT_HELP),)
def install(check_options=False):
    """Install django-configurations' import machinery.

    Registers a ConfigurationImporter on ``sys.meta_path`` and teaches
    Django's management commands about the ``--configuration`` option.
    Safe to call more than once; only the first call has any effect.
    """
    global installed
    if installed:
        return

    if DJANGO_VERSION >= (1, 8):
        orig_create_parser = base.BaseCommand.create_parser

        def create_parser(self, prog_name, subcommand):
            parser = orig_create_parser(self, prog_name, subcommand)
            if isinstance(parser, OptionParser):
                # in case the option_list is set the create_parser
                # will actually return a OptionParser for backward
                # compatibility. It uses BaseCommand.use_argparse
                # to decide that, which checks for the option_list list
                base.BaseCommand.option_list += configuration_options
            else:
                # probably argparse, let's not import argparse though
                parser.add_argument(CONFIGURATION_ARGUMENT,
                                    help=CONFIGURATION_ARGUMENT_HELP)
            return parser

        base.BaseCommand.create_parser = create_parser
    else:
        # add the configuration option to all management commands
        base.BaseCommand.option_list += configuration_options

    sys.meta_path.insert(0, ConfigurationImporter(check_options=check_options))
    installed = True
class ConfigurationImporter(object):
    """A ``sys.meta_path`` finder that intercepts the import of the
    Django settings module and wraps it in a ConfigurationLoader, which
    applies the configuration class named by the DJANGO_CONFIGURATION
    environment variable.

    When ``check_options`` is True, the ``--configuration`` command-line
    option is parsed out of ``sys.argv`` and exported to the environment
    before validation.
    """
    # Environment variable naming the settings module (DJANGO_SETTINGS_MODULE).
    modvar = SETTINGS_ENVIRONMENT_VARIABLE
    # Environment variable naming the configuration class (DJANGO_CONFIGURATION).
    namevar = CONFIGURATION_ENVIRONMENT_VARIABLE
    error_msg = ("Configuration cannot be imported, "
                 "environment variable {0} is undefined.")

    def __init__(self, check_options=False):
        # Snapshot argv so later mutations of sys.argv don't affect us.
        self.argv = sys.argv[:]
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.DEBUG)
        handler = logging.StreamHandler()
        self.logger.addHandler(handler)
        if check_options:
            self.check_options()
        # Both environment variables must be set by this point.
        self.validate()
        if check_options:
            self.announce()

    def __repr__(self):
        return "<ConfigurationImporter for '{0}.{1}'>".format(self.module,
                                                              self.name)

    @property
    def module(self):
        """Dotted path of the settings module, read from the environment."""
        return os.environ.get(self.modvar)

    @property
    def name(self):
        """Name of the configuration class, read from the environment."""
        return os.environ.get(self.namevar)

    def check_options(self):
        """Parse ``--configuration`` out of the command line (if given)
        and export it to the environment so :attr:`name` can see it."""
        # django switched to argparse in version 1.8
        if DJANGO_VERSION >= (1, 8):
            parser = base.CommandParser(None,
                                        usage="%(prog)s subcommand [options] [args]",
                                        add_help=False)
            parser.add_argument('--settings')
            parser.add_argument('--pythonpath')
            parser.add_argument(CONFIGURATION_ARGUMENT,
                                help=CONFIGURATION_ARGUMENT_HELP)
            parser.add_argument('args', nargs='*')  # catch-all
            try:
                options, args = parser.parse_known_args(self.argv[2:])
                if options.configuration:
                    os.environ[self.namevar] = options.configuration
                base.handle_default_options(options)
            except base.CommandError:
                pass  # Ignore any option errors at this point.
        # django < 1.7 did use optparse
        else:
            from django.core.management import LaxOptionParser
            parser = LaxOptionParser(option_list=configuration_options,
                                     add_help_option=False)
            try:
                options, args = parser.parse_args(self.argv)
                if options.configuration:
                    os.environ[self.namevar] = options.configuration
            # NOTE(review): bare except also swallows SystemExit raised
            # by optparse on parse errors -- presumably intentional.
            except:
                pass  # Ignore any option errors at this point.

    def validate(self):
        """Raise ImproperlyConfigured unless both environment variables are set."""
        if self.name is None:
            raise ImproperlyConfigured(self.error_msg.format(self.namevar))
        if self.module is None:
            raise ImproperlyConfigured(self.error_msg.format(self.modvar))

    def announce(self):
        """Log which configuration is in use when running ``runserver``
        (only in the reloader's main process, to avoid double logging)."""
        if len(self.argv) > 1:
            from . import __version__
            from django.utils.termcolors import colorize
            # Django >= 1.7 supports hiding the colorization in the shell
            try:
                from django.core.management.color import no_style
            except ImportError:
                no_style = None
            if no_style is not None and '--no-color' in self.argv:
                stylize = no_style()
            else:
                def stylize(text):
                    return colorize(text, fg='green')
            if (self.argv[1] == 'runserver' and
                    os.environ.get('RUN_MAIN') == 'true'):
                message = ("django-configurations version {0}, using "
                           "configuration '{1}'".format(__version__,
                                                        self.name))
                self.logger.debug(stylize(message))

    def find_module(self, fullname, path=None):
        """PEP 302 finder hook: only claim the settings module itself."""
        if fullname is not None and fullname == self.module:
            module = fullname.rsplit('.', 1)[-1]
            return ConfigurationLoader(self.name,
                                       imp.find_module(module, path))
        return None
class ConfigurationLoader(object):
    """PEP 302 loader that imports the settings module and then copies
    the uppercase attributes of the named configuration class onto it."""

    def __init__(self, name, location):
        # Name of the configuration class and the (file, pathname,
        # description) tuple produced by ``imp.find_module``.
        self.name = name
        self.location = location

    def load_module(self, fullname):
        """Import (or reuse) the settings module, then apply the
        configuration class to it and return the patched module."""
        if fullname in sys.modules:
            mod = sys.modules[fullname]  # pragma: no cover
        else:
            mod = imp.load_module(fullname, *self.location)

        cls_path = '{0}.{1}'.format(mod.__name__, self.name)

        try:
            cls = getattr(mod, self.name)
        except AttributeError as err:  # pragma: no cover
            reraise(err, "Couldn't find configuration '{0}' "
                         "in module '{1}'".format(self.name,
                                                  mod.__package__))
        try:
            cls.pre_setup()
            cls.setup()
            obj = cls()
            for attr_name, attr_value in uppercase_attributes(obj).items():
                is_lazy = (callable(attr_value) and
                           not getattr(attr_value, 'pristine', False))
                if is_lazy:
                    attr_value = attr_value()
                    # in case a method returns a Value instance we have
                    # to do the same as the Configuration.setup method
                    if isinstance(attr_value, Value):
                        setup_value(mod, attr_name, attr_value)
                        continue
                setattr(mod, attr_name, attr_value)

            setattr(mod, 'CONFIGURATION', '{0}.{1}'.format(fullname,
                                                           self.name))
            cls.post_setup()
        except Exception as err:
            reraise(err, "Couldn't setup configuration '{0}'".format(cls_path))

        return mod
| bsd-3-clause |
pipermerriam/populus | populus/utils/cli.py | 1 | 8587 | from __future__ import absolute_import
import itertools
import logging
import time
import click
from populus.compilation import (
compile_project_contracts,
)
from .accounts import (
is_account_locked,
)
from .compile import (
write_compiled_sources,
)
from .contracts import (
verify_contract_bytecode,
)
from .observers import (
DirWatcher,
)
from .wait import (
Timeout,
)
from web3.utils.empty import (
Empty,
)
def select_chain(project):
    """
    Present the user with a prompt to select which of the project chains they
    want to use.

    :param project: populus project whose ``config['chains']`` mapping is
        offered as the set of choices.
    :returns: the chosen chain name.
    :raises click.ClickException: if the response is neither a known chain
        name nor a valid index.
    """
    chain_options = set(project.config['chains'].keys())

    choose_chain_msg = "\n".join(itertools.chain((
        "Available Chains",
        "----------------",
    ), (
        "{0} - {1}".format(chain_index, chain_name)
        for chain_index, chain_name in enumerate(sorted(chain_options))
    ), (
        "",
        # Typo fix: previously read "Enter ether the name, ...".
        "Enter either the name, or number of the desired chain"
    )))
    chain_name = click.prompt(choose_chain_msg)
    if chain_name in chain_options:
        return chain_name
    elif chain_name.isdigit() and int(chain_name) < len(chain_options):
        # Numeric answers index into the alphabetically sorted names.
        return sorted(chain_options)[int(chain_name)]
    else:
        raise click.ClickException(
            "Invalid choice: {0}. Please choose from one of the "
            "provided options.".format(chain_name)
        )
def select_account(chain):
    """
    Present the user with a prompt to select which of the chain accounts they
    would like to use.
    """
    all_accounts = chain.web3.eth.accounts
    if not all_accounts:
        raise click.ClickException("No accounts found on chain.")

    header = (
        "Accounts",
        "-----------------",
    )
    account_rows = (
        "{index} - {account}".format(
            account=account,
            index=index,
        ) for (index, account) in enumerate(all_accounts)
    )
    footer = (
        "",
        "Enter the account address or the number of the desired account",
    )
    pick_account_message = '\n'.join(itertools.chain(header, account_rows, footer))

    account_choice = click.prompt(
        pick_account_message,
        default=chain.web3.eth.defaultAccount or chain.web3.eth.coinbase,
    )

    if account_choice in set(all_accounts):
        return account_choice
    if account_choice.isdigit() and int(account_choice) < len(all_accounts):
        return all_accounts[int(account_choice)]
    raise click.ClickException(
        "Invalid choice: {0}. Please choose from one of the "
        "provided options.".format(account_choice)
    )
def request_account_unlock(chain, account, timeout):
    """
    Present a password prompt to unlock the given account.
    """
    if not is_account_locked(chain.web3, account):
        raise click.ClickException(
            "The account `{0}` is already unlocked".format(account)
        )

    unlock_account_msg = (
        "Please provide the password to unlock account `{0}`.".format(account)
    )
    # default="" is for allowing empty password
    password = click.prompt(unlock_account_msg, hide_input=True, default="")

    unlock_successful = chain.web3.personal.unlockAccount(
        account,
        password,
        timeout,
    )
    if not unlock_successful:
        raise click.ClickException("Unable to unlock account: `{0}`".format(account))
def deploy_contract_and_verify(chain,
                               contract_name,
                               ContractFactory=None,
                               deploy_transaction=None,
                               deploy_args=None,
                               deploy_kwargs=None):
    """
    This is a *loose* wrapper around `populus.utils.deploy.deploy_contract`
    that handles the various concerns and logging that need to be present when
    doing this as a CLI interaction.

    Deploy a contract, displaying information about the deploy process as it
    happens. This also verifies that the deployed contract's bytecode matches
    the expected value.

    :param chain: chain object exposing ``web3``, ``wait`` and ``provider``.
    :param contract_name: name of the contract to deploy.
    :param ContractFactory: optional pre-built factory; looked up from
        ``chain.provider`` when omitted.
    :param deploy_transaction: transaction dict forwarded to ``deploy``.
    :param deploy_args: positional constructor arguments.
    :param deploy_kwargs: keyword constructor arguments.
    :returns: a contract instance bound to the deployed address.
    :raises click.ClickException: if the deployed bytecode is empty.
    """
    web3 = chain.web3
    logger = logging.getLogger('populus.utils.cli.deploy_contract_and_verify')

    if isinstance(web3.eth.defaultAccount, Empty):
        # must have coinbase
        # TODO: add --account arg (index or address), and set default account here if provided
        web3.eth.defaultAccount = web3.eth.coinbase

    if is_account_locked(web3, web3.eth.defaultAccount):
        try:
            # Give an external unlock a few seconds to happen before
            # falling back to an interactive prompt.
            chain.wait.for_unlock(web3.eth.defaultAccount or web3.eth.coinbase, 5)
        except Timeout:
            default_account = select_account(chain)
            if is_account_locked(web3, default_account):
                request_account_unlock(chain, default_account, None)
            web3.eth.defaultAccount = default_account

    logger.info("Deploying {0}".format(contract_name))

    if ContractFactory is None:
        ContractFactory = chain.provider.get_contract_factory(contract_name)

    deploy_txn_hash = ContractFactory.deploy(
        transaction=deploy_transaction,
        args=deploy_args,
        kwargs=deploy_kwargs,
    )
    deploy_txn = web3.eth.getTransaction(deploy_txn_hash)

    logger.info("Deploy Transaction Sent: {0}".format(deploy_txn_hash))
    logger.info("Waiting for confirmation...")

    # Block until the deploy transaction is mined and yields an address.
    contract_address = chain.wait.for_contract_address(
        deploy_txn_hash,
        timeout=180,
    )
    deploy_receipt = web3.eth.getTransactionReceipt(deploy_txn_hash)

    logger.info((
        "\n"
        "Transaction Mined\n"
        "=================\n"
        "Tx Hash : {0}\n"
        "Address : {1}\n"
        "Gas Provided : {2}\n"
        "Gas Used : {3}\n\n".format(
            deploy_txn_hash,
            contract_address,
            deploy_txn['gas'],
            deploy_receipt['gasUsed'],
        )
    ))

    # Verification
    deployed_bytecode = web3.eth.getCode(contract_address)

    if ContractFactory.bytecode_runtime:
        verify_contract_bytecode(web3, ContractFactory.bytecode_runtime, contract_address)
        logger.info("Verified contract bytecode @ {0}".format(contract_address))
    else:
        logger.info(
            "No runtime available. Falling back to verifying non-empty "
            "bytecode."
        )
        # "0x" (length 2) or empty code means the transaction was mined
        # but no contract code was stored at the address.
        if len(deployed_bytecode) <= 2:
            logger.error("Bytecode @ {0} is unexpectedly empty.".format(contract_address))
            raise click.ClickException("Error deploying contract")
        else:
            logger.info(
                "Verified bytecode @ {0} is non-empty".format(contract_address)
            )

    return ContractFactory(address=contract_address)
def watch_project_contracts(project, compiler_settings):
    """Recompile the project's contracts whenever a source file changes.

    Blocks forever, polling until interrupted with Ctrl-C.
    """
    logger = logging.getLogger('populus.utils.cli.watch_project_contracts')

    def callback(file_path, event_name):
        # Only recompile on content-affecting filesystem events.
        if event_name not in {'modified', 'created'}:
            return
        logger.info("============ Compiling ==============")
        logger.info("> Change detected in: %s", file_path)
        for source_dir in project.contracts_source_dirs:
            logger.info("> Loading source files from: %s", source_dir)

        contract_source_paths, compiled_sources = compile_project_contracts(project)
        write_compiled_sources(
            project.compiled_contracts_asset_path,
            compiled_sources,
        )
        logger.info("> Watching ...")

    watcher = DirWatcher(project.contracts_dir, callback)
    watcher.start()

    try:
        while True:
            time.sleep(0.1)
    except KeyboardInterrupt:
        pass
def select_project_contract(project):
    """Prompt the user to pick one of the project's compiled contracts.

    Accepts either the contract name or its numeric index in the sorted
    listing; raises click.ClickException for anything else.
    """
    contract_names = sorted(project.compiled_contract_data.keys())

    contract_choices = [
        " {idx}: {name}".format(
            idx=str(idx).rjust(3),
            name=name,
        )
        for idx, name in enumerate(contract_names)
    ]
    select_contract_message = (
        "Please select the desired contract:\n\n"
        "{0}\n\n".format('\n'.join(contract_choices))
    )
    contract_name = click.prompt(select_contract_message)

    if contract_name in project.compiled_contract_data:
        return contract_name
    if contract_name.isdigit() and int(contract_name) < len(contract_names):
        return contract_names[int(contract_name)]

    bad_choice_message = (
        "'{0}' is not a valid choice. Please enter either the numeric "
        "index of the desired contract or the full name of the "
        "contract.".format(
            contract_name,
        )
    )
    raise click.ClickException(bad_choice_message)
| mit |
vmindru/ansible | lib/ansible/modules/cloud/google/gcp_compute_backend_service_facts.py | 9 | 15181 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_backend_service_facts
description:
- Gather facts for GCP BackendService
short_description: Gather facts for GCP BackendService
version_added: 2.7
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
filters:
description:
- A list of filter value pairs. Available filters are listed here U(https://cloud.google.com/sdk/gcloud/reference/topic/filters.)
  - Each additional filter in the list will be added as an AND condition (filter1
    and filter2).
extends_documentation_fragment: gcp
'''
EXAMPLES = '''
- name: a backend service facts
gcp_compute_backend_service_facts:
filters:
- name = test_object
project: test_project
auth_kind: serviceaccount
service_account_file: "/tmp/auth.pem"
'''
RETURN = '''
items:
description: List of items
returned: always
type: complex
contains:
affinityCookieTtlSec:
description:
- Lifetime of cookies in seconds if session_affinity is GENERATED_COOKIE. If
set to 0, the cookie is non-persistent and lasts only until the end of the
browser session (or equivalent). The maximum allowed value for TTL is one
day.
- When the load balancing scheme is INTERNAL, this field is not used.
returned: success
type: int
backends:
description:
- The list of backends that serve this BackendService.
returned: success
type: complex
contains:
balancingMode:
description:
- Specifies the balancing mode for this backend.
- For global HTTP(S) or TCP/SSL load balancing, the default is UTILIZATION.
Valid values are UTILIZATION, RATE (for HTTP(S)) and CONNECTION (for TCP/SSL).
- This cannot be used for internal load balancing.
returned: success
type: str
capacityScaler:
description:
- A multiplier applied to the group's maximum servicing capacity (based
on UTILIZATION, RATE or CONNECTION).
- Default value is 1, which means the group will serve up to 100% of its
configured capacity (depending on balancingMode). A setting of 0 means
the group is completely drained, offering 0% of its available Capacity.
Valid range is [0.0,1.0].
- This cannot be used for internal load balancing.
returned: success
type: str
description:
description:
- An optional description of this resource.
- Provide this property when you create the resource.
returned: success
type: str
group:
description:
- This instance group defines the list of instances that serve traffic.
Member virtual machine instances from each instance group must live in
the same zone as the instance group itself.
- No two backends in a backend service are allowed to use same Instance
Group resource.
- When the BackendService has load balancing scheme INTERNAL, the instance
group must be in a zone within the same region as the BackendService.
returned: success
type: str
maxConnections:
description:
- The max number of simultaneous connections for the group. Can be used
with either CONNECTION or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or maxConnectionsPerInstance
must be set.
- This cannot be used for internal load balancing.
returned: success
type: int
maxConnectionsPerInstance:
description:
- The max number of simultaneous connections that a single backend instance
can handle. This is used to calculate the capacity of the group. Can be
used in either CONNECTION or UTILIZATION balancing modes.
- For CONNECTION mode, either maxConnections or maxConnectionsPerInstance
must be set.
- This cannot be used for internal load balancing.
returned: success
type: int
maxRate:
description:
- The max requests per second (RPS) of the group.
- Can be used with either RATE or UTILIZATION balancing modes, but required
if RATE mode. For RATE mode, either maxRate or maxRatePerInstance must
be set.
- This cannot be used for internal load balancing.
returned: success
type: int
maxRatePerInstance:
description:
- The max requests per second (RPS) that a single backend instance can handle.
This is used to calculate the capacity of the group. Can be used in either
balancing mode. For RATE mode, either maxRate or maxRatePerInstance must
be set.
- This cannot be used for internal load balancing.
returned: success
type: str
maxUtilization:
description:
- Used when balancingMode is UTILIZATION. This ratio defines the CPU utilization
target for the group. The default is 0.8. Valid range is [0.0, 1.0].
- This cannot be used for internal load balancing.
returned: success
type: str
cdnPolicy:
description:
- Cloud CDN configuration for this BackendService.
returned: success
type: complex
contains:
cacheKeyPolicy:
description:
- The CacheKeyPolicy for this CdnPolicy.
returned: success
type: complex
contains:
includeHost:
description:
- If true requests to different hosts will be cached separately.
returned: success
type: bool
includeProtocol:
description:
- If true, http and https requests will be cached separately.
returned: success
type: bool
includeQueryString:
description:
- If true, include query string parameters in the cache key according
to query_string_whitelist and query_string_blacklist. If neither is
set, the entire query string will be included.
- If false, the query string will be excluded from the cache key entirely.
returned: success
type: bool
queryStringBlacklist:
description:
- Names of query string parameters to exclude in cache keys.
- All other parameters will be included. Either specify query_string_whitelist
or query_string_blacklist, not both.
- "'&' and '=' will be percent encoded and not treated as delimiters."
returned: success
type: list
queryStringWhitelist:
description:
- Names of query string parameters to include in cache keys.
- All other parameters will be excluded. Either specify query_string_whitelist
or query_string_blacklist, not both.
- "'&' and '=' will be percent encoded and not treated as delimiters."
returned: success
type: list
connectionDraining:
description:
- Settings for connection draining.
returned: success
type: complex
contains:
drainingTimeoutSec:
description:
- Time for which instance will be drained (not accept new connections, but
still work to finish started).
returned: success
type: int
creationTimestamp:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
description:
description:
- An optional description of this resource.
returned: success
type: str
enableCDN:
description:
- If true, enable Cloud CDN for this BackendService.
- When the load balancing scheme is INTERNAL, this field is not used.
returned: success
type: bool
healthChecks:
description:
- The list of URLs to the HttpHealthCheck or HttpsHealthCheck resource for health
checking this BackendService. Currently at most one health check can be specified,
and a health check is required.
- For internal load balancing, a URL to a HealthCheck resource must be specified
instead.
returned: success
type: list
id:
description:
- The unique identifier for the resource.
returned: success
type: int
iap:
description:
- Settings for enabling Cloud Identity Aware Proxy.
returned: success
type: complex
contains:
enabled:
description:
- Enables IAP.
returned: success
type: bool
oauth2ClientId:
description:
- OAuth2 Client ID for IAP.
returned: success
type: str
oauth2ClientSecret:
description:
- OAuth2 Client Secret for IAP.
returned: success
type: str
oauth2ClientSecretSha256:
description:
- OAuth2 Client Secret SHA-256 for IAP.
returned: success
type: str
loadBalancingScheme:
description:
- Indicates whether the backend service will be used with internal or external
load balancing. A backend service created for one type of load balancing cannot
be used with the other.
returned: success
type: str
name:
description:
- Name of the resource. Provided by the client when the resource is created.
The name must be 1-63 characters long, and comply with RFC1035. Specifically,
the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
returned: success
type: str
portName:
description:
- Name of backend port. The same name should appear in the instance groups referenced
by this service. Required when the load balancing scheme is EXTERNAL.
- When the load balancing scheme is INTERNAL, this field is not used.
returned: success
type: str
protocol:
description:
- The protocol this BackendService uses to communicate with backends.
- Possible values are HTTP, HTTPS, TCP, and SSL. The default is HTTP.
- For internal load balancing, the possible values are TCP and UDP, and the
default is TCP.
returned: success
type: str
region:
description:
- The region where the regional backend service resides.
- This field is not applicable to global backend services.
returned: success
type: str
sessionAffinity:
description:
- Type of session affinity to use. The default is NONE.
- When the load balancing scheme is EXTERNAL, can be NONE, CLIENT_IP, or GENERATED_COOKIE.
- When the load balancing scheme is INTERNAL, can be NONE, CLIENT_IP, CLIENT_IP_PROTO,
or CLIENT_IP_PORT_PROTO.
- When the protocol is UDP, this field is not used.
returned: success
type: str
timeoutSec:
description:
- How many seconds to wait for the backend before considering it a failed request.
Default is 30 seconds. Valid range is [1, 86400].
returned: success
type: int
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest
import json
################################################################################
# Main
################################################################################
def main():
    """Module entry point: list GCP BackendService resources and exit with facts.

    Builds the module with a ``filters`` list option, defaults the OAuth scope
    when none was supplied, fetches the collection, and returns the resources
    under the ``items`` key via ``module.exit_json``.
    """
    module = GcpModule(argument_spec=dict(filters=dict(type='list', elements='str')))

    if not module.params['scopes']:
        module.params['scopes'] = ['https://www.googleapis.com/auth/compute']

    items = fetch_list(module, collection(module), query_options(module.params['filters']))
    # fetch_list() returns None for HTTP 404/204 responses; guard before
    # calling .get() so an empty/missing collection doesn't raise
    # AttributeError on None.
    if items and items.get('items'):
        items = items.get('items')
    else:
        items = []
    return_value = {'items': items}
    module.exit_json(**return_value)
def collection(module):
    """Return the REST collection URL for backendServices in the module's project."""
    url_template = "https://www.googleapis.com/compute/v1/projects/{project}/global/backendServices"
    return url_template.format(**module.params)
def fetch_list(module, link, query):
    """GET ``link`` with ``query`` as the GCP ``filter`` parameter and normalize the reply."""
    session = GcpSession(module, 'compute')
    reply = session.get(link, params={'filter': query})
    return return_if_object(module, reply)
def query_options(filters):
    """Combine a list of GCP filter expressions into one query string.

    A single expression is passed through untouched.  With several
    expressions, each one that is not already parenthesized is wrapped in
    "(...)" and the results are joined with spaces, which GCP treats as AND.
    Returns '' when no filters were given.
    """
    if not filters:
        return ''

    if len(filters) == 1:
        return filters[0]

    wrapped = []
    for expr in filters:
        # Leave an expression alone if it already starts with '(' or ends
        # with ')' (matches the upstream generated behavior).
        if expr[0] != '(' and expr[-1] != ')':
            wrapped.append("(%s)" % expr)
        else:
            wrapped.append(expr)
    return ' '.join(wrapped)
def return_if_object(module, response):
    """Decode a GCP REST response, failing the module on HTTP or API errors.

    Returns the parsed JSON payload, or None when the resource is absent
    (HTTP 404) or the reply has no body (HTTP 204).
    """
    # Absent resource or empty body: nothing to report.
    if response.status_code in (404, 204):
        return None

    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
        module.fail_json(msg="Invalid JSON response with error: %s" % inst)

    # A 2xx reply can still carry an API-level error block.
    if navigate_hash(result, ['error', 'errors']):
        module.fail_json(msg=navigate_hash(result, ['error', 'errors']))

    return result
# Run the module when executed directly by the Ansible runner.
if __name__ == "__main__":
    main()
| gpl-3.0 |
NullSoldier/django | tests/admin_inlines/tests.py | 7 | 44052 | from __future__ import unicode_literals
import datetime
import warnings
from django.contrib.admin import ModelAdmin, TabularInline
from django.contrib.admin.helpers import InlineAdminForm
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.test import RequestFactory, TestCase, override_settings
from django.utils.encoding import force_text
from .admin import InnerInline, site as admin_site
from .models import (
Author, BinaryTree, Book, Chapter, Child, ChildModel1, ChildModel2,
Fashionista, FootNote, Holder, Holder2, Holder3, Holder4, Inner, Inner2,
Inner3, Inner4Stacked, Inner4Tabular, Novel, OutfitItem, Parent,
ParentModelWithCustomPk, Person, Poll, Profile, ProfileCollection,
Question, Sighting, SomeChildModel, SomeParentModel, Teacher,
)
INLINE_CHANGELINK_HTML = 'class="inlinechangelink">Change</a>'
class TestDataMixin(object):
    """Shared fixture: creates a single superuser for the admin tests."""

    @classmethod
    def setUpTestData(cls):
        # The SHA1 hash below corresponds to the password "secret".
        superuser_fields = {
            'pk': 100,
            'username': 'super',
            'first_name': 'Super',
            'last_name': 'User',
            'email': 'super@example.com',
            'password': 'sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            'is_active': True,
            'is_superuser': True,
            'is_staff': True,
            'last_login': datetime.datetime(2007, 5, 30, 13, 20, 10),
            'date_joined': datetime.datetime(2007, 5, 30, 13, 20, 10),
        }
        User.objects.create(**superuser_fields)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_inlines.urls")
class TestInline(TestDataMixin, TestCase):
    """Rendering and behaviour of stacked/tabular admin inline formsets."""
    def setUp(self):
        """Create a Holder/Inner pair and log in as the fixture superuser."""
        holder = Holder(dummy=13)
        holder.save()
        Inner(dummy=42, holder=holder).save()
        result = self.client.login(username='super', password='secret')
        self.assertEqual(result, True)
        self.factory = RequestFactory()
    def test_can_delete(self):
        """
        can_delete should be passed to inlineformset factory.
        """
        holder = Holder.objects.get(dummy=13)
        response = self.client.get(
            reverse('admin:admin_inlines_holder_change', args=(holder.id,))
        )
        inner_formset = response.context['inline_admin_formsets'][0].formset
        expected = InnerInline.can_delete
        actual = inner_formset.can_delete
        self.assertEqual(expected, actual, 'can_delete must be equal')
    def test_readonly_stacked_inline_label(self):
        """Bug #13174."""
        holder = Holder.objects.create(dummy=42)
        Inner.objects.create(holder=holder, dummy=42, readonly='')
        response = self.client.get(
            reverse('admin:admin_inlines_holder_change', args=(holder.id,))
        )
        self.assertContains(response, '<label>Inner readonly label:</label>')
    def test_many_to_many_inlines(self):
        "Autogenerated many-to-many inlines are displayed correctly (#13407)"
        response = self.client.get(reverse('admin:admin_inlines_author_add'))
        # The heading for the m2m inline block uses the right text
        self.assertContains(response, '<h2>Author-book relationships</h2>')
        # The "add another" label is correct
        # (\\u002D is the JS-escaped hyphen in the rendered link text)
        self.assertContains(response, 'Add another Author\\u002Dbook relationship')
        # The '+' is dropped from the autogenerated form prefix (Author_books+)
        self.assertContains(response, 'id="id_Author_books-TOTAL_FORMS"')
    def test_inline_primary(self):
        """An inline whose FK is also its primary key can be saved (302 on success)."""
        person = Person.objects.create(firstname='Imelda')
        item = OutfitItem.objects.create(name='Shoes')
        # Imelda likes shoes, but can't carry her own bags.
        data = {
            'shoppingweakness_set-TOTAL_FORMS': 1,
            'shoppingweakness_set-INITIAL_FORMS': 0,
            'shoppingweakness_set-MAX_NUM_FORMS': 0,
            '_save': 'Save',
            'person': person.id,
            'max_weight': 0,
            'shoppingweakness_set-0-item': item.id,
        }
        response = self.client.post(reverse('admin:admin_inlines_fashionista_add'), data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(Fashionista.objects.filter(person__firstname='Imelda')), 1)
    def test_tabular_non_field_errors(self):
        """
        Ensure that non_field_errors are displayed correctly, including the
        right value for colspan. Refs #13510.
        """
        data = {
            'title_set-TOTAL_FORMS': 1,
            'title_set-INITIAL_FORMS': 0,
            'title_set-MAX_NUM_FORMS': 0,
            '_save': 'Save',
            'title_set-0-title1': 'a title',
            'title_set-0-title2': 'a different title',
        }
        response = self.client.post(reverse('admin:admin_inlines_titlecollection_add'), data)
        # Here colspan is "4": two fields (title1 and title2), one hidden field and the delete checkbox.
        self.assertContains(response, '<tr><td colspan="4"><ul class="errorlist nonfield"><li>The two titles must be the same</li></ul></td></tr>')
    def test_no_parent_callable_lookup(self):
        """Admin inline `readonly_field` shouldn't invoke parent ModelAdmin callable"""
        # Identically named callable isn't present in the parent ModelAdmin,
        # rendering of the add view shouldn't explode
        response = self.client.get(reverse('admin:admin_inlines_novel_add'))
        self.assertEqual(response.status_code, 200)
        # View should have the child inlines section
        self.assertContains(response, '<div class="inline-group" id="chapter_set-group">')
    def test_callable_lookup(self):
        """Admin inline should invoke local callable when its name is listed in readonly_fields"""
        response = self.client.get(reverse('admin:admin_inlines_poll_add'))
        self.assertEqual(response.status_code, 200)
        # Add parent object view should have the child inlines section
        self.assertContains(response, '<div class="inline-group" id="question_set-group">')
        # The right callable should be used for the inline readonly_fields
        # column cells
        self.assertContains(response, '<p>Callable in QuestionInline</p>')
    def test_help_text(self):
        """
        Ensure that the inlines' model field help texts are displayed when
        using both the stacked and tabular layouts.
        Ref #8190.
        """
        response = self.client.get(reverse('admin:admin_inlines_holder4_add'))
        self.assertContains(response, '<p class="help">Awesome stacked help text is awesome.</p>', 4)
        self.assertContains(response, '<img src="/static/admin/img/svg/icon-unknown.svg" class="help help-tooltip" width="10" height="10" alt="(Awesome tabular help text is awesome.)" title="Awesome tabular help text is awesome." />', 1)
        # ReadOnly fields
        response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
        self.assertContains(response, '<img src="/static/admin/img/svg/icon-unknown.svg" class="help help-tooltip" width="10" height="10" alt="(Help text for ReadOnlyInline)" title="Help text for ReadOnlyInline" />', 1)
    def test_inline_hidden_field_no_column(self):
        """#18263 -- Make sure hidden fields don't get a column in tabular inlines"""
        parent = SomeParentModel.objects.create(name='a')
        SomeChildModel.objects.create(name='b', position='0', parent=parent)
        SomeChildModel.objects.create(name='c', position='1', parent=parent)
        response = self.client.get(reverse('admin:admin_inlines_someparentmodel_change', args=(parent.pk,)))
        self.assertNotContains(response, '<td class="field-position">')
        self.assertContains(response, (
            '<input id="id_somechildmodel_set-1-position" '
            'name="somechildmodel_set-1-position" type="hidden" value="1" />'))
    def test_non_related_name_inline(self):
        """
        Ensure that multiple inlines with related_name='+' have correct form
        prefixes. Bug #16838.
        """
        response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
        self.assertContains(response,
            '<input type="hidden" name="-1-0-id" id="id_-1-0-id" />', html=True)
        self.assertContains(response,
            '<input type="hidden" name="-1-0-capo_famiglia" id="id_-1-0-capo_famiglia" />', html=True)
        self.assertContains(response,
            '<input id="id_-1-0-name" type="text" class="vTextField" '
            'name="-1-0-name" maxlength="100" />', html=True)
        self.assertContains(response,
            '<input type="hidden" name="-2-0-id" id="id_-2-0-id" />', html=True)
        self.assertContains(response,
            '<input type="hidden" name="-2-0-capo_famiglia" id="id_-2-0-capo_famiglia" />', html=True)
        self.assertContains(response,
            '<input id="id_-2-0-name" type="text" class="vTextField" '
            'name="-2-0-name" maxlength="100" />', html=True)
    @override_settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True)
    def test_localize_pk_shortcut(self):
        """
        Ensure that the "View on Site" link is correct for locales that use
        thousand separators
        """
        holder = Holder.objects.create(pk=123456789, dummy=42)
        inner = Inner.objects.create(pk=987654321, holder=holder, dummy=42, readonly='')
        response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.id,)))
        inner_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(inner).pk, inner.pk)
        self.assertContains(response, inner_shortcut)
    def test_custom_pk_shortcut(self):
        """
        Ensure that the "View on Site" link is correct for models with a
        custom primary key field. Bug #18433.
        """
        parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
        child1 = ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
        child2 = ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
        response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
        child1_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child1).pk, child1.pk)
        child2_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child2).pk, child2.pk)
        self.assertContains(response, child1_shortcut)
        self.assertContains(response, child2_shortcut)
    def test_create_inlines_on_inherited_model(self):
        """
        Ensure that an object can be created with inlines when it inherits
        another class. Bug #19524.
        """
        data = {
            'name': 'Martian',
            'sighting_set-TOTAL_FORMS': 1,
            'sighting_set-INITIAL_FORMS': 0,
            'sighting_set-MAX_NUM_FORMS': 0,
            'sighting_set-0-place': 'Zone 51',
            '_save': 'Save',
        }
        response = self.client.post(reverse('admin:admin_inlines_extraterrestrial_add'), data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Sighting.objects.filter(et__name='Martian').count(), 1)
    def test_custom_get_extra_form(self):
        """`get_max_num` on the ModelAdmin controls the inline's MAX_NUM_FORMS."""
        bt_head = BinaryTree.objects.create(name="Tree Head")
        BinaryTree.objects.create(name="First Child", parent=bt_head)
        # The maximum number of forms should respect 'get_max_num' on the
        # ModelAdmin
        max_forms_input = '<input id="id_binarytree_set-MAX_NUM_FORMS" name="binarytree_set-MAX_NUM_FORMS" type="hidden" value="%d" />'
        # The total number of forms will remain the same in either case
        total_forms_hidden = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="2" />'
        response = self.client.get(reverse('admin:admin_inlines_binarytree_add'))
        self.assertContains(response, max_forms_input % 3)
        self.assertContains(response, total_forms_hidden)
        response = self.client.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
        self.assertContains(response, max_forms_input % 2)
        self.assertContains(response, total_forms_hidden)
    def test_min_num(self):
        """
        Ensure that min_num and extra determine number of forms.
        """
        class MinNumInline(TabularInline):
            model = BinaryTree
            min_num = 2
            extra = 3
        modeladmin = ModelAdmin(BinaryTree, admin_site)
        modeladmin.inlines = [MinNumInline]
        min_forms = '<input id="id_binarytree_set-MIN_NUM_FORMS" name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="2" />'
        total_forms = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="5" />'
        request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
        request.user = User(username='super', is_superuser=True)
        response = modeladmin.changeform_view(request)
        self.assertContains(response, min_forms)
        self.assertContains(response, total_forms)
    def test_custom_min_num(self):
        """
        Ensure that get_min_num is called and used correctly.
        """
        bt_head = BinaryTree.objects.create(name="Tree Head")
        BinaryTree.objects.create(name="First Child", parent=bt_head)
        class MinNumInline(TabularInline):
            model = BinaryTree
            extra = 3
            def get_min_num(self, request, obj=None, **kwargs):
                if obj:
                    return 5
                return 2
        modeladmin = ModelAdmin(BinaryTree, admin_site)
        modeladmin.inlines = [MinNumInline]
        min_forms = '<input id="id_binarytree_set-MIN_NUM_FORMS" name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="%d" />'
        total_forms = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="%d" />'
        request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
        request.user = User(username='super', is_superuser=True)
        response = modeladmin.changeform_view(request)
        self.assertContains(response, min_forms % 2)
        self.assertContains(response, total_forms % 5)
        request = self.factory.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
        request.user = User(username='super', is_superuser=True)
        response = modeladmin.changeform_view(request, object_id=str(bt_head.id))
        self.assertContains(response, min_forms % 5)
        self.assertContains(response, total_forms % 8)
    def test_inline_nonauto_noneditable_pk(self):
        """Non-auto, non-editable inline PKs render as hidden inputs."""
        response = self.client.get(reverse('admin:admin_inlines_author_add'))
        self.assertContains(response,
            '<input id="id_nonautopkbook_set-0-rand_pk" name="nonautopkbook_set-0-rand_pk" type="hidden" />',
            html=True)
        self.assertContains(response,
            '<input id="id_nonautopkbook_set-2-0-rand_pk" name="nonautopkbook_set-2-0-rand_pk" type="hidden" />',
            html=True)
    def test_inline_editable_pk(self):
        """Editable inline PKs render as visible text inputs exactly once."""
        response = self.client.get(reverse('admin:admin_inlines_author_add'))
        self.assertContains(response,
            '<input class="vIntegerField" id="id_editablepkbook_set-0-manual_pk" name="editablepkbook_set-0-manual_pk" type="text" />',
            html=True, count=1)
        self.assertContains(response,
            '<input class="vIntegerField" id="id_editablepkbook_set-2-0-manual_pk" name="editablepkbook_set-2-0-manual_pk" type="text" />',
            html=True, count=1)
    def test_stacked_inline_edit_form_contains_has_original_class(self):
        """Existing inline rows get the `has_original` CSS class; extra rows don't."""
        holder = Holder.objects.create(dummy=1)
        holder.inner_set.create(dummy=1)
        response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.pk,)))
        self.assertContains(
            response,
            '<div class="inline-related has_original" id="inner_set-0">',
            count=1
        )
        self.assertContains(
            response,
            '<div class="inline-related" id="inner_set-1">',
            count=1
        )
    def test_inlines_show_change_link_registered(self):
        "Inlines `show_change_link` for registered models when enabled."
        holder = Holder4.objects.create(dummy=1)
        item1 = Inner4Stacked.objects.create(dummy=1, holder=holder)
        item2 = Inner4Tabular.objects.create(dummy=1, holder=holder)
        items = (
            ('inner4stacked', item1.pk),
            ('inner4tabular', item2.pk),
        )
        response = self.client.get(reverse('admin:admin_inlines_holder4_change', args=(holder.pk,)))
        self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
        for model, pk in items:
            url = reverse('admin:admin_inlines_%s_change' % model, args=(pk,))
            self.assertContains(response, '<a href="%s" %s' % (url, INLINE_CHANGELINK_HTML))
    def test_inlines_show_change_link_unregistered(self):
        "Inlines `show_change_link` disabled for unregistered models."
        parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
        ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
        ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
        response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
        self.assertFalse(response.context['inline_admin_formset'].opts.has_registered_model)
        self.assertNotContains(response, INLINE_CHANGELINK_HTML)
    def test_tabular_inline_show_change_link_false_registered(self):
        "Inlines `show_change_link` disabled by default."
        poll = Poll.objects.create(name="New poll")
        Question.objects.create(poll=poll)
        response = self.client.get(reverse('admin:admin_inlines_poll_change', args=(poll.pk,)))
        self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
        self.assertNotContains(response, INLINE_CHANGELINK_HTML)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_inlines.urls")
class TestInlineMedia(TestDataMixin, TestCase):
    """Media (JS) declared on the parent ModelAdmin and/or its inlines must all render."""

    def setUp(self):
        logged_in = self.client.login(username='super', password='secret')
        self.assertEqual(logged_in, True)

    def test_inline_media_only_base(self):
        # Only the parent ModelAdmin declares media in this configuration.
        owner = Holder(dummy=13)
        owner.save()
        Inner(dummy=42, holder=owner).save()
        page = self.client.get(reverse('admin:admin_inlines_holder_change', args=(owner.id,)))
        self.assertContains(page, 'my_awesome_admin_scripts.js')

    def test_inline_media_only_inline(self):
        # Only the inline declares media in this configuration.
        owner = Holder3(dummy=13)
        owner.save()
        Inner3(dummy=42, holder=owner).save()
        page = self.client.get(reverse('admin:admin_inlines_holder3_change', args=(owner.id,)))
        self.assertContains(page, 'my_awesome_inline_scripts.js')

    def test_all_inline_media(self):
        # Both the parent ModelAdmin and the inline declare media.
        owner = Holder2(dummy=13)
        owner.save()
        Inner2(dummy=42, holder=owner).save()
        page = self.client.get(reverse('admin:admin_inlines_holder2_change', args=(owner.id,)))
        self.assertContains(page, 'my_awesome_admin_scripts.js')
        self.assertContains(page, 'my_awesome_inline_scripts.js')
@override_settings(ROOT_URLCONF="admin_inlines.urls")
class TestInlineAdminForm(TestCase):
    """Behaviour of the InlineAdminForm helper itself (no admin views involved)."""
    def test_immutable_content_type(self):
        """Regression for #9362
        The problem depends only on InlineAdminForm and its "original"
        argument, so we can safely set the other arguments to None/{}. We just
        need to check that the content_type argument of Child isn't altered by
        the internals of the inline form."""
        sally = Teacher.objects.create(name='Sally')
        john = Parent.objects.create(name='John')
        joe = Child.objects.create(name='Joe', teacher=sally, parent=john)
        iaf = InlineAdminForm(None, None, {}, {}, joe)
        parent_ct = ContentType.objects.get_for_model(Parent)
        self.assertEqual(iaf.original.content_type, parent_ct)
    def test_original_content_type_id_deprecated(self):
        """
        #23444 -- Verify a warning is raised when accessing
        `original_content_type_id` attribute of `InlineAdminForm` object.
        """
        iaf = InlineAdminForm(None, None, {}, {}, None)
        poll = Poll.objects.create(name="poll")
        iaf2 = InlineAdminForm(None, None, {}, {}, poll)
        poll_ct = ContentType.objects.get_for_model(Poll)
        with warnings.catch_warnings(record=True) as recorded:
            warnings.filterwarnings('always')
            # With no original object the attribute raises, but the
            # deprecation warning must still have been emitted.
            with self.assertRaises(AttributeError):
                iaf.original_content_type_id
            msg = force_text(recorded.pop().message)
            self.assertEqual(
                msg,
                'InlineAdminForm.original_content_type_id is deprecated and will be '
                'removed in Django 1.10. If you were using this attribute to construct '
                'the "view on site" URL, use the `absolute_url` attribute instead.'
            )
            # With an original object present the id is still returned.
            self.assertEqual(iaf2.original_content_type_id, poll_ct.id)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_inlines.urls")
class TestInlineProtectedOnDelete(TestDataMixin, TestCase):
    """Deleting an inline whose children are PROTECTED must fail validation."""
    def setUp(self):
        result = self.client.login(username='super', password='secret')
        self.assertEqual(result, True)
    def test_deleting_inline_with_protected_delete_does_not_validate(self):
        """Posting DELETE for a chapter with a protected FootNote re-renders with an error (200, not 302)."""
        lotr = Novel.objects.create(name='Lord of the rings')
        chapter = Chapter.objects.create(novel=lotr, name='Many Meetings')
        foot_note = FootNote.objects.create(chapter=chapter, note='yadda yadda')
        change_url = reverse('admin:admin_inlines_novel_change', args=(lotr.id,))
        response = self.client.get(change_url)
        data = {
            'name': lotr.name,
            'chapter_set-TOTAL_FORMS': 1,
            'chapter_set-INITIAL_FORMS': 1,
            'chapter_set-MAX_NUM_FORMS': 1000,
            '_save': 'Save',
            'chapter_set-0-id': chapter.id,
            'chapter_set-0-name': chapter.name,
            'chapter_set-0-novel': lotr.id,
            'chapter_set-0-DELETE': 'on'
        }
        response = self.client.post(change_url, data)
        # 200 means the form was re-rendered with errors instead of saving.
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "Deleting chapter %s would require deleting "
                            "the following protected related objects: foot note %s"
                            % (chapter, foot_note))
@override_settings(ROOT_URLCONF="admin_inlines.urls")
class TestInlinePermissions(TestCase):
    """
    Make sure the admin respects permissions for objects that are edited
    inline. Refs #8060.
    """

    def setUp(self):
        """Create a staff user with add/change permission on the parent
        models only (Author, Holder2), plus the objects edited inline.
        Permissions on the inline models themselves are granted per test."""
        self.user = User(username='admin')
        self.user.is_staff = True
        self.user.is_active = True
        self.user.set_password('secret')
        self.user.save()
        self.author_ct = ContentType.objects.get_for_model(Author)
        self.holder_ct = ContentType.objects.get_for_model(Holder2)
        self.book_ct = ContentType.objects.get_for_model(Book)
        self.inner_ct = ContentType.objects.get_for_model(Inner2)
        # User always has permissions to add and change Authors, and Holders,
        # the main (parent) models of the inlines. Permissions on the inlines
        # vary per test.
        permission = Permission.objects.get(codename='add_author', content_type=self.author_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='change_author', content_type=self.author_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='add_holder2', content_type=self.holder_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='change_holder2', content_type=self.holder_ct)
        self.user.user_permissions.add(permission)
        author = Author.objects.create(pk=1, name='The Author')
        book = author.books.create(name='The inline Book')
        self.author_change_url = reverse('admin:admin_inlines_author_change', args=(author.id,))
        # Get the ID of the automatically created intermediate model for the Author-Book m2m
        author_book_auto_m2m_intermediate = Author.books.through.objects.get(author=author, book=book)
        self.author_book_auto_m2m_intermediate_id = author_book_auto_m2m_intermediate.pk
        holder = Holder2.objects.create(dummy=13)
        inner2 = Inner2.objects.create(dummy=42, holder=holder)
        self.holder_change_url = reverse('admin:admin_inlines_holder2_change', args=(holder.id,))
        self.inner2_id = inner2.id
        self.assertEqual(
            self.client.login(username='admin', password='secret'),
            True)

    def test_inline_add_m2m_noperm(self):
        """Without any Book permission, the m2m inline is absent on add."""
        response = self.client.get(reverse('admin:admin_inlines_author_add'))
        # No change permission on books, so no inline
        self.assertNotContains(response, '<h2>Author-book relationships</h2>')
        self.assertNotContains(response, 'Add another Author\\u002DBook Relationship')
        self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')

    def test_inline_add_fk_noperm(self):
        """Without any Inner2 permission, the FK inline is absent on add."""
        response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
        # No permissions on Inner2s, so no inline
        self.assertNotContains(response, '<h2>Inner2s</h2>')
        self.assertNotContains(response, 'Add another Inner2')
        self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')

    def test_inline_change_m2m_noperm(self):
        """Without any Book permission, the m2m inline is absent on change."""
        response = self.client.get(self.author_change_url)
        # No change permission on books, so no inline
        self.assertNotContains(response, '<h2>Author-book relationships</h2>')
        self.assertNotContains(response, 'Add another Author\\u002DBook Relationship')
        self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')

    def test_inline_change_fk_noperm(self):
        """Without any Inner2 permission, the FK inline is absent on change."""
        response = self.client.get(self.holder_change_url)
        # No permissions on Inner2s, so no inline
        self.assertNotContains(response, '<h2>Inner2s</h2>')
        self.assertNotContains(response, 'Add another Inner2')
        self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')

    def test_inline_add_m2m_add_perm(self):
        """Add permission alone is not enough for the auto-created m2m
        through-model inline; it still stays hidden."""
        permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(reverse('admin:admin_inlines_author_add'))
        # No change permission on Books, so no inline
        self.assertNotContains(response, '<h2>Author-book relationships</h2>')
        self.assertNotContains(response, 'Add another Author\\u002DBook Relationship')
        self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')

    def test_inline_add_fk_add_perm(self):
        """Add permission on the FK inline shows it with extra blank forms."""
        permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
        # Add permission on inner2s, so we get the inline
        self.assertContains(response, '<h2>Inner2s</h2>')
        self.assertContains(response, 'Add another Inner2')
        self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
                            'value="3" name="inner2_set-TOTAL_FORMS" />', html=True)

    def test_inline_change_m2m_add_perm(self):
        """Add permission on Book does not reveal the m2m inline on change."""
        permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.author_change_url)
        # No change permission on books, so no inline
        self.assertNotContains(response, '<h2>Author-book relationships</h2>')
        self.assertNotContains(response, 'Add another Author\\u002DBook Relationship')
        self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
        self.assertNotContains(response, 'id="id_Author_books-0-DELETE"')

    def test_inline_change_m2m_change_perm(self):
        """Change permission on Book enables full add/change/delete of the
        auto-created m2m intermediate inline."""
        permission = Permission.objects.get(codename='change_book', content_type=self.book_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.author_change_url)
        # We have change perm on books, so we can add/change/delete inlines
        self.assertContains(response, '<h2>Author-book relationships</h2>')
        self.assertContains(response, 'Add another Author\\u002Dbook relationship')
        self.assertContains(response, '<input type="hidden" id="id_Author_books-TOTAL_FORMS" '
                            'value="4" name="Author_books-TOTAL_FORMS" />', html=True)
        self.assertContains(response, '<input type="hidden" id="id_Author_books-0-id" '
                            'value="%i" name="Author_books-0-id" />' % self.author_book_auto_m2m_intermediate_id, html=True)
        self.assertContains(response, 'id="id_Author_books-0-DELETE"')

    def test_inline_change_fk_add_perm(self):
        """Add-only permission shows blank extra forms but hides the form
        for the existing instance."""
        permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.holder_change_url)
        # Add permission on inner2s, so we can add but not modify existing
        self.assertContains(response, '<h2>Inner2s</h2>')
        self.assertContains(response, 'Add another Inner2')
        # 3 extra forms only, not the existing instance form
        self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
                            'value="3" name="inner2_set-TOTAL_FORMS" />', html=True)
        self.assertNotContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
                               'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)

    def test_inline_change_fk_change_perm(self):
        """Change-only permission shows the existing form but forbids adds
        (MAX_NUM_FORMS is forced to 0)."""
        permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.holder_change_url)
        # Change permission on inner2s, so we can change existing but not add new
        self.assertContains(response, '<h2>Inner2s</h2>')
        # Just the one form for existing instances
        self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
                            'value="1" name="inner2_set-TOTAL_FORMS" />', html=True)
        self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
                            'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
        # max-num 0 means we can't add new ones
        self.assertContains(response, '<input type="hidden" id="id_inner2_set-MAX_NUM_FORMS" '
                            'value="0" name="inner2_set-MAX_NUM_FORMS" />', html=True)

    def test_inline_change_fk_add_change_perm(self):
        """Add + change permission shows the existing form plus extras."""
        permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.holder_change_url)
        # Add/change perm, so we can add new and change existing
        self.assertContains(response, '<h2>Inner2s</h2>')
        # One form for existing instance and three extra for new
        self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
                            'value="4" name="inner2_set-TOTAL_FORMS" />', html=True)
        self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
                            'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)

    def test_inline_change_fk_change_del_perm(self):
        """Change + delete permission allows editing/removing the existing
        instance but adds no blank forms."""
        permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.holder_change_url)
        # Change/delete perm on inner2s, so we can change/delete existing
        self.assertContains(response, '<h2>Inner2s</h2>')
        # One form for existing instance only, no new
        self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
                            'value="1" name="inner2_set-TOTAL_FORMS" />', html=True)
        self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
                            'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
        self.assertContains(response, 'id="id_inner2_set-0-DELETE"')

    def test_inline_change_fk_all_perms(self):
        """All three permissions combine: existing form, extras, delete box."""
        permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.holder_change_url)
        # All perms on inner2s, so we can add/change/delete
        self.assertContains(response, '<h2>Inner2s</h2>')
        # One form for existing instance only, three for new
        self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
                            'value="4" name="inner2_set-TOTAL_FORMS" />', html=True)
        self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
                            'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
        self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_inlines.urls")
class SeleniumFirefoxTests(AdminSeleniumWebDriverTestCase):
    """Browser-driven tests for the JavaScript add/delete behavior of
    dynamic inline formsets.  Subclasses below re-run the same suite with
    other webdrivers (Chrome, IE)."""
    available_apps = ['admin_inlines'] + AdminSeleniumWebDriverTestCase.available_apps
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'

    def setUp(self):
        """Create the superuser the tests log in as."""
        # password = "secret"
        User.objects.create(
            pk=100, username='super', first_name='Super', last_name='User', email='super@example.com',
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
            is_staff=True, last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def test_add_stackeds(self):
        """
        Ensure that the "Add another XXX" link correctly adds items to the
        stacked formset.
        """
        self.admin_login(username='super', password='secret')
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_inlines_holder4_add')))
        inline_id = '#inner4stacked_set-group'
        rows_length = lambda: len(self.selenium.find_elements_by_css_selector(
            '%s .dynamic-inner4stacked_set' % inline_id))
        self.assertEqual(rows_length(), 3)
        add_button = self.selenium.find_element_by_link_text(
            'Add another Inner4 stacked')
        add_button.click()
        self.assertEqual(rows_length(), 4)

    def test_delete_stackeds(self):
        """Clicking every delete link removes the dynamically added rows."""
        self.admin_login(username='super', password='secret')
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_inlines_holder4_add')))
        inline_id = '#inner4stacked_set-group'
        rows_length = lambda: len(self.selenium.find_elements_by_css_selector(
            '%s .dynamic-inner4stacked_set' % inline_id))
        self.assertEqual(rows_length(), 3)
        add_button = self.selenium.find_element_by_link_text(
            'Add another Inner4 stacked')
        add_button.click()
        add_button.click()
        self.assertEqual(rows_length(), 5, msg="sanity check")
        # Only dynamically added rows have a delete link, so this leaves
        # the initial three rows in place.
        for delete_link in self.selenium.find_elements_by_css_selector(
                '%s .inline-deletelink' % inline_id):
            delete_link.click()
        self.assertEqual(rows_length(), 3)

    def test_add_inlines(self):
        """
        Ensure that the "Add another XXX" link correctly adds items to the
        inline form.
        """
        self.admin_login(username='super', password='secret')
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_inlines_profilecollection_add')))
        # Check that there's only one inline to start with and that it has the
        # correct ID.
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')), 1)
        self.assertEqual(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')[0].get_attribute('id'),
            'profile_set-0')
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-0 input[name=profile_set-0-first_name]')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-0 input[name=profile_set-0-last_name]')), 1)
        # Add an inline
        self.selenium.find_element_by_link_text('Add another Profile').click()
        # Check that the inline has been added, that it has the right id, and
        # that it contains the right fields.
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')), 2)
        self.assertEqual(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')[1].get_attribute('id'), 'profile_set-1')
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-1 input[name=profile_set-1-first_name]')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-1 input[name=profile_set-1-last_name]')), 1)
        # Let's add another one to be sure
        self.selenium.find_element_by_link_text('Add another Profile').click()
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')), 3)
        self.assertEqual(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')[2].get_attribute('id'), 'profile_set-2')
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-2 input[name=profile_set-2-first_name]')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-2 input[name=profile_set-2-last_name]')), 1)
        # Enter some data and click 'Save'
        self.selenium.find_element_by_name('profile_set-0-first_name').send_keys('0 first name 1')
        self.selenium.find_element_by_name('profile_set-0-last_name').send_keys('0 last name 2')
        self.selenium.find_element_by_name('profile_set-1-first_name').send_keys('1 first name 1')
        self.selenium.find_element_by_name('profile_set-1-last_name').send_keys('1 last name 2')
        self.selenium.find_element_by_name('profile_set-2-first_name').send_keys('2 first name 1')
        self.selenium.find_element_by_name('profile_set-2-last_name').send_keys('2 last name 2')
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        self.wait_page_loaded()
        # Check that the objects have been created in the database
        self.assertEqual(ProfileCollection.objects.all().count(), 1)
        self.assertEqual(Profile.objects.all().count(), 3)

    def test_delete_inlines(self):
        """Deleting rows removes them and re-sequences the remaining IDs."""
        self.admin_login(username='super', password='secret')
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_inlines_profilecollection_add')))
        # Add a few inlines
        self.selenium.find_element_by_link_text('Add another Profile').click()
        self.selenium.find_element_by_link_text('Add another Profile').click()
        self.selenium.find_element_by_link_text('Add another Profile').click()
        self.selenium.find_element_by_link_text('Add another Profile').click()
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '#profile_set-group table tr.dynamic-profile_set')), 5)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-3')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-4')), 1)
        # Click on a few delete buttons
        self.selenium.find_element_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-1 td.delete a').click()
        self.selenium.find_element_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-2 td.delete a').click()
        # Verify that they're gone and that the IDs have been re-sequenced
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '#profile_set-group table tr.dynamic-profile_set')), 3)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)

    def test_alternating_rows(self):
        """Dynamically added rows alternate the row1/row2 CSS classes."""
        self.admin_login(username='super', password='secret')
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('admin:admin_inlines_profilecollection_add')))
        # Add a few inlines
        self.selenium.find_element_by_link_text('Add another Profile').click()
        self.selenium.find_element_by_link_text('Add another Profile').click()
        row_selector = 'form#profilecollection_form tr.dynamic-profile_set'
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            "%s.row1" % row_selector)), 2, msg="Expect two row1 styled rows")
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            "%s.row2" % row_selector)), 1, msg="Expect one row2 styled row")
class SeleniumChromeTests(SeleniumFirefoxTests):
    # Re-run the full inline Selenium suite against Chrome.
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class SeleniumIETests(SeleniumFirefoxTests):
    # Re-run the full inline Selenium suite against Internet Explorer.
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
| bsd-3-clause |
MrOrioleCashback/snippets | TPCh13.py | 1 | 2805 | import string, random
# Word sources: two Project Gutenberg texts plus a reference word list,
# one entry per line.
# NOTE(review): paths are relative to the current working directory --
# confirm the texts sit beside this script.
words = open("Alice's Adventures in Wonderland by Lewis Carroll.txt").read().split('\n')
xwords = open('words.txt').read().split('\n')
emma = open('Emma by Jane Austen.txt').read().split('\n')
def break_book_into_words(book):
    """Split the lines of *book* into a flat list of lowercase words.

    Punctuation and digits are stripped (so apostrophes vanish:
    don't -> dont) and hyphens are treated as word separators.
    """
    strip_table = str.maketrans('', '', string.punctuation + string.digits)
    cleaned = [line.replace('-', ' ').translate(strip_table).lower().strip()
               for line in book]
    return ' '.join(cleaned).split()
#print(sorted(break_book_into_words(words), key=len))
"""
dictionary.setdefault(key, value) does the work of:
if key in dictionary:
dictionary[key].append(value)
else:
dictionary[key] = [value]
in 1 line. Use .setdefault() whenever creating a dict
"""
def unique_word_count(word_list):
    """Count how often each word occurs in *word_list*.

    Returns a list of (count, word) pairs sorted in descending order, so
    the most frequent words come first (ties break reverse-alphabetically).
    """
    # collections.Counter replaces the hand-rolled setdefault/increment loop.
    from collections import Counter
    counts = Counter(word_list)
    return sorted([(count, word) for word, count in counts.items()], reverse=True)
#print(unique_word_count(break_book_into_words(emma)))
def make_list_a_dict(word_list):
    """Return a dict mapping each word in *word_list* to 0.

    Turns a word list into a hash-backed structure so membership tests
    are O(1) instead of O(n); dict.fromkeys replaces the manual loop.
    """
    return dict.fromkeys(word_list, 0)
#print(make_list_a_dict(xwords))
def cross_check_words(word_list, check_list):
    """Count the words in *word_list* that are absent from *check_list*.

    Returns (count, word) pairs sorted descending.  Pass a dict or set as
    *check_list* so the membership test is O(1) -- with a list this is the
    slow nested-loop case noted below.
    """
    # Counter over a filtered generator replaces the setdefault loop.
    from collections import Counter
    counts = Counter(word for word in word_list if word not in check_list)
    return sorted([(count, word) for word, count in counts.items()], reverse=True)
"""
nested loops with lists is slow
"""
#print(cross_check_words(break_book_into_words(words), xwords)) #[Finished in 27.7s]
"""
it's worth making a list into a dict when nesting loops.
"""
#print(cross_check_words(break_book_into_words(words), make_list_a_dict(xwords))) #[Finished in 0.1s]
#print(len(break_book_into_words(emma))) #total word count
#print(len(unique_word_count(break_book_into_words(emma)))) #total different words
def top_10_words(book, num=10):
    """Print the *num* most frequent words of *book* with their counts."""
    ranked = unique_word_count(break_book_into_words(book))
    print('the top %s most common words are:' % num)
    # Two tab separators keep the counts in a rough column.
    for count, token in ranked[:num]:
        print(token, count, sep='\t' * 2)
#top_10_words(emma)
"""
#sep='\t'*2 tells python to use two tab seperators rather than a space like so:
the top 10 most common words are:
to 5242
the 5204
and 4897
of 4293
i 3191
a 3130
it 2529
her 2483
was 2400
she 2364
The optional argument 'num=10' defaults to 10 but can be changed when calling the function
optional arguments always follow the required ones.
"""
| mit |
alrifqi/django | django/utils/baseconv.py | 650 | 2982 | # Copyright (c) 2010 Guilherme Gondim. All rights reserved.
# Copyright (c) 2009 Simon Willison. All rights reserved.
# Copyright (c) 2002 Drew Perttula. All rights reserved.
#
# License:
# Python Software Foundation License version 2
#
# See the file "LICENSE" for terms & conditions for usage, and a DISCLAIMER OF
# ALL WARRANTIES.
#
# This Baseconv distribution contains no GNU General Public Licensed (GPLed)
# code so it may be used in proprietary projects just like prior ``baseconv``
# distributions.
#
# All trademarks referenced herein are property of their respective holders.
#
"""
Convert numbers from base 10 integers to base X strings and back again.
Sample usage::
>>> base20 = BaseConverter('0123456789abcdefghij')
>>> base20.encode(1234)
'31e'
>>> base20.decode('31e')
1234
>>> base20.encode(-1234)
'-31e'
>>> base20.decode('-31e')
-1234
>>> base11 = BaseConverter('0123456789-', sign='$')
>>> base11.encode(-1234)
'$-22'
>>> base11.decode('$-22')
-1234
"""
BASE2_ALPHABET = '01'
BASE16_ALPHABET = '0123456789ABCDEF'
# Base 56 omits visually ambiguous characters (0, 1, I, O, l, o).
BASE56_ALPHABET = '23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnpqrstuvwxyz'
BASE36_ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz'
BASE62_ALPHABET = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
BASE64_ALPHABET = BASE62_ALPHABET + '-_'
class BaseConverter(object):
    """Convert between base-10 representations and strings written in an
    arbitrary digit alphabet.

    *digits* supplies the target alphabet; *sign* is the character that
    marks negative values in the encoded form (default ``'-'``).
    """
    decimal_digits = '0123456789'

    def __init__(self, digits, sign='-'):
        self.sign = sign
        self.digits = digits
        if sign in self.digits:
            raise ValueError('Sign character found in converter base digits.')

    def __repr__(self):
        return "<BaseConverter: base%s (%s)>" % (len(self.digits), self.digits)

    def encode(self, i):
        """Encode decimal *i* into this converter's alphabet."""
        # On the decimal side negatives are always marked with '-'.
        neg, encoded = self.convert(i, self.decimal_digits, self.digits, '-')
        return self.sign + encoded if neg else encoded

    def decode(self, s):
        """Decode string *s* (in this converter's alphabet) to an int."""
        # On the encoded side negatives are marked with self.sign.
        neg, decimal = self.convert(s, self.digits, self.decimal_digits, self.sign)
        return int('-' + decimal) if neg else int(decimal)

    def convert(self, number, from_digits, to_digits, sign):
        """Rewrite *number* from one digit alphabet to another.

        Returns ``(neg, text)`` where *neg* is 1 if *number* started with
        *sign* (which is stripped before conversion) and 0 otherwise.
        """
        text = str(number)
        if text[0] == sign:
            text = text[1:]
            neg = 1
        else:
            neg = 0
        # Fold the digits into an integer magnitude.
        magnitude = 0
        for ch in text:
            magnitude = magnitude * len(from_digits) + from_digits.index(ch)
        # Re-emit the magnitude in the target base, most significant first.
        if magnitude == 0:
            converted = to_digits[0]
        else:
            converted = ''
            while magnitude > 0:
                magnitude, remainder = divmod(magnitude, len(to_digits))
                converted = to_digits[remainder] + converted
        return neg, converted
# Ready-made converters for the common bases.
base2 = BaseConverter(BASE2_ALPHABET)
base16 = BaseConverter(BASE16_ALPHABET)
base36 = BaseConverter(BASE36_ALPHABET)
base56 = BaseConverter(BASE56_ALPHABET)
base62 = BaseConverter(BASE62_ALPHABET)
# The base-64 alphabet contains '-', so '$' is used as the sign marker.
base64 = BaseConverter(BASE64_ALPHABET, sign='$')
| bsd-3-clause |
pokelondon/pokeradio | web/pokeradio/history/migrations/0002_auto__add_field_artist_spotify_artist_href.py | 1 | 5505 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add ``spotify_artist_href`` to ``Artist``."""

    def forwards(self, orm):
        """Apply the migration."""
        # Adding field 'Artist.spotify_artist_href'
        db.add_column(u'history_artist', 'spotify_artist_href',
                      self.gf('django.db.models.fields.CharField')(default=None, max_length=255),
                      keep_default=False)

    def backwards(self, orm):
        """Reverse the migration."""
        # Deleting field 'Artist.spotify_artist_href'
        db.delete_column(u'history_artist', 'spotify_artist_href')

    # Frozen ORM snapshot South uses to build fake model classes for this
    # migration; auto-generated -- do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'history.archivetrack': {
            'Meta': {'object_name': 'ArchiveTrack'},
            'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['history.Artist']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'length': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'spotify_album_href': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'spotify_href': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'history.artist': {
            'Meta': {'object_name': 'Artist'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'spotify_artist_href': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'history.play': {
            'Meta': {'object_name': 'Play'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'track': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['history.ArchiveTrack']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        }
    }
complete_apps = ['history'] | gpl-3.0 |
J861449197/edx-platform | lms/djangoapps/courseware/features/problems.py | 98 | 6831 | '''
Steps for problem.feature lettuce tests
'''
# pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
from lettuce import world, step
from common import i_am_registered_for_the_course, visit_scenario_item
from problems_setup import PROBLEM_DICT, answer_problem, problem_has_answer, add_problem_to_course
def _view_problem(step, problem_type, problem_settings=None):
    """Register for the model course, add a *problem_type* problem to it
    (optionally overriding problem settings), and open the hosting section."""
    i_am_registered_for_the_course(step, 'model_course')
    # Ensure that the course has this problem type
    add_problem_to_course(world.scenario_dict['COURSE'].number, problem_type, problem_settings)
    # Go to the one section in the factory-created course
    # which should be loaded with the correct problem
    visit_scenario_item('SECTION')
@step(u'I am viewing a "([^"]*)" problem with "([^"]*)" attempt')
def view_problem_with_attempts(step, problem_type, attempts):
    """View a problem whose ``max_attempts`` setting is *attempts*."""
    _view_problem(step, problem_type, {'max_attempts': attempts})
@step(u'I am viewing a randomization "([^"]*)" "([^"]*)" problem with "([^"]*)" attempts with reset')
def view_problem_attempts_reset(step, randomization, problem_type, attempts, ):
    """View a problem with a randomization policy, an attempt limit,
    and the Reset button enabled."""
    _view_problem(step, problem_type, {'max_attempts': attempts,
                                       'rerandomize': randomization,
                                       'show_reset_button': True})
@step(u'I am viewing a "([^"]*)" that shows the answer "([^"]*)"')
def view_problem_with_show_answer(step, problem_type, answer):
    """View a problem whose ``showanswer`` policy is *answer*."""
    _view_problem(step, problem_type, {'showanswer': answer})
@step(u'I am viewing a "([^"]*)" problem')
def view_problem(step, problem_type):
    """View a *problem_type* problem with default settings."""
    _view_problem(step, problem_type)
@step(u'I am viewing a randomization "([^"]*)" "([^"]*)" problem with reset button on')
def view_random_reset_problem(step, randomization, problem_type):
    """View a problem with a randomization policy and the Reset button on."""
    _view_problem(step, problem_type, {'rerandomize': randomization, 'show_reset_button': True})
@step(u'External graders respond "([^"]*)"')
def set_external_grader_response(step, correctness):
    """Configure the fake XQueue server so that every graded submission
    comes back marked *correctness* ('correct' or 'incorrect')."""
    # `assert` is a statement, not a function; also compute the boolean
    # once instead of the redundant `True if ... else False` ternary.
    assert correctness in ['correct', 'incorrect']
    is_correct = correctness == 'correct'
    response_dict = {
        'correct': is_correct,
        'score': 1 if is_correct else 0,
        'msg': 'Your problem was graded {0}'.format(correctness)
    }
    # Set the fake xqueue server to always respond
    # correct/incorrect when asked to grade a problem
    world.xqueue.config['default'] = response_dict
@step(u'I answer a "([^"]*)" problem "([^"]*)ly"')
def answer_problem_step(step, problem_type, correctness):
    """Mark a given problem type correct or incorrect, then submit it.

    *problem_type* is a string representing the type of problem (e.g. 'drop down')
    *correctness* is in ['correct', 'incorrect']
    """
    # Change the answer on the page
    input_problem_answer(step, problem_type, correctness)
    # Submit the problem
    check_problem(step)
@step(u'I input an answer on a "([^"]*)" problem "([^"]*)ly"')
def input_problem_answer(_, problem_type, correctness):
    """
    Have the browser input an answer (either correct or incorrect)
    without submitting it.
    """
    assert correctness in ['correct', 'incorrect']
    assert problem_type in PROBLEM_DICT
    answer_problem(world.scenario_dict['COURSE'].number, problem_type, correctness)
@step(u'I check a problem')
def check_problem(step):
    """Submit the currently entered answer by clicking the Check button."""
    # first scroll down so the loading mathjax button does not
    # cover up the Check button
    world.browser.execute_script("window.scrollTo(0,1024)")
    world.css_click("button.check")
    # Wait for the problem to finish re-rendering
    world.wait_for_ajax_complete()
@step(u'The "([^"]*)" problem displays a "([^"]*)" answer')
def assert_problem_has_answer(step, problem_type, answer_class):
    """
    Assert that the problem is displaying a particular answer.
    These correspond to the same correct/incorrect
    answers we set in answer_problem()

    We can also check that a problem has been left blank
    by setting answer_class='blank'
    """
    assert answer_class in ['correct', 'incorrect', 'blank']
    assert problem_type in PROBLEM_DICT
    problem_has_answer(world.scenario_dict['COURSE'].number, problem_type, answer_class)
@step(u'I reset the problem')
def reset_problem(_step):
    """Click the problem's Reset button and wait for the page to re-render."""
    world.css_click('button.reset')
    # The reset triggers an AJAX round-trip; wait for it to settle.
    world.wait_for_ajax_complete()
@step(u'I press the button with the label "([^"]*)"$')
def press_the_button_with_label(_step, buttonname):
    # Locate the first labelled button on the page and click it.
    # NOTE(review): *buttonname* is never used, and css_has_text() is handed
    # the element object rather than expected label text -- presumably this
    # was meant to assert the label equals *buttonname*; verify against the
    # css_has_text() helper's signature before relying on this step.
    button_css = 'button span.show-label'
    elem = world.css_find(button_css).first
    world.css_has_text(button_css, elem)
    world.css_click(button_css)
@step(u'The "([^"]*)" button does( not)? appear')
def action_button_present(_step, buttonname, doesnt_appear):
    """Assert presence (or absence) of an action button by its data-value."""
    button_css = 'div.action button[data-value*="%s"]' % buttonname
    # The optional "( not)?" capture is falsy when the phrase is absent.
    should_be_present = not bool(doesnt_appear)
    if should_be_present:
        assert world.is_css_present(button_css)
    else:
        assert world.is_css_not_present(button_css)
@step(u'the Show/Hide button label is "([^"]*)"$')
def show_hide_label_is(_step, label_name):
    """Wait until the Show/Hide button carries the expected label text."""
    # The label is rewritten client-side by capa's display.js,
    # so poll for the text instead of asserting immediately.
    label_css = 'button.show span.show-label'
    world.wait_for(lambda _: world.css_has_text(label_css, label_name))
@step(u'I should see a score of "([^"]*)"$')
def see_score(_step, score):
    """Wait for the problem-progress area to display "(<score>)"."""
    # The progress text is rendered asynchronously by capa's display.js,
    # so poll for it rather than asserting immediately.
    score_css = 'div.problem-progress'
    expected_text = '({})'.format(score)
    world.wait_for(lambda _: world.css_has_text(score_css, expected_text))
@step(u'[Mm]y "([^"]*)" answer is( NOT)? marked "([^"]*)"')
def assert_answer_mark(_step, problem_type, isnt_marked, correctness):
    """
    Assert that the expected answer mark is visible
    for a given problem type.

    *problem_type* is a string identifying the type of problem (e.g. 'drop down')
    *correctness* is in ['correct', 'incorrect', 'unanswered']
    """
    # Determine which selector(s) to look for based on correctness
    assert correctness in ['correct', 'incorrect', 'unanswered']
    assert problem_type in PROBLEM_DICT
    # At least one of the correct selectors should be present
    for sel in PROBLEM_DICT[problem_type][correctness]:
        if bool(isnt_marked):
            # Negated check: wait for the selector to vanish, then record
            # its absence.  The lambda intentionally closes over the loop
            # variable, hence the pylint suppression.
            world.wait_for(lambda _: world.is_css_not_present(sel))  # pylint: disable=cell-var-from-loop
            has_expected = world.is_css_not_present(sel)
        else:
            world.css_find(sel)  # css_find includes a wait_for pattern
            has_expected = world.is_css_present(sel)
        # As soon as we find the selector, break out of the loop
        if has_expected:
            break
    # Expect that we found the expected selector
    assert has_expected
| agpl-3.0 |
NMGRL/pychron | pychron/core/ui/display_editor.py | 2 | 1224 | # ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
# ============= standard library imports ========================
# ============= local library imports ==========================
from __future__ import absolute_import
from pychron.core.ui.factory import toolkit_factory
# Resolve the toolkit-specific (Qt/Wx) editor implementations at import time.
DisplayEditor = toolkit_factory('display_editor', 'DisplayEditor')
LoggerEditor = toolkit_factory('display_editor', 'LoggerEditor')
# ============= EOF =============================================
| apache-2.0 |
sydhenry/cookiecutter-django | cookiecutter-django/users/tests/test_admin.py | 264 | 1308 | from test_plus.test import TestCase
from ..admin import MyUserCreationForm
class TestMyUserCreationForm(TestCase):
    """Validation behaviour of MyUserCreationForm's username handling."""

    def setUp(self):
        # A pre-existing user whose username is therefore already taken.
        self.user = self.make_user()

    def test_clean_username_success(self):
        """A brand-new username validates and survives clean_username()."""
        data = {
            'username': 'alamode',
            'password1': '123456',
            'password2': '123456',
        }
        form = MyUserCreationForm(data)
        # is_valid() runs the full validation pipeline.
        valid = form.is_valid()
        self.assertTrue(valid)
        # clean_username() should hand the username back unchanged.
        username = form.clean_username()
        self.assertEqual('alamode', username)

    def test_clean_username_false(self):
        """Reusing an existing username is rejected with a single error."""
        data = {
            'username': self.user.username,
            'password1': '123456',
            'password2': '123456',
        }
        form = MyUserCreationForm(data)
        # Validation fails because the username is already taken.
        valid = form.is_valid()
        self.assertFalse(valid)
        # Exactly one error, and it is attached to the 'username' field.
        self.assertTrue(len(form.errors) == 1)
        self.assertTrue('username' in form.errors)
| bsd-3-clause |
sknepneklab/SAMoS | configurations/MakeConfigurations/regular_rods.py | 1 | 4354 | # * *************************************************************
# *
# * Soft Active Mater on Surfaces (SAMoS)
# *
# * Author: Rastko Sknepnek
# *
# * Division of Physics
# * School of Engineering, Physics and Mathematics
# * University of Dundee
# *
# * (c) 2013, 2014
# *
# * School of Science and Engineering
# * School of Life Sciences
# * University of Dundee
# *
# * (c) 2015
# *
# * Author: Silke Henkes
# *
# * Department of Physics
# * Institute for Complex Systems and Mathematical Biology
# * University of Aberdeen
# *
# * (c) 2014, 2015
# *
# * This program cannot be used, copied, or modified without
# * explicit written permission of the authors.
# *
# * ***************************************************************
# Utility code for building regular rods initial configuration on xy plane
from datetime import *
from random import uniform, randint
from math import *
import argparse
from particle import *
class Plane:
    """A regular grid of rods tiling a rectangular Lx-by-Ly patch of the xy plane.

    Each rod sits in the centre of an lx-by-ly grid cell, with its director
    pointing along +/-x at random.
    """

    def __init__(self, Lx, Ly, N, lx, ly, sigma, l):
        self.L = (Lx, Ly)
        self.N = N
        self.lx = lx
        self.ly = ly
        self.sigma = sigma
        self.l = l
        self.__generate()

    def __generate(self):
        """Populate self.particles column by column over the grid."""
        self.particles = []
        idx = 0
        n = 0
        while True:
            # Centre of column n; stop once it would fall outside the box.
            x = -0.5 * self.L[0] + (n + 0.5) * self.lx
            if x > 0.5 * (self.L[0] - self.lx) + 1e-3:
                break
            m = 0
            while True:
                y = -0.5 * self.L[1] + (m + 0.5) * self.ly
                if y > 0.5 * (self.L[1] - self.ly) + 1e-3:
                    break
                rod = Particle(idx)
                rod.r = [x, y, 0.0]
                # Director along +x or -x, chosen at random per rod.
                rod.n = [2.0 * (randint(0, 1) - 0.5), 0.0, 0.0]
                rod.v = [0.0, 0.0, 0.0]
                rod.R = 0.5 * self.sigma
                rod.l = self.l
                rod.omega = 0.0
                self.particles.append(rod)
                idx += 1
                m += 1
            n += 1

    def write(self, outfile):
        """Dump the configuration to *outfile* in the SAMoS column format."""
        gentime = datetime.now()
        with open(outfile, 'w') as out:
            out.write('# Total of %d particles\n' % len(self.particles))
            out.write('# Generated on : %s\n' % str(gentime))
            out.write('# id type radius x y z vx vy vz nx ny nz omega l\n')
            for p in self.particles:
                x, y, z = p.r
                vx, vy, vz = p.v
                nx, ny, nz = p.n
                out.write('%d %d %f %f %f %f %f %f %f %f %f %f %f %f\n' % (p.idx, p.tp, p.R, x, y, z, vx, vy, vz, nx, ny, nz, p.omega, p.l))
# ---- Command-line interface (NOTE: this script is Python 2 -- print statements) ----
parser = argparse.ArgumentParser()
parser.add_argument("-x", "--lx", type=float, default=10.0, help="box length in x direction")
parser.add_argument("-y", "--ly", type=float, default=10.0, help="box length in y direction")
parser.add_argument("-f", "--phi", type=float, default=0.5, help="packing fraction")
parser.add_argument("-o", "--output", type=str, default='out.dat', help="output file")
parser.add_argument("-v", "--vavr", type=float, default=1.0, help="average velocity")
parser.add_argument("-a", "--radius", type=float, default=0.5, help="rod radius")
parser.add_argument("-l", "--length", type=float, default=2.0, help="rod length")
args = parser.parse_args()

# Geometry: area of one spherocylindrical rod, rod count N for the requested
# packing fraction, and grid cell sizes lx x ly that realise that fraction.
area = args.lx*args.ly
sigma = 2.0*args.radius
Arod = sigma*(args.length+0.25*pi*sigma)
N = int(round(area*args.phi/Arod))
p = (args.length+sigma)/sigma  # rod aspect ratio; NOTE(review): 'p' is reused below for the Plane
lx = sqrt(Arod*p/args.phi)
ly = sqrt(Arod/(p*args.phi))
print p, lx, ly
print
print "\tActive Particles on Curved Spaces (APCS)"
print "\tBuilding of a random flat configuration (xy plane)"
print
print "\tRastko Sknepnek"
print "\tUniversity of Dundee"
print "\t(c) 2013, 2014, 2015"
print "\t----------------------------------------------"
print
print "\tLx : ", args.lx
print "\tLy : ", args.ly
print "\tPacking fraction : ", args.phi
print "\tNumber of particles : ", N
print "\tOutput file : ", args.output
print "\tRod radius : ", args.radius
print "\tRod length : ", args.length
print
start = datetime.now()
# NOTE(review): random_orinet (sic) is assigned but never used; the
# commented-out condition below suggests it once toggled random orientations.
random_orinet = True
#if args.l1 != 2.0 or args.l2 != 1.0:
#random_orinet = True
p = Plane(args.lx, args.ly, N, lx, ly, sigma, args.length)
p.write(args.output)
print "Actual packing fraction for this box : ", len(p.particles)*Arod/area
end = datetime.now()
total = end - start
print
print " *** Completed in ", total.total_seconds(), " seconds *** "
print
| gpl-3.0 |
sathnaga/virt-test | virttest/libvirt_xml/network_xml.py | 2 | 14137 | """
Module simplifying manipulation of XML described at
http://libvirt.org/formatnetwork.html
"""
import logging
from virttest import virsh, xml_utils
from virttest.libvirt_xml import base, xcepts, accessors
class RangeList(list):
    """
    A list of (start, end) address tuples, e.g. DHCP ranges.
    """

    def __init__(self, iterable=None):
        """
        Initialize from an iterable of two-item (start, end) address tuples.

        :param iterable: iterable of 2-tuples of start/end address strings,
                         or None for an empty range list.
        :raise: xcepts.LibvirtXMLError if any item is not a 2-tuple.
        """
        x_str = "iterable must contain two-item tuples of start/end addresses"
        newone = []
        # Treat the default None as "no ranges" instead of crashing on
        # iteration (the original raised TypeError for RangeList()).
        for item in (iterable or []):
            if not isinstance(item, tuple):
                raise xcepts.LibvirtXMLError(x_str)
            # Bug fix: was 'len(item) is not 2' -- identity comparison with
            # an int is unreliable; use value equality.
            if len(item) != 2:
                raise xcepts.LibvirtXMLError(x_str)
            # Assume strings will be validated elsewhere
            newone.append(tuple(item))
        super(RangeList, self).__init__(newone)

    def append_to_element(self, element):
        """
        Append one <range start=... end=.../> child per tuple to *element*.

        :param element: an xml_utils.ElementTree.Element (e.g. a <dhcp> node)
        :raise: ValueError if *element* is not an Element (or subclass).
        """
        if not issubclass(type(element), xml_utils.ElementTree.Element):
            raise ValueError("Element is not a ElementTree.Element or subclass")
        for start, end in self:
            serange = {'start': start, 'end': end}
            element.append(xml_utils.ElementTree.Element('range', serange))
class IPXML(base.LibvirtXMLBase):
    """
    IP address block, optionally containing DHCP range information

    Properties:
        dhcp_ranges: RangeList instances (list-like)
        address: string IP address
        netmask: string IP's netmask
    """

    __slots__ = base.LibvirtXMLBase.__slots__ + ('dhcp_ranges', 'address',
                                                 'netmask')

    def __init__(self, address='192.168.122.1', netmask='255.255.255.0',
                 virsh_instance=base.virsh):
        """
        Create new IPXML instance based on address/mask
        """
        accessors.XMLAttribute('address', self, parent_xpath='/', tag_name='ip',
                               attribute='address')
        accessors.XMLAttribute('netmask', self, parent_xpath='/', tag_name='ip',
                               attribute='netmask')
        super(IPXML, self).__init__(virsh_instance=virsh_instance)
        self.xml = u"<ip address='%s' netmask='%s'></ip>" % (address, netmask)

    def get_dhcp_ranges(self):
        """
        Returns all XML described DHCP ranges as a RangeList object
        """
        xmltreefile = self.dict_get('xml')
        newlist = []
        for element in xmltreefile.findall('/ip/dhcp/range'):
            start = element.get('start')  # attribute of range tag
            end = element.get('end')
            newlist.append((start, end, ))
        return RangeList(newlist)

    def set_dhcp_ranges(self, value):
        """
        Sets XML described DHCP ranges from a RangeList object

        Passing None simply removes any existing dhcp block.
        """
        # Bug fix: the original condition was
        #   'not issubclass(type(value), RangeList) or value is not None'
        # which raised for EVERY non-None value, including valid RangeLists.
        if value is not None and not issubclass(type(value), RangeList):
            raise xcepts.LibvirtXMLError("Value is not a RangeList or subclass"
                                         " instance.")
        # Always start from clean-slate
        self.del_dhcp_ranges()
        if value is None:
            return  # ip element has no dhcp block
        xmltreefile = self.dict_get('xml')
        dhcp = xml_utils.ElementTree.Element('dhcp')
        ip_elem = xmltreefile.find('/ip')
        if ip_elem is None:
            raise xcepts.LibvirtXMLError("Network contains no IP element")
        ip_elem.append(dhcp)
        value.append_to_element(dhcp)
        xmltreefile.write()

    def del_dhcp_ranges(self):
        """
        Removes all DHCP ranges from XML
        """
        # NOTE(review): unlike del_ip(), this does not call xmltreefile.write();
        # set_dhcp_ranges() writes afterwards, but a standalone delete may not
        # persist -- verify against xml_utils semantics.
        xmltreefile = self.dict_get('xml')
        element = xmltreefile.find('/dhcp')
        if element is not None:
            xmltreefile.remove(element)
class NetworkXMLBase(base.LibvirtXMLBase):
    """
    Accessor methods for NetworkXML class.

    Properties:
        name: string, operates on XML name tag
        uuid: string, operates on uuid tag
        fwd_mode: string, operates on mode attribute of forward tag
        mac: string, operates on address attribute of mac tag
        ip: string operate on ip/dhcp ranges as IPXML instances
        bridge: dict, operates on bridge attributes
        defined: virtual boolean, callout to virsh methods
            get: True if libvirt knows network name
            set: True defines network, False undefines to libvirt
            del: Undefines network to libvirt
        active: virtual boolean, callout to virsh methods
            get: True if network is active to libvirt
            set: True activates network, False deactivates to libvirt
            del: Deactivates network to libvirt
        autostart: virtual boolean, callout to virsh methods
            get: True if libvirt autostarts network with same name
            set: True to set autostart, False to unset to libvirt
            del: Unset autostart to libvirt
        persistent: virtual boolean, callout to virsh methods
            get: True if network was defined, False if only created.
            set: Same as defined property
            del: Same as defined property
    """

    __slots__ = base.LibvirtXMLBase.__slots__ + ('name', 'uuid', 'bridge',
                                                 'defined', 'active',
                                                 'autostart', 'persistent',
                                                 'fwd_mode', 'mac', 'ip')

    # Virtual (virsh-backed) properties have no XML to compare against.
    __uncompareable__ = base.LibvirtXMLBase.__uncompareable__ + (
        'defined', 'active',
        'autostart', 'persistent')

    __schema_name__ = "network"

    def __init__(self, virsh_instance=base.virsh):
        # Wire up the XML-backed accessors for the simple element/attribute
        # properties before the base class finishes initialization.
        accessors.XMLElementText('name', self, parent_xpath='/',
                                 tag_name='name')
        accessors.XMLElementText('uuid', self, parent_xpath='/',
                                 tag_name='uuid')
        accessors.XMLAttribute('fwd_mode', self, parent_xpath='/',
                               tag_name='forward', attribute='mode')
        accessors.XMLAttribute('mac', self, parent_xpath='/',
                               tag_name='mac', attribute='address');
        accessors.XMLElementDict('bridge', self, parent_xpath='/',
                                 tag_name='bridge')
        super(NetworkXMLBase, self).__init__(virsh_instance=virsh_instance)

    def __check_undefined__(self, errmsg):
        # Raise LibvirtXMLError(errmsg) unless libvirt knows this network.
        if not self.defined:
            raise xcepts.LibvirtXMLError(errmsg)

    def get_defined(self):
        """
        Accessor for 'define' property - does this name exist in network list
        """
        return self.name in self.virsh.net_state_dict(only_names=True).keys()

    def set_defined(self, value):
        """Accessor method for 'define' property, set True to define."""
        # NOTE(review): this guard is a no-op -- it 'pass'es and then falls
        # through regardless; presumably an early 'return' was intended.
        if not self.super_get('INITIALIZED'):
            pass  # do nothing
        value = bool(value)
        if value:
            self.virsh.net_define(self.xml)  # send it the filename
        else:
            del self.defined

    def del_defined(self):
        """Accessor method for 'define' property, undefines network"""
        self.__check_undefined__("Cannot undefine non-existant network")
        self.virsh.net_undefine(self.name)

    def get_active(self):
        """Accessor method for 'active' property (True/False)"""
        self.__check_undefined__("Cannot determine activation for undefined "
                                 "network")
        state_dict = self.virsh.net_state_dict()
        return state_dict[self.name]['active']

    def set_active(self, value):
        """Accessor method for 'active' property, sets network active"""
        # NOTE(review): same no-op INITIALIZED guard as set_defined().
        if not self.super_get('INITIALIZED'):
            pass  # do nothing
        self.__check_undefined__("Cannot activate undefined network")
        value = bool(value)
        if value:
            if not self.active:
                self.virsh.net_start(self.name)
            else:
                pass  # don't activate twice
        else:
            if self.active:
                del self.active
            else:
                pass  # don't deactivate twice

    def del_active(self):
        """Accessor method for 'active' property, stops network"""
        self.__check_undefined__("Cannot deactivate undefined network")
        if self.active:
            self.virsh.net_destroy(self.name)
        else:
            pass  # don't destroy twice

    def get_autostart(self):
        """Accessor method for 'autostart' property, True if set"""
        self.__check_undefined__("Cannot determine autostart for undefined "
                                 "network")
        state_dict = self.virsh.net_state_dict()
        return state_dict[self.name]['autostart']

    def set_autostart(self, value):
        """Accessor method for 'autostart' property, sets/unsets autostart"""
        # NOTE(review): same no-op INITIALIZED guard as set_defined().
        if not self.super_get('INITIALIZED'):
            pass  # do nothing
        self.__check_undefined__("Cannot set autostart for undefined network")
        value = bool(value)
        if value:
            if not self.autostart:
                self.virsh.net_autostart(self.name)
            else:
                pass  # don't set autostart twice
        else:
            if self.autostart:
                del self.autostart
            else:
                pass  # don't unset autostart twice

    def del_autostart(self):
        """Accessor method for 'autostart' property, unsets autostart"""
        if not self.defined:
            raise xcepts.LibvirtXMLError("Can't autostart nonexistant network")
        self.virsh.net_autostart(self.name, "--disable")

    def get_persistent(self):
        """Accessor method for 'persistent' property"""
        state_dict = self.virsh.net_state_dict()
        return state_dict[self.name]['persistent']

    # Copy behavior for consistency
    set_persistent = set_defined
    del_persistent = del_defined

    def get_ip(self):
        # Re-root the underlying tree at /ip and hand it to an IPXML wrapper.
        xmltreefile = self.dict_get('xml')
        try:
            ip_root = xmltreefile.reroot('/ip')
        except KeyError, detail:  # Python 2 exception syntax (file-wide)
            raise xcepts.LibvirtXMLError(detail)
        ipxml = IPXML(virsh_instance = self.dict_get('virsh'))
        ipxml.xmltreefile = ip_root
        return ipxml

    def set_ip(self, value):
        # Replace any existing ip element with the tree held by *value*.
        if not issubclass(type(value), IPXML):
            raise xcepts.LibvirtXMLError("value must be a IPXML or subclass")
        xmltreefile = self.dict_get('xml')
        # nuke any existing IP block
        self.del_ip()
        # IPXML root element is whole IP element tree
        root = xmltreefile.getroot()
        root.append(value.xmltreefile.getroot())
        xmltreefile.write()

    def del_ip(self):
        # Remove the ip element (if present) and persist the change.
        xmltreefile = self.dict_get('xml')
        element = xmltreefile.find('/ip')
        if element is not None:
            xmltreefile.remove(element)
            xmltreefile.write()
class NetworkXML(NetworkXMLBase):
    """
    Manipulators of a Virtual Network through it's XML definition.
    """

    __slots__ = NetworkXMLBase.__slots__

    def __init__(self, network_name='default', virsh_instance=base.virsh):
        """
        Initialize new instance with empty XML
        """
        super(NetworkXML, self).__init__(virsh_instance=virsh_instance)
        self.xml = u"<network><name>%s</name></network>" % network_name

    @staticmethod  # wraps __new__
    def new_all_networks_dict(virsh_instance=base.virsh):
        """
        Return a dictionary of names to NetworkXML instances for all networks

        @param: virsh: virsh module or instance to use
        @return: Dictionary of network name to NetworkXML instance
        """
        result = {}
        # Values should all share virsh property
        new_netxml = NetworkXML(virsh_instance=virsh_instance)
        networks = new_netxml.virsh.net_state_dict(only_names=True).keys()
        for net_name in networks:
            new_copy = new_netxml.copy()
            # NOTE(review): uses the module-level virsh here, not the shared
            # instance used just above -- confirm this is intentional.
            new_copy.xml = virsh.net_dumpxml(net_name).stdout.strip()
            result[net_name] = new_copy
        return result

    @staticmethod
    def new_from_net_dumpxml(network_name, virsh_instance=base.virsh):
        """
        Return new NetworkXML instance from virsh net-dumpxml command

        @param: network_name: Name of network to net-dumpxml
        @param: virsh_instance: virsh module or instance to use
        @return: New initialized NetworkXML instance
        """
        netxml = NetworkXML(virsh_instance=virsh_instance)
        netxml['xml'] = virsh_instance.net_dumpxml(network_name).stdout.strip()
        return netxml

    @staticmethod
    def get_uuid_by_name(network_name, virsh_instance=base.virsh):
        """
        Return Network's uuid by Network's name.

        @param: network_name: Network's name
        @return: Network's uuid
        """
        network_xml = NetworkXML.new_from_net_dumpxml(network_name,
                                                      virsh_instance)
        return network_xml.uuid

    def debug_xml(self):
        """
        Dump contents of XML file for debugging
        """
        xml = str(self)  # LibvirtXMLBase.__str__ returns XML content
        for debug_line in str(xml).splitlines():
            logging.debug("Network XML: %s", debug_line)

    def create(self):
        """
        Adds non-persistant / transient network to libvirt with net-create
        """
        self.virsh.net_create(self.xml)

    def orbital_nuclear_strike(self):
        """It's the only way to really be sure.  Remove all libvirt state"""
        try:
            self['active'] = False  # deactivate (stop) network if active
        except xcepts.LibvirtXMLError, detail:  # Python 2 exception syntax
            # inconsequential, network will be removed
            logging.warning(detail)
        try:
            self['defined'] = False  # undefine (delete) network if persistent
        except xcepts.LibvirtXMLError, detail:
            # network already gone
            logging.warning(detail)
| gpl-2.0 |
laurent-george/weboob | modules/lcl/enterprise/browser.py | 1 | 4998 | # -*- coding: utf-8 -*-
# Copyright(C) 2010-2013 Romain Bignon, Pierre Mazière, Noé Rubinstein
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from urllib import urlencode
from weboob.deprecated.browser import Browser, BrowserIncorrectPassword
from .pages import HomePage, MessagesPage, LogoutPage, LogoutOkPage, \
AlreadyConnectedPage, ExpiredPage, MovementsPage, RootPage
__all__ = ['LCLEnterpriseBrowser']
class LCLEnterpriseBrowser(Browser):
    """Browser for LCL's corporate ("entreprises") online banking site."""

    BASEURL = 'https://entreprises.secure.lcl.fr'
    # Pinned sha256 fingerprints of the accepted server certificates.
    CERTHASH = ['04e3509c20ac8bdbdb3d0ed37bc34db2dde5ed4bc4c30a3605f63403413099a9',
                '5fcf4a9ceeec25e406a04dffe0c6eacbdf72d11d394cd049701bfbaba3d853d9',
                '774ac6f1c419083541a27d95672a87a5edf5c82d948368008eab2764e65866f9',
                '3db256edfeb7ba255625724b7e62d4dab229557226336ba87b9753006721f16f']
    ENCODING = 'utf-8'
    USER_AGENT = Browser.USER_AGENTS['wget']

    def __init__(self, *args, **kwargs):
        # Build the URL -> page-class maps from BASEURL so subclasses
        # (e.g. LCLEspaceProBrowser) can reuse them with a different host.
        BASEURL = self.BASEURL.rstrip('/')
        self.PROTOCOL, self.DOMAIN = BASEURL.split('://', 2)
        # Reverse map: page class -> canonical URL, used for navigation.
        self.PAGES_REV = {
            LogoutPage: BASEURL + '/outil/IQEN/Authentication/logout',
            LogoutOkPage: BASEURL + '/outil/IQEN/Authentication/logoutOk',
            HomePage: BASEURL + '/indexcle.html',
            MessagesPage: BASEURL + '/outil/IQEN/Bureau/mesMessages',
            MovementsPage: BASEURL + '/outil/IQMT/mvt.Synthese/syntheseMouvementPerso',
        }
        # Forward map: URL -> page class, consumed by the Browser machinery.
        self.PAGES = {
            self.PAGES_REV[HomePage]: HomePage,
            self.PAGES_REV[LogoutPage]: LogoutPage,
            self.PAGES_REV[LogoutOkPage]: LogoutOkPage,
            self.PAGES_REV[MessagesPage]: MessagesPage,
            self.PAGES_REV[MovementsPage]: MovementsPage,
            BASEURL + '/outil/IQMT/mvt.Synthese/paginerReleve': MovementsPage,
            BASEURL + '/': RootPage,
            BASEURL + '/outil/IQEN/Authentication/dejaConnecte': AlreadyConnectedPage,
            BASEURL + '/outil/IQEN/Authentication/sessionExpiree': ExpiredPage,
        }
        Browser.__init__(self, *args, **kwargs)
        # Tracks whether a session was opened, so deinit() can log out.
        self._logged = False

    def deinit(self):
        # Close the remote session on teardown; the site rejects
        # concurrent logins (see AlreadyConnectedPage).
        super(LCLEnterpriseBrowser, self).deinit()
        if self._logged:
            self.logout()

    def is_logged(self):
        # Logged-in pages carry an identity header block; cache the result.
        if self.page:
            ID_XPATH = '//div[@id="headerIdentite"]'
            self._logged = bool(self.page.document.xpath(ID_XPATH))
            return self._logged
        return False

    def login(self):
        """Open a session, raising BrowserIncorrectPassword on failure."""
        assert isinstance(self.username, basestring)  # Python 2 str type
        assert isinstance(self.password, basestring)
        if not self.is_on_page(HomePage):
            self.location('/indexcle.html', no_login=True)
        self.page.login(self.username, self.password)
        if self.is_on_page(AlreadyConnectedPage):
            raise BrowserIncorrectPassword("Another session is already open. Please try again later.")
        if not self.is_logged():
            raise BrowserIncorrectPassword(
                "Invalid login/password.\n"
                "If you did not change anything, be sure to check for password renewal request\n"
                "on the original website.\n"
                "Automatic renewal will be implemented later.")

    def logout(self):
        # Two-step logout: request logout, then confirm on the logout-OK page.
        self.location(self.PAGES_REV[LogoutPage], no_login=True)
        self.location(self.PAGES_REV[LogoutOkPage], no_login=True)
        assert self.is_on_page(LogoutOkPage)

    def get_accounts_list(self):
        # The enterprise site exposes a single synthetic account.
        return [self.get_account()]

    def get_account(self, id=None):
        # *id* is accepted for API compatibility but unused here.
        if not self.is_on_page(MovementsPage):
            self.location(self.PAGES_REV[MovementsPage])
        return self.page.get_account()

    def get_history(self, account):
        """Yield transactions for *account*, paging through the statement."""
        if not self.is_on_page(MovementsPage):
            self.location(self.PAGES_REV[MovementsPage])
        # Page 0 is already displayed; fetch the remaining pages by POST.
        for n in range(1, self.page.nb_pages()):
            self.location('/outil/IQMT/mvt.Synthese/paginerReleve',
                          urlencode({'numPage': str(n)}),
                          no_login=True)
        for tr in self.page.get_operations():
            yield tr

    def get_cb_operations(self, account):
        # Deferred card operations are not available on this site.
        raise NotImplementedError()
class LCLEspaceProBrowser(LCLEnterpriseBrowser):
    """Variant of the enterprise browser for LCL's "Espace Pro" site.

    Only the host and certificate pin differ; the page maps are rebuilt
    from BASEURL in the parent's __init__.
    """

    BASEURL = 'https://espacepro.secure.lcl.fr'
    # Single pinned fingerprint (a plain string, unlike the parent's list).
    CERTHASH = '5f08e784703b7f0eeac03cacec12b49ffdc612eef60a9f7145eea6b4239f5bb6'
jonparrott/gcloud-python | bigtable/docs/conf.py | 3 | 10244 | # -*- coding: utf-8 -*-
#
# google-cloud-bigtable documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# Make the package importable from the docs directory so autodoc can find it.
sys.path.insert(0, os.path.abspath('..'))

# NOTE(review): hard-coded here; keep in sync with the package's own version.
__version__ = '0.1.0'
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.intersphinx',
    'sphinx.ext.coverage',
    'sphinx.ext.napoleon',
    'sphinx.ext.viewcode',
]

# autodoc/autosummary flags
autoclass_content = 'both'  # merge the class and __init__ docstrings
autodoc_default_flags = ['members']
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'google-cloud-bigtable'
copyright = u'2017, Google'
author = u'Google APIs'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = __version__
# The short X.Y version (first two components of the release string).
version = '.'.join(release.split('.')[0:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
# Output file base name for HTML help builder.
htmlhelp_basename = 'google-cloud-bigtable-doc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'google-cloud-bigtable.tex',
     u'google-cloud-bigtable Documentation', author, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, 'google-cloud-bigtable',
              u'google-cloud-bigtable Documentation', [author], 1)]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'google-cloud-bigtable',
     u'google-cloud-bigtable Documentation', author, 'google-cloud-bigtable',
     'GAPIC library for the {metadata.shortName} v2 service', 'APIs'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
# NOTE(review): the 'python' entry uses plain http while 'gax' uses https --
# presumably both should be https; confirm before changing the URL.
intersphinx_mapping = {
    'python': ('http://python.readthedocs.org/en/latest/', None),
    'gax': ('https://gax-python.readthedocs.org/en/latest/', None),
}
# Napoleon settings (sphinx.ext.napoleon: Google/NumPy-style docstring parsing).
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
| apache-2.0 |
xiandiancloud/edx-platform | lms/djangoapps/courseware/tests/test_word_cloud.py | 36 | 8306 | # -*- coding: utf-8 -*-
"""Word cloud integration tests using mongo modulestore."""
import json
from operator import itemgetter
from . import BaseTestXmodule
from xmodule.x_module import STUDENT_VIEW
class TestWordCloud(BaseTestXmodule):
    """Integration test for word cloud xmodule."""
    CATEGORY = "word_cloud"

    def _get_users_state(self):
        """Return current state for each user:

        {username: json_state}
        """
        # Check word cloud response for every user.
        users_state = {}
        for user in self.users:
            response = self.clients[user.username].post(self.get_url('get_state'))
            users_state[user.username] = json.loads(response.content)
        return users_state

    def _post_words(self, words):
        """Post `words` and return current state for each user:

        {username: json_state}
        """
        users_state = {}
        for user in self.users:
            response = self.clients[user.username].post(
                self.get_url('submit'),
                {'student_words[]': words},
                HTTP_X_REQUESTED_WITH='XMLHttpRequest'
            )
            users_state[user.username] = json.loads(response.content)
        return users_state

    def _check_response(self, response_contents, correct_jsons):
        """Utility function that compares correct and real responses."""
        for username, content in response_contents.items():
            # Used in debugger for comparing objects.
            # self.maxDiff = None

            # We must compare top_words manually, because they are unsorted.
            keys_to_compare = set(content.keys()).difference(set(['top_words']))
            self.assertDictEqual(
                {k: content[k] for k in keys_to_compare},
                {k: correct_jsons[username][k] for k in keys_to_compare})

            # Comparing top_words after sorting both sides by word text:
            top_words_content = sorted(
                content['top_words'],
                key=itemgetter('text')
            )
            top_words_correct = sorted(
                correct_jsons[username]['top_words'],
                key=itemgetter('text')
            )
            self.assertListEqual(top_words_content, top_words_correct)

    def test_initial_state(self):
        """Initial state of word cloud is correct. This is the state that
        is sent from server to frontend when students load the word
        cloud page.
        """
        users_state = self._get_users_state()

        self.assertEqual(
            ''.join(set([
                content['status']
                for _, content in users_state.items()
            ])),
            'success')

        # Correct initial data:
        correct_initial_data = {
            u'status': u'success',
            u'student_words': {},
            u'total_count': 0,
            u'submitted': False,
            u'top_words': {},
            u'display_student_percents': False
        }

        for _, response_content in users_state.items():
            self.assertEqual(response_content, correct_initial_data)

    def test_post_words(self):
        """Students can submit data successfully.
        Word cloud data properly updates after students submit.
        """
        input_words = [
            "small",
            "BIG",
            " Spaced ",
            " few words",
        ]

        # Submitted words are lowercased and stripped of whitespace.
        correct_words = [
            u"small",
            u"big",
            u"spaced",
            u"few words",
        ]

        users_state = self._post_words(input_words)

        self.assertEqual(
            ''.join(set([
                content['status']
                for _, content in users_state.items()
            ])),
            'success')

        correct_state = {}
        for index, user in enumerate(self.users):
            correct_state[user.username] = {
                u'status': u'success',
                u'submitted': True,
                u'display_student_percents': True,
                u'student_words': {word: 1 + index for word in correct_words},
                u'total_count': len(input_words) * (1 + index),
                u'top_words': [
                    {
                        u'text': word, u'percent': 100 / len(input_words),
                        u'size': (1 + index)
                    }
                    for word in correct_words
                ]
            }

        self._check_response(users_state, correct_state)

    def test_collective_users_submits(self):
        """Test word cloud data flow per single and collective users submits.

        Makes sure that:

        1. Initial state of word cloud is correct. This is the state that
        is sent from server to frontend when students load the word
        cloud page.

        2. Students can submit data successfully.

        3. Subsequent submits produce an "already voted" error. Subsequent
        submits are not allowed by the user interface, but technically they
        are possible, and word_cloud should react properly.

        4. State of word cloud after #3 is still as after #2.
        """
        # 1.
        users_state = self._get_users_state()
        self.assertEqual(
            ''.join(set([
                content['status']
                for _, content in users_state.items()
            ])),
            'success')

        # 2.
        # Incremental state per user.
        users_state_after_post = self._post_words(['word1', 'word2'])
        self.assertEqual(
            ''.join(set([
                content['status']
                for _, content in users_state_after_post.items()
            ])),
            'success')

        # Final state after all posts.
        users_state_before_fail = self._get_users_state()

        # 3.
        users_state_after_post = self._post_words(
            ['word1', 'word2', 'word3'])
        self.assertEqual(
            ''.join(set([
                content['status']
                for _, content in users_state_after_post.items()
            ])),
            'fail')

        # 4.
        current_users_state = self._get_users_state()
        self._check_response(users_state_before_fail, current_users_state)

    def test_unicode(self):
        """Submitted non-ASCII words are preserved (lowercased) in state."""
        input_words = [u" this is unicode Юникод"]
        correct_words = [u"this is unicode юникод"]

        users_state = self._post_words(input_words)

        self.assertEqual(
            ''.join(set([
                content['status']
                for _, content in users_state.items()
            ])),
            'success')

        for user in self.users:
            self.assertListEqual(
                users_state[user.username]['student_words'].keys(),
                correct_words)

    def test_handle_ajax_incorrect_dispatch(self):
        """An unknown AJAX dispatch returns HTTP 200 with a 'fail' payload."""
        responses = {
            user.username: self.clients[user.username].post(
                self.get_url('whatever'),
                {},
                HTTP_X_REQUESTED_WITH='XMLHttpRequest')
            for user in self.users
        }

        self.assertEqual(
            set([
                response.status_code
                for _, response in responses.items()
            ]).pop(),
            200)

        for user in self.users:
            self.assertDictEqual(
                json.loads(responses[user.username].content),
                {
                    'status': 'fail',
                    'error': 'Unknown Command!'
                })

    def test_word_cloud_constructor(self):
        """Make sure that all parameters are extracted correctly from xml."""
        fragment = self.runtime.render(self.item_descriptor, STUDENT_VIEW)
        expected_context = {
            'ajax_url': self.item_descriptor.xmodule_runtime.ajax_url,
            'element_class': self.item_descriptor.location.category,
            'element_id': self.item_descriptor.location.html_id(),
            'num_inputs': 5,  # default value
            'submitted': False  # default value
        }
        self.assertEqual(fragment.content, self.runtime.render_template('word_cloud.html', expected_context))
| agpl-3.0 |
mKeRix/home-assistant | homeassistant/components/wink/light.py | 21 | 3635 | """Support for Wink lights."""
import pywink
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
LightEntity,
)
from homeassistant.util import color as color_util
from homeassistant.util.color import (
color_temperature_mired_to_kelvin as mired_to_kelvin,
)
from . import DOMAIN, WinkDevice
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Wink lights.

    Bulbs and light groups expose the same pywink interface, so both are
    wrapped in WinkLight; entities already registered (by unique id) are
    skipped.
    """
    for get_lights in (pywink.get_light_bulbs, pywink.get_light_groups):
        for light in get_lights():
            _id = light.object_id() + light.name()
            if _id not in hass.data[DOMAIN]["unique_ids"]:
                add_entities([WinkLight(light, hass)])
class WinkLight(WinkDevice, LightEntity):
    """Representation of a Wink light."""

    async def async_added_to_hass(self):
        """Call when entity is added to hass."""
        self.hass.data[DOMAIN]["entities"]["light"].append(self)

    @property
    def is_on(self):
        """Return true if light is on."""
        return self.wink.state()

    @property
    def brightness(self):
        """Return the brightness of the light (0-255), or None if unknown."""
        if self.wink.brightness() is not None:
            # pywink reports brightness as a 0.0-1.0 fraction.
            return int(self.wink.brightness() * 255)
        return None

    @property
    def hs_color(self):
        """Return the current bulb color as a (hue, saturation) tuple."""
        if self.wink.supports_xy_color():
            return color_util.color_xy_to_hs(*self.wink.color_xy())
        if self.wink.supports_hue_saturation():
            hue = self.wink.color_hue()
            saturation = self.wink.color_saturation()
            if hue is not None and saturation is not None:
                # pywink uses 0-1 fractions; HA expects degrees / percent.
                return hue * 360, saturation * 100
        return None

    @property
    def color_temp(self):
        """Return the current color temperature in mireds."""
        if not self.wink.supports_temperature():
            return None
        return color_util.color_temperature_kelvin_to_mired(
            self.wink.color_temperature_kelvin()
        )

    @property
    def supported_features(self):
        """Flag supported features."""
        supports = SUPPORT_BRIGHTNESS
        if self.wink.supports_temperature():
            supports = supports | SUPPORT_COLOR_TEMP
        if self.wink.supports_xy_color():
            supports = supports | SUPPORT_COLOR
        elif self.wink.supports_hue_saturation():
            supports = supports | SUPPORT_COLOR
        return supports

    def turn_on(self, **kwargs):
        """Turn the light on, applying any requested color/brightness."""
        brightness = kwargs.get(ATTR_BRIGHTNESS)
        hs_color = kwargs.get(ATTR_HS_COLOR)
        color_temp_mired = kwargs.get(ATTR_COLOR_TEMP)

        state_kwargs = {}

        if hs_color:
            if self.wink.supports_xy_color():
                xy_color = color_util.color_hs_to_xy(*hs_color)
                state_kwargs["color_xy"] = xy_color
            if self.wink.supports_hue_saturation():
                hs_scaled = hs_color[0] / 360, hs_color[1] / 100
                state_kwargs["color_hue_saturation"] = hs_scaled

        # Explicit `is not None` checks: a plain truthiness test would
        # silently drop a requested brightness or color temp of 0.
        if color_temp_mired is not None:
            state_kwargs["color_kelvin"] = mired_to_kelvin(color_temp_mired)

        if brightness is not None:
            state_kwargs["brightness"] = brightness / 255.0

        self.wink.set_state(True, **state_kwargs)

    def turn_off(self, **kwargs):
        """Turn the light off."""
        self.wink.set_state(False)
| mit |
OTL/rostensorflow | image_recognition.py | 1 | 1825 | import rospy
from sensor_msgs.msg import Image
from std_msgs.msg import String
from cv_bridge import CvBridge
import cv2
import numpy as np
import tensorflow as tf
import classify_image
class RosTensorFlow():
    """ROS node that classifies images on the 'image' topic with the
    Inception model and publishes label strings on 'result'."""

    def __init__(self):
        classify_image.maybe_download_and_extract()
        self._session = tf.Session()
        classify_image.create_graph()
        self._cv_bridge = CvBridge()
        # Build the node-id -> human-readable-string lookup once here rather
        # than on every callback: it reads label files from disk each time.
        # Must be ready before the subscriber starts delivering images.
        self._node_lookup = classify_image.NodeLookup()
        self._sub = rospy.Subscriber('image', Image, self.callback, queue_size=1)
        self._pub = rospy.Publisher('result', String, queue_size=1)
        # Minimum softmax score for a label to be logged/published.
        self.score_threshold = rospy.get_param('~score_threshold', 0.1)
        # How many top predictions to consider per image.
        self.use_top_k = rospy.get_param('~use_top_k', 5)

    def callback(self, image_msg):
        """Classify one incoming image and publish labels above threshold."""
        cv_image = self._cv_bridge.imgmsg_to_cv2(image_msg, "bgr8")
        # Adapted from classify_image.py: re-encode the frame as JPEG bytes
        # because the graph's input node decodes a JPEG.
        # (tobytes() is the non-deprecated spelling of ndarray.tostring().)
        image_data = cv2.imencode('.jpg', cv_image)[1].tobytes()
        softmax_tensor = self._session.graph.get_tensor_by_name('softmax:0')
        predictions = self._session.run(
            softmax_tensor, {'DecodeJpeg/contents:0': image_data})
        predictions = np.squeeze(predictions)
        # argsort ascending, take the last use_top_k ids, reverse for best-first.
        top_k = predictions.argsort()[-self.use_top_k:][::-1]
        for node_id in top_k:
            human_string = self._node_lookup.id_to_string(node_id)
            score = predictions[node_id]
            if score > self.score_threshold:
                rospy.loginfo('%s (score = %.5f)' % (human_string, score))
                self._pub.publish(human_string)

    def main(self):
        """Block until the node is shut down."""
        rospy.spin()
if __name__ == '__main__':
    # Parse classify_image's command-line flags (model directory, etc.)
    # before constructing the node, which downloads/loads the model.
    classify_image.setup_args()
    rospy.init_node('rostensorflow')
    tensor = RosTensorFlow()
    tensor.main()
| apache-2.0 |
gfyoung/pandas | pandas/tests/arrays/boolean/test_logical.py | 7 | 8486 | import operator
import numpy as np
import pytest
import pandas as pd
import pandas._testing as tm
from pandas.arrays import BooleanArray
from pandas.tests.extension.base import BaseOpsUtil
class TestLogicalOps(BaseOpsUtil):
    """Tests for BooleanArray logical ops (&, |, ^) and their Kleene
    (three-valued, NA-propagating) semantics."""

    def test_numpy_scalars_ok(self, all_logical_operators):
        a = pd.array([True, False, None], dtype="boolean")
        op = getattr(a, all_logical_operators)

        tm.assert_extension_array_equal(op(True), op(np.bool_(True)))
        tm.assert_extension_array_equal(op(False), op(np.bool_(False)))

    def get_op_from_name(self, op_name):
        """Map a dunder name like '__or__'/'__ror__' to a callable."""
        short_opname = op_name.strip("_")
        short_opname = short_opname if "xor" in short_opname else short_opname + "_"
        try:
            op = getattr(operator, short_opname)
        except AttributeError:
            # Assume it is the reverse operator; swap the operands.
            rop = getattr(operator, short_opname[1:])

            def op(obj, other):
                return rop(other, obj)

        return op

    def test_empty_ok(self, all_logical_operators):
        a = pd.array([], dtype="boolean")
        op_name = all_logical_operators
        result = getattr(a, op_name)(True)
        tm.assert_extension_array_equal(a, result)

        result = getattr(a, op_name)(False)
        tm.assert_extension_array_equal(a, result)

        # FIXME: dont leave commented-out
        # TODO: pd.NA
        # result = getattr(a, op_name)(pd.NA)
        # tm.assert_extension_array_equal(a, result)

    def test_logical_length_mismatch_raises(self, all_logical_operators):
        op_name = all_logical_operators
        a = pd.array([True, False, None], dtype="boolean")
        msg = "Lengths must match to compare"

        with pytest.raises(ValueError, match=msg):
            getattr(a, op_name)([True, False])

        with pytest.raises(ValueError, match=msg):
            getattr(a, op_name)(np.array([True, False]))

        with pytest.raises(ValueError, match=msg):
            getattr(a, op_name)(pd.array([True, False], dtype="boolean"))

    def test_logical_nan_raises(self, all_logical_operators):
        op_name = all_logical_operators
        a = pd.array([True, False, None], dtype="boolean")
        msg = "Got float instead"

        with pytest.raises(TypeError, match=msg):
            getattr(a, op_name)(np.nan)

    @pytest.mark.parametrize("other", ["a", 1])
    def test_non_bool_or_na_other_raises(self, other, all_logical_operators):
        a = pd.array([True, False], dtype="boolean")
        with pytest.raises(TypeError, match=str(type(other).__name__)):
            getattr(a, all_logical_operators)(other)

    def test_kleene_or(self):
        # A clear test of behavior.
        a = pd.array([True] * 3 + [False] * 3 + [None] * 3, dtype="boolean")
        b = pd.array([True, False, None] * 3, dtype="boolean")
        result = a | b
        expected = pd.array(
            [True, True, True, True, False, None, True, None, None], dtype="boolean"
        )
        tm.assert_extension_array_equal(result, expected)

        result = b | a
        tm.assert_extension_array_equal(result, expected)

        # ensure we haven't mutated anything inplace
        tm.assert_extension_array_equal(
            a, pd.array([True] * 3 + [False] * 3 + [None] * 3, dtype="boolean")
        )
        tm.assert_extension_array_equal(
            b, pd.array([True, False, None] * 3, dtype="boolean")
        )

    @pytest.mark.parametrize(
        "other, expected",
        [
            (pd.NA, [True, None, None]),
            (True, [True, True, True]),
            (np.bool_(True), [True, True, True]),
            (False, [True, False, None]),
            (np.bool_(False), [True, False, None]),
        ],
    )
    def test_kleene_or_scalar(self, other, expected):
        # TODO: test True & False
        a = pd.array([True, False, None], dtype="boolean")
        result = a | other
        expected = pd.array(expected, dtype="boolean")
        tm.assert_extension_array_equal(result, expected)

        result = other | a
        tm.assert_extension_array_equal(result, expected)

        # ensure we haven't mutated anything inplace
        tm.assert_extension_array_equal(
            a, pd.array([True, False, None], dtype="boolean")
        )

    def test_kleene_and(self):
        # A clear test of behavior.
        a = pd.array([True] * 3 + [False] * 3 + [None] * 3, dtype="boolean")
        b = pd.array([True, False, None] * 3, dtype="boolean")
        result = a & b
        expected = pd.array(
            [True, False, None, False, False, False, None, False, None], dtype="boolean"
        )
        tm.assert_extension_array_equal(result, expected)

        result = b & a
        tm.assert_extension_array_equal(result, expected)

        # ensure we haven't mutated anything inplace
        tm.assert_extension_array_equal(
            a, pd.array([True] * 3 + [False] * 3 + [None] * 3, dtype="boolean")
        )
        tm.assert_extension_array_equal(
            b, pd.array([True, False, None] * 3, dtype="boolean")
        )

    @pytest.mark.parametrize(
        "other, expected",
        [
            (pd.NA, [None, False, None]),
            (True, [True, False, None]),
            (False, [False, False, False]),
            (np.bool_(True), [True, False, None]),
            (np.bool_(False), [False, False, False]),
        ],
    )
    def test_kleene_and_scalar(self, other, expected):
        a = pd.array([True, False, None], dtype="boolean")
        result = a & other
        expected = pd.array(expected, dtype="boolean")
        tm.assert_extension_array_equal(result, expected)

        result = other & a
        tm.assert_extension_array_equal(result, expected)

        # ensure we haven't mutated anything inplace
        tm.assert_extension_array_equal(
            a, pd.array([True, False, None], dtype="boolean")
        )

    def test_kleene_xor(self):
        a = pd.array([True] * 3 + [False] * 3 + [None] * 3, dtype="boolean")
        b = pd.array([True, False, None] * 3, dtype="boolean")
        result = a ^ b
        expected = pd.array(
            [False, True, None, True, False, None, None, None, None], dtype="boolean"
        )
        tm.assert_extension_array_equal(result, expected)

        result = b ^ a
        tm.assert_extension_array_equal(result, expected)

        # ensure we haven't mutated anything inplace
        tm.assert_extension_array_equal(
            a, pd.array([True] * 3 + [False] * 3 + [None] * 3, dtype="boolean")
        )
        tm.assert_extension_array_equal(
            b, pd.array([True, False, None] * 3, dtype="boolean")
        )

    @pytest.mark.parametrize(
        "other, expected",
        [
            (pd.NA, [None, None, None]),
            (True, [False, True, None]),
            (np.bool_(True), [False, True, None]),
            (np.bool_(False), [True, False, None]),
        ],
    )
    def test_kleene_xor_scalar(self, other, expected):
        a = pd.array([True, False, None], dtype="boolean")
        result = a ^ other
        expected = pd.array(expected, dtype="boolean")
        tm.assert_extension_array_equal(result, expected)

        result = other ^ a
        tm.assert_extension_array_equal(result, expected)

        # ensure we haven't mutated anything inplace
        tm.assert_extension_array_equal(
            a, pd.array([True, False, None], dtype="boolean")
        )

    @pytest.mark.parametrize("other", [True, False, pd.NA, [True, False, None] * 3])
    def test_no_masked_assumptions(self, other, all_logical_operators):
        # The logical operations should not assume that masked values are False!
        a = pd.arrays.BooleanArray(
            np.array([True, True, True, False, False, False, True, False, True]),
            np.array([False] * 6 + [True, True, True]),
        )
        b = pd.array([True] * 3 + [False] * 3 + [None] * 3, dtype="boolean")
        if isinstance(other, list):
            other = pd.array(other, dtype="boolean")

        result = getattr(a, all_logical_operators)(other)
        expected = getattr(b, all_logical_operators)(other)
        tm.assert_extension_array_equal(result, expected)

        if isinstance(other, BooleanArray):
            other._data[other._mask] = True
            a._data[a._mask] = False

            result = getattr(a, all_logical_operators)(other)
            expected = getattr(b, all_logical_operators)(other)
            tm.assert_extension_array_equal(result, expected)
| bsd-3-clause |
chrys87/orca-beep | test/keystrokes/firefox/line_nav_lists.py | 1 | 9274 | #!/usr/bin/python
"""Test of HTML list presentation."""
from macaroon.playback import *
import utils
sequence = MacroSequence()

#sequence.append(WaitForDocLoad())
sequence.append(PauseAction(5000))

# Work around some new quirk in Gecko that causes this test to fail if
# run via the test harness rather than manually.
sequence.append(KeyComboAction("<Control>r"))

# (key to press, assertion name, expected presentation) for each step.
# The original file repeated the StartRecording/KeyCombo/Assert triple
# verbatim for every step; this table drives the identical sequence.
_STEPS = [
    ("<Control>Home", "1. Top of file",
     ["BRAILLE LINE: 'Welcome to a List of Lists h1'",
      " VISIBLE: 'Welcome to a List of Lists h1', cursor=1",
      "SPEECH OUTPUT: 'Welcome to a List of Lists heading level 1'"]),
    ("Down", "2. Line Down",
     ["BRAILLE LINE: 'Lists are not only fun to make, they are fun to use. They help us:'",
      " VISIBLE: 'Lists are not only fun to make, ', cursor=1",
      "SPEECH OUTPUT: 'Lists are not only fun to make, they are fun to use. They help us:'"]),
    ("Down", "3. Line Down",
     ["BRAILLE LINE: '1. remember what the heck we are doing each day'",
      " VISIBLE: '1. remember what the heck we are', cursor=1",
      "SPEECH OUTPUT: '1. remember what the heck we are doing each day.'"]),
    ("Down", "4. Line Down",
     ["BRAILLE LINE: '2. arrange long and arbitrary lines of text into ordered lists that are pleasing to the eye and suggest some'",
      " VISIBLE: '2. arrange long and arbitrary li', cursor=1",
      "SPEECH OUTPUT: '2. arrange long and arbitrary lines of text into ordered lists that are pleasing to the eye and suggest some.'"]),
    ("Down", "5. Line Down",
     ["BRAILLE LINE: 'sense of priority, even if it is artificial'",
      " VISIBLE: 'sense of priority, even if it is', cursor=1",
      "SPEECH OUTPUT: 'sense of priority, even if it is artificial.'"]),
    ("Down", "6. Line Down",
     ["BRAILLE LINE: '3. look really cool when we carry them around on yellow Post-Itstm.'",
      " VISIBLE: '3. look really cool when we carr', cursor=1",
      "SPEECH OUTPUT: '3. look really cool when we carry them around on yellow Post-Itstm.'"]),
    ("Down", "7. Line Down",
     ["BRAILLE LINE: '4. and that other thing I keep forgetting.'",
      " VISIBLE: '4. and that other thing I keep f', cursor=1",
      "SPEECH OUTPUT: '4. and that other thing I keep forgetting.'"]),
    ("Down", "8. Line Down",
     ["BRAILLE LINE: 'Your ordered lists can start at a strange number, like:'",
      " VISIBLE: 'Your ordered lists can start at ', cursor=1",
      "SPEECH OUTPUT: 'Your ordered lists can start at a strange number, like:'"]),
    ("Down", "9. Line Down",
     ["KNOWN ISSUE: Gecko is not exposing this as a roman numeral.",
      "BRAILLE LINE: '6. And use roman numerals,'",
      " VISIBLE: '6. And use roman numerals,', cursor=1",
      "SPEECH OUTPUT: '6. And use roman numerals,.'"]),
    ("Down", "10. Line Down",
     ["BRAILLE LINE: 'g. You might try using letters as well,'",
      " VISIBLE: 'g. You might try using letters a', cursor=1",
      "SPEECH OUTPUT: 'g. You might try using letters as well,.'"]),
    ("Down", "11. Line Down",
     ["BRAILLE LINE: 'H. Maybe you prefer Big Letters,'",
      " VISIBLE: 'H. Maybe you prefer Big Letters,', cursor=1",
      "SPEECH OUTPUT: 'H. Maybe you prefer Big Letters,.'"]),
    ("Down", "12. Line Down",
     ["KNOWN ISSUE: Gecko is not exposing this as a roman numeral.",
      "BRAILLE LINE: '9. or small roman numerals'",
      " VISIBLE: '9. or small roman numerals', cursor=1",
      "SPEECH OUTPUT: '9. or small roman numerals.'"]),
    ("Up", "13. Line Up",
     ["BRAILLE LINE: 'H. Maybe you prefer Big Letters,'",
      " VISIBLE: 'H. Maybe you prefer Big Letters,', cursor=1",
      "SPEECH OUTPUT: 'H. Maybe you prefer Big Letters,.'"]),
    ("Up", "14. Line Up",
     ["BRAILLE LINE: 'g. You might try using letters as well,'",
      " VISIBLE: 'g. You might try using letters a', cursor=1",
      "SPEECH OUTPUT: 'g. You might try using letters as well,.'"]),
    ("Up", "15. Line Up",
     ["KNOWN ISSUE: Gecko is not exposing this as a roman numeral.",
      "BRAILLE LINE: '6. And use roman numerals,'",
      " VISIBLE: '6. And use roman numerals,', cursor=1",
      "SPEECH OUTPUT: '6. And use roman numerals,.'"]),
    ("Up", "16. Line Up",
     ["BRAILLE LINE: 'Your ordered lists can start at a strange number, like:'",
      " VISIBLE: 'Your ordered lists can start at ', cursor=1",
      "SPEECH OUTPUT: 'Your ordered lists can start at a strange number, like:'"]),
    ("Up", "17. Line Up",
     ["BRAILLE LINE: '4. and that other thing I keep forgetting.'",
      " VISIBLE: '4. and that other thing I keep f', cursor=1",
      "SPEECH OUTPUT: '4. and that other thing I keep forgetting.'"]),
    ("Up", "18. Line Up",
     ["BRAILLE LINE: '3. look really cool when we carry them around on yellow Post-Itstm.'",
      " VISIBLE: '3. look really cool when we carr', cursor=1",
      "SPEECH OUTPUT: '3. look really cool when we carry them around on yellow Post-Itstm.'"]),
    ("Up", "19. Line Up",
     ["BRAILLE LINE: 'sense of priority, even if it is artificial'",
      " VISIBLE: 'sense of priority, even if it is', cursor=1",
      "SPEECH OUTPUT: 'sense of priority, even if it is artificial.'"]),
    ("Up", "20. Line Up",
     ["BRAILLE LINE: '2. arrange long and arbitrary lines of text into ordered lists that are pleasing to the eye and suggest some'",
      " VISIBLE: '2. arrange long and arbitrary li', cursor=1",
      "SPEECH OUTPUT: '2. arrange long and arbitrary lines of text into ordered lists that are pleasing to the eye and suggest some.'"]),
    ("Up", "21. Line Up",
     ["BRAILLE LINE: '1. remember what the heck we are doing each day'",
      " VISIBLE: ' the heck we are doing each day', cursor=32",
      "SPEECH OUTPUT: '1. remember what the heck we are doing each day.'"]),
    ("Up", "22. Line Up",
     ["BRAILLE LINE: 'Lists are not only fun to make, they are fun to use. They help us:'",
      " VISIBLE: 'Lists are not only fun to make, ', cursor=1",
      "SPEECH OUTPUT: 'Lists are not only fun to make, they are fun to use. They help us:'"]),
    ("Up", "23. Line Up",
     ["BRAILLE LINE: 'Welcome to a List of Lists h1'",
      " VISIBLE: 'Welcome to a List of Lists h1', cursor=1",
      "SPEECH OUTPUT: 'Welcome to a List of Lists heading level 1'"]),
]

for key, name, expectations in _STEPS:
    sequence.append(utils.StartRecordingAction())
    sequence.append(KeyComboAction(key))
    sequence.append(utils.AssertPresentationAction(name, expectations))

sequence.append(utils.AssertionSummaryAction())

sequence.start()
| lgpl-2.1 |
CodigoSur/cyclope | cyclope/apps/polls/admin.py | 1 | 1951 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010-2013 Código Sur Sociedad Civil.
# All rights reserved.
#
# This file is part of Cyclope.
#
# Cyclope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Cyclope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
import cyclope.settings as cyc_settings
from django.conf import settings
from models import *
class AnswerInline(admin.StackedInline):
    # Answers are edited inline on their Question's admin page.
    model = Answer
class QuestionAdmin(admin.ModelAdmin):
    """Admin for poll questions; the changelist is always filtered to a
    single poll (defaulting to the most recently created one)."""
    inlines = [AnswerInline]
    list_filter = ('poll',)
    fields = ('text', 'allow_multiple_answers', 'poll')

    def changelist_view(self, request, extra_context=None):
        # questions changelist should only show items from one poll.
        # so we activate the filter to display the last poll questions when no
        # filters have been selected by the user
        if Poll.objects.count():
            last_poll_id = Poll.objects.order_by('-pk')[0].id
            if not request.GET:
                # NOTE(review): this replaces the QueryDict with a plain dict,
                # which the admin filtering appears to tolerate here -- confirm
                # nothing downstream relies on QueryDict-specific behavior.
                request.GET = {u'poll__id__exact': unicode(last_poll_id)}
        return super(QuestionAdmin, self).changelist_view(request, extra_context)
class QuestionInline(admin.StackedInline):
    # Questions are edited inline on their Poll's admin page.
    model = Question
    # No blank extra forms by default.
    extra = 0
class PollAdmin(admin.ModelAdmin):
    # A poll is edited together with its questions.
    inlines = [QuestionInline]
# Wire the admin classes into the default admin site.
admin.site.register(Question, QuestionAdmin)
admin.site.register(Poll, PollAdmin)
| gpl-3.0 |
zhuyongyong/crosswalk-test-suite | apptools/apptools-android-tests/apptools/create_package_abbreviated_command.py | 11 | 10018 | #!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Yun, Liu<yunx.liu@intel.com>
import unittest
import os
import comm
import zipfile
import shutil
from xml.etree import ElementTree
import json
class TestCrosswalkApptoolsFunctions(unittest.TestCase):
    """Tests for abbreviated crosswalk-pkg command-line options.

    Each test builds an Android package inside a scratch directory
    (org.xwalk.test) and verifies which .apk files were produced.
    Expected output depends on comm.MODE:
      * embedded/default mode  -> one x86 apk and one arm apk (2 total)
      * " --android-shared"    -> a single "shared" apk
    The duplicated setup/verification logic of the original tests is
    factored into the two private helpers below.
    """

    def _prepare_workspace(self):
        # Recreate an empty org.xwalk.test scratch directory and cd into it.
        comm.setUp()
        os.chdir(comm.XwalkPath)
        comm.clear("org.xwalk.test")
        os.mkdir("org.xwalk.test")
        os.chdir('org.xwalk.test')

    def _assert_expected_apks(self):
        # Verify the apks generated in the current directory match the
        # mode/architecture expectations described in the class docstring.
        apks = os.listdir(os.getcwd())
        apkLength = 0
        if comm.MODE != " --android-shared":
            for apk in apks:
                # x86 and arm are counted independently, as in the original.
                if apk.endswith(".apk") and "x86" in apk:
                    if comm.BIT == "64":
                        self.assertIn("64", apk)
                    apkLength = apkLength + 1
                if apk.endswith(".apk") and "arm" in apk:
                    if comm.BIT == "64":
                        self.assertIn("64", apk)
                    apkLength = apkLength + 1
            self.assertEquals(apkLength, 2)
        else:
            for apk in apks:
                if apk.endswith(".apk") and "shared" in apk:
                    apkLength = apkLength + 1
            self.assertEquals(apkLength, 1)

    def test_create_pkg_basic(self):
        # -p / -a / -t abbreviations of --platforms / --android / --targets.
        self._prepare_workspace()
        cmd = comm.HOST_PREFIX + comm.PackTools + \
            "crosswalk-pkg -p android -a " + comm.ANDROID_MODE + ' -t "' + comm.BIT + '" ' + comm.ConstPath + "/../testapp/create_package_basic/"
        (return_code, output) = comm.getstatusoutput(cmd)
        version = comm.check_crosswalk_version(self, "stable")
        self._assert_expected_apks()
        comm.run(self)
        comm.clear("org.xwalk.test")
        self.assertEquals(return_code, 0)
        self.assertIn("Loading 'android' platform backend", output[0])
        # Must not fall through to the Windows (WiX candle/light) backend.
        self.assertNotIn("candle", output[0])
        self.assertNotIn("light", output[0])
        self.assertIn(version, output[0])

    def test_create_pkg_canary(self):
        # -c abbreviation of --channel, requesting the canary channel.
        self._prepare_workspace()
        cmd = comm.HOST_PREFIX + comm.PackTools + \
            "crosswalk-pkg --platforms=android --android=" + comm.ANDROID_MODE + ' -c canary -t "' + comm.BIT + '" ' + comm.ConstPath + "/../testapp/create_package_basic/"
        (return_code, output) = comm.getstatusoutput(cmd)
        version = comm.check_crosswalk_version(self, "canary")
        self._assert_expected_apks()
        comm.run(self)
        comm.clear("org.xwalk.test")
        self.assertEquals(return_code, 0)
        self.assertIn(version, output[0])

    def test_create_pkg_manifest(self):
        # -m abbreviation of --manifest: must (re)generate manifest.json with
        # the requested package id.
        comm.setUp()
        os.chdir(comm.XwalkPath)
        comm.clear("org.xwalk.test")
        os.mkdir("org.xwalk.test")
        if os.path.exists(comm.ConstPath + "/../testapp/start_url/manifest.json"):
            os.remove(comm.ConstPath + "/../testapp/start_url/manifest.json")
        os.chdir('org.xwalk.test')
        cmd = comm.HOST_PREFIX + comm.PackTools + \
            "crosswalk-pkg --platforms=android --android=" + comm.ANDROID_MODE + " --crosswalk=" + comm.crosswalkzip + " -m org.xwalk.test " + comm.ConstPath + "/../testapp/start_url/"
        return_code = os.system(cmd)
        with open(comm.ConstPath + "/../testapp/start_url/manifest.json") as json_file:
            data = json.load(json_file)
        self._assert_expected_apks()
        comm.run(self)
        comm.clear("org.xwalk.test")
        os.remove(comm.ConstPath + "/../testapp/start_url/manifest.json")
        self.assertEquals(return_code, 0)
        self.assertEquals(data['xwalk_package_id'].strip(os.linesep), "org.xwalk.test")

    def test_create_pkg_keep(self):
        # -k abbreviation of --keep: the temporary project dir must survive.
        self._prepare_workspace()
        cmd = comm.HOST_PREFIX + comm.PackTools + \
            "crosswalk-pkg --platforms=android --android=" + comm.ANDROID_MODE + " -k --crosswalk=" + comm.crosswalkzip + " " + comm.ConstPath + "/../testapp/create_package_basic/"
        (return_code, output) = comm.getstatusoutput(cmd)
        self._assert_expected_apks()
        # The kept project path is the last token of the tool's final line.
        projectDir = output[0].split(" * " + os.linesep)[-1].split(' ')[-1].strip(os.linesep)
        comm.run(self)
        comm.clear("org.xwalk.test")
        self.assertEquals(return_code, 0)
        self.assertIn("app", os.listdir(projectDir))
        self.assertIn("prj", os.listdir(projectDir))

    def test_create_pkg_release(self):
        # -r abbreviation of --release: build must still succeed.
        self._prepare_workspace()
        cmd = comm.HOST_PREFIX + comm.PackTools + \
            "crosswalk-pkg --platforms=android --android=" + comm.ANDROID_MODE + " -r true --crosswalk=" + comm.crosswalkzip + " " + comm.ConstPath + "/../testapp/create_package_basic/"
        return_code = os.system(cmd)
        self._assert_expected_apks()
        comm.run(self)
        comm.clear("org.xwalk.test")
        self.assertEquals(return_code, 0)
# Allow running this test module directly (outside the test runner harness).
if __name__ == '__main__':
    unittest.main()
| bsd-3-clause |
DirectXMan12/nova-hacking | nova/api/openstack/compute/contrib/availability_zone.py | 3 | 7124 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License
from oslo.config import cfg
from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import availability_zones
from nova import db
from nova import servicegroup
CONF = cfg.CONF

# Policy authorizers for the two actions exposed by this extension.
authorize_list = extensions.extension_authorizer('compute',
                                                 'availability_zone:list')
authorize_detail = extensions.extension_authorizer('compute',
                                                   'availability_zone:detail')
def make_availability_zone(elem):
    """Attach the availabilityZone XML sub-template structure to *elem*.

    Builds the nested zoneState/hosts/host/services/service/serviceState
    element tree used to serialize availability-zone responses.
    """
    elem.set('name', 'zoneName')
    state = xmlutil.SubTemplateElement(elem, 'zoneState', selector='zoneState')
    state.set('available')

    hosts = xmlutil.SubTemplateElement(elem, 'hosts', selector='hosts')
    host = xmlutil.SubTemplateElement(hosts, 'host',
                                      selector=xmlutil.get_items)
    host.set('name', 0)

    services = xmlutil.SubTemplateElement(host, 'services', selector=1)
    service = xmlutil.SubTemplateElement(services, 'service',
                                         selector=xmlutil.get_items)
    service.set('name', 0)

    service_state = xmlutil.SubTemplateElement(service, 'serviceState',
                                               selector=1)
    service_state.set('available')
    service_state.set('active')
    service_state.set('updated_at')

    # Attach metadata node
    elem.append(common.MetadataTemplate())
class AvailabilityZonesTemplate(xmlutil.TemplateBuilder):
    """XML serialization template for availability-zone responses."""

    def construct(self):
        # Root element wraps one availabilityZone entry per info item.
        top = xmlutil.TemplateElement('availabilityZones')
        zone = xmlutil.SubTemplateElement(top, 'availabilityZone',
                                          selector='availabilityZoneInfo')
        make_availability_zone(zone)
        nsmap = {Availability_zone.alias: Availability_zone.namespace}
        return xmlutil.MasterTemplate(top, 1, nsmap=nsmap)
class AvailabilityZoneController(wsgi.Controller):
    """The Availability Zone API controller for the OpenStack API."""

    def __init__(self):
        super(AvailabilityZoneController, self).__init__()
        self.servicegroup_api = servicegroup.API()

    def _get_filtered_availability_zones(self, zones, is_available):
        """Return summary dicts for *zones* with the given availability flag.

        The internal service zone is hidden; 'hosts' is always None in the
        summary view.
        """
        result = []
        for zone in zones:
            # Hide internal_service_availability_zone
            if zone == CONF.internal_service_availability_zone:
                continue
            result.append({'zoneName': zone,
                           'zoneState': {'available': is_available},
                           "hosts": None})
        return result

    def _describe_availability_zones(self, context, **kwargs):
        """Summary view: zone names plus availability, no host detail."""
        ctxt = context.elevated()
        available_zones, not_available_zones = \
            availability_zones.get_availability_zones(ctxt)
        filtered_available_zones = \
            self._get_filtered_availability_zones(available_zones, True)
        filtered_not_available_zones = \
            self._get_filtered_availability_zones(not_available_zones, False)
        return {'availabilityZoneInfo': filtered_available_zones +
                filtered_not_available_zones}

    def _describe_availability_zones_verbose(self, context, **kwargs):
        """Detailed view: per-zone host map with per-service liveness."""
        ctxt = context.elevated()
        available_zones, not_available_zones = \
            availability_zones.get_availability_zones(ctxt)
        # Available services
        # NOTE(review): the DB queries below use the unelevated *context*
        # while the zone lookup above used the elevated *ctxt* — confirm
        # this asymmetry is intentional.
        enabled_services = db.service_get_all(context, False)
        enabled_services = availability_zones.set_availability_zones(context,
                enabled_services)
        zone_hosts = {}      # zone name -> list of host names in that zone
        host_services = {}   # zone+host key -> list of service records
        for service in enabled_services:
            zone_hosts.setdefault(service['availability_zone'], [])
            if service['host'] not in zone_hosts[service['availability_zone']]:
                zone_hosts[service['availability_zone']].append(
                    service['host'])
            host_services.setdefault(service['availability_zone'] +
                    service['host'], [])
            host_services[service['availability_zone'] + service['host']].\
                    append(service)
        result = []
        for zone in available_zones:
            hosts = {}
            for host in zone_hosts[zone]:
                hosts[host] = {}
                for service in host_services[zone + host]:
                    alive = self.servicegroup_api.service_is_up(service)
                    # 'active' inverts the DB 'disabled' flag
                    # (True != x is equivalent to "not x" for 0/1 values).
                    hosts[host][service['binary']] = {'available': alive,
                                      'active': True != service['disabled'],
                                      'updated_at': service['updated_at']}
            result.append({'zoneName': zone,
                           'zoneState': {'available': True},
                           "hosts": hosts})
        for zone in not_available_zones:
            result.append({'zoneName': zone,
                           'zoneState': {'available': False},
                           "hosts": None})
        return {'availabilityZoneInfo': result}

    @wsgi.serializers(xml=AvailabilityZonesTemplate)
    def index(self, req):
        """Returns a summary list of availability zone."""
        context = req.environ['nova.context']
        authorize_list(context)
        return self._describe_availability_zones(context)

    @wsgi.serializers(xml=AvailabilityZonesTemplate)
    def detail(self, req):
        """Returns a detailed list of availability zone."""
        context = req.environ['nova.context']
        authorize_detail(context)
        return self._describe_availability_zones_verbose(context)
class Availability_zone(extensions.ExtensionDescriptor):
    """1. Add availability_zone to the Create Server v1.1 API.

    2. Add availability zones describing.
    """

    name = "AvailabilityZone"
    alias = "os-availability-zone"
    namespace = ("http://docs.openstack.org/compute/ext/"
                 "availabilityzone/api/v1.1")
    updated = "2012-12-21T00:00:00+00:00"

    def get_resources(self):
        """Expose /os-availability-zone with an additional 'detail' GET."""
        resources = []
        res = extensions.ResourceExtension('os-availability-zone',
                                           AvailabilityZoneController(),
                                           collection_actions={'detail': 'GET'})
        resources.append(res)
        return resources
| apache-2.0 |
pombredanne/cogen | setup.py | 4 | 2337 | #!/usr/bin/python
# Bootstrap setuptools if it is not installed yet.
try:
    import setuptools
except ImportError:
    import ez_setup
    ez_setup.use_setuptools()

from setuptools import setup, find_packages
import sys, os

from cogen import __version__ as version

setup(
    name='cogen',
    version=version,
    description='''
    Coroutines and asynchronous I/O using enhanced generators
    from python 2.5, including a enhanced WSGI server.
    ''',
    long_description=file('README.txt').read(),
    author='Maries Ionel Cristian',
    author_email='ionel.mc@gmail.com',
    url='http://code.google.com/p/cogen/',
    packages=['cogen', 'cogen.core', 'cogen.web', 'cogen.core.proactors', 'cogen.core.proactors.ctypes_iocp_impl'],
    zip_safe=True,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: MIT License',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX :: Linux',
        'Operating System :: POSIX :: BSD',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
        'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
        'Topic :: System :: Networking',
    ],
    # Paste/apydia plugin hook registrations.
    entry_points={
        'paste.server_factory': [
            'wsgi=cogen.web.wsgi:server_factory',
            'http=cogen.web.wsgi:server_factory',
        ],
        'paste.filter_app_factory': [
            'syncinput=cogen.web.async:SynchronousInputMiddleware',
            'lazysr=cogen.web.async:LazyStartResponseMiddleware'
        ],
        'apydia.themes': [
            'cogen=docgen.theme',
            'cogenwiki=docgen.wikitheme',
        ],
        'apydia.docrenderers': [
            'wiki=docgen.wikirender:WikiTextRenderer'
        ]
    },
    # Platform-conditional requirements: kqueue/epoll backports only on
    # Python < 2.6 (BSD/macOS and Linux respectively); sendfile on Linux/BSD.
    install_requires = \
        (((["py-kqueue>=2.0"] if ('bsd' in sys.platform) or ('darwin' in sys.platform) else []) +
        (["py-epoll>=1.2"] if 'linux' in sys.platform else [])) \
        if (sys.version_info[0] == 2 and sys.version_info[1] < 6) else []) +\
        (["py-sendfile>=1.2.2"] if ('linux' in sys.platform) or ('bsd' in sys.platform) else []),
    test_suite='tests'
)
timothyparez/PyBitmessage | src/bitmessagecli.py | 2 | 72448 | #!/usr/bin/python2.7
# Created by Adam Melton (.dok) referenceing https://bitmessage.org/wiki/API_Reference for API documentation
# Distributed under the MIT/X11 software license. See http://www.opensource.org/licenses/mit-license.php.
# This is an example of a daemon client for PyBitmessage 0.4.2, by .dok (Version 0.3.0)
import ConfigParser
import xmlrpclib
import datetime
import hashlib
import getopt
import imghdr
import ntpath
import json
import time
import sys
import os
api = ''  # xmlrpclib ServerProxy URL string; populated via apiData() in main
keysName = 'keys.dat'  # file name used when creating a local keys.dat
keysPath = 'keys.dat'  # resolved path to keys.dat (may move to appdata dir)
usrPrompt = 0  # 0 = First Start, 1 = prompt, 2 = no prompt if the program is starting up
knownAddresses = dict()  # cache of address -> label mappings
def userInput(message): #Checks input for exit or quit. Also formats for input, etc
global usrPrompt
print '\n' + message
uInput = raw_input('> ')
if (uInput.lower() == 'exit'): #Returns the user to the main menu
usrPrompt = 1
main()
elif (uInput.lower() == 'quit'): #Quits the program
print '\n Bye\n'
sys.exit()
os.exit()
else:
return uInput
def restartBmNotify():
    """Warn the user that a locally running Bitmessage instance must be
    restarted before configuration changes take effect."""
    print '\n *******************************************************************'
    print ' WARNING: If Bitmessage is running locally, you must restart it now.'
    print ' *******************************************************************\n'
def safeConfigGetBoolean(section, field):
    """Return the boolean value of [section] *field* from keys.dat.

    Returns False instead of raising when the section/option is missing or
    the value is not a valid boolean string.
    """
    global keysPath
    config = ConfigParser.SafeConfigParser()
    config.read(keysPath)
    try:
        return config.getboolean(section, field)
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError,
            ValueError):
        # Fixed: narrowed from a bare except that would also have hidden
        # unrelated bugs (e.g. typos raising NameError).
        return False
#Begin keys.dat interactions
def lookupAppdataFolder(): #gets the appropriate folders for the .dat files depending on the OS. Taken from bitmessagemain.py
APPNAME = "PyBitmessage"
from os import path, environ
if sys.platform == 'darwin':
if "HOME" in environ:
dataFolder = path.join(os.environ["HOME"], "Library/Application support/", APPNAME) + '/'
else:
print ' Could not find home folder, please report this message and your OS X version to the Daemon Github.'
os.exit()
elif 'win32' in sys.platform or 'win64' in sys.platform:
dataFolder = path.join(environ['APPDATA'], APPNAME) + '\\'
else:
dataFolder = path.expanduser(path.join("~", ".config/" + APPNAME + "/"))
return dataFolder
def configInit():
global keysName
config = ConfigParser.SafeConfigParser()
config.add_section('bitmessagesettings')
config.set('bitmessagesettings', 'port', '8444') #Sets the bitmessage port to stop the warning about the api not properly being setup. This is in the event that the keys.dat is in a different directory or is created locally to connect to a machine remotely.
config.set('bitmessagesettings','apienabled','true') #Sets apienabled to true in keys.dat
with open(keysName, 'wb') as configfile:
config.write(configfile)
print '\n ' + str(keysName) + ' Initalized in the same directory as daemon.py'
print ' You will now need to configure the ' + str(keysName) + ' file.\n'
def apiInit(apiEnabled):
    """Ensure the API section of keys.dat is enabled and fully configured.

    *apiEnabled* is the current state read from keys.dat (True/False) or
    any other value to mean "API settings absent". Returns True once the
    API is usable; otherwise walks the user through enabling/configuring
    it (changes require a Bitmessage restart) and falls back to main().
    """
    global keysPath
    global usrPrompt
    config = ConfigParser.SafeConfigParser()
    config.read(keysPath)

    if (apiEnabled == False):  # API information there but the api is disabled.
        uInput = userInput("The API is not enabled. Would you like to do that now, (Y)es or (N)o?").lower()
        if uInput == "y":
            config.set('bitmessagesettings', 'apienabled', 'true')  # Sets apienabled to true in keys.dat
            with open(keysPath, 'wb') as configfile:
                config.write(configfile)
            print 'Done'
            restartBmNotify()
            return True
        elif uInput == "n":
            print ' \n************************************************************'
            print ' Daemon will not work when the API is disabled. '
            print ' Please refer to the Bitmessage Wiki on how to setup the API.'
            print ' ************************************************************\n'
            usrPrompt = 1
            main()
        else:
            print '\n Invalid Entry\n'
            usrPrompt = 1
            main()
    elif (apiEnabled == True):  # API correctly setup
        # Everything is as it should be
        return True
    else:  # API information was not present.
        print '\n ' + str(keysPath) + ' not properly configured!\n'
        uInput = userInput("Would you like to do this now, (Y)es or (N)o?").lower()
        if uInput == "y":  # User said yes, initalize the api by writing these values to the keys.dat file
            print ' '
            apiUsr = userInput("API Username")
            apiPwd = userInput("API Password")
            apiInterface = userInput("API Interface. (127.0.0.1)")
            apiPort = userInput("API Port")
            apiEnabled = userInput("API Enabled? (True) or (False)").lower()
            daemon = userInput("Daemon mode Enabled? (True) or (False)").lower()
            if (daemon != 'true' and daemon != 'false'):
                print '\n Invalid Entry for Daemon.\n'
                uInput = 1
                main()
            print ' -----------------------------------\n'
            # The port entry stops the "API not properly set up" warning when
            # keys.dat is elsewhere or targets a remote machine.
            config.set('bitmessagesettings', 'port', '8444')
            config.set('bitmessagesettings', 'apienabled', 'true')
            config.set('bitmessagesettings', 'apiport', apiPort)
            config.set('bitmessagesettings', 'apiinterface', '127.0.0.1')
            config.set('bitmessagesettings', 'apiusername', apiUsr)
            config.set('bitmessagesettings', 'apipassword', apiPwd)
            config.set('bitmessagesettings', 'daemon', daemon)
            with open(keysPath, 'wb') as configfile:
                config.write(configfile)
            print '\n Finished configuring the keys.dat file with API information.\n'
            restartBmNotify()
            return True
        elif uInput == "n":
            print '\n ***********************************************************'
            print ' Please refer to the Bitmessage Wiki on how to setup the API.'
            print ' ***********************************************************\n'
            usrPrompt = 1
            main()
        else:
            print ' \nInvalid entry\n'
            usrPrompt = 1
            main()
def apiData():
    """Locate keys.dat, verify its API section, and return the XML-RPC URL.

    Looks for keys.dat first in the program directory, then in the OS
    appdata folder; offers to create one if neither exists. Returns the
    "http://user:pass@host:port/" connection string on success.
    """
    global keysName
    global keysPath
    global usrPrompt

    config = ConfigParser.SafeConfigParser()
    config.read(keysPath)  # First try to load the config file (the keys.dat file) from the program directory
    try:
        config.get('bitmessagesettings', 'port')
        appDataFolder = ''
    except:
        # Could not load the keys.dat file in the program directory.
        # Perhaps it is in the appdata directory.
        appDataFolder = lookupAppdataFolder()
        keysPath = appDataFolder + keysPath
        config = ConfigParser.SafeConfigParser()
        config.read(keysPath)
        try:
            config.get('bitmessagesettings', 'port')
        except:
            # keys.dat was not there either, something is wrong.
            print '\n ******************************************************************'
            print ' There was a problem trying to access the Bitmessage keys.dat file'
            print ' or keys.dat is not set up correctly'
            print ' Make sure that daemon is in the same directory as Bitmessage. '
            print ' ******************************************************************\n'
            uInput = userInput("Would you like to create a keys.dat in the local directory, (Y)es or (N)o?").lower()
            if (uInput == "y" or uInput == "yes"):
                configInit()
                keysPath = keysName
                usrPrompt = 0
                main()
            elif (uInput == "n" or uInput == "no"):
                print '\n Trying Again.\n'
                usrPrompt = 0
                main()
            else:
                print '\n Invalid Input.\n'
                usrPrompt = 1
                main()
    try:
        # Checks to make sure that everyting is configured correctly.
        # Excluding apiEnabled, it is checked after.
        config.get('bitmessagesettings', 'apiport')
        config.get('bitmessagesettings', 'apiinterface')
        config.get('bitmessagesettings', 'apiusername')
        config.get('bitmessagesettings', 'apipassword')
    except:
        apiInit("")  # Initalize the keys.dat file with API information
    # keys.dat file was found or appropriately configured, allow information retrieval
    # if false it will prompt the user, if true it will return true
    apiEnabled = apiInit(safeConfigGetBoolean('bitmessagesettings', 'apienabled'))
    config.read(keysPath)  # read again since changes have been made
    apiPort = int(config.get('bitmessagesettings', 'apiport'))
    apiInterface = config.get('bitmessagesettings', 'apiinterface')
    apiUsername = config.get('bitmessagesettings', 'apiusername')
    apiPassword = config.get('bitmessagesettings', 'apipassword')
    print '\n API data successfully imported.\n'
    # Build the api credentials
    return "http://" + apiUsername + ":" + apiPassword + "@" + apiInterface + ":" + str(apiPort) + "/"
#End keys.dat interactions
def apiTest():
    """Return True when the XML-RPC API is reachable and sane (add(2,3)==5)."""
    try:
        result = api.add(2, 3)
    except Exception:
        # Narrowed from a bare except; any transport/API failure means the
        # connection is down, which is all this probe needs to report.
        return False
    # Fixed idiom: return the comparison directly instead of an
    # if/else returning True/False.
    return result == 5
def bmSettings():
    """Display keys.dat settings and let the user modify them interactively.

    NOTE(review): keysPath is reset to the local 'keys.dat' here, discarding
    any appdata path resolved earlier by apiData() — confirm intentional.
    """
    global keysPath
    global usrPrompt
    config = ConfigParser.SafeConfigParser()
    keysPath = 'keys.dat'
    config.read(keysPath)  # Read the keys.dat
    try:
        port = config.get('bitmessagesettings', 'port')
    except:
        print '\n File not found.\n'
        usrPrompt = 0
        main()

    # Read every known setting; booleans via the safe helper so a missing
    # entry shows as False instead of crashing.
    startonlogon = safeConfigGetBoolean('bitmessagesettings', 'startonlogon')
    minimizetotray = safeConfigGetBoolean('bitmessagesettings', 'minimizetotray')
    showtraynotifications = safeConfigGetBoolean('bitmessagesettings', 'showtraynotifications')
    startintray = safeConfigGetBoolean('bitmessagesettings', 'startintray')
    defaultnoncetrialsperbyte = config.get('bitmessagesettings', 'defaultnoncetrialsperbyte')
    defaultpayloadlengthextrabytes = config.get('bitmessagesettings', 'defaultpayloadlengthextrabytes')
    daemon = safeConfigGetBoolean('bitmessagesettings', 'daemon')

    socksproxytype = config.get('bitmessagesettings', 'socksproxytype')
    sockshostname = config.get('bitmessagesettings', 'sockshostname')
    socksport = config.get('bitmessagesettings', 'socksport')
    socksauthentication = safeConfigGetBoolean('bitmessagesettings', 'socksauthentication')
    socksusername = config.get('bitmessagesettings', 'socksusername')
    sockspassword = config.get('bitmessagesettings', 'sockspassword')

    print '\n -----------------------------------'
    print ' | Current Bitmessage Settings |'
    print ' -----------------------------------'
    print ' port = ' + port
    print ' startonlogon = ' + str(startonlogon)
    print ' minimizetotray = ' + str(minimizetotray)
    print ' showtraynotifications = ' + str(showtraynotifications)
    print ' startintray = ' + str(startintray)
    print ' defaultnoncetrialsperbyte = ' + defaultnoncetrialsperbyte
    print ' defaultpayloadlengthextrabytes = ' + defaultpayloadlengthextrabytes
    print ' daemon = ' + str(daemon)
    print '\n ------------------------------------'
    print ' | Current Connection Settings |'
    print ' -----------------------------------'
    print ' socksproxytype = ' + socksproxytype
    print ' sockshostname = ' + sockshostname
    print ' socksport = ' + socksport
    print ' socksauthentication = ' + str(socksauthentication)
    print ' socksusername = ' + socksusername
    print ' sockspassword = ' + sockspassword
    print ' '

    uInput = userInput("Would you like to modify any of these settings, (Y)es or (N)o?").lower()
    if uInput == "y":
        # Loops if they mistype the setting name; they can exit the loop
        # with 'exit' (handled inside userInput).
        while True:
            invalidInput = False
            uInput = userInput("What setting would you like to modify?").lower()
            print ' '
            if uInput == "port":
                print ' Current port number: ' + port
                uInput = userInput("Enter the new port number.")
                config.set('bitmessagesettings', 'port', str(uInput))
            elif uInput == "startonlogon":
                print ' Current status: ' + str(startonlogon)
                uInput = userInput("Enter the new status.")
                config.set('bitmessagesettings', 'startonlogon', str(uInput))
            elif uInput == "minimizetotray":
                print ' Current status: ' + str(minimizetotray)
                uInput = userInput("Enter the new status.")
                config.set('bitmessagesettings', 'minimizetotray', str(uInput))
            elif uInput == "showtraynotifications":
                print ' Current status: ' + str(showtraynotifications)
                uInput = userInput("Enter the new status.")
                config.set('bitmessagesettings', 'showtraynotifications', str(uInput))
            elif uInput == "startintray":
                print ' Current status: ' + str(startintray)
                uInput = userInput("Enter the new status.")
                config.set('bitmessagesettings', 'startintray', str(uInput))
            elif uInput == "defaultnoncetrialsperbyte":
                print ' Current default nonce trials per byte: ' + defaultnoncetrialsperbyte
                uInput = userInput("Enter the new defaultnoncetrialsperbyte.")
                config.set('bitmessagesettings', 'defaultnoncetrialsperbyte', str(uInput))
            elif uInput == "defaultpayloadlengthextrabytes":
                print ' Current default payload length extra bytes: ' + defaultpayloadlengthextrabytes
                uInput = userInput("Enter the new defaultpayloadlengthextrabytes.")
                config.set('bitmessagesettings', 'defaultpayloadlengthextrabytes', str(uInput))
            elif uInput == "daemon":
                print ' Current status: ' + str(daemon)
                uInput = userInput("Enter the new status.").lower()
                config.set('bitmessagesettings', 'daemon', str(uInput))
            elif uInput == "socksproxytype":
                print ' Current socks proxy type: ' + socksproxytype
                print "Possibilities: 'none', 'SOCKS4a', 'SOCKS5'."
                uInput = userInput("Enter the new socksproxytype.")
                config.set('bitmessagesettings', 'socksproxytype', str(uInput))
            elif uInput == "sockshostname":
                print ' Current socks host name: ' + sockshostname
                uInput = userInput("Enter the new sockshostname.")
                config.set('bitmessagesettings', 'sockshostname', str(uInput))
            elif uInput == "socksport":
                print ' Current socks port number: ' + socksport
                uInput = userInput("Enter the new socksport.")
                config.set('bitmessagesettings', 'socksport', str(uInput))
            elif uInput == "socksauthentication":
                print ' Current status: ' + str(socksauthentication)
                uInput = userInput("Enter the new status.")
                config.set('bitmessagesettings', 'socksauthentication', str(uInput))
            elif uInput == "socksusername":
                print ' Current socks username: ' + socksusername
                uInput = userInput("Enter the new socksusername.")
                config.set('bitmessagesettings', 'socksusername', str(uInput))
            elif uInput == "sockspassword":
                print ' Current socks password: ' + sockspassword
                uInput = userInput("Enter the new password.")
                config.set('bitmessagesettings', 'sockspassword', str(uInput))
            else:
                print "\n Invalid input. Please try again.\n"
                invalidInput = True
            if invalidInput != True:  # don't prompt if they made a mistake.
                uInput = userInput("Would you like to change another setting, (Y)es or (N)o?").lower()
                if uInput != "y":
                    print '\n Changes Made.\n'
                    with open(keysPath, 'wb') as configfile:
                        config.write(configfile)
                    restartBmNotify()
                    break
    elif uInput == "n":
        usrPrompt = 1
        main()
    else:
        print "Invalid input."
        usrPrompt = 1
        main()
def validAddress(address):
    """Return True if the Bitmessage API reports *address* decodes OK."""
    address_information = api.decodeAddress(address)
    # SECURITY NOTE: eval() of the API response executes arbitrary
    # expressions; if the API endpoint is ever untrusted this should be
    # replaced with a proper parser (e.g. json.loads). Left as-is to
    # preserve compatibility with the API's Python-literal responses.
    address_information = eval(address_information)
    # Fixed idiom: return the membership test directly instead of an
    # if/else returning True/False.
    return 'success' in str(address_information.get('status')).lower()
def getAddress(passphrase, vNumber, sNumber):
    """Return the deterministic address for a passphrase/version/stream."""
    encoded = passphrase.encode('base64')  # the API expects base64 input
    return api.getDeterministicAddress(encoded, vNumber, sNumber)
def subscribe():
    """Interactively subscribe to a broadcast address.

    Loops until a valid address is entered; 'c' cancels back to the menu.
    """
    global usrPrompt
    while True:
        address = userInput("What address would you like to subscribe to?")
        if (address == "c"):
            usrPrompt = 1
            print ' '
            main()
        elif (validAddress(address) == False):
            print '\n Invalid. "c" to cancel. Please try again.\n'
        else:
            break
    label = userInput("Enter a label for this address.")
    label = label.encode('base64')  # the API expects base64-encoded labels
    api.addSubscription(address, label)
    print ('\n You are now subscribed to: ' + address + '\n')
def unsubscribe():
global usrPrompt
while True:
address = userInput("What address would you like to unsubscribe from?")
if (address == "c"):
usrPrompt = 1
print ' '
main()
elif (validAddress(address)== False):
print '\n Invalid. "c" to cancel. Please try again.\n'
else:
break
uInput = userInput("Are you sure, (Y)es or (N)o?").lower()
api.deleteSubscription(address)
print ('\n You are now unsubscribed from: ' + address + '\n')
def listSubscriptions():
global usrPrompt
#jsonAddresses = json.loads(api.listSubscriptions())
#numAddresses = len(jsonAddresses['addresses']) #Number of addresses
print '\nLabel, Address, Enabled\n'
try:
print api.listSubscriptions()
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
'''for addNum in range (0, numAddresses): #processes all of the addresses and lists them out
label = jsonAddresses['addresses'][addNum]['label']
address = jsonAddresses['addresses'][addNum]['address']
enabled = jsonAddresses['addresses'][addNum]['enabled']
print label, address, enabled
'''
print ' '
def createChan():
global usrPrompt
password = userInput("Enter channel name")
password = password.encode('base64')
try:
print api.createChan(password)
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
def joinChan():
    """Prompt for an existing channel's address and name, then join it.

    Loops until a valid address is entered; 'c' cancels back to the menu.
    """
    global usrPrompt
    while True:
        address = userInput("Enter channel address")
        if (address == "c"):
            usrPrompt = 1
            print ' '
            main()
        elif (validAddress(address) == False):
            print '\n Invalid. "c" to cancel. Please try again.\n'
        else:
            break
    password = userInput("Enter channel name")
    password = password.encode('base64')  # the API expects base64 input
    try:
        print api.joinChan(password, address)
    except:
        print '\n Connection Error\n'
        usrPrompt = 0
        main()
def leaveChan():
    """Prompt for a channel address and leave that channel via the API.

    Loops until a valid address is entered; 'c' cancels back to the menu.
    """
    global usrPrompt
    while True:
        address = userInput("Enter channel address")
        if (address == "c"):
            usrPrompt = 1
            print ' '
            main()
        elif (validAddress(address) == False):
            print '\n Invalid. "c" to cancel. Please try again.\n'
        else:
            break
    try:
        print api.leaveChan(address)
    except:
        print '\n Connection Error\n'
        usrPrompt = 0
        main()
def listAdd():
    """Print an ASCII table of all addresses with label/stream/enabled."""
    global usrPrompt
    try:
        jsonAddresses = json.loads(api.listAddresses())
        numAddresses = len(jsonAddresses['addresses'])  # Number of addresses
    except:
        print '\n Connection Error\n'
        usrPrompt = 0
        main()
    print '\n --------------------------------------------------------------------------'
    print ' | # | Label | Address |S#|Enabled|'
    print ' |---|-------------------|-------------------------------------|--|-------|'
    # Processes all of the addresses and lists them out, one row each.
    for addNum in range(0, numAddresses):
        label = str(jsonAddresses['addresses'][addNum]['label'])
        address = str(jsonAddresses['addresses'][addNum]['address'])
        stream = str(jsonAddresses['addresses'][addNum]['stream'])
        enabled = str(jsonAddresses['addresses'][addNum]['enabled'])
        if (len(label) > 19):
            # Truncate long labels so the table columns stay aligned.
            label = label[:16] + '...'
        print ' |' + str(addNum).ljust(3) + '|' + label.ljust(19) + '|' + address.ljust(37) + '|' + stream.ljust(1), '|' + enabled.ljust(7) + '|'
    print ' --------------------------------------------------------------------------\n'
def genAdd(lbl,deterministic, passphrase, numOfAdd, addVNum, streamNum, ripe): #Generate address
global usrPrompt
if deterministic == False: #Generates a new address with the user defined label. non-deterministic
addressLabel = lbl.encode('base64')
try:
generatedAddress = api.createRandomAddress(addressLabel)
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
return generatedAddress
elif deterministic == True: #Generates a new deterministic address with the user inputs.
passphrase = passphrase.encode('base64')
try:
generatedAddress = api.createDeterministicAddresses(passphrase, numOfAdd, addVNum, streamNum, ripe)
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
return generatedAddress
else:
return 'Entry Error'
def delMilAddr(): #Generate address
global usrPrompt
try:
response = api.listAddresses2()
# if api is too old just return then fail
if "API Error 0020" in response: return
addresses = json.loads(response)
for entry in addresses['addresses']:
if entry['label'].decode('base64')[:6] == "random":
api.deleteAddress(entry['address'])
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
def genMilAddr(): #Generate address
global usrPrompt
maxn = 0
try:
response = api.listAddresses2()
if "API Error 0020" in response: return
addresses = json.loads(response)
for entry in addresses['addresses']:
if entry['label'].decode('base64')[:6] == "random":
newn = int(entry['label'].decode('base64')[6:])
if maxn < newn:
maxn = newn
except:
print "\n Some error\n"
print "\n Starting at " + str(maxn) + "\n"
for i in range(maxn, 10000):
lbl = "random" + str(i)
addressLabel = lbl.encode('base64')
try:
generatedAddress = api.createRandomAddress(addressLabel)
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
def saveFile(fileName, fileData): #Allows attachments and messages/broadcats to be saved
#This section finds all invalid characters and replaces them with ~
fileName = fileName.replace(" ", "")
fileName = fileName.replace("/", "~")
#fileName = fileName.replace("\\", "~") How do I get this to work...?
fileName = fileName.replace(":", "~")
fileName = fileName.replace("*", "~")
fileName = fileName.replace("?", "~")
fileName = fileName.replace('"', "~")
fileName = fileName.replace("<", "~")
fileName = fileName.replace(">", "~")
fileName = fileName.replace("|", "~")
directory = 'attachments'
if not os.path.exists(directory):
os.makedirs(directory)
filePath = directory +'/'+ fileName
'''try: #Checks if file already exists
with open(filePath):
print 'File Already Exists'
return
except IOError: pass'''
f = open(filePath, 'wb+') #Begin saving to file
f.write(fileData.decode("base64"))
f.close
print '\n Successfully saved '+ filePath + '\n'
def attachment(): #Allows users to attach a file to their message or broadcast
theAttachmentS = ''
while True:
isImage = False
theAttachment = ''
while True:#loops until valid path is entered
filePath = userInput('\nPlease enter the path to the attachment or just the attachment name if in this folder.')
try:
with open(filePath): break
except IOError:
print '\n %s was not found on your filesystem or can not be opened.\n' % filePath
pass
#print filesize, and encoding estimate with confirmation if file is over X size (1mb?)
invSize = os.path.getsize(filePath)
invSize = (invSize / 1024) #Converts to kilobytes
round(invSize,2) #Rounds to two decimal places
if (invSize > 500.0):#If over 500KB
print '\n WARNING:The file that you are trying to attach is ', invSize, 'KB and will take considerable time to send.\n'
uInput = userInput('Are you sure you still want to attach it, (Y)es or (N)o?').lower()
if uInput != "y":
print '\n Attachment discarded.\n'
return ''
elif (invSize > 184320.0): #If larger than 180MB, discard.
print '\n Attachment too big, maximum allowed size:180MB\n'
main()
pathLen = len(str(ntpath.basename(filePath))) #Gets the length of the filepath excluding the filename
fileName = filePath[(len(str(filePath)) - pathLen):] #reads the filename
filetype = imghdr.what(filePath) #Tests if it is an image file
if filetype is not None:
print '\n ---------------------------------------------------'
print ' Attachment detected as an Image.'
print ' <img> tags will automatically be included,'
print ' allowing the recipient to view the image'
print ' using the "View HTML code..." option in Bitmessage.'
print ' ---------------------------------------------------\n'
isImage = True
time.sleep(2)
print '\n Encoding Attachment, Please Wait ...\n' #Alert the user that the encoding process may take some time.
with open(filePath, 'rb') as f: #Begin the actual encoding
data = f.read(188743680) #Reads files up to 180MB, the maximum size for Bitmessage.
data = data.encode("base64")
if (isImage == True): #If it is an image, include image tags in the message
theAttachment = """
<!-- Note: Image attachment below. Please use the right click "View HTML code ..." option to view it. -->
<!-- Sent using Bitmessage Daemon. https://github.com/Dokument/PyBitmessage-Daemon -->
Filename:%s
Filesize:%sKB
Encoding:base64
<center>
<div id="image">
<img alt = "%s" src='data:image/%s;base64, %s' />
</div>
</center>""" % (fileName,invSize,fileName,filetype,data)
else: #Else it is not an image so do not include the embedded image code.
theAttachment = """
<!-- Note: File attachment below. Please use a base64 decoder, or Daemon, to save it. -->
<!-- Sent using Bitmessage Daemon. https://github.com/Dokument/PyBitmessage-Daemon -->
Filename:%s
Filesize:%sKB
Encoding:base64
<attachment alt = "%s" src='data:file/%s;base64, %s' />""" % (fileName,invSize,fileName,fileName,data)
uInput = userInput('Would you like to add another attachment, (Y)es or (N)o?').lower()
if (uInput == 'y' or uInput == 'yes'):#Allows multiple attachments to be added to one message
theAttachmentS = str(theAttachmentS) + str(theAttachment)+ '\n\n'
elif (uInput == 'n' or uInput == 'no'):
break
theAttachmentS = theAttachmentS + theAttachment
return theAttachmentS
def sendMsg(toAddress, fromAddress, subject, message): #With no arguments sent, sendMsg fills in the blanks. subject and message must be encoded before they are passed.
    """Send a person-to-person message via the API.

    Any argument passed as '' (or as an invalid address) is prompted for
    interactively.  NOTE(review): subject/message are only base64-encoded
    here when they were prompted for; callers passing non-empty values
    (e.g. replyMsg) must pre-encode them.
    """
    global usrPrompt
    if (validAddress(toAddress)== False):
        while True:
            toAddress = userInput("What is the To Address?")
            if (toAddress == "c"):
                usrPrompt = 1
                print ' '
                main()
            elif (validAddress(toAddress)== False):
                print '\n     Invalid Address. "c" to cancel. Please try again.\n'
            else:
                break
    if (validAddress(fromAddress)== False):
        try:
            jsonAddresses = json.loads(api.listAddresses())
            numAddresses = len(jsonAddresses['addresses']) #Number of addresses
        except:
            print '\n     Connection Error\n'
            usrPrompt = 0
            main()
        if (numAddresses > 1): #Ask what address to send from if multiple addresses
            found = False
            while True:
                print ' '
                fromAddress = userInput("Enter an Address or Address Label to send from.")
                if fromAddress == "exit":
                    usrPrompt = 1
                    main()
                # First pass: treat the input as an address *label*.
                for addNum in range (0, numAddresses): #processes all of the addresses
                    label = jsonAddresses['addresses'][addNum]['label']
                    address = jsonAddresses['addresses'][addNum]['address']
                    #stream = jsonAddresses['addresses'][addNum]['stream']
                    #enabled = jsonAddresses['addresses'][addNum]['enabled']
                    if (fromAddress == label): #address entered was a label and is found
                        fromAddress = address
                        found = True
                        break
                if (found == False):
                    if(validAddress(fromAddress)== False):
                        print '\n     Invalid Address. Please try again.\n'
                    else:
                        # Second pass: treat the input as a literal address of ours.
                        for addNum in range (0, numAddresses): #processes all of the addresses
                            #label = jsonAddresses['addresses'][addNum]['label']
                            address = jsonAddresses['addresses'][addNum]['address']
                            #stream = jsonAddresses['addresses'][addNum]['stream']
                            #enabled = jsonAddresses['addresses'][addNum]['enabled']
                            if (fromAddress == address): #address entered was a found in our addressbook.
                                found = True
                                break
                        if (found == False):
                            print '\n     The address entered is not one of yours. Please try again.\n'
                if (found == True):
                    break #Address was found
        else: #Only one address in address book
            print '\n     Using the only address in the addressbook to send from.\n'
            fromAddress = jsonAddresses['addresses'][0]['address']
    if (subject == ''):
            subject = userInput("Enter your Subject.")
            subject = subject.encode('base64')
    if (message == ''):
            message = userInput("Enter your Message.")
            uInput = userInput('Would you like to add an attachment, (Y)es or (N)o?').lower()
            if uInput == "y":
                message = message + '\n\n' + attachment()
            message = message.encode('base64')
    try:
        ackData = api.sendMessage(toAddress, fromAddress, subject, message)
        print '\n     Message Status:', api.getStatus(ackData), '\n'
    except:
        print '\n     Connection Error\n'
        usrPrompt = 0
        main()
def sendBrd(fromAddress, subject, message): #sends a broadcast
    """Send a broadcast from one of our own addresses.

    Arguments passed as '' are prompted for interactively.
    NOTE(review): subject/message are only base64-encoded here when they
    were prompted for; callers passing non-empty values must pre-encode.
    """
    global usrPrompt
    if (fromAddress == ''):
        try:
            jsonAddresses = json.loads(api.listAddresses())
            numAddresses = len(jsonAddresses['addresses']) #Number of addresses
        except:
            print '\n     Connection Error\n'
            usrPrompt = 0
            main()
        if (numAddresses > 1): #Ask what address to send from if multiple addresses
            found = False
            while True:
                fromAddress = userInput("\nEnter an Address or Address Label to send from.")
                if fromAddress == "exit":
                    usrPrompt = 1
                    main()
                # First pass: treat the input as an address *label*.
                for addNum in range (0, numAddresses): #processes all of the addresses
                    label = jsonAddresses['addresses'][addNum]['label']
                    address = jsonAddresses['addresses'][addNum]['address']
                    #stream = jsonAddresses['addresses'][addNum]['stream']
                    #enabled = jsonAddresses['addresses'][addNum]['enabled']
                    if (fromAddress == label): #address entered was a label and is found
                        fromAddress = address
                        found = True
                        break
                if (found == False):
                    if(validAddress(fromAddress)== False):
                        print '\n     Invalid Address. Please try again.\n'
                    else:
                        # Second pass: treat the input as a literal address of ours.
                        for addNum in range (0, numAddresses): #processes all of the addresses
                            #label = jsonAddresses['addresses'][addNum]['label']
                            address = jsonAddresses['addresses'][addNum]['address']
                            #stream = jsonAddresses['addresses'][addNum]['stream']
                            #enabled = jsonAddresses['addresses'][addNum]['enabled']
                            if (fromAddress == address): #address entered was a found in our addressbook.
                                found = True
                                break
                        if (found == False):
                            print '\n     The address entered is not one of yours. Please try again.\n'
                if (found == True):
                    break #Address was found
        else: #Only one address in address book
            print '\n     Using the only address in the addressbook to send from.\n'
            fromAddress = jsonAddresses['addresses'][0]['address']
    if (subject == ''):
            subject = userInput("Enter your Subject.")
            subject = subject.encode('base64')
    if (message == ''):
            message = userInput("Enter your Message.")
            uInput = userInput('Would you like to add an attachment, (Y)es or (N)o?').lower()
            if uInput == "y":
                message = message + '\n\n' + attachment()
            message = message.encode('base64')
    try:
        ackData = api.sendBroadcast(fromAddress, subject, message)
        print '\n     Message Status:', api.getStatus(ackData), '\n'
    except:
        print '\n     Connection Error\n'
        usrPrompt = 0
        main()
def inbox(unreadOnly = False): #Lists the messages by: Message Number, To Address Label, From Address Label, Subject, Received Time)
global usrPrompt
try:
inboxMessages = json.loads(api.getAllInboxMessages())
numMessages = len(inboxMessages['inboxMessages'])
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
messagesPrinted = 0
messagesUnread = 0
for msgNum in range (0, numMessages): #processes all of the messages in the inbox
message = inboxMessages['inboxMessages'][msgNum]
# if we are displaying all messages or if this message is unread then display it
if not unreadOnly or not message['read']:
print ' -----------------------------------\n'
print ' Message Number:',msgNum #Message Number
print ' To:', getLabelForAddress(message['toAddress']) #Get the to address
print ' From:', getLabelForAddress(message['fromAddress']) #Get the from address
print ' Subject:', message['subject'].decode('base64') #Get the subject
print ' Received:', datetime.datetime.fromtimestamp(float(message['receivedTime'])).strftime('%Y-%m-%d %H:%M:%S')
messagesPrinted += 1
if not message['read']: messagesUnread += 1
if (messagesPrinted%20 == 0 and messagesPrinted != 0):
uInput = userInput('(Press Enter to continue or type (Exit) to return to the main menu.)').lower()
print '\n -----------------------------------'
print ' There are %d unread messages of %d messages in the inbox.' % (messagesUnread, numMessages)
print ' -----------------------------------\n'
def outbox():
global usrPrompt
try:
outboxMessages = json.loads(api.getAllSentMessages())
numMessages = len(outboxMessages['sentMessages'])
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
for msgNum in range (0, numMessages): #processes all of the messages in the outbox
print '\n -----------------------------------\n'
print ' Message Number:',msgNum #Message Number
#print ' Message ID:', outboxMessages['sentMessages'][msgNum]['msgid']
print ' To:', getLabelForAddress(outboxMessages['sentMessages'][msgNum]['toAddress']) #Get the to address
print ' From:', getLabelForAddress(outboxMessages['sentMessages'][msgNum]['fromAddress']) #Get the from address
print ' Subject:', outboxMessages['sentMessages'][msgNum]['subject'].decode('base64') #Get the subject
print ' Status:', outboxMessages['sentMessages'][msgNum]['status'] #Get the subject
print ' Last Action Time:', datetime.datetime.fromtimestamp(float(outboxMessages['sentMessages'][msgNum]['lastActionTime'])).strftime('%Y-%m-%d %H:%M:%S')
if (msgNum%20 == 0 and msgNum != 0):
uInput = userInput('(Press Enter to continue or type (Exit) to return to the main menu.)').lower()
print '\n -----------------------------------'
print ' There are ',numMessages,' messages in the outbox.'
print ' -----------------------------------\n'
def readSentMsg(msgNum): #Opens a sent message for reading
    """Display sent message number *msgNum*, offering to save any base64
    attachments it contains before printing the (stripped) body."""
    global usrPrompt
    try:
        outboxMessages = json.loads(api.getAllSentMessages())
        numMessages = len(outboxMessages['sentMessages'])
    except:
        print '\n     Connection Error\n'
        usrPrompt = 0
        main()
    print ' '
    if (msgNum >= numMessages):
        print '\n     Invalid Message Number.\n'
        main()
    #Begin attachment detection
    message = outboxMessages['sentMessages'][msgNum]['message'].decode('base64')
    while True: #Allows multiple messages to be downloaded/saved
        if (';base64,' in message): #Found this text in the message, there is probably an attachment.
            attPos= message.index(";base64,") #Finds the attachment position
            attEndPos = message.index("' />") #Finds the end of the attachment
            #attLen = attEndPos - attPos    #Finds the length of the message
            if ('alt = "' in message): #We can get the filename too
                fnPos = message.index('alt = "') #Finds position of the filename
                fnEndPos = message.index('" src=') #Finds the end position
                #fnLen = fnEndPos - fnPos #Finds the length of the filename
                fileName = message[fnPos+7:fnEndPos]
            else:
                # No alt attribute: strip from the attachment data itself and
                # fall back to a generic file name.
                fnPos = attPos
                fileName = 'Attachment'
            uInput = userInput('\n    Attachment Detected. Would you like to save the attachment, (Y)es or (N)o?').lower()
            if (uInput == "y" or uInput == 'yes'):
                # NOTE(review): this local shadows the module-level attachment() helper, but only inside this function.
                attachment = message[attPos+9:attEndPos]
                saveFile(fileName,attachment)
            # Replace the (possibly huge) attachment body with a short placeholder before displaying.
            message = message[:fnPos] + '~<Attachment data removed for easier viewing>~' + message[(attEndPos+4):]
        else:
            break
    #End attachment Detection
    print '\n     To:', getLabelForAddress(outboxMessages['sentMessages'][msgNum]['toAddress']) #Get the to address
    print '     From:', getLabelForAddress(outboxMessages['sentMessages'][msgNum]['fromAddress']) #Get the from address
    print '     Subject:', outboxMessages['sentMessages'][msgNum]['subject'].decode('base64') #Get the subject
    print '     Status:', outboxMessages['sentMessages'][msgNum]['status'] #Get the subject
    print '     Last Action Time:', datetime.datetime.fromtimestamp(float(outboxMessages['sentMessages'][msgNum]['lastActionTime'])).strftime('%Y-%m-%d %H:%M:%S')
    print '     Message:\n'
    print message #inboxMessages['inboxMessages'][msgNum]['message'].decode('base64')
    print ' '
def readMsg(msgNum): #Opens a message for reading
    """Display inbox message number *msgNum*, offering to save any base64
    attachments it contains.  Returns the message's API msgid."""
    global usrPrompt
    try:
        inboxMessages = json.loads(api.getAllInboxMessages())
        numMessages = len(inboxMessages['inboxMessages'])
    except:
        print '\n     Connection Error\n'
        usrPrompt = 0
        main()
    if (msgNum >= numMessages):
        print '\n     Invalid Message Number.\n'
        main()
    #Begin attachment detection
    message = inboxMessages['inboxMessages'][msgNum]['message'].decode('base64')
    while True: #Allows multiple messages to be downloaded/saved
        if (';base64,' in message): #Found this text in the message, there is probably an attachment.
            attPos= message.index(";base64,") #Finds the attachment position
            attEndPos = message.index("' />") #Finds the end of the attachment
            #attLen = attEndPos - attPos    #Finds the length of the message
            if ('alt = "' in message): #We can get the filename too
                fnPos = message.index('alt = "') #Finds position of the filename
                fnEndPos = message.index('" src=') #Finds the end position
                #fnLen = fnEndPos - fnPos #Finds the length of the filename
                fileName = message[fnPos+7:fnEndPos]
            else:
                # No alt attribute: strip from the attachment data itself and
                # fall back to a generic file name.
                fnPos = attPos
                fileName = 'Attachment'
            uInput = userInput('\n    Attachment Detected. Would you like to save the attachment, (Y)es or (N)o?').lower()
            if (uInput == "y" or uInput == 'yes'):
                # NOTE(review): this local shadows the module-level attachment() helper, but only inside this function.
                attachment = message[attPos+9:attEndPos]
                saveFile(fileName,attachment)
            # Replace the (possibly huge) attachment body with a short placeholder before displaying.
            message = message[:fnPos] + '~<Attachment data removed for easier viewing>~' + message[(attEndPos+4):]
        else:
            break
    #End attachment Detection
    print '\n     To:', getLabelForAddress(inboxMessages['inboxMessages'][msgNum]['toAddress']) #Get the to address
    print '     From:', getLabelForAddress(inboxMessages['inboxMessages'][msgNum]['fromAddress']) #Get the from address
    print '     Subject:', inboxMessages['inboxMessages'][msgNum]['subject'].decode('base64') #Get the subject
    print '     Received:',datetime.datetime.fromtimestamp(float(inboxMessages['inboxMessages'][msgNum]['receivedTime'])).strftime('%Y-%m-%d %H:%M:%S')
    print '     Message:\n'
    print message #inboxMessages['inboxMessages'][msgNum]['message'].decode('base64')
    print ' '
    return inboxMessages['inboxMessages'][msgNum]['msgid']
def replyMsg(msgNum,forwardORreply): #Allows you to reply to the message you are currently on. Saves typing in the addresses and subject.
    """Reply to or forward inbox message *msgNum*.

    forwardORreply: 'reply' reuses the sender as the To address and
    prefixes "Re: "; 'forward' prompts for a To address and prefixes
    "Fwd: ".  The original message is quoted below the new text and
    everything is base64-encoded before being handed to sendMsg.
    """
    global usrPrompt
    forwardORreply = forwardORreply.lower() #makes it lowercase
    try:
        inboxMessages = json.loads(api.getAllInboxMessages())
    except:
        print '\n     Connection Error\n'
        usrPrompt = 0
        main()
    fromAdd = inboxMessages['inboxMessages'][msgNum]['toAddress']#Address it was sent To, now the From address
    message = inboxMessages['inboxMessages'][msgNum]['message'].decode('base64') #Message that you are replying too.
    subject = inboxMessages['inboxMessages'][msgNum]['subject']
    subject = subject.decode('base64')
    if (forwardORreply == 'reply'):
        toAdd = inboxMessages['inboxMessages'][msgNum]['fromAddress'] #Address it was From, now the To address
        subject = "Re: " + subject
    elif (forwardORreply == 'forward'):
        subject = "Fwd: " + subject
        while True:
            toAdd = userInput("What is the To Address?")
            if (toAdd == "c"):
                usrPrompt = 1
                print ' '
                main()
            elif (validAddress(toAdd)== False):
                print '\n     Invalid Address. "c" to cancel. Please try again.\n'
            else:
                break
    else:
        print '\n     Invalid Selection. Reply or Forward only'
        usrPrompt = 0
        main()
    subject = subject.encode('base64')
    newMessage = userInput("Enter your Message.")
    uInput = userInput('Would you like to add an attachment, (Y)es or (N)o?').lower()
    if uInput == "y":
        newMessage = newMessage + '\n\n' + attachment()
    newMessage = newMessage + '\n\n------------------------------------------------------\n'
    newMessage = newMessage + message
    newMessage = newMessage.encode('base64')
    # sendMsg receives pre-encoded subject/message, so its own prompts are skipped.
    sendMsg(toAdd, fromAdd, subject, newMessage)
    main()
def delMsg(msgNum): #Deletes a specified message from the inbox
global usrPrompt
try:
inboxMessages = json.loads(api.getAllInboxMessages())
msgId = inboxMessages['inboxMessages'][int(msgNum)]['msgid'] #gets the message ID via the message index number
msgAck = api.trashMessage(msgId)
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
return msgAck
def delSentMsg(msgNum): #Deletes a specified message from the outbox
global usrPrompt
try:
outboxMessages = json.loads(api.getAllSentMessages())
msgId = outboxMessages['sentMessages'][int(msgNum)]['msgid'] #gets the message ID via the message index number
msgAck = api.trashSentMessage(msgId)
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
return msgAck
def getLabelForAddress(address):
    """Return the cached 'label (address)' string for *address*,
    rebuilding the cache on a miss; falls back to the raw address."""
    global usrPrompt
    if address not in knownAddresses:
        # Cache miss: refresh from the address book and our own addresses.
        buildKnownAddresses()
    return knownAddresses.get(address, address)
def buildKnownAddresses():
# add from address book
try:
response = api.listAddressBookEntries()
# if api is too old then fail
if "API Error 0020" in response: return
addressBook = json.loads(response)
for entry in addressBook['addresses']:
if entry['address'] not in knownAddresses:
knownAddresses[entry['address']] = "%s (%s)" % (entry['label'].decode('base64'), entry['address'])
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
# add from my addresses
try:
response = api.listAddresses2()
# if api is too old just return then fail
if "API Error 0020" in response: return
addresses = json.loads(response)
for entry in addresses['addresses']:
if entry['address'] not in knownAddresses:
knownAddresses[entry['address']] = "%s (%s)" % (entry['label'].decode('base64'), entry['address'])
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
def listAddressBookEntries():
try:
response = api.listAddressBookEntries()
if "API Error" in response:
return getAPIErrorCode(response)
addressBook = json.loads(response)
print
print ' --------------------------------------------------------------'
print ' | Label | Address |'
print ' |--------------------|---------------------------------------|'
for entry in addressBook['addresses']:
label = entry['label'].decode('base64')
address = entry['address']
if (len(label) > 19): label = label[:16] + '...'
print ' | ' + label.ljust(19) + '| ' + address.ljust(37) + ' |'
print ' --------------------------------------------------------------'
print
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
def addAddressToAddressBook(address, label):
try:
response = api.addAddressBookEntry(address, label.encode('base64'))
if "API Error" in response:
return getAPIErrorCode(response)
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
def deleteAddressFromAddressBook(address):
try:
response = api.deleteAddressBookEntry(address)
if "API Error" in response:
return getAPIErrorCode(response)
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
def getAPIErrorCode(response):
    """Extract the numeric code from an 'API Error NNNN: ...' response.

    Returns the code as an int, or None when *response* is not an API error.
    """
    if "API Error" not in response:
        return None
    # The code is the third whitespace-separated token; drop its trailing colon.
    codeToken = response.split()[2]
    return int(codeToken[:-1])
def markMessageRead(messageID):
try:
response = api.getInboxMessageByID(messageID, True)
if "API Error" in response:
return getAPIErrorCode(response)
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
def markMessageUnread(messageID):
try:
response = api.getInboxMessageByID(messageID, False)
if "API Error" in response:
return getAPIErrorCode(response)
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
def markAllMessagesRead():
try:
inboxMessages = json.loads(api.getAllInboxMessages())['inboxMessages']
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
for message in inboxMessages:
if not message['read']:
markMessageRead(message['msgid'])
def markAllMessagesUnread():
try:
inboxMessages = json.loads(api.getAllInboxMessages())['inboxMessages']
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
for message in inboxMessages:
if message['read']:
markMessageUnread(message['msgid'])
def clientStatus():
try:
clientStatus = json.loads(api.clientStatus())
except:
print '\n Connection Error\n'
usrPrompt = 0
main()
print "\nnetworkStatus: " + clientStatus['networkStatus'] + "\n"
print "\nnetworkConnections: " + str(clientStatus['networkConnections']) + "\n"
print "\nnumberOfPubkeysProcessed: " + str(clientStatus['numberOfPubkeysProcessed']) + "\n"
print "\nnumberOfMessagesProcessed: " + str(clientStatus['numberOfMessagesProcessed']) + "\n"
print "\nnumberOfBroadcastsProcessed: " + str(clientStatus['numberOfBroadcastsProcessed']) + "\n"
def UI(usrInput): #Main user menu
global usrPrompt
if usrInput == "help" or usrInput == "h" or usrInput == "?":
print ' '
print ' -------------------------------------------------------------------------'
print ' | https://github.com/Dokument/PyBitmessage-Daemon |'
print ' |-----------------------------------------------------------------------|'
print ' | Command | Description |'
print ' |------------------------|----------------------------------------------|'
print ' | help | This help file. |'
print ' | apiTest | Tests the API |'
print ' | addInfo | Returns address information (If valid) |'
print ' | bmSettings | BitMessage settings |'
print ' | exit | Use anytime to return to main menu |'
print ' | quit | Quits the program |'
print ' |------------------------|----------------------------------------------|'
print ' | listAddresses | Lists all of the users addresses |'
print ' | generateAddress | Generates a new address |'
print ' | getAddress | Get determinist address from passphrase |'
print ' |------------------------|----------------------------------------------|'
print ' | listAddressBookEntries | Lists entries from the Address Book |'
print ' | addAddressBookEntry | Add address to the Address Book |'
print ' | deleteAddressBookEntry | Deletes address from the Address Book |'
print ' |------------------------|----------------------------------------------|'
print ' | subscribe | Subscribes to an address |'
print ' | unsubscribe | Unsubscribes from an address |'
#print ' | listSubscriptions | Lists all of the subscriptions. |'
print ' |------------------------|----------------------------------------------|'
print ' | create | Creates a channel |'
print ' | join | Joins a channel |'
print ' | leave | Leaves a channel |'
print ' |------------------------|----------------------------------------------|'
print ' | inbox | Lists the message information for the inbox |'
print ' | outbox | Lists the message information for the outbox |'
print ' | send | Send a new message or broadcast |'
print ' | unread | Lists all unread inbox messages |'
print ' | read | Reads a message from the inbox or outbox |'
print ' | save | Saves message to text file |'
print ' | delete | Deletes a message or all messages |'
print ' -------------------------------------------------------------------------'
print ' '
main()
elif usrInput == "apitest": #tests the API Connection.
if (apiTest() == True):
print '\n API connection test has: PASSED\n'
else:
print '\n API connection test has: FAILED\n'
main()
elif usrInput == "addinfo":
tmp_address = userInput('\nEnter the Bitmessage Address.')
address_information = api.decodeAddress(tmp_address)
address_information = eval(address_information)
print '\n------------------------------'
if 'success' in str(address_information.get('status')).lower():
print ' Valid Address'
print ' Address Version: %s' % str(address_information.get('addressVersion'))
print ' Stream Number: %s' % str(address_information.get('streamNumber'))
else:
print ' Invalid Address !'
print '------------------------------\n'
main()
elif usrInput == "bmsettings": #tests the API Connection.
bmSettings()
print ' '
main()
elif usrInput == "quit": #Quits the application
print '\n Bye\n'
sys.exit()
os.exit()
elif usrInput == "listaddresses": #Lists all of the identities in the addressbook
listAdd()
main()
elif usrInput == "generateaddress": #Generates a new address
uInput = userInput('\nWould you like to create a (D)eterministic or (R)andom address?').lower()
if uInput == "d" or uInput == "determinstic": #Creates a deterministic address
deterministic = True
#lbl = raw_input('Label the new address:') #currently not possible via the api
lbl = ''
passphrase = userInput('Enter the Passphrase.')#.encode('base64')
numOfAdd = int(userInput('How many addresses would you like to generate?'))
#addVNum = int(raw_input('Address version number (default "0"):'))
#streamNum = int(raw_input('Stream number (default "0"):'))
addVNum = 3
streamNum = 1
isRipe = userInput('Shorten the address, (Y)es or (N)o?').lower()
if isRipe == "y":
ripe = True
print genAdd(lbl,deterministic, passphrase, numOfAdd, addVNum, streamNum, ripe)
main()
elif isRipe == "n":
ripe = False
print genAdd(lbl, deterministic, passphrase, numOfAdd, addVNum, streamNum, ripe)
main()
elif isRipe == "exit":
usrPrompt = 1
main()
else:
print '\n Invalid input\n'
main()
elif uInput == "r" or uInput == "random": #Creates a random address with user-defined label
deterministic = False
null = ''
lbl = userInput('Enter the label for the new address.')
print genAdd(lbl,deterministic, null,null, null, null, null)
main()
else:
print '\n Invalid input\n'
main()
elif usrInput == "getaddress": #Gets the address for/from a passphrase
phrase = userInput("Enter the address passphrase.")
print '\n Working...\n'
#vNumber = int(raw_input("Enter the address version number:"))
#sNumber = int(raw_input("Enter the address stream number:"))
address = getAddress(phrase,4,1)#,vNumber,sNumber)
print ('\n Address: ' + address + '\n')
usrPrompt = 1
main()
elif usrInput == "subscribe": #Subsribe to an address
subscribe()
usrPrompt = 1
main()
elif usrInput == "unsubscribe": #Unsubscribe from an address
unsubscribe()
usrPrompt = 1
main()
elif usrInput == "listsubscriptions": #Unsubscribe from an address
listSubscriptions()
usrPrompt = 1
main()
elif usrInput == "create":
createChan()
userPrompt = 1
main()
elif usrInput == "join":
joinChan()
userPrompt = 1
main()
elif usrInput == "leave":
leaveChan()
userPrompt = 1
main()
elif usrInput == "inbox":
print '\n Loading...\n'
inbox()
main()
elif usrInput == "unread":
print '\n Loading...\n'
inbox(True)
main()
elif usrInput == "outbox":
print '\n Loading...\n'
outbox()
main()
elif usrInput == 'send': #Sends a message or broadcast
uInput = userInput('Would you like to send a (M)essage or (B)roadcast?').lower()
if (uInput == 'm' or uInput == 'message'):
null = ''
sendMsg(null,null,null,null)
main()
elif (uInput =='b' or uInput == 'broadcast'):
null = ''
sendBrd(null,null,null)
main()
elif usrInput == "read": #Opens a message from the inbox for viewing.
uInput = userInput("Would you like to read a message from the (I)nbox or (O)utbox?").lower()
if (uInput != 'i' and uInput != 'inbox' and uInput != 'o' and uInput != 'outbox'):
print '\n Invalid Input.\n'
usrPrompt = 1
main()
msgNum = int(userInput("What is the number of the message you wish to open?"))
if (uInput == 'i' or uInput == 'inbox'):
print '\n Loading...\n'
messageID = readMsg(msgNum)
uInput = userInput("\nWould you like to keep this message unread, (Y)es or (N)o?").lower()
if not (uInput == 'y' or uInput == 'yes'):
markMessageRead(messageID)
usrPrompt = 1
uInput = userInput("\nWould you like to (D)elete, (F)orward, (R)eply to, or (Exit) this message?").lower()
if (uInput == 'r' or uInput == 'reply'):
print '\n Loading...\n'
print ' '
replyMsg(msgNum,'reply')
usrPrompt = 1
elif (uInput == 'f' or uInput == 'forward'):
print '\n Loading...\n'
print ' '
replyMsg(msgNum,'forward')
usrPrompt = 1
elif (uInput == "d" or uInput == 'delete'):
uInput = userInput("Are you sure, (Y)es or (N)o?").lower()#Prevent accidental deletion
if uInput == "y":
delMsg(msgNum)
print '\n Message Deleted.\n'
usrPrompt = 1
else:
usrPrompt = 1
else:
print '\n Invalid entry\n'
usrPrompt = 1
elif (uInput == 'o' or uInput == 'outbox'):
readSentMsg(msgNum)
uInput = userInput("Would you like to (D)elete, or (Exit) this message?").lower() #Gives the user the option to delete the message
if (uInput == "d" or uInput == 'delete'):
uInput = userInput('Are you sure, (Y)es or (N)o?').lower() #Prevent accidental deletion
if uInput == "y":
delSentMsg(msgNum)
print '\n Message Deleted.\n'
usrPrompt = 1
else:
usrPrompt = 1
else:
print '\n Invalid Entry\n'
usrPrompt = 1
main()
elif usrInput == "save":
uInput = userInput("Would you like to save a message from the (I)nbox or (O)utbox?").lower()
if (uInput != 'i' and uInput == 'inbox' and uInput != 'o' and uInput == 'outbox'):
print '\n Invalid Input.\n'
usrPrompt = 1
main()
if (uInput == 'i' or uInput == 'inbox'):
inboxMessages = json.loads(api.getAllInboxMessages())
numMessages = len(inboxMessages['inboxMessages'])
while True:
msgNum = int(userInput("What is the number of the message you wish to save?"))
if (msgNum >= numMessages):
print '\n Invalid Message Number.\n'
else:
break
subject = inboxMessages['inboxMessages'][msgNum]['subject'].decode('base64')
message = inboxMessages['inboxMessages'][msgNum]['message']#Don't decode since it is done in the saveFile function
elif (uInput == 'o' or uInput == 'outbox'):
outboxMessages = json.loads(api.getAllSentMessages())
numMessages = len(outboxMessages['sentMessages'])
while True:
msgNum = int(userInput("What is the number of the message you wish to save?"))
if (msgNum >= numMessages):
print '\n Invalid Message Number.\n'
else:
break
subject = outboxMessages['sentMessages'][msgNum]['subject'].decode('base64')
message = outboxMessages['sentMessages'][msgNum]['message']#Don't decode since it is done in the saveFile function
subject = subject +'.txt'
saveFile(subject,message)
usrPrompt = 1
main()
elif usrInput == "delete": #will delete a message from the system, not reflected on the UI.
uInput = userInput("Would you like to delete a message from the (I)nbox or (O)utbox?").lower()
if (uInput == 'i' or uInput == 'inbox'):
inboxMessages = json.loads(api.getAllInboxMessages())
numMessages = len(inboxMessages['inboxMessages'])
while True:
msgNum = userInput('Enter the number of the message you wish to delete or (A)ll to empty the inbox.').lower()
if (msgNum == 'a' or msgNum == 'all'):
break
elif (int(msgNum) >= numMessages):
print '\n Invalid Message Number.\n'
else:
break
uInput = userInput("Are you sure, (Y)es or (N)o?").lower()#Prevent accidental deletion
if uInput == "y":
if (msgNum == 'a' or msgNum == 'all'):
print ' '
for msgNum in range (0, numMessages): #processes all of the messages in the inbox
print ' Deleting message ', msgNum+1, ' of ', numMessages
delMsg(0)
print '\n Inbox is empty.'
usrPrompt = 1
else:
delMsg(int(msgNum))
print '\n Notice: Message numbers may have changed.\n'
main()
else:
usrPrompt = 1
elif (uInput == 'o' or uInput == 'outbox'):
outboxMessages = json.loads(api.getAllSentMessages())
numMessages = len(outboxMessages['sentMessages'])
while True:
msgNum = userInput('Enter the number of the message you wish to delete or (A)ll to empty the inbox.').lower()
if (msgNum == 'a' or msgNum == 'all'):
break
elif (int(msgNum) >= numMessages):
print '\n Invalid Message Number.\n'
else:
break
uInput = userInput("Are you sure, (Y)es or (N)o?").lower()#Prevent accidental deletion
if uInput == "y":
if (msgNum == 'a' or msgNum == 'all'):
print ' '
for msgNum in range (0, numMessages): #processes all of the messages in the outbox
print ' Deleting message ', msgNum+1, ' of ', numMessages
delSentMsg(0)
print '\n Outbox is empty.'
usrPrompt = 1
else:
delSentMsg(int(msgNum))
print '\n Notice: Message numbers may have changed.\n'
main()
else:
usrPrompt = 1
else:
print '\n Invalid Entry.\n'
userPrompt = 1
main()
elif usrInput == "exit":
print '\n You are already at the main menu. Use "quit" to quit.\n'
usrPrompt = 1
main()
elif usrInput == "listaddressbookentries":
res = listAddressBookEntries()
if res == 20: print '\n Error: API function not supported.\n'
usrPrompt = 1
main()
elif usrInput == "addaddressbookentry":
address = userInput('Enter address')
label = userInput('Enter label')
res = addAddressToAddressBook(address, label)
if res == 16: print '\n Error: Address already exists in Address Book.\n'
if res == 20: print '\n Error: API function not supported.\n'
usrPrompt = 1
main()
elif usrInput == "deleteaddressbookentry":
address = userInput('Enter address')
res = deleteAddressFromAddressBook(address)
if res == 20: print '\n Error: API function not supported.\n'
usrPrompt = 1
main()
elif usrInput == "markallmessagesread":
markAllMessagesRead()
usrPrompt = 1
main()
elif usrInput == "markallmessagesunread":
markAllMessagesUnread()
usrPrompt = 1
main()
elif usrInput == "status":
clientStatus()
usrPrompt = 1
main()
elif usrInput == "million+":
genMilAddr()
usrPrompt = 1
main()
elif usrInput == "million-":
delMilAddr()
usrPrompt = 1
main()
else:
print '\n "',usrInput,'" is not a command.\n'
usrPrompt = 1
main()
def main():
    """Top-level prompt loop for the Bitmessage daemon CLI.

    Relies on two module globals:
      api       -- xmlrpclib proxy to the running Bitmessage client
      usrPrompt -- 0: first run (print banner, connect, show help hint)
                   1: reprint the help hint
                   2: plain prompt, no preamble
    Reads one line of input and dispatches it to UI(); EOF (Ctrl-D) is
    treated as the "quit" command.
    """
    global api
    global usrPrompt
    if (usrPrompt == 0):
        # First invocation: banner, then build the API connection from the
        # stored credentials.
        print '\n ------------------------------'
        print ' | Bitmessage Daemon by .dok |'
        print ' | Version 0.2.6 for BM 0.3.5 |'
        print ' ------------------------------'
        api = xmlrpclib.ServerProxy(apiData()) #Connect to BitMessage using these api credentials
        if (apiTest() == False):
            # Connection probe failed; warn but keep the prompt running so
            # the user can fix settings via "bmSettings".
            print '\n ****************************************************************'
            print ' WARNING: You are not connected to the Bitmessage client.'
            print ' Either Bitmessage is not running or your settings are incorrect.'
            print ' Use the command "apiTest" or "bmSettings" to resolve this issue.'
            print ' ****************************************************************\n'
        print 'Type (H)elp for a list of commands.' #Startup message
        usrPrompt = 2
        #if (apiTest() == False):#Preform a connection test #taken out until I get the error handler working
        #    print '*************************************'
        #    print 'WARNING: No connection to Bitmessage.'
        #    print '*************************************'
        #    print ' '
    elif (usrPrompt == 1):
        print '\nType (H)elp for a list of commands.' #Startup message
        usrPrompt = 2
    try:
        # Normalize input: lowercase and strip all spaces before dispatch.
        UI((raw_input('>').lower()).replace(" ", ""))
    except EOFError:
        UI("quit")
# Script entry point: start the interactive prompt loop.
if __name__ == "__main__":
    main()
| mit |
eonpatapon/neutron | neutron/db/l3_db.py | 2 | 66385 | # Copyright 2012 VMware, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from oslo_log import log as logging
from oslo_utils import uuidutils
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.orm import exc
from oslo_utils import excutils
import six
from neutron.api.rpc.agentnotifiers import l3_rpc_agent_api
from neutron.api.v2 import attributes
from neutron.callbacks import events
from neutron.callbacks import exceptions
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import constants as l3_constants
from neutron.common import exceptions as n_exc
from neutron.common import rpc as n_rpc
from neutron.common import utils
from neutron.db import model_base
from neutron.db import models_v2
from neutron.extensions import external_net
from neutron.extensions import l3
from neutron.i18n import _LI, _LE
from neutron import manager
from neutron.plugins.common import constants
LOG = logging.getLogger(__name__)

# Local aliases for the device_owner values stamped on router-related ports.
DEVICE_OWNER_ROUTER_INTF = l3_constants.DEVICE_OWNER_ROUTER_INTF
DEVICE_OWNER_ROUTER_GW = l3_constants.DEVICE_OWNER_ROUTER_GW
DEVICE_OWNER_FLOATINGIP = l3_constants.DEVICE_OWNER_FLOATINGIP

# API attribute name for a router's external gateway description.
EXTERNAL_GW_INFO = l3.EXTERNAL_GW_INFO

# Maps API field to DB column
# API parameter name and Database column names may differ.
# Useful to keep the filtering between API and Database.
API_TO_DB_COLUMN_MAP = {'port_id': 'fixed_port_id'}

# Router attributes copied verbatim from the DB row into API response dicts.
CORE_ROUTER_ATTRS = ('id', 'name', 'tenant_id', 'admin_state_up', 'status')
class RouterPort(model_base.BASEV2):
    """Association table linking a router to each port attached to it.

    One row exists per (router, port) pair; CASCADE on both foreign keys
    removes the association when either side is deleted.
    """
    router_id = sa.Column(
        sa.String(36),
        sa.ForeignKey('routers.id', ondelete="CASCADE"),
        primary_key=True)
    port_id = sa.Column(
        sa.String(36),
        sa.ForeignKey('ports.id', ondelete="CASCADE"),
        primary_key=True)
    # The port_type attribute is redundant as the port table already specifies
    # it in DEVICE_OWNER.However, this redundancy enables more efficient
    # queries on router ports, and also prevents potential error-prone
    # conditions which might originate from users altering the DEVICE_OWNER
    # property of router ports.
    port_type = sa.Column(sa.String(255))
    port = orm.relationship(
        models_v2.Port,
        backref=orm.backref('routerport', uselist=False, cascade="all,delete"),
        lazy='joined')
class Router(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant):
    """Represents a v2 neutron router."""
    name = sa.Column(sa.String(255))
    status = sa.Column(sa.String(16))
    admin_state_up = sa.Column(sa.Boolean)
    # External gateway port, if one is set; eagerly joined-loaded.
    gw_port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id'))
    gw_port = orm.relationship(models_v2.Port, lazy='joined')
    # All RouterPort associations (gateway and internal interfaces).
    # lazy='dynamic' exposes a query object so callers can filter or
    # call .all() themselves.
    attached_ports = orm.relationship(
        RouterPort,
        backref='router',
        lazy='dynamic')
class FloatingIP(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant):
    """Represents a floating IP address.

    This IP address may or may not be allocated to a tenant, and may or
    may not be associated with an internal port/ip address/router.
    """
    floating_ip_address = sa.Column(sa.String(64), nullable=False)
    floating_network_id = sa.Column(sa.String(36), nullable=False)
    # The external-network port that carries the floating IP itself;
    # deleting that port removes the floating IP row (CASCADE).
    floating_port_id = sa.Column(sa.String(36),
                                 sa.ForeignKey('ports.id', ondelete="CASCADE"),
                                 nullable=False)
    # Internal port / fixed address this floating IP currently maps to
    # (both nullable: a floating IP can be unassociated).
    fixed_port_id = sa.Column(sa.String(36), sa.ForeignKey('ports.id'))
    fixed_ip_address = sa.Column(sa.String(64))
    router_id = sa.Column(sa.String(36), sa.ForeignKey('routers.id'))
    # Additional attribute for keeping track of the router where the floating
    # ip was associated in order to be able to ensure consistency even if an
    # aysnchronous backend is unavailable when the floating IP is disassociated
    last_known_router_id = sa.Column(sa.String(36))
    status = sa.Column(sa.String(16))
    router = orm.relationship(Router, backref='floating_ips')
class L3_NAT_dbonly_mixin(l3.RouterPluginBase):
"""Mixin class to add L3/NAT router methods to db_base_plugin_v2."""
router_device_owners = (
DEVICE_OWNER_ROUTER_INTF,
DEVICE_OWNER_ROUTER_GW,
DEVICE_OWNER_FLOATINGIP
)
@property
def _core_plugin(self):
    # Lazily resolve the core plugin through the NeutronManager singleton
    # (avoids import-time plugin-loading ordering problems).
    return manager.NeutronManager.get_plugin()
def _get_router(self, context, router_id):
    """Fetch the Router DB object for *router_id*.

    Translates the ORM-level "no row" condition into the API-level
    RouterNotFound exception that callers expect.
    """
    try:
        return self._get_by_id(context, Router, router_id)
    except exc.NoResultFound:
        raise l3.RouterNotFound(router_id=router_id)
def _make_router_dict(self, router, fields=None, process_extensions=True):
    """Convert a Router DB object into an API response dict.

    Copies the core attributes, builds the external_gateway_info view of
    the gateway port (or None when no gateway is set), optionally runs
    registered dict-extend functions, and finally applies field filtering.
    """
    res = {attr: router[attr] for attr in CORE_ROUTER_ATTRS}
    gw_port_id = router['gw_port_id']
    if gw_port_id:
        gw_port = router.gw_port
        external_fixed_ips = [{'subnet_id': ip["subnet_id"],
                               'ip_address': ip["ip_address"]}
                              for ip in gw_port['fixed_ips']]
        ext_gw_info = {'network_id': gw_port['network_id'],
                       'external_fixed_ips': external_fixed_ips}
    else:
        ext_gw_info = None
    res[EXTERNAL_GW_INFO] = ext_gw_info
    res['gw_port_id'] = gw_port_id
    # NOTE(salv-orlando): The following assumes this mixin is used in a
    # class inheriting from CommonDbMixin, which is true for all existing
    # plugins.
    if process_extensions:
        self._apply_dict_extend_functions(l3.ROUTERS, res, router)
    return self._fields(res, fields)
def _create_router_db(self, context, router, tenant_id):
    """Create the DB object.

    Only the router row is created here; gateway-port handling is done
    by the caller (create_router). Honors a caller-supplied 'id' so the
    id is known before the external gw port is configured.
    """
    with context.session.begin(subtransactions=True):
        # pre-generate id so it will be available when
        # configuring external gw port
        router_db = Router(id=(router.get('id') or
                               uuidutils.generate_uuid()),
                           tenant_id=tenant_id,
                           name=router['name'],
                           admin_state_up=router['admin_state_up'],
                           status="ACTIVE")
        context.session.add(router_db)
        return router_db
def create_router(self, context, router):
    """Create a router and, when requested, attach its external gateway.

    :param router: API body of the form {'router': {...}}; an
        EXTERNAL_GW_INFO key, if present, is popped out and applied via
        _update_router_gw_info inside the same transaction.
    :returns: the new router as an API dict.
    """
    r = router['router']
    gw_info = r.pop(EXTERNAL_GW_INFO, None)
    tenant_id = self._get_tenant_id_for_create(context, r)
    with context.session.begin(subtransactions=True):
        router_db = self._create_router_db(context, r, tenant_id)
        if gw_info:
            self._update_router_gw_info(context, router_db['id'],
                                        gw_info, router=router_db)
    return self._make_router_dict(router_db)
def _update_router_db(self, context, router_id, data, gw_info):
    """Update the DB object.

    NOTE(review): gw_info is accepted but not used in this base
    implementation — presumably kept so subclasses can override with the
    same signature; confirm before removing.
    """
    with context.session.begin(subtransactions=True):
        router_db = self._get_router(context, router_id)
        if data:
            router_db.update(data)
        return router_db
def update_router(self, context, id, router):
    """Update router attributes and (optionally) its external gateway.

    When the request includes external gateway info, the router may need
    to move to a different l3 agent; candidate agents are computed before
    any DB change so an impossible move fails early.
    """
    r = router['router']
    gw_info = r.pop(EXTERNAL_GW_INFO, attributes.ATTR_NOT_SPECIFIED)
    # check whether router needs and can be rescheduled to the proper
    # l3 agent (associated with given external network);
    # do check before update in DB as an exception will be raised
    # in case no proper l3 agent found
    if gw_info != attributes.ATTR_NOT_SPECIFIED:
        candidates = self._check_router_needs_rescheduling(
            context, id, gw_info)
        # Update the gateway outside of the DB update since it involves L2
        # calls that don't make sense to rollback and may cause deadlocks
        # in a transaction.
        self._update_router_gw_info(context, id, gw_info)
    else:
        candidates = None
    router_db = self._update_router_db(context, id, r, gw_info)
    if candidates:
        l3_plugin = manager.NeutronManager.get_service_plugins().get(
            constants.L3_ROUTER_NAT)
        l3_plugin.reschedule_router(context, id, candidates)
    return self._make_router_dict(router_db)
def _check_router_needs_rescheduling(self, context, router_id, gw_info):
    """Checks whether router's l3 agent can handle the given network

    When external_network_bridge is set, each L3 agent can be associated
    with at most one external network. If router's new external gateway
    is on other network then the router needs to be rescheduled to the
    proper l3 agent.
    If external_network_bridge is not set then the agent
    can support multiple external networks and rescheduling is not needed

    :return: list of candidate agents if rescheduling needed,
    None otherwise; raises exception if there is no eligible l3 agent
    associated with target external network
    """
    # TODO(obondarev): rethink placement of this func as l3 db manager is
    # not really a proper place for agent scheduling stuff
    network_id = gw_info.get('network_id') if gw_info else None
    if not network_id:
        # Gateway is being cleared; nothing to reschedule.
        return
    nets = self._core_plugin.get_networks(
        context, {external_net.EXTERNAL: [True]})
    # nothing to do if there is only one external network
    if len(nets) <= 1:
        return
    # first get plugin supporting l3 agent scheduling
    # (either l3 service plugin or core_plugin)
    l3_plugin = manager.NeutronManager.get_service_plugins().get(
        constants.L3_ROUTER_NAT)
    if (not utils.is_extension_supported(
            l3_plugin,
            l3_constants.L3_AGENT_SCHEDULER_EXT_ALIAS) or
        l3_plugin.router_scheduler is None):
        # that might mean that we are dealing with non-agent-based
        # implementation of l3 services
        return
    cur_agents = l3_plugin.list_l3_agents_hosting_router(
        context, router_id)['agents']
    for agent in cur_agents:
        ext_net_id = agent['configurations'].get(
            'gateway_external_network_id')
        ext_bridge = agent['configurations'].get(
            'external_network_bridge', 'br-ex')
        # A current agent can keep the router if it is either pinned to
        # the target network or not pinned to any network/bridge at all.
        if (ext_net_id == network_id or
                (not ext_net_id and not ext_bridge)):
            return
    # otherwise find l3 agent with matching gateway_external_network_id
    active_agents = l3_plugin.get_l3_agents(context, active=True)
    router = {
        'id': router_id,
        'external_gateway_info': {'network_id': network_id}
    }
    candidates = l3_plugin.get_l3_agent_candidates(context,
                                                   router,
                                                   active_agents)
    if not candidates:
        msg = (_('No eligible l3 agent associated with external network '
                 '%s found') % network_id)
        raise n_exc.BadRequest(resource='router', msg=msg)
    return candidates
def _create_router_gw_port(self, context, router, network_id, ext_ips):
    """Create the external gateway port and link it to the router.

    The port is created with an elevated (admin) context and then
    recorded both on router.gw_port and as a RouterPort association.
    """
    # Port has no 'tenant-id', as it is hidden from user
    gw_port = self._core_plugin.create_port(context.elevated(), {
        'port': {'tenant_id': '',  # intentionally not set
                 'network_id': network_id,
                 'mac_address': attributes.ATTR_NOT_SPECIFIED,
                 'fixed_ips': ext_ips or attributes.ATTR_NOT_SPECIFIED,
                 'device_id': router['id'],
                 'device_owner': DEVICE_OWNER_ROUTER_GW,
                 'admin_state_up': True,
                 'name': ''}})
    if not gw_port['fixed_ips']:
        # Not fatal: a gateway port without an IP is allowed, just logged.
        LOG.debug('No IPs available for external network %s',
                  network_id)
    with context.session.begin(subtransactions=True):
        router.gw_port = self._core_plugin._get_port(context.elevated(),
                                                     gw_port['id'])
        router_port = RouterPort(
            router_id=router.id,
            port_id=gw_port['id'],
            port_type=DEVICE_OWNER_ROUTER_GW
        )
        context.session.add(router)
        context.session.add(router_port)
def _validate_gw_info(self, context, gw_port, info, ext_ips):
    """Validate requested gateway info and return the target network id.

    Checks that the target network is marked external and that none of
    the requested fixed IPs collides with a subnet's gateway IP.

    :returns: the validated network id, or None when info is empty.
    :raises: n_exc.BadRequest on validation failure.
    """
    network_id = info['network_id'] if info else None
    if network_id:
        network_db = self._core_plugin._get_network(context, network_id)
        if not network_db.external:
            msg = _("Network %s is not an external network") % network_id
            raise n_exc.BadRequest(resource='router', msg=msg)
        if ext_ips:
            subnets = self._core_plugin._get_subnets_by_network(context,
                                                                network_id)
            for s in subnets:
                if not s['gateway_ip']:
                    continue
                for ext_ip in ext_ips:
                    # Using the subnet's own gateway IP as a router
                    # external IP would shadow the upstream gateway.
                    if ext_ip.get('ip_address') == s['gateway_ip']:
                        msg = _("External IP %s is the same as the "
                                "gateway IP") % ext_ip.get('ip_address')
                        raise n_exc.BadRequest(resource='router', msg=msg)
    return network_id
def _delete_current_gw_port(self, context, router_id, router, new_network):
    """Delete gw port if attached to an old network.

    No-op when there is no gateway port or it already sits on
    new_network. Refuses to delete while floating IPs still use this
    router. The DB unlink happens inside a transaction; the actual port
    deletion is done afterwards (it triggers L2 operations that should
    not run inside the transaction).
    """
    port_requires_deletion = (
        router.gw_port and router.gw_port['network_id'] != new_network)
    if not port_requires_deletion:
        return
    admin_ctx = context.elevated()
    if self.get_floatingips_count(
            admin_ctx, {'router_id': [router_id]}):
        raise l3.RouterExternalGatewayInUseByFloatingIp(
            router_id=router_id, net_id=router.gw_port['network_id'])
    with context.session.begin(subtransactions=True):
        gw_port = router.gw_port
        router.gw_port = None
        context.session.add(router)
        context.session.expire(gw_port)
        self._check_router_gw_port_in_use(context, router_id)
    self._core_plugin.delete_port(
        admin_ctx, gw_port['id'], l3_port_check=False)
def _check_router_gw_port_in_use(self, context, router_id):
    """Notify subscribers the gateway is about to be removed; veto on error.

    Any callback registered for ROUTER_GATEWAY BEFORE_DELETE can raise to
    block the removal; a single callback error is re-raised as-is,
    multiple errors collapse into RouterInUse.
    """
    try:
        kwargs = {'context': context, 'router_id': router_id}
        registry.notify(
            resources.ROUTER_GATEWAY, events.BEFORE_DELETE, self, **kwargs)
    except exceptions.CallbackFailure as e:
        with excutils.save_and_reraise_exception():
            # NOTE(armax): preserve old check's behavior
            if len(e.errors) == 1:
                raise e.errors[0].error
            raise l3.RouterInUse(router_id=router_id, reason=e)
def _create_gw_port(self, context, router_id, router, new_network,
                    ext_ips):
    """Create a gateway port on new_network if one is needed.

    Skips creation when new_network is falsy or the router already has a
    gateway on that network. Rejects networks whose subnets overlap with
    subnets already attached to the router.
    """
    new_valid_gw_port_attachment = (
        new_network and (not router.gw_port or
                         router.gw_port['network_id'] != new_network))
    if new_valid_gw_port_attachment:
        subnets = self._core_plugin._get_subnets_by_network(context,
                                                            new_network)
        for subnet in subnets:
            self._check_for_dup_router_subnet(context, router,
                                              new_network, subnet['id'],
                                              subnet['cidr'])
        self._create_router_gw_port(context, router, new_network, ext_ips)
def _update_current_gw_port(self, context, router_id, router, ext_ips):
    """Replace the fixed IPs on the existing gateway port in place.

    Expires the cached gw_port so the router object re-reads the
    updated fixed_ips from the DB on next access.
    """
    self._core_plugin.update_port(context, router.gw_port['id'], {'port':
                                  {'fixed_ips': ext_ips}})
    context.session.expire(router.gw_port)
def _update_router_gw_info(self, context, router_id, info, router=None):
    """Apply requested gateway info: update IPs in place, or swap ports.

    If the gateway stays on the same network and only the external fixed
    IPs changed, the existing port is updated; otherwise the old gateway
    port (if any) is removed and a new one created on the target network.
    """
    # TODO(salvatore-orlando): guarantee atomic behavior also across
    # operations that span beyond the model classes handled by this
    # class (e.g.: delete_port)
    router = router or self._get_router(context, router_id)
    gw_port = router.gw_port
    ext_ips = info.get('external_fixed_ips') if info else []
    ext_ip_change = self._check_for_external_ip_change(
        context, gw_port, ext_ips)
    network_id = self._validate_gw_info(context, gw_port, info, ext_ips)
    if gw_port and ext_ip_change and gw_port['network_id'] == network_id:
        self._update_current_gw_port(context, router_id, router,
                                     ext_ips)
    else:
        self._delete_current_gw_port(context, router_id, router,
                                     network_id)
        self._create_gw_port(context, router_id, router, network_id,
                             ext_ips)
def _check_for_external_ip_change(self, context, gw_port, ext_ips):
# determine if new external IPs differ from the existing fixed_ips
if not ext_ips:
# no external_fixed_ips were included
return False
if not gw_port:
return True
subnet_ids = set(ip['subnet_id'] for ip in gw_port['fixed_ips'])
new_subnet_ids = set(f['subnet_id'] for f in ext_ips
if f.get('subnet_id'))
subnet_change = not new_subnet_ids == subnet_ids
if subnet_change:
return True
ip_addresses = set(ip['ip_address'] for ip in gw_port['fixed_ips'])
new_ip_addresses = set(f['ip_address'] for f in ext_ips
if f.get('ip_address'))
ip_address_change = not ip_addresses == new_ip_addresses
return ip_address_change
def _ensure_router_not_in_use(self, context, router_id):
    """Ensure that no internal network interface is attached
    to the router.

    :returns: the Router DB object when no interface port is attached.
    :raises: l3.RouterInUse if any attached port has the internal
        interface device_owner.
    """
    router = self._get_router(context, router_id)
    device_owner = self._get_device_owner(context, router)
    if any(rp.port_type == device_owner
           for rp in router.attached_ports.all()):
        raise l3.RouterInUse(router_id=router_id)
    return router
def delete_router(self, context, id):
    """Delete a router after detaching its gateway and remaining ports.

    Fails with RouterInUse while internal interfaces are still attached;
    remaining (non-interface) ports are force-deleted with the admin
    context before the router row itself is removed.
    """
    #TODO(nati) Refactor here when we have router insertion model
    router = self._ensure_router_not_in_use(context, id)
    self._delete_current_gw_port(context, id, router, None)
    router_ports = router.attached_ports.all()
    for rp in router_ports:
        self._core_plugin.delete_port(context.elevated(),
                                      rp.port.id,
                                      l3_port_check=False)
    with context.session.begin(subtransactions=True):
        context.session.delete(router)
def get_router(self, context, id, fields=None):
    """Return one router as an API dict, optionally field-filtered."""
    return self._make_router_dict(self._get_router(context, id), fields)
def get_routers(self, context, filters=None, fields=None,
                sorts=None, limit=None, marker=None,
                page_reverse=False):
    """List routers as API dicts with filtering, sorting and paging."""
    # Resolve the pagination marker id into its DB object first.
    marker_obj = self._get_marker_obj(context, 'router', limit, marker)
    return self._get_collection(context, Router,
                                self._make_router_dict,
                                filters=filters, fields=fields,
                                sorts=sorts,
                                limit=limit,
                                marker_obj=marker_obj,
                                page_reverse=page_reverse)
def get_routers_count(self, context, filters=None):
    """Return the number of routers matching the given filters."""
    return self._get_collection_count(context, Router,
                                      filters=filters)
def _check_for_dup_router_subnet(self, context, router,
                                 network_id, subnet_id, subnet_cidr):
    """Reject attaching a subnet the router already serves or overlaps.

    Raises BadRequest if the router already has a port on subnet_id, or
    if subnet_cidr overlaps (in either direction) with the CIDR of any
    subnet already attached to the router.

    NOTE(review): the broad try/except around the whole loop swallows
    NoResultFound from the subnet lookup as "no conflict" — presumably
    for concurrently-deleted subnets; confirm before tightening.
    """
    try:
        # It's possible these ports are on the same network, but
        # different subnets.
        new_ipnet = netaddr.IPNetwork(subnet_cidr)
        for p in (rp.port for rp in router.attached_ports):
            for ip in p['fixed_ips']:
                if ip['subnet_id'] == subnet_id:
                    msg = (_("Router already has a port on subnet %s")
                           % subnet_id)
                    raise n_exc.BadRequest(resource='router', msg=msg)
                sub_id = ip['subnet_id']
                cidr = self._core_plugin._get_subnet(context.elevated(),
                                                     sub_id)['cidr']
                ipnet = netaddr.IPNetwork(cidr)
                # Overlap must be checked both ways since either CIDR may
                # contain the other.
                match1 = netaddr.all_matching_cidrs(new_ipnet, [cidr])
                match2 = netaddr.all_matching_cidrs(ipnet, [subnet_cidr])
                if match1 or match2:
                    data = {'subnet_cidr': subnet_cidr,
                            'subnet_id': subnet_id,
                            'cidr': cidr,
                            'sub_id': sub_id}
                    msg = (_("Cidr %(subnet_cidr)s of subnet "
                             "%(subnet_id)s overlaps with cidr %(cidr)s "
                             "of subnet %(sub_id)s") % data)
                    raise n_exc.BadRequest(resource='router', msg=msg)
    except exc.NoResultFound:
        pass
def _get_device_owner(self, context, router=None):
    """Get device_owner for the specified router.

    Base implementation ignores the router argument; subclasses may use
    it to return a different owner (e.g. for distributed routers).
    """
    # NOTE(armando-migliaccio): in the base case this is invariant
    return DEVICE_OWNER_ROUTER_INTF
def _validate_interface_info(self, interface_info, for_removal=False):
port_id_specified = interface_info and 'port_id' in interface_info
subnet_id_specified = interface_info and 'subnet_id' in interface_info
if not (port_id_specified or subnet_id_specified):
msg = _("Either subnet_id or port_id must be specified")
raise n_exc.BadRequest(resource='router', msg=msg)
if not for_removal:
if port_id_specified and subnet_id_specified:
msg = _("Cannot specify both subnet-id and port-id")
raise n_exc.BadRequest(resource='router', msg=msg)
return port_id_specified, subnet_id_specified
def _add_interface_by_port(self, context, router, port_id, owner):
    """Attach an existing port to the router as an internal interface.

    Validates that the port is unused, carries at least one fixed IP,
    and respects the one-IPv6-port-per-network and single-IPv4-subnet
    rules, then stamps device_id/device_owner onto the port.

    :returns: (port DB object, list of subnet dicts on the port)
    :raises: n_exc.PortInUse, n_exc.BadRequest
    """
    with context.session.begin(subtransactions=True):
        port = self._core_plugin._get_port(context, port_id)
        if port['device_id']:
            raise n_exc.PortInUse(net_id=port['network_id'],
                                  port_id=port['id'],
                                  device_id=port['device_id'])
        if not port['fixed_ips']:
            # Fix: user-facing exception text must use the _() translation
            # marker, not _LE() (which is the oslo.i18n marker reserved
            # for LOG.error messages) — consistent with every other
            # BadRequest message in this module.
            msg = _('Router port must have at least one fixed IP')
            raise n_exc.BadRequest(resource='router', msg=msg)
        # Only allow one router port with IPv6 subnets per network id
        if self._port_has_ipv6_address(port):
            for existing_port in (rp.port for rp in router.attached_ports):
                if (existing_port['network_id'] == port['network_id'] and
                        self._port_has_ipv6_address(existing_port)):
                    msg = _("Cannot have multiple router ports with the "
                            "same network id if both contain IPv6 "
                            "subnets. Existing port %(p)s has IPv6 "
                            "subnet(s) and network id %(nid)s")
                    raise n_exc.BadRequest(resource='router', msg=msg % {
                        'p': existing_port['id'],
                        'nid': existing_port['network_id']})
        # Plain copy; list() is clearer than a pass-through comprehension.
        fixed_ips = list(port['fixed_ips'])
        subnets = []
        for fixed_ip in fixed_ips:
            subnet = self._core_plugin._get_subnet(context,
                                                   fixed_ip['subnet_id'])
            subnets.append(subnet)
            self._check_for_dup_router_subnet(context, router,
                                              port['network_id'],
                                              subnet['id'],
                                              subnet['cidr'])
        # Keep the restriction against multiple IPv4 subnets
        if len([s for s in subnets if s['ip_version'] == 4]) > 1:
            # Fix: same _LE() -> _() correction as above.
            msg = _("Cannot have multiple "
                    "IPv4 subnets on router port")
            raise n_exc.BadRequest(resource='router', msg=msg)
        port.update({'device_id': router.id, 'device_owner': owner})
        return port, subnets
def _port_has_ipv6_address(self, port):
    """Return True when any fixed IP on the port is an IPv6 address.

    Falls through (returning None) when no IPv6 address is found, which
    is falsy for all callers.
    """
    addresses = (fip['ip_address'] for fip in port['fixed_ips'])
    for address in addresses:
        if netaddr.IPNetwork(address).version == 6:
            return True
def _find_ipv6_router_port_by_network(self, router, net_id):
    """Return the router's RouterPort on net_id that already carries an
    IPv6 address, or None when no such port exists."""
    for router_port in router.attached_ports:
        candidate = router_port['port']
        if (candidate['network_id'] == net_id and
                self._port_has_ipv6_address(candidate)):
            return router_port
def _add_interface_by_subnet(self, context, router, subnet_id, owner):
    """Attach a subnet to the router, creating or reusing a port.

    The new interface uses the subnet's gateway IP. For IPv6, if the
    router already has a port on the same network the prefix is appended
    to that port instead of creating a new one.

    :returns: (port, [subnet], new_port) where new_port is False when an
        existing IPv6 port was updated and True when a port was created.
    :raises: n_exc.BadRequest for subnets without a gateway IP or IPv6
        subnets configured for external RAs.
    """
    subnet = self._core_plugin._get_subnet(context, subnet_id)
    if not subnet['gateway_ip']:
        msg = _('Subnet for router interface must have a gateway IP')
        raise n_exc.BadRequest(resource='router', msg=msg)
    if (subnet['ip_version'] == 6 and subnet['ipv6_ra_mode'] is None
            and subnet['ipv6_address_mode'] is not None):
        msg = (_('IPv6 subnet %s configured to receive RAs from an '
                 'external router cannot be added to Neutron Router.') %
               subnet['id'])
        raise n_exc.BadRequest(resource='router', msg=msg)
    self._check_for_dup_router_subnet(context, router,
                                      subnet['network_id'],
                                      subnet_id,
                                      subnet['cidr'])
    fixed_ip = {'ip_address': subnet['gateway_ip'],
                'subnet_id': subnet['id']}
    if subnet['ip_version'] == 6:
        # Add new prefix to an existing ipv6 port with the same network id
        # if one exists
        port = self._find_ipv6_router_port_by_network(router,
                                                      subnet['network_id'])
        if port:
            fixed_ips = list(port['port']['fixed_ips'])
            fixed_ips.append(fixed_ip)
            return self._core_plugin.update_port(context,
                    port['port_id'], {'port':
                        {'fixed_ips': fixed_ips}}), [subnet], False
    return self._core_plugin.create_port(context, {
        'port':
        {'tenant_id': subnet['tenant_id'],
         'network_id': subnet['network_id'],
         'fixed_ips': [fixed_ip],
         'mac_address': attributes.ATTR_NOT_SPECIFIED,
         'admin_state_up': True,
         'device_id': router.id,
         'device_owner': owner,
         'name': ''}}), [subnet], True
@staticmethod
def _make_router_interface_info(
router_id, tenant_id, port_id, subnet_id, subnet_ids):
return {
'id': router_id,
'tenant_id': tenant_id,
'port_id': port_id,
'subnet_id': subnet_id, # deprecated by IPv6 multi-prefix
'subnet_ids': subnet_ids
}
def add_router_interface(self, context, router_id, interface_info):
    """Attach an interface (by port or by subnet) to a router.

    NOTE(review): _get_device_owner is called with router_id although its
    parameter is named 'router'; the base implementation ignores the
    argument, but confirm against subclasses that use it.
    """
    router = self._get_router(context, router_id)
    add_by_port, add_by_sub = self._validate_interface_info(interface_info)
    device_owner = self._get_device_owner(context, router_id)
    # This should be True unless adding an IPv6 prefix to an existing port
    new_port = True
    if add_by_port:
        port, subnets = self._add_interface_by_port(
            context, router, interface_info['port_id'], device_owner)
    # add_by_subnet is not used here, because the validation logic of
    # _validate_interface_info ensures that either of add_by_* is True.
    else:
        port, subnets, new_port = self._add_interface_by_subnet(
            context, router, interface_info['subnet_id'], device_owner)
    if new_port:
        # Record the association only for freshly created/attached ports;
        # an updated IPv6 port already has its RouterPort row.
        with context.session.begin(subtransactions=True):
            router_port = RouterPort(
                port_id=port['id'],
                router_id=router.id,
                port_type=device_owner
            )
            context.session.add(router_port)
    return self._make_router_interface_info(
        router.id, port['tenant_id'], port['id'], subnets[-1]['id'],
        [subnet['id'] for subnet in subnets])
def _confirm_router_interface_not_in_use(self, context, router_id,
                                         subnet_id):
    """Veto interface removal when callbacks object or floating IPs
    on this router still map to fixed IPs inside the subnet.

    :raises: the callback's own error (single failure), l3.RouterInUse
        (multiple failures), or RouterInterfaceInUseByFloatingIP.
    """
    subnet_db = self._core_plugin._get_subnet(context, subnet_id)
    subnet_cidr = netaddr.IPNetwork(subnet_db['cidr'])
    fip_qry = context.session.query(FloatingIP)
    try:
        kwargs = {'context': context, 'subnet_id': subnet_id}
        registry.notify(
            resources.ROUTER_INTERFACE,
            events.BEFORE_DELETE, self, **kwargs)
    except exceptions.CallbackFailure as e:
        with excutils.save_and_reraise_exception():
            # NOTE(armax): preserve old check's behavior
            if len(e.errors) == 1:
                raise e.errors[0].error
            raise l3.RouterInUse(router_id=router_id, reason=e)
    for fip_db in fip_qry.filter_by(router_id=router_id):
        if netaddr.IPAddress(fip_db['fixed_ip_address']) in subnet_cidr:
            raise l3.RouterInterfaceInUseByFloatingIP(
                router_id=router_id, subnet_id=subnet_id)
    def _remove_interface_by_port(self, context, router_id,
                                  port_id, subnet_id, owner):
        """Detach a router interface identified by its port.

        Returns (port_db, subnets) for the deleted interface port.
        """
        qry = context.session.query(RouterPort)
        qry = qry.filter_by(
            port_id=port_id,
            router_id=router_id,
            port_type=owner
        )
        try:
            port_db = qry.one().port
        except exc.NoResultFound:
            raise l3.RouterInterfaceNotFound(router_id=router_id,
                                             port_id=port_id)
        port_subnet_ids = [fixed_ip['subnet_id']
                           for fixed_ip in port_db['fixed_ips']]
        # When a subnet is also given it must actually be on the port.
        if subnet_id and subnet_id not in port_subnet_ids:
            raise n_exc.SubnetMismatchForPort(
                port_id=port_id, subnet_id=subnet_id)
        subnets = [self._core_plugin._get_subnet(context, port_subnet_id)
                   for port_subnet_id in port_subnet_ids]
        # Every subnet on the port must be free of floating IP usage.
        for port_subnet_id in port_subnet_ids:
            self._confirm_router_interface_not_in_use(
                context, router_id, port_subnet_id)
        self._core_plugin.delete_port(context, port_db['id'],
                                      l3_port_check=False)
        return (port_db, subnets)
    def _remove_interface_by_subnet(self, context,
                                    router_id, subnet_id, owner):
        """Detach the router interface that carries the given subnet.

        A multi-prefix port only loses the subnet's fixed IP; a
        single-prefix port is deleted.  Returns (port, [subnet]).
        """
        self._confirm_router_interface_not_in_use(
            context, router_id, subnet_id)
        subnet = self._core_plugin._get_subnet(context, subnet_id)
        try:
            rport_qry = context.session.query(models_v2.Port).join(RouterPort)
            ports = rport_qry.filter(
                RouterPort.router_id == router_id,
                RouterPort.port_type == owner,
                models_v2.Port.network_id == subnet['network_id']
            )
            for p in ports:
                port_subnets = [fip['subnet_id'] for fip in p['fixed_ips']]
                if subnet_id in port_subnets and len(port_subnets) > 1:
                    # multiple prefix port - delete prefix from port
                    fixed_ips = [fip for fip in p['fixed_ips'] if
                                 fip['subnet_id'] != subnet_id]
                    self._core_plugin.update_port(context, p['id'],
                                                  {'port':
                                                      {'fixed_ips': fixed_ips}})
                    return (p, [subnet])
                elif subnet_id in port_subnets:
                    # only one subnet on port - delete the port
                    self._core_plugin.delete_port(context, p['id'],
                                                  l3_port_check=False)
                    return (p, [subnet])
        except exc.NoResultFound:
            pass
        raise l3.RouterInterfaceNotFoundForSubnet(router_id=router_id,
                                                  subnet_id=subnet_id)
    def remove_router_interface(self, context, router_id, interface_info):
        """Detach an interface (by port or by subnet) from a router."""
        remove_by_port, remove_by_subnet = (
            self._validate_interface_info(interface_info, for_removal=True)
        )
        port_id = interface_info.get('port_id')
        subnet_id = interface_info.get('subnet_id')
        device_owner = self._get_device_owner(context, router_id)
        if remove_by_port:
            port, subnets = self._remove_interface_by_port(context, router_id,
                                                           port_id, subnet_id,
                                                           device_owner)
        # remove_by_subnet is not used here, because the validation logic of
        # _validate_interface_info ensures that at least one of remove_by_*
        # is True.
        else:
            port, subnets = self._remove_interface_by_subnet(
                context, router_id, subnet_id, device_owner)
        return self._make_router_interface_info(router_id, port['tenant_id'],
                                                port['id'], subnets[0]['id'],
                                                [subnet['id'] for subnet in
                                                 subnets])
def _get_floatingip(self, context, id):
try:
floatingip = self._get_by_id(context, FloatingIP, id)
except exc.NoResultFound:
raise l3.FloatingIPNotFound(floatingip_id=id)
return floatingip
def _make_floatingip_dict(self, floatingip, fields=None):
res = {'id': floatingip['id'],
'tenant_id': floatingip['tenant_id'],
'floating_ip_address': floatingip['floating_ip_address'],
'floating_network_id': floatingip['floating_network_id'],
'router_id': floatingip['router_id'],
'port_id': floatingip['fixed_port_id'],
'fixed_ip_address': floatingip['fixed_ip_address'],
'status': floatingip['status']}
return self._fields(res, fields)
def _get_interface_ports_for_network(self, context, network_id):
router_intf_qry = context.session.query(RouterPort)
router_intf_qry = router_intf_qry.join(models_v2.Port)
return router_intf_qry.filter(
models_v2.Port.network_id == network_id,
RouterPort.port_type == DEVICE_OWNER_ROUTER_INTF
)
    def _get_router_for_floatingip(self, context, internal_port,
                                   internal_subnet_id,
                                   external_network_id):
        """Find a router that can reach both the port and the ext network.

        The router must have an interface on the port's subnet and a
        gateway port on external_network_id; otherwise
        ExternalGatewayForFloatingIPNotFound is raised.
        """
        subnet_db = self._core_plugin._get_subnet(context,
                                                  internal_subnet_id)
        if not subnet_db['gateway_ip']:
            msg = (_('Cannot add floating IP to port on subnet %s '
                     'which has no gateway_ip') % internal_subnet_id)
            raise n_exc.BadRequest(resource='floatingip', msg=msg)
        router_intf_ports = self._get_interface_ports_for_network(
            context, internal_port['network_id'])
        # This joins on port_id so is not a cross-join
        routerport_qry = router_intf_ports.join(models_v2.IPAllocation)
        routerport_qry = routerport_qry.filter(
            models_v2.IPAllocation.subnet_id == internal_subnet_id
        )
        for router_port in routerport_qry:
            router_id = router_port.router.id
            router_gw_qry = context.session.query(models_v2.Port)
            has_gw_port = router_gw_qry.filter_by(
                network_id=external_network_id,
                device_id=router_id,
                device_owner=DEVICE_OWNER_ROUTER_GW).count()
            if has_gw_port:
                return router_id
        raise l3.ExternalGatewayForFloatingIPNotFound(
            subnet_id=internal_subnet_id,
            external_network_id=external_network_id,
            port_id=internal_port['id'])
def _port_ipv4_fixed_ips(self, port):
return [ip for ip in port['fixed_ips']
if netaddr.IPAddress(ip['ip_address']).version == 4]
def _internal_fip_assoc_data(self, context, fip):
"""Retrieve internal port data for floating IP.
Retrieve information concerning the internal port where
the floating IP should be associated to.
"""
internal_port = self._core_plugin._get_port(context, fip['port_id'])
if not internal_port['tenant_id'] == fip['tenant_id']:
port_id = fip['port_id']
if 'id' in fip:
floatingip_id = fip['id']
data = {'port_id': port_id,
'floatingip_id': floatingip_id}
msg = (_('Port %(port_id)s is associated with a different '
'tenant than Floating IP %(floatingip_id)s and '
'therefore cannot be bound.') % data)
else:
msg = (_('Cannot create floating IP and bind it to '
'Port %s, since that port is owned by a '
'different tenant.') % port_id)
raise n_exc.BadRequest(resource='floatingip', msg=msg)
internal_subnet_id = None
if fip.get('fixed_ip_address'):
internal_ip_address = fip['fixed_ip_address']
if netaddr.IPAddress(internal_ip_address).version != 4:
if 'id' in fip:
data = {'floatingip_id': fip['id'],
'internal_ip': internal_ip_address}
msg = (_('Floating IP %(floatingip_id) is associated '
'with non-IPv4 address %s(internal_ip)s and '
'therefore cannot be bound.') % data)
else:
msg = (_('Cannot create floating IP and bind it to %s, '
'since that is not an IPv4 address.') %
internal_ip_address)
raise n_exc.BadRequest(resource='floatingip', msg=msg)
for ip in internal_port['fixed_ips']:
if ip['ip_address'] == internal_ip_address:
internal_subnet_id = ip['subnet_id']
if not internal_subnet_id:
msg = (_('Port %(id)s does not have fixed ip %(address)s') %
{'id': internal_port['id'],
'address': internal_ip_address})
raise n_exc.BadRequest(resource='floatingip', msg=msg)
else:
ipv4_fixed_ips = self._port_ipv4_fixed_ips(internal_port)
if not ipv4_fixed_ips:
msg = (_('Cannot add floating IP to port %s that has '
'no fixed IPv4 addresses') % internal_port['id'])
raise n_exc.BadRequest(resource='floatingip', msg=msg)
if len(ipv4_fixed_ips) > 1:
msg = (_('Port %s has multiple fixed IPv4 addresses. Must '
'provide a specific IPv4 address when assigning a '
'floating IP') % internal_port['id'])
raise n_exc.BadRequest(resource='floatingip', msg=msg)
internal_ip_address = ipv4_fixed_ips[0]['ip_address']
internal_subnet_id = ipv4_fixed_ips[0]['subnet_id']
return internal_port, internal_subnet_id, internal_ip_address
def get_assoc_data(self, context, fip, floating_network_id):
"""Determine/extract data associated with the internal port.
When a floating IP is associated with an internal port,
we need to extract/determine some data associated with the
internal port, including the internal_ip_address, and router_id.
The confirmation of the internal port whether owned by the tenant who
owns the floating IP will be confirmed by _get_router_for_floatingip.
"""
(internal_port, internal_subnet_id,
internal_ip_address) = self._internal_fip_assoc_data(context, fip)
router_id = self._get_router_for_floatingip(context,
internal_port,
internal_subnet_id,
floating_network_id)
return (fip['port_id'], internal_ip_address, router_id)
    def _check_and_get_fip_assoc(self, context, fip, floatingip_db):
        """Validate a requested association and compute its fields.

        Returns (port_id, internal_ip_address, router_id); all three are
        None when no port_id was supplied (a disassociation).
        """
        port_id = internal_ip_address = router_id = None
        if fip.get('fixed_ip_address') and not fip.get('port_id'):
            msg = _("fixed_ip_address cannot be specified without a port_id")
            raise n_exc.BadRequest(resource='floatingip', msg=msg)
        if fip.get('port_id'):
            port_id, internal_ip_address, router_id = self.get_assoc_data(
                context,
                fip,
                floatingip_db['floating_network_id'])
            # The same internal IP must not already be bound to another
            # floating IP on the same external network.
            fip_qry = context.session.query(FloatingIP)
            try:
                fip_qry.filter_by(
                    fixed_port_id=fip['port_id'],
                    floating_network_id=floatingip_db['floating_network_id'],
                    fixed_ip_address=internal_ip_address).one()
                raise l3.FloatingIPPortAlreadyAssociated(
                    port_id=fip['port_id'],
                    fip_id=floatingip_db['id'],
                    floating_ip_address=floatingip_db['floating_ip_address'],
                    fixed_ip=internal_ip_address,
                    net_id=floatingip_db['floating_network_id'])
            except exc.NoResultFound:
                pass
        return port_id, internal_ip_address, router_id
def _update_fip_assoc(self, context, fip, floatingip_db, external_port):
previous_router_id = floatingip_db.router_id
port_id, internal_ip_address, router_id = (
self._check_and_get_fip_assoc(context, fip, floatingip_db))
floatingip_db.update({'fixed_ip_address': internal_ip_address,
'fixed_port_id': port_id,
'router_id': router_id,
'last_known_router_id': previous_router_id})
def _is_ipv4_network(self, context, net_id):
net = self._core_plugin._get_network(context, net_id)
return any(s.ip_version == 4 for s in net.subnets)
    def create_floatingip(self, context, floatingip,
            initial_status=l3_constants.FLOATINGIP_STATUS_ACTIVE):
        """Create a floating IP backed by a hidden external port."""
        fip = floatingip['floatingip']
        tenant_id = self._get_tenant_id_for_create(context, fip)
        fip_id = uuidutils.generate_uuid()
        f_net_id = fip['floating_network_id']
        if not self._core_plugin._network_is_external(context, f_net_id):
            msg = _("Network %s is not a valid external network") % f_net_id
            raise n_exc.BadRequest(resource='floatingip', msg=msg)
        if not self._is_ipv4_network(context, f_net_id):
            msg = _("Network %s does not contain any IPv4 subnet") % f_net_id
            raise n_exc.BadRequest(resource='floatingip', msg=msg)
        with context.session.begin(subtransactions=True):
            # This external port is never exposed to the tenant.
            # it is used purely for internal system and admin use when
            # managing floating IPs.
            port = {'tenant_id': '', # tenant intentionally not set
                    'network_id': f_net_id,
                    'mac_address': attributes.ATTR_NOT_SPECIFIED,
                    'fixed_ips': attributes.ATTR_NOT_SPECIFIED,
                    'admin_state_up': True,
                    'device_id': fip_id,
                    'device_owner': DEVICE_OWNER_FLOATINGIP,
                    'status': l3_constants.PORT_STATUS_NOTAPPLICABLE,
                    'name': ''}
            # Honor an explicitly requested floating address if present.
            if fip.get('floating_ip_address'):
                port['fixed_ips'] = [
                    {'ip_address': fip['floating_ip_address']}]
            external_port = self._core_plugin.create_port(context.elevated(),
                                                          {'port': port})
            # Ensure IPv4 addresses are allocated on external port
            external_ipv4_ips = self._port_ipv4_fixed_ips(external_port)
            if not external_ipv4_ips:
                raise n_exc.ExternalIpAddressExhausted(net_id=f_net_id)
            floating_fixed_ip = external_ipv4_ips[0]
            floating_ip_address = floating_fixed_ip['ip_address']
            floatingip_db = FloatingIP(
                id=fip_id,
                tenant_id=tenant_id,
                status=initial_status,
                floating_network_id=fip['floating_network_id'],
                floating_ip_address=floating_ip_address,
                floating_port_id=external_port['id'])
            fip['tenant_id'] = tenant_id
            # Update association with internal port
            # and define external IP address
            self._update_fip_assoc(context, fip,
                                   floatingip_db, external_port)
            context.session.add(floatingip_db)
        return self._make_floatingip_dict(floatingip_db)
    def _update_floatingip(self, context, id, floatingip):
        """Apply an update; returns (old_fip_dict, new_fip_dict)."""
        fip = floatingip['floatingip']
        with context.session.begin(subtransactions=True):
            floatingip_db = self._get_floatingip(context, id)
            old_floatingip = self._make_floatingip_dict(floatingip_db)
            # tenant_id and id are required by the association validation.
            fip['tenant_id'] = floatingip_db['tenant_id']
            fip['id'] = id
            fip_port_id = floatingip_db['floating_port_id']
            self._update_fip_assoc(context, fip, floatingip_db,
                                   self._core_plugin.get_port(
                                       context.elevated(), fip_port_id))
        return old_floatingip, self._make_floatingip_dict(floatingip_db)
def _floatingips_to_router_ids(self, floatingips):
return list(set([floatingip['router_id']
for floatingip in floatingips
if floatingip['router_id']]))
def update_floatingip(self, context, id, floatingip):
_old_floatingip, floatingip = self._update_floatingip(
context, id, floatingip)
return floatingip
def update_floatingip_status(self, context, floatingip_id, status):
"""Update operational status for floating IP in neutron DB."""
fip_query = self._model_query(context, FloatingIP).filter(
FloatingIP.id == floatingip_id)
fip_query.update({'status': status}, synchronize_session=False)
    def _delete_floatingip(self, context, id):
        """Delete a floating IP's external port; returns its router_id."""
        floatingip = self._get_floatingip(context, id)
        router_id = floatingip['router_id']
        # Foreign key cascade will take care of the removal of the
        # floating IP record once the port is deleted. We can't start
        # a transaction first to remove it ourselves because the delete_port
        # method will yield in its post-commit activities.
        self._core_plugin.delete_port(context.elevated(),
                                      floatingip['floating_port_id'],
                                      l3_port_check=False)
        return router_id
    def delete_floatingip(self, context, id):
        """Delete a floating IP (the DB record is removed by FK cascade)."""
        self._delete_floatingip(context, id)
def get_floatingip(self, context, id, fields=None):
floatingip = self._get_floatingip(context, id)
return self._make_floatingip_dict(floatingip, fields)
    def get_floatingips(self, context, filters=None, fields=None,
                        sorts=None, limit=None, marker=None,
                        page_reverse=False):
        """List floating IPs, translating API filter names to DB columns."""
        marker_obj = self._get_marker_obj(context, 'floatingip', limit,
                                          marker)
        if filters is not None:
            # Rename API-level filter keys to their DB column names
            # using API_TO_DB_COLUMN_MAP.
            for key, val in six.iteritems(API_TO_DB_COLUMN_MAP):
                if key in filters:
                    filters[val] = filters.pop(key)
        return self._get_collection(context, FloatingIP,
                                    self._make_floatingip_dict,
                                    filters=filters, fields=fields,
                                    sorts=sorts,
                                    limit=limit,
                                    marker_obj=marker_obj,
                                    page_reverse=page_reverse)
def delete_disassociated_floatingips(self, context, network_id):
query = self._model_query(context, FloatingIP)
query = query.filter_by(floating_network_id=network_id,
fixed_port_id=None,
router_id=None)
for fip in query:
self.delete_floatingip(context, fip.id)
    def get_floatingips_count(self, context, filters=None):
        """Return the number of floating IPs matching the filters."""
        return self._get_collection_count(context, FloatingIP,
                                          filters=filters)
    def prevent_l3_port_deletion(self, context, port_id):
        """Checks to make sure a port is allowed to be deleted.

        Raises an exception if this is not the case. This should be called by
        any plugin when the API requests the deletion of a port, since some
        ports for L3 are not intended to be deleted directly via a DELETE
        to /ports, but rather via other API calls that perform the proper
        deletion checks.

        Raises ServicePortInUse for router-owned ports that still have
        fixed IPs.
        """
        try:
            port_db = self._core_plugin._get_port(context, port_id)
        except n_exc.PortNotFound:
            # non-existent ports don't need to be protected from deletion
            return
        if port_db['device_owner'] in self.router_device_owners:
            # Raise port in use only if the port has IP addresses
            # Otherwise it's a stale port that can be removed
            fixed_ips = port_db['fixed_ips']
            if fixed_ips:
                reason = _('has device owner %s') % port_db['device_owner']
                raise n_exc.ServicePortInUse(port_id=port_db['id'],
                                             reason=reason)
            else:
                LOG.debug("Port %(port_id)s has owner %(port_owner)s, but "
                          "no IP address, so it can be deleted",
                          {'port_id': port_db['id'],
                           'port_owner': port_db['device_owner']})
    def disassociate_floatingips(self, context, port_id):
        """Disassociate all floating IPs linked to specific port.

        @param port_id: ID of the port to disassociate floating IPs.
        @return: set of router-ids that require notification updates
        """
        router_ids = set()
        with context.session.begin(subtransactions=True):
            fip_qry = context.session.query(FloatingIP)
            floating_ips = fip_qry.filter_by(fixed_port_id=port_id)
            for floating_ip in floating_ips:
                # Remember the router so callers can notify it, then
                # clear the association fields.
                router_ids.add(floating_ip['router_id'])
                floating_ip.update({'fixed_port_id': None,
                                    'fixed_ip_address': None,
                                    'router_id': None})
        return router_ids
    def _build_routers_list(self, context, routers, gw_ports):
        """Subclasses can override this to add extra gateway info"""
        # Base implementation ignores gw_ports and returns routers as-is.
        return routers
def _make_router_dict_with_gw_port(self, router, fields):
result = self._make_router_dict(router, fields)
if router.get('gw_port'):
result['gw_port'] = self._core_plugin._make_port_dict(
router['gw_port'], None)
return result
    def _get_sync_routers(self, context, router_ids=None, active=None):
        """Query routers and their gw ports for l3 agent.

        Query routers with the router_ids. The gateway ports, if any,
        will be queried too.
        l3 agent has an option to deal with only one router id. In addition,
        when we need to notify the agent the data about only one router
        (when modification of router, its interfaces, gw_port and floatingips),
        we will have router_ids.
        @param router_ids: the list of router ids which we want to query.
                           if it is None, all of routers will be queried.
        @return: a list of dicted routers with dicted gw_port populated if any
        """
        filters = {'id': router_ids} if router_ids else {}
        if active is not None:
            filters['admin_state_up'] = [active]
        router_dicts = self._get_collection(
            context, Router, self._make_router_dict_with_gw_port,
            filters=filters)
        if not router_dicts:
            return []
        # Map gw_port id -> gw_port dict for subclasses that enrich
        # the routers with extra gateway information.
        gw_ports = dict((r['gw_port']['id'], r['gw_port'])
                        for r in router_dicts
                        if r.get('gw_port'))
        return self._build_routers_list(context, router_dicts, gw_ports)
def _get_sync_floating_ips(self, context, router_ids):
"""Query floating_ips that relate to list of router_ids."""
if not router_ids:
return []
return self.get_floatingips(context, {'router_id': router_ids})
def _get_sync_interfaces(self, context, router_ids, device_owners=None):
"""Query router interfaces that relate to list of router_ids."""
device_owners = device_owners or [DEVICE_OWNER_ROUTER_INTF]
if not router_ids:
return []
qry = context.session.query(RouterPort)
qry = qry.filter(
Router.id.in_(router_ids),
RouterPort.port_type.in_(device_owners)
)
interfaces = [self._core_plugin._make_port_dict(rp.port, None)
for rp in qry]
return interfaces
    def _populate_subnets_for_ports(self, context, ports):
        """Populate ports with subnets.

        These ports already have fixed_ips populated.
        Each port gains 'subnets' (the subnets it has an IP on, with its
        fixed_ips entries annotated with 'prefixlen') and 'extra_subnets'
        (the other subnets on the same network).
        """
        if not ports:
            return

        def each_port_having_fixed_ips():
            for port in ports:
                fixed_ips = port.get('fixed_ips', [])
                if not fixed_ips:
                    # Skip ports without IPs, which can occur if a subnet
                    # attached to a router is deleted
                    LOG.info(_LI("Skipping port %s as no IP is configure on "
                                 "it"),
                             port['id'])
                    continue
                yield port

        network_ids = set(p['network_id']
                          for p in each_port_having_fixed_ips())
        filters = {'network_id': [id for id in network_ids]}
        fields = ['id', 'cidr', 'gateway_ip',
                  'network_id', 'ipv6_ra_mode']
        # Fetch every subnet of the involved networks in one query and
        # bucket them per network.
        subnets_by_network = dict((id, []) for id in network_ids)
        for subnet in self._core_plugin.get_subnets(context, filters, fields):
            subnets_by_network[subnet['network_id']].append(subnet)
        for port in each_port_having_fixed_ips():
            port['subnets'] = []
            port['extra_subnets'] = []
            for subnet in subnets_by_network[port['network_id']]:
                # If this subnet is used by the port (has a matching entry
                # in the port's fixed_ips), then add this subnet to the
                # port's subnets list, and populate the fixed_ips entry
                # entry with the subnet's prefix length.
                subnet_info = {'id': subnet['id'],
                               'cidr': subnet['cidr'],
                               'gateway_ip': subnet['gateway_ip'],
                               'ipv6_ra_mode': subnet['ipv6_ra_mode']}
                for fixed_ip in port['fixed_ips']:
                    if fixed_ip['subnet_id'] == subnet['id']:
                        port['subnets'].append(subnet_info)
                        prefixlen = netaddr.IPNetwork(
                            subnet['cidr']).prefixlen
                        fixed_ip['prefixlen'] = prefixlen
                        break
                else:
                    # This subnet is not used by the port.
                    port['extra_subnets'].append(subnet_info)
def _process_floating_ips(self, context, routers_dict, floating_ips):
for floating_ip in floating_ips:
router = routers_dict.get(floating_ip['router_id'])
if router:
router_floatingips = router.get(l3_constants.FLOATINGIP_KEY,
[])
router_floatingips.append(floating_ip)
router[l3_constants.FLOATINGIP_KEY] = router_floatingips
def _process_interfaces(self, routers_dict, interfaces):
for interface in interfaces:
router = routers_dict.get(interface['device_id'])
if router:
router_interfaces = router.get(l3_constants.INTERFACE_KEY, [])
router_interfaces.append(interface)
router[l3_constants.INTERFACE_KEY] = router_interfaces
    def _get_router_info_list(self, context, router_ids=None, active=None,
                              device_owners=None):
        """Query routers and their related floating_ips, interfaces."""
        # Single transaction so the three queries see a consistent snapshot.
        with context.session.begin(subtransactions=True):
            routers = self._get_sync_routers(context,
                                             router_ids=router_ids,
                                             active=active)
            router_ids = [router['id'] for router in routers]
            interfaces = self._get_sync_interfaces(
                context, router_ids, device_owners)
            floating_ips = self._get_sync_floating_ips(context, router_ids)
            return (routers, interfaces, floating_ips)
    def get_sync_data(self, context, router_ids=None, active=None):
        """Build the full router dicts consumed by the L3 agent."""
        routers, interfaces, floating_ips = self._get_router_info_list(
            context, router_ids=router_ids, active=active)
        # Both gateway ports and interface ports need subnet details.
        ports_to_populate = [router['gw_port'] for router in routers
                             if router.get('gw_port')] + interfaces
        self._populate_subnets_for_ports(context, ports_to_populate)
        routers_dict = dict((router['id'], router) for router in routers)
        self._process_floating_ips(context, routers_dict, floating_ips)
        self._process_interfaces(routers_dict, interfaces)
        return routers_dict.values()
class L3RpcNotifierMixin(object):
    """Mixin class to add rpc notifier attribute to db_base_plugin_v2."""

    @property
    def l3_rpc_notifier(self):
        """Lazily create (and cache) the L3 agent notify API client."""
        try:
            return self._l3_rpc_notifier
        except AttributeError:
            self._l3_rpc_notifier = l3_rpc_agent_api.L3AgentNotifyAPI()
            return self._l3_rpc_notifier

    @l3_rpc_notifier.setter
    def l3_rpc_notifier(self, value):
        self._l3_rpc_notifier = value

    def notify_router_updated(self, context, router_id,
                              operation=None):
        """Notify agents that a single router changed."""
        if not router_id:
            return
        self.l3_rpc_notifier.routers_updated(
            context, [router_id], operation)

    def notify_routers_updated(self, context, router_ids,
                               operation=None, data=None):
        """Notify agents that several routers changed."""
        if not router_ids:
            return
        self.l3_rpc_notifier.routers_updated(
            context, router_ids, operation, data)

    def notify_router_deleted(self, context, router_id):
        """Notify agents that a router was removed."""
        self.l3_rpc_notifier.router_deleted(context, router_id)
class L3_NAT_db_mixin(L3_NAT_dbonly_mixin, L3RpcNotifierMixin):
    """Mixin class to add rpc notifier methods to db_base_plugin_v2."""

    def update_router(self, context, id, router):
        """Update a router and notify agents."""
        router_dict = super(L3_NAT_db_mixin, self).update_router(context,
                                                                 id, router)
        self.notify_router_updated(context, router_dict['id'], None)
        return router_dict

    def delete_router(self, context, id):
        """Delete a router and notify agents."""
        super(L3_NAT_db_mixin, self).delete_router(context, id)
        self.notify_router_deleted(context, id)

    def notify_router_interface_action(
            self, context, router_interface_info, action):
        """Emit RPC and oslo notifications for an interface add/remove."""
        l3_method = '%s_router_interface' % action
        # Call the base helper explicitly: this class overrides
        # notify_routers_updated() below with a different signature.
        super(L3_NAT_db_mixin, self).notify_routers_updated(
            context, [router_interface_info['id']], l3_method,
            {'subnet_id': router_interface_info['subnet_id']})

        mapping = {'add': 'create', 'remove': 'delete'}
        notifier = n_rpc.get_notifier('network')
        router_event = 'router.interface.%s' % mapping[action]
        notifier.info(context, router_event,
                      {'router_interface': router_interface_info})

    def add_router_interface(self, context, router_id, interface_info):
        """Attach an interface, then emit notifications."""
        router_interface_info = super(
            L3_NAT_db_mixin, self).add_router_interface(
                context, router_id, interface_info)
        self.notify_router_interface_action(
            context, router_interface_info, 'add')
        return router_interface_info

    def remove_router_interface(self, context, router_id, interface_info):
        """Detach an interface, then emit notifications."""
        router_interface_info = super(
            L3_NAT_db_mixin, self).remove_router_interface(
                context, router_id, interface_info)
        self.notify_router_interface_action(
            context, router_interface_info, 'remove')
        return router_interface_info

    def create_floatingip(self, context, floatingip,
            initial_status=l3_constants.FLOATINGIP_STATUS_ACTIVE):
        """Create a floating IP and notify its router."""
        floatingip_dict = super(L3_NAT_db_mixin, self).create_floatingip(
            context, floatingip, initial_status)
        router_id = floatingip_dict['router_id']
        self.notify_router_updated(context, router_id, 'create_floatingip')
        return floatingip_dict

    def update_floatingip(self, context, id, floatingip):
        """Update a floating IP and notify both old and new routers."""
        old_floatingip, floatingip = self._update_floatingip(
            context, id, floatingip)
        router_ids = self._floatingips_to_router_ids(
            [old_floatingip, floatingip])
        super(L3_NAT_db_mixin, self).notify_routers_updated(
            context, router_ids, 'update_floatingip', {})
        return floatingip

    def delete_floatingip(self, context, id):
        """Delete a floating IP and notify its former router."""
        router_id = self._delete_floatingip(context, id)
        self.notify_router_updated(context, router_id, 'delete_floatingip')

    def disassociate_floatingips(self, context, port_id, do_notify=True):
        """Disassociate all floating IPs linked to specific port.

        @param port_id: ID of the port to disassociate floating IPs.
        @param do_notify: whether we should notify routers right away.
        @return: set of router-ids that require notification updates
                 if do_notify is False, otherwise None.
        """
        router_ids = super(L3_NAT_db_mixin, self).disassociate_floatingips(
            context, port_id)
        if do_notify:
            self.notify_routers_updated(context, router_ids)
            # since caller assumes that we handled notifications on its
            # behalf, return nothing
            return

        return router_ids

    def notify_routers_updated(self, context, router_ids):
        """Notify the given routers of a floating IP disassociation."""
        super(L3_NAT_db_mixin, self).notify_routers_updated(
            context, list(router_ids), 'disassociate_floatingips', {})
def _prevent_l3_port_delete_callback(resource, event, trigger, **kwargs):
    """Registry callback: veto the deletion of ports owned by L3 routers."""
    context = kwargs['context']
    port_id = kwargs['port_id']
    should_check = kwargs['port_check']
    plugin = manager.NeutronManager.get_service_plugins().get(
        constants.L3_ROUTER_NAT)
    if plugin and should_check:
        plugin.prevent_l3_port_deletion(context, port_id)
def _notify_routers_callback(resource, event, trigger, **kwargs):
    """Registry callback: forward router update notifications."""
    context = kwargs['context']
    router_ids = kwargs['router_ids']
    plugin = manager.NeutronManager.get_service_plugins().get(
        constants.L3_ROUTER_NAT)
    plugin.notify_routers_updated(context, router_ids)
def subscribe():
    """Wire the module's L3 callbacks into the global callback registry."""
    subscriptions = (
        (_prevent_l3_port_delete_callback, resources.PORT,
         events.BEFORE_DELETE),
        (_notify_routers_callback, resources.PORT, events.AFTER_DELETE),
    )
    for callback, resource, event in subscriptions:
        registry.subscribe(callback, resource, event)
# NOTE(armax): multiple l3 service plugins (potentially out of tree) inherit
# from l3_db and may need the callbacks to be processed. Having an implicit
# subscription (through the module import) preserves the existing behavior,
# and at the same time it avoids fixing it manually in each and every l3 plugin
# out there. That said, the subscription is also made explicit in the
# reference l3 plugin. The subscription operation is idempotent so there is no
# harm in registering the same callback multiple times.
subscribe()
| apache-2.0 |
junhuac/MQUIC | depot_tools/external_bin/gsutil/gsutil_4.15/gsutil/third_party/rsa/rsa/_version200.py | 177 | 15007 | """RSA module
Module for calculating large primes, and RSA encryption, decryption,
signing and verification. Includes generating public and private keys.
WARNING: this implementation does not use random padding, compression of the
cleartext input to prevent repetitions, or other common security improvements.
Use with care.
"""
__author__ = "Sybren Stuvel, Marloes de Boer, Ivo Tamboer, and Barry Mead"
__date__ = "2010-02-08"
__version__ = '2.0'
import math
import os
import random
import sys
import types
from rsa._compat import byte
# Display a warning that this insecure version is imported.
import warnings
warnings.warn('Insecure version of the RSA module is imported as %s' % __name__)
def bit_size(number):
    """Returns the number of bits required to hold a specific long number.

    Uses int.bit_length(): the previous math.ceil(math.log(number, 2))
    returned one bit too few for exact powers of two (e.g. 256 -> 8,
    but 256 needs 9 bits) and 0 for number == 1.
    """
    return number.bit_length()
def gcd(p, q):
    """Returns the greatest common divisor of p and q

    >>> gcd(48, 180)
    12
    """
    # Iterative Euclid: fast and uses no stack.  The old in-loop pre-swap
    # of (p, q) was both unnecessary (p % q already handles p < q) and a
    # bug: it made gcd(0, n) divide by zero.
    while q != 0:
        (p, q) = (q, p % q)
    return p
def bytes2int(bytes):
    """Converts a list of bytes or a string to an integer

    >>> (((128 * 256) + 64) * 256) + 15
    8405007
    >>> l = [128, 64, 15]
    >>> bytes2int(l)
    8405007
    """
    # isinstance() replaces the Python-2-only "type(x) is types.XType"
    # comparison; behavior for lists and (byte) strings is unchanged.
    if not isinstance(bytes, (list, str)):
        raise TypeError("You must pass a string or a list")

    # Convert byte stream to integer; loop variable renamed so it no
    # longer shadows rsa._compat.byte imported at module level.
    integer = 0
    for b in bytes:
        integer *= 256
        if isinstance(b, str):
            b = ord(b)
        integer += b

    return integer
def int2bytes(number):
    """
    Converts a number to a string of bytes
    """
    import numbers  # local import; the module header is outside this block
    # numbers.Integral accepts both int and (Python 2) long;
    # types.LongType/IntType no longer exist on Python 3.
    if not isinstance(number, numbers.Integral):
        raise TypeError("You must pass a long or an int")

    string = ""

    while number > 0:
        string = "%s%s" % (byte(number & 0xFF), string)
        # Floor division: identical to "/" for Python 2 ints, and avoids
        # the float result "/" would produce on Python 3.
        number //= 256

    return string
def to64(number):
    """Converts a number in the range of 0 to 63 into base 64 digit
    character in the range of '0'-'9', 'A'-'Z', 'a'-'z','-','_'.

    >>> to64(10)
    'A'
    """
    import numbers  # local import; the module header is outside this block
    # numbers.Integral accepts int and (Python 2) long; types.LongType
    # no longer exists on Python 3.
    if not isinstance(number, numbers.Integral):
        raise TypeError("You must pass a long or an int")

    if 0 <= number <= 9:     # 00-09 translates to '0' - '9'
        return byte(number + 48)

    if 10 <= number <= 35:   # 10-35 translates to 'A' - 'Z'
        return byte(number + 55)

    if 36 <= number <= 61:   # 36-61 translates to 'a' - 'z'
        return byte(number + 61)

    if number == 62:         # 62 translates to '-' (minus)
        return byte(45)

    if number == 63:         # 63 translates to '_' (underscore)
        return byte(95)

    raise ValueError('Invalid Base64 value: %i' % number)
def from64(number):
    """Converts an ordinal character value in the range of
    0-9,A-Z,a-z,-,_ to a number in the range of 0-63.

    >>> from64(49)
    1
    """
    import numbers  # local import; the module header is outside this block
    # numbers.Integral accepts int and (Python 2) long; types.LongType
    # no longer exists on Python 3.
    if not isinstance(number, numbers.Integral):
        raise TypeError("You must pass a long or an int")

    if 48 <= number <= 57:    # ord('0') - ord('9') translates to 0-9
        return number - 48

    if 65 <= number <= 90:    # ord('A') - ord('Z') translates to 10-35
        return number - 55

    if 97 <= number <= 122:   # ord('a') - ord('z') translates to 36-61
        return number - 61

    if number == 45:          # ord('-') translates to 62
        return 62

    if number == 95:          # ord('_') translates to 63
        return 63

    raise ValueError('Invalid Base64 value: %i' % number)
def int2str64(number):
    """Converts a number to a string of base64 encoded characters in
    the range of '0'-'9','A'-'Z,'a'-'z','-','_'.

    >>> int2str64(123456789)
    '7MyqL'
    """
    import numbers  # local import; the module header is outside this block
    # numbers.Integral accepts int and (Python 2) long; types.LongType
    # no longer exists on Python 3.
    if not isinstance(number, numbers.Integral):
        raise TypeError("You must pass a long or an int")

    string = ""

    while number > 0:
        string = "%s%s" % (to64(number & 0x3F), string)
        # Floor division: same result as "/" for Python 2 ints, still an
        # int on Python 3.
        number //= 64

    return string
def str642int(string):
    """Converts a base64 encoded string into an integer.
    The chars of this string in in the range '0'-'9','A'-'Z','a'-'z','-','_'

    >>> str642int('7MyqL')
    123456789
    """
    # isinstance() replaces the Python-2-only types.ListType/StringType
    # comparison; behavior for lists and strings is unchanged.
    if not isinstance(string, (list, str)):
        raise TypeError("You must pass a string or a list")

    # Loop variable renamed so it no longer shadows rsa._compat.byte
    # imported at module level.
    integer = 0
    for char in string:
        integer *= 64
        if isinstance(char, str):
            char = ord(char)
        integer += from64(char)

    return integer
def read_random_int(nbits):
    """Reads a random integer of approximately nbits bits rounded up
    to whole bytes"""
    # Ceiling division without going through float.
    nbytes = -(-nbits // 8)
    return bytes2int(os.urandom(nbytes))
def randint(minvalue, maxvalue):
    """Returns a random integer x with minvalue <= x <= maxvalue"""

    # Safety - get a lot of random data even if the range is fairly
    # small
    min_nbits = 32

    # The range of the random numbers we need to generate; renamed so it
    # no longer shadows the builtin range().
    value_range = (maxvalue - minvalue) + 1

    # Which is this number of bytes; "//" keeps the result an int on
    # Python 3 (identical to "/" under Python 2 integer division).
    rangebytes = (bit_size(value_range) + 7) // 8

    # Convert to bits, but make sure it's always at least min_nbits*2
    rangebits = max(rangebytes * 8, min_nbits * 2)

    # Take a random number of bits between min_nbits and rangebits
    nbits = random.randint(min_nbits, rangebits)

    return (read_random_int(nbits) % value_range) + minvalue
def jacobi(a, b):
    """Calculates the value of the Jacobi symbol (a/b)
    where both a and b are positive integers, and b is odd
    """
    if a == 0:
        return 0

    sign = 1
    while a > 1:
        if a & 1:
            # Quadratic reciprocity: flip the sign when (a-1)(b-1)/4 is odd.
            if ((a - 1) * (b - 1) >> 2) & 1:
                sign = -sign
            a, b = b % a, a
        else:
            # Factor out a 2: flip the sign when (b*b - 1)/8 is odd.
            if ((b * b - 1) >> 3) & 1:
                sign = -sign
            a >>= 1
            if a == 0:
                return 0
    return sign
def jacobi_witness(x, n):
    """Returns False if n is an Euler pseudo-prime with base x, and
    True otherwise.
    """
    j = jacobi(x, n) % n
    # n is odd here, so (n - 1) is even and the division is exact; '//'
    # keeps the exponent an integer (classic '/' would yield a float on
    # Python 3 and make pow() fail).
    f = pow(x, (n - 1) // 2, n)

    # Euler's criterion: for prime n, x^((n-1)/2) == (x/n) (mod n).
    if j == f:
        return False
    return True
def randomized_primality_testing(n, k):
    """Calculates whether n is composite (which is always correct) or
    prime (which is incorrect with error probability 2**-k)

    Returns False if the number is composite, and True if it's
    probably prime.
    """
    # 50% of Jacobi-witnesses can report compositness of non-prime numbers,
    # so k independent passing rounds bound the error by 2**-k.
    for _ in range(k):
        base = randint(1, n - 1)
        if jacobi_witness(base, n):
            return False
    return True
def is_prime(number):
    """Returns True if the number is prime, and False otherwise.

    >>> is_prime(42)
    0
    >>> is_prime(41)
    1
    """
    # Six rounds of the probabilistic test give an error bound of 2**-6.
    return randomized_primality_testing(number, 6)
def getprime(nbits):
    """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In
    other words: nbits is rounded up to whole bytes.

    >>> p = getprime(8)
    >>> is_prime(p-1)
    0
    >>> is_prime(p)
    1
    >>> is_prime(p+1)
    0
    """
    # Keep drawing random odd candidates until one passes the primality test.
    while True:
        candidate = read_random_int(nbits) | 1  # force the low bit: odd
        if is_prime(candidate):
            return candidate
def are_relatively_prime(a, b):
    """Returns True if a and b are relatively prime, and False if they
    are not.

    >>> are_relatively_prime(2, 3)
    1
    >>> are_relatively_prime(2, 4)
    0
    """
    # Coprime exactly when the greatest common divisor is 1.
    return gcd(a, b) == 1
def find_p_q(nbits):
    """Returns a tuple of two different primes of nbits bits"""
    # Skew the two sizes so p and q aren't too close together, or the
    # factoring programs can factor n.  Explicit floor division ('//')
    # matches the classic py2 '/' behaviour and stays correct on py3.
    pbits = nbits + (nbits // 16)
    qbits = nbits - (nbits // 16)

    p = getprime(pbits)
    while True:
        q = getprime(qbits)
        # Make sure p and q are different.
        if not q == p:
            break
    return (p, q)
def extended_gcd(a, b):
    """Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb
    """
    # r = gcd(a,b) i = multiplicitive inverse of a mod b
    #      or      j = multiplicitive inverse of b mod a
    # Neg return values for i or j are made positive mod b or a respectively
    # Iterateive Version is faster and uses much less stack space
    x = 0
    y = 1
    lx = 1
    ly = 0
    oa = a  # Remember original a/b to remove
    ob = b  # negative values from return results
    while b != 0:
        # Floor quotient; replaces the py2-only 'long(a/b)' (identical for
        # the positive operands used here, and valid on Python 3 as well).
        q = a // b
        (a, b) = (b, a % b)
        (x, lx) = ((lx - (q * x)), x)
        (y, ly) = ((ly - (q * y)), y)
    if (lx < 0):
        lx += ob  # If neg wrap modulo orignal b
    if (ly < 0):
        ly += oa  # If neg wrap modulo orignal a
    return (a, lx, ly)  # Return only positive values
# Main function: calculate encryption and decryption keys
def calculate_keys(p, q, nbits):
    """Calculates an encryption and a decryption key for p and q, and
    returns them as a tuple (e, d)"""

    n = p * q
    phi_n = (p-1) * (q-1)

    while True:
        # Make sure e has enough bits so we ensure "wrapping" through
        # modulo n.  ('//' is the explicit integer division; py2 '/' floored
        # implicitly here.)
        e = max(65537, getprime(nbits // 4))
        if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n):
            break

    # i is the multiplicative inverse of e modulo phi_n, i.e. the private
    # exponent d; d here is gcd(e, phi_n) and must be 1.
    (d, i, j) = extended_gcd(e, phi_n)

    if not d == 1:
        raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n))
    if (i < 0):
        raise Exception("New extended_gcd shouldn't return negative values")
    if not (e * i) % phi_n == 1:
        raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n))

    return (e, i)
def gen_keys(nbits):
    """Generate RSA keys of nbits bits. Returns (p, q, e, d).

    Note: this can take a long time, depending on the key size.
    """
    p, q = find_p_q(nbits)
    e, d = calculate_keys(p, q, nbits)
    return (p, q, e, d)
def newkeys(nbits):
    """Generates public and private keys, and returns them as (pub,
    priv).

    The public key consists of a dict {e: ..., , n: ....). The private
    key consists of a dict {d: ...., p: ...., q: ....).
    """
    # Don't let nbits go below 9 bits.
    nbits = max(9, nbits)
    p, q, e, d = gen_keys(nbits)

    pub = {'e': e, 'n': p * q}
    priv = {'d': d, 'p': p, 'q': q}
    return (pub, priv)
def encrypt_int(message, ekey, n):
    """Encrypts a message using encryption key 'ekey', working modulo n"""

    # Promote plain ints to long so the single type check below suffices.
    if type(message) is types.IntType:
        message = long(message)

    if not type(message) is types.LongType:
        raise TypeError("You must pass a long or int")

    if message < 0 or message > n:
        raise OverflowError("The message is too long")

    #Note: Bit exponents start at zero (bit counts start at 1) this is correct
    safebit = bit_size(n) - 2                   #compute safe bit (MSB - 1)
    message += (1 << safebit)                   #add safebit to ensure folding

    # Modular exponentiation: c = m^e mod n.
    return pow(message, ekey, n)
def decrypt_int(cyphertext, dkey, n):
    """Decrypts a cypher text using the decryption key 'dkey', working
    modulo n"""
    # m = c^d mod n, then strip the safebit added by encrypt_int.
    plain = pow(cyphertext, dkey, n)
    safebit = bit_size(n) - 2  # safe bit is (MSB - 1)
    return plain - (1 << safebit)
def encode64chops(chops):
    """base64encodes chops and combines them into a ',' delimited string"""
    # Encode every integer chop and delimit the pieces with commas.
    return ','.join([int2str64(value) for value in chops])
def decode64chops(string):
    """base64decodes and makes a ',' delimited string into chops"""
    # Split at commas and turn each base64 piece back into an integer.
    return [str642int(chunk) for chunk in string.split(',')]
def chopstring(message, key, n, funcref):
    """Chops the 'message' into integers that fit into n,
    leaving room for a safebit to be added to ensure that all
    messages fold during exponentiation.  The MSB of the number n
    is not independant modulo n (setting it could cause overflow), so
    use the next lower bit for the safebit.  Therefore reserve 2-bits
    in the number n for non-data bits.  Calls specified encryption
    function for each chop.

    Used by 'encrypt' and 'sign'.
    """
    msglen = len(message)

    # Set aside 2-bits so setting of safebit won't overflow modulo n.
    # Explicit floor division ('//') matches the classic py2 behaviour.
    nbits = bit_size(n) - 2             # leave room for safebit
    nbytes = nbits // 8
    blocks = msglen // nbytes

    if msglen % nbytes > 0:
        blocks += 1

    cypher = []
    for bindex in range(blocks):
        offset = bindex * nbytes
        block = message[offset:offset+nbytes]
        value = bytes2int(block)
        cypher.append(funcref(value, key, n))

    return encode64chops(cypher)   # Encode encrypted ints to base64 strings
def gluechops(string, key, n, funcref):
    """Glues chops back together into a string.  calls
    funcref(integer, key, n) for each chop.

    Used by 'decrypt' and 'verify'.
    """
    # Decode base64 strings into integer chops, process each one, and
    # join the decoded byte strings back into a single message.
    pieces = decode64chops(string)
    decoded = [int2bytes(funcref(cpart, key, n)) for cpart in pieces]
    return "".join(decoded)
def encrypt(message, key):
    """Encrypts a string 'message' with the public key 'key'"""
    # Encryption requires the public-key dict, which carries 'n'.
    if 'n' not in key:
        raise Exception("You must use the public key with encrypt")
    modulus = key['n']
    return chopstring(message, key['e'], modulus, encrypt_int)
def sign(message, key):
    """Signs a string 'message' with the private key 'key'"""
    # Signing requires the private-key dict, which carries 'p' and 'q'.
    if 'p' not in key:
        raise Exception("You must use the private key with sign")
    modulus = key['p'] * key['q']
    return chopstring(message, key['d'], modulus, encrypt_int)
def decrypt(cypher, key):
    """Decrypts a string 'cypher' with the private key 'key'"""
    # Decryption requires the private-key dict, which carries 'p' and 'q'.
    if 'p' not in key:
        raise Exception("You must use the private key with decrypt")
    modulus = key['p'] * key['q']
    return gluechops(cypher, key['d'], modulus, decrypt_int)
def verify(cypher, key):
    """Verifies a string 'cypher' with the public key 'key'"""
    # Verification requires the public-key dict, which carries 'n'.
    if 'n' not in key:
        raise Exception("You must use the public key with verify")
    modulus = key['n']
    return gluechops(cypher, key['e'], modulus, decrypt_int)
# Do doctest if we're not imported
if __name__ == "__main__":
    import doctest
    doctest.testmod()

# Public API of this module.
__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify"]
| mit |
singhkays/azure-quickstart-templates | 201-vmss-bottle-autoscale/workserver.py | 103 | 1704 | # workserver.py - simple HTTP server with a do_work / stop_work API
# GET /do_work activates a worker thread which uses CPU
# GET /stop_work signals worker thread to stop
import math
import socket
import threading
import time
from bottle import route, run
# Server identity and listening port for the Bottle app.
hostname = socket.gethostname()
hostport = 9000
# Shared flag read by the worker thread and toggled by the HTTP handlers.
keepworking = False # boolean to switch worker thread on or off
# thread which maximizes CPU usage while the keepworking global is True
def workerthread():
    # outer loop: runs forever, idling between work bursts
    while (True):
        # main loop to thrash the CPU (busy-compute factorials)
        while (keepworking == True):
            for x in range(1, 69):
                y = math.factorial(x)
        time.sleep(3)
# start the worker thread at import time; it idles until keepworking
# is switched on via the /do_work endpoint
worker_thread = threading.Thread(target=workerthread, args=())
worker_thread.start()
def writebody():
    """Render the HTML status/usage page for the worker interface."""
    parts = ['<html><head><title>Work interface - build</title></head>']
    parts.append('<body><h2>Worker interface on ' + hostname + '</h2><ul><h3>')
    # Show the action opposite to the current worker state.
    if keepworking == False:
        parts.append('<br/>Worker thread is not running. <a href="./do_work">Start work</a><br/>')
    else:
        parts.append('<br/>Worker thread is running. <a href="./stop_work">Stop work</a><br/>')
    parts.append('<br/>Usage:<br/><br/>/do_work = start worker thread<br/>/stop_work = stop worker thread<br/>')
    parts.append('</h3></ul></body></html>')
    return ''.join(parts)
@route('/')
def root():
    """Serve the status/usage page at the web root."""
    return writebody()
@route('/do_work')
def do_work():
    """Switch the CPU-burning worker loop on, then show the status page."""
    global keepworking
    # start worker thread
    keepworking = True
    return writebody()
@route('/stop_work')
def stop_work():
    """Switch the CPU-burning worker loop off, then show the status page."""
    global keepworking
    # stop worker thread
    keepworking = False
    return writebody()
# Start the Bottle HTTP server; this call blocks for the life of the process.
run(host=hostname, port=hostport)
| mit |
nirmeshk/oh-mainline | vendor/packages/twisted/twisted/names/dns.py | 18 | 53586 | # -*- test-case-name: twisted.names.test.test_dns -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
DNS protocol implementation.
Future Plans:
- Get rid of some toplevels, maybe.
@author: Moshe Zadka
@author: Jean-Paul Calderone
"""
# Names exported by 'from twisted.names.dns import *'.
__all__ = [
    'IEncodable', 'IRecord',

    'A', 'A6', 'AAAA', 'AFSDB', 'CNAME', 'DNAME', 'HINFO',
    'MAILA', 'MAILB', 'MB', 'MD', 'MF', 'MG', 'MINFO', 'MR', 'MX',
    'NAPTR', 'NS', 'NULL', 'PTR', 'RP', 'SOA', 'SPF', 'SRV', 'TXT', 'WKS',

    'ANY', 'CH', 'CS', 'HS', 'IN',

    'ALL_RECORDS', 'AXFR', 'IXFR',

    'EFORMAT', 'ENAME', 'ENOTIMP', 'EREFUSED', 'ESERVER',

    'Record_A', 'Record_A6', 'Record_AAAA', 'Record_AFSDB', 'Record_CNAME',
    'Record_DNAME', 'Record_HINFO', 'Record_MB', 'Record_MD', 'Record_MF',
    'Record_MG', 'Record_MINFO', 'Record_MR', 'Record_MX', 'Record_NAPTR',
    'Record_NS', 'Record_NULL', 'Record_PTR', 'Record_RP', 'Record_SOA',
    'Record_SPF', 'Record_SRV', 'Record_TXT', 'Record_WKS',

    'QUERY_CLASSES', 'QUERY_TYPES', 'REV_CLASSES', 'REV_TYPES', 'EXT_QUERIES',

    'Charstr', 'Message', 'Name', 'Query', 'RRHeader', 'SimpleRecord',

    'DNSDatagramProtocol', 'DNSMixin', 'DNSProtocol',

    'OK', 'OP_INVERSE', 'OP_NOTIFY', 'OP_QUERY', 'OP_STATUS', 'OP_UPDATE',
    'PORT',

    'AuthoritativeDomainError', 'DNSQueryTimeoutError', 'DomainError',
    ]
# System imports
import warnings
import struct, random, types, socket
try:
import cStringIO as StringIO
except ImportError:
import StringIO
AF_INET6 = socket.AF_INET6
from zope.interface import implements, Interface, Attribute
# Twisted imports
from twisted.internet import protocol, defer
from twisted.internet.error import CannotListenError
from twisted.python import log, failure
from twisted.python import util as tputil
from twisted.python import randbytes
def randomSource():
    """
    Wrapper around L{randbytes.secureRandom} to return 2 random chars.
    """
    # Two random bytes unpacked as a native-endian unsigned short.
    data = randbytes.secureRandom(2, fallback=True)
    return struct.unpack('H', data)[0]
# Standard DNS port.
PORT = 53

# RR type codes 1-18 from RFC 1035, plus later additions below.
(A, NS, MD, MF, CNAME, SOA, MB, MG, MR, NULL, WKS, PTR, HINFO, MINFO, MX, TXT,
 RP, AFSDB) = range(1, 19)
AAAA = 28
SRV = 33
NAPTR = 35
A6 = 38
DNAME = 39
SPF = 99

# Human-readable names for the RR type codes above.
QUERY_TYPES = {
    A: 'A',
    NS: 'NS',
    MD: 'MD',
    MF: 'MF',
    CNAME: 'CNAME',
    SOA: 'SOA',
    MB: 'MB',
    MG: 'MG',
    MR: 'MR',
    NULL: 'NULL',
    WKS: 'WKS',
    PTR: 'PTR',
    HINFO: 'HINFO',
    MINFO: 'MINFO',
    MX: 'MX',
    TXT: 'TXT',
    RP: 'RP',
    AFSDB: 'AFSDB',

    # 19 through 27?  Eh, I'll get to 'em.

    AAAA: 'AAAA',
    SRV: 'SRV',
    NAPTR: 'NAPTR',
    A6: 'A6',
    DNAME: 'DNAME',
    SPF: 'SPF'
}

IXFR, AXFR, MAILB, MAILA, ALL_RECORDS = range(251, 256)

# "Extended" queries (Hey, half of these are deprecated, good job)
EXT_QUERIES = {
    IXFR: 'IXFR',
    AXFR: 'AXFR',
    MAILB: 'MAILB',
    MAILA: 'MAILA',
    ALL_RECORDS: 'ALL_RECORDS'
}

# Reverse mapping: type name -> numeric code.
REV_TYPES = dict([
    (v, k) for (k, v) in QUERY_TYPES.items() + EXT_QUERIES.items()
])

# Query/record classes (RFC 1035 section 3.2.4).
IN, CS, CH, HS = range(1, 5)
ANY = 255

QUERY_CLASSES = {
    IN: 'IN',
    CS: 'CS',
    CH: 'CH',
    HS: 'HS',
    ANY: 'ANY'
}

# Reverse mapping: class name -> numeric code.
REV_CLASSES = dict([
    (v, k) for (k, v) in QUERY_CLASSES.items()
])


# Opcodes
OP_QUERY, OP_INVERSE, OP_STATUS = range(3)
OP_NOTIFY = 4 # RFC 1996
OP_UPDATE = 5 # RFC 2136


# Response Codes
OK, EFORMAT, ESERVER, ENAME, ENOTIMP, EREFUSED = range(6)
class IRecord(Interface):
    """
    A single entry in a zone of authority.
    """
    TYPE = Attribute("An indicator of what kind of record this is.")
# Backwards compatibility aliases - these should be deprecated or something I
# suppose. -exarkun
from twisted.names.error import DomainError, AuthoritativeDomainError
from twisted.names.error import DNSQueryTimeoutError
def str2time(s):
    """Parse a time interval into a number of seconds.

    Strings such as '10S', '5M', '2H', '1D', '3W' or '1Y' are converted
    using the matching multiplier; bare numeric strings are converted with
    int().  Non-string values (including None) are returned unchanged, which
    lets record constructors pass ttl=None straight through.

    @raise ValueError: for a string that is neither suffixed nor numeric.
    """
    suffixes = (
        ('S', 1), ('M', 60), ('H', 60 * 60), ('D', 60 * 60 * 24),
        ('W', 60 * 60 * 24 * 7), ('Y', 60 * 60 * 24 * 365)
    )
    # isinstance(s, str) is equivalent to the old types.StringType check on
    # Python 2 and also works on Python 3.
    if isinstance(s, str):
        s = s.upper().strip()
        for (suff, mult) in suffixes:
            if s.endswith(suff):
                return int(float(s[:-1]) * mult)
        try:
            s = int(s)
        except ValueError:
            # Parenthesized raise replaces the Python-2-only
            # 'raise ValueError, "..."' statement form.
            raise ValueError("Invalid time interval specifier: " + s)
    return s
def readPrecisely(file, l):
    """Read exactly C{l} bytes from C{file}, raising EOFError on a short read."""
    data = file.read(l)
    if len(data) < l:
        raise EOFError
    return data
class IEncodable(Interface):
    """
    Interface for something which can be encoded to and decoded
    from a file object.
    """

    def encode(strio, compDict = None):
        """
        Write a representation of this object to the given
        file object.

        @type strio: File-like object
        @param strio: The stream to which to write bytes

        @type compDict: C{dict} or C{None}
        @param compDict: A dictionary of backreference addresses that
        have already been written to this stream and that may be used for
        compression.
        """

    def decode(strio, length = None):
        """
        Reconstruct an object from data read from the given
        file object.

        @type strio: File-like object
        @param strio: The stream from which bytes may be read

        @type length: C{int} or C{None}
        @param length: The number of bytes in this RDATA field.  Most
        implementations can ignore this value.  Only in the case of
        records similar to TXT where the total length is in no way
        encoded in the data is it necessary.
        """
class Charstr(object):
    """
    A DNS <character-string>: at most 255 bytes, stored on the wire with a
    single length-prefix byte.
    """
    implements(IEncodable)

    def __init__(self, string=''):
        if not isinstance(string, str):
            raise ValueError("%r is not a string" % (string,))
        self.string = string


    def encode(self, strio, compDict=None):
        """
        Encode this Character string into the appropriate byte format.

        @type strio: file
        @param strio: The byte representation of this Charstr will be written
        to this file.
        """
        string = self.string
        ind = len(string)
        # One length byte, then the raw string bytes.
        strio.write(chr(ind))
        strio.write(string)


    def decode(self, strio, length=None):
        """
        Decode a byte string into this Name.

        @type strio: file
        @param strio: Bytes will be read from this file until the full string
        is decoded.

        @raise EOFError: Raised when there are not enough bytes available
        from C{strio}.
        """
        self.string = ''
        # Read the single-byte length prefix, then exactly that many bytes.
        l = ord(readPrecisely(strio, 1))
        self.string = readPrecisely(strio, l)


    def __eq__(self, other):
        if isinstance(other, Charstr):
            return self.string == other.string
        return False


    def __hash__(self):
        return hash(self.string)


    def __str__(self):
        return self.string
class Name:
    """
    An RFC 1035 domain name, encoded as a sequence of length-prefixed labels
    with optional message compression (section 4.1.4 pointers).
    """
    implements(IEncodable)

    def __init__(self, name=''):
        assert isinstance(name, types.StringTypes), "%r is not a string" % (name,)
        self.name = name

    def encode(self, strio, compDict=None):
        """
        Encode this Name into the appropriate byte format.

        @type strio: file
        @param strio: The byte representation of this Name will be written to
        this file.

        @type compDict: dict
        @param compDict: dictionary of Names that have already been encoded
        and whose addresses may be backreferenced by this Name (for the purpose
        of reducing the message size).
        """
        name = self.name
        while name:
            if compDict is not None:
                if name in compDict:
                    # Emit a compression pointer: top two bits set (0xc000)
                    # plus the offset of the earlier occurrence.
                    strio.write(
                        struct.pack("!H", 0xc000 | compDict[name]))
                    return
                else:
                    # Record where this suffix starts so later names can
                    # point back at it.
                    compDict[name] = strio.tell() + Message.headerSize
            # Split off the leading label before the first dot.
            ind = name.find('.')
            if ind > 0:
                label, name = name[:ind], name[ind + 1:]
            else:
                label, name = name, ''
            ind = len(label)
            strio.write(chr(ind))
            strio.write(label)
        # Root label (zero length) terminates the name.
        strio.write(chr(0))

    def decode(self, strio, length=None):
        """
        Decode a byte string into this Name.

        @type strio: file
        @param strio: Bytes will be read from this file until the full Name
        is decoded.

        @raise EOFError: Raised when there are not enough bytes available
        from C{strio}.
        """
        self.name = ''
        # 'off' remembers where to resume after following compression
        # pointers; 0 means no pointer has been followed yet.
        off = 0
        while 1:
            l = ord(readPrecisely(strio, 1))
            if l == 0:
                # End of name; restore the stream position if we jumped.
                if off > 0:
                    strio.seek(off)
                return
            if (l >> 6) == 3:
                # Top two bits set: this is a 14-bit compression pointer.
                new_off = ((l&63) << 8
                            | ord(readPrecisely(strio, 1)))
                if off == 0:
                    off = strio.tell()
                strio.seek(new_off)
                continue
            label = readPrecisely(strio, l)
            if self.name == '':
                self.name = label
            else:
                self.name = self.name + '.' + label

    def __eq__(self, other):
        if isinstance(other, Name):
            return str(self) == str(other)
        return 0


    def __hash__(self):
        return hash(str(self))


    def __str__(self):
        return self.name
class Query:
    """
    Represent a single DNS query.

    @ivar name: The name about which this query is requesting information.
    @ivar type: The query type.
    @ivar cls: The query class.
    """
    implements(IEncodable)

    name = None
    type = None
    cls = None

    def __init__(self, name='', type=A, cls=IN):
        """
        @type name: C{str}
        @param name: The name about which to request information.

        @type type: C{int}
        @param type: The query type.

        @type cls: C{int}
        @param cls: The query class.
        """
        self.name = Name(name)
        self.type = type
        self.cls = cls


    def encode(self, strio, compDict=None):
        # Wire format: name, then 16-bit type and 16-bit class.
        self.name.encode(strio, compDict)
        strio.write(struct.pack("!HH", self.type, self.cls))


    def decode(self, strio, length = None):
        self.name.decode(strio)
        buff = readPrecisely(strio, 4)
        self.type, self.cls = struct.unpack("!HH", buff)


    def __hash__(self):
        # Names hash case-insensitively, matching DNS name comparison rules.
        return hash((str(self.name).lower(), self.type, self.cls))


    def __cmp__(self, other):
        return isinstance(other, Query) and cmp(
            (str(self.name).lower(), self.type, self.cls),
            (str(other.name).lower(), other.type, other.cls)
        ) or cmp(self.__class__, other.__class__)


    def __str__(self):
        t = QUERY_TYPES.get(self.type, EXT_QUERIES.get(self.type, 'UNKNOWN (%d)' % self.type))
        c = QUERY_CLASSES.get(self.cls, 'UNKNOWN (%d)' % self.cls)
        return '<Query %s %s %s>' % (self.name, t, c)


    def __repr__(self):
        return 'Query(%r, %r, %r)' % (str(self.name), self.type, self.cls)
class RRHeader(tputil.FancyEqMixin):
    """
    A resource record header.

    @cvar fmt: C{str} specifying the byte format of an RR.

    @ivar name: The name about which this reply contains information.
    @ivar type: The query type of the original request.
    @ivar cls: The query class of the original request.
    @ivar ttl: The time-to-live for this record.
    @ivar payload: An object that implements the IEncodable interface
    @ivar auth: Whether this header is authoritative or not.
    """
    implements(IEncodable)

    compareAttributes = ('name', 'type', 'cls', 'ttl', 'payload', 'auth')

    # type (H), class (H), ttl (I), rdlength (H) -- network byte order.
    fmt = "!HHIH"

    name = None
    type = None
    cls = None
    ttl = None
    payload = None
    rdlength = None

    cachedResponse = None

    def __init__(self, name='', type=A, cls=IN, ttl=0, payload=None, auth=False):
        """
        @type name: C{str}
        @param name: The name about which this reply contains information.

        @type type: C{int}
        @param type: The query type.

        @type cls: C{int}
        @param cls: The query class.

        @type ttl: C{int}
        @param ttl: Time to live for this record.

        @type payload: An object implementing C{IEncodable}
        @param payload: A Query Type specific data object.
        """
        assert (payload is None) or (payload.TYPE == type)

        self.name = Name(name)
        self.type = type
        self.cls = cls
        self.ttl = ttl
        self.payload = payload
        self.auth = auth


    def encode(self, strio, compDict=None):
        self.name.encode(strio, compDict)
        # Write a zero placeholder for rdlength; it is backpatched below
        # once the payload's encoded size is known.
        strio.write(struct.pack(self.fmt, self.type, self.cls, self.ttl, 0))
        if self.payload:
            prefix = strio.tell()
            self.payload.encode(strio, compDict)
            aft = strio.tell()
            # Seek back over the rdlength field (last 2 bytes of the header)
            # and fill in the payload's actual length.
            strio.seek(prefix - 2, 0)
            strio.write(struct.pack('!H', aft - prefix))
            strio.seek(aft, 0)


    def decode(self, strio, length = None):
        self.name.decode(strio)
        l = struct.calcsize(self.fmt)
        buff = readPrecisely(strio, l)
        r = struct.unpack(self.fmt, buff)
        self.type, self.cls, self.ttl, self.rdlength = r


    def isAuthoritative(self):
        return self.auth


    def __str__(self):
        t = QUERY_TYPES.get(self.type, EXT_QUERIES.get(self.type, 'UNKNOWN (%d)' % self.type))
        c = QUERY_CLASSES.get(self.cls, 'UNKNOWN (%d)' % self.cls)
        return '<RR name=%s type=%s class=%s ttl=%ds auth=%s>' % (self.name, t, c, self.ttl, self.auth and 'True' or 'False')

    __repr__ = __str__
class SimpleRecord(tputil.FancyStrMixin, tputil.FancyEqMixin):
    """
    A Resource Record which consists of a single RFC 1035 domain-name.

    @type name: L{Name}
    @ivar name: The name associated with this record.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
    cached.
    """
    implements(IEncodable, IRecord)

    showAttributes = (('name', 'name', '%s'), 'ttl')
    compareAttributes = ('name', 'ttl')

    # Subclasses supply the concrete RR type code.
    TYPE = None
    name = None

    def __init__(self, name='', ttl=None):
        self.name = Name(name)
        self.ttl = str2time(ttl)


    def encode(self, strio, compDict = None):
        self.name.encode(strio, compDict)


    def decode(self, strio, length = None):
        self.name = Name()
        self.name.decode(strio)


    def __hash__(self):
        return hash(self.name)
# Kinds of RRs - oh my!
class Record_NS(SimpleRecord):
    """
    An authoritative nameserver.
    """
    # RR type code and the prefix used by FancyStrMixin reprs.
    TYPE = NS
    fancybasename = 'NS'
class Record_MD(SimpleRecord):
    """
    A mail destination.

    This record type is obsolete.

    @see: L{Record_MX}
    """
    TYPE = MD
    fancybasename = 'MD'
class Record_MF(SimpleRecord):
    """
    A mail forwarder.

    This record type is obsolete.

    @see: L{Record_MX}
    """
    TYPE = MF
    fancybasename = 'MF'
class Record_CNAME(SimpleRecord):
    """
    The canonical name for an alias.
    """
    TYPE = CNAME
    fancybasename = 'CNAME'
class Record_MB(SimpleRecord):
    """
    A mailbox domain name.

    This is an experimental record type.
    """
    TYPE = MB
    fancybasename = 'MB'
class Record_MG(SimpleRecord):
    """
    A mail group member.

    This is an experimental record type.
    """
    TYPE = MG
    fancybasename = 'MG'
class Record_MR(SimpleRecord):
    """
    A mail rename domain name.

    This is an experimental record type.
    """
    TYPE = MR
    fancybasename = 'MR'
class Record_PTR(SimpleRecord):
    """
    A domain name pointer.
    """
    TYPE = PTR
    fancybasename = 'PTR'
class Record_DNAME(SimpleRecord):
    """
    A non-terminal DNS name redirection.

    This record type provides the capability to map an entire subtree of the
    DNS name space to another domain.  It differs from the CNAME record which
    maps a single node of the name space.

    @see: U{http://www.faqs.org/rfcs/rfc2672.html}
    @see: U{http://www.faqs.org/rfcs/rfc3363.html}
    """
    TYPE = DNAME
    fancybasename = 'DNAME'
class Record_A(tputil.FancyEqMixin):
    """
    An IPv4 host address.

    @type address: C{str}
    @ivar address: The packed network-order representation of the IPv4 address
    associated with this record.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
    cached.
    """
    implements(IEncodable, IRecord)

    compareAttributes = ('address', 'ttl')

    TYPE = A
    address = None

    def __init__(self, address='0.0.0.0', ttl=None):
        # The address is stored packed (4 network-order bytes), not as a
        # dotted-quad string.
        address = socket.inet_aton(address)
        self.address = address
        self.ttl = str2time(ttl)


    def encode(self, strio, compDict = None):
        strio.write(self.address)


    def decode(self, strio, length = None):
        self.address = readPrecisely(strio, 4)


    def __hash__(self):
        return hash(self.address)


    def __str__(self):
        return '<A address=%s ttl=%s>' % (self.dottedQuad(), self.ttl)
    __repr__ = __str__


    def dottedQuad(self):
        # Human-readable dotted-quad form of the packed address.
        return socket.inet_ntoa(self.address)
class Record_SOA(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    Marks the start of a zone of authority.

    This record describes parameters which are shared by all records within a
    particular zone.

    @type mname: L{Name}
    @ivar mname: The domain-name of the name server that was the original or
    primary source of data for this zone.

    @type rname: L{Name}
    @ivar rname: A domain-name which specifies the mailbox of the person
    responsible for this zone.

    @type serial: C{int}
    @ivar serial: The unsigned 32 bit version number of the original copy of
    the zone.  Zone transfers preserve this value.  This value wraps and
    should be compared using sequence space arithmetic.

    @type refresh: C{int}
    @ivar refresh: A 32 bit time interval before the zone should be refreshed.

    @type minimum: C{int}
    @ivar minimum: The unsigned 32 bit minimum TTL field that should be
    exported with any RR from this zone.

    @type expire: C{int}
    @ivar expire: A 32 bit time value that specifies the upper limit on the
    time interval that can elapse before the zone is no longer
    authoritative.

    @type retry: C{int}
    @ivar retry: A 32 bit time interval that should elapse before a failed
    refresh should be retried.

    @type ttl: C{int}
    @ivar ttl: The default TTL to use for records served from this zone.
    """
    implements(IEncodable, IRecord)

    fancybasename = 'SOA'
    compareAttributes = ('serial', 'mname', 'rname', 'refresh', 'expire', 'retry', 'minimum', 'ttl')
    showAttributes = (('mname', 'mname', '%s'), ('rname', 'rname', '%s'), 'serial', 'refresh', 'retry', 'expire', 'minimum', 'ttl')

    TYPE = SOA

    def __init__(self, mname='', rname='', serial=0, refresh=0, retry=0, expire=0, minimum=0, ttl=None):
        self.mname, self.rname = Name(mname), Name(rname)
        # All interval fields accept the '1H'/'2D' style accepted by str2time.
        self.serial, self.refresh = str2time(serial), str2time(refresh)
        self.minimum, self.expire = str2time(minimum), str2time(expire)
        self.retry = str2time(retry)
        self.ttl = str2time(ttl)


    def encode(self, strio, compDict = None):
        self.mname.encode(strio, compDict)
        self.rname.encode(strio, compDict)
        # serial is unsigned (L); refresh/retry/expire are signed (l).
        strio.write(
            struct.pack(
                '!LlllL',
                self.serial, self.refresh, self.retry, self.expire,
                self.minimum
            )
        )


    def decode(self, strio, length = None):
        self.mname, self.rname = Name(), Name()
        self.mname.decode(strio)
        self.rname.decode(strio)
        r = struct.unpack('!LlllL', readPrecisely(strio, 20))
        self.serial, self.refresh, self.retry, self.expire, self.minimum = r


    def __hash__(self):
        return hash((
            self.serial, self.mname, self.rname,
            self.refresh, self.expire, self.retry
        ))
class Record_NULL(tputil.FancyStrMixin, tputil.FancyEqMixin):
    """
    A null record.

    This is an experimental record type.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
    cached.
    """
    implements(IEncodable, IRecord)

    fancybasename = 'NULL'
    showAttributes = compareAttributes = ('payload', 'ttl')

    TYPE = NULL

    def __init__(self, payload=None, ttl=None):
        # Opaque RDATA bytes; no interpretation is applied.
        self.payload = payload
        self.ttl = str2time(ttl)


    def encode(self, strio, compDict = None):
        strio.write(self.payload)


    def decode(self, strio, length = None):
        # The RDATA has no internal structure, so the rdlength from the
        # header is required to know how much to read.
        self.payload = readPrecisely(strio, length)


    def __hash__(self):
        return hash(self.payload)
class Record_WKS(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    A well known service description.

    This record type is obsolete.  See L{Record_SRV}.

    @type address: C{str}
    @ivar address: The packed network-order representation of the IPv4 address
    associated with this record.

    @type protocol: C{int}
    @ivar protocol: The 8 bit IP protocol number for which this service map is
    relevant.

    @type map: C{str}
    @ivar map: A bitvector indicating the services available at the specified
    address.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
    cached.
    """
    implements(IEncodable, IRecord)

    fancybasename = "WKS"
    compareAttributes = ('address', 'protocol', 'map', 'ttl')
    showAttributes = [('_address', 'address', '%s'), 'protocol', 'ttl']

    TYPE = WKS

    # Dotted-quad view of the packed address, for reprs.
    _address = property(lambda self: socket.inet_ntoa(self.address))

    def __init__(self, address='0.0.0.0', protocol=0, map='', ttl=None):
        self.address = socket.inet_aton(address)
        self.protocol, self.map = protocol, map
        self.ttl = str2time(ttl)


    def encode(self, strio, compDict = None):
        strio.write(self.address)
        strio.write(struct.pack('!B', self.protocol))
        strio.write(self.map)


    def decode(self, strio, length = None):
        self.address = readPrecisely(strio, 4)
        self.protocol = struct.unpack('!B', readPrecisely(strio, 1))[0]
        # The bitmap occupies whatever remains of the RDATA after the
        # 4-byte address and 1-byte protocol.
        self.map = readPrecisely(strio, length - 5)


    def __hash__(self):
        return hash((self.address, self.protocol, self.map))
class Record_AAAA(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    An IPv6 host address.

    @type address: C{str}
    @ivar address: The packed network-order representation of the IPv6 address
    associated with this record.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
    cached.

    @see: U{http://www.faqs.org/rfcs/rfc1886.html}
    """
    implements(IEncodable, IRecord)

    TYPE = AAAA

    fancybasename = 'AAAA'
    showAttributes = (('_address', 'address', '%s'), 'ttl')
    compareAttributes = ('address', 'ttl')

    # Colon-hex view of the packed address, for reprs.
    _address = property(lambda self: socket.inet_ntop(AF_INET6, self.address))

    def __init__(self, address = '::', ttl=None):
        # Stored packed: 16 network-order bytes.
        self.address = socket.inet_pton(AF_INET6, address)
        self.ttl = str2time(ttl)


    def encode(self, strio, compDict = None):
        strio.write(self.address)


    def decode(self, strio, length = None):
        self.address = readPrecisely(strio, 16)


    def __hash__(self):
        return hash(self.address)
class Record_A6(tputil.FancyStrMixin, tputil.FancyEqMixin):
    """
    An IPv6 address.

    This is an experimental record type.

    @type prefixLen: C{int}
    @ivar prefixLen: The length of the suffix.

    @type suffix: C{str}
    @ivar suffix: An IPv6 address suffix in network order.

    @type prefix: L{Name}
    @ivar prefix: If specified, a name which will be used as a prefix for
    other A6 records.

    @type bytes: C{int}
    @ivar bytes: The length of the prefix.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
    cached.

    @see: U{http://www.faqs.org/rfcs/rfc2874.html}
    @see: U{http://www.faqs.org/rfcs/rfc3363.html}
    @see: U{http://www.faqs.org/rfcs/rfc3364.html}
    """
    implements(IEncodable, IRecord)

    TYPE = A6

    fancybasename = 'A6'
    showAttributes = (('_suffix', 'suffix', '%s'), ('prefix', 'prefix', '%s'), 'ttl')
    compareAttributes = ('prefixLen', 'prefix', 'suffix', 'ttl')

    # Colon-hex view of the packed suffix, for reprs.
    _suffix = property(lambda self: socket.inet_ntop(AF_INET6, self.suffix))

    def __init__(self, prefixLen=0, suffix='::', prefix='', ttl=None):
        self.prefixLen = prefixLen
        self.suffix = socket.inet_pton(AF_INET6, suffix)
        self.prefix = Name(prefix)
        # Number of suffix bytes actually carried on the wire.
        self.bytes = int((128 - self.prefixLen) / 8.0)
        self.ttl = str2time(ttl)


    def encode(self, strio, compDict = None):
        strio.write(struct.pack('!B', self.prefixLen))
        if self.bytes:
            # Only the trailing (128 - prefixLen) bits of the suffix are sent.
            strio.write(self.suffix[-self.bytes:])
        if self.prefixLen:
            # This may not be compressed
            self.prefix.encode(strio, None)


    def decode(self, strio, length = None):
        self.prefixLen = struct.unpack('!B', readPrecisely(strio, 1))[0]
        self.bytes = int((128 - self.prefixLen) / 8.0)
        if self.bytes:
            # Zero-pad the front so the stored suffix is always 16 bytes.
            self.suffix = '\x00' * (16 - self.bytes) + readPrecisely(strio, self.bytes)
        if self.prefixLen:
            self.prefix.decode(strio)


    def __eq__(self, other):
        if isinstance(other, Record_A6):
            # Only the transmitted suffix bytes are significant.
            return (self.prefixLen == other.prefixLen and
                    self.suffix[-self.bytes:] == other.suffix[-self.bytes:] and
                    self.prefix == other.prefix and
                    self.ttl == other.ttl)
        return NotImplemented


    def __hash__(self):
        return hash((self.prefixLen, self.suffix[-self.bytes:], self.prefix))


    def __str__(self):
        return '<A6 %s %s (%d) ttl=%s>' % (
            self.prefix,
            socket.inet_ntop(AF_INET6, self.suffix),
            self.prefixLen, self.ttl
        )
class Record_SRV(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    The location of the server(s) for a specific protocol and domain.

    This is an experimental record type.

    @type priority: C{int}
    @ivar priority: The priority of this target host.  A client MUST attempt
    to contact the target host with the lowest-numbered priority it can
    reach; target hosts with the same priority SHOULD be tried in an order
    defined by the weight field.

    @type weight: C{int}
    @ivar weight: Specifies a relative weight for entries with the same
    priority.  Larger weights SHOULD be given a proportionately higher
    probability of being selected.

    @type port: C{int}
    @ivar port: The port on this target host of this service.

    @type target: L{Name}
    @ivar target: The domain name of the target host.  There MUST be one or
    more address records for this name, the name MUST NOT be an alias (in
    the sense of RFC 1034 or RFC 2181).  Implementors are urged, but not
    required, to return the address record(s) in the Additional Data
    section.  Unless and until permitted by future standards action, name
    compression is not to be used for this field.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
    cached.

    @see: U{http://www.faqs.org/rfcs/rfc2782.html}
    """
    implements(IEncodable, IRecord)

    TYPE = SRV

    fancybasename = 'SRV'
    compareAttributes = ('priority', 'weight', 'target', 'port', 'ttl')
    showAttributes = ('priority', 'weight', ('target', 'target', '%s'), 'port', 'ttl')

    def __init__(self, priority=0, weight=0, port=0, target='', ttl=None):
        self.priority = int(priority)
        self.weight = int(weight)
        self.port = int(port)
        self.target = Name(target)
        self.ttl = str2time(ttl)


    def encode(self, strio, compDict = None):
        strio.write(struct.pack('!HHH', self.priority, self.weight, self.port))
        # This can't be compressed (RFC 2782 forbids it for the target).
        self.target.encode(strio, None)


    def decode(self, strio, length = None):
        r = struct.unpack('!HHH', readPrecisely(strio, struct.calcsize('!HHH')))
        self.priority, self.weight, self.port = r
        self.target = Name()
        self.target.decode(strio)


    def __hash__(self):
        return hash((self.priority, self.weight, self.port, self.target))
class Record_NAPTR(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    The location of the server(s) for a specific protocol and domain.

    @type order: C{int}
    @ivar order: An integer specifying the order in which the NAPTR records
        MUST be processed to ensure the correct ordering of rules.  Low numbers
        are processed before high numbers.

    @type preference: C{int}
    @ivar preference: An integer that specifies the order in which NAPTR
        records with equal "order" values SHOULD be processed, low numbers
        being processed before high numbers.

    @type flag: L{Charstr}
    @ivar flag: A <character-string> containing flags to control aspects of the
        rewriting and interpretation of the fields in the record.  Flags
        are single characters from the set [A-Z0-9].  The case of the
        alphabetic characters is not significant.

        At this time only four flags, "S", "A", "U", and "P", are defined.

    @type service: L{Charstr}
    @ivar service: Specifies the service(s) available down this rewrite path.
        It may also specify the particular protocol that is used to talk with a
        service.  A protocol MUST be specified if the flags field states that
        the NAPTR is terminal.

    @type regexp: L{Charstr}
    @ivar regexp: A STRING containing a substitution expression that is applied
        to the original string held by the client in order to construct the
        next domain name to lookup.

    @type replacement: L{Name}
    @ivar replacement: The next NAME to query for NAPTR, SRV, or address
        records depending on the value of the flags field.  This MUST be a
        fully qualified domain-name.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.

    @see: U{http://www.faqs.org/rfcs/rfc2915.html}
    """
    implements(IEncodable, IRecord)

    TYPE = NAPTR

    compareAttributes = ('order', 'preference', 'flags', 'service', 'regexp',
                         'replacement')
    fancybasename = 'NAPTR'
    showAttributes = ('order', 'preference', ('flags', 'flags', '%s'),
                      ('service', 'service', '%s'), ('regexp', 'regexp', '%s'),
                      ('replacement', 'replacement', '%s'), 'ttl')

    def __init__(self, order=0, preference=0, flags='', service='', regexp='',
                 replacement='', ttl=None):
        self.order = int(order)
        self.preference = int(preference)
        self.flags = Charstr(flags)
        self.service = Charstr(service)
        self.regexp = Charstr(regexp)
        self.replacement = Name(replacement)
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict=None):
        # Two unsigned 16-bit fields, then four length-prefixed fields
        # in fixed wire order.
        strio.write(struct.pack('!HH', self.order, self.preference))
        # This can't be compressed
        self.flags.encode(strio, None)
        self.service.encode(strio, None)
        self.regexp.encode(strio, None)
        self.replacement.encode(strio, None)

    def decode(self, strio, length=None):
        r = struct.unpack('!HH', readPrecisely(strio, struct.calcsize('!HH')))
        self.order, self.preference = r
        self.flags = Charstr()
        self.service = Charstr()
        self.regexp = Charstr()
        self.replacement = Name()
        # Fields must be consumed in the same order encode() wrote them.
        self.flags.decode(strio)
        self.service.decode(strio)
        self.regexp.decode(strio)
        self.replacement.decode(strio)

    def __hash__(self):
        # TTL deliberately excluded, matching the other Record_* types.
        return hash((
            self.order, self.preference, self.flags,
            self.service, self.regexp, self.replacement))
class Record_AFSDB(tputil.FancyStrMixin, tputil.FancyEqMixin):
    """
    Map a domain name to the name of an AFS cell database server.

    @type subtype: C{int}
    @ivar subtype: Subtype 1 means the host runs an AFS version 3.0 Volume
        Location Server for the named AFS cell; subtype 2 means the host
        is an authenticated name server holding the cell-root directory
        node for the named DCE/NCA cell.

    @type hostname: L{Name}
    @ivar hostname: The domain name of a host that has a server for the
        cell named by this record.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.

    @see: U{http://www.faqs.org/rfcs/rfc1183.html}
    """
    implements(IEncodable, IRecord)

    TYPE = AFSDB

    fancybasename = 'AFSDB'
    compareAttributes = ('subtype', 'hostname', 'ttl')
    showAttributes = ('subtype', ('hostname', 'hostname', '%s'), 'ttl')

    def __init__(self, subtype=0, hostname='', ttl=None):
        self.subtype = int(subtype)
        self.hostname = Name(hostname)
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict = None):
        # Two-octet subtype, followed by the (compressible) hostname.
        strio.write(struct.pack('!H', self.subtype))
        self.hostname.encode(strio, compDict)

    def decode(self, strio, length = None):
        raw = readPrecisely(strio, struct.calcsize('!H'))
        (self.subtype,) = struct.unpack('!H', raw)
        self.hostname.decode(strio)

    def __hash__(self):
        # TTL deliberately excluded, matching the other Record_* types.
        return hash((self.subtype, self.hostname))
class Record_RP(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    Identifies the person responsible for a domain.

    @type mbox: L{Name}
    @ivar mbox: A domain name that specifies the mailbox for the
        responsible person.

    @type txt: L{Name}
    @ivar txt: A domain name for which TXT RR's exist (indirection through
        which allows information sharing about the contents of this RP
        record).

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.

    @see: U{http://www.faqs.org/rfcs/rfc1183.html}
    """
    implements(IEncodable, IRecord)

    TYPE = RP

    fancybasename = 'RP'
    compareAttributes = ('mbox', 'txt', 'ttl')
    showAttributes = (('mbox', 'mbox', '%s'), ('txt', 'txt', '%s'), 'ttl')

    def __init__(self, mbox='', txt='', ttl=None):
        self.mbox = Name(mbox)
        self.txt = Name(txt)
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict = None):
        # Both names are written in order and may use DNS compression.
        self.mbox.encode(strio, compDict)
        self.txt.encode(strio, compDict)

    def decode(self, strio, length = None):
        self.mbox = Name()
        self.mbox.decode(strio)
        self.txt = Name()
        self.txt.decode(strio)

    def __hash__(self):
        # TTL deliberately excluded, matching the other Record_* types.
        return hash((self.mbox, self.txt))
class Record_HINFO(tputil.FancyStrMixin, tputil.FancyEqMixin):
    """
    Describes the hardware and operating system of a host.

    @type cpu: C{str}
    @ivar cpu: Specifies the CPU type.

    @type os: C{str}
    @ivar os: Specifies the OS.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.
    """
    implements(IEncodable, IRecord)

    TYPE = HINFO

    fancybasename = 'HINFO'
    showAttributes = compareAttributes = ('cpu', 'os', 'ttl')

    def __init__(self, cpu='', os='', ttl=None):
        self.cpu = cpu
        self.os = os
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict = None):
        # Each field is a <character-string>: a length octet then the bytes.
        for field in (self.cpu, self.os):
            strio.write(struct.pack('!B', len(field)) + field)

    def decode(self, strio, length = None):
        cpuLen = struct.unpack('!B', readPrecisely(strio, 1))[0]
        self.cpu = readPrecisely(strio, cpuLen)
        osLen = struct.unpack('!B', readPrecisely(strio, 1))[0]
        self.os = readPrecisely(strio, osLen)

    def __eq__(self, other):
        # HINFO text fields compare case-insensitively.
        if isinstance(other, Record_HINFO):
            return (self.cpu.lower() == other.cpu.lower() and
                    self.os.lower() == other.os.lower() and
                    self.ttl == other.ttl)
        return NotImplemented

    def __hash__(self):
        # Case-folded to stay consistent with __eq__; TTL excluded.
        return hash((self.os.lower(), self.cpu.lower()))
class Record_MINFO(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    Mailbox or mail list information.

    This is an experimental record type.

    @type rmailbx: L{Name}
    @ivar rmailbx: A domain-name which specifies a mailbox which is
        responsible for the mailing list or mailbox.  If this domain name
        names the root, the owner of the MINFO RR is responsible for
        itself.

    @type emailbx: L{Name}
    @ivar emailbx: A domain-name which specifies a mailbox which is to
        receive error messages related to the mailing list or mailbox
        specified by the owner of the MINFO record.  If this domain name
        names the root, errors should be returned to the sender of the
        message.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.
    """
    implements(IEncodable, IRecord)

    TYPE = MINFO

    rmailbx = None
    emailbx = None

    fancybasename = 'MINFO'
    compareAttributes = ('rmailbx', 'emailbx', 'ttl')
    showAttributes = (('rmailbx', 'responsibility', '%s'),
                      ('emailbx', 'errors', '%s'),
                      'ttl')

    def __init__(self, rmailbx='', emailbx='', ttl=None):
        self.rmailbx = Name(rmailbx)
        self.emailbx = Name(emailbx)
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict = None):
        # Responsible mailbox first, then the error mailbox.
        self.rmailbx.encode(strio, compDict)
        self.emailbx.encode(strio, compDict)

    def decode(self, strio, length = None):
        self.rmailbx = Name()
        self.rmailbx.decode(strio)
        self.emailbx = Name()
        self.emailbx.decode(strio)

    def __hash__(self):
        # TTL deliberately excluded, matching the other Record_* types.
        return hash((self.rmailbx, self.emailbx))
class Record_MX(tputil.FancyStrMixin, tputil.FancyEqMixin):
    """
    Mail exchange.

    @type preference: C{int}
    @ivar preference: Specifies the preference given to this RR among others at
        the same owner.  Lower values are preferred.

    @type name: L{Name}
    @ivar name: A domain-name which specifies a host willing to act as a mail
        exchange.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be
        cached.
    """
    implements(IEncodable, IRecord)

    TYPE = MX

    fancybasename = 'MX'
    compareAttributes = ('preference', 'name', 'ttl')
    showAttributes = ('preference', ('name', 'name', '%s'), 'ttl')

    def __init__(self, preference=0, name='', ttl=None, **kwargs):
        # 'exchange' is accepted via **kwargs as a deprecated alias for
        # 'name'; when both are given, 'exchange' wins.
        self.preference, self.name = int(preference), Name(kwargs.get('exchange', name))
        self.ttl = str2time(ttl)

    def encode(self, strio, compDict = None):
        # 16-bit preference, then the (compressible) exchange name.
        strio.write(struct.pack('!H', self.preference))
        self.name.encode(strio, compDict)

    def decode(self, strio, length = None):
        self.preference = struct.unpack('!H', readPrecisely(strio, 2))[0]
        self.name = Name()
        self.name.decode(strio)

    def exchange(self):
        warnings.warn("use Record_MX.name instead", DeprecationWarning, stacklevel=2)
        return self.name

    # Rebind the method as a read-only property so reading the old
    # 'exchange' attribute still works but emits a DeprecationWarning.
    exchange = property(exchange)

    def __hash__(self):
        # TTL deliberately excluded, matching the other Record_* types.
        return hash((self.preference, self.name))
# Freeform text records: the RDATA is a sequence of length-prefixed
# <character-string> chunks, so decoding must track consumed bytes
# against the rdlength reported by the record header.
class Record_TXT(tputil.FancyEqMixin, tputil.FancyStrMixin):
    """
    Freeform text.

    @type data: C{list} of C{str}
    @ivar data: Freeform text which makes up this record.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be cached.
    """
    implements(IEncodable, IRecord)

    TYPE = TXT

    fancybasename = 'TXT'
    showAttributes = compareAttributes = ('data', 'ttl')

    def __init__(self, *data, **kw):
        self.data = list(data)
        # ttl must arrive as a keyword because *data is greedy.
        self.ttl = str2time(kw.get('ttl', None))

    def encode(self, strio, compDict = None):
        # Each chunk becomes one length octet followed by its bytes.
        for d in self.data:
            strio.write(struct.pack('!B', len(d)) + d)

    def decode(self, strio, length = None):
        soFar = 0
        self.data = []
        while soFar < length:
            # One length octet, then that many bytes of text.
            L = struct.unpack('!B', readPrecisely(strio, 1))[0]
            self.data.append(readPrecisely(strio, L))
            soFar += L + 1
        if soFar != length:
            # The final chunk ran past rdlength: record is malformed.
            # Log it but keep whatever decoded cleanly.
            log.msg(
                "Decoded %d bytes in %s record, but rdlength is %d" % (
                    soFar, self.fancybasename, length
                )
            )

    def __hash__(self):
        # TTL deliberately excluded, matching the other Record_* types.
        return hash(tuple(self.data))
class Record_SPF(Record_TXT):
    """
    A sender-policy record.

    Structurally identical to L{Record_TXT} — only the record TYPE
    differs.  Semantically the text is a policy definition, formatted as
    defined in U{rfc 4408<http://www.faqs.org/rfcs/rfc4408.html>}.

    @type data: C{list} of C{str}
    @ivar data: Freeform text which makes up this record.

    @type ttl: C{int}
    @ivar ttl: The maximum number of seconds which this record should be cached.
    """
    TYPE = SPF
    fancybasename = 'SPF'
class Message:
    """
    L{Message} contains all the information represented by a single
    DNS request or response.
    """
    # Header layout: 16-bit id, two flag/opcode bytes, then four 16-bit
    # section counts (questions, answers, authority, additional).
    headerFmt = "!H2B4H"
    headerSize = struct.calcsize(headerFmt)

    # Question, answer, additional, and nameserver lists
    queries = answers = add = ns = None

    def __init__(self, id=0, answer=0, opCode=0, recDes=0, recAv=0,
                 auth=0, rCode=OK, trunc=0, maxSize=512):
        self.maxSize = maxSize
        self.id = id
        self.answer = answer
        self.opCode = opCode
        self.auth = auth
        self.trunc = trunc
        self.recDes = recDes
        self.recAv = recAv
        self.rCode = rCode
        self.queries = []
        self.answers = []
        self.authority = []
        self.additional = []

    def addQuery(self, name, type=ALL_RECORDS, cls=IN):
        """
        Add another query to this Message.

        @type name: C{str}
        @param name: The name to query.

        @type type: C{int}
        @param type: Query type

        @type cls: C{int}
        @param cls: Query class
        """
        self.queries.append(Query(name, type, cls))

    def encode(self, strio):
        # Serialize the four sections into a scratch buffer first, so the
        # header can carry accurate counts and the truncation flag can be
        # decided from the real body size.
        compDict = {}
        body_tmp = StringIO.StringIO()
        for q in self.queries:
            q.encode(body_tmp, compDict)
        for q in self.answers:
            q.encode(body_tmp, compDict)
        for q in self.authority:
            q.encode(body_tmp, compDict)
        for q in self.additional:
            q.encode(body_tmp, compDict)
        body = body_tmp.getvalue()
        size = len(body) + self.headerSize
        if self.maxSize and size > self.maxSize:
            # Too large for the transport: set TC and truncate the body.
            self.trunc = 1
            body = body[:self.maxSize - self.headerSize]
        # Pack flag byte 3: QR | opcode | AA | TC | RD.
        byte3 = (( ( self.answer & 1 ) << 7 )
                 | ((self.opCode & 0xf ) << 3 )
                 | ((self.auth & 1 ) << 2 )
                 | ((self.trunc & 1 ) << 1 )
                 | ( self.recDes & 1 ) )
        # Pack flag byte 4: RA | rcode.
        byte4 = ( ( (self.recAv & 1 ) << 7 )
                  | (self.rCode & 0xf ) )

        strio.write(struct.pack(self.headerFmt, self.id, byte3, byte4,
                                len(self.queries), len(self.answers),
                                len(self.authority), len(self.additional)))
        strio.write(body)

    def decode(self, strio, length=None):
        self.maxSize = 0
        header = readPrecisely(strio, self.headerSize)
        r = struct.unpack(self.headerFmt, header)
        self.id, byte3, byte4, nqueries, nans, nns, nadd = r
        # Unpack the flag bits — the mirror image of encode().
        self.answer = ( byte3 >> 7 ) & 1
        self.opCode = ( byte3 >> 3 ) & 0xf
        self.auth = ( byte3 >> 2 ) & 1
        self.trunc = ( byte3 >> 1 ) & 1
        self.recDes = byte3 & 1
        self.recAv = ( byte4 >> 7 ) & 1
        self.rCode = byte4 & 0xf

        self.queries = []
        for i in range(nqueries):
            q = Query()
            try:
                q.decode(strio)
            except EOFError:
                # Truncated message: keep whatever decoded so far.
                return
            self.queries.append(q)

        items = ((self.answers, nans), (self.authority, nns), (self.additional, nadd))
        for (l, n) in items:
            self.parseRecords(l, n, strio)

    def parseRecords(self, list, num, strio):
        # Decode `num` resource records from strio into `list`; stop
        # quietly on a short read.
        for i in range(num):
            header = RRHeader()
            try:
                header.decode(strio)
            except EOFError:
                return
            t = self.lookupRecordType(header.type)
            if not t:
                # Unrecognized record type: skip this record.
                continue
            header.payload = t(ttl=header.ttl)
            try:
                header.payload.decode(strio, header.rdlength)
            except EOFError:
                return
            list.append(header)

    # Create a mapping from record types to their corresponding Record_*
    # classes.  This relies on the global state which has been created so
    # far in initializing this module (so don't define Record classes after
    # this).
    _recordTypes = {}
    for name in globals():
        if name.startswith('Record_'):
            _recordTypes[globals()[name].TYPE] = globals()[name]

    # Clear the iteration variable out of the class namespace so it
    # doesn't become an attribute.
    del name

    def lookupRecordType(self, type):
        """
        Retrieve the L{IRecord} implementation for the given record type.

        @param type: A record type, such as L{A} or L{NS}.
        @type type: C{int}

        @return: An object which implements L{IRecord} or C{None} if none
            can be found for the given type.

        @rtype: L{types.ClassType}
        """
        return self._recordTypes.get(type, None)

    def toStr(self):
        """Serialize this message and return the resulting string."""
        strio = StringIO.StringIO()
        self.encode(strio)
        return strio.getvalue()

    def fromStr(self, str):
        """Populate this message by parsing the serialized form ``str``."""
        strio = StringIO.StringIO(str)
        self.decode(strio)
class DNSMixin(object):
    """
    DNS protocol mixin shared by UDP and TCP implementations.

    @ivar _reactor: A L{IReactorTime} and L{IReactorUDP} provider which will
        be used to issue DNS queries and manage request timeouts.
    """
    id = None
    liveMessages = None

    def __init__(self, controller, reactor=None):
        self.controller = controller
        # Seed id; fresh per-query ids come from pickID()/randomSource().
        self.id = random.randrange(2 ** 10, 2 ** 15)
        if reactor is None:
            # Fall back to the global reactor only when none was supplied.
            from twisted.internet import reactor
        self._reactor = reactor

    def pickID(self):
        """
        Return a unique ID for queries.
        """
        # Loop until we draw an id with no outstanding query attached.
        while True:
            id = randomSource()
            if id not in self.liveMessages:
                return id

    def callLater(self, period, func, *args):
        """
        Wrapper around reactor.callLater, mainly for test purpose.
        """
        return self._reactor.callLater(period, func, *args)

    def _query(self, queries, timeout, id, writeMessage):
        """
        Send out a message with the given queries.

        @type queries: C{list} of C{Query} instances
        @param queries: The queries to transmit

        @type timeout: C{int} or C{float}
        @param timeout: How long to wait before giving up

        @type id: C{int}
        @param id: Unique key for this request

        @type writeMessage: C{callable}
        @param writeMessage: One-parameter callback which writes the message

        @rtype: C{Deferred}
        @return: a C{Deferred} which will be fired with the result of the
            query, or errbacked with any errors that could happen (exceptions
            during writing of the query, timeout errors, ...).
        """
        # Recursion desired on behalf of the client.
        m = Message(id, recDes=1)
        m.queries = queries

        try:
            writeMessage(m)
        except:
            # Surface serialization/transport failures via the Deferred.
            return defer.fail()

        resultDeferred = defer.Deferred()
        # Schedule the timeout, then register the query as live so a
        # matching response (or the timeout) can resolve it.
        cancelCall = self.callLater(timeout, self._clearFailed, resultDeferred, id)
        self.liveMessages[id] = (resultDeferred, cancelCall)

        return resultDeferred

    def _clearFailed(self, deferred, id):
        """
        Clean the Deferred after a timeout.
        """
        try:
            del self.liveMessages[id]
        except KeyError:
            pass
        deferred.errback(failure.Failure(DNSQueryTimeoutError(id)))
class DNSDatagramProtocol(DNSMixin, protocol.DatagramProtocol):
    """
    DNS protocol over UDP.
    """
    # Maps duplicated-query ids to a truthy marker; see query()/removeResend().
    resends = None

    def stopProtocol(self):
        """
        Stop protocol: reset state variables.
        """
        self.liveMessages = {}
        self.resends = {}
        self.transport = None

    def startProtocol(self):
        """
        Upon start, reset internal state.
        """
        self.liveMessages = {}
        self.resends = {}

    def writeMessage(self, message, address):
        """
        Send a message holding DNS queries.

        @type message: L{Message}
        """
        self.transport.write(message.toStr(), address)

    def startListening(self):
        # Bind an ephemeral UDP port; 512 bytes is the classic DNS
        # datagram size limit.
        self._reactor.listenUDP(0, self, maxPacketSize=512)

    def datagramReceived(self, data, addr):
        """
        Read a datagram, extract the message in it and trigger the associated
        Deferred.
        """
        m = Message()
        try:
            m.fromStr(data)
        except EOFError:
            log.msg("Truncated packet (%d bytes) from %s" % (len(data), addr))
            return
        except:
            # Nothing should trigger this, but since we're potentially
            # invoking a lot of different decoding methods, we might as well
            # be extra cautious.  Anything that triggers this is itself
            # buggy.
            log.err(failure.Failure(), "Unexpected decoding error")
            return

        if m.id in self.liveMessages:
            # Response to one of our own outstanding queries: cancel the
            # timeout and fire the Deferred.
            d, canceller = self.liveMessages[m.id]
            del self.liveMessages[m.id]
            canceller.cancel()
            # XXX we shouldn't need this hack of catching exception on callback()
            try:
                d.callback(m)
            except:
                log.err()
        else:
            # Unsolicited message: hand to the controller unless its id is
            # marked as a suppressed resend.
            if m.id not in self.resends:
                self.controller.messageReceived(m, self, addr)

    def removeResend(self, id):
        """
        Mark message ID as no longer having duplication suppression.
        """
        try:
            del self.resends[id]
        except KeyError:
            pass

    def query(self, address, queries, timeout=10, id=None):
        """
        Send out a message with the given queries.

        @type address: C{tuple} of C{str} and C{int}
        @param address: The address to which to send the query

        @type queries: C{list} of C{Query} instances
        @param queries: The queries to transmit

        @rtype: C{Deferred}
        """
        if not self.transport:
            # XXX transport might not get created automatically, use callLater?
            try:
                self.startListening()
            except CannotListenError:
                return defer.fail()

        if id is None:
            id = self.pickID()
        else:
            # Caller re-used an id: suppress duplicate deliveries for it.
            self.resends[id] = 1

        def writeMessage(m):
            self.writeMessage(m, address)

        return self._query(queries, timeout, id, writeMessage)
class DNSProtocol(DNSMixin, protocol.Protocol):
    """
    DNS protocol over TCP.

    Each DNS message on a TCP stream is preceded by a 2-byte big-endian
    length prefix (RFC 1035, section 4.2.2).

    @ivar length: Byte length of the message currently being received,
        or C{None} while waiting for a complete length prefix.
    @ivar buffer: Bytes received from the stream but not yet parsed.
    """
    length = None
    buffer = ''

    def writeMessage(self, message):
        """
        Send a message holding DNS queries.

        @type message: L{Message}
        """
        s = message.toStr()
        # Prefix the serialized message with its 2-byte length.
        self.transport.write(struct.pack('!H', len(s)) + s)

    def connectionMade(self):
        """
        Connection is made: reset internal state, and notify the controller.
        """
        self.liveMessages = {}
        self.controller.connectionMade(self)

    def connectionLost(self, reason):
        """
        Notify the controller that this protocol is no longer
        connected.
        """
        self.controller.connectionLost(self)

    def dataReceived(self, data):
        """
        Accumulate stream data and dispatch each complete DNS message.

        Responses to queries issued through this protocol fire the
        matching Deferred; unsolicited messages go to the controller.
        """
        self.buffer += data

        while self.buffer:
            if self.length is None:
                if len(self.buffer) < 2:
                    # Bug fix: previously a partial (<2 byte) length
                    # prefix fell through to the comparison below, where
                    # `len(buffer) >= None` is True in Python 2, so the
                    # incomplete buffer was parsed as a whole message.
                    # Wait for the complete prefix instead.
                    break
                self.length = struct.unpack('!H', self.buffer[:2])[0]
                self.buffer = self.buffer[2:]

            if len(self.buffer) < self.length:
                # Message body not completely received yet.
                break

            myChunk = self.buffer[:self.length]
            m = Message()
            m.fromStr(myChunk)

            try:
                d, canceller = self.liveMessages[m.id]
            except KeyError:
                # Not one of ours: hand the message to the controller.
                self.controller.messageReceived(m, self)
            else:
                del self.liveMessages[m.id]
                canceller.cancel()
                # XXX we shouldn't need this hack
                try:
                    d.callback(m)
                except:
                    log.err()

            # Consume the message and look for another length prefix.
            self.buffer = self.buffer[self.length:]
            self.length = None

    def query(self, queries, timeout=60):
        """
        Send out a message with the given queries.

        @type queries: C{list} of C{Query} instances
        @param queries: The queries to transmit

        @rtype: C{Deferred}
        """
        id = self.pickID()
        return self._query(queries, timeout, id, self.writeMessage)
| agpl-3.0 |
babble/babble | include/jython/Lib/tabnanny.py | 394 | 11336 | #! /usr/bin/env python
"""The Tab Nanny despises ambiguous indentation. She knows no mercy.
tabnanny -- Detection of ambiguous indentation
For the time being this module is intended to be called as a script.
However it is possible to import it into an IDE and use the function
check() described below.
Warning: The API provided by this module is likely to change in future
releases; such changes may not be backward compatible.
"""
# Released to the public domain, by Tim Peters, 15 April 1998.
# XXX Note: this is now a standard library module.
# XXX The API needs to undergo changes however; the current code is too
# XXX script-like. This will be addressed later.
__version__ = "6"
import os
import sys
import getopt
import tokenize
if not hasattr(tokenize, 'NL'):
raise ValueError("tokenize.NL doesn't exist -- tokenize module too old")
__all__ = ["check", "NannyNag", "process_tokens"]
verbose = 0
filename_only = 0
def errprint(*args):
    """Write *args* to stderr, space-separated and newline-terminated."""
    sys.stderr.write(" ".join(str(arg) for arg in args))
    sys.stderr.write("\n")
def main():
    """
    Command-line entry point: parse -q/-v flags into the module-global
    settings, then run check() on every remaining argument.
    """
    global verbose, filename_only
    try:
        opts, args = getopt.getopt(sys.argv[1:], "qv")
    except getopt.error, msg:
        errprint(msg)
        return
    for o, a in opts:
        if o == '-q':
            # -q: report only the offending file names (repeatable).
            filename_only = filename_only + 1
        if o == '-v':
            # -v: increase diagnostic verbosity (repeatable).
            verbose = verbose + 1
    if not args:
        errprint("Usage:", sys.argv[0], "[-v] file_or_directory ...")
        return
    for arg in args:
        check(arg)
class NannyNag(Exception):
    """
    Raised by tokeneater() if detecting an ambiguous indent.
    Captured and handled in check().
    """
    def __init__(self, lineno, msg, line):
        # Where the trouble was found, the explanation, and the raw line.
        self.lineno = lineno
        self.msg = msg
        self.line = line

    def get_lineno(self):
        """Return the 1-based line number of the offending line."""
        return self.lineno

    def get_msg(self):
        """Return the human-readable description of the problem."""
        return self.msg

    def get_line(self):
        """Return the text of the offending line."""
        return self.line
def check(file):
    """check(file_or_dir)

    If file_or_dir is a directory and not a symbolic link, then recursively
    descend the directory tree named by file_or_dir, checking all .py files
    along the way. If file_or_dir is an ordinary Python source file, it is
    checked for whitespace related problems. The diagnostic messages are
    written to standard output using the print statement.
    """
    if os.path.isdir(file) and not os.path.islink(file):
        if verbose:
            print "%r: listing directory" % (file,)
        names = os.listdir(file)
        for name in names:
            fullname = os.path.join(file, name)
            # Recurse into non-symlink subdirectories and .py files.
            if (os.path.isdir(fullname) and
                not os.path.islink(fullname) or
                os.path.normcase(name[-3:]) == ".py"):
                check(fullname)
        return

    try:
        f = open(file)
    except IOError, msg:
        errprint("%r: I/O Error: %s" % (file, msg))
        return

    if verbose > 1:
        print "checking %r ..." % file

    try:
        process_tokens(tokenize.generate_tokens(f.readline))

    except tokenize.TokenError, msg:
        errprint("%r: Token Error: %s" % (file, msg))
        return

    except IndentationError, msg:
        errprint("%r: Indentation Error: %s" % (file, msg))
        return

    except NannyNag, nag:
        # Ambiguous indentation found: report according to verbosity.
        badline = nag.get_lineno()
        line = nag.get_line()
        if verbose:
            print "%r: *** Line %d: trouble in tab city! ***" % (file, badline)
            print "offending line: %r" % (line,)
            print nag.get_msg()
        else:
            # Quote names with spaces so the terse output stays parseable.
            if ' ' in file: file = '"' + file + '"'
            if filename_only: print file
            else: print file, badline, repr(line)
        return

    if verbose:
        print "%r: Clean bill of health." % (file,)
class Whitespace:
    """
    Normal-form analysis of a leading-whitespace prefix.

    Two prefixes are "equal" when they yield the same indent level for
    every tab size, and one is "less" than another when it yields a
    strictly smaller indent level for every tab size; indentation whose
    relationship depends on the tab size is what tabnanny reports.
    """
    # the characters used for space and tab
    S, T = ' \t'

    # members:
    #   raw
    #       the original string
    #   n
    #       the number of leading whitespace characters in raw
    #   nt
    #       the number of tabs in raw[:n]
    #   norm
    #       the normal form as a pair (count, trailing), where:
    #       count
    #           a tuple such that raw[:n] contains count[i]
    #           instances of S * i + T
    #       trailing
    #           the number of trailing spaces in raw[:n]
    #       It's A Theorem that m.indent_level(t) ==
    #       n.indent_level(t) for all t >= 1 iff m.norm == n.norm.
    #   is_simple
    #       true iff raw[:n] is of the form (T*)(S*)

    def __init__(self, ws):
        self.raw = ws
        S, T = Whitespace.S, Whitespace.T
        count = []
        b = n = nt = 0
        for ch in self.raw:
            if ch == S:
                n = n + 1
                b = b + 1
            elif ch == T:
                n = n + 1
                nt = nt + 1
                if b >= len(count):
                    count = count + [0] * (b - len(count) + 1)
                count[b] = count[b] + 1
                b = 0
            else:
                # First non-whitespace character ends the prefix.
                break
        self.n = n
        self.nt = nt
        self.norm = tuple(count), b
        self.is_simple = len(count) <= 1

    def longest_run_of_spaces(self):
        # return length of longest contiguous run of spaces (whether or not
        # preceding a tab)
        count, trailing = self.norm
        return max(len(count)-1, trailing)

    def indent_level(self, tabsize):
        """Return the indent level of this prefix for the given tab size."""
        # count, il = self.norm
        # for i in range(len(count)):
        #    if count[i]:
        #        il = il + (i//tabsize + 1)*tabsize * count[i]
        # return il
        # quicker:
        # il = trailing + sum (i//ts + 1)*ts*count[i] =
        # trailing + ts * sum (i//ts + 1)*count[i] =
        # trailing + ts * sum i//ts*count[i] + count[i] =
        # trailing + ts * [(sum i//ts*count[i]) + (sum count[i])] =
        # trailing + ts * [(sum i//ts*count[i]) + num_tabs]
        # and note that i//ts*count[i] is 0 when i < ts
        count, trailing = self.norm
        il = 0
        for i in range(tabsize, len(count)):
            # Bug-proofing: use explicit floor division.  Under Python 2
            # "/" on ints floored anyway, but "//" keeps the result an
            # int (and correct) if this module ever runs on Python 3.
            il = il + i//tabsize * count[i]
        return trailing + tabsize * (il + self.nt)

    def equal(self, other):
        # return true iff self.indent_level(t) == other.indent_level(t)
        # for all t >= 1
        return self.norm == other.norm

    def not_equal_witness(self, other):
        # return a list of tuples (ts, i1, i2) such that
        # i1 == self.indent_level(ts) != other.indent_level(ts) == i2.
        # Intended to be used after not self.equal(other) is known, in which
        # case it will return at least one witnessing tab size.
        n = max(self.longest_run_of_spaces(),
                other.longest_run_of_spaces()) + 1
        a = []
        for ts in range(1, n+1):
            if self.indent_level(ts) != other.indent_level(ts):
                a.append( (ts,
                           self.indent_level(ts),
                           other.indent_level(ts)) )
        return a

    def less(self, other):
        # Return True iff self.indent_level(t) < other.indent_level(t)
        # for all t >= 1.
        # The algorithm is due to Vincent Broman.
        # Easy to prove it's correct.
        # XXXpost that.
        # Trivial to prove n is sharp (consider T vs ST).
        # Unknown whether there's a faster general way.  I suspected so at
        # first, but no longer.
        # For the special (but common!) case where M and N are both of the
        # form (T*)(S*), M.less(N) iff M.len() < N.len() and
        # M.num_tabs() <= N.num_tabs().  Proof is easy but kinda long-winded.
        # XXXwrite that up.
        # Note that M is of the form (T*)(S*) iff len(M.norm[0]) <= 1.
        if self.n >= other.n:
            return False
        if self.is_simple and other.is_simple:
            return self.nt <= other.nt
        n = max(self.longest_run_of_spaces(),
                other.longest_run_of_spaces()) + 1
        # the self.n >= other.n test already did it for ts=1
        for ts in range(2, n+1):
            if self.indent_level(ts) >= other.indent_level(ts):
                return False
        return True

    def not_less_witness(self, other):
        # return a list of tuples (ts, i1, i2) such that
        # i1 == self.indent_level(ts) >= other.indent_level(ts) == i2.
        # Intended to be used after not self.less(other) is known, in which
        # case it will return at least one witnessing tab size.
        n = max(self.longest_run_of_spaces(),
                other.longest_run_of_spaces()) + 1
        a = []
        for ts in range(1, n+1):
            if self.indent_level(ts) >= other.indent_level(ts):
                a.append( (ts,
                           self.indent_level(ts),
                           other.indent_level(ts)) )
        return a
def format_witnesses(w):
    """Return a phrase naming the witnessing tab sizes in *w*."""
    sizes = [str(tup[0]) for tup in w]
    noun = "at tab size" if len(w) <= 1 else "at tab sizes"
    return noun + " " + ", ".join(sizes)
def process_tokens(tokens):
    """
    Scan a tokenize token stream for ambiguous indentation.

    Raises NannyNag at the first line whose indentation relationship to
    the enclosing block depends on the tab-size setting.
    """
    INDENT = tokenize.INDENT
    DEDENT = tokenize.DEDENT
    NEWLINE = tokenize.NEWLINE
    JUNK = tokenize.COMMENT, tokenize.NL
    # Stack of Whitespace objects, one per open indentation level.
    indents = [Whitespace("")]
    check_equal = 0

    for (type, token, start, end, line) in tokens:
        if type == NEWLINE:
            # a program statement, or ENDMARKER, will eventually follow,
            # after some (possibly empty) run of tokens of the form
            #     (NL | COMMENT)* (INDENT | DEDENT+)?
            # If an INDENT appears, setting check_equal is wrong, and will
            # be undone when we see the INDENT.
            check_equal = 1

        elif type == INDENT:
            check_equal = 0
            thisguy = Whitespace(token)
            # A deeper block must be unambiguously deeper at every tab
            # size; otherwise the nesting is tab-size dependent.
            if not indents[-1].less(thisguy):
                witness = indents[-1].not_less_witness(thisguy)
                msg = "indent not greater e.g. " + format_witnesses(witness)
                raise NannyNag(start[0], msg, line)
            indents.append(thisguy)

        elif type == DEDENT:
            # there's nothing we need to check here!  what's important is
            # that when the run of DEDENTs ends, the indentation of the
            # program statement (or ENDMARKER) that triggered the run is
            # equal to what's left at the top of the indents stack

            # Ouch!  This assert triggers if the last line of the source
            # is indented *and* lacks a newline -- then DEDENTs pop out
            # of thin air.
            # assert check_equal  # else no earlier NEWLINE, or an earlier INDENT
            check_equal = 1

            del indents[-1]

        elif check_equal and type not in JUNK:
            # this is the first "real token" following a NEWLINE, so it
            # must be the first token of the next program statement, or an
            # ENDMARKER; the "line" argument exposes the leading whitespace
            # for this statement; in the case of ENDMARKER, line is an empty
            # string, so will properly match the empty string with which the
            # "indents" stack was seeded
            check_equal = 0
            thisguy = Whitespace(line)
            if not indents[-1].equal(thisguy):
                witness = indents[-1].not_equal_witness(thisguy)
                msg = "indent not equal e.g. " + format_witnesses(witness)
                raise NannyNag(start[0], msg, line)
# Script entry point: run the command-line interface.
if __name__ == '__main__':
    main()
| apache-2.0 |
GodBlessPP/cadp_w12 | wsgi/pybean.py | 562 | 8617 | #coding: utf-8
import sqlite3
from pkg_resources import parse_version
__version__ = "0.2.1"
__author__ = "Mickael Desfrenes"
__email__ = "desfrenes@gmail.com"
# Yen 2013.04.08, 將 Python2 的 .next() 改為 next(), 以便在 Python 3 中使用
class SQLiteWriter(object):
"""
In frozen mode (the default), the writer will not alter db schema.
Just add frozen=False to enable column creation (or just add False
as second parameter):
query_writer = SQLiteWriter(":memory:", False)
"""
def __init__(self, db_path=":memory:", frozen=True):
    """
    Open (or create) the SQLite database at ``db_path``.

    ``frozen`` controls schema evolution: when True (the default) the
    writer never alters the schema; pass False to let it create tables
    and columns on demand.
    """
    self.db = sqlite3.connect(db_path)
    # Autocommit mode; the transaction is opened manually via BEGIN below.
    self.db.isolation_level = None
    # Rows support access by column name.
    self.db.row_factory = sqlite3.Row
    self.frozen = frozen
    self.cursor = self.db.cursor()
    self.cursor.execute("PRAGMA foreign_keys=ON;")
    self.cursor.execute('PRAGMA encoding = "UTF-8";')
    # Start an explicit transaction covering subsequent operations.
    self.cursor.execute('BEGIN;')
def __del__(self):
    # Best-effort close of the connection when the writer is collected.
    self.db.close()
def replace(self, bean):
    """
    Insert or update the row backing ``bean``.

    Uses SQL REPLACE when the bean already carries an ``id``, INSERT
    otherwise; in the INSERT case the assigned id is written back onto
    the bean.  Returns the bean's id.
    """
    keys = []
    values = []
    write_operation = "replace"
    if "id" not in bean.__dict__:
        # No primary key yet: INSERT with a NULL id so SQLite assigns one.
        write_operation = "insert"
        keys.append("id")
        values.append(None)
    self.__create_table(bean.__class__.__name__)
    columns = self.__get_columns(bean.__class__.__name__)
    for key in bean.__dict__:
        keys.append(key)
        if key not in columns:
            # Unfrozen mode: grow the schema to fit the bean's attributes.
            self.__create_column(bean.__class__.__name__, key,
                                 type(bean.__dict__[key]))
        values.append(bean.__dict__[key])
    sql = write_operation + " into " + bean.__class__.__name__ + "("
    sql += ",".join(keys) + ") values ("
    sql += ",".join(["?" for i in keys]) + ")"
    self.cursor.execute(sql, values)
    if write_operation == "insert":
        # Propagate the AUTOINCREMENT-assigned id back onto the bean.
        bean.id = self.cursor.lastrowid
    return bean.id
def __create_column(self, table, column, sqltype):
    """Add ``column`` to ``table``, mapping the Python type to an SQLite affinity."""
    if self.frozen:
        # Frozen writers never touch the schema.
        return
    affinity = "NUMERIC" if sqltype in (float, int, bool) else "TEXT"
    self.cursor.execute(
        "alter table " + table + " add " + column + " " + affinity)
def __get_columns(self, table):
    """Return the column names of ``table`` (always empty when frozen)."""
    if self.frozen:
        return []
    self.cursor.execute("PRAGMA table_info(" + table + ")")
    return [row["name"] for row in self.cursor]
def __create_table(self, table):
if self.frozen:
return
sql = "create table if not exists " + table + "(id INTEGER PRIMARY KEY AUTOINCREMENT)"
self.cursor.execute(sql)
def get_rows(self, table_name, sql = "1", replace = None):
if replace is None : replace = []
self.__create_table(table_name)
sql = "SELECT * FROM " + table_name + " WHERE " + sql
try:
self.cursor.execute(sql, replace)
for row in self.cursor:
yield row
except sqlite3.OperationalError:
return
def get_count(self, table_name, sql="1", replace = None):
if replace is None : replace = []
self.__create_table(table_name)
sql = "SELECT count(*) AS cnt FROM " + table_name + " WHERE " + sql
try:
self.cursor.execute(sql, replace)
except sqlite3.OperationalError:
return 0
for row in self.cursor:
return row["cnt"]
def delete(self, bean):
self.__create_table(bean.__class__.__name__)
sql = "delete from " + bean.__class__.__name__ + " where id=?"
self.cursor.execute(sql,[bean.id])
def link(self, bean_a, bean_b):
self.replace(bean_a)
self.replace(bean_b)
table_a = bean_a.__class__.__name__
table_b = bean_b.__class__.__name__
assoc_table = self.__create_assoc_table(table_a, table_b)
sql = "replace into " + assoc_table + "(" + table_a + "_id," + table_b
sql += "_id) values(?,?)"
self.cursor.execute(sql,
[bean_a.id, bean_b.id])
def unlink(self, bean_a, bean_b):
table_a = bean_a.__class__.__name__
table_b = bean_b.__class__.__name__
assoc_table = self.__create_assoc_table(table_a, table_b)
sql = "delete from " + assoc_table + " where " + table_a
sql += "_id=? and " + table_b + "_id=?"
self.cursor.execute(sql,
[bean_a.id, bean_b.id])
def get_linked_rows(self, bean, table_name):
bean_table = bean.__class__.__name__
assoc_table = self.__create_assoc_table(bean_table, table_name)
sql = "select t.* from " + table_name + " t inner join " + assoc_table
sql += " a on a." + table_name + "_id = t.id where a."
sql += bean_table + "_id=?"
self.cursor.execute(sql,[bean.id])
for row in self.cursor:
yield row
def __create_assoc_table(self, table_a, table_b):
assoc_table = "_".join(sorted([table_a, table_b]))
if not self.frozen:
sql = "create table if not exists " + assoc_table + "("
sql+= table_a + "_id NOT NULL REFERENCES " + table_a + "(id) ON DELETE cascade,"
sql+= table_b + "_id NOT NULL REFERENCES " + table_b + "(id) ON DELETE cascade,"
sql+= " PRIMARY KEY (" + table_a + "_id," + table_b + "_id));"
self.cursor.execute(sql)
# no real support for foreign keys until sqlite3 v3.6.19
# so here's the hack
if cmp(parse_version(sqlite3.sqlite_version),parse_version("3.6.19")) < 0:
sql = "create trigger if not exists fk_" + table_a + "_" + assoc_table
sql+= " before delete on " + table_a
sql+= " for each row begin delete from " + assoc_table + " where " + table_a + "_id = OLD.id;end;"
self.cursor.execute(sql)
sql = "create trigger if not exists fk_" + table_b + "_" + assoc_table
sql+= " before delete on " + table_b
sql+= " for each row begin delete from " + assoc_table + " where " + table_b + "_id = OLD.id;end;"
self.cursor.execute(sql)
return assoc_table
def delete_all(self, table_name, sql = "1", replace = None):
if replace is None : replace = []
self.__create_table(table_name)
sql = "DELETE FROM " + table_name + " WHERE " + sql
try:
self.cursor.execute(sql, replace)
return True
except sqlite3.OperationalError:
return False
def commit(self):
self.db.commit()
class Store(object):
    """
    High-level bean store that delegates all persistence to a SQL writer.

    A SQL writer should be passed to the constructor:
    beans_save = Store(SQLiteWriter(":memory"), frozen=False)
    """
    def __init__(self, SQLWriter):
        self.writer = SQLWriter
    def new(self, table_name):
        # A bean is an ad-hoc empty type named after its table.
        return type(table_name, (object,), {})()
    def save(self, bean):
        # Insert or update, depending on whether the bean already has an id.
        self.writer.replace(bean)
    def load(self, table_name, id):
        # First matching row becomes a bean; implicitly None when absent.
        for record in self.writer.get_rows(table_name, "id=?", [id]):
            return self.row_to_object(table_name, record)
    def count(self, table_name, sql = "1", replace=None):
        params = [] if replace is None else replace
        return self.writer.get_count(table_name, sql, params)
    def find(self, table_name, sql = "1", replace=None):
        # Lazily convert each matching row into a bean.
        params = [] if replace is None else replace
        for record in self.writer.get_rows(table_name, sql, params):
            yield self.row_to_object(table_name, record)
    def find_one(self, table_name, sql = "1", replace=None):
        # First match, or None when the query yields nothing.
        matches = self.find(table_name, sql, replace)
        try:
            return next(matches)
        except StopIteration:
            return None
    def delete(self, bean):
        self.writer.delete(bean)
    def link(self, bean_a, bean_b):
        self.writer.link(bean_a, bean_b)
    def unlink(self, bean_a, bean_b):
        self.writer.unlink(bean_a, bean_b)
    def get_linked(self, bean, table_name):
        for record in self.writer.get_linked_rows(bean, table_name):
            yield self.row_to_object(table_name, record)
    def delete_all(self, table_name, sql = "1", replace=None):
        params = [] if replace is None else replace
        return self.writer.delete_all(table_name, sql, params)
    def row_to_object(self, table_name, row):
        # Copy every column of the row onto a fresh bean.
        bean = type(table_name, (object,), {})()
        for column in row.keys():
            setattr(bean, column, row[column])
        return bean
    def commit(self):
        self.writer.commit()
| gpl-3.0 |
vritant/subscription-manager | src/subscription_manager/release.py | 3 | 6540 | #
# Subscription manager command line utility. This script is a modified version of
# cp_client.py from candlepin scripts
#
# Copyright (c) 2012 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import gettext
import httplib
import logging
import socket
from M2Crypto.SSL import SSLError
import rhsm.config
from subscription_manager import injection as inj
from subscription_manager import listing
from subscription_manager import rhelproduct
_ = gettext.gettext
log = logging.getLogger('rhsm-app.' + __name__)
cfg = rhsm.config.initConfig()
class ContentConnectionProvider(object):
    """Placeholder provider for CDN content connections; holds no state."""
    def __init__(self):
        # Intentionally empty: kept only so callers can instantiate it.
        pass
class ReleaseBackend(object):
    """Facade that fetches release versions from the best available source."""
    def get_releases(self):
        """Return the list of available releases from the chosen provider."""
        return self._get_release_version_provider().get_releases()
    def _get_release_version_provider(self):
        # Prefer the server API; fall back to scraping the CDN listings.
        api_provider = ApiReleaseVersionProvider()
        if not api_provider.api_supported():
            return CdnReleaseVersionProvider()
        return api_provider
class ApiReleaseVersionProvider(object):
    """Release list provider backed by the entitlement server API."""
    def __init__(self):
        # Injected collaborators: connection factory and consumer identity.
        self.cp_provider = inj.require(inj.CP_PROVIDER)
        self.identity = inj.require(inj.IDENTITY)
    def _conn(self):
        # A consumer-auth connection is fetched fresh for every call.
        return self.cp_provider.get_consumer_auth_cp()
    def api_supported(self):
        """True when the server exposes the 'available_releases' resource."""
        connection = self._conn()
        return connection.supports_resource("available_releases")
    def get_releases(self):
        """Ask the server which releases are available to this consumer."""
        connection = self._conn()
        return connection.getAvailableReleases(self.identity.uuid)
class CdnReleaseVersionProvider(object):
    """Release list provider that reads $releasever 'listing' files off the CDN.

    Fallback used when the server does not expose the 'available_releases'
    API resource (see ReleaseBackend).  Python 2 code (httplib, basestring).
    """
    def __init__(self):
        # Injected directories/connections; content_connection talks to the CDN.
        self.entitlement_dir = inj.require(inj.ENT_DIR)
        self.product_dir = inj.require(inj.PROD_DIR)
        self.cp_provider = inj.require(inj.CP_PROVIDER)
        self.content_connection = self.cp_provider.get_content_connection()
    def get_releases(self):
        """Return a sorted, de-duplicated list of release versions from the CDN.

        Returns [] when no installed RHEL product is found or nothing could
        be fetched.  Network errors per listing file are logged and skipped.
        """
        # cdn base url
        # find the rhel product
        # NOTE: if several installed certs match, the last one iterated wins
        # (dict iteration order of installed_products).
        release_product = None
        installed_products = self.product_dir.get_installed_products()
        for product_hash in installed_products:
            product_cert = installed_products[product_hash]
            products = product_cert.products
            for product in products:
                rhel_matcher = rhelproduct.RHELProductMatcher(product)
                if rhel_matcher.is_rhel():
                    release_product = product
        if release_product is None:
            log.debug("No products with RHEL product tags found")
            return []
        # Collect the candidate listing-file paths from every entitlement
        # that provides enabled content matching the RHEL product tags.
        entitlements = self.entitlement_dir.list_for_product(release_product.id)
        listings = []
        for entitlement in entitlements:
            contents = entitlement.content
            for content in contents:
                # ignore content that is not enabled
                # see bz #820639
                if not content.enabled:
                    continue
                if self._is_correct_rhel(release_product.provided_tags,
                                         content.required_tags):
                    listing_path = self._build_listing_path(content.url)
                    listings.append(listing_path)
        # FIXME: not sure how to get the "base" content if we have multiple
        # entitlements for a product
        # for a entitlement, grant the corresponding entitlement cert
        # use it for this connection
        # hmm. We are really only supposed to have one product
        # with one content with one listing file. We shall see.
        releases = []
        listings = sorted(set(listings))
        for listing_path in listings:
            try:
                data = self.content_connection.get_versions(listing_path)
            except (socket.error,
                    httplib.HTTPException,
                    SSLError) as e:
                # content connection doesn't handle any exceptions
                # and the code that invokes this doesn't either, so
                # swallow them here.
                log.exception(e)
                continue
            # any non 200 response on fetching the release version
            # listing file returns a None here
            if not data:
                continue
            ver_listing = listing.ListingFile(data=data)
            # ver_listing.releases can be empty
            releases = releases + ver_listing.get_releases()
        releases_set = sorted(set(releases))
        return releases_set
    def _build_listing_path(self, content_url):
        # Everything before the '$releasever' variable is the content base;
        # the listing file lives directly under it.
        listing_parts = content_url.split('$releasever', 1)
        listing_base = listing_parts[0]
        listing_path = u"%s/listing" % listing_base
        # FIXME: cleanup paths ("//"'s, etc)
        # Make sure content URLS are encoded to the default utf8
        # as unicode strings aren't valid. See rhbz#1134963
        return listing_path.encode()
    # require tags provided by installed products?
    def _is_correct_rhel(self, product_tags, content_tags):
        """True when any 'rhel-*' product tag matches (exactly, or as a
        variant of a base) some 'rhel-*' content tag."""
        # easy to pass a string instead of a list
        assert not isinstance(product_tags, basestring)
        assert not isinstance(content_tags, basestring)
        for product_tag in product_tags:
            # we are comparing the lists to see if they
            # have a matching rhel-#
            product_split = product_tag.split('-', 2)
            if product_split[0] == "rhel":
                # look for match in content tags
                for content_tag in content_tags:
                    content_split = content_tag.split('-', 2)
                    # ignore non rhel content tags
                    if content_split[0] != "rhel":
                        continue
                    # exact match
                    if product_tag == content_tag:
                        return True
                    # is this content for a base of this variant
                    if product_tag.startswith(content_tag):
                        return True
            # else, we don't match, keep looking
        log.debug("No matching products with RHEL product tags found")
        return False
| gpl-2.0 |
ehashman/oh-mainline | vendor/packages/docutils/docutils/utils/math/math2html.py | 106 | 174512 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# math2html: convert LaTeX equations to HTML output.
#
# Copyright (C) 2009-2011 Alex Fernández
#
# Released under the terms of the `2-Clause BSD license'_, in short:
# Copying and distribution of this file, with or without modification,
# are permitted in any medium without royalty provided the copyright
# notice and this notice are preserved.
# This file is offered as-is, without any warranty.
#
# .. _2-Clause BSD license: http://www.spdx.org/licenses/BSD-2-Clause
# Based on eLyXer: convert LyX source files to HTML output.
# http://elyxer.nongnu.org/
# --end--
# Alex 20101110
# eLyXer standalone formula conversion to HTML.
import sys
class Trace(object):
    "A tracing class"
    # Global switches shared by every caller; mutated directly on the class.
    debugmode = False
    quietmode = False
    showlinesmode = False
    prefix = None
    @classmethod
    def debug(cls, message):
        "Show a debug message"
        if Trace.debugmode and not Trace.quietmode:
            Trace.show(message, sys.stdout)
    @classmethod
    def message(cls, message):
        "Show a trace message"
        if Trace.quietmode:
            return
        if Trace.showlinesmode and Trace.prefix:
            message = Trace.prefix + message
        Trace.show(message, sys.stdout)
    @classmethod
    def error(cls, message):
        "Show an error message"
        message = '* ' + message
        if Trace.showlinesmode and Trace.prefix:
            message = Trace.prefix + message
        Trace.show(message, sys.stderr)
    @classmethod
    def fatal(cls, message):
        "Show an error message and terminate"
        Trace.error('FATAL: ' + message)
        exit(-1)
    @classmethod
    def show(cls, message, channel):
        "Show a message out of a channel"
        text = message
        if sys.version_info < (3, 0):
            # Python 2 file objects want encoded bytes, not unicode.
            text = text.encode('utf-8')
        channel.write(text + '\n')
import os.path
import sys
class BibStylesConfig(object):
    "Configuration class from elyxer.config file"
    # Each attribute is one bibliography style: a map from BibTeX entry type
    # (u'@article', u'@book', ...) to an HTML template.  $name placeholders
    # are filled from the entry's fields; a {...} group is dropped entirely
    # when a placeholder inside it has no value.  u'cite' formats the inline
    # citation text; u'default' is the fallback for unlisted entry types.
    abbrvnat = {
        u'@article':u'$authors. $title. <i>$journal</i>,{ {$volume:}$pages,} $month $year.{ doi: $doi.}{ URL <a href="$url">$url</a>.}{ $note.}',
        u'cite':u'$surname($year)',
        u'default':u'$authors. <i>$title</i>. $publisher, $year.{ URL <a href="$url">$url</a>.}{ $note.}',
    }
    alpha = {
        u'@article':u'$authors. $title.{ <i>$journal</i>{, {$volume}{($number)}}{: $pages}{, $year}.}{ <a href="$url">$url</a>.}{ <a href="$filename">$filename</a>.}{ $note.}',
        u'cite':u'$Sur$YY',
        u'default':u'$authors. $title.{ <i>$journal</i>,} $year.{ <a href="$url">$url</a>.}{ <a href="$filename">$filename</a>.}{ $note.}',
    }
    authordate2 = {
        u'@article':u'$authors. $year. $title. <i>$journal</i>, <b>$volume</b>($number), $pages.{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@book':u'$authors. $year. <i>$title</i>. $publisher.{ URL <a href="$url">$url</a>.}{ $note.}',
        u'cite':u'$surname, $year',
        u'default':u'$authors. $year. <i>$title</i>. $publisher.{ URL <a href="$url">$url</a>.}{ $note.}',
    }
    default = {
        u'@article':u'$authors: “$title”, <i>$journal</i>,{ pp. $pages,} $year.{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@book':u'{$authors: }<i>$title</i>{ ($editor, ed.)}.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@booklet':u'$authors: <i>$title</i>.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@conference':u'$authors: “$title”, <i>$journal</i>,{ pp. $pages,} $year.{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@inbook':u'$authors: <i>$title</i>.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@incollection':u'$authors: <i>$title</i>{ in <i>$booktitle</i>{ ($editor, ed.)}}.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@inproceedings':u'$authors: “$title”, <i>$journal</i>,{ pp. $pages,} $year.{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@manual':u'$authors: <i>$title</i>.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@mastersthesis':u'$authors: <i>$title</i>.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@misc':u'$authors: <i>$title</i>.{{ $publisher,}{ $howpublished,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@phdthesis':u'$authors: <i>$title</i>.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@proceedings':u'$authors: “$title”, <i>$journal</i>,{ pp. $pages,} $year.{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@techreport':u'$authors: <i>$title</i>, $year.{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@unpublished':u'$authors: “$title”, <i>$journal</i>, $year.{ URL <a href="$url">$url</a>.}{ $note.}',
        u'cite':u'$index',
        u'default':u'$authors: <i>$title</i>.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
    }
    # Fallback values used when an entry lacks the corresponding field.
    defaulttags = {
        u'YY':u'??', u'authors':u'', u'surname':u'',
    }
    ieeetr = {
        u'@article':u'$authors, “$title”, <i>$journal</i>, vol. $volume, no. $number, pp. $pages, $year.{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@book':u'$authors, <i>$title</i>. $publisher, $year.{ URL <a href="$url">$url</a>.}{ $note.}',
        u'cite':u'$index',
        u'default':u'$authors, “$title”. $year.{ URL <a href="$url">$url</a>.}{ $note.}',
    }
    plain = {
        u'@article':u'$authors. $title.{ <i>$journal</i>{, {$volume}{($number)}}{:$pages}{, $year}.}{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@book':u'$authors. <i>$title</i>. $publisher,{ $month} $year.{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@incollection':u'$authors. $title.{ In <i>$booktitle</i> {($editor, ed.)}.} $publisher,{ $month} $year.{ URL <a href="$url">$url</a>.}{ $note.}',
        u'@inproceedings':u'$authors. $title. { <i>$booktitle</i>{, {$volume}{($number)}}{:$pages}{, $year}.}{ URL <a href="$url">$url</a>.}{ $note.}',
        u'cite':u'$index',
        u'default':u'{$authors. }$title.{{ $publisher,} $year.}{ URL <a href="$url">$url</a>.}{ $note.}',
    }
    vancouver = {
        u'@article':u'$authors. $title. <i>$journal</i>, $year{;{<b>$volume</b>}{($number)}{:$pages}}.{ URL: <a href="$url">$url</a>.}{ $note.}',
        u'@book':u'$authors. $title. {$publisher, }$year.{ URL: <a href="$url">$url</a>.}{ $note.}',
        u'cite':u'$index',
        u'default':u'$authors. $title; {$publisher, }$year.{ $howpublished.}{ URL: <a href="$url">$url</a>.}{ $note.}',
    }
class BibTeXConfig(object):
    "Configuration class from elyxer.config file"
    # Character sequences rewritten while parsing BibTeX field values:
    # TeX double dash becomes an em dash, doubled periods collapse to one.
    replaced = {
        u'--':u'—', u'..':u'.',
    }
class ContainerConfig(object):
    "Configuration class from elyxer.config file"
    # Marker that closes each container type while parsing LyX source.
    endings = {
        u'Align':u'\\end_layout', u'BarredText':u'\\bar',
        u'BoldText':u'\\series', u'Cell':u'</cell',
        u'ChangeDeleted':u'\\change_unchanged',
        u'ChangeInserted':u'\\change_unchanged', u'ColorText':u'\\color',
        u'EmphaticText':u'\\emph', u'Hfill':u'\\hfill', u'Inset':u'\\end_inset',
        u'Layout':u'\\end_layout', u'LyXFooter':u'\\end_document',
        u'LyXHeader':u'\\end_header', u'Row':u'</row', u'ShapedText':u'\\shape',
        u'SizeText':u'\\size', u'StrikeOut':u'\\strikeout',
        u'TextFamily':u'\\family', u'VersalitasText':u'\\noun',
    }
    # Rules for pulling plain text out of nested containers:
    # 'allowed' types yield text directly, 'extracted' types are descended into.
    extracttext = {
        u'allowed':[u'StringContainer',u'Constant',u'FormulaConstant',],
        u'cloned':[u'',],
        u'extracted':[u'PlainLayout',u'TaggedText',u'Align',u'Caption',u'TextFamily',u'EmphaticText',u'VersalitasText',u'BarredText',u'SizeText',u'ColorText',u'LangLine',u'Formula',u'Bracket',u'RawText',u'BibTag',u'FormulaNumber',u'AlphaCommand',u'EmptyCommand',u'OneParamFunction',u'SymbolFunction',u'TextFunction',u'FontFunction',u'CombiningFunction',u'DecoratingFunction',u'FormulaSymbol',u'BracketCommand',u'TeXCode',],
    }
    # Block-opening commands paired with the command that closes them.
    startendings = {
        u'\\begin_deeper':u'\\end_deeper', u'\\begin_inset':u'\\end_inset',
        u'\\begin_layout':u'\\end_layout',
    }
    # Dispatch table: LyX line prefix -> container class name to instantiate.
    # Longest-prefix wins; u'BlackBox' swallows lines with no HTML output.
    starts = {
        u'':u'StringContainer', u'#LyX':u'BlackBox', u'</lyxtabular':u'BlackBox',
        u'<cell':u'Cell', u'<column':u'Column', u'<row':u'Row',
        u'\\align':u'Align', u'\\bar':u'BarredText',
        u'\\bar default':u'BlackBox', u'\\bar no':u'BlackBox',
        u'\\begin_body':u'BlackBox', u'\\begin_deeper':u'DeeperList',
        u'\\begin_document':u'BlackBox', u'\\begin_header':u'LyXHeader',
        u'\\begin_inset Argument':u'ShortTitle',
        u'\\begin_inset Box':u'BoxInset', u'\\begin_inset Branch':u'Branch',
        u'\\begin_inset Caption':u'Caption',
        u'\\begin_inset CommandInset bibitem':u'BiblioEntry',
        u'\\begin_inset CommandInset bibtex':u'BibTeX',
        u'\\begin_inset CommandInset citation':u'BiblioCitation',
        u'\\begin_inset CommandInset href':u'URL',
        u'\\begin_inset CommandInset include':u'IncludeInset',
        u'\\begin_inset CommandInset index_print':u'PrintIndex',
        u'\\begin_inset CommandInset label':u'Label',
        u'\\begin_inset CommandInset line':u'LineInset',
        u'\\begin_inset CommandInset nomencl_print':u'PrintNomenclature',
        u'\\begin_inset CommandInset nomenclature':u'NomenclatureEntry',
        u'\\begin_inset CommandInset ref':u'Reference',
        u'\\begin_inset CommandInset toc':u'TableOfContents',
        u'\\begin_inset ERT':u'ERT', u'\\begin_inset Flex':u'FlexInset',
        u'\\begin_inset Flex Chunkref':u'NewfangledChunkRef',
        u'\\begin_inset Flex Marginnote':u'SideNote',
        u'\\begin_inset Flex Sidenote':u'SideNote',
        u'\\begin_inset Flex URL':u'FlexURL', u'\\begin_inset Float':u'Float',
        u'\\begin_inset FloatList':u'ListOf', u'\\begin_inset Foot':u'Footnote',
        u'\\begin_inset Formula':u'Formula',
        u'\\begin_inset FormulaMacro':u'FormulaMacro',
        u'\\begin_inset Graphics':u'Image',
        u'\\begin_inset Index':u'IndexReference',
        u'\\begin_inset Info':u'InfoInset',
        u'\\begin_inset LatexCommand bibitem':u'BiblioEntry',
        u'\\begin_inset LatexCommand bibtex':u'BibTeX',
        u'\\begin_inset LatexCommand cite':u'BiblioCitation',
        u'\\begin_inset LatexCommand citealt':u'BiblioCitation',
        u'\\begin_inset LatexCommand citep':u'BiblioCitation',
        u'\\begin_inset LatexCommand citet':u'BiblioCitation',
        u'\\begin_inset LatexCommand htmlurl':u'URL',
        u'\\begin_inset LatexCommand index':u'IndexReference',
        u'\\begin_inset LatexCommand label':u'Label',
        u'\\begin_inset LatexCommand nomenclature':u'NomenclatureEntry',
        u'\\begin_inset LatexCommand prettyref':u'Reference',
        u'\\begin_inset LatexCommand printindex':u'PrintIndex',
        u'\\begin_inset LatexCommand printnomenclature':u'PrintNomenclature',
        u'\\begin_inset LatexCommand ref':u'Reference',
        u'\\begin_inset LatexCommand tableofcontents':u'TableOfContents',
        u'\\begin_inset LatexCommand url':u'URL',
        u'\\begin_inset LatexCommand vref':u'Reference',
        u'\\begin_inset Marginal':u'SideNote',
        u'\\begin_inset Newline':u'NewlineInset',
        u'\\begin_inset Newpage':u'NewPageInset', u'\\begin_inset Note':u'Note',
        u'\\begin_inset OptArg':u'ShortTitle',
        u'\\begin_inset Phantom':u'PhantomText',
        u'\\begin_inset Quotes':u'QuoteContainer',
        u'\\begin_inset Tabular':u'Table', u'\\begin_inset Text':u'InsetText',
        u'\\begin_inset VSpace':u'VerticalSpace', u'\\begin_inset Wrap':u'Wrap',
        u'\\begin_inset listings':u'Listing', u'\\begin_inset space':u'Space',
        u'\\begin_layout':u'Layout', u'\\begin_layout Abstract':u'Abstract',
        u'\\begin_layout Author':u'Author',
        u'\\begin_layout Bibliography':u'Bibliography',
        u'\\begin_layout Chunk':u'NewfangledChunk',
        u'\\begin_layout Description':u'Description',
        u'\\begin_layout Enumerate':u'ListItem',
        u'\\begin_layout Itemize':u'ListItem', u'\\begin_layout List':u'List',
        u'\\begin_layout LyX-Code':u'LyXCode',
        u'\\begin_layout Plain':u'PlainLayout',
        u'\\begin_layout Standard':u'StandardLayout',
        u'\\begin_layout Title':u'Title', u'\\begin_preamble':u'LyXPreamble',
        u'\\change_deleted':u'ChangeDeleted',
        u'\\change_inserted':u'ChangeInserted',
        u'\\change_unchanged':u'BlackBox', u'\\color':u'ColorText',
        u'\\color inherit':u'BlackBox', u'\\color none':u'BlackBox',
        u'\\emph default':u'BlackBox', u'\\emph off':u'BlackBox',
        u'\\emph on':u'EmphaticText', u'\\emph toggle':u'EmphaticText',
        u'\\end_body':u'LyXFooter', u'\\family':u'TextFamily',
        u'\\family default':u'BlackBox', u'\\family roman':u'BlackBox',
        u'\\hfill':u'Hfill', u'\\labelwidthstring':u'BlackBox',
        u'\\lang':u'LangLine', u'\\length':u'InsetLength',
        u'\\lyxformat':u'LyXFormat', u'\\lyxline':u'LyXLine',
        u'\\newline':u'Newline', u'\\newpage':u'NewPage',
        u'\\noindent':u'BlackBox', u'\\noun default':u'BlackBox',
        u'\\noun off':u'BlackBox', u'\\noun on':u'VersalitasText',
        u'\\paragraph_spacing':u'BlackBox', u'\\series bold':u'BoldText',
        u'\\series default':u'BlackBox', u'\\series medium':u'BlackBox',
        u'\\shape':u'ShapedText', u'\\shape default':u'BlackBox',
        u'\\shape up':u'BlackBox', u'\\size':u'SizeText',
        u'\\size normal':u'BlackBox', u'\\start_of_appendix':u'StartAppendix',
        u'\\strikeout default':u'BlackBox', u'\\strikeout on':u'StrikeOut',
    }
    # Prefix that marks a LyX command inside a string.
    string = {
        u'startcommand':u'\\',
    }
    # Header lines recognized at the start of a table inset.
    table = {
        u'headers':[u'<lyxtabular',u'<features',],
    }
class EscapeConfig(object):
    "Configuration class from elyxer.config file"
    # Typographic character replacements applied to plain text.
    chars = {
        u'\n':u'', u' -- ':u' — ', u'\'':u'’', u'---':u'—', u'`':u'‘',
    }
    # LyX special-character commands and their output replacements.
    # NOTE(review): some replacement values contain literal space characters
    # that may originally have been non-breaking/thin space entities lost in
    # an entity-decoding pass — verify against upstream math2html if spacing
    # output looks wrong.
    commands = {
        u'\\InsetSpace \\space{}':u' ', u'\\InsetSpace \\thinspace{}':u' ',
        u'\\InsetSpace ~':u' ', u'\\SpecialChar \\-':u'',
        u'\\SpecialChar \\@.':u'.', u'\\SpecialChar \\ldots{}':u'…',
        u'\\SpecialChar \\menuseparator':u' ▷ ',
        u'\\SpecialChar \\nobreakdash-':u'-', u'\\SpecialChar \\slash{}':u'/',
        u'\\SpecialChar \\textcompwordmark{}':u'', u'\\backslash':u'\\',
    }
    # Characters that must be escaped as HTML entities.  Fixed: these had
    # been corrupted into identity mappings (u'&':u'&' etc.), which would
    # disable HTML escaping entirely and produce invalid markup.
    entities = {
        u'&':u'&amp;', u'<':u'&lt;', u'>':u'&gt;',
    }
    # Self-closing XHTML bracket rewritten for plain-HTML output.
    html = {
        u'/>':u'>',
    }
    # Spaces with no ISO-8859-15 representation, emitted as entities.
    # Values restored from entity-decoded corruption; keys are NBSP,
    # em space and four-per-em space (confirm against upstream).
    iso885915 = {
        u'\u00a0':u'&nbsp;', u'\u2003':u'&emsp;', u'\u2005':u'&#8197;',
    }
    # NOTE(review): key shown as a plain space in the recovered source;
    # upstream likely used a special Unicode space mapped to ASCII space.
    nonunicode = {
        u' ':u' ',
    }
class FormulaConfig(object):
  "Configuration class from elyxer.config file"

  # LaTeX commands that expand to alphabetic glyphs (Greek letters,
  # ligatures, dotless i/j, var* variants).
  alphacommands = {
      u'\\AA':u'Å', u'\\AE':u'Æ',
      u'\\AmS':u'<span class="versalitas">AmS</span>', u'\\DH':u'Ð',
      u'\\L':u'Ł', u'\\O':u'Ø', u'\\OE':u'Œ', u'\\TH':u'Þ', u'\\aa':u'å',
      u'\\ae':u'æ', u'\\alpha':u'α', u'\\beta':u'β', u'\\delta':u'δ',
      u'\\dh':u'ð', u'\\epsilon':u'ϵ', u'\\eta':u'η', u'\\gamma':u'γ',
      u'\\i':u'ı', u'\\imath':u'ı', u'\\iota':u'ι', u'\\j':u'ȷ',
      u'\\jmath':u'ȷ', u'\\kappa':u'κ', u'\\l':u'ł', u'\\lambda':u'λ',
      u'\\mu':u'μ', u'\\nu':u'ν', u'\\o':u'ø', u'\\oe':u'œ', u'\\omega':u'ω',
      u'\\phi':u'φ', u'\\pi':u'π', u'\\psi':u'ψ', u'\\rho':u'ρ',
      u'\\sigma':u'σ', u'\\ss':u'ß', u'\\tau':u'τ', u'\\textcrh':u'ħ',
      u'\\th':u'þ', u'\\theta':u'θ', u'\\upsilon':u'υ', u'\\varDelta':u'∆',
      u'\\varGamma':u'Γ', u'\\varLambda':u'Λ', u'\\varOmega':u'Ω',
      u'\\varPhi':u'Φ', u'\\varPi':u'Π', u'\\varPsi':u'Ψ', u'\\varSigma':u'Σ',
      u'\\varTheta':u'Θ', u'\\varUpsilon':u'Υ', u'\\varXi':u'Ξ',
      u'\\varepsilon':u'ε', u'\\varkappa':u'ϰ', u'\\varphi':u'φ',
      u'\\varpi':u'ϖ', u'\\varrho':u'ϱ', u'\\varsigma':u'ς',
      u'\\vartheta':u'ϑ', u'\\xi':u'ξ', u'\\zeta':u'ζ',
      }
  # Delimiters used when parsing array/matrix environments.
  array = {
      u'begin':u'\\begin', u'cellseparator':u'&', u'end':u'\\end',
      u'rowseparator':u'\\\\',
      }
  # Piecewise glyphs (top, extender, bottom pieces) for tall brackets.
  bigbrackets = {
      u'(':[u'⎛',u'⎜',u'⎝',], u')':[u'⎞',u'⎟',u'⎠',], u'[':[u'⎡',u'⎢',u'⎣',],
      u']':[u'⎤',u'⎥',u'⎦',], u'{':[u'⎧',u'⎪',u'⎨',u'⎩',], u'|':[u'|',],
      u'}':[u'⎫',u'⎪',u'⎬',u'⎭',], u'∥':[u'∥',],
      }
  # Two-piece forms of big operators (sum, integral).
  bigsymbols = {
      u'∑':[u'⎲',u'⎳',], u'∫':[u'⌠',u'⌡',],
      }
  # \left/\right sizing commands; bare tag names get wrapped in <>,
  # full tags (dot variants) are emitted as-is.
  bracketcommands = {
      u'\\left':u'span class="symbol"',
      u'\\left.':u'<span class="leftdot"></span>',
      u'\\middle':u'span class="symbol"', u'\\right':u'span class="symbol"',
      u'\\right.':u'<span class="rightdot"></span>',
      }
  # Accent commands mapped to Unicode combining characters (applied to
  # the following glyph).
  combiningfunctions = {
      u'\\"':u'̈', u'\\\'':u'́', u'\\^':u'̂', u'\\`':u'̀', u'\\acute':u'́',
      u'\\bar':u'̄', u'\\breve':u'̆', u'\\c':u'̧', u'\\check':u'̌',
      u'\\dddot':u'⃛', u'\\ddot':u'̈', u'\\dot':u'̇', u'\\grave':u'̀',
      u'\\hat':u'̂', u'\\mathring':u'̊', u'\\overleftarrow':u'⃖',
      u'\\overrightarrow':u'⃗', u'\\r':u'̊', u'\\s':u'̩',
      u'\\textcircled':u'⃝', u'\\textsubring':u'̥', u'\\tilde':u'̃',
      u'\\v':u'̌', u'\\vec':u'⃗', u'\\~':u'̃',
      }
  # General symbol commands → literal output. Keys ending in ')' are
  # deliberate parenthesised variants, and both '\\rightharpooondown'
  # and '\\rightharpooonup' (triple o) coexist with the correctly
  # spelled keys — presumably typo-tolerant aliases; verify upstream.
  # NOTE(review): '\\textcopyright' maps to "©'" with a trailing
  # apostrophe — looks like a typo, confirm against elyxer.config.
  commands = {
      u'\\ ':u' ', u'\\!':u'', u'\\#':u'#', u'\\$':u'$', u'\\%':u'%',
      u'\\&':u'&', u'\\,':u' ', u'\\:':u' ', u'\\;':u' ',
      u'\\APLdownarrowbox':u'⍗', u'\\APLleftarrowbox':u'⍇',
      u'\\APLrightarrowbox':u'⍈', u'\\APLuparrowbox':u'⍐', u'\\Box':u'□',
      u'\\Bumpeq':u'≎', u'\\CIRCLE':u'●', u'\\Cap':u'⋒', u'\\CheckedBox':u'☑',
      u'\\Circle':u'○', u'\\Coloneqq':u'⩴', u'\\Corresponds':u'≙',
      u'\\Cup':u'⋓', u'\\Delta':u'Δ', u'\\Diamond':u'◇', u'\\Downarrow':u'⇓',
      u'\\EUR':u'€', u'\\Game':u'⅁', u'\\Gamma':u'Γ', u'\\Im':u'ℑ',
      u'\\Join':u'⨝', u'\\LEFTCIRCLE':u'◖', u'\\LEFTcircle':u'◐',
      u'\\Lambda':u'Λ', u'\\Leftarrow':u'⇐', u'\\Lleftarrow':u'⇚',
      u'\\Longleftarrow':u'⟸', u'\\Longleftrightarrow':u'⟺',
      u'\\Longrightarrow':u'⟹', u'\\Lsh':u'↰', u'\\Mapsfrom':u'⇐|',
      u'\\Mapsto':u'|⇒', u'\\Omega':u'Ω', u'\\P':u'¶', u'\\Phi':u'Φ',
      u'\\Pi':u'Π', u'\\Pr':u'Pr', u'\\Psi':u'Ψ', u'\\RIGHTCIRCLE':u'◗',
      u'\\RIGHTcircle':u'◑', u'\\Re':u'ℜ', u'\\Rrightarrow':u'⇛',
      u'\\Rsh':u'↱', u'\\S':u'§', u'\\Sigma':u'Σ', u'\\Square':u'☐',
      u'\\Subset':u'⋐', u'\\Supset':u'⋑', u'\\Theta':u'Θ', u'\\Uparrow':u'⇑',
      u'\\Updownarrow':u'⇕', u'\\Upsilon':u'Υ', u'\\Vdash':u'⊩',
      u'\\Vert':u'∥', u'\\Vvdash':u'⊪', u'\\XBox':u'☒', u'\\Xi':u'Ξ',
      u'\\Yup':u'⅄', u'\\\\':u'<br/>', u'\\_':u'_', u'\\aleph':u'ℵ',
      u'\\amalg':u'∐', u'\\angle':u'∠', u'\\aquarius':u'♒',
      u'\\arccos':u'arccos', u'\\arcsin':u'arcsin', u'\\arctan':u'arctan',
      u'\\arg':u'arg', u'\\aries':u'♈', u'\\ast':u'∗', u'\\asymp':u'≍',
      u'\\backepsilon':u'∍', u'\\backprime':u'‵', u'\\backsimeq':u'⋍',
      u'\\backslash':u'\\', u'\\barwedge':u'⊼', u'\\because':u'∵',
      u'\\beth':u'ℶ', u'\\between':u'≬', u'\\bigcap':u'∩', u'\\bigcirc':u'○',
      u'\\bigcup':u'∪', u'\\bigodot':u'⊙', u'\\bigoplus':u'⊕',
      u'\\bigotimes':u'⊗', u'\\bigsqcup':u'⊔', u'\\bigstar':u'★',
      u'\\bigtriangledown':u'▽', u'\\bigtriangleup':u'△', u'\\biguplus':u'⊎',
      u'\\bigvee':u'∨', u'\\bigwedge':u'∧', u'\\blacklozenge':u'⧫',
      u'\\blacksmiley':u'☻', u'\\blacksquare':u'■', u'\\blacktriangle':u'▲',
      u'\\blacktriangledown':u'▼', u'\\blacktriangleright':u'▶', u'\\bot':u'⊥',
      u'\\bowtie':u'⋈', u'\\box':u'▫', u'\\boxdot':u'⊡', u'\\bullet':u'•',
      u'\\bumpeq':u'≏', u'\\cancer':u'♋', u'\\cap':u'∩', u'\\capricornus':u'♑',
      u'\\cdot':u'⋅', u'\\cdots':u'⋯', u'\\centerdot':u'∙',
      u'\\checkmark':u'✓', u'\\chi':u'χ', u'\\circ':u'○', u'\\circeq':u'≗',
      u'\\circledR':u'®', u'\\circledast':u'⊛', u'\\circledcirc':u'⊚',
      u'\\circleddash':u'⊝', u'\\clubsuit':u'♣', u'\\coloneqq':u'≔',
      u'\\complement':u'∁', u'\\cong':u'≅', u'\\coprod':u'∐',
      u'\\copyright':u'©', u'\\cos':u'cos', u'\\cosh':u'cosh', u'\\cot':u'cot',
      u'\\coth':u'coth', u'\\csc':u'csc', u'\\cup':u'∪',
      u'\\curvearrowleft':u'↶', u'\\curvearrowright':u'↷', u'\\dag':u'†',
      u'\\dagger':u'†', u'\\daleth':u'ℸ', u'\\dashleftarrow':u'⇠',
      u'\\dashv':u'⊣', u'\\ddag':u'‡', u'\\ddagger':u'‡', u'\\ddots':u'⋱',
      u'\\deg':u'deg', u'\\det':u'det', u'\\diagdown':u'╲', u'\\diagup':u'╱',
      u'\\diamond':u'◇', u'\\diamondsuit':u'♦', u'\\dim':u'dim', u'\\div':u'÷',
      u'\\divideontimes':u'⋇', u'\\dotdiv':u'∸', u'\\doteq':u'≐',
      u'\\doteqdot':u'≑', u'\\dotplus':u'∔', u'\\dots':u'…',
      u'\\doublebarwedge':u'⌆', u'\\downarrow':u'↓', u'\\downdownarrows':u'⇊',
      u'\\downharpoonleft':u'⇃', u'\\downharpoonright':u'⇂', u'\\earth':u'♁',
      u'\\ell':u'ℓ', u'\\emptyset':u'∅', u'\\eqcirc':u'≖', u'\\eqcolon':u'≕',
      u'\\eqsim':u'≂', u'\\euro':u'€', u'\\exists':u'∃', u'\\exp':u'exp',
      u'\\fallingdotseq':u'≒', u'\\female':u'♀', u'\\flat':u'♭',
      u'\\forall':u'∀', u'\\frown':u'⌢', u'\\frownie':u'☹', u'\\gcd':u'gcd',
      u'\\gemini':u'♊', u'\\geq)':u'≥', u'\\geqq':u'≧', u'\\geqslant':u'≥',
      u'\\gets':u'←', u'\\gg':u'≫', u'\\ggg':u'⋙', u'\\gimel':u'ℷ',
      u'\\gneqq':u'≩', u'\\gnsim':u'⋧', u'\\gtrdot':u'⋗', u'\\gtreqless':u'⋚',
      u'\\gtreqqless':u'⪌', u'\\gtrless':u'≷', u'\\gtrsim':u'≳',
      u'\\guillemotleft':u'«', u'\\guillemotright':u'»', u'\\hbar':u'ℏ',
      u'\\heartsuit':u'♥', u'\\hfill':u'<span class="hfill"> </span>',
      u'\\hom':u'hom', u'\\hookleftarrow':u'↩', u'\\hookrightarrow':u'↪',
      u'\\hslash':u'ℏ', u'\\idotsint':u'<span class="bigsymbol">∫⋯∫</span>',
      u'\\iiint':u'<span class="bigsymbol">∭</span>',
      u'\\iint':u'<span class="bigsymbol">∬</span>', u'\\imath':u'ı',
      u'\\inf':u'inf', u'\\infty':u'∞', u'\\invneg':u'⌐', u'\\jmath':u'ȷ',
      u'\\jupiter':u'♃', u'\\ker':u'ker', u'\\land':u'∧',
      u'\\landupint':u'<span class="bigsymbol">∱</span>', u'\\langle':u'⟨',
      u'\\lbrace':u'{', u'\\lbrace)':u'{', u'\\lbrack':u'[', u'\\lceil':u'⌈',
      u'\\ldots':u'…', u'\\leadsto':u'⇝', u'\\leftarrow)':u'←',
      u'\\leftarrowtail':u'↢', u'\\leftarrowtobar':u'⇤',
      u'\\leftharpoondown':u'↽', u'\\leftharpoonup':u'↼',
      u'\\leftleftarrows':u'⇇', u'\\leftleftharpoons':u'⥢', u'\\leftmoon':u'☾',
      u'\\leftrightarrow':u'↔', u'\\leftrightarrows':u'⇆',
      u'\\leftrightharpoons':u'⇋', u'\\leftthreetimes':u'⋋', u'\\leo':u'♌',
      u'\\leq)':u'≤', u'\\leqq':u'≦', u'\\leqslant':u'≤', u'\\lessdot':u'⋖',
      u'\\lesseqgtr':u'⋛', u'\\lesseqqgtr':u'⪋', u'\\lessgtr':u'≶',
      u'\\lesssim':u'≲', u'\\lfloor':u'⌊', u'\\lg':u'lg', u'\\lhd':u'⊲',
      u'\\libra':u'♎', u'\\lightning':u'↯', u'\\liminf':u'liminf',
      u'\\limsup':u'limsup', u'\\ll':u'≪', u'\\lll':u'⋘', u'\\ln':u'ln',
      u'\\lneqq':u'≨', u'\\lnot':u'¬', u'\\lnsim':u'⋦', u'\\log':u'log',
      u'\\longleftarrow':u'⟵', u'\\longleftrightarrow':u'⟷',
      u'\\longmapsto':u'⟼', u'\\longrightarrow':u'⟶', u'\\looparrowleft':u'↫',
      u'\\looparrowright':u'↬', u'\\lor':u'∨', u'\\lozenge':u'◊',
      u'\\ltimes':u'⋉', u'\\lyxlock':u'', u'\\male':u'♂', u'\\maltese':u'✠',
      u'\\mapsfrom':u'↤', u'\\mapsto':u'↦', u'\\mathcircumflex':u'^',
      u'\\max':u'max', u'\\measuredangle':u'∡', u'\\mercury':u'☿',
      u'\\mho':u'℧', u'\\mid':u'∣', u'\\min':u'min', u'\\models':u'⊨',
      u'\\mp':u'∓', u'\\multimap':u'⊸', u'\\nLeftarrow':u'⇍',
      u'\\nLeftrightarrow':u'⇎', u'\\nRightarrow':u'⇏', u'\\nVDash':u'⊯',
      u'\\nabla':u'∇', u'\\napprox':u'≉', u'\\natural':u'♮', u'\\ncong':u'≇',
      u'\\nearrow':u'↗', u'\\neg':u'¬', u'\\neg)':u'¬', u'\\neptune':u'♆',
      u'\\nequiv':u'≢', u'\\newline':u'<br/>', u'\\nexists':u'∄',
      u'\\ngeqslant':u'≱', u'\\ngtr':u'≯', u'\\ngtrless':u'≹', u'\\ni':u'∋',
      u'\\ni)':u'∋', u'\\nleftarrow':u'↚', u'\\nleftrightarrow':u'↮',
      u'\\nleqslant':u'≰', u'\\nless':u'≮', u'\\nlessgtr':u'≸', u'\\nmid':u'∤',
      u'\\nolimits':u'', u'\\nonumber':u'', u'\\not':u'¬', u'\\not<':u'≮',
      u'\\not=':u'≠', u'\\not>':u'≯', u'\\notbackslash':u'⍀', u'\\notin':u'∉',
      u'\\notni':u'∌', u'\\notslash':u'⌿', u'\\nparallel':u'∦',
      u'\\nprec':u'⊀', u'\\nrightarrow':u'↛', u'\\nsim':u'≁', u'\\nsimeq':u'≄',
      u'\\nsqsubset':u'⊏̸', u'\\nsubseteq':u'⊈', u'\\nsucc':u'⊁',
      u'\\nsucccurlyeq':u'⋡', u'\\nsupset':u'⊅', u'\\nsupseteq':u'⊉',
      u'\\ntriangleleft':u'⋪', u'\\ntrianglelefteq':u'⋬',
      u'\\ntriangleright':u'⋫', u'\\ntrianglerighteq':u'⋭', u'\\nvDash':u'⊭',
      u'\\nvdash':u'⊬', u'\\nwarrow':u'↖', u'\\odot':u'⊙',
      u'\\officialeuro':u'€', u'\\oiiint':u'<span class="bigsymbol">∰</span>',
      u'\\oiint':u'<span class="bigsymbol">∯</span>',
      u'\\oint':u'<span class="bigsymbol">∮</span>',
      u'\\ointclockwise':u'<span class="bigsymbol">∲</span>',
      u'\\ointctrclockwise':u'<span class="bigsymbol">∳</span>',
      u'\\ominus':u'⊖', u'\\oplus':u'⊕', u'\\oslash':u'⊘', u'\\otimes':u'⊗',
      u'\\owns':u'∋', u'\\parallel':u'∥', u'\\partial':u'∂', u'\\perp':u'⊥',
      u'\\pisces':u'♓', u'\\pitchfork':u'⋔', u'\\pluto':u'♇', u'\\pm':u'±',
      u'\\pointer':u'➪', u'\\pounds':u'£', u'\\prec':u'≺',
      u'\\preccurlyeq':u'≼', u'\\preceq':u'≼', u'\\precsim':u'≾',
      u'\\prime':u'′', u'\\prompto':u'∝', u'\\qquad':u' ', u'\\quad':u' ',
      u'\\quarternote':u'♩', u'\\rangle':u'⟩', u'\\rbrace':u'}',
      u'\\rbrace)':u'}', u'\\rbrack':u']', u'\\rceil':u'⌉', u'\\rfloor':u'⌋',
      u'\\rhd':u'⊳', u'\\rightarrow)':u'→', u'\\rightarrowtail':u'↣',
      u'\\rightarrowtobar':u'⇥', u'\\rightharpoondown':u'⇁',
      u'\\rightharpoonup':u'⇀', u'\\rightharpooondown':u'⇁',
      u'\\rightharpooonup':u'⇀', u'\\rightleftarrows':u'⇄',
      u'\\rightleftharpoons':u'⇌', u'\\rightmoon':u'☽',
      u'\\rightrightarrows':u'⇉', u'\\rightrightharpoons':u'⥤',
      u'\\rightthreetimes':u'⋌', u'\\risingdotseq':u'≓', u'\\rtimes':u'⋊',
      u'\\sagittarius':u'♐', u'\\saturn':u'♄', u'\\scorpio':u'♏',
      u'\\searrow':u'↘', u'\\sec':u'sec', u'\\setminus':u'∖', u'\\sharp':u'♯',
      u'\\simeq':u'≃', u'\\sin':u'sin', u'\\sinh':u'sinh', u'\\slash':u'∕',
      u'\\smile':u'⌣', u'\\smiley':u'☺', u'\\spadesuit':u'♠',
      u'\\sphericalangle':u'∢', u'\\sqcap':u'⊓', u'\\sqcup':u'⊔',
      u'\\sqsubset':u'⊏', u'\\sqsubseteq':u'⊑', u'\\sqsupset':u'⊐',
      u'\\sqsupseteq':u'⊒', u'\\square':u'□', u'\\star':u'⋆',
      u'\\subseteqq':u'⫅', u'\\subsetneqq':u'⫋', u'\\succ':u'≻',
      u'\\succcurlyeq':u'≽', u'\\succeq':u'≽', u'\\succnsim':u'⋩',
      u'\\succsim':u'≿', u'\\sun':u'☼', u'\\sup':u'sup', u'\\supseteqq':u'⫆',
      u'\\supsetneqq':u'⫌', u'\\surd':u'√', u'\\swarrow':u'↙', u'\\tan':u'tan',
      u'\\tanh':u'tanh', u'\\taurus':u'♉', u'\\textasciicircum':u'^',
      u'\\textasciitilde':u'~', u'\\textbackslash':u'\\',
      u'\\textcopyright':u'©\'', u'\\textdegree':u'°', u'\\textellipsis':u'…',
      u'\\textemdash':u'—', u'\\textendash':u'—', u'\\texteuro':u'€',
      u'\\textgreater':u'>', u'\\textless':u'<', u'\\textordfeminine':u'ª',
      u'\\textordmasculine':u'º', u'\\textquotedblleft':u'“',
      u'\\textquotedblright':u'”', u'\\textquoteright':u'’',
      u'\\textregistered':u'®', u'\\textrightarrow':u'→',
      u'\\textsection':u'§', u'\\texttrademark':u'™',
      u'\\texttwosuperior':u'²', u'\\textvisiblespace':u' ',
      u'\\therefore':u'∴', u'\\top':u'⊤', u'\\triangle':u'△',
      u'\\triangleleft':u'⊲', u'\\trianglelefteq':u'⊴', u'\\triangleq':u'≜',
      u'\\triangleright':u'▷', u'\\trianglerighteq':u'⊵',
      u'\\twoheadleftarrow':u'↞', u'\\twoheadrightarrow':u'↠',
      u'\\twonotes':u'♫', u'\\udot':u'⊍', u'\\unlhd':u'⊴', u'\\unrhd':u'⊵',
      u'\\unrhl':u'⊵', u'\\uparrow':u'↑', u'\\updownarrow':u'↕',
      u'\\upharpoonleft':u'↿', u'\\upharpoonright':u'↾', u'\\uplus':u'⊎',
      u'\\upuparrows':u'⇈', u'\\uranus':u'♅', u'\\vDash':u'⊨',
      u'\\varclubsuit':u'♧', u'\\vardiamondsuit':u'♦', u'\\varheartsuit':u'♥',
      u'\\varnothing':u'∅', u'\\varspadesuit':u'♤', u'\\vdash':u'⊢',
      u'\\vdots':u'⋮', u'\\vee':u'∨', u'\\vee)':u'∨', u'\\veebar':u'⊻',
      u'\\vert':u'∣', u'\\virgo':u'♍', u'\\wedge':u'∧', u'\\wedge)':u'∧',
      u'\\wp':u'℘', u'\\wr':u'≀', u'\\yen':u'¥', u'\\{':u'{', u'\\|':u'∥',
      u'\\}':u'}',
      }
  # Commands whose output decorates the previous element (none defined).
  decoratedcommand = {
      }
  # Commands that draw a symbol above the following element.
  decoratingfunctions = {
      u'\\overleftarrow':u'⟵', u'\\overrightarrow':u'⟶', u'\\widehat':u'^',
      }
  # Tokens that close each kind of formula construct.
  endings = {
      u'bracket':u'}', u'complex':u'\\]', u'endafter':u'}',
      u'endbefore':u'\\end{', u'squarebracket':u']',
      }
  # Per-column alignment for multi-line formula environments.
  environments = {
      u'align':[u'r',u'l',], u'eqnarray':[u'r',u'c',u'l',],
      u'gathered':[u'l',u'l',],
      }
  # Font-changing commands; values are either an HTML tag (to wrap the
  # argument) or a precomposed Unicode glyph for a specific letter.
  fontfunctions = {
      u'\\boldsymbol':u'b', u'\\mathbb':u'span class="blackboard"',
      u'\\mathbb{A}':u'𝔸', u'\\mathbb{B}':u'𝔹', u'\\mathbb{C}':u'ℂ',
      u'\\mathbb{D}':u'𝔻', u'\\mathbb{E}':u'𝔼', u'\\mathbb{F}':u'𝔽',
      u'\\mathbb{G}':u'𝔾', u'\\mathbb{H}':u'ℍ', u'\\mathbb{J}':u'𝕁',
      u'\\mathbb{K}':u'𝕂', u'\\mathbb{L}':u'𝕃', u'\\mathbb{N}':u'ℕ',
      u'\\mathbb{O}':u'𝕆', u'\\mathbb{P}':u'ℙ', u'\\mathbb{Q}':u'ℚ',
      u'\\mathbb{R}':u'ℝ', u'\\mathbb{S}':u'𝕊', u'\\mathbb{T}':u'𝕋',
      u'\\mathbb{W}':u'𝕎', u'\\mathbb{Z}':u'ℤ', u'\\mathbf':u'b',
      u'\\mathcal':u'span class="scriptfont"', u'\\mathcal{B}':u'ℬ',
      u'\\mathcal{E}':u'ℰ', u'\\mathcal{F}':u'ℱ', u'\\mathcal{H}':u'ℋ',
      u'\\mathcal{I}':u'ℐ', u'\\mathcal{L}':u'ℒ', u'\\mathcal{M}':u'ℳ',
      u'\\mathcal{R}':u'ℛ', u'\\mathfrak':u'span class="fraktur"',
      u'\\mathfrak{C}':u'ℭ', u'\\mathfrak{F}':u'𝔉', u'\\mathfrak{H}':u'ℌ',
      u'\\mathfrak{I}':u'ℑ', u'\\mathfrak{R}':u'ℜ', u'\\mathfrak{Z}':u'ℨ',
      u'\\mathit':u'i', u'\\mathring{A}':u'Å', u'\\mathring{U}':u'Ů',
      u'\\mathring{a}':u'å', u'\\mathring{u}':u'ů', u'\\mathring{w}':u'ẘ',
      u'\\mathring{y}':u'ẙ', u'\\mathrm':u'span class="mathrm"',
      u'\\mathscr':u'span class="scriptfont"', u'\\mathscr{B}':u'ℬ',
      u'\\mathscr{E}':u'ℰ', u'\\mathscr{F}':u'ℱ', u'\\mathscr{H}':u'ℋ',
      u'\\mathscr{I}':u'ℐ', u'\\mathscr{L}':u'ℒ', u'\\mathscr{M}':u'ℳ',
      u'\\mathscr{R}':u'ℛ', u'\\mathsf':u'span class="mathsf"',
      u'\\mathtt':u'tt',
      }
  # Mini-language templates: [parameter spec, output template, tags...];
  # $1..$n are parsed arguments, $p/$q are raw parameters, fN wraps in
  # the N-th tag of the list.
  hybridfunctions = {
      u'\\binom':[u'{$1}{$2}',u'f2{(}f0{f1{$1}f1{$2}}f2{)}',u'span class="binom"',u'span class="binomstack"',u'span class="bigsymbol"',],
      u'\\boxed':[u'{$1}',u'f0{$1}',u'span class="boxed"',],
      u'\\cfrac':[u'[$p!]{$1}{$2}',u'f0{f3{(}f1{$1}f3{)/(}f2{$2}f3{)}}',u'span class="fullfraction"',u'span class="numerator align-$p"',u'span class="denominator"',u'span class="ignored"',],
      u'\\color':[u'{$p!}{$1}',u'f0{$1}',u'span style="color: $p;"',],
      u'\\colorbox':[u'{$p!}{$1}',u'f0{$1}',u'span class="colorbox" style="background: $p;"',],
      u'\\dbinom':[u'{$1}{$2}',u'(f0{f1{f2{$1}}f1{f2{ }}f1{f2{$2}}})',u'span class="binomial"',u'span class="binomrow"',u'span class="binomcell"',],
      u'\\dfrac':[u'{$1}{$2}',u'f0{f3{(}f1{$1}f3{)/(}f2{$2}f3{)}}',u'span class="fullfraction"',u'span class="numerator"',u'span class="denominator"',u'span class="ignored"',],
      u'\\displaystyle':[u'{$1}',u'f0{$1}',u'span class="displaystyle"',],
      u'\\fbox':[u'{$1}',u'f0{$1}',u'span class="fbox"',],
      u'\\fboxrule':[u'{$p!}',u'f0{}',u'ignored',],
      u'\\fboxsep':[u'{$p!}',u'f0{}',u'ignored',],
      u'\\fcolorbox':[u'{$p!}{$q!}{$1}',u'f0{$1}',u'span class="boxed" style="border-color: $p; background: $q;"',],
      u'\\frac':[u'{$1}{$2}',u'f0{f3{(}f1{$1}f3{)/(}f2{$2}f3{)}}',u'span class="fraction"',u'span class="numerator"',u'span class="denominator"',u'span class="ignored"',],
      u'\\framebox':[u'[$p!][$q!]{$1}',u'f0{$1}',u'span class="framebox align-$q" style="width: $p;"',],
      u'\\href':[u'[$o]{$u!}{$t!}',u'f0{$t}',u'a href="$u"',],
      u'\\hspace':[u'{$p!}',u'f0{ }',u'span class="hspace" style="width: $p;"',],
      u'\\leftroot':[u'{$p!}',u'f0{ }',u'span class="leftroot" style="width: $p;px"',],
      u'\\nicefrac':[u'{$1}{$2}',u'f0{f1{$1}⁄f2{$2}}',u'span class="fraction"',u'sup class="numerator"',u'sub class="denominator"',u'span class="ignored"',],
      u'\\parbox':[u'[$p!]{$w!}{$1}',u'f0{1}',u'div class="Boxed" style="width: $w;"',],
      u'\\raisebox':[u'{$p!}{$1}',u'f0{$1.font}',u'span class="raisebox" style="vertical-align: $p;"',],
      u'\\renewenvironment':[u'{$1!}{$2!}{$3!}',u'',],
      u'\\rule':[u'[$v!]{$w!}{$h!}',u'f0/',u'hr class="line" style="width: $w; height: $h;"',],
      u'\\scriptscriptstyle':[u'{$1}',u'f0{$1}',u'span class="scriptscriptstyle"',],
      u'\\scriptstyle':[u'{$1}',u'f0{$1}',u'span class="scriptstyle"',],
      u'\\sqrt':[u'[$0]{$1}',u'f0{f1{$0}f2{√}f4{(}f3{$1}f4{)}}',u'span class="sqrt"',u'sup class="root"',u'span class="radical"',u'span class="root"',u'span class="ignored"',],
      u'\\stackrel':[u'{$1}{$2}',u'f0{f1{$1}f2{$2}}',u'span class="stackrel"',u'span class="upstackrel"',u'span class="downstackrel"',],
      u'\\tbinom':[u'{$1}{$2}',u'(f0{f1{f2{$1}}f1{f2{ }}f1{f2{$2}}})',u'span class="binomial"',u'span class="binomrow"',u'span class="binomcell"',],
      u'\\textcolor':[u'{$p!}{$1}',u'f0{$1}',u'span style="color: $p;"',],
      u'\\textstyle':[u'{$1}',u'f0{$1}',u'span class="textstyle"',],
      u'\\unit':[u'[$0]{$1}',u'$0f0{$1.font}',u'span class="unit"',],
      u'\\unitfrac':[u'[$0]{$1}{$2}',u'$0f0{f1{$1.font}⁄f2{$2.font}}',u'span class="fraction"',u'sup class="unit"',u'sub class="unit"',],
      u'\\uproot':[u'{$p!}',u'f0{ }',u'span class="uproot" style="width: $p;px"',],
      u'\\url':[u'{$u!}',u'f0{$u}',u'a href="$u"',],
      u'\\vspace':[u'{$p!}',u'f0{ }',u'span class="vspace" style="height: $p;"',],
      }
  # Size expressions ("$1+$2" = sum of argument sizes) for hybrids.
  hybridsizes = {
      u'\\binom':u'$1+$2', u'\\cfrac':u'$1+$2', u'\\dbinom':u'$1+$2+1',
      u'\\dfrac':u'$1+$2', u'\\frac':u'$1+$2', u'\\tbinom':u'$1+$2+1',
      }
  # \label emits an anchor; '#' is replaced with the label name.
  labelfunctions = {
      u'\\label':u'a name="#"',
      }
  # Operators whose sub/superscripts become limits in display mode.
  limitcommands = {
      u'\\int':u'∫', u'\\intop':u'∫', u'\\lim':u'lim', u'\\prod':u'∏',
      u'\\smallint':u'∫', u'\\sum':u'∑',
      }
  # TODO: setting for simple enlarged vs. piecewise symbols
  for key in (u'\\int', u'\\intop', u'\\prod', u'\\sum'):
    limitcommands[key] = '<span class="symbol">%s</span>' % limitcommands[key]
  # Commands dispatched to dedicated handler classes by name.
  misccommands = {
      u'\\limits':u'LimitPreviousCommand', u'\\newcommand':u'MacroDefinition',
      u'\\renewcommand':u'MacroDefinition',
      u'\\setcounter':u'SetCounterFunction', u'\\tag':u'FormulaTag',
      u'\\tag*':u'FormulaTag',
      }
  # Single characters rewritten (spaced operators, stripped markers).
  modified = {
      u'\n':u'', u' ':u'', u'$':u'', u'&':u' ', u'\'':u'’', u'+':u' + ',
      u',':u', ', u'-':u' − ', u'/':u' ⁄ ', u'<':u' < ', u'=':u' = ',
      u'>':u' > ', u'@':u'', u'~':u'',
      }
  # Commands taking exactly one argument, wrapped in the given tag.
  onefunctions = {
      u'\\Big':u'span class="bigsymbol"', u'\\Bigg':u'span class="hugesymbol"',
      u'\\bar':u'span class="bar"', u'\\begin{array}':u'span class="arraydef"',
      u'\\big':u'span class="symbol"', u'\\bigg':u'span class="largesymbol"',
      u'\\bigl':u'span class="bigsymbol"', u'\\bigr':u'span class="bigsymbol"',
      u'\\centering':u'span class="align-center"',
      u'\\ensuremath':u'span class="ensuremath"',
      u'\\hphantom':u'span class="phantom"',
      u'\\noindent':u'span class="noindent"',
      u'\\overbrace':u'span class="overbrace"',
      u'\\overline':u'span class="overline"',
      u'\\phantom':u'span class="phantom"',
      u'\\underbrace':u'span class="underbrace"', u'\\underline':u'u',
      u'\\vphantom':u'span class="phantom"',
      }
  # Relations/operators that get breathing space around their symbol.
  spacedcommands = {
      u'\\Leftrightarrow':u'⇔', u'\\Rightarrow':u'⇒', u'\\approx':u'≈',
      u'\\dashrightarrow':u'⇢', u'\\equiv':u'≡', u'\\ge':u'≥', u'\\geq':u'≥',
      u'\\implies':u' ⇒ ', u'\\in':u'∈', u'\\le':u'≤', u'\\leftarrow':u'←',
      u'\\leq':u'≤', u'\\ne':u'≠', u'\\neq':u'≠', u'\\not\\in':u'∉',
      u'\\propto':u'∝', u'\\rightarrow':u'→', u'\\rightsquigarrow':u'⇝',
      u'\\sim':u'~', u'\\subset':u'⊂', u'\\subseteq':u'⊆', u'\\supset':u'⊃',
      u'\\supseteq':u'⊇', u'\\times':u'×', u'\\to':u'→',
      }
  # Tokens that open each kind of formula construct.
  starts = {
      u'beginafter':u'}', u'beginbefore':u'\\begin{', u'bracket':u'{',
      u'command':u'\\', u'comment':u'%', u'complex':u'\\[', u'simple':u'$',
      u'squarebracket':u'[', u'unnumbered':u'*',
      }
  # Script characters mapped to HTML sub/superscript tags.
  symbolfunctions = {
      u'^':u'sup', u'_':u'sub',
      }
  # Text-mode commands inside formulas, wrapped in the given tag.
  textfunctions = {
      u'\\mbox':u'span class="mbox"', u'\\text':u'span class="text"',
      u'\\textbf':u'b', u'\\textipa':u'span class="textipa"', u'\\textit':u'i',
      u'\\textnormal':u'span class="textnormal"',
      u'\\textrm':u'span class="textrm"',
      u'\\textsc':u'span class="versalitas"',
      u'\\textsf':u'span class="textsf"', u'\\textsl':u'i', u'\\texttt':u'tt',
      u'\\textup':u'span class="normal"',
      }
  # Characters passed through to the output untouched.
  unmodified = {
      u'characters':[u'.',u'*',u'€',u'(',u')',u'[',u']',u':',u'·',u'!',u';',u'|',u'§',u'"',],
      }
  # External service endpoints for formula rendering.
  urls = {
      u'googlecharts':u'http://chart.googleapis.com/chart?cht=tx&chl=',
      }
class GeneralConfig(object):
  "Configuration class from elyxer.config file"

  # Program version info: release date, highest supported LyX file
  # format, and the eLyXer version number.
  version = {
      u'date':u'2011-06-27', u'lyxformat':u'413', u'number':u'1.2.3',
      }
class HeaderConfig(object):
  "Configuration class from elyxer.config file"

  # LyX header keywords recognized while parsing the document preamble.
  parameters = {
      u'beginpreamble':u'\\begin_preamble', u'branch':u'\\branch',
      u'documentclass':u'\\textclass', u'endbranch':u'\\end_branch',
      u'endpreamble':u'\\end_preamble', u'language':u'\\language',
      u'lstset':u'\\lstset', u'outputchanges':u'\\output_changes',
      u'paragraphseparation':u'\\paragraph_separation',
      u'pdftitle':u'\\pdf_title', u'secnumdepth':u'\\secnumdepth',
      u'tocdepth':u'\\tocdepth',
      }
  # Known LyX document classes grouped by overall document style.
  styles = {
      u'article':[u'article',u'aastex',u'aapaper',u'acmsiggraph',u'sigplanconf',u'achemso',u'amsart',u'apa',u'arab-article',u'armenian-article',u'article-beamer',u'chess',u'dtk',u'elsarticle',u'heb-article',u'IEEEtran',u'iopart',u'kluwer',u'scrarticle-beamer',u'scrartcl',u'extarticle',u'paper',u'mwart',u'revtex4',u'spie',u'svglobal3',u'ltugboat',u'agu-dtd',u'jgrga',u'agums',u'entcs',u'egs',u'ijmpc',u'ijmpd',u'singlecol-new',u'doublecol-new',u'isprs',u'tarticle',u'jsarticle',u'jarticle',u'jss',u'literate-article',u'siamltex',u'cl2emult',u'llncs',u'svglobal',u'svjog',u'svprobth',],
      u'book':[u'book',u'amsbook',u'scrbook',u'extbook',u'tufte-book',u'report',u'extreport',u'scrreprt',u'memoir',u'tbook',u'jsbook',u'jbook',u'mwbk',u'svmono',u'svmult',u'treport',u'jreport',u'mwrep',],
      }
class ImageConfig(object):
  "Configuration class from elyxer.config file"

  # Shell command templates for image conversion; [bracketed] parts are
  # included only when the corresponding $variable is set.
  converters = {
      u'imagemagick':u'convert[ -density $scale][ -define $format:use-cropbox=true] "$input" "$output"',
      u'inkscape':u'inkscape "$input" --export-png="$output"',
      }
  # Input extensions whose cropbox format ImageMagick must be told about.
  cropboxformats = {
      u'.eps':u'ps', u'.pdf':u'pdf', u'.ps':u'ps',
      }
  # Default output extension and the formats treated as vector graphics.
  formats = {
      u'default':u'.png', u'vector':[u'.svg',u'.eps',],
      }
class LayoutConfig(object):
  "Configuration class from elyxer.config file"

  # Container class names whose instances may be grouped together.
  groupable = {
      u'allowed':[u'StringContainer',u'Constant',u'TaggedText',u'Align',u'TextFamily',u'EmphaticText',u'VersalitasText',u'BarredText',u'SizeText',u'ColorText',u'LangLine',u'Formula',],
      }
class NewfangleConfig(object):
  "Configuration class from elyxer.config file"

  # Marker strings used when parsing newfangle literate-programming chunks.
  constants = {
      u'chunkref':u'chunkref{', u'endcommand':u'}', u'endmark':u'>',
      u'startcommand':u'\\', u'startmark':u'=<',
      }
class NumberingConfig(object):
  "Configuration class from elyxer.config file"

  # Heading layouts numbered with ordinary (arabic) vs roman numerals.
  layouts = {
      u'ordered':[u'Chapter',u'Section',u'Subsection',u'Subsubsection',u'Paragraph',],
      u'roman':[u'Part',u'Book',],
      }
  # Footnote marker symbols, in the order they are assigned.
  sequence = {
      u'symbols':[u'*',u'**',u'†',u'‡',u'§',u'§§',u'¶',u'¶¶',u'#',u'##',],
      }
class StyleConfig(object):
  "Configuration class from elyxer.config file"

  # Horizontal-space commands → output text. NOTE(review): several
  # values render as plain spaces here; they may originally have been
  # distinct Unicode spaces or entities — verify against upstream.
  hspaces = {
      u'\\enskip{}':u' ', u'\\hfill{}':u'<span class="hfill"> </span>',
      u'\\hspace*{\\fill}':u' ', u'\\hspace*{}':u'', u'\\hspace{}':u' ',
      u'\\negthinspace{}':u'', u'\\qquad{}':u' ', u'\\quad{}':u' ',
      u'\\space{}':u' ', u'\\thinspace{}':u' ', u'~':u' ',
      }
  # Quote glyphs keyed by language/side code (e.g. 'eld' = English
  # left double).
  quotes = {
      u'ald':u'»', u'als':u'›', u'ard':u'«', u'ars':u'‹', u'eld':u'“',
      u'els':u'‘', u'erd':u'”', u'ers':u'’', u'fld':u'«',
      u'fls':u'‹', u'frd':u'»', u'frs':u'›', u'gld':u'„', u'gls':u'‚',
      u'grd':u'“', u'grs':u'‘', u'pld':u'„', u'pls':u'‚', u'prd':u'”',
      u'prs':u'’', u'sld':u'”', u'srd':u'”',
      }
  # Cross-reference output templates; placeholders: @ number, # page,
  # ¶ formatted text, $ name, ↕ link direction marker.
  referenceformats = {
      u'eqref':u'(@↕)', u'formatted':u'¶↕', u'nameref':u'$↕', u'pageref':u'#↕',
      u'ref':u'@↕', u'vpageref':u'on-page#↕', u'vref':u'@on-page#↕',
      }
  # Unit-like suffixes ignored when parsing size parameters.
  size = {
      u'ignoredtexts':[u'col',u'text',u'line',u'page',u'theight',u'pheight',],
      }
  # Vertical-space commands → placeholder divs.
  vspaces = {
      u'bigskip':u'<div class="bigskip"> </div>',
      u'defskip':u'<div class="defskip"> </div>',
      u'medskip':u'<div class="medskip"> </div>',
      u'smallskip':u'<div class="smallskip"> </div>',
      u'vfill':u'<div class="vfill"> </div>',
      }
class TOCConfig(object):
  "Configuration class from elyxer.config file"

  # Container classes processed when extracting plain TOC entry text.
  extractplain = {
      u'allowed':[u'StringContainer',u'Constant',u'TaggedText',u'Align',u'TextFamily',u'EmphaticText',u'VersalitasText',u'BarredText',u'SizeText',u'ColorText',u'LangLine',u'Formula',],
      u'cloned':[u'',], u'extracted':[u'',],
      }
  # Container classes processed when extracting the document title.
  extracttitle = {
      u'allowed':[u'StringContainer',u'Constant',u'Space',],
      u'cloned':[u'TextFamily',u'EmphaticText',u'VersalitasText',u'BarredText',u'SizeText',u'ColorText',u'LangLine',u'Formula',],
      u'extracted':[u'PlainLayout',u'TaggedText',u'Align',u'Caption',u'StandardLayout',u'FlexInset',],
      }
class TagConfig(object):
  "Configuration class from elyxer.config file"

  # Bar styles → HTML tag.
  barred = {
      u'under':u'u',
      }
  # Font families → HTML tag or span.
  family = {
      u'sans':u'span class="sans"', u'typewriter':u'tt',
      }
  # Flex (character style) insets → output span.
  flex = {
      u'CharStyle:Code':u'span class="code"',
      u'CharStyle:MenuItem':u'span class="menuitem"',
      u'Code':u'span class="code"', u'MenuItem':u'span class="menuitem"',
      u'Noun':u'span class="noun"', u'Strong':u'span class="strong"',
      }
  # Layouts whose consecutive paragraphs are merged into one group.
  group = {
      u'layouts':[u'Quotation',u'Quote',],
      }
  # Paragraph layouts → HTML tag; 'h?' means the heading level is
  # computed elsewhere from the section depth.
  layouts = {
      u'Center':u'div', u'Chapter':u'h?', u'Date':u'h2', u'Paragraph':u'div',
      u'Part':u'h1', u'Quotation':u'blockquote', u'Quote':u'blockquote',
      u'Section':u'h?', u'Subsection':u'h?', u'Subsubsection':u'h?',
      }
  # List layouts → HTML list tag.
  listitems = {
      u'Enumerate':u'ol', u'Itemize':u'ul',
      }
  # Note insets: empty value means the note is dropped from the output.
  notes = {
      u'Comment':u'', u'Greyedout':u'span class="greyedout"', u'Note':u'',
      }
  # Font shapes → HTML tag or span.
  shaped = {
      u'italic':u'i', u'slanted':u'i', u'smallcaps':u'span class="versalitas"',
      }
class TranslationConfig(object):
  "Configuration class from elyxer.config file"

  # English UI strings used in generated output (headings, navigation,
  # float captions, warnings).
  constants = {
      u'Appendix':u'Appendix', u'Book':u'Book', u'Chapter':u'Chapter',
      u'Paragraph':u'Paragraph', u'Part':u'Part', u'Section':u'Section',
      u'Subsection':u'Subsection', u'Subsubsection':u'Subsubsection',
      u'abstract':u'Abstract', u'bibliography':u'Bibliography',
      u'figure':u'figure', u'float-algorithm':u'Algorithm ',
      u'float-figure':u'Figure ', u'float-listing':u'Listing ',
      u'float-table':u'Table ', u'float-tableau':u'Tableau ',
      u'footnotes':u'Footnotes', u'generated-by':u'Document generated by ',
      u'generated-on':u' on ', u'index':u'Index',
      u'jsmath-enable':u'Please enable JavaScript on your browser.',
      u'jsmath-requires':u' requires JavaScript to correctly process the mathematics on this page. ',
      u'jsmath-warning':u'Warning: ', u'list-algorithm':u'List of Algorithms',
      u'list-figure':u'List of Figures', u'list-table':u'List of Tables',
      u'list-tableau':u'List of Tableaux', u'main-page':u'Main page',
      u'next':u'Next', u'nomenclature':u'Nomenclature',
      u'on-page':u' on page ', u'prev':u'Prev', u'references':u'References',
      u'toc':u'Table of Contents', u'toc-for':u'Contents for ', u'up':u'Up',
      }
  # LyX language names → ISO 639-1 codes for the HTML lang attribute.
  languages = {
      u'american':u'en', u'british':u'en', u'deutsch':u'de', u'dutch':u'nl',
      u'english':u'en', u'french':u'fr', u'ngerman':u'de', u'spanish':u'es',
      }
class CommandLineParser(object):
  "A parser for runtime options"

  def __init__(self, options):
    # options: object (or class) whose attributes hold option values;
    # parsed values are written back onto it with setattr(). The current
    # attribute value determines how the option is read: bool -> flag,
    # list -> append, anything else -> single value.
    self.options = options

  def parseoptions(self, args):
    """Parse all leading '--option' arguments from args (consumed in
    place), storing values on self.options. Returns an error message
    string, or None on success."""
    if len(args) == 0:
      return None
    while len(args) > 0 and args[0].startswith('--'):
      key, value = self.readoption(args)
      if not key:
        return 'Option ' + value + ' not recognized'
      if not value:
        return 'Option ' + key + ' needs a value'
      setattr(self.options, key, value)
    return None

  def readoption(self, args):
    """Read one option from args. Returns (key, value); (None, name)
    for an unknown option, (key, None) for a missing value."""
    arg = args[0][2:]
    del args[0]
    if '=' in arg:
      key = self.readequalskey(arg, args)
    else:
      # '--dest-directory' matches attribute 'destdirectory'
      key = arg.replace('-', '')
    if not hasattr(self.options, key):
      return None, key
    current = getattr(self.options, key)
    if isinstance(current, bool):
      # boolean options are flags: presence means True, no value read
      return key, True
    # read value
    if len(args) == 0:
      return key, None
    if args[0].startswith('"'):
      initial = args[0]
      del args[0]
      return key, self.readquoted(args, initial)
    value = args[0]
    del args[0]
    if isinstance(current, list):
      # list options accumulate every occurrence
      current.append(value)
      return key, current
    return key, value

  def readquoted(self, args, initial):
    """Read a multi-word value between double quotes, joining args until
    the closing quote. Returns None if the quote is never closed."""
    value = initial[1:]
    while len(args) > 0 and not args[0].endswith('"') and not args[0].startswith('--'):
      value += ' ' + args[0]
      del args[0]
    if len(args) == 0 or args[0].startswith('--'):
      return None
    # Bug fix: the original did `value += ' ' + args[0:-1]`, which
    # concatenates a str with a *list slice* and raises TypeError for
    # every quoted value; it also never consumed the closing token.
    # Take the last word without its trailing quote and consume it.
    value += ' ' + args[0][:-1]
    del args[0]
    return value

  def readequalskey(self, arg, args):
    "Read a '--key=value' option: push value back onto args, return key."
    key, value = arg.split('=', 1)
    args.insert(0, value)
    return key
class Options(object):
  "A set of runtime options"

  # singleton instance and the path the program was invoked as
  instance = None
  location = None
  # deprecated flags (still accepted, reported in processoptions)
  nocopy = False
  copyright = False
  # diagnostics and help
  debug = False
  quiet = False
  version = False
  hardversion = False
  versiondate = False
  html = False
  help = False
  showlines = True
  # output encoding
  unicode = False
  iso885915 = False
  # HTML output configuration
  css = []
  title = None
  directory = None
  destdirectory = None
  # table of contents
  toc = False
  toctarget = ''
  tocfor = None
  # image handling
  forceformat = None
  lyxformat = False
  target = None
  splitpart = None
  # memory strategy (lowmem/toc/tocfor switch memory off)
  memory = True
  lowmem = False
  nobib = False
  converter = 'imagemagick'
  raw = False
  # math rendering backends
  jsmath = None
  mathjax = None
  nofooter = False
  simplemath = False
  template = None
  noconvert = False
  notoclabels = False
  # footnote marker style (see parsefootnotes)
  letterfoot = True
  numberfoot = False
  symbolfoot = False
  # footnote placement (see parsefootnotes)
  hoverfoot = True
  marginfoot = False
  endfoot = False
  supfoot = True
  alignfoot = False
  footnotes = None
  # image conversion behavior
  imageformat = None
  copyimages = False
  googlecharts = False
  embedcss = []
  branches = dict()
def parseoptions(self, args):
"Parse command line options"
Options.location = args[0]
del args[0]
parser = CommandLineParser(Options)
result = parser.parseoptions(args)
if result:
Trace.error(result)
self.usage()
self.processoptions()
def processoptions(self):
"Process all options parsed."
if Options.help:
self.usage()
if Options.version:
self.showversion()
if Options.hardversion:
self.showhardversion()
if Options.versiondate:
self.showversiondate()
if Options.lyxformat:
self.showlyxformat()
if Options.splitpart:
try:
Options.splitpart = int(Options.splitpart)
if Options.splitpart <= 0:
Trace.error('--splitpart requires a number bigger than zero')
self.usage()
except:
Trace.error('--splitpart needs a numeric argument, not ' + Options.splitpart)
self.usage()
if Options.lowmem or Options.toc or Options.tocfor:
Options.memory = False
self.parsefootnotes()
if Options.forceformat and not Options.imageformat:
Options.imageformat = Options.forceformat
if Options.imageformat == 'copy':
Options.copyimages = True
if Options.css == []:
Options.css = ['http://elyxer.nongnu.org/lyx.css']
if Options.html:
Options.simplemath = True
if Options.toc and not Options.tocfor:
Trace.error('Option --toc is deprecated; use --tocfor "page" instead')
Options.tocfor = Options.toctarget
if Options.nocopy:
Trace.error('Option --nocopy is deprecated; it is no longer needed')
# set in Trace if necessary
for param in dir(Trace):
if param.endswith('mode'):
setattr(Trace, param, getattr(self, param[:-4]))
  def usage(self):
    "Show correct usage"
    # print the synopsis followed by the full option listing
    Trace.error('Usage: ' + os.path.basename(Options.location) + ' [options] [filein] [fileout]')
    Trace.error('Convert LyX input file "filein" to HTML file "fileout".')
    Trace.error('If filein (or fileout) is not given use standard input (or output).')
    Trace.error('Main program of the eLyXer package (http://elyxer.nongnu.org/).')
    self.showoptions()
def parsefootnotes(self):
"Parse footnotes options."
if not Options.footnotes:
return
Options.marginfoot = False
Options.letterfoot = False
options = Options.footnotes.split(',')
for option in options:
footoption = option + 'foot'
if hasattr(Options, footoption):
setattr(Options, footoption, True)
else:
Trace.error('Unknown footnotes option: ' + option)
if not Options.endfoot and not Options.marginfoot and not Options.hoverfoot:
Options.hoverfoot = True
if not Options.numberfoot and not Options.symbolfoot:
Options.letterfoot = True
def showoptions(self):
    "Show all possible options"
    # Print the complete option reference through Trace and terminate.
    Trace.error('  Common options:')
    Trace.error('    --help: show this online help')
    Trace.error('    --quiet: disables all runtime messages')
    Trace.error('')
    Trace.error('  Advanced options:')
    Trace.error('    --debug: enable debugging messages (for developers)')
    Trace.error('    --version: show version number and release date')
    Trace.error('    --lyxformat: return the highest LyX version supported')
    Trace.error('  Options for HTML output:')
    Trace.error('    --title "title": set the generated page title')
    Trace.error('    --css "file.css": use a custom CSS file')
    Trace.error('    --embedcss "file.css": embed styles from elyxer.a CSS file into the output')
    Trace.error('    --html: output HTML 4.0 instead of the default XHTML')
    Trace.error('    --unicode: full Unicode output')
    Trace.error('    --iso885915: output a document with ISO-8859-15 encoding')
    Trace.error('    --nofooter: remove the footer "generated by eLyXer"')
    Trace.error('    --simplemath: do not generate fancy math constructions')
    Trace.error('  Options for image output:')
    Trace.error('    --directory "img_dir": look for images in the specified directory')
    Trace.error('    --destdirectory "dest": put converted images into this directory')
    Trace.error('    --imageformat ".ext": image output format, or "copy" to copy images')
    Trace.error('    --noconvert: do not convert images, use in original locations')
    Trace.error('    --converter "inkscape": use an alternative program to convert images')
    Trace.error('  Options for footnote display:')
    Trace.error('    --numberfoot: mark footnotes with numbers instead of letters')
    Trace.error('    --symbolfoot: mark footnotes with symbols (*, **...)')
    Trace.error('    --hoverfoot: show footnotes as hovering text (default)')
    Trace.error('    --marginfoot: show footnotes on the page margin')
    Trace.error('    --endfoot: show footnotes at the end of the page')
    Trace.error('    --supfoot: use superscript for footnote markers (default)')
    Trace.error('    --alignfoot: use aligned text for footnote markers')
    Trace.error('    --footnotes "options": specify several comma-separated footnotes options')
    Trace.error('      Available options are: "number", "symbol", "hover", "margin", "end",')
    Trace.error('        "sup", "align"')
    Trace.error('  Advanced output options:')
    Trace.error('    --splitpart "depth": split the resulting webpage at the given depth')
    Trace.error('    --tocfor "page": generate a TOC that points to the given page')
    Trace.error('    --target "frame": make all links point to the given frame')
    Trace.error('    --notoclabels: omit the part labels in the TOC, such as Chapter')
    Trace.error('    --lowmem: do the conversion on the fly (conserve memory)')
    Trace.error('    --raw: generate HTML without header or footer.')
    Trace.error('    --jsmath "URL": use jsMath from elyxer.the given URL to display equations')
    Trace.error('    --mathjax "URL": use MathJax from elyxer.the given URL to display equations')
    Trace.error('    --googlecharts: use Google Charts to generate formula images')
    Trace.error('    --template "file": use a template, put everything in <!--$content-->')
    Trace.error('    --copyright: add a copyright notice at the bottom')
    Trace.error('  Deprecated options:')
    Trace.error('    --toc: (deprecated) create a table of contents')
    Trace.error('    --toctarget "page": (deprecated) generate a TOC for the given page')
    Trace.error('    --nocopy: (deprecated) maintained for backwards compatibility')
    # showing the options always terminates the program
    sys.exit()
def showversion(self):
    "Return the current eLyXer version string"
    # assemble 'eLyXer version <number> (<date>)' and exit
    version = GeneralConfig.version
    Trace.error('eLyXer version ' + version['number'] + ' (' + version['date'] + ')')
    sys.exit()
def showhardversion(self):
    "Return just the version string"
    # print only the bare version number, then exit
    number = GeneralConfig.version['number']
    Trace.message(number)
    sys.exit()
def showversiondate(self):
    "Return just the version date"
    # print only the release date of this version, then exit
    Trace.message(GeneralConfig.version['date'])
    sys.exit()
def showlyxformat(self):
    "Return just the lyxformat parameter"
    # print the highest supported LyX format number, then exit
    lyxformat = GeneralConfig.version['lyxformat']
    Trace.message(lyxformat)
    sys.exit()
class BranchOptions(object):
    "A set of options for a branch"

    def __init__(self, name):
        "Create the option set for the named branch, with a default color."
        self.name = name
        self.options = {'color': '#ffffff'}

    def set(self, key, value):
        "Set a branch option"
        # branch option keys must carry the command marker prefix
        startcommand = ContainerConfig.string['startcommand']
        if not key.startswith(startcommand):
            Trace.error('Invalid branch option ' + key)
            return
        self.options[key.replace(startcommand, '')] = value

    def isselected(self):
        "Return if the branch is selected"
        return self.options.get('selected') == '1'

    def __unicode__(self):
        "String representation"
        return 'options for ' + self.name + ': ' + unicode(self.options)
import urllib
class Cloner(object):
    "An object used to clone other objects."

    @classmethod
    def clone(cls, original):
        "Return an exact copy of an object."
        "The original object must have an empty constructor."
        return cls.create(original.__class__)

    @classmethod
    def create(cls, type):
        "Create an object of a given class."
        # allocate without arguments, then run the empty constructor
        instance = type.__new__(type)
        instance.__init__()
        return instance
class ContainerExtractor(object):
    "A class to extract certain containers."

    def __init__(self, config):
        "The config parameter is a map containing three lists: allowed, copied and extracted."
        "Each of the three is a list of class names for containers."
        "Allowed containers are included as is into the result."
        "Cloned containers are cloned and placed into the result."
        "Extracted containers are looked into."
        "All other containers are silently ignored."
        self.allowed = config['allowed']
        self.cloned = config['cloned']
        self.extracted = config['extracted']

    def extract(self, container):
        "Extract a group of selected containers from elyxer.a container."
        found = []
        def locate(candidate):
            "Keep containers whose class is allowed or cloned."
            return candidate.__class__.__name__ in self.allowed + self.cloned
        def recursive(candidate):
            "Descend only into extracted containers."
            return candidate.__class__.__name__ in self.extracted
        def store(candidate):
            self.process(candidate, found)
        container.recursivesearch(locate, recursive, store)
        return found

    def process(self, container, list):
        "Add allowed containers, clone cloned containers and add the clone."
        name = container.__class__.__name__
        if name in self.allowed:
            list.append(container)
            return
        if name in self.cloned:
            list.append(self.safeclone(container))
            return
        Trace.error('Unknown container class ' + name)

    def safeclone(self, container):
        "Return a new container with contents only in a safe list, recursively."
        clone = Cloner.clone(container)
        clone.output = container.output
        clone.contents = self.extract(container)
        return clone
class Parser(object):
    "A generic parser"

    def __init__(self):
        # begin: line number where the parsed element starts;
        # parameters: key -> value map filled by parseparameter()/parsexml()
        self.begin = 0
        self.parameters = dict()

    def parseheader(self, reader):
        "Parse the header"
        # the header is the whitespace-split current line; the element body
        # starts on the following line
        header = reader.currentline().split()
        reader.nextline()
        self.begin = reader.linenumber
        return header

    def parseparameter(self, reader):
        "Parse a parameter"
        # XML-style parameters (<param attr=value...>) get special handling
        if reader.currentline().strip().startswith('<'):
            key, value = self.parsexml(reader)
            self.parameters[key] = value
            return
        # otherwise the line is 'key' or 'key value' (value possibly quoted)
        split = reader.currentline().strip().split(' ', 1)
        reader.nextline()
        if len(split) == 0:
            return
        key = split[0]
        if len(split) == 1:
            # a bare key acts as a boolean flag
            self.parameters[key] = True
            return
        if not '"' in split[1]:
            self.parameters[key] = split[1].strip()
            return
        # quoted value: keep only the text between the first pair of quotes
        doublesplit = split[1].split('"')
        self.parameters[key] = doublesplit[1]

    def parsexml(self, reader):
        "Parse a parameter in xml form: <param attr1=value...>"
        strip = reader.currentline().strip()
        reader.nextline()
        if not strip.endswith('>'):
            Trace.error('XML parameter ' + strip + ' should be <...>')
        # drop the angle brackets and split into tag name plus attributes
        split = strip[1:-1].split()
        if len(split) == 0:
            Trace.error('Empty XML parameter <>')
            return None, None
        key = split[0]
        del split[0]
        if len(split) == 0:
            # no attributes: map the key to an empty dictionary
            return key, dict()
        attrs = dict()
        for attr in split:
            if not '=' in attr:
                # malformed attribute: report it and give it a dummy value
                Trace.error('Erroneous attribute for ' + key + ': ' + attr)
                attr += '="0"'
            parts = attr.split('=')
            attrkey = parts[0]
            # the value is expected to be double-quoted -- TODO confirm
            value = parts[1].split('"')[1]
            attrs[attrkey] = value
        return key, attrs

    def parseending(self, reader, process):
        "Parse until the current ending is found"
        # self.ending is expected to be set by the subclass or its creator
        # before this is called -- NOTE(review): not set in this base class
        if not self.ending:
            Trace.error('No ending for ' + unicode(self))
            return
        while not reader.currentline().startswith(self.ending):
            process()

    def parsecontainer(self, reader, contents):
        # Create the next container and attach it to the parent.
        # self.factory and self.parent are attached externally
        # (presumably by the container factory) -- not set in this class.
        container = self.factory.createcontainer(reader)
        if container:
            container.parent = self.parent
            contents.append(container)

    def __unicode__(self):
        "Return a description"
        return self.__class__.__name__ + ' (' + unicode(self.begin) + ')'
class LoneCommand(Parser):
    "A parser for just one command line"

    def parse(self, reader):
        "Consume nothing and produce no contents."
        return list()
class TextParser(Parser):
    "A parser for a command and a bit of text"

    # shared across ALL TextParser instances: endings pushed by nested
    # parsers that have not been consumed yet
    stack = []

    def __init__(self, container):
        Parser.__init__(self)
        # the ending marker depends on the container class being parsed
        self.ending = None
        if container.__class__.__name__ in ContainerConfig.endings:
            self.ending = ContainerConfig.endings[container.__class__.__name__]
        self.endings = []

    def parse(self, reader):
        "Parse lines as long as they are text"
        # register our own ending on the shared stack, then accept any
        # pending ending plus the generic Layout/Inset endings
        TextParser.stack.append(self.ending)
        self.endings = TextParser.stack + [ContainerConfig.endings['Layout'],
            ContainerConfig.endings['Inset'], self.ending]
        contents = []
        while not self.isending(reader):
            self.parsecontainer(reader, contents)
        return contents

    def isending(self, reader):
        "Check if text is ending"
        current = reader.currentline().split()
        if len(current) == 0:
            return False
        if current[0] in self.endings:
            # a pending ending is consumed from the shared stack; an
            # unexpected one clears the whole stack
            if current[0] in TextParser.stack:
                TextParser.stack.remove(current[0])
            else:
                TextParser.stack = []
            return True
        return False
class ExcludingParser(Parser):
    "A parser that excludes the final line"

    def parse(self, reader):
        "Parse everything up to (and excluding) the final line"
        contents = []
        def addcontainer():
            self.parsecontainer(reader, contents)
        self.parseending(reader, addcontainer)
        return contents
class BoundedParser(ExcludingParser):
    "A parser bound by a final line"

    def parse(self, reader):
        "Parse everything, including the final line"
        contents = ExcludingParser.parse(self, reader)
        # consume the ending line itself
        reader.nextline()
        return contents
class BoundedDummy(Parser):
    "A bound parser that ignores everything"

    def parse(self, reader):
        "Parse the contents of the container"
        # discard every line until the ending is reached
        self.parseending(reader, reader.nextline)
        # consume the ending line as well
        reader.nextline()
        return []
class StringParser(Parser):
    "Parses just a string"

    def parseheader(self, reader):
        "Do nothing, just take note"
        # a string has no header; just record where it starts
        self.begin = reader.linenumber + 1
        return []

    def parse(self, reader):
        "Parse a single line"
        line = reader.currentline()
        reader.nextline()
        return line
class InsetParser(BoundedParser):
    "Parses a LyX inset"

    def parse(self, reader):
        "Parse inset parameters into a dictionary"
        startcommand = ContainerConfig.string['startcommand']
        # parameters precede the first command line (or an empty line)
        while not reader.currentline().startswith(startcommand) and reader.currentline() != '':
            self.parseparameter(reader)
        return BoundedParser.parse(self, reader)
class ContainerOutput(object):
    "The generic HTML output for a container."

    def gethtml(self, container):
        "Show an error."
        # subclasses must override this
        message = 'gethtml() not implemented for ' + unicode(self)
        Trace.error(message)

    def isempty(self):
        "Decide if the output is empty: by default, not empty."
        return False
class EmptyOutput(ContainerOutput):

    def gethtml(self, container):
        "Return empty HTML code."
        return list()

    def isempty(self):
        "This output is particularly empty."
        return True
class FixedOutput(ContainerOutput):
    "Fixed output"

    def gethtml(self, container):
        "Return constant HTML code"
        # the container carries its own precomputed HTML
        html = container.html
        return html
class ContentsOutput(ContainerOutput):
    "Outputs the contents converted to HTML"

    def gethtml(self, container):
        "Return the HTML code"
        result = []
        if container.contents == None:
            return result
        for element in container.contents:
            if not hasattr(element, 'gethtml'):
                # abort on the first element that cannot produce HTML
                Trace.error('No html in ' + element.__class__.__name__ + ': ' + unicode(element))
                return result
            result += element.gethtml()
        return result
class TaggedOutput(ContentsOutput):
    "Outputs an HTML tag surrounding the contents."

    # tag: the HTML tag text (possibly including attributes);
    # breaklines: surround the tag with newlines; empty: self-closing tag
    tag = None
    breaklines = False
    empty = False

    def settag(self, tag, breaklines=False, empty=False):
        "Set the value for the tag and other attributes."
        self.tag = tag
        if breaklines:
            self.breaklines = breaklines
        if empty:
            self.empty = empty
        return self

    def setbreaklines(self, breaklines):
        "Set the value for breaklines."
        self.breaklines = breaklines
        return self

    def gethtml(self, container):
        "Return the HTML code."
        if self.empty:
            return [self.selfclosing(container)]
        html = [self.open(container)]
        html += ContentsOutput.gethtml(self, container)
        html.append(self.close(container))
        return html

    def open(self, container):
        "Get opening line."
        if not self.checktag(container):
            return ''
        open = '<' + self.tag + '>'
        if self.breaklines:
            return open + '\n'
        return open

    def close(self, container):
        "Get closing line."
        if not self.checktag(container):
            return ''
        # only the tag name (first word) goes into the closing tag
        close = '</' + self.tag.split()[0] + '>'
        if self.breaklines:
            return '\n' + close + '\n'
        return close

    def selfclosing(self, container):
        "Get self-closing line."
        if not self.checktag(container):
            return ''
        selfclosing = '<' + self.tag + '/>'
        if self.breaklines:
            return selfclosing + '\n'
        return selfclosing

    def checktag(self, container=None):
        "Check that the tag is valid."
        # bug fix: the original referenced an undefined name 'container'
        # in the error message, raising a NameError whenever the tag was
        # missing; accept the container as an optional argument instead
        # (default keeps the old no-argument call signature working).
        if not self.tag:
            Trace.error('No tag in ' + unicode(container))
            return False
        if self.tag == '':
            return False
        return True
class FilteredOutput(ContentsOutput):
    "Returns the output in the contents, but filtered:"
    "some strings are replaced by others."

    def __init__(self):
        "Initialize the filters."
        self.filters = []

    def addfilter(self, original, replacement):
        "Add a new filter: replace the original by the replacement."
        self.filters.append((original, replacement))

    def gethtml(self, container):
        "Return the HTML code"
        html = ContentsOutput.gethtml(self, container)
        return [self.filter(line) for line in html]

    def filter(self, line):
        "Filter a single line with all available filters."
        for original, replacement in self.filters:
            if original in line:
                line = line.replace(original, replacement)
        return line
class StringOutput(ContainerOutput):
    "Returns a bare string as output"

    def gethtml(self, container):
        "Return a bare string"
        # wrap the container's string in the list that gethtml() must return
        return [container.string]
import sys
import codecs
class LineReader(object):
    "Reads a file line by line"

    def __init__(self, filename):
        # accept either an already-open file object or a file name;
        # note: 'file' is the Python 2 built-in file type
        if isinstance(filename, file):
            self.file = filename
        else:
            self.file = codecs.open(filename, 'rU', 'utf-8')
        self.linenumber = 1
        self.lastline = None
        self.current = None
        self.mustread = True
        self.depleted = False
        try:
            self.readline()
        except UnicodeDecodeError:
            # try compressed file
            import gzip
            self.file = gzip.open(filename, 'rb')
            self.readline()

    def setstart(self, firstline):
        "Set the first line to read."
        # skip lines up to the requested starting line
        for i in range(firstline):
            self.file.readline()
        self.linenumber = firstline

    def setend(self, lastline):
        "Set the last line to read."
        self.lastline = lastline

    def currentline(self):
        "Get the current line"
        # lazily read the next line only when it has been consumed
        if self.mustread:
            self.readline()
        return self.current

    def nextline(self):
        "Go to next line"
        if self.depleted:
            Trace.fatal('Read beyond file end')
        self.mustread = True

    def readline(self):
        "Read a line from elyxer.file"
        self.current = self.file.readline()
        # the gzip fallback yields raw bytes that still need decoding;
        # codecs-opened files already return unicode
        if not isinstance(self.file, codecs.StreamReaderWriter):
            self.current = self.current.decode('utf-8')
        if len(self.current) == 0:
            # an empty read means end of file
            self.depleted = True
        self.current = self.current.rstrip('\n\r')
        self.linenumber += 1
        self.mustread = False
        Trace.prefix = 'Line ' + unicode(self.linenumber) + ': '
        # periodic progress message on long inputs
        if self.linenumber % 1000 == 0:
            Trace.message('Parsing')

    def finished(self):
        "Find out if the file is finished"
        if self.lastline and self.linenumber == self.lastline:
            return True
        if self.mustread:
            self.readline()
        return self.depleted

    def close(self):
        self.file.close()
class LineWriter(object):
    "Writes a file as a series of lists"

    # the underlying file object; False until opened lazily
    file = False

    def __init__(self, filename):
        "Keep an open file object, or just the name to open on first write."
        if isinstance(filename, file):
            self.file = filename
            self.filename = None
        else:
            self.filename = filename

    def write(self, strings):
        "Write a list of strings"
        for piece in strings:
            if not isinstance(piece, basestring):
                Trace.error('Not a string: ' + unicode(piece) + ' in ' + unicode(strings))
                return
            self.writestring(piece)

    def writestring(self, string):
        "Write a string"
        # open the named file lazily on the first actual write
        if not self.file:
            self.file = codecs.open(self.filename, 'w', "utf-8")
        if self.file == sys.stdout and sys.version_info < (3, 0):
            string = string.encode('utf-8')
        self.file.write(string)

    def writeline(self, line):
        "Write a line to file"
        self.writestring(line + '\n')

    def close(self):
        self.file.close()
class Globable(object):
    """A bit of text which can be globbed (lumped together in bits).
    Methods current(), skipcurrent(), checkfor() and isout() have to be
    implemented by subclasses."""

    # when True, unmatched pending endings are tolerated at end of input
    leavepending = False

    def __init__(self):
        self.endinglist = EndingList()

    def checkbytemark(self):
        "Check for a Unicode byte mark and skip it."
        if self.finished():
            return
        if ord(self.current()) == 0xfeff:
            self.skipcurrent()

    def isout(self):
        "Find out if we are out of the position yet."
        # abstract: subclasses must override
        Trace.error('Unimplemented isout()')
        return True

    def current(self):
        "Return the current character."
        # abstract: subclasses must override
        Trace.error('Unimplemented current()')
        return ''

    def checkfor(self, string):
        "Check for the given string in the current position."
        # abstract: subclasses must override
        Trace.error('Unimplemented checkfor()')
        return False

    def finished(self):
        "Find out if the current text has finished."
        # finished either when out of text or when a registered ending is found
        if self.isout():
            if not self.leavepending:
                self.endinglist.checkpending()
            return True
        return self.endinglist.checkin(self)

    def skipcurrent(self):
        "Return the current character and skip it."
        # abstract: subclasses must override
        Trace.error('Unimplemented skipcurrent()')
        return ''

    def glob(self, currentcheck):
        "Glob a bit of text that satisfies a check on the current char."
        glob = ''
        while not self.finished() and currentcheck():
            glob += self.skipcurrent()
        return glob

    def globalpha(self):
        "Glob a bit of alpha text"
        return self.glob(lambda: self.current().isalpha())

    def globnumber(self):
        "Glob a row of digits."
        return self.glob(lambda: self.current().isdigit())

    def isidentifier(self):
        "Return if the current character is alphanumeric or _."
        if self.current().isalnum() or self.current() == '_':
            return True
        return False

    def globidentifier(self):
        "Glob alphanumeric and _ symbols."
        return self.glob(self.isidentifier)

    def isvalue(self):
        "Return if the current character is a value character:"
        "not a bracket or a space."
        if self.current().isspace():
            return False
        if self.current() in '{}()':
            return False
        return True

    def globvalue(self):
        "Glob a value: any symbols but brackets."
        return self.glob(self.isvalue)

    def skipspace(self):
        "Skip all whitespace at current position."
        return self.glob(lambda: self.current().isspace())

    def globincluding(self, magicchar):
        "Glob a bit of text up to (including) the magic char."
        glob = self.glob(lambda: self.current() != magicchar) + magicchar
        self.skip(magicchar)
        return glob

    def globexcluding(self, excluded):
        "Glob a bit of text up until (excluding) any excluded character."
        return self.glob(lambda: self.current() not in excluded)

    def pushending(self, ending, optional = False):
        "Push a new ending to the bottom"
        self.endinglist.add(ending, optional)

    def popending(self, expected = None):
        "Pop the ending found at the current position"
        # out of text with leavepending set: pretend the expected ending came
        if self.isout() and self.leavepending:
            return expected
        ending = self.endinglist.pop(self)
        if expected and expected != ending:
            Trace.error('Expected ending ' + expected + ', got ' + ending)
        self.skip(ending)
        return ending

    def nextending(self):
        "Return the next ending in the queue."
        nextending = self.endinglist.findending(self)
        if not nextending:
            return None
        return nextending.ending
class EndingList(object):
    "A list of position endings"

    def __init__(self):
        self.endings = []

    def add(self, ending, optional = False):
        "Add a new ending to the list"
        self.endings.append(PositionEnding(ending, optional))

    def pickpending(self, pos):
        "Pick any pending endings from a parse position."
        self.endings += pos.endinglist.endings

    def checkin(self, pos):
        "Search for an ending"
        if self.findending(pos):
            return True
        return False

    def pop(self, pos):
        "Remove the ending at the current position"
        if pos.isout():
            Trace.error('No ending out of bounds')
            return ''
        ending = self.findending(pos)
        if not ending:
            Trace.error('No ending at ' + pos.current())
            return ''
        # remove endings from the top of the stack down to the found one
        for each in reversed(self.endings):
            self.endings.remove(each)
            if each == ending:
                return each.ending
            elif not each.optional:
                # bug fix: the original concatenated the PositionEnding object
                # itself to a string, raising a TypeError; convert it first
                Trace.error('Removed non-optional ending ' + unicode(each))
        Trace.error('No endings left')
        return ''

    def findending(self, pos):
        "Find the ending at the current position"
        if len(self.endings) == 0:
            return None
        # scan from the most recently added ending; a non-matching
        # non-optional ending blocks the search
        for index, ending in enumerate(reversed(self.endings)):
            if ending.checkin(pos):
                return ending
            if not ending.optional:
                return None
        return None

    def checkpending(self):
        "Check if there are any pending endings"
        if len(self.endings) != 0:
            Trace.error('Pending ' + unicode(self) + ' left open')

    def __unicode__(self):
        "Printable representation"
        string = 'endings ['
        for ending in self.endings:
            string += unicode(ending) + ','
        if len(self.endings) > 0:
            string = string[:-1]
        return string + ']'
class PositionEnding(object):
    "An ending for a parsing position"

    def __init__(self, ending, optional):
        "Remember the ending string and whether it is optional."
        self.ending = ending
        self.optional = optional

    def checkin(self, pos):
        "Check for the ending"
        return pos.checkfor(self.ending)

    def __unicode__(self):
        "Printable representation"
        result = 'Ending ' + self.ending
        if self.optional:
            result += ' (optional)'
        return result
class Position(Globable):
    """A position in a text to parse.
    Including those in Globable, functions to implement by subclasses are:
    skip(), identifier(), extract(), isout() and current()."""

    def __init__(self):
        Globable.__init__(self)

    def skip(self, string):
        "Skip a string"
        # abstract: subclasses must override
        Trace.error('Unimplemented skip()')

    def identifier(self):
        "Return an identifier for the current position."
        # abstract: subclasses must override
        Trace.error('Unimplemented identifier()')
        return 'Error'

    def extract(self, length):
        "Extract the next string of the given length, or None if not enough text,"
        "without advancing the parse position."
        # abstract: subclasses must override
        Trace.error('Unimplemented extract()')
        return None

    def checkfor(self, string):
        "Check for a string at the given position."
        return string == self.extract(len(string))

    def checkforlower(self, string):
        "Check for a string in lower case."
        extracted = self.extract(len(string))
        if not extracted:
            return False
        # fix: reuse the already extracted text instead of calling
        # extract() a second time for the same span
        return string.lower() == extracted.lower()

    def skipcurrent(self):
        "Return the current character and skip it."
        current = self.current()
        self.skip(current)
        return current

    def next(self):
        "Advance the position and return the next character."
        self.skipcurrent()
        return self.current()

    def checkskip(self, string):
        "Check for a string at the given position; if there, skip it"
        if not self.checkfor(string):
            return False
        self.skip(string)
        return True

    def error(self, message):
        "Show an error message and the position identifier."
        Trace.error(message + ': ' + self.identifier())
class TextPosition(Position):
    "A parse position based on a raw text."

    def __init__(self, text):
        "Create the position from elyxer.some text."
        Position.__init__(self)
        self.pos = 0
        self.text = text
        self.checkbytemark()

    def skip(self, string):
        "Skip a string of characters."
        self.pos += len(string)

    def identifier(self):
        "Return a sample of the remaining text."
        # show at most 30 characters from the current position
        length = min(30, len(self.text) - self.pos)
        return '*' + self.text[self.pos:self.pos + length] + '*'

    def isout(self):
        "Find out if we are out of the text yet."
        return self.pos >= len(self.text)

    def current(self):
        "Return the current character, assuming we are not out."
        return self.text[self.pos]

    def extract(self, length):
        "Extract the next string of the given length, or None if not enough text."
        if self.pos + length > len(self.text):
            return None
        return self.text[self.pos:self.pos + length]
class FilePosition(Position):
    "A parse position based on an underlying file."

    def __init__(self, filename):
        "Create the position from a file."
        Position.__init__(self)
        self.reader = LineReader(filename)
        self.pos = 0
        self.checkbytemark()

    def skip(self, string):
        "Skip a string of characters."
        length = len(string)
        # consume whole lines while the skip extends past the line end;
        # the +1 accounts for the implicit newline at each line end
        while self.pos + length > len(self.reader.currentline()):
            length -= len(self.reader.currentline()) - self.pos + 1
            self.nextline()
        self.pos += length

    def currentline(self):
        "Get the current line of the underlying file."
        return self.reader.currentline()

    def nextline(self):
        "Go to the next line."
        self.reader.nextline()
        self.pos = 0

    def linenumber(self):
        "Return the line number of the file."
        return self.reader.linenumber + 1

    def identifier(self):
        "Return the current line and line number in the file."
        before = self.reader.currentline()[:self.pos - 1]
        after = self.reader.currentline()[self.pos:]
        # bug fix: the original called self.getlinenumber(), a method that
        # does not exist on this class (it is named linenumber()), so every
        # call raised an AttributeError
        return 'line ' + unicode(self.linenumber()) + ': ' + before + '*' + after

    def isout(self):
        "Find out if we are out of the text yet."
        if self.pos > len(self.reader.currentline()):
            if self.pos > len(self.reader.currentline()) + 1:
                Trace.error('Out of the line ' + self.reader.currentline() + ': ' + unicode(self.pos))
            self.nextline()
        return self.reader.finished()

    def current(self):
        "Return the current character, assuming we are not out."
        # the position just past the line end stands for the newline itself
        if self.pos == len(self.reader.currentline()):
            return '\n'
        if self.pos > len(self.reader.currentline()):
            Trace.error('Out of the line ' + self.reader.currentline() + ': ' + unicode(self.pos))
            return '*'
        return self.reader.currentline()[self.pos]

    def extract(self, length):
        "Extract the next string of the given length, or None if not enough text."
        if self.pos + length > len(self.reader.currentline()):
            return None
        return self.reader.currentline()[self.pos : self.pos + length]
class Container(object):
    "A container for text and objects in a lyx file"

    # partkey: key of the document part this container belongs to (set
    # externally); parent: enclosing container; begin: starting line number
    partkey = None
    parent = None
    begin = None

    def __init__(self):
        self.contents = list()

    def process(self):
        "Process contents"
        pass

    def gethtml(self):
        "Get the resulting HTML"
        html = self.output.gethtml(self)
        if isinstance(html, basestring):
            # outputs must return lists; tolerate but report a bare string
            Trace.error('Raw string ' + html)
            html = [html]
        return self.escapeall(html)

    def escapeall(self, lines):
        "Escape all lines in an array according to the output options."
        result = []
        for line in lines:
            if Options.html:
                line = self.escape(line, EscapeConfig.html)
            if Options.iso885915:
                line = self.escape(line, EscapeConfig.iso885915)
                line = self.escapeentities(line)
            elif not Options.unicode:
                line = self.escape(line, EscapeConfig.nonunicode)
            result.append(line)
        return result

    def escape(self, line, replacements = EscapeConfig.entities):
        "Escape a line with replacements from elyxer.a map"
        pieces = replacements.keys()
        # do them in order
        pieces.sort()
        for piece in pieces:
            if piece in line:
                line = line.replace(piece, replacements[piece])
        return line

    def escapeentities(self, line):
        "Escape all Unicode characters to HTML entities."
        result = ''
        pos = TextPosition(line)
        while not pos.finished():
            if ord(pos.current()) > 128:
                codepoint = hex(ord(pos.current()))
                if codepoint == '0xd835':
                    # 0xd835 is a UTF-16 high surrogate: combine with the next
                    # code unit (presumably for mathematical alphanumeric
                    # symbols) -- TODO confirm the 0xf800 offset
                    codepoint = hex(ord(pos.next()) + 0xf800)
                result += '&#' + codepoint[1:] + ';'
            else:
                result += pos.current()
            pos.skipcurrent()
        return result

    def searchall(self, type):
        "Search for all embedded containers of a given type"
        list = []
        self.searchprocess(type, lambda container: list.append(container))
        return list

    def searchremove(self, type):
        "Search for all containers of a type and remove them"
        list = self.searchall(type)
        for container in list:
            container.parent.contents.remove(container)
        return list

    def searchprocess(self, type, process):
        "Search for elements of a given type and process them"
        self.locateprocess(lambda container: isinstance(container, type), process)

    def locateprocess(self, locate, process):
        "Search for all embedded containers and process them"
        # depth-first: children are processed before the container itself
        for container in self.contents:
            container.locateprocess(locate, process)
            if locate(container):
                process(container)

    def recursivesearch(self, locate, recursive, process):
        "Perform a recursive search in the container."
        for container in self.contents:
            if recursive(container):
                container.recursivesearch(locate, recursive, process)
            if locate(container):
                process(container)

    def extracttext(self):
        "Extract all text from elyxer.allowed containers."
        result = ''
        constants = ContainerExtractor(ContainerConfig.extracttext).extract(self)
        for constant in constants:
            result += constant.string
        return result

    def group(self, index, group, isingroup):
        "Group some adjoining elements into a group"
        if index >= len(self.contents):
            return
        # elements already grouped are not grouped again
        if hasattr(self.contents[index], 'grouped'):
            return
        # move consecutive matching elements into the group, then put the
        # group itself where they used to be
        while index < len(self.contents) and isingroup(self.contents[index]):
            self.contents[index].grouped = True
            group.contents.append(self.contents[index])
            self.contents.pop(index)
        self.contents.insert(index, group)

    def remove(self, index):
        "Remove a container but leave its contents"
        container = self.contents[index]
        self.contents.pop(index)
        # popping from the end and inserting at index preserves the order
        while len(container.contents) > 0:
            self.contents.insert(index, container.contents.pop())

    def tree(self, level = 0):
        "Show in a tree"
        Trace.debug(" " * level + unicode(self))
        for container in self.contents:
            container.tree(level + 1)

    def getparameter(self, name):
        "Get the value of a parameter, if present."
        if not name in self.parameters:
            return None
        return self.parameters[name]

    def getparameterlist(self, name):
        "Get the value of a comma-separated parameter as a list."
        paramtext = self.getparameter(name)
        if not paramtext:
            return []
        return paramtext.split(',')

    def hasemptyoutput(self):
        "Check if the parent's output is empty."
        # true if any ancestor produces empty output
        current = self.parent
        while current:
            if current.output.isempty():
                return True
            current = current.parent
        return False

    def __unicode__(self):
        "Get a description"
        if not self.begin:
            return self.__class__.__name__
        return self.__class__.__name__ + '@' + unicode(self.begin)
class BlackBox(Container):
    "A container that does not output anything"

    def __init__(self):
        "Use a lone-command parser and produce no output at all."
        self.parser = LoneCommand()
        self.output = EmptyOutput()
        self.contents = []
class LyXFormat(BlackBox):
    "Read the lyxformat command"

    def process(self):
        "Show warning if version < 276"
        version = int(self.header[1])
        supported = int(GeneralConfig.version['lyxformat'])
        if version < 276:
            Trace.error('Warning: unsupported old format version ' + str(version))
        if version > supported:
            Trace.error('Warning: unsupported new format version ' + str(version))
class StringContainer(Container):
    "A container for a single string"

    # parsed: the raw parsed line, consumed by process() and then cleared
    parsed = None

    def __init__(self):
        self.parser = StringParser()
        self.output = StringOutput()
        self.string = ''

    def process(self):
        "Replace special chars from elyxer.the contents."
        if self.parsed:
            self.string = self.replacespecial(self.parsed)
            self.parsed = None

    def replacespecial(self, line):
        "Replace all special chars from elyxer.a line"
        replaced = self.escape(line, EscapeConfig.entities)
        replaced = self.changeline(replaced)
        # a remaining command marker means an unprocessed command survived
        if ContainerConfig.string['startcommand'] in replaced and len(replaced) > 1:
            # unprocessed commands
            if self.begin:
                message = 'Unknown command at ' + unicode(self.begin) + ': '
            else:
                message = 'Unknown command: '
            Trace.error(message + replaced.strip())
        return replaced

    def changeline(self, line):
        # escape single characters first; command escapes apply only when a
        # command marker is present in the line
        line = self.escape(line, EscapeConfig.chars)
        if not ContainerConfig.string['startcommand'] in line:
            return line
        line = self.escape(line, EscapeConfig.commands)
        return line

    def extracttext(self):
        "Return all text."
        return self.string

    def __unicode__(self):
        "Return a printable representation."
        # show at most 15 characters of the contained string
        result = 'StringContainer'
        if self.begin:
            result += '@' + unicode(self.begin)
        ellipsis = '...'
        if len(self.string.strip()) <= 15:
            ellipsis = ''
        return result + ' (' + self.string.strip()[:15] + ellipsis + ')'
class Constant(StringContainer):
    "A constant string"

    def __init__(self, text):
        "Wrap the given text without any parsing."
        self.contents = []
        self.string = text
        self.output = StringOutput()

    def __unicode__(self):
        "Printable representation."
        return 'Constant: ' + self.string
class TaggedText(Container):
    "Text inside a tag"

    output = None

    def __init__(self):
        # parse with a text parser and surround the result with a tag
        self.parser = TextParser(self)
        self.output = TaggedOutput()

    def complete(self, contents, tag, breaklines=False):
        "Complete the tagged text and return it"
        self.contents = contents
        self.output.tag = tag
        self.output.breaklines = breaklines
        return self

    def constant(self, text, tag, breaklines=False):
        "Complete the tagged text with a constant"
        constant = Constant(text)
        return self.complete([constant], tag, breaklines)

    def __unicode__(self):
        "Return a printable representation."
        if not hasattr(self.output, 'tag'):
            # fix: the original message misspelled 'Empty' as 'Emtpy'
            return 'Empty tagged text'
        if not self.output.tag:
            return 'Tagged <unknown tag>'
        return 'Tagged <' + self.output.tag + '>'
class DocumentParameters(object):
  "Global parameters for the document."
  # Title for the output document metadata, if any — presumably from \pdftitle.
  pdftitle = None
  # Whether the source document uses standard paragraph indentation.
  indentstandard = False
  # Maximum sectioning depth shown in the table of contents.
  tocdepth = 10
  # Level of the topmost sectioning layout; subtracted in NumberGenerator.getlevel().
  startinglevel = 0
  # Maximum sectioning depth that still receives numbers.
  maxdepth = 10
  # Language of the document; drives Translator.findtranslation().
  language = None
  # Bibliography setting, when present — TODO confirm exact semantics at callers.
  bibliography = None
  # Whether to render tracked changes in the output.
  outputchanges = False
  # True while the current formula is a block (display-mode) formula.
  displaymode = False
class FormulaParser(Parser):
  "Parses a formula"
  def parseheader(self, reader):
    "See if the formula is inlined"
    # Remember the (1-based) line where the formula body begins.
    self.begin = reader.linenumber + 1
    type = self.parsetype(reader)
    if not type:
      # The type marker may sit on the following line; skip ahead and retry once.
      reader.nextline()
      type = self.parsetype(reader)
      if not type:
        Trace.error('Unknown formula type in ' + reader.currentline().strip())
        return ['unknown']
    return [type]
  def parsetype(self, reader):
    "Get the formula type from the first line."
    if reader.currentline().find(FormulaConfig.starts['simple']) >= 0:
      return 'inline'
    if reader.currentline().find(FormulaConfig.starts['complex']) >= 0:
      return 'block'
    if reader.currentline().find(FormulaConfig.starts['unnumbered']) >= 0:
      return 'block'
    if reader.currentline().find(FormulaConfig.starts['beginbefore']) >= 0:
      return 'numbered'
    return None
  def parse(self, reader):
    "Parse the formula until the end"
    formula = self.parseformula(reader)
    # Complain about any leftover non-empty lines before the ending marker.
    while not reader.currentline().startswith(self.ending):
      stripped = reader.currentline().strip()
      if len(stripped) > 0:
        Trace.error('Unparsed formula line ' + stripped)
      reader.nextline()
    reader.nextline()
    return formula
  def parseformula(self, reader):
    "Parse the formula contents"
    simple = FormulaConfig.starts['simple']
    if simple in reader.currentline():
      rest = reader.currentline().split(simple, 1)[1]
      if simple in rest:
        # formula is $...$
        return self.parsesingleliner(reader, simple, simple)
      # formula is multiline $...$
      return self.parsemultiliner(reader, simple, simple)
    if FormulaConfig.starts['complex'] in reader.currentline():
      # formula of the form \[...\]
      return self.parsemultiliner(reader, FormulaConfig.starts['complex'],
        FormulaConfig.endings['complex'])
    beginbefore = FormulaConfig.starts['beginbefore']
    beginafter = FormulaConfig.starts['beginafter']
    if beginbefore in reader.currentline():
      if reader.currentline().strip().endswith(beginafter):
        current = reader.currentline().strip()
        endsplit = current.split(beginbefore)[1].split(beginafter)
        # Rebuild the \begin{...} piece and its matching \end{...} so the
        # multiliner stops at the right environment ending.
        startpiece = beginbefore + endsplit[0] + beginafter
        endbefore = FormulaConfig.endings['endbefore']
        endafter = FormulaConfig.endings['endafter']
        endpiece = endbefore + endsplit[0] + endafter
        return startpiece + self.parsemultiliner(reader, startpiece, endpiece) + endpiece
      Trace.error('Missing ' + beginafter + ' in ' + reader.currentline())
      return ''
    begincommand = FormulaConfig.starts['command']
    beginbracket = FormulaConfig.starts['bracket']
    if begincommand in reader.currentline() and beginbracket in reader.currentline():
      endbracket = FormulaConfig.endings['bracket']
      return self.parsemultiliner(reader, beginbracket, endbracket)
    Trace.error('Formula beginning ' + reader.currentline() + ' is unknown')
    return ''
  def parsesingleliner(self, reader, start, ending):
    "Parse a formula in one line"
    line = reader.currentline().strip()
    if not start in line:
      Trace.error('Line ' + line + ' does not contain formula start ' + start)
      return ''
    if not line.endswith(ending):
      Trace.error('Formula ' + line + ' does not end with ' + ending)
      return ''
    index = line.index(start)
    # Keep only the text between the start and ending markers.
    rest = line[index + len(start):-len(ending)]
    reader.nextline()
    return rest
  def parsemultiliner(self, reader, start, ending):
    "Parse a formula in multiple lines"
    formula = ''
    line = reader.currentline()
    if not start in line:
      Trace.error('Line ' + line.strip() + ' does not contain formula start ' + start)
      return ''
    index = line.index(start)
    line = line[index + len(start):].strip()
    # Accumulate lines until one ends with the ending marker.
    while not line.endswith(ending):
      formula += line + '\n'
      reader.nextline()
      line = reader.currentline()
    formula += line[:-len(ending)]
    reader.nextline()
    return formula
class MacroParser(FormulaParser):
  "A parser for a formula macro."
  def parseheader(self, reader):
    "See if the formula is inlined"
    # Macros are always treated as inline formulas.
    self.begin = reader.linenumber + 1
    return ['inline']
  def parse(self, reader):
    "Parse the formula until the end"
    # The start marker comes from the parent container.
    formula = self.parsemultiliner(reader, self.parent.start, self.ending)
    reader.nextline()
    return formula
class FormulaBit(Container):
  "A bit of a formula"
  # The bit type: 'alpha', 'number', 'font'... or None when untyped.
  type = None
  # Relative size of the bit; recomputed from the contents in computesize().
  size = 1
  # The original formula text consumed by this bit.
  original = ''
  def __init__(self):
    "The formula bit type can be 'alpha', 'number', 'font'."
    self.contents = []
    self.output = ContentsOutput()
  def setfactory(self, factory):
    "Set the internal formula factory."
    self.factory = factory
    return self
  def add(self, bit):
    "Add any kind of formula bit already processed"
    self.contents.append(bit)
    self.original += bit.original
    bit.parent = self
  def skiporiginal(self, string, pos):
    "Skip a string and add it to the original formula"
    self.original += string
    if not pos.checkskip(string):
      Trace.error('String ' + string + ' not at ' + pos.identifier())
  def computesize(self):
    "Compute the size of the bit as the max of the sizes of all contents."
    if len(self.contents) == 0:
      return 1
    self.size = max([element.size for element in self.contents])
    return self.size
  def clone(self):
    "Return a copy of itself."
    # Reparse the original text rather than deep-copying the structure.
    return self.factory.parseformula(self.original)
  def __unicode__(self):
    "Get a string representation"
    return self.__class__.__name__ + ' read in ' + self.original
class TaggedBit(FormulaBit):
  "A formula bit wrapped in an output tag."
  def constant(self, constant, tag):
    "Wrap a single constant string under the given tag."
    self.add(FormulaConstant(constant))
    self.output = TaggedOutput().settag(tag)
    return self
  def complete(self, contents, tag, breaklines = False):
    "Adopt the given contents under the given tag."
    self.output = TaggedOutput().settag(tag, breaklines)
    self.contents = contents
    return self
  def selfcomplete(self, tag):
    "Use a self-closing tag with no contents (as in <hr/>)."
    self.output = TaggedOutput().settag(tag, empty = True)
    return self
class FormulaConstant(Constant):
  "A fixed string inside a formula."
  def __init__(self, string):
    "Store the string, mirroring it as the original formula text."
    Constant.__init__(self, string)
    self.original = string
    self.type = None
    self.size = 1
  def computesize(self):
    "The size of a constant is always 1."
    return self.size
  def clone(self):
    "Return a fresh constant with the same text."
    return FormulaConstant(self.original)
  def __unicode__(self):
    "Return a printable representation."
    return 'Formula constant: ' + self.string
class RawText(FormulaBit):
  "A run of alphabetic text inside a formula."
  def detect(self, pos):
    "A raw text bit starts with an alphabetic character."
    return pos.current().isalpha()
  def parsebit(self, pos):
    "Consume the alphabetic run and store it as a constant."
    self.type = 'alpha'
    self.add(FormulaConstant(pos.globalpha()))
class FormulaSymbol(FormulaBit):
  "A symbol inside a formula"
  # Characters that are replaced by a configured symbol.
  modified = FormulaConfig.modified
  # Characters that pass through unchanged.
  unmodified = FormulaConfig.unmodified['characters']
  def detect(self, pos):
    "Detect a symbol"
    if pos.current() in FormulaSymbol.unmodified:
      return True
    if pos.current() in FormulaSymbol.modified:
      return True
    return False
  def parsebit(self, pos):
    "Parse the symbol"
    if pos.current() in FormulaSymbol.unmodified:
      self.addsymbol(pos.current(), pos)
      return
    if pos.current() in FormulaSymbol.modified:
      self.addsymbol(FormulaSymbol.modified[pos.current()], pos)
      return
    Trace.error('Symbol ' + pos.current() + ' not found')
  def addsymbol(self, symbol, pos):
    "Add a symbol"
    # Consume the source character but store the (possibly replaced) symbol.
    self.skiporiginal(pos.current(), pos)
    self.contents.append(FormulaConstant(symbol))
class FormulaNumber(FormulaBit):
  "A string of digits in a formula."
  def detect(self, pos):
    "A number bit starts with a digit."
    return pos.current().isdigit()
  def parsebit(self, pos):
    "Consume consecutive digits into a single constant."
    self.type = 'number'
    digits = pos.glob(lambda: pos.current().isdigit())
    self.add(FormulaConstant(digits))
class Comment(FormulaBit):
  "A LaTeX comment: % to the end of the line."
  # The comment marker character.
  start = FormulaConfig.starts['comment']
  def detect(self, pos):
    "Detect the %."
    return pos.current() == self.start
  def parsebit(self, pos):
    "Parse to the end of the line."
    # The comment is kept only in the original text; nothing is added to
    # the contents, so it produces no output.
    self.original += pos.globincluding('\n')
class WhiteSpace(FormulaBit):
  "A run of white space inside a formula."
  def detect(self, pos):
    "A whitespace bit starts with a space character."
    return pos.current().isspace()
  def parsebit(self, pos):
    "Consume all contiguous whitespace into the original text only."
    self.original += pos.skipspace()
  def __unicode__(self):
    "Return a printable representation."
    return 'Whitespace: *' + self.original + '*'
class Bracket(FormulaBit):
  "A {} bracket inside a formula"
  start = FormulaConfig.starts['bracket']
  ending = FormulaConfig.endings['bracket']
  def __init__(self):
    "Create a (possibly literal) new bracket"
    FormulaBit.__init__(self)
    self.inner = None
  def detect(self, pos):
    "Detect the start of a bracket"
    return pos.checkfor(self.start)
  def parsebit(self, pos):
    "Parse the bracket"
    self.parsecomplete(pos, self.innerformula)
    return self
  def parsetext(self, pos):
    "Parse a text bracket"
    self.parsecomplete(pos, self.innertext)
    return self
  def parseliteral(self, pos):
    "Parse a literal bracket"
    self.parsecomplete(pos, self.innerliteral)
    return self
  def parsecomplete(self, pos, innerparser):
    "Parse the start and end marks"
    if not pos.checkfor(self.start):
      Trace.error('Bracket should start with ' + self.start + ' at ' + pos.identifier())
      return None
    self.skiporiginal(self.start, pos)
    # Push the ending so inner parsers stop at the matching close mark.
    pos.pushending(self.ending)
    innerparser(pos)
    self.original += pos.popending(self.ending)
    self.computesize()
  def innerformula(self, pos):
    "Parse a whole formula inside the bracket"
    while not pos.finished():
      self.add(self.factory.parseany(pos))
  def innertext(self, pos):
    "Parse some text inside the bracket, following textual rules."
    # Python 2: keys() returns a list, which can be appended to in place.
    specialchars = FormulaConfig.symbolfunctions.keys()
    specialchars.append(FormulaConfig.starts['command'])
    specialchars.append(FormulaConfig.starts['bracket'])
    specialchars.append(Comment.start)
    while not pos.finished():
      if pos.current() in specialchars:
        self.add(self.factory.parseany(pos))
        if pos.checkskip(' '):
          self.original += ' '
      else:
        self.add(FormulaConstant(pos.skipcurrent()))
  def innerliteral(self, pos):
    "Parse a literal inside the bracket, which does not generate HTML."
    self.literal = ''
    while not pos.finished() and not pos.current() == self.ending:
      if pos.current() == self.start:
        # Nested bracket: recurse; the nested call appends to self.literal.
        self.parseliteral(pos)
      else:
        self.literal += pos.skipcurrent()
    self.original += self.literal
class SquareBracket(Bracket):
  "A [] bracket inside a formula"
  start = FormulaConfig.starts['squarebracket']
  ending = FormulaConfig.endings['squarebracket']
  def clone(self):
    "Return a new square bracket with the same contents."
    # Note: the contents list is shared with the original, not copied.
    bracket = SquareBracket()
    bracket.contents = self.contents
    return bracket
class MathsProcessor(object):
  "Base processor for one maths construction inside the FormulaProcessor."
  def process(self, contents, index):
    "Process an element inside a formula; subclasses must override."
    Trace.error('Unimplemented process() in ' + unicode(self))
  def __unicode__(self):
    "Return a printable description."
    return 'Maths processor ' + type(self).__name__
class FormulaProcessor(object):
  "A processor specifically for formulas."
  # Maths-construction processors; shared by all instances.
  processors = []
  def process(self, bit):
    "Process the contents of every formula bit, recursively."
    self.processcontents(bit)
    self.processinsides(bit)
    self.traversewhole(bit)
  def processcontents(self, bit):
    "Process the contents of a formula bit."
    if not isinstance(bit, FormulaBit):
      return
    bit.process()
    for element in bit.contents:
      self.processcontents(element)
  def processinsides(self, bit):
    "Process the insides (limits, brackets) in a formula bit."
    if not isinstance(bit, FormulaBit):
      return
    for index, element in enumerate(bit.contents):
      for processor in self.processors:
        processor.process(bit.contents, index)
      # continue with recursive processing
      self.processinsides(element)
  def traversewhole(self, formula):
    "Traverse over the contents to alter variables and space units."
    last = None
    for bit, contents in self.traverse(formula):
      if bit.type == 'alpha':
        self.italicize(bit, contents)
      elif bit.type == 'font' and last and last.type == 'number':
        # Separate a number from an immediately following font bit.
        bit.contents.insert(0, FormulaConstant(u' '))
      last = bit
  def traverse(self, bit):
    "Traverse a formula and yield a flattened structure of (bit, list) pairs."
    for element in bit.contents:
      if hasattr(element, 'type') and element.type:
        yield (element, bit.contents)
      elif isinstance(element, FormulaBit):
        for pair in self.traverse(element):
          yield pair
  def italicize(self, bit, contents):
    "Italicize the given bit of text."
    # Replace the bit in place with an <i>-tagged wrapper around it.
    index = contents.index(bit)
    contents[index] = TaggedBit().complete([bit], 'i')
class Formula(Container):
  "A LaTeX formula"
  def __init__(self):
    self.parser = FormulaParser()
    self.output = TaggedOutput().settag('span class="formula"')
  def process(self):
    "Convert the formula to tags"
    if self.header[0] == 'inline':
      DocumentParameters.displaymode = False
    else:
      # Block formula: switch to display mode and a div container.
      DocumentParameters.displaymode = True
      self.output.settag('div class="formula"', True)
    if Options.jsmath:
      self.jsmath()
    elif Options.mathjax:
      self.mathjax()
    elif Options.googlecharts:
      self.googlecharts()
    else:
      self.classic()
  def jsmath(self):
    "Make the contents for jsMath."
    if self.header[0] != 'inline':
      self.output = TaggedOutput().settag('div class="math"')
    else:
      self.output = TaggedOutput().settag('span class="math"')
    self.contents = [Constant(self.parsed)]
  def mathjax(self):
    "Make the contents for MathJax."
    self.output.tag = 'span class="MathJax_Preview"'
    tag = 'script type="math/tex'
    if self.header[0] != 'inline':
      tag += ';mode=display'
    self.contents = [TaggedText().constant(self.parsed, tag + '"', True)]
  def googlecharts(self):
    "Make the contents using Google Charts http://code.google.com/apis/chart/."
    # NOTE(review): relies on urllib being imported elsewhere in the file.
    url = FormulaConfig.urls['googlecharts'] + urllib.quote_plus(self.parsed)
    img = '<img class="chart" src="' + url + '" alt="' + self.parsed + '"/>'
    self.contents = [Constant(img)]
  def classic(self):
    "Make the contents using classic output generation with XHTML and CSS."
    whole = FormulaFactory().parseformula(self.parsed)
    FormulaProcessor().process(whole)
    whole.parent = self
    self.contents = [whole]
  def parse(self, pos):
    "Parse using a parse position instead of self.parser."
    if pos.checkskip('$$'):
      self.parsedollarblock(pos)
    elif pos.checkskip('$'):
      self.parsedollarinline(pos)
    elif pos.checkskip('\\('):
      self.parseinlineto(pos, '\\)')
    elif pos.checkskip('\\['):
      self.parseblockto(pos, '\\]')
    else:
      pos.error('Unparseable formula')
    self.process()
    return self
  def parsedollarinline(self, pos):
    "Parse a $...$ formula."
    self.header = ['inline']
    self.parsedollar(pos)
  def parsedollarblock(self, pos):
    "Parse a $$...$$ formula."
    self.header = ['block']
    self.parsedollar(pos)
    if not pos.checkskip('$'):
      pos.error('Formula should be $$...$$, but last $ is missing.')
  def parsedollar(self, pos):
    "Parse to the next $."
    pos.pushending('$')
    self.parsed = pos.globexcluding('$')
    pos.popending('$')
  def parseinlineto(self, pos, limit):
    "Parse a \\(...\\) formula."
    self.header = ['inline']
    self.parseupto(pos, limit)
  def parseblockto(self, pos, limit):
    "Parse a \\[...\\] formula."
    self.header = ['block']
    self.parseupto(pos, limit)
  def parseupto(self, pos, limit):
    "Parse a formula that ends with the given command."
    pos.pushending(limit)
    self.parsed = pos.glob(lambda: True)
    pos.popending(limit)
  def __unicode__(self):
    "Return a printable representation."
    if self.partkey and self.partkey.number:
      return 'Formula (' + self.partkey.number + ')'
    return 'Unnumbered formula'
class WholeFormula(FormulaBit):
  "Parse a whole formula."
  def detect(self, pos):
    "Any remaining text counts as part of the formula."
    return not pos.finished()
  def parsebit(self, pos):
    "Keep parsing bits until the position is exhausted."
    while not pos.finished():
      self.add(self.factory.parseany(pos))
class FormulaFactory(object):
  "Construct bits of formula"
  # bit types will be appended later
  types = [FormulaSymbol, RawText, FormulaNumber, Bracket, Comment, WhiteSpace]
  # Types that are consumed but produce no output of their own.
  skippedtypes = [Comment, WhiteSpace]
  defining = False
  def __init__(self):
    "Initialize the map of instances."
    self.instances = dict()
  def detecttype(self, type, pos):
    "Detect a bit of a given type."
    if pos.finished():
      return False
    return self.instance(type).detect(pos)
  def instance(self, type):
    "Get an instance of the given type."
    # Lazily (re)create the cached instance; parsetype() consumes it.
    if not type in self.instances or not self.instances[type]:
      self.instances[type] = self.create(type)
    return self.instances[type]
  def create(self, type):
    "Create a new formula bit of the given type."
    return Cloner.create(type).setfactory(self)
  def clearskipped(self, pos):
    "Clear any skipped types."
    while not pos.finished():
      if not self.skipany(pos):
        return
    return
  def skipany(self, pos):
    "Skip any skipped types."
    for type in self.skippedtypes:
      if self.instance(type).detect(pos):
        return self.parsetype(type, pos)
    return None
  def parseany(self, pos):
    "Parse any formula bit at the current location."
    for type in self.types + self.skippedtypes:
      if self.detecttype(type, pos):
        return self.parsetype(type, pos)
    Trace.error('Unrecognized formula at ' + pos.identifier())
    return FormulaConstant(pos.skipcurrent())
  def parsetype(self, type, pos):
    "Parse the given type and return it."
    bit = self.instance(type)
    # Consume the cached instance so the next request builds a fresh one.
    self.instances[type] = None
    returnedbit = bit.parsebit(pos)
    if returnedbit:
      return returnedbit.setfactory(self)
    return bit
  def parseformula(self, formula):
    "Parse a string of text that contains a whole formula."
    pos = TextPosition(formula)
    whole = self.create(WholeFormula)
    if whole.detect(pos):
      whole.parsebit(pos)
      return whole
    # no formula found
    if not pos.finished():
      Trace.error('Unknown formula at: ' + pos.identifier())
      whole.add(TaggedBit().constant(formula, 'span class="unknown"'))
    return whole
import unicodedata
import gettext
class Translator(object):
  "Reads the configuration file and tries to find a translation."
  "Otherwise falls back to the messages in the config file."
  # The singleton instance, assigned right after the class definition.
  instance = None
  def translate(cls, key):
    "Get the translated message for a key."
    return cls.instance.getmessage(key)
  # Pre-decorator idiom: turn translate() into a classmethod.
  translate = classmethod(translate)
  def __init__(self):
    self.translation = None
    self.first = True
  def findtranslation(self):
    "Find the translation for the document language."
    self.langcodes = None
    if not DocumentParameters.language:
      Trace.error('No language in document')
      return
    if not DocumentParameters.language in TranslationConfig.languages:
      Trace.error('Unknown language ' + DocumentParameters.language)
      return
    if TranslationConfig.languages[DocumentParameters.language] == 'en':
      # English is the untranslated default; no catalog needed.
      return
    langcodes = [TranslationConfig.languages[DocumentParameters.language]]
    try:
      self.translation = gettext.translation('elyxer', None, langcodes)
    except IOError:
      Trace.error('No translation for ' + unicode(langcodes))
  def getmessage(self, key):
    "Get the translated message for the given key."
    if self.first:
      # Look the catalog up lazily, on the first message request.
      self.findtranslation()
      self.first = False
    message = self.getuntranslated(key)
    if not self.translation:
      return message
    try:
      # Python 2 gettext: ugettext returns a unicode translation.
      message = self.translation.ugettext(message)
    except IOError:
      pass
    return message
  def getuntranslated(self, key):
    "Get the untranslated message."
    if not key in TranslationConfig.constants:
      Trace.error('Cannot translate ' + key)
      return key
    return TranslationConfig.constants[key]
# Create the singleton instance used by Translator.translate().
Translator.instance = Translator()
class NumberCounter(object):
  "A counter for numbers (by default)."
  "The type can be changed to return letters, roman numbers..."
  # Counter name, mode ('1', 'A', 'a', 'I', '*'), current value and
  # optional master counter (used by DependentCounter).
  name = None
  value = None
  mode = None
  master = None
  letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
  symbols = NumberingConfig.sequence['symbols']
  romannumerals = [
      ('M', 1000), ('CM', 900), ('D', 500), ('CD', 400), ('C', 100),
      ('XC', 90), ('L', 50), ('XL', 40), ('X', 10), ('IX', 9), ('V', 5),
      ('IV', 4), ('I', 1)
      ]
  def __init__(self, name):
    "Give a name to the counter."
    self.name = name
  def setmode(self, mode):
    "Set the counter mode. Can be changed at runtime."
    self.mode = mode
    return self
  def init(self, value):
    "Set an initial value."
    self.value = value
  def gettext(self):
    "Get the current value as a text string."
    return unicode(self.value)
  def getletter(self):
    "Get the current value as a letter."
    return self.getsequence(self.letters)
  def getsymbol(self):
    "Get the current value as a symbol."
    return self.getsequence(self.symbols)
  def getsequence(self, sequence):
    "Get the current value from a sequence, wrapping around its length."
    return sequence[(self.value - 1) % len(sequence)]
  def getroman(self):
    "Get the current value as a roman number."
    result = ''
    number = self.value
    for numeral, value in self.romannumerals:
      if number >= value:
        # Bug fix: use explicit floor division so the repeat count stays
        # an integer even under true division (future import / Python 3).
        result += numeral * (number // value)
        number = number % value
    return result
  def getvalue(self):
    "Get the current value as configured in the current mode."
    if not self.mode or self.mode in ['text', '1']:
      return self.gettext()
    if self.mode == 'A':
      return self.getletter()
    if self.mode == 'a':
      return self.getletter().lower()
    if self.mode == 'I':
      return self.getroman()
    if self.mode == '*':
      return self.getsymbol()
    Trace.error('Unknown counter mode ' + self.mode)
    return self.gettext()
  def getnext(self):
    "Increase the current value and get the next value as configured."
    if not self.value:
      self.value = 0
    self.value += 1
    return self.getvalue()
  def reset(self):
    "Reset the counter."
    self.value = 0
  def __unicode__(self):
    "Return a printable representation."
    result = 'Counter ' + self.name
    if self.mode:
      result += ' in mode ' + self.mode
    return result
class DependentCounter(NumberCounter):
  "A counter which depends on another one (the master)."
  def setmaster(self, master):
    "Attach the master counter and remember its current value."
    self.master = master
    self.last = master.getvalue()
    return self
  def getnext(self):
    "Increase, restarting first if the master counter has moved on."
    current = self.master.getvalue()
    if current != self.last:
      self.reset()
    value = NumberCounter.getnext(self)
    self.last = self.master.getvalue()
    return value
  def getvalue(self):
    "Get the value of the combined counter: master.dependent."
    return self.master.getvalue() + '.' + NumberCounter.getvalue(self)
class NumberGenerator(object):
  "A number generator for unique sequences and hierarchical structures. Used in:"
  " * ordered part numbers: Chapter 3, Section 5.3."
  " * unique part numbers: Footnote 15, Bibliography cite [15]."
  " * chaptered part numbers: Figure 3.15, Equation (8.3)."
  " * unique roman part numbers: Part I, Book IV."
  chaptered = None
  generator = None
  # Lowercased layout names from the configuration.
  romanlayouts = [x.lower() for x in NumberingConfig.layouts['roman']]
  orderedlayouts = [x.lower() for x in NumberingConfig.layouts['ordered']]
  # Counters by lowercased type name, shared by all instances.
  counters = dict()
  appendix = None
  def deasterisk(self, type):
    "Remove the possible asterisk in a layout type."
    return type.replace('*', '')
  def isunique(self, type):
    "Find out if the layout type corresponds to a unique part."
    return self.isroman(type)
  def isroman(self, type):
    "Find out if the layout type should have roman numeration."
    return self.deasterisk(type).lower() in self.romanlayouts
  def isinordered(self, type):
    "Find out if the layout type corresponds to an (un)ordered part."
    return self.deasterisk(type).lower() in self.orderedlayouts
  def isnumbered(self, type):
    "Find out if the type for a layout corresponds to a numbered layout."
    if '*' in type:
      return False
    if self.isroman(type):
      return True
    if not self.isinordered(type):
      return False
    if self.getlevel(type) > DocumentParameters.maxdepth:
      return False
    return True
  def isunordered(self, type):
    "Find out if the type contains an asterisk, basically."
    return '*' in type
  def getlevel(self, type):
    "Get the level that corresponds to a layout type."
    if self.isunique(type):
      return 0
    if not self.isinordered(type):
      Trace.error('Unknown layout type ' + type)
      return 0
    type = self.deasterisk(type).lower()
    level = self.orderedlayouts.index(type) + 1
    return level - DocumentParameters.startinglevel
  def getparttype(self, type):
    "Obtain the type for the part: without the asterisk, "
    "and switched to Appendix if necessary."
    if NumberGenerator.appendix and self.getlevel(type) == 1:
      return 'Appendix'
    return self.deasterisk(type)
  def generate(self, type):
    "Generate a number for a layout type."
    "Unique part types such as Part or Book generate roman numbers: Part I."
    "Ordered part types return dot-separated tuples: Chapter 5, Subsection 2.3.5."
    "Everything else generates unique numbers: Bibliography [1]."
    "Each invocation results in a new number."
    return self.getcounter(type).getnext()
  def getcounter(self, type):
    "Get the counter for the given type."
    type = type.lower()
    if not type in self.counters:
      self.counters[type] = self.create(type)
    return self.counters[type]
  def create(self, type):
    "Create a counter for the given type."
    # Levels below the first depend on the counter one level up.
    if self.isnumbered(type) and self.getlevel(type) > 1:
      index = self.orderedlayouts.index(type)
      above = self.orderedlayouts[index - 1]
      master = self.getcounter(above)
      return self.createdependent(type, master)
    counter = NumberCounter(type)
    if self.isroman(type):
      counter.setmode('I')
    return counter
  def getdependentcounter(self, type, master):
    "Get (or create) a counter of the given type that depends on another."
    if not type in self.counters or not self.counters[type].master:
      self.counters[type] = self.createdependent(type, master)
    return self.counters[type]
  def createdependent(self, type, master):
    "Create a dependent counter given the master."
    return DependentCounter(type).setmaster(master)
  def startappendix(self):
    "Start appendices here."
    # Switch the top-level counter to letters and restart it.
    firsttype = self.orderedlayouts[DocumentParameters.startinglevel]
    counter = self.getcounter(firsttype)
    counter.setmode('A').reset()
    NumberGenerator.appendix = True
class ChapteredGenerator(NumberGenerator):
  "Generate chaptered numbers, as in Chapter.Number."
  "Used in equations, figures: Equation (5.3), figure 8.15."
  def generate(self, type):
    "Generate a number which goes with first-level numbers (chapters). "
    "For the article classes a unique number is generated."
    if DocumentParameters.startinglevel > 0:
      # No chapters (e.g. article class): fall back to plain numbering.
      return NumberGenerator.generator.generate(type)
    chapter = self.getcounter('Chapter')
    return self.getdependentcounter(type, chapter).getnext()
# Install the shared generator singletons: chaptered and plain.
NumberGenerator.chaptered = ChapteredGenerator()
NumberGenerator.generator = NumberGenerator()
class ContainerSize(object):
  "The size of a container: width, height, max variants and scale."
  width = None
  height = None
  maxwidth = None
  maxheight = None
  scale = None
  def set(self, width = None, height = None):
    "Set the proper size with width and height."
    self.setvalue('width', width)
    self.setvalue('height', height)
    return self
  def setmax(self, maxwidth = None, maxheight = None):
    "Set max width and/or height."
    self.setvalue('maxwidth', maxwidth)
    self.setvalue('maxheight', maxheight)
    return self
  def readparameters(self, container):
    "Read some size parameters off a container."
    self.setparameter(container, 'width')
    self.setparameter(container, 'height')
    self.setparameter(container, 'scale')
    self.checkvalidheight(container)
    return self
  def setparameter(self, container, name):
    "Read a size parameter off a container, and set it if present."
    value = container.getparameter(name)
    self.setvalue(name, value)
  def setvalue(self, name, value):
    "Set the value of a parameter name, only if it's valid."
    value = self.processparameter(value)
    if value:
      setattr(self, name, value)
  def checkvalidheight(self, container):
    "Check if the height parameter is valid; otherwise erase it."
    heightspecial = container.getparameter('height_special')
    if self.height and self.extractnumber(self.height) == '1' and heightspecial == 'totalheight':
      # 1\totalheight means "natural height": drop the explicit value.
      self.height = None
  def processparameter(self, value):
    "Do the full processing on a parameter: discard empty or zero values,"
    "and strip configured ignored texts."
    if not value:
      return None
    if self.extractnumber(value) == '0':
      return None
    for ignored in StyleConfig.size['ignoredtexts']:
      if ignored in value:
        value = value.replace(ignored, '')
    return value
  def extractnumber(self, text):
    "Extract the leading (possibly decimal) number in the given text."
    result = ''
    decimal = False
    for char in text:
      if char.isdigit():
        result += char
      elif char == '.' and not decimal:
        result += char
        decimal = True
      else:
        return result
    return result
  def checkimage(self, width, height):
    "Check image dimensions, set them if possible."
    if width:
      self.maxwidth = unicode(width) + 'px'
      if self.scale and not self.width:
        self.width = self.scalevalue(width)
    if height:
      self.maxheight = unicode(height) + 'px'
      if self.scale and not self.height:
        self.height = self.scalevalue(height)
    # Let the browser keep the aspect ratio when only one side is set.
    if self.width and not self.height:
      self.height = 'auto'
    if self.height and not self.width:
      self.width = 'auto'
  def scalevalue(self, value):
    "Scale the value according to the image scale and return it as unicode."
    scaled = value * int(self.scale) / 100
    return unicode(int(scaled)) + 'px'
  def removepercentwidth(self):
    "Remove percent width if present, to set it at the figure level."
    if not self.width:
      return None
    if not '%' in self.width:
      return None
    width = self.width
    self.width = None
    if self.height == 'auto':
      self.height = None
    return width
  def addstyle(self, container):
    "Add the proper style attribute to the output tag."
    if not isinstance(container.output, TaggedOutput):
      Trace.error('No tag to add style, in ' + unicode(container))
      # Bug fix: there is no tag to modify, so bail out instead of
      # falling through and touching container.output.tag below.
      return
    if not self.width and not self.height and not self.maxwidth and not self.maxheight:
      # nothing to see here; move along
      return
    tag = ' style="'
    tag += self.styleparameter('width')
    tag += self.styleparameter('maxwidth')
    tag += self.styleparameter('height')
    tag += self.styleparameter('maxheight')
    if tag[-1] == ' ':
      tag = tag[:-1]
    tag += '"'
    container.output.tag += tag
  def styleparameter(self, name):
    "Get the style for a single parameter."
    value = getattr(self, name)
    if value:
      return name.replace('max', 'max-') + ': ' + value + '; '
    return ''
class QuoteContainer(Container):
  "A container that renders a typographic quote."
  def __init__(self):
    self.output = FixedOutput()
    self.parser = BoundedParser()
  def process(self):
    "Look up the configured glyph for the parsed quote type."
    self.type = self.header[2]
    if self.type in StyleConfig.quotes:
      self.html = [StyleConfig.quotes[self.type]]
      return
    # Unknown quote type: report it and fall back to a plain double quote.
    Trace.error('Quote type ' + self.type + ' not found')
    self.html = ['"']
class LyXLine(Container):
  "A LyX horizontal line."
  def __init__(self):
    self.output = FixedOutput()
    self.parser = LoneCommand()
  def process(self):
    "Emit a horizontal rule."
    self.html = ['<hr class="line" />']
class EmphaticText(TaggedText):
  "Text with emphatic mode."
  def process(self):
    "Render as italics."
    self.output.tag = 'i'
class ShapedText(TaggedText):
  "Text with a shape applied (italic, slanted...)."
  def process(self):
    "Map the shape to its configured tag, with a span fallback."
    self.type = self.header[1]
    if self.type in TagConfig.shaped:
      self.output.tag = TagConfig.shaped[self.type]
      return
    Trace.error('Unrecognized shape ' + self.header[1])
    self.output.tag = 'span'
class VersalitasText(TaggedText):
  "Text in versalitas (small caps)."
  def process(self):
    "Render with the versalitas span class."
    self.output.tag = 'span class="versalitas"'
class ColorText(TaggedText):
  "Text rendered in a color."
  def process(self):
    "Use a span whose class is the color name."
    self.color = self.header[1]
    self.output.tag = 'span class="' + self.color + '"'
class SizeText(TaggedText):
  "Text rendered at a given size."
  def process(self):
    "Use a span whose class is the size name."
    self.size = self.header[1]
    self.output.tag = 'span class="' + self.size + '"'
class BoldText(TaggedText):
  "Text in bold face."
  def process(self):
    "Render as bold."
    self.output.tag = 'b'
class TextFamily(TaggedText):
  "A bit of text from a different font family."
  def process(self):
    "Parse the family type and map it to its configured tag."
    self.type = self.header[1]
    if not self.type in TagConfig.family:
      # Bug fix: the original logged the builtin 'type' instead of the
      # unrecognized family name.
      Trace.error('Unrecognized family ' + self.type)
      self.output.tag = 'span'
      return
    self.output.tag = TagConfig.family[self.type]
class Hfill(TaggedText):
  "A horizontal fill."
  def process(self):
    "Render with the hfill span class."
    self.output.tag = 'span class="hfill"'
class BarredText(TaggedText):
  "Text decorated with a bar somewhere."
  def process(self):
    "Map the bar type to its configured tag, with a span fallback."
    self.type = self.header[1]
    if self.type in TagConfig.barred:
      self.output.tag = TagConfig.barred[self.type]
      return
    Trace.error('Unknown bar type ' + self.type)
    self.output.tag = 'span'
class LangLine(BlackBox):
  "A line carrying language information."
  def process(self):
    "Remember the declared language; produces no output."
    self.lang = self.header[1]
class InsetLength(BlackBox):
  "A length measure inside an inset."
  def process(self):
    "Store the length for the enclosing inset to pick up."
    self.length = self.header[1]
class Space(Container):
  "A horizontal space of one of several types."
  def __init__(self):
    self.output = FixedOutput()
    self.parser = InsetParser()
  def process(self):
    "Translate the space type to HTML, sizing it when a length is present."
    self.type = self.header[2]
    if not self.type in StyleConfig.hspaces:
      Trace.error('Unknown space type ' + self.type)
      self.html = [' ']
      return
    self.html = [StyleConfig.hspaces[self.type]]
    length = self.getlength()
    if not length:
      return
    # A custom length: switch to a sized span.
    self.output = TaggedOutput().settag('span class="hspace"', False)
    ContainerSize().set(length).addstyle(self)
  def getlength(self):
    "Extract the space length stored in a leading InsetLength, if any."
    if self.contents and isinstance(self.contents[0], InsetLength):
      return self.contents[0].length
    return None
class VerticalSpace(Container):
  "An inset that contains a vertical space."
  def __init__(self):
    self.parser = InsetParser()
    self.output = FixedOutput()
  def process(self):
    "Map known space types to HTML; emit a sized div otherwise."
    self.type = self.header[2]
    if self.type in StyleConfig.vspaces:
      self.html = [StyleConfig.vspaces[self.type]]
      return
    tag = 'div class="vspace" style="height: ' + self.type + ';"'
    self.output = TaggedOutput().settag(tag, True)
class Align(Container):
  "A chunk of aligned text."
  def __init__(self):
    self.parser = ExcludingParser()
    self.output = TaggedOutput().setbreaklines(True)
  def process(self):
    "Use the alignment name from the header as the div class."
    self.output.tag = 'div class="%s"' % self.header[1]
class Newline(Container):
  "A line break."
  def __init__(self):
    self.parser = LoneCommand()
    self.output = FixedOutput()
  def process(self):
    "Emit a single HTML line break."
    self.html = ['<br/>\n']
class NewPage(Newline):
  "A page break, rendered as an empty paragraph."
  def process(self):
    "Emit a paragraph containing only a line break."
    self.html = ['<p><br/>\n</p>\n']
class Separator(Container):
  "A separator string which is not extracted by extracttext()."
  def __init__(self, constant):
    "Wrap the given constant string as fixed output with no contents."
    self.output = FixedOutput()
    self.contents = []
    self.html = [constant]
class StrikeOut(TaggedText):
  "Struck-out text."
  def process(self):
    "Wrap the contents in a <strike> tag."
    self.output.tag = 'strike'
class StartAppendix(BlackBox):
  """Mark to start an appendix here.
  From this point on, all chapters become appendices."""
  def process(self):
    "Switch the numbering scheme for appendices to letters."
    NumberGenerator.generator.startappendix()
class Link(Container):
  "A link to another part of the document, or an anchor (destination)."
  # class-level defaults; instances overwrite only the fields they use
  anchor = None
  url = None
  type = None
  page = None
  target = None
  destination = None
  title = None
  def __init__(self):
    "Initialize the link, add target if configured."
    self.contents = []
    self.parser = InsetParser()
    self.output = LinkOutput()
    if Options.target:
      self.target = Options.target
  def complete(self, text, anchor = None, url = None, type = None, title = None):
    "Fill in the link text and any of anchor, url, type, title; fluent."
    self.contents = [Constant(text)]
    if anchor:
      self.anchor = anchor
    if url:
      self.url = url
    if type:
      self.type = type
    if title:
      self.title = title
    return self
  def computedestination(self):
    "Use the destination link to fill in the destination URL."
    if not self.destination:
      return
    self.url = ''
    if self.destination.anchor:
      self.url = '#' + self.destination.anchor
    if self.destination.page:
      # prepend the page so the URL works across split output files
      self.url = self.destination.page + self.url
  def setmutualdestination(self, destination):
    "Set another link as destination, and set its destination to this one."
    self.destination = destination
    destination.destination = self
  def __unicode__(self):
    "Return a printable representation."
    result = 'Link'
    if self.anchor:
      result += ' #' + self.anchor
    if self.url:
      result += ' to ' + self.url
    return result
class URL(Link):
  "A clickable URL."
  def process(self):
    "Assemble the URL from the target and the optional type parameter."
    target = self.escape(self.getparameter('target'))
    scheme = self.getparameter('type')
    if scheme:
      self.url = self.escape(scheme) + target
    else:
      self.url = target
    name = self.getparameter('name') or target
    self.contents = [Constant(name)]
class FlexURL(URL):
  "A flexible URL taken from the inset contents."
  def process(self):
    "Use the extracted text as the URL."
    self.url = self.extracttext()
class LinkOutput(ContainerOutput):
  "Output for a link: a pointer to some destination, or an anchor."
  def gethtml(self, link):
    "Build the <a> tag attributes and render the link contents."
    linktype = link.type or link.__class__.__name__
    parts = ['a class="' + linktype + '"']
    if link.anchor:
      parts.append('name="' + link.anchor + '"')
    if link.destination:
      # derive link.url from the destination before emitting it
      link.computedestination()
    if link.url:
      parts.append('href="' + link.url + '"')
    if link.target:
      parts.append('target="' + link.target + '"')
    if link.title:
      parts.append('title="' + link.title + '"')
    return TaggedOutput().settag(' '.join(parts)).gethtml(link)
class Postprocessor(object):
  "Postprocess a container keeping some context (the last and current ones)."
  # stage classes to instantiate for each new postprocessor
  stages = []
  def __init__(self):
    self.stages = StageDict(Postprocessor.stages, self)
    self.current = None
    self.last = None
  def postprocess(self, next):
    "Postprocess a container and its contents; returns the processed current element."
    self.postrecursive(self.current)
    result = self.postcurrent(next)
    self.last = self.current
    self.current = next
    return result
  def postrecursive(self, container):
    "Postprocess the container contents recursively with a fresh pipeline."
    if not hasattr(container, 'contents'):
      return
    if len(container.contents) == 0:
      return
    if hasattr(container, 'postprocess'):
      # containers can opt out of postprocessing with a falsy flag
      if not container.postprocess:
        return
    postprocessor = Postprocessor()
    contents = []
    for element in container.contents:
      post = postprocessor.postprocess(element)
      if post:
        contents.append(post)
    # two rounds to empty the pipeline
    for i in range(2):
      post = postprocessor.postprocess(None)
      if post:
        contents.append(post)
    container.contents = contents
  def postcurrent(self, next):
    "Postprocess the current element taking into account next and last."
    stage = self.stages.getstage(self.current)
    if not stage:
      return self.current
    return stage.postprocess(self.last, self.current, next)
class StageDict(object):
  "A mapping from container classes to their postprocessing stages."
  def __init__(self, classes, postprocessor):
    "Instantiate one stage per class and index them by processed class."
    stages = self.instantiate(classes, postprocessor)
    self.stagedict = dict([(stage.processedclass, stage) for stage in stages])
  def instantiate(self, classes, postprocessor):
    "Create an instance of each stage class, wiring in the postprocessor."
    instances = [stageclass.__new__(stageclass) for stageclass in classes]
    for instance in instances:
      instance.__init__()
      instance.postprocessor = postprocessor
    return instances
  def getstage(self, element):
    "Return the stage for the element's class, or None if not registered."
    return self.stagedict.get(element.__class__, None)
class Label(Link):
  "A label to be referenced"
  # all labels seen so far, keyed by name
  names = dict()
  # the last numbered layout seen, used as a fallback for numbering
  lastlayout = None
  def __init__(self):
    Link.__init__(self)
    self.lastnumbered = None
  def process(self):
    "Process a label container."
    key = self.getparameter('name')
    self.create(' ', key)
    self.lastnumbered = Label.lastlayout
  def create(self, text, key, type = 'Label'):
    "Create the label for a given key; fluent."
    self.key = key
    self.complete(text, anchor = key, type = type)
    Label.names[key] = self
    if key in Reference.references:
      # resolve any forward references that were waiting on this label
      for reference in Reference.references[key]:
        reference.destination = self
    return self
  def findpartkey(self):
    "Get the part key for the latest numbered container seen, or ''."
    numbered = self.numbered(self)
    if numbered and numbered.partkey:
      return numbered.partkey
    return ''
  def numbered(self, container):
    "Walk up the parents to find the numbered container for the label."
    if container.partkey:
      return container
    if not container.parent:
      # reached the root: fall back to the last numbered layout, if any
      if self.lastnumbered:
        return self.lastnumbered
      return None
    return self.numbered(container.parent)
  def __unicode__(self):
    "Return a printable representation."
    if not hasattr(self, 'key'):
      return 'Unnamed label'
    return 'Label ' + self.key
class Reference(Link):
  "A reference to a label."
  # all references seen so far, keyed by label name
  references = dict()
  key = 'none'
  def process(self):
    "Read the reference and set the arrow."
    self.key = self.getparameter('reference')
    if self.key in Label.names:
      # label already seen: point up to it
      self.direction = u'↑'
      label = Label.names[self.key]
    else:
      # label not seen yet: point down and create a placeholder label
      self.direction = u'↓'
      label = Label().complete(' ', self.key, 'preref')
    self.destination = label
    self.formatcontents()
    if not self.key in Reference.references:
      Reference.references[self.key] = []
    Reference.references[self.key].append(self)
  def formatcontents(self):
    "Format the reference contents according to the LatexCommand template."
    formatkey = self.getparameter('LatexCommand')
    if not formatkey:
      formatkey = 'ref'
    self.formatted = u'↕'
    if formatkey in StyleConfig.referenceformats:
      self.formatted = StyleConfig.referenceformats[formatkey]
    else:
      Trace.error('Unknown reference format ' + formatkey)
    # substitute the template placeholders one by one
    self.replace(u'↕', self.direction)
    self.replace('#', '1')
    self.replace('on-page', Translator.translate('on-page'))
    partkey = self.destination.findpartkey()
    # only if partkey and partkey.number are not null, send partkey.number
    self.replace('@', partkey and partkey.number)
    self.replace(u'¶', partkey and partkey.tocentry)
    if not '$' in self.formatted or not partkey or not partkey.titlecontents:
      # no title placeholder, or no title available: emit as a constant
      if '$' in self.formatted:
        Trace.error('No title in ' + unicode(partkey))
      self.contents = [Constant(self.formatted)]
      return
    pieces = self.formatted.split('$')
    self.contents = [Constant(pieces[0])]
    for piece in pieces[1:]:
      self.contents += partkey.titlecontents
      self.contents.append(Constant(piece))
  def replace(self, key, value):
    "Replace a key in the format template with a value (empty when falsy)."
    if not key in self.formatted:
      return
    if not value:
      value = ''
    self.formatted = self.formatted.replace(key, value)
  def __unicode__(self):
    "Return a printable representation."
    return 'Reference ' + self.key
class FormulaCommand(FormulaBit):
  "A LaTeX command inside a formula."
  # registered command types; filled in at module level after the definitions
  types = []
  start = FormulaConfig.starts['command']
  commandmap = None
  def detect(self, pos):
    "Detect a command start at the current position."
    return pos.checkfor(FormulaCommand.start)
  def parsebit(self, pos):
    "Parse the command: try the command maps, then the \\up* Greek forms."
    command = self.extractcommand(pos)
    bit = self.parsewithcommand(command, pos)
    if bit:
      return bit
    if command.startswith('\\up') or command.startswith('\\Up'):
      upgreek = self.parseupgreek(command, pos)
      if upgreek:
        return upgreek
    if not self.factory.defining:
      Trace.error('Unknown command ' + command)
    self.output = TaggedOutput().settag('span class="unknown"')
    self.add(FormulaConstant(command))
    return None
  def parsewithcommand(self, command, pos):
    "Find the command type holding the command, and parse with it."
    for type in FormulaCommand.types:
      if command in type.commandmap:
        return self.parsecommandtype(command, type, pos)
    return None
  def parsecommandtype(self, command, type, pos):
    "Parse a given command type; prefer the bit it returns, else the bit itself."
    bit = self.factory.create(type)
    bit.setcommand(command)
    returned = bit.parsebit(pos)
    if returned:
      return returned
    return bit
  def extractcommand(self, pos):
    "Extract the command from the current position."
    if not pos.checkskip(FormulaCommand.start):
      pos.error('Missing command start ' + FormulaCommand.start)
      return
    if pos.finished():
      return self.emptycommand(pos)
    if pos.current().isalpha():
      # alpha command: the start marker plus a run of letters
      command = FormulaCommand.start + pos.globalpha()
      # skip mark of short command
      pos.checkskip('*')
      return command
    # symbol command: the start marker plus a single character
    return FormulaCommand.start + pos.skipcurrent()
  def emptycommand(self, pos):
    """Check for an empty command: look for command disguised as ending.
    Special case against '{ \{ \} }' situation."""
    command = ''
    if not pos.isout():
      ending = pos.nextending()
      if ending and pos.checkskip(ending):
        command = ending
    return FormulaCommand.start + command
  def parseupgreek(self, command, pos):
    "Parse an upright Greek letter command: \\upgamma or \\Upgamma."
    if len(command) < 4:
      return None
    if command.startswith('\\up'):
      upcommand = '\\' + command[3:]
    elif command.startswith('\\Up'):
      # bugfix: this branch used to test pos.checkskip('\\Up'), which looked
      # at (and consumed from) the upcoming input instead of the already
      # extracted command string, so \\Up... commands never matched here
      upcommand = '\\' + command[3:4].upper() + command[4:]
    else:
      Trace.error('Impossible upgreek command: ' + command)
      return
    upgreek = self.parsewithcommand(upcommand, pos)
    if upgreek:
      upgreek.type = 'font'
    return upgreek
class CommandBit(FormulaCommand):
  "A formula bit that includes a command"
  def setcommand(self, command):
    "Set the command in the bit and look up its translation."
    self.command = command
    if self.commandmap:
      self.original += command
      self.translated = self.commandmap[self.command]
  def parseparameter(self, pos):
    "Parse a parameter at the current position; returns it or None."
    self.factory.clearskipped(pos)
    if pos.finished():
      return None
    parameter = self.factory.parseany(pos)
    self.add(parameter)
    return parameter
  def parsesquare(self, pos):
    "Parse an optional square bracket; returns it or None."
    self.factory.clearskipped(pos)
    if not self.factory.detecttype(SquareBracket, pos):
      return None
    bracket = self.factory.parsetype(SquareBracket, pos)
    self.add(bracket)
    return bracket
  def parseliteral(self, pos):
    "Parse a literal bracket; returns the literal string, not a bit."
    self.factory.clearskipped(pos)
    if not self.factory.detecttype(Bracket, pos):
      # no bracket: accept a bare value, otherwise report an error
      if not pos.isvalue():
        Trace.error('No literal parameter found at: ' + pos.identifier())
        return None
      return pos.globvalue()
    bracket = Bracket().setfactory(self.factory)
    self.add(bracket.parseliteral(pos))
    return bracket.literal
  def parsesquareliteral(self, pos):
    "Parse a square bracket literally; returns the literal string or None."
    self.factory.clearskipped(pos)
    if not self.factory.detecttype(SquareBracket, pos):
      return None
    bracket = SquareBracket().setfactory(self.factory)
    self.add(bracket.parseliteral(pos))
    return bracket.literal
  def parsetext(self, pos):
    "Parse a text parameter; returns the bracket bit or None."
    self.factory.clearskipped(pos)
    if not self.factory.detecttype(Bracket, pos):
      Trace.error('No text parameter for ' + self.command)
      return None
    bracket = Bracket().setfactory(self.factory).parsetext(pos)
    self.add(bracket)
    return bracket
class EmptyCommand(CommandBit):
  "A command without parameters."
  commandmap = FormulaConfig.commands
  def parsebit(self, pos):
    "Nothing to parse; just emit the translated symbol."
    self.contents = [FormulaConstant(self.translated)]
class SpacedCommand(CommandBit):
  "An empty command which should have math spacing in formulas."
  commandmap = FormulaConfig.spacedcommands
  def parsebit(self, pos):
    "Place as contents the command translated and spaced."
    # NOTE(review): the literal spaces around the symbol may be special
    # Unicode space characters — preserve them byte-for-byte
    self.contents = [FormulaConstant(u' ' + self.translated + u' ')]
class AlphaCommand(EmptyCommand):
  "A command without parameters whose result is alphabetical."
  commandmap = FormulaConfig.alphacommands
  def parsebit(self, pos):
    "Parse as an empty command, then mark the type as alpha."
    EmptyCommand.parsebit(self, pos)
    self.type = 'alpha'
class OneParamFunction(CommandBit):
  "A function taking exactly one parameter."
  commandmap = FormulaConfig.onefunctions
  simplified = False
  def parsebit(self, pos):
    "Tag the output, read the single parameter, then try to simplify."
    self.output = TaggedOutput().settag(self.translated)
    self.parseparameter(pos)
    self.simplifyifpossible()
  def simplifyifpossible(self):
    "Collapse the whole function to a single character when configured."
    if self.original not in self.commandmap:
      return
    self.output = FixedOutput()
    self.html = [self.commandmap[self.original]]
    self.simplified = True
class SymbolFunction(CommandBit):
  "A function represented by a symbol (like _ or ^)."
  commandmap = FormulaConfig.symbolfunctions
  def detect(self, pos):
    "Check whether the current character is a known symbol."
    return pos.current() in SymbolFunction.commandmap
  def parsebit(self, pos):
    "Consume the symbol and parse its single parameter."
    self.setcommand(pos.current())
    pos.skip(self.command)
    self.output = TaggedOutput().settag(self.translated)
    self.parseparameter(pos)
class TextFunction(CommandBit):
  "A function whose parameters are read as plain text."
  commandmap = FormulaConfig.textfunctions
  def parsebit(self, pos):
    "Tag the output and read the text parameter."
    self.output = TaggedOutput().settag(self.translated)
    self.parsetext(pos)
  def process(self):
    "Mark the result as a font change."
    self.type = 'font'
class LabelFunction(CommandBit):
  "A function that acts as an equation label."
  commandmap = FormulaConfig.labelfunctions
  def parsebit(self, pos):
    "Read the label key as a literal parameter."
    self.key = self.parseliteral(pos)
  def process(self):
    "Create the anchor holding the label contents."
    self.type = 'font'
    self.label = Label().create(' ', self.key, type = 'eqnumber')
    self.contents = [self.label]
    # register the label so later references can find it
    Label.names[self.key] = self.label
class FontFunction(OneParamFunction):
  "A one-parameter function that changes the font."
  commandmap = FormulaConfig.fontfunctions
  def process(self):
    "Mark as a font change and simplify to a single character if possible."
    self.type = 'font'
    self.simplifyifpossible()
# Register the command parsers with the formula machinery.
FormulaFactory.types += [FormulaCommand, SymbolFunction]
FormulaCommand.types = [
    AlphaCommand, EmptyCommand, OneParamFunction, FontFunction, LabelFunction,
    TextFunction, SpacedCommand,
    ]
class BigSymbol(object):
  "A generator for big symbols."
  symbols = FormulaConfig.bigsymbols
  def __init__(self, symbol):
    "Remember the symbol to generate."
    self.symbol = symbol
  def getpieces(self):
    "Return all pieces of the big symbol, or just the symbol itself."
    if self.symbol not in self.symbols or self.smalllimit():
      return [self.symbol]
    return self.symbols[self.symbol]
  def smalllimit(self):
    "Decide whether a small, one-line symbol is enough."
    if not DocumentParameters.displaymode:
      return True
    if len(self.symbols[self.symbol]) == 1:
      return True
    return Options.simplemath
class BigBracket(BigSymbol):
  "A big bracket generator."
  def __init__(self, size, bracket, alignment='l'):
    "Set the size and symbol for the bracket."
    self.size = size
    self.original = bracket
    self.alignment = alignment
    self.pieces = None
    if bracket in FormulaConfig.bigbrackets:
      self.pieces = FormulaConfig.bigbrackets[bracket]
  def getpiece(self, index):
    "Return the nth piece for the bracket."
    # dispatch on the number of configured pieces: getpiece1/getpiece3/getpiece4
    function = getattr(self, 'getpiece' + unicode(len(self.pieces)))
    return function(index)
  def getpiece1(self, index):
    "Return the only piece for a single-piece bracket."
    return self.pieces[0]
  def getpiece3(self, index):
    "Get the nth piece for a 3-piece bracket: parenthesis or square bracket."
    if index == 0:
      return self.pieces[0]
    if index == self.size - 1:
      return self.pieces[-1]
    return self.pieces[1]
  def getpiece4(self, index):
    "Get the nth piece for a 4-piece bracket: curly bracket."
    if index == 0:
      return self.pieces[0]
    if index == self.size - 1:
      return self.pieces[3]
    if index == (self.size - 1)/2:
      # the middle hook of the curly bracket
      return self.pieces[2]
    return self.pieces[1]
  def getcell(self, index):
    "Get the bracket piece as an array cell."
    piece = self.getpiece(index)
    span = 'span class="bracket align-' + self.alignment + '"'
    return TaggedBit().constant(piece, span)
  def getcontents(self):
    "Get the bracket as an array of rows, or as a single bracket."
    if self.size == 1 or not self.pieces:
      return self.getsinglebracket()
    rows = []
    for index in range(self.size):
      cell = self.getcell(index)
      rows.append(TaggedBit().complete([cell], 'span class="arrayrow"'))
    return [TaggedBit().complete(rows, 'span class="array"')]
  def getsinglebracket(self):
    "Return the bracket as a single sign ('.' means an invisible bracket)."
    if self.original == '.':
      return [TaggedBit().constant('', 'span class="emptydot"')]
    return [TaggedBit().constant(self.original, 'span class="symbol"')]
class FormulaEquation(CommandBit):
  "A simple numbered equation."
  piece = 'equation'
  def parsebit(self, pos):
    "Parse the whole formula as the contents."
    self.output = ContentsOutput()
    self.add(self.factory.parsetype(WholeFormula, pos))
class FormulaCell(FormulaCommand):
  "An array cell inside a row."
  def setalignment(self, alignment):
    "Store the alignment and tag the cell with it; fluent."
    self.alignment = alignment
    tag = 'span class="arraycell align-' + alignment + '"'
    self.output = TaggedOutput().settag(tag, True)
    return self
  def parsebit(self, pos):
    "Parse the cell contents as a whole formula."
    self.factory.clearskipped(pos)
    if pos.finished():
      return
    self.add(self.factory.parsetype(WholeFormula, pos))
class FormulaRow(FormulaCommand):
  "An array row inside an array"
  cellseparator = FormulaConfig.array['cellseparator']
  def setalignments(self, alignments):
    "Remember the cell alignments for this row; fluent."
    self.alignments = alignments
    self.output = TaggedOutput().settag('span class="arrayrow"', True)
    return self
  def parsebit(self, pos):
    "Parse a whole row"
    index = 0
    # the separator is an optional ending: the last cell has none
    pos.pushending(self.cellseparator, optional=True)
    while not pos.finished():
      cell = self.createcell(index)
      cell.parsebit(pos)
      self.add(cell)
      index += 1
      pos.checkskip(self.cellseparator)
    if len(self.contents) == 0:
      # empty row: suppress the output entirely
      self.output = EmptyOutput()
  def createcell(self, index):
    "Create the cell that corresponds to the given index."
    # alignments repeat cyclically when there are more cells than alignments
    alignment = self.alignments[index % len(self.alignments)]
    return self.factory.create(FormulaCell).setalignment(alignment)
class MultiRowFormula(CommandBit):
  "A formula with multiple rows."
  def parserows(self, pos):
    "Parse all rows, finish when no more row ends"
    self.rows = []
    first = True
    for row in self.iteraterows(pos):
      if first:
        first = False
      else:
        # intersparse empty rows
        self.addempty()
      # the row must be parsed here (not in the generator) so the
      # pushending/popending pairs stay balanced
      row.parsebit(pos)
      self.addrow(row)
    self.size = len(self.rows)
  def iteraterows(self, pos):
    "Iterate over all rows, end when no more row ends"
    rowseparator = FormulaConfig.array['rowseparator']
    while True:
      pos.pushending(rowseparator, True)
      row = self.factory.create(FormulaRow)
      yield row.setalignments(self.alignments)
      # after the caller has parsed the row, check for another separator
      if pos.checkfor(rowseparator):
        self.original += pos.popending(rowseparator)
      else:
        return
  def addempty(self):
    "Add an empty row mirroring the cell count of the previous row."
    row = self.factory.create(FormulaRow).setalignments(self.alignments)
    for index, originalcell in enumerate(self.rows[-1].contents):
      cell = row.createcell(index)
      cell.add(FormulaConstant(u' '))
      row.add(cell)
    self.addrow(row)
  def addrow(self, row):
    "Add a row to the contents and to the list of rows."
    self.rows.append(row)
    self.add(row)
class FormulaArray(MultiRowFormula):
  "An array within a formula."
  piece = 'array'
  def parsebit(self, pos):
    "Parse the alignments, then all rows."
    self.output = TaggedOutput().settag('span class="array"', False)
    self.parsealignments(pos)
    self.parserows(pos)
  def parsealignments(self, pos):
    "Read the vertical alignment ([...]) and the horizontal alignments ({...})."
    self.valign = 'c'
    vertical = self.parsesquareliteral(pos)
    if vertical:
      self.valign = vertical
    horizontal = self.parseliteral(pos)
    self.alignments = [letter for letter in horizontal]
class FormulaMatrix(MultiRowFormula):
  "A matrix: an array where every cell is centered."
  piece = 'matrix'
  def parsebit(self, pos):
    "Parse the matrix with all alignments set to 'c'."
    self.output = TaggedOutput().settag('span class="array"', False)
    self.valign = 'c'
    self.alignments = ['c']
    self.parserows(pos)
class FormulaCases(MultiRowFormula):
  "A cases statement"
  piece = 'cases'
  def parsebit(self, pos):
    "Parse the cases, then prepend the big opening brace."
    self.output = ContentsOutput()
    self.alignments = ['l', 'l']
    self.parserows(pos)
    # retag every cell and pad it so the columns line up
    for row in self.contents:
      for cell in row.contents:
        cell.output.settag('span class="case align-l"', True)
        cell.contents.append(FormulaConstant(u' '))
    array = TaggedBit().complete(self.contents, 'span class="bracketcases"', True)
    # a left curly brace sized to the number of rows
    brace = BigBracket(len(self.contents), '{', 'l')
    self.contents = brace.getcontents() + [array]
class EquationEnvironment(MultiRowFormula):
  "A \\begin{}...\\end equation environment with rows and cells."
  def parsebit(self, pos):
    "Look up the environment alignments, then parse all rows."
    self.output = TaggedOutput().settag('span class="environment"', False)
    key = self.piece.replace('*', '')
    if key not in FormulaConfig.environments:
      Trace.error('Unknown equation environment ' + self.piece)
      self.alignments = ['l']
    else:
      self.alignments = FormulaConfig.environments[key]
    self.parserows(pos)
class BeginCommand(CommandBit):
  "A \\begin{}...\\end command and what it entails (array, cases, aligned)"
  commandmap = {FormulaConfig.array['begin']:''}
  # the known specialized environments; anything else becomes a generic one
  types = [FormulaEquation, FormulaArray, FormulaCases, FormulaMatrix]
  def parsebit(self, pos):
    "Parse the begin command"
    command = self.parseliteral(pos)
    bit = self.findbit(command)
    # parse until the matching \\end{command}
    ending = FormulaConfig.array['end'] + '{' + command + '}'
    pos.pushending(ending)
    bit.parsebit(pos)
    self.add(bit)
    self.original += pos.popending(ending)
    self.size = bit.size
  def findbit(self, piece):
    "Find the command bit corresponding to the \\begin{piece}"
    for type in BeginCommand.types:
      # starred variants map to the same environment
      if piece.replace('*', '') == type.piece:
        return self.factory.create(type)
    bit = self.factory.create(EquationEnvironment)
    bit.piece = piece
    return bit
# Register the \begin command parser with the formula machinery.
FormulaCommand.types += [BeginCommand]
class CombiningFunction(OneParamFunction):
  "A function that appends a combining character to its single parameter."
  commandmap = FormulaConfig.combiningfunctions
  def parsebit(self, pos):
    "Parse a combining function."
    self.type = 'alpha'
    combining = self.translated
    parameter = self.parsesingleparameter(pos)
    if not parameter:
      Trace.error('Empty parameter for combining function ' + self.command)
    elif len(parameter.extracttext()) != 1:
      # combining characters only attach to a single preceding character
      Trace.error('Applying combining function ' + self.command + ' to invalid string "' + parameter.extracttext() + '"')
    self.contents.append(Constant(combining))
  def parsesingleparameter(self, pos):
    "Parse a parameter, or a single letter."
    self.factory.clearskipped(pos)
    if pos.finished():
      Trace.error('Error while parsing single parameter at ' + pos.identifier())
      return None
    if self.factory.detecttype(Bracket, pos) \
        or self.factory.detecttype(FormulaCommand, pos):
      return self.parseparameter(pos)
    # bare character: wrap it as a constant
    letter = FormulaConstant(pos.skipcurrent())
    self.add(letter)
    return letter
class DecoratingFunction(OneParamFunction):
  "A function that decorates some bit of text with a symbol above it."
  commandmap = FormulaConfig.decoratingfunctions
  def parsebit(self, pos):
    "Parse a decorating function"
    self.type = 'alpha'
    symbol = self.translated
    self.symbol = TaggedBit().constant(symbol, 'span class="symbolover"')
    self.parameter = self.parseparameter(pos)
    self.output = TaggedOutput().settag('span class="withsymbol"')
    # the symbol goes first so it renders above the parameter
    self.contents.insert(0, self.symbol)
    self.parameter.output = TaggedOutput().settag('span class="undersymbol"')
    self.simplifyifpossible()
class LimitCommand(EmptyCommand):
  "A command which accepts limits above and below, in display mode."
  commandmap = FormulaConfig.limitcommands
  def parsebit(self, pos):
    "Output every piece of the (possibly big) symbol as a limit span."
    self.output = TaggedOutput().settag('span class="limits"')
    for piece in BigSymbol(self.translated).getpieces():
      self.contents.append(TaggedBit().constant(piece, 'span class="limit"'))
class LimitPreviousCommand(LimitCommand):
  "A command to limit the previous command."
  commandmap = None
  def parsebit(self, pos):
    "Consume nothing; just tag the output as limits."
    self.output = TaggedOutput().settag('span class="limits"')
    self.factory.clearskipped(pos)
  def __unicode__(self):
    "Return a printable representation."
    return 'Limit previous command'
class LimitsProcessor(MathsProcessor):
  "A processor for limits inside an element."
  def process(self, contents, index):
    "Process the limits for an element."
    if Options.simplemath:
      return
    if self.checklimits(contents, index):
      self.modifylimits(contents, index)
    # two consecutive scripts: align them vertically
    if self.checkscript(contents, index) and self.checkscript(contents, index + 1):
      self.modifyscripts(contents, index)
  def checklimits(self, contents, index):
    "Check if the current position has a limits command."
    if not DocumentParameters.displaymode:
      return False
    if self.checkcommand(contents, index + 1, LimitPreviousCommand):
      self.limitsahead(contents, index)
      return False
    if not isinstance(contents[index], LimitCommand):
      return False
    return self.checkscript(contents, index + 1)
  def limitsahead(self, contents, index):
    "Limit the current element based on the next."
    contents[index + 1].add(contents[index].clone())
    contents[index].output = EmptyOutput()
  def modifylimits(self, contents, index):
    "Modify a limits commands so that the limits appear above and below."
    limited = contents[index]
    # getlimit removes the script from contents, shifting later indices
    subscript = self.getlimit(contents, index + 1)
    limited.contents.append(subscript)
    if self.checkscript(contents, index + 1):
      superscript = self.getlimit(contents, index  + 1)
    else:
      superscript = TaggedBit().constant(u' ', 'sup class="limit"')
    limited.contents.insert(0, superscript)
  def getlimit(self, contents, index):
    "Get the limit for a limits command."
    limit = self.getscript(contents, index)
    limit.output.tag = limit.output.tag.replace('script', 'limit')
    return limit
  def modifyscripts(self, contents, index):
    "Modify the super- and subscript to appear vertically aligned."
    subscript = self.getscript(contents, index)
    # subscript removed so instead of index + 1 we get index again
    superscript = self.getscript(contents, index)
    scripts = TaggedBit().complete([superscript, subscript], 'span class="scripts"')
    contents.insert(index, scripts)
  def checkscript(self, contents, index):
    "Check if the current element is a sub- or superscript."
    return self.checkcommand(contents, index, SymbolFunction)
  def checkcommand(self, contents, index, type):
    "Check for the given type as the current element."
    if len(contents) <= index:
      return False
    return isinstance(contents[index], type)
  def getscript(self, contents, index):
    "Remove and return the sub- or superscript at the given index."
    bit = contents[index]
    bit.output.tag += ' class="script"'
    del contents[index]
    return bit
class BracketCommand(OneParamFunction):
  "A command which defines a bracket."
  commandmap = FormulaConfig.bracketcommands
  def parsebit(self, pos):
    "Parse the bracket as a one-parameter function."
    OneParamFunction.parsebit(self, pos)
  def create(self, direction, character):
    "Configure the bracket for the given direction and character; fluent."
    self.original = character
    self.command = '\\' + direction
    self.contents = [FormulaConstant(character)]
    return self
class BracketProcessor(MathsProcessor):
  "A processor for bracket commands."
  def process(self, contents, index):
    "Convert the bracket using Unicode pieces, if possible."
    if Options.simplemath:
      return
    if self.checkleft(contents, index):
      return self.processleft(contents, index)
  def processleft(self, contents, index):
    "Process a left bracket: size it and its matching right bracket."
    rightindex = self.findright(contents, index + 1)
    if not rightindex:
      return
    size = self.findmax(contents, index, rightindex)
    self.resize(contents[index], size)
    self.resize(contents[rightindex], size)
  def checkleft(self, contents, index):
    "Check if the command at the given index is left."
    return self.checkdirection(contents[index], '\\left')
  def checkright(self, contents, index):
    "Check if the command at the given index is right."
    return self.checkdirection(contents[index], '\\right')
  def checkdirection(self, bit, command):
    "Check if the given bit is the desired bracket command."
    if not isinstance(bit, BracketCommand):
      return False
    return bit.command == command
  def findright(self, contents, index):
    "Find the matching right bracket starting at the given index, or None."
    # depth tracks nested left/right pairs
    depth = 1
    while index < len(contents):
      if self.checkleft(contents, index):
        depth += 1
      if self.checkright(contents, index):
        depth -= 1
      if depth == 0:
        return index
      index += 1
    return None
  def findmax(self, contents, leftindex, rightindex):
    "Find the max size of the contents between the two given indices."
    sliced = contents[leftindex:rightindex]
    return max([element.size for element in sliced])
  def resize(self, command, size):
    "Resize a bracket command to the given size."
    character = command.extracttext()
    alignment = command.command.replace('\\', '')
    bracket = BigBracket(size, character, alignment)
    command.output = ContentsOutput()
    command.contents = bracket.getcontents()
# Register the remaining command types and the maths processors.
FormulaCommand.types += [
    DecoratingFunction, CombiningFunction, LimitCommand, BracketCommand,
    ]
FormulaProcessor.processors += [
    LimitsProcessor(), BracketProcessor(),
    ]
class ParameterDefinition(object):
  """The definition of a parameter in a hybrid function.
  [] parameters are optional, {} parameters are mandatory.
  Each parameter has a one-character name, like {$1} or {$p}.
  A parameter that ends in ! like {$p!} is a literal.
  Example: [$1]{$p!} reads an optional parameter $1 and a literal mandatory parameter p."""
  parambrackets = [('[', ']'), ('{', '}')]
  def __init__(self):
    self.name = None
    self.literal = False
    self.optional = False
    self.value = None
    self.literalvalue = None
  def parse(self, pos):
    "Parse a parameter definition: [$0], {$x}, {$1!}...; None on error."
    for (opening, closing) in ParameterDefinition.parambrackets:
      if pos.checkskip(opening):
        if opening == '[':
          self.optional = True
        if not pos.checkskip('$'):
          Trace.error('Wrong parameter name, did you mean $' + pos.current() + '?')
          return None
        self.name = pos.skipcurrent()
        if pos.checkskip('!'):
          self.literal = True
        if not pos.checkskip(closing):
          Trace.error('Wrong parameter closing ' + pos.skipcurrent())
          return None
        return self
    Trace.error('Wrong character in parameter template: ' + pos.skipcurrent())
    return None
  def read(self, pos, function):
    "Read the parameter itself using the definition."
    if self.literal:
      if self.optional:
        self.literalvalue = function.parsesquareliteral(pos)
      else:
        self.literalvalue = function.parseliteral(pos)
      if self.literalvalue:
        # wrap the literal so it can be used as regular contents too
        self.value = FormulaConstant(self.literalvalue)
    elif self.optional:
      self.value = function.parsesquare(pos)
    else:
      self.value = function.parseparameter(pos)
  def __unicode__(self):
    "Return a printable representation."
    result = 'param ' + self.name
    if self.value:
      result += ': ' + unicode(self.value)
    else:
      result += ' (empty)'
    return result
class ParameterFunction(CommandBit):
  """A function with a variable number of parameters defined in a template.
  The parameters are defined as a parameter definition."""
  def readparams(self, readtemplate, pos):
    "Read all parameters described by the template."
    self.params = dict()
    for paramdef in self.paramdefs(readtemplate):
      paramdef.read(pos, self)
      self.params['$' + paramdef.name] = paramdef
  def paramdefs(self, readtemplate):
    "Yield each parameter definition parsed from the template."
    pos = TextPosition(readtemplate)
    while not pos.finished():
      paramdef = ParameterDefinition().parse(pos)
      if paramdef:
        yield paramdef
  def getparam(self, name):
    "Return the parameter with the given name, or None."
    return self.params.get(name, None)
  def getvalue(self, name):
    "Return the parsed value of the given parameter."
    return self.getparam(name).value
  def getliteralvalue(self, name):
    "Return the literal value of the parameter, or None if absent."
    param = self.getparam(name)
    if param and param.literalvalue:
      return param.literalvalue
    return None
class HybridFunction(ParameterFunction):
    """
    A parameter function where the output is also defined using a template.
    The template can use a number of functions; each function has an associated
    tag.
    Example: [f0{$1},span class="fbox"] defines a function f0 which corresponds
    to a span of class fbox, yielding <span class="fbox">$1</span>.
    Literal parameters can be used in tags definitions:
    [f0{$1},span style="color: $p;"]
    yields <span style="color: $p;">$1</span>, where $p is a literal parameter.
    Sizes can be specified in hybridsizes, e.g. adding parameter sizes. By
    default the resulting size is the max of all arguments. Sizes are used
    to generate the right parameters.
    A function followed by a single / is output as a self-closing XHTML tag:
    [f0/,hr]
    will generate <hr/>.
    """

    commandmap = FormulaConfig.hybridfunctions

    def parsebit(self, pos):
        "Parse a function with [] and {} parameters"
        # translated[0] is the read template, translated[1] the write
        # template; translated[2:] holds the tags for f0, f1, ...
        readtemplate = self.translated[0]
        writetemplate = self.translated[1]
        self.readparams(readtemplate, pos)
        self.contents = self.writeparams(writetemplate)
        self.computehybridsize()

    def writeparams(self, writetemplate):
        "Write all params according to the template"
        return self.writepos(TextPosition(writetemplate))

    def writepos(self, pos):
        "Write all params as read in the parse position."
        result = []
        while not pos.finished():
            if pos.checkskip('$'):
                # parameter reference like $1 or $x
                param = self.writeparam(pos)
                if param:
                    result.append(param)
            elif pos.checkskip('f'):
                # function invocation like f0{...} or self-closing f0/
                function = self.writefunction(pos)
                if function:
                    function.type = None
                    result.append(function)
            elif pos.checkskip('('):
                result.append(self.writebracket('left', '('))
            elif pos.checkskip(')'):
                result.append(self.writebracket('right', ')'))
            else:
                # any other character is copied through verbatim
                result.append(FormulaConstant(pos.skipcurrent()))
        return result

    def writeparam(self, pos):
        "Write a single param of the form $0, $x..."
        name = '$' + pos.skipcurrent()
        if not name in self.params:
            Trace.error('Unknown parameter ' + name)
            return None
        if not self.params[name]:
            return None
        # an optional trailing ".type" sets the type of the parameter value
        if pos.checkskip('.'):
            self.params[name].value.type = pos.globalpha()
        return self.params[name].value

    def writefunction(self, pos):
        "Write a single function f0,...,fn."
        tag = self.readtag(pos)
        if not tag:
            return None
        if pos.checkskip('/'):
            # self-closing XHTML tag, such as <hr/>
            return TaggedBit().selfcomplete(tag)
        if not pos.checkskip('{'):
            Trace.error('Function should be defined in {}')
            return None
        # parse the function body up to the matching closing brace
        pos.pushending('}')
        contents = self.writepos(pos)
        pos.popending()
        if len(contents) == 0:
            return None
        return TaggedBit().complete(contents, tag)

    def readtag(self, pos):
        "Get the tag corresponding to the given index. Does parameter substitution."
        if not pos.current().isdigit():
            Trace.error('Function should be f0,...,f9: f' + pos.current())
            return None
        index = int(pos.skipcurrent())
        # tags live at translated[2 + index]; bounds-check before indexing
        if 2 + index > len(self.translated):
            Trace.error('Function f' + unicode(index) + ' is not defined')
            return None
        tag = self.translated[2 + index]
        if not '$' in tag:
            return tag
        # substitute literal parameters ($p) into the tag definition
        for variable in self.params:
            if variable in tag:
                param = self.params[variable]
                if not param.literal:
                    Trace.error('Parameters in tag ' + tag + ' should be literal: {' + variable + '!}')
                    continue
                if param.literalvalue:
                    value = param.literalvalue
                else:
                    value = ''
                tag = tag.replace(variable, value)
        return tag

    def writebracket(self, direction, character):
        "Return a new bracket looking at the given direction."
        return self.factory.create(BracketCommand).create(direction, character)

    def computehybridsize(self):
        "Compute the size of the hybrid function."
        # no configured size expression: fall back to the generic computation
        if not self.command in HybridSize.configsizes:
            self.computesize()
            return
        self.size = HybridSize().getsize(self)
        # set the size in all elements at first level
        for element in self.contents:
            element.size = self.size
class HybridSize(object):
    "The size associated with a hybrid function."

    configsizes = FormulaConfig.hybridsizes

    def getsize(self, function):
        "Read the size for a function and parse it."
        sizestring = self.configsizes[function.command]
        # replace each $-parameter in the size expression with its computed size
        for name in function.params:
            if name in sizestring:
                size = function.params[name].value.computesize()
                sizestring = sizestring.replace(name, unicode(size))
        if '$' in sizestring:
            Trace.error('Unconverted variable in hybrid size: ' + sizestring)
            return 1
        # NOTE(review): eval() of a config-supplied arithmetic expression;
        # acceptable only while FormulaConfig.hybridsizes is trusted, bundled
        # data -- never feed user input through this path.
        return eval(sizestring)
FormulaCommand.types += [HybridFunction]
class HeaderParser(Parser):
    "Parses the LyX header"

    def parse(self, reader):
        "Parse header parameters into a dictionary, return the preamble."
        contents = []
        self.parseending(reader, lambda: self.parseline(reader, contents))
        # skip last line
        reader.nextline()
        return contents

    def parseline(self, reader, contents):
        "Parse a single line as a parameter or as a start"
        line = reader.currentline()
        # dispatch on the line prefix: branch block, lstset block, preamble
        if line.startswith(HeaderConfig.parameters['branch']):
            self.parsebranch(reader)
            return
        elif line.startswith(HeaderConfig.parameters['lstset']):
            LstParser().parselstset(reader)
            return
        elif line.startswith(HeaderConfig.parameters['beginpreamble']):
            contents.append(self.factory.createcontainer(reader))
            return
        # no match
        self.parseparameter(reader)

    def parsebranch(self, reader):
        "Parse all branch definitions."
        # line format: "\branch <name>"; the name is the second token
        branch = reader.currentline().split()[1]
        reader.nextline()
        # recurse with a sub-parser that stops at the end-branch marker
        subparser = HeaderParser().complete(HeaderConfig.parameters['endbranch'])
        subparser.parse(reader)
        options = BranchOptions(branch)
        # subparser.parameters is presumably populated by the Parser base
        # class during parse() -- TODO confirm
        for key in subparser.parameters:
            options.set(key, subparser.parameters[key])
        Options.branches[branch] = options

    def complete(self, ending):
        "Complete the parser with the given ending."
        self.ending = ending
        return self
class PreambleParser(Parser):
    "A parser for the LyX preamble."

    # collected preamble lines; class-level, so shared across all instances
    preamble = []

    def parse(self, reader):
        "Parse the full preamble with all statements."
        self.ending = HeaderConfig.parameters['endpreamble']
        self.parseending(reader, lambda: self.parsepreambleline(reader))
        # nothing to return: lines accumulate in PreambleParser.preamble
        return []

    def parsepreambleline(self, reader):
        "Parse a single preamble line."
        PreambleParser.preamble.append(reader.currentline())
        reader.nextline()
class LstParser(object):
    "Parse global and local lstparams."

    # global listing parameters from the last \lstset; shared by all instances
    globalparams = dict()

    def parselstset(self, reader):
        "Parse a declaration of lstparams in lstset."
        paramtext = self.extractlstset(reader)
        # NOTE(review): extractlstset returns None when no closing brace is
        # found, which would make the membership test below raise TypeError
        # -- confirm whether that path can occur in practice.
        if not '{' in paramtext:
            Trace.error('Missing opening bracket in lstset: ' + paramtext)
            return
        # keep the text between the opening '{' and the trailing '}'
        lefttext = paramtext.split('{')[1]
        croppedtext = lefttext[:-1]
        # NOTE(review): croppedtext is a string, but parselstparams iterates
        # its argument element by element as if it were a list of 'key=value'
        # entries; iterating a string yields single characters. Verify the
        # intended argument (perhaps croppedtext.split(',')).
        LstParser.globalparams = self.parselstparams(croppedtext)

    def extractlstset(self, reader):
        "Extract the global lstset parameters."
        # accumulate lines until one ends with the closing brace
        paramtext = ''
        while not reader.finished():
            paramtext += reader.currentline()
            reader.nextline()
            if paramtext.endswith('}'):
                return paramtext
        Trace.error('Could not find end of \\lstset settings; aborting')

    def parsecontainer(self, container):
        "Parse some lstparams from a container."
        # start from the global params, then overlay the container's own
        container.lstparams = LstParser.globalparams.copy()
        paramlist = container.getparameterlist('lstparams')
        container.lstparams.update(self.parselstparams(paramlist))

    def parselstparams(self, paramlist):
        "Process a number of lstparams from a list."
        paramdict = dict()
        for param in paramlist:
            if not '=' in param:
                if len(param.strip()) > 0:
                    Trace.error('Invalid listing parameter ' + param)
            else:
                # split only on the first '=' so values may contain '='
                key, value = param.split('=', 1)
                paramdict[key] = value
        return paramdict
class MacroDefinition(CommandBit):
    "A function that defines a new command (a macro)."

    # all defined macros, keyed by command name; read by MacroFunction
    macros = dict()

    def parsebit(self, pos):
        "Parse the function that defines the macro."
        self.output = EmptyOutput()
        self.parameternumber = 0
        self.defaults = []
        # flag the factory so parameter bits (#1, #2...) stay symbolic while
        # the definition body is parsed
        self.factory.defining = True
        self.parseparameters(pos)
        self.factory.defining = False
        Trace.debug('New command ' + self.newcommand + ' (' +
                    unicode(self.parameternumber) + ' parameters)')
        # register the macro for later instantiation
        self.macros[self.newcommand] = self

    def parseparameters(self, pos):
        "Parse all optional parameters (number of parameters, default values)"
        "and the mandatory definition."
        self.newcommand = self.parsenewcommand(pos)
        # parse number of parameters
        literal = self.parsesquareliteral(pos)
        if literal:
            self.parameternumber = int(literal)
        # parse all default values
        bracket = self.parsesquare(pos)
        while bracket:
            self.defaults.append(bracket)
            bracket = self.parsesquare(pos)
        # parse mandatory definition
        self.definition = self.parseparameter(pos)

    def parsenewcommand(self, pos):
        "Parse the name of the new command."
        self.factory.clearskipped(pos)
        # the name may be given as a bracketed literal or a bare \command
        if self.factory.detecttype(Bracket, pos):
            return self.parseliteral(pos)
        if self.factory.detecttype(FormulaCommand, pos):
            return self.factory.create(FormulaCommand).extractcommand(pos)
        Trace.error('Unknown formula bit in defining function at ' + pos.identifier())
        return 'unknown'

    def instantiate(self):
        "Return an instance of the macro."
        # clone so each expansion gets an independent tree
        return self.definition.clone()
class MacroParameter(FormulaBit):
    "A parameter from a macro."

    def detect(self, pos):
        "Find a macro parameter: #n."
        return pos.checkfor('#')

    def parsebit(self, pos):
        "Parse the parameter: #n."
        if not pos.checkskip('#'):
            Trace.error('Missing parameter start #.')
            return
        # single-digit parameter index following '#'
        self.number = int(pos.skipcurrent())
        self.original = '#' + unicode(self.number)
        # placeholder output, replaced when the macro is actually expanded
        self.contents = [TaggedBit().constant('#' + unicode(self.number), 'span class="unknown"')]
class MacroFunction(CommandBit):
    "A function that was defined using a macro."

    # map of macro names to their definitions, filled in by MacroDefinition
    commandmap = MacroDefinition.macros

    def parsebit(self, pos):
        "Parse a number of input parameters."
        self.output = FilteredOutput()
        self.values = []
        macro = self.translated
        self.parseparameters(pos, macro)
        self.completemacro(macro)

    def parseparameters(self, pos, macro):
        "Parse as many parameters as are needed."
        self.parseoptional(pos, list(macro.defaults))
        self.parsemandatory(pos, macro.parameternumber - len(macro.defaults))
        if len(self.values) < macro.parameternumber:
            Trace.error('Missing parameters in macro ' + unicode(self))

    def parseoptional(self, pos, defaults):
        "Parse optional parameters, falling back to the macro defaults."
        optional = []
        while self.factory.detecttype(SquareBracket, pos):
            optional.append(self.parsesquare(pos))
            if len(optional) > len(defaults):
                break
        for value in optional:
            # NOTE(review): pop() takes defaults from the end while optionals
            # were read front-to-back -- confirm this pairing is intended
            default = defaults.pop()
            if len(value.contents) > 0:
                self.values.append(value)
            else:
                self.values.append(default)
        self.values += defaults

    def parsemandatory(self, pos, number):
        "Parse a number of mandatory parameters."
        for index in range(number):
            parameter = self.parsemacroparameter(pos, number - index)
            if not parameter:
                return
            self.values.append(parameter)

    def parsemacroparameter(self, pos, remaining):
        "Parse a macro parameter. Could be a bracket or a single letter."
        "If there are just two values remaining and there is a running number,"
        "parse as two separater numbers."
        self.factory.clearskipped(pos)
        if pos.finished():
            return None
        if self.factory.detecttype(FormulaNumber, pos):
            return self.parsenumbers(pos, remaining)
        return self.parseparameter(pos)

    def parsenumbers(self, pos, remaining):
        "Parse the remaining parameters as a running number."
        "For example, 12 would be {1}{2}."
        number = self.factory.parsetype(FormulaNumber, pos)
        # if the digit count does not match the remaining slots, treat the
        # whole number as a single parameter
        if not len(number.original) == remaining:
            return number
        # otherwise split the digits into one parameter each, appending them
        # directly to self.values and signalling completion with None
        for digit in number.original:
            value = self.factory.create(FormulaNumber)
            value.add(FormulaConstant(digit))
            value.type = number
            self.values.append(value)
        return None

    def completemacro(self, macro):
        "Complete the macro with the parameters read."
        self.contents = [macro.instantiate()]
        replaced = [False] * len(self.values)
        # splice each parsed value into the matching #n placeholder
        for parameter in self.searchall(MacroParameter):
            index = parameter.number - 1
            if index >= len(self.values):
                Trace.error('Macro parameter index out of bounds: ' + unicode(index))
                return
            replaced[index] = True
            parameter.contents = [self.values[index].clone()]
        # values with no placeholder in the tree are applied as output filters
        for index in range(len(self.values)):
            if not replaced[index]:
                self.addfilter(index, self.values[index])

    def addfilter(self, index, value):
        "Add a filter for the given parameter number and parameter value."
        original = '#' + unicode(index + 1)
        # Bug fix: render the value passed for *this* parameter. The previous
        # code assigned value = ''.join(self.values[0].gethtml()), shadowing
        # the parameter and substituting the first parameter's HTML for every
        # filtered slot.
        self.output.addfilter(original, ''.join(value.gethtml()))
class FormulaMacro(Formula):
    "A math macro defined in an inset."

    def __init__(self):
        # NOTE(review): does not call Formula.__init__; presumably the parser
        # and output are the only state needed here -- confirm
        self.parser = MacroParser()
        self.output = EmptyOutput()

    def __unicode__(self):
        "Return a printable representation."
        return 'Math macro'
FormulaFactory.types += [ MacroParameter ]
FormulaCommand.types += [
MacroFunction,
]
def math2html(formula):
    "Convert some TeX math to HTML."
    whole = FormulaFactory().parseformula(formula)
    FormulaProcessor().process(whole)
    whole.process()
    return ''.join(whole.gethtml())
def main():
    "Main function, called if invoked from the command line."
    args = sys.argv
    # parseoptions consumes recognized options (and presumably the program
    # name) from args in place, leaving only positional arguments -- TODO
    # confirm against Options.parseoptions
    Options().parseoptions(args)
    if len(args) != 1:
        Trace.error('Usage: math2html.py escaped_string')
        exit()
    result = math2html(args[0])
    Trace.message(result)
if __name__ == '__main__':
main()
| agpl-3.0 |
openstack/python-openstacksdk | openstack/tests/functional/cloud/test_aggregate.py | 2 | 2014 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_aggregate
----------------------------------
Functional tests for `shade` aggregate resource.
"""
from openstack.tests.functional import base
class TestAggregate(base.BaseFunctionalTest):

    def test_aggregates(self):
        """Exercise the aggregate lifecycle: create, update, metadata, delete."""
        name = self.getUniqueString()
        zone = self.getUniqueString()
        # make sure the aggregate is removed even if an assertion fails
        self.addCleanup(self.cleanup, name)

        created = self.operator_cloud.create_aggregate(name)
        listed_ids = [item['id']
                      for item in self.operator_cloud.list_aggregates()]
        self.assertIn(created['id'], listed_ids)

        updated = self.operator_cloud.update_aggregate(
            name,
            availability_zone=zone
        )
        self.assertEqual(zone, updated['availability_zone'])

        with_metadata = self.operator_cloud.set_aggregate_metadata(
            name,
            {'key': 'value'}
        )
        self.assertIn('key', with_metadata['metadata'])

        # setting a key to None removes it from the metadata
        without_metadata = self.operator_cloud.set_aggregate_metadata(
            name,
            {'key': None}
        )
        self.assertNotIn('key', without_metadata['metadata'])

        self.operator_cloud.delete_aggregate(name)

    def cleanup(self, aggregate_name):
        """Delete the aggregate if the test left it behind."""
        leftover = self.operator_cloud.get_aggregate(aggregate_name)
        if leftover:
            self.operator_cloud.delete_aggregate(aggregate_name)
| apache-2.0 |
phase-dev/phase | libmproxy/protocol/tcp.py | 1 | 2582 | from . import ProtocolHandler
import select, socket
from cStringIO import StringIO
class TCPHandler(ProtocolHandler):
    """
    TCPHandler acts as a generic TCP forwarder.

    Data will be .log()ed, but not stored any further.
    """
    def handle_messages(self):
        """Shuttle raw bytes between client and server until both sides close.

        Multiplexes the two read files with select(); whichever side has data
        is drained completely and the bytes are logged and forwarded to the
        other side. An empty read means that side closed, so writing to the
        peer is shut down; once both directions are closed the connection is
        marked closed.
        """
        self.c.establish_server_connection()
        conns = [self.c.client_conn.rfile, self.c.server_conn.rfile]
        while not self.c.close:
            r, _, _ = select.select(conns, [], [], 10)
            for rfile in r:
                # work out direction and forwarding target for this rfile
                if self.c.client_conn.rfile == rfile:
                    src, dst = self.c.client_conn, self.c.server_conn
                    direction = "-> tcp ->"
                    dst_str = "%s:%s" % self.c.server_conn.address()[:2]
                else:
                    dst, src = self.c.client_conn, self.c.server_conn
                    direction = "<- tcp <-"
                    dst_str = "client"
                data = StringIO()
                # Drain everything currently readable on this side.
                # (Fix: the original looped on "while range(4096)", which
                # builds a 4096-element list each iteration only to test its
                # constant truthiness -- it is equivalent to "while True".)
                while True:
                    # Do non-blocking select() to see if there is further data on in the buffer.
                    r, _, _ = select.select([rfile], [], [], 0)
                    if len(r):
                        d = rfile.read(1)
                        if d == "":  # connection closed
                            break
                        data.write(d)
                        # OpenSSL Connections have an internal buffer that might
                        # contain data altough everything is read from the socket.
                        # Thankfully, connection.pending() returns the amount of
                        # bytes in this buffer, so we can read it completely at
                        # once.
                        if src.ssl_established:
                            data.write(rfile.read(src.connection.pending()))
                    else:  # no data left, but not closed yet
                        break
                data = data.getvalue()
                if data == "":  # no data received, rfile is closed
                    self.c.log("Close writing connection to %s" % dst_str)
                    conns.remove(rfile)
                    # half-close: stop writing towards the peer
                    if dst.ssl_established:
                        dst.connection.shutdown()
                    else:
                        dst.connection.shutdown(socket.SHUT_WR)
                    if len(conns) == 0:
                        self.c.close = True
                    break
                self.c.log("%s %s\r\n%s" % (direction, dst_str, data))
                dst.wfile.write(data)
                dst.wfile.flush()
| gpl-3.0 |
tudorbarascu/QGIS | python/plugins/processing/algs/gdal/proximity.py | 15 | 9534 | # -*- coding: utf-8 -*-
"""
***************************************************************************
proximity.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
import os
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsRasterFileWriter,
QgsProcessingException,
QgsProcessingParameterDefinition,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterBand,
QgsProcessingParameterEnum,
QgsProcessingParameterString,
QgsProcessingParameterNumber,
QgsProcessingParameterRasterDestination)
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools.system import isWindows
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class proximity(GdalAlgorithm):
    """Processing algorithm wrapping the gdal_proximity utility, which
    produces a raster of distances to the nearest target pixels."""

    # parameter name constants used throughout the algorithm
    INPUT = 'INPUT'
    BAND = 'BAND'
    VALUES = 'VALUES'
    MAX_DISTANCE = 'MAX_DISTANCE'
    REPLACE = 'REPLACE'
    UNITS = 'UNITS'
    NODATA = 'NODATA'
    OPTIONS = 'OPTIONS'
    EXTRA = 'EXTRA'
    DATA_TYPE = 'DATA_TYPE'
    OUTPUT = 'OUTPUT'

    # GDAL output data types; index 5 (Float32) is the default below
    TYPES = ['Byte', 'Int16', 'UInt16', 'UInt32', 'Int32', 'Float32', 'Float64', 'CInt16', 'CInt32', 'CFloat32', 'CFloat64']

    def icon(self):
        return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'proximity.png'))

    def __init__(self):
        super().__init__()

    def initAlgorithm(self, config=None):
        # (label, gdal -distunits value) pairs; index is stored as the enum
        self.distanceUnits = ((self.tr('Georeferenced coordinates'), 'GEO'),
                              (self.tr('Pixel coordinates'), 'PIXEL'))

        self.addParameter(QgsProcessingParameterRasterLayer(self.INPUT,
                                                            self.tr('Input layer')))
        self.addParameter(QgsProcessingParameterBand(self.BAND,
                                                     self.tr('Band number'),
                                                     1,
                                                     parentLayerParameterName=self.INPUT))
        self.addParameter(QgsProcessingParameterString(self.VALUES,
                                                       self.tr('A list of pixel values in the source image to be considered target pixels'),
                                                       optional=True))
        self.addParameter(QgsProcessingParameterEnum(self.UNITS,
                                                     self.tr('Distance units'),
                                                     options=[i[0] for i in self.distanceUnits],
                                                     allowMultiple=False,
                                                     defaultValue=1))
        self.addParameter(QgsProcessingParameterNumber(self.MAX_DISTANCE,
                                                       self.tr('The maximum distance to be generated'),
                                                       type=QgsProcessingParameterNumber.Double,
                                                       minValue=0.0,
                                                       defaultValue=0.0,
                                                       optional=True))
        self.addParameter(QgsProcessingParameterNumber(self.REPLACE,
                                                       self.tr('Value to be applied to all pixels that are within the -maxdist of target pixels'),
                                                       type=QgsProcessingParameterNumber.Double,
                                                       defaultValue=0.0,
                                                       optional=True))
        self.addParameter(QgsProcessingParameterNumber(self.NODATA,
                                                       self.tr('Nodata value to use for the destination proximity raster'),
                                                       type=QgsProcessingParameterNumber.Double,
                                                       defaultValue=0.0,
                                                       optional=True))

        # advanced parameters, hidden behind the "advanced" flag in the UI
        options_param = QgsProcessingParameterString(self.OPTIONS,
                                                     self.tr('Additional creation options'),
                                                     defaultValue='',
                                                     optional=True)
        options_param.setFlags(options_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        options_param.setMetadata({
            'widget_wrapper': {
                'class': 'processing.algs.gdal.ui.RasterOptionsWidget.RasterOptionsWidgetWrapper'}})
        self.addParameter(options_param)

        extra_param = QgsProcessingParameterString(self.EXTRA,
                                                   self.tr('Additional command-line parameters'),
                                                   defaultValue=None,
                                                   optional=True)
        extra_param.setFlags(extra_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(extra_param)

        dataType_param = QgsProcessingParameterEnum(self.DATA_TYPE,
                                                    self.tr('Output data type'),
                                                    self.TYPES,
                                                    allowMultiple=False,
                                                    defaultValue=5)
        dataType_param.setFlags(dataType_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(dataType_param)

        self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT,
                                                                  self.tr('Proximity map')))

    def name(self):
        return 'proximity'

    def displayName(self):
        return self.tr('Proximity (raster distance)')

    def group(self):
        return self.tr('Raster analysis')

    def groupId(self):
        return 'rasteranalysis'

    def commandName(self):
        return 'gdal_proximity'

    def getConsoleCommands(self, parameters, context, feedback, executing=True):
        # Build the gdal_proximity.py command line from the parameter values.
        inLayer = self.parameterAsRasterLayer(parameters, self.INPUT, context)
        if inLayer is None:
            raise QgsProcessingException(self.invalidRasterError(parameters, self.INPUT))

        distance = self.parameterAsDouble(parameters, self.MAX_DISTANCE, context)
        replaceValue = self.parameterAsDouble(parameters, self.REPLACE, context)
        # nodata is optional; only pass it through when explicitly provided
        if self.NODATA in parameters and parameters[self.NODATA] is not None:
            nodata = self.parameterAsDouble(parameters, self.NODATA, context)
        else:
            nodata = None
        options = self.parameterAsString(parameters, self.OPTIONS, context)
        out = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
        self.setOutputValue(self.OUTPUT, out)

        arguments = []
        arguments.append('-srcband')
        arguments.append(str(self.parameterAsInt(parameters, self.BAND, context)))
        arguments.append('-distunits')
        arguments.append(self.distanceUnits[self.parameterAsEnum(parameters, self.UNITS, context)][1])

        values = self.parameterAsString(parameters, self.VALUES, context)
        if values:
            arguments.append('-values')
            arguments.append(values)

        # a max distance of 0.0 (the default) means "unlimited" and is omitted
        if distance:
            arguments.append('-maxdist')
            arguments.append(str(distance))

        if nodata is not None:
            arguments.append('-nodata')
            arguments.append(str(nodata))

        if replaceValue:
            arguments.append('-fixed-buf-val')
            arguments.append(str(replaceValue))

        arguments.append('-ot')
        arguments.append(self.TYPES[self.parameterAsEnum(parameters, self.DATA_TYPE, context)])
        arguments.append('-of')
        # derive the GDAL driver from the output file extension
        arguments.append(QgsRasterFileWriter.driverForExtension(os.path.splitext(out)[1]))

        if options:
            arguments.extend(GdalUtils.parseCreationOptions(options))

        if self.EXTRA in parameters and parameters[self.EXTRA] not in (None, ''):
            extra = self.parameterAsString(parameters, self.EXTRA, context)
            arguments.append(extra)

        arguments.append(inLayer.source())
        arguments.append(out)

        # on Windows the script is invoked through the interpreter
        if isWindows():
            commands = ["python3", "-m", self.commandName()]
        else:
            commands = [self.commandName() + '.py']

        commands.append(GdalUtils.escapeAndJoin(arguments))

        return commands
| gpl-2.0 |
beiko-lab/gengis | bin/Lib/ctypes/test/test_funcptr.py | 4 | 4029 | import os, unittest
from ctypes import *
try:
WINFUNCTYPE
except NameError:
# fake to enable this test on Linux
WINFUNCTYPE = CFUNCTYPE
import _ctypes_test
lib = CDLL(_ctypes_test.__file__)
class CFuncPtrTestCase(unittest.TestCase):
    # Exercises ctypes function pointer types (CFUNCTYPE/WINFUNCTYPE):
    # type metadata, calling conventions, callbacks stored in structures,
    # and calls into the _ctypes_test helper library.
    # (Python 2 era code: string literals are byte strings.)

    def test_basic(self):
        X = WINFUNCTYPE(c_int, c_int, c_int)

        def func(*args):
            return len(args)

        x = X(func)
        # the prototype's restype/argtypes are reflected on the instance
        self.assertEqual(x.restype, c_int)
        self.assertEqual(x.argtypes, (c_int, c_int))
        # a function pointer has pointer size, both instance and type
        self.assertEqual(sizeof(x), sizeof(c_voidp))
        self.assertEqual(sizeof(X), sizeof(c_voidp))

    def test_first(self):
        StdCallback = WINFUNCTYPE(c_int, c_int, c_int)
        CdeclCallback = CFUNCTYPE(c_int, c_int, c_int)

        def func(a, b):
            return a + b

        s = StdCallback(func)
        c = CdeclCallback(func)

        self.assertEqual(s(1, 2), 3)
        self.assertEqual(c(1, 2), 3)
        # The following no longer raises a TypeError - it is now
        # possible, as in C, to call cdecl functions with more parameters.
        #self.assertRaises(TypeError, c, 1, 2, 3)
        self.assertEqual(c(1, 2, 3, 4, 5, 6), 3)
        # stdcall, by contrast, still rejects extra arguments (except on
        # platforms where WINFUNCTYPE is just CFUNCTYPE)
        if not WINFUNCTYPE is CFUNCTYPE and os.name != "ce":
            self.assertRaises(TypeError, s, 1, 2, 3)

    def test_structures(self):
        WNDPROC = WINFUNCTYPE(c_long, c_int, c_int, c_int, c_int)

        def wndproc(hwnd, msg, wParam, lParam):
            return hwnd + msg + wParam + lParam

        HINSTANCE = c_int
        HICON = c_int
        HCURSOR = c_int
        LPCTSTR = c_char_p

        class WNDCLASS(Structure):
            _fields_ = [("style", c_uint),
                        ("lpfnWndProc", WNDPROC),
                        ("cbClsExtra", c_int),
                        ("cbWndExtra", c_int),
                        ("hInstance", HINSTANCE),
                        ("hIcon", HICON),
                        ("hCursor", HCURSOR),
                        ("lpszMenuName", LPCTSTR),
                        ("lpszClassName", LPCTSTR)]

        wndclass = WNDCLASS()
        wndclass.lpfnWndProc = WNDPROC(wndproc)

        WNDPROC_2 = WINFUNCTYPE(c_long, c_int, c_int, c_int, c_int)

        # This is no longer true, now that WINFUNCTYPE caches created types internally.
        ## # CFuncPtr subclasses are compared by identity, so this raises a TypeError:
        ## self.assertRaises(TypeError, setattr, wndclass,
        ##                  "lpfnWndProc", WNDPROC_2(wndproc))
        # instead:

        self.assertTrue(WNDPROC is WNDPROC_2)
        # 'wndclass.lpfnWndProc' leaks 94 references.  Why?
        self.assertEqual(wndclass.lpfnWndProc(1, 2, 3, 4), 10)

        # the callback stays callable even after the structure and the
        # original Python function are gone
        f = wndclass.lpfnWndProc

        del wndclass
        del wndproc

        self.assertEqual(f(10, 11, 12, 13), 46)

    def test_dllfunctions(self):

        def NoNullHandle(value):
            if not value:
                raise WinError()
            return value

        strchr = lib.my_strchr
        strchr.restype = c_char_p
        strchr.argtypes = (c_char_p, c_char)
        self.assertEqual(strchr("abcdefghi", "b"), "bcdefghi")
        self.assertEqual(strchr("abcdefghi", "x"), None)

        strtok = lib.my_strtok
        strtok.restype = c_char_p
        # Neither of this does work: strtok changes the buffer it is passed
        ## strtok.argtypes = (c_char_p, c_char_p)
        ## strtok.argtypes = (c_string, c_char_p)

        def c_string(init):
            # mutable char buffer: strtok writes into its argument
            size = len(init) + 1
            return (c_char*size)(*init)

        s = "a\nb\nc"
        b = c_string(s)

        ## b = (c_char * (len(s)+1))()
        ## b.value = s

        ## b = c_string(s)
        self.assertEqual(strtok(b, "\n"), "a")
        self.assertEqual(strtok(None, "\n"), "b")
        self.assertEqual(strtok(None, "\n"), "c")
        self.assertEqual(strtok(None, "\n"), None)
if __name__ == '__main__':
unittest.main()
| gpl-3.0 |
cfarquhar/openstack-ansible | tests/test_filesystem.py | 2 | 3423 | #!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import mock
import os
from os import path
import sys
import unittest
from test_inventory import cleanup
from test_inventory import get_inventory
from test_inventory import make_config
INV_DIR = 'inventory'
sys.path.append(path.join(os.getcwd(), INV_DIR))
from osa_toolkit import filesystem as fs
TARGET_DIR = path.join(os.getcwd(), 'tests', 'inventory')
USER_CONFIG_FILE = path.join(TARGET_DIR, 'openstack_user_config.yml')
def setUpModule():
    # The setUpModule function is used by the unittest framework: it runs
    # once before any test in this module and creates the shared
    # openstack_user_config.yml fixture.
    make_config()
def tearDownModule():
    # Runs once after all tests in this module.
    # This file should only be removed after all tests are run,
    # thus it is excluded from cleanup.
    os.remove(USER_CONFIG_FILE)
class TestMultipleRuns(unittest.TestCase):

    def test_creating_backup_file(self):
        # _make_backup should append the inventory file to the backup tar
        # under a timestamped archive name; tarfile.open and the name helper
        # are mocked so no real archive is written.
        inventory_file_path = os.path.join(TARGET_DIR,
                                           'openstack_inventory.json')
        get_backup_name_path = 'osa_toolkit.filesystem._get_backup_name'
        backup_name = 'openstack_inventory.json-20160531_171804.json'

        tar_file = mock.MagicMock()
        tar_file.__enter__.return_value = tar_file

        # run make backup with faked tarfiles and date
        with mock.patch('osa_toolkit.filesystem.tarfile.open') as tar_open:
            tar_open.return_value = tar_file
            with mock.patch(get_backup_name_path) as backup_mock:
                backup_mock.return_value = backup_name
                fs._make_backup(TARGET_DIR, inventory_file_path)

        backup_path = path.join(TARGET_DIR, 'backup_openstack_inventory.tar')

        tar_open.assert_called_with(backup_path, 'a')

        # This chain is present because of how tarfile.open is called to
        # make a context manager inside the make_backup function.
        tar_file.add.assert_called_with(inventory_file_path,
                                        arcname=backup_name)

    def test_recreating_files(self):
        # Deleting the files after the first run should cause the files to be
        # completely remade
        get_inventory()

        get_inventory()

        backup_path = path.join(TARGET_DIR, 'backup_openstack_inventory.tar')

        # no backup tar should exist when the files were freshly created
        self.assertFalse(os.path.exists(backup_path))

    def test_rereading_files(self):
        # Generate the initial inventory files
        get_inventory(clean=False)

        inv, path = fs.load_inventory(TARGET_DIR)
        self.assertIsInstance(inv, dict)
        self.assertIn('_meta', inv)
        # This test is basically just making sure we get more than
        # INVENTORY_SKEL populated, so we're not going to do deep testing
        self.assertIn('log_hosts', inv)

    def tearDown(self):
        # Clean up here since get_inventory will not do it by design in
        # this test.
        cleanup()
if __name__ == '__main__':
unittest.main(catchbreak=True)
| apache-2.0 |
eandersson/amqpstorm | amqpstorm/management/basic.py | 1 | 3707 | from amqpstorm.compatibility import json
from amqpstorm.compatibility import quote
from amqpstorm.management.base import ManagementHandler
from amqpstorm.message import Message
API_BASIC_PUBLISH = 'exchanges/%s/%s/publish'
API_BASIC_GET_MESSAGE = 'queues/%s/%s/get'
class Basic(ManagementHandler):
    def publish(self, body, routing_key, exchange='amq.default',
                virtual_host='/', properties=None, payload_encoding='string'):
        """Publish a Message through the management HTTP API.

        :param bytes,str,unicode body: Message payload
        :param str routing_key: Message routing key
        :param str exchange: The exchange to publish the message to
        :param str virtual_host: Virtual host name
        :param dict properties: Message properties
        :param str payload_encoding: Payload encoding.

        :raises ApiError: Raises if the remote server encountered an error.
        :raises ApiConnectionError: Raises if there was a connectivity issue.

        :rtype: dict
        """
        # the JSON body carries the un-encoded vhost; only URL segments are
        # percent-encoded
        request_body = json.dumps(
            {
                'routing_key': routing_key,
                'payload': body,
                'payload_encoding': payload_encoding,
                'properties': properties or {},
                'vhost': virtual_host
            }
        )
        endpoint = API_BASIC_PUBLISH % (quote(virtual_host, ''),
                                        quote(exchange, ''))
        return self.http_client.post(endpoint, payload=request_body)

    def get(self, queue, virtual_host='/', requeue=False, to_dict=False,
            count=1, truncate=50000, encoding='auto'):
        """Get Messages from a queue through the management HTTP API.

        :param str queue: Queue name
        :param str virtual_host: Virtual host name
        :param bool requeue: Re-queue message
        :param bool to_dict: Should incoming messages be converted to a
                    dictionary before delivery.
        :param int count: How many messages should we try to fetch.
        :param int truncate: The maximum length in bytes, beyond that the
                             server will truncate the message.
        :param str encoding: Message encoding.

        :raises ApiError: Raises if the remote server encountered an error.
        :raises ApiConnectionError: Raises if there was a connectivity issue.

        :rtype: list
        """
        request_body = json.dumps(
            {
                'count': count,
                'requeue': requeue,
                'ackmode': 'ack_requeue_true' if requeue
                           else 'ack_requeue_false',
                'encoding': encoding,
                'truncate': truncate,
                'vhost': virtual_host
            }
        )
        endpoint = API_BASIC_GET_MESSAGE % (quote(virtual_host, ''), queue)
        response = self.http_client.post(endpoint, payload=request_body)
        if to_dict:
            return response
        # wrap each raw payload in a Message; older/newer servers differ in
        # whether the content arrives under 'body' or 'payload'
        return [
            Message(
                channel=None,
                body=entry.get('body') or entry.get('payload'),
                properties=entry.get('properties'),
                auto_decode=True,
            )
            for entry in response
        ]
| mit |
X-dark/Flexget | flexget/plugins/search_sceneaccess.py | 5 | 11553 | from __future__ import unicode_literals, division, absolute_import
import logging
import re
from urllib import quote
from flexget import plugin
from flexget import validator
from flexget.entry import Entry
from flexget.event import event
from flexget.utils.soup import get_soup
from flexget.utils.search import torrent_availability, normalize_unicode, clean_title
from flexget.utils.requests import Session
log = logging.getLogger('search_sceneaccess')

# Category name -> numeric site category id, grouped by search "scope"
# (the URL path section on the site the query is issued against).
CATEGORIES = {
    'browse':
        {
            'Movies/DVD-R': 8,
            'Movies/x264': 22,
            'Movies/XviD': 7,
            'TV/HD-x264': 27,
            'TV/SD-x264': 17,
            'TV/XviD': 11,
            'Games/PC': 3,
            'Games/PS3': 5,
            'Games/PSP': 20,
            'Games/WII': 28,
            'Games/XBOX360': 23,
            'APPS/ISO': 1,
            'DOX': 14,
            'MISC': 21
        },
    'nonscene':
        {
            'Movies/HD-x264': 41,
            'Movies/SD-x264': 42,
            'Movies/XviD': 43,
            'TV/HD': 44,
            'TV/SD': 45
        },
    'mp3/0day':
        {
            '0DAY/APPS': 2,
            'FLAC': 40,
            'MP3': 13,
            'MVID': 15,
        },
    'archive':
        {
            'Movies/Packs': 4,
            'TV/Packs': 26,
            'Games/Packs': 29,
            'XXX/Packs': 37,
            'Music/Packs': 38
        },
    'foreign':
        {
            'Movies/DVD-R': 31,
            'Movies/x264': 32,
            'Movies/XviD': 30,
            'TV/x264': 34,
            'TV/XviD': 33,
        },
    'xxx':
        {
            'XXX/XviD': 12,
            'XXX/x264': 35,
            'XXX/0DAY': 36
        }
}

# Base address of the tracker; login and search requests are built from it.
URL = 'https://sceneaccess.eu/'
class SceneAccessSearch(object):
    """ Scene Access Search plugin

    == Basic usage:

    sceneaccess:
        username: XXXX              (required)
        password: XXXX              (required)
        category: Movies/x264       (optional)
        gravity_multiplier: 200     (optional)

    == Categories:
    +---------------+----------------+-----------+--------------+--------------+----------+
    |    browse     |    nonscene    | mp3/0day  |   archive    |   foreign    |   xxx    |
    +---------------+----------------+-----------+--------------+--------------+----------+
    | APPS/ISO      | Movies/HD-x264 | 0DAY/APPS | Games/Packs  | Movies/DVD-R | XXX/0DAY |
    | DOX           | Movies/SD-x264 | FLAC      | Movies/Packs | Movies/x264  | XXX/x264 |
    | Games/PC      | Movies/XviD    | MP3       | Music/Packs  | Movies/XviD  | XXX/XviD |
    | Games/PS3     | TV/HD          | MVID      | TV/Packs     | TV/x264      |          |
    | Games/PSP     | TV/SD          |           | XXX/Packs    | TV/XviD      |          |
    | Games/WII     |                |           |              |              |          |
    | Games/XBOX360 |                |           |              |              |          |
    | MISC          |                |           |              |              |          |
    | Movies/DVD-R  |                |           |              |              |          |
    | Movies/x264   |                |           |              |              |          |
    | Movies/XviD   |                |           |              |              |          |
    | TV/HD-x264    |                |           |              |              |          |
    | TV/SD-x264    |                |           |              |              |          |
    | TV/XviD       |                |           |              |              |          |
    +---------------+----------------+-----------+--------------+--------------+----------+

    You can combine the categories almost any way you want, here are some examples:

    category:
      archive: yes              => Will search all categories within archive section

    category: Movies/x264       => Search Movies/x264 within 'browse' section (browse is always default if unspecified)

    category:
      browse:
        - 22                    => This is custom category ID
        - Movies/XviD
      foreign:
        - Movies/x264
        - Movies/XviD

    Specifying specific category ID is also possible, you can extract ID from URL, for example
    if you hover or click on category on the site you'll see similar address:

    http://sceneaccess.URL/browse?cat=22

    In this example, according to this bit ?cat=22 , category id is 22.

    == Priority

    gravity_multiplier is an optional parameter that increases the odds of downloading found
    matches from sceneaccess instead of other search providers, which may have higher odds
    due to their higher number of peers.
    Although sceneaccess does not have as many peers as some public trackers, the torrents
    are usually faster. By default, Flexget gives higher priority to found matches according
    to the following formula:

    gravity = number of seeds * 2 + number of leechers

    gravity_multiplier will multiply the above number by the specified amount.
    If you use public trackers for searches, you may want to use this feature.
    """

    def validator(self):
        """Return config validator."""
        root = validator.factory('dict')
        root.accept('text', key='username', required=True)
        root.accept('text', key='password', required=True)
        root.accept('number', key='gravity_multiplier')
        # Scope as in pages like `browse`, `mp3/0day`, `foreign`, etc.
        # Will only accept categories from `browse` (the default scope),
        # unless the user specifies other scopes via a dict.
        root.accept('choice', key='category').accept_choices(CATEGORIES['browse'])
        root.accept('number', key='category')
        categories = root.accept('dict', key='category')
        category_list = root.accept('list', key='category')
        category_list.accept('choice').accept_choices(CATEGORIES['browse'])
        for category in CATEGORIES:
            # Each scope accepts a named category, a bare `yes` (all
            # categories), a raw numeric id, or a list mixing the latter two.
            categories.accept('choice', key=category).accept_choices(CATEGORIES[category])
            categories.accept('boolean', key=category)
            categories.accept('number', key=category)
            category_list = categories.accept('list', key=category)
            category_list.accept('choice', key=category).accept_choices(CATEGORIES[category])
            category_list.accept('number', key=category)
        return root

    def processCategories(self, config):
        """Normalize the `category` config into a list of search targets.

        Each returned dict carries `url_path` (the site section to query,
        e.g. `browse` or `spam`) and `category_url_string` (the
        `&c<id>=<id>` query fragment selecting category ids within it).
        """
        toProcess = dict()
        # Build request urls from config
        try:
            scope = 'browse'  # Default scope to search in
            category = config['category']
            if isinstance(category, dict):  # Categories have search scope specified.
                for scope in category:
                    if isinstance(category[scope], bool):  # If provided boolean, search all categories
                        category[scope] = []
                    elif not isinstance(category[scope], list):  # Convert single category into list
                        category[scope] = [category[scope]]
                    toProcess[scope] = category[scope]
            else:  # Single category specified, will default to `browse` scope.
                category = [category]
                toProcess[scope] = category
        except KeyError:  # Category was not set, will default to `browse` scope and all categories.
            toProcess[scope] = []
        finally:  # Process the categories into a format usable by search()
            ret = list()
            for scope, categories in toProcess.iteritems():
                cat_id = list()
                # NOTE(review): `id` below shadows the builtin of the same name.
                for category in categories:
                    try:
                        id = CATEGORIES[scope][category]
                    except KeyError:  # User provided category id directly
                        id = category
                    finally:
                        if isinstance(id, list):
                            # Flatten a list of ids — presumably from a raw
                            # list entry in the config; TODO confirm.
                            [cat_id.append(l) for l in id]
                        else:
                            cat_id.append(id)
                if scope == 'mp3/0day':  # mp3/0day is actually /spam?search= in URL, can safely change it now
                    scope = 'spam'
                category_url_string = ''.join(['&c' + str(id) + '=' + str(id) for id in cat_id])  # &c<id>=<id>&...
                ret.append({'url_path': scope, 'category_url_string': category_url_string})
            return ret

    @plugin.internet(log)
    def search(self, entry, config=None):
        """
        Search for entries on SceneAccess
        """
        try:
            multip = int(config['gravity_multiplier'])
        except KeyError:
            multip = 1

        # Login...
        params = {'username': config['username'],
                  'password': config['password'],
                  'submit': 'come on in'}
        session = Session()
        # NOTE(review): the header key is 'User agent'; the standard header
        # name is 'User-Agent' — confirm whether this is intentional.
        session.headers = {'User agent': 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:27.0) Gecko/20100101 Firefox/27.0'}
        log.debug('Logging in to %s...' % URL)
        session.post(URL + 'login', data=params)

        # Prepare queries...
        BASE_URLS = list()
        entries = set()
        for category in self.processCategories(config):
            BASE_URLS.append(URL + '%(url_path)s?method=2%(category_url_string)s' % category)

        # Search...
        for search_string in entry.get('search_strings', [entry['title']]):
            search_string_normalized = normalize_unicode(clean_title(search_string))
            search_string_url_fragment = '&search=' + quote(search_string_normalized.encode('utf8'))
            for url in BASE_URLS:
                url += search_string_url_fragment
                log.debug('Search URL for `%s`: %s' % (search_string, url))
                page = session.get(url).content
                soup = get_soup(page)
                # NOTE(review): `entry` below shadows the `entry` parameter;
                # the parameter is no longer accessible past this point.
                for result in soup.findAll('tr', attrs={'class': 'tt_row'}):
                    entry = Entry()
                    entry['title'] = result.find('a', href=re.compile(r'details\?id=\d+'))['title']
                    entry['url'] = URL + result.find('a', href=re.compile(r'.torrent$'))['href']
                    entry['torrent_seeds'] = result.find('td', attrs={'class': 'ttr_seeders'}).string
                    entry['torrent_leeches'] = result.find('td', attrs={'class': 'ttr_leechers'}).string
                    entry['search_sort'] = torrent_availability(entry['torrent_seeds'], entry['torrent_leeches'])*multip
                    # Parse the human-readable size cell and convert to MiB.
                    size = result.find('td', attrs={'class': 'ttr_size'}).next
                    size = re.search('(\d+(?:[.,]\d+)*)\s?([KMG]B)', size)
                    if size:
                        if size.group(2) == 'GB':
                            entry['content_size'] = int(float(size.group(1)) * 1000 ** 3 / 1024 ** 2)
                        elif size.group(2) == 'MB':
                            entry['content_size'] = int(float(size.group(1)) * 1000 ** 2 / 1024 ** 2)
                        elif size.group(2) == 'KB':
                            entry['content_size'] = int(float(size.group(1)) * 1000 / 1024 ** 2)
                        else:
                            entry['content_size'] = int(float(size.group(1)) / 1024 ** 2)
                    entries.add(entry)
        return entries
@event('plugin.register')
def register_plugin():
    # Expose SceneAccessSearch to Flexget as the 'sceneaccess' search plugin.
    plugin.register(SceneAccessSearch, 'sceneaccess', groups=['search'], api_ver=2)
| mit |
mambocab/cassandra-dtest | jmx_auth_test.py | 1 | 3388 | from distutils.version import LooseVersion
from ccmlib.node import NodetoolError
from dtest import Tester
from jmxutils import apply_jmx_authentication
from tools import known_failure, since
@since('3.6')
class TestJMXAuth(Tester):
    # Smoke tests for JMX authentication/authorization via nodetool
    # (@jira_ticket CASSANDRA-10091).

    @known_failure(failure_source='test',
                   jira_url='https://issues.apache.org/jira/browse/CASSANDRA-11730',
                   flaky=False, notes='windows')
    def basic_auth_test(self):
        """
        Some basic smoke testing of JMX authentication and authorization.

        Uses nodetool as a means of exercising the JMX interface as JolokiaAgent
        exposes its own connector which bypasses the in-built security features
        @jira_ticket CASSANDRA-10091
        """
        self.prepare()
        [node] = self.cluster.nodelist()
        node.nodetool('-u cassandra -pw cassandra status')
        session = self.patient_cql_connection(node, user='cassandra', password='cassandra')
        # the jmx_user role has no login privilege but give it a password anyway
        # to demonstrate that LOGIN is required for JMX authentication
        session.execute("CREATE ROLE jmx_user WITH LOGIN=false AND PASSWORD='321cba'")
        session.execute("GRANT SELECT ON MBEAN 'org.apache.cassandra.net:type=FailureDetector' TO jmx_user")
        session.execute("GRANT DESCRIBE ON ALL MBEANS TO jmx_user")
        session.execute("CREATE ROLE test WITH LOGIN=true and PASSWORD='abc123'")
        # unknown user is rejected
        with self.assertRaisesRegexp(NodetoolError, self.authentication_fail_message(node, 'baduser')):
            node.nodetool('-u baduser -pw abc123 gossipinfo')
        # wrong password is rejected
        with self.assertRaisesRegexp(NodetoolError, self.authentication_fail_message(node, 'test')):
            node.nodetool('-u test -pw badpassword gossipinfo')
        # credentials must be supplied at all
        with self.assertRaisesRegexp(NodetoolError, "Required key 'username' is missing"):
            node.nodetool('gossipinfo')
        # role must have LOGIN attribute
        with self.assertRaisesRegexp(NodetoolError, 'jmx_user is not permitted to log in'):
            node.nodetool('-u jmx_user -pw 321cba gossipinfo')
        # test doesn't yet have any privileges on the necessary JMX resources
        with self.assertRaisesRegexp(NodetoolError, 'Access Denied'):
            node.nodetool('-u test -pw abc123 gossipinfo')
        # granting jmx_user to test confers its MBEAN privileges
        session.execute("GRANT jmx_user TO test")
        node.nodetool('-u test -pw abc123 gossipinfo')
        # superuser status applies to JMX authz too
        node.nodetool('-u cassandra -pw cassandra gossipinfo')

    def prepare(self, nodes=1, permissions_validity=0):
        # Enable password authentication plus CassandraAuthorizer and hook
        # JMX into the same auth backend before starting the node(s).
        config = {'authenticator': 'org.apache.cassandra.auth.PasswordAuthenticator',
                  'authorizer': 'org.apache.cassandra.auth.CassandraAuthorizer',
                  'permissions_validity_in_ms': permissions_validity}
        self.cluster.set_configuration_options(values=config)
        self.cluster.populate(nodes)
        [node] = self.cluster.nodelist()
        apply_jmx_authentication(node)
        node.start()
        node.watch_log_for('Created default superuser')

    def authentication_fail_message(self, node, username):
        # The authentication failure message changed in 3.10 to include the
        # offending username.
        return "Provided username {user} and/or password are incorrect".format(user=username) \
            if LooseVersion(node.cluster.version()) >= LooseVersion('3.10') \
            else "Username and/or password are incorrect"
| apache-2.0 |
tensorflow/datasets | tensorflow_datasets/image_classification/stanford_online_products.py | 1 | 3281 | # coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Stanford Online Products Dataset."""
import csv
import os
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
# Archive containing the images and the Ebay_{train,test}.txt metadata files.
_DOWNLOAD_LINK = "ftp://cs.stanford.edu/cs/cvgl/Stanford_Online_Products.zip"

# Maps each tfds split to the basename of its metadata file in the archive.
_SPLITS = {tfds.Split.TRAIN: "Ebay_train", tfds.Split.TEST: "Ebay_test"}

# Super-class names, ordered by their 1-based id in the metadata files.
_SUPER_CLASSES = [
    "bicycle", "cabinet", "chair", "coffee_maker", "fan", "kettle", "lamp",
    "mug", "sofa", "stapler", "table", "toaster"
]

_CITATION = """\
@inproceedings{song2016deep,
author = {Song, Hyun Oh and Xiang, Yu and Jegelka, Stefanie and Savarese, Silvio},
title = {Deep Metric Learning via Lifted Structured Feature Embedding},
booktitle = {IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
year = {2016}
}
"""
class StanfordOnlineProducts(tfds.core.GeneratorBasedBuilder):
  """Stanford Online Products Dataset."""

  VERSION = tfds.core.Version("1.0.0")

  def _info(self):
    # Each example carries the fine-grained class id, the super class (both
    # as a numeric label and a human-readable name), and the image itself.
    return tfds.core.DatasetInfo(
        description=("Stanford Online Products Dataset"),
        builder=self,
        citation=_CITATION,
        homepage="http://cvgl.stanford.edu/projects/lifted_struct/",
        features=tfds.features.FeaturesDict({
            "class_id":
                tfds.features.ClassLabel(num_classes=22634),
            "super_class_id/num":
                tfds.features.ClassLabel(num_classes=len(_SUPER_CLASSES)),
            "super_class_id":
                tfds.features.ClassLabel(names=_SUPER_CLASSES),
            "image":
                tfds.features.Image()
        }))

  def _split_generators(self, dl_manager):
    # One SplitGenerator per entry in _SPLITS, each pointed at its
    # Ebay_<split>.txt metadata file inside the extracted archive.
    dl_path = dl_manager.download_and_extract(_DOWNLOAD_LINK)
    folder_path = os.path.join(dl_path, "Stanford_Online_Products")
    return [  # pylint:disable=g-complex-comprehension
        tfds.core.SplitGenerator(
            name=k,
            gen_kwargs={"file_path": os.path.join(folder_path, "%s.txt" % v)})
        for k, v in _SPLITS.items()
    ]

  def _generate_examples(self, file_path):
    """Images of Product from the Data Directory.

    Args:
      file_path: str, path to the Ebay_(train/test/info).txt file. Having
        Columns ['class_id', 'super_class_id', 'path']

    Yields:
      Dataset examples.
    """
    with tf.io.gfile.GFile(file_path, "r") as file_:
      dataset = csv.DictReader(file_, delimiter=" ")
      for i, row in enumerate(dataset):
        # Ids in the metadata are 1-based; shift to 0-based labels. Image
        # paths in the file are relative to the metadata file's directory.
        yield i, {
            "class_id": int(row["class_id"]) - 1,
            "super_class_id/num": int(row["super_class_id"]) - 1,
            "super_class_id": _SUPER_CLASSES[int(row["super_class_id"]) - 1],
            "image": os.path.join(os.path.dirname(file_path), row["path"])
        }
| apache-2.0 |
Ervii/garage-time | garage/src/python/pants/base/payload.py | 2 | 3803 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
print_function, unicode_literals)
from hashlib import sha1
class PayloadFieldAlreadyDefinedError(Exception):
    """Raised when a field is added under a key the Payload already has."""


class PayloadFrozenError(Exception):
    """Raised when a field is added to a Payload after freeze()."""
class Payload(object):
    """A mapping from field names to PayloadField instances.

    A Target adds PayloadFields to its Payload until instantiation is
    finished, at which point freeze() is called and the Payload becomes
    immutable.
    """

    def __init__(self):
        # Field name -> PayloadField (or None); populated via add_field().
        self._fields = {}
        # Once True, add_field() refuses any further mutation.
        self._frozen = False
        # Memoized fingerprints, keyed by the frozenset of field keys hashed.
        self._fingerprint_memo_map = {}

    def freeze(self):
        """Permanently make this Payload instance immutable.

        No more fields can be added after calling freeze().
        """
        self._frozen = True

    def get_field(self, key, default=None):
        """An alternative to attribute access for duck typing Payload instances.

        Same semantics as dict.get; simply delegates to the field mapping.
        """
        return self._fields.get(key, default)

    def get_field_value(self, key, default=None):
        """Return the named field's value, or `default` when the field is
        missing or empty.
        """
        field = self._fields.get(key)
        return field.value if field else default

    def add_fields(self, field_dict):
        """Add a mapping of field names to PayloadField instances."""
        for name, new_field in field_dict.items():
            self.add_field(name, new_field)

    def add_field(self, key, field):
        """Add a field to the Payload.

        :param string key: The key for the field. Fields can be accessed using
          attribute access as well as `get_field` using `key`.
        :param PayloadField field: A PayloadField instance. None is an allowable
          value, in which case it will be skipped during hashing.
        :raises PayloadFieldAlreadyDefinedError: if `key` is already present.
        :raises PayloadFrozenError: if the payload has been frozen.
        """
        if key in self._fields:
            raise PayloadFieldAlreadyDefinedError(
                'Key {key} is already set on this payload. The existing field was {existing_field}.'
                ' Tried to set new field {field}.'
                .format(key=key, existing_field=self._fields[key], field=field))
        if self._frozen:
            raise PayloadFrozenError(
                'Payload is frozen, field with key {key} cannot be added to it.'
                .format(key=key))
        self._fields[key] = field
        # NOTE(review): looks vestigial -- memoization lives in
        # _fingerprint_memo_map and nothing here reads this attribute; kept
        # for exact behavioral parity.
        self._fingerprint_memo = None

    def fingerprint(self, field_keys=None):
        """A memoizing fingerprint that rolls together the fingerprints of the
        underlying PayloadFields.

        Returns None when no field contributed a hash (all were absent or
        opted out by returning None).

        :param iterable<string> field_keys: A subset of fields to use for the
          fingerprint. Defaults to all fields.
        """
        keys = frozenset(field_keys or self._fields.keys())
        memo = self._fingerprint_memo_map
        if keys not in memo:
            memo[keys] = self._compute_fingerprint(keys)
        return memo[keys]

    def _compute_fingerprint(self, field_keys):
        # Hash fields in sorted-key order so the result is deterministic.
        hasher = sha1()
        hashed_anything = False
        for key in sorted(field_keys):
            field = self._fields[key]
            if field is None:
                continue
            fp = field.fingerprint()
            if fp is None:
                continue
            hashed_anything = True
            hasher.update(sha1(key).hexdigest())
            hasher.update(fp)
        return hasher.hexdigest() if hashed_anything else None

    def __getattr__(self, attr):
        # Missing keys raise KeyError (not AttributeError), mirroring the
        # underlying dict lookup.
        field = self._fields[attr]
        return field.value if field is not None else None
| apache-2.0 |
DrMeers/django | django/conf/locale/sr/formats.py | 394 | 2011 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# Serbian (sr) display formats, using the Django date format syntax.
DATE_FORMAT = 'j. F Y.'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y. H:i'
YEAR_MONTH_FORMAT = 'F Y.'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'j.m.Y.'
SHORT_DATETIME_FORMAT = 'j.m.Y. H:i'
# Monday is the first day of the week.
FIRST_DAY_OF_WEEK = 1

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d.%m.%Y.', '%d.%m.%y.',          # '25.10.2006.', '25.10.06.'
    '%d. %m. %Y.', '%d. %m. %y.',      # '25. 10. 2006.', '25. 10. 06.'
    # '%d. %b %y.', '%d. %B %y.',      # '25. Oct 06.', '25. October 06.'
    # '%d. %b \'%y.', '%d. %B \'%y.',  # '25. Oct '06.', '25. October '06.'
    # '%d. %b %Y.', '%d. %B %Y.',      # '25. Oct 2006.', '25. October 2006.'
)
DATETIME_INPUT_FORMATS = (
    '%d.%m.%Y. %H:%M:%S',         # '25.10.2006. 14:30:59'
    '%d.%m.%Y. %H:%M:%S.%f',      # '25.10.2006. 14:30:59.000200'
    '%d.%m.%Y. %H:%M',            # '25.10.2006. 14:30'
    '%d.%m.%Y.',                  # '25.10.2006.'
    '%d.%m.%y. %H:%M:%S',         # '25.10.06. 14:30:59'
    '%d.%m.%y. %H:%M:%S.%f',      # '25.10.06. 14:30:59.000200'
    '%d.%m.%y. %H:%M',            # '25.10.06. 14:30'
    '%d.%m.%y.',                  # '25.10.06.'
    '%d. %m. %Y. %H:%M:%S',       # '25. 10. 2006. 14:30:59'
    '%d. %m. %Y. %H:%M:%S.%f',    # '25. 10. 2006. 14:30:59.000200'
    '%d. %m. %Y. %H:%M',          # '25. 10. 2006. 14:30'
    '%d. %m. %Y.',                # '25. 10. 2006.'
    '%d. %m. %y. %H:%M:%S',       # '25. 10. 06. 14:30:59'
    '%d. %m. %y. %H:%M:%S.%f',    # '25. 10. 06. 14:30:59.000200'
    '%d. %m. %y. %H:%M',          # '25. 10. 06. 14:30'
    '%d. %m. %y.',                # '25. 10. 06.'
)
# Serbian convention: comma as the decimal mark, dot as the thousands mark.
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.