repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
aeijdenberg/certificate-transparency | python/ct/crypto/verify_test.py | 1 | 17248 | #!/usr/bin/env python
import gflags
import os
import sys
import unittest
from ct.crypto import cert
from ct.crypto import error
from ct.crypto import verify
from ct.proto import client_pb2
from ct.serialization import tls_message
import mock
FLAGS = gflags.FLAGS
gflags.DEFINE_string("testdata_dir", "../test/testdata",
"Location of test certs")
def read_testdata_file(test_file):
    """Return the raw bytes of *test_file* from the configured testdata directory."""
    file_path = os.path.join(FLAGS.testdata_dir, test_file)
    with open(file_path, 'rb') as data_file:
        return data_file.read()
class LogVerifierTest(object):
    """Defines tests for STH and SCT verification logic.

    In order to run these tests, one or more derived classes must be created.
    These classes should also inherit from unittest.TestCase. The derived
    classes must define the following members for use by the tests:

    - self.key_info_fixture: A client_pb2.KeyInfo object
    - self.sth_fixture: A client_pb2.SthResponse object

    This is so that the tests can be run repeatedly with a variety of public
    keys and STHs (e.g. RSA, ECDSA).
    """

    def test_verify_sth(self):
        # Happy path: the fixture STH verifies against the fixture key.
        verifier = verify.LogVerifier(self.key_info_fixture)
        self.assertTrue(verifier.verify_sth(self.sth_fixture))

    def test_verify_sth_fails_for_bad_signature(self):
        # Flip one bit at a time in the signature blob and check that
        # verification always fails (Python 2 str-of-bytes arithmetic).
        verifier = verify.LogVerifier(self.key_info_fixture)
        sth_fixture = self.sth_fixture
        for i in range(len(sth_fixture.tree_head_signature)):
            # Skip the bytes that encode ASN.1 lengths: this is covered in a
            # separate test
            if i == 5 or i == 7 or i == 42:
                continue
            sth = client_pb2.SthResponse()
            sth.CopyFrom(sth_fixture)
            sth.tree_head_signature = (
                sth_fixture.tree_head_signature[:i] +
                chr(ord(sth_fixture.tree_head_signature[i]) ^ 1) +
                sth_fixture.tree_head_signature[i+1:])
            # Encoding- or SignatureError, depending on whether the modified
            # byte is a content byte or not.
            self.assertRaises((error.EncodingError, error.SignatureError),
                              verifier.verify_sth, sth)

    def test_verify_sth_consistency(self):
        # The Merkle consistency check is delegated to the (mocked) Merkle
        # verifier; here we only check the delegation arguments.
        old_sth = self.sth_fixture
        new_sth = client_pb2.SthResponse()
        new_sth.CopyFrom(old_sth)
        new_sth.tree_size = old_sth.tree_size + 1
        new_sth.timestamp = old_sth.timestamp + 1
        new_sth.sha256_root_hash = "a new hash"
        proof = ["some proof the mock does not care about"]
        mock_merkle_verifier = mock.Mock()
        mock_merkle_verifier.verify_tree_consistency.return_value = True
        verifier = verify.LogVerifier(self.key_info_fixture,
                                      mock_merkle_verifier)
        self.assertTrue(verifier.verify_sth_consistency(old_sth, new_sth,
                                                        proof))
        mock_merkle_verifier.verify_tree_consistency.assert_called_once_with(
            old_sth.tree_size, new_sth.tree_size, old_sth.sha256_root_hash,
            new_sth.sha256_root_hash, proof)

    def test_verify_sth_temporal_consistency(self):
        # A strictly newer timestamp with a strictly larger tree is accepted.
        old_sth = self.sth_fixture
        new_sth = client_pb2.SthResponse()
        new_sth.CopyFrom(old_sth)
        new_sth.tree_size = old_sth.tree_size + 1
        new_sth.timestamp = old_sth.timestamp + 1
        # Merkle verifier is never used so simply set to None
        verifier = verify.LogVerifier(self.key_info_fixture,
                                      None)
        # Note we do not care about root hash inconsistency here.
        self.assertTrue(verifier.verify_sth_temporal_consistency(
            old_sth, new_sth))

    def test_verify_sth_temporal_consistency_equal_timestamps(self):
        # Equal timestamps with a different tree size are inconsistent.
        old_sth = self.sth_fixture
        new_sth = client_pb2.SthResponse()
        new_sth.CopyFrom(old_sth)
        new_sth.tree_size = old_sth.tree_size + 1
        # Merkle verifier is never used so simply set to None
        verifier = verify.LogVerifier(self.key_info_fixture,
                                      None)
        self.assertRaises(error.ConsistencyError,
                          verifier.verify_sth_temporal_consistency,
                          old_sth, new_sth)
        new_sth.tree_size = old_sth.tree_size - 1
        self.assertRaises(error.ConsistencyError,
                          verifier.verify_sth_temporal_consistency,
                          old_sth, new_sth)
        # But identical STHs are OK
        self.assertTrue(verifier.verify_sth_temporal_consistency(
            old_sth, old_sth))

    def test_verify_sth_temporal_consistency_reversed_timestamps(self):
        # Passing the newer STH as the "old" one is a caller error.
        old_sth = self.sth_fixture
        new_sth = client_pb2.SthResponse()
        new_sth.CopyFrom(old_sth)
        new_sth.timestamp = old_sth.timestamp + 1
        new_sth.tree_size = old_sth.tree_size + 1
        # Merkle verifier is never used so simply set to None
        verifier = verify.LogVerifier(self.key_info_fixture,
                                      None)
        self.assertRaises(ValueError,
                          verifier.verify_sth_temporal_consistency,
                          new_sth, old_sth)

    def test_verify_sth_temporal_consistency_newer_tree_is_smaller(self):
        # A newer timestamp with a smaller tree can never be consistent.
        old_sth = self.sth_fixture
        new_sth = client_pb2.SthResponse()
        new_sth.CopyFrom(old_sth)
        new_sth.timestamp = old_sth.timestamp + 1
        new_sth.tree_size = old_sth.tree_size - 1
        # Merkle verifier is never used so simply set to None
        verifier = verify.LogVerifier(self.key_info_fixture,
                                      None)
        self.assertRaises(error.ConsistencyError,
                          verifier.verify_sth_temporal_consistency,
                          old_sth, new_sth)

    def test_verify_sth_consistency_invalid_proof(self):
        # Errors raised by the Merkle verifier propagate to the caller.
        old_sth = self.sth_fixture
        new_sth = client_pb2.SthResponse()
        new_sth.CopyFrom(old_sth)
        new_sth.tree_size = old_sth.tree_size + 1
        new_sth.timestamp = old_sth.timestamp + 1
        new_sth.sha256_root_hash = "a new hash"
        proof = ["some proof the mock does not care about"]
        mock_merkle_verifier = mock.Mock()
        mock_merkle_verifier.verify_tree_consistency.side_effect = (
            error.ConsistencyError("Evil"))
        verifier = verify.LogVerifier(self.key_info_fixture,
                                      mock_merkle_verifier)
        self.assertRaises(error.ConsistencyError,
                          verifier.verify_sth_consistency,
                          old_sth, new_sth, proof)

    def _test_verify_sct(self, proof, chain, fake_timestamp = None):
        """Verify the SCT in testdata file *proof* against the PEM cert *chain*.

        :param proof: testdata file name containing a TLS-encoded SCT.
        :param chain: list of PEM certificate file names (leaf first).
        :param fake_timestamp: if given, overwrites the SCT timestamp to force
                               a signature mismatch.
        :return: the result of LogVerifier.verify_sct.
        """
        sct = client_pb2.SignedCertificateTimestamp()
        tls_message.decode(read_testdata_file(proof), sct)
        if fake_timestamp is not None:
            sct.timestamp = fake_timestamp
        chain = map(lambda name: cert.Certificate.from_pem_file(
            os.path.join(FLAGS.testdata_dir, name)), chain)
        key_info = client_pb2.KeyInfo()
        key_info.type = client_pb2.KeyInfo.ECDSA
        key_info.pem_key = read_testdata_file('ct-server-key-public.pem')
        verifier = verify.LogVerifier(key_info)
        return verifier.verify_sct(sct, chain)

    def _test_verify_embedded_scts(self, chain):
        """Verify the SCTs embedded in the leaf of the PEM cert *chain*."""
        chain = map(lambda name: cert.Certificate.from_pem_file(
            os.path.join(FLAGS.testdata_dir, name)), chain)
        key_info = client_pb2.KeyInfo()
        key_info.type = client_pb2.KeyInfo.ECDSA
        key_info.pem_key = read_testdata_file('ct-server-key-public.pem')
        verifier = verify.LogVerifier(key_info)
        return verifier.verify_embedded_scts(chain)

    def test_verify_sct_valid_signature(self):
        self.assertTrue(self._test_verify_sct(
            'test-cert.proof',
            ['test-cert.pem', 'ca-cert.pem']))

    def test_verify_sct_invalid_signature(self):
        # Tampering with the timestamp invalidates the signature.
        self.assertRaises(error.SignatureError,
                          self._test_verify_sct,
                          'test-cert.proof',
                          ['test-cert.pem', 'ca-cert.pem'],
                          fake_timestamp = 1234567)

    def test_verify_sct_precertificate_valid_signature(self):
        self.assertTrue(self._test_verify_sct(
            'test-embedded-pre-cert.proof',
            ['test-embedded-pre-cert.pem', 'ca-cert.pem']))

    def test_verify_sct_precertificate_invalid_signature(self):
        self.assertRaises(error.SignatureError,
                          self._test_verify_sct,
                          'test-embedded-pre-cert.proof',
                          ['test-embedded-pre-cert.pem', 'ca-cert.pem'],
                          fake_timestamp = 1234567)

    def test_verify_sct_precertificate_with_preca_valid_signature(self):
        self.assertTrue(self._test_verify_sct(
            'test-embedded-with-preca-pre-cert.proof',
            ['test-embedded-with-preca-pre-cert.pem',
             'ca-pre-cert.pem', 'ca-cert.pem']))

    def test_verify_sct_missing_leaf_cert(self):
        # An empty chain cannot be verified.
        self.assertRaises(error.IncompleteChainError,
                          self._test_verify_sct,
                          'test-cert.proof',
                          [])

    def test_verify_sct_missing_issuer_cert(self):
        # A precert SCT needs the issuer to reconstruct the signed entry.
        self.assertRaises(error.IncompleteChainError,
                          self._test_verify_sct,
                          'test-embedded-pre-cert.proof',
                          ['test-embedded-pre-cert.pem'])

    def test_verify_sct_with_preca_missing_issuer_cert(self):
        self.assertRaises(error.IncompleteChainError,
                          self._test_verify_sct,
                          'test-embedded-with-preca-pre-cert.proof',
                          ['test-embedded-with-preca-pre-cert.pem',
                           'ca-pre-cert.pem'])

    def test_verify_embedded_scts_valid_signature(self):
        sct = client_pb2.SignedCertificateTimestamp()
        tls_message.decode(read_testdata_file('test-embedded-pre-cert.proof'),
                           sct)
        result = self._test_verify_embedded_scts(
            ['test-embedded-cert.pem', 'ca-cert.pem'])
        self.assertEqual(result, [(sct, True)])

    def test_verify_embedded_scts_invalid_signature(self):
        # Result is a list of (sct, verified) pairs; the bad cert must not verify.
        result = self._test_verify_embedded_scts(
            ['test-invalid-embedded-cert.pem', 'ca-cert.pem'])
        self.assertFalse(result[0][1])

    def test_verify_embedded_scts_with_preca_valid_signature(self):
        sct = client_pb2.SignedCertificateTimestamp()
        tls_message.decode(
            read_testdata_file('test-embedded-with-preca-pre-cert.proof'),
            sct)
        result = self._test_verify_embedded_scts(
            ['test-embedded-with-preca-cert.pem', 'ca-cert.pem'])
        self.assertEqual(result, [(sct, True)])
class LogVerifierRsaTest(LogVerifierTest, unittest.TestCase):
    """Runs the shared LogVerifierTest suite against an RSA log key."""

    # STH fixture signed with the RSA public key below
    # (Python 2 str.decode("hex") converts the hex literals to raw bytes).
    sth_fixture = client_pb2.SthResponse()
    sth_fixture.tree_size = 1130
    sth_fixture.timestamp = 1442500998291
    sth_fixture.sha256_root_hash = (
        "58f4e84d26f179829da3359a23f2ec519f83e99d9230aad6bfb37e2faa82c663"
        ).decode("hex")
    sth_fixture.tree_head_signature = (
        "040101002595c278829d558feb560c5024048ce1ca9e5329cc79b074307f0b6168dda1"
        "5b27f84c94cce39f8371aa8205d73a7101b434b6aeaf3c852b8471daa05d654463b334"
        "5103c7406dbd4642c8cc89eababa84e9ad663ffb3cc87940c3689d0c2ac6246915f221"
        "5da254981206fed8505eed268bcc94e05cd83c8e8e5a14407a6d15c8071fabaed9728a"
        "02830c6aef95969b0576c7ae09d50bdfc8b0b58fa759458c6d62383d6fe1072c0da103"
        "1baddfa363b58ca78f93f329b1f1a15b9575988974dcba2421b9a1bb2a617d8b3f4046"
        "ead6095f8496075edc686ae4fa672d4974de0fb9326dc3c628f7e44c7675d2c56d1c66"
        "32bbb9e4a69e0a7e34bd1d6dc7b4b2").decode("hex")

    # The matching RSA public key, PEM encoded.
    key_info_fixture = client_pb2.KeyInfo()
    key_info_fixture.type = client_pb2.KeyInfo.RSA
    key_info_fixture.pem_key = (
        "-----BEGIN PUBLIC KEY-----\n"
        "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAolpIHxdSlTXLo1s6H1OC\n"
        "dpSj/4DyHDc8wLG9wVmLqy1lk9fz4ATVmm+/1iN2Nk8jmctUKK2MFUtlWXZBSpym\n"
        "97M7frGlSaQXUWyA3CqQUEuIJOmlEjKTBEiQAvpfDjCHjlV2Be4qTM6jamkJbiWt\n"
        "gnYPhJL6ONaGTiSPm7Byy57iaz/hbckldSOIoRhYBiMzeNoA0DiRZ9KmfSeXZ1rB\n"
        "8y8X5urSW+iBzf2SaOfzBvDpcoTuAaWx2DPazoOl28fP1hZ+kHUYvxbcMjttjauC\n"
        "Fx+JII0dmuZNIwjfeG/GBb9frpSX219k1O4Wi6OEbHEr8at/XQ0y7gTikOxBn/s5\n"
        "wQIDAQAB\n"
        "-----END PUBLIC KEY-----\n")
class LogVerifierEcdsaTest(LogVerifierTest, unittest.TestCase):
    """Runs the shared LogVerifierTest suite against an ECDSA log key,
    plus ECDSA-specific ASN.1 length-corruption tests.
    """

    # STH fixture signed with the ECDSA public key below
    # (Python 2 str.decode("hex") converts the hex literals to raw bytes).
    sth_fixture = client_pb2.SthResponse()
    sth_fixture.tree_size = 42
    sth_fixture.timestamp = 1348589667204
    sth_fixture.sha256_root_hash = (
        "18041bd4665083001fba8c5411d2d748e8abbfdcdfd9218cb02b68a78e7d4c23"
        ).decode("hex")
    sth_fixture.tree_head_signature = (
        "040300483046022100befd8060563763a5e49ba53e6443c13f7624fd6403178113736e"
        "16012aca983e022100f572568dbfe9a86490eb915c4ee16ad5ecd708fed35ed4e5cd1b"
        "2c3f087b4130").decode("hex")

    # The matching ECDSA public key, PEM encoded.
    key_info_fixture = client_pb2.KeyInfo()
    key_info_fixture.type = client_pb2.KeyInfo.ECDSA
    key_info_fixture.pem_key = (
        "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAES0AfBk"
        "jr7b8b19p5Gk8plSAN16wW\nXZyhYsH6FMCEUK60t7pem/ckoPX8hupuaiJzJS0ZQ0SEoJ"
        "GlFxkUFwft5g==\n-----END PUBLIC KEY-----\n")

    def test_verify_sth_for_bad_asn1_length(self):
        """Corrupts the ASN.1 length bytes of the DER signature at fixed
        offsets (5, 7, 42 — specific to this ECDSA fixture) and checks how
        verification reacts.
        """
        verifier = verify.LogVerifier(self.key_info_fixture)
        sth_fixture = self.sth_fixture

        # The byte that encodes the length of the ASN.1 signature sequence
        i = 5

        # Decreasing the length truncates the sequence and causes a decoding
        # error.
        sth = client_pb2.SthResponse()
        sth.CopyFrom(sth_fixture)
        sth.tree_head_signature = (
            sth_fixture.tree_head_signature[:i] +
            chr(ord(sth_fixture.tree_head_signature[i]) - 1) +
            sth_fixture.tree_head_signature[i+1:])
        self.assertRaises(error.EncodingError, verifier.verify_sth, sth)

        # Increasing the length means there are not enough ASN.1 bytes left to
        # decode the sequence, however the ecdsa module silently slices it.
        # TODO(ekasper): contribute a patch to upstream and make the tests fail
        sth = client_pb2.SthResponse()
        sth.CopyFrom(sth_fixture)
        sth.tree_head_signature = (
            sth_fixture.tree_head_signature[:i] +
            chr(ord(sth_fixture.tree_head_signature[i]) + 1) +
            sth_fixture.tree_head_signature[i+1:])
        self.assertTrue(verifier.verify_sth(sth))

        # The byte that encodes the length of the first integer r in the
        # sequence (r, s). Modifying the length corrupts the second integer
        # offset and causes a decoding error.
        i = 7
        sth = client_pb2.SthResponse()
        sth.CopyFrom(sth_fixture)
        sth.tree_head_signature = (
            sth_fixture.tree_head_signature[:i] +
            chr(ord(sth_fixture.tree_head_signature[i]) - 1) +
            sth_fixture.tree_head_signature[i+1:])
        self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
        sth = client_pb2.SthResponse()
        sth.CopyFrom(sth_fixture)
        sth.tree_head_signature = (
            sth_fixture.tree_head_signature[:i] +
            chr(ord(sth_fixture.tree_head_signature[i]) + 1) +
            sth_fixture.tree_head_signature[i+1:])
        self.assertRaises(error.EncodingError, verifier.verify_sth, sth)

        # The byte that encodes the length of the second integer s in the
        # sequence (r, s). Decreasing this length corrupts the integer, however
        # increased length is silently sliced, as above.
        i = 42
        sth = client_pb2.SthResponse()
        sth.CopyFrom(sth_fixture)
        sth.tree_head_signature = (
            sth_fixture.tree_head_signature[:i] +
            chr(ord(sth_fixture.tree_head_signature[i]) - 1) +
            sth_fixture.tree_head_signature[i+1:])
        self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
        sth = client_pb2.SthResponse()
        sth.CopyFrom(sth_fixture)
        sth.tree_head_signature = (
            sth_fixture.tree_head_signature[:i] +
            chr(ord(sth_fixture.tree_head_signature[i]) + 1) +
            sth_fixture.tree_head_signature[i+1:])
        self.assertTrue(verifier.verify_sth(sth))

        # Trailing garbage is correctly detected.
        sth = client_pb2.SthResponse()
        sth.CopyFrom(sth_fixture)
        sth.tree_head_signature = (
            sth_fixture.tree_head_signature[:3] +
            # Correct outer length to include trailing garbage.
            chr(ord(sth_fixture.tree_head_signature[3]) + 1) +
            sth_fixture.tree_head_signature[4:]) + "\x01"
        self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
if __name__ == "__main__":
    # Let gflags consume its flags (e.g. --testdata_dir) before unittest
    # parses the remaining arguments.
    sys.argv = FLAGS(sys.argv)
    unittest.main()
| apache-2.0 |
foursquare/pants | tests/python/pants_test/pants_run_integration_test.py | 1 | 20486 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import configparser
import glob
import os
import shutil
import unittest
from collections import namedtuple
from contextlib import contextmanager
from operator import eq, ne
from threading import Lock
from colors import strip_color
from pants.base.build_environment import get_buildroot
from pants.base.build_file import BuildFile
from pants.fs.archive import ZIP
from pants.subsystem.subsystem import Subsystem
from pants.util.contextutil import environment_as, pushd, temporary_dir
from pants.util.dirutil import safe_mkdir, safe_mkdir_for, safe_open
from pants.util.process_handler import SubprocessProcessHandler, subprocess
from pants_test.testutils.file_test_util import check_symlinks, contains_exact_files
# Outcome of a single pants subprocess invocation: the command line that was
# run, its exit code, the captured (decoded) output streams, and the workdir
# the run used.
PantsResult = namedtuple(
  'PantsResult',
  ['command', 'returncode', 'stdout_data', 'stderr_data', 'workdir'])
def ensure_cached(expected_num_artifacts=None):
  """Decorator for asserting cache writes in an integration test.

  :param expected_num_artifacts: Expected number of artifacts to be in the task's
                                 cache after running the test. If unspecified, will
                                 assert that the number of artifacts in the cache is
                                 non-zero.
  """
  def decorator(test_fn):
    def wrapper(self, *args, **kwargs):
      with temporary_dir() as artifact_cache:
        # Point the task's cache at a fresh temporary dir and append the flag
        # to the test's positional args.
        cache_args = '--cache-write-to=["{}"]'.format(artifact_cache)

        test_fn(self, *args + (cache_args,), **kwargs)

        # Count every file written anywhere under the cache dir.
        num_artifacts = 0
        for (root, _, files) in os.walk(artifact_cache):
          print(root, files)
          num_artifacts += len(files)

        if expected_num_artifacts is None:
          self.assertNotEqual(num_artifacts, 0)
        else:
          self.assertEqual(num_artifacts, expected_num_artifacts)
    return wrapper
  return decorator
def ensure_resolver(f):
  """A decorator for running an integration test with ivy and coursier as the resolver."""
  def wrapper(self, *args, **kwargs):
    # Run the wrapped test once per resolver implementation, selected via
    # the PANTS_RESOLVER_RESOLVER environment variable.
    for env_var_value in ('ivy', 'coursier'):
      with environment_as(HERMETIC_ENV='PANTS_RESOLVER_RESOLVER', PANTS_RESOLVER_RESOLVER=env_var_value):
        f(self, *args, **kwargs)

  return wrapper
def ensure_daemon(f):
  """A decorator for running an integration test with and without the daemon enabled."""
  def wrapper(self, *args, **kwargs):
    # Run the wrapped test twice: once without and once with pantsd.
    for enable_daemon in ('false', 'true',):
      with temporary_dir() as subprocess_dir:
        env = {
          'HERMETIC_ENV': 'PANTS_ENABLE_PANTSD,PANTS_ENABLE_V2_ENGINE,PANTS_SUBPROCESSDIR',
          'PANTS_ENABLE_PANTSD': enable_daemon,
          'PANTS_ENABLE_V2_ENGINE': enable_daemon,
          'PANTS_SUBPROCESSDIR': subprocess_dir,
        }
        with environment_as(**env):
          try:
            f(self, *args, **kwargs)
          finally:
            # Always tear the daemon down again so subsequent runs start clean.
            if enable_daemon:
              self.assert_success(self.run_pants(['kill-pantsd']))

  return wrapper
class PantsRunIntegrationTest(unittest.TestCase):
  """A base class useful for integration tests for targets in the same repo."""

  PANTS_SUCCESS_CODE = 0
  PANTS_SCRIPT_NAME = 'pants'

  @classmethod
  def hermetic(cls):
    """Subclasses may override to acknowledge that they are hermetic.

    That is, that they should run without reading the real pants.ini.
    """
    return False

  @classmethod
  def hermetic_env_whitelist(cls):
    """A whitelist of environment variables to propagate to tests when hermetic=True."""
    return [
      # Used in the wrapper script to locate a rust install.
      'HOME',
      'PANTS_PROFILE',
    ]

  @classmethod
  def has_python_version(cls, version):
    """Returns true if the current system has the specified version of python.

    :param version: A python version string, such as 2.7, 3.
    """
    return cls.python_interpreter_path(version) is not None

  @classmethod
  def python_interpreter_path(cls, version):
    """Returns the interpreter path if the current system has the specified version of python.

    :param version: A python version string, such as 2.7, 3.
    """
    try:
      py_path = subprocess.check_output(['python%s' % version,
                                         '-c',
                                         'import sys; print(sys.executable)']).strip()
      return os.path.realpath(py_path)
    except OSError:
      # The interpreter binary does not exist on this machine.
      return None

  def setUp(self):
    super(PantsRunIntegrationTest, self).setUp()
    # Some integration tests rely on clean subsystem state (e.g., to set up a DistributionLocator).
    Subsystem.reset()

  def temporary_workdir(self, cleanup=True):
    """Returns a temporary_dir context manager rooted under <buildroot>/.pants.d/tmp."""
    # We can hard-code '.pants.d' here because we know that will always be its value
    # in the pantsbuild/pants repo (e.g., that's what we .gitignore in that repo).
    # Grabbing the pants_workdir config would require this pants's config object,
    # which we don't have a reference to here.
    root = os.path.join(get_buildroot(), '.pants.d', 'tmp')
    safe_mkdir(root)
    return temporary_dir(root_dir=root, cleanup=cleanup, suffix='.pants.d')

  def temporary_cachedir(self):
    return temporary_dir(suffix='__CACHEDIR')

  def temporary_sourcedir(self):
    return temporary_dir(root_dir=get_buildroot())

  @contextmanager
  def source_clone(self, source_dir):
    """Yields a copy of source_dir under the buildroot, rewriting BUILD file specs to match."""
    with self.temporary_sourcedir() as clone_dir:
      target_spec_dir = os.path.relpath(clone_dir)

      for dir_path, dir_names, file_names in os.walk(source_dir):
        clone_dir_path = os.path.join(clone_dir, os.path.relpath(dir_path, source_dir))
        for dir_name in dir_names:
          os.mkdir(os.path.join(clone_dir_path, dir_name))
        for file_name in file_names:
          with open(os.path.join(dir_path, file_name), 'r') as f:
            content = f.read()
          if BuildFile._is_buildfile_name(file_name):
            # Point specs in the cloned BUILD files at the clone location.
            content = content.replace(source_dir, target_spec_dir)
          with open(os.path.join(clone_dir_path, file_name), 'w') as f:
            f.write(content)

      yield clone_dir

  # Incremented each time we spawn a pants subprocess.
  # Appended to PANTS_PROFILE in the called pants process, so that each subprocess
  # writes to its own profile file, instead of all stomping on the parent process's profile.
  _profile_disambiguator = 0
  _profile_disambiguator_lock = Lock()

  @classmethod
  def _get_profile_disambiguator(cls):
    with cls._profile_disambiguator_lock:
      ret = cls._profile_disambiguator
      cls._profile_disambiguator += 1
      return ret

  def get_cache_subdir(self, cache_dir, subdir_glob='*/', other_dirs=()):
    """Check that there is only one entry of `cache_dir` which matches the glob
    specified by `subdir_glob`, excluding `other_dirs`, and
    return it.

    :param str cache_dir: absolute path to some directory.
    :param str subdir_glob: string specifying a glob for (one level down)
                            subdirectories of `cache_dir`.
    :param list other_dirs: absolute paths to subdirectories of `cache_dir`
                            which must exist and match `subdir_glob`.
    :return: Assert that there is a single remaining directory entry matching
             `subdir_glob` after removing `other_dirs`, and return it.
             This method does not check if its arguments or return values are
             files or directories. If `subdir_glob` has a trailing slash, so
             will the return value of this method.
    """
    subdirs = set(glob.glob(os.path.join(cache_dir, subdir_glob)))
    other_dirs = set(other_dirs)
    self.assertTrue(other_dirs.issubset(subdirs))
    remaining_dirs = subdirs - other_dirs
    self.assertEqual(len(remaining_dirs), 1)
    return list(remaining_dirs)[0]

  def run_pants_with_workdir_without_waiting(self, command, workdir, config=None, extra_env=None,
                                             build_root=None, print_exception_stacktrace=True,
                                             **kwargs):
    """Launches pants in a subprocess without waiting for it.

    :returns: a (pants_command, subprocess.Popen) pair.
    """
    args = [
      '--no-pantsrc',
      '--pants-workdir={}'.format(workdir),
      '--kill-nailguns',
      '--print-exception-stacktrace={}'.format(print_exception_stacktrace),
    ]

    if self.hermetic():
      args.extend(['--pants-config-files=[]',
                   # Turn off cache globally. A hermetic integration test shouldn't rely on cache,
                   # or we have no idea if it's actually testing anything.
                   '--no-cache-read', '--no-cache-write',
                   # Turn cache on just for tool bootstrapping, for performance.
                   '--cache-bootstrap-read', '--cache-bootstrap-write'
                   ])

    if config:
      # Materialize the config dict as a pants.ini in the workdir.
      config_data = config.copy()
      # TODO(python3port): RawConfigParser is legacy. Investigate updating to modern API.
      ini = configparser.RawConfigParser(defaults=config_data.pop('DEFAULT', None))
      for section, section_config in config_data.items():
        ini.add_section(section)
        for key, value in section_config.items():
          ini.set(section, key, value)
      ini_file_name = os.path.join(workdir, 'pants.ini')
      with safe_open(ini_file_name, mode='w') as fp:
        ini.write(fp)
      args.append('--pants-config-files=' + ini_file_name)

    pants_script = os.path.join(build_root or get_buildroot(), self.PANTS_SCRIPT_NAME)

    # Permit usage of shell=True and string-based commands to allow e.g. `./pants | head`.
    if kwargs.get('shell') is True:
      assert not isinstance(command, list), 'must pass command as a string when using shell=True'
      pants_command = ' '.join([pants_script, ' '.join(args), command])
    else:
      pants_command = [pants_script] + args + command

    # Only whitelisted entries will be included in the environment if hermetic=True.
    if self.hermetic():
      env = dict()
      for h in self.hermetic_env_whitelist():
        value = os.getenv(h)
        if value is not None:
          env[h] = value
      hermetic_env = os.getenv('HERMETIC_ENV')
      if hermetic_env:
        for h in hermetic_env.strip(',').split(','):
          env[h] = os.getenv(h)
    else:
      env = os.environ.copy()
    if extra_env:
      env.update(extra_env)

    # Don't overwrite the profile of this process in the called process.
    # Instead, write the profile into a sibling file.
    if env.get('PANTS_PROFILE'):
      prof = '{}.{}'.format(env['PANTS_PROFILE'], self._get_profile_disambiguator())
      env['PANTS_PROFILE'] = prof
      # Make a note the subprocess command, so the user can correctly interpret the profile files.
      with open('{}.cmd'.format(prof), 'w') as fp:
        fp.write(' '.join(pants_command))

    return pants_command, subprocess.Popen(pants_command, env=env, stdin=subprocess.PIPE,
                                           stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)

  def run_pants_with_workdir(self, command, workdir, config=None, stdin_data=None, tee_output=False, **kwargs):
    """Runs pants in a subprocess with the given workdir and waits for completion.

    :returns: a PantsResult instance.
    """
    if config:
      kwargs["config"] = config
    pants_command, proc = self.run_pants_with_workdir_without_waiting(command, workdir, **kwargs)

    communicate_fn = proc.communicate
    if tee_output:
      communicate_fn = SubprocessProcessHandler(proc).communicate_teeing_stdout_and_stderr

    (stdout_data, stderr_data) = communicate_fn(stdin_data)

    return PantsResult(pants_command, proc.returncode, stdout_data.decode("utf-8"),
                       stderr_data.decode("utf-8"), workdir)

  def run_pants(self, command, config=None, stdin_data=None, extra_env=None, **kwargs):
    """Runs pants in a subprocess.

    :param list command: A list of command line arguments coming after `./pants`.
    :param config: Optional data for a generated ini file. A map of <section-name> ->
    map of key -> value. If order in the ini file matters, this should be an OrderedDict.
    :param kwargs: Extra keyword args to pass to `subprocess.Popen`.
    :returns a PantsResult instance.
    """
    with self.temporary_workdir() as workdir:
      return self.run_pants_with_workdir(
        command,
        workdir,
        config,
        stdin_data=stdin_data,
        extra_env=extra_env,
        **kwargs
      )

  @contextmanager
  def pants_results(self, command, config=None, stdin_data=None, extra_env=None, **kwargs):
    """Similar to run_pants in that it runs pants in a subprocess, but yields in order to give
    callers a chance to do any necessary validations on the workdir.

    :param list command: A list of command line arguments coming after `./pants`.
    :param config: Optional data for a generated ini file. A map of <section-name> ->
    map of key -> value. If order in the ini file matters, this should be an OrderedDict.
    :param kwargs: Extra keyword args to pass to `subprocess.Popen`.
    :returns a PantsResult instance.
    """
    with self.temporary_workdir() as workdir:
      yield self.run_pants_with_workdir(
        command,
        workdir,
        config,
        stdin_data=stdin_data,
        extra_env=extra_env,
        **kwargs
      )

  def bundle_and_run(self, target, bundle_name, bundle_jar_name=None, bundle_options=None,
                     args=None,
                     expected_bundle_jar_content=None,
                     expected_bundle_content=None,
                     library_jars_are_symlinks=True):
    """Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.

    :param target: target name to compile
    :param bundle_name: resulting bundle filename (minus .zip extension)
    :param bundle_jar_name: monolithic jar filename (minus .jar extension), if None will be the
      same as bundle_name
    :param bundle_options: additional options for bundle
    :param args: optional arguments to pass to executable
    :param expected_bundle_content: verify the bundle zip content
    :param expected_bundle_jar_content: verify the bundle jar content
    :param library_jars_are_symlinks: verify library jars are symlinks if True, and actual
      files if False. Default `True` because we always create symlinks for both external and internal
      dependencies, only exception is when shading is used.
    :return: stdout as a string on success, raises an Exception on error
    """
    bundle_jar_name = bundle_jar_name or bundle_name
    bundle_options = bundle_options or []
    bundle_options = ['bundle.jvm'] + bundle_options + ['--archive=zip', target]
    with self.pants_results(bundle_options) as pants_run:
      self.assert_success(pants_run)

      self.assertTrue(check_symlinks('dist/{bundle_name}-bundle/libs'.format(bundle_name=bundle_name),
                                     library_jars_are_symlinks))
      # TODO(John Sirois): We need a zip here to suck in external library classpath elements
      # pointed to by symlinks in the run_pants ephemeral tmpdir.  Switch run_pants to be a
      # contextmanager that yields its results while the tmpdir workdir is still active and change
      # this test back to using an un-archived bundle.
      with temporary_dir() as workdir:
        ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
        if expected_bundle_content:
          self.assertTrue(contains_exact_files(workdir, expected_bundle_content))
        if expected_bundle_jar_content:
          with temporary_dir() as check_bundle_jar_dir:
            bundle_jar = os.path.join(workdir, '{bundle_jar_name}.jar'
                                      .format(bundle_jar_name=bundle_jar_name))
            ZIP.extract(bundle_jar, check_bundle_jar_dir)
            self.assertTrue(contains_exact_files(check_bundle_jar_dir, expected_bundle_jar_content))

        optional_args = []
        if args:
          optional_args = args
        java_run = subprocess.Popen(['java',
                                     '-jar',
                                     '{bundle_jar_name}.jar'.format(bundle_jar_name=bundle_jar_name)]
                                    + optional_args,
                                    stdout=subprocess.PIPE,
                                    cwd=workdir)

        stdout, _ = java_run.communicate()
      java_returncode = java_run.returncode
      # Fix: use assertEqual; assertEquals is a deprecated alias (and the rest
      # of this file uses assertEqual).
      self.assertEqual(java_returncode, 0)
      return stdout

  def assert_success(self, pants_run, msg=None):
    self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=True, msg=msg)

  def assert_failure(self, pants_run, msg=None):
    self.assert_result(pants_run, self.PANTS_SUCCESS_CODE, expected=False, msg=msg)

  def assert_result(self, pants_run, value, expected=True, msg=None):
    """Asserts that pants_run's returncode is (or is not) `value`, with a rich failure message."""
    check, assertion = (eq, self.assertEqual) if expected else (ne, self.assertNotEqual)
    if check(pants_run.returncode, value):
      return

    details = [msg] if msg else []
    details.append(' '.join(pants_run.command))
    details.append('returncode: {returncode}'.format(returncode=pants_run.returncode))

    def indent(content):
      return '\n\t'.join(content.splitlines())

    details.append('stdout:\n\t{stdout}'.format(stdout=indent(pants_run.stdout_data)))
    details.append('stderr:\n\t{stderr}'.format(stderr=indent(pants_run.stderr_data)))
    error_msg = '\n'.join(details)

    assertion(value, pants_run.returncode, error_msg)

  def normalize(self, s):
    """Removes escape sequences (e.g. colored output) and all whitespace from string s."""
    return ''.join(strip_color(s).split())

  @contextmanager
  def file_renamed(self, prefix, test_name, real_name):
    """Temporarily renames prefix/test_name to prefix/real_name for the duration of the block."""
    real_path = os.path.join(prefix, real_name)
    test_path = os.path.join(prefix, test_name)
    try:
      os.rename(test_path, real_path)
      yield
    finally:
      os.rename(real_path, test_path)

  @contextmanager
  def temporary_file_content(self, path, content):
    """Temporarily write content to a file for the purpose of an integration test."""
    path = os.path.realpath(path)
    assert path.startswith(
      os.path.realpath(get_buildroot())), 'cannot write paths outside of the buildroot!'
    assert not os.path.exists(path), 'refusing to overwrite an existing path!'
    with open(path, 'wb') as fh:
      fh.write(content)
    try:
      yield
    finally:
      os.unlink(path)

  @contextmanager
  def mock_buildroot(self, dirs_to_copy=None):
    """Construct a mock buildroot and return a helper object for interacting with it."""
    Manager = namedtuple('Manager', 'write_file pushd new_buildroot')
    # N.B. BUILD.tools, contrib, 3rdparty needs to be copied vs symlinked to avoid
    # symlink prefix check error in v1 and v2 engine.
    files_to_copy = ('BUILD.tools',)
    files_to_link = ('.pants.d',
                     'build-support',
                     'pants',
                     'pants-plugins',
                     'pants.ini',
                     'pants.travis-ci.ini',
                     'rust-toolchain',
                     'src')
    dirs_to_copy = ('3rdparty', 'contrib') + tuple(dirs_to_copy or [])

    with self.temporary_workdir() as tmp_dir:
      for filename in files_to_copy:
        shutil.copy(os.path.join(get_buildroot(), filename), os.path.join(tmp_dir, filename))

      for dirname in dirs_to_copy:
        shutil.copytree(os.path.join(get_buildroot(), dirname), os.path.join(tmp_dir, dirname))

      for filename in files_to_link:
        os.symlink(os.path.join(get_buildroot(), filename), os.path.join(tmp_dir, filename))

      def write_file(file_path, contents):
        # Fix: split on os.sep (the path component separator), not os.pathsep
        # (the PATH entry separator, ':'), so nested paths map to nested dirs.
        full_file_path = os.path.join(tmp_dir, *file_path.split(os.sep))
        safe_mkdir_for(full_file_path)
        with open(full_file_path, 'wb') as fh:
          fh.write(contents)

      @contextmanager
      def dir_context():
        with pushd(tmp_dir):
          yield

      yield Manager(write_file, dir_context, tmp_dir)

  def do_command(self, *args, **kwargs):
    """Wrapper around run_pants method.

    :param args: command line arguments used to run pants
    :param kwargs: handles 1 key
      success - indicate whether to expect pants run to succeed or fail.
    :return: a PantsResult object
    """
    success = kwargs.get('success', True)
    cmd = []
    cmd.extend(list(args))
    pants_run = self.run_pants(cmd)
    if success:
      self.assert_success(pants_run)
    else:
      self.assert_failure(pants_run)
    return pants_run
| apache-2.0 |
frodrigo/osmose-backend | analysers/analyser_merge_healthcare_FR_finess.py | 3 | 7042 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
###########################################################################
## ##
## Copyrights Frédéric Rodrigo 2018 ##
## ##
## This program is free software: you can redistribute it and/or modify ##
## it under the terms of the GNU General Public License as published by ##
## the Free Software Foundation, either version 3 of the License, or ##
## (at your option) any later version. ##
## ##
## This program is distributed in the hope that it will be useful, ##
## but WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ##
## GNU General Public License for more details. ##
## ##
## You should have received a copy of the GNU General Public License ##
## along with this program. If not, see <http://www.gnu.org/licenses/>. ##
## ##
###########################################################################
import json
import csv
import io
from modules.OsmoseTranslation import T_
from .Analyser_Merge_Dynamic import Analyser_Merge_Dynamic, SubAnalyser_Merge_Dynamic
from .Analyser_Merge import SourceDataGouv, CSV, Load, Conflate, Select, Mapping
class Analyser_Merge_Healthcare_FR_Finess(Analyser_Merge_Dynamic):
    """Dynamic merge analyser for French healthcare facilities (FINESS).

    Reads merge_data/healthcare_FR_finess.mapping.json and registers one
    SubAnalyser_Merge_Healthcare_FR_Finess per mapping entry.
    """

    def __init__(self, config, logger = None):
        Analyser_Merge_Dynamic.__init__(self, config, logger)

        # Select the map projection (SRID) and the FINESS "departement"
        # filter matching the database schema: overseas territories use
        # dedicated projections and pseudo-department codes 9A..9F.
        if config.db_schema == 'france_guadeloupe':
            srid = 2970
            is_in = lambda dep: dep == "9A"
        elif config.db_schema == 'france_guyane':
            srid = 2972
            is_in = lambda dep: dep == "9C"
        elif config.db_schema == 'france_reunion':
            srid = 2975
            is_in = lambda dep: dep == "9D"
        elif config.db_schema == 'france_martinique':
            srid = 2973
            is_in = lambda dep: dep == "9B"
        elif config.db_schema == 'france_saintpierreetmiquelon':
            srid = 32621
            is_in = lambda dep: dep == "9E"
        elif config.db_schema == 'france_mayotte':
            srid = 32738
            is_in = lambda dep: dep == "9F"
        else:
            # Metropolitan France, Lambert 93.
            # NOTE(review): this filter excludes 9A-9D but not 9E/9F, so
            # Saint-Pierre-et-Miquelon and Mayotte rows would also match the
            # metropolitan schema -- confirm whether that is intentional.
            srid = 2154
            is_in = lambda dep: dep not in ("9A", "9B", "9C", "9D")

        # One sub-analyser class is generated per entry of the mapping file.
        mapingfile = json.loads(open("merge_data/healthcare_FR_finess.mapping.json").read())
        for r in mapingfile:
            self.classFactory(SubAnalyser_Merge_Healthcare_FR_Finess, r['classes'], srid, is_in, r['categories'], r['items'], r.get('missing_osm', True), r['classes'], r['level'], r['title:fr'], r['tags_select'], r['tags_generate1'], r['tags_generate2'])
class SubAnalyser_Merge_Healthcare_FR_Finess(SubAnalyser_Merge_Dynamic):
    """One FINESS merge sub-analyser, parameterized by facility category.

    Conflates facilities from the national FINESS export against OSM objects
    selected by ``tags_select``, matching on the ``ref:FR:FINESS`` tag within
    a 200 m radius.
    """

    def __init__(self, config, error_file, logger, srid, is_in, categories, items, missing_osm, classs, level, title, tags_select, tags_generate1, tags_generate2):
        SubAnalyser_Merge_Dynamic.__init__(self, config, error_file, logger)
        # Issue classes: official facility missing from OSM (classs+1), OSM
        # object without a valid ref (classs+2, optional), and a suggested
        # merge between the two (classs+3).
        self.def_class_missing_official(item =str(items[0]), id = classs+1, level = level, tags = ['merge', 'public equipment', 'fix:imagery', 'fix:survey'],
            title = T_('{0} not integrated', title))
        if missing_osm is not False:
            self.def_class_missing_osm(item =str(items[1]), id = classs+2, level = level, tags = ['merge', 'public equipment', 'fix:chair'],
                title = T_('{0} without tag "{1}" or invalid', title, 'ref:FR:FINESS'))
        self.def_class_possible_merge(item =str(items[0]+1), id = classs+3, level = level, tags = ['merge', 'public equipment', 'fix:chair', 'fix:survey'],
            title = T_('{0}, integration suggestion', title))

        self.init(
            "https://www.data.gouv.fr/fr/datasets/finess-extraction-du-fichier-des-etablissements/",
            "FINESS Extraction du Fichier des établissements",
            CSV(Source_Finess(
                attribution="Le ministère des solidarités et de la santé",
                encoding="ISO-8859-1",
                dataset="53699569a3a729239d2046eb",
                resource="51a04fc8-50fa-4844-9b92-b51c69be742e")),
            # Keep only rows of the requested categories located in the
            # departments covered by this database schema.
            Load("coordxet", "coordyet", srid = srid,
                select = {"categetab": categories},
                where = lambda res: is_in(res["departement"])),
            Conflate(
                select = Select(
                    types = ["nodes", "ways", "relations"],
                    tags = tags_select),
                osmRef = "ref:FR:FINESS",
                conflationDistance = 200,
                mapping = Mapping(
                    static1 = tags_generate1,
                    static2 = dict({"source": self.source}, **tags_generate2),
                    mapping1 = {"ref:FR:FINESS": "nofinesset"},
                    mapping2 = {
                        "type:FR:FINESS": "categetab",
                        "ref:FR:SIRET": "siret",
                        "phone": lambda fields: self.phone(fields["telephone"]),
                        "fax": lambda fields: self.phone(fields["telecopie"]),
                    },
                    # Human-readable description built from name/address fields,
                    # skipping absent values.
                    text = lambda tags, fields: {"en": ", ".join(filter(lambda i: i not in (None, 'None'), [fields["rs"], fields["rslongue"], fields["complrs"], fields["compldistrib"], fields["numvoie"], fields["typvoie"], fields["voie"], fields["compvoie"], fields["lieuditbp"], fields["ligneacheminement"], fields["libcategetab"], fields["numuai"]]))} )))

    def phone(self, number):
        """Convert a 10-digit French national number ("0xxxxxxxxx") to
        international +33 form; returns None (tag dropped) for any other value."""
        if number and len(number) == 10 and number[0] == "0":
            return "+33" + number[1:]
class Source_Finess(SourceDataGouv):
    """data.gouv.fr source that rebuilds a regular CSV from the raw FINESS export.

    The upstream file interleaves two record types tagged in the first
    column: 'structureet' (one row per facility) and 'geolocalisation'
    (coordinates keyed by FINESS id). This class joins them into a single
    flat CSV with a synthetic header row.
    """
    def open(self):
        # Cheat the parent open
        # NOTE(review): the source is declared ISO-8859-1 at construction but
        # the encoding is switched to UTF-8 before delegating -- presumably
        # the cached/converted download is UTF-8; confirm against
        # SourceDataGouv internals.
        self.encoding = 'UTF-8'
        f = super().open()
        csvreader = csv.reader(f, delimiter=u';')
        # Synthetic header for the facility rows (the raw file has none);
        # the trailing names cover the coordinate columns appended below.
        structureet = [u'nofinesset,nofinessej,rs,rslongue,complrs,compldistrib,numvoie,typvoie,voie,compvoie,lieuditbp,commune,departement,libdepartement,ligneacheminement,telephone,telecopie,categetab,libcategetab,categagretab,libcategagretab,siret,codeape,codemft,libmft,codesph,libsph,dateouv,dateautor,datemaj,numuai,coordxet,coordyet,sourcecoordet,datemajcoord'.split(',')]
        geolocalisation = {}
        # Split the two interleaved record types on their leading tag column.
        for row in csvreader:
            if row[0] == 'structureet':
                structureet.append(row[1:])
            elif row[0] == 'geolocalisation':
                geolocalisation[row[1]] = row[2:]
        # Join: append the coordinate columns to each facility row, keyed on
        # the FINESS id (first column). Rows without coordinates get nothing.
        for row in structureet:
            row += geolocalisation.get(row[0], [])
        # Serialize back to an in-memory standard (comma-separated) CSV for
        # the generic CSV loader.
        csvfile = io.StringIO()
        writer = csv.writer(csvfile)
        for row in structureet:
            writer.writerow(row)
        csvfile.seek(0)
        return csvfile
| gpl-3.0 |
nitin-cherian/LifeLongLearning | Python/Experiments/JINJA/RealPython/jinja_env/lib/python3.5/site-packages/pip/_vendor/requests/packages/urllib3/connectionpool.py | 359 | 33591 | from __future__ import absolute_import
import errno
import logging
import sys
import warnings
from socket import error as SocketError, timeout as SocketTimeout
import socket
try: # Python 3
from queue import LifoQueue, Empty, Full
except ImportError:
from Queue import LifoQueue, Empty, Full
# Queue is imported for side effects on MS Windows
import Queue as _unused_module_Queue # noqa: unused
from .exceptions import (
ClosedPoolError,
ProtocolError,
EmptyPoolError,
HeaderParsingError,
HostChangedError,
LocationValueError,
MaxRetryError,
ProxyError,
ReadTimeoutError,
SSLError,
TimeoutError,
InsecureRequestWarning,
NewConnectionError,
)
from .packages.ssl_match_hostname import CertificateError
from .packages import six
from .connection import (
port_by_scheme,
DummyConnection,
HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
HTTPException, BaseSSLError,
)
from .request import RequestMethods
from .response import HTTPResponse
from .util.connection import is_connection_dropped
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host, Url
xrange = six.moves.xrange
log = logging.getLogger(__name__)
_Default = object()
# Pool objects
class ConnectionPool(object):
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
    """

    #: Overridden by subclasses ('http' / 'https').
    scheme = None
    #: Queue type holding idle connections; LIFO keeps recently used,
    #: still-warm connections at the front.
    QueueCls = LifoQueue

    def __init__(self, host, port=None):
        if not host:
            raise LocationValueError("No host specified.")

        # Strip brackets from IPv6 literals: httplib doubles them up on the
        # Host header when a port is also passed. For backward compatibility
        # we cannot assert that port is not None here, but callers should
        # never pass ``None`` as the port.
        self.host = host.strip('[]')
        self.port = port

    def __str__(self):
        return '{0}(host={1!r}, port={2!r})'.format(
            type(self).__name__, self.host, self.port)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        # Return False to re-raise any potential exceptions
        return False

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """
        pass
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""
Thread-safe connection pool for one host.
:param host:
Host used for this HTTP Connection (e.g. "localhost"), passed into
:class:`httplib.HTTPConnection`.
:param port:
Port used for this HTTP Connection (None is equivalent to 80), passed
into :class:`httplib.HTTPConnection`.
:param strict:
Causes BadStatusLine to be raised if the status line can't be parsed
as a valid HTTP/1.0 or 1.1 status line, passed into
:class:`httplib.HTTPConnection`.
.. note::
Only works in Python 2. This parameter is ignored in Python 3.
:param timeout:
Socket timeout in seconds for each individual connection. This can
be a float or integer, which sets the timeout for the HTTP request,
or an instance of :class:`urllib3.util.Timeout` which gives you more
fine-grained control over request timeouts. After the constructor has
been parsed, this is always a `urllib3.util.Timeout` object.
:param maxsize:
Number of connections to save that can be reused. More than 1 is useful
in multithreaded situations. If ``block`` is set to False, more
connections will be created but they will not be saved once they've
been used.
:param block:
If set to True, no more than ``maxsize`` connections will be used at
a time. When no free connections are available, the call will block
until a connection has been released. This is a useful side effect for
particular multithreaded situations where one does not want to use more
than maxsize connections per host to prevent flooding.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param retries:
Retry configuration to use by default with requests in this pool.
:param _proxy:
Parsed proxy URL, should not be used directly, instead, see
:class:`urllib3.connectionpool.ProxyManager`"
:param _proxy_headers:
A dictionary with proxy headers, should not be used directly,
instead, see :class:`urllib3.connectionpool.ProxyManager`"
:param \**conn_kw:
Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
:class:`urllib3.connection.HTTPSConnection` instances.
"""
scheme = 'http'
ConnectionCls = HTTPConnection
ResponseCls = HTTPResponse
def __init__(self, host, port=None, strict=False,
             timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
             headers=None, retries=None,
             _proxy=None, _proxy_headers=None,
             **conn_kw):
    ConnectionPool.__init__(self, host, port)
    RequestMethods.__init__(self, headers)

    self.strict = strict

    # Normalize a bare int/float timeout into a Timeout object.
    if not isinstance(timeout, Timeout):
        timeout = Timeout.from_float(timeout)

    if retries is None:
        retries = Retry.DEFAULT

    self.timeout = timeout
    self.retries = retries

    self.pool = self.QueueCls(maxsize)
    self.block = block

    self.proxy = _proxy
    self.proxy_headers = _proxy_headers or {}

    # Fill the queue up so that doing get() on it will block properly
    for _ in xrange(maxsize):
        self.pool.put(None)

    # These are mostly for testing and debugging purposes.
    self.num_connections = 0
    self.num_requests = 0
    self.conn_kw = conn_kw

    if self.proxy:
        # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
        # We cannot know if the user has added default socket options, so we cannot replace the
        # list.
        self.conn_kw.setdefault('socket_options', [])
def _new_conn(self):
    """
    Return a fresh :class:`HTTPConnection`.
    """
    # Counter is for debugging/logging only.
    self.num_connections += 1
    log.info("Starting new HTTP connection (%d): %s",
             self.num_connections, self.host)

    conn = self.ConnectionCls(host=self.host, port=self.port,
                              timeout=self.timeout.connect_timeout,
                              strict=self.strict, **self.conn_kw)
    return conn
def _get_conn(self, timeout=None):
    """
    Get a connection. Will return a pooled connection if one is available.

    If no connections are available and :prop:`.block` is ``False``, then a
    fresh connection is returned.

    :param timeout:
        Seconds to wait before giving up and raising
        :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
        :prop:`.block` is ``True``.

    :raises ClosedPoolError: if :meth:`close` has already run
        (``self.pool`` was replaced with None).
    """
    conn = None
    try:
        conn = self.pool.get(block=self.block, timeout=timeout)

    except AttributeError:  # self.pool is None
        raise ClosedPoolError(self, "Pool is closed.")

    except Empty:
        if self.block:
            raise EmptyPoolError(self,
                                 "Pool reached maximum size and no more "
                                 "connections are allowed.")
        pass  # Oh well, we'll create a new connection then

    # If this is a persistent connection, check if it got disconnected
    if conn and is_connection_dropped(conn):
        log.info("Resetting dropped connection: %s", self.host)
        conn.close()
        if getattr(conn, 'auto_open', 1) == 0:
            # This is a proxied connection that has been mutated by
            # httplib._tunnel() and cannot be reused (since it would
            # attempt to bypass the proxy)
            conn = None

    # A queue slot may legitimately hold None (pre-filled in __init__);
    # in that case, or after a drop, create a fresh connection.
    return conn or self._new_conn()
def _put_conn(self, conn):
    """
    Put a connection back into the pool.

    :param conn:
        Connection object for the current host and port as returned by
        :meth:`._new_conn` or :meth:`._get_conn`.

    If the pool is already full, the connection is closed and discarded
    because we exceeded maxsize. If connections are discarded frequently,
    then maxsize should be increased.

    If the pool is closed, then the connection will be closed and discarded.
    """
    try:
        self.pool.put(conn, block=False)
        return  # Everything is dandy, done.
    except AttributeError:
        # self.pool is None.
        pass
    except Full:
        # This should never happen if self.block == True
        log.warning(
            "Connection pool is full, discarding connection: %s",
            self.host)

    # Connection never got put back into the pool, close it.
    if conn:
        conn.close()
def _validate_conn(self, conn):
    """
    Called right before a request is made, after the socket is created.

    Hook for subclasses (the HTTPS pool forces an early connect here);
    the base implementation does nothing.
    """
    pass
def _prepare_proxy(self, conn):
    """Hook to set up *conn* for use through a proxy; the HTTPS subclass
    establishes the CONNECT tunnel here."""
    # Nothing to do for HTTP connections.
    pass
def _get_timeout(self, timeout):
    """Normalize *timeout* into an independent :class:`urllib3.util.Timeout`."""
    # Sentinel means "use this pool's default timeout".
    if timeout is _Default:
        return self.timeout.clone()

    # Already a Timeout: hand back a copy so callers can't mutate ours.
    if isinstance(timeout, Timeout):
        return timeout.clone()

    # Bare int/float (legacy API, may be removed later): wrap it.
    return Timeout.from_float(timeout)
def _raise_timeout(self, err, url, timeout_value):
    """Is the error actually a timeout? Will raise a ReadTimeout or pass"""

    if isinstance(err, SocketTimeout):
        raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

    # See the above comment about EAGAIN in Python 3. In Python 2 we have
    # to specifically catch it and throw the timeout error
    if hasattr(err, 'errno') and err.errno in _blocking_errnos:
        raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)

    # Catch possible read timeouts thrown as SSL errors. If not the
    # case, rethrow the original. We need to do this because of:
    # http://bugs.python.org/issue10272
    if 'timed out' in str(err) or 'did not complete (read)' in str(err):  # Python 2.6
        raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
def _make_request(self, conn, method, url, timeout=_Default, chunked=False,
                  **httplib_request_kw):
    """
    Perform a request on a given urllib connection object taken from our
    pool.

    :param conn:
        a connection from one of our connection pools

    :param timeout:
        Socket timeout in seconds for the request. This can be a
        float or integer, which will set the same timeout value for
        the socket connect and the socket read, or an instance of
        :class:`urllib3.util.Timeout`, which gives you more fine-grained
        control over your timeouts.

    :return: the raw httplib response object (not yet wrapped).
    """
    # Counter is for debugging/testing only.
    self.num_requests += 1

    timeout_obj = self._get_timeout(timeout)
    timeout_obj.start_connect()
    conn.timeout = timeout_obj.connect_timeout

    # Trigger any extra validation we need to do.
    try:
        self._validate_conn(conn)
    except (SocketTimeout, BaseSSLError) as e:
        # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
        self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
        raise

    # conn.request() calls httplib.*.request, not the method in
    # urllib3.request. It also calls makefile (recv) on the socket.
    if chunked:
        conn.request_chunked(method, url, **httplib_request_kw)
    else:
        conn.request(method, url, **httplib_request_kw)

    # Reset the timeout for the recv() on the socket
    read_timeout = timeout_obj.read_timeout

    # App Engine doesn't have a sock attr
    if getattr(conn, 'sock', None):
        # In Python 3 socket.py will catch EAGAIN and return None when you
        # try and read into the file pointer created by http.client, which
        # instead raises a BadStatusLine exception. Instead of catching
        # the exception and assuming all BadStatusLine exceptions are read
        # timeouts, check for a zero timeout before making the request.
        if read_timeout == 0:
            raise ReadTimeoutError(
                self, url, "Read timed out. (read timeout=%s)" % read_timeout)
        if read_timeout is Timeout.DEFAULT_TIMEOUT:
            conn.sock.settimeout(socket.getdefaulttimeout())
        else:  # None or a value
            conn.sock.settimeout(read_timeout)

    # Receive the response from the server
    try:
        try:  # Python 2.7, use buffering of HTTP responses
            httplib_response = conn.getresponse(buffering=True)
        except TypeError:  # Python 2.6 and older, Python 3
            try:
                httplib_response = conn.getresponse()
            except Exception as e:
                # Remove the TypeError from the exception chain in Python 3;
                # otherwise it looks like a programming error was the cause.
                six.raise_from(e, None)
    except (SocketTimeout, BaseSSLError, SocketError) as e:
        self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
        raise

    # AppEngine doesn't have a version attr.
    http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
    log.debug("\"%s %s %s\" %s %s", method, url, http_version,
              httplib_response.status, httplib_response.length)

    # Malformed headers are logged but do not fail the request.
    try:
        assert_header_parsing(httplib_response.msg)
    except HeaderParsingError as hpe:  # Platform-specific: Python 3
        log.warning(
            'Failed to parse headers (url=%s): %s',
            self._absolute_url(url), hpe, exc_info=True)

    return httplib_response
def _absolute_url(self, path):
    """Return the absolute URL (scheme://host[:port]/path) for *path* on this pool."""
    return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
def close(self):
    """
    Close all pooled connections and disable the pool.
    """
    # Swap the queue out for None first, so concurrent _get_conn/_put_conn
    # calls see a closed pool (AttributeError) instead of racing us.
    old_pool, self.pool = self.pool, None

    # Drain the old queue; slots may hold None placeholders.
    while True:
        try:
            connection = old_pool.get(block=False)
        except Empty:
            break  # Done.
        if connection:
            connection.close()
def is_same_host(self, url):
    """
    Check if the given ``url`` is a member of the same host as this
    connection pool.

    :return: True for relative URLs, or when scheme, host and
        (default-normalized) port all match this pool.
    """
    # Relative URLs are always on the same host.
    if url.startswith('/'):
        return True

    # TODO: Add optional support for socket.gethostbyname checking.
    scheme, host, port = get_host(url)

    # Use explicit default port for comparison when none is given
    if self.port and not port:
        port = port_by_scheme.get(scheme)
    elif not self.port and port == port_by_scheme.get(scheme):
        port = None

    return (scheme, host, port) == (self.scheme, self.host, self.port)
def urlopen(self, method, url, body=None, headers=None, retries=None,
redirect=True, assert_same_host=True, timeout=_Default,
pool_timeout=None, release_conn=None, chunked=False,
**response_kw):
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method provided
by :class:`.RequestMethods`, such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param body:
Data to send in the request body (useful for creating
POST requests, see HTTPConnectionPool.post_url for
more convenience).
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
Pass ``None`` to retry until you receive a response. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When False, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of
``response_kw.get('preload_content', True)``.
:param chunked:
If True, urllib3 will send the body using chunked transfer
encoding. Otherwise, urllib3 will send the body using the standard
content-length form. Defaults to False.
:param \**response_kw:
Additional parameters are passed to
:meth:`urllib3.response.HTTPResponse.from_httplib`
"""
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = response_kw.get('preload_content', True)
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
conn = None
# Track whether `conn` needs to be released before
# returning/raising/recursing. Update this variable if necessary, and
# leave `release_conn` constant throughout the function. That way, if
# the function recurses, the original value of `release_conn` will be
# passed down into the recursive call, and its value will be respected.
#
# See issue #651 [1] for details.
#
# [1] <https://github.com/shazow/urllib3/issues/651>
release_this_conn = release_conn
# Merge the proxy headers. Only do this in HTTP. We have to copy the
# headers dict so we can safely change it without those changes being
# reflected in anyone else's copy.
if self.scheme == 'http':
headers = headers.copy()
headers.update(self.proxy_headers)
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
# Keep track of whether we cleanly exited the except block. This
# ensures we do proper cleanup in finally.
clean_exit = False
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout
is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
if is_new_proxy_conn:
self._prepare_proxy(conn)
# Make the request on the httplib connection object.
httplib_response = self._make_request(conn, method, url,
timeout=timeout_obj,
body=body, headers=headers,
chunked=chunked)
# If we're going to release the connection in ``finally:``, then
# the response doesn't need to know about the connection. Otherwise
# it will also try to release it and we'll have a double-release
# mess.
response_conn = conn if not release_conn else None
# Import httplib's response into our own wrapper object
response = self.ResponseCls.from_httplib(httplib_response,
pool=self,
connection=response_conn,
**response_kw)
# Everything went great!
clean_exit = True
except Empty:
# Timed out by queue.
raise EmptyPoolError(self, "No pool connections are available.")
except (BaseSSLError, CertificateError) as e:
# Close the connection. If a connection is reused on which there
# was a Certificate error, the next request will certainly raise
# another Certificate error.
clean_exit = False
raise SSLError(e)
except SSLError:
# Treat SSLError separately from BaseSSLError to preserve
# traceback.
clean_exit = False
raise
except (TimeoutError, HTTPException, SocketError, ProtocolError) as e:
# Discard the connection for these exceptions. It will be
# be replaced during the next _get_conn() call.
clean_exit = False
if isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
e = ProxyError('Cannot connect to proxy.', e)
elif isinstance(e, (SocketError, HTTPException)):
e = ProtocolError('Connection aborted.', e)
retries = retries.increment(method, url, error=e, _pool=self,
_stacktrace=sys.exc_info()[2])
retries.sleep()
# Keep track of the error for the retry warning.
err = e
finally:
if not clean_exit:
# We hit some kind of exception, handled or otherwise. We need
# to throw the connection away unless explicitly told not to.
# Close the connection, set the variable to None, and make sure
# we put the None back in the pool to avoid leaking it.
conn = conn and conn.close()
release_this_conn = True
if release_this_conn:
# Put the connection back to be reused. If the connection is
# expired then it will be None, which will get replaced with a
# fresh connection during _get_conn.
self._put_conn(conn)
if not conn:
# Try again
log.warning("Retrying (%r) after connection "
"broken by '%r': %s", retries, err, url)
return self.urlopen(method, url, body, headers, retries,
redirect, assert_same_host,
timeout=timeout, pool_timeout=pool_timeout,
release_conn=release_conn, **response_kw)
# Handle redirect?
redirect_location = redirect and response.get_redirect_location()
if redirect_location:
if response.status == 303:
method = 'GET'
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_redirect:
# Release the connection for this response, since we're not
# returning it to be released manually.
response.release_conn()
raise
return response
log.info("Redirecting %s -> %s", url, redirect_location)
return self.urlopen(
method, redirect_location, body, headers,
retries=retries, redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout, pool_timeout=pool_timeout,
release_conn=release_conn, **response_kw)
# Check if we should retry the HTTP response.
if retries.is_forced_retry(method, status_code=response.status):
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_status:
# Release the connection for this response, since we're not
# returning it to be released manually.
response.release_conn()
raise
return response
retries.sleep()
log.info("Forced retry: %s", url)
return self.urlopen(
method, url, body, headers,
retries=retries, redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout, pool_timeout=pool_timeout,
release_conn=release_conn, **response_kw)
return response
class HTTPSConnectionPool(HTTPConnectionPool):
"""
Same as :class:`.HTTPConnectionPool`, but HTTPS.
When Python is compiled with the :mod:`ssl` module, then
:class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
instead of :class:`.HTTPSConnection`.
:class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
``assert_hostname`` and ``host`` in this order to verify connections.
If ``assert_hostname`` is False, no verification is done.
The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
the connection socket into an SSL socket.
"""
scheme = 'https'
ConnectionCls = HTTPSConnection
def __init__(self, host, port=None,
             strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
             block=False, headers=None, retries=None,
             _proxy=None, _proxy_headers=None,
             key_file=None, cert_file=None, cert_reqs=None,
             ca_certs=None, ssl_version=None,
             assert_hostname=None, assert_fingerprint=None,
             ca_cert_dir=None, **conn_kw):

    HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
                                block, headers, retries, _proxy, _proxy_headers,
                                **conn_kw)

    # Supplying a CA bundle implies the caller wants verification.
    if ca_certs and cert_reqs is None:
        cert_reqs = 'CERT_REQUIRED'

    # TLS configuration, forwarded to each connection in _prepare_conn().
    self.key_file = key_file
    self.cert_file = cert_file
    self.cert_reqs = cert_reqs
    self.ca_certs = ca_certs
    self.ca_cert_dir = ca_cert_dir
    self.ssl_version = ssl_version
    self.assert_hostname = assert_hostname
    self.assert_fingerprint = assert_fingerprint
def _prepare_conn(self, conn):
    """
    Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
    and establish the tunnel if proxy is used.
    """
    # Only verified connections understand certificate settings; the
    # unverified fallback is left untouched.
    if isinstance(conn, VerifiedHTTPSConnection):
        conn.set_cert(key_file=self.key_file,
                      cert_file=self.cert_file,
                      cert_reqs=self.cert_reqs,
                      ca_certs=self.ca_certs,
                      ca_cert_dir=self.ca_cert_dir,
                      assert_hostname=self.assert_hostname,
                      assert_fingerprint=self.assert_fingerprint)
        conn.ssl_version = self.ssl_version

    return conn
def _prepare_proxy(self, conn):
    """
    Establish tunnel connection early, because otherwise httplib
    would improperly set Host: header to proxy's IP:port.
    """
    # Python 2.7+
    try:
        set_tunnel = conn.set_tunnel
    except AttributeError:  # Platform-specific: Python 2.6
        set_tunnel = conn._set_tunnel

    if sys.version_info <= (2, 6, 4) and not self.proxy_headers:  # Python 2.6.4 and older
        set_tunnel(self.host, self.port)
    else:
        set_tunnel(self.host, self.port, self.proxy_headers)

    conn.connect()
def _new_conn(self):
    """
    Return a fresh :class:`httplib.HTTPSConnection`.
    """
    self.num_connections += 1
    log.info("Starting new HTTPS connection (%d): %s",
             self.num_connections, self.host)

    # DummyConnection is substituted when the ssl module is unavailable.
    if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
        raise SSLError("Can't connect to HTTPS URL because the SSL "
                       "module is not available.")

    # When proxying, the TCP connection goes to the proxy; the tunnel to
    # the real host is set up later in _prepare_proxy().
    actual_host = self.host
    actual_port = self.port
    if self.proxy is not None:
        actual_host = self.proxy.host
        actual_port = self.proxy.port

    conn = self.ConnectionCls(host=actual_host, port=actual_port,
                              timeout=self.timeout.connect_timeout,
                              strict=self.strict, **self.conn_kw)

    return self._prepare_conn(conn)
def _validate_conn(self, conn):
    """
    Called right before a request is made, after the socket is created.

    Connects eagerly so certificate problems surface here, and warns when
    the connection could not be verified.
    """
    super(HTTPSConnectionPool, self)._validate_conn(conn)

    # Force connect early to allow us to validate the connection.
    if not getattr(conn, 'sock', None):  # AppEngine might not have `.sock`
        conn.connect()

    if not conn.is_verified:
        warnings.warn((
            'Unverified HTTPS request is being made. '
            'Adding certificate verification is strongly advised. See: '
            'https://urllib3.readthedocs.io/en/latest/security.html'),
            InsecureRequestWarning)
def connection_from_url(url, **kw):
    """
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    scheme, host, port = get_host(url)
    if not port:
        port = port_by_scheme.get(scheme, 80)
    pool_cls = HTTPSConnectionPool if scheme == 'https' else HTTPConnectionPool
    return pool_cls(host, port=port, **kw)
| mit |
jschaul/ComplexNetworkSim | examples/Skype_model/model_Skype.py | 1 | 4858 | '''Example model specification for a Skype-like system
@author: Joe Schaul <joe.schaul@gmail.com>
'''
import networkx as nx
from ComplexNetworkSim import NetworkSimulation, AnimationCreator, PlotCreator
from agent_skypeClient import DISABLED, ENABLED, ENABLED_S, DISABLED_S
from agent_skypeClient import Skype
from environment_Skype import crash
# Simulation constants
MAX_SIMULATION_TIME = 50.0
TRIALS = 1
def main():
    """Enumerate the parameter combinations and run one simulation batch each.

    The sweep crosses {cache_size 50/3} x {kills 40/60} with three restart-time
    distributions (exponential, fixed, gaussian); each combination gets its own
    output directory named after its distinguishing parameters.
    """
    # Baseline configuration; the other combinations are small deltas on it.
    base = {
        'nodes': 600,
        'supernodes': 100,
        'permanent_supernodes': 1,
        'threshold': 12,
        'cache_size': 50,
        'kills': 40,
        # restart_time is kept as Python source text (evaluated by the agent).
        'restart_time': "self.r.expovariate(1/4.0)",
        'restart_time_str': "exp",
        'agentClass': Skype,
        'environmentAgent': crash,
    }
    combs = [
        base,
        dict(base, cache_size=3),
        dict(base, kills=60),
        dict(base, kills=60, cache_size=3),
    ]

    # Same combinations with a fixed restart delay ...
    fixes = [dict(c, restart_time="4", restart_time_str="fix") for c in combs]
    # ... and with a gaussian restart delay.
    gausses = [dict(c, restart_time="abs(self.r.gauss(4, 1))",
                    restart_time_str="gauss") for c in combs]

    for parameters in combs + fixes + gausses:
        # Encode the distinguishing parameters into the output directory name.
        # (Renamed from 'dir' to avoid shadowing the builtin.)
        out_dir = '_'.join(("skype600-",
                            str(parameters['supernodes']),
                            str(parameters['restart_time_str']),
                            str(parameters['threshold']),
                            str(parameters['cache_size']),
                            str(parameters['kills']),
                            ))
        parameters['directory'] = out_dir
        simulate(**parameters)
def simulate(**kwargs):
    """Run one Skype simulation batch and plot the results.

    Every model knob arrives as a keyword argument (see main() for the full
    set); anything omitted falls back to the defaults below.  Output goes
    under kwargs['directory'].
    """
    # Model parameters
    directory = kwargs.get("directory", 'skype')
    agentClass = kwargs.get("agentClass", Skype)
    environmentAgent = kwargs.get("environmentAgent", crash)
    NUMBER_SUPERNODES = kwargs.get("supernodes", 90)
    NUMBER_NODES = kwargs.get("nodes", 600)
    NUMBER_PERMANENT_SUPERNODES = kwargs.get("permanent_supernodes", 1)
    # Parameters shared by every agent in the simulation.
    globalSharedParameters = {}
    globalSharedParameters['supernodes'] = range(NUMBER_SUPERNODES)
    globalSharedParameters['permanent_supernodes'] = range(NUMBER_PERMANENT_SUPERNODES)
    globalSharedParameters['threshold'] = kwargs.get("threshold", 12)
    globalSharedParameters['cache_size'] = kwargs.get("cache_size", 20)
    globalSharedParameters['kills'] = kwargs.get("kills", 30)
    # NOTE(review): restart_time is a *string* of Python source, presumably
    # evaluated by the agent -- confirm against agent_skypeClient.
    globalSharedParameters['restart_time'] = kwargs.get('restart_time', "4.0")
    # Output Parameters
    statesToMonitor = [ENABLED, ENABLED_S]
    colours = ["g", "^b", "0.5"]
    mapping = {ENABLED:"g", ENABLED_S: "b", DISABLED:"0.2", DISABLED_S: "0.4"}
    labels = ["Online nodes", "Online supernodes", "Offline"]
    name = "skype"
    titlePlot = "Skype simulation, %i trials, threshold=%i, cacheSize=%i \n supernode_restart_distr=%s" % \
        (TRIALS, globalSharedParameters['threshold'], globalSharedParameters['cache_size'],
         str(globalSharedParameters['restart_time']))
    titleVisual = "Skype visualisation"
    #####topology####
    # Supernodes form a complete graph; ordinary nodes start unconnected.
    G = nx.Graph(nx.complete_graph(NUMBER_SUPERNODES))
    G.add_nodes_from(xrange(NUMBER_SUPERNODES,NUMBER_NODES+NUMBER_SUPERNODES))
    states = [DISABLED for node in G.nodes_iter()]
    # run simulation
    simulation = NetworkSimulation(G,
                                   states,
                                   agentClass,
                                   directory,
                                   MAX_SIMULATION_TIME,
                                   TRIALS,
                                   environmentAgent,
                                   **globalSharedParameters)
    simulation.runSimulation()
    # # run visualisation
    # gif = AnimationCreator(directory, name, titleVisual, mapping)
    # gif.create_gif()
    # plot results
    p = PlotCreator(directory, name, titlePlot, statesToMonitor,
                    colours, labels)
    p.plotSimulation()
if __name__ == '__main__':
    # Run the full parameter sweep when executed as a script.
    main()
| bsd-2-clause |
Superjom/NeuralNetworks | logistic_regression.py | 1 | 3887 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on Feb 22, 2014
@author: Chunwei Yan @ PKU
@mail: yanchunwei@outlook.com
'''
import theano
import numpy
from theano import tensor as T
rng = numpy.random
class LogisticRegression(object):
'''
pass in the dataset as a matrix
'''
def __init__(self, n_features):
self.n_features = n_features
self.x = T.matrix("x")
self.y = T.vector("y")
self.W = theano.shared(rng.randn(n_features).astype(theano.config.floatX), name="W")
self.b = theano.shared(numpy.asarray(0., dtype=theano.config.floatX), name="b")
def grad(self):
p_1 = 1/ (1 + T.exp(-T.dot(self.x, self.W) - self.b))
self.prediction = p_1 > 0.5
self.xent = - self.y * T.log(p_1) - (1 - self.y) * (1 - p_1)
cost = self.xent.mean() + 0.01 * ( self.W ** 2).sum()
gw, gb = T.grad(cost, [self.W, self.b])
return gw, gb
def compile(self):
gw, gb = self.grad()
self.trainer = theano.function(
inputs = [self.x, self.y],
outputs = [self.prediction, self.xent],
updates = [ (self.W, self.W - 0.01 * gw),
(self.b, self.b - 0.01 * gb)],
name = 'train')
self.predict = theano.function(
inputs = [self.x],
outputs = self.prediction,
name = 'predict')
def test(self):
D = (rng.randn(400, self.n_features).astype(theano.config.floatX),
rng.randint(size=400, low=0, high=2).astype(theano.config.floatX))
training_steps = 5000
for i in range(training_steps):
pred, err = self.trainer(D[0], D[1])
print 'error:', numpy.sum(err ** 2) / len(D[0])
print "target values for D"
print D[1]
print "prediction on D"
print self.predict(D[0])
class LogisticRegressionOne(object):
'''
pass in one record each time
'''
def __init__(self, n_features):
self.n_features = n_features
self.x = T.fvector("x")
self.y = T.bscalar("y")
self.W = theano.shared(rng.randn(n_features).astype(theano.config.floatX), name="W")
self.b = theano.shared(numpy.asarray(0., dtype=theano.config.floatX), name="b")
def grad(self):
p_1 = 1/ (1 + T.exp(-T.dot(self.x, self.W) - self.b))
self.prediction = p_1 > 0.5
self.xent = - self.y * T.log(p_1) - (1 - self.y) * (1 - p_1)
cost = self.xent.mean() + 0.01 * ( self.W ** 2).sum()
gw, gb = T.grad(cost, [self.W, self.b])
return gw, gb
def compile(self):
gw, gb = self.grad()
self.trainer = theano.function(
inputs = [self.x, self.y],
outputs = [self.prediction, self.xent],
updates = [ (self.W, self.W - 0.01 * gw),
(self.b, self.b - 0.01 * gb)],
allow_input_downcast=True,
name = 'train')
self.predict = theano.function(
inputs = [self.x],
outputs = self.prediction,
allow_input_downcast=True,
name = 'predict')
def test(self):
data, label = (rng.randn(400, self.n_features).astype(theano.config.floatX),
rng.randint(size=400, low=0, high=2).astype(theano.config.floatX))
training_steps = 5000
for i in range(1000):
errs = []
for i in range(400):
test_data, test_label = (numpy.array(data[i])).astype(theano.config.floatX), label[i]
pred, err = self.trainer(test_data, test_label)
errs.append(err)
print 'err:', numpy.array(errs).sum() / 400
if __name__ == "__main__":
    # Smoke-test the per-sample trainer with 784 input features.
    l = LogisticRegressionOne(784)
    l.compile()
    l.test()
| apache-2.0 |
omicsnut/bioconda-recipes | recipes/wtforms-components/setup.py | 63 | 2635 | """
WTForms-Components
------------------
Additional fields, validators and widgets for WTForms.
"""
from setuptools import setup
import os
import re
import sys
HERE = os.path.dirname(os.path.abspath(__file__))
PY3 = sys.version_info[0] == 3
def get_version():
    """Extract __version__ from wtforms_components/__init__.py without
    importing the package (keeps setup.py dependency-free)."""
    init_path = os.path.join(HERE, 'wtforms_components', '__init__.py')
    with open(init_path) as source:
        text = source.read()
    match = re.search(r"^__version__ = '(.*?)'$", text, re.MULTILINE)
    return match.group(1)
# Optional feature groups; the loop below folds every non-test group into
# the 'test' extra so the test environment can exercise all of them.
extras_require = {
    'test': [
        'pytest>=2.2.3',
        'flexmock>=0.9.7',
        'WTForms-Test>=0.1.1',
        'flake8>=2.4.0',
        'isort>=4.2.2',
    ],
    'color': ['colour>=0.0.4'],
    # ipaddr is only needed on Python 2 (py3 has the ipaddress module).
    'ipaddress': ['ipaddr'] if not PY3 else [],
    'timezone': ['python-dateutil'],
}
# Add all optional dependencies to testing requirements.
for name, requirements in extras_require.items():
    if name != 'test':
        extras_require['test'] += requirements
# Declarative package metadata.  Note the dependency_links entry pins a
# pre-release, python3-capable fork of phonenumbers.
setup(
    name='WTForms-Components',
    version=get_version(),
    url='https://github.com/kvesteri/wtforms-components',
    license='BSD',
    author='Konsta Vesterinen',
    author_email='konsta@fastmonkeys.com',
    description='Additional fields, validators and widgets for WTForms.',
    long_description=__doc__,
    packages=[
        'wtforms_components',
        'wtforms_components.fields'
    ],
    zip_safe=False,
    include_package_data=True,
    platforms='any',
    dependency_links=[
        # 5.6b1 only supports python 3.x / pending release
        'git+git://github.com/daviddrysdale/python-phonenumbers.git@python3'
        '#egg=phonenumbers3k-5.6b1',
    ],
    install_requires=[
        'WTForms>=1.0.4',
        'six>=1.4.1',
        'validators>=0.5.0',
        'intervals>=0.6.0'
    ],
    extras_require=extras_require,
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ]
)
| mit |
ruibarreira/linuxtrail | usr/lib/python2.7/lib-tk/Tkdnd.py | 198 | 11488 | """Drag-and-drop support for Tkinter.
This is very preliminary. I currently only support dnd *within* one
application, between different windows (or within the same window).
I am trying to make this as generic as possible -- not dependent on
the use of a particular widget or icon type, etc. I also hope that
this will work with Pmw.
To enable an object to be dragged, you must create an event binding
for it that starts the drag-and-drop process. Typically, you should
bind <ButtonPress> to a callback function that you write. The function
should call Tkdnd.dnd_start(source, event), where 'source' is the
object to be dragged, and 'event' is the event that invoked the call
(the argument to your callback function). Even though this is a class
instantiation, the returned instance should not be stored -- it will
be kept alive automatically for the duration of the drag-and-drop.
When a drag-and-drop is already in process for the Tk interpreter, the
call is *ignored*; this normally averts starting multiple simultaneous
dnd processes, e.g. because different button callbacks all
dnd_start().
The object is *not* necessarily a widget -- it can be any
application-specific object that is meaningful to potential
drag-and-drop targets.
Potential drag-and-drop targets are discovered as follows. Whenever
the mouse moves, and at the start and end of a drag-and-drop move, the
Tk widget directly under the mouse is inspected. This is the target
widget (not to be confused with the target object, yet to be
determined). If there is no target widget, there is no dnd target
object. If there is a target widget, and it has an attribute
dnd_accept, this should be a function (or any callable object). The
function is called as dnd_accept(source, event), where 'source' is the
object being dragged (the object passed to dnd_start() above), and
'event' is the most recent event object (generally a <Motion> event;
it can also be <ButtonPress> or <ButtonRelease>). If the dnd_accept()
function returns something other than None, this is the new dnd target
object. If dnd_accept() returns None, or if the target widget has no
dnd_accept attribute, the target widget's parent is considered as the
target widget, and the search for a target object is repeated from
there. If necessary, the search is repeated all the way up to the
root widget. If none of the target widgets can produce a target
object, there is no target object (the target object is None).
The target object thus produced, if any, is called the new target
object. It is compared with the old target object (or None, if there
was no old target widget). There are several cases ('source' is the
source object, and 'event' is the most recent event object):
- Both the old and new target objects are None. Nothing happens.
- The old and new target objects are the same object. Its method
dnd_motion(source, event) is called.
- The old target object was None, and the new target object is not
None. The new target object's method dnd_enter(source, event) is
called.
- The new target object is None, and the old target object is not
None. The old target object's method dnd_leave(source, event) is
called.
- The old and new target objects differ and neither is None. The old
target object's method dnd_leave(source, event), and then the new
target object's method dnd_enter(source, event) is called.
Once this is done, the new target object replaces the old one, and the
Tk mainloop proceeds. The return value of the methods mentioned above
is ignored; if they raise an exception, the normal exception handling
mechanisms take over.
The drag-and-drop processes can end in two ways: a final target object
is selected, or no final target object is selected. When a final
target object is selected, it will always have been notified of the
potential drop by a call to its dnd_enter() method, as described
above, and possibly one or more calls to its dnd_motion() method; its
dnd_leave() method has not been called since the last call to
dnd_enter(). The target is notified of the drop by a call to its
method dnd_commit(source, event).
If no final target object is selected, and there was an old target
object, its dnd_leave(source, event) method is called to complete the
dnd sequence.
Finally, the source object is notified that the drag-and-drop process
is over, by a call to source.dnd_end(target, event), specifying either
the selected target object, or None if no target object was selected.
The source object can use this to implement the commit action; this is
sometimes simpler than to do it in the target's dnd_commit(). The
target's dnd_commit() method could then simply be aliased to
dnd_leave().
At any time during a dnd sequence, the application can cancel the
sequence by calling the cancel() method on the object returned by
dnd_start(). This will call dnd_leave() if a target is currently
active; it will never call dnd_commit().
"""
import Tkinter
# The factory function
def dnd_start(source, event):
    """Begin a drag-and-drop for *source*; return the live handler, or
    None when a dnd sequence is already active in this interpreter."""
    handler = DndHandler(source, event)
    return handler if handler.root else None
# The class that does the work
class DndHandler:
    """State machine for a single drag-and-drop sequence.

    Created by dnd_start() on button press; lives until the matching button
    release (or cancel()).  The active handler is parked on the root widget
    via the name-mangled attribute ``_DndHandler__dnd``, which is what keeps
    two simultaneous dnd sequences from starting in one interpreter.
    """

    root = None  # class default doubles as the "inactive" marker for __del__

    def __init__(self, source, event):
        if event.num > 5:
            return  # not a plain mouse button press
        root = event.widget._root()
        try:
            root.__dnd  # probe the interpreter-wide slot
            return # Don't start recursive dnd
        except AttributeError:
            root.__dnd = self  # claim the slot
            self.root = root
        self.source = source
        self.target = None
        self.initial_button = button = event.num
        self.initial_widget = widget = event.widget
        # Only the release of this same button ends the drag.
        self.release_pattern = "<B%d-ButtonRelease-%d>" % (button, button)
        self.save_cursor = widget['cursor'] or ""
        widget.bind(self.release_pattern, self.on_release)
        widget.bind("<Motion>", self.on_motion)
        widget['cursor'] = "hand2"

    def __del__(self):
        # Release the interpreter-wide slot if this handler still owns it.
        root = self.root
        self.root = None
        if root:
            try:
                del root.__dnd
            except AttributeError:
                pass

    def on_motion(self, event):
        # Walk up from the widget under the pointer until some ancestor's
        # dnd_accept() yields a target object (or we run out of widgets).
        x, y = event.x_root, event.y_root
        target_widget = self.initial_widget.winfo_containing(x, y)
        source = self.source
        new_target = None
        while target_widget:
            try:
                attr = target_widget.dnd_accept
            except AttributeError:
                pass
            else:
                new_target = attr(source, event)
                if new_target:
                    break
            target_widget = target_widget.master
        old_target = self.target
        if old_target is new_target:
            if old_target:
                old_target.dnd_motion(source, event)
        else:
            # Target changed: notify the old target first, then the new one.
            if old_target:
                self.target = None
                old_target.dnd_leave(source, event)
            if new_target:
                new_target.dnd_enter(source, event)
                self.target = new_target

    def on_release(self, event):
        self.finish(event, 1)  # drop: commit on the current target

    def cancel(self, event=None):
        self.finish(event, 0)  # abort: leave without committing

    def finish(self, event, commit=0):
        # Tear down bindings and cursor, then notify target and source.
        # try/finally guarantees source.dnd_end() runs even if a target
        # callback raises.
        target = self.target
        source = self.source
        widget = self.initial_widget
        root = self.root
        try:
            del root.__dnd
            self.initial_widget.unbind(self.release_pattern)
            self.initial_widget.unbind("<Motion>")
            widget['cursor'] = self.save_cursor
            self.target = self.source = self.initial_widget = self.root = None
            if target:
                if commit:
                    target.dnd_commit(source, event)
                else:
                    target.dnd_leave(source, event)
        finally:
            source.dnd_end(target, event)
# ----------------------------------------------------------------------
# The rest is here for testing and demonstration purposes only!
class Icon:
    """Draggable labelled item for the demo: a Tkinter Label embedded in a
    Canvas window item.  Plays the dnd 'source object' role."""

    def __init__(self, name):
        self.name = name
        self.canvas = self.label = self.id = None  # not attached yet

    def attach(self, canvas, x=10, y=10):
        """(Re)attach the icon to *canvas* at (x, y), detaching first when it
        currently lives on a different canvas."""
        if canvas is self.canvas:
            self.canvas.coords(self.id, x, y)  # same canvas: just move
            return
        if self.canvas:
            self.detach()
        if not canvas:
            return
        label = Tkinter.Label(canvas, text=self.name,
                              borderwidth=2, relief="raised")
        id = canvas.create_window(x, y, window=label, anchor="nw")
        self.canvas = canvas
        self.label = label
        self.id = id
        label.bind("<ButtonPress>", self.press)

    def detach(self):
        """Remove the icon from its canvas (no-op when unattached)."""
        canvas = self.canvas
        if not canvas:
            return
        id = self.id
        label = self.label
        self.canvas = self.label = self.id = None
        canvas.delete(id)
        label.destroy()

    def press(self, event):
        # Try to start a drag; record offsets so the icon tracks the grab
        # point rather than snapping its corner to the pointer.
        if dnd_start(self, event):
            # where the pointer is relative to the label widget:
            self.x_off = event.x
            self.y_off = event.y
            # where the widget is relative to the canvas:
            self.x_orig, self.y_orig = self.canvas.coords(self.id)

    def move(self, event):
        x, y = self.where(self.canvas, event)
        self.canvas.coords(self.id, x, y)

    def putback(self):
        # Restore the position recorded at the start of the drag.
        self.canvas.coords(self.id, self.x_orig, self.y_orig)

    def where(self, canvas, event):
        """Return the canvas coordinates for the icon's corner given a
        pointer event."""
        # where the corner of the canvas is relative to the screen:
        x_org = canvas.winfo_rootx()
        y_org = canvas.winfo_rooty()
        # where the pointer is relative to the canvas widget:
        x = event.x_root - x_org
        y = event.y_root - y_org
        # compensate for initial pointer offset
        return x - self.x_off, y - self.y_off

    def dnd_end(self, target, event):
        # Required source-protocol hook; nothing to clean up here.
        pass
class Tester:
    """Demo drop target: a Toplevel holding a Canvas that accepts icons.

    Implements the dnd target protocol (dnd_accept/enter/motion/leave/commit).
    A preview rectangle tracks where the icon would land.
    """

    def __init__(self, root):
        self.top = Tkinter.Toplevel(root)
        self.canvas = Tkinter.Canvas(self.top, width=100, height=100)
        self.canvas.pack(fill="both", expand=1)
        # The canvas advertises itself as a dnd target via this attribute.
        self.canvas.dnd_accept = self.dnd_accept

    def dnd_accept(self, source, event):
        # Accept any source unconditionally; self is the target object.
        return self

    def dnd_enter(self, source, event):
        self.canvas.focus_set() # Show highlight border
        x, y = source.where(self.canvas, event)
        x1, y1, x2, y2 = source.canvas.bbox(source.id)
        dx, dy = x2-x1, y2-y1
        # Outline rectangle previewing the drop position and size.
        self.dndid = self.canvas.create_rectangle(x, y, x+dx, y+dy)
        self.dnd_motion(source, event)

    def dnd_motion(self, source, event):
        x, y = source.where(self.canvas, event)
        x1, y1, x2, y2 = self.canvas.bbox(self.dndid)
        self.canvas.move(self.dndid, x-x1, y-y1)

    def dnd_leave(self, source, event):
        self.top.focus_set() # Hide highlight border
        self.canvas.delete(self.dndid)
        self.dndid = None

    def dnd_commit(self, source, event):
        # Drop: remove the preview rectangle, then move the icon here.
        self.dnd_leave(source, event)
        x, y = source.where(self.canvas, event)
        source.attach(self.canvas, x, y)
def test():
    """Interactive demo: three drop-target windows, three draggable icons;
    drag the icons between windows.  Blocks in mainloop until Quit."""
    root = Tkinter.Tk()
    root.geometry("+1+1")
    Tkinter.Button(command=root.quit, text="Quit").pack()
    t1 = Tester(root)
    t1.top.geometry("+1+60")
    t2 = Tester(root)
    t2.top.geometry("+120+60")
    t3 = Tester(root)
    t3.top.geometry("+240+60")
    i1 = Icon("ICON1")
    i2 = Icon("ICON2")
    i3 = Icon("ICON3")
    i1.attach(t1.canvas)
    i2.attach(t2.canvas)
    i3.attach(t3.canvas)
    root.mainloop()

if __name__ == '__main__':
    test()
| gpl-3.0 |
betoesquivel/fil2014 | build/django/build/lib.linux-x86_64-2.7/django/views/decorators/csrf.py | 228 | 2201 | from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.utils.decorators import decorator_from_middleware, available_attrs
from functools import wraps
csrf_protect = decorator_from_middleware(CsrfViewMiddleware)
csrf_protect.__name__ = "csrf_protect"
csrf_protect.__doc__ = """
This decorator adds CSRF protection in exactly the same way as
CsrfViewMiddleware, but it can be used on a per view basis. Using both, or
using the decorator multiple times, is harmless and efficient.
"""
class _EnsureCsrfToken(CsrfViewMiddleware):
    # We need this to behave just like the CsrfViewMiddleware, but not reject
    # requests or log warnings.
    def _reject(self, request, reason):
        # Returning None instead of an error response lets request
        # processing continue unconditionally.
        return None

requires_csrf_token = decorator_from_middleware(_EnsureCsrfToken)
requires_csrf_token.__name__ = 'requires_csrf_token'
requires_csrf_token.__doc__ = """
Use this decorator on views that need a correct csrf_token available to
RequestContext, but without the CSRF protection that csrf_protect
enforces.
"""
class _EnsureCsrfCookie(CsrfViewMiddleware):
    def _reject(self, request, reason):
        # Never reject; this middleware exists only to set the CSRF cookie.
        return None

    def process_view(self, request, callback, callback_args, callback_kwargs):
        retval = super(_EnsureCsrfCookie, self).process_view(request, callback, callback_args, callback_kwargs)
        # Forces process_response to send the cookie
        get_token(request)
        return retval

ensure_csrf_cookie = decorator_from_middleware(_EnsureCsrfCookie)
ensure_csrf_cookie.__name__ = 'ensure_csrf_cookie'
ensure_csrf_cookie.__doc__ = """
Use this decorator to ensure that a view sets a CSRF cookie, whether or not it
uses the csrf_token template tag, or the CsrfViewMiddleware is used.
"""
def csrf_exempt(view_func):
    """
    Marks a view function as being exempt from the CSRF view protection.
    """
    # Decorate a fresh wrapper rather than mutating view_func directly, so
    # this decorator has no side effects on the original function.
    @wraps(view_func, assigned=available_attrs(view_func))
    def wrapped_view(*args, **kwargs):
        return view_func(*args, **kwargs)
    wrapped_view.csrf_exempt = True
    return wrapped_view
| mit |
Theer108/invenio | invenio/legacy/bibconvert/registry.py | 16 | 1654 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA
import os
from flask_registry import PkgResourcesDirDiscoveryRegistry, \
ModuleAutoDiscoveryRegistry, RegistryProxy
from invenio.utils.datastructures import LazyDict
# Registry of all 'converterext' packages discovered among installed modules.
converterext = RegistryProxy(
    'converterext', ModuleAutoDiscoveryRegistry, 'converterext'
)

# Lazy mapping: knowledge-base file basename -> full path, gathered from
# every converterext package's 'kb' resource directory.  Built on first use.
kb = LazyDict(lambda: dict((os.path.basename(f), f)
                           for f in RegistryProxy('converterext.kb',
                                                  PkgResourcesDirDiscoveryRegistry,
                                                  'kb', registry_namespace=converterext)))

# Same idea for conversion template files found under 'templates'.
templates = LazyDict(lambda: dict((os.path.basename(f), f)
                                  for f in RegistryProxy('converterext.templates',
                                                         PkgResourcesDirDiscoveryRegistry,
                                                         'templates',
                                                         registry_namespace=converterext)))
| gpl-2.0 |
ghjm/ansible | lib/ansible/module_utils/common/_collections_compat.py | 115 | 1190 | # Copyright (c), Sviatoslav Sydorenko <ssydoren@redhat.com> 2018
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
"""Collections ABC import shim.
This module is intended only for internal use.
It will go away once the bundled copy of six includes equivalent functionality.
Third parties should not use this.
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Import the collection ABCs from whichever location this interpreter
# provides; the imported names are re-exported for internal consumers.
try:
    """Python 3.3+ branch."""
    # The abstract base classes moved to collections.abc in Python 3.3.
    from collections.abc import (
        MappingView,
        ItemsView,
        KeysView,
        ValuesView,
        Mapping, MutableMapping,
        Sequence, MutableSequence,
        Set, MutableSet,
        Container,
        Hashable,
        Sized,
        Callable,
        Iterable,
        Iterator,
    )
except ImportError:
    """Use old lib location under 2.6-3.2."""
    # Older interpreters expose the same ABCs directly on collections.
    from collections import (
        MappingView,
        ItemsView,
        KeysView,
        ValuesView,
        Mapping, MutableMapping,
        Sequence, MutableSequence,
        Set, MutableSet,
        Container,
        Hashable,
        Sized,
        Callable,
        Iterable,
        Iterator,
    )
| gpl-3.0 |
surgebiswas/poker | PokerBots_2017/Johnny/scipy/optimize/nnls.py | 116 | 1423 | from __future__ import division, print_function, absolute_import
from . import _nnls
from numpy import asarray_chkfinite, zeros, double
__all__ = ['nnls']
def nnls(A, b):
    """
    Solve ``argmin_x || Ax - b ||_2`` subject to ``x >= 0``.

    Thin wrapper around the bundled FORTRAN active-set solver of
    Lawson & Hanson, which solves the KKT conditions of the non-negative
    least squares problem.

    Parameters
    ----------
    A : ndarray
        Coefficient matrix ``A`` as shown above.
    b : ndarray
        Right-hand side vector.

    Returns
    -------
    x : ndarray
        Non-negative solution vector.
    rnorm : float
        The residual, ``|| Ax-b ||_2``.

    References
    ----------
    Lawson C., Hanson R.J., (1987) Solving Least Squares Problems, SIAM
    """
    # Reject NaN/inf up front; the FORTRAN routine does not handle them.
    A = asarray_chkfinite(A)
    b = asarray_chkfinite(b)

    if A.ndim != 2:
        raise ValueError("expected matrix")
    if b.ndim != 1:
        raise ValueError("expected vector")

    m, n = A.shape
    if m != b.shape[0]:
        raise ValueError("incompatible dimensions")

    # Workspace arrays required by the FORTRAN routine.
    dual = zeros((n,), dtype=double)
    scratch = zeros((m,), dtype=double)
    index = zeros((n,), dtype=int)

    x, rnorm, mode = _nnls.nnls(A, m, n, b, dual, scratch, index)
    if mode != 1:
        raise RuntimeError("too many iterations")
    return x, rnorm
| mit |
jcai19/smm_gem5 | src/python/m5/util/attrdict.py | 84 | 3421 | # Copyright (c) 2006 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
__all__ = [ 'attrdict', 'multiattrdict', 'optiondict' ]
class attrdict(dict):
    """A dict whose entries can also be read, written and deleted as
    attributes.  Private names (leading underscore) and existing real
    attributes keep their normal attribute semantics."""

    def __getattr__(self, attr):
        # Try the dictionary first; unknown names defer to the normal
        # attribute machinery, which raises AttributeError as usual.
        try:
            return self[attr]
        except KeyError:
            return super(attrdict, self).__getattribute__(attr)

    def __setattr__(self, attr, value):
        # Private names and genuine attributes stay attributes; everything
        # else becomes a dictionary entry.
        if attr.startswith('_') or attr in dir(self):
            super(attrdict, self).__setattr__(attr, value)
        else:
            self[attr] = value

    def __delattr__(self, attr):
        try:
            del self[attr]
        except KeyError:
            super(attrdict, self).__delattr__(attr)

    def __getstate__(self):
        # Pickle as a plain dict of our items.
        return dict(self)

    def __setstate__(self, state):
        self.update(state)
class multiattrdict(attrdict):
    """attrdict that auto-creates nested dictionaries on attribute access,
    so chained assignments like ``d.a.b.c = 1`` work without setting up the
    intermediate levels first."""

    def __getattr__(self, attr):
        try:
            return super(multiattrdict, self).__getattr__(attr)
        except AttributeError:
            # Never auto-create for private/dunder names; probes such as
            # pickling's __reduce__ lookups must keep raising.
            if attr.startswith('_'):
                raise
            child = multiattrdict()
            self[attr] = child
            return child
class optiondict(attrdict):
    """Modify attrdict so that a missing attribute just returns None"""
    def __getattr__(self, attr):
        # Swallow the AttributeError so absent keys read as None rather
        # than raising.
        try:
            return super(optiondict, self).__getattr__(attr)
        except AttributeError:
            return None
if __name__ == '__main__':
    # Ad-hoc smoke test (Python 2 print syntax): exercises attribute/item
    # symmetry for attrdict and auto-vivification for multiattrdict.
    x = attrdict()
    x.y = 1
    x['z'] = 2
    print x['y'], x.y
    print x['z'], x.z
    print dir(x)
    print x
    print
    del x['y']
    del x.z
    print dir(x)
    print(x)
    print
    print "multiattrdict"
    x = multiattrdict()
    x.x.x.x = 9
    x.y.z = 9
    print x
    print x.y
    print x.y.z
    print x.z.z
| bsd-3-clause |
jbair34/moose | framework/contrib/nsiqcppstyle/rules/RULE_6_1_A_do_not_omit_function_parameter_names.py | 43 | 5441 | """
Do not ommit function parameter names in the function declaration.
It checks function decls only.
== Violation ==
void functionA(int a, int); <== Violation. The second parameter int has no name.
void functionB(int ); <== Violation. The first parameter in has no name
== Good ==
void functionA(int a, int b, int c, int d, int e); <== Good.
void functionB(int, int, int c, int d) <== Don't care. it's the function definition.
{
}
"""
from nsiqcppstyle_rulehelper import *
from nsiqcppstyle_reporter import *
from nsiqcppstyle_rulemanager import *
def RunRule(lexer, fullName, decl, contextStack, context) :
    """Report a violation when a function *declaration* has a type-only
    (unnamed) parameter.  Definitions (decl is false) are ignored."""
    if decl :
        t2 = lexer.GetCurToken()
        lexer.GetNextTokenInType("LPAREN", False, True)
        # Peek at the matching ')' without consuming tokens.
        lexer.PushTokenIndex()
        rparen = lexer.GetNextMatchingToken()
        lexer.PopTokenIndex()
        # count = number of identifier-like tokens seen in the current
        # parameter; a lone token at a separator means "type without name".
        count = 0
        while(True) :
            t = lexer.GetNextTokenSkipWhiteSpaceAndCommentAndPreprocess()
            if rparen == None or t == rparen or t == None :
                break
            if t.type in ["ID", "BOOL", "CHAR", "INT", "LONG", "DOUBLE", "FLOAT", "SHORT", "VOID"] :
                if t.type == "VOID" :
                    # "f(void)" is an explicitly empty list, not a violation.
                    nt = lexer.PeekNextTokenSkipWhiteSpaceAndCommentAndPreprocess()
                    if nt == rparen :
                        return
                count += 1
            elif t.type == "LT" :
                # Skip template argument lists entirely.
                lexer.GetNextMatchingGT()
            elif t.type == "COMMA" :
                if count == 1 :
                    nsiqcppstyle_reporter.Error(t2, __name__, "function (%s) has non named parameter. use named parameter." % fullName)
                    break;
                count = 0
            elif rparen.lexpos <= t.lexpos :
                # Ran past the closing paren: check the final parameter.
                if count == 1 :
                    nsiqcppstyle_reporter.Error(t2, __name__, "function (%s) has non named parameter. use named parameter." % fullName)
                break;
ruleManager.AddFunctionNameRule(RunRule)
###########################################################################################
# Unit Test
###########################################################################################
from nsiqunittest.nsiqcppstyle_unittestbase import *
class testRule(nct):
def setUpRule(self):
ruleManager.AddFunctionNameRule(RunRule)
def test1(self):
self.Analyze("thisfile.c",
"""
int functionA(int *a, K<a, b>, int b, int c, int c);
""")
assert CheckErrorContent(__name__)
def test2(self):
self.Analyze("thisfile.c",
"""
int functionA(int, int, int, Scope<T,J> a) {
}
int B;
""")
assert not CheckErrorContent(__name__)
def test3(self):
self.Analyze("thisfile.c",
"""
class K {
int functionA(int *a, int, int, tt&b, aa*s, k a);
int B;
}
""")
assert CheckErrorContent(__name__)
def test4(self):
self.Analyze("thisfile.c",
"""
class K {
int functionA(int *a, int c, int d, tt&b, aa*s, k a);
int B;
}
""")
assert not CheckErrorContent(__name__)
def test5(self):
self.Analyze("thisfile.c",
"""
class K {
int functionA(void);
int B;
}
""")
assert not CheckErrorContent(__name__)
def test6(self):
self.Analyze("thisfile.c",
"""
class K {
int functionA(void*);
int B;
}
""")
assert not CheckErrorContent(__name__)
def test7(self):
self.Analyze("thisfile.c",
"""
#include <stdio.h>
#include <sys/socket.h> // getpeername()
#define ILOG_WARN(...) \\
iota::BoxLog::Instance().WriteFormat(box::Warn, __FILE__, __LINE__, __VA_ARGS__)
void func(void)
{
if (::getpeername(nFileDescriptor, (struct sockaddr*) &oSockAddr, (socklen_t*) &nSockAddrSize) == -1)
{
int nErrorCode = errno;
ILOG_WARN("Initialize() - internal error. (getpeername) : \n\t\t"
"* this=[%p], fd=[%d], \n\t\t"
"* error-code=[%d], error-message=[%s]",
this, nFileDescriptor, nErrorCode, strerror(nErrorCode));
return false;
}
}
""")
assert not CheckErrorContent(__name__)
def test8(self):
self.Analyze("thisfile.c",
"""
#define ILOG_WARN(A) \\
iota::BoxLog::Instance().WriteFormat(box::Warn, __FILE__, __LINE__, __VA_ARGS__)
""")
assert not CheckErrorContent(__name__)
def test9(self) :
self.Analyze("thisfile.c",
"""
/**
* @brief constructor with map
*/
ExeOptionDetail& ExeOptionDetail::operator=(const nano::Variant::Map& mapOptions)
{
m_nTimeout = _getItemAsInt(mapOptions, "TIMEOUT", -1);
return *this;
};
""")
assert not CheckErrorContent(__name__)
    def test10(self):
        # operator() definition inside a functor struct: must not be flagged.
        self.Analyze("thisfile.c",
"""struct FnVisibility
{
void operator () (const DSObjMap::value_type& pair)
{
DSObject* pObject = pair.second;
CHTMLDomUtility::SetStyleProperty(pObject, _T("display"), _T("none")); // ==> Original Code : Rule_6_1_A_Error
}
};
""")
        assert not CheckErrorContent(__name__)
    def test11(self):
        # Array initializer with constructor calls is not a declaration:
        # must not be flagged.
        self.Analyze("thisfile.c",
"""
CPoint BtnTeamPos[] = { CPoint(BTN_SINGLE_POS_X, BTN_SINGLE_POS_Y),
CPoint(BTN_TEAM_A_POS_X, BTN_TEAM_A_POS_Y),
CPoint(BTN_TEAM_B_POS_X, BTN_TEAM_B_POS_Y),
CPoint(BTN_TEAM_C_POS_X, BTN_TEAM_C_POS_Y)
};
""")
        assert not CheckErrorContent(__name__)
| lgpl-2.1 |
ltilve/chromium | native_client_sdk/src/build_tools/build_projects.py | 19 | 11468 | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import multiprocessing
import os
import posixpath
import sys
import urllib2
import buildbot_common
import build_version
import generate_make
import parse_dsc
from build_paths import SDK_SRC_DIR, OUT_DIR, SDK_RESOURCE_DIR
from build_paths import GSTORE
from generate_index import LandingPage
sys.path.append(os.path.join(SDK_SRC_DIR, 'tools'))
import getos
# Path (relative to GSTORE) of the pinned prebuilt GNU make binary that is
# downloaded on Windows, where no system make is available.
MAKE = 'nacl_sdk/make_3.99.90-26-gf80222c/make.exe'

# Extra host library architectures built per platform; only Windows needs a
# 32-bit host variant.
LIB_DICT = {
  'linux': [],
  'mac': [],
  'win': ['x86_32']
}

# Toolchain names accepted by the -t/--toolchain flag (and TOOLS filters).
VALID_TOOLCHAINS = [
  'bionic',
  'newlib',
  'clang-newlib',
  'glibc',
  'pnacl',
  'win',
  'linux',
  'mac',
]

# Global verbosity setting.
# If set to True (normally via a command line arg) then build_projects will
# add V=1 to all calls to 'make'
verbose = False
def Trace(msg):
  """Write |msg| to stderr, but only when the module-level verbose flag is on."""
  if not verbose:
    return
  sys.stderr.write('%s\n' % msg)
def CopyFilesFromTo(filelist, srcdir, dstdir):
  """Copy each file named in |filelist| from |srcdir| to |dstdir| (flat,
  keeping the file name)."""
  for name in filelist:
    buildbot_common.CopyFile(os.path.join(srcdir, name),
                             os.path.join(dstdir, name))
def UpdateHelpers(pepperdir, clobber=False):
  """Copy shared helper files into the SDK tree.

  Installs the landing-page assets into <pepperdir>/examples, the tools
  scripts and make includes into <pepperdir>/tools (and tools/lib), and on
  Windows downloads a prebuilt make.exe.

  Args:
    pepperdir: Root of the pepper_NN SDK output directory; must already
        contain a 'tools' subdirectory.
    clobber: If True, wipe the examples directory before copying.
  """
  tools_dir = os.path.join(pepperdir, 'tools')
  if not os.path.exists(tools_dir):
    buildbot_common.ErrorExit('SDK tools dir is missing: %s' % tools_dir)

  exampledir = os.path.join(pepperdir, 'examples')
  if clobber:
    buildbot_common.RemoveDir(exampledir)
  buildbot_common.MakeDir(exampledir)

  # Copy files for individual build and landing page
  files = ['favicon.ico', 'httpd.cmd', 'index.css', 'index.js',
           'button_close.png', 'button_close_hover.png']
  CopyFilesFromTo(files, SDK_RESOURCE_DIR, exampledir)

  # Copy tools scripts and make includes
  buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', '*.py'),
                          tools_dir)
  buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', '*.mk'),
                          tools_dir)

  # Copy tools/lib scripts
  tools_lib_dir = os.path.join(pepperdir, 'tools', 'lib')
  buildbot_common.MakeDir(tools_lib_dir)
  buildbot_common.CopyDir(os.path.join(SDK_SRC_DIR, 'tools', 'lib', '*.py'),
                          tools_lib_dir)

  # On Windows add a prebuilt make
  if getos.GetPlatform() == 'win':
    buildbot_common.BuildStep('Add MAKE')
    make_url = posixpath.join(GSTORE, MAKE)
    make_exe = os.path.join(tools_dir, 'make.exe')
    with open(make_exe, 'wb') as f:
      f.write(urllib2.urlopen(make_url).read())
def ValidateToolchains(toolchains):
  """Abort via ErrorExit if |toolchains| contains any name that is not in
  VALID_TOOLCHAINS."""
  unknown = set(toolchains) - set(VALID_TOOLCHAINS)
  if not unknown:
    return
  buildbot_common.ErrorExit('Invalid toolchain(s): %s' % ', '.join(unknown))
def GetDeps(projects):
  """Map each project name to the subset of its DEPS/LIBS that are also
  built inside this project subtree.

  Args:
    projects: List of project description dicts ('NAME', optional 'TARGETS'
        each with optional 'DEPS' and 'LIBS' lists).

  Returns:
    Dict {project name: [local dependency names]} containing only projects
    that have at least one in-tree dependency.  Order and duplicates of the
    original DEPS+LIBS listing are preserved.
  """
  # Names of everything built in this subtree, for fast membership tests.
  local_names = set(proj['NAME'] for proj in projects)

  out = {}
  for proj in projects:
    wanted = []
    for targ in proj.get('TARGETS', []):
      wanted += targ.get('DEPS', [])
      wanted += targ.get('LIBS', [])
    in_tree = [dep for dep in wanted if dep in local_names]
    if in_tree:
      out[proj['NAME']] = in_tree
  return out
def UpdateProjects(pepperdir, project_tree, toolchains,
                   clobber=False, configs=None, first_toolchain=False):
  """Copy project sources into the SDK tree and generate their Makefiles.

  Creates the host library output directories, generates a master Makefile
  per branch plus one per project, and (for examples) writes the landing
  page index.html.

  Args:
    pepperdir: Root of the pepper_NN SDK output directory; must already
        contain 'tools' and 'toolchain'.
    project_tree: Dict mapping branch paths (e.g. 'src', 'examples/api') to
        lists of project description dicts.
    toolchains: Toolchain names to generate build rules for.
    clobber: If True, remove each branch/library directory before recreating.
    configs: Build configurations; defaults to ['Debug', 'Release'].
    first_toolchain: Passed through to generate_make.ProcessProject.
  """
  if configs is None:
    configs = ['Debug', 'Release']

  if not os.path.exists(os.path.join(pepperdir, 'tools')):
    buildbot_common.ErrorExit('Examples depend on missing tools.')
  if not os.path.exists(os.path.join(pepperdir, 'toolchain')):
    buildbot_common.ErrorExit('Examples depend on missing toolchains.')

  ValidateToolchains(toolchains)

  # Create the library output directories
  libdir = os.path.join(pepperdir, 'lib')
  platform = getos.GetPlatform()
  for config in configs:
    for arch in LIB_DICT[platform]:
      dirpath = os.path.join(libdir, '%s_%s_host' % (platform, arch), config)
      if clobber:
        buildbot_common.RemoveDir(dirpath)
      buildbot_common.MakeDir(dirpath)

  landing_page = None
  for branch, projects in project_tree.iteritems():
    dirpath = os.path.join(pepperdir, branch)
    if clobber:
      buildbot_common.RemoveDir(dirpath)
    buildbot_common.MakeDir(dirpath)
    targets = [desc['NAME'] for desc in projects]
    deps = GetDeps(projects)

    # Generate master make for this branch of projects
    generate_make.GenerateMasterMakefile(pepperdir,
                                         os.path.join(pepperdir, branch),
                                         targets, deps)

    # The landing page is created lazily when the first examples branch
    # is encountered.
    if branch.startswith('examples') and not landing_page:
      landing_page = LandingPage()

    # Generate individual projects
    for desc in projects:
      srcroot = os.path.dirname(desc['FILEPATH'])
      generate_make.ProcessProject(pepperdir, srcroot, pepperdir, desc,
                                   toolchains, configs=configs,
                                   first_toolchain=first_toolchain)

      if branch.startswith('examples'):
        landing_page.AddDesc(desc)

  if landing_page:
    # Generate the landing page text file.
    index_html = os.path.join(pepperdir, 'examples', 'index.html')
    index_template = os.path.join(SDK_RESOURCE_DIR, 'index.html.template')
    with open(index_html, 'w') as fh:
      out = landing_page.GeneratePage(index_template)
      fh.write(out)

  # Generate top Make for examples
  targets = ['api', 'demo', 'getting_started', 'tutorial']
  targets = [x for x in targets if 'examples/'+x in project_tree]
  branch_name = 'examples'
  generate_make.GenerateMasterMakefile(pepperdir,
                                       os.path.join(pepperdir, branch_name),
                                       targets, {})
def BuildProjectsBranch(pepperdir, branch, deps, clean, config, args=None):
  """Run make in <pepperdir>/<branch>.

  Args:
    pepperdir: Root of the pepper_NN SDK output directory.
    branch: Branch subdirectory containing a generated Makefile.
    deps: If False, pass IGNORE_DEPS=1 so make does not rebuild dependencies.
    clean: If True, run 'make clean' afterwards to drop temporary files.
    config: Build configuration ('Debug' or 'Release').
    args: Optional extra make arguments; when absent, TOOLCHAIN=all is used.
  """
  make_dir = os.path.join(pepperdir, branch)
  print "\nMake: " + make_dir

  if getos.GetPlatform() == 'win':
    # We need to modify the environment to build host on Windows.
    make = os.path.join(make_dir, 'make.bat')
  else:
    make = 'make'

  env = None
  if os.environ.get('USE_GOMA') == '1':
    env = dict(os.environ)
    env['NACL_COMPILER_PREFIX'] = 'gomacc'
    # Add -m32 to the CFLAGS when building using i686-nacl-gcc
    # otherwise goma won't recognise it as different to the x86_64
    # build.
    env['X86_32_CFLAGS'] = '-m32'
    env['X86_32_CXXFLAGS'] = '-m32'
    # Goma distributes compilation, so a much higher job count pays off.
    jobs = '50'
  else:
    jobs = str(multiprocessing.cpu_count())

  make_cmd = [make, '-j', jobs]

  make_cmd.append('CONFIG='+config)
  # We always ENABLE_BIONIC in case we need it.  If neither --bionic nor
  # -t bionic have been provided on the command line, then VALID_TOOLCHAINS
  # will not contain a bionic target.
  make_cmd.append('ENABLE_BIONIC=1')
  if not deps:
    make_cmd.append('IGNORE_DEPS=1')

  if verbose:
    make_cmd.append('V=1')

  if args:
    make_cmd += args
  else:
    make_cmd.append('TOOLCHAIN=all')

  buildbot_common.Run(make_cmd, cwd=make_dir, env=env)
  if clean:
    # Clean to remove temporary files but keep the built
    buildbot_common.Run(make_cmd + ['clean'], cwd=make_dir, env=env)
def BuildProjects(pepperdir, project_tree, deps=True,
                  clean=False, config='Debug'):
  """Build every branch in |project_tree| via BuildProjectsBranch.

  Args:
    pepperdir: Root of the pepper_NN SDK output directory.
    project_tree: Dict mapping branch paths (e.g. 'src', 'examples/api')
        to project description lists.
    deps: If False, IGNORE_DEPS=1 is passed to make (see BuildProjectsBranch).
    clean: If True, 'make clean' runs after each branch build.
    config: Build configuration, 'Debug' or 'Release'.
  """
  # Make sure we build libraries (which live in 'src') before
  # any of the examples.  This matters because main() builds with
  # deps=False (IGNORE_DEPS=1), so the examples will not rebuild the
  # libraries themselves.  The previous code partitioned the tree the
  # wrong way round and built 'src' last.
  build_first = [p for p in project_tree if p == 'src']
  build_second = [p for p in project_tree if p != 'src']

  for branch in build_first + build_second:
    BuildProjectsBranch(pepperdir, branch, deps, clean, config)
def main(args):
  """Parse command-line arguments, copy SDK projects, and optionally build.

  Args:
    args: Argument list (sys.argv[1:]).

  Returns:
    0 on success; fatal problems exit via buildbot_common.ErrorExit.
  """
  parser = argparse.ArgumentParser(description=__doc__)
  parser.add_argument('-c', '--clobber',
      help='Clobber project directories before copying new files',
      action='store_true', default=False)
  parser.add_argument('-b', '--build',
      help='Build the projects. Otherwise the projects are only copied.',
      action='store_true')
  parser.add_argument('--config',
      help='Choose configuration to build (Debug or Release).  Builds both '
           'by default')
  parser.add_argument('--bionic',
      help='Enable bionic projects', action='store_true')
  parser.add_argument('-x', '--experimental',
      help='Build experimental projects', action='store_true')
  parser.add_argument('-t', '--toolchain',
      help='Build using toolchain. Can be passed more than once.',
      action='append', default=[])
  parser.add_argument('-d', '--dest',
      help='Select which build destinations (project types) are valid.',
      action='append')
  parser.add_argument('projects', nargs='*',
      help='Select which projects to build.')
  parser.add_argument('-v', '--verbose', action='store_true')

  # To setup bash completion for this command first install optcomplete
  # and then add this line to your .bashrc:
  #  complete -F _optcomplete build_projects.py
  try:
    import optcomplete
    optcomplete.autocomplete(parser)
  except ImportError:
    pass

  options = parser.parse_args(args)

  global verbose
  if options.verbose:
    verbose = True

  buildbot_common.verbose = verbose

  if 'NACL_SDK_ROOT' in os.environ:
    # We don't want the currently configured NACL_SDK_ROOT to have any effect
    # on the build.
    del os.environ['NACL_SDK_ROOT']

  pepper_ver = str(int(build_version.ChromeMajorVersion()))
  pepperdir = os.path.join(OUT_DIR, 'pepper_' + pepper_ver)

  if not options.toolchain:
    # Order matters here: the default toolchain for an example's Makefile will
    # be the first toolchain in this list that is available in the example.
    # e.g. If an example supports newlib and glibc, then the default will be
    # newlib.
    options.toolchain = ['pnacl', 'newlib', 'glibc', 'host', 'clang-newlib']
    if options.experimental or options.bionic:
      options.toolchain.append('bionic')

  if 'host' in options.toolchain:
    # 'host' is a placeholder for the toolchain of the current platform.
    options.toolchain.remove('host')
    options.toolchain.append(getos.GetPlatform())
    Trace('Adding platform: ' + getos.GetPlatform())

  ValidateToolchains(options.toolchain)

  filters = {}
  if options.toolchain:
    filters['TOOLS'] = options.toolchain
    Trace('Filter by toolchain: ' + str(options.toolchain))
  if not options.experimental:
    filters['EXPERIMENTAL'] = False
  if options.dest:
    filters['DEST'] = options.dest
    Trace('Filter by type: ' + str(options.dest))
  if options.projects:
    filters['NAME'] = options.projects
    Trace('Filter by name: ' + str(options.projects))

  try:
    project_tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters)
  except parse_dsc.ValidationError as e:
    buildbot_common.ErrorExit(str(e))

  if verbose:
    parse_dsc.PrintProjectTree(project_tree)

  UpdateHelpers(pepperdir, clobber=options.clobber)
  UpdateProjects(pepperdir, project_tree, options.toolchain,
                 clobber=options.clobber)

  if options.build:
    if options.config:
      configs = [options.config]
    else:
      configs = ['Debug', 'Release']
    for config in configs:
      BuildProjects(pepperdir, project_tree, config=config, deps=False)

  return 0
if __name__ == '__main__':
  # Entry point: run main() and convert known failure modes
  # (description validation errors, Ctrl-C) into clean error exits.
  script_name = os.path.basename(sys.argv[0])
  try:
    sys.exit(main(sys.argv[1:]))
  except parse_dsc.ValidationError as e:
    buildbot_common.ErrorExit('%s: %s' % (script_name, e))
  except KeyboardInterrupt:
    buildbot_common.ErrorExit('%s: interrupted' % script_name)
| bsd-3-clause |
eliangidoni/rethinkdb | test/common/http_support/werkzeug/script.py | 147 | 11249 | # -*- coding: utf-8 -*-
r'''
werkzeug.script
~~~~~~~~~~~~~~~
.. admonition:: Deprecated Functionality
``werkzeug.script`` is deprecated without replacement functionality.
Python's command line support improved greatly with :mod:`argparse`
and a bunch of alternative modules.
Most of the time you have recurring tasks while writing an application
such as starting up an interactive python interpreter with some prefilled
imports, starting the development server, initializing the database or
something similar.
For that purpose werkzeug provides the `werkzeug.script` module which
helps you writing such scripts.
Basic Usage
-----------
The following snippet is roughly the same in every werkzeug script::
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from werkzeug import script
# actions go here
if __name__ == '__main__':
script.run()
Starting this script now does nothing because no actions are defined.
An action is a function in the same module starting with ``"action_"``
which takes a number of arguments where every argument has a default. The
type of the default value specifies the type of the argument.
Arguments can then be passed by position or using ``--name=value`` from
the shell.
Because a runserver and shell command is pretty common there are two
factory functions that create such commands::
def make_app():
from yourapplication import YourApplication
return YourApplication(...)
action_runserver = script.make_runserver(make_app, use_reloader=True)
action_shell = script.make_shell(lambda: {'app': make_app()})
Using The Scripts
-----------------
The script from above can be used like this from the shell now:
.. sourcecode:: text
$ ./manage.py --help
$ ./manage.py runserver localhost 8080 --debugger --no-reloader
$ ./manage.py runserver -p 4000
$ ./manage.py shell
As you can see it's possible to pass parameters as positional arguments
or as named parameters, pretty much like Python function calls.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
'''
from __future__ import print_function
import sys
import inspect
import getopt
from os.path import basename
from werkzeug._compat import iteritems
# Maps a parameter's default-value type to the symbolic type name shown in
# the usage screen and used as the key into `converters` below.
argument_types = {
    bool: 'boolean',
    str: 'string',
    int: 'integer',
    float: 'float'
}

# Maps a symbolic type name to a callable that converts the raw command-line
# string into the typed Python value.
converters = {
    'boolean': lambda x: x.lower() in ('1', 'true', 'yes', 'on'),
    'string': str,
    'integer': int,
    'float': float
}
def run(namespace=None, action_prefix='action_', args=None):
    """Run the script.  Participating actions are looked up in the caller's
    namespace if no namespace is given, otherwise in the dict provided.
    Only items that start with action_prefix are processed as actions.  If
    you want to use all items in the namespace provided as actions set
    action_prefix to an empty string.

    :param namespace: An optional dict where the functions are looked up in.
                      By default the local namespace of the caller is used.
    :param action_prefix: The prefix for the functions.  Everything else
                          is ignored.
    :param args: the arguments for the function.  If not specified
                 :data:`sys.argv` without the first argument is used.
    """
    if namespace is None:
        namespace = sys._getframe(1).f_locals
    actions = find_actions(namespace, action_prefix)

    if args is None:
        args = sys.argv[1:]
    if not args or args[0] in ('-h', '--help'):
        return print_usage(actions)
    elif args[0] not in actions:
        fail('Unknown action \'%s\'' % args[0])

    arguments = {}
    types = {}
    key_to_arg = {}
    long_options = []
    formatstring = ''
    func, doc, arg_def = actions[args.pop(0)]
    # Build the getopt format string / long option list and the maps from
    # option keys (and positional indices) back to the argument name.
    for idx, (arg, shortcut, default, option_type) in enumerate(arg_def):
        real_arg = arg.replace('-', '_')
        if shortcut:
            formatstring += shortcut
            if not isinstance(default, bool):
                formatstring += ':'
            key_to_arg['-' + shortcut] = real_arg
        long_options.append(isinstance(default, bool) and arg or arg + '=')
        key_to_arg['--' + arg] = real_arg
        key_to_arg[idx] = real_arg
        types[real_arg] = option_type
        arguments[real_arg] = default

    try:
        optlist, posargs = getopt.gnu_getopt(args, formatstring, long_options)
    except getopt.GetoptError as e:
        fail(str(e))

    specified_arguments = set()
    for key, value in enumerate(posargs):
        try:
            arg = key_to_arg[key]
        except KeyError:
            # key_to_arg is a dict, so a surplus positional index raises
            # KeyError.  (This previously caught IndexError, which let the
            # KeyError escape instead of reporting the error cleanly.)
            fail('Too many parameters')
        specified_arguments.add(arg)
        try:
            arguments[arg] = converters[types[arg]](value)
        except ValueError:
            fail('Invalid value for argument %s (%s): %s' % (key, arg, value))

    for key, value in optlist:
        arg = key_to_arg[key]
        if arg in specified_arguments:
            fail('Argument \'%s\' is specified twice' % arg)
        if types[arg] == 'boolean':
            # Presence of a boolean flag means "yes", unless the argument is
            # an inverted 'no_*' flag.
            if arg.startswith('no_'):
                value = 'no'
            else:
                value = 'yes'
        try:
            arguments[arg] = converters[types[arg]](value)
        except ValueError:
            fail('Invalid value for \'%s\': %s' % (key, value))

    # Strip the 'no_' prefix again before calling the action function.
    newargs = {}
    for k, v in iteritems(arguments):
        newargs[k.startswith('no_') and k[3:] or k] = v
    arguments = newargs
    return func(**arguments)
def fail(message, code=-1):
    """Print an error message to stderr and terminate with exit code |code|."""
    sys.stderr.write('Error: %s\n' % message)
    sys.exit(code)
def find_actions(namespace, action_prefix):
    """Collect all actions in |namespace|: items whose key starts with
    |action_prefix|, mapped (with the prefix stripped) to their analysed
    form from analyse_action()."""
    prefix_len = len(action_prefix)
    actions = {}
    for key, value in iteritems(namespace):
        if not key.startswith(action_prefix):
            continue
        actions[key[prefix_len:]] = analyse_action(value)
    return actions
def print_usage(actions):
    """Print the usage information.  (Help screen)

    :param actions: dict mapping action name to the
                    ``(func, doc, arguments)`` tuple from analyse_action().
    """
    # sorted() works on both Python 2 and 3; the previous
    # `actions.items(); actions.sort()` fails on Python 3 where .items()
    # returns a view without a .sort() method.
    actions = sorted(actions.items())
    print('usage: %s <action> [<options>]' % basename(sys.argv[0]))
    print('       %s --help' % basename(sys.argv[0]))
    print()
    print('actions:')
    for name, (func, doc, arguments) in actions:
        print('  %s:' % name)
        for line in doc.splitlines():
            print('    %s' % line)
        if arguments:
            print()
        for arg, shortcut, default, argtype in arguments:
            if isinstance(default, bool):
                print('    %s' % (
                    (shortcut and '-%s, ' % shortcut or '') + '--' + arg
                ))
            else:
                print('    %-30s%-10s%s' % (
                    (shortcut and '-%s, ' % shortcut or '') + '--' + arg,
                    argtype, default
                ))
        print()
def analyse_action(func):
    """Analyse a function.

    Returns a ``(func, description, arguments)`` tuple where ``arguments``
    is a list of ``(name, shortcut, default, type_name)`` entries.

    :raises TypeError: if the function takes ``*args``/``**kwargs`` or has
                       parameters without defaults.
    """
    description = inspect.getdoc(func) or 'undocumented action'
    arguments = []
    # inspect.getargspec() was removed in Python 3.11.  getfullargspec()
    # shares the first four fields (args, varargs, varkw, defaults), so use
    # it when available and fall back for very old interpreters.
    try:
        spec = inspect.getfullargspec(func)
    except AttributeError:
        spec = inspect.getargspec(func)
    args, varargs, kwargs, defaults = spec[0], spec[1], spec[2], spec[3]
    if varargs or kwargs:
        raise TypeError('variable length arguments for action not allowed.')
    if len(args) != len(defaults or ()):
        raise TypeError('not all arguments have proper definitions')

    for idx, (arg, definition) in enumerate(zip(args, defaults or ())):
        if arg.startswith('_'):
            raise TypeError('arguments may not start with an underscore')
        if not isinstance(definition, tuple):
            shortcut = None
            default = definition
        else:
            shortcut, default = definition
        argument_type = argument_types[type(default)]
        # A default of True is exposed as an inverted 'no-<arg>' flag.
        if isinstance(default, bool) and default is True:
            arg = 'no-' + arg
        arguments.append((arg.replace('_', '-'), shortcut,
                          default, argument_type))
    return func, description, arguments
def make_shell(init_func=None, banner=None, use_ipython=True):
    """Returns an action callback that spawns a new interactive
    python shell.

    :param init_func: an optional initialization function that is
                      called before the shell is started.  The return
                      value of this function is the initial namespace.
    :param banner: the banner that is displayed before the shell.  If
                   not specified a generic banner is used instead.
    :param use_ipython: if set to `True` ipython is used if available.
    """
    if banner is None:
        banner = 'Interactive Werkzeug Shell'
    if init_func is None:
        init_func = dict
    def action(ipython=use_ipython):
        """Start a new interactive python session."""
        namespace = init_func()
        if ipython:
            try:
                try:
                    # Newer IPython releases (>= 0.11) moved the embed API.
                    from IPython.frontend.terminal.embed import InteractiveShellEmbed
                    sh = InteractiveShellEmbed(banner1=banner)
                except ImportError:
                    # Fall back to the pre-0.11 IPython API.
                    from IPython.Shell import IPShellEmbed
                    sh = IPShellEmbed(banner=banner)
            except ImportError:
                # IPython not installed at all; drop through to code.interact.
                pass
            else:
                sh(global_ns={}, local_ns=namespace)
                return
        from code import interact
        interact(banner, local=namespace)
    return action
def make_runserver(app_factory, hostname='localhost', port=5000,
                   use_reloader=False, use_debugger=False, use_evalex=True,
                   threaded=False, processes=1, static_files=None,
                   extra_files=None, ssl_context=None):
    """Returns an action callback that spawns a new development server.

    .. versionadded:: 0.5
       `static_files` and `extra_files` was added.

    ..versionadded:: 0.6.1
       `ssl_context` was added.

    :param app_factory: a function that returns a new WSGI application.
    :param hostname: the default hostname the server should listen on.
    :param port: the default port of the server.
    :param use_reloader: the default setting for the reloader.
    :param use_evalex: the default setting for the evalex flag of the debugger.
    :param threaded: the default threading setting.
    :param processes: the default number of processes to start.
    :param static_files: optional dict of static files.
    :param extra_files: optional list of extra files to track for reloading.
    :param ssl_context: optional SSL context for running server in HTTPS mode.
    """
    def action(hostname=('h', hostname), port=('p', port),
               reloader=use_reloader, debugger=use_debugger,
               evalex=use_evalex, threaded=threaded, processes=processes):
        """Start a new development server."""
        from werkzeug.serving import run_simple
        app = app_factory()
        # The literal 1 is passed positionally between extra_files and
        # threaded -- per run_simple's signature this is reloader_interval.
        run_simple(hostname, port, app, reloader, debugger, evalex,
                   extra_files, 1, threaded, processes,
                   static_files=static_files, ssl_context=ssl_context)
    return action
| agpl-3.0 |
codrut3/tensorflow | tensorflow/python/keras/_impl/keras/layers/lstm_test.py | 16 | 13149 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for LSTM layer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.keras._impl import keras
from tensorflow.python.keras._impl.keras import testing_utils
from tensorflow.python.platform import test
class LSTMLayerTest(test.TestCase):
  """Unit tests for keras.layers.LSTM: layer_test coverage, statefulness,
  regularizers/constraints, masking, and initial/returned-state handling."""

  def test_return_sequences_LSTM(self):
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    with self.test_session():
      testing_utils.layer_test(
          keras.layers.LSTM,
          kwargs={'units': units,
                  'return_sequences': True},
          input_shape=(num_samples, timesteps, embedding_dim))

  def test_dynamic_behavior_LSTM(self):
    # Variable-length time dimension (None) must still train.
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    with self.test_session():
      layer = keras.layers.LSTM(units, input_shape=(None, embedding_dim))
      model = keras.models.Sequential()
      model.add(layer)
      model.compile('sgd', 'mse')
      x = np.random.random((num_samples, timesteps, embedding_dim))
      y = np.random.random((num_samples, units))
      model.train_on_batch(x, y)

  def test_dropout_LSTM(self):
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    with self.test_session():
      testing_utils.layer_test(
          keras.layers.LSTM,
          kwargs={'units': units,
                  'dropout': 0.1,
                  'recurrent_dropout': 0.1},
          input_shape=(num_samples, timesteps, embedding_dim))

  def test_implementation_mode_LSTM(self):
    # All three kernel implementation modes must produce a working layer.
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    with self.test_session():
      for mode in [0, 1, 2]:
        testing_utils.layer_test(
            keras.layers.LSTM,
            kwargs={'units': units,
                    'implementation': mode},
            input_shape=(num_samples, timesteps, embedding_dim))

  def test_statefulness_LSTM(self):
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    layer_class = keras.layers.LSTM
    with self.test_session():
      model = keras.models.Sequential()
      model.add(
          keras.layers.Embedding(
              4,
              embedding_dim,
              mask_zero=True,
              input_length=timesteps,
              batch_input_shape=(num_samples, timesteps)))
      layer = layer_class(
          units, return_sequences=False, stateful=True, weights=None)
      model.add(layer)
      model.compile(optimizer='sgd', loss='mse')
      out1 = model.predict(np.ones((num_samples, timesteps)))
      self.assertEqual(out1.shape, (num_samples, units))

      # train once so that the states change
      model.train_on_batch(
          np.ones((num_samples, timesteps)), np.ones((num_samples, units)))
      out2 = model.predict(np.ones((num_samples, timesteps)))

      # if the state is not reset, output should be different
      self.assertNotEqual(out1.max(), out2.max())

      # check that output changes after states are reset
      # (even though the model itself didn't change)
      layer.reset_states()
      out3 = model.predict(np.ones((num_samples, timesteps)))
      self.assertNotEqual(out2.max(), out3.max())

      # check that container-level reset_states() works
      model.reset_states()
      out4 = model.predict(np.ones((num_samples, timesteps)))
      self.assertAllClose(out3, out4, atol=1e-5)

      # check that the call to `predict` updated the states
      out5 = model.predict(np.ones((num_samples, timesteps)))
      self.assertNotEqual(out4.max(), out5.max())

      # Check masking: with mask_zero, left- and right-padded inputs of the
      # same content must yield the same final output.
      layer.reset_states()

      left_padded_input = np.ones((num_samples, timesteps))
      left_padded_input[0, :1] = 0
      left_padded_input[1, :2] = 0
      out6 = model.predict(left_padded_input)

      layer.reset_states()

      right_padded_input = np.ones((num_samples, timesteps))
      right_padded_input[0, -1:] = 0
      right_padded_input[1, -2:] = 0
      out7 = model.predict(right_padded_input)

      self.assertAllClose(out7, out6, atol=1e-5)

  def test_regularizers_LSTM(self):
    embedding_dim = 4
    layer_class = keras.layers.LSTM
    with self.test_session():
      layer = layer_class(
          5,
          return_sequences=False,
          weights=None,
          input_shape=(None, embedding_dim),
          kernel_regularizer=keras.regularizers.l1(0.01),
          recurrent_regularizer=keras.regularizers.l1(0.01),
          bias_regularizer='l2',
          activity_regularizer='l1')
      layer.build((None, None, 2))
      # Three weight regularizers produce three losses at build time.
      self.assertEqual(len(layer.losses), 3)
      x = keras.backend.variable(np.ones((2, 3, 2)))
      layer(x)
      # The activity regularizer contributes one loss tied to input x.
      self.assertEqual(len(layer.get_losses_for(x)), 1)

  def test_constraints_LSTM(self):
    embedding_dim = 4
    layer_class = keras.layers.LSTM
    with self.test_session():
      k_constraint = keras.constraints.max_norm(0.01)
      r_constraint = keras.constraints.max_norm(0.01)
      b_constraint = keras.constraints.max_norm(0.01)
      layer = layer_class(
          5,
          return_sequences=False,
          weights=None,
          input_shape=(None, embedding_dim),
          kernel_constraint=k_constraint,
          recurrent_constraint=r_constraint,
          bias_constraint=b_constraint)
      layer.build((None, None, embedding_dim))
      self.assertEqual(layer.cell.kernel.constraint, k_constraint)
      self.assertEqual(layer.cell.recurrent_kernel.constraint, r_constraint)
      self.assertEqual(layer.cell.bias.constraint, b_constraint)

  def test_with_masking_layer_LSTM(self):
    layer_class = keras.layers.LSTM
    with self.test_session():
      inputs = np.random.random((2, 3, 4))
      targets = np.abs(np.random.random((2, 3, 5)))
      targets /= targets.sum(axis=-1, keepdims=True)
      model = keras.models.Sequential()
      model.add(keras.layers.Masking(input_shape=(3, 4)))
      model.add(layer_class(units=5, return_sequences=True, unroll=False))
      model.compile(loss='categorical_crossentropy', optimizer='adam')
      model.fit(inputs, targets, epochs=1, batch_size=2, verbose=1)

  def test_from_config_LSTM(self):
    # Config round-trip must be lossless for both stateful settings.
    layer_class = keras.layers.LSTM
    for stateful in (False, True):
      l1 = layer_class(units=1, stateful=stateful)
      l2 = layer_class.from_config(l1.get_config())
      assert l1.get_config() == l2.get_config()

  def test_specify_initial_state_keras_tensor(self):
    num_states = 2
    timesteps = 3
    embedding_dim = 4
    units = 3
    num_samples = 2
    with self.test_session():
      # Test with Keras tensor
      inputs = keras.Input((timesteps, embedding_dim))
      initial_state = [keras.Input((units,)) for _ in range(num_states)]
      layer = keras.layers.LSTM(units)
      if len(initial_state) == 1:
        output = layer(inputs, initial_state=initial_state[0])
      else:
        output = layer(inputs, initial_state=initial_state)
      assert initial_state[0] in layer._inbound_nodes[0].input_tensors

      model = keras.models.Model([inputs] + initial_state, output)
      model.compile(loss='categorical_crossentropy', optimizer='adam')

      inputs = np.random.random((num_samples, timesteps, embedding_dim))
      initial_state = [np.random.random((num_samples, units))
                       for _ in range(num_states)]
      targets = np.random.random((num_samples, units))
      model.train_on_batch([inputs] + initial_state, targets)

  def test_specify_initial_state_non_keras_tensor(self):
    num_states = 2
    timesteps = 3
    embedding_dim = 4
    units = 3
    num_samples = 2
    with self.test_session():
      # Test with non-Keras tensor
      inputs = keras.Input((timesteps, embedding_dim))
      initial_state = [keras.backend.random_normal_variable(
          (num_samples, units), 0, 1)
                       for _ in range(num_states)]
      layer = keras.layers.LSTM(units)
      output = layer(inputs, initial_state=initial_state)

      model = keras.models.Model(inputs, output)
      model.compile(loss='categorical_crossentropy', optimizer='adam')

      inputs = np.random.random((num_samples, timesteps, embedding_dim))
      targets = np.random.random((num_samples, units))
      model.train_on_batch(inputs, targets)

  def test_reset_states_with_values(self):
    num_states = 2
    timesteps = 3
    embedding_dim = 4
    units = 3
    num_samples = 2
    with self.test_session():
      layer = keras.layers.LSTM(units, stateful=True)
      layer.build((num_samples, timesteps, embedding_dim))
      layer.reset_states()
      assert len(layer.states) == num_states
      assert layer.states[0] is not None
      # After a plain reset, states must be all zeros.
      self.assertAllClose(
          keras.backend.eval(layer.states[0]),
          np.zeros(keras.backend.int_shape(layer.states[0])),
          atol=1e-4)
      state_shapes = [keras.backend.int_shape(state) for state in layer.states]
      values = [np.ones(shape) for shape in state_shapes]
      if len(values) == 1:
        values = values[0]
      layer.reset_states(values)
      # Resetting with explicit values must install those values.
      self.assertAllClose(
          keras.backend.eval(layer.states[0]),
          np.ones(keras.backend.int_shape(layer.states[0])),
          atol=1e-4)

      # Test with invalid data
      with self.assertRaises(ValueError):
        layer.reset_states([1] * (len(layer.states) + 1))

  def test_specify_state_with_masking(self):
    num_states = 2
    timesteps = 3
    embedding_dim = 4
    units = 3
    num_samples = 2
    with self.test_session():
      inputs = keras.Input((timesteps, embedding_dim))
      _ = keras.layers.Masking()(inputs)
      initial_state = [keras.Input((units,)) for _ in range(num_states)]
      output = keras.layers.LSTM(units)(inputs, initial_state=initial_state)

      model = keras.models.Model([inputs] + initial_state, output)
      model.compile(loss='categorical_crossentropy', optimizer='adam')

      inputs = np.random.random((num_samples, timesteps, embedding_dim))
      initial_state = [np.random.random((num_samples, units))
                       for _ in range(num_states)]
      targets = np.random.random((num_samples, units))
      model.train_on_batch([inputs] + initial_state, targets)

  def test_return_state(self):
    num_states = 2
    timesteps = 3
    embedding_dim = 4
    units = 3
    num_samples = 2
    with self.test_session():
      inputs = keras.Input(batch_shape=(num_samples, timesteps, embedding_dim))
      layer = keras.layers.LSTM(units, return_state=True, stateful=True)
      outputs = layer(inputs)
      state = outputs[1:]
      assert len(state) == num_states
      model = keras.models.Model(inputs, state[0])

      inputs = np.random.random((num_samples, timesteps, embedding_dim))
      state = model.predict(inputs)
      # Returned state must match the layer's stored state after predict.
      self.assertAllClose(keras.backend.eval(layer.states[0]), state, atol=1e-4)

  def test_state_reuse(self):
    timesteps = 3
    embedding_dim = 4
    units = 3
    num_samples = 2
    with self.test_session():
      inputs = keras.Input(batch_shape=(num_samples, timesteps, embedding_dim))
      layer = keras.layers.LSTM(units, return_state=True, return_sequences=True)
      outputs = layer(inputs)
      output, state = outputs[0], outputs[1:]
      # Feed the first LSTM's state as the second LSTM's initial state.
      output = keras.layers.LSTM(units)(output, initial_state=state)
      model = keras.models.Model(inputs, output)

      inputs = np.random.random((num_samples, timesteps, embedding_dim))
      outputs = model.predict(inputs)

  def test_initial_states_as_other_inputs(self):
    timesteps = 3
    embedding_dim = 4
    units = 3
    num_samples = 2
    num_states = 2
    layer_class = keras.layers.LSTM
    with self.test_session():
      # Test with Keras tensor
      main_inputs = keras.Input((timesteps, embedding_dim))
      initial_state = [keras.Input((units,)) for _ in range(num_states)]
      inputs = [main_inputs] + initial_state

      layer = layer_class(units)
      output = layer(inputs)
      assert initial_state[0] in layer._inbound_nodes[0].input_tensors

      model = keras.models.Model(inputs, output)
      model.compile(loss='categorical_crossentropy', optimizer='adam')

      main_inputs = np.random.random((num_samples, timesteps, embedding_dim))
      initial_state = [np.random.random((num_samples, units))
                       for _ in range(num_states)]
      targets = np.random.random((num_samples, units))
      model.train_on_batch([main_inputs] + initial_state, targets)
if __name__ == '__main__':
  # Run the TensorFlow test runner over this module's test cases.
  test.main()
| apache-2.0 |
cindyyu/kuma | vendor/packages/pygments/lexers/diff.py | 75 | 3243 | # -*- coding: utf-8 -*-
"""
pygments.lexers.diff
~~~~~~~~~~~~~~~~~~~~
Lexers for diff/patch formats.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include, bygroups
from pygments.token import Text, Comment, Operator, Keyword, Name, Generic, \
Literal
__all__ = ['DiffLexer', 'DarcsPatchLexer']
class DiffLexer(RegexLexer):
    """
    Lexer for unified or context-style diffs or patches.
    """

    name = 'Diff'
    aliases = ['diff', 'udiff']
    filenames = ['*.diff', '*.patch']
    mimetypes = ['text/x-diff', 'text/x-patch']

    tokens = {
        'root': [
            (r' .*\n', Text),                       # context line (leading space)
            (r'\+.*\n', Generic.Inserted),          # added line
            (r'-.*\n', Generic.Deleted),            # removed line
            (r'!.*\n', Generic.Strong),             # changed line (context diffs)
            (r'@.*\n', Generic.Subheading),         # hunk header (@@ ... @@)
            (r'([Ii]ndex|diff).*\n', Generic.Heading),  # file header
            (r'=.*\n', Generic.Heading),            # separator line
            (r'.*\n', Text),                        # anything else
        ]
    }

    # NOTE: no `self` parameter — Pygments treats analyse_text specially and
    # calls it with the text only. Falls off the end (returns None) when no
    # diff marker is recognised, i.e. "no confidence".
    def analyse_text(text):
        if text[:7] == 'Index: ':
            return True
        if text[:5] == 'diff ':
            return True
        if text[:4] == '--- ':
            return 0.9
class DarcsPatchLexer(RegexLexer):
    """
    DarcsPatchLexer is a lexer for the various versions of the darcs patch
    format.  Examples of this format are derived by commands such as
    ``darcs annotate --patch`` and ``darcs send``.

    .. versionadded:: 0.10
    """

    name = 'Darcs Patch'
    aliases = ['dpatch']
    filenames = ['*.dpatch', '*.darcspatch']

    # Keywords that introduce a darcs hunk/operation line.
    DPATCH_KEYWORDS = ('hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
                       'replace')

    tokens = {
        'root': [
            (r'<', Operator),
            (r'>', Operator),
            (r'\{', Operator),
            (r'\}', Operator),
            # Patch header fully on one logical span: [TAG? name \n author ** date ]
            (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])',
             bygroups(Operator, Keyword, Name, Text, Name, Operator,
                      Literal.Date, Text, Operator)),
            # Same header but followed by a long comment, handled in 'comment'.
            (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
             bygroups(Operator, Keyword, Name, Text, Name, Operator,
                      Literal.Date, Text), 'comment'),
            (r'New patches:', Generic.Heading),
            (r'Context:', Generic.Heading),
            (r'Patch bundle hash:', Generic.Heading),
            (r'(\s*)(%s)(.*\n)' % '|'.join(DPATCH_KEYWORDS),
             bygroups(Text, Keyword, Text)),
            (r'\+', Generic.Inserted, "insert"),
            (r'-', Generic.Deleted, "delete"),
            (r'.*\n', Text),
        ],
        'comment': [
            (r'[^\]].*\n', Comment),
            (r'\]', Operator, "#pop"),   # closing bracket ends the header comment
        ],
        'specialText': [  # darcs add [_CODE_] special operators for clarity
            (r'\n', Text, "#pop"),  # line-based
            (r'\[_[^_]*_]', Operator),
        ],
        'insert': [
            include('specialText'),
            (r'\[', Generic.Inserted),
            (r'[^\n\[]+', Generic.Inserted),
        ],
        'delete': [
            include('specialText'),
            (r'\[', Generic.Deleted),
            (r'[^\n\[]+', Generic.Deleted),
        ],
    }
| mpl-2.0 |
sudheesh001/oh-mainline | vendor/packages/python-openid/openid/dh.py | 168 | 1608 | from openid import cryptutil
from openid import oidutil
def strxor(x, y):
    """Return the byte-wise XOR of two equal-length strings.

    :param x: first string
    :param y: second string, must have the same length as ``x``
    :raises ValueError: if the lengths differ
    :return: string of the same length where each character is chr(x_i ^ y_i)
    """
    if len(x) != len(y):
        raise ValueError('Inputs to strxor must have the same length')
    # The original used a Python-2-only tuple-parameter lambda
    # (``lambda (a, b): ...``); a generator expression is portable and clearer.
    return "".join(chr(ord(a) ^ ord(b)) for a, b in zip(x, y))
class DiffieHellman(object):
    """Classic Diffie-Hellman key agreement over a prime modulus."""

    # Default modulus/generator; presumably the well-known OpenID 1024-bit
    # group parameters — TODO confirm against the OpenID specification.
    DEFAULT_MOD = 155172898181473697471232257763715539915724801966915404479707795314057629378541917580651227423698188993727816152646631438561595825688188889951272158842675419950341258706556549803580104870537681476726513255747040765857479291291572334510643245094715007229621094194349783925984760375594985848253359305585439638443L
    DEFAULT_GEN = 2

    def fromDefaults(cls):
        """Alternate constructor using the default modulus and generator."""
        return cls(cls.DEFAULT_MOD, cls.DEFAULT_GEN)
    fromDefaults = classmethod(fromDefaults)  # pre-decorator classmethod idiom

    def __init__(self, modulus, generator):
        # Coerce to long and immediately draw a random private exponent.
        self.modulus = long(modulus)
        self.generator = long(generator)
        self._setPrivate(cryptutil.randrange(1, modulus - 1))

    def _setPrivate(self, private):
        """This is here to make testing easier"""
        self.private = private
        # public = g^private mod p
        self.public = pow(self.generator, self.private, self.modulus)

    def usingDefaultValues(self):
        # True when this instance was built with the default group parameters.
        return (self.modulus == self.DEFAULT_MOD and
                self.generator == self.DEFAULT_GEN)

    def getSharedSecret(self, composite):
        # shared = (other party's public value)^private mod p
        return pow(composite, self.private, self.modulus)

    def xorSecret(self, composite, secret, hash_func):
        # Mask `secret` with the hash of the shared secret (OpenID-style).
        dh_shared = self.getSharedSecret(composite)
        hashed_dh_shared = hash_func(cryptutil.longToBinary(dh_shared))
        return strxor(secret, hashed_dh_shared)
| agpl-3.0 |
gaurav9991/hk | hooker_analysis/hooker_analysis.py | 2 | 25469 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#+---------------------------------------------------------------------------+
#| |
#| Android's Hooker |
#| |
#+---------------------------------------------------------------------------+
#| Copyright (C) 2011 Georges Bossert and Dimitri Kirchner |
#| This program is free software: you can redistribute it and/or modify |
#| it under the terms of the GNU General Public License as published by |
#| the Free Software Foundation, either version 3 of the License, or |
#| (at your option) any later version. |
#| |
#| This program is distributed in the hope that it will be useful, |
#| but WITHOUT ANY WARRANTY; without even the implied warranty of |
#| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
#| GNU General Public License for more details. |
#| |
#| You should have received a copy of the GNU General Public License |
#| along with this program. If not, see <http://www.gnu.org/licenses/>. |
#+---------------------------------------------------------------------------+
#| @url : http://www.amossys.fr |
#| @contact : android-hooker@amossys.fr |
#| @sponsors : Amossys, http://www.amossys.fr |
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| Standard library imports
#+---------------------------------------------------------------------------+
import sys
import pygeoip
#+---------------------------------------------------------------------------+
#| Local imports
#+---------------------------------------------------------------------------+
from hooker_common import Logger
from hooker_common.elasticsearch.Es import Es
from hooker_common.elasticsearch.EsInterrogator import EsInterrogator
#ES_IP = "192.168.0.16"
ES_IP = "127.0.0.1"
ES_PORT = 9200
logger = Logger.getLogger(__name__)
def eventToString(event):
    """Render a captured hook event as a multi-line, human-readable string.

    :param event: object exposing HookerName, ClassName, MethodName,
        InstanceID, an optional Parameters list of dicts (each with
        "ParameterType"/"ParameterValue" keys) and an optional Return value.
    :return: newline-joined description, one field per line.
    """
    result = []
    result.append("Event {0}".format(event.HookerName))
    result.append("ClassName: {0}".format(event.ClassName))
    result.append("MethodName: {0}".format(event.MethodName))
    result.append("InstanceID: {0}".format(event.InstanceID))
    if event.Parameters is not None and len(event.Parameters) > 0:
        result.append("Parameters:")
        for parameter in event.Parameters:
            # Idiom: test membership on the dict directly instead of .keys().
            if parameter is not None and "ParameterType" in parameter and "ParameterValue" in parameter:
                result.append("- {0} ({1})".format(parameter["ParameterValue"], parameter["ParameterType"]))
    if event.Return is not None:
        result.append("Return: {0}".format(event.Return))
    return "\n".join(result)
def filterEvents(events, className=None, methodName=None):
    """Return the events matching the given class and/or method name.

    A criterion left as None is ignored; with both None, every event is kept.
    """
    return [event for event in events
            if (className is None or event.ClassName == className)
            and (methodName is None or event.MethodName == methodName)]
def macroAnalyzeTelephony(esInterrogator):
    """Log, per Telephony hook method, how many distinct experiments hit it."""
    logger.warn("Macro Analysis of Telephony")
    logger.warn("------------------------------------------------")
    telEvents = esInterrogator.getAllEvents(HookerName="Telephony")
    # Map MethodName -> list of distinct experiment ids (event.Parent).
    telPerXP = dict()
    for event in telEvents:
        xps = telPerXP.setdefault(event.MethodName, [])
        if event.Parent not in xps:
            xps.append(event.Parent)
    logger.info("{0} events related to Telephony".format(len(telEvents)))
    for method, occ in telPerXP.iteritems():
        logger.info("{0}\t\t\t:{1}".format(method, len(occ)))
def macroAnalyzeNetwork(esInterrogator):
    """Log, per Network hook method, how many distinct experiments hit it.

    Mirror of macroAnalyzeTelephony for the "Network" hooker.
    """
    logger.warn("Macro Analysis of Network")
    logger.warn("------------------------------------------------")
    telEvents = esInterrogator.getAllEvents(HookerName="Network")
    # Map MethodName -> list of distinct experiment ids (event.Parent).
    telPerXP = dict()
    for event in telEvents:
        method = event.MethodName
        if method not in telPerXP:
            telPerXP[method]=[]
        if event.Parent not in telPerXP[method]:
            telPerXP[method].append(event.Parent)
    logger.info("{0} events related to Network".format(len(telEvents)))
    for method, occ in telPerXP.iteritems():
        logger.info("{0}\t\t\t:{1}".format(method, len(occ)))
def macroAnalyzeIPC(esInterrogator):
    """Log every IPC Intent construction whose first argument is a string.

    Only Intent(String ...) constructors are interesting here (action-based
    intents); other overloads are silently skipped.
    """
    logger.warn("Macro Analysis of IPC")
    logger.warn("------------------------------------------------")
    # Local renamed from the copy-pasted `telEvents`; these are Intent events.
    intentEvents = esInterrogator.getAllEvents(HookerName="IPC", ClassName="android.content.intent", MethodName="Intent")
    for event in intentEvents:
        params = event.Parameters
        # Idiom: membership test on the dict itself instead of .keys().
        if len(params) >= 1 and "ParameterType" in params[0] and params[0]['ParameterType'] == "java.lang.String":
            logger.info(eventToString(event))
def macroAnalyzeX509CustomVerification(esInterrogator):
    """Find SSLContext.init() calls that install a non-Apache TrustManager.

    A custom TrustManager often means the app bypasses normal certificate
    validation, so each such finding is logged at fatal level together with
    the APKs of the experiments that triggered it.
    """
    logger.warn("Macro Analysis of x509 custom verification")
    logger.warn("------------------------------------------------")
    initEvents = esInterrogator.getAllEvents(HookerName="Network", ClassName="javax.net.ssl.SSLContext", MethodName="init")
    # Map TrustManager class name -> list of distinct experiment ids.
    customTrustManagers = dict()
    for event in initEvents:
        logger.debug(eventToString(event))
        # NOTE(review): guards len in [1, 3] but then indexes Parameters[1],
        # which raises IndexError when len == 1 — verify against real events.
        if len(event.Parameters)>=1 and len(event.Parameters)<4:
            if "ParameterType" in event.Parameters[1].keys() and event.Parameters[1]['ParameterType'] == "[Ljavax.net.ssl.TrustManager;":
                tmp_trustManager = event.Parameters[1]['ParameterValue']
                # Extract the class name from a value shaped like
                # "[...{com.foo.Manager@1234}...]" — assumes that exact format.
                trustManager = tmp_trustManager.split('{')[1].split('}')[0].split('@')[0]
                logger.warn(trustManager)
                if not "org.apache.harmony" in trustManager:
                    if trustManager not in customTrustManagers.keys():
                        customTrustManagers[trustManager] = []
                    if event.Parent not in customTrustManagers[trustManager]:
                        customTrustManagers[trustManager].append(event.Parent)
                    logger.fatal("Found custom TrustManager: {0}".format(eventToString(event)))
                else:
                    logger.info("Found apache TrustManager: {0}".format(eventToString(event)))
    # Report the APKs behind each custom TrustManager.
    for k, v in customTrustManagers.iteritems():
        logger.warn("TrustManager: {0}".format(k))
        for w in v:
            apk = esInterrogator.getAPKInXP(w)
            logger.info("APK : {0}, market={1}, sha1={2}".format(apk.Name, apk.Market, apk.Filesha1))
    # # Finally, check if first and last parameters are different from null
    # if len(event.Parameters[0]) > 0:
    #     logger.info("Found client authentication: {0}".format(eventToString(event)))
    # if len(event.Parameters[2]) > 0:
    #     logger.info("Found random different from default: {0}".format(eventToString(event)))
def macroAnalyzeWebview(esInterrogator):
    """List experiments whose APK calls WebView.addJavascriptInterface.

    Exposing a Java object to JavaScript is a classic attack surface, so each
    event is logged and the distinct experiments are summarised at the end.
    """
    logger.warn("Macro Analysis of Webview components")
    logger.warn("------------------------------------------------")
    eventsPerXP = []
    initEvents = esInterrogator.getAllEvents(HookerName="Network", ClassName="android.webkit.WebView", MethodName="addJavascriptInterface")
    for event in initEvents:
        if event.Parent not in eventsPerXP:
            eventsPerXP.append(event.Parent)
        logger.info(eventToString(event))
    logger.warn("List of XP :")
    for xp in eventsPerXP:
        apk = esInterrogator.getAPKInXP(xp)
        logger.warn("{0}: {1} {2} ({3})".format(xp, apk.Name, apk.Market, apk.Filesha1))
    # BUG FIX: the original printed len(xp) — the length of the *last XP id
    # string* from the loop above — instead of the number of experiments.
    logger.warn("{0} XP founds.".format(len(eventsPerXP)))
def macroAnalyzeSocketListening(esInterrogator):
    """List experiments whose APK opens a listening server socket.

    NOTE(review): the queried class name "java.net.SocketServer" does not
    match the real Android class (java.net.ServerSocket) — confirm against
    the hooker's event naming before trusting empty results.
    """
    logger.warn("Macro Analysis of socket listening on network")
    logger.warn("------------------------------------------------")
    eventsPerXP = []
    initEvents = esInterrogator.getAllEvents(HookerName="Network", ClassName="java.net.SocketServer", MethodName="SocketServer")
    for event in initEvents:
        if event.Parent not in eventsPerXP:
            eventsPerXP.append(event.Parent)
        logger.info(eventToString(event))
    logger.warn("List of XP :")
    for xp in eventsPerXP:
        apk = esInterrogator.getAPKInXP(xp)
        logger.warn("{0}: {1} {2} ({3})".format(xp, apk.Name, apk.Market, apk.Filesha1))
    # BUG FIX: count the experiments, not the length of the last XP id string.
    logger.warn("{0} XP founds.".format(len(eventsPerXP)))
def macroAnalyzeGeolocation(esInterrogator):
    """List experiments whose APK queries the device location.

    Three LocationManager/Location calls are treated as geolocation requests;
    the three identical collection loops of the original are folded into one.
    """
    logger.warn("Macro Analysis of geolocation requests")
    logger.warn("------------------------------------------------")
    eventsPerXP = []
    # (ClassName, MethodName) pairs that indicate a geolocation request.
    queries = [
        ("android.location.locationmanager", "getlastknownlocation"),
        ("android.location.location", "getlongitude"),
        ("android.location.location", "getlatitude"),
    ]
    for className, methodName in queries:
        for event in esInterrogator.getAllEvents(HookerName="Geolocation", ClassName=className, MethodName=methodName):
            if event.Parent not in eventsPerXP:
                eventsPerXP.append(event.Parent)
            logger.info(eventToString(event))
    logger.warn("List of XP :")
    for xp in eventsPerXP:
        apk = esInterrogator.getAPKInXP(xp)
        logger.warn("{0}: {1} ({2})".format(xp, apk.Name, apk.Filesha1))
    # BUG FIX: count the experiments, not the length of the last XP id string.
    logger.warn("{0} XP founds.".format(len(eventsPerXP)))
def macroAnalyzeConnectTo(esInterrogator):
    """Summarise where analysed APKs connect: URLs, IPs, ports, geolocation.

    Parses java.net.Socket.connect() events whose first parameter is an
    InetSocketAddress rendered as "host/ip:port" (assumed format — verify
    against the hooker's serialisation), then reports per-URL/port/timezone
    counts of distinct experiments.
    """
    logger.warn("Macro Analysis of IP where APK are connecting")
    logger.warn("------------------------------------------------")
    # Each maps a value -> list of distinct experiment ids (event.Parent).
    connectIPs = dict()
    connectPorts = dict()
    connectURLs = dict()
    initEvents = esInterrogator.getAllEvents(HookerName="Network", ClassName="java.net.Socket", MethodName="connect")
    for event in initEvents:
        if "ParameterType" in event.Parameters[0].keys() and event.Parameters[0]['ParameterType'] == "java.net.InetSocketAddress":
            # Split "host/ip:port" into its three components.
            tmp = event.Parameters[0]['ParameterValue'].split('/')
            url = tmp[0]
            tmp = tmp[1].split(':')
            ip = tmp[0]
            port = tmp[1]
            #logger.info("url: {0}, ip: {1}, port: {2}".format(url, ip, port))
            # Get URLs
            if url not in connectURLs:
                connectURLs[url] = []
            if event.Parent not in connectURLs[url]:
                connectURLs[url].append(event.Parent)
            # Get IPs
            if ip not in connectIPs:
                connectIPs[ip] = []
            if event.Parent not in connectIPs[ip]:
                connectIPs[ip].append(event.Parent)
            # Get ports
            if port not in connectPorts:
                connectPorts[port] = []
            if event.Parent not in connectPorts[port]:
                connectPorts[port].append(event.Parent)
    logger.warn("------------------------------------------------")
    logger.warn("List of URLs and number XP associated :")
    for url, xp in connectURLs.iteritems():
        logger.info("URL: {0} -> number of xp: {1}".format(url, len(xp)))
    logger.warn("------------------------------------------------")
    logger.warn("List of ports and number XP associated :")
    for port, xp in connectPorts.iteritems():
        logger.info("Port: {0} -> number of xp: {1}".format(port, len(xp)))
        # Non-HTTP(S) ports are suspicious enough to name the APKs involved.
        if port!='80' and port !='443':
            for _xp in xp:
                apk = esInterrogator.getAPKInXP(_xp)
                logger.warn("Port {0} APK: {1} {2} {3}".format(port, apk.Name, apk.Market, apk.Filesha1))
    logger.warn("------------------------------------------------")
    # GeoIP lookup; despite the names, time_zone_by_addr returns a timezone
    # string, not a city — the "cities" wording below is a misnomer.
    gic = pygeoip.GeoIP('GeoLiteCity.dat')
    connectCities = dict()
    for ip, xp in connectIPs.iteritems():
        city = gic.time_zone_by_addr(ip)
        if city not in connectCities:
            connectCities[city] = []
        if xp not in connectCities[city]:
            connectCities[city].append(xp)
    logger.warn("List of cities and XP associated :")
    for city, xp in connectCities.iteritems():
        logger.info("{0} -> number of xp: {1}".format(city, len(xp)))
def macroAnalyzeDeviceLockBypass(esInterrogator):
    """Look for Intent.putExtra calls suggestive of CVE-2013-6271 abuse.

    Flags putExtra(String, ...) calls whose key contains one of a few
    sensitive substrings (command/imei/sms/PrivateUrls).
    """
    logger.warn("Macro Analysis of Device Lock Bypass (CVE-2013-6271)")
    logger.warn("------------------------------------------------")
    initEvents = esInterrogator.getAllEvents(HookerName="IPC", ClassName="android.content.intent", MethodName="putExtra")
    for event in initEvents:
        if len(event.Parameters) == 2:
            # TODO : do this better if we have interesting results...
            # Idiom: membership test on the dict itself instead of .keys().
            if "ParameterType" in event.Parameters[0] and event.Parameters[0]['ParameterType'] == "java.lang.String":
                param = event.Parameters[0]['ParameterValue']
                if "command" in param or "imei" in param or "sms" in param or "PrivateUrls" in param:
                    logger.info(eventToString(event))
    # if "confirm_credentials" in params:
    #     # TODO if necessary: check value of the second param
    #     logger.fatal(eventToString(event))
def macroAnalyzeCrypto(esInterrogator):
    """Report crypto usage: Base64 calls, Cipher/Mac/MessageDigest algorithms.

    For each of Cipher, Mac and MessageDigest, getInstance() events are
    grouped by algorithm (first call parameter) and by distinct experiment,
    then dumped with the APKs involved.
    """
    logger.warn("Macro Analysis of Crypto Events")
    logger.warn("------------------------------------------------")
    logger.warn("------------------------------------------------")
    logger.warn("Base 64")
    logger.warn("------------------------------------------------")
    b64Events = esInterrogator.getAllEvents(HookerName="Crypto", ClassName="android.util.Base64")
    for event in b64Events:
        logger.info(eventToString(event))
    logger.info("{0} events related to Base 64".format(len(b64Events)))
    logger.warn("------------------------------------------------")
    logger.warn("Cipher")
    logger.warn("------------------------------------------------")
    # Retrieves all the events related to the getInstance call in javax.Crypto.Cipher Class
    cipherEvents = esInterrogator.getAllEvents(HookerName="Crypto", ClassName="javax.crypto.Cipher", MethodName="getInstance")
    algosPerXP = dict()
    for event in cipherEvents:
        # Stores the first parameter of this method call (the algorithm used)
        algo = event.Parameters[0]["ParameterValue"]
        if algo not in algosPerXP:
            algosPerXP[algo]=[]
        if event.Parent not in algosPerXP[algo]:
            algosPerXP[algo].append(event.Parent)
    # List the Android applications that triggered each type of Cipher algorithm
    for algo, xps in algosPerXP.iteritems():
        apks = [esInterrogator.getAPKInXP(xp) for xp in xps]
        logger.warn("{0} ({1} apks): ".format(algo, len(xps)))
        for apk in apks:
            logger.info("\t- {0} ({1})".format(apk.Name, apk.Filesha1))
    logger.warn("------------------------------------------------")
    logger.warn("Mac")
    logger.warn("------------------------------------------------")
    # Same grouping, for javax.crypto.Mac algorithms.
    cipherEvents = esInterrogator.getAllEvents(HookerName="Crypto", ClassName="javax.crypto.Mac", MethodName="getInstance")
    algosPerXP = dict()
    for event in cipherEvents:
        algo = event.Parameters[0]["ParameterValue"]
        if algo not in algosPerXP:
            algosPerXP[algo]=[]
        if event.Parent not in algosPerXP[algo]:
            algosPerXP[algo].append(event.Parent)
    logger.info("{0} events related to Mac".format(len(cipherEvents)))
    for algo, occ in algosPerXP.iteritems():
        logger.info("{0}\t\t\t:{1}".format(algo, len(occ)))
    logger.warn("------------------------------------------------")
    logger.warn("MessageDigest")
    logger.warn("------------------------------------------------")
    # Same grouping, for java.security.MessageDigest algorithms.
    cipherEvents = esInterrogator.getAllEvents(HookerName="Crypto", ClassName="java.security.MessageDigest", MethodName="getInstance")
    algosPerXP = dict()
    for event in cipherEvents:
        algo = event.Parameters[0]["ParameterValue"]
        if algo not in algosPerXP:
            algosPerXP[algo]=[]
        if event.Parent not in algosPerXP[algo]:
            algosPerXP[algo].append(event.Parent)
    logger.info("{0} events related to MessageDigest".format(len(cipherEvents)))
    # List the Android applications that triggered each type of Cipher algorithm
    for algo, xps in algosPerXP.iteritems():
        apks = [esInterrogator.getAPKInXP(xp) for xp in xps]
        logger.warn("{0} ({1} apks): ".format(algo, len(xps)))
        for apk in apks:
            logger.info("\t- {0} ({1})".format(apk.Name, apk.Filesha1))
    for algo, occ in algosPerXP.iteritems():
        logger.info("{0}\t\t\t:{1}".format(algo, len(occ)))
def deleteDuplicatedAPK(esInterrogator):
    """For each experiment with several registered APKs, keep the oldest one.

    Experiments with no APK at all are counted and reported at the end;
    duplicate APKs (all but the earliest Timestamp) are deleted from ES.

    :raises ValueError: if an experiment has no APK registered.
    """
    logger.info("Register unknown APK")
    # search for all the experiments
    experiments = esInterrogator.getAllExperiments()
    logger.debug("{0} experiments found".format(len(experiments)))
    errorXp = []
    experimentToAPK = dict()
    # check if an apk is registered for each experiment
    for experiment in experiments:
        apk = esInterrogator.getAPKInXP(experiment.IDXP)
        experimentToAPK[experiment] = apk
        if apk is None:
            errorXp.append(experiment)
    for experiment, apk in experimentToAPK.iteritems():
        if apk is None:
            # BUG FIX: the original raised a bare string ("raise 'An APK is
            # missing.'"), which is itself a TypeError at runtime — raise a
            # real exception instead.
            raise ValueError("An APK is missing.")
        elif isinstance(apk, list):
            # Multiple APKs: keep the earliest-inserted, delete the rest.
            logger.debug(apk)
            toDeleteApk = []
            keptApk = None
            for a in apk:
                if keptApk is None or a.Timestamp < keptApk.Timestamp:
                    if keptApk is not None:
                        toDeleteApk.append(keptApk)
                    keptApk = a
                else:
                    toDeleteApk.append(a)
            logger.warn("We delete the APK : ")
            logger.warn(toDeleteApk)
            for apk in toDeleteApk:
                try:
                    esInterrogator.es.deleteAPK(apk)
                # Modernized from the Py2-only "except Exception, e" form;
                # "as" works on Python 2.6+ and 3.
                except Exception as e:
                    logger.warn("An occured and prevented the suppression of the APK, {0}".format(e))
        logger.debug("{0}: {1}".format(experiment.IDXP, apk))
    logger.warn("{0} experiment has an invalid number of APK".format(len(errorXp)))
def main():
    """Entry point: connect to Elasticsearch and run the enabled analyses.

    Uncomment the analysis calls below to activate them; each takes the
    EsInterrogator built here.
    """
    logger.info("Execution of the Post-Analysis functions")
    # Initialization of the connection to an ES database
    es = Es([{"host":ES_IP, 'port':ES_PORT}])
    esInterrogator = EsInterrogator(es)
    # ======================================================
    # Activate here the kind of analysis you want to perform
    # ======================================================
    # macroAnalyzeConnectTo(esInterrogator)
    # deleteDuplicatedAPK(esInterrogator)
    # macroAnalyzeCrypto(esInterrogator)
    # macroAnalyzeTelephony(esInterrogator)
    # macroAnalyzeNetwork(esInterrogator)
    # macroAnalyzeIPC(esInterrogator)
    # macroAnalyzeWebview(esInterrogator)
    # macroAnalyzeX509CustomVerification(esInterrogator)
    # macroAnalyzeDeviceLockBypass(esInterrogator)
    # macroAnalyzeGeolocation(esInterrogator)
# # Retrieve all the known APKs (analyzed or not)
# # =============================================
# logger.warn("List of APKs")
# logger.warn("------------------------------------------------")
# apks = esInterrogator.getAllAPKs()
# for apk in apks:
# logger.info("Name:{0} - Market:{1} - Downloaded Date:{2} - Filesha1:{3} - Filename:{4}".format(apk.Name, apk.Market, apk.Timestamp, apk.Filesha1, apk.Filename))
# logger.warn("> We found {0} apks.".format(len(apks)))
# logger.warn("------------------------------------------------")
# # Retrieve all the known experimentations
# # =======================================
# logger.warn("List of Experimentations")
# logger.warn("------------------------------------------------")
# experiments = esInterrogator.getAllExperiments()
# for experiment in experiments:
# logger.info("IDXP:{0} - Filename:{1} - Filesha1:{2} - Emulator:{3}".format(experiment.IDXP, experiment.Filename, experiment.Filesha1, experiment.Emulator))
# logger.warn("> We found {0} experiments.".format(len(experiments)))
# logger.warn("------------------------------------------------")
# Now we list the experimentations that cover each APK
# ====================================================
# logger.warn("List of Experimentations per APK")
# logger.warn("------------------------------------------------")
# for apk in apks:
# logger.info("APK {0} (sha1 = {1})".format(apk.Name, apk.Filesha1))
# experiments = esInterrogator.getExperimentsWithAPKSha1(apk.Filesha1)
# for experiment in experiments:
# events = esInterrogator.getAllEventsOfExperiment(experiment.IDXP)
# logger.warn("\t- XP {0} : {1} events captured.".format(experiment.IDXP, len(events)))
# logger.warn("------------------------------------------------")
# Retrieve all the events associated with one Experiment
# ======================================================
# idXp = "93deb34a13c8a958d75dea4beaea7718"
# logger.warn("List of events related to XP {0}".format(idXp))
# logger.warn("------------------------------------------------")
# events = esInterrogator.getAllEventsOfExperiment(idXp)
# for event in events:
# logger.info("{0} - {1} - {2} - {3}".format(event.HookerName, event.ClassName, event.MethodName, event.PackageName))
# if event.Parameters is not None:
# for parameter in event.Parameters:
# logger.debug("Parameter: "+str(parameter))
# logger.error("> We found {0} events.".format(len(events)))
# logger.warn("------------------------------------------------")
# logger.warn("Events by Hooker")
# logger.warn("------------------------------------------------")
# logger.warn("DynamicCodeLoader Events")
# logger.warn("------------------------------------------------")
# dynEvents = esInterrogator.getAllEvents(HookerName="Account")
# for event in dynEvents:
# logger.info(eventToString(event))
# logger.warn("Telephony Events")
# logger.warn("------------------------------------------------")
# telephonyEvents = esInterrogator.getAllEvents(HookerName="Telephony")
# for event in telephonyEvents:
# logger.info(eventToString(event))
# logger.warn("Runtime Events")
# logger.warn("------------------------------------------------")
# runtimeEvents = esInterrogator.getAllEvents(HookerName="RunTime")
# for event in runtimeEvents:
# logger.info(eventToString(event))
# logger.warn("System Events")
# logger.warn("------------------------------------------------")
# systemEvents = esInterrogator.getAllEvents(HookerName="System")
# for event in systemEvents:
# logger.info(eventToString(event))
# logger.warn("Network Events")
# logger.warn("------------------------------------------------")
# networkEvents = esInterrogator.getAllEvents(HookerName="Network")
# for event in networkEvents:
# logger.info(eventToString(event))
# Lets retrieve only java.net.URL:URL method calss
# logger.warn("List of URLs")
# logger.warn("------------------------------------------------")
# listOfURLs = dict()
# urlEvents = esInterrogator.getAllEvents(ClassName="java.net.URL", MethodName="URL")
# for urlEvent in urlEvents:
# if urlEvent.Parameters is not None and len(urlEvent.Parameters) == 1:
# url = urlEvent.Parameters[0]["ParameterValue"]
# if url not in listOfURLs.keys():
# listOfURLs[url] = 0
# listOfURLs[url] += 1
# else:
# logger.warn(eventToString(urlEvent))
# for url, nbDef in listOfURLs.iteritems():
# logger.info("{0} ({1})".format(url, nbDef))
# Let's find all the apks
#apks = es.getAllAPKs()
# CLI entry point: run the configured post-analysis functions.
if __name__ == "__main__":
    main()
| gpl-3.0 |
0x0aNL/p2pool-0x0a | p2pool/bitcoin/networks/kittehcoin.py | 10 | 1210 | import os
import platform
from twisted.internet import defer
from .. import data, helper
from p2pool.util import pack
# Network parameters for the KittehCoin (MEOW) p2pool backend.
P2P_PREFIX = 'c0c0c0c0'.decode('hex') #pchmessagestart
P2P_PORT = 22566
ADDRESS_VERSION = 45 #pubkey_address
RPC_PORT = 22565
# Sanity check: the node must be a kittehcoind on mainnet.
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
    'kittehcoinaddress' in (yield bitcoind.rpc_help()) and
    not (yield bitcoind.rpc_getinfo())['testnet']
))
# Flat block subsidy: 1000 coins, in base units (1e8 per coin).
SUBSIDY_FUNC = lambda height: 1000*100000000
# scrypt proof-of-work, via the optional ltc_scrypt extension module.
POW_FUNC = lambda data: pack.IntType(256).unpack(__import__('ltc_scrypt').getPoWHash(data))
BLOCK_PERIOD = 60 # s
SYMBOL = 'MEOW'
# Platform-dependent location of kittehcoin.conf.
CONF_FILE_FUNC = lambda: os.path.join(os.path.join(os.environ['APPDATA'], 'kittehcoin')
    if platform.system() == 'Windows' else os.path.expanduser('~/Library/Application Support/kittehcoin/')
    if platform.system() == 'Darwin' else os.path.expanduser('~/.kittehcoin'), 'kittehcoin.conf')
BLOCK_EXPLORER_URL_PREFIX = 'http://kitexplorer.tk/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'http://kitexplorer.tk/address/'
TX_EXPLORER_URL_PREFIX = 'http://kitexplorer.tk/tx/'
SANE_TARGET_RANGE = (2**256//1000000000 - 1, 2**256//1000 - 1)
DUMB_SCRYPT_DIFF = 2**16
DUST_THRESHOLD = 0.00001e8
| gpl-3.0 |
DoubleNegativeVisualEffects/open-gto | python/gtoContainer/example.py | 6 | 2331 | #!/usr/bin/env python
#
# Copyright (C) 2004 Tweak Films
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
#
# This isn't so much of an example in and of itself. All it does create a
# gtoContainer called "gc", which loads the GTO file given on the command line.
# It then dumps you into the Python interpreter, so you can play around with
# the file interactively. When you quit, you are given the option of writing
# the file back out again. This can be used to manually edit any Object,
# Component, or Property of a GTO file.
#
import sys
import code
import gto
from gtoContainer import *
#############################################
def interact(banner=None):
    """Drop into an interactive interpreter using the caller's namespace."""
    import code, sys
    # use exception trick to pick up the current frame
    # NOTE(review): `raise None` itself raises a TypeError, which the bare
    # except catches — the traceback frame is what we are really after.
    try:
        raise None
    except:
        frame = sys.exc_info()[2].tb_frame.f_back
    # evaluate commands in current namespace (caller's globals + locals)
    namespace = frame.f_globals.copy()
    namespace.update(frame.f_locals)
    code.interact(banner=banner, local=namespace)
#############################################
# Start here....
if( len(sys.argv) != 2 ):
print
print "Usage: example <gtofile>"
print
sys.exit(0)
gc = gtoContainer( sys.argv[1] )
for obj in gc:
print obj
for comp in obj:
print "\t",comp
for prop in comp:
print "\t\t",prop
print
print ">>> gc"
print `gc`
interact("")
action = raw_input( "\nWrite changes? (y/N/<filename>) " )
if( action == 'y' ):
file.write( sys.argv[1] )
print
print "Wrote to", sys.argv[1]
print
if( len(action) > 3 ):
file.write( action )
print
print "Wrote to", action
print
| bsd-3-clause |
prdsmehmetstc/ozgurlukicin | tema/utils.py | 3 | 8292 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2007 TÜBİTAK UEKAE
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/copyleft/gpl.txt.
from oi.settings import MEDIA_ROOT,MEDIA_URL
from django.core.cache import get_cache
from django.db.models.fields import FileField
import Image
import re, os, urlparse, fnmatch
import shutil, os
#use local cache thing
image_cache = get_cache('locmem:///')
#is it alot ?
_FILE_CACHE_TIMEOUT = 5184000 # 1 day
_THUMBNAIL_GLOB = '%s_t*%s'
#some value not important for now
DEFAULT_HEIGHT=100
DEFAULT_WIDTH=100
def _get_thumbnail_path(path, width=DEFAULT_WIDTH, height=DEFAULT_HEIGHT):
"""
create thumbnail path from path and required width and/or height.
thumbnail file name is constructed like this:
<basename>_t_[w<width>][_h<height>].<extension>
"""
basedir = os.path.dirname(path) + '/'
base, ext = os.path.splitext(os.path.basename(path))
# make thumbnail filename
th_name = base + '_t'
if (width is not None) and (height is not None):
th_name += '_w%d_h%d' % (width, height)
elif width is not None:
th_name += '%d' % width # for compatibility with admin
elif height is not None:
th_name += '_h%d' % height
th_name += ext
return urlparse.urljoin(basedir, th_name)
def _get_path_from_url(url, root=MEDIA_ROOT, url_root=MEDIA_URL):
    """Map a media URL to its filesystem path under *root*."""
    # Drop the media URL prefix when present, then resolve under the media root.
    relative = url[len(url_root):] if url.startswith(url_root) else url
    return os.path.normpath(os.path.join(root, relative))
def _get_url_from_path(path, root=MEDIA_ROOT, url_root=MEDIA_URL):
    """Map a filesystem path under *root* back to its media URL."""
    if path.startswith(root):
        path = path[len(root):] # strip media root
    # BUG FIX: the result must be rooted at url_root (MEDIA_URL); the original
    # joined against `root` (MEDIA_ROOT), yielding a filesystem path instead
    # of a URL — compare _get_path_from_url, which performs the inverse map.
    return urlparse.urljoin(url_root, path.replace('\\', '/'))
def _has_thumbnail(photo_url, width=None, height=None, root=MEDIA_ROOT, url_root=MEDIA_URL):
    """Return whether a thumbnail already exists for *photo_url*.

    With explicit width/height, checks the exact thumbnail file; with neither,
    globs for any thumbnail of the base name.
    """
    # one of width/height is required
    #assert (width is not None) or (height is not None)
    #if we just want to get a default one
    if not width and not height:
        place=_get_path_from_url(photo_url)
        if place:
            import fnmatch, os
            base, ext = os.path.splitext(os.path.basename(place))
            basedir = os.path.dirname(place)
            for file in fnmatch.filter(os.listdir(basedir), _THUMBNAIL_GLOB % (base, ext)):
                #if it comes here it has a thumbnail
                return True
            # NOTE(review): falls through returning None (falsy) when no
            # thumbnail matches — presumably intended; confirm callers only
            # truth-test the result.
        else:
            return False
    else:
        import os
        return os.path.isfile(_get_path_from_url(_get_thumbnail_path(photo_url, width, height), root, url_root))
def model_has_thumbnail(model):
    """ To see if given model has a thumbnail"""
    # Inspect the model's fields and test the first FileField found.
    for obj in model._meta.fields:
        if isinstance(obj, FileField):
            # NOTE(review): getattr(obj, 'file') reads the attribute off the
            # *field* object, not off a model instance — verify this is the
            # intended source of the file path.
            x=getattr(obj,'file')
            if x :
                x=_get_url_from_path(x)
                return _has_thumbnail(x)
            else:
                # First FileField has no file: report no thumbnail.
                return False
def make_thumbnail(photo_url, width=DEFAULT_WIDTH, height=DEFAULT_HEIGHT, root=MEDIA_ROOT, url_root=MEDIA_URL):
    """
    Create (or refresh) a thumbnail for ``photo_url`` at the given size.

    Returns the thumbnail URL on success, the original URL when the source
    image cannot be read or resized, and None for an empty URL or when the
    source already has exactly the requested dimensions.

    Note: the defaults were previously swapped (width=DEFAULT_HEIGHT,
    height=DEFAULT_WIDTH); they now match _get_thumbnail_path.
    """
    # one of width/height is required
    assert (width is not None) or (height is not None)
    if not photo_url: return None
    th_url = _get_thumbnail_path(photo_url, width, height)
    th_path = _get_path_from_url(th_url, root, url_root)
    photo_path = _get_path_from_url(photo_url, root, url_root)
    if _has_thumbnail(photo_url, width, height, root, url_root):
        # thumbnail already exists: reuse it unless the photo is newer
        if not (os.path.getmtime(photo_path) > os.path.getmtime(th_path)):
            return th_url
    # get original image size
    orig_w, orig_h = get_image_size(photo_url, root, url_root)
    if (orig_w is None) and (orig_h is None):
        # something is wrong with image
        return photo_url
    # work out the target size, bailing out when nothing would change
    if (width is not None) and (height is not None):
        if (orig_w == width) and (orig_h == height):
            # same dimensions
            return None
        size = (width, height)
    elif width is not None:
        if orig_w == width:
            # same dimensions
            return None
        size = (width, orig_h)
    elif height is not None:
        if orig_h == height:
            # same dimensions
            return None
        size = (orig_w, height)
    try:
        img = Image.open(photo_path).copy()
        img.thumbnail(size, Image.ANTIALIAS)
        img.save(th_path)
    except Exception:
        # resize failed (corrupt image, permissions, ...): fall back to
        # the original URL; the traceback reaches the webserver error log
        return photo_url
    return th_url
def _remove_thumbnails(photo_url, root=MEDIA_ROOT, url_root=MEDIA_URL):
    """Delete every thumbnail generated for ``photo_url`` and drop any
    cached data recorded for the deleted files."""
    if not photo_url: return # empty url
    import fnmatch, os
    source = _get_path_from_url(photo_url, root, url_root)
    base, ext = os.path.splitext(os.path.basename(source))
    basedir = os.path.dirname(source)
    pattern = _THUMBNAIL_GLOB % (base, ext)
    for name in fnmatch.filter(os.listdir(basedir), pattern):
        target = os.path.join(basedir, name)
        os.remove(target)
        # drop the cache entry belonging to the removed file
        image_cache.delete(target)
def remove_model_thumbnails(model):
    """ remove all thumbnails for all ImageFields (and subclasses) in the model """
    # NOTE(review): despite the docstring, each matching field reads the
    # hard-coded 'file' attribute from the model instance, so with several
    # FileFields the same URL is processed repeatedly -- verify intent.
    for obj in model._meta.fields:
        #print obj
        if isinstance(obj, FileField):
            url = getattr(model,'file')
            _remove_thumbnails(url)
def _make_admin_thumbnail(url):
    """ make thumbnails for admin interface """
    # NOTE(review): only width is passed; height keeps make_thumbnail's
    # (non-None) default rather than being omitted -- confirm this is the
    # intended admin-thumbnail geometry.
    make_thumbnail(url, width=120)
def make_admin_thumbnails(model):
    """ create thumbnails for admin interface for all ImageFields (and subclasses) in the model """
    # NOTE(review): like remove_model_thumbnails, this reads the hard-coded
    # 'file' attribute from the instance for every FileField found; with
    # multiple FileFields the same file is thumbnailed repeatedly -- verify.
    for obj in model._meta.fields:
        if isinstance(obj, FileField):
            url = getattr(model,'file')
            make_thumbnail(url, width=120)
def _get_thumbnail_url(photo_url, width=DEFAULT_WIDTH, height=DEFAULT_HEIGHT, root=MEDIA_ROOT, url_root=MEDIA_URL):
    """
    Return the thumbnail URL for ``photo_url`` at the requested width
    and/or height; when the thumbnail file does not exist the original
    URL is returned unchanged.

    Note: the defaults were previously swapped (width=DEFAULT_HEIGHT,
    height=DEFAULT_WIDTH); they now match _get_thumbnail_path and
    make_thumbnail.
    """
    # one of width/height is required
    assert (width is not None) or (height is not None)
    if _has_thumbnail(photo_url, width, height, root, url_root):
        return _get_thumbnail_path(photo_url, width, height)
    else:
        return photo_url
def _set_cached_file(path, value):
    """
    Cache ``value`` keyed by ``path``, together with the file's current
    mtime so staleness can be detected on read.
    Timeout is _FILE_CACHE_TIMEOUT (1 month).
    """
    stamp = os.path.getmtime(path)
    image_cache.set(path, (stamp, value,), _FILE_CACHE_TIMEOUT)
def _get_cached_file(path, default=None):
    """
    Return the cached value for ``path``, or None when missing or stale.

    An entry is stale when the file was deleted or its mtime no longer
    matches the recorded one; in that case the entry is dropped and any
    thumbnails derived from the file are removed as well.
    """
    cached = image_cache.get(path, default)
    if cached is None:
        return None
    mtime, value = cached
    if (not os.path.isfile(path)) or (os.path.getmtime(path) != mtime): # file is changed or deleted
        image_cache.delete(path) # delete from cache
        # remove thumbnails if exists
        # fnmatch is imported locally, matching the sibling helpers above;
        # the previous code used it here without any import in scope.
        import fnmatch
        base, ext = os.path.splitext(os.path.basename(path))
        basedir = os.path.dirname(path)
        for file in fnmatch.filter(os.listdir(basedir), _THUMBNAIL_GLOB % (base, ext)):
            os.remove(os.path.join(basedir, file))
        return None
    else:
        return value
#
def get_image_size(photo_url, root=MEDIA_ROOT, url_root=MEDIA_URL):
    """
    returns image size.
    image sizes are cached (using separate locmem:/// cache instance)

    Returns (None, None) when the image cannot be opened.
    NOTE(review): the outer os.path.join is redundant when
    _get_path_from_url already yields an absolute path -- harmless then,
    but double-prefixes when ``root`` is relative; verify.
    """
    path = os.path.join(root, _get_path_from_url(photo_url, root, url_root))
    size = _get_cached_file(path)
    if size is None:
        try:
            size = Image.open(path).size
        except Exception, err:
            # this goes to webserver error log
            #import sys
            #print >>sys.stderr, '[GET IMAGE SIZE] error %s for file %r' % (err, photo_url)
            return None, None
        # NOTE(review): the else branch below is unreachable -- after a
        # successful Image.open, size is always a (w, h) tuple.
        if size is not None:
            _set_cached_file(path, size)
        else:
            return None, None
    return size
| gpl-3.0 |
Navdy/lightblue-0.4 | src/mac/_IOBluetoothUI.py | 82 | 2076 | # Copyright (c) 2009 Bea Lam. All rights reserved.
#
# This file is part of LightBlue.
#
# LightBlue is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# LightBlue is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with LightBlue. If not, see <http://www.gnu.org/licenses/>.
"""
Provides a python interface to the Mac OSX IOBluetoothUI Framework classes,
through PyObjC.
For example:
>>> from lightblue import _IOBluetoothUI
>>> selector = _IOBluetoothUI.IOBluetoothDeviceSelectorController.deviceSelector()
>>> selector.runModal() # ask user to select a device
-1000
>>> for device in selector.getResults():
... print device.getName() # show name of selected device
...
Nokia 6600
>>>
See http://developer.apple.com/documentation/DeviceDrivers/Reference/IOBluetoothUI/index.html
for Apple's IOBluetoothUI documentation.
See http://pyobjc.sourceforge.net for details on how to access Objective-C
classes through PyObjC.
"""
import objc
try:
    # mac os 10.5 loads frameworks using bridgesupport metadata
    __bundle__ = objc.initFrameworkWrapper("IOBluetoothUI",
            frameworkIdentifier="com.apple.IOBluetoothUI",
            frameworkPath=objc.pathForFramework(
                "/System/Library/Frameworks/IOBluetoothUI.framework"),
            globals=globals())
except (AttributeError, ValueError):
    # earlier versions use loadBundle() and setSignatureForSelector()
    # (AttributeError: initFrameworkWrapper missing on old PyObjC;
    #  ValueError: no bridgesupport metadata for the framework)
    objc.loadBundle("IOBluetoothUI", globals(),
        bundle_path=objc.pathForFramework(u'/System/Library/Frameworks/IOBluetoothUI.framework'))
| gpl-3.0 |
ianlivingstone/PythonSQLEntity | db.py | 1 | 1138 | """
Database Module
Define your entities and indexes here, along with that handle connecting
to the database(s).
An example entity:
user = Entity('users', 'user_id', {
'username':'joe',
'password':'',
'passkey':'',
'join_date':0.0
})
user.add_index(Index(user,'username', shard_on='user_id'))
Then to use the entity in another part of your script just import it:
from datastore import db.user
user_1 = db.user.find(id=uuid)
user_2 = db.user.find(username='')
You can also index by the relationship between two entities like the
following for a user and comments.
from datastore import db.user, db.comment
comments = db.comment.find(user_id=uuid)
for comment in comments:
print comment['message']
"""
import tornado.database
from entity import Entity
from index import Index
# Shared MySQL connection used by every entity defined in this module.
# NOTE(review): credentials are hard-coded (root, empty password) --
# confirm this is development-only configuration.
db = tornado.database.Connection(
    '127.0.0.1',
    'gitlytics',
    'root',
    ''
)
# 'users' entity keyed by user_id, with secondary indexes on name/email
# so user.find(name=...) and user.find(email=...) lookups work.
user = Entity('users', 'user_id', {
    'name':'some name',
    'email':'test@email.com',
}, db)
user.add_index(Index(user, 'name'))
user.add_index(Index(user, 'email'))
| mit |
cafecivet/django_girls_tutorial | Lib/site-packages/django/db/models/aggregates.py | 89 | 2785 | """
Classes to represent the definitions of aggregate functions.
"""
from django.db.models.constants import LOOKUP_SEP
__all__ = [
'Aggregate', 'Avg', 'Count', 'Max', 'Min', 'StdDev', 'Sum', 'Variance',
]
def refs_aggregate(lookup_parts, aggregates):
    """
    Check whether ``lookup_parts`` starts with a reference into the given
    ``aggregates`` mapping. Because LOOKUP_SEP appears inside default
    annotation names, every prefix of the lookup must be tried.

    Returns (aggregate, remaining_parts) on a match, (False, ()) otherwise.
    """
    for prefix_len in range(len(lookup_parts) + 1):
        candidate = LOOKUP_SEP.join(lookup_parts[:prefix_len])
        if candidate in aggregates:
            return aggregates[candidate], lookup_parts[prefix_len:]
    return False, ()
class Aggregate(object):
    """
    Default Aggregate definition.
    """
    def __init__(self, lookup, **extra):
        """Instantiate a new aggregate.
        * lookup is the field on which the aggregate operates.
        * extra is a dictionary of additional data to provide for the
        aggregate definition
        Also utilizes the class variables:
        * name, the identifier for this aggregate function.
        """
        self.lookup = lookup
        self.extra = extra
    def _default_alias(self):
        # e.g. lookup 'price' with name 'Avg' -> 'price__avg'
        return '%s__%s' % (self.lookup, self.name.lower())
    default_alias = property(_default_alias)
    def add_to_query(self, query, alias, col, source, is_summary):
        """Add the aggregate to the nominated query.
        This method is used to convert the generic Aggregate definition into a
        backend-specific definition.
        * query is the backend-specific query instance to which the aggregate
        is to be added.
        * col is a column reference describing the subject field
        of the aggregate. It can be an alias, or a tuple describing
        a table and column name.
        * source is the underlying field or aggregate definition for
        the column reference. If the aggregate is not an ordinal or
        computed type, this reference is used to determine the coerced
        output type of the aggregate.
        * is_summary is a boolean that is set True if the aggregate is a
        summary value rather than an annotation.
        """
        # Look up the backend implementation by this aggregate's name and
        # register the instantiated aggregate on the query under ``alias``.
        klass = getattr(query.aggregates_module, self.name)
        aggregate = klass(col, source=source, is_summary=is_summary, **self.extra)
        query.aggregates[alias] = aggregate
# Concrete aggregates: each subclass only supplies the ``name`` that
# Aggregate.add_to_query() uses to resolve the backend implementation.
class Avg(Aggregate):
    name = 'Avg'
class Count(Aggregate):
    name = 'Count'
class Max(Aggregate):
    name = 'Max'
class Min(Aggregate):
    name = 'Min'
class StdDev(Aggregate):
    name = 'StdDev'
class Sum(Aggregate):
    name = 'Sum'
class Variance(Aggregate):
    name = 'Variance'
| gpl-2.0 |
KaranToor/MA450 | google-cloud-sdk/.install/.backup/lib/googlecloudsdk/api_lib/compute/client_adapter.py | 2 | 4259 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Backend service."""
import urlparse
from apitools.base.py import batch
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.compute import request_helper
from googlecloudsdk.api_lib.compute import utils
from googlecloudsdk.api_lib.util import exceptions as api_exceptions
from googlecloudsdk.core import apis as core_apis
from googlecloudsdk.core import exceptions as core_exceptions
# Upper bound on batch size
# https://cloud.google.com/compute/docs/api/how-tos/batch
_BATCH_SIZE_LIMIT = 1000
class Error(core_exceptions.Error):
  """Errors raised by this module.

  Used by ClientAdapter.BatchRequests to wrap non-HTTP batch failures.
  """
def GetBatchUrl(endpoint_url):
  """Return a batch URL for the given endpoint URL.

  Keeps only the scheme and host of the endpoint and appends 'batch',
  e.g. https://www.googleapis.com/compute/v1 -> https://www.googleapis.com/batch
  """
  parsed = urlparse.urlparse(endpoint_url)
  base = '{0}://{1}'.format(parsed.scheme, parsed.netloc)
  return urlparse.urljoin(base, 'batch')
class ClientAdapter(object):
  """Encapsulates compute apitools interactions.

  Holds one apitools compute client for the resolved API version plus the
  batch endpoint derived from the effective API endpoint, and offers two
  batching helpers: MakeRequests (request_helper based) and BatchRequests
  (apitools BatchApiRequest based).
  """
  _API_NAME = 'compute'
  def __init__(self, api_default_version='v1', no_http=False):
    # Resolve which compute API version is actually in effect (the
    # default may be overridden by the installed API configuration).
    self._api_version = core_apis.ResolveVersion(
        self._API_NAME, api_default_version)
    self._client = core_apis.GetClientInstance(
        self._API_NAME, self._api_version, no_http=no_http)
    # Turn the endpoint into just the host.
    # eg. https://www.googleapis.com/compute/v1 -> https://www.googleapis.com
    endpoint_url = core_apis.GetEffectiveApiEndpoint(
        self._API_NAME, self._api_version)
    self._batch_url = GetBatchUrl(endpoint_url)
  @property
  def api_version(self):
    # Resolved compute API version string, e.g. 'v1'.
    return self._api_version
  @property
  def apitools_client(self):
    return self._client
  @property
  def batch_url(self):
    # Batch endpoint URL shared by MakeRequests and BatchRequests.
    return self._batch_url
  @property
  def messages(self):
    return self._client.MESSAGES_MODULE
  def MakeRequests(self, requests, errors_to_collect=None):
    """Sends given request in batch mode.

    Args:
      requests: (service, method, request) tuples understood by
        request_helper.MakeRequests.
      errors_to_collect: optional list; when provided, errors are appended
        to it instead of raising a tool exception.

    Returns:
      list of response objects.
    """
    errors = errors_to_collect if errors_to_collect is not None else []
    objects = list(request_helper.MakeRequests(
        requests=requests,
        http=self._client.http,
        batch_url=self._batch_url,
        errors=errors))
    # Raise only when the caller did not opt in to collecting errors.
    if errors_to_collect is None and errors:
      utils.RaiseToolException(
          errors, error_message='Could not fetch resource:')
    return objects
  def BatchRequests(self, requests, errors_to_collect=None):
    """Issues batch request for given set of requests.

    Args:
      requests: list(tuple(service, method, payload)), where
        service is apitools.base.py.base_api.BaseApiService,
        method is str, method name, e.g. 'Get', 'CreateInstance',
        payload is a subclass of apitools.base.protorpclite.messages.Message.
      errors_to_collect: list, output only, can be None, contains instances of
        api_exceptions.HttpException for each request with exception.

    Returns:
      list of responses, matching list of requests. Some responses can be
        errors.
    """
    batch_request = batch.BatchApiRequest(batch_url=self._batch_url)
    for service, method, request in requests:
      batch_request.Add(service, method, request)
    payloads = batch_request.Execute(self._client.http,
                                     max_batch_size=_BATCH_SIZE_LIMIT)
    responses = []
    errors = errors_to_collect if errors_to_collect is not None else []
    for payload in payloads:
      if payload.is_error:
        # HTTP failures are wrapped for consistent user-facing formatting;
        # anything else becomes this module's Error.
        if isinstance(payload.exception, apitools_exceptions.HttpError):
          errors.append(api_exceptions.HttpException(payload.exception))
        else:
          errors.append(Error(payload.exception.message))
      responses.append(payload.response)
    return responses
| apache-2.0 |
fulmicoton/pylearn2 | pylearn2/sandbox/cuda_convnet/tests/test_weight_acts.py | 44 | 5133 | from __future__ import print_function
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from pylearn2.testing.skip import skip_if_no_gpu
skip_if_no_gpu()
import numpy as np
from theano import shared
from pylearn2.sandbox.cuda_convnet.filter_acts import FilterActs
from pylearn2.sandbox.cuda_convnet.weight_acts import WeightActs
from theano.sandbox.cuda import gpu_from_host
from theano.sandbox.cuda import host_from_gpu
from theano.sandbox.rng_mrg import MRG_RandomStreams
import theano.tensor as T
from theano.tensor.nnet.conv import conv2d
from theano.tensor import as_tensor_variable
from theano import function
import warnings
def test_match_grad_valid_conv():
    # Tests that weightActs is the gradient of FilterActs
    # with respect to the weights.
    # Strategy: build the same valid convolution twice (cuda-convnet
    # FilterActs vs theano conv2d on flipped, dimshuffled tensors), take
    # the gradient of a random projection of each output w.r.t. the
    # filters, and compare both forward outputs and weight gradients.
    for partial_sum in [0, 1, 4]:
        rng = np.random.RandomState([2012, 10, 9])
        batch_size = 3
        rows = 7
        cols = 9
        channels = 8
        filter_rows = 4
        filter_cols = filter_rows
        num_filters = 16
        # cuda-convnet layout is (channels, rows, cols, batch)
        images = shared(rng.uniform(-1., 1., (channels, rows, cols,
                                              batch_size)).astype('float32'),
                        name='images')
        filters = rng.uniform(-1., 1.,
                              (channels, filter_rows,
                               filter_cols, num_filters)).astype('float32')
        filters = shared(filters, name='filters')
        gpu_images = gpu_from_host(images)
        gpu_filters = gpu_from_host(filters)
        output = FilterActs(partial_sum=partial_sum)(gpu_images, gpu_filters)
        output = host_from_gpu(output)
        # Reference path: conv2d expects bc01 layout and performs true
        # convolution, so the kernels must be flipped to match FilterActs'
        # correlation.
        images_bc01 = images.dimshuffle(3, 0, 1, 2)
        filters_bc01 = filters.dimshuffle(3, 0, 1, 2)
        filters_bc01 = filters_bc01[:, :, ::-1, ::-1]
        output_conv2d = conv2d(images_bc01, filters_bc01,
                               border_mode='valid')
        output_conv2d = output_conv2d.dimshuffle(1, 2, 3, 0)
        theano_rng = MRG_RandomStreams(2013 + 1 + 31)
        # Random projection makes the scalar cost sensitive to every
        # output element.
        coeffs = theano_rng.normal(avg=0., std=1.,
                                   size=output_conv2d.shape, dtype='float32')
        cost_conv2d = (coeffs * output_conv2d).sum()
        weights_grad_conv2d = T.grad(cost_conv2d, filters)
        cost = (coeffs * output).sum()
        hid_acts_grad = T.grad(cost, output)
        weights_grad = WeightActs(partial_sum=partial_sum)(
            gpu_images,
            gpu_from_host(hid_acts_grad),
            as_tensor_variable((4, 4))
        )[0]
        weights_grad = host_from_gpu(weights_grad)
        f = function([], [output, output_conv2d, weights_grad,
                          weights_grad_conv2d])
        output, output_conv2d, weights_grad, weights_grad_conv2d = f()
        if np.abs(output - output_conv2d).max() > 8e-6:
            assert type(output) == type(output_conv2d)
            assert output.dtype == output_conv2d.dtype
            if output.shape != output_conv2d.shape:
                print('cuda-convnet shape: ', output.shape)
                print('theano shape: ', output_conv2d.shape)
                assert False
            err = np.abs(output - output_conv2d)
            print('absolute error range: ', (err.min(), err.max()))
            print('mean absolute error: ', err.mean())
            print('cuda-convnet value range: ', (output.min(), output.max()))
            print('theano value range: ', (output_conv2d.min(),
                                           output_conv2d.max()))
            assert False
        warnings.warn(
            "test_match_grad_valid_conv success criterion is not very strict."
            " Can we verify that this is OK? One possibility is that theano"
            " is numerically unstable and Alex's code is better. Probably"
            " theano CPU 64 bit is OK but it's worth checking the others.")
        if np.abs(weights_grad - weights_grad_conv2d).max() > 8.6e-6:
            if type(weights_grad) != type(weights_grad_conv2d):
                raise AssertionError("weights_grad is of type " +
                                     str(weights_grad))
            assert weights_grad.dtype == weights_grad_conv2d.dtype
            if weights_grad.shape != weights_grad_conv2d.shape:
                print('cuda-convnet shape: ', weights_grad.shape)
                print('theano shape: ', weights_grad_conv2d.shape)
                assert False
            err = np.abs(weights_grad - weights_grad_conv2d)
            print('absolute error range: ', (err.min(), err.max()))
            print('mean absolute error: ', err.mean())
            print('cuda-convnet value range: ', (weights_grad.min(),
                                                 weights_grad.max()))
            print('theano value range: ', (weights_grad_conv2d.min(),
                                           weights_grad_conv2d.max()))
            assert False
# Allow running this test file directly, without a test runner.
if __name__ == '__main__':
    test_match_grad_valid_conv()
| bsd-3-clause |
adlai/p2pool | SOAPpy/SOAPBuilder.py | 289 | 22852 | """
################################################################################
# Copyright (c) 2003, Pfizer
# Copyright (c) 2001, Cayce Ullman.
# Copyright (c) 2001, Brian Matthews.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of actzero, inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
"""
ident = '$Id: SOAPBuilder.py 1498 2010-03-12 02:13:19Z pooryorick $'
from version import __version__
import cgi
from wstools.XMLname import toXMLname, fromXMLname
import fpconst
# SOAPpy modules
from Config import Config
from NS import NS
from Types import *
# Test whether this Python version has Types.BooleanType
# If it doesn't have it, then False and True are serialized as integers
try:
    BooleanType
    pythonHasBooleanType = 1  # bools can be serialized as a distinct type
except NameError:
    pythonHasBooleanType = 0  # pre-2.3: False/True serialize as integers
################################################################################
# SOAP Builder
################################################################################
class SOAPBuilder:
_xml_top = '<?xml version="1.0"?>\n'
_xml_enc_top = '<?xml version="1.0" encoding="%s"?>\n'
_env_top = ( '%(ENV_T)s:Envelope\n' + \
' %(ENV_T)s:encodingStyle="%(ENC)s"\n' ) % \
NS.__dict__
_env_bot = '</%(ENV_T)s:Envelope>\n' % NS.__dict__
# Namespaces potentially defined in the Envelope tag.
_env_ns = {NS.ENC: NS.ENC_T, NS.ENV: NS.ENV_T,
NS.XSD: NS.XSD_T, NS.XSD2: NS.XSD2_T, NS.XSD3: NS.XSD3_T,
NS.XSI: NS.XSI_T, NS.XSI2: NS.XSI2_T, NS.XSI3: NS.XSI3_T}
    def __init__(self, args = (), kw = {}, method = None, namespace = None,
        header = None, methodattrs = None, envelope = 1, encoding = 'UTF-8',
        use_refs = 0, config = Config, noroot = 0):
        """Collect everything needed to serialize one SOAP message.

        args/kw are the positional and keyword parameters of ``method``;
        ``envelope`` controls whether an Envelope element is emitted;
        ``use_refs`` enables multi-reference encoding (build() restarts
        with it on when recursive data is detected).
        """
        # Test the encoding, raising an exception if it's not known
        if encoding != None:
            ''.encode(encoding)
        self.args       = args
        self.kw         = kw
        self.envelope   = envelope
        self.encoding   = encoding
        self.method     = method
        self.namespace  = namespace
        self.header     = header
        self.methodattrs= methodattrs
        self.use_refs   = use_refs
        self.config     = config
        self.out        = []
        self.tcounter   = 0
        self.ncounter   = 1
        self.icounter   = 1
        self.envns      = {}
        self.ids        = {}
        self.depth      = 0
        self.multirefs  = []
        self.multis     = 0
        # a bodyType argument means the caller supplies the Body itself
        self.body       = not isinstance(args, bodyType)
        self.noroot     = noroot
    def build(self):
        """Serialize the message and return it as a string (byte-encoded
        when self.encoding is set)."""
        if Config.debug: print "In build."
        ns_map = {}
        # Cache whether typing is on or not
        typed = self.config.typed
        if self.header:
            # Create a header.
            self.dump(self.header, "Header", typed = typed)
            #self.header = None # Wipe it out so no one is using it.
        if self.body:
            # Call genns to record that we've used SOAP-ENV.
            self.depth += 1
            body_ns = self.genns(ns_map, NS.ENV)[0]
            self.out.append("<%sBody>\n" % body_ns)
        if self.method:
            # Save the NS map so that it can be restored when we
            # fall out of the scope of the method definition
            save_ns_map = ns_map.copy()
            self.depth += 1
            a = ''
            if self.methodattrs:
                for (k, v) in self.methodattrs.items():
                    a += ' %s="%s"' % (k, v)
            if self.namespace:  # Use the namespace info handed to us
                methodns, n = self.genns(ns_map, self.namespace)
            else:
                methodns, n = '', ''
            self.out.append('<%s%s%s%s%s>\n' % (
                methodns, self.method, n, a, self.genroot(ns_map)))
        try:
            if type(self.args) != TupleType:
                args = (self.args,)
            else:
                args = self.args
            for i in args:
                self.dump(i, typed = typed, ns_map = ns_map)
            if hasattr(self.config, "argsOrdering") and self.config.argsOrdering.has_key(self.method):
                for k in self.config.argsOrdering.get(self.method):
                    self.dump(self.kw.get(k), k, typed = typed, ns_map = ns_map)
            else:
                for (k, v) in self.kw.items():
                    self.dump(v, k, typed = typed, ns_map = ns_map)
        except RecursionError:
            if self.use_refs == 0:
                # recursive data: restart the whole build with
                # multi-reference encoding enabled
                b = SOAPBuilder(args = self.args, kw = self.kw,
                    method = self.method, namespace = self.namespace,
                    header = self.header, methodattrs = self.methodattrs,
                    envelope = self.envelope, encoding = self.encoding,
                    use_refs = 1, config = self.config)
                return b.build()
            raise
        if self.method:
            self.out.append("</%s%s>\n" % (methodns, self.method))
            # End of the method definition; drop any local namespaces
            ns_map = save_ns_map
            self.depth -= 1
        if self.body:
            # dump may add to self.multirefs, but the for loop will keep
            # going until it has used all of self.multirefs, even those
            # entries added while in the loop.
            self.multis = 1
            for obj, tag in self.multirefs:
                self.dump(obj, tag, typed = typed, ns_map = ns_map)
            self.out.append("</%sBody>\n" % body_ns)
            self.depth -= 1
        if self.envelope:
            # declare on the Envelope every namespace recorded in envns
            e = map (lambda ns: ' xmlns:%s="%s"\n' % (ns[1], ns[0]),
                self.envns.items())
            self.out = ['<', self._env_top] + e + ['>\n'] + \
                self.out + \
                [self._env_bot]
        if self.encoding != None:
            self.out.insert(0, self._xml_enc_top % self.encoding)
            return ''.join(self.out).encode(self.encoding)
        self.out.insert(0, self._xml_top)
        return ''.join(self.out)
    def gentag(self):
        """Return a fresh auto-generated element name (v1, v2, ...)."""
        if Config.debug: print "In gentag."
        self.tcounter += 1
        return "v%d" % self.tcounter
    def genns(self, ns_map, nsURI):
        """Return a (prefix, declaration) pair for ``nsURI``.

        The prefix ends in ':' (or is empty); the declaration is an xmlns
        attribute to splice into the current tag, empty when the namespace
        is already in scope or is one declared on the Envelope.
        """
        if nsURI == None:
            return ('', '')
        if type(nsURI) == TupleType: # already a tuple
            if len(nsURI) == 2:
                ns, nsURI = nsURI
            else:
                ns, nsURI = None, nsURI[0]
        else:
            ns = None
        if ns_map.has_key(nsURI):
            return (ns_map[nsURI] + ':', '')
        if self._env_ns.has_key(nsURI):
            # well-known namespace: record it for the Envelope declaration
            ns = self.envns[nsURI] = ns_map[nsURI] = self._env_ns[nsURI]
            return (ns + ':', '')
        if not ns:
            ns = "ns%d" % self.ncounter
            self.ncounter += 1
        ns_map[nsURI] = ns
        if self.config.buildWithNamespacePrefix:
            return (ns + ':', ' xmlns:%s="%s"' % (ns, nsURI))
        else:
            return ('', ' xmlns="%s"' % (nsURI))
    def genroot(self, ns_map):
        """Return the SOAP-ENC root attribute for top-level (depth 2)
        elements, or '' elsewhere / when noroot is set."""
        if self.noroot:
            return ''
        if self.depth != 2:
            return ''
        ns, n = self.genns(ns_map, NS.ENC)
        return ' %sroot="%d"%s' % (ns, not self.multis, n)
    # checkref checks an element to see if it needs to be encoded as a
    # multi-reference element or not. If it returns None, the element has
    # been handled and the caller can continue with subsequent elements.
    # If it returns a string, the string should be included in the opening
    # tag of the marshaled element.
    def checkref(self, obj, tag, ns_map):
        if self.depth < 2:
            return ''
        if not self.ids.has_key(id(obj)):
            # first time this object is seen: assign it a reference number
            n = self.ids[id(obj)] = self.icounter
            self.icounter = n + 1
            if self.use_refs == 0:
                return ''
            if self.depth == 2:
                return ' id="i%d"' % n
            # nested occurrence: defer to the multiref section of the Body
            self.multirefs.append((obj, tag))
        else:
            if self.use_refs == 0:
                raise RecursionError, "Cannot serialize recursive object"
            n = self.ids[id(obj)]
            if self.multis and self.depth == 2:
                return ' id="i%d"' % n
        # emit only an href pointing at the multiref element
        self.out.append('<%s href="#i%d"%s/>\n' %
                        (tag, n, self.genroot(ns_map)))
        return None
# dumpers
    def dump(self, obj, tag = None, typed = 1, ns_map = {}):
        """Serialize ``obj`` under ``tag``, dispatching on its type.

        Each nesting level works on its own copy of the namespace map so
        locally declared namespaces do not leak to siblings.
        """
        if Config.debug: print "In dump.", "obj=", obj
        ns_map = ns_map.copy()
        self.depth += 1
        if type(tag) not in (NoneType, StringType, UnicodeType):
            raise KeyError, "tag must be a string or None"
        self.dump_dispatch(obj, tag, typed, ns_map)
        self.depth -= 1
# generic dumper
    def dumper(self, nsURI, obj_type, obj, tag, typed = 1, ns_map = {},
               rootattr = '', id = '',
               xml = '<%(tag)s%(type)s%(id)s%(attrs)s%(root)s>%(data)s</%(tag)s>\n'):
        """Render one simple typed element and return it as a string.

        ``xml`` is the element template; ``obj_type`` is the XSD type name
        placed in the xsi:type attribute when ``typed`` is set.
        """
        if Config.debug: print "In dumper."
        if nsURI == None:
            nsURI = self.config.typesNamespaceURI
        tag = tag or self.gentag()
        tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
        a = n = t = ''
        if typed and obj_type:
            ns, n = self.genns(ns_map, nsURI)
            ins = self.genns(ns_map, self.config.schemaNamespaceURI)[0]
            t = ' %stype="%s%s"%s' % (ins, ns, obj_type, n)
        # objects may carry their own attributes / pre-marshaled data;
        # the bare excepts deliberately fall back when they do not
        try: a = obj._marshalAttrs(ns_map, self)
        except: pass
        try: data = obj._marshalData()
        except:
            if (obj_type != "string"): # strings are already encoded
                data = cgi.escape(str(obj))
            else:
                data = obj
        return xml % {"tag": tag, "type": t, "data": data, "root": rootattr,
                      "id": id, "attrs": a}
    def dump_float(self, obj, tag, typed = 1, ns_map = {}):
        """Serialize a Python float as xsd:double, mapping IEEE special
        values to INF/-INF/NaN."""
        if Config.debug: print "In dump_float."
        tag = tag or self.gentag()
        tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
        if Config.strict_range:
            doubleType(obj)  # range check only; raises when out of range
        if fpconst.isPosInf(obj):
            obj = "INF"
        elif fpconst.isNegInf(obj):
            obj = "-INF"
        elif fpconst.isNaN(obj):
            obj = "NaN"
        else:
            obj = repr(obj)
        # Note: python 'float' is actually a SOAP 'double'.
        self.out.append(self.dumper(
            None, "double", obj, tag, typed, ns_map, self.genroot(ns_map)))
    def dump_int(self, obj, tag, typed = 1, ns_map = {}):
        """Serialize an int/long as xsd:integer."""
        if Config.debug: print "In dump_int."
        self.out.append(self.dumper(None, 'integer', obj, tag, typed,
                                    ns_map, self.genroot(ns_map)))
    def dump_bool(self, obj, tag, typed = 1, ns_map = {}):
        """Serialize a bool as xsd:boolean."""
        if Config.debug: print "In dump_bool."
        self.out.append(self.dumper(None, 'boolean', obj, tag, typed,
                                    ns_map, self.genroot(ns_map)))
    def dump_string(self, obj, tag, typed = 0, ns_map = {}):
        """Serialize a str/unicode value, escaping XML special chars.

        checkref() may emit an href and return None, in which case the
        element has already been written.
        """
        if Config.debug: print "In dump_string."
        tag = tag or self.gentag()
        tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
        id = self.checkref(obj, tag, ns_map)
        if id == None:
            return
        try: data = obj._marshalData()
        except: data = obj
        self.out.append(self.dumper(None, "string", cgi.escape(data), tag,
                                    typed, ns_map, self.genroot(ns_map), id))
    dump_str = dump_string # For Python 2.2+
    dump_unicode = dump_string
    def dump_None(self, obj, tag, typed = 0, ns_map = {}):
        """Serialize None as an empty element with xsi null="1"."""
        if Config.debug: print "In dump_None."
        tag = tag or self.gentag()
        tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
        ns = self.genns(ns_map, self.config.schemaNamespaceURI)[0]
        self.out.append('<%s %snull="1"%s/>\n' %
                        (tag, ns, self.genroot(ns_map)))
    dump_NoneType = dump_None # For Python 2.2+
    def dump_list(self, obj, tag, typed = 1, ns_map = {}):
        """Serialize a list/tuple/arrayType as a SOAP-ENC Array.

        The element type of the array is taken from the first item (or,
        for empty typed arrays, from the array object's own metadata);
        mixed-type sequences fall back to the generic 'ur-type'.
        """
        if Config.debug: print "In dump_list.", "obj=", obj
        tag = tag or self.gentag()
        tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
        if type(obj) == InstanceType:
            data = obj.data
        else:
            data = obj
        if typed:
            id = self.checkref(obj, tag, ns_map)
            if id == None:
                return
        try:
            sample = data[0]
            empty = 0
        except:
            # preserve type if present
            if getattr(obj,"_typed",None) and getattr(obj,"_type",None):
                if getattr(obj, "_complexType", None):
                    sample = typedArrayType(typed=obj._type,
                                            complexType = obj._complexType)
                    sample._typename = obj._type
                    if not getattr(obj,"_ns",None): obj._ns = NS.URN
                else:
                    sample = typedArrayType(typed=obj._type)
            else:
                sample = structType()
            empty = 1
        # First scan list to see if all are the same type
        same_type = 1
        if not empty:
            for i in data[1:]:
                if type(sample) != type(i) or \
                    (type(sample) == InstanceType and \
                        sample.__class__ != i.__class__):
                    same_type = 0
                    break
        ndecl = ''
        if same_type:
            if (isinstance(sample, structType)) or \
               type(sample) == DictType or \
               (isinstance(sample, anyType) and \
                (getattr(sample, "_complexType", None) and \
                 sample._complexType)): # force to urn struct
                # struct-like elements: use the object's namespace (or URN)
                try:
                    tns = obj._ns or NS.URN
                except:
                    tns = NS.URN
                ns, ndecl = self.genns(ns_map, tns)
                try:
                    typename = sample._typename
                except:
                    typename = "SOAPStruct"
                t = ns + typename
            elif isinstance(sample, anyType):
                ns = sample._validNamespaceURI(self.config.typesNamespaceURI,
                                              self.config.strictNamespaces)
                if ns:
                    ns, ndecl = self.genns(ns_map, ns)
                    t = ns + str(sample._type)
                else:
                    t = 'ur-type'
            else:
                typename = type(sample).__name__
                # For Python 2.2+
                if type(sample) == StringType: typename = 'string'
                # HACK: unicode is a SOAP string
                if type(sample) == UnicodeType: typename = 'string'
                # HACK: python 'float' is actually a SOAP 'double'.
                if typename=="float": typename="double"
                t = self.genns(
                    ns_map, self.config.typesNamespaceURI)[0] + typename
        else:
            # heterogeneous list: fall back to the generic ur-type
            t = self.genns(ns_map, self.config.typesNamespaceURI)[0] + \
                "ur-type"
        try: a = obj._marshalAttrs(ns_map, self)
        except: a = ''
        ens, edecl = self.genns(ns_map, NS.ENC)
        ins, idecl = self.genns(ns_map, self.config.schemaNamespaceURI)
        if typed:
            self.out.append(
                '<%s %sarrayType="%s[%d]" %stype="%sArray"%s%s%s%s%s%s>\n' %
                (tag, ens, t, len(data), ins, ens, ndecl, edecl, idecl,
                 self.genroot(ns_map), id, a))
        if typed:
            try: elemsname = obj._elemsname
            except: elemsname = "item"
        else:
            elemsname = tag
        if isinstance(data, (list, tuple, arrayType)):
            should_drill = True
        else:
            should_drill = not same_type
        for i in data:
            self.dump(i, elemsname, should_drill, ns_map)
        if typed: self.out.append('</%s>\n' % tag)
    dump_tuple = dump_list
def dump_exception(self, obj, tag, typed = 0, ns_map = {}):
if isinstance(obj, faultType): # Fault
cns, cdecl = self.genns(ns_map, NS.ENC)
vns, vdecl = self.genns(ns_map, NS.ENV)
self.out.append('<%sFault %sroot="1"%s%s>' % (vns, cns, vdecl, cdecl))
self.dump(obj.faultcode, "faultcode", typed, ns_map)
self.dump(obj.faultstring, "faultstring", typed, ns_map)
if hasattr(obj, "detail"):
self.dump(obj.detail, "detail", typed, ns_map)
self.out.append("</%sFault>\n" % vns)
    def dump_dictionary(self, obj, tag, typed = 1, ns_map = {}):
        # Serialize a Python dict as a struct-like XML element: each key
        # becomes a child element name, each value is dumped recursively.
        # NOTE(review): the shared mutable default for ns_map matches the
        # pre-existing style of this class.
        if Config.debug: print "In dump_dictionary."
        tag = tag or self.gentag()
        tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
        # checkref returns None when the object was emitted as a multiref
        # elsewhere; in that case there is nothing more to write here.
        id = self.checkref(obj, tag, ns_map)
        if id == None:
            return
        # _marshalAttrs is optional on the object; fall back to no attributes.
        try: a = obj._marshalAttrs(ns_map, self)
        except: a = ''
        self.out.append('<%s%s%s%s>\n' %
                        (tag, id, a, self.genroot(ns_map)))
        for (k, v) in obj.items():
            # Keys starting with "_" are treated as private/meta and skipped.
            if k[0] != "_":
                self.dump(v, k, 1, ns_map)
        self.out.append('</%s>\n' % tag)
    dump_dict = dump_dictionary # For Python 2.2+
def dump_dispatch(self, obj, tag, typed = 1, ns_map = {}):
if not tag:
# If it has a name use it.
if isinstance(obj, anyType) and obj._name:
tag = obj._name
else:
tag = self.gentag()
# watch out for order!
dumpmap = (
(Exception, self.dump_exception),
(arrayType, self.dump_list),
(basestring, self.dump_string),
(NoneType, self.dump_None),
(bool, self.dump_bool),
(int, self.dump_int),
(long, self.dump_int),
(list, self.dump_list),
(tuple, self.dump_list),
(dict, self.dump_dictionary),
(float, self.dump_float),
)
for dtype, func in dumpmap:
if isinstance(obj, dtype):
func(obj, tag, typed, ns_map)
return
r = self.genroot(ns_map)
try: a = obj._marshalAttrs(ns_map, self)
except: a = ''
if isinstance(obj, voidType): # void
self.out.append("<%s%s%s></%s>\n" % (tag, a, r, tag))
else:
id = self.checkref(obj, tag, ns_map)
if id == None:
return
if isinstance(obj, structType):
# Check for namespace
ndecl = ''
ns = obj._validNamespaceURI(self.config.typesNamespaceURI,
self.config.strictNamespaces)
if ns:
ns, ndecl = self.genns(ns_map, ns)
tag = ns + tag
self.out.append("<%s%s%s%s%s>\n" % (tag, ndecl, id, a, r))
keylist = obj.__dict__.keys()
# first write out items with order information
if hasattr(obj, '_keyord'):
for i in range(len(obj._keyord)):
self.dump(obj._aslist(i), obj._keyord[i], 1, ns_map)
keylist.remove(obj._keyord[i])
# now write out the rest
for k in keylist:
if (k[0] != "_"):
self.dump(getattr(obj,k), k, 1, ns_map)
if isinstance(obj, bodyType):
self.multis = 1
for v, k in self.multirefs:
self.dump(v, k, typed = typed, ns_map = ns_map)
self.out.append('</%s>\n' % tag)
elif isinstance(obj, anyType):
t = ''
if typed:
ns = obj._validNamespaceURI(self.config.typesNamespaceURI,
self.config.strictNamespaces)
if ns:
ons, ondecl = self.genns(ns_map, ns)
ins, indecl = self.genns(ns_map,
self.config.schemaNamespaceURI)
t = ' %stype="%s%s"%s%s' % \
(ins, ons, obj._type, ondecl, indecl)
self.out.append('<%s%s%s%s%s>%s</%s>\n' %
(tag, t, id, a, r, obj._marshalData(), tag))
else: # Some Class
self.out.append('<%s%s%s>\n' % (tag, id, r))
d1 = getattr(obj, '__dict__', None)
if d1 is not None:
for (k, v) in d1:
if k[0] != "_":
self.dump(v, k, 1, ns_map)
self.out.append('</%s>\n' % tag)
################################################################################
# SOAPBuilder's more public interface
################################################################################
def buildSOAP(args=(), kw={}, method=None, namespace=None,
              header=None, methodattrs=None, envelope=1, encoding='UTF-8',
              config=Config, noroot = 0):
    """Convenience wrapper: construct a SOAPBuilder with the given options
    and return the serialized SOAP message."""
    builder = SOAPBuilder(args=args, kw=kw, method=method,
                          namespace=namespace, header=header,
                          methodattrs=methodattrs, envelope=envelope,
                          encoding=encoding, config=config, noroot=noroot)
    return builder.build()
| gpl-3.0 |
formath/mxnet | docs/mxdoc.py | 11 | 12953 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""A sphnix-doc plugin to build mxnet docs"""
import subprocess
import re
import os
import json
import sys
from recommonmark import transform
import pypandoc
# import StringIO from io for python3 compatibility
from io import StringIO
import contextlib
# white list to evaluate the code block output, such as ['tutorials/gluon']
_EVAL_WHILTELIST = []
# start or end of a code block
_CODE_MARK = re.compile('^([ ]*)```([\w]*)')
# language names and the according file extensions and comment symbol
_LANGS = {'python' : ('py', '#'),
'r' : ('R','#'),
'scala' : ('scala', '#'),
'julia' : ('jl', '#'),
'perl' : ('pl', '#'),
'cpp' : ('cc', '//'),
'bash' : ('sh', '#')}
_LANG_SELECTION_MARK = 'INSERT SELECTION BUTTONS'
_SRC_DOWNLOAD_MARK = 'INSERT SOURCE DOWNLOAD BUTTONS'
def _run_cmd(cmds):
"""Run commands, raise exception if failed"""
if not isinstance(cmds, str):
cmds = "".join(cmds)
print("Execute \"%s\"" % cmds)
try:
subprocess.check_call(cmds, shell=True)
except subprocess.CalledProcessError as err:
print(err)
raise err
def generate_doxygen(app):
    """Run the doxygen build and copy its HTML output into the sphinx outdir."""
    source_dir = app.builder.srcdir
    _run_cmd("cd %s/.. && make doxygen" % source_dir)
    _run_cmd("cp -rf doxygen/html %s/doxygen" % app.builder.outdir)
def build_mxnet(app):
    """Build the mxnet .so lib, seeding config.mk from the template if absent."""
    srcdir = app.builder.srcdir
    if os.path.exists(os.path.join(srcdir, '..', 'config.mk')):
        cmd = "cd %s/.. && make -j$(nproc) DEBUG=1" % srcdir
    else:
        cmd = ("cd %s/.. && cp make/config.mk config.mk && "
               "make -j$(nproc) DEBUG=1" % srcdir)
    _run_cmd(cmd)
def build_r_docs(app):
    """Build the R reference-manual PDF and move it into the output dir.

    Bug fix: the original computed ``pdf_path`` from an undefined
    ``root_path`` name (NameError at call time).  The path is now derived
    from the sphinx source dir: ``<repo>/docs/api/r`` is ``api/r`` under
    ``srcdir`` (the docs directory itself).
    """
    r_root = app.builder.srcdir + '/../R-package'
    pdf_path = app.builder.srcdir + '/api/r/mxnet-r-reference-manual.pdf'
    _run_cmd('cd ' + r_root +
             '; R -e "roxygen2::roxygenize()"; R CMD Rd2pdf . --no-preview -o ' + pdf_path)
    dest_path = app.builder.outdir + '/api/r/'
    _run_cmd('mkdir -p ' + dest_path + '; mv ' + pdf_path + ' ' + dest_path)
def build_scala_docs(app):
    """Run scaladoc over the scala-package sources and relocate the output."""
    scala_path = app.builder.srcdir + '/../scala-package/core/src/main/scala/ml/dmlc/mxnet'
    # scaldoc fails on some apis, so exit 0 to pass the check
    _run_cmd('cd ' + scala_path + '; scaladoc `find . | grep .*scala`; exit 0')
    dest_path = app.builder.outdir + '/api/scala/docs'
    _run_cmd('rm -rf ' + dest_path)
    _run_cmd('mkdir -p ' + dest_path)
    # Move the generated scaladoc artifacts one by one into the dest dir.
    for doc_file in ('index', 'index.html', 'ml', 'lib', 'index.js', 'package.html'):
        _run_cmd('cd ' + scala_path + ' && mv -f ' + doc_file + ' ' + dest_path)
def _convert_md_table_to_rst(table):
"""Convert a markdown table to rst format"""
if len(table) < 3:
return ''
out = '```eval_rst\n.. list-table::\n :header-rows: 1\n\n'
for i,l in enumerate(table):
cols = l.split('|')[1:-1]
if i == 0:
ncol = len(cols)
else:
if len(cols) != ncol:
return ''
if i == 1:
for c in cols:
if len(c) is not 0 and '---' not in c:
return ''
else:
for j,c in enumerate(cols):
out += ' * - ' if j == 0 else ' - '
out += pypandoc.convert_text(
c, 'rst', format='md').replace('\n', ' ').replace('\r', '') + '\n'
out += '```\n'
return out
def convert_table(app, docname, source):
    """Find markdown tables in each source chunk and convert them to rst.

    Mutates *source* in place; lines that are not part of a table are
    passed through (each re-terminated with a newline).

    Bug fix: replaced identity comparisons ``converted is ''`` and
    ``in_table is True`` with value/truthiness tests -- ``is`` against a
    string literal is implementation-dependent and warns on modern Python.
    """
    num_tables = 0
    for i, j in enumerate(source):
        table = []
        output = ''
        in_table = False
        for l in j.split('\n'):
            r = l.strip()
            if r.startswith('|'):
                table.append(r)
                in_table = True
            else:
                if in_table:
                    # The table just ended; try to convert what we collected.
                    converted = _convert_md_table_to_rst(table)
                    if converted == '':
                        print("Failed to convert the markdown table")
                        print(table)
                    else:
                        num_tables += 1
                        output += converted
                    in_table = False
                    table = []
                output += l + '\n'
        source[i] = output
    if num_tables > 0:
        print('Converted %d tables in %s' % (num_tables, docname))
def _parse_code_lines(lines):
    """Iterate over *lines*, tagging each line with code-block state.

    Returns
    -------
    iterator of (str, bool, str, int)
        - line: the line
        - in_code: if this line is in a code block
        - lang: the code block langunage
        - indent: the code indent
    """
    in_code, lang, indent = False, None, None
    for line in lines:
        fence = _CODE_MARK.match(line)
        if fence is None:
            yield (line, in_code, lang, indent)
            continue
        if in_code:
            # Closing fence: it still belongs to the block; reset afterwards.
            yield (line, in_code, lang, indent)
            in_code, lang, indent = False, None, None
        else:
            fence_indent, fence_lang = fence.groups()
            # Only fences tagged with a known language open a code block.
            if fence_lang.lower() in _LANGS:
                lang = fence_lang.lower()
                indent = len(fence_indent)
                in_code = True
            yield (line, in_code, lang, indent)
def _get_lang_selection_btn(langs):
active = True
btngroup = '<div class="text-center">\n<div class="btn-group opt-group" role="group">'
for l in langs:
btngroup += '<button type="button" class="btn btn-default opt %s">%s</button>\n' % (
'active' if active else '', l[0].upper()+l[1:].lower())
active = False
btngroup += '</div>\n</div> <script type="text/javascript" src="../../_static/js/options.js"></script>'
return btngroup
def _get_blocks(lines):
    """split lines into code and non-code blocks

    Returns
    -------
    iterator of (bool, str, list of str)
        - if it is a code block
        - source language
        - lines of source
    """
    block = []
    prev_lang = None
    prev_in_code = None
    for (line, in_code, lang, _) in _parse_code_lines(lines):
        if in_code != prev_in_code:
            # Flush the block that just ended.
            if prev_in_code and len(block) >= 2:
                block = block[1:-1]  # drop the ``` fence lines
                # strip blank padding at head and tail
                while block and not block[0]:
                    block.pop(0)
                while block and not block[-1]:
                    block.pop()
            if block:
                yield (prev_in_code, prev_lang, block)
            block = []
        block.append(line)
        prev_lang = lang
        prev_in_code = in_code
    if block:
        yield (prev_in_code, prev_lang, block)
def _get_mk_code_block(src, lang):
"""Return a markdown code block
E.g.
```python
import mxnet
````
"""
if lang is None:
lang = ''
return '```'+lang+'\n'+src.rstrip()+'\n'+'```\n'
@contextlib.contextmanager
def _string_io():
oldout = sys.stdout
olderr = sys.stderr
strio = StringIO.StringIO()
sys.stdout = strio
sys.stderr = strio
yield strio
sys.stdout = oldout
sys.stderr = olderr
def _get_python_block_output(src, global_dict, local_dict):
    """Evaluate python source codes

    Returns
    (bool, str):
    - True if success
    - output (captured stdout/stderr, plus the error message on failure)

    Lines that are IPython magics (leading '%') or that call plt.show()
    are stripped before execution.  Note *local_dict* is accepted for
    interface compatibility but unused: *global_dict* serves as both
    namespaces so definitions persist across blocks.
    """
    runnable = [l for l in src.split('\n')
                if not l.startswith('%') and 'plt.show()' not in l]
    ok = True
    err = ''
    with _string_io() as captured:
        try:
            exec('\n'.join(runnable), global_dict, global_dict)
        except Exception as e:
            ok = False
            err = str(e)
    return (ok, captured.getvalue() + err)
def _get_jupyter_notebook(lang, lines):
    """Assemble a Jupyter notebook dict from markdown *lines*.

    Code blocks in *lang* become code cells; everything else (including
    code blocks in other languages) becomes markdown cells.
    """
    cells = []
    for in_code, blk_lang, blk_lines in _get_blocks(lines):
        is_code = in_code and blk_lang == lang
        cell = {
            "cell_type": "code" if is_code else "markdown",
            "metadata": {},
            "source": '\n'.join(blk_lines),
        }
        if is_code:
            cell["outputs"] = []
            cell["execution_count"] = None
        cells.append(cell)
    return {"nbformat": 4,
            "nbformat_minor": 2,
            "metadata": {"language": lang, "display_name": '', "name": ''},
            "cells": cells}
def _get_source(lang, lines):
    """Extract a single-language source listing from markdown *lines*.

    Code-block lines are emitted verbatim (blank-line padded); all other
    lines are prefixed with the language's comment symbol.  HTML wrapper
    lines and matplotlib magics are dropped.

    Bug fix: ``_get_blocks`` takes only the line list and yields
    ``(in_code, lang, lines)`` 3-tuples; the original called it with two
    arguments and unpacked 2-tuples, which failed at runtime.
    """
    cmt = _LANGS[lang][1] + ' '
    out = []
    for in_code, _blk_lang, blk_lines in _get_blocks(lines):
        if in_code:
            out.append('')
        for l in blk_lines:
            if in_code:
                if '%matplotlib' not in l:
                    out.append(l)
            else:
                # Skip HTML scaffolding and magics in the prose sections.
                if ('<div>' in l or '</div>' in l or
                    '<script>' in l or '</script>' in l or
                    '<!--' in l or '-->' in l or
                    '%matplotlib' in l):
                    continue
                out.append(cmt + l)
        if in_code:
            out.append('')
    return out
def _get_src_download_btn(out_prefix, langs, lines):
    """Write one .ipynb per language and return HTML download buttons for them.

    Python notebooks are named ``<prefix>.ipynb``; other languages get a
    ``_<lang>`` suffix.  The button's link uses only the file basename.
    """
    parts = ['<div class="btn-group" role="group">\n']
    for lang in langs:
        suffix = '.ipynb' if lang == 'python' else '_' + lang + '.ipynb'
        ipynb = out_prefix + suffix
        with open(ipynb, 'w') as f:
            json.dump(_get_jupyter_notebook(lang, lines), f)
        name = ipynb.split('/')[-1]
        parts.append('<div class="download-btn"><a href="%s" download="%s">'
                     '<span class="glyphicon glyphicon-download-alt"></span> %s</a></div>'
                     % (name, name, name))
    parts.append('</div>\n')
    return ''.join(parts)
def add_buttons(app, docname, source):
    # Post-process each markdown source chunk: replace the download-button
    # marker with generated HTML, re-emit fenced code blocks, and for
    # whitelisted docs evaluate python blocks and append their output.
    out_prefix = app.builder.outdir + '/' + docname
    dirname = os.path.dirname(out_prefix)
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    for i,j in enumerate(source):
        # Fresh namespaces per chunk so evaluated blocks share state only
        # within the same document chunk.
        local_dict = {}
        global_dict = {}
        lines = j.split('\n')
        # Languages present in any fenced code block of this chunk.
        langs = set([l for (_, _, l, _) in _parse_code_lines(lines)
                     if l is not None and l in _LANGS])
        # first convert
        for k,l in enumerate(lines):
            if _SRC_DOWNLOAD_MARK in l:
                # Replaces the marker line with a multi-line HTML string
                # (kept as a single list element).
                lines[k] = _get_src_download_btn(
                    out_prefix, langs, lines)
        # # then add lang buttons
        # for k,l in enumerate(lines):
        #     if _LANG_SELECTION_MARK in l:
        #         lines[k] = _get_lang_selection_btn(langs)
        output = ''
        # NOTE(review): `lines` is rebound by this loop's unpacking; the
        # original list is no longer referenced afterwards.
        for in_code, lang, lines in _get_blocks(lines):
            src = '\n'.join(lines)+'\n'
            if in_code:
                output += _get_mk_code_block(src, lang)
                # Evaluate python blocks only for whitelisted docs.
                if lang == 'python' and any([w in docname for w in _EVAL_WHILTELIST]):
                    status, blk_out = _get_python_block_output(src, global_dict, local_dict)
                    if len(blk_out):
                        output += '<div class=\"cell-results-header\">Output:</div>\n\n'
                        output += _get_mk_code_block(blk_out, 'results')
            else:
                output += src
        source[i] = output
        # source[i] = '\n'.join(lines)
def setup(app):
    """Sphinx extension entry point: wire build hooks and markdown transforms."""
    app.connect("builder-inited", build_mxnet)
    app.connect("builder-inited", generate_doxygen)
    app.connect("builder-inited", build_scala_docs)
    # skipped to build r, it requires to install latex, which is kinds of too heavy
    # app.connect("builder-inited", build_r_docs)
    # Order matters: tables are converted to rst before buttons are injected.
    app.connect('source-read', convert_table)
    app.connect('source-read', add_buttons)
    app.add_config_value('recommonmark_config', {
        'url_resolver': lambda url: 'http://mxnet.io/' + url,
        'enable_eval_rst': True,
        }, True)
    app.add_transform(transform.AutoStructify)
| apache-2.0 |
cloudnull/ansible | test/units/plugins/strategies/test_strategy_base.py | 62 | 14837 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.plugins.strategy import StrategyBase
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.executor.task_result import TaskResult
from six.moves import queue as Queue
from units.mock.loader import DictDataLoader
class TestStrategyBase(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_strategy_base_init(self):
mock_tqm = MagicMock(TaskQueueManager)
mock_tqm._final_q = MagicMock()
mock_tqm._options = MagicMock()
strategy_base = StrategyBase(tqm=mock_tqm)
def test_strategy_base_run(self):
mock_tqm = MagicMock(TaskQueueManager)
mock_tqm._final_q = MagicMock()
mock_tqm._stats = MagicMock()
mock_tqm.send_callback.return_value = None
mock_iterator = MagicMock()
mock_iterator._play = MagicMock()
mock_iterator._play.handlers = []
mock_play_context = MagicMock()
mock_tqm._failed_hosts = dict()
mock_tqm._unreachable_hosts = dict()
mock_tqm._options = MagicMock()
strategy_base = StrategyBase(tqm=mock_tqm)
self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context), 0)
self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=False), 1)
mock_tqm._failed_hosts = dict(host1=True)
self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=False), 2)
mock_tqm._unreachable_hosts = dict(host1=True)
self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=False), 3)
def test_strategy_base_get_hosts(self):
mock_hosts = []
for i in range(0, 5):
mock_host = MagicMock()
mock_host.name = "host%02d" % (i+1)
mock_hosts.append(mock_host)
mock_inventory = MagicMock()
mock_inventory.get_hosts.return_value = mock_hosts
mock_tqm = MagicMock()
mock_tqm._final_q = MagicMock()
mock_tqm.get_inventory.return_value = mock_inventory
mock_play = MagicMock()
mock_play.hosts = ["host%02d" % (i+1) for i in range(0, 5)]
strategy_base = StrategyBase(tqm=mock_tqm)
mock_tqm._failed_hosts = []
mock_tqm._unreachable_hosts = []
self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), mock_hosts)
mock_tqm._failed_hosts = ["host01"]
self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), mock_hosts[1:])
self.assertEqual(strategy_base.get_failed_hosts(play=mock_play), [mock_hosts[0]])
mock_tqm._unreachable_hosts = ["host02"]
self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), mock_hosts[2:])
def test_strategy_base_queue_task(self):
fake_loader = DictDataLoader()
workers = []
for i in range(0, 3):
worker_main_q = MagicMock()
worker_main_q.put.return_value = None
worker_result_q = MagicMock()
workers.append([i, worker_main_q, worker_result_q])
mock_tqm = MagicMock()
mock_tqm._final_q = MagicMock()
mock_tqm.get_workers.return_value = workers
mock_tqm.get_loader.return_value = fake_loader
strategy_base = StrategyBase(tqm=mock_tqm)
strategy_base._cur_worker = 0
strategy_base._pending_results = 0
strategy_base._queue_task(host=MagicMock(), task=MagicMock(), task_vars=dict(), play_context=MagicMock())
self.assertEqual(strategy_base._cur_worker, 1)
self.assertEqual(strategy_base._pending_results, 1)
strategy_base._queue_task(host=MagicMock(), task=MagicMock(), task_vars=dict(), play_context=MagicMock())
self.assertEqual(strategy_base._cur_worker, 2)
self.assertEqual(strategy_base._pending_results, 2)
strategy_base._queue_task(host=MagicMock(), task=MagicMock(), task_vars=dict(), play_context=MagicMock())
self.assertEqual(strategy_base._cur_worker, 0)
self.assertEqual(strategy_base._pending_results, 3)
workers[0][1].put.side_effect = EOFError
strategy_base._queue_task(host=MagicMock(), task=MagicMock(), task_vars=dict(), play_context=MagicMock())
self.assertEqual(strategy_base._cur_worker, 1)
self.assertEqual(strategy_base._pending_results, 3)
def test_strategy_base_process_pending_results(self):
mock_tqm = MagicMock()
mock_tqm._terminated = False
mock_tqm._failed_hosts = dict()
mock_tqm._unreachable_hosts = dict()
mock_tqm.send_callback.return_value = None
queue_items = []
def _queue_empty(*args, **kwargs):
return len(queue_items) == 0
def _queue_get(*args, **kwargs):
if len(queue_items) == 0:
raise Queue.Empty
else:
return queue_items.pop()
mock_queue = MagicMock()
mock_queue.empty.side_effect = _queue_empty
mock_queue.get.side_effect = _queue_get
mock_tqm._final_q = mock_queue
mock_tqm._stats = MagicMock()
mock_tqm._stats.increment.return_value = None
mock_iterator = MagicMock()
mock_iterator.mark_host_failed.return_value = None
mock_host = MagicMock()
mock_host.name = 'test01'
mock_host.vars = dict()
mock_task = MagicMock()
mock_task._role = None
mock_task.ignore_errors = False
mock_group = MagicMock()
mock_group.add_host.return_value = None
def _get_host(host_name):
if host_name == 'test01':
return mock_host
return None
def _get_group(group_name):
if group_name in ('all', 'foo'):
return mock_group
return None
mock_inventory = MagicMock()
mock_inventory._hosts_cache = dict()
mock_inventory.get_host.side_effect = _get_host
mock_inventory.get_group.side_effect = _get_group
mock_inventory.clear_pattern_cache.return_value = None
mock_var_mgr = MagicMock()
mock_var_mgr.set_host_variable.return_value = None
mock_var_mgr.set_host_facts.return_value = None
strategy_base = StrategyBase(tqm=mock_tqm)
strategy_base._inventory = mock_inventory
strategy_base._variable_manager = mock_var_mgr
strategy_base._blocked_hosts = dict()
strategy_base._notified_handlers = dict()
results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 0)
task_result = TaskResult(host=mock_host, task=mock_task, return_data=dict(changed=True))
queue_items.append(('host_task_ok', task_result))
strategy_base._blocked_hosts['test01'] = True
strategy_base._pending_results = 1
results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 1)
self.assertEqual(results[0], task_result)
self.assertEqual(strategy_base._pending_results, 0)
self.assertNotIn('test01', strategy_base._blocked_hosts)
task_result = TaskResult(host=mock_host, task=mock_task, return_data='{"failed":true}')
queue_items.append(('host_task_failed', task_result))
strategy_base._blocked_hosts['test01'] = True
strategy_base._pending_results = 1
results = strategy_base._process_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 1)
self.assertEqual(results[0], task_result)
self.assertEqual(strategy_base._pending_results, 0)
self.assertNotIn('test01', strategy_base._blocked_hosts)
self.assertIn('test01', mock_tqm._failed_hosts)
del mock_tqm._failed_hosts['test01']
task_result = TaskResult(host=mock_host, task=mock_task, return_data='{}')
queue_items.append(('host_unreachable', task_result))
strategy_base._blocked_hosts['test01'] = True
strategy_base._pending_results = 1
results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 1)
self.assertEqual(results[0], task_result)
self.assertEqual(strategy_base._pending_results, 0)
self.assertNotIn('test01', strategy_base._blocked_hosts)
self.assertIn('test01', mock_tqm._unreachable_hosts)
del mock_tqm._unreachable_hosts['test01']
task_result = TaskResult(host=mock_host, task=mock_task, return_data='{}')
queue_items.append(('host_task_skipped', task_result))
strategy_base._blocked_hosts['test01'] = True
strategy_base._pending_results = 1
results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 1)
self.assertEqual(results[0], task_result)
self.assertEqual(strategy_base._pending_results, 0)
self.assertNotIn('test01', strategy_base._blocked_hosts)
strategy_base._blocked_hosts['test01'] = True
strategy_base._pending_results = 1
queue_items.append(('add_host', dict(add_host=dict(host_name='newhost01', new_groups=['foo']))))
results = strategy_base._process_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 0)
self.assertEqual(strategy_base._pending_results, 1)
self.assertIn('test01', strategy_base._blocked_hosts)
queue_items.append(('add_group', mock_host, dict(add_group=dict(group_name='foo'))))
results = strategy_base._process_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 0)
self.assertEqual(strategy_base._pending_results, 1)
self.assertIn('test01', strategy_base._blocked_hosts)
task_result = TaskResult(host=mock_host, task=mock_task, return_data=dict(changed=True))
queue_items.append(('notify_handler', task_result, 'test handler'))
results = strategy_base._process_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 0)
self.assertEqual(strategy_base._pending_results, 1)
self.assertIn('test01', strategy_base._blocked_hosts)
self.assertIn('test handler', strategy_base._notified_handlers)
self.assertIn(mock_host, strategy_base._notified_handlers['test handler'])
queue_items.append(('set_host_var', mock_host, mock_task, None, 'foo', 'bar'))
results = strategy_base._process_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 0)
self.assertEqual(strategy_base._pending_results, 1)
queue_items.append(('set_host_facts', mock_host, mock_task, None, 'foo', dict()))
results = strategy_base._process_pending_results(iterator=mock_iterator)
self.assertEqual(len(results), 0)
self.assertEqual(strategy_base._pending_results, 1)
queue_items.append(('bad'))
self.assertRaises(AnsibleError, strategy_base._process_pending_results, iterator=mock_iterator)
def test_strategy_base_load_included_file(self):
fake_loader = DictDataLoader({
"test.yml": """
- debug: msg='foo'
""",
"bad.yml": """
""",
})
mock_tqm = MagicMock()
mock_tqm._final_q = MagicMock()
strategy_base = StrategyBase(tqm=mock_tqm)
strategy_base._loader = fake_loader
mock_play = MagicMock()
mock_block = MagicMock()
mock_block._play = mock_play
mock_block.vars = dict()
mock_task = MagicMock()
mock_task._block = mock_block
mock_task._role = None
mock_iterator = MagicMock()
mock_iterator.mark_host_failed.return_value = None
mock_inc_file = MagicMock()
mock_inc_file._task = mock_task
mock_inc_file._filename = "test.yml"
res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator)
mock_inc_file._filename = "bad.yml"
res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator)
self.assertEqual(res, [])
def test_strategy_base_run_handlers(self):
workers = []
for i in range(0, 3):
worker_main_q = MagicMock()
worker_main_q.put.return_value = None
worker_result_q = MagicMock()
workers.append([i, worker_main_q, worker_result_q])
mock_tqm = MagicMock()
mock_tqm._final_q = MagicMock()
mock_tqm.get_workers.return_value = workers
mock_tqm.send_callback.return_value = None
mock_play_context = MagicMock()
mock_handler_task = MagicMock()
mock_handler_task.get_name.return_value = "test handler"
mock_handler_task.has_triggered.return_value = False
mock_handler = MagicMock()
mock_handler.block = [mock_handler_task]
mock_handler.flag_for_host.return_value = False
mock_play = MagicMock()
mock_play.handlers = [mock_handler]
mock_host = MagicMock()
mock_host.name = "test01"
mock_iterator = MagicMock()
mock_inventory = MagicMock()
mock_inventory.get_hosts.return_value = [mock_host]
mock_var_mgr = MagicMock()
mock_var_mgr.get_vars.return_value = dict()
mock_iterator = MagicMock
mock_iterator._play = mock_play
strategy_base = StrategyBase(tqm=mock_tqm)
strategy_base._inventory = mock_inventory
strategy_base._notified_handlers = {"test handler": [mock_host]}
result = strategy_base.run_handlers(iterator=mock_iterator, play_context=mock_play_context)
| gpl-3.0 |
m-vdb/github-buildservice-boilerplate | buildservice/tests/views/test_api.py | 1 | 3665 | from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.test import TestCase, Client, override_settings
from mock import patch
from buildservice.models import Build, Repository
from buildservice.utils.testing import create_user_token
@override_settings(BUILDSERVICE_API_KEY='the_key')
class UpdateBuildStatusTestCase(TestCase):
    """Exercise the api_build_status endpoint: method, auth, payload
    validation, and the successful status-update path."""
    def setUp(self):
        self.client = Client()
        # URL for a repository that does not exist in the database.
        self.dummy_url = reverse('api_build_status', args=('unknown/repo', '42'))
        self.repo = Repository.objects.create(name='my/repo')
        self.user = get_user_model().objects.create_user('wiqheq', password='ttt')
        self.repo.users.add(self.user)
        self.build = Build.objects.create(
            repository=self.repo, branch='master',
            sha='0000', pusher_name='mvdb'
        )
        # URL for the build that actually exists.
        self.url = reverse('api_build_status', args=('my/repo', self.build.number))
    def test_get(self):
        # Only POST is allowed on this endpoint.
        resp = self.client.get(self.dummy_url)
        self.assertEqual(resp.status_code, 405)
    def test_post_not_json(self):
        # Non-JSON body is rejected.
        resp = self.client.post(
            self.dummy_url, data='hello',
            content_type="application/json"
        )
        self.assertEqual(resp.status_code, 400)
    def test_post_missing_status(self):
        # Valid JSON but no "status" key.
        resp = self.client.post(
            self.dummy_url + '?api_key=the_key', data='{"key": "value"}',
            content_type="application/json"
        )
        self.assertEqual(resp.status_code, 400)
        self.assertEqual(resp.json(), {'error': 'Missing status field.'})
    def test_post_json_not_dict(self):
        # A JSON array has no "status" field either.
        resp = self.client.post(
            self.dummy_url + '?api_key=the_key', data='[1, 2, 3]',
            content_type="application/json"
        )
        self.assertEqual(resp.status_code, 400)
        self.assertEqual(resp.json(), {'error': 'Missing status field.'})
    def test_post_no_api_key(self):
        # Missing api_key -> unauthorized.
        resp = self.client.post(
            self.dummy_url, data='{"status": "success"}',
            content_type="application/json"
        )
        self.assertEqual(resp.status_code, 401)
    def test_post_unknown_build(self):
        # Correct key but nonexistent repo/build -> 404.
        resp = self.client.post(
            self.dummy_url + '?api_key=the_key', data='{"status": "success"}',
            content_type="application/json"
        )
        self.assertEqual(resp.status_code, 404)
    def test_post_missing_token(self):
        # No OAuth token stored for the repo's user.
        resp = self.client.post(
            self.url + '?api_key=the_key', data='{"status": "something"}',
            content_type="application/json"
        )
        self.assertEqual(resp.status_code, 400)
        self.assertEqual(resp.json(), {'error': 'No token.'})
    def test_post_bad_status(self):
        # Token present but the status value is not a recognized one.
        create_user_token(self.user, self.repo)
        resp = self.client.post(
            self.url + '?api_key=the_key', data='{"status": "something"}',
            content_type="application/json"
        )
        self.assertEqual(resp.status_code, 400)
        self.assertEqual(resp.json(), {'error': 'Invalid status.'})
    @patch('buildservice.utils.github.create_status')
    def test_post_ok(self, create_status):
        # Happy path: build marked successful and GitHub status API called.
        token = create_user_token(self.user, self.repo)
        resp = self.client.post(
            self.url + '?api_key=the_key', data='{"status": "success"}',
            content_type="application/json"
        )
        self.assertEqual(resp.status_code, 200)
        self.build.refresh_from_db()
        self.assertTrue(self.build.is_success)
        create_status.assert_called_with(
            token.value, 'my/repo', self.build.sha,
            state='success', target_url=self.build.url
        )
| mit |
alsrgv/tensorflow | tensorflow/python/kernel_tests/sparse_tensor_dense_matmul_op_test.py | 16 | 16170 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for sparse_ops.sparse_tensor_dense_matmul."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import time
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.platform import app
from tensorflow.python.platform import test
def _maybe_complex(x):
if x.dtype.kind == "c": # complex
return (x + 1j * x) / 2
return x
class SparseTensorDenseMatmulTest(test.TestCase):
  """Tests for sparse_ops.sparse_tensor_dense_matmul.

  Each helper compares the TF op's output against a NumPy reference
  (`np.matrix` product, with .H for the adjoint cases).
  """

  def _testMatmul(self,
                  x,
                  y,
                  adjoint_a=False,
                  adjoint_b=False,
                  indices_dtype=np.int64):
    """Check sparse(x) @ y against the NumPy reference for one case."""
    x_mat = np.matrix(x)
    if adjoint_a:
      x_mat = x_mat.H
    y_mat = np.matrix(y)
    if adjoint_b:
      y_mat = y_mat.H
    np_ans = x_mat * y_mat
    # Build the COO representation of x by hand.
    x_indices = np.vstack(np.where(x)).astype(indices_dtype).T
    x_values = x[np.where(x)]
    x_shape = x.shape
    with self.cached_session(use_gpu=True):
      sp_x_value = sparse_tensor.SparseTensorValue(
          indices=x_indices, values=x_values, dense_shape=x_shape)
      # Exercise both the SparseTensorValue and SparseTensor input paths.
      tf_value_ans = sparse_ops.sparse_tensor_dense_matmul(
          sp_x_value, y, adjoint_a=adjoint_a, adjoint_b=adjoint_b)
      tf_tensor_ans = sparse_ops.sparse_tensor_dense_matmul(
          sparse_tensor.SparseTensor.from_value(sp_x_value),
          y,
          adjoint_a=adjoint_a,
          adjoint_b=adjoint_b)
      # Ensure that the RHS shape is known at least.
      self.assertEqual(tf_value_ans.get_shape()[1], np_ans.shape[1])
      self.assertEqual(tf_tensor_ans.get_shape()[1], np_ans.shape[1])
      for out in (self.evaluate(tf_value_ans), self.evaluate(tf_tensor_ans)):
        # Looser tolerance for 32-bit (and integer/complex64) computations.
        if x.dtype == np.float32:
          self.assertAllClose(np_ans, out, rtol=1e-4, atol=1e-4)
        elif x.dtype == np.float64:
          self.assertAllClose(np_ans, out, rtol=1e-6, atol=1e-6)
        else:
          self.assertAllClose(np_ans, out, rtol=1e-4, atol=1e-4)

  def _testBasic(self, value_dtype, indices_dtype=np.int64):
    """Run one 10x10 @ 10x20 case with ~50% sparsity."""
    x = _maybe_complex(np.random.rand(10, 10).astype(value_dtype))
    x[np.abs(x) < 0.5] = 0  # Make it sparse
    y = _maybe_complex(np.random.randn(10, 20).astype(value_dtype))
    self._testMatmul(x, y, indices_dtype=indices_dtype)

  @test_util.run_deprecated_v1
  def testBasic(self):
    np.random.seed(127)  # Repeatable results
    self._testBasic(np.int32)
    self._testBasic(np.float32)
    self._testBasic(np.float64)
    self._testBasic(np.complex64)
    self._testBasic(np.complex128)
    self._testBasic(np.int32, indices_dtype=np.int32)
    self._testBasic(np.float32, indices_dtype=np.int32)

  @test_util.run_deprecated_v1
  def testShapeInference(self):
    x = np.random.rand(10, 10)
    x[np.abs(x) < 0.5] = 0  # Make it sparse
    y = np.random.randn(10, 20)
    x_indices = np.vstack(np.where(x)).astype(np.int64).T
    x_values = x[np.where(x)]
    x_shape = x.shape
    x_st = sparse_tensor.SparseTensor(x_indices, x_values, x_shape)
    result = sparse_ops.sparse_tensor_dense_matmul(x_st, y)
    self.assertEqual(result.get_shape(), (10, 20))

    # With an unknown sparse shape, only the RHS dimension can be inferred.
    x_shape_unknown = array_ops.placeholder(dtype=dtypes.int64, shape=None)
    x_st_shape_unknown = sparse_tensor.SparseTensor(x_indices, x_values,
                                                    x_shape_unknown)
    result_left_shape_unknown = sparse_ops.sparse_tensor_dense_matmul(
        x_st_shape_unknown, y)
    self.assertEqual(result_left_shape_unknown.get_shape().as_list(),
                     [None, 20])

    # An inner-dimension mismatch must be rejected at graph-build time.
    x_shape_inconsistent = [10, 15]
    x_st_shape_inconsistent = sparse_tensor.SparseTensor(x_indices, x_values,
                                                         x_shape_inconsistent)
    with self.assertRaisesRegexp(ValueError, "Dimensions must be equal"):
      sparse_ops.sparse_tensor_dense_matmul(x_st_shape_inconsistent, y)

  @test_util.deprecated_graph_mode_only
  def testInvalidIndicesForSparseTensorDenseMatmul(self):
    # Note: use_gpu=False because nice errors are only returned from CPU
    # kernel.
    with self.session(use_gpu=False):
      indices = np.matrix([[1, 10]]).astype(np.int64)
      values = np.array([10]).astype(np.float32)
      shape = [3, 2]
      sparse_t = sparse_tensor.SparseTensor(indices, values, shape)

      # Test multiplying by both a small and large dense matrix, to hit
      # both cases in the kernel.
      dense_t = np.matrix([[1] * 5, [2] * 5], dtype=np.float32)
      with self.assertRaisesOpError(
          "k .10. from index.0,1. out of bounds .>=2."):
        self.evaluate(sparse_ops.sparse_tensor_dense_matmul(sparse_t, dense_t))
      dense_t = np.matrix([[1] * 500, [2] * 500], dtype=np.float32)
      with self.assertRaisesOpError(
          "k .10. from index.0,1. out of bounds .>=2."):
        self.evaluate(sparse_ops.sparse_tensor_dense_matmul(sparse_t, dense_t))

      # Repeat with adjoint_a, to get a different error.
      dense_t = np.matrix([[1] * 5, [2] * 5, [3] * 5], dtype=np.float32)
      with self.assertRaisesOpError(
          "m .10. from index.0,1. out of bounds .>=2."):
        self.evaluate(
            sparse_ops.sparse_tensor_dense_matmul(
                sparse_t, dense_t, adjoint_a=True))
      dense_t = np.matrix([[1] * 500, [2] * 500, [3] * 500], dtype=np.float32)
      with self.assertRaisesOpError(
          "m .10. from index.0,1. out of bounds .>=2."):
        self.evaluate(
            sparse_ops.sparse_tensor_dense_matmul(
                sparse_t, dense_t, adjoint_a=True))

  def testInvalidIndicesForSparseTensorDenseMatmulOnGPU(self):
    # The GPU kernel does not report out-of-bound indices; it produces
    # NaN rows (forward) or silently skips accumulation (adjoint_a).
    if not test.is_gpu_available():
      return
    with self.session(use_gpu=True):
      indices = np.array([[1, 10]]).astype(np.int64)
      values = np.array([10]).astype(np.float32)
      shape = [3, 2]
      sparse_t = sparse_tensor.SparseTensor(indices, values, shape)

      # Test multiplying by both a small and large dense matrix, to hit
      # both cases in the kernel.
      dense_t = np.matrix([[1] * 5, [2] * 5], dtype=np.float32)
      expected_t = np.array([[0] * 5, [np.nan] * 5, [0] * 5], dtype=np.float32)
      self.assertAllClose(expected_t,
                          sparse_ops.sparse_tensor_dense_matmul(
                              sparse_t, dense_t))
      dense_t = np.matrix([[1] * 500, [2] * 500], dtype=np.float32)
      expected_t = np.array(
          [[0] * 500, [np.nan] * 500, [0] * 500], dtype=np.float32)
      self.assertAllClose(expected_t,
                          sparse_ops.sparse_tensor_dense_matmul(
                              sparse_t, dense_t))

      # Repeat with adjoint_a, now the error is that the sparse index
      # is OOO w.r.t. the output.  The GPU kernel can't do much here,
      # so it just doesn't accumulate.
      dense_t = np.matrix([[1] * 5, [2] * 5, [3] * 5], dtype=np.float32)
      expected_t = np.array([[0] * 5, [0] * 5], dtype=np.float32)
      self.assertAllClose(expected_t,
                          sparse_ops.sparse_tensor_dense_matmul(
                              sparse_t, dense_t, adjoint_a=True))
      dense_t = np.matrix([[1] * 500, [2] * 500, [3] * 500], dtype=np.float32)
      expected_t = np.array([[0] * 500, [0] * 500], dtype=np.float32)
      self.assertAllClose(expected_t,
                          sparse_ops.sparse_tensor_dense_matmul(
                              sparse_t, dense_t, adjoint_a=True))

  # Tests setting one dimension to be a high value.
  def _testLarge(self, np_dtype):
    r1 = np.random.randint(6000, 20000)
    r2 = np.random.randint(1, 10)
    r3 = np.random.randint(1, 10)
    for m, k, n in [(r1, r2, r3),
                    (r2, r1, r3),
                    (r2, r3, r1)]:
      x = _maybe_complex(np.random.rand(m, k).astype(np_dtype))
      x[np.abs(x) < 0.8] = 0
      y = _maybe_complex(np.random.randn(k, n).astype(np_dtype))

      self._testMatmul(x, y, adjoint_a=False, adjoint_b=False)
      self._testMatmul(x.transpose(), y, adjoint_a=True, adjoint_b=False)
      self._testMatmul(x, y.transpose(), adjoint_a=False, adjoint_b=True)
      self._testMatmul(
          x.transpose(), y.transpose(), adjoint_a=True, adjoint_b=True)

  # BUG FIX: the five lines below previously sat at the end of _testLarge
  # itself, so _testLarge called itself unconditionally -> infinite
  # recursion.  They belong in a separate test entry point, mirroring the
  # _testBasic / testBasic pattern above.
  @test_util.run_deprecated_v1
  def testLarge(self):
    np.random.seed(127)  # Repeatable results
    self._testLarge(np.float32)
    self._testLarge(np.float64)
    self._testLarge(np.complex64)
    self._testLarge(np.complex128)

  # Tests random sized matrices.
  @test_util.run_deprecated_v1
  def testFloatRandom(self):
    np.random.seed(127)  # Repeatable results
    for _ in range(8):
      for adjoint_a in [True, False]:
        for adjoint_b in [True, False]:
          for thresh in [0.0, 0.2, 0.8, 1.0]:
            n, k, m = np.random.randint(1, 100, size=3)
            x = np.random.rand(n, k).astype(np.float32)
            x[x < thresh] = 0  # Make it sparse
            y = np.random.randn(k, m).astype(np.float32)
            x = x.transpose() if adjoint_a else x
            y = y.transpose() if adjoint_b else y
            self._testMatmul(x, y, adjoint_a, adjoint_b)
def _sparse_tensor_dense_vs_dense_matmul_benchmark_dense(x, y, adjoint_a,
                                                         adjoint_b):
  """Build a timing closure for dense matmul with a_is_sparse=True.

  Returns a function(iterations, sess) -> [final_tensor] that chains
  `iterations` matmuls in a while_loop so they cannot be executed in
  parallel or constant-folded away.
  """
  counter0 = constant_op.constant(0)
  accum0 = constant_op.constant(0.0)

  def _step(i, prev):
    # The control dependency serializes the iterations.
    with ops.control_dependencies([prev]):
      product = math_ops.matmul(
          x,
          y,
          transpose_a=adjoint_a,
          transpose_b=adjoint_b,
          a_is_sparse=True,
          b_is_sparse=False)
    return (i + 1, product)

  def _timeit(iterations, _):
    _, final = control_flow_ops.while_loop(
        lambda i, _: i < iterations,
        _step, (counter0, accum0),
        parallel_iterations=1,
        back_prop=False,
        shape_invariants=(tensor_shape.TensorShape(()),
                          tensor_shape.TensorShape(None)))
    return [final]

  return _timeit
def _sparse_tensor_dense_vs_dense_matmul_benchmark_sparse(x_ind, x_val, x_shape,
                                                          y, adjoint_a,
                                                          adjoint_b):
  """Build a timing closure for sparse_tensor_dense_matmul.

  Mirrors _sparse_tensor_dense_vs_dense_matmul_benchmark_dense but feeds a
  SparseTensor assembled from the given COO components.
  """
  sp_x = sparse_tensor.SparseTensor(
      indices=x_ind, values=x_val, dense_shape=x_shape)

  counter0 = constant_op.constant(0)
  accum0 = constant_op.constant(0.0)

  def _step(i, prev):
    # Serialize iterations via a control dependency on the previous result.
    with ops.control_dependencies([prev]):
      product = sparse_ops.sparse_tensor_dense_matmul(
          sp_x, y, adjoint_a=adjoint_a, adjoint_b=adjoint_b)
    return (i + 1, product)

  def _timeit(iterations, _):
    _, final = control_flow_ops.while_loop(
        lambda i, _: i < iterations,
        _step, (counter0, accum0),
        parallel_iterations=1,
        back_prop=False,
        shape_invariants=(tensor_shape.TensorShape(()),
                          tensor_shape.TensorShape(None)))
    return [final]

  return _timeit
def sparse_tensor_dense_vs_dense_matmul_benchmark(thresh,
                                                  m,
                                                  k,
                                                  n,
                                                  adjoint_a,
                                                  adjoint_b,
                                                  use_gpu,
                                                  skip_dense=False):
  """Time dense matmul (a_is_sparse=True) vs sparse_tensor_dense_matmul.

  Builds an [m, k] float32 matrix with entries below `thresh` zeroed out,
  multiplies it by a dense [k, n] matrix both ways, and prints one
  tab-separated result row (see main() for the column header).

  `skip_dense=True` reports NaN for the dense timing (used for sizes where
  the dense op is too slow to measure).
  """
  config = config_pb2.ConfigProto()
  config.allow_soft_placement = True

  # Configurable for benchmarking:
  # config.intra_op_parallelism_threads = 100
  # config.gpu_options.per_process_gpu_memory_fraction = 0.3

  np.random.seed([6, 117])  # Reproducibility
  x = np.random.rand(m, k).astype(np.float32)
  x[x < thresh] = 0  # sparsify: keep roughly (1 - thresh) of the entries
  y = np.random.randn(k, n).astype(np.float32)
  if adjoint_a:
    x = x.T
  if adjoint_b:
    y = y.T

  def _timer(sess, ops_fn, iterations):
    # Warm in
    sess.run(ops_fn(10, sess))

    # Timing run
    start = time.time()
    sess.run(ops_fn(iterations, sess))
    end = time.time()

    return (end - start) / (1.0 * iterations)  # Average runtime per iteration

  # Using regular matmul, marking one of the matrices as dense.
  if skip_dense:
    delta_dense = float("nan")
  else:
    with session.Session(config=config, graph=ops.Graph()) as sess:
      if not use_gpu:
        with ops.device("/cpu:0"):
          x_t = constant_op.constant(x)
          y_t = constant_op.constant(y)
          ops_fn = _sparse_tensor_dense_vs_dense_matmul_benchmark_dense(
              x_t, y_t, adjoint_a, adjoint_b)
      else:
        with ops.device("/device:GPU:0"):
          x_t = constant_op.constant(x)
          y_t = constant_op.constant(y)
          ops_fn = _sparse_tensor_dense_vs_dense_matmul_benchmark_dense(
              x_t, y_t, adjoint_a, adjoint_b)
      delta_dense = _timer(sess, ops_fn, 200)

  # Using sparse_tensor_dense_matmul.
  with session.Session("", config=config, graph=ops.Graph()) as sess:
    if not use_gpu:
      with ops.device("/cpu:0"):
        # COO components of x, built once outside the timed loop.
        x_ind = constant_op.constant(np.vstack(np.where(x)).astype(np.int64).T)
        x_val = constant_op.constant(x[np.where(x)])
        x_shape = constant_op.constant(np.array(x.shape).astype(np.int64))
        y_t = constant_op.constant(y)
        ops_fn = _sparse_tensor_dense_vs_dense_matmul_benchmark_sparse(
            x_ind, x_val, x_shape, y_t, adjoint_a, adjoint_b)
    else:
      with ops.device("/device:GPU:0"):
        x_ind = constant_op.constant(np.vstack(np.where(x)).astype(np.int64).T)
        x_val = constant_op.constant(x[np.where(x)])
        x_shape = constant_op.constant(np.array(x.shape).astype(np.int64))
        y_t = constant_op.constant(y)
        ops_fn = _sparse_tensor_dense_vs_dense_matmul_benchmark_sparse(
            x_ind, x_val, x_shape, y_t, adjoint_a, adjoint_b)
    delta_sparse = _timer(sess, ops_fn, 200)

  # One result row: nnz fraction, n, gpu flag, m, k, timings, and ratio.
  print("%g \t %d \t %s \t %d \t %d \t %g \t %g \t %g" %
        (1 - thresh, n, use_gpu, m, k, delta_dense, delta_sparse,
         delta_sparse / delta_dense))
def main(_):
  """Run the benchmark grid and print one tab-separated row per case."""
  print("DenseDense MatMul (w/ Sparse Flag) vs. SparseTensorDense MatMul")
  print("Matrix sizes:")
  print(" A sparse [m, k] with % nonzero values between 1% and 80%")
  print(" B dense [k, n]")
  print("")
  print("% nnz \t n \t gpu \t m \t k \t dt(dense) \t dt(sparse) "
        "\t dt(sparse)/dt(dense)")

  # Sweep sparsity, output width, device, and both input sizes.
  for thresh in (0.99, 0.8, 0.5, 0.2):
    for n in (50, 100):
      for use_gpu in (True, False):
        for m in (100, 1000):
          for k in (100, 1000):
            sparse_tensor_dense_vs_dense_matmul_benchmark(
                thresh, m, k, n, False, False, use_gpu=use_gpu)

  # Enable for large scale benchmarks, these ones take a long time to run.
  #
  # for use_gpu in (True, False):
  #   sparse_tensor_dense_vs_dense_matmul_benchmark(
  #       thresh=0.99, m=1000000, k=1000, n=100, adjoint_a=False,
  #       adjoint_b=False, use_gpu=use_gpu, skip_dense=True)
if __name__ == "__main__":
  # With --benchmarks, run the benchmark grid via app.run() -> main();
  # otherwise run the unit tests.  The flag is removed from argv so the
  # flags parser does not choke on it.
  if "--benchmarks" in sys.argv:
    sys.argv.remove("--benchmarks")
    app.run()
  else:
    test.main()
| apache-2.0 |
mbonsma/studyGroup | lessons/python/matplotlib/hwk3.1.py | 12 | 2149 | # -*- coding: utf-8 -*-
from numpy import float32
from numpy import linspace
from numpy import polyfit
from numpy import polyval
import matplotlib.pyplot as plt
# ---------------------------------------------------------------------------
# Read the paired nectar measurements from data.csv.  Each line holds one
# plant: "floral_nectar,extrafloral_nectar".
# ---------------------------------------------------------------------------
FN = []   # floral nectar (mg of sugar per flower)
EFN = []  # extrafloral nectar (mg of sugar per extrafloral nectary)

# FIX: use a context manager so the file is closed even if a line fails to
# parse (the original opened the file and only closed it on success), and
# iterate the file directly instead of loading it all with readlines().
with open('data.csv', 'r') as f:
    for raw in f:
        a = raw.strip()
        if not a:
            continue  # tolerate blank lines (e.g. trailing newline)
        x, y = a.split(",")
        FN.append(float32(x))
        EFN.append(float32(y))

# Evenly spaced x values used to draw the fitted line.
z = linspace(-1, 4)

# Create the figure and plot the raw data as black squares.
fig = plt.figure(figsize=(4, 4), dpi=600)
a = fig.add_subplot(1, 1, 1)
plt.plot(FN, EFN, 'ks', markersize=3)

# Least-squares linear fit, drawn over the data.
fit = polyfit(FN, EFN, 1)
plt.plot(z, polyval(fit, z), label=fit, color='k')

# Shrink the tick-label font on both axes.
for t in a.yaxis.get_major_ticks():
    t.label.set_fontsize(6)
for t in a.xaxis.get_major_ticks():
    t.label.set_fontsize(6)

# Subplot margins chosen so the long axis labels fit on the page.
fig.subplots_adjust(top=0.95, right=0.89, left=0.13, bottom=0.25)

# Axis limits and labels.
plt.xlim(-0.2, 3.5)
plt.ylim(0, 0.8)
plt.ylabel(r'Extrafloral Nectar (mg of sugar per extrafloral nectary)',
           fontsize=6, verticalalignment='center')
plt.xlabel(r'Floral Nectar (mg of sugar per flower)',
           fontsize=6, horizontalalignment='center')

# Save as PDF, then display interactively.
fig.savefig('EFNvFN.pdf', dpi=600)
plt.show()
"""In ecology, animals and plants interact with one another in an ecosystem.
There are several types of interactions that may occur such as predation,
parasitisim and mutualism. Mutualism is where the animals and plants both give
one another a survival benefit. So if a trait is not useful why invest energy
into producing it?
Different interactions have generally been studied individually even though
they occur in a community. This plot shows the relationship between EFN and FN
production in T. ulmifolia. There is a positive correlation, which suggests that
plants that produce more of one also produce more of the other
This is probably because of overall plant vigour. This was an initial figure
for a later experiment showing interactions."""
| apache-2.0 |
LingyuGitHub/codingofly | python/processmonitor/main.py | 1 | 2670 | #!/usr/bin/env python3
#########################################################################
# File Name: main.py
# Author: lingy
# Created Time: Fri 13 Jan 2017 11:32:15 AM CST
# Description:
#########################################################################
# -*- coding: utf-8 -*-
import sys
import os
import time
import logging
import signal
import mysubprocess
# Log everything at DEBUG level to stderr.  The commented-out `filename`
# line shows how to redirect the log to a file instead.
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S',
                    # filename='listenning.log',
                    filemode='w')
# Registry of currently monitored mysubprocess instances.
pro_arr=[]
def addpro(prostr):
    """Parse a process spec and start monitoring the described process.

    ``prostr`` must be a Python dict literal of the form
    ``{name: {'exe': ..., 'out': ..., 'err': ...}}``.

    Returns True when the process was started; False when the spec cannot
    be parsed or a process with the same name is already running.
    """
    import ast  # local import: stdlib, used only here

    global pro_arr
    try:
        # SECURITY FIX: this used eval(), which executes arbitrary code
        # supplied on the command stream.  literal_eval accepts only
        # Python literals (dicts, lists, strings, numbers, ...), which is
        # all the expected input ever contains.
        prodict = ast.literal_eval(prostr)
    except Exception:
        # logging.warn is a deprecated alias of logging.warning
        logging.warning('addpro warn '+prostr)
        return False
    proname = list(prodict.keys())[0]
    # Reject duplicates: one monitored process per name.
    for p in pro_arr:
        if proname == p.pro_name:
            logging.error('startpro error '+proname+' is running')
            return False
    pro = mysubprocess.mysubprocess(pro_name=proname,
                                    pro_exe=prodict[proname]['exe'],
                                    pro_out=prodict[proname]['out'],
                                    pro_err=prodict[proname]['err'])
    pro.start()
    pro_arr.append(pro)
    logging.info('startpro ok '+proname)
    return True
def killpro(proname):
    """Stop the monitored process named ``proname``.

    Returns True when the process was found and stopped, False when no
    process with that name is being monitored.
    """
    global pro_arr
    for proc in pro_arr:
        if proc.pro_name != proname:
            continue
        proc.stopprocess()
        # Safe to remove while iterating: we return immediately after.
        pro_arr.remove(proc)
        logging.info('killpro ok '+proname)
        return True
    logging.error('killpro error '+proname+' is not running')
    return False
def killall():
    """Stop every monitored process and clear the registry."""
    global pro_arr
    for proc in pro_arr:
        proc.stopprocess()
    # Fresh list rather than in-place clear; nothing else holds a reference.
    pro_arr = []
    return
def stop(signum, frame):
    """Signal handler: tear down all monitored processes, then exit.

    FIX: the first parameter was named ``signal``, shadowing the imported
    ``signal`` module inside the handler.  Renamed to ``signum`` — signal
    handlers are invoked positionally, so callers are unaffected.
    """
    logging.info("recv signal %d" % (signum))
    logging.info("kill all listenning process")
    killall()
    sys.exit()
    # (unreachable `return` removed: sys.exit() raises SystemExit)
if __name__=='__main__':
    # Install handlers so Ctrl-C / SIGTERM stop all children before exiting.
    signal.signal(signal.SIGINT, stop)
    signal.signal(signal.SIGTERM, stop)
    # Command loop: one command per stdin line.
    #   add {<name>: {'exe':..., 'out':..., 'err':...}}  start a process
    #   kill <name> [<name> ...]                         stop named processes
    #   killall                                          stop everything
    #   print                                            log info per process
    #   exit                                             shut down cleanly
    # Empty lines and lines starting with '#' are ignored.
    while(1):
        prostr=input()
        if (type(prostr) is not str) or len(prostr)==0 or prostr[0]=='#':
            continue
        logging.info(prostr)
        ss=prostr.split(' ')
        if ss[0]=='kill':
            for i in range(1,len(ss)):
                killpro(ss[i])
            pass
        elif ss[0]=='killall':
            killall()
            pass
        elif ss[0]=='add':
            # strip the leading "add" (3 chars); the rest is the dict spec
            addpro(prostr[3:])
            pass
        elif ss[0]=='exit':
            # deliver SIGINT to ourselves so stop() performs the cleanup
            os.kill(os.getpid(),signal.SIGINT)
            break
        elif ss[0]=='print':
            for p in pro_arr:
                logging.info(p.getinfo())
            pass
        continue
| gpl-3.0 |
mihaisoloi/conpaas | conpaas-services/contrib/libcloud/dns/drivers/route53.py | 5 | 11355 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'Route53DNSDriver'
]
import base64
import hmac
import datetime
import uuid
from libcloud.utils.py3 import httplib
from hashlib import sha1
from xml.etree import ElementTree as ET
from libcloud.utils.py3 import b, urlencode
from libcloud.utils.xml import findtext, findall, fixxpath
from libcloud.dns.types import Provider, RecordType
from libcloud.dns.types import ZoneDoesNotExistError, RecordDoesNotExistError
from libcloud.dns.base import DNSDriver, Zone, Record
from libcloud.common.types import LibcloudError
from libcloud.common.aws import AWSGenericResponse
from libcloud.common.base import ConnectionUserAndKey
# Route53 API version and endpoint.  NAMESPACE is the XML namespace used by
# all request/response documents of this API version.
API_VERSION = '2012-02-29'
API_HOST = 'route53.amazonaws.com'
API_ROOT = '/%s/' % (API_VERSION)
NAMESPACE = 'https://%s/doc%s' % (API_HOST, API_ROOT)
class InvalidChangeBatch(LibcloudError):
    """Raised when Route53 rejects a ChangeResourceRecordSets batch
    (e.g. an attempt to delete a record that does not exist)."""
    pass
class Route53DNSResponse(AWSGenericResponse):
    """
    Amazon Route53 response class.
    """
    # XML namespace used when parsing error documents.
    namespace = NAMESPACE
    # XPath to the error element inside an error document.
    xpath = 'Error'
    # Map of Route53 error codes to the exceptions raised for them.
    exceptions = {
        'NoSuchHostedZone': ZoneDoesNotExistError,
        'InvalidChangeBatch': InvalidChangeBatch,
    }
class Route53Connection(ConnectionUserAndKey):
    """Connection that signs every request with AWS3-HTTPS authentication."""
    host = API_HOST
    responseCls = Route53DNSResponse

    def pre_connect_hook(self, params, headers):
        """Add the Date and X-Amzn-Authorization headers before connecting."""
        time_string = datetime.datetime.utcnow() \
                              .strftime('%a, %d %b %Y %H:%M:%S GMT')
        headers['Date'] = time_string

        signature = self._get_aws_auth_b64(self.key, time_string)
        auth_fields = {'AWSAccessKeyId': self.user_id,
                       'Signature': signature,
                       'Algorithm': 'HmacSHA1'}
        joined = ','.join('%s=%s' % item for item in auth_fields.items())
        headers['X-Amzn-Authorization'] = 'AWS3-HTTPS ' + joined

        return params, headers

    def _get_aws_auth_b64(self, secret_key, time_string):
        """Return base64(HMAC-SHA1(secret_key, time_string)) as text."""
        digest = hmac.new(b(secret_key), b(time_string),
                          digestmod=sha1).digest()
        return base64.b64encode(digest).decode('utf-8')
class Route53DNSDriver(DNSDriver):
    """libcloud DNS driver for Amazon Route53."""

    type = Provider.ROUTE53
    name = 'Route53 DNS'
    website = 'http://aws.amazon.com/route53/'
    connectionCls = Route53Connection

    # FIX: RecordType.TXT was listed twice in this dict literal; duplicate
    # dict keys are silently collapsed, so the redundant entry was removed.
    RECORD_TYPE_MAP = {
        RecordType.NS: 'NS',
        RecordType.MX: 'MX',
        RecordType.A: 'A',
        RecordType.AAAA: 'AAAA',
        RecordType.CNAME: 'CNAME',
        RecordType.TXT: 'TXT',
        RecordType.SRV: 'SRV',
        RecordType.PTR: 'PTR',
        RecordType.SOA: 'SOA',
        RecordType.SPF: 'SPF'
    }

    def list_zones(self):
        """Return a list of all hosted zones for this account."""
        data = self.connection.request(API_ROOT + 'hostedzone').object
        zones = self._to_zones(data=data)
        return zones

    def list_records(self, zone):
        """Return all resource record sets in ``zone``."""
        self.connection.set_context({'zone_id': zone.id})
        uri = API_ROOT + 'hostedzone/' + zone.id + '/rrset'
        data = self.connection.request(uri).object
        records = self._to_records(data=data, zone=zone)
        return records

    def get_zone(self, zone_id):
        """Return the zone with id ``zone_id``.

        Raises ZoneDoesNotExistError (via the response class) when Route53
        reports NoSuchHostedZone.
        """
        self.connection.set_context({'zone_id': zone_id})
        uri = API_ROOT + 'hostedzone/' + zone_id
        data = self.connection.request(uri).object
        elem = findall(element=data, xpath='HostedZone',
                       namespace=NAMESPACE)[0]
        return self._to_zone(elem)

    def get_record(self, zone_id, record_id):
        """Return a single record.  ``record_id`` has the form 'TYPE:name'."""
        zone = self.get_zone(zone_id=zone_id)
        record_type, name = record_id.split(':', 1)
        if name:
            full_name = ".".join((name, zone.domain))
        else:
            # An empty name addresses the zone apex record.
            full_name = zone.domain
        self.connection.set_context({'zone_id': zone_id})
        params = urlencode({
            'name': full_name,
            'type': record_type,
            'maxitems': '1'
        })
        uri = API_ROOT + 'hostedzone/' + zone_id + '/rrset?' + params
        data = self.connection.request(uri).object
        record = self._to_records(data=data, zone=zone)[0]

        # A cute aspect of the /rrset filters is that they are more pagination
        # hints than filters!!
        # So will return a result even if its not what you asked for.
        record_type_num = self._string_to_record_type(record_type)
        if record.name != name or record.type != record_type_num:
            raise RecordDoesNotExistError(value='', driver=self,
                                          record_id=record_id)
        return record

    def create_zone(self, domain, type='master', ttl=None, extra=None):
        """Create a new hosted zone for ``domain``.

        ``extra`` may carry a 'Comment' key which is stored on the zone
        config.  ``type`` and ``ttl`` are accepted for API compatibility.
        """
        zone = ET.Element('CreateHostedZoneRequest', {'xmlns': NAMESPACE})
        ET.SubElement(zone, 'Name').text = domain
        # CallerReference must be unique per create request; a UUID is.
        ET.SubElement(zone, 'CallerReference').text = str(uuid.uuid4())

        if extra and 'Comment' in extra:
            hzg = ET.SubElement(zone, 'HostedZoneConfig')
            ET.SubElement(hzg, 'Comment').text = extra['Comment']

        uri = API_ROOT + 'hostedzone'
        data = ET.tostring(zone)
        rsp = self.connection.request(uri, method='POST', data=data).object
        elem = findall(element=rsp, xpath='HostedZone', namespace=NAMESPACE)[0]
        return self._to_zone(elem=elem)

    def delete_zone(self, zone, ex_delete_records=False):
        """Delete ``zone``; optionally delete its non-NS/SOA records first."""
        self.connection.set_context({'zone_id': zone.id})

        if ex_delete_records:
            self.ex_delete_all_records(zone=zone)

        uri = API_ROOT + 'hostedzone/%s' % (zone.id)
        response = self.connection.request(uri, method='DELETE')
        return response.status in [httplib.OK]

    def create_record(self, name, zone, type, data, extra=None):
        """Create a record in ``zone`` and return the Record object."""
        batch = [('CREATE', name, type, data, extra)]
        self._post_changeset(zone, batch)
        id = ':'.join((self.RECORD_TYPE_MAP[type], name))
        return Record(id=id, name=name, type=type, data=data, zone=zone,
                      driver=self, extra=extra)

    def update_record(self, record, name, type, data, extra):
        """Replace ``record`` with new name/type/data in a single changeset."""
        batch = [
            ('DELETE', record.name, record.type, record.data, record.extra),
            ('CREATE', name, type, data, extra)]
        self._post_changeset(record.zone, batch)
        id = ':'.join((self.RECORD_TYPE_MAP[type], name))
        return Record(id=id, name=name, type=type, data=data, zone=record.zone,
                      driver=self, extra=extra)

    def delete_record(self, record):
        """Delete ``record``; raises RecordDoesNotExistError if absent."""
        try:
            r = record
            batch = [('DELETE', r.name, r.type, r.data, r.extra)]
            self._post_changeset(record.zone, batch)
        except InvalidChangeBatch:
            # Route53 signals "no such record" via InvalidChangeBatch.
            raise RecordDoesNotExistError(value='', driver=self,
                                          record_id=r.id)
        return True

    def ex_delete_all_records(self, zone):
        """
        Remove all the records for the provided zone.

        @param zone: Zone to delete records for.
        @type  zone: L{Zone}
        """
        deletions = []
        for r in zone.list_records():
            if r.type in (RecordType.NS, RecordType.SOA):
                # NS and SOA records cannot be removed from a hosted zone.
                continue
            deletions.append(('DELETE', r.name, r.type, r.data, r.extra))

        if deletions:
            self._post_changeset(zone, deletions)

    def _post_changeset(self, zone, changes_list):
        """POST a ChangeResourceRecordSets document built from changes_list.

        Each entry is an (action, name, type, data, extra) tuple.
        """
        attrs = {'xmlns': NAMESPACE}
        changeset = ET.Element('ChangeResourceRecordSetsRequest', attrs)
        batch = ET.SubElement(changeset, 'ChangeBatch')
        changes = ET.SubElement(batch, 'Changes')

        for action, name, type_, data, extra in changes_list:
            change = ET.SubElement(changes, 'Change')
            ET.SubElement(change, 'Action').text = action

            rrs = ET.SubElement(change, 'ResourceRecordSet')
            ET.SubElement(rrs, 'Name').text = name + "." + zone.domain
            ET.SubElement(rrs, 'Type').text = self.RECORD_TYPE_MAP[type_]
            ET.SubElement(rrs, 'TTL').text = str(extra.get('ttl', '0'))

            rrecs = ET.SubElement(rrs, 'ResourceRecords')
            rrec = ET.SubElement(rrecs, 'ResourceRecord')
            ET.SubElement(rrec, 'Value').text = data

        uri = API_ROOT + 'hostedzone/' + zone.id + '/rrset'
        data = ET.tostring(changeset)
        self.connection.set_context({'zone_id': zone.id})
        self.connection.request(uri, method='POST', data=data)

    def _to_zones(self, data):
        """Parse a ListHostedZones document into Zone objects."""
        zones = []
        for element in data.findall(fixxpath(xpath='HostedZones/HostedZone',
                                             namespace=NAMESPACE)):
            zones.append(self._to_zone(element))
        return zones

    def _to_zone(self, elem):
        """Build a Zone from a HostedZone XML element."""
        name = findtext(element=elem, xpath='Name', namespace=NAMESPACE)
        id = findtext(element=elem, xpath='Id',
                      namespace=NAMESPACE).replace('/hostedzone/', '')
        comment = findtext(element=elem, xpath='Config/Comment',
                           namespace=NAMESPACE)
        resource_record_count = int(findtext(element=elem,
                                             xpath='ResourceRecordSetCount',
                                             namespace=NAMESPACE))

        extra = {'Comment': comment, 'ResourceRecordSetCount':
                 resource_record_count}

        zone = Zone(id=id, domain=name, type='master', ttl=0, driver=self,
                    extra=extra)
        return zone

    def _to_records(self, data, zone):
        """Parse a ListResourceRecordSets document into Record objects."""
        records = []
        elems = data.findall(
            fixxpath(xpath='ResourceRecordSets/ResourceRecordSet',
                     namespace=NAMESPACE))
        for elem in elems:
            records.append(self._to_record(elem, zone))
        return records

    def _to_record(self, elem, zone):
        """Build a Record from a ResourceRecordSet XML element."""
        name = findtext(element=elem, xpath='Name',
                        namespace=NAMESPACE)
        # Strip the trailing ".<zone domain>" from the fully-qualified name.
        name = name[:-len(zone.domain) - 1]

        type = self._string_to_record_type(findtext(element=elem, xpath='Type',
                                                    namespace=NAMESPACE))
        ttl = findtext(element=elem, xpath='TTL', namespace=NAMESPACE)

        # TODO: Support records with multiple values
        value_elem = elem.findall(
            fixxpath(xpath='ResourceRecords/ResourceRecord',
                     namespace=NAMESPACE))[0]
        data = findtext(element=(value_elem), xpath='Value',
                        namespace=NAMESPACE)

        extra = {'ttl': ttl}

        id = ':'.join((self.RECORD_TYPE_MAP[type], name))

        record = Record(id=id, name=name, type=type, data=data, zone=zone,
                        driver=self, extra=extra)
        return record
| bsd-3-clause |
JioCloud/nova_test_latest | nova/tests/unit/servicegroup/test_zk_driver.py | 59 | 4183 | # Copyright (c) AT&T 2012-2013 Yun Mao <yunmao@gmail.com>
# Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test the ZooKeeper driver for servicegroup.
You need to install ZooKeeper locally and related dependencies
to run the test. It's unclear how to install python-zookeeper lib
in venv so you might have to run the test without it.
To set up in Ubuntu 12.04:
$ sudo apt-get install zookeeper zookeeperd python-zookeeper
$ sudo pip install evzookeeper
$ nosetests nova.tests.unit.servicegroup.test_zk_driver
"""
import os
import mock
from nova import servicegroup
from nova.servicegroup.drivers import zk
from nova import test
class ZKServiceGroupTestCase(test.NoDBTestCase):
    """Tests for the ZooKeeper servicegroup driver.

    Requires a locally running ZooKeeper plus the evzookeeper/zookeeper
    Python bindings; tests are skipped when they are unavailable.
    """

    def setUp(self):
        super(ZKServiceGroupTestCase, self).setUp()
        self.flags(servicegroup_driver='zk')
        self.flags(address='localhost:2181', group="zookeeper")
        # Skip (rather than error) when the ZooKeeper bindings are missing.
        try:
            __import__('evzookeeper')
            __import__('zookeeper')
        except ImportError:
            self.skipTest("Unable to test due to lack of ZooKeeper")

    # Need to do this here, as opposed to the setUp() method, otherwise
    # the decorator will cause an import error...
    @mock.patch('evzookeeper.ZKSession')
    def _setup_sg_api(self, zk_sess_mock):
        self.zk_sess = mock.MagicMock()
        zk_sess_mock.return_value = self.zk_sess
        self.flags(servicegroup_driver='zk')
        self.flags(address='ignored', group="zookeeper")
        self.servicegroup_api = servicegroup.API()

    def test_zookeeper_hierarchy_structure(self):
        """Test that hierarchy created by join method contains process id."""
        from zookeeper import NoNodeException
        self.servicegroup_api = servicegroup.API()
        service_id = {'topic': 'unittest', 'host': 'serviceC'}
        # use existing session object
        session = self.servicegroup_api._driver._session
        # prepare a path that contains process id
        pid = os.getpid()
        path = '/servicegroups/%s/%s/%s' % (service_id['topic'],
                                            service_id['host'],
                                            pid)
        # assert that node doesn't exist yet
        self.assertRaises(NoNodeException, session.get, path)
        # join
        self.servicegroup_api.join(service_id['host'],
                                   service_id['topic'],
                                   None)
        # expected existing "process id" node
        self.assertTrue(session.get(path))

    def test_lazy_session(self):
        """Session object (contains zk handle) should be created in a
        lazy manner, because the handle cannot be shared by forked processes.
        """
        # inside import, because this test only runs conditionally (setUp)
        import evzookeeper
        driver = zk.ZooKeeperDriver()
        # check that internal private attribute session is empty
        self.assertIsNone(driver.__dict__['_ZooKeeperDriver__session'])
        # after first use of property ...
        driver._session
        # check that internal private session attribute is ready
        self.assertIsInstance(driver.__dict__['_ZooKeeperDriver__session'],
                              evzookeeper.ZKSession)

    @mock.patch('evzookeeper.membership.Membership')
    def test_join(self, mem_mock):
        # join() should create a Membership under /<topic> for this host.
        self._setup_sg_api()
        mem_mock.return_value = mock.sentinel.zk_mem
        self.servicegroup_api.join('fake-host', 'fake-topic')
        mem_mock.assert_called_once_with(self.zk_sess,
                                         '/fake-topic',
                                         'fake-host')
| apache-2.0 |
chrismbarnes/ndnSIM | doc/manual/source/conf.py | 75 | 7047 | # -*- coding: utf-8 -*-
#
# ns-3 documentation build configuration file, created by
# sphinx-quickstart on Tue Dec 14 09:00:39 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.pngmath']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ns-3'
copyright = u'2010, ns-3 project'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'ns-3-dev'
# The full version, including alpha/beta/rc tags.
release = 'ns-3-dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []


# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'ns3_html_theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['../..']

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
html_title = 'Manual'

# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y %H:%M'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'ns-3doc'


# -- Options for LaTeX output --------------------------------------------------

# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'

# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'ns-3-manual.tex', u'ns-3 Manual',
     u'ns-3 project', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = '../../ns3_html_theme/static/ns-3.png'

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Additional stuff for the LaTeX preamble.
# NOTE: this must be a raw string -- '\usepackage' starts with '\u', which
# is an invalid unicode escape in a plain string literal and a hard
# SyntaxError on Python 3.
latex_preamble = r'\usepackage{amssymb}'

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True


# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'ns-3-manual', u'ns-3 Manual',
     [u'ns-3 project'], 1)
]
| gpl-2.0 |
tumbl3w33d/ansible | lib/ansible/parsing/splitter.py | 49 | 11324 | # (c) 2014 James Cammarata, <jcammarata@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import codecs
import re
from ansible.errors import AnsibleParserError
from ansible.module_utils._text import to_text
from ansible.parsing.quoting import unquote
# Decode escapes adapted from rspeer's answer here:
# http://stackoverflow.com/questions/4020539/process-escape-sequences-in-a-string-in-python
_HEXCHAR = '[a-fA-F0-9]'
_ESCAPE_SEQUENCE_RE = re.compile(r'''
( \\U{0} # 8-digit hex escapes
| \\u{1} # 4-digit hex escapes
| \\x{2} # 2-digit hex escapes
| \\N\{{[^}}]+\}} # Unicode characters by name
| \\[\\'"abfnrtv] # Single-character escapes
)'''.format(_HEXCHAR * 8, _HEXCHAR * 4, _HEXCHAR * 2), re.UNICODE | re.VERBOSE)
def _decode_escapes(s):
def decode_match(match):
return codecs.decode(match.group(0), 'unicode-escape')
return _ESCAPE_SEQUENCE_RE.sub(decode_match, s)
def parse_kv(args, check_raw=False):
    '''
    Convert a string of key/value items to a dict. If any free-form params
    are found and the check_raw option is set to True, they will be added
    to a new parameter called '_raw_params'. If check_raw is not enabled,
    they will simply be ignored.
    '''

    args = to_text(args, nonstring='passthru')

    options = {}
    if args is None:
        return options

    try:
        vargs = split_args(args)
    except IndexError as e:
        raise AnsibleParserError("Unable to parse argument string", orig_exc=e)
    except ValueError as ve:
        if 'no closing quotation' in str(ve).lower():
            raise AnsibleParserError("error parsing argument string, try quoting the entire line.", orig_exc=ve)
        raise

    raw_params = []
    for orig_token in vargs:
        token = _decode_escapes(orig_token)

        if '=' not in token:
            # No key/value separator at all: free-form parameter.
            raw_params.append(orig_token)
            continue

        # Locate the first '=' that is neither at position 0 nor escaped
        # with a backslash. The search deliberately starts at index 1, so
        # a leading '=' never acts as a separator.
        pos = 0
        while True:
            try:
                pos = token.index('=', pos + 1)
            except ValueError:
                # Only escaped (or leading) equals signs were found: treat
                # the whole decoded token as free-form, unescaping them.
                pos = -1
                break
            if pos > 0 and token[pos - 1] != '\\':
                break

        if pos < 0:
            raw_params.append(token.replace('\\=', '='))
            continue

        key = token[:pos]
        value = token[pos + 1:]

        # FIXME: make the retrieval of this list of shell/command
        #        options a function, so the list is centralized
        if check_raw and key not in ('creates', 'removes', 'chdir', 'executable', 'warn'):
            raw_params.append(orig_token)
        else:
            options[key.strip()] = unquote(value.strip())

    # Recombine the free-form params, if any were found, and assign
    # them to a special option for use later by the shell/command module.
    if raw_params:
        options[u'_raw_params'] = join_args(raw_params)

    return options
def _get_quote_state(token, quote_char):
'''
the goal of this block is to determine if the quoted string
is unterminated in which case it needs to be put back together
'''
# the char before the current one, used to see if
# the current character is escaped
prev_char = None
for idx, cur_char in enumerate(token):
if idx > 0:
prev_char = token[idx - 1]
if cur_char in '"\'' and prev_char != '\\':
if quote_char:
if cur_char == quote_char:
quote_char = None
else:
quote_char = cur_char
return quote_char
def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
'''
this function counts the number of opening/closing blocks for a
given opening/closing type and adjusts the current depth for that
block based on the difference
'''
num_open = token.count(open_token)
num_close = token.count(close_token)
if num_open != num_close:
cur_depth += (num_open - num_close)
if cur_depth < 0:
cur_depth = 0
return cur_depth
def join_args(s):
    '''
    Join the original cmd based on manipulations by split_args().
    This retains the original newlines and whitespaces.
    '''
    result = ''
    for piece in s:
        # A piece that starts the string, or follows a piece ending in a
        # newline, is glued on directly; every other piece gets a single
        # separating space.
        if result and not result.endswith('\n'):
            result += ' '
        result += piece
    return result
def split_args(args):
    '''
    Splits args on whitespace, but intelligently reassembles
    those that may have been split over a jinja2 block or quotes.

    When used in a remote module, we won't ever have to be concerned about
    jinja2 blocks, however this function is/will be used in the
    core portions as well before the args are templated.

    example input: a=b c="foo bar"
    example output: ['a=b', 'c="foo bar"']

    Basically this is a variation shlex that has some more intelligence for
    how Ansible needs to use it.

    :param args: the raw argument string, possibly spanning multiple lines
    :returns: list of tokens with quoted spans and jinja2 blocks kept whole
    :raises AnsibleParserError: if quotes or jinja2 blocks are unbalanced
        at the end of the string
    '''

    # the list of params parsed out of the arg string
    # this is going to be the result value when we are done
    params = []

    # Initial split on newlines
    items = args.split('\n')

    # iterate over the tokens, and reassemble any that may have been
    # split on a space inside a jinja2 block.
    # ex if tokens are "{{", "foo", "}}" these go together

    # These variables are used
    # to keep track of the state of the parsing, since blocks and quotes
    # may be nested within each other.

    quote_char = None
    inside_quotes = False
    print_depth = 0  # used to count nested jinja2 {{ }} blocks
    block_depth = 0  # used to count nested jinja2 {% %} blocks
    comment_depth = 0  # used to count nested jinja2 {# #} blocks

    # now we loop over each split chunk, coalescing tokens if the white space
    # split occurred within quotes or a jinja2 block of some kind
    for (itemidx, item) in enumerate(items):

        # we split on spaces and newlines separately, so that we
        # can tell which character we split on for reassembly
        # inside quotation characters
        tokens = item.split(' ')

        line_continuation = False
        for (idx, token) in enumerate(tokens):

            # Empty entries means we have subsequent spaces
            # We want to hold onto them so we can reconstruct them later
            if len(token) == 0 and idx != 0:
                params[-1] += ' '
                continue

            # if we hit a line continuation character, but
            # we're not inside quotes, ignore it and continue
            # on to the next token while setting a flag
            if token == '\\' and not inside_quotes:
                line_continuation = True
                continue

            # store the previous quoting state for checking later
            was_inside_quotes = inside_quotes
            quote_char = _get_quote_state(token, quote_char)
            inside_quotes = quote_char is not None

            # multiple conditions may append a token to the list of params,
            # so we keep track with this flag to make sure it only happens once
            # append means add to the end of the list, don't append means concatenate
            # it to the end of the last token
            appended = False

            # if we're inside quotes now, but weren't before, append the token
            # to the end of the list, since we'll tack on more to it later
            # otherwise, if we're inside any jinja2 block, inside quotes, or we were
            # inside quotes (but aren't now) concat this token to the last param
            if inside_quotes and not was_inside_quotes and not(print_depth or block_depth or comment_depth):
                params.append(token)
                appended = True
            elif print_depth or block_depth or comment_depth or inside_quotes or was_inside_quotes:
                if idx == 0 and was_inside_quotes:
                    # first token on the line while a quote is open: the
                    # newline split was already restored below, so join with
                    # no separator
                    params[-1] = "%s%s" % (params[-1], token)
                elif len(tokens) > 1:
                    # this line was split on spaces; re-insert a space
                    # except before the very first token of the line
                    spacer = ''
                    if idx > 0:
                        spacer = ' '
                    params[-1] = "%s%s%s" % (params[-1], spacer, token)
                else:
                    # single-token line inside an open block/quote: the
                    # split character was a newline, so restore it
                    params[-1] = "%s\n%s" % (params[-1], token)
                appended = True

            # if the number of paired block tags is not the same, the depth has changed, so we calculate that here
            # and may append the current token to the params (if we haven't previously done so)
            prev_print_depth = print_depth
            print_depth = _count_jinja2_blocks(token, print_depth, "{{", "}}")
            if print_depth != prev_print_depth and not appended:
                params.append(token)
                appended = True

            prev_block_depth = block_depth
            block_depth = _count_jinja2_blocks(token, block_depth, "{%", "%}")
            if block_depth != prev_block_depth and not appended:
                params.append(token)
                appended = True

            prev_comment_depth = comment_depth
            comment_depth = _count_jinja2_blocks(token, comment_depth, "{#", "#}")
            if comment_depth != prev_comment_depth and not appended:
                params.append(token)
                appended = True

            # finally, if we're at zero depth for all blocks and not inside quotes, and have not
            # yet appended anything to the list of params, we do so now
            if not (print_depth or block_depth or comment_depth) and not inside_quotes and not appended and token != '':
                params.append(token)

        # if this was the last token in the list, and we have more than
        # one item (meaning we split on newlines), add a newline back here
        # to preserve the original structure
        if len(items) > 1 and itemidx != len(items) - 1 and not line_continuation:
            params[-1] += '\n'

        # always clear the line continuation flag
        line_continuation = False

    # If we're done and things are not at zero depth or we're still inside quotes,
    # raise an error to indicate that the args were unbalanced
    if print_depth or block_depth or comment_depth or inside_quotes:
        raise AnsibleParserError(u"failed at splitting arguments, either an unbalanced jinja2 block or quotes: {0}".format(args))

    return params
| gpl-3.0 |
stefanweller/ansible-modules-extras | cloud/amazon/ecs_taskdefinition.py | 60 | 7958 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ecs_taskdefinition
short_description: register a task definition in ecs
description:
- Creates or terminates task definitions
version_added: "2.0"
author: Mark Chance(@Java1Guy)
requirements: [ json, boto, botocore, boto3 ]
options:
state:
description:
- State whether the task definition should exist or be deleted
required: true
choices: ['present', 'absent']
arn:
description:
- The arn of the task description to delete
required: false
family:
description:
- A Name that would be given to the task definition
required: false
revision:
description:
- A revision number for the task definition
required: False
type: int
containers:
description:
- A list of containers definitions
required: False
type: list of dicts with container definitions
volumes:
description:
- A list of names of volumes to be attached
required: False
type: list of name
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
- name: "Create task definition"
ecs_taskdefinition:
containers:
- name: simple-app
cpu: 10
essential: true
image: "httpd:2.4"
memory: 300
mountPoints:
- containerPath: /usr/local/apache2/htdocs
sourceVolume: my-vol
portMappings:
- containerPort: 80
hostPort: 80
- name: busybox
command:
- "/bin/sh -c \"while true; do echo '<html> <head> <title>Amazon ECS Sample App</title> <style>body {margin-top: 40px; background-color: #333;} </style> </head><body> <div style=color:white;text-align:center> <h1>Amazon ECS Sample App</h1> <h2>Congratulations!</h2> <p>Your application is now running on a container in Amazon ECS.</p>' > top; /bin/date > date ; echo '</div></body></html>' > bottom; cat top date bottom > /usr/local/apache2/htdocs/index.html ; sleep 1; done\""
cpu: 10
entryPoint:
- sh
- "-c"
essential: false
image: busybox
memory: 200
volumesFrom:
- sourceContainer: simple-app
volumes:
- name: my-vol
family: test-cluster-taskdef
state: present
register: task_output
'''
RETURN = '''
taskdefinition:
description: a reflection of the input parameters
type: dict inputs plus revision, status, taskDefinitionArn
'''
try:
import boto
import botocore
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
try:
import boto3
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
class EcsTaskManager:
    """Thin wrapper around the boto3 ECS client for task-definition calls."""

    def __init__(self, module):
        """Build the ECS client from the Ansible module's AWS parameters.

        Fails the module when no region can be determined or when boto
        cannot locate credentials.
        """
        self.module = module

        try:
            region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
            if not region:
                module.fail_json(msg="Region must be specified as a parameter, in EC2_REGION or AWS_REGION environment variables or in boto configuration file")
            self.ecs = boto3_conn(module, conn_type='client', resource='ecs', region=region, endpoint=ec2_url, **aws_connect_kwargs)
        # 'except E, e' is Python-2-only syntax (a SyntaxError on Python 3);
        # 'except E as e' works on Python 2.6+ and Python 3.
        except boto.exception.NoAuthHandlerFound as e:
            module.fail_json(msg="Can't authorize connection - " + str(e))

    def describe_task(self, task_name):
        """Return the task definition dict for *task_name*, or None when it
        does not exist (the API reports that as a ClientError)."""
        try:
            response = self.ecs.describe_task_definition(taskDefinition=task_name)
            return response['taskDefinition']
        except botocore.exceptions.ClientError:
            return None

    def register_task(self, family, container_definitions, volumes):
        """Register a new task-definition revision and return it."""
        response = self.ecs.register_task_definition(
            family=family,
            containerDefinitions=container_definitions,
            volumes=volumes)
        return response['taskDefinition']

    def deregister_task(self, taskArn):
        """Deregister (deactivate) the given task definition and return it."""
        response = self.ecs.deregister_task_definition(taskDefinition=taskArn)
        return response['taskDefinition']
def main():
    """Module entry point: create ('present') or deregister ('absent') an
    ECS task definition according to the module parameters."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        state=dict(required=True, choices=['present', 'absent']),
        arn=dict(required=False, type='str'),
        family=dict(required=False, type='str'),
        revision=dict(required=False, type='int'),
        containers=dict(required=False, type='list'),
        volumes=dict(required=False, type='list')
    ))

    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)

    if not HAS_BOTO:
        module.fail_json(msg='boto is required.')

    if not HAS_BOTO3:
        module.fail_json(msg='boto3 is required.')

    task_to_describe = None

    # When deregistering a task, we can specify the ARN OR the family and
    # revision. NOTE: AnsibleModule fills every argument_spec key into
    # module.params (defaulting to None), so presence must be tested with
    # 'is not None' -- the previous membership checks were always true and
    # the "must be specified" failures below could never fire.
    if module.params['state'] == 'absent':
        if module.params['arn'] is not None:
            task_to_describe = module.params['arn']
        elif module.params['family'] is not None and module.params['revision'] is not None:
            task_to_describe = module.params['family'] + ":" + str(module.params['revision'])
        else:
            module.fail_json(msg="To use task definitions, an arn or family and revision must be specified")

    # When registering a task, the family and container list are required.
    if module.params['state'] == 'present':
        if module.params['family'] is None:
            module.fail_json(msg="To use task definitions, a family must be specified")
        if module.params['containers'] is None:
            module.fail_json(msg="To use task definitions, a list of containers must be specified")
        task_to_describe = module.params['family']

    task_mgr = EcsTaskManager(module)
    existing = task_mgr.describe_task(task_to_describe)

    results = dict(changed=False)
    if module.params['state'] == 'present':
        if existing and 'status' in existing and existing['status'] == "ACTIVE":
            # Already registered and active: nothing to do.
            results['taskdefinition'] = existing
        else:
            if not module.check_mode:
                # Doesn't exist (or is inactive): register it.
                volumes = module.params['volumes'] or []
                results['taskdefinition'] = task_mgr.register_task(
                    module.params['family'], module.params['containers'], volumes)
            results['changed'] = True

    elif module.params['state'] == 'absent':
        if not existing:
            pass
        else:
            # It exists, so deregister it and report what was removed.
            results['taskdefinition'] = existing
            if 'status' in existing and existing['status'] == "INACTIVE":
                results['changed'] = False
            else:
                if not module.check_mode:
                    task_mgr.deregister_task(task_to_describe)
                results['changed'] = True

    module.exit_json(**results)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| gpl-3.0 |
lmEshoo/st2contrib | packs/cubesensors/sensors/measurements_sensor.py | 2 | 5791 | import time
from rauth import OAuth1Session
from st2common.util import isotime
from st2reactor.sensor.base import PollingSensor
__all__ = [
'CubeSensorsMeasurementsSensor'
]
BASE_URL = 'https://api.cubesensors.com/v1'
FIELD_CONVERT_FUNCS = {
'temp': lambda value: (float(value) / 100)
}
class CubeSensorsMeasurementsSensor(PollingSensor):
    """Polls the CubeSensors API for each configured device and dispatches a
    ``cubesensors.measurements`` trigger whenever a device reports a
    measurement newer than the last one seen."""

    DATASTORE_KEY_NAME = 'last_measurements_timestamp'

    def __init__(self, sensor_service, config=None, poll_interval=None):
        super(CubeSensorsMeasurementsSensor, self).__init__(sensor_service=sensor_service,
                                                            config=config,
                                                            poll_interval=poll_interval)
        self._device_uids = self._config['sensor'].get('device_uids', [])
        self._logger = self._sensor_service.get_logger(__name__)

        self._device_info_cache = {}  # maps device_uid -> device info dict
        self._last_measurement_timestamps = {}  # maps device_uid -> last mes. timestamp

    def setup(self):
        if not self._device_uids:
            raise ValueError('No "device_uids" configured!')

        self._session = self._get_session()

        # Populate the device info cache once so dispatched triggers can
        # include the human-readable device name.
        for device_uid in self._device_uids:
            data = self._get_device_info(device_uid=device_uid)
            self._device_info_cache[device_uid] = data

    def poll(self):
        for device_uid in self._device_uids:
            result = self._get_measurements(device_uid=device_uid)

            if not result:
                continue

            self._handle_result(device_uid=device_uid, result=result)

    def cleanup(self):
        pass

    def add_trigger(self, trigger):
        pass

    def update_trigger(self, trigger):
        pass

    def remove_trigger(self, trigger):
        pass

    def _handle_result(self, device_uid, result):
        """Dispatch a trigger for *result* unless this measurement (or a
        newer one) has already been seen for the device."""
        existing_last_measurement_timestamp = self._get_last_measurement_timestamp(
            device_uid=device_uid)

        new_last_measurement_timestamp = isotime.parse(result['time'])
        new_last_measurement_timestamp = int(time.mktime(
            new_last_measurement_timestamp.timetuple()))

        if (existing_last_measurement_timestamp and
                new_last_measurement_timestamp <= existing_last_measurement_timestamp):
            # We have already seen this measurement, skip it
            self._logger.debug(('No new measurements, skipping results we have already seen '
                                'for device %s' % (device_uid)))
            return

        # Dispatch trigger
        self._dispatch_trigger(device_uid=device_uid, result=result)

        # Store last measurement timestamp
        self._set_last_measurement_timestamp(
            device_uid=device_uid, last_measurement_timestamp=new_last_measurement_timestamp)

    def _get_last_measurement_timestamp(self, device_uid):
        """
        Retrieve last measurement timestamp for a particular device, falling
        back to the datastore (and finally 0) on a local-cache miss.

        :rtype: ``int``
        """
        last_measurement_timestamp = self._last_measurement_timestamps.get(device_uid, None)

        if not last_measurement_timestamp:
            name = self._get_datastore_key_name(device_uid=device_uid)
            value = self._sensor_service.get_value(name=name)
            self._last_measurement_timestamps[device_uid] = int(value) if value else 0

        return self._last_measurement_timestamps[device_uid]

    def _set_last_measurement_timestamp(self, device_uid, last_measurement_timestamp):
        """
        Store a last measurement timestamp for a particular device in the
        local cache and the datastore.
        """
        self._last_measurement_timestamps[device_uid] = last_measurement_timestamp

        name = self._get_datastore_key_name(device_uid=device_uid)
        # NOTE: the original implementation performed a get_value() here and
        # immediately overwrote the result -- that dead datastore read has
        # been removed.
        value = str(last_measurement_timestamp)
        self._sensor_service.set_value(name=name, value=value)

        return last_measurement_timestamp

    def _get_datastore_key_name(self, device_uid):
        """Return the per-device datastore key name."""
        name = self.DATASTORE_KEY_NAME + '.' + device_uid
        return name

    def _dispatch_trigger(self, device_uid, result):
        """Dispatch a measurements trigger enriched with the device name."""
        trigger = 'cubesensors.measurements'

        device_info = self._device_info_cache.get(device_uid, {})
        device_name = device_info.get('extra', {}).get('name', 'unknown')

        payload = {
            'device_uid': device_uid,
            'device_name': device_name,
            'measurements': result
        }
        self._sensor_service.dispatch(trigger=trigger, payload=payload)

    def _get_device_info(self, device_uid):
        """Return the device info dict for *device_uid* from the API."""
        response = self._session.get('%s/devices/%s' % (BASE_URL, device_uid))
        data = response.json()
        return data['device']

    def _get_measurements(self, device_uid):
        """
        Retrieve the current measurements for a particular device as a
        ``field name -> value`` dict, applying per-field value conversions.
        """
        response = self._session.get('%s/devices/%s/current' % (BASE_URL, device_uid))
        data = response.json()

        # The API returns parallel arrays: a list of field names and one row
        # of values in the same order.
        values = data['results'][0]
        field_list = data['field_list']

        result = {}
        for index, field_name in enumerate(field_list):
            value = values[index]

            convert_func = FIELD_CONVERT_FUNCS.get(field_name, None)
            if convert_func:
                value = convert_func(value=value)

            result[field_name] = value

        return result

    def _get_session(self):
        """Build an OAuth1 session from the pack credentials config."""
        session = OAuth1Session(consumer_key=self._config['consumer_key'],
                                consumer_secret=self._config['consumer_secret'],
                                access_token=self._config['access_token'],
                                access_token_secret=self._config['access_token_secret'])
        return session
| apache-2.0 |
IPMITMO/statan | coala-bears/bears/general/FilenameBear.py | 1 | 4829 | import os.path
from coalib.bears.LocalBear import LocalBear
from coalib.results.Diff import Diff
from coalib.results.Result import Result
from coalib.bearlib.naming_conventions import (
to_camelcase, to_kebabcase, to_pascalcase, to_snakecase, to_spacecase)
class FilenameBear(LocalBear):
    """Checks file names against a naming convention, a length limit and an
    optional prefix/suffix, yielding results that carry a rename diff."""

    LANGUAGES = {'All'}
    AUTHORS = {'The coala developers'}
    AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
    LICENSE = 'AGPL-3.0'

    # Maps a convention name to the conversion function that produces it.
    _naming_convention = {'camel': to_camelcase,
                          'kebab': to_kebabcase,
                          'pascal': to_pascalcase,
                          'snake': to_snakecase,
                          'space': to_spacecase}

    # Default convention per file extension, consulted in 'auto' mode.
    _language_naming_convention = {
        '.java': 'pascal',
        '.js': 'kebab',
        '.py': 'snake',
    }

    def run(self, filename, file,
            max_filename_length: int=260,
            file_naming_convention: str=None,
            ignore_uppercase_filenames: bool=True,
            filename_prefix: str='',
            filename_suffix: str=''):
        """
        Checks whether the filename follows a certain naming-convention.

        :param file_naming_convention:
            The naming-convention. Supported values are:
            - ``auto`` to guess the correct convention. Defaults to ``snake``
            if the correct convention cannot be guessed.
            - ``camel`` (``thisIsCamelCase``)
            - ``kebab`` (``this-is-kebab-case``)
            - ``pascal`` (``ThisIsPascalCase``)
            - ``snake`` (``this_is_snake_case``)
            - ``space`` (``This Is Space Case``)
        :param max_filename_length:
            Maximum filename length on both Windows and Unix-like systems.
        :param ignore_uppercase_filenames:
            Whether or not to ignore fully uppercase filenames completely,
            e.g. COPYING, LICENSE etc.
        :param filename_prefix:
            Check whether the filename uses a certain prefix.
            The file's extension is ignored.
        :param filename_suffix:
            Check whether the filename uses a certain suffix.
            The file's extension is ignored.
        """
        # Only the basename (without extension) is checked; the directory
        # part is preserved for the rename suggestion below.
        head, tail = os.path.split(filename)
        filename_without_extension, extension = os.path.splitext(tail)

        if file_naming_convention is None:
            self.warn('Please specify a file naming convention explicitly'
                      ' or use "auto".')
            file_naming_convention = 'auto'
        else:
            file_naming_convention = file_naming_convention.lower()

        if file_naming_convention == 'auto':
            if extension in self._language_naming_convention:
                file_naming_convention = self._language_naming_convention[
                    extension]
            else:
                self.warn('The file naming convention could not be guessed. '
                          'Using the default "snake" naming convention.')
                file_naming_convention = 'snake'

        messages = []

        try:
            new_name = self._naming_convention[file_naming_convention](
                filename_without_extension)
        except KeyError:
            # Unknown convention name: report and bail out without results.
            self.err('Invalid file-naming-convention provided: ' +
                     file_naming_convention)
            return

        if new_name != filename_without_extension:
            messages.append(
                'Filename does not follow {} naming-convention.'.format(
                    file_naming_convention))

        if not filename_without_extension.startswith(filename_prefix):
            new_name = filename_prefix + new_name
            messages.append(
                'Filename does not use the prefix {!r}.'.format(
                    filename_prefix))

        if not filename_without_extension.endswith(filename_suffix):
            new_name = new_name + filename_suffix
            messages.append(
                'Filename does not use the suffix {!r}.'.format(
                    filename_suffix))

        if len(filename) > max_filename_length:
            messages.append(
                'Filename is too long ({} > {}).'.format(
                    len(filename),
                    max_filename_length))

        if ignore_uppercase_filenames and filename_without_extension.isupper():
            # Fully uppercase names such as COPYING or LICENSE are
            # conventional and deliberately exempt from all checks.
            return

        if messages:
            # Offer the computed rename as a patch attached to the result.
            diff = Diff(file, rename=os.path.join(head, new_name + extension))
            message = ('\n'.join('- ' + mes for mes in messages)
                       if len(messages) > 1 else messages[0])

            yield Result(
                self,
                message,
                diff.affected_code(filename),
                diffs={filename: diff})
| mit |
dmr/validictory | validictory/tests/test_properties.py | 12 | 8736 | from unittest import TestCase
import validictory
class TestProperties(TestCase):
    """Validation of object 'properties', including a nested object."""

    props = {
        "prop01": {"type": "string"},
        "prop02": {"type": "number", "required": False},
        "prop03": {"type": "integer"},
        "prop04": {"type": "boolean"},
        "prop05": {
            "type": "object",
            "required": False,
            "properties": {
                "subprop01": {"type": "string"},
                "subprop02": {"type": "string", "required": True}
            }
        }
    }
    schema = {"type": "object", "properties": props}

    def _assert_valid(self, data):
        # Validation must succeed without raising.
        try:
            validictory.validate(data, self.schema)
        except ValueError as e:
            self.fail("Unexpected failure: %s" % e)

    def test_properties1(self):
        self._assert_valid({
            "prop01": "test",
            "prop02": 1.20,
            "prop03": 1,
            "prop04": True,
            "prop05": {
                "subprop01": "test",
                "subprop02": "test2",
            },
        })

    def test_properties2(self):
        # The optional prop02/prop05 may simply be absent.
        self._assert_valid({
            "prop01": "test",
            "prop02": 1.20,
            "prop03": 1,
            "prop04": True,
        })

    def test_properties3(self):
        # Missing required props (and a missing required subprop) must fail.
        bad = {
            "prop02": 1.60,
            "prop05": {
                "subprop01": "test"
            }
        }
        self.assertRaises(ValueError, validictory.validate, bad, self.schema)
class TestPatternProperties(TestCase):
    """Validation of 'patternProperties'."""

    schema = {'patternProperties': {'[abc]': {'type': 'boolean'}}}

    def _assert_valid(self, data, schema=None):
        # Validation must succeed without raising.
        try:
            validictory.validate(data, self.schema if schema is None else schema)
        except ValueError as e:
            self.fail("Unexpected failure: %s" % e)

    def test_patternproperties_pass(self):
        self._assert_valid({'a': True})

    def test_patternproperties_nonmatch(self):
        # Keys matching no pattern are unconstrained.
        self._assert_valid({'a': True, 'd': 'foo'})

    def test_patternproperties_nested(self):
        nested_schema = {'patternProperties': {'[abc]': {
            'patternProperties': {'[abc]': {'type': 'boolean'}}
        }}}
        self._assert_valid({'a': {'b': False}}, schema=nested_schema)

    def test_patternproperties_fail_multiple(self):
        data = {'a': True, 'b': False, 'c': 'foo'}
        self.assertRaises(ValueError, validictory.validate, data, self.schema)

    def test_patternproperties_fail(self):
        self.assertRaises(ValueError, validictory.validate, {'a': 12},
                          self.schema)

    def test_patternproperties_missing(self):
        # A pattern-required property inside an absent optional object must
        # not be demanded.
        schema = {'properties': {'patprops': {
            'required': False, 'type': 'object',
            'patternProperties': {'[abc]': {'required': True,
                                            'type': 'array'}}
        }}}
        self._assert_valid({'id': 1}, schema=schema)
class TestAdditionalProperties(TestCase):
    """Validation of 'additionalProperties' in all of its forms."""

    def _assert_valid(self, data, schema):
        # Validation must succeed without raising.
        try:
            validictory.validate(data, schema)
        except ValueError as e:
            self.fail("Unexpected failure: %s" % e)

    def test_no_properties(self):
        schema = {"additionalProperties": {"type": "integer"}}

        for value in (1, 89, 48, 32, 49, 42):
            self._assert_valid({"prop": value}, schema)

        # failures
        for value in (1.2, "bad", {"test": "blah"}, [32, 49], None, True):
            self.assertRaises(ValueError, validictory.validate,
                              {"prop": value}, schema)

    def test_with_properties(self):
        schema = {
            "properties": {
                "prop1": {"type": "integer"},
                "prop2": {"type": "string"}
            },
            "additionalProperties": {"type": ["string", "number"]}
        }

        for value in (1, "test", 48, "ok", 4.9, 42):
            self._assert_valid({
                "prop1": 123,
                "prop2": "this is prop2",
                "prop3": value,
            }, schema)

        # failures
        for value in ({"test": "blah"}, [32, 49], None, True):
            data = {
                "prop1": 123,
                "prop2": "this is prop2",
                "prop3": value,
            }
            self.assertRaises(ValueError, validictory.validate, data, schema)

    def test_true(self):
        # additionalProperties == True accepts any value.
        schema = {"additionalProperties": True}
        for value in (1.2, 1, {"test": "blah"}, [32, 49], None, True, "blah"):
            self._assert_valid({"prop": value}, schema)

    def test_false(self):
        # additionalProperties == False rejects every extra property.
        schema = {"additionalProperties": False}
        for value in ("bad", {"test": "blah"}, [32.42, 494242], None, True,
                      1.34):
            self.assertRaises(ValueError, validictory.validate,
                              {"prop": value}, schema)

    def test_false_with_type_string(self):
        schema = {
            "type": ["object", "string"],
            "properties": {
                "key": {"type": "string"}
            },
            "additionalProperties": False
        }

        for data in ("foobar", {'key': 'value'}):
            self._assert_valid(data, schema)

        # failures
        for data in (['foo', 'bar'], None, True, {'roses': 'red'}):
            self.assertRaises(ValueError, validictory.validate, data, schema)
class TestDependencies(TestCase):
    """Validation of 'dependencies' (single-name and list forms)."""

    props = {
        "prop01": {"type": "string", "required": False},
        "prop02": {"type": "number", "required": False,
                   "dependencies": "prop01"}
    }
    schema = {"type": "object", "properties": props}

    props_array = {
        "prop01": {"type": "string", "required": False},
        "prop02": {"type": "string", "required": False},
        "prop03": {"type": "number", "required": False,
                   "dependencies": ["prop01", "prop02"]}
    }
    schema_array = {"type": "object", "properties": props_array}

    def test_dependencies_pass(self):
        valid_cases = (
            ({}, self.schema),
            ({"prop01": "test"}, self.schema),
            ({"prop01": "test", "prop02": 2}, self.schema),
            ({"prop01": "a", "prop02": "b", "prop03": 7}, self.schema_array),
        )
        for data, schema in valid_cases:
            try:
                validictory.validate(data, schema)
            except ValueError as e:
                self.fail("Unexpected failure: %s" % e)

    def test_dependencies_fail(self):
        # prop02 without prop01 violates the single dependency; prop03
        # without prop02 violates the list dependency.
        self.assertRaises(ValueError, validictory.validate,
                          {"prop02": 2}, self.schema)
        self.assertRaises(ValueError, validictory.validate,
                          {"prop01": "x", "prop03": 7}, self.schema_array)
class TestRequired(TestCase):
    """Exercises 'required' together with the required_by_default option."""
    props = {
        "prop_def": {"type": "string"},
        "prop_opt": {"type": "number", "required": False},
        "prop_req": {"type": "boolean", "required": True},
    }
    schema = {"type": "object", "properties": props}
    def_and_req = {"prop_def": "test", "prop_req": False}
    req_only = {"prop_req": True}
    opt_only = {"prop_opt": 7}
    def test_required_pass(self):
        """Required props present -> valid; defaulted props may be
        omitted once required_by_default is switched off."""
        try:
            validictory.validate(self.def_and_req, self.schema)
            validictory.validate(self.req_only, self.schema,
                                 required_by_default=False)
        except ValueError as e:
            self.fail("Unexpected failure: %s" % e)
    def test_required_fail(self):
        """Explicitly required props can never be omitted; props relying
        on the default only fail when required_by_default is on."""
        for extra in ({}, {"required_by_default": False}):
            self.assertRaises(ValueError, validictory.validate,
                              self.opt_only, self.schema, **extra)
        self.assertRaises(ValueError, validictory.validate,
                          self.req_only, self.schema)
| mit |
kshmirko/sage-reader | convert/converter.py | 1 | 15858 | # -*- coding: utf-8 -*-
"""
Created on Wed Mar 19 10:55:22 2014
@author: Администратор
"""
import netCDF4 as nc
from datetime import datetime, timedelta
import numpy as np
from sage2.sageIItypes import _INDEX, _SPECITEM
import h5py as h5
class NCPack:
    """Incremental writer packing SAGE II profiles into a NetCDF file.

    The output layout (dimensions, variables, attributes) is described by
    the declarative tables below instead of ~190 near-identical
    ``createVariable`` calls; ``initNC`` materialises them.  ``add``
    appends the profiles of one ``(index, spec)`` record pair, with
    ``self.idx`` tracking the next free row along the unlimited 'time'
    dimension.
    """
    # (name, length) for every fixed dimension; 'time' is unlimited.
    _DIMENSIONS = (('L1', 140), ('L2', 100), ('L3', 80), ('L4', 70))
    # (name, dtype, dims, fill_value, ((attr, value), ...)) for every
    # output variable, in creation order; attribute order is preserved.
    _VAR_SPECS = (
        ('TP', 'f8', ('time',), -999.0,
         (('units', 'days since 1984-02-08'), ('calendar', 'gregorian'))),
        ('Lat', 'f4', ('time',), -999.0,
         (('units', 'Degrees North'), ('range', [-90.0, 90.0]))),
        ('Lon', 'f4', ('time',), -999.0,
         (('units', 'Degrees East'), ('range', [-180.0, 180.0]))),
        ('NMC_Pres', 'f4', ('time', 'L1'), -999.0, (('units', 'mb'),)),
        ('NMC_Temp', 'f4', ('time', 'L1'), -999.0, (('units', 'K'),)),
        ('NMC_Dens', 'f4', ('time', 'L1'), -999.0, (('units', 'cm^-3'),)),
        ('NMC_Dens_Err', 'i2', ('time', 'L1'), -999, (('units', '1000*%'),)),
        ('Trop_Height', 'f4', ('time',), -999.0, (('units', 'km'),)),
        ('O3', 'f4', ('time', 'L1'), -999.0,
         (('units', 'cm^-3'), ('alt_range', [0.5, 70.0]),
          ('description', 'O3 Density Profile'))),
        ('NO2', 'f4', ('time', 'L2'), -999.0,
         (('units', 'cm^-3'), ('alt_range', [0.5, 50.0]),
          ('description', 'NO2 Density Profile'))),
        ('H2O', 'f4', ('time', 'L2'), -999.0,
         (('units', 'ppp'), ('alt_range', [0.5, 50.0]),
          ('description', 'H2O Mixing Ratio Profile'))),
        ('Ext386', 'f4', ('time', 'L3'), -999.0,
         (('units', '1/km'), ('alt_range', [0.5, 40.0]),
          ('description', '386 nm Extinction'))),
        ('Ext452', 'f4', ('time', 'L3'), -999.0,
         (('units', '1/km'), ('alt_range', [0.5, 40.0]),
          ('description', '452 nm Extinction'))),
        ('Ext525', 'f4', ('time', 'L3'), -999.0,
         (('units', '1/km'), ('alt_range', [0.5, 40.0]),
          ('description', '525 nm Extinction'))),
        ('Ext1020', 'f4', ('time', 'L3'), -999.0,
         (('units', '1/km'), ('alt_range', [0.5, 40.0]),
          ('description', '1020 nm Extinction'))),
        ('Density', 'f4', ('time', 'L1'), -999.0,
         (('units', 'cm^-3'), ('alt_range', [0.5, 70.0]),
          ('description', 'Calculated density'))),
        ('SurfDen', 'f4', ('time', 'L3'), -999.0,
         (('units', 'um^2/cm^3'), ('alt_range', [0.5, 40.0]),
          ('description', 'Aerosol surface area density'))),
        ('Radius', 'f4', ('time', 'L3'), -999.0,
         (('units', 'um'), ('alt_range', [0.5, 40.0]),
          ('description', 'Aerosol effective radius'))),
        ('Dens_Mid_Atm', 'f4', ('time', 'L4'), -999.0,
         (('units', 'cm^-3'), ('alt_range', [40.5, 75.0]),
          ('description', 'Middle Atmosphere Density'))),
        # NOTE(review): the historical files mix '100*%' and '%*100' as
        # error units; both spellings are preserved verbatim below.
        ('O3_Err', 'i2', ('time', 'L1'), -999,
         (('units', '100*%'), ('alt_range', [0.5, 70.0]),
          ('description', 'O3 Density Profile Error'))),
        ('NO2_Err', 'i2', ('time', 'L2'), -999,
         (('units', '100*%'), ('alt_range', [0.5, 50.0]),
          ('description', 'NO2 Density Profile Error'))),
        ('H2O_Err', 'i2', ('time', 'L2'), -999,
         (('units', '100*%'), ('alt_range', [0.5, 50.0]),
          ('description', 'H2O Mixing Ratio Profile Error'))),
        ('Ext386_Err', 'i2', ('time', 'L3'), -999,
         (('units', '100*%'), ('alt_range', [0.5, 40.0]),
          ('description', '386 nm Extinction Error'))),
        ('Ext452_Err', 'i2', ('time', 'L3'), -999,
         (('units', '100*%'), ('alt_range', [0.5, 40.0]),
          ('description', '452 nm Extinction Error'))),
        ('Ext525_Err', 'i2', ('time', 'L3'), -999,
         (('units', '100*%'), ('alt_range', [0.5, 40.0]),
          ('description', '525 nm Extinction Error'))),
        ('Ext1020_Err', 'i2', ('time', 'L3'), -999,
         (('units', '100*%'), ('alt_range', [0.5, 40.0]),
          ('description', '1020 nm Extinction Error'))),
        ('Density_Err', 'i2', ('time', 'L1'), -999,
         (('units', '%*100'), ('alt_range', [0.5, 70.0]),
          ('description', 'Calculated density error'))),
        ('SurfDen_Err', 'i2', ('time', 'L3'), -999,
         (('units', '%*100'), ('alt_range', [0.5, 40.0]),
          ('description', 'Aerosol surface area density error'))),
        ('Radius_Err', 'i2', ('time', 'L3'), -999,
         (('units', '%*100'), ('alt_range', [0.5, 40.0]),
          ('description', 'Aerosol effective radius Error'))),
        ('Dens_Mid_Atm_Err', 'i2', ('time', 'L4'), -999,
         (('units', '%*100'), ('alt_range', [40.5, 75.0]),
          ('description', 'Middle Atmosphere Density Error'))),
    )
    # Per-profile (time x level) variables add() copies verbatim from
    # `spec`; scalar TP/Lat/Lon/Trop_Height are handled individually.
    _PROFILE_VARS = tuple(name for name, _, dims, _, _ in _VAR_SPECS
                          if len(dims) == 2)
    def __init__(self, path, format='NETCDF4', **kwargs):
        """Open `path` for writing and create the full variable layout.

        `format` and any extra keyword arguments are forwarded to
        netCDF4; the parameter keeps its historical name even though it
        shadows the builtin.
        """
        self.F = self.initNC(path, format, **kwargs)
        self.idx = 0                      # next free row along 'time'
        self.dtformat = '%Y%m%d%H%M%S'    # kept for callers; unused here
    def initNC(self, path, format, **kwargs):
        """Create the NetCDF dataset with all dimensions and variables.

        Extra keyword arguments (e.g. compression options) are passed to
        every createVariable() call.  Returns the open Dataset.
        """
        F = nc.Dataset(path, 'w', format)
        F.createDimension('time', None)   # unlimited record dimension
        for dim_name, size in self._DIMENSIONS:
            F.createDimension(dim_name, size)
        for var_name, dtype, dims, fill, attrs in self._VAR_SPECS:
            var = F.createVariable(var_name, dtype, dims,
                                   fill_value=fill, **kwargs)
            for attr, value in attrs:
                setattr(var, attr, value)
        return F
    def close(self):
        """Close the underlying dataset; the writer is unusable after."""
        self.F.close()
    def add(self, index, spec):
        """Append the profiles of one (index, spec) record pair.

        `index` is a SAGE II index record array (fields Num_Prof,
        YYYYMMDD, DayFrac, Lat, Lon); `spec` holds the per-profile data
        whose field names match the NetCDF variables created above.
        """
        vTP = self.F.variables['TP']
        num_prof = index['Num_Prof'][0]
        for i in range(num_prof):
            row = i + self.idx
            yyyy = np.floor(index['YYYYMMDD'][0][i] / 10000.0)
            day_frac = index['DayFrac'][0][i]
            if day_frac == -999.0:
                tp = day_frac   # fill value: no measurement time
            else:
                # DayFrac is a 1-based fractional day-of-year.
                tp = datetime(int(yyyy), 1, 1, 0, 0, 0) + \
                    timedelta(days=day_frac - 1)
            # NOTE(review): as in the original code date2num() is also
            # applied to the -999.0 fill value -- confirm netCDF4
            # tolerates the non-datetime input in that case.
            tp = nc.date2num(tp, vTP.units, vTP.calendar)
            vTP[row] = tp
            self.F.variables['Lat'][row] = index['Lat'][0][i]
            self.F.variables['Lon'][row] = index['Lon'][0][i]
            self.F.variables['Trop_Height'][row] = spec['Trop_Height'][i]
            for var_name in self._PROFILE_VARS:
                self.F.variables[var_name][row, :] = spec[var_name][i, :]
        self.F.sync()   # flush after every input file so partial runs persist
        self.idx += num_prof
        print("Num_Prof = %d" % num_prof)
def saveash5(filename, index, spec):
    """Write the SAGE II index and spectra record arrays to an HDF5 file."""
    out = h5.File(filename, 'w')
    index_ds = out.create_dataset('index', (1,), dtype=_INDEX,
                                  compression="gzip", compression_opts=7)
    index_ds[...] = index
    spec_ds = out.create_dataset('spec', shape=spec.shape, maxshape=(None,),
                                 dtype=_SPECITEM, compression="gzip",
                                 compression_opts=7)
    print(spec.shape)
    # Copy the spectra one record at a time.
    for row, record in enumerate(spec):
        spec_ds[row] = record
    out.close()
| gpl-3.0 |
kybriainfotech/iSocioCRM | addons/google_calendar/__openerp__.py | 299 | 1671 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2012 OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP addon manifest: evaluated as a single dict literal.
{
    'name': 'Google Calendar',
    'version': '1.0',
    'category': 'Tools',
    'description': """
The module adds the possibility to synchronize Google Calendar with OpenERP
===========================================================================
""",
    'author': 'OpenERP SA',
    'website': 'https://www.odoo.com/page/crm',
    # Addons that must be installed before this one.
    'depends': ['google_account', 'calendar'],
    # Client-side QWeb templates to bundle.
    'qweb': ['static/src/xml/*.xml'],
    # Data files loaded (in order) at install/update time.
    'data': [
        'res_config_view.xml',
        'security/ir.model.access.csv',
        'views/google_calendar.xml',
        'views/res_users.xml',
        'google_calendar.xml',
    ],
    'demo': [],
    'installable': True,
    # Not auto-installed when its dependencies are present.
    'auto_install': False,
}
| agpl-3.0 |
WSCU/crazyflie_ros | src/cfclient/ui/dialogs/connectiondialogue.py | 22 | 3978 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Dialogue that lists available Crazyflies, lets user choose which to connect to.
"""
__author__ = 'Bitcraze AB'
# Fix: this module defines ConnectDialogue (no "ion"); __all__ previously
# advertised the non-existent name 'ConnectionDialogue', which breaks
# "from ... import *" with an ImportError.
__all__ = ['ConnectDialogue']
import sys
from PyQt4 import QtGui, uic
from PyQt4.QtCore import pyqtSignal, pyqtSlot, QThread
import cflib.crtp
# Load the Qt Designer .ui file at import time; loadUiType returns the
# (generated form class, Qt base class) pair that ConnectDialogue below
# uses for multiple inheritance.
(connect_widget_class,
 connect_widget_base_class) = (uic.loadUiType(sys.path[0] +
                               '/cfclient/ui/dialogs/connectiondialogue.ui'))
class ConnectDialogue(QtGui.QWidget, connect_widget_class):
    """Dialog listing available Crazyflie radio interfaces.

    Scanning runs on a background ScannerThread so the UI stays
    responsive; the chosen interface URI is reported via
    requestConnectionSignal.
    """
    # Outgoing signal for connecting a Crazyflie
    requestConnectionSignal = pyqtSignal(str)
    def __init__(self, *args):
        super(ConnectDialogue, self).__init__(*args)
        self.setupUi(self)
        self.scanner = ScannerThread()
        self.scanner.start()
        # Connect signals to slots
        self.connectButton.clicked.connect(self.openConnection)
        self.scanButton.clicked.connect(self.rescan)
        self.cancelButton.clicked.connect(self.cancel)
        self.interfaceList.itemDoubleClicked.connect(self.interfaceSelected)
        self.scanner.interfaceFoundSignal.connect(self.foundInterfaces)
        self.box = None
        self.available_interfaces = []
    def rescan(self):
        """Disable all buttons and scan signals from Crazyflies."""
        self.interfaceList.clear()
        self.interfaceList.addItem("Scanning...")
        self.scanButton.setEnabled(False)
        self.cancelButton.setEnabled(False)
        self.connectButton.setEnabled(False)
        # Queued connection: the scan itself runs on the worker thread.
        self.scanner.scanSignal.emit()
    def foundInterfaces(self, interfaces):
        """
        Add found interfaces to list and enable buttons in UI.
        """
        self.interfaceList.clear()
        self.available_interfaces = interfaces
        # Each entry is (uri, description); show the description if any.
        for i in interfaces:
            if (len(i[1]) > 0):
                self.interfaceList.addItem("%s - %s" % (i[0], i[1]))
            else:
                self.interfaceList.addItem(i[0])
        if len(interfaces) > 0:
            self.interfaceList.setCurrentRow(0)
            self.connectButton.setEnabled(True)
        self.cancelButton.setEnabled(True)
        self.scanButton.setEnabled(True)
    def interfaceSelected(self, listItem):
        # Emit the URI of the row currently selected, then close the
        # dialog (listItem itself is unused).
        self.requestConnectionSignal.emit(
            self.available_interfaces[self.interfaceList.currentRow()][0])
        self.close()
    def openConnection(self):
        # "Connect" button: same path as double-clicking an entry.
        self.interfaceSelected(self.interfaceList.currentItem())
    def cancel(self):
        # "Cancel" button: dismiss without emitting a connection request.
        self.close()
    def showEvent(self, ev):
        # Kick off a fresh scan every time the dialog becomes visible.
        self.rescan()
class ScannerThread(QThread):
    """Worker thread that scans for Crazyflie radio interfaces."""
    # Emitted by the GUI side to request a new scan.
    scanSignal = pyqtSignal()
    # Emitted with the list of (uri, description) pairs once a scan ends.
    interfaceFoundSignal = pyqtSignal(object)
    def __init__(self):
        QThread.__init__(self)
        # NOTE(review): calling moveToThread(self) on the QThread object
        # itself is a known Qt anti-pattern, but it is what routes the
        # queued scanSignal connection onto this worker thread here --
        # confirm behaviour before changing.
        self.moveToThread(self)
        self.scanSignal.connect(self.scan)
    @pyqtSlot()
    def scan(self):
        # scan_interfaces() blocks; running it here keeps the UI responsive.
        self.interfaceFoundSignal.emit(cflib.crtp.scan_interfaces())
| gpl-2.0 |
Edraak/edraak-platform | common/lib/xmodule/setup.py | 5 | 3180 | from setuptools import find_packages, setup
# Legacy XModule entry points, each "tag = module:DescriptorClass".
# Registered under both 'xblock.v1' and 'xmodule.v1' in setup() below.
XMODULES = [
    "book = xmodule.backcompat_module:TranslateCustomTagDescriptor",
    "chapter = xmodule.seq_module:SectionDescriptor",
    "conditional = xmodule.conditional_module:ConditionalDescriptor",
    "course = xmodule.course_module:CourseDescriptor",
    "customtag = xmodule.template_module:CustomTagDescriptor",
    "discuss = xmodule.backcompat_module:TranslateCustomTagDescriptor",
    "html = xmodule.html_module:HtmlDescriptor",
    "image = xmodule.backcompat_module:TranslateCustomTagDescriptor",
    "library_content = xmodule.library_content_module:LibraryContentDescriptor",
    "error = xmodule.error_module:ErrorDescriptor",
    "poll_question = xmodule.poll_module:PollDescriptor",
    "problem = xmodule.capa_module:CapaDescriptor",
    "problemset = xmodule.seq_module:SequenceDescriptor",
    "randomize = xmodule.randomize_module:RandomizeDescriptor",
    "split_test = xmodule.split_test_module:SplitTestDescriptor",
    "section = xmodule.backcompat_module:SemanticSectionDescriptor",
    "sequential = xmodule.seq_module:SequenceDescriptor",
    "slides = xmodule.backcompat_module:TranslateCustomTagDescriptor",
    "video = xmodule.video_module:VideoDescriptor",
    "videoalpha = xmodule.video_module:VideoDescriptor",
    "videodev = xmodule.backcompat_module:TranslateCustomTagDescriptor",
    "videosequence = xmodule.seq_module:SequenceDescriptor",
    "course_info = xmodule.html_module:CourseInfoDescriptor",
    "static_tab = xmodule.html_module:StaticTabDescriptor",
    "custom_tag_template = xmodule.raw_module:RawDescriptor",
    "about = xmodule.html_module:AboutDescriptor",
    "annotatable = xmodule.annotatable_module:AnnotatableDescriptor",
    "textannotation = xmodule.textannotation_module:TextAnnotationDescriptor",
    "videoannotation = xmodule.videoannotation_module:VideoAnnotationDescriptor",
    "imageannotation = xmodule.imageannotation_module:ImageAnnotationDescriptor",
    "word_cloud = xmodule.word_cloud_module:WordCloudDescriptor",
    "hidden = xmodule.hidden_module:HiddenDescriptor",
    "raw = xmodule.raw_module:RawDescriptor",
    "lti = xmodule.lti_module:LTIDescriptor",
]
# Pure XBlocks, registered only under 'xblock.v1'.
XBLOCKS = [
    "library = xmodule.library_root_xblock:LibraryRoot",
    "vertical = xmodule.vertical_block:VerticalBlock",
    "wrapper = xmodule.wrapper_module:WrapperBlock",
]
# XBlock asides, registered under 'xblock_asides.v1'.
XBLOCKS_ASIDES = [
    'tagging_aside = cms.lib.xblock.tagging:StructuredTagsAside',
]
# Package metadata; the entry_points below are how the platform discovers
# every XModule/XBlock implementation at runtime.
setup(
    name="XModule",
    version="0.1.1",
    packages=find_packages(exclude=["tests"]),
    install_requires=[
        'setuptools',
        'docopt',
        'capa',
        'path.py',
        'webob',
        'edx-opaque-keys>=0.4.0,<1.0.0',
    ],
    package_data={
        'xmodule': ['js/module/*'],
    },
    # See https://setuptools.readthedocs.io/en/latest/setuptools.html#dynamic-discovery-of-services-and-plugins
    # for a description of entry_points
    entry_points={
        'xblock.v1': XMODULES + XBLOCKS,
        'xmodule.v1': XMODULES,
        'xblock_asides.v1': XBLOCKS_ASIDES,
        'console_scripts': [
            'xmodule_assets = xmodule.static_content:main',
        ],
    },
)
| agpl-3.0 |
an7oine/WinVHS | Cygwin/lib/python2.7/xml/dom/xmlbuilder.py | 239 | 12337 | """Implementation of the DOM Level 3 'LS-Load' feature."""
import copy
import xml.dom
from xml.dom.NodeFilter import NodeFilter
__all__ = ["DOMBuilder", "DOMEntityResolver", "DOMInputSource"]
class Options:
    """Features object that has variables set for each DOMBuilder feature.
    The DOMBuilder class uses an instance of this class to pass settings to
    the ExpatBuilder class.
    """
    # Note that the DOMBuilder class in LoadSave constrains which of these
    # values can be set using the DOM Level 3 LoadSave feature.
    # Parser behaviour toggles (defaults mirror the LS spec defaults).
    namespaces = 1
    namespace_declarations = True
    validation = False
    external_parameter_entities = True
    external_general_entities = True
    external_dtd_subset = True
    validate_if_schema = False
    validate = False
    datatype_normalization = False
    create_entity_ref_nodes = True
    entities = True
    whitespace_in_element_content = True
    cdata_sections = True
    comments = True
    charset_overrides_xml_encoding = True
    infoset = False
    supported_mediatypes_only = False
    # Per-parse hooks copied in from the DOMBuilder before each parse.
    errorHandler = None
    filter = None
class DOMBuilder:
    """Synchronous document builder from the DOM Level 3 'LS-Load' feature.

    Features are configured via setFeature()/getFeature(); parsing is
    delegated to xml.dom.expatbuilder with the accumulated Options.
    """
    entityResolver = None
    errorHandler = None
    filter = None
    # Context actions accepted by parseWithContext().
    ACTION_REPLACE = 1
    ACTION_APPEND_AS_CHILDREN = 2
    ACTION_INSERT_AFTER = 3
    ACTION_INSERT_BEFORE = 4
    _legal_actions = (ACTION_REPLACE, ACTION_APPEND_AS_CHILDREN,
                      ACTION_INSERT_AFTER, ACTION_INSERT_BEFORE)
    def __init__(self):
        # Per-builder feature state, mutated by setFeature().
        self._options = Options()
    def _get_entityResolver(self):
        return self.entityResolver
    def _set_entityResolver(self, entityResolver):
        self.entityResolver = entityResolver
    def _get_errorHandler(self):
        return self.errorHandler
    def _set_errorHandler(self, errorHandler):
        self.errorHandler = errorHandler
    def _get_filter(self):
        return self.filter
    def _set_filter(self, filter):
        self.filter = filter
    def setFeature(self, name, state):
        """Turn feature `name` on/off; raises NotSupportedErr for a known
        feature whose requested state cannot be set, NotFoundErr for an
        unknown feature."""
        if self.supportsFeature(name):
            state = state and 1 or 0
            try:
                settings = self._settings[(_name_xform(name), state)]
            except KeyError:
                raise xml.dom.NotSupportedErr(
                    "unsupported feature: %r" % (name,))
            else:
                for name, value in settings:
                    setattr(self._options, name, value)
        else:
            raise xml.dom.NotFoundErr("unknown feature: " + repr(name))
    def supportsFeature(self, name):
        """Return true if `name` maps to an attribute on Options."""
        return hasattr(self._options, _name_xform(name))
    def canSetFeature(self, name, state):
        """Return true if setFeature(name, state) would succeed."""
        key = (_name_xform(name), state and 1 or 0)
        return key in self._settings
    # This dictionary maps from (feature,value) to a list of
    # (option,value) pairs that should be set on the Options object.
    # If a (feature,value) setting is not in this dictionary, it is
    # not supported by the DOMBuilder.
    #
    _settings = {
        ("namespace_declarations", 0): [
            ("namespace_declarations", 0)],
        ("namespace_declarations", 1): [
            ("namespace_declarations", 1)],
        ("validation", 0): [
            ("validation", 0)],
        ("external_general_entities", 0): [
            ("external_general_entities", 0)],
        ("external_general_entities", 1): [
            ("external_general_entities", 1)],
        ("external_parameter_entities", 0): [
            ("external_parameter_entities", 0)],
        ("external_parameter_entities", 1): [
            ("external_parameter_entities", 1)],
        ("validate_if_schema", 0): [
            ("validate_if_schema", 0)],
        ("create_entity_ref_nodes", 0): [
            ("create_entity_ref_nodes", 0)],
        ("create_entity_ref_nodes", 1): [
            ("create_entity_ref_nodes", 1)],
        ("entities", 0): [
            ("create_entity_ref_nodes", 0),
            ("entities", 0)],
        ("entities", 1): [
            ("entities", 1)],
        ("whitespace_in_element_content", 0): [
            ("whitespace_in_element_content", 0)],
        ("whitespace_in_element_content", 1): [
            ("whitespace_in_element_content", 1)],
        ("cdata_sections", 0): [
            ("cdata_sections", 0)],
        ("cdata_sections", 1): [
            ("cdata_sections", 1)],
        ("comments", 0): [
            ("comments", 0)],
        ("comments", 1): [
            ("comments", 1)],
        ("charset_overrides_xml_encoding", 0): [
            ("charset_overrides_xml_encoding", 0)],
        ("charset_overrides_xml_encoding", 1): [
            ("charset_overrides_xml_encoding", 1)],
        ("infoset", 0): [],
        ("infoset", 1): [
            ("namespace_declarations", 0),
            ("validate_if_schema", 0),
            ("create_entity_ref_nodes", 0),
            ("entities", 0),
            ("cdata_sections", 0),
            ("datatype_normalization", 1),
            ("whitespace_in_element_content", 1),
            ("comments", 1),
            ("charset_overrides_xml_encoding", 1)],
        ("supported_mediatypes_only", 0): [
            ("supported_mediatypes_only", 0)],
        ("namespaces", 0): [
            ("namespaces", 0)],
        ("namespaces", 1): [
            ("namespaces", 1)],
    }
    def getFeature(self, name):
        """Return the current value of feature `name`; "infoset" is
        computed from its component options rather than stored."""
        xname = _name_xform(name)
        try:
            return getattr(self._options, xname)
        except AttributeError:
            if name == "infoset":
                options = self._options
                return (options.datatype_normalization
                        and options.whitespace_in_element_content
                        and options.comments
                        and options.charset_overrides_xml_encoding
                        and not (options.namespace_declarations
                                 or options.validate_if_schema
                                 or options.create_entity_ref_nodes
                                 or options.entities
                                 or options.cdata_sections))
            raise xml.dom.NotFoundErr("feature %s not known" % repr(name))
    def parseURI(self, uri):
        """Resolve `uri` to an input source and parse it."""
        if self.entityResolver:
            input = self.entityResolver.resolveEntity(None, uri)
        else:
            input = DOMEntityResolver().resolveEntity(None, uri)
        return self.parse(input)
    def parse(self, input):
        """Parse a DOMInputSource and return the resulting Document."""
        options = copy.copy(self._options)
        options.filter = self.filter
        options.errorHandler = self.errorHandler
        fp = input.byteStream
        # Fall back to fetching the systemId when no stream was supplied.
        if fp is None and options.systemId:
            import urllib2
            fp = urllib2.urlopen(input.systemId)
        return self._parse_bytestream(fp, options)
    def parseWithContext(self, input, cnode, action):
        """Parse into an existing document at `cnode` (unimplemented)."""
        if action not in self._legal_actions:
            raise ValueError("not a legal action")
        raise NotImplementedError("Haven't written this yet...")
    def _parse_bytestream(self, stream, options):
        # Delegate the actual parsing to the expat-based builder.
        import xml.dom.expatbuilder
        builder = xml.dom.expatbuilder.makeBuilder(options)
        return builder.parseFile(stream)
def _name_xform(name):
return name.lower().replace('-', '_')
class DOMEntityResolver(object):
    """Default resolver: fetches a systemId via urllib2 and wraps the
    result in a DOMInputSource (byte stream + encoding + base URI)."""
    __slots__ = '_opener',
    def resolveEntity(self, publicId, systemId):
        assert systemId is not None
        source = DOMInputSource()
        source.publicId = publicId
        source.systemId = systemId
        source.byteStream = self._get_opener().open(systemId)
        # determine the encoding if the transport provided it
        source.encoding = self._guess_media_encoding(source)
        # determine the base URI if we can
        import posixpath, urlparse
        parts = urlparse.urlparse(systemId)
        scheme, netloc, path, params, query, fragment = parts
        # XXX should we check the scheme here as well?
        if path and not path.endswith("/"):
            path = posixpath.dirname(path) + "/"
        parts = scheme, netloc, path, params, query, fragment
        source.baseURI = urlparse.urlunparse(parts)
        return source
    def _get_opener(self):
        # Lazily create and cache the opener in the single slot.
        try:
            return self._opener
        except AttributeError:
            self._opener = self._create_opener()
            return self._opener
    def _create_opener(self):
        import urllib2
        return urllib2.build_opener()
    def _guess_media_encoding(self, source):
        # Pull the charset parameter out of the Content-Type header, if any.
        info = source.byteStream.info()
        if "Content-Type" in info:
            for param in info.getplist():
                if param.startswith("charset="):
                    return param.split("=", 1)[1].lower()
class DOMInputSource(object):
    """Describes where a parser should read a document from.

    Mirrors the DOM Level 3 LS DOMInputSource interface: normally one of
    byteStream / characterStream / stringData is populated, with
    encoding, publicId, systemId and baseURI as supporting metadata.
    """
    __slots__ = ('byteStream', 'characterStream', 'stringData',
                 'encoding', 'publicId', 'systemId', 'baseURI')
    def __init__(self):
        # Every attribute starts out unset; callers fill in what they have.
        for slot in DOMInputSource.__slots__:
            setattr(self, slot, None)
    # The accessor pairs below exist because the W3C IDL defines these as
    # attributes; Python code may also use the slots directly.
    def _get_byteStream(self):
        return self.byteStream
    def _set_byteStream(self, stream):
        self.byteStream = stream
    def _get_characterStream(self):
        return self.characterStream
    def _set_characterStream(self, stream):
        self.characterStream = stream
    def _get_stringData(self):
        return self.stringData
    def _set_stringData(self, text):
        self.stringData = text
    def _get_encoding(self):
        return self.encoding
    def _set_encoding(self, name):
        self.encoding = name
    def _get_publicId(self):
        return self.publicId
    def _set_publicId(self, ident):
        self.publicId = ident
    def _get_systemId(self):
        return self.systemId
    def _set_systemId(self, ident):
        self.systemId = ident
    def _get_baseURI(self):
        return self.baseURI
    def _set_baseURI(self, uri):
        self.baseURI = uri
class DOMBuilderFilter:
    """Element filter which can be used to tailor construction of
    a DOM instance.
    """
    # Concrete filters only need to override acceptNode() and/or
    # startContainer(); this base implementation accepts everything.
    # Return codes understood by the builder:
    (FILTER_ACCEPT,
     FILTER_REJECT,
     FILTER_SKIP,
     FILTER_INTERRUPT) = range(1, 5)
    whatToShow = NodeFilter.SHOW_ALL
    def _get_whatToShow(self):
        return self.whatToShow
    def acceptNode(self, element):
        return self.FILTER_ACCEPT
    def startContainer(self, element):
        return self.FILTER_ACCEPT
# NodeFilter was only needed for the class attribute above.
del NodeFilter
class DocumentLS:
    """Mixin to create documents that conform to the load/save spec."""
    # NOTE(review): 'async' became a reserved word in Python 3.7; this
    # module is Python 2-only code and cannot be imported unchanged on
    # modern interpreters -- renaming would break the public attribute.
    async = False
    def _get_async(self):
        return False
    def _set_async(self, async):
        # Only synchronous loading is implemented.
        if async:
            raise xml.dom.NotSupportedErr(
                "asynchronous document loading is not supported")
    def abort(self):
        # What does it mean to "clear" a document? Does the
        # documentElement disappear?
        raise NotImplementedError(
            "haven't figured out what this means yet")
    def load(self, uri):
        raise NotImplementedError("haven't written this yet")
    def loadXML(self, source):
        raise NotImplementedError("haven't written this yet")
    def saveXML(self, snode):
        # Serialize `snode` (or the whole document when None); the node
        # must belong to this document.
        if snode is None:
            snode = self
        elif snode.ownerDocument is not self:
            raise xml.dom.WrongDocumentErr()
        return snode.toxml()
class DOMImplementationLS:
    """Factory half of the DOM Level 3 Load/Save interface."""
    MODE_SYNCHRONOUS = 1
    MODE_ASYNCHRONOUS = 2
    def createDOMBuilder(self, mode, schemaType):
        """Return a synchronous DOMBuilder; schemas and asynchronous
        builders are not supported."""
        if schemaType is not None:
            raise xml.dom.NotSupportedErr(
                "schemaType not yet supported")
        if mode == self.MODE_ASYNCHRONOUS:
            raise xml.dom.NotSupportedErr(
                "asynchronous builders are not supported")
        if mode != self.MODE_SYNCHRONOUS:
            raise ValueError("unknown value for mode")
        return DOMBuilder()
    def createDOMWriter(self):
        raise NotImplementedError(
            "the writer interface hasn't been written yet!")
    def createDOMInputSource(self):
        """Return an empty DOMInputSource for the caller to populate."""
        return DOMInputSource()
| gpl-3.0 |
flwh/KK_mt6589_iq451 | prebuilts/python/linux-x86/2.7.5/lib/python2.7/test/test_contains.py | 136 | 3153 | from test.test_support import have_unicode, run_unittest
import unittest
class base_set:
    """Minimal container holding a single element, with no `in` support."""
    def __init__(self, el):
        self.el = el
class set(base_set):
    """Supports `in` via __contains__ (equality with the stored element)."""
    def __contains__(self, el):
        return el == self.el
class seq(base_set):
    """Supports `in` via the legacy __getitem__ iteration protocol."""
    def __getitem__(self, n):
        return (self.el,)[n]
class TestContains(unittest.TestCase):
    """Regression tests for the `in` / `not in` operators (Python 2)."""
    def test_common_tests(self):
        # base_set supports neither protocol, so `in` must raise.
        a = base_set(1)
        b = set(1)
        c = seq(1)
        self.assertIn(1, b)
        self.assertNotIn(0, b)
        self.assertIn(1, c)
        self.assertNotIn(0, c)
        self.assertRaises(TypeError, lambda: 1 in a)
        self.assertRaises(TypeError, lambda: 1 not in a)
        # test char in string
        self.assertIn('c', 'abc')
        self.assertNotIn('d', 'abc')
        self.assertIn('', '')
        self.assertIn('', 'abc')
        self.assertRaises(TypeError, lambda: None in 'abc')
    if have_unicode:
        def test_char_in_unicode(self):
            # str/unicode mixtures must behave like pure-unicode checks.
            self.assertIn('c', unicode('abc'))
            self.assertNotIn('d', unicode('abc'))
            self.assertIn('', unicode(''))
            self.assertIn(unicode(''), '')
            self.assertIn(unicode(''), unicode(''))
            self.assertIn('', unicode('abc'))
            self.assertIn(unicode(''), 'abc')
            self.assertIn(unicode(''), unicode('abc'))
            self.assertRaises(TypeError, lambda: None in unicode('abc'))
            # test Unicode char in Unicode
            self.assertIn(unicode('c'), unicode('abc'))
            self.assertNotIn(unicode('d'), unicode('abc'))
            # test Unicode char in string
            self.assertIn(unicode('c'), 'abc')
            self.assertNotIn(unicode('d'), 'abc')
    def test_builtin_sequence_types(self):
        # a collection of tests on builtin sequence types
        a = range(10)
        for i in a:
            self.assertIn(i, a)
        self.assertNotIn(16, a)
        self.assertNotIn(a, a)
        a = tuple(a)
        for i in a:
            self.assertIn(i, a)
        self.assertNotIn(16, a)
        self.assertNotIn(a, a)
        class Deviant1:
            """Behaves strangely when compared
            This class is designed to make sure that the contains code
            works when the list is modified during the check.
            """
            aList = range(15)
            def __cmp__(self, other):
                # Shrink the list mid-scan when element 12 is reached.
                if other == 12:
                    self.aList.remove(12)
                    self.aList.remove(13)
                    self.aList.remove(14)
                    return 1
        self.assertNotIn(Deviant1(), Deviant1.aList)
        class Deviant2:
            """Behaves strangely when compared
            This class raises an exception during comparison.  That in
            turn causes the comparison to fail with a TypeError.
            """
            def __cmp__(self, other):
                if other == 4:
                    raise RuntimeError, "gotcha"
        try:
            self.assertNotIn(Deviant2(), a)
        except TypeError:
            pass
def test_main():
    """Entry point used by the regression-test framework: run TestContains."""
    run_unittest(TestContains)


if __name__ == '__main__':
    test_main()
| gpl-2.0 |
PKRoma/isapi-wsgi | setup.py | 5 | 1211 |
# Prefer setuptools, but fall back to plain distutils when it is unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
# Use the 2to3-aware build commands so the package can be installed under
# Python 3 from Python 2 sources; fall back to the plain commands otherwise.
try:
    from distutils.command.build_py import build_py_2to3 as build_py
except ImportError:
    from distutils.command.build_py import build_py
try:
    from distutils.command.build_scripts import build_scripts_2to3 as build_scripts
except ImportError:
    from distutils.command.build_scripts import build_scripts

# Load the reStructuredText document from docs; ABOUT.txt plus CHANGES.txt
# together form the long description shown on PyPI.
import os
this_dir = os.path.dirname(__file__)
doc_file = os.path.join(this_dir, 'docs', 'ABOUT.txt')
long_description = open(doc_file).read()
changes = os.path.join(this_dir, 'CHANGES.txt')
long_description += open(changes).read()

# PyPI will use rst2html from docutils to convert ABOUT.txt to HTML
setup(name='isapi_wsgi',
      version='0.4.2',
      description='A WSGI handler for ISAPI',
      long_description=long_description,
      author='Mark Rees',
      author_email='mark dot john dot rees at gmail dot com',
      url = "http://code.google.com/p/isapi-wsgi",
      license='MIT',
      py_modules=['isapi_wsgi'],
      packages=['tests'],
      cmdclass = {'build_py':build_py,
                  'build_scripts':build_scripts,
                  },
      zip_safe=False,
      )
| mit |
bplancher/odoo | addons/website_blog/models/website_blog.py | 12 | 12111 | # -*- coding: utf-8 -*-
from datetime import datetime
import lxml
import random
from openerp import tools
from openerp import SUPERUSER_ID
from openerp.addons.website.models.website import slug
from openerp.osv import osv, fields
from openerp.tools.translate import _
from openerp.tools.translate import html_translate
class Blog(osv.Model):
    """A blog: a named, SEO-enabled container for blog posts."""
    _name = 'blog.blog'
    _description = 'Blogs'
    _inherit = ['mail.thread', 'website.seo.metadata']
    _order = 'name'
    _columns = {
        'name': fields.char('Blog Name', required=True, translate=True),
        'subtitle': fields.char('Blog Subtitle', translate=True),
    }

    def all_tags(self, cr, uid, ids, min_limit=1, context=None):
        """Return ``{blog_id: blog.tag browse records}`` for the given blogs.

        Only tags attached to at least ``min_limit`` posts of a blog are
        kept; the SQL orders tags by decreasing usage count, so the most
        used tags come first in each list.
        """
        req = """
            SELECT
                p.blog_id, count(*), r.blog_tag_id
            FROM
                blog_post_blog_tag_rel r
                    join blog_post p on r.blog_post_id=p.id
            WHERE
                p.blog_id in %s
            GROUP BY
                p.blog_id,
                r.blog_tag_id
            ORDER BY
                count(*) DESC
        """
        cr.execute(req, [tuple(ids)])
        # pre-seed so every requested blog appears in the result, even tagless
        tag_by_blog = {i: [] for i in ids}
        for blog_id, freq, tag_id in cr.fetchall():
            if freq >= min_limit:
                tag_by_blog[blog_id].append(tag_id)

        tag_obj = self.pool['blog.tag']
        for blog_id in tag_by_blog:
            tag_by_blog[blog_id] = tag_obj.browse(cr, uid, tag_by_blog[blog_id], context=context)
        return tag_by_blog
class BlogTag(osv.Model):
    """A tag attachable to any number of blog posts; names are unique."""
    _name = 'blog.tag'
    _description = 'Blog Tag'
    _inherit = ['website.seo.metadata']
    _order = 'name'
    _columns = {
        'name': fields.char('Name', required=True),
        'post_ids': fields.many2many(
            'blog.post', string='Posts',
        ),
    }
    _sql_constraints = [
        ('name_uniq', 'unique (name)', "Tag name already exists !"),
    ]
class BlogPost(osv.Model):
    """A blog post: HTML content with tags, comments and website publication."""
    _name = "blog.post"
    _description = "Blog Post"
    _inherit = ['mail.thread', 'website.seo.metadata', 'website.published.mixin']
    _order = 'id DESC'
    # read access is enough to post a comment (posts are public documents)
    _mail_post_access = 'read'

    def _website_url(self, cr, uid, ids, field_name, arg, context=None):
        """Compute each post's public URL under its blog (slug-based)."""
        res = super(BlogPost, self)._website_url(cr, uid, ids, field_name, arg, context=context)
        for blog_post in self.browse(cr, uid, ids, context=context):
            res[blog_post.id] = "/blog/%s/post/%s" % (slug(blog_post.blog_id), slug(blog_post))
        return res

    def _compute_ranking(self, cr, uid, ids, name, arg, context=None):
        """Popularity score: visit count with a random jitter factor,
        decayed by post age in days (minimum divisor 3 avoids a huge boost
        for brand-new posts and division by zero)."""
        res = {}
        for blog_post in self.browse(cr, uid, ids, context=context):
            age = datetime.now() - datetime.strptime(blog_post.create_date, tools.DEFAULT_SERVER_DATETIME_FORMAT)
            res[blog_post.id] = blog_post.visits * (0.5+random.random()) / max(3, age.days)
        return res

    def _default_content(self, cr, uid, context=None):
        # default editable body shown in the website editor for a new post
        return ''' <div class="container">
        <section class="mt16 mb16">
            <p class="o_default_snippet_text">''' + _("Start writing here...") + '''</p>
        </section>
    </div> '''

    _columns = {
        'name': fields.char('Title', required=True, translate=True),
        'subtitle': fields.char('Sub Title', translate=True),
        'author_id': fields.many2one('res.partner', 'Author'),
        # JSON string describing the cover rendering (see default below)
        'cover_properties': fields.text('Cover Properties'),
        'blog_id': fields.many2one(
            'blog.blog', 'Blog',
            required=True, ondelete='cascade',
        ),
        'tag_ids': fields.many2many(
            'blog.tag', string='Tags',
        ),
        # sanitize=False: the website editor produces trusted, rich HTML
        'content': fields.html('Content', translate=html_translate, sanitize=False),
        # comments posted from the website, anchored to paragraphs via 'path'
        'website_message_ids': fields.one2many(
            'mail.message', 'res_id',
            domain=lambda self: [
                '&', '&', ('model', '=', self._name), ('message_type', '=', 'comment'), ('path', '=', False)
            ],
            string='Website Messages',
            help="Website communication history",
        ),
        # creation / update stuff
        'create_date': fields.datetime(
            'Created on',
            select=True, readonly=True,
        ),
        'create_uid': fields.many2one(
            'res.users', 'Author',
            select=True, readonly=True,
        ),
        'write_date': fields.datetime(
            'Last Modified on',
            select=True, readonly=True,
        ),
        'write_uid': fields.many2one(
            'res.users', 'Last Contributor',
            select=True, readonly=True,
        ),
        'author_avatar': fields.related(
            'author_id', 'image_small',
            string="Avatar", type="binary"),
        'visits': fields.integer('No of Views'),
        'ranking': fields.function(_compute_ranking, string='Ranking', type='float'),
    }

    _defaults = {
        'name': '',
        'content': _default_content,
        'cover_properties': '{"background-image": "none", "background-color": "oe_none", "opacity": "0.6", "resize_class": ""}',
        'author_id': lambda self, cr, uid, ctx=None: self.pool['res.users'].browse(cr, uid, uid, context=ctx).partner_id.id,
    }

    def html_tag_nodes(self, html, attribute=None, tags=None, context=None):
        """ Processing of html content to tag paragraphs and set them an unique
        ID.

        :return result: (html, mapping), where html is the updated html with IDs
                        and mapping is a list of (old_ID, new_ID), where old_ID
                        is None if the paragraph is a new one. """
        existing_attributes = []
        mapping = []
        if not html:
            return html, mapping
        if tags is None:
            tags = ['p']
        if attribute is None:
            attribute = 'data-unique-id'

        # form a tree
        root = lxml.html.fragment_fromstring(html, create_parent='div')
        if not len(root) and root.text is None and root.tail is None:
            return html, mapping

        # check all nodes, replace :
        # - img src -> check URL
        # - a href -> check URL
        for node in root.iter():
            if node.tag not in tags:
                continue
            ancestor_tags = [parent.tag for parent in node.iterancestors()]
            old_attribute = node.get(attribute)
            new_attribute = old_attribute
            if not new_attribute or (old_attribute in existing_attributes):
                # build an ID from the reversed ancestor path plus a random
                # counter, e.g. "div/section/counter_12345"
                if ancestor_tags:
                    ancestor_tags.pop()
                counter = random.randint(10000, 99999)
                ancestor_tags.append('counter_%s' % counter)
                new_attribute = '/'.join(reversed(ancestor_tags))
                node.set(attribute, new_attribute)
            existing_attributes.append(new_attribute)
            mapping.append((old_attribute, new_attribute))

        html = lxml.html.tostring(root, pretty_print=False, method='html')
        # this is ugly, but lxml/etree tostring want to put everything in a 'div' that breaks the editor -> remove that
        if html.startswith('<div>') and html.endswith('</div>'):
            html = html[5:-6]
        return html, mapping

    def _postproces_content(self, cr, uid, id, content=None, context=None):
        """Tag content paragraphs with 'data-chatter-id' anchors and delete
        website comments whose anchor paragraph no longer exists."""
        if content is None:
            content = self.browse(cr, uid, id, context=context).content
        if content is False:
            return content
        content, mapping = self.html_tag_nodes(content, attribute='data-chatter-id', tags=['p'], context=context)
        if id:  # not creating
            existing = [x[0] for x in mapping if x[0]]
            msg_ids = self.pool['mail.message'].search(cr, SUPERUSER_ID, [
                ('res_id', '=', id),
                ('model', '=', self._name),
                ('path', 'not in', existing),
                ('path', '!=', False)
            ], context=context)
            self.pool['mail.message'].unlink(cr, SUPERUSER_ID, msg_ids, context=context)

        return content

    def _check_for_publication(self, cr, uid, ids, vals, context=None):
        """When a post gets published, announce it on the blog's chatter.

        Returns True if an announcement was posted, False otherwise."""
        if vals.get('website_published'):
            base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
            for post in self.browse(cr, uid, ids, context=context):
                post.blog_id.message_post(
                    body='<p>%(post_publication)s <a href="%(base_url)s/blog/%(blog_slug)s/post/%(post_slug)s">%(post_link)s</a></p>' % {
                        'post_publication': _('A new post %s has been published on the %s blog.') % (post.name, post.blog_id.name),
                        'post_link': _('Click here to access the post.'),
                        'base_url': base_url,
                        'blog_slug': slug(post.blog_id),
                        'post_slug': slug(post),
                    },
                    subtype='website_blog.mt_blog_blog_published')
            return True
        return False

    def create(self, cr, uid, vals, context=None):
        """Create a post; anchor its paragraphs and announce if published."""
        if context is None:
            context = {}
        if 'content' in vals:
            vals['content'] = self._postproces_content(cr, uid, None, vals['content'], context=context)
        # mail_create_nolog: avoid the automatic "document created" log line
        create_context = dict(context, mail_create_nolog=True)
        post_id = super(BlogPost, self).create(cr, uid, vals, context=create_context)
        self._check_for_publication(cr, uid, [post_id], vals, context=context)
        return post_id

    def write(self, cr, uid, ids, vals, context=None):
        """Update posts; re-anchor paragraphs and announce new publications."""
        if isinstance(ids, (int, long)):
            ids = [ids]
        if 'content' in vals:
            # NOTE(review): anchors are recomputed from ids[0] only; presumably
            # content writes always target a single post -- confirm with callers
            vals['content'] = self._postproces_content(cr, uid, ids[0], vals['content'], context=context)
        result = super(BlogPost, self).write(cr, uid, ids, vals, context)
        self._check_for_publication(cr, uid, ids, vals, context=context)
        return result

    def get_access_action(self, cr, uid, ids, context=None):
        """ Override method that generated the link to access the document. Instead
        of the classic form view, redirect to the post on the website directly """
        post = self.browse(cr, uid, ids[0], context=context)
        return {
            'type': 'ir.actions.act_url',
            'url': '/blog/%s/post/%s' % (post.blog_id.id, post.id),
            'target': 'self',
            'res_id': post.id,
        }

    def _notification_get_recipient_groups(self, cr, uid, ids, message, recipients, context=None):
        """ Override to set the access button: everyone can see an access button
        on their notification email. It will lead on the website view of the
        post. """
        res = super(BlogPost, self)._notification_get_recipient_groups(cr, uid, ids, message, recipients, context=context)
        access_action = self._notification_link_helper(cr, uid, ids, 'view', model=message.model, res_id=message.res_id)
        for category, data in res.iteritems():
            res[category]['button_access'] = {'url': access_action, 'title': _('View Blog Post')}
        return res
class Website(osv.Model):
    _inherit = "website"

    def page_search_dependencies(self, cr, uid, view_id, context=None):
        """Extend page-dependency lookup: also report blog posts whose
        content links to the page backed by ``view_id``, so a user deleting
        the page is warned about posts that would end up with dead links."""
        dep = super(Website, self).page_search_dependencies(cr, uid, view_id, context=context)

        post_obj = self.pool.get('blog.post')
        view = self.pool.get('ir.ui.view').browse(cr, uid, view_id, context=context)
        # match both the short ("name") and fully-qualified ("website.name") URL
        name = view.key.replace("website.", "")
        fullname = "website.%s" % name

        dom = [
            '|', ('content', 'ilike', '/page/%s' % name), ('content', 'ilike', '/page/%s' % fullname)
        ]
        posts = post_obj.search(cr, uid, dom, context=context)
        if posts:
            page_key = _('Blog Post')
            dep[page_key] = []
            for p in post_obj.browse(cr, uid, posts, context=context):
                dep[page_key].append({
                    'text': _('Blog Post <b>%s</b> seems to have a link to this page !') % p.name,
                    'link': p.website_url
                })
        return dep
| agpl-3.0 |
bplancher/odoo | addons/stock/wizard/make_procurement_product.py | 10 | 4813 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.osv import fields, osv
class make_procurement(osv.osv_memory):
    """Wizard that creates and confirms a procurement order for one product."""
    _name = 'make.procurement'
    _description = 'Make Procurements'

    def onchange_product_id(self, cr, uid, ids, prod_id, context=None):
        """Sync unit of measure, template and variant count from the product."""
        product = self.pool.get('product.product').browse(cr, uid, prod_id, context=context)
        return {'value': {
            'uom_id': product.uom_id.id,
            'product_tmpl_id': product.product_tmpl_id.id,
            'product_variant_count': product.product_tmpl_id.product_variant_count
        }}

    _columns = {
        'qty': fields.float('Quantity', digits=(16,2), required=True),
        'res_model': fields.char('Res Model'),
        'product_id': fields.many2one('product.product', 'Product', required=True),
        'product_tmpl_id': fields.many2one('product.template', 'Template', required=True),
        'product_variant_count': fields.related('product_tmpl_id', 'product_variant_count', type='integer', string='Variant Number'),
        'uom_id': fields.many2one('product.uom', 'Unit of Measure', required=True),
        'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', required=True),
        'date_planned': fields.date('Planned Date', required=True),
        'route_ids': fields.many2many('stock.location.route', string='Preferred Routes'),
    }

    _defaults = {
        'date_planned': fields.date.context_today,
        'qty': lambda *args: 1.0,
    }

    def make_procurement(self, cr, uid, ids, context=None):
        """ Creates procurement order for selected product.

        The order is confirmed immediately and an act_window on the new
        procurement (form + tree views) is returned."""
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context).login
        wh_obj = self.pool.get('stock.warehouse')
        procurement_obj = self.pool.get('procurement.order')
        data_obj = self.pool.get('ir.model.data')

        for proc in self.browse(cr, uid, ids, context=context):
            wh = wh_obj.browse(cr, uid, proc.warehouse_id.id, context=context)
            procure_id = procurement_obj.create(cr, uid, {
                'name':'INT: '+str(user),
                'date_planned': proc.date_planned,
                'product_id': proc.product_id.id,
                'product_qty': proc.qty,
                'product_uom': proc.uom_id.id,
                'warehouse_id': proc.warehouse_id.id,
                'location_id': wh.lot_stock_id.id,
                'company_id': wh.company_id.id,
                'route_ids': [(6, 0, proc.route_ids.ids)],
            }, context=context)
            # fire the workflow so the procurement is confirmed right away
            procurement_obj.signal_workflow(cr, uid, [procure_id], 'button_confirm')

        id2 = data_obj._get_id(cr, uid, 'procurement', 'procurement_tree_view')
        id3 = data_obj._get_id(cr, uid, 'procurement', 'procurement_form_view')
        if id2:
            id2 = data_obj.browse(cr, uid, id2, context=context).res_id
        if id3:
            id3 = data_obj.browse(cr, uid, id3, context=context).res_id
        return {
            'view_type': 'form',
            'view_mode': 'tree,form',
            'res_model': 'procurement.order',
            'res_id' : procure_id,
            'views': [(id3,'form'),(id2,'tree')],
            'type': 'ir.actions.act_window',
        }

    def default_get(self, cr, uid, fields, context=None):
        """Pre-fill product (also when launched from a product template),
        its unit of measure and the first available warehouse."""
        if context is None:
            context = {}
        record_id = context.get('active_id')
        # launched from a template: pick its first variant
        if context.get('active_model') == 'product.template':
            product_ids = self.pool.get('product.product').search(cr, uid, [('product_tmpl_id', '=', context.get('active_id'))], context=context)
            if product_ids:
                record_id = product_ids[0]

        res = super(make_procurement, self).default_get(cr, uid, fields, context=context)

        if record_id and 'product_id' in fields:
            proxy = self.pool.get('product.product')
            product_ids = proxy.search(cr, uid, [('id', '=', record_id)], context=context, limit=1)
            if product_ids:
                product_id = product_ids[0]
                product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
                res['product_id'] = product.id
                res['uom_id'] = product.uom_id.id

        if 'warehouse_id' in fields:
            warehouse_id = self.pool.get('stock.warehouse').search(cr, uid, [], context=context)
            res['warehouse_id'] = warehouse_id[0] if warehouse_id else False

        return res

    def create(self, cr, uid, values, context=None):
        # apply the product onchange server-side so records created via RPC
        # get consistent uom/template values too
        if values.get('product_id'):
            values.update(self.onchange_product_id(cr, uid, None, values['product_id'], context=context)['value'])
        return super(make_procurement, self).create(cr, uid, values, context=context)
| agpl-3.0 |
synexxus/synnix | scripts/tracing/draw_functrace.py | 14676 | 3560 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulted trace is processed into a tree to produce a more human
view of the call stack by drawing textual but hierarchical tree of
calls. Only the functions's names and the the call time are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some times but not too much, the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
    """ This class provides a tree representation of the functions
    call stack. If a function has no parent in the kernel (interrupt,
    syscall, kernel thread...) then it is attached to a virtual parent
    called ROOT.
    """
    ROOT = None

    def __init__(self, func, time = None, parent = None):
        self._func = func
        self._time = time
        # orphan nodes hang off the shared virtual ROOT
        self._parent = parent if parent is not None else CallTree.ROOT
        self._children = []

    def calls(self, func, calltime):
        """ If a function calls another one, call this method to insert it
        into the tree at the appropriate place.
        @return: A reference to the newly created child node.
        """
        node = CallTree(func, calltime, self)
        self._children.append(node)
        return node

    def getParent(self, func):
        """ Retrieve the last parent of the current node that
        has the name given by func. If this function is not
        on a parent, then create it as new child of root
        @return: A reference to the parent.
        """
        node = self
        while node != CallTree.ROOT and node._func != func:
            node = node._parent
        if node == CallTree.ROOT:
            # not found anywhere up the chain: attach a fresh child to ROOT
            return CallTree.ROOT.calls(func, None)
        return node

    def __repr__(self):
        return self._render("", True)

    def _render(self, branch, lastChild):
        # one "----name (time)" line for this node, then the children
        if self._time is None:
            out = "%s----%s\n" % (branch, self._func)
        else:
            out = "%s----%s (%s)\n" % (branch, self._func, self._time)
        if lastChild:
            # the vertical bar stops here: blank it out for the subtree
            branch = branch[:-1] + " "
        last = len(self._children) - 1
        for index, child in enumerate(self._children):
            out += child._render(branch + " |", index == last)
        return out
class BrokenLineException(Exception):
    """Raised for a trace line truncated by the pipe breakage; the caller
    stops processing rather than parse the partial line.
    """
    pass


class CommentLineException(Exception):
    """Raised for comment lines (such as the header at the beginning of the
    trace file); the caller simply skips them.
    """
    pass
def parseLine(line):
    """Split one ftrace line into ``(calltime, callee, caller)``.

    Raises CommentLineException for comment lines and BrokenLineException
    for lines that do not match the expected trace format.
    """
    stripped = line.strip()
    if stripped.startswith("#"):
        raise CommentLineException
    # "<task info>] <time>: <callee> <-<caller>"
    match = re.match(r"[^]]+?\] +([0-9.]+): (\w+) <-(\w+)", stripped)
    if match is None:
        raise BrokenLineException
    return match.groups()
def main():
    """Read a raw function trace from stdin and print the drawn call tree."""
    CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
    tree = CallTree.ROOT

    for line in sys.stdin:
        try:
            calltime, callee, caller = parseLine(line)
        except BrokenLineException:
            # incomplete last line (broken pipe): stop processing
            break
        except CommentLineException:
            # trace header / comment line: skip it
            continue
        # re-anchor at the caller, then descend into the callee
        tree = tree.getParent(caller)
        tree = tree.calls(callee, calltime)

    print CallTree.ROOT


if __name__ == "__main__":
    main()
| gpl-2.0 |
elkingtowa/alphacoin | Bitcoin/ngcccbase-master/ngcccbase/tests/test_asset.py | 5 | 7711 | #!/usr/bin/env python
import unittest
from coloredcoinlib import OBColorDefinition, ColorSet, SimpleColorValue, IncompatibleTypesError
from coloredcoinlib.tests.test_colorset import MockColorMap
from coloredcoinlib.tests.test_txspec import MockUTXO
from ngcccbase.asset import (AssetDefinition, AdditiveAssetValue,
AssetTarget, AssetDefinitionManager)
class TestAssetDefinition(unittest.TestCase):
    """Unit tests for AssetDefinition, AdditiveAssetValue, AssetTarget and
    AssetDefinitionManager.

    Fix: the original class defined ``test_repr`` and ``test_sum`` twice
    each; Python keeps only the last definition of a name in a class body,
    so the first ``test_repr`` and the first ``test_sum`` were silently
    never run.  The AssetTarget-oriented duplicates are renamed to
    ``test_asset_target_sum`` and ``test_asset_target_repr`` so that all
    four tests now execute.
    """

    def setUp(self):
        # three assets over distinct color sets, plus values/targets reused
        # by the arithmetic tests below
        self.colormap = MockColorMap()
        d = self.colormap.d
        self.colorset0 = ColorSet(self.colormap, [''])
        self.colorset1 = ColorSet(self.colormap, [d[1], d[2]])
        self.colorset2 = ColorSet(self.colormap, [d[3]])
        self.def0 = {'monikers': ['bitcoin'],
                     'color_set': self.colorset0.get_data(),
                     'unit':100000000}
        self.def1 = {'monikers': ['test1'],
                     'color_set': self.colorset1.get_data(),
                     'unit':10}
        self.def2 = {'monikers': ['test2','test2alt'],
                     'color_set': self.colorset2.get_data(),
                     'unit':1}
        self.asset0 = AssetDefinition(self.colormap, self.def0)
        self.asset1 = AssetDefinition(self.colormap, self.def1)
        self.asset2 = AssetDefinition(self.colormap, self.def2)
        self.assetvalue0 = AdditiveAssetValue(asset=self.asset0, value=5)
        self.assetvalue1 = AdditiveAssetValue(asset=self.asset0, value=6)
        self.assetvalue2 = AdditiveAssetValue(asset=self.asset1, value=7)
        self.assettarget0 = AssetTarget('address0', self.assetvalue0)
        self.assettarget1 = AssetTarget('address1', self.assetvalue1)
        self.assettarget2 = AssetTarget('address2', self.assetvalue2)
        config = {'asset_definitions': [self.def1, self.def2]}
        self.adm = AssetDefinitionManager(self.colormap, config)

    def test_repr(self):
        """Asset reprs combine monikers and color descriptors."""
        self.assertEquals(self.asset0.__repr__(), "['bitcoin']: ['']")
        self.assertEquals(
            self.asset1.__repr__(),
            "['test1']: ['obc:color_desc_1:0:0', 'obc:color_desc_2:0:1']")
        self.assertEquals(self.asset2.__repr__(),
                          "['test2', 'test2alt']: ['obc:color_desc_3:0:1']")

    def test_get_monikers(self):
        self.assertEquals(self.asset0.get_monikers(), ['bitcoin'])
        self.assertEquals(self.asset1.get_monikers(), ['test1'])
        self.assertEquals(self.asset2.get_monikers(), ['test2', 'test2alt'])

    def test_get_color_set(self):
        self.assertTrue(self.asset0.get_color_set().equals(self.colorset0))
        self.assertTrue(self.asset1.get_color_set().equals(self.colorset1))
        self.assertTrue(self.asset2.get_color_set().equals(self.colorset2))

    def test_get_colorvalue(self):
        """A UTXO yields the color value matching the asset's color set; a
        UTXO lacking the asset's color raises."""
        g = {'txhash':'blah', 'height':1, 'outindex':0}
        cid0 = list(self.colorset0.color_id_set)[0]
        cdef0 = OBColorDefinition(cid0, g)
        cid1 = list(self.colorset1.color_id_set)[0]
        cdef1 = OBColorDefinition(cid1, g)
        cid2 = list(self.colorset2.color_id_set)[0]
        cdef2 = OBColorDefinition(cid2, g)
        cv0 = SimpleColorValue(colordef=cdef0, value=1)
        cv1 = SimpleColorValue(colordef=cdef1, value=2)
        cv2 = SimpleColorValue(colordef=cdef2, value=3)
        utxo = MockUTXO([cv0, cv1, cv2])
        self.assertEquals(self.asset0.get_colorvalue(utxo), cv0)
        self.assertEquals(self.asset1.get_colorvalue(utxo), cv1)
        self.assertEquals(self.asset2.get_colorvalue(utxo), cv2)
        utxo = MockUTXO([cv0, cv2])
        self.assertRaises(Exception, self.asset1.get_colorvalue, utxo)

    def test_parse_value(self):
        """parse_value scales a human-readable amount by the asset's unit."""
        self.assertEquals(self.asset0.parse_value(1.25), 125000000)
        self.assertEquals(self.asset1.parse_value(2), 20)
        self.assertEquals(self.asset2.parse_value(5), 5)

    def test_format_value(self):
        """format_value is the inverse of parse_value."""
        self.assertEquals(self.asset0.format_value(10000),'0.0001')
        self.assertEquals(self.asset1.format_value(2),'0.2')
        self.assertEquals(self.asset2.format_value(5),'5')

    def test_get_data(self):
        self.assertEquals(self.asset0.get_data(), self.def0)
        self.assertEquals(self.asset1.get_data(), self.def1)
        self.assertEquals(self.asset2.get_data(), self.def2)

    def test_register_asset_definition(self):
        # asset1 is already registered via the manager's config
        self.assertRaises(Exception, self.adm.register_asset_definition,
                          self.asset1)

    def test_add_asset_definition(self):
        colorset3 = ColorSet(self.colormap, [self.colormap.d[4]])
        def4 = {'monikers': ['test3'], 'color_set': colorset3.get_data()}
        self.adm.add_asset_definition(def4)
        self.assertTrue(self.adm.get_asset_by_moniker('test3').get_color_set()
                        .equals(colorset3))

    def test_all_assets(self):
        reprs = [asset.__repr__() for asset in self.adm.get_all_assets()]
        self.assertTrue(self.asset0.__repr__() in reprs)
        self.assertTrue(self.asset1.__repr__() in reprs)
        self.assertTrue(self.asset2.__repr__() in reprs)

    def test_get_asset_and_address(self):
        """'colorhash@address' resolves to (asset, address); a bare address
        resolves to the default (bitcoin) asset."""
        ch = self.asset1.get_color_set().get_color_hash()
        addr = '1CC3X2gu58d6wXUWMffpuzN9JAfTUWu4Kj'
        coloraddress = "%s@%s" % (ch, addr)
        asset, address = self.adm.get_asset_and_address(coloraddress)
        self.assertEquals(asset.__repr__(), self.asset1.__repr__())
        self.assertEquals(addr, address)
        asset, address = self.adm.get_asset_and_address(addr)
        self.assertEquals(asset.__repr__(), self.asset0.__repr__())
        self.assertEquals(addr, address)
        self.assertRaises(Exception, self.adm.get_asset_and_address, '0@0')

    def test_add(self):
        assetvalue3 = self.assetvalue0 + self.assetvalue1
        self.assertEqual(assetvalue3.get_value(), 11)
        # adding integer 0 is supported so sum() works
        assetvalue3 = 0 + self.assetvalue1
        self.assertEqual(assetvalue3.get_value(), 6)
        # values of different assets cannot be combined
        self.assertRaises(IncompatibleTypesError, self.assetvalue0.__add__,
                          self.assetvalue2)

    def test_iadd(self):
        assetvalue = self.assetvalue0.clone()
        assetvalue += self.assetvalue1
        self.assertEqual(assetvalue.get_value(), 11)

    def test_sub(self):
        assetvalue = self.assetvalue1 - self.assetvalue0
        self.assertEqual(assetvalue.get_value(), 1)
        assetvalue = self.assetvalue1 - 0
        self.assertEqual(assetvalue.get_value(), self.assetvalue1.get_value())

    def test_lt(self):
        self.assertTrue(self.assetvalue0 < self.assetvalue1)
        self.assertTrue(self.assetvalue1 > self.assetvalue0)
        self.assertTrue(self.assetvalue1 >= self.assetvalue0)
        self.assertTrue(self.assetvalue1 > 0)

    def test_sum(self):
        """AdditiveAssetValue.sum adds same-asset values."""
        assetvalues = [self.assetvalue0, self.assetvalue1,
                       AdditiveAssetValue(asset=self.asset0, value=3)]
        self.assertEqual(AdditiveAssetValue.sum(assetvalues).get_value(), 14)

    def test_get_asset(self):
        self.assertEqual(self.assettarget0.get_asset(), self.asset0)

    def test_get_value(self):
        self.assertEqual(self.assettarget0.get_value(), self.assetvalue0.get_value())

    def test_asset_target_sum(self):
        """AssetTarget.sum adds target values; an empty list sums to 0.

        (Renamed from a duplicate ``test_sum`` that shadowed the
        AdditiveAssetValue variant above.)"""
        assettargets = [self.assettarget0, self.assettarget1,
                        AssetTarget('address3',self.assettarget1)]
        self.assertEqual(AssetTarget.sum(assettargets).get_value(), 17)
        self.assertEqual(AssetTarget.sum([]), 0)

    def test_get_address(self):
        self.assertEqual(self.assettarget0.get_address(), 'address0')

    def test_asset_target_repr(self):
        """AssetTarget repr combines address and value.

        (Renamed from a duplicate ``test_repr`` that shadowed the
        AssetDefinition variant above.)"""
        self.assertEqual(self.assettarget0.__repr__(), 'address0: Asset Value: 5')


if __name__ == '__main__':
    unittest.main()
| mit |
witgo/spark | examples/src/main/python/mllib/latent_dirichlet_allocation_example.py | 27 | 2150 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pyspark import SparkContext
# $example on$
from pyspark.mllib.clustering import LDA, LDAModel
from pyspark.mllib.linalg import Vectors
# $example off$
if __name__ == "__main__":
    sc = SparkContext(appName="LatentDirichletAllocationExample")  # SparkContext

    # $example on$
    # Load and parse the data: one space-separated word-count vector per line
    data = sc.textFile("data/mllib/sample_lda_data.txt")
    parsedData = data.map(lambda line: Vectors.dense([float(x) for x in line.strip().split(' ')]))
    # Index documents with unique IDs ([id, vector] pairs, as LDA expects)
    corpus = parsedData.zipWithIndex().map(lambda x: [x[1], x[0]]).cache()

    # Cluster the documents into three topics using LDA
    ldaModel = LDA.train(corpus, k=3)

    # Output topics. Each is a distribution over words (matching word count vectors)
    print("Learned topics (as distributions over vocab of " + str(ldaModel.vocabSize())
          + " words):")
    topics = ldaModel.topicsMatrix()
    for topic in range(3):
        print("Topic " + str(topic) + ":")
        for word in range(0, ldaModel.vocabSize()):
            print(" " + str(topics[word][topic]))

    # Save and load model (round-trip demonstrates persistence)
    ldaModel.save(sc, "target/org/apache/spark/PythonLatentDirichletAllocationExample/LDAModel")
    sameModel = LDAModel\
        .load(sc, "target/org/apache/spark/PythonLatentDirichletAllocationExample/LDAModel")
    # $example off$

    sc.stop()
| apache-2.0 |
sgraham/nope | third_party/tlslite/tlslite/utils/openssl_rsakey.py | 200 | 4670 | # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""OpenSSL/M2Crypto RSA implementation."""
from .cryptomath import *
from .rsakey import *
from .python_rsakey import Python_RSAKey
#copied from M2Crypto.util.py, so when we load the local copy of m2
#we can still use it
def password_callback(v, prompt1='Enter private key passphrase:',
                      prompt2='Verify passphrase:'):
    """Prompt interactively for a passphrase.

    If v is true, the passphrase is asked for twice and re-prompted until
    both entries match.  Returns the passphrase, or None if the user
    aborts with Ctrl-C.
    """
    from getpass import getpass
    try:
        while True:
            first = getpass(prompt1)
            if not v:
                return first
            if getpass(prompt2) == first:
                return first
    except KeyboardInterrupt:
        return None
if m2cryptoLoaded:

    class OpenSSL_RSAKey(RSAKey):
        """RSA key implementation backed by OpenSSL via M2Crypto's m2 layer."""

        def __init__(self, n=0, e=0):
            self.rsa = None
            self._hasPrivateKey = False
            # n and e must be supplied together (public key) or not at all
            if (n and not e) or (e and not n):
                raise AssertionError()
            if n and e:
                self.rsa = m2.rsa_new()
                m2.rsa_set_n(self.rsa, numberToMPI(n))
                m2.rsa_set_e(self.rsa, numberToMPI(e))

        def __del__(self):
            # free the underlying OpenSSL RSA structure
            if self.rsa:
                m2.rsa_free(self.rsa)

        def __getattr__(self, name):
            # expose n and e as integer attributes, converted from OpenSSL MPIs
            if name == 'e':
                if not self.rsa:
                    return 0
                return mpiToNumber(m2.rsa_get_e(self.rsa))
            elif name == 'n':
                if not self.rsa:
                    return 0
                return mpiToNumber(m2.rsa_get_n(self.rsa))
            else:
                raise AttributeError

        def hasPrivateKey(self):
            """Return True if this key carries the private exponent."""
            return self._hasPrivateKey

        def _rawPrivateKeyOp(self, m):
            # raw (no-padding) private-key exponentiation: m -> c
            b = numberToByteArray(m, numBytes(self.n))
            s = m2.rsa_private_encrypt(self.rsa, bytes(b), m2.no_padding)
            c = bytesToNumber(bytearray(s))
            return c

        def _rawPublicKeyOp(self, c):
            # raw (no-padding) public-key exponentiation: c -> m
            b = numberToByteArray(c, numBytes(self.n))
            s = m2.rsa_public_decrypt(self.rsa, bytes(b), m2.no_padding)
            m = bytesToNumber(bytearray(s))
            return m

        def acceptsPassword(self): return True

        def write(self, password=None):
            """Serialize the key to PEM.

            Private keys may be encrypted with DES-EDE-CBC when a password
            is given; public keys never take a password."""
            bio = m2.bio_new(m2.bio_s_mem())
            if self._hasPrivateKey:
                if password:
                    def f(v): return password
                    m2.rsa_write_key(self.rsa, bio, m2.des_ede_cbc(), f)
                else:
                    def f(): pass
                    m2.rsa_write_key_no_cipher(self.rsa, bio, f)
            else:
                if password:
                    raise AssertionError()
                m2.rsa_write_pub_key(self.rsa, bio)
            s = m2.bio_read(bio, m2.bio_ctrl_pending(bio))
            m2.bio_free(bio)
            return s

        def generate(bits):
            """Generate a fresh key pair with public exponent 3."""
            key = OpenSSL_RSAKey()
            def f():pass
            key.rsa = m2.rsa_generate_key(bits, 3, f)
            key._hasPrivateKey = True
            return key
        generate = staticmethod(generate)

        def parse(s, passwordCallback=None):
            """Parse a PEM-encoded private or public RSA key from string s.

            passwordCallback, if given, supplies the passphrase for
            encrypted private keys; otherwise the interactive
            password_callback above is used."""
            # Skip forward to the first PEM header
            start = s.find("-----BEGIN ")
            if start == -1:
                raise SyntaxError()
            s = s[start:]

            if s.startswith("-----BEGIN "):
                if passwordCallback==None:
                    callback = password_callback
                else:
                    def f(v, prompt1=None, prompt2=None):
                        return passwordCallback()
                    callback = f
                bio = m2.bio_new(m2.bio_s_mem())
                try:
                    m2.bio_write(bio, s)
                    key = OpenSSL_RSAKey()
                    if s.startswith("-----BEGIN RSA PRIVATE KEY-----"):
                        def f():pass
                        key.rsa = m2.rsa_read_key(bio, callback)
                        if key.rsa == None:
                            raise SyntaxError()
                        key._hasPrivateKey = True
                    elif s.startswith("-----BEGIN PUBLIC KEY-----"):
                        key.rsa = m2.rsa_read_pub_key(bio)
                        if key.rsa == None:
                            raise SyntaxError()
                        key._hasPrivateKey = False
                    else:
                        raise SyntaxError()
                    return key
                finally:
                    m2.bio_free(bio)
            else:
                raise SyntaxError()
        parse = staticmethod(parse)
| bsd-3-clause |
geodynamics/pylith | tests/libtests/feassemble/data/obsolete/ElasticityImplicitLgDeform.py | 1 | 2844 | #!/usr/bin/env python
#
# ----------------------------------------------------------------------
#
# Brad T. Aagaard, U.S. Geological Survey
# Charles A. Williams, GNS Science
# Matthew G. Knepley, University of Chicago
#
# This code was developed as part of the Computational Infrastructure
# for Geodynamics (http://geodynamics.org).
#
# Copyright (c) 2010-2017 University of California, Davis
#
# See COPYING for license information.
#
# ----------------------------------------------------------------------
#
## @file tests/libtests/feassemble/data/ElasticityImplicitLgDeform.py
## @brief Python application for generating C++ data files for testing
## C++ ElasticityImplicitLgDeform object.
from ElasticityImplicit import ElasticityImplicit
import numpy
# ----------------------------------------------------------------------
# ElasticityImplicitLgDeform class
class ElasticityImplicitLgDeform(ElasticityImplicit):
    """
    Python application for generating C++ data files for testing C++
    ElasticityImplicitLgDeform object.
    """

    # PUBLIC METHODS /////////////////////////////////////////////////////

    def __init__(self, name="elasticityimplicitlgdeform"):
        """
        Constructor.
        """
        ElasticityImplicit.__init__(self, name)
        return

    # PRIVATE METHODS ////////////////////////////////////////////////////

    def calculateResidual(self, integrator):
        """
        Calculate contribution to residual of operator for integrator.

        {r} = -Sum(wt * [BL]^T [S])
        """
        import feutils
        residual = numpy.zeros( (integrator.spaceDim*integrator.numVertices),
                                dtype=numpy.float64)

        # Matrix of elasticity values
        D = integrator._calculateElasticityMat()

        for cell in integrator.cells:
            # accumulate this cell's contribution, then assemble it globally
            cellR = numpy.zeros( (integrator.spaceDim*integrator.numBasis, 1),
                                 dtype=numpy.float64)
            vertices = integrator.vertices[cell, :]
            (jacobian, jacobianInv, jacobianDet, basisDeriv) = \
                feutils.calculateJacobian(integrator.quadrature, vertices)
            # total displacement field at t+dt (current solution + increment)
            fieldTpdt = integrator.fieldT + integrator.fieldTIncr
            for iQuad in xrange(integrator.numQuadPts):
                wt = integrator.quadWts[iQuad] * jacobianDet[iQuad]
                # BL0: linear part, BL1: displacement-dependent part of the
                # strain-displacement matrix (large-deformation formulation)
                BL0 = integrator._calculateBasisDerivMatLinear0(basisDeriv, iQuad)
                BL1 = integrator._calculateBasisDerivMatLinear1(basisDeriv, iQuad, fieldTpdt)
                BL = BL0 + BL1
                strain = integrator._calculateStrain(basisDeriv, iQuad, fieldTpdt)
                S = numpy.dot(D, strain.transpose())
                cellR -= wt * numpy.dot(BL.transpose(), S)
            feutils.assembleVec(residual, cellR.flatten(), cell, integrator.spaceDim)
        return residual
# FACTORY //////////////////////////////////////////////////////////////
def formulation():
    """Factory used by the test harness to obtain the application object."""
    return ElasticityImplicitLgDeform()
# End of file
| mit |
mandli/multilayer-examples | 1d/setplot_shelf.py | 1 | 12827 |
"""
Set up the plot figures, axes, and items to be done for each frame.
This module is imported by the plotting routines and then the
function setplot is called to set the plot parameters.
"""
import numpy as np
# Plot customization
import matplotlib
# Markers and line widths
matplotlib.rcParams['lines.linewidth'] = 2.0
# BUG FIX: 'lines.markersize' was assigned twice (6, then 8); only the last
# assignment took effect, so keep the effective value and drop the dead one.
matplotlib.rcParams['lines.markersize'] = 8

# Font Sizes
matplotlib.rcParams['font.size'] = 16
matplotlib.rcParams['axes.labelsize'] = 15
matplotlib.rcParams['legend.fontsize'] = 12
matplotlib.rcParams['xtick.labelsize'] = 12
matplotlib.rcParams['ytick.labelsize'] = 12

# DPI of output images
matplotlib.rcParams['savefig.dpi'] = 300
# Need to do this after the above
import matplotlib.pyplot as mpl
from clawpack.pyclaw.solution import Solution
from multilayer.aux import bathy_index,kappa_index,wind_index
import multilayer.plot as plot
# matplotlib.rcParams['figure.figsize'] = [6.0,10.0]
def setplot(plotdata,eta=[0.0,-300.0],rho=[1025.0,1045.0],g=9.81,dry_tolerance=1e-3,bathy_ref_lines=[-30e3]):
    """
    Specify what is to be plotted at each frame.

    Input: plotdata, an instance of pyclaw.plotters.data.ClawPlotData.
    Output: a modified version of plotdata.

    Parameters:
        eta - undisturbed elevations [free surface, internal interface] (m)
        rho - densities of the [top, bottom] layers; used to convert the
            conserved quantities (rho*h, rho*h*u) back to depths and momenta
        g - gravitational acceleration; NOTE(review): not referenced in this
            function body, kept for interface compatibility
        dry_tolerance - minimum layer depth treated as "wet" when computing
            velocities (avoids division by ~0 in dry cells)
        bathy_ref_lines - x locations (m) where dashed reference lines are
            drawn

    NOTE(review): the list-valued defaults (eta, rho, bathy_ref_lines) are
    only read, never mutated, so the shared-mutable-default pitfall does not
    bite here.
    """
    # Fetch bathymetry once; it is time independent so frame 0 suffices.
    b = Solution(0,path=plotdata.outdir,read_aux=True).state.aux[bathy_index,:]

    # ========================================================================
    #  Plot variable functions
    def bathy(cd):
        # Time-independent bathymetry captured above.
        return b
    def kappa(cd):
        # Auxiliary field stored at kappa_index for the current frame.
        return Solution(cd.frameno,path=plotdata.outdir,read_aux=True).state.aux[kappa_index,:]
    def wind(cd):
        # Auxiliary wind field for the current frame.
        return Solution(cd.frameno,path=plotdata.outdir,read_aux=True).state.aux[wind_index,:]
    def h_1(cd):
        # Top layer depth: q[0] stores rho[0]*h_1.
        return cd.q[0,:] / rho[0]
    def h_2(cd):
        # Bottom layer depth: q[2] stores rho[1]*h_2.
        return cd.q[2,:] / rho[1]
    def eta_2(cd):
        # Elevation of the internal interface.
        return h_2(cd) + bathy(cd)
    def eta_1(cd):
        # Elevation of the free surface.
        return h_1(cd) + eta_2(cd)
    def u_1(cd):
        # Top layer velocity; left at zero wherever the layer is dry.
        index = np.nonzero(h_1(cd) > dry_tolerance)
        u_1 = np.zeros(h_1(cd).shape)
        u_1[index] = cd.q[1,index] / cd.q[0,index]
        return u_1
    def u_2(cd):
        # Bottom layer velocity; left at zero wherever the layer is dry.
        index = np.nonzero(h_2(cd) > dry_tolerance)
        u_2 = np.zeros(h_2(cd).shape)
        u_2[index] = cd.q[3,index] / cd.q[2,index]
        return u_2
    def hu_1(cd):
        # Top layer momentum per unit density, only where wet.
        index = np.nonzero(h_1(cd) > dry_tolerance)
        hu_1 = np.zeros(h_1(cd).shape)
        hu_1[index] = cd.q[1,index] / rho[0]
        return hu_1
    def hu_2(cd):
        # Bottom layer momentum per unit density, only where wet.
        index = np.nonzero(h_2(cd) > dry_tolerance)
        hu_2 = np.zeros(h_2(cd).shape)
        hu_2[index] = cd.q[3,index] / rho[1]
        return hu_2

    # ========================================================================
    #  Labels
    def add_bathy_dashes(current_data):
        # Vertical dashed line at each bathymetry reference location.
        for ref_line in bathy_ref_lines:
            mpl.plot([ref_line,ref_line],[-10,10],'k--')
    def add_horizontal_dashes(current_data):
        # Dashed zero line across the full domain.
        mpl.plot([-400e3,0.0],[0.0,0.0],'k--')
    def km_labels(current_data):
        r"""Flips xaxis and labels with km"""
        mpl.xlabel('km')
        locs,labels = mpl.xticks()
        labels = np.flipud(locs)/1.e3
        mpl.xticks(locs,labels)
    def time_labels(current_data):
        r"""Convert time to hours"""
        # Currently a no-op placeholder.
        pass

    # ========================================================================
    #  Limit Settings
    xlimits = [-400e3,0.0]
    ylimits_depth = [-4000.0,100.0]
    xlimits_zoomed = [-30e3-1e3,-30e3+1e3]
    ylimits_surface_zoomed = [eta[0] - 0.5,eta[0] + 0.5]
    ylimits_internal_zoomed = [eta[1] - 2.5,eta[1] + 2.5]
    ylimits_momentum = [-40,10]
    # ylimits_velocities = [-1.0,1.0]
    ylimits_velocities = [-0.04,0.04]
    ylimits_kappa = [0.0,1.2]

    # Create data object
    plotdata.clearfigures()  # clear any old figures,axes,items data

    # ========================================================================
    #  Function for doing depth drawing
    # ========================================================================
    def fill_items(plotaxes):
        # Add the standard set of fill/line items for a two-layer depth plot.
        # NOTE(review): returns None; callers that rebind their plotaxes to
        # this call rely on reassigning it afterwards.
        # Top layer
        plotitem = plotaxes.new_plotitem(plot_type='1d_fill_between')
        plotitem.plot_var = eta_1
        plotitem.plot_var2 = eta_2
        plotitem.color = plot.top_color
        plotitem.plotstyle = plot.surface_linestyle
        plotitem.show = True

        # Bottom Layer
        plotitem = plotaxes.new_plotitem(plot_type='1d_fill_between')
        plotitem.plot_var = eta_2
        plotitem.plot_var2 = bathy
        plotitem.color = plot.bottom_color
        plotitem.plotstyle = plot.internal_linestyle
        plotitem.show = True

        # Plot bathy
        plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
        plotitem.plot_var = bathy
        plotitem.plotstyle = plot.bathy_linestyle
        plotitem.show = True

        # Plot line in between layers
        plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
        plotitem.plot_var = eta_2
        plotitem.color = 'k'
        plotitem.plotstyle = plot.internal_linestyle
        plotitem.show = True

        # Plot line on top layer
        plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
        plotitem.plot_var = eta_1
        plotitem.color = 'k'
        plotitem.plotstyle = plot.surface_linestyle
        plotitem.show = True

    # ========================================================================
    #  Full Depths
    # ========================================================================
    plotfigure = plotdata.new_plotfigure(name='Full Depths',figno=102)
    plotfigure.show = True
    def bathy_axes(cd):
        km_labels(cd)
        mpl.xticks([-300e3,-200e3,-100e3,-30e3],[300,200,100,30],fontsize=15)
        mpl.xlabel('km')
    plotaxes = plotfigure.new_plotaxes()
    plotaxes.title = 'Full Depths'
    plotaxes.xlimits = xlimits
    plotaxes.ylimits = [-4100,100]
    plotaxes.afteraxes = bathy_axes
    fill_items(plotaxes)

    # ========================================================================
    #  Momentum
    # ========================================================================
    plotfigure = plotdata.new_plotfigure(name="momentum")
    plotfigure.show = True
    def momentum_axes(cd):
        km_labels(cd)
        mpl.xticks([-300e3,-200e3,-100e3,-30e3],[300,200,100,30],fontsize=15)
        mpl.xlabel('km')
        mpl.title("Layer Momenta at t = %4.1f s" % cd.t)
        mpl.legend(['Top Layer Momentum','Bottom Layer Momentum'],loc=4)
    def inset_momentum_axes(cd):
        # TODO: This plot does not refresh correctly, skip the inset
        fig = mpl.figure(cd.plotfigure.figno)
        axes = fig.add_subplot(111)

        # Plot main figure
        axes.plot(cd.x, hu_1(cd), 'b-')
        axes.plot(cd.x, hu_2(cd), 'k--')
        axes.set_xlim(xlimits)
        axes.set_ylim(ylimits_momentum)
        momentum_axes(cd)

        # Create inset plot
        from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes
        from mpl_toolkits.axes_grid1.inset_locator import mark_inset
        inset_axes = zoomed_inset_axes(axes, 0.5, loc=3)
        inset_axes.plot(cd.x, hu_1(cd), 'b-')
        inset_axes.plot(cd.x, hu_2(cd), 'k--')
        inset_axes.set_xticklabels([])
        inset_axes.set_yticklabels([])
        x_zoom = [-120e3,-30e3]
        y_zoom = [-10,10]
        inset_axes.set_xlim(x_zoom)
        inset_axes.set_ylim(y_zoom)
        mark_inset(axes, inset_axes, loc1=2, loc2=4, fc='none', ec="0.5")
        # mpl.ion()
        mpl.draw()
        # mpl.show()
    plotaxes = plotfigure.new_plotaxes()
    plotaxes.title = "Momentum"
    plotaxes.xlimits = xlimits
    plotaxes.ylimits = ylimits_momentum
    # plotaxes.afteraxes = inset_momentum_axes

    # Top layer
    plotitem = plotaxes.new_plotitem(plot_type='1d')
    plotitem.plot_var = hu_1
    plotitem.plotstyle = 'b-'
    plotitem.show = True

    # Bottom layer
    plotitem = plotaxes.new_plotitem(plot_type='1d')
    plotitem.plot_var = hu_2
    plotitem.plotstyle = 'k--'
    plotitem.show = True

    # ========================================================================
    #  Velocities with Kappa
    # ========================================================================
    include_kappa = False
    if include_kappa:
        plotfigure = plotdata.new_plotfigure(name='Velocity and Kappa',figno=14)
    else:
        plotfigure = plotdata.new_plotfigure(name='Velocities',figno=14)
    plotfigure.show = True
    # plotfigure.kwargs = {'figsize':(7,6)}
    def twin_axes(cd):
        fig = mpl.gcf()
        fig.clf()

        # Get x coordinate values
        x = cd.patch.dimensions[0].centers

        # Draw velocity and kappa plot
        vel_axes = fig.add_subplot(111)  # the velocity scale
        # kappa_axes = vel_axes.twinx()  # the kappa scale
        # NOTE(review): kappa_axes is used below when include_kappa is True,
        # but its creation is commented out - re-enable the line above before
        # switching include_kappa on or this raises NameError.

        # Bottom layer velocity
        bottom_layer = vel_axes.plot(x,u_2(cd),'k-',label="Bottom Layer Velocity")
        # Top Layer velocity
        top_layer = vel_axes.plot(x,u_1(cd),'b--',label="Top Layer velocity")

        if include_kappa:
            # Kappa
            kappa_line = kappa_axes.plot(x,kappa(cd),'r-.',label="Kappa")
            kappa_axes.plot(x,np.ones(x.shape),'r:')

        vel_axes.set_xlabel('km')
        mpl.xticks([-300e3,-200e3,-100e3,-30e3],[300,200,100,30],fontsize=15)

        for ref_line in bathy_ref_lines:
            vel_axes.plot([ref_line,ref_line],ylimits_velocities,'k:')
        if include_kappa:
            vel_axes.set_title("Layer Velocities and Kappa at t = %4.1f s" % cd.t)
        else:
            vel_axes.set_title("Layer Velocities at t = %4.1f s" % cd.t)
        vel_axes.set_ylabel('Velocities (m/s)')
        vel_axes.set_xlim(xlimits)
        vel_axes.set_ylim(ylimits_velocities)

        if include_kappa:
            plot.add_legend(vel_axes,'Kappa',location=3,color='r',linestyle='-.')
            kappa_axes.set_ylabel('Kappa')
            kappa_axes.set_ylim(ylimits_kappa)
        else:
            vel_axes.legend(loc=3)

        try:
            # Tighten vertical spacing; tolerate backends/versions where this
            # call is unavailable or fails.
            mpl.subplots_adjust(hspace=0.1)
        except:
            pass
    plotaxes = plotfigure.new_plotaxes()
    plotaxes.afteraxes = twin_axes

    # ========================================================================
    #  Combined Top and Internal Surface
    # ========================================================================
    plotfigure = plotdata.new_plotfigure(name='Zoomed Depths',figno=13)
    plotfigure.show = True
    plotfigure.kwargs = {'figsize':(6,6)}

    # Top surface
    plotaxes = plotfigure.new_plotaxes()
    plotaxes.axescmd = 'subplot(2,1,1)'
    plotaxes.title = 'Surfaces'
    plotaxes.xlimits = xlimits
    plotaxes.ylimits = ylimits_surface_zoomed
    def top_afteraxes(cd):
        # Blank out x tick labels on the upper subplot; the lower subplot
        # carries the shared km axis.
        mpl.xlabel('')
        locs,labels = mpl.xticks()
        # labels = np.flipud(locs)/1.e3
        labels = ['' for i in range(len(locs))]
        mpl.xticks(locs,labels)
        add_bathy_dashes(cd)
        mpl.ylabel('m')
        mpl.title("Surfaces t = %4.1f s" % cd.t)
    plotaxes.afteraxes = top_afteraxes
    # NOTE(review): fill_items returns None, so this rebinding is harmless
    # but misleading; plotaxes is reassigned immediately below.
    plotaxes = fill_items(plotaxes)

    # Internal surface
    plotaxes = plotfigure.new_plotaxes()
    plotaxes.axescmd = 'subplot(2,1,2)'
    plotaxes.title = ''
    plotaxes.xlimits = xlimits
    plotaxes.ylimits = ylimits_internal_zoomed
    def internal_surf_afteraxes(cd):
        km_labels(cd)
        mpl.title('')
        mpl.ylabel('m')
        mpl.subplots_adjust(hspace=0.05)
        mpl.xticks([-300e3,-200e3,-100e3,-30e3],[300,200,100,30],fontsize=15)
        mpl.xlabel('km')
    plotaxes.afteraxes = internal_surf_afteraxes
    plotaxes = fill_items(plotaxes)

    # Parameters used only when creating html and/or latex hardcopy
    # e.g., via pyclaw.plotters.frametools.printframes:
    plotdata.printfigs = True                # print figures
    plotdata.print_format = 'png'            # file format
    # plotdata.print_framenos = 'all'        # list of frames to print
    plotdata.print_framenos = [0,30,100,200,300]
    plotdata.print_fignos = 'all'            # list of figures to print
    plotdata.html = True                     # create html files of plots?
    plotdata.html_homelink = '../README.html'   # pointer for top of index
    plotdata.latex = True                    # create latex file of plots?
    plotdata.latex_figsperline = 2           # layout of plots
    plotdata.latex_framesperline = 1         # layout of plots
    plotdata.latex_makepdf = False           # also run pdflatex?

    return plotdata
| mit |
CLVsol/odoo_addons | clv_insurance_client/wkf/clv_insurance_client_wkf.py | 1 | 2636 | # -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp import models, fields, api
from datetime import *
class clv_insurance_client(models.Model):
    """Workflow state tracking for insurance clients.

    Adds a simple state machine (new/active/suspended/canceled) to the
    base model, with buttons performing the transitions.
    """
    _inherit = 'clv_insurance_client'

    # Timestamp of the most recent state transition; always written
    # together with ``state``.
    state_date = fields.Datetime("Status change date", required=True,
                                 readonly=True)
    state = fields.Selection([('new', 'New'),
                              ('active', 'Active'),
                              ('suspended', 'Suspended'),
                              ('canceled', 'Canceled'),
                              ], string='Status', default='new',
                             readonly=True, required=True, help="")

    _defaults = {
        'state_date': lambda *a: datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
    }

    @api.one
    def _set_state(self, new_state):
        """Move the record to *new_state* and stamp the transition time.

        Shared helper for the workflow buttons below; keeps the timestamp
        format and the paired state/state_date update in a single place.
        """
        self.state_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        self.state = new_state

    @api.one
    def button_new(self):
        """Workflow button: reset the record to the 'new' state."""
        self._set_state('new')

    @api.one
    def button_activate(self):
        """Workflow button: mark the record as 'active'."""
        self._set_state('active')

    @api.one
    def button_suspend(self):
        """Workflow button: mark the record as 'suspended'."""
        self._set_state('suspended')

    @api.one
    def button_cancel(self):
        """Workflow button: mark the record as 'canceled'."""
        self._set_state('canceled')
| agpl-3.0 |
iemejia/incubator-beam | sdks/python/apache_beam/internal/gcp/json_value.py | 3 | 6107 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""JSON conversion utility functions."""
# pytype: skip-file
from __future__ import absolute_import
from past.builtins import long
from past.builtins import unicode
from apache_beam.options.value_provider import ValueProvider
# Protect against environments where apitools library is not available.
# pylint: disable=wrong-import-order, wrong-import-position
try:
from apitools.base.py import extra_types
except ImportError:
extra_types = None
# pylint: enable=wrong-import-order, wrong-import-position
_MAXINT64 = (1 << 63) - 1
_MININT64 = -(1 << 63)
def get_typed_value_descriptor(obj):
  """For internal use only; no backwards-compatibility guarantees.

  Converts a basic type into a @type/value dictionary.

  Args:
    obj: A bytes, unicode, bool, int, or float to be converted.

  Returns:
    A dictionary containing the keys ``@type`` and ``value`` with the value
    for the ``@type`` of appropriate type.

  Raises:
    TypeError: if the Python object has a type that is not supported.
  """
  # Ordered checks: text first, then bool BEFORE int (bool is a subclass
  # of int), matching schema.org type names.
  type_checks = (
      ((bytes, unicode), 'Text'),
      (bool, 'Boolean'),
      (int, 'Integer'),
      (float, 'Float'),
  )
  for python_types, schema_name in type_checks:
    if isinstance(obj, python_types):
      return {'@type': 'http://schema.org/%s' % schema_name, 'value': obj}
  raise TypeError('Cannot get a type descriptor for %s.' % repr(obj))
def to_json_value(obj, with_type=False):
  """For internal use only; no backwards-compatibility guarantees.

  Converts Python objects into extra_types.JsonValue objects.

  Args:
    obj: Python object to be converted. Can be :data:`None`.
    with_type: If true then the basic types (``bytes``, ``unicode``, ``int``,
      ``float``, ``bool``) will be wrapped in ``@type:value`` dictionaries.
      Otherwise the straight value is encoded into a ``JsonValue``.

  Returns:
    A ``JsonValue`` object using ``JsonValue``, ``JsonArray`` and
    ``JsonObject`` types for the corresponding values, lists, or
    dictionaries.

  Raises:
    TypeError: if the Python object contains a type that is not
      supported.

  The types supported are ``str``, ``bool``, ``list``, ``tuple``, ``dict``,
  and ``None``. The Dataflow API requires JsonValue(s) in many places, and it
  is quite convenient to be able to specify these hierarchical objects using
  Python syntax.
  """
  if obj is None:
    return extra_types.JsonValue(is_null=True)
  elif isinstance(obj, (list, tuple)):
    # Containers are handled before the with_type branch below, so the
    # @type/value wrapping is only ever applied to scalar leaves.
    return extra_types.JsonValue(
        array_value=extra_types.JsonArray(
            entries=[to_json_value(o, with_type=with_type) for o in obj]))
  elif isinstance(obj, dict):
    json_object = extra_types.JsonObject()
    for k, v in obj.items():
      json_object.properties.append(
          extra_types.JsonObject.Property(
              key=k, value=to_json_value(v, with_type=with_type)))
    return extra_types.JsonValue(object_value=json_object)
  elif with_type:
    # Wrap the scalar in a {'@type': ..., 'value': ...} descriptor and encode
    # that dict; with_type=False on the recursive call prevents wrapping the
    # descriptor itself again.
    return to_json_value(get_typed_value_descriptor(obj), with_type=False)
  elif isinstance(obj, (str, unicode)):
    return extra_types.JsonValue(string_value=obj)
  elif isinstance(obj, bytes):
    # Only reached on Python 3 (on Python 2 ``str is bytes`` so the branch
    # above already matched); bytes are assumed to hold UTF-8 text.
    return extra_types.JsonValue(string_value=obj.decode('utf8'))
  elif isinstance(obj, bool):
    # bool is a subclass of int, so this check must precede the int branch.
    return extra_types.JsonValue(boolean_value=obj)
  elif isinstance(obj, (int, long)):
    # JSON integer_value is limited to a signed 64-bit range.
    if _MININT64 <= obj <= _MAXINT64:
      return extra_types.JsonValue(integer_value=obj)
    else:
      raise TypeError('Can not encode {} as a 64-bit integer'.format(obj))
  elif isinstance(obj, float):
    return extra_types.JsonValue(double_value=obj)
  elif isinstance(obj, ValueProvider):
    # Runtime value providers are resolved now when possible, else encoded
    # as null.
    if obj.is_accessible():
      return to_json_value(obj.get())
    return extra_types.JsonValue(is_null=True)
  else:
    raise TypeError('Cannot convert %s to a JSON value.' % repr(obj))
def from_json_value(v):
  """For internal use only; no backwards-compatibility guarantees.

  Converts ``extra_types.JsonValue`` objects into Python objects.

  Args:
    v: ``JsonValue`` object to be converted.

  Returns:
    A Python object structured as values, lists, and dictionaries
    corresponding to ``JsonValue``, ``JsonArray`` and ``JsonObject`` types.

  Raises:
    TypeError: if the ``JsonValue`` object contains a type that is not
      supported.

  The types supported are ``str``, ``bool``, ``list``, ``dict``, and
  ``None``. The Dataflow API returns JsonValue(s) in many places and it is
  quite convenient to be able to convert these hierarchical objects to much
  simpler Python objects.
  """
  if isinstance(v, extra_types.JsonValue):
    # Scalar fields are returned as-is; check in the same order as they are
    # populated by to_json_value.
    for attr_name in ('string_value', 'boolean_value', 'integer_value',
                      'double_value'):
      scalar = getattr(v, attr_name)
      if scalar is not None:
        return scalar
    if v.array_value is not None:
      return from_json_value(v.array_value)
    if v.object_value is not None:
      return from_json_value(v.object_value)
    if v.is_null:
      return None
    # A JsonValue with no field set falls through to the TypeError below.
  elif isinstance(v, extra_types.JsonArray):
    return [from_json_value(entry) for entry in v.entries]
  elif isinstance(v, extra_types.JsonObject):
    return {prop.key: from_json_value(prop.value) for prop in v.properties}
  raise TypeError('Cannot convert %s from a JSON value.' % repr(v))
| apache-2.0 |
catacgc/ansible-modules-core | cloud/linode/linode.py | 60 | 17838 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: linode
short_description: create / delete / stop / restart an instance in Linode Public Cloud
description:
- creates / deletes a Linode Public Cloud instance and optionally waits for it to be 'running'.
version_added: "1.3"
options:
state:
description:
- Indicate desired state of the resource
choices: ['present', 'active', 'started', 'absent', 'deleted', 'stopped', 'restarted']
default: present
api_key:
description:
- Linode API key
default: null
name:
description:
- Name to give the instance (alphanumeric, dashes, underscore)
- To keep sanity on the Linode Web Console, name is prepended with LinodeID_
default: null
type: string
linode_id:
description:
- Unique ID of a linode server
aliases: lid
default: null
type: integer
plan:
description:
- plan to use for the instance (Linode plan)
default: null
type: integer
payment_term:
description:
- payment term to use for the instance (payment term in months)
default: 1
type: integer
choices: [1, 12, 24]
password:
description:
- root password to apply to a new server (auto generated if missing)
default: null
type: string
ssh_pub_key:
description:
- SSH public key applied to root user
default: null
type: string
swap:
description:
- swap size in MB
default: 512
type: integer
distribution:
description:
- distribution to use for the instance (Linode Distribution)
default: null
type: integer
datacenter:
description:
- datacenter to create an instance in (Linode Datacenter)
default: null
type: integer
wait:
description:
- wait for the instance to be in state 'running' before returning
default: "no"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
requirements: [ "linode-python", "pycurl" ]
author: Vincent Viallet
notes:
- LINODE_API_KEY env variable can be used instead
'''
EXAMPLES = '''
# Create a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
plan: 1
datacenter: 2
distribution: 99
password: 'superSecureRootPassword'
ssh_pub_key: 'ssh-rsa qwerty'
swap: 768
wait: yes
wait_timeout: 600
state: present
# Ensure a running server (create if missing)
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
plan: 1
datacenter: 2
distribution: 99
password: 'superSecureRootPassword'
ssh_pub_key: 'ssh-rsa qwerty'
swap: 768
wait: yes
wait_timeout: 600
state: present
# Delete a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
state: absent
# Stop a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
state: stopped
# Reboot a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
state: restarted
'''
import sys
import time
import os
try:
import pycurl
except ImportError:
print("failed=True msg='pycurl required for this module'")
sys.exit(1)
try:
from linode import api as linode_api
except ImportError:
print("failed=True msg='linode-python required for this module'")
sys.exit(1)
def randompass():
    '''
    Generate a 24-character random password that complies with Linode
    requirements.

    The Linode API currently requires at least two of these four character
    classes: lower case letters, upper case letters, numbers, punctuation.
    We play it safe and include six characters from each class, shuffled.

    Returns a str of length 24.
    '''
    import random
    import string
    # SECURITY FIX: draw from the OS entropy pool via SystemRandom instead
    # of the seeded Mersenne Twister; passwords are security-sensitive and
    # the default PRNG output is predictable from its state.
    rng = random.SystemRandom()
    lower = ''.join(rng.choice(string.ascii_lowercase) for _ in range(6))
    upper = ''.join(rng.choice(string.ascii_uppercase) for _ in range(6))
    number = ''.join(rng.choice(string.digits) for _ in range(6))
    punct = ''.join(rng.choice(string.punctuation) for _ in range(6))
    pool = lower + upper + number + punct
    # Shuffle so the class blocks are interleaved.
    return ''.join(rng.sample(pool, len(pool)))
def getInstanceDetails(api, server):
    '''
    Build a report dictionary for one Linode server, populating its
    public/private IP addresses from the API.

    The first public address found also fills the top-level 'ipv4' and
    'fqdn' convenience keys.
    '''
    details = {'id': server['LINODEID'],
               'name': server['LABEL'],
               'public': [],
               'private': []}

    # Classify every address attached to this Linode.
    for address in api.linode_ip_list(LinodeId=server['LINODEID']):
        record = {'ipv4': address['IPADDRESS'],
                  'fqdn': address['RDNS_NAME'],
                  'ip_id': address['IPADDRESSID']}
        if address['ISPUBLIC']:
            if 'ipv4' not in details:
                details['ipv4'] = address['IPADDRESS']
                details['fqdn'] = address['RDNS_NAME']
            details['public'].append(record)
        else:
            details['private'].append(record)
    return details
def linodeServers(module, api, state, name, plan, distribution, datacenter, linode_id,
                  payment_term, password, ssh_pub_key, swap, wait, wait_timeout):
    '''
    Drive the requested ``state`` for a Linode server and exit the module.

    Depending on ``state`` this creates/boots, stops, restarts or deletes
    the server identified by ``linode_id``, then calls ``module.exit_json``
    (or ``module.fail_json`` on any error) - it never returns to the caller.
    '''
    instances = []
    changed = False
    new_server = False
    servers = []
    disks = []
    configs = []
    jobs = []

    # See if we can match an existing server details with the provided linode_id
    if linode_id:
        # For the moment we only consider linode_id as criteria for match
        # Later we can use more (size, name, etc.) and update existing
        servers = api.linode_list(LinodeId=linode_id)
        # Attempt to fetch details about disks and configs only if servers are
        # found with linode_id
        if servers:
            disks = api.linode_disk_list(LinodeId=linode_id)
            configs = api.linode_config_list(LinodeId=linode_id)

    # Act on the state
    if state in ('active', 'present', 'started'):
        # TODO: validate all the plan / distribution / datacenter are valid

        # Multi step process/validation:
        #  - need linode_id (entity)
        #  - need disk_id for linode_id - create disk from distrib
        #  - need config_id for linode_id - create config (need kernel)
        # Any create step triggers a job that need to be waited for.
        if not servers:
            # NOTE(review): eval(arg) only ever sees the literal names in the
            # tuple below, but a locals() lookup would be safer and clearer.
            for arg in ('name', 'plan', 'distribution', 'datacenter'):
                if not eval(arg):
                    module.fail_json(msg='%s is required for active state' % arg)

            # Create linode entity
            new_server = True
            try:
                res = api.linode_create(DatacenterID=datacenter, PlanID=plan,
                                        PaymentTerm=payment_term)
                linode_id = res['LinodeID']
                # Update linode Label to match name
                api.linode_update(LinodeId=linode_id, Label='%s_%s' % (linode_id, name))
                # Save server
                servers = api.linode_list(LinodeId=linode_id)
            except Exception, e:
                module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])

        if not disks:
            for arg in ('name', 'linode_id', 'distribution'):
                if not eval(arg):
                    module.fail_json(msg='%s is required for active state' % arg)

            # Create disks (1 from distrib, 1 for SWAP)
            new_server = True
            try:
                if not password:
                    # Password is required on creation, if not provided generate one
                    password = randompass()
                if not swap:
                    swap = 512
                # Create data disk
                size = servers[0]['TOTALHD'] - swap
                if ssh_pub_key:
                    res = api.linode_disk_createfromdistribution(
                        LinodeId=linode_id, DistributionID=distribution,
                        rootPass=password, rootSSHKey=ssh_pub_key,
                        Label='%s data disk (lid: %s)' % (name, linode_id), Size=size)
                else:
                    res = api.linode_disk_createfromdistribution(
                        LinodeId=linode_id, DistributionID=distribution, rootPass=password,
                        Label='%s data disk (lid: %s)' % (name, linode_id), Size=size)
                jobs.append(res['JobID'])
                # Create SWAP disk
                res = api.linode_disk_create(LinodeId=linode_id, Type='swap',
                                             Label='%s swap disk (lid: %s)' % (name, linode_id),
                                             Size=swap)
                jobs.append(res['JobID'])
            except Exception, e:
                # TODO: destroy linode ?
                module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])

        if not configs:
            for arg in ('name', 'linode_id', 'distribution'):
                if not eval(arg):
                    module.fail_json(msg='%s is required for active state' % arg)

            # Check architecture
            # NOTE(review): assumes the requested distribution id exists;
            # otherwise 'arch' stays unset and the loop below raises NameError.
            for distrib in api.avail_distributions():
                if distrib['DISTRIBUTIONID'] != distribution:
                    continue
                arch = '32'
                if distrib['IS64BIT']:
                    arch = '64'
                break

            # Get latest kernel matching arch
            for kernel in api.avail_kernels():
                if not kernel['LABEL'].startswith('Latest %s' % arch):
                    continue
                kernel_id = kernel['KERNELID']
                break

            # Get disk list; the ext3 (root) disk is forced to the front so it
            # becomes the boot device in the config profile.
            disks_id = []
            for disk in api.linode_disk_list(LinodeId=linode_id):
                if disk['TYPE'] == 'ext3':
                    disks_id.insert(0, str(disk['DISKID']))
                    continue
                disks_id.append(str(disk['DISKID']))
            # Trick to get the 9 items in the list
            while len(disks_id) < 9:
                disks_id.append('')
            disks_list = ','.join(disks_id)

            # Create config
            new_server = True
            try:
                api.linode_config_create(LinodeId=linode_id, KernelId=kernel_id,
                                         Disklist=disks_list, Label='%s config' % name)
                configs = api.linode_config_list(LinodeId=linode_id)
            except Exception, e:
                module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])

        # Start / Ensure servers are running
        for server in servers:
            # Refresh server state
            server = api.linode_list(LinodeId=server['LINODEID'])[0]
            # Ensure existing servers are up and running, boot if necessary
            if server['STATUS'] != 1:
                res = api.linode_boot(LinodeId=linode_id)
                jobs.append(res['JobID'])
                changed = True

            # wait here until the instances are up
            # NOTE(review): wait_timeout is rebound to an absolute deadline
            # here; if this loop ever handled several servers, subsequent
            # iterations would extend the deadline far into the future.
            wait_timeout = time.time() + wait_timeout
            while wait and wait_timeout > time.time():
                # refresh the server details
                server = api.linode_list(LinodeId=server['LINODEID'])[0]
                # status:
                #  -2: Boot failed
                #   1: Running
                if server['STATUS'] in (-2, 1):
                    break
                time.sleep(5)
            if wait and wait_timeout <= time.time():
                # waiting took too long
                module.fail_json(msg = 'Timeout waiting on %s (lid: %s)' %
                                 (server['LABEL'], server['LINODEID']))
            # Get a fresh copy of the server details
            server = api.linode_list(LinodeId=server['LINODEID'])[0]
            if server['STATUS'] == -2:
                module.fail_json(msg = '%s (lid: %s) failed to boot' %
                                 (server['LABEL'], server['LINODEID']))
            # From now on we know the task is a success
            # Build instance report
            instance = getInstanceDetails(api, server)
            # depending on wait flag select the status
            if wait:
                instance['status'] = 'Running'
            else:
                instance['status'] = 'Starting'

            # Return the root password if this is a new box and no SSH key
            # has been provided
            if new_server and not ssh_pub_key:
                instance['password'] = password
            instances.append(instance)

    elif state in ('stopped'):
        # NOTE(review): ('stopped') is a plain string, so this is a substring
        # test, not tuple membership; harmless given the validated choices,
        # but ('stopped',) was probably intended.
        for arg in ('name', 'linode_id'):
            if not eval(arg):
                module.fail_json(msg='%s is required for active state' % arg)

        if not servers:
            module.fail_json(msg = 'Server %s (lid: %s) not found' % (name, linode_id))

        for server in servers:
            instance = getInstanceDetails(api, server)
            # STATUS 2 == powered off; only shut down running servers.
            if server['STATUS'] != 2:
                try:
                    res = api.linode_shutdown(LinodeId=linode_id)
                except Exception, e:
                    module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
                instance['status'] = 'Stopping'
                changed = True
            else:
                instance['status'] = 'Stopped'
            instances.append(instance)

    elif state in ('restarted'):
        # NOTE(review): same substring-test caveat as the 'stopped' branch.
        for arg in ('name', 'linode_id'):
            if not eval(arg):
                module.fail_json(msg='%s is required for active state' % arg)

        if not servers:
            module.fail_json(msg = 'Server %s (lid: %s) not found' % (name, linode_id))

        for server in servers:
            instance = getInstanceDetails(api, server)
            try:
                res = api.linode_reboot(LinodeId=server['LINODEID'])
            except Exception, e:
                module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
            instance['status'] = 'Restarting'
            changed = True
            instances.append(instance)

    elif state in ('absent', 'deleted'):
        for server in servers:
            instance = getInstanceDetails(api, server)
            try:
                api.linode_delete(LinodeId=server['LINODEID'], skipChecks=True)
            except Exception, e:
                module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
            instance['status'] = 'Deleting'
            changed = True
            instances.append(instance)

    # Ease parsing if only 1 instance
    if len(instances) == 1:
        module.exit_json(changed=changed, instance=instances[0])
    module.exit_json(changed=changed, instances=instances)
def main():
    '''Module entry point: parse arguments, authenticate, dispatch to
    linodeServers() which performs the work and exits the module.'''
    module = AnsibleModule(
        argument_spec = dict(
            state = dict(default='present', choices=['active', 'present', 'started',
                                                     'deleted', 'absent', 'stopped',
                                                     'restarted']),
            api_key = dict(),
            name = dict(type='str'),
            plan = dict(type='int'),
            distribution = dict(type='int'),
            datacenter = dict(type='int'),
            linode_id = dict(type='int', aliases=['lid']),
            payment_term = dict(type='int', default=1, choices=[1, 12, 24]),
            password = dict(type='str'),
            ssh_pub_key = dict(type='str'),
            swap = dict(type='int', default=512),
            # NOTE(review): DOCUMENTATION above says wait defaults to "no",
            # but the spec default here is True - confirm the intended default.
            wait = dict(type='bool', default=True),
            wait_timeout = dict(default=300),
        )
    )

    state = module.params.get('state')
    api_key = module.params.get('api_key')
    name = module.params.get('name')
    plan = module.params.get('plan')
    distribution = module.params.get('distribution')
    datacenter = module.params.get('datacenter')
    linode_id = module.params.get('linode_id')
    payment_term = module.params.get('payment_term')
    password = module.params.get('password')
    ssh_pub_key = module.params.get('ssh_pub_key')
    swap = module.params.get('swap')
    wait = module.params.get('wait')
    # wait_timeout arrives as a string unless overridden; normalize to int.
    wait_timeout = int(module.params.get('wait_timeout'))

    # Setup the api_key
    if not api_key:
        try:
            api_key = os.environ['LINODE_API_KEY']
        except KeyError, e:
            module.fail_json(msg = 'Unable to load %s' % e.message)

    # setup the auth
    try:
        api = linode_api.Api(api_key)
        # Cheap round-trip call to validate the key before doing real work.
        api.test_echo()
    except Exception, e:
        module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])

    linodeServers(module, api, state, name, plan, distribution, datacenter, linode_id,
                  payment_term, password, ssh_pub_key, swap, wait, wait_timeout)
# import module snippets
# AnsibleModule (used by main) comes from this star import; Ansible executes
# this file as a script, so main() is called unconditionally at import time.
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
denisenkom/django | django/db/backends/mysql/client.py | 84 | 1380 | import os
import sys
from django.db.backends import BaseDatabaseClient
class DatabaseClient(BaseDatabaseClient):
    """Launches the ``mysql`` command-line shell for the configured database."""

    executable_name = 'mysql'

    def runshell(self):
        """Replace this process with the mysql CLI, forwarding the
        connection settings as command-line arguments.

        Values in settings_dict['OPTIONS'] take precedence over the
        top-level settings keys.
        """
        settings_dict = self.connection.settings_dict
        options = settings_dict['OPTIONS']

        db = options.get('db', settings_dict['NAME'])
        user = options.get('user', settings_dict['USER'])
        passwd = options.get('passwd', settings_dict['PASSWORD'])
        host = options.get('host', settings_dict['HOST'])
        port = options.get('port', settings_dict['PORT'])
        defaults_file = options.get('read_default_file')

        args = [self.executable_name]
        # Seems to be no good way to set sql_mode with CLI.
        # Order matters: --defaults-file must precede other options, and the
        # database name must come last.
        if defaults_file:
            args.append("--defaults-file=%s" % defaults_file)
        if user:
            args.append("--user=%s" % user)
        if passwd:
            args.append("--password=%s" % passwd)
        if host:
            # A host containing a slash is treated as a UNIX socket path.
            args.append("--socket=%s" % host if '/' in host else "--host=%s" % host)
        if port:
            args.append("--port=%s" % port)
        if db:
            args.append(db)

        if os.name == 'nt':
            sys.exit(os.system(" ".join(args)))
        else:
            os.execvp(self.executable_name, args)
| bsd-3-clause |
ivanbusthomi/inasafe | safe_extras/simplejson/encoder.py | 89 | 24688 | """Implementation of JSONEncoder
"""
from __future__ import absolute_import
import re
from operator import itemgetter
from decimal import Decimal
from .compat import u, unichr, binary_type, string_types, integer_types, PY3
def _import_speedups():
    """Try to load the C accelerator module.

    Returns (encode_basestring_ascii, make_encoder) from _speedups, or
    (None, None) when the extension is unavailable.
    """
    try:
        from . import _speedups
    except ImportError:
        return None, None
    else:
        return _speedups.encode_basestring_ascii, _speedups.make_encoder
# Bind the C accelerators (or None, None) once at import time.
c_encode_basestring_ascii, c_make_encoder = _import_speedups()

from simplejson.decoder import PosInf

#ESCAPE = re.compile(ur'[\x00-\x1f\\"\b\f\n\r\t\u2028\u2029]')
# This is required because u() will mangle the string and ur'' isn't valid
# python3 syntax
ESCAPE = re.compile(u'[\\x00-\\x1f\\\\"\\b\\f\\n\\r\\t\u2028\u2029]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Map of single characters to their JSON escape sequences; extended below
# with \uXXXX escapes for the remaining C0 control characters and the
# JavaScript-hostile line/paragraph separators.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
for i in range(0x20):
    #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
for i in [0x2028, 0x2029]:
    # U+2028/U+2029 are legal in JSON strings but break JS string literals.
    ESCAPE_DCT.setdefault(unichr(i), '\\u%04x' % (i,))

# Hook point: callers may replace this to customize float formatting.
FLOAT_REPR = repr
def encode_basestring(s, _PY3=PY3, _q=u('"')):
    """Return a quoted JSON string literal for *s*, escaping only the
    characters JSON requires (plus U+2028/U+2029)."""
    # Normalize to text: decode bytes on Python 3, or a UTF-8-looking
    # byte string on Python 2.
    if _PY3:
        if isinstance(s, binary_type):
            s = s.decode('utf-8')
    elif isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')

    def _substitute(match):
        return ESCAPE_DCT[match.group(0)]

    return _q + ESCAPE.sub(_substitute, s) + _q
def py_encode_basestring_ascii(s, _PY3=PY3):
    """Return an ASCII-only, quoted JSON string literal for *s*.

    All non-ASCII characters are emitted as \\uXXXX escapes; astral
    characters become UTF-16 surrogate pairs.
    """
    if _PY3:
        if isinstance(s, binary_type):
            s = s.decode('utf-8')
    elif isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')

    def _substitute(match):
        ch = match.group(0)
        try:
            return ESCAPE_DCT[ch]
        except KeyError:
            code = ord(ch)
            if code < 0x10000:
                #return '\\u{0:04x}'.format(code)
                return '\\u%04x' % (code,)
            # Characters above the BMP are escaped as a surrogate pair.
            code -= 0x10000
            high = 0xd800 | ((code >> 10) & 0x3ff)
            low = 0xdc00 | (code & 0x3ff)
            return '\\u%04x\\u%04x' % (high, low)

    return '"' + str(ESCAPE_ASCII.sub(_substitute, s)) + '"'
# Prefer the C implementation when available; fall back to pure Python.
encode_basestring_ascii = (
    c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict, namedtuple  | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    # Class-level defaults; __init__ may override them (pretty-printing
    # drops the space after the item separator since a newline follows).
    item_separator = ', '
    key_separator = ': '

    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None,
            use_decimal=True, namedtuple_as_object=True,
            tuple_as_array=True, bigint_as_string=False,
            item_sort_key=None, for_json=False, ignore_nan=False):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None. If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped. If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such. This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a string, then JSON array elements and object members
        will be pretty-printed with a newline followed by that string repeated
        for each level of nesting. ``None`` (the default) selects the most compact
        representation without any newlines. For backwards compatibility with
        versions of simplejson earlier than 2.1.0, an integer is also accepted
        and is converted to a string with that many spaces.

        If specified, separators should be an (item_separator, key_separator)
        tuple. The default is (', ', ': ') if *indent* is ``None`` and
        (',', ': ') otherwise. To get the most compact JSON representation,
        you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized. It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.

        If use_decimal is true (not the default), ``decimal.Decimal`` will
        be supported directly by the encoder. For the inverse, decode JSON
        with ``parse_float=decimal.Decimal``.

        If namedtuple_as_object is true (the default), objects with
        ``_asdict()`` methods will be encoded as JSON objects.

        If tuple_as_array is true (the default), tuple (and subclasses) will
        be encoded as JSON arrays.

        If bigint_as_string is true (not the default), ints 2**53 and higher
        or lower than -2**53 will be encoded as strings. This is to avoid the
        rounding that happens in Javascript otherwise.

        If specified, item_sort_key is a callable used to sort the items in
        each dictionary. This is useful if you want to sort items other than
        in alphabetical order by key.

        If for_json is true (not the default), objects with a ``for_json()``
        method will use the return value of that method for encoding as JSON
        instead of the object.

        If *ignore_nan* is true (default: ``False``), then out of range
        :class:`float` values (``nan``, ``inf``, ``-inf``) will be serialized
        as ``null`` in compliance with the ECMA-262 specification. If true,
        this will override *allow_nan*.
        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.use_decimal = use_decimal
        self.namedtuple_as_object = namedtuple_as_object
        self.tuple_as_array = tuple_as_array
        self.bigint_as_string = bigint_as_string
        self.item_sort_key = item_sort_key
        self.for_json = for_json
        self.ignore_nan = ignore_nan
        # Backwards compatibility: an integer indent means that many spaces.
        if indent is not None and not isinstance(indent, string_types):
            indent = indent * ' '
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        elif indent is not None:
            # Pretty-printed output: no space after ',' (newline follows).
            self.item_separator = ','
        if default is not None:
            # Instance attribute shadows the default() method below.
            self.default = default
        self.encoding = encoding

    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        raise TypeError(repr(o) + " is not JSON serializable")

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> from simplejson import JSONEncoder
        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, binary_type):
            _encoding = self.encoding
            if (_encoding is not None and not (_encoding == 'utf-8')):
                o = o.decode(_encoding)
        if isinstance(o, string_types):
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed. The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        if self.ensure_ascii:
            return ''.join(chunks)
        else:
            return u''.join(chunks)

    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder so byte strings are first decoded with the
            # configured (non-UTF-8) encoding.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, binary_type):
                    o = o.decode(_encoding)
                return _orig_encoder(o)

        def floatstr(o, allow_nan=self.allow_nan, ignore_nan=self.ignore_nan,
                _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
            # Check for specials. Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on
            # the internals.
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)
            if ignore_nan:
                text = 'null'
            elif not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))
            return text

        key_memo = {}
        # The C accelerator only handles the one-shot, non-indented case;
        # everything else goes through the pure-Python generator.
        if (_one_shot and c_make_encoder is not None
                and self.indent is None):
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan, key_memo, self.use_decimal,
                self.namedtuple_as_object, self.tuple_as_array,
                self.bigint_as_string, self.item_sort_key,
                self.encoding, self.for_json, self.ignore_nan,
                Decimal)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot, self.use_decimal,
                self.namedtuple_as_object, self.tuple_as_array,
                self.bigint_as_string, self.item_sort_key,
                self.encoding, self.for_json,
                Decimal=Decimal)
        try:
            return _iterencode(o, 0)
        finally:
            # Clear memoized keys so long-lived encoders don't retain them.
            key_memo.clear()
class JSONEncoderForHTML(JSONEncoder):
    """An encoder that produces JSON safe to embed in HTML.

    To embed JSON content in, say, a script tag on a web page, the
    characters &, < and > should be escaped. They cannot be escaped
    with the usual entities (e.g. &) because they are not expanded
    within <script> tags.
    """

    def encode(self, o):
        # Bypass JSONEncoder.encode's fast paths so that every chunk goes
        # through our escaping iterencode below.
        chunks = list(self.iterencode(o, True))
        joiner = '' if self.ensure_ascii else u''
        return joiner.join(chunks)

    def iterencode(self, o, _one_shot=False):
        for chunk in super(JSONEncoderForHTML, self).iterencode(o, _one_shot):
            yield (chunk.replace('&', '\\u0026')
                        .replace('<', '\\u003c')
                        .replace('>', '\\u003e'))
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
        _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        _use_decimal, _namedtuple_as_object, _tuple_as_array,
        _bigint_as_string, _item_sort_key, _encoding, _for_json,
        ## HACK: hand-optimized bytecode; turn globals into locals
        _PY3=PY3,
        ValueError=ValueError,
        string_types=string_types,
        Decimal=Decimal,
        dict=dict,
        float=float,
        id=id,
        integer_types=integer_types,
        isinstance=isinstance,
        list=list,
        str=str,
        tuple=tuple,
        ):
    # Build and return the pure-Python _iterencode(o, indent_level)
    # generator used by JSONEncoder.iterencode. All configuration is
    # captured in the closure; the trailing keyword defaults exist only
    # to turn global lookups into fast local lookups.
    if _item_sort_key and not callable(_item_sort_key):
        raise TypeError("item_sort_key must be None or callable")
    elif _sort_keys and not _item_sort_key:
        # sort_keys without an explicit key function: sort on the key itself.
        _item_sort_key = itemgetter(0)

    def _iterencode_list(lst, _current_indent_level):
        # Yield the JSON array representation of lst, chunk by chunk.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                # After the first item, the pending prefix is the separator.
                buf = separator
            if (isinstance(value, string_types) or
                (_PY3 and isinstance(value, binary_type))):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, integer_types):
                # Optionally quote ints outside JavaScript's exact range.
                yield ((buf + str(value))
                       if (not _bigint_as_string or
                           (-1 << 53) < value < (1 << 53))
                       else (buf + '"' + str(value) + '"'))
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            elif _use_decimal and isinstance(value, Decimal):
                yield buf + str(value)
            else:
                yield buf
                for_json = _for_json and getattr(value, 'for_json', None)
                if for_json and callable(for_json):
                    chunks = _iterencode(for_json(), _current_indent_level)
                elif isinstance(value, list):
                    chunks = _iterencode_list(value, _current_indent_level)
                else:
                    _asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
                    if _asdict and callable(_asdict):
                        chunks = _iterencode_dict(_asdict(),
                                                  _current_indent_level)
                    elif _tuple_as_array and isinstance(value, tuple):
                        chunks = _iterencode_list(value, _current_indent_level)
                    elif isinstance(value, dict):
                        chunks = _iterencode_dict(value, _current_indent_level)
                    else:
                        chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield ']'
        if markers is not None:
            del markers[markerid]

    def _stringify_key(key):
        # Coerce a non-string dict key to its JSON string form; returns
        # None (meaning "skip this key") only when _skipkeys is set.
        if isinstance(key, string_types): # pragma: no cover
            pass
        elif isinstance(key, binary_type):
            key = key.decode(_encoding)
        elif isinstance(key, float):
            key = _floatstr(key)
        elif key is True:
            key = 'true'
        elif key is False:
            key = 'false'
        elif key is None:
            key = 'null'
        elif isinstance(key, integer_types):
            key = str(key)
        elif _use_decimal and isinstance(key, Decimal):
            key = str(key)
        elif _skipkeys:
            key = None
        else:
            raise TypeError("key " + repr(key) + " is not a string")
        return key

    def _iterencode_dict(dct, _current_indent_level):
        # Yield the JSON object representation of dct, chunk by chunk.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _PY3:
            iteritems = dct.items()
        else:
            iteritems = dct.iteritems()
        if _item_sort_key:
            # Sorting requires stringifying keys up front so the sort key
            # function sees the final (string) keys.
            items = []
            for k, v in dct.items():
                if not isinstance(k, string_types):
                    k = _stringify_key(k)
                    if k is None:
                        continue
                items.append((k, v))
            items.sort(key=_item_sort_key)
        else:
            items = iteritems
        for key, value in items:
            if not (_item_sort_key or isinstance(key, string_types)):
                key = _stringify_key(key)
                if key is None:
                    # _skipkeys must be True
                    continue
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if (isinstance(value, string_types) or
                (_PY3 and isinstance(value, binary_type))):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, integer_types):
                yield (str(value)
                       if (not _bigint_as_string or
                           (-1 << 53) < value < (1 << 53))
                       else ('"' + str(value) + '"'))
            elif isinstance(value, float):
                yield _floatstr(value)
            elif _use_decimal and isinstance(value, Decimal):
                yield str(value)
            else:
                for_json = _for_json and getattr(value, 'for_json', None)
                if for_json and callable(for_json):
                    chunks = _iterencode(for_json(), _current_indent_level)
                elif isinstance(value, list):
                    chunks = _iterencode_list(value, _current_indent_level)
                else:
                    _asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
                    if _asdict and callable(_asdict):
                        chunks = _iterencode_dict(_asdict(),
                                                  _current_indent_level)
                    elif _tuple_as_array and isinstance(value, tuple):
                        chunks = _iterencode_list(value, _current_indent_level)
                    elif isinstance(value, dict):
                        chunks = _iterencode_dict(value, _current_indent_level)
                    else:
                        chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield '}'
        if markers is not None:
            del markers[markerid]

    def _iterencode(o, _current_indent_level):
        # Top-level dispatch over all supported types.
        if (isinstance(o, string_types) or
            (_PY3 and isinstance(o, binary_type))):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, integer_types):
            yield (str(o)
                   if (not _bigint_as_string or
                       (-1 << 53) < o < (1 << 53))
                   else ('"' + str(o) + '"'))
        elif isinstance(o, float):
            yield _floatstr(o)
        else:
            for_json = _for_json and getattr(o, 'for_json', None)
            if for_json and callable(for_json):
                for chunk in _iterencode(for_json(), _current_indent_level):
                    yield chunk
            elif isinstance(o, list):
                for chunk in _iterencode_list(o, _current_indent_level):
                    yield chunk
            else:
                _asdict = _namedtuple_as_object and getattr(o, '_asdict', None)
                if _asdict and callable(_asdict):
                    for chunk in _iterencode_dict(_asdict(),
                                                  _current_indent_level):
                        yield chunk
                elif (_tuple_as_array and isinstance(o, tuple)):
                    for chunk in _iterencode_list(o, _current_indent_level):
                        yield chunk
                elif isinstance(o, dict):
                    for chunk in _iterencode_dict(o, _current_indent_level):
                        yield chunk
                elif _use_decimal and isinstance(o, Decimal):
                    yield str(o)
                else:
                    # Unknown type: let _default convert it, guarding
                    # against recursive structures while doing so.
                    if markers is not None:
                        markerid = id(o)
                        if markerid in markers:
                            raise ValueError("Circular reference detected")
                        markers[markerid] = o
                    o = _default(o)
                    for chunk in _iterencode(o, _current_indent_level):
                        yield chunk
                    if markers is not None:
                        del markers[markerid]
    return _iterencode
| gpl-3.0 |
totallybradical/temp_servo2 | tests/wpt/harness/wptrunner/wptmanifest/backends/static.py | 190 | 6645 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import operator
from ..node import NodeVisitor
from ..parser import parse
class Compiler(NodeVisitor):
    """Compiler backend that evaluates conditional expressions
    to give static output"""

    def compile(self, tree, expr_data, data_cls_getter=None, **kwargs):
        """Compile a raw AST into a form with conditional expressions
        evaluated.

        tree - The root node of the wptmanifest AST to compile
        expr_data - A dictionary of key / value pairs to use when
                    evaluating conditional expressions
        data_cls_getter - A function taking two parameters; the previous
                          output node and the current ast node and returning
                          the class of the output node to use for the current
                          ast node
        """
        self._kwargs = kwargs
        self.expr_data = expr_data
        if data_cls_getter is None:
            self.data_cls_getter = lambda x, y: ManifestItem
        else:
            self.data_cls_getter = data_cls_getter
        self.output_node = None
        self.visit(tree)
        return self.output_node

    def visit_DataNode(self, node):
        # Push a new output node, visit the children into it, then attach
        # it to its parent and pop back up.
        output_parent = self.output_node
        if self.output_node is None:
            assert node.parent is None
            self.output_node = self.data_cls_getter(None, None)(None, **self._kwargs)
        else:
            self.output_node = self.data_cls_getter(self.output_node, node)(node.data)
        for child in node.children:
            self.visit(child)
        if output_parent is not None:
            output_parent.append(self.output_node)
            self.output_node = self.output_node.parent

    def visit_KeyValueNode(self, node):
        # The first child (conditional or plain value) that evaluates to a
        # non-None value wins.
        key_name = node.data
        key_value = None
        for child in node.children:
            value = self.visit(child)
            if value is not None:
                key_value = value
                break
        if key_value is not None:
            self.output_node.set(key_name, key_value)

    def visit_ValueNode(self, node):
        return node.data

    def visit_AtomNode(self, node):
        return node.data

    def visit_ListNode(self, node):
        return [self.visit(child) for child in node.children]

    def visit_ConditionalNode(self, node):
        # children[0] is the condition expression, children[1] the value.
        assert len(node.children) == 2
        if self.visit(node.children[0]):
            return self.visit(node.children[1])

    def visit_StringNode(self, node):
        # Apply any index operations (children) to the literal string.
        value = node.data
        for child in node.children:
            value = self.visit(child)(value)
        return value

    def visit_NumberNode(self, node):
        if "." in node.data:
            return float(node.data)
        else:
            return int(node.data)

    def visit_VariableNode(self, node):
        # Resolve the variable from expr_data, then apply index operations.
        value = self.expr_data[node.data]
        for child in node.children:
            value = self.visit(child)(value)
        return value

    def visit_IndexNode(self, node):
        # Returns a function that indexes its argument; applied by the
        # String/Variable visitors above.
        assert len(node.children) == 1
        index = self.visit(node.children[0])
        return lambda x: x[index]

    def visit_UnaryExpressionNode(self, node):
        assert len(node.children) == 2
        operator = self.visit(node.children[0])
        operand = self.visit(node.children[1])
        return operator(operand)

    def visit_BinaryExpressionNode(self, node):
        assert len(node.children) == 3
        operator = self.visit(node.children[0])
        operand_0 = self.visit(node.children[1])
        operand_1 = self.visit(node.children[2])
        return operator(operand_0, operand_1)

    def visit_UnaryOperatorNode(self, node):
        return {"not": operator.not_}[node.data]

    def visit_BinaryOperatorNode(self, node):
        return {"and": operator.and_,
                "or": operator.or_,
                "==": operator.eq,
                "!=": operator.ne}[node.data]
class ManifestItem(object):
    """A node in the compiled manifest tree.

    Key lookup on a node falls back to the tree root, so root-level keys
    act as defaults for every item in the tree.
    """

    def __init__(self, name, **kwargs):
        self.parent = None
        self.name = name
        self.children = []
        self._data = {}

    def __repr__(self):
        return "<ManifestItem %s>" % (self.name)

    def __str__(self):
        rv = [repr(self)]
        for item in self.children:
            rv.extend(" %s" % line for line in str(item).split("\n"))
        return "\n".join(rv)

    @property
    def is_empty(self):
        """True if neither this node nor any descendant carries data."""
        if self._data:
            return False
        return all(child.is_empty for child in self.children)

    @property
    def root(self):
        """The topmost ancestor of this node (self when unparented)."""
        node = self
        while node.parent is not None:
            node = node.parent
        return node

    def has_key(self, key):
        for node in [self, self.root]:
            if key in node._data:
                return True
        return False

    def get(self, key):
        """Return the value for *key*, falling back to the root node.

        Raises KeyError (carrying the key) when neither node has it.
        """
        for node in [self, self.root]:
            if key in node._data:
                return node._data[key]
        # Include the missing key in the exception for easier debugging;
        # the previous code raised a bare KeyError with no message.
        raise KeyError(key)

    def set(self, name, value):
        self._data[name] = value

    def remove(self):
        """Detach this node from its parent, if it has one."""
        if self.parent:
            self.parent._remove_child(self)

    def _remove_child(self, child):
        self.children.remove(child)
        child.parent = None

    def iterchildren(self, name=None):
        """Yield children, optionally restricted to those named *name*."""
        for item in self.children:
            if item.name == name or name is None:
                yield item

    def _flatten(self):
        # Merge own data over root data; own values win on key collisions.
        # Use .items() (not the Python-2-only .iteritems()) so this works
        # on both Python 2 and Python 3.
        rv = {}
        for node in [self, self.root]:
            for name, value in node._data.items():
                if name not in rv:
                    rv[name] = value
        return rv

    def iteritems(self):
        for item in self._flatten().items():
            yield item

    def iterkeys(self):
        for item in self._flatten().keys():
            yield item

    def itervalues(self):
        for item in self._flatten().values():
            yield item

    def append(self, child):
        """Add *child* as the last child of this node and return it."""
        child.parent = self
        self.children.append(child)
        return child
def compile_ast(ast, expr_data, data_cls_getter=None, **kwargs):
    """Compile an already-parsed wptmanifest AST using *expr_data*."""
    compiler = Compiler()
    return compiler.compile(ast,
                            expr_data,
                            data_cls_getter=data_cls_getter,
                            **kwargs)
def compile(stream, expr_data, data_cls_getter=None, **kwargs):
    """Parse *stream* and compile the resulting AST (see compile_ast)."""
    ast = parse(stream)
    return compile_ast(ast,
                       expr_data,
                       data_cls_getter=data_cls_getter,
                       **kwargs)
| mpl-2.0 |
hofschroeer/shinysdr | shinysdr/web.py | 1 | 33243 | # Copyright 2013, 2014, 2015 Kevin Reid <kpreid@switchb.org>
#
# This file is part of ShinySDR.
#
# ShinySDR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ShinySDR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ShinySDR. If not, see <http://www.gnu.org/licenses/>.
# pylint: disable=maybe-no-member, attribute-defined-outside-init, no-init, method-hidden, signature-differs
# (maybe-no-member is incorrect)
# (attribute-defined-outside-init is a Twisted convention for protocol objects)
# (no-init is pylint being confused by interfaces)
# (method-hidden: done on purpose)
# (signature-differs: twisted is inconsistent about connectionMade/connectionLost)
from __future__ import absolute_import, division
import json
import urllib
import os.path
import struct
import time
import weakref
from twisted.application import strports
from twisted.application.service import Service
from twisted.internet import defer
from twisted.internet import protocol
from twisted.internet import reactor as the_reactor # TODO fix
from twisted.plugin import IPlugin, getPlugins
from twisted.python import log
from twisted.web import http, static, server, template
from twisted.web.resource import Resource
from zope.interface import Interface, implements, providedBy # available via Twisted
from gnuradio import gr
import txws
import shinysdr.plugins
import shinysdr.db
from shinysdr.ephemeris import EphemerisResource
from shinysdr.modes import get_modes
from shinysdr.signals import SignalType
from shinysdr.values import ExportedState, BaseCell, BlockCell, StreamCell, IWritableCollection, the_poller
# temporary kludge until upstream takes our patch
# Fail fast at import time if txWS lacks binary-message support.
if hasattr(txws, 'WebSocketProtocol') and not hasattr(txws.WebSocketProtocol, 'setBinaryMode'):
    raise ImportError('The installed version of txWS does not support sending binary messages and cannot be used.')

# used externally
staticResourcePath = os.path.join(os.path.dirname(__file__), 'webstatic')
_templatePath = os.path.join(os.path.dirname(__file__), 'webparts')

# Do not use this directly in general; use _serialize.
# Compact separators + sorted keys give deterministic, minimal output.
_json_encoder_for_serial = json.JSONEncoder(
    ensure_ascii=False,
    check_circular=False,
    allow_nan=True,
    sort_keys=True,
    separators=(',', ':'))
def _transform_for_json(obj):
    """Recursively convert *obj* into plain JSON-encodable structures.

    Implemented as an explicit recursive transform rather than a
    JSONEncoder default hook because the hook cannot override the
    behavior for namedtuples.
    """
    if isinstance(obj, SignalType):
        return {
            u'kind': obj.get_kind(),
            u'sample_rate': obj.get_sample_rate(),
        }
    if isinstance(obj, tuple) and hasattr(obj, '_asdict'):
        # namedtuple -- TODO better recognition?
        return {k: _transform_for_json(v) for k, v in obj._asdict().iteritems()}
    if isinstance(obj, dict):
        return {k: _transform_for_json(v) for k, v in obj.iteritems()}
    if isinstance(obj, (list, tuple)):
        return [_transform_for_json(v) for v in obj]
    return obj
def _serialize(obj):
    """JSON-encode *obj* for clients, including in the state stream."""
    return _json_encoder_for_serial.encode(_transform_for_json(obj))
class _SlashedResource(Resource):
    '''Redirects /.../this to /.../this/.'''

    def render(self, request):
        # Send the client to the same resource with a trailing slash.
        request.setResponseCode(http.MOVED_PERMANENTLY)
        request.setHeader('Location', request.childLink(''))
        return ''
class CellResource(Resource):
    """Abstract HTTP resource exposing a single exported cell.

    Subclasses supply grparse/grrender to convert between wire data and
    cell values.
    """
    isLeaf = True

    def __init__(self, cell, noteDirty):
        self._cell = cell
        # TODO: instead of needing this hook, main should use poller
        self._noteDirty = noteDirty

    def grparse(self, value):
        """Convert a request body into a cell value (subclass hook)."""
        raise NotImplementedError()

    def grrender(self, value, request):
        """Convert a cell value into response bytes (subclass hook)."""
        return str(value)

    def render_GET(self, request):
        value = self._cell.get()
        return self.grrender(value, request)

    def render_PUT(self, request):
        raw = request.content.read()
        self._cell.set(self.grparse(raw))
        request.setResponseCode(204)
        self._noteDirty()
        return ''

    def resourceDescription(self):
        return self._cell.description()
class ValueCellResource(CellResource):
    """CellResource speaking JSON for both GET responses and PUT bodies."""

    def __init__(self, cell, noteDirty):
        CellResource.__init__(self, cell, noteDirty)

    def grparse(self, value):
        return json.loads(value)

    def grrender(self, value, request):
        encoded = _serialize(value)
        return encoded.encode('utf-8')
def not_deletable():
    """Deletion hook for the session root, which must never be deleted."""
    raise Exception('Attempt to delete session root')
class BlockResource(Resource):
    """HTTP resource exposing an exported state block.

    Value cells become ValueCellResource children; block cells become
    nested BlockResources (cached per block object).
    """
    isLeaf = False

    def __init__(self, block, noteDirty, deleteSelf):
        # block - the exported-state object to expose
        # noteDirty - callback invoked after any mutation
        # deleteSelf - callable that removes this block from its parent
        Resource.__init__(self)
        self._block = block
        self._noteDirty = noteDirty
        self._deleteSelf = deleteSelf
        self._dynamic = block.state_is_dynamic()
        # Weak dict ensures that we don't hold references to blocks that are no longer held by this block
        self._blockResourceCache = weakref.WeakKeyDictionary()
        if not self._dynamic: # currently dynamic blocks can only have block children
            self._blockCells = {}
            for key, cell in block.state().iteritems():
                if cell.isBlock():
                    self._blockCells[key] = cell
                else:
                    self.putChild(key, ValueCellResource(cell, self._noteDirty))
        self.__element = _BlockHtmlElement()

    def getChild(self, name, request):
        # Dynamic blocks are re-queried on every request; static blocks use
        # the cell table captured in __init__.
        if self._dynamic:
            curstate = self._block.state()
            if name in curstate:
                cell = curstate[name]
                if cell.isBlock():
                    return self.__getBlockChild(name, cell.get())
        else:
            if name in self._blockCells:
                return self.__getBlockChild(name, self._blockCells[name].get())
        # old-style-class super call
        return Resource.getChild(self, name, request)

    def __getBlockChild(self, name, block):
        # Reuse one resource per child block object for as long as it lives.
        r = self._blockResourceCache.get(block)
        if r is None:
            r = self.__makeChildBlockResource(name, block)
            self._blockResourceCache[block] = r
        return r

    def __makeChildBlockResource(self, name, block):
        def deleter():
            # Only writable collections support deleting children.
            if not IWritableCollection.providedBy(self._block):
                raise Exception('Block is not a writable collection')
            self._block.delete_child(name)
        return BlockResource(block, self._noteDirty, deleter)

    def render_GET(self, request):
        # Content negotiation: JSON state description, or the HTML UI page.
        accept = request.getHeader('Accept')
        if accept is not None and 'application/json' in accept: # TODO: Implement or obtain correct Accept interpretation
            request.setHeader('Content-Type', 'application/json')
            return _serialize(self.resourceDescription()).encode('utf-8')
        else:
            request.setHeader('Content-Type', 'text/html;charset=utf-8')
            return renderElement(request, self.__element)

    def render_POST(self, request):
        '''currently only meaningful to create children of CollectionResources'''
        block = self._block
        if not IWritableCollection.providedBy(block):
            raise Exception('Block is not a writable collection')
        assert request.getHeader('Content-Type') == 'application/json'
        reqjson = json.load(request.content)
        key = block.create_child(reqjson) # note may fail
        self._noteDirty()
        url = request.prePathURL() + '/receivers/' + urllib.quote(key, safe='')
        request.setResponseCode(201) # Created
        request.setHeader('Location', url)
        # TODO consider a more useful response
        return _serialize(url).encode('utf-8')

    def render_DELETE(self, request):
        self._deleteSelf()
        self._noteDirty()
        request.setResponseCode(204) # No Content
        return ''

    def resourceDescription(self):
        # Recursive JSON-serializable description of the block's state.
        return self._block.state_description()

    def isForBlock(self, block):
        return self._block is block
class _BlockHtmlElement(template.Element):
    '''
    Template element for HTML page for an arbitrary block.
    '''
    loader = template.XMLFile(os.path.join(_templatePath, 'block.template.xhtml'))

    @template.renderer
    def _block_url(self, request, tag):
        # Reconstruct this page's own URL path from the request's path segments.
        return tag('/' + '/'.join([urllib.quote(x, safe='') for x in request.prepath]))
class FlowgraphVizResource(Resource):
    """Renders the flow graph as a PNG by piping its Graphviz source through `dot`."""
    isLeaf = True

    def __init__(self, reactor, block):
        self.__reactor = reactor
        self.__block = block

    def render_GET(self, request):
        request.setHeader('Content-Type', 'image/png')
        # Launch via /usr/bin/env so `dot` is located on the inherited PATH.
        process = self.__reactor.spawnProcess(
            DotProcessProtocol(request),
            '/usr/bin/env',
            env=None,  # inherit environment
            args=['env', 'dot', '-Tpng'],
            childFDs={
                0: 'w',  # we write dot source to the child's stdin
                1: 'r',  # child stdout (PNG bytes) is read by the protocol
                2: 2     # child stderr shares ours
            })
        process.pipes[0].write(self.__block.dot_graph())
        process.pipes[0].loseConnection()
        # Response is completed asynchronously by DotProcessProtocol.
        return server.NOT_DONE_YET
class DotProcessProtocol(protocol.ProcessProtocol):
    """Streams a `dot` subprocess's stdout into an HTTP request, finishing on EOF."""

    def __init__(self, request):
        self.__req = request

    def outReceived(self, data):
        # Forward rendered image bytes as they arrive.
        self.__req.write(data)

    def outConnectionLost(self):
        # stdout closed: the image is complete, so complete the response.
        self.__req.finish()
def _fqn(class_):
# per http://stackoverflow.com/questions/2020014/get-fully-qualified-class-name-of-an-object-in-python
return class_.__module__ + '.' + class_.__name__
def _get_interfaces(obj):
    """List the fully-qualified names of every zope interface provided by obj."""
    return [_fqn(iface) for iface in providedBy(obj)]
class _StateStreamObjectRegistration(object):
    """Bookkeeping for one object (cell or block) exposed on a state stream.

    Owns the poller subscription for the object, the wire serial number used
    to refer to it, the last value sent (to suppress duplicate updates), and
    a reference count of how many other registrations' previous values point
    at this object.
    """
    # TODO messy
    def __init__(self, ssi, poller, obj, serial, url, refcount):
        self.__ssi = ssi
        self.obj = obj
        self.serial = serial
        self.url = url
        self.has_previous_value = False
        self.previous_value = None
        # True when previous_value maps keys to *registered objects* (whose
        # refcounts we hold) rather than to plain serializable values.
        self.value_is_references = False
        self.__dead = False
        if isinstance(obj, BaseCell):
            self.__obj_is_cell = True
            if isinstance(obj, StreamCell):  # TODO kludge
                self.__poller_registration = poller.subscribe(obj, self.__listen_binary_stream)
                # Stream cells have no "current value" to push up front.
                self.send_now_if_needed = lambda: None
            else:
                self.__poller_registration = poller.subscribe(obj, self.__listen_cell)
                self.send_now_if_needed = self.__listen_cell
        else:
            self.__obj_is_cell = False
            self.__poller_registration = poller.subscribe_state(obj, self.__listen_state)
            self.send_now_if_needed = lambda: self.__listen_state(self.obj.state())
        self.__refcount = refcount

    def __str__(self):
        return self.url

    def set_previous(self, value, is_references):
        # Record the last-sent value. When it holds references, each target
        # must already be registered, since its refcount will be released later.
        if is_references:
            for obj in value.itervalues():
                if obj not in self.__ssi._registered_objs:
                    raise Exception("shouldn't happen: previous value not registered", obj)
        self.has_previous_value = True
        self.previous_value = value
        self.value_is_references = is_references

    def send_initial_value(self):
        '''kludge to get initial state sent'''

    def send_now_if_needed(self):
        # should be overridden in instance (replaced per-kind in __init__)
        raise Exception('This placeholder should never get called')

    def get_object_which_is_cell(self):
        if not self.__obj_is_cell:
            raise Exception('This object is not a cell')
        return self.obj

    def __listen_cell(self):
        # Poller callback for ordinary (non-stream) cells.
        if self.__dead:
            return
        obj = self.obj
        if isinstance(obj, StreamCell):
            raise Exception("shouldn't happen: StreamCell here")
        if obj.isBlock():
            # Block-valued cell: register the block and send it by reference.
            block = obj.get()
            self.__ssi._lookup_or_register(block, self.url)
            self.__maybesend_reference({u'value': block}, True)
        else:
            value = obj.get()
            self.__maybesend(value, value)

    def __listen_binary_stream(self, value):
        # Poller callback for stream cells: binary frame prefixed by serial.
        if self.__dead:
            return
        self.__ssi._send1(True, struct.pack('I', self.serial) + value)

    def __listen_state(self, state):
        # Poller callback for whole-block state dicts.
        if self.__dead:
            return
        self.__maybesend_reference(state, False)

    # TODO fix private refs to ssi here
    def __maybesend(self, compare_value, update_value):
        # Send a plain value only if it differs from the last one sent.
        if not self.has_previous_value or compare_value != self.previous_value[u'value']:
            self.set_previous({u'value': compare_value}, False)
            self.__ssi._send1(False, ('value', self.serial, update_value))

    def __maybesend_reference(self, objs, is_single):
        # Send object references (by serial). On change: register any new
        # targets, increment the new set's refcounts, then release the old set.
        registrations = {
            k: self.__ssi._lookup_or_register(v, self.url + '/' + urllib.unquote(k))
            for k, v in objs.iteritems()
        }
        serials = {k: v.serial for k, v in registrations.iteritems()}
        if not self.has_previous_value or objs != self.previous_value:
            for reg in registrations.itervalues():
                reg.inc_refcount()
            if is_single:
                self.__ssi._send1(False, ('value', self.serial, serials[u'value']))
            else:
                self.__ssi._send1(False, ('value', self.serial, serials))
            if self.has_previous_value:
                refs = self.previous_value.values()
                refs.sort()  # ensure determinism
                for obj in refs:
                    if obj not in self.__ssi._registered_objs:
                        raise Exception("Shouldn't happen: previous value not registered", obj)
                    self.__ssi._registered_objs[obj].dec_refcount_and_maybe_notify()
            self.set_previous(objs, True)

    def drop(self):
        # TODO this should go away in refcount world
        if self.__poller_registration is not None:
            self.__poller_registration.unsubscribe()

    def inc_refcount(self):
        if self.__dead:
            raise Exception('incing dead reference')
        self.__refcount += 1

    def dec_refcount_and_maybe_notify(self):
        # When the count hits zero, tear this registration down and release
        # the refcounts of everything its previous value referenced.
        if self.__dead:
            raise Exception('decing dead reference')
        self.__refcount -= 1
        if self.__refcount == 0:
            self.__dead = True
            self.__ssi.do_delete(self)
            # capture refs to decrement
            if self.value_is_references:
                refs = self.previous_value.values()
                refs.sort()  # ensure determinism
            else:
                refs = []
            # drop previous value
            self.previous_value = None
            self.has_previous_value = False
            self.value_is_references = False
            # decrement refs
            for obj in refs:
                self.__ssi._registered_objs[obj].dec_refcount_and_maybe_notify()
# TODO: Better name for this category of object
class StateStreamInner(object):
    """Handler for a state-streaming WebSocket connection.

    Registers objects reachable from the root, assigns them serial numbers,
    and sends batched JSON update messages through the supplied `send`.
    """
    def __init__(self, send, root_object, root_url, noteDirty, poller=the_poller):
        self.__poller = poller
        self._send = send
        self.__root_object = root_object
        # Wrap the root in a cell so it is handled uniformly; it gets serial 0.
        self._cell = BlockCell(self, '_root_object')
        self._lastSerial = 0
        root_registration = _StateStreamObjectRegistration(ssi=self, poller=self.__poller, obj=self._cell, serial=0, url=root_url, refcount=0)
        self._registered_objs = {self._cell: root_registration}
        self.__registered_serials = {root_registration.serial: root_registration}
        self._send_batch = []
        self.__batch_delay = None
        self.__root_url = root_url
        self.__noteDirty = noteDirty
        root_registration.send_now_if_needed()

    def connectionLost(self, reason):
        # NOTE(review): relies on Python 2 .keys() returning a snapshot list,
        # since __drop mutates _registered_objs during this iteration.
        for obj in self._registered_objs.keys():
            self.__drop(obj)

    def dataReceived(self, data):
        # TODO: handle json parse failure or other failures meaningfully
        command = json.loads(data)
        op = command[0]
        if op == 'set':
            op, serial, value, message_id = command
            registration = self.__registered_serials[serial]
            cell = registration.get_object_which_is_cell()
            t0 = time.time()
            cell.set(value)
            registration.send_now_if_needed()
            # Acknowledge so the client can resolve its pending request.
            self._send1(False, ['done', message_id])
            t1 = time.time()
            # TODO: Define self.__str__ or similar such that we can easily log which client is sending the command
            log.msg('set %s to %r (%1.2fs)' % (registration, value, t1 - t0))
            self.__noteDirty()  # TODO fix things so noteDirty is not needed
        else:
            log.msg('Unrecognized state stream op received: %r' % (command,))

    def get__root_object(self):
        '''Accessor for implementing self._cell.'''
        return self.__root_object

    def do_delete(self, reg):
        # Tell the client to forget this serial, then drop our bookkeeping.
        self._send1(False, ('delete', reg.serial))
        self.__drop(reg.obj)

    def __drop(self, obj):
        registration = self._registered_objs[obj]
        registration.drop()
        del self.__registered_serials[registration.serial]
        del self._registered_objs[obj]

    def _lookup_or_register(self, obj, url):
        # Return the existing registration for obj, or create one and tell
        # the client the new serial and what kind of thing it refers to.
        if obj in self._registered_objs:
            return self._registered_objs[obj]
        else:
            self._lastSerial += 1
            serial = self._lastSerial
            registration = _StateStreamObjectRegistration(ssi=self, poller=self.__poller, obj=obj, serial=serial, url=url, refcount=0)
            self._registered_objs[obj] = registration
            self.__registered_serials[serial] = registration
            if isinstance(obj, BaseCell):
                self._send1(False, ('register_cell', serial, url, obj.description()))
                if isinstance(obj, StreamCell):  # TODO kludge
                    pass
                elif not obj.isBlock():  # TODO condition is a kludge due to block cell values being gook
                    registration.set_previous({u'value': obj.get()}, False)
            elif isinstance(obj, ExportedState):
                self._send1(False, ('register_block', serial, url, _get_interfaces(obj)))
            else:
                # TODO: not implemented on client (but shouldn't happen)
                self._send1(False, ('register', serial, url))
            registration.send_now_if_needed()
            return registration

    def _flush(self):  # exposed for testing
        self.__batch_delay = None
        if len(self._send_batch) > 0:
            # unicode() because JSONEncoder does not reliably return a unicode rather than str object
            self._send(unicode(_serialize(self._send_batch)))
            self._send_batch = []

    def _send1(self, binary, value):
        if binary:
            # preserve order by flushing stored non-binary msgs
            # TODO: Implement batching for binary messages.
            self._flush()
            self._send(value)
        else:
            # Messages are batched in order to increase client-side efficiency since each incoming WebSocket message is always a separate JS event.
            self._send_batch.append(value)
            # TODO: Parameterize with reactor so we can test properly
            if not (self.__batch_delay is not None and self.__batch_delay.active()):
                self.__batch_delay = the_reactor.callLater(0, self._flush)
class AudioStreamInner(object):
    """Handler for an audio-streaming WebSocket connection.

    Attaches a GNU Radio message queue to the radio block and relays its
    contents to the client from a dedicated reader thread.
    """
    def __init__(self, reactor, send, block, audio_rate):
        self._send = send
        # Bounded queue so a stalled client cannot consume unbounded memory.
        self._queue = gr.msg_queue(limit=100)
        # Single-element list: mutable flag shared with the reader thread.
        self.__running = [True]
        self._block = block
        self._block.add_audio_queue(self._queue, audio_rate)
        # First message tells the client how many audio channels to expect.
        send(unicode(self._block.get_audio_queue_channels()))
        reactor.callInThread(_AudioStream_read_loop, reactor, self._queue, self.__deliver, self.__running)

    def dataReceived(self, data):
        # Audio streaming is one-way; ignore anything the client sends.
        pass

    def connectionLost(self, reason):
        self._block.remove_audio_queue(self._queue)
        self.__running[0] = False
        # Insert a dummy message to ensure the loop thread unblocks; otherwise it will sit around forever, including preventing process shutdown.
        self._queue.insert_tail(gr.message())

    def __deliver(self, data_string):
        # Audio is inherently lossy; dropping under backpressure is acceptable.
        self._send(data_string, safe_to_drop=True)
def _AudioStream_read_loop(reactor, queue, deliver, running):
    # RUNS IN A SEPARATE THREAD.
    # Blocks on the GNU Radio queue, coalesces everything currently queued
    # into one buffer, and hands it to the reactor thread via deliver().
    while running[0]:
        buf = ''
        message = queue.delete_head()  # blocking call
        if message.length() > 0:  # avoid crash bug
            buf += message.to_string()
        # Collect more queue contents to batch data
        while not queue.empty_p():
            message = queue.delete_head()
            if message.length() > 0:  # avoid crash bug
                buf += message.to_string()
        reactor.callFromThread(deliver, buf)
def _lookup_block(block, path):
for i, path_elem in enumerate(path):
cell = block.state().get(path_elem)
if cell is None:
raise Exception('Not found: %r in %r' % (path[:i + 1], path))
elif not cell.isBlock():
raise Exception('Not a block: %r in %r' % (path[:i + 1], path))
block = cell.get()
return block
class OurStreamProtocol(protocol.Protocol):
    """WebSocket protocol that dispatches to an audio or state stream handler
    based on the connection URL."""
    def __init__(self, caps, noteDirty):
        self._caps = caps
        self._seenValues = {}
        # Set on the first message, once the URL is known.
        self.inner = None
        self.__noteDirty = noteDirty

    def dataReceived(self, data):
        """Twisted Protocol implementation.
        Additionally, txWS takes no care with exceptions here, so we catch and log."""
        # pylint: disable=broad-except
        try:
            if self.inner is None:
                # To work around txWS's lack of a notification when the URL is available, all clients send a dummy first message.
                self.__dispatch_url()
            else:
                self.inner.dataReceived(data)
        except Exception as e:
            log.err(e)

    def __dispatch_url(self):
        # Select the handler based on the WebSocket path.
        loc = self.transport.location
        log.msg('Stream connection to ', loc)
        path = [urllib.unquote(x) for x in loc.split('/')]
        assert path[0] == ''
        path[0:1] = []
        # Leading segment may be a capability token selecting the root object.
        if path[0] in self._caps:
            root_object = self._caps[path[0]]
            path[0:1] = []
        elif None in self._caps:
            root_object = self._caps[None]
        else:
            raise Exception('Unknown cap')  # TODO better error reporting
        if len(path) == 1 and path[0].startswith('audio?rate='):
            rate = int(json.loads(urllib.unquote(path[0][len('audio?rate='):])))
            self.inner = AudioStreamInner(the_reactor, self.__send, root_object, rate)
        elif len(path) >= 1 and path[0] == 'radio':
            # note _lookup_block may throw. TODO: Better error reporting
            root_object = _lookup_block(root_object, path[1:])
            self.inner = StateStreamInner(self.__send, root_object, loc, self.__noteDirty)  # note reuse of loc as HTTP path; probably will regret this
        else:
            raise Exception('Unknown path: %r' % (path,))

    def connectionMade(self):
        """twisted Protocol implementation"""
        self.transport.setBinaryMode(True)
        # Unfortunately, txWS calls this too soon for transport.location to be available

    def connectionLost(self, reason):
        """twisted Protocol implementation"""
        if self.inner is not None:
            self.inner.connectionLost(reason)

    def __send(self, message, safe_to_drop=False):
        if len(self.transport.transport.dataBuffer) > 1000000:
            # TODO: condition is horrible implementation-diving kludge
            # Don't accumulate indefinite buffer if we aren't successfully getting it onto the network.
            if safe_to_drop:
                log.err('Dropping data going to stream ' + self.transport.location)
            else:
                log.err('Dropping connection due to too much data on stream ' + self.transport.location)
                self.transport.close(reason='Too much data buffered')
        else:
            self.transport.write(message)
class OurStreamFactory(protocol.Factory):
    """Factory producing OurStreamProtocol instances that share the same
    capability table and dirty-notification hook."""

    protocol = OurStreamProtocol

    def __init__(self, caps, noteDirty):
        self.__caps = caps
        self.__noteDirty = noteDirty

    def buildProtocol(self, addr):
        """twisted Factory implementation"""
        proto = self.protocol(self.__caps, self.__noteDirty)
        proto.factory = self
        return proto
class IClientResourceDef(Interface):
    '''
    Client plugin interface object
    '''
    # Only needed to make the plugin system work
    # TODO write interface methods anyway
class ClientResourceDef(object):
    """Plugin descriptor: a static web resource plus optional CSS/JS paths
    (relative to that resource) the client should load."""
    implements(IPlugin, IClientResourceDef)

    def __init__(self, key, resource, load_css_path=None, load_js_path=None):
        self.key = key
        self.resource = resource
        self.load_css_path = load_css_path
        self.load_js_path = load_js_path
def _make_static(filePath):
    """Create a twisted static.File for filePath with this site's conventions
    (CSV content type, index.html as index, extension-less .html URLs)."""
    resource = static.File(filePath)
    resource.contentTypes['.csv'] = 'text/csv'
    resource.indexNames = ['index.html']
    resource.ignoreExt('.html')
    return resource
def _reify(parent, name):
    '''
    Construct an explicit twisted.web.static.File child identical to the implicit one so that non-filesystem children can be added to it.
    '''
    # createSimilarFile copies the parent's content-type/index configuration.
    r = parent.createSimilarFile(parent.child(name).path)
    parent.putChild(name, r)
    return r
def _strport_to_url(desc, scheme='http', path='/', socket_port=0):
    '''Construct a URL from a twisted.application.strports string.'''
    # TODO: need to know canonical domain name, not localhost; can we extract from the ssl cert?
    # TODO: strports.parse is deprecated
    (method, args, _) = strports.parse(desc, None)
    if socket_port == 0:
        # Fall back to the port parsed out of the strports description.
        socket_port = args[0]
    if method == 'TCP':
        return '%s://localhost:%s%s' % (scheme, str(socket_port), path)
    elif method == 'SSL':
        # Secure variant of the given scheme, e.g. http -> https.
        return '%ss://localhost:%s%s' % (scheme, str(socket_port), path)
    else:
        # TODO better error return
        return '???'
class _RadioIndexHtmlElement(template.Element):
    """Template element for the top-level index page; fills in the page title."""
    loader = template.XMLFile(os.path.join(_templatePath, 'index.template.xhtml'))

    def __init__(self, title):
        self.__title = unicode(title)

    @template.renderer
    def title(self, request, tag):
        return tag(self.__title)
class _RadioIndexHtmlResource(Resource):
    """HTTP resource serving the rendered index page."""
    isLeaf = True

    def __init__(self, title):
        self.__element = _RadioIndexHtmlElement(title)

    def render_GET(self, request):
        return renderElement(request, self.__element)
def renderElement(request, element):
    """Render a template element into `request` asynchronously.

    Writes an HTML5 doctype, flattens the element into the request, and
    finishes the request when flattening completes (successfully or not).
    """
    # per http://stackoverflow.com/questions/8160061/twisted-web-resource-resource-with-twisted-web-template-element-example
    # should be replaced with twisted.web.template.renderElement once we have Twisted >= 12.1.0 available in MacPorts.
    # TODO: Instead of this kludge (here because it would be a syntax error in the XHTML template}, serve XHTML and fix the client-side issues that pop up due to element-name capitalization.
    request.write('<!doctype html>')
    d = template.flatten(request, element, request.write)

    def done(ignored):
        request.finish()
        return ignored

    d.addBoth(done)
    return server.NOT_DONE_YET
class WebService(Service):
    """Twisted Service hosting both the HTTP resource tree and the WebSocket
    stream endpoint for the radio application."""
    # TODO: Too many parameters
    def __init__(self, reactor, root_object, note_dirty, read_only_dbs, writable_db, http_endpoint, ws_endpoint, root_cap, title, flowgraph_for_debug):
        self.__http_port = http_endpoint
        self.__ws_port = ws_endpoint
        # Roots of resource trees
        # - appRoot is everything stateful/authority-bearing
        # - serverRoot is the HTTP '/' and static resources are placed there
        serverRoot = _make_static(staticResourcePath)
        if root_cap is None:
            # No capability token: the app is mounted directly at '/'.
            appRoot = serverRoot
            self.__visit_path = '/'
            ws_caps = {None: root_object}
        else:
            # NOTE(review): serverRoot is re-created here, shadowing the one
            # built above; looks redundant but is preserved as-is.
            serverRoot = _make_static(staticResourcePath)
            appRoot = _SlashedResource()
            serverRoot.putChild(root_cap, appRoot)
            self.__visit_path = '/' + urllib.quote(root_cap, safe='') + '/'
            ws_caps = {root_cap: root_object}
        self.__ws_protocol = txws.WebSocketFactory(OurStreamFactory(ws_caps, note_dirty))
        # UI entry point
        appRoot.putChild('', _RadioIndexHtmlResource(title))
        # Exported radio control objects
        appRoot.putChild('radio', BlockResource(root_object, note_dirty, not_deletable))
        # Frequency DB
        appRoot.putChild('dbs', shinysdr.db.DatabasesResource(read_only_dbs))
        appRoot.putChild('wdb', shinysdr.db.DatabaseResource(writable_db))
        # Debug graph
        appRoot.putChild('flow-graph', FlowgraphVizResource(reactor, flowgraph_for_debug))
        # Ephemeris
        appRoot.putChild('ephemeris', EphemerisResource())
        # Construct explicit resources for merge.
        test = _reify(serverRoot, 'test')
        jasmine = _reify(test, 'jasmine')
        for name in ['jasmine.css', 'jasmine.js', 'jasmine-html.js']:
            jasmine.putChild(name, static.File(os.path.join(
                os.path.dirname(__file__), 'deps/jasmine/lib/jasmine-core/', name)))
        client = _reify(serverRoot, 'client')
        client.putChild('require.js', static.File(os.path.join(
            os.path.dirname(__file__), 'deps/require.js')))
        client.putChild('text.js', static.File(os.path.join(
            os.path.dirname(__file__), 'deps/text.js')))
        _add_plugin_resources(client)
        self.__site = server.Site(serverRoot)
        self.__ws_port_obj = None
        self.__http_port_obj = None

    def startService(self):
        Service.startService(self)
        if self.__ws_port_obj is not None:
            raise Exception('Already started')
        self.__ws_port_obj = strports.listen(self.__ws_port, self.__ws_protocol)
        self.__http_port_obj = strports.listen(self.__http_port, self.__site)

    def stopService(self):
        Service.stopService(self)
        if self.__ws_port_obj is None:
            raise Exception('Not started, cannot stop')
        # TODO: Does Twisted already have something to bundle up a bunch of ports for shutdown?
        return defer.DeferredList([
            self.__http_port_obj.stopListening(),
            self.__ws_port_obj.stopListening()])

    def get_url(self):
        port_num = self.__http_port_obj.socket.getsockname()[1]  # TODO touching implementation, report need for a better way (web_port_obj.port is 0 if specified port is 0, not actual port)
        return _strport_to_url(self.__http_port, socket_port=port_num, path=self.__visit_path)

    def announce(self, open_client):
        '''interface used by shinysdr.main'''
        url = self.get_url()
        if open_client:
            log.msg('Opening ' + url)
            import webbrowser  # lazy load
            webbrowser.open(url, new=1, autoraise=True)
        else:
            log.msg('Visit ' + url)
def _add_plugin_resources(client_resource):
    """Attach plugin resources under client/plugins/ and publish
    plugin-index.json telling the client which CSS/JS to load and
    which modes are available."""
    # Plugin resources and plugin info
    load_list_css = []
    load_list_js = []
    mode_table = {}
    plugin_resources = Resource()
    client_resource.putChild('plugins', plugin_resources)
    for resource_def in getPlugins(IClientResourceDef, shinysdr.plugins):
        # Add the plugin's resource to static serving
        plugin_resources.putChild(resource_def.key, resource_def.resource)
        plugin_resource_url = '/client/plugins/' + urllib.quote(resource_def.key, safe='') + '/'
        # Tell the client to load the plugins
        # TODO constrain path values to be relative (not on a different origin, to not leak urls)
        if resource_def.load_css_path is not None:
            # Fixed: attribute is load_css_path (was misspelled `load_cs_path`,
            # which raised AttributeError for any plugin supplying CSS).
            load_list_css.append(plugin_resource_url + resource_def.load_css_path)
        if resource_def.load_js_path is not None:
            # TODO constrain value to be in the directory
            load_list_js.append(plugin_resource_url + resource_def.load_js_path)
    for mode_def in get_modes():
        mode_table[mode_def.mode] = {
            u'label': mode_def.label,
            u'can_transmit': mode_def.mod_class is not None
        }
    # Client gets info about plugins through this resource
    client_resource.putChild('plugin-index.json', static.Data(_serialize({
        u'css': load_list_css,
        u'js': load_list_js,
        u'modes': mode_table,
    }).encode('utf-8'), 'application/json'))
| gpl-3.0 |
transientskp/aartfaac-arthur | arthur/stream.py | 1 | 2792 | import os
from casacore.images import image as casa_image
from PIL import Image
import subprocess
import time
import monotonic
import logging
import atexit
logger = logging.getLogger(__name__)
FPS = 25  # video frame rate fed to the encoder

# Base ffmpeg command line; the RTMP target URL is appended by
# setup_stream_pipe(). Raw grayscale 1024x1024 frames are read from stdin.
cmd = ["ffmpeg",
       # for ffmpeg always first set input then output
       # silent audio
       '-f', 'lavfi',
       '-i', 'anullsrc=channel_layout=stereo:sample_rate=44100',
       # image
       '-re',
       '-f', 'rawvideo',  # probably required for reading from stdin
       '-s', '1024x1024',  # should match image size
       '-pix_fmt', 'gray',
       '-i', '-',  # read from stdin
       # encoding settings
       "-r", str(FPS),  # the framerate
       "-vcodec", "libx264",  # probably required for flv & rtmp
       "-preset", "ultrafast",  # the encoding quality preset
       "-g", "20",
       "-codec:a", "libmp3lame",  # mp3 for audio
       "-ar", "44100",  # 44k audio rate
       "-threads", "6",
       "-bufsize", "512k",
       "-f", "flv",  # required for rtmp
       ]
def setup_stream_pipe(rtmp_url):
    """
    Setup a encoding process where you can pipe images to.
    args:
        rtmp_url (str): a rtmp url, for example rtmp://a.rtmp.youtube.com/live2/{SECRET}
    returns:
        subprocess.Popen: a subprocess pipe. Use pipe.stdin.write for writing images.
    """
    proc = subprocess.Popen(cmd + [rtmp_url], stdin=subprocess.PIPE)
    # Ensure the encoder does not outlive this process.
    atexit.register(proc.kill)
    return proc
def serialize_array(array):
    """
    serialize a numpy array into a binary stream which can be streamed

    The array is min-max normalized to the 0-255 range and emitted as raw
    8-bit grayscale bytes. The input array is not modified.

    args:
        array (numpy.array)
    returns:
        bytes
    """
    # astype() copies, so the caller's array is never mutated (previously the
    # in-place `-=` wrote through the view returned by squeeze()), and float
    # arithmetic avoids Python 2 integer floor division in 255 / max.
    data = array.squeeze().astype('float64')
    data -= data.min()
    max_ = data.max()
    if max_ > 0:
        data *= 255.0 / max_
    arr = data.astype('uint8')
    im = Image.fromarray(arr, 'L')
    return im.tobytes()
def loop_images_in_path(path):
    """
    args:
        path (str): path to folder containing images
    returns:
        generator: yields casacore images
    """
    # Sort for deterministic playback order; loop over the set forever.
    images = sorted([os.path.join(path, i) for i in os.listdir(path)])
    while True:
        for image_path in images:
            yield casa_image(image_path).getdata()
def stream(frame, pipe):
    """
    stream the images returned by generated to rtmp server.
    args:
        frame: an image frame
        pipe (subprocess.Popen): a pipe created with setup_stream_pipe()
    """
    logger.debug("streaming new image")
    serialised = serialize_array(frame)
    # Write the same frame FPS times, i.e. one second of video at FPS fps.
    for i in range(FPS):
        pipe.stdin.write(serialised)
    duty_cycle = 1  # seconds
    if pipe.poll():
        # poll() is non-None once the encoder process has exited.
        print("looks like the video encoder died!")
        return
    # Sleep until the next whole duty-cycle boundary on the monotonic clock,
    # pacing frame submission to roughly one batch per second.
    time.sleep(duty_cycle - monotonic.monotonic() % duty_cycle)
| gpl-3.0 |
ktkirk/HSSI | IoT/requests/packages/chardet/escprober.py | 2936 | 3187 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel,
ISO2022KRSMModel)
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .compat import wrap_ord
class EscCharSetProber(CharSetProber):
    """Prober for escape-sequence encodings (HZ-GB-2312, ISO-2022-CN/JP/KR).

    Runs one state machine per candidate encoding over the input and reports
    the first machine that positively identifies its encoding.
    """
    def __init__(self):
        CharSetProber.__init__(self)
        self._mCodingSM = [
            CodingStateMachine(HZSMModel),
            CodingStateMachine(ISO2022CNSMModel),
            CodingStateMachine(ISO2022JPSMModel),
            CodingStateMachine(ISO2022KRSMModel)
        ]
        self.reset()

    def reset(self):
        CharSetProber.reset(self)
        for codingSM in self._mCodingSM:
            if not codingSM:
                continue
            codingSM.active = True
            codingSM.reset()
        # Number of state machines still in the running.
        self._mActiveSM = len(self._mCodingSM)
        self._mDetectedCharset = None

    def get_charset_name(self):
        return self._mDetectedCharset

    def get_confidence(self):
        # Escape-sequence matches are nearly unambiguous, hence 0.99.
        if self._mDetectedCharset:
            return 0.99
        else:
            return 0.00

    def feed(self, aBuf):
        for c in aBuf:
            # PY3K: aBuf is a byte array, so c is an int, not a byte
            for codingSM in self._mCodingSM:
                if not codingSM:
                    continue
                if not codingSM.active:
                    continue
                codingState = codingSM.next_state(wrap_ord(c))
                if codingState == constants.eError:
                    # This machine rejected the byte stream; retire it.
                    codingSM.active = False
                    self._mActiveSM -= 1
                    if self._mActiveSM <= 0:
                        # Every candidate rejected: definitely not one of ours.
                        self._mState = constants.eNotMe
                        return self.get_state()
                elif codingState == constants.eItsMe:
                    # Positive identification by this machine.
                    self._mState = constants.eFoundIt
                    self._mDetectedCharset = codingSM.get_coding_state_machine()  # nopep8
                    return self.get_state()
        return self.get_state()
| bsd-2-clause |
DataDog/py-find-injection | setup.py | 2 | 1235 | from setuptools import setup, find_packages
# Package metadata for py-find-injection.
setup(
    name="py-find-injection",
    version="0.1.1",
    author="James Brown",
    author_email="jbrown@uber.com",
    url="https://github.com/uber/py-find-injection",
    description="simple python ast consumer which searches for common SQL injection attacks",
    license='MIT (Expat)',
    classifiers=[
        "Programming Language :: Python",
        "Operating System :: OS Independent",
        # Fixed: "Topic :: Security" was listed twice.
        "Topic :: Security",
        "Intended Audience :: Developers",
        "Development Status :: 3 - Alpha",
        "Programming Language :: Python :: 2.7",
        "License :: OSI Approved :: MIT License",
    ],
    packages=find_packages(exclude=["tests"]),
    entry_points={
        "console_scripts": [
            "py-find-injection = py_find_injection:main",
        ]
    },
    tests_require=["nose==1.3.0", "mock==1.0.1"],
    test_suite="nose.collector",
    long_description="""py_find_injection

Walks the AST and looks for arguments to cursor.execute or session.execute; then
determines whether string interpolation, concatenation or the .format() call is used
on those arguments. Not at all comprehensive, but better than nothing.
"""
)
| mit |
picardie-nature/clicnat-qgis | __init__.py | 1 | 1557 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Clicnat
A QGIS plugin
Plugin Clicnat
Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/
-------------------
begin : 2020-05-20
copyright : (C) 2020 by JB Desbas
email : jean-baptiste.desbas@picardie-nature.org
git sha : $Format:%H$
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
This script initializes the plugin, making it known to QGIS.
"""
# noinspection PyPep8Naming
def classFactory(iface):  # pylint: disable=invalid-name
    """Load Clicnat class from file Clicnat.
    :param iface: A QGIS interface instance.
    :type iface: QgsInterface
    """
    # Import here so this module stays importable outside a QGIS session.
    from .clicnat import Clicnat
    return Clicnat(iface)
| gpl-2.0 |
indashnet/InDashNet.Open.UN2000 | android/build/tools/fileslist.py | 14 | 1299 | #!/usr/bin/env python
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import operator, os, sys
def get_file_size(path):
    """Return the size in bytes of *path*, without following symlinks."""
    return os.lstat(path).st_size
def main(argv):
    """Walk each root directory named in argv[1:] and print every file's
    size and path, largest first, with paths relative to the root's parent."""
    output = []
    roots = argv[1:]
    for root in roots:
        # Length of the root's parent-directory prefix; stripping it makes
        # printed paths start at the root's basename.
        base = len(root[:root.rfind(os.path.sep)])
        for dir, dirs, files in os.walk(root):
            relative = dir[base:]
            for f in files:
                try:
                    row = (
                        get_file_size(os.path.sep.join((dir, f))),
                        os.path.sep.join((relative, f)),
                    )
                    output.append(row)
                except os.error:
                    # Skip files that vanish or cannot be stat'd.
                    pass
    # Sort by size, descending.
    output.sort(key=operator.itemgetter(0), reverse=True)
    for row in output:
        print "%12d %s" % row

if __name__ == '__main__':
    main(sys.argv)
| apache-2.0 |
no2a/ansible | lib/ansible/plugins/callback/timer.py | 6 | 1141 | # Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from datetime import datetime
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):
    """
    This callback module tells you how long your plays ran for.
    """
    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'aggregate'
    CALLBACK_NAME = 'timer'
    CALLBACK_NEEDS_WHITELIST = True

    def __init__(self, display):
        super(CallbackModule, self).__init__(display)
        # Wall-clock start of the whole playbook run.
        self.start_time = datetime.now()

    def days_hours_minutes_seconds(self, runtime):
        """Split a timedelta into a (days, hours, minutes, seconds) tuple.

        timedelta normalizes so that 0 <= runtime.seconds < 86400; days are
        carried separately in runtime.days.
        """
        minutes = (runtime.seconds // 60) % 60
        # Fixed: previously `runtime.seconds - (minutes * 60)`, which failed
        # to subtract whole hours (e.g. 1h01m05s reported 3605 seconds).
        r_seconds = runtime.seconds % 60
        return runtime.days, runtime.seconds // 3600, minutes, r_seconds

    def playbook_on_stats(self, stats):
        # v1 callback API entry point; delegate to the v2 implementation.
        self.v2_playbook_on_stats(stats)

    def v2_playbook_on_stats(self, stats):
        end_time = datetime.now()
        runtime = end_time - self.start_time
        self._display.display("Playbook run took %s days, %s hours, %s minutes, %s seconds" % (self.days_hours_minutes_seconds(runtime)))
le9i0nx/ansible | lib/ansible/modules/storage/zfs/zfs.py | 33 | 9335 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2013, Johan Wiren <johan.wiren.se@gmail.com>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Support/status metadata consumed by Ansible's module tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: zfs
short_description: Manage zfs
description:
- Manages ZFS file systems, volumes, clones and snapshots
version_added: "1.1"
options:
name:
description:
- File system, snapshot or volume name e.g. C(rpool/myfs).
required: true
state:
description:
- Whether to create (C(present)), or remove (C(absent)) a
file system, snapshot or volume. All parents/children
will be created/destroyed as needed to reach the desired state.
choices: [ absent, present ]
required: true
origin:
description:
- Snapshot from which to create a clone.
key_value:
description:
- (**DEPRECATED**) This will be removed in Ansible-2.9. Set these values in the
- C(extra_zfs_properties) option instead.
- The C(zfs) module takes key=value pairs for zfs properties to be set.
- See the zfs(8) man page for more information.
extra_zfs_properties:
description:
- A dictionary of zfs properties to be set.
- See the zfs(8) man page for more information.
version_added: "2.5"
author:
- Johan Wiren (@johanwiren)
'''
EXAMPLES = '''
- name: Create a new file system called myfs in pool rpool with the setuid property turned off
zfs:
name: rpool/myfs
state: present
extra_zfs_properties:
setuid: off
- name: Create a new volume called myvol in pool rpool.
zfs:
name: rpool/myvol
state: present
extra_zfs_properties:
volsize: 10M
- name: Create a snapshot of rpool/myfs file system.
zfs:
name: rpool/myfs@mysnapshot
state: present
- name: Create a new file system called myfs2 with snapdir enabled
zfs:
name: rpool/myfs2
state: present
extra_zfs_properties:
snapdir: enabled
- name: Create a new file system by cloning a snapshot
zfs:
name: rpool/cloned_fs
state: present
extra_zfs_properties:
origin: rpool/myfs@mysnapshot
- name: Destroy a filesystem
zfs:
name: rpool/myfs
state: absent
'''
import os
from ansible.module_utils.basic import AnsibleModule
class Zfs(object):
    """Thin wrapper around the ``zfs``/``zpool`` CLIs for one dataset.

    ``name`` is the dataset (filesystem, volume, clone or snapshot) to
    manage and ``properties`` the zfs properties to apply to it.
    ``self.changed`` records whether any command actually altered state.
    """

    def __init__(self, module, name, properties):
        self.module = module
        self.name = name
        self.properties = properties
        self.changed = False
        self.zfs_cmd = module.get_bin_path('zfs', True)
        self.zpool_cmd = module.get_bin_path('zpool', True)
        # The pool is the first path component of the dataset name.
        self.pool = name.split('/')[0]
        self.is_solaris = os.uname()[0] == 'SunOS'
        self.is_openzfs = self.check_openzfs()
        self.enhanced_sharing = self.check_enhanced_sharing()

    def check_openzfs(self):
        """Return True when the pool runs OpenZFS ('-' or version 5000)."""
        cmd = [self.zpool_cmd]
        cmd.extend(['get', 'version'])
        cmd.append(self.pool)
        (rc, out, err) = self.module.run_command(cmd, check_rc=True)
        version = out.splitlines()[-1].split()[2]
        if version == '-':
            # Feature-flag pools report '-' instead of a numeric version.
            return True
        if int(version) == 5000:
            return True
        return False

    def check_enhanced_sharing(self):
        """Return True when Solaris ``share.*`` properties are available.

        Only native Solaris ZFS with pool version >= 34 supports them;
        OpenZFS pools do not.
        """
        if self.is_solaris and not self.is_openzfs:
            cmd = [self.zpool_cmd]
            cmd.extend(['get', 'version'])
            cmd.append(self.pool)
            (rc, out, err) = self.module.run_command(cmd, check_rc=True)
            version = out.splitlines()[-1].split()[2]
            if int(version) >= 34:
                return True
        return False

    def exists(self):
        """Return True when the dataset (of any type) already exists."""
        cmd = [self.zfs_cmd, 'list', '-t', 'all', self.name]
        (rc, out, err) = self.module.run_command(' '.join(cmd))
        if rc == 0:
            return True
        else:
            return False

    def create(self):
        """Create the dataset.

        The action is inferred from the request: snapshot when the name
        contains '@', clone when an ``origin`` property was supplied,
        plain create otherwise.  Honors check mode.
        """
        if self.module.check_mode:
            self.changed = True
            return
        properties = self.properties
        # These three are creation-time flags, not settable properties.
        volsize = properties.pop('volsize', None)
        volblocksize = properties.pop('volblocksize', None)
        origin = properties.pop('origin', None)
        cmd = [self.zfs_cmd]

        if "@" in self.name:
            action = 'snapshot'
        elif origin:
            action = 'clone'
        else:
            action = 'create'

        cmd.append(action)

        if action in ['create', 'clone']:
            # -p creates any missing parent datasets.
            cmd += ['-p']

        if volsize:
            cmd += ['-V', volsize]
        if volblocksize:
            cmd += ['-b', volblocksize]
        if properties:
            for prop, value in properties.items():
                cmd += ['-o', '%s="%s"' % (prop, value)]
        if origin:
            cmd.append(origin)
        cmd.append(self.name)

        (rc, out, err) = self.module.run_command(' '.join(cmd))
        if rc == 0:
            self.changed = True
        else:
            self.module.fail_json(msg=err)

    def destroy(self):
        """Destroy the dataset and (-R) all of its dependents/clones."""
        if self.module.check_mode:
            self.changed = True
            return
        cmd = [self.zfs_cmd, 'destroy', '-R', self.name]
        (rc, out, err) = self.module.run_command(' '.join(cmd))
        if rc == 0:
            self.changed = True
        else:
            self.module.fail_json(msg=err)

    def set_property(self, prop, value):
        """Set a single zfs property on the dataset.  Honors check mode."""
        if self.module.check_mode:
            self.changed = True
            return
        cmd = [self.zfs_cmd, 'set', prop + '=' + str(value), self.name]
        (rc, out, err) = self.module.run_command(cmd)
        if rc == 0:
            self.changed = True
        else:
            self.module.fail_json(msg=err)

    def set_properties_if_changed(self):
        """Apply only those requested properties whose value differs."""
        current_properties = self.get_current_properties()
        for prop, value in self.properties.items():
            if current_properties.get(prop, None) != value:
                self.set_property(prop, value)

    def get_current_properties(self):
        """Return the dataset's locally-set properties as a dict."""
        cmd = [self.zfs_cmd, 'get', '-H']
        if self.enhanced_sharing:
            # -e resolves Solaris enhanced-sharing property values.
            cmd += ['-e']
        cmd += ['all', self.name]
        rc, out, err = self.module.run_command(" ".join(cmd))
        properties = dict()
        # `zfs get -H` output is tab-separated: name, property, value, source.
        for prop, value, source in [l.split('\t')[1:4] for l in out.splitlines()]:
            if source == 'local':
                properties[prop] = value
        # Add alias for enhanced sharing properties
        if self.enhanced_sharing:
            properties['sharenfs'] = properties.get('share.nfs', None)
            properties['sharesmb'] = properties.get('share.smb', None)
        return properties
def main():
    """Module entry point: converge the dataset to the requested state."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            state=dict(type='str', required=True, choices=['absent', 'present']),
            # No longer used. Deprecated and due for removal
            createparent=dict(type='bool', default=None),
            extra_zfs_properties=dict(type='dict', default={}),
        ),
        supports_check_mode=True,
        # Remove this in Ansible 2.9
        check_invalid_arguments=False,
    )

    state = module.params.pop('state')
    name = module.params.pop('name')

    # The following is deprecated. Remove in Ansible 2.9
    # Get all valid zfs-properties
    properties = dict()
    for prop, value in module.params.items():
        # All freestyle params are zfs properties
        if prop not in module.argument_spec:
            # Normalise booleans to the 'on'/'off' strings zfs expects.
            if isinstance(value, bool):
                if value is True:
                    properties[prop] = 'on'
                else:
                    properties[prop] = 'off'
            else:
                properties[prop] = value

    if properties:
        module.deprecate('Passing zfs properties as arbitrary parameters to the zfs module is'
                         ' deprecated. Send them as a dictionary in the extra_zfs_properties'
                         ' parameter instead.', version='2.9')

    # Merge, giving the explicit extra_zfs_properties precedence over the
    # deprecated freestyle parameters collected above.
    for prop, value in module.params['extra_zfs_properties'].items():
        properties[prop] = value

    # Bug fix: the merged result was previously stored under the misspelled
    # key 'extras_zfs_properties', so the deprecated freestyle parameters
    # were silently dropped before being applied below.
    module.params['extra_zfs_properties'] = properties
    # End deprecated section

    # Reverse the boolification of zfs properties
    for prop, value in module.params['extra_zfs_properties'].items():
        if isinstance(value, bool):
            if value is True:
                module.params['extra_zfs_properties'][prop] = 'on'
            else:
                module.params['extra_zfs_properties'][prop] = 'off'
        else:
            module.params['extra_zfs_properties'][prop] = value

    result = dict(
        name=name,
        state=state,
    )

    zfs = Zfs(module, name, module.params['extra_zfs_properties'])

    if state == 'present':
        if zfs.exists():
            zfs.set_properties_if_changed()
        else:
            zfs.create()
    elif state == 'absent':
        if zfs.exists():
            zfs.destroy()

    result.update(zfs.properties)
    result['changed'] = zfs.changed
    module.exit_json(**result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
kamalx/edx-platform | openedx/core/djangoapps/credit/tests/test_models.py | 5 | 3016 | # -*- coding: utf-8 -*-
"""
Tests for credit course models.
"""
import ddt
from django.test import TestCase
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.credit.models import CreditCourse, CreditRequirement
@ddt.ddt
class CreditEligibilityModelTests(TestCase):
    """
    Tests for credit models used to track credit eligibility.
    """

    def setUp(self, **kwargs):
        super(CreditEligibilityModelTests, self).setUp()
        # Every test case operates on the same demo course key.
        self.course_key = CourseKey.from_string("edX/DemoX/Demo_Course")

    @ddt.data(False, True)
    def test_is_credit_course(self, is_credit):
        """A course is credit-bearing only when its record is enabled."""
        CreditCourse(course_key=self.course_key, enabled=is_credit).save()
        if is_credit:
            self.assertTrue(CreditCourse.is_credit_course(self.course_key))
        else:
            self.assertFalse(CreditCourse.is_credit_course(self.course_key))

    def test_get_course_requirements(self):
        """A newly added requirement is retrievable for the course."""
        credit_course = self.add_credit_course()
        requirement = {
            "namespace": "grade",
            "name": "grade",
            "display_name": "Grade",
            "criteria": {
                "min_grade": 0.8
            }
        }
        credit_req, created = CreditRequirement.add_or_update_course_requirement(credit_course, requirement)
        self.assertEqual(credit_course, credit_req.course)
        self.assertEqual(created, True)
        requirements = CreditRequirement.get_course_requirements(self.course_key)
        self.assertEqual(len(requirements), 1)

    def test_add_course_requirement_namespace(self):
        """Requirements from different namespaces can be filtered apart."""
        credit_course = self.add_credit_course()
        requirement = {
            "namespace": "grade",
            "name": "grade",
            "display_name": "Grade",
            "criteria": {
                "min_grade": 0.8
            }
        }
        credit_req, created = CreditRequirement.add_or_update_course_requirement(credit_course, requirement)
        self.assertEqual(credit_course, credit_req.course)
        self.assertEqual(created, True)

        requirement = {
            "namespace": "reverification",
            "name": "i4x://edX/DemoX/edx-reverification-block/assessment_uuid",
            "display_name": "Assessment 1",
            "criteria": {}
        }
        credit_req, created = CreditRequirement.add_or_update_course_requirement(credit_course, requirement)
        self.assertEqual(credit_course, credit_req.course)
        self.assertEqual(created, True)

        # Two requirements total; exactly one in the 'grade' namespace.
        requirements = CreditRequirement.get_course_requirements(self.course_key)
        self.assertEqual(len(requirements), 2)
        requirements = CreditRequirement.get_course_requirements(self.course_key, namespace="grade")
        self.assertEqual(len(requirements), 1)

    def add_credit_course(self):
        """ Add the course as a credit

        Returns:
            CreditCourse object
        """
        credit_course = CreditCourse(course_key=self.course_key, enabled=True)
        credit_course.save()
        return credit_course
| agpl-3.0 |
gotlium/django-geoip-redis | geoip/management/commands/update_geo_db.py | 1 | 2560 | # -*- coding: utf-8 -*-
import thread
import time
import sys
import os
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import NoArgsCommand
from django.core.management import call_command
from django.utils.termcolors import colorize
from requests import get
from geoip.redis_wrapper import RedisSync
from geoip.defaults import DB_IMPORT_URL
# Local filename the downloaded dump is saved under (basename of the URL).
DB_FILE_NAME = os.path.basename(DB_IMPORT_URL)
class Command(NoArgsCommand):
    """Re-download the GeoIP dump and rebuild database/redis state.

    The slow download/import runs in a background thread (Python 2
    ``thread`` module) while the main thread renders a spinner and
    per-task progress lines on stdout.
    """

    # Handshake between the worker thread and the progress loop:
    # '' = idle, non-empty string = label of the task in progress,
    # None = worker finished, stop the progress loop.
    _status = ''

    def handle_noargs(self, **options):
        sys.stdout.write(colorize(
            'Please, wait. This will take some time\n', fg='magenta'))
        self._clean_database()
        self._clean_redis()
        self._sync()

    def _clean_database(self):
        # Delete every row of every model registered in the geoip app.
        for model in ContentType.objects.filter(app_label='geoip'):
            model.model_class().objects.all().delete()

    def _clean_redis(self):
        sync = RedisSync()
        sync.clean_all()

    def _sync(self):
        # Worker thread does the slow work; this thread animates progress.
        thread.start_new_thread(self._bg_sync, ())
        self._progress()

    def _bg_sync(self):
        try:
            self.__download()
            self.__loaddata()
        except:
            # NOTE(review): bare except hides *all* failures (including
            # SystemExit/KeyboardInterrupt) behind a generic message.
            sys.stdout.write(' Error\n')
        self.__clean()
        # Signal the progress loop to finish.
        self._status = None

    def _progress_line(self, label, task, end='', color='white'):
        # \r rewrites the current console line in place.
        label = colorize('[%s]' % label, fg=color)
        sys.stdout.write("\r%s %s ...%s" % (label, task, end))
        sys.stdout.flush()

    def __download(self):
        self._status = 'downloading database'
        open(DB_FILE_NAME, 'w').write(get(DB_IMPORT_URL).content)

    def __loaddata(self):
        self._status = 'importing database'
        call_command('loaddata', DB_FILE_NAME, verbosity=0)
        time.sleep(3)

    def __clean(self):
        self._status = 'cleaning'
        if os.path.exists(DB_FILE_NAME):
            os.unlink(DB_FILE_NAME)
        time.sleep(2)

    def _progress(self, i=0):
        # Spinner frames cycled once per second while a task is running.
        progress = ['-', '\\', '|', '/']
        old_status = None
        while True:
            if self._status:
                if self._status and old_status and self._status != old_status:
                    # Task changed: close out the previous line as 'done'.
                    self._progress_line('done', old_status, '\n', 'green')
                else:
                    self._progress_line(progress[i], self._status)
                old_status = self._status
            time.sleep(1)
            i = 0 if i == 3 else i + 1
            if self._status is None:
                self._progress_line('done', old_status, '\n', 'green')
                break
| gpl-3.0 |
prakritish/ansible | test/sanity/code-smell/ansible-var-precedence-check.py | 27 | 18559 | #!/usr/bin/env python
# A tool to check the order of precedence for ansible variables
# https://github.com/ansible/ansible/blob/devel/test/integration/test_var_precedence.yml
import json
import os
import sys
import shutil
import stat
import subprocess
import tempfile
import yaml
from pprint import pprint
from optparse import OptionParser
from jinja2 import Environment
# Shared Jinja2 environment used to render the dynamic-inventory script.
ENV = Environment()
# Scratch directory every scenario is staged in (recreated between runs).
TESTDIR = tempfile.mkdtemp()
def run_command(args, cwd=None):
    """Run *args* through the shell, returning (returncode, stdout, stderr)."""
    proc = subprocess.Popen(args,
                            shell=True,
                            cwd=cwd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    return proc.returncode, stdout, stderr
def clean_test_dir():
    """Wipe and recreate the scratch directory used to stage scenarios."""
    if os.path.isdir(TESTDIR):
        shutil.rmtree(TESTDIR)
    os.makedirs(TESTDIR)
class Role(object):
    """Builder for an on-disk ansible role under TESTDIR/roles/<name>.

    Flags select which variable sources the role provides:
      * ``defaults``     -> roles/<name>/defaults/main.yml
      * ``vars``         -> roles/<name>/vars/main.yml
      * ``dependencies`` -> roles/<name>/meta/main.yml
    Each defaults/vars file sets ``findme`` to the role name so the test
    play can detect which source won.
    """

    def __init__(self, name):
        self.name = name
        self.load = True
        self.dependencies = []
        self.defaults = False
        self.vars = False
        self.tasks = []
        self.params = dict()

    @staticmethod
    def _makedirs(fpath):
        """Create *fpath* (with parents) unless it already exists."""
        if not os.path.isdir(fpath):
            os.makedirs(fpath)

    def write_role(self):
        """Materialise the role's directory tree and YAML files."""
        self._makedirs(os.path.join(TESTDIR, 'roles', self.name))

        # Text mode ('w', not 'wb') throughout: these are str writes, which
        # would raise TypeError on a binary handle under Python 3.
        if self.defaults:
            # roles/x/defaults/main.yml
            fpath = os.path.join(TESTDIR, 'roles', self.name, 'defaults')
            self._makedirs(fpath)
            with open(os.path.join(fpath, 'main.yml'), 'w') as f:
                f.write('findme: %s\n' % self.name)

        if self.vars:
            # roles/x/vars/main.yml
            fpath = os.path.join(TESTDIR, 'roles', self.name, 'vars')
            self._makedirs(fpath)
            with open(os.path.join(fpath, 'main.yml'), 'w') as f:
                f.write('findme: %s\n' % self.name)

        if self.dependencies:
            # roles/x/meta/main.yml
            fpath = os.path.join(TESTDIR, 'roles', self.name, 'meta')
            self._makedirs(fpath)
            with open(os.path.join(fpath, 'main.yml'), 'w') as f:
                f.write('dependencies:\n')
                for dep in self.dependencies:
                    f.write('- { role: %s }\n' % dep)
class DynamicInventory(object):
    """Builds and writes an executable dynamic-inventory script.

    The generated script just prints a JSON inventory whose content is
    driven by the requested ``features`` (script_host / script_child /
    script_parent / script_all).
    """

    BASESCRIPT = '''#!/usr/bin/python
import json
data = """{{ data }}"""
data = json.loads(data)
print(json.dumps(data, indent=2, sort_keys=True))
'''

    BASEINV = {
        '_meta': {
            'hostvars': {
                'testhost': {}
            }
        }
    }

    def __init__(self, features):
        import copy
        self.ENV = Environment()
        self.features = features
        self.fpath = None
        # Bug fix: dict.copy() is shallow, so every instance (and the class
        # attribute BASEINV itself) shared the nested '_meta'/'hostvars'
        # dicts, and build() mutated them in place.  Deep-copy instead.
        self.inventory = copy.deepcopy(self.BASEINV)
        self.build()

    def build(self):
        """Populate self.inventory according to the requested features."""
        xhost = 'testhost'
        if 'script_host' in self.features:
            self.inventory['_meta']['hostvars'][xhost]['findme'] = 'script_host'
        else:
            self.inventory['_meta']['hostvars'][xhost] = {}

        if 'script_child' in self.features:
            self.inventory['child'] = {
                'hosts': [xhost],
                'vars': {'findme': 'script_child'}
            }

        if 'script_parent' in self.features:
            self.inventory['parent'] = {
                'vars': {'findme': 'script_parent'}
            }
            # Parent contains the child group when both are requested,
            # otherwise it holds the host directly.
            if 'script_child' in self.features:
                self.inventory['parent']['children'] = ['child']
            else:
                self.inventory['parent']['hosts'] = [xhost]

        if 'script_all' in self.features:
            self.inventory['all'] = {
                'hosts': [xhost],
                'vars': {
                    'findme': 'script_all'
                },
            }
        else:
            self.inventory['all'] = {
                'hosts': [xhost],
            }

    def write_script(self):
        """Render the inventory script to TESTDIR and mark it executable."""
        fdir = os.path.join(TESTDIR, 'inventory')
        if not os.path.isdir(fdir):
            os.makedirs(fdir)
        fpath = os.path.join(fdir, 'hosts')
        self.fpath = fpath
        data = json.dumps(self.inventory)
        t = self.ENV.from_string(self.BASESCRIPT)
        fdata = t.render(data=data)
        # 'w' (text) mode: fdata is a str, which a 'wb' handle would reject
        # under Python 3.
        with open(fpath, 'w') as f:
            f.write(fdata + '\n')
        st = os.stat(fpath)
        # ansible executes the inventory script, so set the exec bit.
        os.chmod(fpath, st.st_mode | stat.S_IEXEC)
class VarTestMaker(object):
    """Stages one variable-precedence scenario under TESTDIR.

    Each entry in ``features`` names a variable source (see ``fdesc`` in
    main()); build() lays out an inventory, playbook, roles and vars files
    so that every requested source defines ``findme``, and the generated
    assert task checks that ``features[0]`` is the value that wins.
    """

    def __init__(self, features, dynamic_inventory=False):
        clean_test_dir()
        self.dynamic_inventory = dynamic_inventory
        self.di = None
        # Copy: the caller's list is popped from elsewhere as tests pass.
        self.features = features[:]
        self.inventory = ''
        self.playvars = dict()
        self.varsfiles = []
        self.playbook = dict(hosts='testhost', gather_facts=False)
        self.tasks = []
        self.roles = []
        self.ansible_command = None
        self.stdout = None

    def write_playbook(self):
        """Serialise the assembled play (vars, roles, tasks) to site.yml."""
        fname = os.path.join(TESTDIR, 'site.yml')
        pb_copy = self.playbook.copy()

        if self.playvars:
            pb_copy['vars'] = self.playvars
        if self.varsfiles:
            pb_copy['vars_files'] = self.varsfiles
        if self.roles:
            pb_copy['roles'] = []
            for role in self.roles:
                # Write the role directory tree before referencing it.
                role.write_role()
                role_def = dict(role=role.name)
                role_def.update(role.params)
                pb_copy['roles'].append(role_def)
        if self.tasks:
            pb_copy['tasks'] = self.tasks

        # NOTE(review): yaml.dump streams straight into f, so pb_yaml is
        # just None; also str-into-'wb' is Python-2 only — confirm the
        # intended interpreter before reuse.
        with open(fname, 'wb') as f:
            pb_yaml = yaml.dump([pb_copy], f, default_flow_style=False, indent=2)

    def build(self):
        """Create all on-disk artifacts implied by the feature list."""
        if self.dynamic_inventory:
            # python based inventory file
            self.di = DynamicInventory(self.features)
            self.di.write_script()
        else:
            # ini based inventory file
            if 'ini_host' in self.features:
                self.inventory += 'testhost findme=ini_host\n'
            else:
                self.inventory += 'testhost\n'
            self.inventory += '\n'

            if 'ini_child' in self.features:
                self.inventory += '[child]\n'
                self.inventory += 'testhost\n'
                self.inventory += '\n'
                self.inventory += '[child:vars]\n'
                self.inventory += 'findme=ini_child\n'
                self.inventory += '\n'

            if 'ini_parent' in self.features:
                # Parent nests the child group when both are requested.
                if 'ini_child' in self.features:
                    self.inventory += '[parent:children]\n'
                    self.inventory += 'child\n'
                else:
                    self.inventory += '[parent]\n'
                    self.inventory += 'testhost\n'
                self.inventory += '\n'
                self.inventory += '[parent:vars]\n'
                self.inventory += 'findme=ini_parent\n'
                self.inventory += '\n'

            if 'ini_all' in self.features:
                self.inventory += '[all:vars]\n'
                self.inventory += 'findme=ini_all\n'
                self.inventory += '\n'

            # default to a single file called inventory
            invfile = os.path.join(TESTDIR, 'inventory', 'hosts')
            ipath = os.path.join(TESTDIR, 'inventory')
            if not os.path.isdir(ipath):
                os.makedirs(ipath)

            with open(invfile, 'wb') as f:
                f.write(self.inventory)

        # Inventory-adjacent host_vars/group_vars directories.
        hpath = os.path.join(TESTDIR, 'inventory', 'host_vars')
        if not os.path.isdir(hpath):
            os.makedirs(hpath)
        gpath = os.path.join(TESTDIR, 'inventory', 'group_vars')
        if not os.path.isdir(gpath):
            os.makedirs(gpath)

        if 'ini_host_vars_file' in self.features:
            hfile = os.path.join(hpath, 'testhost')
            with open(hfile, 'wb') as f:
                f.write('findme: ini_host_vars_file\n')

        if 'ini_group_vars_file_all' in self.features:
            hfile = os.path.join(gpath, 'all')
            with open(hfile, 'wb') as f:
                f.write('findme: ini_group_vars_file_all\n')

        if 'ini_group_vars_file_child' in self.features:
            hfile = os.path.join(gpath, 'child')
            with open(hfile, 'wb') as f:
                f.write('findme: ini_group_vars_file_child\n')

        if 'ini_group_vars_file_parent' in self.features:
            hfile = os.path.join(gpath, 'parent')
            with open(hfile, 'wb') as f:
                f.write('findme: ini_group_vars_file_parent\n')

        # Playbook-adjacent host_vars/group_vars directories.
        if 'pb_host_vars_file' in self.features:
            os.makedirs(os.path.join(TESTDIR, 'host_vars'))
            fname = os.path.join(TESTDIR, 'host_vars', 'testhost')
            with open(fname, 'wb') as f:
                f.write('findme: pb_host_vars_file\n')

        if 'pb_group_vars_file_parent' in self.features:
            if not os.path.isdir(os.path.join(TESTDIR, 'group_vars')):
                os.makedirs(os.path.join(TESTDIR, 'group_vars'))
            fname = os.path.join(TESTDIR, 'group_vars', 'parent')
            with open(fname, 'wb') as f:
                f.write('findme: pb_group_vars_file_parent\n')

        if 'pb_group_vars_file_child' in self.features:
            if not os.path.isdir(os.path.join(TESTDIR, 'group_vars')):
                os.makedirs(os.path.join(TESTDIR, 'group_vars'))
            fname = os.path.join(TESTDIR, 'group_vars', 'child')
            with open(fname, 'wb') as f:
                f.write('findme: pb_group_vars_file_child\n')

        if 'pb_group_vars_file_all' in self.features:
            if not os.path.isdir(os.path.join(TESTDIR, 'group_vars')):
                os.makedirs(os.path.join(TESTDIR, 'group_vars'))
            fname = os.path.join(TESTDIR, 'group_vars', 'all')
            with open(fname, 'wb') as f:
                f.write('findme: pb_group_vars_file_all\n')

        if 'play_var' in self.features:
            self.playvars['findme'] = 'play_var'

        if 'set_fact' in self.features:
            self.tasks.append(dict(set_fact='findme="set_fact"'))

        if 'vars_file' in self.features:
            self.varsfiles.append('varsfile.yml')
            fname = os.path.join(TESTDIR, 'varsfile.yml')
            with open(fname, 'wb') as f:
                f.write('findme: vars_file\n')

        if 'include_vars' in self.features:
            self.tasks.append(dict(include_vars='included_vars.yml'))
            fname = os.path.join(TESTDIR, 'included_vars.yml')
            with open(fname, 'wb') as f:
                f.write('findme: include_vars\n')

        if 'role_var' in self.features:
            role = Role('role_var')
            role.vars = True
            role.load = True
            self.roles.append(role)

        if 'role_parent_default' in self.features:
            # Dependency chain: role_parent_default depends on role_default,
            # both providing defaults.
            role = Role('role_default')
            role.load = False
            role.defaults = True
            self.roles.append(role)

            role = Role('role_parent_default')
            role.dependencies.append('role_default')
            role.defaults = True
            role.load = True
            if 'role_params' in self.features:
                role.params = dict(findme='role_params')
            self.roles.append(role)

        elif 'role_default' in self.features:
            role = Role('role_default')
            role.defaults = True
            role.load = True
            if 'role_params' in self.features:
                role.params = dict(findme='role_params')
            self.roles.append(role)

        # The assert task verifies the expected winner features[0].
        debug_task = dict(debug='var=findme')
        test_task = {'assert': dict(that=['findme == "%s"' % self.features[0]])}
        if 'task_vars' in self.features:
            test_task['vars'] = dict(findme="task_vars")
        if 'registered_vars' in self.features:
            test_task['register'] = 'findme'

        if 'block_vars' in self.features:
            block_wrapper = [
                debug_task,
                {
                    'block': [test_task],
                    'vars': dict(findme="block_vars"),
                }
            ]
        else:
            block_wrapper = [debug_task, test_task]

        if 'include_params' in self.features:
            self.tasks.append(dict(name='including tasks', include='included_tasks.yml', vars=dict(findme='include_params')))
        else:
            self.tasks.append(dict(include='included_tasks.yml'))

        fname = os.path.join(TESTDIR, 'included_tasks.yml')
        with open(fname, 'wb') as f:
            f.write(yaml.dump(block_wrapper))

        self.write_playbook()

    def run(self):
        """Execute ansible-playbook on the staged scenario; raise on failure.

        The single 'inventory' path covers both the ini file and the
        dynamic script, since both are written under TESTDIR/inventory.
        """
        cmd = 'ansible-playbook -c local -i inventory site.yml'
        if 'extra_vars' in self.features:
            cmd += ' --extra-vars="findme=extra_vars"'
        self.ansible_command = cmd
        (rc, so, se) = run_command(cmd, cwd=TESTDIR)
        self.stdout = so

        if rc != 0:
            raise Exception("playbook failed (rc=%s), stdout: '%s' stderr: '%s'" % (rc, so, se))

    def show_tree(self):
        """Print the scenario's directory tree (requires the 'tree' tool)."""
        print('## TREE')
        cmd = 'tree %s' % TESTDIR
        (rc, so, se) = run_command(cmd)
        lines = so.split('\n')
        # Drop tree's trailing summary lines.
        lines = lines[:-3]
        print('\n'.join(lines))

    def show_content(self):
        """Print the contents of every staged file."""
        print('## CONTENT')
        cmd = 'find %s -type f | xargs tail -n +1' % TESTDIR
        (rc, so, se) = run_command(cmd)
        print(so)

    def show_stdout(self):
        """Print the ansible command that was run and its captured output."""
        print('## COMMAND')
        print(self.ansible_command)
        print('## STDOUT')
        print(self.stdout)
def main():
    """Verify each variable source wins while lower-precedence ones remain.

    ``features`` is ordered most- to least-specific.  Each iteration stages
    a scenario containing every remaining source, asserts the head of the
    list is the one ansible resolves, then pops it and repeats.
    """
    features = [
        'extra_vars',
        'include_params',
        #'role_params',  # FIXME: we don't yet validate tasks within a role
        'set_fact',
        #'registered_vars',  # FIXME: hard to simulate
        'include_vars',
        #'role_dep_params',
        'task_vars',
        'block_vars',
        'role_var',
        'vars_file',
        'play_var',
        #'host_facts',  # FIXME: hard to simulate
        'pb_host_vars_file',
        'ini_host_vars_file',
        'ini_host',
        'pb_group_vars_file_child',
        'ini_group_vars_file_child',
        'pb_group_vars_file_parent',
        'ini_group_vars_file_parent',
        'pb_group_vars_file_all',
        'ini_group_vars_file_all',
        'ini_child',
        'ini_parent',
        'ini_all',
        'role_parent_default',
        'role_default',
    ]

    parser = OptionParser()
    parser.add_option('-f', '--feature', action='append')
    parser.add_option('--use_dynamic_inventory', action='store_true')
    parser.add_option('--show_tree', action='store_true')
    parser.add_option('--show_content', action='store_true')
    parser.add_option('--show_stdout', action='store_true')
    parser.add_option('--copy_testcases_to_local_dir', action='store_true')
    (options, args) = parser.parse_args()

    # Restrict the run to explicitly requested features, validating names.
    if options.feature:
        for f in options.feature:
            if f not in features:
                print('%s is not a valid feature' % f)
                sys.exit(1)
        features = [x for x in options.feature]

    # Human-readable description of each feature, used in progress output.
    fdesc = {
        'ini_host': 'host var inside the ini',
        'script_host': 'host var inside the script _meta',
        'ini_child': 'child group var inside the ini',
        'script_child': 'child group var inside the script',
        'ini_parent': 'parent group var inside the ini',
        'script_parent': 'parent group var inside the script',
        'ini_all': 'all group var inside the ini',
        'script_all': 'all group var inside the script',
        'ini_host_vars_file': 'var in inventory/host_vars/host',
        'ini_group_vars_file_parent': 'var in inventory/group_vars/parent',
        'ini_group_vars_file_child': 'var in inventory/group_vars/child',
        'ini_group_vars_file_all': 'var in inventory/group_vars/all',
        'pb_group_vars_file_parent': 'var in playbook/group_vars/parent',
        'pb_group_vars_file_child': 'var in playbook/group_vars/child',
        'pb_group_vars_file_all': 'var in playbook/group_vars/all',
        'pb_host_vars_file': 'var in playbook/host_vars/host',
        'play_var': 'var set in playbook header',
        'role_parent_default': 'var in roles/role_parent/defaults/main.yml',
        'role_default': 'var in roles/role/defaults/main.yml',
        'role_var': 'var in ???',
        'include_vars': 'var in included file',
        'set_fact': 'var made by set_fact',
        'vars_file': 'var in file added by vars_file',
        'block_vars': 'vars defined on the block',
        'task_vars': 'vars defined on the task',
        'extra_vars': 'var passed via the cli'
    }

    dinv = options.use_dynamic_inventory
    if dinv:
        # Some features are ini-specific, so swap those for their
        # script-inventory equivalents.
        for idx, x in enumerate(features):
            if x.startswith('ini_') and 'vars_file' not in x:
                features[idx] = x.replace('ini_', 'script_')

    index = 1
    while features:
        VTM = VarTestMaker(features, dynamic_inventory=dinv)
        VTM.build()

        if options.show_tree or options.show_content or options.show_stdout:
            print('')
        if options.show_tree:
            VTM.show_tree()
        if options.show_content:
            VTM.show_content()

        try:
            print("CHECKING: %s (%s)" % (features[0], fdesc.get(features[0], '')))
            VTM.run()
            if options.show_stdout:
                VTM.show_stdout()

            # Remember the feature just verified: it names the saved
            # testcase directory below.  (Bug fix: this previously
            # referenced an undefined name 'res', raising NameError
            # whenever --copy_testcases_to_local_dir was used.)
            completed = features.pop(0)

            if options.copy_testcases_to_local_dir:
                topdir = 'testcases'
                if index == 1 and os.path.isdir(topdir):
                    shutil.rmtree(topdir)
                if not os.path.isdir(topdir):
                    os.makedirs(topdir)
                # Zero-pad the index so directories sort naturally.
                thisindex = str(index)
                if len(thisindex) == 1:
                    thisindex = '0' + thisindex
                thisdir = os.path.join(topdir, '%s.%s' % (thisindex, completed))
                shutil.copytree(TESTDIR, thisdir)

        except Exception as e:
            print("ERROR !!!")
            print(e)
            print('feature: %s failed' % features[0])
            sys.exit(1)
        finally:
            # Always clear the scratch dir; VarTestMaker recreates it.
            shutil.rmtree(TESTDIR)

        index += 1


if __name__ == "__main__":
    main()
| gpl-3.0 |
GuillaumeBadi/Python-App-Engine | lib/requests/packages/chardet/langhebrewmodel.py | 2763 | 11318 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Simon Montagu
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Shoshannah Forbes - original C code (?)
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Windows-1255 language model
# Character Mapping Table:
win1255_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40
78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50
253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60
66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70
124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23,
12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 98.4004%
# first 1024 sequences: 1.5981%
# rest sequences: 0.087%
# negative sequences: 0.0015%
HebrewLangModel = (
0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
)
# Model descriptor consumed by the Hebrew charset prober: bundles the
# windows-1255 byte-to-order table and the Hebrew bigram language model
# (both presumably defined earlier in this module — the tuple ending just
# above looks like HebrewLangModel; confirm against the full file).
Win1255HebrewModel = {
    'charToOrderMap': win1255_CharToOrderMap,   # byte value -> character frequency-order index
    'precedenceMatrix': HebrewLangModel,        # bigram precedence/likelihood table
    'mTypicalPositiveRatio': 0.984004,          # typical fraction of "positive" bigram sequences in real Hebrew text
    'keepEnglishLetter': False,                 # ASCII letters are not fed into this model
    'charsetName': "windows-1255"
}
# flake8: noqa
| apache-2.0 |
sclabs/sccms-nonrel | django/core/management/commands/shell.py | 230 | 3263 | import os
from django.core.management.base import NoArgsCommand
from optparse import make_option
class Command(NoArgsCommand):
    """Run an interactive Python shell, preferring IPython or bpython.

    Falls back to the plain ``code.interact`` REPL when neither enhanced
    shell is installed or when ``--plain`` is requested.
    """
    option_list = NoArgsCommand.option_list + (
        make_option('--plain', action='store_true', dest='plain',
                    help='Tells Django to use plain Python, not IPython.'),
    )
    help = "Runs a Python interactive interpreter. Tries to use IPython, if it's available."
    # Candidate shells, probed in order by run_shell().
    shells = ['ipython', 'bpython']
    requires_model_validation = False

    def ipython(self):
        """Start IPython, supporting both the >= 0.11 API and the legacy one."""
        try:
            from IPython.frontend.terminal.embed import TerminalInteractiveShell
            shell = TerminalInteractiveShell()
            shell.mainloop()
        except ImportError:
            # IPython < 0.11
            # Explicitly pass an empty list as arguments, because otherwise
            # IPython would use sys.argv from this script.
            try:
                from IPython.Shell import IPShell
                shell = IPShell(argv=[])
                shell.mainloop()
            except ImportError:
                # IPython not found at all, raise ImportError so run_shell()
                # can move on to the next candidate.
                raise

    def bpython(self):
        """Start a bpython shell (raises ImportError if not installed)."""
        import bpython
        bpython.embed()

    def run_shell(self):
        """Try each shell in ``self.shells``; raise ImportError if none work."""
        for shell in self.shells:
            try:
                return getattr(self, shell)()
            except ImportError:
                pass
        raise ImportError

    def handle_noargs(self, **options):
        """Entry point: launch the best available shell for this invocation."""
        # XXX: (Temporary) workaround for ticket #1796: force early loading of all
        # models from installed apps.
        from django.db.models.loading import get_models
        loaded_models = get_models()
        use_plain = options.get('plain', False)
        try:
            if use_plain:
                # Don't bother loading IPython, because the user wants plain Python.
                raise ImportError
            self.run_shell()
        except ImportError:
            # Plain-Python fallback path.
            import code
            # Set up a dictionary to serve as the environment for the shell, so
            # that tab completion works on objects that are imported at runtime.
            # See ticket 5082.
            imported_objects = {}
            try:  # Try activating rlcompleter, because it's handy.
                import readline
            except ImportError:
                pass
            else:
                # We don't have to wrap the following import in a 'try', because
                # we already know 'readline' was imported successfully.
                import rlcompleter
                readline.set_completer(rlcompleter.Completer(imported_objects).complete)
                readline.parse_and_bind("tab:complete")
            # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system
            # conventions and get $PYTHONSTARTUP first then import user.
            if not use_plain:
                pythonrc = os.environ.get("PYTHONSTARTUP")
                if pythonrc and os.path.isfile(pythonrc):
                    try:
                        # NOTE: execfile is Python-2-only; this module predates Py3.
                        execfile(pythonrc)
                    except NameError:
                        pass
                # This will import .pythonrc.py as a side-effect
                import user
            code.interact(local=imported_objects)
| bsd-3-clause |
dominjune/LintCode | Triangle Count.py | 4 | 1265 | """
Given an array of integers, how many three numbers can be found in the array, so that we can build an triangle whose
three edges length is the three numbers that we find?
Example
Given array S = [3,4,6,7], return 3. They are:
[3,4,6]
[3,6,7]
[4,6,7]
Given array S = [4,4,4,4], return 4. They are:
[4(1),4(2),4(3)]
[4(1),4(2),4(4)]
[4(1),4(3),4(4)]
[4(2),4(3),4(4)]
"""
__author__ = 'Daniel'
class Solution:
    def triangleCount(self, S):
        """Count the triples in S that can form a triangle.

        Sort the edge lengths, then for each candidate longest edge S[h]
        run a two-pointer sweep over the shorter edges: whenever
        S[s] + S[e] > S[h], every start index in [s, e) also satisfies the
        triangle inequality with indices e and h, so e - s triples are
        counted in one step.

        Complexity: O(n^2) time after an O(n log n) sort, O(1) extra space.
        Note: sorts S in place, as the original implementation did.

        :param S: a list of integers (edge lengths)
        :return: the number of valid triangles, as an integer
        """
        # Fix: the original used Python-2-only xrange; range is behaviorally
        # equivalent for iteration and works on both Python 2 and 3.
        S.sort()
        cnt = 0
        for h in range(len(S) - 1, 1, -1):
            s = 0
            e = h - 1
            while s < e:
                if S[s] + S[e] > S[h]:
                    # S[s..e-1] each pair with S[e] to exceed S[h]:
                    # count them all at once, then shrink from the right.
                    cnt += e - s
                    e -= 1
                else:
                    s += 1
        return cnt
if __name__ == "__main__":
assert Solution().triangleCount([3, 4, 6, 7]) == 3 | apache-2.0 |
pombredanne/MOG | nova/tests/integrated/v3/test_extended_volumes.py | 11 | 6023 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.compute import api as compute_api
from nova.compute import manager as compute_manager
from nova import context
from nova import db
from nova.tests.api.openstack import fakes
from nova.tests.integrated.v3 import test_servers
from nova.volume import cinder
class ExtendedVolumesSampleJsonTests(test_servers.ServersSampleBase):
    """API sample tests for the os-extended-volumes v3 extension (JSON)."""
    extension_name = "os-extended-volumes"

    def _stub_compute_api_get_instance_bdms(self, server_id):
        """Stub compute API BDM lookup to return two fixed volume mappings."""
        def fake_compute_api_get_instance_bdms(self, context, instance):
            bdms = [
                {'volume_id': 'a26887c6-c47b-4654-abb5-dfadf7d3f803',
                 'instance_uuid': server_id,
                 'device_name': '/dev/sdd'},
                {'volume_id': 'a26887c6-c47b-4654-abb5-dfadf7d3f804',
                 'instance_uuid': server_id,
                 'device_name': '/dev/sdc'}
            ]
            return bdms
        self.stubs.Set(compute_api.API, "get_instance_bdms",
                       fake_compute_api_get_instance_bdms)

    def _stub_compute_api_get(self):
        """Stub instance lookup to return a minimal dict with just the uuid."""
        def fake_compute_api_get(self, context, instance_id,
                                 want_objects=False):
            return {'uuid': instance_id}
        self.stubs.Set(compute_api.API, 'get', fake_compute_api_get)

    def test_show(self):
        """GET one server and verify the response against the sample."""
        uuid = self._post_server()
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fakes.stub_bdm_get_all_by_instance)
        response = self._do_get('servers/%s' % uuid)
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('server-get-resp', subs, response, 200)

    def test_detail(self):
        """GET the server detail list and verify it against the sample."""
        uuid = self._post_server()
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fakes.stub_bdm_get_all_by_instance)
        response = self._do_get('servers/detail')
        subs = self._get_regexes()
        subs['id'] = uuid
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('servers-detail-resp', subs, response, 200)

    def test_attach_volume(self):
        """POST an attach-volume action; all cinder/compute calls are stubbed."""
        device_name = '/dev/vdd'
        self.stubs.Set(cinder.API, 'get', fakes.stub_volume_get)
        self.stubs.Set(cinder.API, 'check_attach', lambda *a, **k: None)
        self.stubs.Set(cinder.API, 'reserve_volume', lambda *a, **k: None)
        self.stubs.Set(compute_manager.ComputeManager,
                       "reserve_block_device_name",
                       lambda *a, **k: device_name)
        self.stubs.Set(compute_manager.ComputeManager,
                       'attach_volume',
                       lambda *a, **k: None)
        volume = fakes.stub_volume_get(None, context.get_admin_context(),
                                       'a26887c6-c47b-4654-abb5-dfadf7d3f803')
        subs = {
            'volume_id': volume['id'],
            'device': device_name
        }
        server_id = self._post_server()
        response = self._do_post('servers/%s/action'
                                 % server_id,
                                 'attach-volume-req', subs)
        # Attach is asynchronous: expect 202 with an empty body.
        self.assertEqual(response.status, 202)
        self.assertEqual(response.read(), '')

    def test_detach_volume(self):
        """POST a detach-volume action against a stubbed attached volume."""
        server_id = self._post_server()
        attach_id = "a26887c6-c47b-4654-abb5-dfadf7d3f803"
        self._stub_compute_api_get_instance_bdms(server_id)
        self._stub_compute_api_get()
        self.stubs.Set(cinder.API, 'get', fakes.stub_volume_get)
        self.stubs.Set(compute_api.API, 'detach_volume', lambda *a, **k: None)
        subs = {
            'volume_id': attach_id,
        }
        response = self._do_post('servers/%s/action'
                                 % server_id, 'detach-volume-req', subs)
        self.assertEqual(response.status, 202)
        self.assertEqual(response.read(), '')

    def test_swap_volume(self):
        """POST a swap-volume action; the old volume reads as attached to the
        server and the new one as detached."""
        server_id = self._post_server()
        old_volume_id = "a26887c6-c47b-4654-abb5-dfadf7d3f803"
        old_new_volume = 'a26887c6-c47b-4654-abb5-dfadf7d3f805'
        self._stub_compute_api_get_instance_bdms(server_id)

        def stub_volume_get(self, context, volume_id):
            if volume_id == old_volume_id:
                return fakes.stub_volume(volume_id, instance_uuid=server_id)
            else:
                return fakes.stub_volume(volume_id, instance_uuid=None,
                                         attach_status='detached')
        self.stubs.Set(cinder.API, 'get', stub_volume_get)
        self.stubs.Set(cinder.API, 'begin_detaching', lambda *a, **k: None)
        self.stubs.Set(cinder.API, 'check_attach', lambda *a, **k: None)
        self.stubs.Set(cinder.API, 'check_detach', lambda *a, **k: None)
        self.stubs.Set(cinder.API, 'reserve_volume', lambda *a, **k: None)
        self.stubs.Set(compute_manager.ComputeManager, 'swap_volume',
                       lambda *a, **k: None)
        subs = {
            'old_volume_id': old_volume_id,
            'new_volume_id': old_new_volume
        }
        response = self._do_post('servers/%s/action' % server_id,
                                 'swap-volume-req', subs)
        self.assertEqual(response.status, 202)
        self.assertEqual(response.read(), '')
class ExtendedVolumesSampleXmlTests(ExtendedVolumesSampleJsonTests):
    # Re-run every JSON sample test above with the XML content type.
    ctype = 'xml'
| apache-2.0 |
cherrygirl/micronaet7 | product_adr/__openerp__.py | 1 | 1595 | # -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
{
'name': 'Product ADR',
'version': '0.0.1',
'category': 'Generic Modules/Customization',
'description': """
Module for add a check in product list for ADR product
(this is important for offer and other documents)
Used also for import export with duty code
""",
'author': 'Micronaet s.r.l.',
'website': 'http://www.micronaet.it',
'depends': [
'base',
'product',
'sale',
],
'init_xml': [],
'update_xml': [
'security/ir.model.access.csv',
'product_views.xml',
],
'demo_xml': [],
'active': False,
'installable': True,
}
| agpl-3.0 |
BMJHayward/numpy | numpy/distutils/intelccompiler.py | 20 | 3054 | from __future__ import division, absolute_import, print_function
from distutils.unixccompiler import UnixCCompiler
from numpy.distutils.exec_command import find_executable
from numpy.distutils.msvc9compiler import MSVCCompiler
from numpy.distutils.ccompiler import simple_version_match
class IntelCCompiler(UnixCCompiler):
    """A modified Intel compiler compatible with a GCC-built Python."""
    compiler_type = 'intel'
    cc_exe = 'icc'
    cc_args = 'fPIC'

    def __init__(self, verbose=0, dry_run=0, force=0):
        UnixCCompiler.__init__(self, verbose, dry_run, force)
        # -fPIC is required to build shared objects loadable by Python.
        self.cc_exe = 'icc -fPIC'
        compiler = self.cc_exe
        # Use icc for every stage, including linking shared extensions.
        self.set_executables(compiler=compiler,
                             compiler_so=compiler,
                             compiler_cxx=compiler,
                             linker_exe=compiler,
                             linker_so=compiler + ' -shared')
class IntelItaniumCCompiler(IntelCCompiler):
    """Intel compiler variant for Itanium; only the executable name differs."""
    compiler_type = 'intele'

    # On Itanium, the Intel Compiler used to be called ecc, let's search for
    # it (now it's also icc, so ecc is last in the search).
    # NOTE: this loop runs at class-definition time; the first executable
    # found is left bound as the class attribute ``cc_exe``.
    for cc_exe in map(find_executable, ['icc', 'ecc']):
        if cc_exe:
            break
class IntelEM64TCCompiler(UnixCCompiler):
    """
    A modified Intel x86_64 compiler compatible with a 64bit GCC-built Python.
    """
    compiler_type = 'intelem'
    cc_exe = 'icc -m64 -fPIC'
    cc_args = "-fPIC"

    def __init__(self, verbose=0, dry_run=0, force=0):
        UnixCCompiler.__init__(self, verbose, dry_run, force)
        # -m64 targets x86_64; -fPIC is needed for shared extension modules.
        self.cc_exe = 'icc -m64 -fPIC'
        compiler = self.cc_exe
        self.set_executables(compiler=compiler,
                             compiler_so=compiler,
                             compiler_cxx=compiler,
                             linker_exe=compiler,
                             linker_so=compiler + ' -shared')
class IntelCCompilerW(MSVCCompiler):
    """
    A modified Intel compiler on Windows compatible with an MSVC-built Python.
    """
    compiler_type = 'intelw'

    def __init__(self, verbose=0, dry_run=0, force=0):
        MSVCCompiler.__init__(self, verbose, dry_run, force)
        # Raw string: the pattern contains regex escapes such as \( which are
        # not valid *string* escapes; a raw literal produces the identical
        # pattern while avoiding invalid-escape-sequence warnings.
        version_match = simple_version_match(start=r'Intel\(R\).*?32,')
        self.__version = version_match

    def initialize(self, plat_name=None):
        """Locate the Intel toolchain executables and set the compile flags."""
        MSVCCompiler.initialize(self, plat_name)
        self.cc = self.find_exe("icl.exe")
        self.linker = self.find_exe("xilink")
        self.compile_options = ['/nologo', '/O3', '/MD', '/W3']
        self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/Z7',
                                      '/D_DEBUG']
class IntelEM64TCCompilerW(IntelCCompilerW):
    """
    A modified Intel x86_64 compiler compatible with a 64bit MSVC-built Python.
    """
    compiler_type = 'intelemw'

    def __init__(self, verbose=0, dry_run=0, force=0):
        MSVCCompiler.__init__(self, verbose, dry_run, force)
        # Raw string for the regex pattern (identical bytes, no
        # invalid-escape-sequence warning); matches the 64-bit banner.
        version_match = simple_version_match(start=r'Intel\(R\).*?64,')
        self.__version = version_match
| bsd-3-clause |
thaumos/ansible | lib/ansible/modules/cloud/vmware/vmware_guest_find.py | 16 | 4730 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_guest_find
short_description: Find the folder path(s) for a virtual machine by name or UUID
description:
- Find the folder path(s) for a virtual machine by name or UUID
version_added: 2.4
author:
- Abhijeet Kasurde (@Akasurde) <akasurde@redhat.com>
notes:
- Tested on vSphere 6.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
name:
description:
- Name of the VM to work with.
- This is required if C(uuid) parameter is not supplied.
uuid:
description:
- UUID of the instance to manage if known, this is VMware's BIOS UUID by default.
- This is required if C(name) parameter is not supplied.
use_instance_uuid:
description:
- Whether to use the VMWare instance UUID rather than the BIOS UUID.
default: no
type: bool
version_added: '2.8'
datacenter:
description:
- Destination datacenter for the find operation.
- Deprecated in 2.5, will be removed in 2.9 release.
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Find Guest's Folder using name
vmware_guest_find:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
validate_certs: no
name: testvm
delegate_to: localhost
register: vm_folder
- name: Find Guest's Folder using UUID
vmware_guest_find:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
uuid: 38c4c89c-b3d7-4ae6-ae4e-43c5118eae49
delegate_to: localhost
register: vm_folder
'''
RETURN = r"""
folders:
description: List of folders for user specified virtual machine
returned: on success
type: list
sample: [
'/DC0/vm',
]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec, find_vm_by_id
try:
from pyVmomi import vim
except ImportError:
pass
class PyVmomiHelper(PyVmomi):
    """Resolve a VM by name or UUID and report its inventory folder path(s)."""

    def __init__(self, module):
        super(PyVmomiHelper, self).__init__(module)
        # Lookup criteria: exactly one of name/uuid is set (enforced by the
        # AnsibleModule argument spec in main()).
        self.name = self.params['name']
        self.uuid = self.params['uuid']
        self.use_instance_uuid = self.params['use_instance_uuid']

    def getvm_folder_paths(self):
        """Return a list of folder paths for the matching virtual machine(s).

        A UUID lookup resolves at most one VM and fails the module when it is
        not found; a name lookup may match several VMs, producing one folder
        path per match.
        """
        results = []
        vms = []
        if self.uuid:
            # Choose BIOS UUID vs VMware instance UUID based on the option.
            if self.use_instance_uuid:
                vm_obj = find_vm_by_id(self.content, vm_id=self.uuid, vm_id_type="instance_uuid")
            else:
                vm_obj = find_vm_by_id(self.content, vm_id=self.uuid, vm_id_type="uuid")
            if vm_obj is None:
                self.module.fail_json(msg="Failed to find the virtual machine with UUID : %s" % self.uuid)
            vms = [vm_obj]
        elif self.name:
            # Name lookup: scan all VirtualMachine objects for exact matches.
            objects = self.get_managed_objects_properties(vim_type=vim.VirtualMachine, properties=['name'])
            for temp_vm_object in objects:
                if temp_vm_object.obj.name == self.name:
                    vms.append(temp_vm_object.obj)
        for vm in vms:
            folder_path = self.get_vm_path(self.content, vm)
            results.append(folder_path)
        return results
def main():
    """Ansible module entry point: parse arguments, find the VM's folder(s)."""
    argument_spec = vmware_argument_spec()
    argument_spec.update(
        name=dict(type='str'),
        uuid=dict(type='str'),
        use_instance_uuid=dict(type='bool', default=False),
        datacenter=dict(removed_in_version=2.9, type='str')
    )
    # Exactly one of name/uuid must be supplied (required_one_of plus
    # mutually_exclusive together enforce "exactly one").
    module = AnsibleModule(argument_spec=argument_spec,
                           required_one_of=[['name', 'uuid']],
                           mutually_exclusive=[['name', 'uuid']],
                           )
    pyv = PyVmomiHelper(module)
    # Check if the VM exists before continuing
    folders = pyv.getvm_folder_paths()
    # VM already exists
    if folders:
        try:
            module.exit_json(folders=folders)
        except Exception as exc:
            module.fail_json(msg="Folder enumeration failed with exception %s" % to_native(exc))
    else:
        module.fail_json(msg="Unable to find folders for virtual machine %s" % (module.params.get('name') or
                                                                                module.params.get('uuid')))


if __name__ == '__main__':
    main()
| gpl-3.0 |
gwindes/three.js | utils/exporters/blender/addons/io_three/exporter/io.py | 201 | 2836 | import os
import shutil
from .. import constants, logger
from . import _json
def copy_registered_textures(dest, registration):
    """Copy every registered texture file into the destination directory.

    The directory is created if it does not exist yet.

    :param dest: destination directory
    :param registration: registered textures; each value carries a
                         ``file_path`` key
    :type dest: str
    :type registration: dict
    """
    logger.debug("io.copy_registered_textures(%s, %s)", dest, registration)
    os.makedirs(dest, exist_ok=True)
    for texture_entry in registration.values():
        source_path = texture_entry['file_path']
        copy(source_path, dest)
def copy(src, dst):
    """Copy ``src`` to ``dst``.

    When ``dst`` is a directory the file keeps its original base name.
    Copying a file onto itself is silently skipped.

    :param src: source file
    :param dst: destination file/path
    """
    logger.debug("io.copy(%s, %s)" % (src, dst))
    destination = dst
    if os.path.isdir(destination):
        destination = os.path.join(destination, os.path.basename(src))
    if destination != src:
        shutil.copy(src, destination)
def dump(filepath, data, options=None):
    """Dump the output to disk (JSON, msgpack, etc).

    The serializer is selected from ``options[constants.COMPRESSION]``:
    msgpack writes binary, everything else falls through to JSON with
    optional precision rounding and indentation.

    :param filepath: output file path
    :param data: serializable data to write to disk
    :param options: serialization options (Default value = None)
    :type options: dict
    """
    options = options or {}
    logger.debug("io.dump(%s, data, options=%s)", filepath, options)
    compress = options.get(constants.COMPRESSION, constants.NONE)
    if compress == constants.MSGPACK:
        try:
            import msgpack
        except ImportError:
            logger.error("msgpack module not found")
            raise
        logger.info("Dumping to msgpack")

        # Named inner function instead of a lambda assignment (PEP 8 E731);
        # behavior is unchanged.
        def _serialize(obj, stream):
            msgpack.dump(obj, stream)
        mode = 'wb'
    else:
        round_off = options.get(constants.ENABLE_PRECISION)
        if round_off:
            _json.ROUND = options[constants.PRECISION]
        else:
            _json.ROUND = None
        indent = options.get(constants.INDENT, True)
        indent = 4 if indent else None
        logger.info("Dumping to JSON")

        def _serialize(obj, stream):
            _json.json.dump(obj, stream, indent=indent)
        mode = 'w'
    logger.info("Writing to %s", filepath)
    with open(filepath, mode=mode) as stream:
        _serialize(data, stream)
def load(filepath, options):
    """Read the file at ``filepath`` and return its deserialized content.

    The parser (msgpack or JSON) is chosen from the compression option.

    :param filepath: input file path
    :param options: serialization options
    :type options: dict
    """
    logger.debug("io.load(%s, %s)", filepath, options)
    compress = options.get(constants.COMPRESSION, constants.NONE)
    if compress == constants.MSGPACK:
        try:
            import msgpack
        except ImportError:
            logger.error("msgpack module not found")
            raise
        parser = msgpack
        mode = 'rb'
    else:
        logger.info("Loading JSON")
        parser = _json.json
        mode = 'r'
    with open(filepath, mode=mode) as stream:
        loaded = parser.load(stream)
    return loaded
| mit |
TommasoPadovan/Baobab | baobab_nifti/baobab-usingNifti.py | 1 | 2095 | from nifti import NiftiImage
from datetime import datetime
# Timing: record the start time; the elapsed time is printed at the end.
prima=datetime.now()

#FLAGS
#############################
background_threshold=25 # voxel values <= this are treated as background
reverse_YAxis=True # if True, flip the reported y coordinates (origin moved to the other edge)
#############################

# Load the NIfTI volume "cervello" ("brain" in Italian).
nim=NiftiImage("cervello")
print "Processing '%s'" %nim.filename,
nim.load()
d=nim.extent # 4-tuple with the dimensions of the NIfTI image (x, y, z, time)
O=(d[0]/2,d[1]/2) # in-plane centre; O[0] splits each slice into a left and a right half
print "(%dx%dx%d)\n" %(d[0],d[1],d[2])
print "--------------------------------------------------------------------------------"
print
# Bounding box of the data; indexed here as bb[0]=z range, bb[1]=y range,
# bb[2]=x range — TODO confirm against the pynifti bbox documentation.
bb=nim.bbox
#ASSUMING IMAGE HAS TIME SIZE=1 -> program will work just on first istant on 4-dimensional images
print "\tLeft Pole\tRight Pole\tAverage Pole"
for z in range(bb[0][0],bb[0][1]) : #bottom-up scroll over slices
    # --- left half: walk rows until the first one containing foreground ---
    y=bb[1][0]
    found=False
    maximum=background_threshold
    while (y<bb[1][1] and found==False) :
        for x in range(bb[2][0],O[0]) :
            val=nim.data[0,z,y,x]
            if val>background_threshold :
                found=True
                if val>=maximum :
                    # Remember the x of the brightest voxel in this row.
                    maximum=val
                    Lx=x
        y+=1
    # y was incremented once past the matched row, hence y-1 below.
    if found : Lpole=[Lx,y-1,z]
    else : Lpole=None
    # --- right half: identical scan over the right side of the slice ---
    y=bb[1][0]
    found=False
    maximum=background_threshold
    while (y<bb[1][1] and found==False) :
        for x in range(O[0],bb[2][1]) :
            val=nim.data[0,z,y,x]
            if val>background_threshold :
                found=True
                if val>=maximum :
                    maximum=val
                    Rx=x
        y+=1
    if found : Rpole=[Rx,y-1,z]
    else : Rpole=None
    # Average pole: midpoint of left and right poles, only when both exist.
    if Lpole and Rpole :
        Apole=[(Lpole[0]+Rpole[0])/2.0,(Lpole[1]+Rpole[1])/2.0,z]
        if reverse_YAxis : #reversing y-axis
            Lpole[1]= -(Lpole[1]-d[1]+1)
            Rpole[1]= -(Rpole[1]-d[1]+1)
            Apole[1]= -(Apole[1]-d[1]+1)
    else :
        Apole=None
    print "%d)\t" %z,Lpole,"\t",Rpole,"\t",Apole

# Report total wall-clock processing time.
dopo=datetime.now()
print
print "--------------------------------------------------------------------------------"
print
print "total time",dopo-prima
| mit |
mastizada/kuma | vendor/packages/Babel/babel/__init__.py | 71 | 1333 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
"""Integrated collection of utilities that assist in internationalizing and
localizing applications.
This package is basically composed of two major parts:
* tools to build and work with ``gettext`` message catalogs
* a Python interface to the CLDR (Common Locale Data Repository), providing
access to various locale display names, localized number and date
formatting, etc.
:see: http://www.gnu.org/software/gettext/
:see: http://docs.python.org/lib/module-gettext.html
:see: http://www.unicode.org/cldr/
"""
from babel.core import *
__docformat__ = 'restructuredtext en'
# Determine the installed Babel version from setuptools metadata at import
# time; fall back to None ("unknown") when pkg_resources is missing or the
# distribution cannot be resolved.
try:
    from pkg_resources import get_distribution, ResolutionError
    try:
        __version__ = get_distribution('Babel').version
    except ResolutionError:
        __version__ = None # unknown
except ImportError:
    __version__ = None # unknown
| mpl-2.0 |
jaja14/project4 | lib/werkzeug/local.py | 310 | 14094 | # -*- coding: utf-8 -*-
"""
werkzeug.local
~~~~~~~~~~~~~~
This module implements context-local objects.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from functools import update_wrapper
from werkzeug.wsgi import ClosingIterator
from werkzeug._compat import PY2, implements_bool
# since each thread has its own greenlet we can just use those as identifiers
# for the context. If greenlets are not available we fall back to the
# current thread ident depending on where it is.
try:
from greenlet import getcurrent as get_ident
except ImportError:
try:
from thread import get_ident
except ImportError:
from _thread import get_ident
def release_local(local):
    """Releases the contents of the local for the current context.
    This makes it possible to use locals without a manager.

    Example::

        >>> loc = Local()
        >>> loc.foo = 42
        >>> release_local(loc)
        >>> hasattr(loc, 'foo')
        False

    With this function one can release :class:`Local` objects as well
    as :class:`LocalStack` objects. However it is not possible to
    release data held by proxies that way, one always has to retain
    a reference to the underlying local object in order to be able
    to release it.

    .. versionadded:: 0.6.1
    """
    # Both Local and LocalStack implement __release_local__.
    local.__release_local__()
class Local(object):
    """Attribute storage that is isolated per context (thread or greenlet).

    Data lives in ``__storage__`` keyed by the identifier returned from
    ``__ident_func__``; attribute access on the instance transparently
    reads and writes the mapping for the current context only.
    """
    __slots__ = ('__storage__', '__ident_func__')

    def __init__(self):
        object.__setattr__(self, '__storage__', {})
        object.__setattr__(self, '__ident_func__', get_ident)

    def __iter__(self):
        # Yields (ident, namespace-dict) pairs for every live context.
        return iter(self.__storage__.items())

    def __call__(self, proxy):
        """Create a proxy for a name."""
        return LocalProxy(self, proxy)

    def __release_local__(self):
        # Drop all data stored for the current context, if any.
        self.__storage__.pop(self.__ident_func__(), None)

    def __getattr__(self, name):
        context = self.__storage__.get(self.__ident_func__())
        if context is None or name not in context:
            raise AttributeError(name)
        return context[name]

    def __setattr__(self, name, value):
        # Create the per-context namespace lazily on first write.
        context = self.__storage__.setdefault(self.__ident_func__(), {})
        context[name] = value

    def __delattr__(self, name):
        ident = self.__ident_func__()
        storage = self.__storage__
        if ident not in storage or name not in storage[ident]:
            raise AttributeError(name)
        del storage[ident][name]
class LocalStack(object):
    """A context-local stack built on top of :class:`Local`.

    Works like :class:`Local` but keeps a stack of objects per context::

        >>> ls = LocalStack()
        >>> ls.push(42)
        >>> ls.top
        42
        >>> ls.pop()
        42

    Items can be force-released with a :class:`LocalManager` or
    :func:`release_local`, but the correct way is to pop after use; once
    empty the stack is no longer bound to the current context. Calling the
    stack with no arguments returns a proxy resolving to the topmost item.

    .. versionadded:: 0.6.1
    """

    def __init__(self):
        self._local = Local()

    def __release_local__(self):
        self._local.__release_local__()

    def _ident_get(self):
        return self._local.__ident_func__

    def _ident_set(self, value):
        object.__setattr__(self._local, '__ident_func__', value)

    # Forward ident-function access to the wrapped Local, then drop the
    # helper names from the class namespace.
    __ident_func__ = property(_ident_get, _ident_set)
    del _ident_get, _ident_set

    def __call__(self):
        def _lookup():
            current = self.top
            if current is None:
                raise RuntimeError('object unbound')
            return current
        return LocalProxy(_lookup)

    def push(self, obj):
        """Pushes a new item to the stack"""
        stack = getattr(self._local, 'stack', None)
        if stack is None:
            stack = []
            self._local.stack = stack
        stack.append(obj)
        return stack

    def pop(self):
        """Removes the topmost item from the stack, will return the
        old value or `None` if the stack was already empty.
        """
        stack = getattr(self._local, 'stack', None)
        if stack is None:
            return None
        if len(stack) == 1:
            # Last item: unbind the whole context instead of mutating the list.
            release_local(self._local)
            return stack[-1]
        return stack.pop()

    @property
    def top(self):
        """The topmost item on the stack. If the stack is empty,
        `None` is returned.
        """
        try:
            return self._local.stack[-1]
        except (AttributeError, IndexError):
            return None
class LocalManager(object):
    """Releases the data of one or more locals for the current context.

    Locals cannot clean themselves up; a manager holds a collection of
    :class:`Local`/:class:`LocalStack` objects and wipes their per-context
    data on :meth:`cleanup`. An ``ident_func`` override is propagated to
    every wrapped local.

    .. versionchanged:: 0.6.1
       Instead of a manager the :func:`release_local` function can be used
       as well.

    .. versionchanged:: 0.7
       `ident_func` was added.
    """

    def __init__(self, locals=None, ident_func=None):
        if locals is None:
            managed = []
        elif isinstance(locals, Local):
            managed = [locals]
        else:
            managed = list(locals)
        self.locals = managed
        if ident_func is None:
            self.ident_func = get_ident
        else:
            self.ident_func = ident_func
            # Keep every wrapped local in sync with the custom ident func.
            for managed_local in managed:
                object.__setattr__(managed_local, '__ident_func__', ident_func)

    def get_ident(self):
        """Return the context identifier the local objects use internally
        for this context. You cannot override this method to change the
        behavior but use it to link other context local objects (such as
        SQLAlchemy's scoped sessions) to the Werkzeug locals.

        .. versionchanged:: 0.7
           You can pass a different ident function to the local manager
           that will then be propagated to all the locals passed to the
           constructor.
        """
        return self.ident_func()

    def cleanup(self):
        """Manually clean up the data in the locals for this context. Call
        this at the end of the request or use `make_middleware()`.
        """
        for managed_local in self.locals:
            release_local(managed_local)

    def make_middleware(self, app):
        """Wrap a WSGI application so that cleaning up happens after
        request end.
        """
        def application(environ, start_response):
            response = app(environ, start_response)
            return ClosingIterator(response, self.cleanup)
        return application

    def middleware(self, func):
        """Like `make_middleware` but for decorating functions, copying
        name, docstring and module from the wrapped application::

            @manager.middleware
            def application(environ, start_response):
                ...
        """
        return update_wrapper(self.make_middleware(func), func)

    def __repr__(self):
        return '<%s storages: %d>' % (self.__class__.__name__, len(self.locals))
@implements_bool
class LocalProxy(object):
    """Acts as a proxy for a werkzeug local.  Forwards all operations to
    a proxied object.  The only operations not supported for forwarding
    are right handed operands and any kind of assignment.

    Example usage::

        from werkzeug.local import Local
        l = Local()

        # these are proxies
        request = l('request')
        user = l('user')


        from werkzeug.local import LocalStack
        _response_local = LocalStack()

        # this is a proxy
        response = _response_local()

    Whenever something is bound to l.user / l.request the proxy objects
    will forward all operations.  If no object is bound a :exc:`RuntimeError`
    will be raised.

    To create proxies to :class:`Local` or :class:`LocalStack` objects,
    call the object as shown above.  If you want to have a proxy to an
    object looked up by a function, you can (as of Werkzeug 0.6.1) pass
    a function to the :class:`LocalProxy` constructor::

        session = LocalProxy(lambda: get_current_request().session)

    .. versionchanged:: 0.6.1
       The class can be instantiated with a callable as well now.
    """
    # ``__local`` is name-mangled to ``_LocalProxy__local``; ``__dict__``
    # must appear in __slots__ because the property below shadows it.
    __slots__ = ('__local', '__dict__', '__name__')

    def __init__(self, local, name=None):
        # object.__setattr__ is required because __setattr__ below
        # forwards all attribute assignment to the proxied object.
        object.__setattr__(self, '_LocalProxy__local', local)
        object.__setattr__(self, '__name__', name)

    def _get_current_object(self):
        """Return the current object.  This is useful if you want the real
        object behind the proxy at a time for performance reasons or because
        you want to pass the object into a different context.
        """
        # A plain callable was passed instead of a Local/LocalStack
        # (locals expose __release_local__).
        if not hasattr(self.__local, '__release_local__'):
            return self.__local()
        try:
            return getattr(self.__local, self.__name__)
        except AttributeError:
            raise RuntimeError('no object bound to %s' % self.__name__)

    @property
    def __dict__(self):
        try:
            return self._get_current_object().__dict__
        except RuntimeError:
            raise AttributeError('__dict__')

    def __repr__(self):
        try:
            obj = self._get_current_object()
        except RuntimeError:
            return '<%s unbound>' % self.__class__.__name__
        return repr(obj)

    def __bool__(self):
        # An unbound proxy is falsy instead of raising.
        try:
            return bool(self._get_current_object())
        except RuntimeError:
            return False

    def __unicode__(self):
        try:
            return unicode(self._get_current_object())
        except RuntimeError:
            return repr(self)

    def __dir__(self):
        try:
            return dir(self._get_current_object())
        except RuntimeError:
            return []

    def __getattr__(self, name):
        if name == '__members__':
            return dir(self._get_current_object())
        return getattr(self._get_current_object(), name)

    def __setitem__(self, key, value):
        self._get_current_object()[key] = value

    def __delitem__(self, key):
        del self._get_current_object()[key]

    if PY2:
        __getslice__ = lambda x, i, j: x._get_current_object()[i:j]

        def __setslice__(self, i, j, seq):
            self._get_current_object()[i:j] = seq

        def __delslice__(self, i, j):
            del self._get_current_object()[i:j]

    # Special methods are looked up on the type, not the instance, so
    # __getattr__ cannot forward them; each one is forwarded explicitly.
    __setattr__ = lambda x, n, v: setattr(x._get_current_object(), n, v)
    __delattr__ = lambda x, n: delattr(x._get_current_object(), n)
    __str__ = lambda x: str(x._get_current_object())
    __lt__ = lambda x, o: x._get_current_object() < o
    __le__ = lambda x, o: x._get_current_object() <= o
    __eq__ = lambda x, o: x._get_current_object() == o
    __ne__ = lambda x, o: x._get_current_object() != o
    __gt__ = lambda x, o: x._get_current_object() > o
    __ge__ = lambda x, o: x._get_current_object() >= o
    __cmp__ = lambda x, o: cmp(x._get_current_object(), o)
    __hash__ = lambda x: hash(x._get_current_object())
    __call__ = lambda x, *a, **kw: x._get_current_object()(*a, **kw)
    __len__ = lambda x: len(x._get_current_object())
    __getitem__ = lambda x, i: x._get_current_object()[i]
    __iter__ = lambda x: iter(x._get_current_object())
    __contains__ = lambda x, i: i in x._get_current_object()
    __add__ = lambda x, o: x._get_current_object() + o
    __sub__ = lambda x, o: x._get_current_object() - o
    __mul__ = lambda x, o: x._get_current_object() * o
    __floordiv__ = lambda x, o: x._get_current_object() // o
    __mod__ = lambda x, o: x._get_current_object() % o
    __divmod__ = lambda x, o: x._get_current_object().__divmod__(o)
    __pow__ = lambda x, o: x._get_current_object() ** o
    __lshift__ = lambda x, o: x._get_current_object() << o
    __rshift__ = lambda x, o: x._get_current_object() >> o
    __and__ = lambda x, o: x._get_current_object() & o
    __xor__ = lambda x, o: x._get_current_object() ^ o
    __or__ = lambda x, o: x._get_current_object() | o
    __div__ = lambda x, o: x._get_current_object().__div__(o)
    __truediv__ = lambda x, o: x._get_current_object().__truediv__(o)
    __neg__ = lambda x: -(x._get_current_object())
    __pos__ = lambda x: +(x._get_current_object())
    __abs__ = lambda x: abs(x._get_current_object())
    __invert__ = lambda x: ~(x._get_current_object())
    __complex__ = lambda x: complex(x._get_current_object())
    __int__ = lambda x: int(x._get_current_object())
    __long__ = lambda x: long(x._get_current_object())
    __float__ = lambda x: float(x._get_current_object())
    __oct__ = lambda x: oct(x._get_current_object())
    __hex__ = lambda x: hex(x._get_current_object())
    __index__ = lambda x: x._get_current_object().__index__()
    # NOTE(review): passing ``x`` here looks wrong -- ``__coerce__`` is
    # already bound to the current object, so this invokes it with two
    # arguments; upstream werkzeug calls ``.__coerce__(o)``.  Confirm
    # before relying on Py2 coercion through a proxy.
    __coerce__ = lambda x, o: x._get_current_object().__coerce__(x, o)
    __enter__ = lambda x: x._get_current_object().__enter__()
    __exit__ = lambda x, *a, **kw: x._get_current_object().__exit__(*a, **kw)
    __radd__ = lambda x, o: o + x._get_current_object()
    __rsub__ = lambda x, o: o - x._get_current_object()
    __rmul__ = lambda x, o: o * x._get_current_object()
    __rdiv__ = lambda x, o: o / x._get_current_object()
    if PY2:
        __rtruediv__ = lambda x, o: x._get_current_object().__rtruediv__(o)
    else:
        __rtruediv__ = __rdiv__
    __rfloordiv__ = lambda x, o: o // x._get_current_object()
    __rmod__ = lambda x, o: o % x._get_current_object()
    __rdivmod__ = lambda x, o: x._get_current_object().__rdivmod__(o)
| apache-2.0 |
majidaldo/ansible | lib/ansible/utils/module_docs_fragments/files.py | 37 | 2379 | # (c) 2014, Matt Martz <matt@sivel.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
    # Standard files documentation fragment

    # YAML fragment merged into the DOCUMENTATION of modules that accept
    # the common file attributes (mode/owner/group, SELinux context, follow).
    DOCUMENTATION = """
options:
  mode:
    required: false
    default: null
    description:
      - mode the file or directory should be, such as 0644 as would be fed to I(chmod). As of version 1.8, the mode may be specified as a symbolic mode (for example, C(u+rwx) or C(u=rw,g=r,o=r)).
  owner:
    required: false
    default: null
    description:
      - name of the user that should own the file/directory, as would be fed to I(chown)
  group:
    required: false
    default: null
    description:
      - name of the group that should own the file/directory, as would be fed to I(chown)
  seuser:
    required: false
    default: null
    description:
      - user part of SELinux file context. Will default to system policy, if
        applicable. If set to C(_default), it will use the C(user) portion of the
        policy if available
  serole:
    required: false
    default: null
    description:
      - role part of SELinux file context, C(_default) feature works as for I(seuser).
  setype:
    required: false
    default: null
    description:
      - type part of SELinux file context, C(_default) feature works as for I(seuser).
  selevel:
    required: false
    default: "s0"
    description:
      - level part of the SELinux file context. This is the MLS/MCS attribute,
        sometimes known as the C(range). C(_default) feature works as for
        I(seuser).
  follow:
    required: false
    default: "no"
    choices: [ "yes", "no" ]
    version_added: "1.8"
    description:
      - 'This flag indicates that filesystem links, if they exist, should be followed.'
"""
| gpl-3.0 |
cryptobanana/ansible | lib/ansible/modules/network/mlnxos/mlnxos_mlag_ipl.py | 1 | 6810 | #!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: mlnxos_mlag_ipl
version_added: "2.5"
author: "Samer Deeb (@samerd)"
short_description: Manage IPL (inter-peer link) on Mellanox MLNX-OS network devices
description:
- This module provides declarative management of IPL (inter-peer link)
management on Mellanox MLNX-OS network devices.
notes:
- Tested on MLNX-OS 3.6.4000
options:
name:
description:
- Name of the interface (port-channel) IPL should be configured on.
required: true
vlan_interface:
description:
- Name of the IPL vlan interface.
state:
description:
- IPL state.
default: present
choices: ['present', 'absent']
peer_address:
description:
- IPL peer IP address.
"""
EXAMPLES = """
- name: run configure ipl
mlnxos_mlag_ipl:
name: Po1
vlan_interface: Vlan 322
state: present
peer_address: 192.168.7.1
- name: run remove ipl
mlnxos_mlag_ipl:
name: Po1
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device.
returned: always
type: list
sample:
- interface port-channel 1 ipl 1
- interface vlan 1024 ipl 1 peer-address 10.10.10.10
"""
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.mlnxos.mlnxos import BaseMlnxosModule
from ansible.module_utils.network.mlnxos.mlnxos import show_cmd
class MlnxosMlagIplModule(BaseMlnxosModule):
    """Declarative management of the MLAG IPL on MLNX-OS devices.

    Compares the requested configuration (task parameters) against the
    device's current state ("show mlag") and generates the minimal set
    of CLI commands to converge.
    """

    # Matches vlan interface names such as "Vlan 322".
    # NOTE(review): not referenced anywhere in this class -- possibly
    # dead code or used by a subclass; confirm before removing.
    VLAN_IF_REGEX = re.compile(r'^Vlan \d+')

    @classmethod
    def _get_element_spec(cls):
        # Ansible argument spec for this module's parameters.
        return dict(
            name=dict(required=True),
            state=dict(default='present',
                       choices=['present', 'absent']),
            peer_address=dict(),
            vlan_interface=dict(),
        )

    def init_module(self):
        """ module initialization
        """
        element_spec = self._get_element_spec()
        argument_spec = dict()
        argument_spec.update(element_spec)
        self._module = AnsibleModule(
            argument_spec=argument_spec,
            supports_check_mode=True)

    def get_required_config(self):
        # Copy the validated task parameters into self._required_config.
        module_params = self._module.params
        self._required_config = dict(
            name=module_params['name'],
            state=module_params['state'],
            peer_address=module_params['peer_address'],
            vlan_interface=module_params['vlan_interface'])
        self.validate_param_values(self._required_config)

    def _update_mlag_data(self, mlag_data):
        # Parse the JSON output of "show mlag" into self._current_config.
        # Only IPL id "1" is managed by this module.
        if not mlag_data:
            return
        mlag_summary = mlag_data.get("MLAG IPLs Summary", {})
        ipl_id = "1"
        ipl_list = mlag_summary.get(ipl_id)
        if ipl_list:
            ipl_data = ipl_list[0]
            vlan_id = ipl_data.get("Vlan Interface")
            vlan_interface = ""
            if vlan_id != "N/A":
                vlan_interface = "Vlan %s" % vlan_id
            peer_address = ipl_data.get("Peer IP address")
            name = ipl_data.get("Group Port-Channel")
            self._current_config = dict(
                name=name,
                peer_address=peer_address,
                vlan_interface=vlan_interface)

    def _show_mlag_data(self):
        cmd = "show mlag"
        # fail_on_error=False: a failure here simply means MLAG is not
        # configured yet, which is a valid state for this module.
        return show_cmd(self._module, cmd, json_fmt=True, fail_on_error=False)

    def load_current_config(self):
        # called in base class in run function
        self._current_config = dict()
        mlag_data = self._show_mlag_data()
        self._update_mlag_data(mlag_data)

    def _get_interface_cmd_name(self, if_name):
        # Translate the short name "Po<N>" into the CLI form
        # "port-channel <N>"; anything else is rejected.
        if if_name.startswith('Po'):
            return if_name.replace("Po", "port-channel ")
        self._module.fail_json(
            msg='invalid interface name: %s' % if_name)

    def _generate_port_channel_command(self, if_name, enable):
        # Build the (no) "ipl 1" command for the port-channel interface.
        if_cmd_name = self._get_interface_cmd_name(if_name)
        if enable:
            ipl_cmd = 'ipl 1'
        else:
            ipl_cmd = "no ipl 1"
        cmd = "interface %s %s" % (if_cmd_name, ipl_cmd)
        return cmd

    def _generate_vlan_if_command(self, if_name, enable, peer_address):
        # Build the (no) "ipl 1 peer-address ..." command for the vlan
        # interface; peer_address is only used when enabling.
        if_cmd_name = if_name.lower()
        if enable:
            ipl_cmd = 'ipl 1 peer-address %s' % peer_address
        else:
            ipl_cmd = "no ipl 1"
        cmd = "interface %s %s" % (if_cmd_name, ipl_cmd)
        return cmd

    def _generate_no_ipl_commands(self):
        # state=absent: only remove the IPL when it is configured on the
        # requested port-channel.
        curr_interface = self._current_config.get('name')
        req_interface = self._required_config.get('name')
        if curr_interface == req_interface:
            cmd = self._generate_port_channel_command(
                req_interface, enable=False)
            self._commands.append(cmd)

    def _generate_ipl_commands(self):
        # state=present: move the IPL to the requested port-channel and
        # (re)bind the vlan interface / peer address as needed.
        curr_interface = self._current_config.get('name')
        req_interface = self._required_config.get('name')
        if curr_interface != req_interface:
            if curr_interface and curr_interface != 'N/A':
                cmd = self._generate_port_channel_command(
                    curr_interface, enable=False)
                self._commands.append(cmd)
            cmd = self._generate_port_channel_command(
                req_interface, enable=True)
            self._commands.append(cmd)
        curr_vlan = self._current_config.get('vlan_interface')
        req_vlan = self._required_config.get('vlan_interface')
        add_peer = False
        if curr_vlan != req_vlan:
            add_peer = True
            if curr_vlan:
                cmd = self._generate_vlan_if_command(curr_vlan, enable=False,
                                                     peer_address=None)
                self._commands.append(cmd)
        curr_peer = self._current_config.get('peer_address')
        req_peer = self._required_config.get('peer_address')
        if req_peer != curr_peer:
            add_peer = True
        if add_peer and req_peer:
            cmd = self._generate_vlan_if_command(req_vlan, enable=True,
                                                 peer_address=req_peer)
            self._commands.append(cmd)

    def generate_commands(self):
        # Dispatch on the requested state.
        state = self._required_config['state']
        if state == 'absent':
            self._generate_no_ipl_commands()
        else:
            self._generate_ipl_commands()
def main():
    """ main entry point for module execution
    """
    # Delegates to the base-class runner (classmethod on the module class).
    MlnxosMlagIplModule.main()


if __name__ == '__main__':
    main()
| gpl-3.0 |
martastain/dc-caspar | actions.py | 1 | 6436 | import os
import json
import time
from caspar import CasparServer
SESSION_DIR = "sessions"
# Mapping of logical button names to physical key indices on the panel:
# "LGxx" are the large buttons, "SMxx" the small ones.
# NOTE(review): the zero-padded literals (00, 01, 02, 03) are Python 2
# octal-style literals with values 0-3, and SM05-SM08 reuse the same
# indices 0-3 as SM01-SM04 -- confirm against the hardware layout
# whether the duplication is intentional.
KEYS = {
    "LG01" : 20,
    "LG02" : 21,
    "LG03" : 22,
    "LG04" : 23,
    "LG05" : 16,
    "LG06" : 17,
    "LG07" : 18,
    "LG08" : 19,
    "LG09" : 12,
    "LG10" : 13,
    "LG11" : 14,
    "LG12" : 15,
    "LG13" : 8,
    "LG14" : 9,
    "LG15" : 10,
    "LG16" : 11,
    "SM01" : 00,
    "SM02" : 01,
    "SM03" : 02,
    "SM04" : 03,
    "SM05" : 0,
    "SM06" : 1,
    "SM07" : 2,
    "SM08" : 3,
    }
class BaseAction():
    """Base class for panel button actions.

    Subclasses implement the ``on_*`` hooks.  ``_main`` is called
    periodically to poll the Caspar layer state and drive the button
    LED (enable / blink / disable).

    NOTE(review): ``self.caspar``, ``self.id_channel``, ``self.id_layer``
    and ``self.filename`` are expected to be set by the subclass's
    ``on_init`` -- confirm every subclass does so before ``_main`` runs.
    """
    def __init__(self, parent, id_button):
        self.parent = parent
        self.id_button = id_button
        # 0 = inactive, 1 = on air (current file), 2 = cued (blinking)
        self.state = 0
        self.on_init()

    def on_init(self):
        pass

    def on_press(self):
        pass

    def on_release(self):
        pass

    def on_main(self):
        pass

    def _main(self):
        # Poll the targeted channel/layer and update the button state.
        ch = self.caspar[self.id_channel]
        if not ch:
            return
        l = ch[self.id_layer]
        if l.current_file:
            if l.current_file == self.filename.lower():
                if self.state != 1:
                    self.state = 1
                    self.enable()
            elif l.cued_file == self.filename.lower():
                if self.state != 2:
                    self.state = 2
                    self.blink()
            elif self.state != 0:
                self.state = 0
                self.disable()
        elif self.state != 0:
            self.state = 0
            self.disable()
        self.on_main()

    def enable(self):
        self.parent.button_enable(self.id_button)

    def disable(self):
        self.parent.button_disable(self.id_button)

    def blink(self):
        self.parent.button_blink(self.id_button)
class ClipAction(BaseAction):
    """Plays a video clip on a channel/layer.

    Modifiers: shift "a" cues the clip in the background (LOADBG),
    shift "b" clears the layer; otherwise the clip is played, optionally
    followed by an ``end_action`` (CLEAR / LOOP <file> / PLAY <file>).
    """
    def on_init(self):
        self.id_channel = 1
        self.id_layer = 1
        self.filename = False
        self.loop = False
        self.caspar = self.parent.caspar

    def on_press(self):
        if self.parent.shift_state("b"):
            cmds = ["CLEAR %s-%s" % (self.id_channel, self.id_layer)]
        elif self.parent.shift_state("a"):
            cmds = ["LOADBG %s-%s %s%s AUTO" % (self.id_channel, self.id_layer, self.filename, [""," LOOP"][self.loop])]
        else:
            cmds = ["PLAY %s-%s %s%s AUTO" % (self.id_channel, self.id_layer, self.filename, [""," LOOP"][self.loop])]
        # Queue the follow-up action that runs when the clip ends.
        # NOTE(review): original indentation was lost in extraction; this
        # chain may have belonged inside the PLAY branch only -- confirm.
        if self.end_action == "CLEAR":
            cmds.append("LOADBG %s-%s BLANK AUTO" % (self.id_channel, self.id_layer))
        elif str(self.end_action).startswith("LOOP"):
            cmds.append( "LOADBG %s-%s %s LOOP AUTO" % (self.id_channel, self.id_layer, self.end_action.split(" ")[1]))
        elif str(self.end_action).startswith("PLAY"):
            cmds.append("LOADBG %s-%s %s AUTO" % (self.id_channel, self.id_layer, self.end_action.split(" ")[1]))
        for cmd in cmds:
            self.caspar.query(cmd)
class ImageAction(BaseAction):
    """Toggles a still/overlay on a layer, with an optional auto-hide
    timeout; shift "b" clears the layer directly."""
    def on_init(self):
        self.caspar = self.parent.caspar
        self.id_channel = 1
        self.id_layer = 1
        self.filename = False
        # Seconds after which the image is hidden again (falsy = never).
        self.auto_hide = False
        self.show_time = 0
        # Transition parameters appended to the PLAY commands.
        self.show = ""
        self.hide = ""

    def on_press(self):
        if self.parent.shift_state("b"):
            self.caspar.query("CLEAR %s-%s" % (self.id_channel, self.id_layer))
            return
        # Toggle: hidden -> show, otherwise hide.
        if self.state == 0:
            self.do_show()
        else:
            self.do_hide()

    def do_show(self):
        cmd = "PLAY %s-%s %s %s" % (self.id_channel, self.id_layer, self.filename, self.show)
        self.show_time = time.time()
        self.caspar.query(cmd)

    def do_hide(self):
        # NOTE(review): uses self.show (not self.hide) as the hide
        # transition; self.hide is never read -- confirm intent.
        cmd = "PLAY %s-%s BLANK %s" % (self.id_channel, self.id_layer, self.show)
        self.caspar.query(cmd)

    def on_main(self):
        # Auto-hide once the image has been on air long enough.
        if self.state == 1 and self.auto_hide and time.time() - self.show_time > self.auto_hide:
            self.do_hide()
class Session():
    """Bridges a button panel to a CasparCG server.

    Loads button-to-action mappings from a JSON session file and
    dispatches periodic state updates between the panel (``parent``)
    and the Caspar server.
    """

    def __init__(self, parent):
        self.parent = parent
        self.actions = {}
        self.caspar = CasparServer()

    def load(self, fname):
        """Load a session description (JSON) from SESSION_DIR/fname.

        Each entry in ``data["actions"]`` maps a logical key name (see
        KEYS) to an action description with a "class" of "clip" or
        "image"; unknown classes are ignored.
        """
        with open(os.path.join(SESSION_DIR, fname)) as f:
            data = json.loads(f.read())
        for key_name in data["actions"]:
            action = data["actions"][key_name]
            id_button = KEYS[key_name]
            if action["class"] == "clip":
                a = ClipAction(self, id_button)
                a.filename = action["filename"]
                a.title = action.get("title", a.filename)
                a.id_channel = action.get("channel", 1)
                a.id_layer = action.get("layer", 1)
                a.loop = action.get("loop", False)
                a.end_action = action.get("end_action", False)
                self.parent.buttons[id_button].setText(a.title)
                self.actions[id_button] = a
            elif action["class"] == "image":
                a = ImageAction(self, id_button)
                a.filename = action["filename"]
                a.title = action.get("title", a.filename)
                a.id_channel = action.get("channel", 1)
                a.id_layer = action.get("layer", 1)
                a.auto_hide = action.get("auto_hide", False)
                a.show = action.get("show", "")
                a.hide = action.get("hide", "")
                self.parent.buttons[id_button].setText(a.title)
                self.actions[id_button] = a

    def shift_state(self, key):
        # True when the given shift modifier is currently held.
        return key in self.parent.shift_state

    def button_enable(self, id_button):
        self.parent.buttons[id_button].enable()

    def button_disable(self, id_button):
        self.parent.buttons[id_button].disable()

    def button_blink(self, id_button):
        self.parent.buttons[id_button].blink()

    def main(self):
        """Periodic tick: refresh the server state, then every action."""
        self.caspar.main()
        for action in self.actions.values():
            action._main()
camptocamp/QGIS | python/plugins/processing/algs/VectorLayerHistogram.py | 1 | 2809 | # -*- coding: utf-8 -*-
"""
***************************************************************************
EquivalentNumField.py
---------------------
Date : January 2013
Copyright : (C) 2013 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'January 2013'
__copyright__ = '(C) 2013, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import matplotlib.pyplot as plt
import matplotlib.pylab as lab
from PyQt4.QtCore import *
from qgis.core import *
from processing.parameters.ParameterVector import ParameterVector
from processing.parameters.ParameterTableField import ParameterTableField
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.outputs.OutputHTML import OutputHTML
from processing.tools import *
from processing.tools import dataobjects
from processing.parameters.ParameterNumber import ParameterNumber
class VectorLayerHistogram(GeoAlgorithm):
    """Processing algorithm: plot a histogram of a numeric attribute of
    a vector layer and write it to an HTML report."""

    # Parameter / output identifiers used by the Processing framework.
    INPUT = "INPUT"
    OUTPUT = "OUTPUT"
    FIELD = "FIELD"
    BINS = "BINS"

    def processAlgorithm(self, progress):
        # Resolve parameters to concrete objects.
        uri = self.getParameterValue(self.INPUT)
        layer = dataobjects.getObjectFromUri(uri)
        fieldname = self.getParameterValue(self.FIELD)
        output = self.getOutputValue(self.OUTPUT)
        values = vector.getAttributeValues(layer, fieldname)
        # Close any previous figure so the new histogram starts clean.
        plt.close()
        bins = self.getParameterValue(self.BINS)
        plt.hist(values[fieldname], bins)
        # Save the plot next to the HTML output and reference it.
        plotFilename = output +".png"
        lab.savefig(plotFilename)
        f = open(output, "w")
        f.write("<img src=\"" + plotFilename + "\"/>")
        f.close()

    def defineCharacteristics(self):
        # Declare name, group, parameters and outputs for the toolbox.
        self.name = "Vector layer histogram"
        self.group = "Graphics"
        self.addParameter(ParameterVector(self.INPUT, "Input layer", [ParameterVector.VECTOR_TYPE_ANY]))
        self.addParameter(ParameterTableField(self.FIELD, "Attribute", self.INPUT,ParameterTableField.DATA_TYPE_NUMBER))
        self.addParameter(ParameterNumber(self.BINS, "number of bins", 2, None, 10))
        self.addOutput(OutputHTML(self.OUTPUT, "Output"))
| gpl-2.0 |
camptocamp/QGIS | python/plugins/GdalTools/tools/widgetBatchBase.py | 2 | 5310 | # -*- coding: utf-8 -*-
"""
***************************************************************************
widgetBatchBase.py
---------------------
Date : June 2010
Copyright : (C) 2010 by Giuseppe Sucameli
Email : brush dot tyler at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Giuseppe Sucameli'
__date__ = 'June 2010'
__copyright__ = '(C) 2010, Giuseppe Sucameli'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
from widgetPluginBase import GdalToolsBasePluginWidget as BasePluginWidget
import GdalTools_utils as Utils
class GdalToolsBaseBatchWidget(BasePluginWidget):
    """Base widget for GDAL tools that can process a whole directory of
    rasters (batch mode) instead of a single file.

    Items are processed one at a time: batchRun() starts the first file
    and each onFinished() chains the next via runItem().
    """

    def __init__(self, iface, commandName):
        BasePluginWidget.__init__(self, iface, commandName)

    def getBatchArguments(self, inFile, outFile = None):
        # Per-file command line: common tool arguments + input (+ output).
        arguments = []
        arguments.extend( self.getArguments() )
        arguments.append( inFile )
        if outFile != None:
            arguments.append(outFile)
        return arguments

    def isBatchEnabled(self):
        # Subclasses return True to enable batch mode.
        return False

    def isRecursiveScanEnabled(self):
        # Subclasses return True to also scan sub-directories.
        return False

    def setProgressRange(self, maximum):
        pass

    def updateProgress(self, value, maximum):
        pass

    def getBatchOutputFileName(self, fn):
        inDir = self.getInputFileName()
        outDir = self.getOutputFileName()
        # if overwrites existent files
        if outDir == None or outDir == inDir:
            return fn + ".tmp"
        # Map the input path into the output directory.
        return outDir + fn[len(inDir):]

    def onRun( self ):
        if not self.isBatchEnabled():
            BasePluginWidget.onRun(self)
            return
        self.batchRun()

    def batchRun(self):
        # Collect the input files and start the first item; the rest are
        # chained from onFinished().
        self.base.enableRun( False )
        self.base.setCursor( Qt.WaitCursor )
        inDir = self.getInputFileName()
        self.inFiles = Utils.getRasterFiles( inDir, self.isRecursiveScanEnabled() )
        self.outFiles = []
        for f in self.inFiles:
            self.outFiles.append( self.getBatchOutputFileName( f ) )
        self.errors = []
        self.batchIndex = 0
        self.batchTotal = len( self.inFiles )
        self.setProgressRange( self.batchTotal )
        self.runItem( self.batchIndex, self.batchTotal )

    def runItem(self, index, total):
        self.updateProgress(index, total)
        if index >= total:
            self.batchFinished()
            return
        outFile = None
        if len(self.outFiles) > index:
            outFile = self.outFiles[ index ]
        args = self.getBatchArguments( self.inFiles[index], outFile )
        self.base.refreshArgs(args)
        BasePluginWidget.onRun(self)

    def onFinished(self, exitCode, status):
        if not self.isBatchEnabled():
            BasePluginWidget.onFinished(self, exitCode, status)
            return
        # Collect stderr of the finished process for the error report.
        msg = bytes.decode( bytes( self.base.process.readAllStandardError() ) )
        if msg != '':
            self.errors.append( ">> " + self.inFiles[self.batchIndex] + "<br>" + msg.replace( "\n", "<br>" ) )
        self.base.process.close()
        # overwrite existent files
        inDir = self.getInputFileName()
        outDir = self.getOutputFileName()
        if outDir == None or inDir == outDir:
            oldFile = QFile( self.inFiles[self.batchIndex] )
            newFile = QFile( self.outFiles[self.batchIndex] )
            if oldFile.remove():
                newFile.rename(self.inFiles[self.batchIndex])
        self.batchIndex += 1
        self.runItem( self.batchIndex, self.batchTotal )

    def batchFinished( self ):
        self.base.stop()
        if len(self.errors) > 0:
            msg = u"Processing of the following files ended with error: <br><br>" + "<br><br>".join( self.errors )
            QErrorMessage( self ).showMessage( msg )
        inDir = self.getInputFileName()
        outDir = self.getOutputFileName()
        if outDir == None or inDir == outDir:
            self.outFiles = self.inFiles
        # load layers managing the render flag to avoid waste of time
        canvas = self.iface.mapCanvas()
        previousRenderFlag = canvas.renderFlag()
        canvas.setRenderFlag( False )
        notCreatedList = []
        for item in self.outFiles:
            fileInfo = QFileInfo( item )
            if fileInfo.exists():
                if self.base.loadCheckBox.isChecked():
                    self.addLayerIntoCanvas( fileInfo )
            else:
                notCreatedList.append( item )
        canvas.setRenderFlag( previousRenderFlag )
        if len( notCreatedList ) == 0:
            QMessageBox.information( self, self.tr( "Finished" ), self.tr( "Operation completed." ) )
        else:
            QMessageBox.warning( self, self.tr( "Warning" ), self.tr( "The following files were not created: \n{0}" ).format( ', '.join( notCreatedList ) ) )
| gpl-2.0 |
IRSO/irsosav | nodejs.git/lib/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py | 2779 | 1665 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypsh output module
gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
import code
import sys
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_DIRNAME',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
]

# Map every identity variable to a reference to itself, so expansion
# leaves it untouched in the processed data.
generator_default_variables = {
}
for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v
def GenerateOutput(target_list, target_dicts, data, params):
  """Drop into an interactive Python session with the processed GYP data.

  gypsh writes no output files; it exposes the generator inputs as
  local variables for interactive exploration.  `params` is accepted
  for generator-interface compatibility and unused.
  """
  # Bind the session variables under a name that doesn't shadow the
  # builtin `locals`.
  session_vars = {
    'target_list': target_list,
    'target_dicts': target_dicts,
    'data': data,
  }

  # Use a banner that looks like the stock Python one and like what
  # code.interact uses by default, but tack on something to indicate what
  # locals are available, and identify gypsh.
  banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
         (sys.version, sys.platform, repr(sorted(session_vars.keys())))

  code.interact(banner, local=session_vars)
| gpl-3.0 |
microelly2/cadquery-freecad-module | CadQuery/Libs/pygments/lexers/matlab.py | 72 | 29146 | # -*- coding: utf-8 -*-
"""
pygments.lexers.matlab
~~~~~~~~~~~~~~~~~~~~~~
Lexers for Matlab and related languages.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Whitespace
from pygments.lexers import _scilab_builtins
__all__ = ['MatlabLexer', 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer']
class MatlabLexer(RegexLexer):
"""
For Matlab source code.
.. versionadded:: 0.10
"""
name = 'Matlab'
aliases = ['matlab']
filenames = ['*.m']
mimetypes = ['text/matlab']
#
# These lists are generated automatically.
# Run the following in bash shell:
#
# for f in elfun specfun elmat; do
# echo -n "$f = "
# matlab -nojvm -r "help $f;exit;" | perl -ne \
# 'push(@c,$1) if /^ (\w+)\s+-/; END {print q{["}.join(q{","},@c).qq{"]\n};}'
# done
#
# elfun: Elementary math functions
# specfun: Special Math functions
# elmat: Elementary matrices and matrix manipulation
#
# taken from Matlab version 7.4.0.336 (R2007a)
#
elfun = ("sin", "sind", "sinh", "asin", "asind", "asinh", "cos", "cosd", "cosh",
"acos", "acosd", "acosh", "tan", "tand", "tanh", "atan", "atand", "atan2",
"atanh", "sec", "secd", "sech", "asec", "asecd", "asech", "csc", "cscd",
"csch", "acsc", "acscd", "acsch", "cot", "cotd", "coth", "acot", "acotd",
"acoth", "hypot", "exp", "expm1", "log", "log1p", "log10", "log2", "pow2",
"realpow", "reallog", "realsqrt", "sqrt", "nthroot", "nextpow2", "abs",
"angle", "complex", "conj", "imag", "real", "unwrap", "isreal", "cplxpair",
"fix", "floor", "ceil", "round", "mod", "rem", "sign")
specfun = ("airy", "besselj", "bessely", "besselh", "besseli", "besselk", "beta",
"betainc", "betaln", "ellipj", "ellipke", "erf", "erfc", "erfcx",
"erfinv", "expint", "gamma", "gammainc", "gammaln", "psi", "legendre",
"cross", "dot", "factor", "isprime", "primes", "gcd", "lcm", "rat",
"rats", "perms", "nchoosek", "factorial", "cart2sph", "cart2pol",
"pol2cart", "sph2cart", "hsv2rgb", "rgb2hsv")
elmat = ("zeros", "ones", "eye", "repmat", "rand", "randn", "linspace", "logspace",
"freqspace", "meshgrid", "accumarray", "size", "length", "ndims", "numel",
"disp", "isempty", "isequal", "isequalwithequalnans", "cat", "reshape",
"diag", "blkdiag", "tril", "triu", "fliplr", "flipud", "flipdim", "rot90",
"find", "end", "sub2ind", "ind2sub", "bsxfun", "ndgrid", "permute",
"ipermute", "shiftdim", "circshift", "squeeze", "isscalar", "isvector",
"ans", "eps", "realmax", "realmin", "pi", "i", "inf", "nan", "isnan",
"isinf", "isfinite", "j", "why", "compan", "gallery", "hadamard", "hankel",
"hilb", "invhilb", "magic", "pascal", "rosser", "toeplitz", "vander",
"wilkinson")
tokens = {
'root': [
# line starting with '!' is sent as a system command. not sure what
# label to use...
(r'^!.*', String.Other),
(r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
(r'%.*$', Comment),
(r'^\s*function', Keyword, 'deffunc'),
# from 'iskeyword' on version 7.11 (R2010):
(words((
'break', 'case', 'catch', 'classdef', 'continue', 'else', 'elseif',
'end', 'enumerated', 'events', 'for', 'function', 'global', 'if',
'methods', 'otherwise', 'parfor', 'persistent', 'properties',
'return', 'spmd', 'switch', 'try', 'while'), suffix=r'\b'),
Keyword),
("(" + "|".join(elfun + specfun + elmat) + r')\b', Name.Builtin),
# line continuation with following comment:
(r'\.\.\..*$', Comment),
# operators:
(r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
# operators requiring escape for re:
(r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
# punctuation:
(r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
(r'=|:|;', Punctuation),
# quote can be transpose, instead of string:
# (not great, but handles common cases...)
(r'(?<=[\w)\].])\'+', Operator),
(r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
(r'\d+[eEf][+-]?[0-9]+', Number.Float),
(r'\d+', Number.Integer),
(r'(?<![\w)\].])\'', String, 'string'),
(r'[a-zA-Z_]\w*', Name),
(r'.', Text),
],
'string': [
(r'[^\']*\'', String, '#pop')
],
'blockcomment': [
(r'^\s*%\}', Comment.Multiline, '#pop'),
(r'^.*\n', Comment.Multiline),
(r'.', Comment.Multiline),
],
'deffunc': [
(r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
bygroups(Whitespace, Text, Whitespace, Punctuation,
Whitespace, Name.Function, Punctuation, Text,
Punctuation, Whitespace), '#pop'),
# function with no args
(r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
],
}
def analyse_text(text):
    """Score how likely *text* is Matlab source.

    Gives a small positive score when any line starts with a '%' comment
    or a '!' shell escape; otherwise falls through (returning None) so
    other lexers can win the guess.
    """
    if re.match(r'^\s*%', text, re.M):  # comment line
        return 0.2
    if re.match(r'^!\w+', text, re.M):  # system command escape
        return 0.2
line_re = re.compile('.*?\n')
class MatlabSessionLexer(Lexer):
    """
    For Matlab sessions. Modeled after PythonConsoleLexer.
    Contributed by Ken Schutte <kschutte@csail.mit.edu>.

    .. versionadded:: 0.10
    """
    name = 'Matlab session'
    aliases = ['matlabsession']

    def get_tokens_unprocessed(self, text):
        # Splits a transcript into prompts ('>>'), error lines ('???') and
        # program output; the Matlab code after the prompts is accumulated
        # and highlighted by a MatlabLexer via do_insertions().
        mlexer = MatlabLexer(**self.options)

        curcode = ''     # source code accumulated since the last output line
        insertions = []  # (offset-in-curcode, [(0, token, text)]) prompt/error markers

        for match in line_re.finditer(text):
            line = match.group()

            if line.startswith('>> '):
                # prompt followed by code: emit the prompt, queue the code
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:3])]))
                curcode += line[3:]

            elif line.startswith('>>'):
                # bare prompt without the trailing space
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:2])]))
                curcode += line[2:]

            elif line.startswith('???'):
                idx = len(curcode)

                # without is showing error on same line as before...?
                # line = "\n" + line
                token = (0, Generic.Traceback, line)
                insertions.append((idx, [token]))

            else:
                # plain output: flush any pending code through the lexer first
                if curcode:
                    for item in do_insertions(
                            insertions, mlexer.get_tokens_unprocessed(curcode)):
                        yield item
                    curcode = ''
                    insertions = []

                yield match.start(), Generic.Output, line

        # flush whatever code remains at end of input
        if curcode:  # or item:
            for item in do_insertions(
                    insertions, mlexer.get_tokens_unprocessed(curcode)):
                yield item
class OctaveLexer(RegexLexer):
    """
    For GNU Octave source code.

    .. versionadded:: 1.5
    """
    name = 'Octave'
    aliases = ['octave']
    filenames = ['*.m']
    mimetypes = ['text/octave']
# These lists are generated automatically.
# Run the following in bash shell:
#
# First dump all of the Octave manual into a plain text file:
#
# $ info octave --subnodes -o octave-manual
#
# Now grep through it:
# for i in \
# "Built-in Function" "Command" "Function File" \
# "Loadable Function" "Mapping Function";
# do
# perl -e '@name = qw('"$i"');
# print lc($name[0]),"_kw = [\n"';
#
# perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
# octave-manual | sort | uniq ;
# echo "]" ;
# echo;
# done
# taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
builtin_kw = (
"addlistener", "addpath", "addproperty", "all",
"and", "any", "argnames", "argv", "assignin",
"atexit", "autoload",
"available_graphics_toolkits", "beep_on_error",
"bitand", "bitmax", "bitor", "bitshift", "bitxor",
"cat", "cell", "cellstr", "char", "class", "clc",
"columns", "command_line_path",
"completion_append_char", "completion_matches",
"complex", "confirm_recursive_rmdir", "cputime",
"crash_dumps_octave_core", "ctranspose", "cumprod",
"cumsum", "debug_on_error", "debug_on_interrupt",
"debug_on_warning", "default_save_options",
"dellistener", "diag", "diff", "disp",
"doc_cache_file", "do_string_escapes", "double",
"drawnow", "e", "echo_executing_commands", "eps",
"eq", "errno", "errno_list", "error", "eval",
"evalin", "exec", "exist", "exit", "eye", "false",
"fclear", "fclose", "fcntl", "fdisp", "feof",
"ferror", "feval", "fflush", "fgetl", "fgets",
"fieldnames", "file_in_loadpath", "file_in_path",
"filemarker", "filesep", "find_dir_in_path",
"fixed_point_format", "fnmatch", "fopen", "fork",
"formula", "fprintf", "fputs", "fread", "freport",
"frewind", "fscanf", "fseek", "fskipl", "ftell",
"functions", "fwrite", "ge", "genpath", "get",
"getegid", "getenv", "geteuid", "getgid",
"getpgrp", "getpid", "getppid", "getuid", "glob",
"gt", "gui_mode", "history_control",
"history_file", "history_size",
"history_timestamp_format_string", "home",
"horzcat", "hypot", "ifelse",
"ignore_function_time_stamp", "inferiorto",
"info_file", "info_program", "inline", "input",
"intmax", "intmin", "ipermute",
"is_absolute_filename", "isargout", "isbool",
"iscell", "iscellstr", "ischar", "iscomplex",
"isempty", "isfield", "isfloat", "isglobal",
"ishandle", "isieee", "isindex", "isinteger",
"islogical", "ismatrix", "ismethod", "isnull",
"isnumeric", "isobject", "isreal",
"is_rooted_relative_filename", "issorted",
"isstruct", "isvarname", "kbhit", "keyboard",
"kill", "lasterr", "lasterror", "lastwarn",
"ldivide", "le", "length", "link", "linspace",
"logical", "lstat", "lt", "make_absolute_filename",
"makeinfo_program", "max_recursion_depth", "merge",
"methods", "mfilename", "minus", "mislocked",
"mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
"mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
"munlock", "nargin", "nargout",
"native_float_format", "ndims", "ne", "nfields",
"nnz", "norm", "not", "numel", "nzmax",
"octave_config_info", "octave_core_file_limit",
"octave_core_file_name",
"octave_core_file_options", "ones", "or",
"output_max_field_width", "output_precision",
"page_output_immediately", "page_screen_output",
"path", "pathsep", "pause", "pclose", "permute",
"pi", "pipe", "plus", "popen", "power",
"print_empty_dimensions", "printf",
"print_struct_array_contents", "prod",
"program_invocation_name", "program_name",
"putenv", "puts", "pwd", "quit", "rats", "rdivide",
"readdir", "readlink", "read_readline_init_file",
"realmax", "realmin", "rehash", "rename",
"repelems", "re_read_readline_init_file", "reset",
"reshape", "resize", "restoredefaultpath",
"rethrow", "rmdir", "rmfield", "rmpath", "rows",
"save_header_format_string", "save_precision",
"saving_history", "scanf", "set", "setenv",
"shell_cmd", "sighup_dumps_octave_core",
"sigterm_dumps_octave_core", "silent_functions",
"single", "size", "size_equal", "sizemax",
"sizeof", "sleep", "source", "sparse_auto_mutate",
"split_long_rows", "sprintf", "squeeze", "sscanf",
"stat", "stderr", "stdin", "stdout", "strcmp",
"strcmpi", "string_fill_char", "strncmp",
"strncmpi", "struct", "struct_levels_to_print",
"strvcat", "subsasgn", "subsref", "sum", "sumsq",
"superiorto", "suppress_verbose_help_message",
"symlink", "system", "tic", "tilde_expand",
"times", "tmpfile", "tmpnam", "toc", "toupper",
"transpose", "true", "typeinfo", "umask", "uminus",
"uname", "undo_string_escapes", "unlink", "uplus",
"upper", "usage", "usleep", "vec", "vectorize",
"vertcat", "waitpid", "warning", "warranty",
"whos_line_format", "yes_or_no", "zeros",
"inf", "Inf", "nan", "NaN")
command_kw = ("close", "load", "who", "whos")
function_kw = (
"accumarray", "accumdim", "acosd", "acotd",
"acscd", "addtodate", "allchild", "ancestor",
"anova", "arch_fit", "arch_rnd", "arch_test",
"area", "arma_rnd", "arrayfun", "ascii", "asctime",
"asecd", "asind", "assert", "atand",
"autoreg_matrix", "autumn", "axes", "axis", "bar",
"barh", "bartlett", "bartlett_test", "beep",
"betacdf", "betainv", "betapdf", "betarnd",
"bicgstab", "bicubic", "binary", "binocdf",
"binoinv", "binopdf", "binornd", "bitcmp",
"bitget", "bitset", "blackman", "blanks",
"blkdiag", "bone", "box", "brighten", "calendar",
"cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
"cauchy_rnd", "caxis", "celldisp", "center", "cgs",
"chisquare_test_homogeneity",
"chisquare_test_independence", "circshift", "cla",
"clabel", "clf", "clock", "cloglog", "closereq",
"colon", "colorbar", "colormap", "colperm",
"comet", "common_size", "commutation_matrix",
"compan", "compare_versions", "compass",
"computer", "cond", "condest", "contour",
"contourc", "contourf", "contrast", "conv",
"convhull", "cool", "copper", "copyfile", "cor",
"corrcoef", "cor_test", "cosd", "cotd", "cov",
"cplxpair", "cross", "cscd", "cstrcat", "csvread",
"csvwrite", "ctime", "cumtrapz", "curl", "cut",
"cylinder", "date", "datenum", "datestr",
"datetick", "datevec", "dblquad", "deal",
"deblank", "deconv", "delaunay", "delaunayn",
"delete", "demo", "detrend", "diffpara", "diffuse",
"dir", "discrete_cdf", "discrete_inv",
"discrete_pdf", "discrete_rnd", "display",
"divergence", "dlmwrite", "dos", "dsearch",
"dsearchn", "duplication_matrix", "durbinlevinson",
"ellipsoid", "empirical_cdf", "empirical_inv",
"empirical_pdf", "empirical_rnd", "eomday",
"errorbar", "etime", "etreeplot", "example",
"expcdf", "expinv", "expm", "exppdf", "exprnd",
"ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
"ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
"factorial", "fail", "fcdf", "feather", "fftconv",
"fftfilt", "fftshift", "figure", "fileattrib",
"fileparts", "fill", "findall", "findobj",
"findstr", "finv", "flag", "flipdim", "fliplr",
"flipud", "fpdf", "fplot", "fractdiff", "freqz",
"freqz_plot", "frnd", "fsolve",
"f_test_regression", "ftp", "fullfile", "fzero",
"gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
"gcbf", "gcbo", "gcf", "genvarname", "geocdf",
"geoinv", "geopdf", "geornd", "getfield", "ginput",
"glpk", "gls", "gplot", "gradient",
"graphics_toolkit", "gray", "grid", "griddata",
"griddatan", "gtext", "gunzip", "gzip", "hadamard",
"hamming", "hankel", "hanning", "hggroup",
"hidden", "hilb", "hist", "histc", "hold", "hot",
"hotelling_test", "housh", "hsv", "hurst",
"hygecdf", "hygeinv", "hygepdf", "hygernd",
"idivide", "ifftshift", "image", "imagesc",
"imfinfo", "imread", "imshow", "imwrite", "index",
"info", "inpolygon", "inputname", "interpft",
"interpn", "intersect", "invhilb", "iqr", "isa",
"isdefinite", "isdir", "is_duplicate_entry",
"isequal", "isequalwithequalnans", "isfigure",
"ishermitian", "ishghandle", "is_leap_year",
"isletter", "ismac", "ismember", "ispc", "isprime",
"isprop", "isscalar", "issquare", "isstrprop",
"issymmetric", "isunix", "is_valid_file_id",
"isvector", "jet", "kendall",
"kolmogorov_smirnov_cdf",
"kolmogorov_smirnov_test", "kruskal_wallis_test",
"krylov", "kurtosis", "laplace_cdf", "laplace_inv",
"laplace_pdf", "laplace_rnd", "legend", "legendre",
"license", "line", "linkprop", "list_primes",
"loadaudio", "loadobj", "logistic_cdf",
"logistic_inv", "logistic_pdf", "logistic_rnd",
"logit", "loglog", "loglogerr", "logm", "logncdf",
"logninv", "lognpdf", "lognrnd", "logspace",
"lookfor", "ls_command", "lsqnonneg", "magic",
"mahalanobis", "manova", "matlabroot",
"mcnemar_test", "mean", "meansq", "median", "menu",
"mesh", "meshc", "meshgrid", "meshz", "mexext",
"mget", "mkpp", "mode", "moment", "movefile",
"mpoles", "mput", "namelengthmax", "nargchk",
"nargoutchk", "nbincdf", "nbininv", "nbinpdf",
"nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
"nonzeros", "normcdf", "normest", "norminv",
"normpdf", "normrnd", "now", "nthroot", "null",
"ocean", "ols", "onenormest", "optimget",
"optimset", "orderfields", "orient", "orth",
"pack", "pareto", "parseparams", "pascal", "patch",
"pathdef", "pcg", "pchip", "pcolor", "pcr",
"peaks", "periodogram", "perl", "perms", "pie",
"pink", "planerot", "playaudio", "plot",
"plotmatrix", "plotyy", "poisscdf", "poissinv",
"poisspdf", "poissrnd", "polar", "poly",
"polyaffine", "polyarea", "polyderiv", "polyfit",
"polygcd", "polyint", "polyout", "polyreduce",
"polyval", "polyvalm", "postpad", "powerset",
"ppder", "ppint", "ppjumps", "ppplot", "ppval",
"pqpnonneg", "prepad", "primes", "print",
"print_usage", "prism", "probit", "qp", "qqplot",
"quadcc", "quadgk", "quadl", "quadv", "quiver",
"qzhess", "rainbow", "randi", "range", "rank",
"ranks", "rat", "reallog", "realpow", "realsqrt",
"record", "rectangle_lw", "rectangle_sw",
"rectint", "refresh", "refreshdata",
"regexptranslate", "repmat", "residue", "ribbon",
"rindex", "roots", "rose", "rosser", "rotdim",
"rref", "run", "run_count", "rundemos", "run_test",
"runtests", "saveas", "saveaudio", "saveobj",
"savepath", "scatter", "secd", "semilogx",
"semilogxerr", "semilogy", "semilogyerr",
"setaudio", "setdiff", "setfield", "setxor",
"shading", "shift", "shiftdim", "sign_test",
"sinc", "sind", "sinetone", "sinewave", "skewness",
"slice", "sombrero", "sortrows", "spaugment",
"spconvert", "spdiags", "spearman", "spectral_adf",
"spectral_xdf", "specular", "speed", "spencer",
"speye", "spfun", "sphere", "spinmap", "spline",
"spones", "sprand", "sprandn", "sprandsym",
"spring", "spstats", "spy", "sqp", "stairs",
"statistics", "std", "stdnormal_cdf",
"stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
"stem", "stft", "strcat", "strchr", "strjust",
"strmatch", "strread", "strsplit", "strtok",
"strtrim", "strtrunc", "structfun", "studentize",
"subplot", "subsindex", "subspace", "substr",
"substruct", "summer", "surf", "surface", "surfc",
"surfl", "surfnorm", "svds", "swapbytes",
"sylvester_matrix", "symvar", "synthesis", "table",
"tand", "tar", "tcdf", "tempdir", "tempname",
"test", "text", "textread", "textscan", "tinv",
"title", "toeplitz", "tpdf", "trace", "trapz",
"treelayout", "treeplot", "triangle_lw",
"triangle_sw", "tril", "trimesh", "triplequad",
"triplot", "trisurf", "triu", "trnd", "tsearchn",
"t_test", "t_test_regression", "type", "unidcdf",
"unidinv", "unidpdf", "unidrnd", "unifcdf",
"unifinv", "unifpdf", "unifrnd", "union", "unique",
"unix", "unmkpp", "unpack", "untabify", "untar",
"unwrap", "unzip", "u_test", "validatestring",
"vander", "var", "var_test", "vech", "ver",
"version", "view", "voronoi", "voronoin",
"waitforbuttonpress", "wavread", "wavwrite",
"wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
"welch_test", "what", "white", "whitebg",
"wienrnd", "wilcoxon_test", "wilkinson", "winter",
"xlabel", "xlim", "ylabel", "yulewalker", "zip",
"zlabel", "z_test")
loadable_kw = (
"airy", "amd", "balance", "besselh", "besseli",
"besselj", "besselk", "bessely", "bitpack",
"bsxfun", "builtin", "ccolamd", "cellfun",
"cellslices", "chol", "choldelete", "cholinsert",
"cholinv", "cholshift", "cholupdate", "colamd",
"colloc", "convhulln", "convn", "csymamd",
"cummax", "cummin", "daspk", "daspk_options",
"dasrt", "dasrt_options", "dassl", "dassl_options",
"dbclear", "dbdown", "dbstack", "dbstatus",
"dbstop", "dbtype", "dbup", "dbwhere", "det",
"dlmread", "dmperm", "dot", "eig", "eigs",
"endgrent", "endpwent", "etree", "fft", "fftn",
"fftw", "filter", "find", "full", "gcd",
"getgrent", "getgrgid", "getgrnam", "getpwent",
"getpwnam", "getpwuid", "getrusage", "givens",
"gmtime", "gnuplot_binary", "hess", "ifft",
"ifftn", "inv", "isdebugmode", "issparse", "kron",
"localtime", "lookup", "lsode", "lsode_options",
"lu", "luinc", "luupdate", "matrix_type", "max",
"min", "mktime", "pinv", "qr", "qrdelete",
"qrinsert", "qrshift", "qrupdate", "quad",
"quad_options", "qz", "rand", "rande", "randg",
"randn", "randp", "randperm", "rcond", "regexp",
"regexpi", "regexprep", "schur", "setgrent",
"setpwent", "sort", "spalloc", "sparse", "spparms",
"sprank", "sqrtm", "strfind", "strftime",
"strptime", "strrep", "svd", "svd_driver", "syl",
"symamd", "symbfact", "symrcm", "time", "tsearch",
"typecast", "urlread", "urlwrite")
mapping_kw = (
"abs", "acos", "acosh", "acot", "acoth", "acsc",
"acsch", "angle", "arg", "asec", "asech", "asin",
"asinh", "atan", "atanh", "beta", "betainc",
"betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
"cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
"erfcx", "erfinv", "exp", "finite", "fix", "floor",
"fmod", "gamma", "gammainc", "gammaln", "imag",
"isalnum", "isalpha", "isascii", "iscntrl",
"isdigit", "isfinite", "isgraph", "isinf",
"islower", "isna", "isnan", "isprint", "ispunct",
"isspace", "isupper", "isxdigit", "lcm", "lgamma",
"log", "lower", "mod", "real", "rem", "round",
"roundb", "sec", "sech", "sign", "sin", "sinh",
"sqrt", "tan", "tanh", "toascii", "tolower", "xor")
builtin_consts = (
"EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
"OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
"PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
"SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
"S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
"WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
"WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
"WSTOPSIG", "WTERMSIG", "WUNTRACED")
# Lexing rules for Octave.  Keyword/builtin name tuples (builtin_kw,
# command_kw, function_kw, loadable_kw, mapping_kw, builtin_consts) are the
# auto-generated tables defined above.
tokens = {
    'root': [
        # We should look into multiline comments
        (r'[%#].*$', Comment),
        (r'^\s*function', Keyword, 'deffunc'),

        # from 'iskeyword' on hg changeset 8cc154f45e37
        (words((
            '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
            'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
            'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
            'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
            'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
            'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
         Keyword),

        (words(builtin_kw + command_kw + function_kw + loadable_kw + mapping_kw,
               suffix=r'\b'), Name.Builtin),

        (words(builtin_consts, suffix=r'\b'), Name.Constant),

        # operators in Octave but not Matlab:
        (r'-=|!=|!|/=|--', Operator),

        # operators:
        (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),

        # operators in Octave but not Matlab requiring escape for re:
        (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*', Operator),

        # operators requiring escape for re:
        (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),

        # punctuation:
        (r'[\[\](){}:@.,]', Punctuation),
        (r'=|:|;', Punctuation),

        # double-quoted strings (Octave extension over Matlab)
        (r'"[^"]*"', String),

        (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
        (r'\d+[eEf][+-]?[0-9]+', Number.Float),
        (r'\d+', Number.Integer),

        # quote can be transpose, instead of string:
        # (not great, but handles common cases...)
        (r'(?<=[\w)\].])\'+', Operator),
        (r'(?<![\w)\].])\'', String, 'string'),

        (r'[a-zA-Z_]\w*', Name),
        (r'.', Text),
    ],
    'string': [
        # consume up to (and including) the closing quote
        (r"[^']*'", String, '#pop'),
    ],
    'deffunc': [
        # 'function [out] = name(args)' -- see MatlabLexer for group layout
        (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
         bygroups(Whitespace, Text, Whitespace, Punctuation,
                  Whitespace, Name.Function, Punctuation, Text,
                  Punctuation, Whitespace), '#pop'),
        # function with no args
        (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
    ],
}
class ScilabLexer(RegexLexer):
    """
    For Scilab source code.

    .. versionadded:: 1.5
    """
    name = 'Scilab'
    aliases = ['scilab']
    filenames = ['*.sci', '*.sce', '*.tst']
    mimetypes = ['text/scilab']

    # Lexing rules; builtin/command/macro/variable name tables come from the
    # _scilab_builtins support module.
    tokens = {
        'root': [
            (r'//.*?$', Comment.Single),
            (r'^\s*function', Keyword, 'deffunc'),

            (words((
                '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
                'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
                'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
                'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
                'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
                'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
             Keyword),

            (words(_scilab_builtins.functions_kw +
                   _scilab_builtins.commands_kw +
                   _scilab_builtins.macros_kw, suffix=r'\b'), Name.Builtin),

            (words(_scilab_builtins.variables_kw, suffix=r'\b'), Name.Constant),

            # operators:
            (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
            # operators requiring escape for re:
            (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),

            # punctuation:
            (r'[\[\](){}@.,=:;]', Punctuation),

            (r'"[^"]*"', String),

            # quote can be transpose, instead of string:
            # (not great, but handles common cases...)
            (r'(?<=[\w)\].])\'+', Operator),
            (r'(?<![\w)\].])\'', String, 'string'),

            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
            (r'\d+', Number.Integer),

            (r'[a-zA-Z_]\w*', Name),
            (r'.', Text),
        ],
        'string': [
            (r"[^']*'", String, '#pop'),
            # catch-all: consume one character and return to root
            (r'.', String, '#pop'),
        ],
        'deffunc': [
            # 'function [out] = name(args)' -- see MatlabLexer for group layout
            (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
             bygroups(Whitespace, Text, Whitespace, Punctuation,
                      Whitespace, Name.Function, Punctuation, Text,
                      Punctuation, Whitespace), '#pop'),
            # function with no args
            (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
        ],
    }
| lgpl-3.0 |
pwz3n0/pinball | pinball/persistence/token_data.py | 6 | 1860 | # Copyright 2015, Pinterest, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Token data represents the entities stored in the token data attribute."""
import abc
__author__ = 'Pawel Garbacki'
__copyright__ = 'Copyright 2015, Pinterest, Inc.'
__credits__ = [__author__]
__license__ = 'Apache'
__version__ = '2.0'
class TokenData(object):
    """Base class for entities stored in a token's data attribute."""
    __metaclass__ = abc.ABCMeta

    @property
    def _COMPATIBILITY_ATTRIBUTES(self):
        """Attributes to backfill with defaults when unpickling old objects.

        The attribute set of a class can change between versions, so we need
        to be able to deserialize (unpickle) an older version of an object
        into a newer implementation.  Subclasses may override this property
        to map attribute names that could be missing from a serialized
        object to the default values they should receive.

        Returns:
            Dictionary mapping attribute names to their default values.
        """
        return {}

    def __setstate__(self, state):
        # Adopt the pickled state wholesale, then backfill any attribute the
        # current class version expects but the serialized object lacks.
        self.__dict__ = state
        for name, default_value in self._COMPATIBILITY_ATTRIBUTES.items():
            self.__dict__.setdefault(name, default_value)
| apache-2.0 |
scode/pants | src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_analysis.py | 9 | 2544 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from zincutils.zinc_analysis import ZincAnalysis as UnderlyingAnalysis
from pants.backend.jvm.tasks.jvm_compile.analysis import Analysis
from pants.base.build_environment import get_buildroot
class ZincAnalysis(Analysis):
  """Parsed representation of a zinc analysis.

  A thin wrapper that delegates all real work to an underlying
  zincutils.ZincAnalysis instance.

  Note that all files in keys/values are full-path, just as they appear in
  the analysis file.  Callers wanting paths relative to the build root or
  the classes dir (or anything else) must compute those themselves.

  zincutils code is at https://github.com/pantsbuild/zincutils.
  See its README on how to publish it.

  To run a dev pants against zincutils sources, without having to publish,
  just add its root dir to your PYTHONPATH, e.g.:

  PYTHONPATH=/path/to/zincutils/ \
  ./pants test tests/python/pants_test/backend/jvm/tasks/jvm_compile/scala:zinc_analysis
  """

  FORMAT_VERSION_LINE = UnderlyingAnalysis.FORMAT_VERSION_LINE

  @classmethod
  def merge(cls, analyses):
    # Unwrap, merge at the zincutils level, then re-wrap the result.
    underlying = [analysis._underlying_analysis for analysis in analyses]
    return ZincAnalysis(UnderlyingAnalysis.merge(underlying))

  def __init__(self, underlying_analysis):
    self._underlying_analysis = underlying_analysis

  @property
  def underlying_analysis(self):
    return self._underlying_analysis

  def sources(self):
    return self.underlying_analysis.sources()

  def split(self, splits, catchall=False):
    # The underlying analysis expects absolute paths, so anchor any relative
    # ones at the build root before delegating.
    buildroot = get_buildroot()

    def _absolute(path):
      return path if os.path.isabs(path) else os.path.join(buildroot, path)

    abs_splits = [set(_absolute(path) for path in split) for split in splits]
    parts = self.underlying_analysis.split(abs_splits, catchall)
    return [ZincAnalysis(part) for part in parts]

  def write(self, outfile):
    self.underlying_analysis.write(outfile)

  def diff(self, other):
    return self.underlying_analysis.diff(other.underlying_analysis)

  def __eq__(self, other):
    return other is not None and self.underlying_analysis == other.underlying_analysis

  def __ne__(self, other):
    return not self.__eq__(other)

  def __hash__(self):
    return hash(self.underlying_analysis)

  def __str__(self):
    return str(self.underlying_analysis)

  def __unicode__(self):
    return unicode(self.underlying_analysis)  # noqa: F821 (Python 2 only)
| apache-2.0 |
CyanogenMod/lge-kernel-omap4 | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py | 12527 | 1935 | # Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
# futex(2) operation codes and flag bits (values from linux/futex.h).
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
# Mask that strips the flag bits above, leaving just the futex command.
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)

NSECS_PER_SEC = 1000000000
def avg(total, n):
    """Return the average of a running total over n samples.

    Uses plain division: under Python 2 two ints yield a truncated int.
    """
    return total / n
def nsecs(secs, nsecs):
    """Combine a (seconds, nanoseconds) pair into a total nanosecond count."""
    return secs * NSECS_PER_SEC + nsecs
def nsecs_secs(nsecs):
    """Return the whole-seconds part of a nanosecond timestamp.

    Uses plain division: truncating for ints under Python 2.
    """
    return nsecs / NSECS_PER_SEC
def nsecs_nsecs(nsecs):
    """Return the sub-second (nanosecond) remainder of a nanosecond timestamp."""
    return nsecs % NSECS_PER_SEC
def nsecs_str(nsecs):
    """Return nsecs formatted as '<seconds>.<nanoseconds>' (e.g. '    1.000000005')."""
    # Bug fix: the original wrote `str = "..." % (...),` -- the trailing comma
    # made the value a 1-tuple rather than a string (and the name shadowed the
    # str() builtin).  Return the formatted string directly.
    return "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs))
def add_stats(dict, key, value):
    """Fold a new sample into the (min, max, avg, count) tuple at dict[key].

    The 'avg' slot is a smoothed running value (each sample is averaged
    with the previous estimate), not a true arithmetic mean; that behavior
    is kept as-is for compatibility with existing report scripts.

    Note: the parameter name 'dict' shadows the builtin but is kept for
    backward compatibility with keyword callers.
    """
    if key not in dict:  # dict.has_key() was removed in Python 3
        dict[key] = (value, value, value, 1)
    else:
        lo, hi, smoothed, count = dict[key]
        if value < lo:
            lo = value
        if value > hi:
            hi = value
        smoothed = (smoothed + value) / 2
        dict[key] = (lo, hi, smoothed, count + 1)
def clear_term():
    """Clear the terminal: ESC[H homes the cursor, ESC[2J erases the screen."""
    print("\x1b[H\x1b[2J")
# Optional syscall-name support via the audit-libs-python package, which maps
# syscall numbers to names per machine architecture.  All of this is
# best-effort: if the module (or this machine's mapping) is unavailable we
# warn once and syscall_name() falls back to raw numbers.
audit_package_warned = False

try:
    import audit
    # Map uname machine names to audit's architecture constants.
    machine_to_id = {
        'x86_64': audit.MACH_86_64,
        'alpha' : audit.MACH_ALPHA,
        'ia64' : audit.MACH_IA64,
        'ppc' : audit.MACH_PPC,
        'ppc64' : audit.MACH_PPC64,
        's390' : audit.MACH_S390,
        's390x' : audit.MACH_S390X,
        'i386' : audit.MACH_X86,
        'i586' : audit.MACH_X86,
        'i686' : audit.MACH_X86,
    }
    try:
        # MACH_ARMEB may be missing from older audit bindings.
        machine_to_id['armeb'] = audit.MACH_ARMEB
    except:
        pass
    # os.uname()[4] is the machine hardware name, e.g. 'x86_64'.
    machine_id = machine_to_id[os.uname()[4]]
except:
    # ImportError, or KeyError for an architecture missing from the map.
    if not audit_package_warned:
        audit_package_warned = True
        print "Install the audit-libs-python package to get syscall names"
def syscall_name(id):
    """Return the name of syscall number `id`, or the number as a string.

    The bare except is deliberate: it covers both a failed lookup and the
    NameError raised when the audit module (and hence machine_id) never
    loaded above.
    """
    try:
        return audit.audit_syscall_to_name(id, machine_id)
    except:
        return str(id)
def strerror(nr):
    """Return the symbolic errno name for nr (e.g. 'ENOENT' for +/-2).

    Accepts either sign, since callers may pass kernel-style negative errno
    values.  Unknown numbers yield a descriptive string instead of raising.
    """
    try:
        return errno.errorcode[abs(nr)]
    except KeyError:  # narrowed from a bare except: only the lookup can miss
        return "Unknown %d errno" % nr
| gpl-2.0 |
LubyRuffy/spiderfoot | ext/stem/descriptor/remote.py | 12 | 26854 | # Copyright 2013-2015, Damian Johnson and The Tor Project
# See LICENSE for licensing information
"""
Module for remotely retrieving descriptors from directory authorities and
mirrors. This is most easily done through the
:class:`~stem.descriptor.remote.DescriptorDownloader` class, which issues
:class:`~stem.descriptor.remote.Query` instances to get you the descriptor
content. For example...
::
from stem.descriptor.remote import DescriptorDownloader
downloader = DescriptorDownloader(
use_mirrors = True,
timeout = 10,
)
query = downloader.get_server_descriptors()
print 'Exit Relays:'
try:
for desc in query.run():
if desc.exit_policy.is_exiting_allowed():
print ' %s (%s)' % (desc.nickname, desc.fingerprint)
print
print 'Query took %0.2f seconds' % query.runtime
except Exception as exc:
print 'Unable to retrieve the server descriptors: %s' % exc
If you don't care about errors then you can also simply iterate over the query
itself...
::
for desc in downloader.get_server_descriptors():
if desc.exit_policy.is_exiting_allowed():
print ' %s (%s)' % (desc.nickname, desc.fingerprint)
::
get_authorities - Provides tor directory information.
DirectoryAuthority - Information about a tor directory authority.
Query - Asynchronous request to download tor descriptors
|- start - issues the query if it isn't already running
+- run - blocks until the request is finished and provides the results
DescriptorDownloader - Configurable class for issuing queries
|- use_directory_mirrors - use directory mirrors to download future descriptors
|- get_server_descriptors - provides present server descriptors
|- get_extrainfo_descriptors - provides present extrainfo descriptors
|- get_microdescriptors - provides present microdescriptors
|- get_consensus - provides the present consensus or router status entries
|- get_key_certificates - provides present authority key certificates
+- query - request an arbitrary descriptor resource
.. versionadded:: 1.1.0
.. data:: MAX_FINGERPRINTS
Maximum number of descriptors that can requested at a time by their
fingerprints.
.. data:: MAX_MICRODESCRIPTOR_HASHES
Maximum number of microdescriptors that can requested at a time by their
hashes.
"""
import io
import random
import sys
import threading
import time
import zlib
try:
import urllib.request as urllib
except ImportError:
import urllib2 as urllib
import stem.descriptor
from stem import Flag
from stem.util import log
# Tor has a limited number of descriptors we can fetch explicitly by their
# fingerprint or hashes due to a limit on the url length by squid proxies.

MAX_FINGERPRINTS = 96
MAX_MICRODESCRIPTOR_HASHES = 92

# We commonly only want authorities that vote in the consensus, and hence have
# a v3ident.  This predicate is used to filter directory authority objects.

HAS_V3IDENT = lambda auth: auth.v3ident is not None
def _guess_descriptor_type(resource):
# Attempts to determine the descriptor type based on the resource url. This
# raises a ValueError if the resource isn't recognized.
if resource.startswith('/tor/server/'):
return 'server-descriptor 1.0'
elif resource.startswith('/tor/extra/'):
return 'extra-info 1.0'
elif resource.startswith('/tor/micro/'):
return 'microdescriptor 1.0'
elif resource.startswith('/tor/status-vote/'):
return 'network-status-consensus-3 1.0'
elif resource.startswith('/tor/keys/'):
return 'dir-key-certificate-3 1.0'
else:
raise ValueError("Unable to determine the descriptor type for '%s'" % resource)
class Query(object):
"""
Asynchronous request for descriptor content from a directory authority or
mirror. These can either be made through the
:class:`~stem.descriptor.remote.DescriptorDownloader` or directly for more
advanced usage.
To block on the response and get results either call
:func:`~stem.descriptor.remote.Query.run` or iterate over the Query. The
:func:`~stem.descriptor.remote.Query.run` method pass along any errors that
arise...
::
from stem.descriptor.remote import Query
query = Query(
'/tor/server/all.z',
block = True,
timeout = 30,
)
print 'Current relays:'
if not query.error:
for desc in query:
print desc.fingerprint
else:
print 'Unable to retrieve the server descriptors: %s' % query.error
... while iterating fails silently...
::
print 'Current relays:'
for desc in Query('/tor/server/all.z', 'server-descriptor 1.0'):
print desc.fingerprint
In either case exceptions are available via our 'error' attribute.
Tor provides quite a few different descriptor resources via its directory
protocol (see section 4.2 and later of the `dir-spec
<https://gitweb.torproject.org/torspec.git/tree/dir-spec.txt>`_).
Commonly useful ones include...
===================================== ===========
Resource Description
===================================== ===========
/tor/server/all.z all present server descriptors
/tor/server/fp/<fp1>+<fp2>+<fp3>.z server descriptors with the given fingerprints
/tor/extra/all.z all present extrainfo descriptors
/tor/extra/fp/<fp1>+<fp2>+<fp3>.z extrainfo descriptors with the given fingerprints
/tor/micro/d/<hash1>-<hash2>.z microdescriptors with the given hashes
/tor/status-vote/current/consensus.z present consensus
/tor/keys/all.z key certificates for the authorities
/tor/keys/fp/<v3ident1>+<v3ident2>.z key certificates for specific authorities
===================================== ===========
The '.z' suffix can be excluded to get a plaintext rather than compressed
response. Compression is handled transparently, so this shouldn't matter to
the caller.
:var str resource: resource being fetched, such as '/tor/server/all.z'
:var str descriptor_type: type of descriptors being fetched (for options see
:func:`~stem.descriptor.__init__.parse_file`), this is guessed from the
resource if **None**
:var list endpoints: (address, dirport) tuples of the authority or mirror
we're querying, this uses authorities if undefined
:var int retries: number of times to attempt the request if downloading it
fails
:var bool fall_back_to_authority: when retrying request issues the last
request to a directory authority if **True**
:var str content: downloaded descriptor content
:var Exception error: exception if a problem occurred
:var bool is_done: flag that indicates if our request has finished
:var str download_url: last url used to download the descriptor, this is
unset until we've actually made a download attempt
:var float start_time: unix timestamp when we first started running
:var float timeout: duration before we'll time out our request
:var float runtime: time our query took, this is **None** if it's not yet
finished
:var bool validate: checks the validity of the descriptor's content if
**True**, skips these checks otherwise
:var stem.descriptor.__init__.DocumentHandler document_handler: method in
which to parse a :class:`~stem.descriptor.networkstatus.NetworkStatusDocument`
:var dict kwargs: additional arguments for the descriptor constructor
:param bool start: start making the request when constructed (default is **True**)
:param bool block: only return after the request has been completed, this is
the same as running **query.run(True)** (default is **False**)
"""
    def __init__(self, resource, descriptor_type = None, endpoints = None, retries = 2, fall_back_to_authority = False, timeout = None, start = True, block = False, validate = False, document_handler = stem.descriptor.DocumentHandler.ENTRIES, **kwargs):
        # Resources are url paths relative to the directory server's root, so
        # they must begin with a slash (e.g. '/tor/server/all.z').
        if not resource.startswith('/'):
            raise ValueError("Resources should start with a '/': %s" % resource)
        self.resource = resource
        # If the caller doesn't say what descriptor type the resource provides,
        # infer it from the url.
        if descriptor_type:
            self.descriptor_type = descriptor_type
        else:
            self.descriptor_type = _guess_descriptor_type(resource)
        self.endpoints = endpoints if endpoints else []
        self.retries = retries
        self.fall_back_to_authority = fall_back_to_authority
        self.content = None  # raw downloaded bytes, set on success
        self.error = None  # exception from the download or parse, if any
        self.is_done = False
        self.download_url = None  # set on the first actual download attempt
        self.start_time = None
        self.timeout = timeout
        self.runtime = None
        self.validate = validate
        self.document_handler = document_handler
        self.kwargs = kwargs  # extra args for the descriptor constructor
        # The download runs on a background thread; the lock guards its creation.
        self._downloader_thread = None
        self._downloader_thread_lock = threading.RLock()
        if start:
            self.start()
        if block:
            self.run(True)  # block until the request completes, suppressing errors
    def start(self):
        """
        Starts downloading the descriptors if we haven't started already.
        """
        with self._downloader_thread_lock:
            # Only ever spawn a single download thread for this query.
            if self._downloader_thread is None:
                self._downloader_thread = threading.Thread(
                    name = 'Descriptor Query',
                    target = self._download_descriptors,
                    args = (self.retries,)
                )
                # Daemonize so a pending download doesn't block interpreter exit.
                self._downloader_thread.setDaemon(True)
                self._downloader_thread.start()
    def run(self, suppress = False):
        """
        Blocks until our request is complete then provides the descriptors. If we
        haven't yet started our request then this does so.

        :param bool suppress: avoids raising exceptions if **True**

        :returns: list for the requested :class:`~stem.descriptor.__init__.Descriptor` instances

        :raises:
          Using the iterator can fail with the following if **suppress** is
          **False**...

            * **ValueError** if the descriptor contents is malformed
            * **socket.timeout** if our request timed out
            * **urllib2.URLError** for most request failures

          Note that the urllib2 module may fail with other exception types, in
          which case we'll pass it along.
        """
        # Materialize the generator so all parsing (and any raising) happens here.
        return list(self._run(suppress))
    def _run(self, suppress):
        # Worker generator behind run() and __iter__: waits for the download
        # thread to finish, then lazily parses the content into descriptors.
        with self._downloader_thread_lock:
            self.start()  # no-op if the download is already underway
            self._downloader_thread.join()
            if self.error:
                if suppress:
                    return
                raise self.error
            else:
                # The download thread always sets either content or error, so a
                # None here indicates an internal bug.
                if self.content is None:
                    if suppress:
                        return
                    raise ValueError('BUG: _download_descriptors() finished without either results or an error')
                try:
                    results = stem.descriptor.parse_file(
                        io.BytesIO(self.content),
                        self.descriptor_type,
                        validate = self.validate,
                        document_handler = self.document_handler,
                        **self.kwargs
                    )
                    for desc in results:
                        yield desc
                except ValueError as exc:
                    self.error = exc  # encountered a parsing error
                    if suppress:
                        return
                    raise self.error
def __iter__(self):
for desc in self._run(True):
yield desc
def _pick_url(self, use_authority = False):
"""
Provides a url that can be queried. If we have multiple endpoints then one
will be picked randomly.
:param bool use_authority: ignores our endpoints and uses a directory
authority instead
:returns: **str** for the url being queried by this request
"""
if use_authority or not self.endpoints:
authority = random.choice(filter(HAS_V3IDENT, get_authorities().values()))
address, dirport = authority.address, authority.dir_port
else:
address, dirport = random.choice(self.endpoints)
return 'http://%s:%i/%s' % (address, dirport, self.resource.lstrip('/'))
def _download_descriptors(self, retries):
try:
use_authority = retries == 0 and self.fall_back_to_authority
self.download_url = self._pick_url(use_authority)
self.start_time = time.time()
response = urllib.urlopen(self.download_url, timeout = self.timeout).read()
if self.download_url.endswith('.z'):
response = zlib.decompress(response)
self.content = response.strip()
self.runtime = time.time() - self.start_time
log.trace("Descriptors retrieved from '%s' in %0.2fs" % (self.download_url, self.runtime))
except:
exc = sys.exc_info()[1]
if retries > 0:
log.debug("Unable to download descriptors from '%s' (%i retries remaining): %s" % (self.download_url, retries, exc))
return self._download_descriptors(retries - 1)
else:
log.debug("Unable to download descriptors from '%s': %s" % (self.download_url, exc))
self.error = exc
finally:
self.is_done = True
class DescriptorDownloader(object):
    """
    Configurable class that issues :class:`~stem.descriptor.remote.Query`
    instances on your behalf.

    :param bool use_mirrors: downloads the present consensus and uses the directory
      mirrors to fetch future requests, this fails silently if the consensus
      cannot be downloaded

    :param default_args: default arguments for the
      :class:`~stem.descriptor.remote.Query` constructor
    """

    def __init__(self, use_mirrors = False, **default_args):
        self._default_args = default_args

        # Start with the hardcoded authorities that vote in the consensus as
        # our endpoints; use_directory_mirrors() can expand this set later.
        authorities = filter(HAS_V3IDENT, get_authorities().values())
        self._endpoints = [(auth.address, auth.dir_port) for auth in authorities]

        if use_mirrors:
            try:
                start_time = time.time()
                self.use_directory_mirrors()
                log.debug('Retrieved directory mirrors (took %0.2fs)' % (time.time() - start_time))
            except Exception as exc:
                # Best effort: fall back to just the authorities.
                log.debug('Unable to retrieve directory mirrors: %s' % exc)

    def use_directory_mirrors(self):
        """
        Downloads the present consensus and configures ourselves to use directory
        mirrors, in addition to authorities.

        :returns: :class:`~stem.descriptor.networkstatus.NetworkStatusDocumentV3`
          from which we got the directory mirrors

        :raises: **Exception** if unable to determine the directory mirrors
        """
        authorities = filter(HAS_V3IDENT, get_authorities().values())
        new_endpoints = set([(auth.address, auth.dir_port) for auth in authorities])

        consensus = list(self.get_consensus(document_handler = stem.descriptor.DocumentHandler.DOCUMENT).run())[0]

        for desc in consensus.routers.values():
            if Flag.V2DIR in desc.flags:
                new_endpoints.add((desc.address, desc.dir_port))

        # we need our endpoints to be a list rather than set for random.choice()
        self._endpoints = list(new_endpoints)

        return consensus

    def get_server_descriptors(self, fingerprints = None, **query_args):
        """
        Provides the server descriptors with the given fingerprints. If no
        fingerprints are provided then this returns all descriptors in the present
        consensus.

        :param str,list fingerprints: fingerprint or list of fingerprints to be
          retrieved, gets all descriptors if **None**

        :param query_args: additional arguments for the
          :class:`~stem.descriptor.remote.Query` constructor

        :returns: :class:`~stem.descriptor.remote.Query` for the server descriptors

        :raises: **ValueError** if we request more than 96 descriptors by their
          fingerprints (this is due to a limit on the url length by squid proxies).
        """
        resource = '/tor/server/all.z'
        if isinstance(fingerprints, str):
            fingerprints = [fingerprints]
        if fingerprints:
            if len(fingerprints) > MAX_FINGERPRINTS:
                raise ValueError('Unable to request more than %i descriptors at a time by their fingerprints' % MAX_FINGERPRINTS)
            resource = '/tor/server/fp/%s.z' % '+'.join(fingerprints)
        return self.query(resource, **query_args)

    def get_extrainfo_descriptors(self, fingerprints = None, **query_args):
        """
        Provides the extrainfo descriptors with the given fingerprints. If no
        fingerprints are provided then this returns all descriptors in the present
        consensus.

        :param str,list fingerprints: fingerprint or list of fingerprints to be
          retrieved, gets all descriptors if **None**

        :param query_args: additional arguments for the
          :class:`~stem.descriptor.remote.Query` constructor

        :returns: :class:`~stem.descriptor.remote.Query` for the extrainfo descriptors

        :raises: **ValueError** if we request more than 96 descriptors by their
          fingerprints (this is due to a limit on the url length by squid proxies).
        """
        resource = '/tor/extra/all.z'
        if isinstance(fingerprints, str):
            fingerprints = [fingerprints]
        if fingerprints:
            if len(fingerprints) > MAX_FINGERPRINTS:
                raise ValueError('Unable to request more than %i descriptors at a time by their fingerprints' % MAX_FINGERPRINTS)
            resource = '/tor/extra/fp/%s.z' % '+'.join(fingerprints)
        return self.query(resource, **query_args)

    def get_microdescriptors(self, hashes, **query_args):
        """
        Provides the microdescriptors with the given hashes. To get these see the
        'microdescriptor_hashes' attribute of
        :class:`~stem.descriptor.router_status_entry.RouterStatusEntryV3`. Note
        that these are only provided via a microdescriptor consensus (such as
        'cached-microdesc-consensus' in your data directory).

        :param str,list hashes: microdescriptor hash or list of hashes to be
          retrieved

        :param query_args: additional arguments for the
          :class:`~stem.descriptor.remote.Query` constructor

        :returns: :class:`~stem.descriptor.remote.Query` for the microdescriptors

        :raises: **ValueError** if we request more than 92 microdescriptors by their
          hashes (this is due to a limit on the url length by squid proxies).
        """
        if isinstance(hashes, str):
            hashes = [hashes]
        if len(hashes) > MAX_MICRODESCRIPTOR_HASHES:
            raise ValueError('Unable to request more than %i microdescriptors at a time by their hashes' % MAX_MICRODESCRIPTOR_HASHES)
        return self.query('/tor/micro/d/%s.z' % '-'.join(hashes), **query_args)

    def get_consensus(self, authority_v3ident = None, **query_args):
        """
        Provides the present router status entries.

        :param str authority_v3ident: fingerprint of the authority key for which
          to get the consensus, see `'v3ident' in tor's config.c
          <https://gitweb.torproject.org/tor.git/tree/src/or/config.c#n819>`_
          for the values.

        :param query_args: additional arguments for the
          :class:`~stem.descriptor.remote.Query` constructor

        :returns: :class:`~stem.descriptor.remote.Query` for the router status
          entries
        """
        resource = '/tor/status-vote/current/consensus'
        if authority_v3ident:
            resource += '/%s' % authority_v3ident
        return self.query(resource + '.z', **query_args)

    def get_vote(self, authority, **query_args):
        """
        Provides the present vote for a given directory authority.

        :param stem.descriptor.remote.DirectoryAuthority authority: authority for which to retrieve a vote for

        :param query_args: additional arguments for the
          :class:`~stem.descriptor.remote.Query` constructor

        :returns: :class:`~stem.descriptor.remote.Query` for the router status
          entries
        """
        resource = '/tor/status-vote/current/authority'
        # Bug fix: this previously tested for 'endpoint' (singular) while the
        # key actually set and consumed is 'endpoints', so a caller-supplied
        # endpoints value was always overwritten.
        if 'endpoints' not in query_args:
            query_args['endpoints'] = [(authority.address, authority.dir_port)]
        return self.query(resource + '.z', **query_args)

    def get_key_certificates(self, authority_v3idents = None, **query_args):
        """
        Provides the key certificates for authorities with the given fingerprints.
        If no fingerprints are provided then this returns all present key
        certificates.

        :param str authority_v3idents: fingerprint or list of fingerprints of the
          authority keys, see `'v3ident' in tor's config.c
          <https://gitweb.torproject.org/tor.git/tree/src/or/config.c#n819>`_
          for the values.

        :param query_args: additional arguments for the
          :class:`~stem.descriptor.remote.Query` constructor

        :returns: :class:`~stem.descriptor.remote.Query` for the key certificates

        :raises: **ValueError** if we request more than 96 key certificates by
          their identity fingerprints (this is due to a limit on the url length by
          squid proxies).
        """
        resource = '/tor/keys/all.z'
        if isinstance(authority_v3idents, str):
            authority_v3idents = [authority_v3idents]
        if authority_v3idents:
            if len(authority_v3idents) > MAX_FINGERPRINTS:
                raise ValueError('Unable to request more than %i key certificates at a time by their identity fingerprints' % MAX_FINGERPRINTS)
            resource = '/tor/keys/fp/%s.z' % '+'.join(authority_v3idents)
        return self.query(resource, **query_args)

    def query(self, resource, **query_args):
        """
        Issues a request for the given resource.

        :param str resource: resource being fetched, such as '/tor/server/all.z'

        :param query_args: additional arguments for the
          :class:`~stem.descriptor.remote.Query` constructor

        :returns: :class:`~stem.descriptor.remote.Query` for the descriptors

        :raises: **ValueError** if resource is clearly invalid or the descriptor
          type can't be determined when 'descriptor_type' is **None**
        """
        # Caller-provided args override our defaults, and our endpoint /
        # fallback settings only apply when the caller didn't specify them.
        args = dict(self._default_args)
        args.update(query_args)
        if 'endpoints' not in args:
            args['endpoints'] = self._endpoints
        if 'fall_back_to_authority' not in args:
            args['fall_back_to_authority'] = True
        return Query(
            resource,
            **args
        )
class DirectoryAuthority(object):
    """
    Tor directory authority, a special type of relay `hardcoded into tor
    <https://gitweb.torproject.org/tor.git/tree/src/or/config.c#n819>`_
    that enumerates the other relays within the network.

    At a very high level tor works as follows...

    1. A volunteer starts up a new tor relay, during which it sends a `server
       descriptor <server_descriptor.html>`_ to each of the directory
       authorities.

    2. Each hour the directory authorities make a `vote <networkstatus.html>`_
       that says who they think the active relays are in the network and some
       attributes about them.

    3. The directory authorities send each other their votes, and compile that
       into the `consensus <networkstatus.html>`_. This document is very similar
       to the votes, the only difference being that the majority of the
       authorities agree upon and sign this document. The individual relay
       entries in the vote or consensus are called `router status entries
       <router_status_entry.html>`_.

    4. Tor clients (people using the service) download the consensus from one of
       the authorities or a mirror to determine the active relays within the
       network. They in turn use this to construct their circuits and use the
       network.

    .. versionchanged:: 1.3.0
       Added the is_bandwidth_authority attribute.

    :var str nickname: nickname of the authority
    :var str address: IP address of the authority, currently they're all IPv4 but
      this may not always be the case
    :var int or_port: port on which the relay services relay traffic
    :var int dir_port: port on which directory information is available
    :var str fingerprint: relay fingerprint
    :var str v3ident: identity key fingerprint used to sign votes and consensus
    """

    def __init__(self, nickname = None, address = None, or_port = None, dir_port = None, is_bandwidth_authority = False, fingerprint = None, v3ident = None):
        # Plain data holder: every attribute simply mirrors its constructor
        # argument.
        for attr_name, attr_value in (
            ('nickname', nickname),
            ('address', address),
            ('or_port', or_port),
            ('dir_port', dir_port),
            ('is_bandwidth_authority', is_bandwidth_authority),
            ('fingerprint', fingerprint),
            ('v3ident', v3ident),
        ):
            setattr(self, attr_name, attr_value)
# Hardcoded snapshot of tor's directory authorities (see get_authorities()).
# Keyed by nickname; values mirror the entries in tor's config.c.
DIRECTORY_AUTHORITIES = {
    'moria1': DirectoryAuthority(
        nickname = 'moria1',
        address = '128.31.0.39',
        or_port = 9101,
        dir_port = 9131,
        is_bandwidth_authority = True,
        fingerprint = '9695DFC35FFEB861329B9F1AB04C46397020CE31',
        v3ident = 'D586D18309DED4CD6D57C18FDB97EFA96D330566',
    ),
    'tor26': DirectoryAuthority(
        nickname = 'tor26',
        address = '86.59.21.38',
        or_port = 443,
        dir_port = 80,
        is_bandwidth_authority = True,
        fingerprint = '847B1F850344D7876491A54892F904934E4EB85D',
        v3ident = '14C131DFC5C6F93646BE72FA1401C02A8DF2E8B4',
    ),
    'dizum': DirectoryAuthority(
        nickname = 'dizum',
        address = '194.109.206.212',
        or_port = 443,
        dir_port = 80,
        is_bandwidth_authority = False,
        fingerprint = '7EA6EAD6FD83083C538F44038BBFA077587DD755',
        v3ident = 'E8A9C45EDE6D711294FADF8E7951F4DE6CA56B58',
    ),
    'Tonga': DirectoryAuthority(
        nickname = 'Tonga',
        address = '82.94.251.203',
        or_port = 443,
        dir_port = 80,
        is_bandwidth_authority = False,
        fingerprint = '4A0CCD2DDC7995083D73F5D667100C8A5831F16D',
        v3ident = None, # does not vote in the consensus
    ),
    'gabelmoo': DirectoryAuthority(
        nickname = 'gabelmoo',
        address = '131.188.40.189',
        or_port = 443,
        dir_port = 80,
        is_bandwidth_authority = True,
        fingerprint = 'F2044413DAC2E02E3D6BCF4735A19BCA1DE97281',
        v3ident = 'ED03BB616EB2F60BEC80151114BB25CEF515B226',
    ),
    'dannenberg': DirectoryAuthority(
        nickname = 'dannenberg',
        address = '193.23.244.244',
        or_port = 443,
        dir_port = 80,
        is_bandwidth_authority = False,
        fingerprint = '7BE683E65D48141321C5ED92F075C55364AC7123',
        v3ident = '585769C78764D58426B8B52B6651A5A71137189A',
    ),
    'urras': DirectoryAuthority(
        nickname = 'urras',
        address = '208.83.223.34',
        or_port = 80,
        dir_port = 443,
        is_bandwidth_authority = False,
        fingerprint = '0AD3FA884D18F89EEA2D89C019379E0E7FD94417',
        v3ident = '80550987E1D626E3EBA5E5E75A458DE0626D088C',
    ),
    'maatuska': DirectoryAuthority(
        nickname = 'maatuska',
        address = '171.25.193.9',
        or_port = 80,
        dir_port = 443,
        is_bandwidth_authority = True,
        fingerprint = 'BD6A829255CB08E66FBE7D3748363586E46B3810',
        v3ident = '49015F787433103580E3B66A1707A00E60F2D15B',
    ),
    'Faravahar': DirectoryAuthority(
        nickname = 'Faravahar',
        address = '154.35.175.225',
        or_port = 443,
        dir_port = 80,
        is_bandwidth_authority = False,
        fingerprint = 'CF6D0AAFB385BE71B8E111FC5CFF4B47923733BC',
        v3ident = 'EFCBE720AB3A82B99F9E953CD5BF50F7EEFC7B97',
    ),
    'longclaw': DirectoryAuthority(
        nickname = 'longclaw',
        address = '199.254.238.52',
        or_port = 443,
        dir_port = 80,
        is_bandwidth_authority = True,
        fingerprint = '74A910646BCEEFBCD2E874FC1DC997430F968145',
        v3ident = '23D15D965BC35114467363C165C4F724B64B4F66',
    ),
}
def get_authorities():
    """
    Provides the Tor directory authority information as of **Tor on 11/21/14**.

    The directory information is hardcoded into Tor and occasionally changes, so
    the information this provides might not necessarily match your version of
    tor.

    :returns: dict of str nicknames to :class:`~stem.descriptor.remote.DirectoryAuthority` instances
    """
    # Hand back a shallow copy so callers can't mutate our module level table.
    return DIRECTORY_AUTHORITIES.copy()
| gpl-2.0 |
alkemics/luigi | examples/foo_complex.py | 8 | 1870 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import shutil
import time
import random
import luigi
# Tuning knobs for the randomly generated task graph below.
max_depth = 10  # divisor bounding how many Bar dependencies are yielded per level
max_total_nodes = 50  # overall cap on the number of Bar tasks created
current_nodes = 0  # running count of Bar tasks created so far (mutated globally)
class Foo(luigi.Task):
    """Root task that fans out into a handful of :class:`Bar` dependencies."""
    task_namespace = 'examples'

    def run(self):
        print("Running Foo")

    def requires(self):
        global current_nodes
        # Use floor division so this also works under python 3, where '/'
        # yields a float and range() would raise a TypeError.
        for i in range(30 // max_depth):
            current_nodes += 1
            yield Bar(i)
class Bar(luigi.Task):
    """Leaf-ish task that may recursively require more Bar tasks, up to the
    global max_total_nodes cap."""
    task_namespace = 'examples'
    num = luigi.IntParameter()

    def run(self):
        time.sleep(1)
        # Touch our target so the scheduler considers this task complete.
        self.output().open('w').close()

    def requires(self):
        global current_nodes
        if max_total_nodes > current_nodes:
            valor = int(random.uniform(1, 30))
            # Floor division keeps range() happy under python 3 as well,
            # matching the python 2 integer-division behavior.
            for i in range(valor // max_depth):
                current_nodes += 1
                yield Bar(current_nodes)

    def output(self):
        """
        Returns the target output for this task.

        :return: the target output for this task.
        :rtype: object (:py:class:`~luigi.target.Target`)
        """
        time.sleep(1)
        return luigi.LocalTarget('/tmp/bar/%d' % self.num)
if __name__ == "__main__":
    # Start from a clean slate so reruns aren't satisfied by stale outputs.
    if os.path.exists('/tmp/bar'):
        shutil.rmtree('/tmp/bar')
    luigi.run(['--task', 'Foo', '--workers', '2'], use_optparse=True)
| apache-2.0 |
Ormod/zookeeper | src/contrib/monitoring/test.py | 114 | 8195 | #! /usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import socket
import sys
from StringIO import StringIO
from check_zookeeper import ZooKeeperServer, NagiosHandler, CactiHandler, GangliaHandler
ZK_MNTR_OUTPUT = """zk_version\t3.4.0--1, built on 06/19/2010 15:07 GMT
zk_avg_latency\t1
zk_max_latency\t132
zk_min_latency\t0
zk_packets_received\t640
zk_packets_sent\t639
zk_outstanding_requests\t0
zk_server_state\tfollower
zk_znode_count\t4
zk_watch_count\t0
zk_ephemerals_count\t0
zk_approximate_data_size\t27
zk_open_file_descriptor_count\t22
zk_max_file_descriptor_count\t1024
"""
ZK_MNTR_OUTPUT_WITH_BROKEN_LINES = """zk_version\t3.4.0
zk_avg_latency\t23
broken-line
"""
ZK_STAT_OUTPUT = """Zookeeper version: 3.3.0-943314, built on 05/11/2010 22:20 GMT
Clients:
/0:0:0:0:0:0:0:1:34564[0](queued=0,recved=1,sent=0)
Latency min/avg/max: 0/40/121
Received: 11
Sent: 10
Outstanding: 0
Zxid: 0x700000003
Mode: follower
Node count: 4
"""
class SocketMock(object):
    """Socket stand-in that records what's written and replays `mntr` output."""

    def __init__(self):
        self.sent = []  # every payload handed to send(), in order

    def settimeout(self, timeout):
        self.timeout = timeout

    def connect(self, address):
        self.address = address

    def send(self, data):
        self.sent.append(data)
        return len(data)

    def recv(self, size):
        # Replay up to `size` characters of the canned `mntr` response.
        return ZK_MNTR_OUTPUT[:size]

    def close(self):
        pass
class ZK33xSocketMock(SocketMock):
    """Mocks a 3.3.x server: it answers `stat` but not the newer `mntr`."""

    def __init__(self):
        SocketMock.__init__(self)
        self.got_stat_cmd = False

    def recv(self, size):
        # Only reply once the client has actually issued the `stat` command.
        return ZK_STAT_OUTPUT[:size] if 'stat' in self.sent else ''
class UnableToConnectSocketMock(SocketMock):
    """Mocks a host that refuses connections outright."""

    def connect(self, address):
        raise socket.error('[Errno 111] Connection refused')
def create_server_mock(socket_class):
    """Builds a ZooKeeperServer whose network socket is the given mock class."""

    class PatchedZooKeeperServer(ZooKeeperServer):
        def _create_socket(self):
            return socket_class()

    return PatchedZooKeeperServer()
class TestCheckZookeeper(unittest.TestCase):
    """Tests for ZooKeeperServer's line parsing and stat gathering."""

    def setUp(self):
        self.zk = ZooKeeperServer()

    def test_parse_valid_line(self):
        key, value = self.zk._parse_line('something\t5')
        self.assertEqual(key, 'something')
        self.assertEqual(value, 5)

    def test_parse_line_raises_exception_on_invalid_output(self):
        # Lines must be exactly 'key<TAB>value'.
        invalid_lines = ['something', '', 'a\tb\tc', '\t1']
        for line in invalid_lines:
            self.assertRaises(ValueError, self.zk._parse_line, line)

    def test_parser_on_valid_output(self):
        data = self.zk._parse(ZK_MNTR_OUTPUT)
        self.assertEqual(len(data), 14)
        self.assertEqual(data['zk_znode_count'], 4)

    def test_parse_should_ignore_invalid_lines(self):
        data = self.zk._parse(ZK_MNTR_OUTPUT_WITH_BROKEN_LINES)
        self.assertEqual(len(data), 2)

    def test_parse_stat_valid_output(self):
        data = self.zk._parse_stat(ZK_STAT_OUTPUT)
        result = {
            'zk_version' : '3.3.0-943314, built on 05/11/2010 22:20 GMT',
            'zk_min_latency' : 0,
            'zk_avg_latency' : 40,
            'zk_max_latency' : 121,
            'zk_packets_received': 11,
            'zk_packets_sent': 10,
            'zk_server_state': 'follower',
            'zk_znode_count': 4
        }
        # NOTE: dict.iteritems() makes this test python 2 only.
        for k, v in result.iteritems():
            self.assertEqual(v, data[k])

    def test_recv_valid_output(self):
        zk = create_server_mock(SocketMock)
        data = zk.get_stats()
        self.assertEqual(len(data), 14)
        self.assertEqual(data['zk_znode_count'], 4)

    def test_socket_unable_to_connect(self):
        zk = create_server_mock(UnableToConnectSocketMock)
        self.assertRaises(socket.error, zk.get_stats)

    def test_use_stat_cmd_if_mntr_is_not_available(self):
        # 3.3.x servers don't implement `mntr`; the checker should fall back
        # to the older `stat` command.
        zk = create_server_mock(ZK33xSocketMock)
        data = zk.get_stats()
        self.assertEqual(data['zk_version'], '3.3.0-943314, built on 05/11/2010 22:20 GMT')
class HandlerTestCase(unittest.TestCase):
    """Base class that captures stdout so handler output can be inspected."""

    def setUp(self):
        # Stash the real stdout once (safe across repeated setUp calls), then
        # replace it with an in-memory buffer.
        try:
            sys._stdout
        except:
            sys._stdout = sys.stdout
        sys.stdout = StringIO()

    def tearDown(self):
        sys.stdout = sys._stdout

    def output(self):
        # Provides everything written to stdout during the test.
        sys.stdout.seek(0)
        return sys.stdout.read()
class TestNagiosHandler(HandlerTestCase):
    """Checks the exit codes and perfdata lines produced by the Nagios handler."""

    def _analyze(self, w, c, k, stats):
        # Helper: run the handler for a single host with the given
        # warning/critical thresholds and stat key.
        class Opts(object):
            warning = w
            critical = c
            key = k
        return NagiosHandler().analyze(Opts(), {'localhost:2181':stats})

    def test_ok_status(self):
        r = self._analyze(10, 20, 'a', {'a': 5})
        self.assertEqual(r, 0)
        self.assertEqual(self.output(), 'Ok "a"!|localhost:2181=5;10;20\n')
        # Inverted thresholds (warning > critical): high values are fine.
        r = self._analyze(20, 10, 'a', {'a': 30})
        self.assertEqual(r, 0)

    def test_warning_status(self):
        r = self._analyze(10, 20, 'a', {'a': 15})
        self.assertEqual(r, 1)
        self.assertEqual(self.output(),
            'Warning "a" localhost:2181!|localhost:2181=15;10;20\n')
        r = self._analyze(20, 10, 'a', {'a': 15})
        self.assertEqual(r, 1)

    def test_critical_status(self):
        r = self._analyze(10, 20, 'a', {'a': 30})
        self.assertEqual(r, 2)
        self.assertEqual(self.output(),
            'Critical "a" localhost:2181!|localhost:2181=30;10;20\n')
        r = self._analyze(20, 10, 'a', {'a': 5})
        self.assertEqual(r, 2)

    def test_check_a_specific_key_on_all_hosts(self):
        # The worst status across all hosts determines the exit code.
        class Opts(object):
            warning = 10
            critical = 20
            key = 'latency'
        r = NagiosHandler().analyze(Opts(), {
            's1:2181': {'latency': 5},
            's2:2181': {'latency': 15},
            's3:2181': {'latency': 35},
        })
        self.assertEqual(r, 2)
        self.assertEqual(self.output(),
            'Critical "latency" s3:2181!|s1:2181=5;10;20 '\
            's3:2181=35;10;20 s2:2181=15;10;20\n')
class TestCactiHandler(HandlerTestCase):
    """Checks the output format produced by the Cacti handler."""

    class Opts(object):
        # Minimal stand-in for the parsed command line options.
        key = 'a'
        leader = False

        def __init__(self, leader=False):
            self.leader = leader

    def test_output_values_for_all_hosts(self):
        r = CactiHandler().analyze(TestCactiHandler.Opts(), {
            's1:2181':{'a':1},
            's2:2181':{'a':2, 'b':3}
        })
        self.assertEqual(r, None)
        # Host names have ':' replaced with '_' in the cacti output.
        self.assertEqual(self.output(), 's1_2181:1 s2_2181:2')

    def test_output_single_value_for_leader(self):
        # With --leader only the leader's value is reported.
        r = CactiHandler().analyze(TestCactiHandler.Opts(leader=True), {
            's1:2181': {'a':1, 'zk_server_state': 'leader'},
            's2:2181': {'a':2}
        })
        self.assertEqual(r, 0)
        self.assertEqual(self.output(), '1\n')
class TestGangliaHandler(unittest.TestCase):
    """Checks the gmetric command lines built by the Ganglia handler."""

    class TestableGangliaHandler(GangliaHandler):
        # Records gmetric invocations instead of spawning subprocesses.
        def __init__(self):
            GangliaHandler.__init__(self)
            self.cli_calls = []

        def call(self, cli):
            self.cli_calls.append(' '.join(cli))

    def test_send_single_metric(self):
        class Opts(object):
            @property
            def gmetric(self): return '/usr/bin/gmetric'
        opts = Opts()

        h = TestGangliaHandler.TestableGangliaHandler()
        h.analyze(opts, {'localhost:2181':{'latency':10}})

        cmd = "%s -n latency -v 10 -t uint32" % opts.gmetric
        assert cmd in h.cli_calls
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
drawks/ansible | test/units/modules/network/aireos/test_aireos_config.py | 68 | 5363 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.aireos import aireos_config
from units.modules.utils import set_module_args
from .aireos_module import TestCiscoWlcModule, load_fixture
class TestCiscoWlcConfigModule(TestCiscoWlcModule):
    """Unit tests for the aireos_config module.

    All controller interactions (get/load/run/save) are patched out in setUp
    so the tests never touch a real device.
    """

    module = aireos_config

    def setUp(self):
        super(TestCiscoWlcConfigModule, self).setUp()

        self.mock_get_config = patch('ansible.modules.network.aireos.aireos_config.get_config')
        self.get_config = self.mock_get_config.start()

        self.mock_load_config = patch('ansible.modules.network.aireos.aireos_config.load_config')
        self.load_config = self.mock_load_config.start()

        self.mock_run_commands = patch('ansible.modules.network.aireos.aireos_config.run_commands')
        self.run_commands = self.mock_run_commands.start()

        self.mock_save_config = patch('ansible.modules.network.aireos.aireos_config.save_config')
        self.save_config = self.mock_save_config.start()

    def tearDown(self):
        super(TestCiscoWlcConfigModule, self).tearDown()
        self.mock_get_config.stop()
        self.mock_load_config.stop()
        self.mock_run_commands.stop()
        # Bug fix: this patch was started in setUp but never stopped, leaking
        # the save_config patch across tests.
        self.mock_save_config.stop()

    def load_fixtures(self, commands=None):
        # The running config for every test is the same canned fixture.
        config_file = 'aireos_config_config.cfg'
        self.get_config.return_value = load_fixture(config_file)
        self.load_config.return_value = None

    def test_aireos_config_unchanged(self):
        src = load_fixture('aireos_config_config.cfg')
        set_module_args(dict(src=src))
        self.execute_module()

    def test_aireos_config_src(self):
        src = load_fixture('aireos_config_src.cfg')
        set_module_args(dict(src=src))
        commands = ['sysname foo', 'interface address dynamic-interface mtc-1 10.33.20.4 255.255.255.0 10.33.20.2']
        self.execute_module(changed=True, commands=commands)

    def test_aireos_config_backup(self):
        set_module_args(dict(backup=True))
        result = self.execute_module()
        self.assertIn('__backup__', result)

    def test_aireos_config_save(self):
        set_module_args(dict(save=True))
        self.execute_module()
        # Saving alone should neither fetch nor push configuration.
        self.assertEqual(self.save_config.call_count, 1)
        self.assertEqual(self.get_config.call_count, 0)
        self.assertEqual(self.load_config.call_count, 0)

    def test_aireos_config_before(self):
        set_module_args(dict(lines=['sysname foo'], before=['test1', 'test2']))
        commands = ['test1', 'test2', 'sysname foo']
        self.execute_module(changed=True, commands=commands, sort=False)

    def test_aireos_config_after(self):
        set_module_args(dict(lines=['sysname foo'], after=['test1', 'test2']))
        commands = ['sysname foo', 'test1', 'test2']
        self.execute_module(changed=True, commands=commands, sort=False)

    def test_aireos_config_before_after_no_change(self):
        # before/after hooks only run when the lines themselves would change.
        set_module_args(dict(lines=['sysname router'],
                             before=['test1', 'test2'],
                             after=['test3', 'test4']))
        self.execute_module()

    def test_aireos_config_config(self):
        config = 'sysname localhost'
        set_module_args(dict(lines=['sysname router'], config=config))
        commands = ['sysname router']
        self.execute_module(changed=True, commands=commands)

    def test_aireos_config_match_none(self):
        lines = ['sysname router', 'interface create mtc-1 1']
        set_module_args(dict(lines=lines, match='none'))
        self.execute_module(changed=True, commands=lines, sort=False)

    # NOTE(review): the tests below kept their 'test_nxos_*' names from the
    # nxos module they were copied from; renamed bodies still cover aireos.

    def test_nxos_config_save_always(self):
        args = dict(save_when='always')
        set_module_args(args)
        self.execute_module()
        self.assertEqual(self.save_config.call_count, 1)
        self.assertEqual(self.get_config.call_count, 0)
        self.assertEqual(self.load_config.call_count, 0)

    def test_nxos_config_save_changed_true(self):
        args = dict(save_when='changed', lines=['sysname foo', 'interface create mtc-3 3'])
        set_module_args(args)
        self.execute_module(changed=True)
        self.assertEqual(self.save_config.call_count, 1)
        self.assertEqual(self.get_config.call_count, 1)
        self.assertEqual(self.load_config.call_count, 1)

    def test_nxos_config_save_changed_false(self):
        args = dict(save_when='changed')
        set_module_args(args)
        self.execute_module()
        self.assertEqual(self.save_config.call_count, 0)
        self.assertEqual(self.get_config.call_count, 0)
        self.assertEqual(self.load_config.call_count, 0)
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.