text
stringlengths 4
1.02M
| meta
dict |
|---|---|
"""Python wrappers around TensorFlow ops.
This file is MACHINE GENERATED! Do not edit.
Original C++ source file: audio_ops.cc
"""
import collections as _collections
import six as _six
from tensorflow.python import pywrap_tensorflow as _pywrap_tensorflow
from tensorflow.python.eager import context as _context
from tensorflow.python.eager import core as _core
from tensorflow.python.eager import execute as _execute
from tensorflow.python.framework import dtypes as _dtypes
from tensorflow.python.framework import errors as _errors
from tensorflow.python.framework import tensor_shape as _tensor_shape
from tensorflow.core.framework import op_def_pb2 as _op_def_pb2
# Needed to trigger the call to _set_call_cpp_shape_fn.
from tensorflow.python.framework import common_shapes as _common_shapes
from tensorflow.python.framework import op_def_registry as _op_def_registry
from tensorflow.python.framework import ops as _ops
from tensorflow.python.framework import op_def_library as _op_def_library
from tensorflow.python.util.tf_export import tf_export
@tf_export('audio_spectrogram')
def audio_spectrogram(input, window_size, stride, magnitude_squared=False, name=None):
  r"""Produces a visualization of audio data over time.

  Spectrograms are a standard way of representing audio information as a series of
  slices of frequency information, one slice for each window of time. By joining
  these together into a sequence, they form a distinctive fingerprint of the sound
  over time.

  This op expects to receive audio data as an input, stored as floats in the range
  -1 to 1, together with a window width in samples, and a stride specifying how
  far to move the window between slices. From this it generates a three
  dimensional output. The lowest dimension has an amplitude value for each
  frequency during that time slice. The next dimension is time, with successive
  frequency slices. The final dimension is for the channels in the input, so a
  stereo audio input would have two here for example.

  This means the layout when converted and saved as an image is rotated 90 degrees
  clockwise from a typical spectrogram. Time is descending down the Y axis, and
  the frequency decreases from left to right.

  Each value in the result represents the square root of the sum of the real and
  imaginary parts of an FFT on the current window of samples. In this way, the
  lowest dimension represents the power of each frequency in the current window,
  and adjacent windows are concatenated in the next dimension.

  To get a more intuitive and visual look at what this operation does, you can run
  tensorflow/examples/wav_to_spectrogram to read in an audio file and save out the
  resulting spectrogram as a PNG image.

  Args:
    input: A `Tensor` of type `float32`. Float representation of audio data.
    window_size: An `int`.
      How wide the input window is in samples. For the highest efficiency
      this should be a power of two, but other values are accepted.
    stride: An `int`.
      How widely apart the center of adjacent sample windows should be.
    magnitude_squared: An optional `bool`. Defaults to `False`.
      Whether to return the squared magnitude or just the
      magnitude. Using squared magnitude can avoid extra calculations.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `float32`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op node.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Normalize/validate attribute values before constructing the op.
    window_size = _execute.make_int(window_size, "window_size")
    stride = _execute.make_int(stride, "stride")
    if magnitude_squared is None:
      magnitude_squared = False
    magnitude_squared = _execute.make_bool(magnitude_squared, "magnitude_squared")
    _, _, _op = _op_def_lib._apply_op_helper(
        "AudioSpectrogram", input=input, window_size=window_size,
        stride=stride, magnitude_squared=magnitude_squared, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # Attrs are recorded as a flat (name, value, name, value, ...) tuple,
    # which record_gradient expects.
    _attrs = ("window_size", _op.get_attr("window_size"), "stride",
              _op.get_attr("stride"), "magnitude_squared",
              _op.get_attr("magnitude_squared"))
    _execute.record_gradient(
        "AudioSpectrogram", _inputs_flat, _attrs, _result, name)
    # Single-output op: unwrap the one-element output list.
    _result, = _result
    return _result
  else:
    # Eager mode: try the C fast-path execution first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._eager_context.device_name,
          "AudioSpectrogram", name, _ctx._post_execution_callbacks, input,
          "window_size", window_size, "stride", stride, "magnitude_squared",
          magnitude_squared)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these inputs; use the Python slow path.
      return audio_spectrogram_eager_fallback(
          input, window_size=window_size, stride=stride,
          magnitude_squared=magnitude_squared, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface C-level errors as Python exceptions, tagging the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
def audio_spectrogram_eager_fallback(input, window_size, stride, magnitude_squared=False, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.

  This is for function audio_spectrogram: it executes the AudioSpectrogram op
  through the generic _execute.execute path when the C fast path raises
  _FallbackException.
  """
  _ctx = ctx if ctx else _context.context()
  # Same attr validation as the graph-mode branch of audio_spectrogram.
  window_size = _execute.make_int(window_size, "window_size")
  stride = _execute.make_int(stride, "stride")
  if magnitude_squared is None:
    magnitude_squared = False
  magnitude_squared = _execute.make_bool(magnitude_squared, "magnitude_squared")
  input = _ops.convert_to_tensor(input, _dtypes.float32)
  _inputs_flat = [input]
  _attrs = ("window_size", window_size, "stride", stride, "magnitude_squared",
            magnitude_squared)
  # 1 == number of outputs of the AudioSpectrogram op.
  _result = _execute.execute(b"AudioSpectrogram", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "AudioSpectrogram", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
# Output names and result container for the two-output DecodeWav op; callers
# receive a namedtuple with fields (audio, sample_rate).
_decode_wav_outputs = ["audio", "sample_rate"]
_DecodeWavOutput = _collections.namedtuple(
    "DecodeWav", _decode_wav_outputs)
@tf_export('decode_wav')
def decode_wav(contents, desired_channels=-1, desired_samples=-1, name=None):
  r"""Decode a 16-bit PCM WAV file to a float tensor.

  The -32768 to 32767 signed 16-bit values will be scaled to -1.0 to 1.0 in float.

  When desired_channels is set, if the input contains fewer channels than this
  then the last channel will be duplicated to give the requested number, else if
  the input has more channels than requested then the additional channels will be
  ignored.

  If desired_samples is set, then the audio will be cropped or padded with zeroes
  to the requested length.

  The first output contains a Tensor with the content of the audio samples. The
  lowest dimension will be the number of channels, and the second will be the
  number of samples. For example, a ten-sample-long stereo WAV file should give an
  output shape of [10, 2].

  Args:
    contents: A `Tensor` of type `string`.
      The WAV-encoded audio, usually from a file.
    desired_channels: An optional `int`. Defaults to `-1`.
      Number of sample channels wanted.
    desired_samples: An optional `int`. Defaults to `-1`.
      Length of audio requested.
    name: A name for the operation (optional).

  Returns:
    A tuple of `Tensor` objects (audio, sample_rate).

    audio: A `Tensor` of type `float32`.
    sample_rate: A `Tensor` of type `int32`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op node.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # -1 means "use whatever the WAV file contains" for both attrs.
    if desired_channels is None:
      desired_channels = -1
    desired_channels = _execute.make_int(desired_channels, "desired_channels")
    if desired_samples is None:
      desired_samples = -1
    desired_samples = _execute.make_int(desired_samples, "desired_samples")
    _, _, _op = _op_def_lib._apply_op_helper(
        "DecodeWav", contents=contents, desired_channels=desired_channels,
        desired_samples=desired_samples, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("desired_channels", _op.get_attr("desired_channels"),
              "desired_samples", _op.get_attr("desired_samples"))
    _execute.record_gradient(
        "DecodeWav", _inputs_flat, _attrs, _result, name)
    # Two outputs: wrap them in the (audio, sample_rate) namedtuple.
    _result = _DecodeWavOutput._make(_result)
    return _result
  else:
    # Eager mode: try the C fast-path execution first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._eager_context.device_name, "DecodeWav",
          name, _ctx._post_execution_callbacks, contents, "desired_channels",
          desired_channels, "desired_samples", desired_samples)
      _result = _DecodeWavOutput._make(_result)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these inputs; use the Python slow path.
      return decode_wav_eager_fallback(
          contents, desired_channels=desired_channels,
          desired_samples=desired_samples, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface C-level errors as Python exceptions, tagging the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
def decode_wav_eager_fallback(contents, desired_channels=-1, desired_samples=-1, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.

  This is for function decode_wav: it executes the DecodeWav op through the
  generic _execute.execute path when the C fast path raises _FallbackException.
  """
  _ctx = ctx if ctx else _context.context()
  # Same attr validation as the graph-mode branch of decode_wav.
  if desired_channels is None:
    desired_channels = -1
  desired_channels = _execute.make_int(desired_channels, "desired_channels")
  if desired_samples is None:
    desired_samples = -1
  desired_samples = _execute.make_int(desired_samples, "desired_samples")
  contents = _ops.convert_to_tensor(contents, _dtypes.string)
  _inputs_flat = [contents]
  _attrs = ("desired_channels", desired_channels, "desired_samples",
            desired_samples)
  # 2 == number of outputs of the DecodeWav op (audio, sample_rate).
  _result = _execute.execute(b"DecodeWav", 2, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "DecodeWav", _inputs_flat, _attrs, _result, name)
  _result = _DecodeWavOutput._make(_result)
  return _result
@tf_export('encode_wav')
def encode_wav(audio, sample_rate, name=None):
  r"""Encode audio data using the WAV file format.

  This operation will generate a string suitable to be saved out to create a .wav
  audio file. It will be encoded in the 16-bit PCM format. It takes in float
  values in the range -1.0f to 1.0f, and any outside that value will be clamped to
  that range.

  `audio` is a 2-D float Tensor of shape `[length, channels]`.
  `sample_rate` is a scalar Tensor holding the rate to use (e.g. 44100).

  Args:
    audio: A `Tensor` of type `float32`. 2-D with shape `[length, channels]`.
    sample_rate: A `Tensor` of type `int32`.
      Scalar containing the sample frequency.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `string`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op node.
  if _ctx is None or not _ctx._eager_context.is_eager:
    _, _, _op = _op_def_lib._apply_op_helper(
        "EncodeWav", audio=audio, sample_rate=sample_rate, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    # EncodeWav has no attrs, hence None here.
    _attrs = None
    _execute.record_gradient(
        "EncodeWav", _inputs_flat, _attrs, _result, name)
    # Single-output op: unwrap the one-element output list.
    _result, = _result
    return _result
  else:
    # Eager mode: try the C fast-path execution first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._eager_context.device_name, "EncodeWav",
          name, _ctx._post_execution_callbacks, audio, sample_rate)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these inputs; use the Python slow path.
      return encode_wav_eager_fallback(
          audio, sample_rate, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface C-level errors as Python exceptions, tagging the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
def encode_wav_eager_fallback(audio, sample_rate, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.

  This is for function encode_wav: it executes the EncodeWav op through the
  generic _execute.execute path when the C fast path raises _FallbackException.
  """
  _ctx = ctx if ctx else _context.context()
  audio = _ops.convert_to_tensor(audio, _dtypes.float32)
  sample_rate = _ops.convert_to_tensor(sample_rate, _dtypes.int32)
  _inputs_flat = [audio, sample_rate]
  # EncodeWav has no attrs, hence None here.
  _attrs = None
  # 1 == number of outputs of the EncodeWav op.
  _result = _execute.execute(b"EncodeWav", 1, inputs=_inputs_flat,
                             attrs=_attrs, ctx=_ctx, name=name)
  _execute.record_gradient(
      "EncodeWav", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
@tf_export('mfcc')
def mfcc(spectrogram, sample_rate, upper_frequency_limit=4000, lower_frequency_limit=20, filterbank_channel_count=40, dct_coefficient_count=13, name=None):
  r"""Transforms a spectrogram into a form that's useful for speech recognition.

  Mel Frequency Cepstral Coefficients are a way of representing audio data that's
  been effective as an input feature for machine learning. They are created by
  taking the spectrum of a spectrogram (a 'cepstrum'), and discarding some of the
  higher frequencies that are less significant to the human ear. They have a long
  history in the speech recognition world, and https://en.wikipedia.org/wiki/Mel-frequency_cepstrum
  is a good resource to learn more.

  Args:
    spectrogram: A `Tensor` of type `float32`.
      Typically produced by the Spectrogram op, with magnitude_squared
      set to true.
    sample_rate: A `Tensor` of type `int32`.
      How many samples per second the source audio used.
    upper_frequency_limit: An optional `float`. Defaults to `4000`.
      The highest frequency to use when calculating the
      ceptstrum.
    lower_frequency_limit: An optional `float`. Defaults to `20`.
      The lowest frequency to use when calculating the
      ceptstrum.
    filterbank_channel_count: An optional `int`. Defaults to `40`.
      Resolution of the Mel bank used internally.
    dct_coefficient_count: An optional `int`. Defaults to `13`.
      How many output channels to produce per time slice.
    name: A name for the operation (optional).

  Returns:
    A `Tensor` of type `float32`.
  """
  _ctx = _context._context
  # Graph mode (or eager context not yet initialized): build a graph op node.
  if _ctx is None or not _ctx._eager_context.is_eager:
    # Fill in defaults and validate each attr's type.
    if upper_frequency_limit is None:
      upper_frequency_limit = 4000
    upper_frequency_limit = _execute.make_float(upper_frequency_limit, "upper_frequency_limit")
    if lower_frequency_limit is None:
      lower_frequency_limit = 20
    lower_frequency_limit = _execute.make_float(lower_frequency_limit, "lower_frequency_limit")
    if filterbank_channel_count is None:
      filterbank_channel_count = 40
    filterbank_channel_count = _execute.make_int(filterbank_channel_count, "filterbank_channel_count")
    if dct_coefficient_count is None:
      dct_coefficient_count = 13
    dct_coefficient_count = _execute.make_int(dct_coefficient_count, "dct_coefficient_count")
    _, _, _op = _op_def_lib._apply_op_helper(
        "Mfcc", spectrogram=spectrogram, sample_rate=sample_rate,
        upper_frequency_limit=upper_frequency_limit,
        lower_frequency_limit=lower_frequency_limit,
        filterbank_channel_count=filterbank_channel_count,
        dct_coefficient_count=dct_coefficient_count, name=name)
    _result = _op.outputs[:]
    _inputs_flat = _op.inputs
    _attrs = ("upper_frequency_limit", _op.get_attr("upper_frequency_limit"),
              "lower_frequency_limit", _op.get_attr("lower_frequency_limit"),
              "filterbank_channel_count",
              _op.get_attr("filterbank_channel_count"),
              "dct_coefficient_count", _op.get_attr("dct_coefficient_count"))
    _execute.record_gradient(
        "Mfcc", _inputs_flat, _attrs, _result, name)
    # Single-output op: unwrap the one-element output list.
    _result, = _result
    return _result
  else:
    # Eager mode: try the C fast-path execution first.
    try:
      _result = _pywrap_tensorflow.TFE_Py_FastPathExecute(
          _ctx._context_handle, _ctx._eager_context.device_name, "Mfcc", name,
          _ctx._post_execution_callbacks, spectrogram, sample_rate,
          "upper_frequency_limit", upper_frequency_limit,
          "lower_frequency_limit", lower_frequency_limit,
          "filterbank_channel_count", filterbank_channel_count,
          "dct_coefficient_count", dct_coefficient_count)
      return _result
    except _core._FallbackException:
      # Fast path could not handle these inputs; use the Python slow path.
      return mfcc_eager_fallback(
          spectrogram, sample_rate,
          upper_frequency_limit=upper_frequency_limit,
          lower_frequency_limit=lower_frequency_limit,
          filterbank_channel_count=filterbank_channel_count,
          dct_coefficient_count=dct_coefficient_count, name=name, ctx=_ctx)
    except _core._NotOkStatusException as e:
      # Surface C-level errors as Python exceptions, tagging the op name.
      if name is not None:
        message = e.message + " name: " + name
      else:
        message = e.message
      _six.raise_from(_core._status_to_exception(e.code, message), None)
def mfcc_eager_fallback(spectrogram, sample_rate, upper_frequency_limit=4000, lower_frequency_limit=20, filterbank_channel_count=40, dct_coefficient_count=13, name=None, ctx=None):
  r"""This is the slowpath function for Eager mode.

  This is for function mfcc: it executes the Mfcc op through the generic
  _execute.execute path when the C fast path raises _FallbackException.
  """
  _ctx = ctx if ctx else _context.context()
  # Same attr defaulting/validation as the graph-mode branch of mfcc.
  if upper_frequency_limit is None:
    upper_frequency_limit = 4000
  upper_frequency_limit = _execute.make_float(upper_frequency_limit, "upper_frequency_limit")
  if lower_frequency_limit is None:
    lower_frequency_limit = 20
  lower_frequency_limit = _execute.make_float(lower_frequency_limit, "lower_frequency_limit")
  if filterbank_channel_count is None:
    filterbank_channel_count = 40
  filterbank_channel_count = _execute.make_int(filterbank_channel_count, "filterbank_channel_count")
  if dct_coefficient_count is None:
    dct_coefficient_count = 13
  dct_coefficient_count = _execute.make_int(dct_coefficient_count, "dct_coefficient_count")
  spectrogram = _ops.convert_to_tensor(spectrogram, _dtypes.float32)
  sample_rate = _ops.convert_to_tensor(sample_rate, _dtypes.int32)
  _inputs_flat = [spectrogram, sample_rate]
  _attrs = ("upper_frequency_limit", upper_frequency_limit,
            "lower_frequency_limit", lower_frequency_limit, "filterbank_channel_count",
            filterbank_channel_count, "dct_coefficient_count", dct_coefficient_count)
  # 1 == number of outputs of the Mfcc op.
  _result = _execute.execute(b"Mfcc", 1, inputs=_inputs_flat, attrs=_attrs,
                             ctx=_ctx, name=name)
  _execute.record_gradient(
      "Mfcc", _inputs_flat, _attrs, _result, name)
  _result, = _result
  return _result
def _InitOpDefLibrary(op_list_proto_bytes):
  """Parse a serialized OpList proto, register its ops, and build a library.

  Args:
    op_list_proto_bytes: bytes of a serialized op_def_pb2.OpList message.

  Returns:
    An OpDefLibrary populated with the ops from the proto, used by the
    wrappers above to construct graph nodes.
  """
  op_list = _op_def_pb2.OpList()
  op_list.ParseFromString(op_list_proto_bytes)
  # Registration makes the op defs visible to the framework's shape/op registry.
  _op_def_registry.register_op_list(op_list)
  op_def_lib = _op_def_library.OpDefLibrary()
  op_def_lib.add_op_list(op_list)
  return op_def_lib
# op {
# name: "AudioSpectrogram"
# input_arg {
# name: "input"
# type: DT_FLOAT
# }
# output_arg {
# name: "spectrogram"
# type: DT_FLOAT
# }
# attr {
# name: "window_size"
# type: "int"
# }
# attr {
# name: "stride"
# type: "int"
# }
# attr {
# name: "magnitude_squared"
# type: "bool"
# default_value {
# b: false
# }
# }
# }
# op {
# name: "DecodeWav"
# input_arg {
# name: "contents"
# type: DT_STRING
# }
# output_arg {
# name: "audio"
# type: DT_FLOAT
# }
# output_arg {
# name: "sample_rate"
# type: DT_INT32
# }
# attr {
# name: "desired_channels"
# type: "int"
# default_value {
# i: -1
# }
# }
# attr {
# name: "desired_samples"
# type: "int"
# default_value {
# i: -1
# }
# }
# }
# op {
# name: "EncodeWav"
# input_arg {
# name: "audio"
# type: DT_FLOAT
# }
# input_arg {
# name: "sample_rate"
# type: DT_INT32
# }
# output_arg {
# name: "contents"
# type: DT_STRING
# }
# }
# op {
# name: "Mfcc"
# input_arg {
# name: "spectrogram"
# type: DT_FLOAT
# }
# input_arg {
# name: "sample_rate"
# type: DT_INT32
# }
# output_arg {
# name: "output"
# type: DT_FLOAT
# }
# attr {
# name: "upper_frequency_limit"
# type: "float"
# default_value {
# f: 4000
# }
# }
# attr {
# name: "lower_frequency_limit"
# type: "float"
# default_value {
# f: 20
# }
# }
# attr {
# name: "filterbank_channel_count"
# type: "int"
# default_value {
# i: 40
# }
# }
# attr {
# name: "dct_coefficient_count"
# type: "int"
# default_value {
# i: 13
# }
# }
# }
# Serialized OpList proto (human-readable text form is in the comment block
# above) registering AudioSpectrogram, DecodeWav, EncodeWav and Mfcc at
# module import time.
_op_def_lib = _InitOpDefLibrary(b"\np\n\020AudioSpectrogram\022\t\n\005input\030\001\032\017\n\013spectrogram\030\001\"\022\n\013window_size\022\003int\"\r\n\006stride\022\003int\"\035\n\021magnitude_squared\022\004bool\032\002(\000\n\200\001\n\tDecodeWav\022\014\n\010contents\030\007\032\t\n\005audio\030\001\032\017\n\013sample_rate\030\003\"$\n\020desired_channels\022\003int\032\013\030\377\377\377\377\377\377\377\377\377\001\"#\n\017desired_samples\022\003int\032\013\030\377\377\377\377\377\377\377\377\377\001\n5\n\tEncodeWav\022\t\n\005audio\030\001\022\017\n\013sample_rate\030\003\032\014\n\010contents\030\007\n\311\001\n\004Mfcc\022\017\n\013spectrogram\030\001\022\017\n\013sample_rate\030\003\032\n\n\006output\030\001\"%\n\025upper_frequency_limit\022\005float\032\005%\000\000zE\"%\n\025lower_frequency_limit\022\005float\032\005%\000\000\240A\"#\n\030filterbank_channel_count\022\003int\032\002\030(\" \n\025dct_coefficient_count\022\003int\032\002\030\r")
|
{
"content_hash": "a0ed615d184f746482c02553b2da56f2",
"timestamp": "",
"source": "github",
"line_count": 542,
"max_line_length": 975,
"avg_line_length": 39.6309963099631,
"alnum_prop": 0.6806797020484171,
"repo_name": "ryfeus/lambda-packs",
"id": "7187ddb0a6a8f61dfb3019d043e53ec6c2b1a394",
"size": "21480",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Keras_tensorflow_nightly/source2.7/tensorflow/python/ops/gen_audio_ops.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9768343"
},
{
"name": "C++",
"bytes": "76566960"
},
{
"name": "CMake",
"bytes": "191097"
},
{
"name": "CSS",
"bytes": "153538"
},
{
"name": "Cuda",
"bytes": "61768"
},
{
"name": "Cython",
"bytes": "3110222"
},
{
"name": "Fortran",
"bytes": "110284"
},
{
"name": "HTML",
"bytes": "248658"
},
{
"name": "JavaScript",
"bytes": "62920"
},
{
"name": "MATLAB",
"bytes": "17384"
},
{
"name": "Makefile",
"bytes": "152150"
},
{
"name": "Python",
"bytes": "549307737"
},
{
"name": "Roff",
"bytes": "26398"
},
{
"name": "SWIG",
"bytes": "142"
},
{
"name": "Shell",
"bytes": "7790"
},
{
"name": "Smarty",
"bytes": "4090"
},
{
"name": "TeX",
"bytes": "152062"
},
{
"name": "XSLT",
"bytes": "305540"
}
],
"symlink_target": ""
}
|
import os
import pytest
from pandas import read_csv, read_table
class BaseParser:
    """Test harness base: routes reads through pandas with fixed engine options.

    Subclasses pin `engine` / `low_memory`; every `read_csv`/`read_table`
    call gets those options merged into its keyword arguments.
    """

    engine = None
    low_memory = True
    float_precision_choices = []

    def update_kwargs(self, kwargs):
        """Return a copy of *kwargs* with this parser's engine options applied."""
        return {**kwargs, "engine": self.engine, "low_memory": self.low_memory}

    def read_csv(self, *args, **kwargs):
        """Delegate to pandas.read_csv with this parser's options injected."""
        return read_csv(*args, **self.update_kwargs(kwargs))

    def read_table(self, *args, **kwargs):
        """Delegate to pandas.read_table with this parser's options injected."""
        return read_table(*args, **self.update_kwargs(kwargs))
class CParser(BaseParser):
    """Common base for the C-engine parser variants."""

    float_precision_choices = [None, "high", "round_trip"]
    engine = "c"
class CParserHighMemory(CParser):
    # C engine with the low_memory chunked-read path disabled.
    low_memory = False
class CParserLowMemory(CParser):
    # C engine with the low_memory chunked-read path enabled (the default).
    low_memory = True
class PythonParser(BaseParser):
    """Pure-Python engine parser (no float_precision options)."""

    float_precision_choices = [None]
    engine = "python"
@pytest.fixture
def csv_dir_path(datapath):
    """Return the directory path holding the parser data files."""
    parts = ("io", "parser", "data")
    return datapath(*parts)
@pytest.fixture
def csv1(csv_dir_path):
    """Return the path of the canonical test1.csv data file."""
    filename = "test1.csv"
    return os.path.join(csv_dir_path, filename)
# Singleton parser instances shared by the fixtures below.
_cParserHighMemory = CParserHighMemory()
_cParserLowMemory = CParserLowMemory()
_pythonParser = PythonParser()

# Parser groupings and matching pytest ids; each ids list must stay in the
# same order as its parser list for the fixture params to line up.
_py_parsers_only = [_pythonParser]
_c_parsers_only = [_cParserHighMemory, _cParserLowMemory]
_all_parsers = _c_parsers_only + _py_parsers_only
_py_parser_ids = ["python"]
_c_parser_ids = ["c_high", "c_low"]
_all_parser_ids = _c_parser_ids + _py_parser_ids
@pytest.fixture(params=_all_parsers, ids=_all_parser_ids)
def all_parsers(request):
    """Fixture: parametrizes a test over every parser (c_high, c_low, python)."""
    return request.param
@pytest.fixture(params=_c_parsers_only, ids=_c_parser_ids)
def c_parser_only(request):
    """Fixture: parametrizes a test over the C-engine parsers only."""
    return request.param
@pytest.fixture(params=_py_parsers_only, ids=_py_parser_ids)
def python_parser_only(request):
    """Fixture: parametrizes a test over the Python-engine parser only."""
    return request.param
|
{
"content_hash": "936001179ef41539b264ad9a256c697b",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 75,
"avg_line_length": 22.209876543209877,
"alnum_prop": 0.6837131739855475,
"repo_name": "kushalbhola/MyStuff",
"id": "2c347a096006a9a052fdb77b8bfad03bc96f9ce2",
"size": "1799",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Practice/PythonApplication/env/Lib/site-packages/pandas/tests/io/parser/conftest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1330"
},
{
"name": "C#",
"bytes": "332967"
},
{
"name": "CSS",
"bytes": "1451"
},
{
"name": "HTML",
"bytes": "7539"
},
{
"name": "Java",
"bytes": "14860"
},
{
"name": "JavaScript",
"bytes": "9843"
},
{
"name": "Jupyter Notebook",
"bytes": "374013"
},
{
"name": "PowerShell",
"bytes": "1448"
},
{
"name": "Python",
"bytes": "6511820"
},
{
"name": "Tcl",
"bytes": "24289"
},
{
"name": "TypeScript",
"bytes": "15697"
}
],
"symlink_target": ""
}
|
import sys
import os
# Parse a DHT log (path given as argv[1]) into two histograms and plot them
# with gnuplot.  Ported to Python 3: the original used Python 2-only syntax
# (`except Exception, e` and `print >>out`) and never closed its file handles.

# Seconds per histogram bucket for node up-time.
up_time_quanta = 2000

announce_histogram = {}
node_uptime_histogram = {}

with open(sys.argv[1]) as f:
    for line in f:
        try:
            if 'distance:' in line:
                fields = line.split(' ')
                idx = fields.index('distance:')
                d = int(fields[idx + 1].strip())
                announce_histogram[d] = announce_histogram.get(d, 0) + 1
            if 'NODE FAILED' in line:
                fields = line.split(' ')
                idx = fields.index('fails:')
                # Only count the first failure of each node.
                if int(fields[idx + 1].strip()) != 1:
                    continue
                idx = fields.index('up-time:')
                d = int(fields[idx + 1].strip())
                # Quantize up-time into up_time_quanta-second buckets.
                d -= d % up_time_quanta
                node_uptime_histogram[d] = node_uptime_histogram.get(d, 0) + 1
        except Exception:
            # Malformed line: dump it for inspection and keep going.
            print(line.split(' '))

with open('dht_announce_distribution.dat', 'w+') as out:
    for k, v in announce_histogram.items():
        out.write('%d %d\n' % (k, v))

with open('dht_node_uptime_distribution.dat', 'w+') as out:
    for k, v in node_uptime_histogram.items():
        # Shift each bucket to its midpoint for plotting.
        out.write('%d %d\n' % (k + up_time_quanta // 2, v))

with open('dht.gnuplot', 'w+') as out:
    out.write('''
set term png size 1200,700 small
set output "dht_announce_distribution.png"
set title "bucket # announces are made against relative to target node-id"
set ylabel "# of announces"
set style fill solid border -1 pattern 2
plot "dht_announce_distribution.dat" using 1:2 title "announces" with boxes

set terminal postscript
set output "dht_announce_distribution.ps"
replot

set term png size 1200,700 small
set output "dht_node_uptime_distribution.png"
set title "node up time"
set ylabel "# of nodes"
set xlabel "uptime (seconds)"
set boxwidth %f
set style fill solid border -1 pattern 2
plot "dht_node_uptime_distribution.dat" using 1:2 title "nodes" with boxes
''' % up_time_quanta)

os.system('gnuplot dht.gnuplot')
|
{
"content_hash": "b60eb6803709c0a4d96c0def606388c7",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 76,
"avg_line_length": 25.216216216216218,
"alnum_prop": 0.6666666666666666,
"repo_name": "dikshie/instrumented-rasterbar-libtorrent",
"id": "9303e8c86582322729cd64d9610bda620db0b1a1",
"size": "1889",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "parse_dht_log.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import numpy as np
def plot_place(ax, location="upper right", offset=0.02):
    """Compute corner-label placement for *ax* in axes-fraction coordinates.

    The offset is scaled by the axes aspect ratio so the visual gap from the
    corner is comparable in both directions.

    Returns:
        (x, y, ha, va): text position plus horizontal/vertical alignment.

    Raises:
        ValueError: if *location* is not one of the four recognized corners.
    """
    bbox = ax.get_position()
    aspect = bbox.bounds[2] / bbox.bounds[3]
    if aspect < 1.0:
        dx, dy = offset, offset / aspect
    else:
        dx, dy = offset / aspect, offset
    placements = {
        "upper right": (1.0 - dx, 1.0 - dy, "right", "top"),
        "lower right": (1.0 - dx, 0.0 + dy, "right", "bottom"),
        "lower left": (0.0 + dx, 0.0 + dy, "left", "bottom"),
        "upper left": (0.0 + dx, 1.0 - dy, "left", "top"),
    }
    if location not in placements:
        raise ValueError("Unrecognized location \"{:s}\".".format(location))
    return placements[location]
def letter_label(
        ax, label, location="upper right", offset=0.02, **kwargs):
    """Adds a letter label to the plot.

    Placement and alignment come from plot_place; extra keyword arguments
    are forwarded to ax.text.  Returns the created text artist.
    """
    x, y, ha, va = plot_place(ax, location, offset)
    return ax.text(
        x, y, label,
        transform=ax.transAxes,
        horizontalalignment=ha,
        verticalalignment=va,
        **kwargs)
def generate_plot_labels(num_labels):
    """Return *num_labels* parenthesized letter labels: ['(A)', '(B)', ...].

    Fixes a bug where *num_labels* was ignored and all 26 labels were always
    returned regardless of how many were requested.
    """
    # 65 is ord('A'); labels continue alphabetically from there.
    return ['(' + chr(65 + n) + ')' for n in range(num_labels)]
def rmse(x, y=np.array([0])):
    """Root-mean-square difference between arrays *x* and *y* (default zero)."""
    residual = x.ravel() - y.ravel()
    # Note: normalized by x.size, matching the usual case where x and y agree
    # in size (or y is the scalar default).
    return np.linalg.norm(residual) / np.sqrt(x.size)
|
{
"content_hash": "e68db257242557cf0480f707cf8b39df",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 76,
"avg_line_length": 29.70212765957447,
"alnum_prop": 0.5508595988538681,
"repo_name": "mrshannon/asen-3113-lab3-software",
"id": "b9916c47beb3701fa4aa9b61b2f20501b08f2d8f",
"size": "2009",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Matlab",
"bytes": "92984"
},
{
"name": "Python",
"bytes": "7711"
}
],
"symlink_target": ""
}
|
import os
import eventlet
from oslo.config import cfg
from nova import exception
from nova.i18n import _
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
from nova.openstack.common import loopingcall
from nova.servicegroup import api
evzookeeper = importutils.try_import('evzookeeper')
membership = importutils.try_import('evzookeeper.membership')
zookeeper = importutils.try_import('zookeeper')
zk_driver_opts = [
cfg.StrOpt('address',
help='The ZooKeeper addresses for servicegroup service in the '
'format of host1:port,host2:port,host3:port'),
cfg.IntOpt('recv_timeout',
default=4000,
help='The recv_timeout parameter for the zk session'),
cfg.StrOpt('sg_prefix',
default="/servicegroups",
help='The prefix used in ZooKeeper to store ephemeral nodes'),
cfg.IntOpt('sg_retry_interval',
default=5,
help='Number of seconds to wait until retrying to join the '
'session'),
]
CONF = cfg.CONF
CONF.register_opts(zk_driver_opts, group="zookeeper")
LOG = logging.getLogger(__name__)
class ZooKeeperDriver(api.ServiceGroupDriver):
    """ZooKeeper driver for the service group API.

    Liveness is tracked with ZooKeeper ephemeral nodes: a service "joins" by
    creating a membership node under CONF.zookeeper.sg_prefix/<group>, and is
    considered up while that node exists.
    """

    def __init__(self, *args, **kwargs):
        """Create the zk session object."""
        # All three optional modules must be importable for this driver to work.
        if not all([evzookeeper, membership, zookeeper]):
            raise ImportError('zookeeper module not found')
        # Silence the ZooKeeper client's C-level logging by pointing it at
        # /dev/null.  NOTE(review): this handle is kept open for the session's
        # lifetime and never explicitly closed.
        null = open(os.devnull, "w")
        self._session = evzookeeper.ZKSession(CONF.zookeeper.address,
                                              recv_timeout=
                                              CONF.zookeeper.recv_timeout,
                                              zklog_fd=null)
        # (group, member_id) -> Membership; group_id -> MembershipMonitor.
        self._memberships = {}
        self._monitors = {}
        # Make sure the prefix exists
        try:
            self._session.create(CONF.zookeeper.sg_prefix, "",
                                 acl=[evzookeeper.ZOO_OPEN_ACL_UNSAFE])
        except zookeeper.NodeExistsException:
            # Another process already created it; that is fine.
            pass
        super(ZooKeeperDriver, self).__init__()

    def join(self, member_id, group, service=None):
        """Join the given service with its group.

        Returns a FakeLoopingCall so callers written against the DB-heartbeat
        driver keep working; no periodic heartbeat is actually needed here.
        """
        LOG.debug('ZooKeeperDriver: join new member %(id)s to the '
                  '%(gr)s group, service=%(sr)s',
                  {'id': member_id, 'gr': group, 'sr': service})
        member = self._memberships.get((group, member_id), None)
        if member is None:
            # the first time to join. Generate a new object
            path = "%s/%s" % (CONF.zookeeper.sg_prefix, group)
            try:
                member = membership.Membership(self._session, path, member_id)
            except RuntimeError:
                # The stale ephemeral node from a previous incarnation may not
                # have expired yet; wait one retry interval and try once more.
                LOG.exception(_("Unable to join. It is possible that either "
                                "another node exists with the same name, or "
                                "this node just restarted. We will try "
                                "again in a short while to make sure."))
                eventlet.sleep(CONF.zookeeper.sg_retry_interval)
                member = membership.Membership(self._session, path, member_id)
            self._memberships[(group, member_id)] = member
        return FakeLoopingCall(self, member_id, group)

    def leave(self, member_id, group):
        """Remove the given member from the service group."""
        LOG.debug('ZooKeeperDriver.leave: %(member)s from group %(group)s',
                  {'member': member_id, 'group': group})
        try:
            key = (group, member_id)
            member = self._memberships[key]
            member.leave()
            del self._memberships[key]
        except KeyError:
            # Leaving without having joined is logged but not fatal.
            LOG.error(_('ZooKeeperDriver.leave: %(id)s has not joined to the '
                        '%(gr)s group'), {'id': member_id, 'gr': group})

    def is_up(self, service_ref):
        """Return True if the service's member node is present in its group."""
        group_id = service_ref['topic']
        member_id = service_ref['host']
        all_members = self.get_all(group_id)
        return member_id in all_members

    def get_all(self, group_id):
        """Return all members in a list, or a ServiceGroupUnavailable
        exception.
        """
        monitor = self._monitors.get(group_id, None)
        if monitor is None:
            # Lazily create (and cache) one monitor per group.
            path = "%s/%s" % (CONF.zookeeper.sg_prefix, group_id)
            monitor = membership.MembershipMonitor(self._session, path)
            self._monitors[group_id] = monitor
            # Note(maoy): When initialized for the first time, it takes a
            # while to retrieve all members from zookeeper. To prevent
            # None to be returned, we sleep 5 sec max to wait for data to
            # be ready.
            for _retry in range(50):
                eventlet.sleep(0.1)
                all_members = monitor.get_all()
                if all_members is not None:
                    return all_members
        all_members = monitor.get_all()
        if all_members is None:
            # Still no data after the grace period: report the group service
            # as unavailable rather than returning an empty/None result.
            raise exception.ServiceGroupUnavailable(driver="ZooKeeperDriver")
        return all_members
class FakeLoopingCall(loopingcall.LoopingCallBase):
    """The fake Looping Call implementation, created for backward
    compatibility with a membership based on DB.

    ZooKeeper's ephemeral nodes make periodic heartbeats unnecessary, so
    start/wait are no-ops and stop simply leaves the group.
    """

    def __init__(self, driver, host, group):
        self._driver, self._group, self._host = driver, group, host

    def stop(self):
        """Leave the service group instead of cancelling a timer."""
        self._driver.leave(self._host, self._group)

    def start(self, interval, initial_delay=None):
        """No-op: there is no periodic heartbeat to schedule."""
        pass

    def wait(self):
        """No-op: nothing to wait on."""
        pass
|
{
"content_hash": "9035ceb5f1b269b7a8c036397c5b7e9f",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 78,
"avg_line_length": 39.66901408450704,
"alnum_prop": 0.5778448428901118,
"repo_name": "srajag/nova",
"id": "9ba3ae64f98287b053fdcbf1681c08146fb622ea",
"size": "6266",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/servicegroup/drivers/zk.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
import urllib
import os
from PIL import Image
import numpy as np
def download(url, path, overwrite=False):
    """Download *url* to the local file *path*.

    If *path* already exists it is left untouched unless *overwrite*
    is true. Returns None.
    """
    if os.path.exists(path) and not overwrite:
        return
    print('Downloading {} to {}.'.format(url, path))
    # Bug fix: urllib.URLopener() does not exist on Python 3 (the class
    # moved to urllib.request and was removed entirely in 3.12); use
    # urlretrieve, importing it from whichever location this interpreter
    # provides.
    try:
        from urllib.request import urlretrieve  # Python 3
    except ImportError:
        from urllib import urlretrieve  # Python 2 fallback
    urlretrieve(url, path)
def get_mobilenet():
    """Fetch the MobileNet CoreML model and return its absolute local path."""
    model_url = 'https://docs-assets.developer.apple.com/coreml/models/MobileNet.mlmodel'
    local_path = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'mobilenet.mlmodel'))
    download(model_url, local_path)
    return os.path.abspath(local_path)
def get_resnet50():
    """Fetch the ResNet-50 CoreML model and return its absolute local path."""
    model_url = 'https://docs-assets.developer.apple.com/coreml/models/Resnet50.mlmodel'
    local_path = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'resnet50.mlmodel'))
    download(model_url, local_path)
    return os.path.abspath(local_path)
def get_cat_image():
    """Fetch a sample cat image, resize it to 224x224 and return it as a
    batched channel-first numpy array (1, C, H, W).
    """
    url = 'https://gist.githubusercontent.com/zhreshold/bcda4716699ac97ea44f791c24310193/raw/fa7ef0e9c9a5daea686d6473a62aacd1a5885849/cat.png'
    local_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'cat.png'))
    download(url, local_path)
    image = Image.open(local_path).resize((224, 224))
    # HWC -> CHW, then prepend a batch axis.
    batched = np.transpose(image, (2, 0, 1))[np.newaxis, :]
    return np.asarray(batched)
|
{
"content_hash": "38d3f94ea70491d4bfd368323fe1517c",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 142,
"avg_line_length": 37.57575757575758,
"alnum_prop": 0.6895161290322581,
"repo_name": "mlperf/training_results_v0.6",
"id": "0a39053b6d47edfb608effd356421a43b884fa9e",
"size": "1240",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/nnvm/tests/python/frontend/coreml/model_zoo/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Batchfile",
"bytes": "13941"
},
{
"name": "C",
"bytes": "208630"
},
{
"name": "C++",
"bytes": "10999411"
},
{
"name": "CMake",
"bytes": "129712"
},
{
"name": "CSS",
"bytes": "64767"
},
{
"name": "Clojure",
"bytes": "396764"
},
{
"name": "Cuda",
"bytes": "2272433"
},
{
"name": "Dockerfile",
"bytes": "67820"
},
{
"name": "Groovy",
"bytes": "62557"
},
{
"name": "HTML",
"bytes": "19753082"
},
{
"name": "Java",
"bytes": "166294"
},
{
"name": "JavaScript",
"bytes": "71846"
},
{
"name": "Julia",
"bytes": "408765"
},
{
"name": "Jupyter Notebook",
"bytes": "2713169"
},
{
"name": "Lua",
"bytes": "4430"
},
{
"name": "MATLAB",
"bytes": "34903"
},
{
"name": "Makefile",
"bytes": "115694"
},
{
"name": "Perl",
"bytes": "1535873"
},
{
"name": "Perl 6",
"bytes": "7280"
},
{
"name": "PowerShell",
"bytes": "6150"
},
{
"name": "Python",
"bytes": "24905683"
},
{
"name": "R",
"bytes": "351865"
},
{
"name": "Roff",
"bytes": "293052"
},
{
"name": "Scala",
"bytes": "1189019"
},
{
"name": "Shell",
"bytes": "794096"
},
{
"name": "Smalltalk",
"bytes": "3497"
},
{
"name": "TypeScript",
"bytes": "361164"
}
],
"symlink_target": ""
}
|
from graphql import build_schema, execute_sync, parse
from graphql.utilities import get_introspection_query
from ..fixtures import big_schema_sdl # noqa: F401
def test_execute_introspection_query(benchmark, big_schema_sdl):  # noqa: F811
    """Benchmark executing the full introspection query on a big schema."""
    target_schema = build_schema(big_schema_sdl, assume_valid=True)
    query_document = parse(get_introspection_query())

    def run_query():
        return execute_sync(schema=target_schema, document=query_document)

    result = benchmark(run_query)
    assert result.errors is None
|
{
"content_hash": "1279cad2f320c9708c7a02212bcf90e2",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 78,
"avg_line_length": 42.09090909090909,
"alnum_prop": 0.7602591792656588,
"repo_name": "graphql-python/graphql-core",
"id": "4c30d96596b4f7557e4a86de0095f55a0d01251a",
"size": "463",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/benchmarks/test_introspection_from_schema.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2235538"
}
],
"symlink_target": ""
}
|
"""
RenderPipeline
Copyright (c) 2014-2016 tobspr <tobias.springer1@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from __future__ import print_function
import os
import sys
import zipfile
import shutil
from rplibs.six.moves import urllib
from rplibs.six import BytesIO, binary_type
def download_file(url, chunk_size=100*1024):
    """ Helper method to download a file displaying a progress bar.

    Reads the resource at *url* in chunks of *chunk_size* bytes and
    returns the whole payload as a single bytes object. Network errors
    are logged to stderr and re-raised. A progress bar is shown only on
    Python 2, where the bundled rplibs.progressbar library is used.
    """
    print("Fetching:", url)
    file_content = None
    progressbar = None
    if sys.version_info.major <= 2:
        # Import progressbar library
        from rplibs.progressbar import FileTransferSpeed, ETA, ProgressBar, Percentage
        from rplibs.progressbar import Bar
        widgets = ['\tDownloading: ', FileTransferSpeed(), ' ', Bar(), Percentage(), ' ', ETA()]
        file_content = []
        bytes_read = 0
        # Progressively download the file
        try:
            usock = urllib.request.urlopen(url)
            # Falls back to a huge value (1e10) when the server sends no
            # Content-Length, so the bar simply never fills.
            file_size = int(usock.headers.get("Content-Length", 1e10))
            print("File size is", round(file_size / (1024**2), 2), "MB")
            progressbar = ProgressBar(widgets=widgets, maxval=file_size).start()
            while True:
                data = usock.read(chunk_size)
                file_content.append(data)
                bytes_read += len(data)
                progressbar.update(bytes_read)
                if not data:
                    # Empty read means EOF.
                    break
            usock.close()
        except Exception:
            print("ERROR: Could not fetch", url, "!", file=sys.stderr)
            raise
    else:
        # Don't use progressbar in python 3
        print("Downloading .. (progressbar disabled due to python 3 build)")
        try:
            usock = urllib.request.urlopen(url)
            file_content = []
            while True:
                data = usock.read(chunk_size)
                file_content.append(data)
                if not data:
                    # Empty read means EOF.
                    break
            usock.close()
        except Exception:
            print("ERROR: Could not fetch", url, "!", file=sys.stderr)
            raise
    if progressbar:
        progressbar.finish()
    # Join all chunks into one bytes object (binary_type is six's bytes).
    return binary_type().join(file_content)
def download_submodule(author, module_name, dest_path, ignore_list):
    """ Downloads a submodule from the given author and module name, and extracts
    all files which are not on the ignore_list to the dest_path.

    The module is fetched as GitHub's master-branch zip archive, entirely
    in memory. Exits the process (sys.exit) on a corrupt archive.

    Example: download_submodule("tobspr", "RenderPipeline", ".", ["README.md", "LICENSE"])
    """
    # Make directory, if it does not exist yet
    if not os.path.isdir(dest_path):
        os.makedirs(dest_path)
    # Construct download url
    source_url = "https://github.com/" + author + "/" + module_name + "/archive/master.zip"
    prefix = module_name + "-master"
    # Extract the zip
    zip_ptr = BytesIO(download_file(source_url))
    print("Extracting ZIP ...")
    try:
        zip_handle = zipfile.ZipFile(zip_ptr)
    except zipfile.BadZipfile:
        print("ERROR: Invalid zip file!", file=sys.stderr)
        sys.exit(3)
    # testzip() returns the first bad member name, or None if all CRCs pass.
    if zip_handle.testzip() is not None:
        print("ERROR: Invalid zip file checksums!", file=sys.stderr)
        sys.exit(1)
    num_files, num_dirs = 0, 0
    for fname in zip_handle.namelist():
        # Strip the "<module>-master" prefix GitHub adds to archive paths
        # and normalize separators.
        rel_name = fname.replace(prefix, "").replace("\\", "/").lstrip("/")
        if not rel_name:
            continue
        # Zip directory entries end with a slash.
        is_file = not rel_name.endswith("/")
        rel_name = dest_path.rstrip("/\\") + "/" + rel_name
        # Files
        if is_file:
            for ignore in ignore_list:
                if ignore in rel_name:
                    break
            else:
                # for/else: only extract when no ignore pattern matched.
                with zip_handle.open(fname, "r") as source, open(rel_name, "wb") as dest:
                    shutil.copyfileobj(source, dest)
                num_files += 1
        # Directories
        else:
            if not os.path.isdir(rel_name):
                os.makedirs(rel_name)
                num_dirs += 1
    print("Extracted", num_files, "files and", num_dirs, "directories")
|
{
"content_hash": "4866d3b9b98298aef2ef09a6a98589cf",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 98,
"avg_line_length": 35.39041095890411,
"alnum_prop": 0.6018966518289143,
"repo_name": "croxis/SpaceDrive",
"id": "79f090b7f3ffbda5811fb2bd4ae443d2a5a559be",
"size": "5167",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spacedrive/renderpipeline/rpcore/util/submodule_downloader.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1288"
},
{
"name": "C",
"bytes": "21897"
},
{
"name": "C++",
"bytes": "165025"
},
{
"name": "GLSL",
"bytes": "741524"
},
{
"name": "Groff",
"bytes": "119"
},
{
"name": "Python",
"bytes": "1523574"
}
],
"symlink_target": ""
}
|
"""pool but no finish."""
import multiprocessing as mp
import wandb
import yea
def do_run(num):
run = wandb.init()
run.config.id = num
run.log(dict(s=num))
return num
def main():
    """Fan out four wandb runs across a process pool and verify the results."""
    wandb.require("service")
    wandb.setup()
    worker_count = 4
    pool = mp.Pool(processes=worker_count)
    pending = pool.map_async(do_run, range(worker_count))
    # Wait up to 60 seconds for all workers; the pool is deliberately
    # never closed/joined ("pool but no finish").
    data = pending.get(60)
    print(f"DEBUG: {data}")
    assert len(data) == 4
if __name__ == "__main__":
yea.setup() # Use ":yea:start_method:" to set mp.set_start_method()
main()
|
{
"content_hash": "5006af1d9753c98d2933a1c8334ac6e4",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 72,
"avg_line_length": 18.366666666666667,
"alnum_prop": 0.5970961887477314,
"repo_name": "wandb/client",
"id": "64713e29d07cd9c8742e80b6c4366feeb4f6d5cf",
"size": "573",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/functional_tests/t0_main/mp/05-pool-nofinish.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "4902"
},
{
"name": "Dockerfile",
"bytes": "3491"
},
{
"name": "Jupyter Notebook",
"bytes": "7751"
},
{
"name": "Makefile",
"bytes": "1863"
},
{
"name": "Objective-C",
"bytes": "80764"
},
{
"name": "Python",
"bytes": "3634228"
},
{
"name": "Shell",
"bytes": "4662"
}
],
"symlink_target": ""
}
|
# Sphinx build configuration for the tk_tools documentation.
import os
import sys
# Make the package importable for sphinx.ext.autodoc.
sys.path.insert(0, os.path.abspath('../tk_tools'))
print('cwd: ', os.getcwd())
print('sys path', sys.path)
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.todo',
              'sphinx.ext.coverage',
              'sphinx.ext.mathjax']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'tk_tools'
copyright = '2018, Jason R. Jones'
author = 'Jason R. Jones'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
    '**': [
        'relations.html',  # needs 'show_related': True theme option to display
        'globaltoc.html',
        'searchbox.html',
    ]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'tk_toolsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'tk_tools.tex', 'tk\\_tools Documentation',
     'Jason R. Jones', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'tk_tools', 'tk_tools Documentation',
     [author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'tk_tools', 'tk_tools Documentation',
     author, 'tk_tools', 'One line description of project.',
     'Miscellaneous'),
]
|
{
"content_hash": "a9f2402274fe4c0c24692364f5160545",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 79,
"avg_line_length": 29.746753246753247,
"alnum_prop": 0.6540056756166776,
"repo_name": "slightlynybbled/tk_tools",
"id": "837e267619f80852d7f3db86ed6c75ca9593c517",
"size": "5265",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "294147"
}
],
"symlink_target": ""
}
|
"""Support for HomeKit Controller locks."""
from __future__ import annotations
from typing import Any
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import Service, ServicesTypes
from homeassistant.components.lock import STATE_JAMMED, LockEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
STATE_LOCKED,
STATE_UNKNOWN,
STATE_UNLOCKED,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import KNOWN_DEVICES, HomeKitEntity
# HomeKit lock-mechanism current-state values -> Home Assistant lock states.
CURRENT_STATE_MAP = {
    0: STATE_UNLOCKED,
    1: STATE_LOCKED,
    2: STATE_JAMMED,
    3: STATE_UNKNOWN,
}
# Home Assistant lock state -> HomeKit target-state value to write.
TARGET_STATE_MAP = {STATE_UNLOCKED: 0, STATE_LOCKED: 1}
# Inverse lookup: HomeKit target-state value -> Home Assistant lock state.
REVERSED_TARGET_STATE_MAP = {v: k for k, v in TARGET_STATE_MAP.items()}
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up Homekit lock."""
    pairing_id = config_entry.data["AccessoryPairingID"]
    connection = hass.data[KNOWN_DEVICES][pairing_id]

    @callback
    def async_add_service(service: Service) -> bool:
        """Create a lock entity for each lock-mechanism service discovered."""
        if service.type != ServicesTypes.LOCK_MECHANISM:
            return False
        accessory_info = {"aid": service.accessory.aid, "iid": service.iid}
        async_add_entities([HomeKitLock(connection, accessory_info)], True)
        return True

    connection.add_listener(async_add_service)
class HomeKitLock(HomeKitEntity, LockEntity):
    """Representation of a HomeKit Controller Lock."""

    def get_characteristic_types(self) -> list[str]:
        """Define the homekit characteristics the entity cares about."""
        return [
            CharacteristicsTypes.LOCK_MECHANISM_CURRENT_STATE,
            CharacteristicsTypes.LOCK_MECHANISM_TARGET_STATE,
            CharacteristicsTypes.BATTERY_LEVEL,
        ]

    def _mapped_current_state(self) -> str:
        """Translate the raw current-state characteristic into an HA state."""
        raw = self.service.value(CharacteristicsTypes.LOCK_MECHANISM_CURRENT_STATE)
        return CURRENT_STATE_MAP[raw]

    def _mapped_target_state(self) -> str | None:
        """Translate the raw target-state characteristic, or None if unmapped."""
        raw = self.service.value(CharacteristicsTypes.LOCK_MECHANISM_TARGET_STATE)
        return REVERSED_TARGET_STATE_MAP.get(raw)

    @property
    def is_locked(self) -> bool | None:
        """Return true if device is locked."""
        state = self._mapped_current_state()
        return None if state == STATE_UNKNOWN else state == STATE_LOCKED

    @property
    def is_locking(self) -> bool:
        """Return true if device is locking."""
        return (
            self._mapped_current_state() == STATE_UNLOCKED
            and self._mapped_target_state() == STATE_LOCKED
        )

    @property
    def is_unlocking(self) -> bool:
        """Return true if device is unlocking."""
        return (
            self._mapped_current_state() == STATE_LOCKED
            and self._mapped_target_state() == STATE_UNLOCKED
        )

    @property
    def is_jammed(self) -> bool:
        """Return true if device is jammed."""
        return self._mapped_current_state() == STATE_JAMMED

    async def async_lock(self, **kwargs: Any) -> None:
        """Lock the device."""
        await self._set_lock_state(STATE_LOCKED)

    async def async_unlock(self, **kwargs: Any) -> None:
        """Unlock the device."""
        await self._set_lock_state(STATE_UNLOCKED)

    async def _set_lock_state(self, state: str) -> None:
        """Send state command."""
        await self.async_put_characteristics(
            {CharacteristicsTypes.LOCK_MECHANISM_TARGET_STATE: TARGET_STATE_MAP[state]}
        )

    @property
    def extra_state_attributes(self) -> dict[str, Any]:
        """Return the optional state attributes."""
        battery = self.service.value(CharacteristicsTypes.BATTERY_LEVEL)
        return {ATTR_BATTERY_LEVEL: battery} if battery else {}
|
{
"content_hash": "dd0cf80e7106d5ccc3ea6a635740c577",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 87,
"avg_line_length": 33.207692307692305,
"alnum_prop": 0.6583275422747278,
"repo_name": "rohitranjan1991/home-assistant",
"id": "248bb93a68f6c82bf935cf466ade6a332945243e",
"size": "4317",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/homekit_controller/lock.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1017265"
},
{
"name": "Python",
"bytes": "1051086"
},
{
"name": "Shell",
"bytes": "3946"
}
],
"symlink_target": ""
}
|
"""generated automatically by auto_dao.py"""
from __future__ import division
from sql_dao import SQLDAO
from vistrails.db.versions.v1_0_4.domain import *
class DBMashupAliasSQLDAOBase(SQLDAO):
    """SQL persistence for DBMashupAlias objects ('mashup_alias' table).

    NOTE: generated automatically by auto_dao.py; change the generator,
    not this code.
    """
    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'mashup_alias'
    def getDao(self, dao):
        """Return the DAO registered under the given key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT all matching rows and return {('mashup_alias', id): obj}."""
        columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
        table = 'mashup_alias'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'varchar(255)')
            parent = self.convertFromDB(row[2], 'long', 'int')
            entity_id = self.convertFromDB(row[3], 'long', 'int')
            entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
            mashup_alias = DBMashupAlias(name=name,
                                         id=id)
            mashup_alias.db_parent = parent
            mashup_alias.db_entity_id = entity_id
            mashup_alias.db_entity_type = entity_type
            mashup_alias.is_dirty = False
            res[('mashup_alias', id)] = mashup_alias
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build and return the SELECT command without executing it."""
        columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
        table = 'mashup_alias'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Convert already-fetched rows into DBMashupAlias objects."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'varchar(255)')
            parent = self.convertFromDB(row[2], 'long', 'int')
            entity_id = self.convertFromDB(row[3], 'long', 'int')
            entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
            mashup_alias = DBMashupAlias(name=name,
                                         id=id)
            mashup_alias.db_parent = parent
            mashup_alias.db_entity_id = entity_id
            mashup_alias.db_entity_type = entity_type
            mashup_alias.is_dirty = False
            res[('mashup_alias', id)] = mashup_alias
        return res
    def from_sql_fast(self, obj, all_objects):
        """Attach the loaded alias to its parent mashup, if present."""
        if ('mashup', obj.db_parent) in all_objects:
            p = all_objects[('mashup', obj.db_parent)]
            p.db_add_alias(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new or copied obj) or UPDATE (dirty obj) its row."""
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
        table = 'mashup_alias'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Like set_sql_columns, but return the SQL command instead of
        executing it (None when there is nothing to write)."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
        table = 'mashup_alias'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """No post-write processing is needed for this table."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Propagate this alias's id to its child component before saving."""
        if obj.db_component is not None:
            child = obj.db_component
            child.db_mashup_alias = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """DELETE the row for obj, keyed by id plus global_props."""
        table = 'mashup_alias'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBGroupSQLDAOBase(SQLDAO):
    """SQL persistence for DBGroup objects ('group_tbl' table).

    NOTE: generated automatically by auto_dao.py; change the generator,
    not this code.
    """
    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'group_tbl'
    def getDao(self, dao):
        """Return the DAO registered under the given key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT all matching rows and return {('group', id): DBGroup}."""
        columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'group_tbl'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            cache = self.convertFromDB(row[1], 'int', 'int')
            name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
            package = self.convertFromDB(row[4], 'str', 'varchar(511)')
            version = self.convertFromDB(row[5], 'str', 'varchar(255)')
            parentType = self.convertFromDB(row[6], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            parent = self.convertFromDB(row[9], 'long', 'long')
            group = DBGroup(cache=cache,
                            name=name,
                            namespace=namespace,
                            package=package,
                            version=version,
                            id=id)
            group.db_parentType = parentType
            group.db_entity_id = entity_id
            group.db_entity_type = entity_type
            group.db_parent = parent
            group.is_dirty = False
            res[('group', id)] = group
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build and return the SELECT command without executing it."""
        columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'group_tbl'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Convert already-fetched rows into DBGroup objects."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            cache = self.convertFromDB(row[1], 'int', 'int')
            name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
            package = self.convertFromDB(row[4], 'str', 'varchar(511)')
            version = self.convertFromDB(row[5], 'str', 'varchar(255)')
            parentType = self.convertFromDB(row[6], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            parent = self.convertFromDB(row[9], 'long', 'long')
            group = DBGroup(cache=cache,
                            name=name,
                            namespace=namespace,
                            package=package,
                            version=version,
                            id=id)
            group.db_parentType = parentType
            group.db_entity_id = entity_id
            group.db_entity_type = entity_type
            group.db_parent = parent
            group.is_dirty = False
            res[('group', id)] = group
        return res
    def from_sql_fast(self, obj, all_objects):
        """Attach the loaded group to its parent object by parent type."""
        if obj.db_parentType == 'workflow':
            p = all_objects[('workflow', obj.db_parent)]
            p.db_add_module(obj)
        elif obj.db_parentType == 'add':
            p = all_objects[('add', obj.db_parent)]
            p.db_add_data(obj)
        elif obj.db_parentType == 'change':
            p = all_objects[('change', obj.db_parent)]
            p.db_add_data(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new or copied obj) or UPDATE (dirty obj) its row."""
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'group_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_cache') and obj.db_cache is not None:
            columnMap['cache'] = \
                self.convertToDB(obj.db_cache, 'int', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
            columnMap['namespace'] = \
                self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['package'] = \
                self.convertToDB(obj.db_package, 'str', 'varchar(511)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Like set_sql_columns, but return the SQL command instead of
        executing it (None when there is nothing to write)."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'group_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_cache') and obj.db_cache is not None:
            columnMap['cache'] = \
                self.convertToDB(obj.db_cache, 'int', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
            columnMap['namespace'] = \
                self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['package'] = \
                self.convertToDB(obj.db_package, 'str', 'varchar(511)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """No post-write processing is needed for this table."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Propagate this group's identity to its children before saving."""
        if obj.db_workflow is not None:
            child = obj.db_workflow
            child.db_group = obj.db_id
        if obj.db_location is not None:
            child = obj.db_location
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_functions:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_annotations:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_controlParameters:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """DELETE the row for obj, keyed by id plus global_props."""
        table = 'group_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBAddSQLDAOBase(SQLDAO):
    """DAO mapping DBAdd objects to rows of the 'add_tbl' table.

    Same structure as the sibling *SQLDAOBase classes: get_* methods
    read rows and build objects, set_* methods build or execute
    INSERT/UPDATE statements, and delete_sql_column removes a row.
    """
    def __init__(self, daoList):
        """Store the shared DAO registry and this DAO's table name."""
        self.daoList = daoList
        self.table = 'add_tbl'
    def getDao(self, dao):
        """Return another DAO from the shared registry by key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT add rows matching global_props and return a dict of
        DBAdd objects keyed by ('add', id), ordered by id."""
        columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
        table = 'add_tbl'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            what = self.convertFromDB(row[1], 'str', 'varchar(255)')
            objectId = self.convertFromDB(row[2], 'long', 'int')
            parentObjId = self.convertFromDB(row[3], 'long', 'int')
            parentObjType = self.convertFromDB(row[4], 'str', 'char(16)')
            action = self.convertFromDB(row[5], 'long', 'int')
            entity_id = self.convertFromDB(row[6], 'long', 'int')
            entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
            add = DBAdd(what=what,
                        objectId=objectId,
                        parentObjId=parentObjId,
                        parentObjType=parentObjType,
                        id=id)
            add.db_action = action
            add.db_entity_id = entity_id
            add.db_entity_type = entity_type
            add.is_dirty = False
            res[('add', id)] = add
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the same SELECT statement used by
        get_sql_columns."""
        columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
        table = 'add_tbl'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Build DBAdd objects from already-fetched rows; same row
        mapping as get_sql_columns."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            what = self.convertFromDB(row[1], 'str', 'varchar(255)')
            objectId = self.convertFromDB(row[2], 'long', 'int')
            parentObjId = self.convertFromDB(row[3], 'long', 'int')
            parentObjType = self.convertFromDB(row[4], 'str', 'char(16)')
            action = self.convertFromDB(row[5], 'long', 'int')
            entity_id = self.convertFromDB(row[6], 'long', 'int')
            entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
            add = DBAdd(what=what,
                        objectId=objectId,
                        parentObjId=parentObjId,
                        parentObjType=parentObjType,
                        id=id)
            add.db_action = action
            add.db_entity_id = entity_id
            add.db_entity_type = entity_type
            add.is_dirty = False
            res[('add', id)] = add
        return res
    def from_sql_fast(self, obj, all_objects):
        """Link a loaded DBAdd to its owning action, when present in
        all_objects."""
        if ('action', obj.db_action) in all_objects:
            p = all_objects[('action', obj.db_action)]
            p.db_add_operation(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new or copy) or UPDATE this object's row; skipped
        when the object is clean and do_copy is False."""
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
        table = 'add_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_what') and obj.db_what is not None:
            columnMap['what'] = \
                self.convertToDB(obj.db_what, 'str', 'varchar(255)')
        if hasattr(obj, 'db_objectId') and obj.db_objectId is not None:
            columnMap['object_id'] = \
                self.convertToDB(obj.db_objectId, 'long', 'int')
        if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
            columnMap['par_obj_id'] = \
                self.convertToDB(obj.db_parentObjId, 'long', 'int')
        if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
            columnMap['par_obj_type'] = \
                self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
        if hasattr(obj, 'db_action') and obj.db_action is not None:
            columnMap['action_id'] = \
                self.convertToDB(obj.db_action, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Like set_sql_columns, but return the SQL command instead of
        executing it (None when there is nothing to write)."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
        table = 'add_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_what') and obj.db_what is not None:
            columnMap['what'] = \
                self.convertToDB(obj.db_what, 'str', 'varchar(255)')
        if hasattr(obj, 'db_objectId') and obj.db_objectId is not None:
            columnMap['object_id'] = \
                self.convertToDB(obj.db_objectId, 'long', 'int')
        if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
            columnMap['par_obj_id'] = \
                self.convertToDB(obj.db_parentObjId, 'long', 'int')
        if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
            columnMap['par_obj_type'] = \
                self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
        if hasattr(obj, 'db_action') and obj.db_action is not None:
            columnMap['action_id'] = \
                self.convertToDB(obj.db_action, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """Post-INSERT hook: nothing to do for this object type."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Stamp the child data object with this add's type and id
        before it is written."""
        if obj.db_data is not None:
            child = obj.db_data
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """DELETE this object's row from 'add_tbl'."""
        table = 'add_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBGroupExecSQLDAOBase(SQLDAO):
    """DAO mapping DBGroupExec objects to rows of the 'group_exec' table.

    Same structure as the sibling *SQLDAOBase classes: get_* methods
    read rows and build objects, set_* methods build or execute
    INSERT/UPDATE statements, and delete_sql_column removes a row.
    """
    def __init__(self, daoList):
        """Store the shared DAO registry and this DAO's table name."""
        self.daoList = daoList
        self.table = 'group_exec'
    def getDao(self, dao):
        """Return another DAO from the shared registry by key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT group_exec rows matching global_props and return a
        dict of DBGroupExec objects keyed by ('group_exec', id)."""
        columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'group_exec'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
            ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
            cached = self.convertFromDB(row[3], 'int', 'int')
            module_id = self.convertFromDB(row[4], 'long', 'int')
            group_name = self.convertFromDB(row[5], 'str', 'varchar(255)')
            group_type = self.convertFromDB(row[6], 'str', 'varchar(255)')
            completed = self.convertFromDB(row[7], 'int', 'int')
            error = self.convertFromDB(row[8], 'str', 'varchar(1023)')
            machine_id = self.convertFromDB(row[9], 'long', 'int')
            parentType = self.convertFromDB(row[10], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[11], 'long', 'int')
            entity_type = self.convertFromDB(row[12], 'str', 'char(16)')
            parent = self.convertFromDB(row[13], 'long', 'long')
            group_exec = DBGroupExec(ts_start=ts_start,
                                     ts_end=ts_end,
                                     cached=cached,
                                     module_id=module_id,
                                     group_name=group_name,
                                     group_type=group_type,
                                     completed=completed,
                                     error=error,
                                     machine_id=machine_id,
                                     id=id)
            group_exec.db_parentType = parentType
            group_exec.db_entity_id = entity_id
            group_exec.db_entity_type = entity_type
            group_exec.db_parent = parent
            group_exec.is_dirty = False
            res[('group_exec', id)] = group_exec
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the same SELECT statement used by
        get_sql_columns."""
        columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'group_exec'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Build DBGroupExec objects from already-fetched rows; same row
        mapping as get_sql_columns."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
            ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
            cached = self.convertFromDB(row[3], 'int', 'int')
            module_id = self.convertFromDB(row[4], 'long', 'int')
            group_name = self.convertFromDB(row[5], 'str', 'varchar(255)')
            group_type = self.convertFromDB(row[6], 'str', 'varchar(255)')
            completed = self.convertFromDB(row[7], 'int', 'int')
            error = self.convertFromDB(row[8], 'str', 'varchar(1023)')
            machine_id = self.convertFromDB(row[9], 'long', 'int')
            parentType = self.convertFromDB(row[10], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[11], 'long', 'int')
            entity_type = self.convertFromDB(row[12], 'str', 'char(16)')
            parent = self.convertFromDB(row[13], 'long', 'long')
            group_exec = DBGroupExec(ts_start=ts_start,
                                     ts_end=ts_end,
                                     cached=cached,
                                     module_id=module_id,
                                     group_name=group_name,
                                     group_type=group_type,
                                     completed=completed,
                                     error=error,
                                     machine_id=machine_id,
                                     id=id)
            group_exec.db_parentType = parentType
            group_exec.db_entity_id = entity_id
            group_exec.db_entity_type = entity_type
            group_exec.db_parent = parent
            group_exec.is_dirty = False
            res[('group_exec', id)] = group_exec
        return res
    def from_sql_fast(self, obj, all_objects):
        """Attach a loaded group_exec to its parent item-exec container
        (workflow_exec, loop_iteration, or group_exec)."""
        if obj.db_parentType == 'workflow_exec':
            p = all_objects[('workflow_exec', obj.db_parent)]
            p.db_add_item_exec(obj)
        elif obj.db_parentType == 'loop_iteration':
            p = all_objects[('loop_iteration', obj.db_parent)]
            p.db_add_item_exec(obj)
        elif obj.db_parentType == 'group_exec':
            p = all_objects[('group_exec', obj.db_parent)]
            p.db_add_item_exec(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new or copy) or UPDATE this object's row; skipped
        when the object is clean and do_copy is False."""
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'group_exec'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
            columnMap['ts_start'] = \
                self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
        if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
            columnMap['ts_end'] = \
                self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
        if hasattr(obj, 'db_cached') and obj.db_cached is not None:
            columnMap['cached'] = \
                self.convertToDB(obj.db_cached, 'int', 'int')
        if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
            columnMap['module_id'] = \
                self.convertToDB(obj.db_module_id, 'long', 'int')
        if hasattr(obj, 'db_group_name') and obj.db_group_name is not None:
            columnMap['group_name'] = \
                self.convertToDB(obj.db_group_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_group_type') and obj.db_group_type is not None:
            columnMap['group_type'] = \
                self.convertToDB(obj.db_group_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_completed') and obj.db_completed is not None:
            columnMap['completed'] = \
                self.convertToDB(obj.db_completed, 'int', 'int')
        if hasattr(obj, 'db_error') and obj.db_error is not None:
            columnMap['error'] = \
                self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
        if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None:
            columnMap['machine_id'] = \
                self.convertToDB(obj.db_machine_id, 'long', 'int')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Like set_sql_columns, but return the SQL command instead of
        executing it (None when there is nothing to write)."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'group_exec'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
            columnMap['ts_start'] = \
                self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
        if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
            columnMap['ts_end'] = \
                self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
        if hasattr(obj, 'db_cached') and obj.db_cached is not None:
            columnMap['cached'] = \
                self.convertToDB(obj.db_cached, 'int', 'int')
        if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
            columnMap['module_id'] = \
                self.convertToDB(obj.db_module_id, 'long', 'int')
        if hasattr(obj, 'db_group_name') and obj.db_group_name is not None:
            columnMap['group_name'] = \
                self.convertToDB(obj.db_group_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_group_type') and obj.db_group_type is not None:
            columnMap['group_type'] = \
                self.convertToDB(obj.db_group_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_completed') and obj.db_completed is not None:
            columnMap['completed'] = \
                self.convertToDB(obj.db_completed, 'int', 'int')
        if hasattr(obj, 'db_error') and obj.db_error is not None:
            columnMap['error'] = \
                self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
        if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None:
            columnMap['machine_id'] = \
                self.convertToDB(obj.db_machine_id, 'long', 'int')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """Post-INSERT hook: nothing to do for this object type."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Stamp child annotations and item execs with this exec's type
        and id before they are written."""
        for child in obj.db_annotations:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_item_execs:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """DELETE this object's row from 'group_exec'."""
        table = 'group_exec'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBParameterSQLDAOBase(SQLDAO):
    """DAO mapping DBParameter objects to rows of the 'parameter' table.

    Same structure as the sibling *SQLDAOBase classes: get_* methods
    read rows and build objects, set_* methods build or execute
    INSERT/UPDATE statements, and delete_sql_column removes a row.
    """
    def __init__(self, daoList):
        """Store the shared DAO registry and this DAO's table name."""
        self.daoList = daoList
        self.table = 'parameter'
    def getDao(self, dao):
        """Return another DAO from the shared registry by key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT parameter rows matching global_props and return a dict
        of DBParameter objects keyed by ('parameter', id)."""
        columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'parameter'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            pos = self.convertFromDB(row[1], 'long', 'int')
            name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            type = self.convertFromDB(row[3], 'str', 'varchar(255)')
            val = self.convertFromDB(row[4], 'str', 'mediumtext')
            alias = self.convertFromDB(row[5], 'str', 'varchar(255)')
            parentType = self.convertFromDB(row[6], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            parent = self.convertFromDB(row[9], 'long', 'long')
            parameter = DBParameter(pos=pos,
                                    name=name,
                                    type=type,
                                    val=val,
                                    alias=alias,
                                    id=id)
            parameter.db_parentType = parentType
            parameter.db_entity_id = entity_id
            parameter.db_entity_type = entity_type
            parameter.db_parent = parent
            parameter.is_dirty = False
            res[('parameter', id)] = parameter
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the same SELECT statement used by
        get_sql_columns."""
        columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'parameter'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Build DBParameter objects from already-fetched rows; same row
        mapping as get_sql_columns."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            pos = self.convertFromDB(row[1], 'long', 'int')
            name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            type = self.convertFromDB(row[3], 'str', 'varchar(255)')
            val = self.convertFromDB(row[4], 'str', 'mediumtext')
            alias = self.convertFromDB(row[5], 'str', 'varchar(255)')
            parentType = self.convertFromDB(row[6], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            parent = self.convertFromDB(row[9], 'long', 'long')
            parameter = DBParameter(pos=pos,
                                    name=name,
                                    type=type,
                                    val=val,
                                    alias=alias,
                                    id=id)
            parameter.db_parentType = parentType
            parameter.db_entity_id = entity_id
            parameter.db_entity_type = entity_type
            parameter.db_parent = parent
            parameter.is_dirty = False
            res[('parameter', id)] = parameter
        return res
    def from_sql_fast(self, obj, all_objects):
        """Attach a loaded parameter to its parent: a function gets it
        as a parameter, an add/change operation as its data."""
        if obj.db_parentType == 'function':
            p = all_objects[('function', obj.db_parent)]
            p.db_add_parameter(obj)
        elif obj.db_parentType == 'add':
            p = all_objects[('add', obj.db_parent)]
            p.db_add_data(obj)
        elif obj.db_parentType == 'change':
            p = all_objects[('change', obj.db_parent)]
            p.db_add_data(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new or copy) or UPDATE this object's row; skipped
        when the object is clean and do_copy is False."""
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'parameter'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_pos') and obj.db_pos is not None:
            columnMap['pos'] = \
                self.convertToDB(obj.db_pos, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_type') and obj.db_type is not None:
            columnMap['type'] = \
                self.convertToDB(obj.db_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_val') and obj.db_val is not None:
            columnMap['val'] = \
                self.convertToDB(obj.db_val, 'str', 'mediumtext')
        if hasattr(obj, 'db_alias') and obj.db_alias is not None:
            columnMap['alias'] = \
                self.convertToDB(obj.db_alias, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Like set_sql_columns, but return the SQL command instead of
        executing it (None when there is nothing to write)."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'parameter'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_pos') and obj.db_pos is not None:
            columnMap['pos'] = \
                self.convertToDB(obj.db_pos, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_type') and obj.db_type is not None:
            columnMap['type'] = \
                self.convertToDB(obj.db_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_val') and obj.db_val is not None:
            columnMap['val'] = \
                self.convertToDB(obj.db_val, 'str', 'mediumtext')
        if hasattr(obj, 'db_alias') and obj.db_alias is not None:
            columnMap['alias'] = \
                self.convertToDB(obj.db_alias, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """Post-INSERT hook: nothing to do for this object type."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Parameters have no child objects to update before writing."""
        pass
    def delete_sql_column(self, db, obj, global_props):
        """DELETE this object's row from 'parameter'."""
        table = 'parameter'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBVistrailSQLDAOBase(SQLDAO):
    """DAO mapping DBVistrail objects to rows of the 'vistrail' table.

    Unlike the child-object DAOs, this one also feeds the vistrail's id
    and entity_type back into global_props so other DAOs can scope their
    queries to this entity.
    """
    def __init__(self, daoList):
        """Store the shared DAO registry and this DAO's table name."""
        self.daoList = daoList
        self.table = 'vistrail'
    def getDao(self, dao):
        """Return another DAO from the shared registry by key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT vistrail rows matching global_props and return a dict
        of DBVistrail objects keyed by ('vistrail', id).

        Side effect: records each row's id and entity_type into
        global_props (as DB-converted strings) for use by other DAOs.
        """
        columns = ['id', 'entity_type', 'version', 'name', 'last_modified']
        table = 'vistrail'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
            entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
            global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
            version = self.convertFromDB(row[2], 'str', 'char(16)')
            name = self.convertFromDB(row[3], 'str', 'varchar(255)')
            last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
            vistrail = DBVistrail(entity_type=entity_type,
                                  version=version,
                                  name=name,
                                  last_modified=last_modified,
                                  id=id)
            vistrail.is_dirty = False
            res[('vistrail', id)] = vistrail
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the same SELECT statement used by
        get_sql_columns."""
        columns = ['id', 'entity_type', 'version', 'name', 'last_modified']
        table = 'vistrail'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Build DBVistrail objects from already-fetched rows; same row
        mapping (and global_props side effect) as get_sql_columns."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
            entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
            global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
            version = self.convertFromDB(row[2], 'str', 'char(16)')
            name = self.convertFromDB(row[3], 'str', 'varchar(255)')
            last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
            vistrail = DBVistrail(entity_type=entity_type,
                                  version=version,
                                  name=name,
                                  last_modified=last_modified,
                                  id=id)
            vistrail.is_dirty = False
            res[('vistrail', id)] = vistrail
        return res
    def from_sql_fast(self, obj, all_objects):
        """Vistrails are top-level objects: no parent to link to."""
        pass
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new or copy) or UPDATE this object's row; skipped
        when the object is clean and do_copy is False.

        After executing, adopts the DB-generated id when the object had
        none and records id/entity_type into global_props.
        """
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'entity_type', 'version', 'name', 'last_modified']
        table = 'vistrail'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'char(16)')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
            columnMap['last_modified'] = \
                self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
        if obj.db_id is None:
            obj.db_id = lastId
            # NOTE(review): keyStr is computed but never used here.
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Like set_sql_columns, but return the SQL command instead of
        executing it (None when there is nothing to write); the
        post-INSERT bookkeeping happens in set_sql_process instead."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'entity_type', 'version', 'name', 'last_modified']
        table = 'vistrail'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'char(16)')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
            columnMap['last_modified'] = \
                self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """Adopt the DB-generated id (when the object had none) and
        record id/entity_type into global_props after an INSERT."""
        if obj.db_id is None:
            obj.db_id = lastId
            # NOTE(review): keyStr is computed but never used here.
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
        # NOTE(review): trailing no-op statement; harmless.
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Propagate this vistrail's id (and, for annotation-like
        children, its vtType) down to all child collections before they
        are written."""
        for child in obj.db_actions:
            child.db_vistrail = obj.db_id
        for child in obj.db_tags:
            child.db_vistrail = obj.db_id
        for child in obj.db_annotations:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_controlParameters:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_vistrailVariables:
            child.db_vistrail = obj.db_id
        for child in obj.db_parameter_explorations:
            child.db_vistrail = obj.db_id
        for child in obj.db_actionAnnotations:
            child.db_vistrail = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """DELETE this object's row from 'vistrail'."""
        table = 'vistrail'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBModuleSQLDAOBase(SQLDAO):
    def __init__(self, daoList):
        """Store the shared DAO registry and this DAO's table name."""
        self.daoList = daoList
        self.table = 'module'
    def getDao(self, dao):
        """Return another DAO from the shared registry by key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT module rows matching global_props and return a dict of
        DBModule objects keyed by ('module', id), ordered by id."""
        columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'module'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            cache = self.convertFromDB(row[1], 'int', 'int')
            name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
            package = self.convertFromDB(row[4], 'str', 'varchar(511)')
            version = self.convertFromDB(row[5], 'str', 'varchar(255)')
            parentType = self.convertFromDB(row[6], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            parent = self.convertFromDB(row[9], 'long', 'long')
            module = DBModule(cache=cache,
                              name=name,
                              namespace=namespace,
                              package=package,
                              version=version,
                              id=id)
            module.db_parentType = parentType
            module.db_entity_id = entity_id
            module.db_entity_type = entity_type
            module.db_parent = parent
            module.is_dirty = False
            res[('module', id)] = module
        return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
cache = self.convertFromDB(row[1], 'int', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package = self.convertFromDB(row[4], 'str', 'varchar(511)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
module = DBModule(cache=cache,
name=name,
namespace=namespace,
package=package,
version=version,
id=id)
module.db_parentType = parentType
module.db_entity_id = entity_id
module.db_entity_type = entity_type
module.db_parent = parent
module.is_dirty = False
res[('module', id)] = module
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_module(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_location is not None:
child = obj.db_location
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_functions:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_controlParameters:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_portSpecs:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'module'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBPortSQLDAOBase(SQLDAO):
    """Generated SQL DAO mapping DBPort objects to the 'port' table.

    NOTE(review): machine-generated; column order and the SQL type
    strings must stay in sync with the schema/generator.
    """
    def __init__(self, daoList):
        # daoList maps vtType names to sibling DAOs (see getDao).
        self.daoList = daoList
        self.table = 'port'
    def getDao(self, dao):
        """Return the sibling DAO registered under key *dao*."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """Load all matching 'port' rows from *db* as DBPort objects.

        Returns a dict keyed by ('port', id).
        """
        columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'port'
        # NOTE: whereMap aliases global_props (no copy) -- read-only here.
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            type = self.convertFromDB(row[1], 'str', 'varchar(255)')
            moduleId = self.convertFromDB(row[2], 'long', 'int')
            moduleName = self.convertFromDB(row[3], 'str', 'varchar(255)')
            name = self.convertFromDB(row[4], 'str', 'varchar(255)')
            signature = self.convertFromDB(row[5], 'str', 'varchar(4095)')
            parentType = self.convertFromDB(row[6], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            parent = self.convertFromDB(row[9], 'long', 'long')
            port = DBPort(type=type,
                          moduleId=moduleId,
                          moduleName=moduleName,
                          name=name,
                          signature=signature,
                          id=id)
            # Parent/entity linkage is set as attributes, not ctor args.
            port.db_parentType = parentType
            port.db_entity_id = entity_id
            port.db_entity_type = entity_type
            port.db_parent = parent
            port.is_dirty = False
            res[('port', id)] = port
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the SELECT used by get_sql_columns."""
        columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'port'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Materialize DBPort objects from pre-fetched rows *data*."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            type = self.convertFromDB(row[1], 'str', 'varchar(255)')
            moduleId = self.convertFromDB(row[2], 'long', 'int')
            moduleName = self.convertFromDB(row[3], 'str', 'varchar(255)')
            name = self.convertFromDB(row[4], 'str', 'varchar(255)')
            signature = self.convertFromDB(row[5], 'str', 'varchar(4095)')
            parentType = self.convertFromDB(row[6], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            parent = self.convertFromDB(row[9], 'long', 'long')
            port = DBPort(type=type,
                          moduleId=moduleId,
                          moduleName=moduleName,
                          name=name,
                          signature=signature,
                          id=id)
            port.db_parentType = parentType
            port.db_entity_id = entity_id
            port.db_entity_type = entity_type
            port.db_parent = parent
            port.is_dirty = False
            res[('port', id)] = port
        return res
    def from_sql_fast(self, obj, all_objects):
        """Re-attach a loaded port to its parent via parent_type/parent_id."""
        if obj.db_parentType == 'connection':
            p = all_objects[('connection', obj.db_parent)]
            p.db_add_port(obj)
        elif obj.db_parentType == 'add':
            p = all_objects[('add', obj.db_parent)]
            p.db_add_data(obj)
        elif obj.db_parentType == 'change':
            p = all_objects[('change', obj.db_parent)]
            p.db_add_data(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new/copy) or UPDATE (dirty) *obj* into 'port'."""
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'port'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        # Only non-None attributes are written, so partial objects work.
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_type') and obj.db_type is not None:
            columnMap['type'] = \
                self.convertToDB(obj.db_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_moduleId') and obj.db_moduleId is not None:
            columnMap['moduleId'] = \
                self.convertToDB(obj.db_moduleId, 'long', 'int')
        if hasattr(obj, 'db_moduleName') and obj.db_moduleName is not None:
            columnMap['moduleName'] = \
                self.convertToDB(obj.db_moduleName, 'str', 'varchar(255)')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_signature') and obj.db_signature is not None:
            columnMap['signature'] = \
                self.convertToDB(obj.db_signature, 'str', 'varchar(4095)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        # Global props (entity_id/entity_type) override per-object values.
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        # lastId unused: port ids are assigned before save, not by the DB.
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for *obj*.

        Returns None when nothing needs writing.
        """
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'port'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_type') and obj.db_type is not None:
            columnMap['type'] = \
                self.convertToDB(obj.db_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_moduleId') and obj.db_moduleId is not None:
            columnMap['moduleId'] = \
                self.convertToDB(obj.db_moduleId, 'long', 'int')
        if hasattr(obj, 'db_moduleName') and obj.db_moduleName is not None:
            columnMap['moduleName'] = \
                self.convertToDB(obj.db_moduleName, 'str', 'varchar(255)')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_signature') and obj.db_signature is not None:
            columnMap['signature'] = \
                self.convertToDB(obj.db_signature, 'str', 'varchar(4095)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """Post-execution hook; ports need no id/props back-fill."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Ports have no children to stamp before saving."""
        pass
    def delete_sql_column(self, db, obj, global_props):
        """Delete *obj*'s row from 'port', scoped by global_props."""
        table = 'port'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBPEFunctionSQLDAOBase(SQLDAO):
    """Generated SQL DAO mapping DBPEFunction objects to 'pe_function'.

    NOTE(review): machine-generated; column order and the SQL type
    strings must stay in sync with the schema/generator.
    """
    def __init__(self, daoList):
        # daoList maps vtType names to sibling DAOs (see getDao).
        self.daoList = daoList
        self.table = 'pe_function'
    def getDao(self, dao):
        """Return the sibling DAO registered under key *dao*."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """Load all matching 'pe_function' rows as DBPEFunction objects.

        Returns a dict keyed by ('pe_function', id).
        """
        columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
        table = 'pe_function'
        # NOTE: whereMap aliases global_props (no copy) -- read-only here.
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            module_id = self.convertFromDB(row[1], 'long', 'int')
            port_name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            # NOTE(review): is_alias is read here but never stored on the
            # object below (while set_sql_columns does write db_is_alias) --
            # looks like a generated-code round-trip bug; confirm against
            # DBPEFunction's constructor/schema before changing.
            is_alias = self.convertFromDB(row[3], 'long', 'int')
            parentType = self.convertFromDB(row[4], 'str', 'char(32)')
            parameter_exploration = self.convertFromDB(row[5], 'long', 'int')
            entity_id = self.convertFromDB(row[6], 'long', 'int')
            entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
            pe_function = DBPEFunction(module_id=module_id,
                                       port_name=port_name,
                                       id=id)
            pe_function.db_parentType = parentType
            pe_function.db_parameter_exploration = parameter_exploration
            pe_function.db_entity_id = entity_id
            pe_function.db_entity_type = entity_type
            pe_function.is_dirty = False
            res[('pe_function', id)] = pe_function
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the SELECT used by get_sql_columns."""
        columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
        table = 'pe_function'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Materialize DBPEFunction objects from pre-fetched rows *data*."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            module_id = self.convertFromDB(row[1], 'long', 'int')
            port_name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            # NOTE(review): is_alias read but not stored -- see
            # get_sql_columns above.
            is_alias = self.convertFromDB(row[3], 'long', 'int')
            parentType = self.convertFromDB(row[4], 'str', 'char(32)')
            parameter_exploration = self.convertFromDB(row[5], 'long', 'int')
            entity_id = self.convertFromDB(row[6], 'long', 'int')
            entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
            pe_function = DBPEFunction(module_id=module_id,
                                       port_name=port_name,
                                       id=id)
            pe_function.db_parentType = parentType
            pe_function.db_parameter_exploration = parameter_exploration
            pe_function.db_entity_id = entity_id
            pe_function.db_entity_type = entity_type
            pe_function.is_dirty = False
            res[('pe_function', id)] = pe_function
        return res
    def from_sql_fast(self, obj, all_objects):
        """Re-attach a loaded pe_function to its parameter exploration."""
        if ('parameter_exploration', obj.db_parameter_exploration) in all_objects:
            p = all_objects[('parameter_exploration', obj.db_parameter_exploration)]
            p.db_add_function(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new/copy) or UPDATE (dirty) *obj* into 'pe_function'."""
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
        table = 'pe_function'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        # Only non-None attributes are written, so partial objects work.
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
            columnMap['module_id'] = \
                self.convertToDB(obj.db_module_id, 'long', 'int')
        if hasattr(obj, 'db_port_name') and obj.db_port_name is not None:
            columnMap['port_name'] = \
                self.convertToDB(obj.db_port_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_is_alias') and obj.db_is_alias is not None:
            columnMap['is_alias'] = \
                self.convertToDB(obj.db_is_alias, 'long', 'int')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_parameter_exploration') and obj.db_parameter_exploration is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parameter_exploration, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        # Global props (entity_id/entity_type) override per-object values.
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        # lastId unused: ids are assigned before save, not by the DB.
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for *obj*.

        Returns None when nothing needs writing.
        """
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
        table = 'pe_function'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
            columnMap['module_id'] = \
                self.convertToDB(obj.db_module_id, 'long', 'int')
        if hasattr(obj, 'db_port_name') and obj.db_port_name is not None:
            columnMap['port_name'] = \
                self.convertToDB(obj.db_port_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_is_alias') and obj.db_is_alias is not None:
            columnMap['is_alias'] = \
                self.convertToDB(obj.db_is_alias, 'long', 'int')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_parameter_exploration') and obj.db_parameter_exploration is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parameter_exploration, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """Post-execution hook; pe_functions need no id/props back-fill."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Stamp the owning pe_function id onto child parameters."""
        for child in obj.db_parameters:
            child.db_pe_function = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """Delete *obj*'s row from 'pe_function', scoped by global_props."""
        table = 'pe_function'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBWorkflowSQLDAOBase(SQLDAO):
    """Generated SQL DAO mapping DBWorkflow objects to the 'workflow' table.

    Workflows are top-level entities: loading or saving one also
    back-fills 'entity_id'/'entity_type' into global_props so child
    DAOs are scoped to this workflow.

    NOTE(review): machine-generated; column order and the SQL type
    strings must stay in sync with the schema/generator.
    """
    def __init__(self, daoList):
        # daoList maps vtType names to sibling DAOs (see getDao).
        self.daoList = daoList
        self.table = 'workflow'
    def getDao(self, dao):
        """Return the sibling DAO registered under key *dao*."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """Load all matching 'workflow' rows as DBWorkflow objects.

        Side effect: writes 'entity_id' and 'entity_type' into
        global_props (last row wins) for use by child DAO queries.
        """
        columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id']
        table = 'workflow'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            # Mutates global_props: scope subsequent child loads by entity.
            global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
            entity_id = self.convertFromDB(row[1], 'long', 'int')
            entity_type = self.convertFromDB(row[2], 'str', 'char(16)')
            global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
            name = self.convertFromDB(row[3], 'str', 'varchar(255)')
            version = self.convertFromDB(row[4], 'str', 'char(16)')
            last_modified = self.convertFromDB(row[5], 'datetime', 'datetime')
            vistrail_id = self.convertFromDB(row[6], 'long', 'int')
            group = self.convertFromDB(row[7], 'long', 'int')
            workflow = DBWorkflow(entity_type=entity_type,
                                  name=name,
                                  version=version,
                                  last_modified=last_modified,
                                  vistrail_id=vistrail_id,
                                  id=id)
            workflow.db_entity_id = entity_id
            # 'parent_id' column holds the owning group's id.
            workflow.db_group = group
            workflow.is_dirty = False
            res[('workflow', id)] = workflow
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the SELECT used by get_sql_columns."""
        columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id']
        table = 'workflow'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Materialize DBWorkflow objects from pre-fetched rows *data*.

        Same global_props side effect as get_sql_columns.
        """
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
            entity_id = self.convertFromDB(row[1], 'long', 'int')
            entity_type = self.convertFromDB(row[2], 'str', 'char(16)')
            global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
            name = self.convertFromDB(row[3], 'str', 'varchar(255)')
            version = self.convertFromDB(row[4], 'str', 'char(16)')
            last_modified = self.convertFromDB(row[5], 'datetime', 'datetime')
            vistrail_id = self.convertFromDB(row[6], 'long', 'int')
            group = self.convertFromDB(row[7], 'long', 'int')
            workflow = DBWorkflow(entity_type=entity_type,
                                  name=name,
                                  version=version,
                                  last_modified=last_modified,
                                  vistrail_id=vistrail_id,
                                  id=id)
            workflow.db_entity_id = entity_id
            workflow.db_group = group
            workflow.is_dirty = False
            res[('workflow', id)] = workflow
        return res
    def from_sql_fast(self, obj, all_objects):
        """Re-attach a loaded workflow to its owning group, if present."""
        if ('group', obj.db_group) in all_objects:
            p = all_objects[('group', obj.db_group)]
            p.db_add_workflow(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new/copy) or UPDATE (dirty) *obj* into 'workflow'.

        On insert without a preset id, adopts the DB-generated id and
        back-fills entity_id/entity_type into global_props.
        """
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id']
        table = 'workflow'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        # Only non-None attributes are written, so partial objects work.
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'char(16)')
        if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
            columnMap['last_modified'] = \
                self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
        if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None:
            columnMap['vistrail_id'] = \
                self.convertToDB(obj.db_vistrail_id, 'long', 'int')
        if hasattr(obj, 'db_group') and obj.db_group is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_group, 'long', 'int')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
        # Adopt the auto-generated primary key for brand-new workflows.
        if obj.db_id is None:
            obj.db_id = lastId
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
        # Propagate entity scoping to global_props for child DAO saves.
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for *obj*.

        Returns None when nothing needs writing; the id/global_props
        back-fill happens later in set_sql_process.
        """
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id']
        table = 'workflow'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'char(16)')
        if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
            columnMap['last_modified'] = \
                self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
        if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None:
            columnMap['vistrail_id'] = \
                self.convertToDB(obj.db_vistrail_id, 'long', 'int')
        if hasattr(obj, 'db_group') and obj.db_group is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_group, 'long', 'int')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """Adopt *lastId* for new workflows and back-fill global_props."""
        if obj.db_id is None:
            obj.db_id = lastId
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
        # Generator artifact: trailing 'pass' after real statements.
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Stamp parent_type/parent_id onto all children before saving."""
        for child in obj.db_connections:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_annotations:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_plugin_datas:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_others:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_modules:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """Delete *obj*'s row from 'workflow', scoped by global_props."""
        table = 'workflow'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBMashupActionSQLDAOBase(SQLDAO):
    """Generated SQL DAO mapping DBMashupAction objects to 'mashup_action'.

    NOTE(review): machine-generated; column order and the SQL type
    strings must stay in sync with the schema/generator.
    """
    def __init__(self, daoList):
        # daoList maps vtType names to sibling DAOs (see getDao).
        self.daoList = daoList
        self.table = 'mashup_action'
    def getDao(self, dao):
        """Return the sibling DAO registered under key *dao*."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """Load all matching 'mashup_action' rows as DBMashupAction objects.

        Returns a dict keyed by ('mashup_action', id).
        """
        columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
        table = 'mashup_action'
        # NOTE: whereMap aliases global_props (no copy) -- read-only here.
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            prevId = self.convertFromDB(row[1], 'long', 'int')
            date = self.convertFromDB(row[2], 'datetime', 'datetime')
            user = self.convertFromDB(row[3], 'str', 'varchar(255)')
            # 'parent_id' column holds the owning mashuptrail's id.
            mashuptrail = self.convertFromDB(row[4], 'long', 'int')
            entity_id = self.convertFromDB(row[5], 'long', 'int')
            entity_type = self.convertFromDB(row[6], 'str', 'char(16)')
            mashup_action = DBMashupAction(prevId=prevId,
                                           date=date,
                                           user=user,
                                           id=id)
            mashup_action.db_mashuptrail = mashuptrail
            mashup_action.db_entity_id = entity_id
            mashup_action.db_entity_type = entity_type
            mashup_action.is_dirty = False
            res[('mashup_action', id)] = mashup_action
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the SELECT used by get_sql_columns."""
        columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
        table = 'mashup_action'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Materialize DBMashupAction objects from pre-fetched rows *data*."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            prevId = self.convertFromDB(row[1], 'long', 'int')
            date = self.convertFromDB(row[2], 'datetime', 'datetime')
            user = self.convertFromDB(row[3], 'str', 'varchar(255)')
            mashuptrail = self.convertFromDB(row[4], 'long', 'int')
            entity_id = self.convertFromDB(row[5], 'long', 'int')
            entity_type = self.convertFromDB(row[6], 'str', 'char(16)')
            mashup_action = DBMashupAction(prevId=prevId,
                                           date=date,
                                           user=user,
                                           id=id)
            mashup_action.db_mashuptrail = mashuptrail
            mashup_action.db_entity_id = entity_id
            mashup_action.db_entity_type = entity_type
            mashup_action.is_dirty = False
            res[('mashup_action', id)] = mashup_action
        return res
    def from_sql_fast(self, obj, all_objects):
        """Re-attach a loaded action to its mashuptrail, if present."""
        if ('mashuptrail', obj.db_mashuptrail) in all_objects:
            p = all_objects[('mashuptrail', obj.db_mashuptrail)]
            p.db_add_action(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new/copy) or UPDATE (dirty) *obj* into 'mashup_action'."""
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
        table = 'mashup_action'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        # Only non-None attributes are written, so partial objects work.
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_prevId') and obj.db_prevId is not None:
            columnMap['prev_id'] = \
                self.convertToDB(obj.db_prevId, 'long', 'int')
        if hasattr(obj, 'db_date') and obj.db_date is not None:
            columnMap['date'] = \
                self.convertToDB(obj.db_date, 'datetime', 'datetime')
        if hasattr(obj, 'db_user') and obj.db_user is not None:
            columnMap['user'] = \
                self.convertToDB(obj.db_user, 'str', 'varchar(255)')
        if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_mashuptrail, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        # Global props (entity_id/entity_type) override per-object values.
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        # lastId unused: ids are assigned before save, not by the DB.
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for *obj*.

        Returns None when nothing needs writing.
        """
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
        table = 'mashup_action'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_prevId') and obj.db_prevId is not None:
            columnMap['prev_id'] = \
                self.convertToDB(obj.db_prevId, 'long', 'int')
        if hasattr(obj, 'db_date') and obj.db_date is not None:
            columnMap['date'] = \
                self.convertToDB(obj.db_date, 'datetime', 'datetime')
        if hasattr(obj, 'db_user') and obj.db_user is not None:
            columnMap['user'] = \
                self.convertToDB(obj.db_user, 'str', 'varchar(255)')
        if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_mashuptrail, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """Post-execution hook; mashup actions need no id back-fill."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Stamp this action's id onto its child mashup before saving."""
        if obj.db_mashup is not None:
            child = obj.db_mashup
            child.db_parent = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """Delete *obj*'s row from 'mashup_action', scoped by global_props."""
        table = 'mashup_action'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBChangeSQLDAOBase(SQLDAO):
    """SQL data-access object for DBChange rows stored in 'change_tbl'."""

    # One entry per table column, in column order:
    # (object attribute, column name, python type, database type)
    _FIELD_SPECS = [
        ('db_id', 'id', 'long', 'int'),
        ('db_what', 'what', 'str', 'varchar(255)'),
        ('db_oldObjId', 'old_obj_id', 'long', 'int'),
        ('db_newObjId', 'new_obj_id', 'long', 'int'),
        ('db_parentObjId', 'par_obj_id', 'long', 'int'),
        ('db_parentObjType', 'par_obj_type', 'str', 'char(16)'),
        ('db_action', 'action_id', 'long', 'int'),
        ('db_entity_id', 'entity_id', 'long', 'int'),
        ('db_entity_type', 'entity_type', 'str', 'char(16)'),
    ]

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'change_tbl'

    def getDao(self, dao):
        return self.daoList[dao]

    def _column_names(self):
        # Column list in the fixed order expected by the schema.
        return [col for _attr, col, _pt, _dt in self._FIELD_SPECS]

    def _row_to_change(self, row):
        # Decode one SELECT result row into a DBChange instance.
        (id, what, oldObjId, newObjId, parentObjId, parentObjType,
         action, entity_id, entity_type) = [
            self.convertFromDB(value, py_type, db_type)
            for value, (_attr, _col, py_type, db_type)
            in zip(row, self._FIELD_SPECS)]
        change = DBChange(what=what,
                          oldObjId=oldObjId,
                          newObjId=newObjId,
                          parentObjId=parentObjId,
                          parentObjType=parentObjType,
                          id=id)
        change.db_action = action
        change.db_entity_id = entity_id
        change.db_entity_type = entity_type
        change.is_dirty = False
        return id, change

    def get_sql_columns(self, db, global_props, lock=False):
        """Load every matching change row from the database."""
        dbCommand = self.createSQLSelect(self.table, self._column_names(),
                                         global_props, 'id', lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)

    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT command for change rows."""
        return self.createSQLSelect(self.table, self._column_names(),
                                    global_props, 'id', lock)

    def process_sql_columns(self, data, global_props):
        """Convert raw SELECT rows into DBChange objects keyed by id."""
        res = {}
        for row in data:
            id, change = self._row_to_change(row)
            res[('change', id)] = change
        return res

    def from_sql_fast(self, obj, all_objects):
        """Link a loaded change to its owning action, when present."""
        parent_key = ('action', obj.db_action)
        if parent_key in all_objects:
            all_objects[parent_key].db_add_operation(obj)

    def _where_map(self, obj, global_props):
        # WHERE clause: scope columns plus the primary key, when known.
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        return whereMap

    def _column_map(self, obj, global_props):
        # Encode every present, non-None attribute; scope columns win last.
        columnMap = {}
        for attr, col, py_type, db_type in self._FIELD_SPECS:
            value = getattr(obj, attr, None)
            if value is not None:
                columnMap[col] = self.convertToDB(value, py_type, db_type)
        columnMap.update(global_props)
        return columnMap

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Persist obj: INSERT when new or copying, UPDATE otherwise."""
        if not do_copy and not obj.is_dirty:
            return
        columnMap = self._column_map(obj, global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(self.table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(self.table, columnMap,
                                             self._where_map(obj, global_props))
        self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Return the INSERT/UPDATE command for obj without executing it."""
        if not do_copy and not obj.is_dirty:
            return None
        columnMap = self._column_map(obj, global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert(self.table, columnMap)
        return self.createSQLUpdate(self.table, columnMap,
                                    self._where_map(obj, global_props))

    def set_sql_process(self, obj, global_props, lastId):
        """No id back-fill is needed for change rows."""
        pass

    def to_sql_fast(self, obj, do_copy=True):
        """Stamp parent linkage onto the child data object before saving."""
        data = obj.db_data
        if data is not None:
            data.db_parentType = obj.vtType
            data.db_parent = obj.db_id

    def delete_sql_column(self, db, obj, global_props):
        """Delete obj's row from 'change_tbl'."""
        dbCommand = self.createSQLDelete(self.table,
                                        self._where_map(obj, global_props))
        self.executeSQL(db, dbCommand, False)
class DBPackageSQLDAOBase(SQLDAO):
    """SQL data-access object for DBPackage rows stored in 'package'."""

    # One entry per table column, in column order:
    # (object attribute, column name, python type, database type)
    _FIELD_SPECS = [
        ('db_id', 'id', 'long', 'int'),
        ('db_name', 'name', 'str', 'varchar(255)'),
        ('db_identifier', 'identifier', 'str', 'varchar(1023)'),
        ('db_codepath', 'codepath', 'str', 'varchar(1023)'),
        ('db_load_configuration', 'load_configuration', 'int', 'int'),
        ('db_version', 'version', 'str', 'varchar(255)'),
        ('db_description', 'description', 'str', 'varchar(1023)'),
        ('db_registry', 'parent_id', 'long', 'int'),
        ('db_entity_id', 'entity_id', 'long', 'int'),
        ('db_entity_type', 'entity_type', 'str', 'char(16)'),
    ]

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'package'

    def getDao(self, dao):
        return self.daoList[dao]

    def _column_names(self):
        # Column list in the fixed order expected by the schema.
        return [col for _attr, col, _pt, _dt in self._FIELD_SPECS]

    def _row_to_package(self, row):
        # Decode one SELECT result row into a DBPackage instance.
        (id, name, identifier, codepath, load_configuration, version,
         description, registry, entity_id, entity_type) = [
            self.convertFromDB(value, py_type, db_type)
            for value, (_attr, _col, py_type, db_type)
            in zip(row, self._FIELD_SPECS)]
        package = DBPackage(name=name,
                            identifier=identifier,
                            codepath=codepath,
                            load_configuration=load_configuration,
                            version=version,
                            description=description,
                            id=id)
        package.db_registry = registry
        package.db_entity_id = entity_id
        package.db_entity_type = entity_type
        package.is_dirty = False
        return id, package

    def get_sql_columns(self, db, global_props, lock=False):
        """Load every matching package row from the database."""
        dbCommand = self.createSQLSelect(self.table, self._column_names(),
                                         global_props, 'id', lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)

    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT command for package rows."""
        return self.createSQLSelect(self.table, self._column_names(),
                                    global_props, 'id', lock)

    def process_sql_columns(self, data, global_props):
        """Convert raw SELECT rows into DBPackage objects keyed by id."""
        res = {}
        for row in data:
            id, package = self._row_to_package(row)
            res[('package', id)] = package
        return res

    def from_sql_fast(self, obj, all_objects):
        """Link a loaded package to its owning registry, when present."""
        parent_key = ('registry', obj.db_registry)
        if parent_key in all_objects:
            all_objects[parent_key].db_add_package(obj)

    def _where_map(self, obj, global_props):
        # WHERE clause: scope columns plus the primary key, when known.
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        return whereMap

    def _column_map(self, obj, global_props):
        # Encode every present, non-None attribute; scope columns win last.
        columnMap = {}
        for attr, col, py_type, db_type in self._FIELD_SPECS:
            value = getattr(obj, attr, None)
            if value is not None:
                columnMap[col] = self.convertToDB(value, py_type, db_type)
        columnMap.update(global_props)
        return columnMap

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Persist obj and adopt the auto-generated key after a fresh INSERT."""
        if not do_copy and not obj.is_dirty:
            return
        columnMap = self._column_map(obj, global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(self.table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(self.table, columnMap,
                                             self._where_map(obj, global_props))
        lastId = self.executeSQL(db, dbCommand, False)
        if obj.db_id is None:
            # 'package' rows use an auto-generated primary key.
            obj.db_id = lastId
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')  # unused; kept for parity

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Return the INSERT/UPDATE command for obj without executing it."""
        if not do_copy and not obj.is_dirty:
            return None
        columnMap = self._column_map(obj, global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert(self.table, columnMap)
        return self.createSQLUpdate(self.table, columnMap,
                                    self._where_map(obj, global_props))

    def set_sql_process(self, obj, global_props, lastId):
        """Adopt the auto-generated primary key after an INSERT."""
        if obj.db_id is None:
            obj.db_id = lastId
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')  # unused; kept for parity

    def to_sql_fast(self, obj, do_copy=True):
        """Stamp this package's id onto its module descriptors before saving."""
        for descriptor in obj.db_module_descriptors:
            descriptor.db_package = obj.db_id

    def delete_sql_column(self, db, obj, global_props):
        """Delete obj's row from 'package'."""
        dbCommand = self.createSQLDelete(self.table,
                                        self._where_map(obj, global_props))
        self.executeSQL(db, dbCommand, False)
class DBLoopExecSQLDAOBase(SQLDAO):
    """SQL data-access object for DBLoopExec rows stored in 'loop_exec'."""

    # One entry per table column, in column order:
    # (object attribute, column name, python type, database type)
    _FIELD_SPECS = [
        ('db_id', 'id', 'long', 'int'),
        ('db_ts_start', 'ts_start', 'datetime', 'datetime'),
        ('db_ts_end', 'ts_end', 'datetime', 'datetime'),
        ('db_parentType', 'parent_type', 'str', 'char(32)'),
        ('db_entity_id', 'entity_id', 'long', 'int'),
        ('db_entity_type', 'entity_type', 'str', 'char(16)'),
        ('db_parent', 'parent_id', 'long', 'long'),
    ]

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'loop_exec'

    def getDao(self, dao):
        return self.daoList[dao]

    def _column_names(self):
        # Column list in the fixed order expected by the schema.
        return [col for _attr, col, _pt, _dt in self._FIELD_SPECS]

    def _row_to_loop_exec(self, row):
        # Decode one SELECT result row into a DBLoopExec instance.
        (id, ts_start, ts_end, parentType, entity_id,
         entity_type, parent) = [
            self.convertFromDB(value, py_type, db_type)
            for value, (_attr, _col, py_type, db_type)
            in zip(row, self._FIELD_SPECS)]
        loop_exec = DBLoopExec(ts_start=ts_start,
                               ts_end=ts_end,
                               id=id)
        loop_exec.db_parentType = parentType
        loop_exec.db_entity_id = entity_id
        loop_exec.db_entity_type = entity_type
        loop_exec.db_parent = parent
        loop_exec.is_dirty = False
        return id, loop_exec

    def get_sql_columns(self, db, global_props, lock=False):
        """Load every matching loop_exec row from the database."""
        dbCommand = self.createSQLSelect(self.table, self._column_names(),
                                         global_props, 'id', lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)

    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT command for loop_exec rows."""
        return self.createSQLSelect(self.table, self._column_names(),
                                    global_props, 'id', lock)

    def process_sql_columns(self, data, global_props):
        """Convert raw SELECT rows into DBLoopExec objects keyed by id."""
        res = {}
        for row in data:
            id, loop_exec = self._row_to_loop_exec(row)
            res[('loop_exec', id)] = loop_exec
        return res

    def from_sql_fast(self, obj, all_objects):
        """Hook a loaded loop_exec into its parent execution object.

        Parents are looked up unconditionally; a missing parent raises
        KeyError, matching the generated behavior.
        """
        if obj.db_parentType == 'workflow_exec':
            all_objects[('workflow_exec', obj.db_parent)].db_add_item_exec(obj)
        elif obj.db_parentType == 'group_exec':
            all_objects[('group_exec', obj.db_parent)].db_add_item_exec(obj)
        elif obj.db_parentType == 'module_exec':
            all_objects[('module_exec', obj.db_parent)].db_add_loop_exec(obj)

    def _where_map(self, obj, global_props):
        # WHERE clause: scope columns plus the primary key, when known.
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        return whereMap

    def _column_map(self, obj, global_props):
        # Encode every present, non-None attribute; scope columns win last.
        columnMap = {}
        for attr, col, py_type, db_type in self._FIELD_SPECS:
            value = getattr(obj, attr, None)
            if value is not None:
                columnMap[col] = self.convertToDB(value, py_type, db_type)
        columnMap.update(global_props)
        return columnMap

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Persist obj: INSERT when new or copying, UPDATE otherwise."""
        if not do_copy and not obj.is_dirty:
            return
        columnMap = self._column_map(obj, global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(self.table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(self.table, columnMap,
                                             self._where_map(obj, global_props))
        self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Return the INSERT/UPDATE command for obj without executing it."""
        if not do_copy and not obj.is_dirty:
            return None
        columnMap = self._column_map(obj, global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert(self.table, columnMap)
        return self.createSQLUpdate(self.table, columnMap,
                                    self._where_map(obj, global_props))

    def set_sql_process(self, obj, global_props, lastId):
        """No id back-fill is needed for loop_exec rows."""
        pass

    def to_sql_fast(self, obj, do_copy=True):
        """Stamp this loop_exec's id onto its iterations before saving."""
        for iteration in obj.db_loop_iterations:
            iteration.db_parent = obj.db_id

    def delete_sql_column(self, db, obj, global_props):
        """Delete obj's row from 'loop_exec'."""
        dbCommand = self.createSQLDelete(self.table,
                                        self._where_map(obj, global_props))
        self.executeSQL(db, dbCommand, False)
class DBConnectionSQLDAOBase(SQLDAO):
    """SQL data-access object for DBConnection rows in 'connection_tbl'."""

    # One entry per table column, in column order:
    # (object attribute, column name, python type, database type)
    _FIELD_SPECS = [
        ('db_id', 'id', 'long', 'int'),
        ('db_parentType', 'parent_type', 'str', 'char(32)'),
        ('db_entity_id', 'entity_id', 'long', 'int'),
        ('db_entity_type', 'entity_type', 'str', 'char(16)'),
        ('db_parent', 'parent_id', 'long', 'long'),
    ]

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'connection_tbl'

    def getDao(self, dao):
        return self.daoList[dao]

    def _column_names(self):
        # Column list in the fixed order expected by the schema.
        return [col for _attr, col, _pt, _dt in self._FIELD_SPECS]

    def _row_to_connection(self, row):
        # Decode one SELECT result row into a DBConnection instance.
        (id, parentType, entity_id, entity_type, parent) = [
            self.convertFromDB(value, py_type, db_type)
            for value, (_attr, _col, py_type, db_type)
            in zip(row, self._FIELD_SPECS)]
        connection = DBConnection(id=id)
        connection.db_parentType = parentType
        connection.db_entity_id = entity_id
        connection.db_entity_type = entity_type
        connection.db_parent = parent
        connection.is_dirty = False
        return id, connection

    def get_sql_columns(self, db, global_props, lock=False):
        """Load every matching connection row from the database."""
        dbCommand = self.createSQLSelect(self.table, self._column_names(),
                                         global_props, 'id', lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)

    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT for connection rows."""
        return self.createSQLSelect(self.table, self._column_names(),
                                    global_props, 'id', lock)

    def process_sql_columns(self, data, global_props):
        """Convert raw SELECT rows into DBConnection objects keyed by id."""
        res = {}
        for row in data:
            id, connection = self._row_to_connection(row)
            res[('connection', id)] = connection
        return res

    def from_sql_fast(self, obj, all_objects):
        """Hook a loaded connection into its parent object.

        Parents are looked up unconditionally; a missing parent raises
        KeyError, matching the generated behavior.
        """
        if obj.db_parentType == 'workflow':
            all_objects[('workflow', obj.db_parent)].db_add_connection(obj)
        elif obj.db_parentType == 'add':
            all_objects[('add', obj.db_parent)].db_add_data(obj)
        elif obj.db_parentType == 'change':
            all_objects[('change', obj.db_parent)].db_add_data(obj)

    def _where_map(self, obj, global_props):
        # WHERE clause: scope columns plus the primary key, when known.
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        return whereMap

    def _column_map(self, obj, global_props):
        # Encode every present, non-None attribute; scope columns win last.
        columnMap = {}
        for attr, col, py_type, db_type in self._FIELD_SPECS:
            value = getattr(obj, attr, None)
            if value is not None:
                columnMap[col] = self.convertToDB(value, py_type, db_type)
        columnMap.update(global_props)
        return columnMap

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Persist obj: INSERT when new or copying, UPDATE otherwise."""
        if not do_copy and not obj.is_dirty:
            return
        columnMap = self._column_map(obj, global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(self.table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(self.table, columnMap,
                                             self._where_map(obj, global_props))
        self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Return the INSERT/UPDATE command for obj without executing it."""
        if not do_copy and not obj.is_dirty:
            return None
        columnMap = self._column_map(obj, global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert(self.table, columnMap)
        return self.createSQLUpdate(self.table, columnMap,
                                    self._where_map(obj, global_props))

    def set_sql_process(self, obj, global_props, lastId):
        """No id back-fill is needed for connection rows."""
        pass

    def to_sql_fast(self, obj, do_copy=True):
        """Stamp parent linkage onto every child port before saving."""
        for port in obj.db_ports:
            port.db_parentType = obj.vtType
            port.db_parent = obj.db_id

    def delete_sql_column(self, db, obj, global_props):
        """Delete obj's row from 'connection_tbl'."""
        dbCommand = self.createSQLDelete(self.table,
                                        self._where_map(obj, global_props))
        self.executeSQL(db, dbCommand, False)
class DBActionSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'action'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
prevId = self.convertFromDB(row[1], 'long', 'int')
date = self.convertFromDB(row[2], 'datetime', 'datetime')
session = self.convertFromDB(row[3], 'long', 'int')
user = self.convertFromDB(row[4], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
action = DBAction(prevId=prevId,
date=date,
session=session,
user=user,
id=id)
action.db_vistrail = vistrail
action.db_entity_id = entity_id
action.db_entity_type = entity_type
action.is_dirty = False
res[('action', id)] = action
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
prevId = self.convertFromDB(row[1], 'long', 'int')
date = self.convertFromDB(row[2], 'datetime', 'datetime')
session = self.convertFromDB(row[3], 'long', 'int')
user = self.convertFromDB(row[4], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
action = DBAction(prevId=prevId,
date=date,
session=session,
user=user,
id=id)
action.db_vistrail = vistrail
action.db_entity_id = entity_id
action.db_entity_type = entity_type
action.is_dirty = False
res[('action', id)] = action
return res
def from_sql_fast(self, obj, all_objects):
if ('vistrail', obj.db_vistrail) in all_objects:
p = all_objects[('vistrail', obj.db_vistrail)]
p.db_add_action(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_prevId') and obj.db_prevId is not None:
columnMap['prev_id'] = \
self.convertToDB(obj.db_prevId, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_session') and obj.db_session is not None:
columnMap['session'] = \
self.convertToDB(obj.db_session, 'long', 'int')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (but do not execute) the INSERT/UPDATE for this action.

        Mirrors set_sql_columns exactly, except the command string is
        returned for batched execution instead of being run immediately.
        Returns None when the object is clean and do_copy is False.
        """
        if not do_copy and not obj.is_dirty:
            return None
        # NOTE(review): 'columns' is assigned but never used (generator artifact).
        columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type']
        table = 'action'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_prevId') and obj.db_prevId is not None:
            columnMap['prev_id'] = \
                self.convertToDB(obj.db_prevId, 'long', 'int')
        if hasattr(obj, 'db_date') and obj.db_date is not None:
            columnMap['date'] = \
                self.convertToDB(obj.db_date, 'datetime', 'datetime')
        if hasattr(obj, 'db_session') and obj.db_session is not None:
            columnMap['session'] = \
                self.convertToDB(obj.db_session, 'long', 'int')
        if hasattr(obj, 'db_user') and obj.db_user is not None:
            columnMap['user'] = \
                self.convertToDB(obj.db_user, 'str', 'varchar(255)')
        # The owning vistrail maps to the 'parent_id' column.
        if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_vistrail, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        # global_props override per-object values.
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        # Post-insert hook: actions carry caller-assigned ids, so there is
        # no database-generated key to capture here.
        pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_operations:
child.db_action = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBPortSpecSQLDAOBase(SQLDAO):
    """Auto-generated DAO mapping DBPortSpec objects to the 'port_spec' table."""
    def __init__(self, daoList):
        # Registry of sibling DAOs, shared across the whole DAO layer.
        self.daoList = daoList
        self.table = 'port_spec'
    def getDao(self, dao):
        """Return the sibling DAO registered under the given key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT all matching port_spec rows and return built objects.

        Returns a dict keyed by ('portSpec', id); objects are marked clean.
        """
        columns = ['id', 'name', 'type', 'optional', 'depth', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'port_spec'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            # Row positions correspond 1:1 to the 'columns' list above.
            id = self.convertFromDB(row[0], 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'varchar(255)')
            type = self.convertFromDB(row[2], 'str', 'varchar(255)')
            optional = self.convertFromDB(row[3], 'int', 'int')
            depth = self.convertFromDB(row[4], 'int', 'int')
            sort_key = self.convertFromDB(row[5], 'int', 'int')
            min_conns = self.convertFromDB(row[6], 'int', 'int')
            max_conns = self.convertFromDB(row[7], 'int', 'int')
            parentType = self.convertFromDB(row[8], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[9], 'long', 'int')
            entity_type = self.convertFromDB(row[10], 'str', 'char(16)')
            parent = self.convertFromDB(row[11], 'long', 'long')
            portSpec = DBPortSpec(name=name,
                                  type=type,
                                  optional=optional,
                                  depth=depth,
                                  sort_key=sort_key,
                                  min_conns=min_conns,
                                  max_conns=max_conns,
                                  id=id)
            # Parent linkage is resolved later in from_sql_fast.
            portSpec.db_parentType = parentType
            portSpec.db_entity_id = entity_id
            portSpec.db_entity_type = entity_type
            portSpec.db_parent = parent
            portSpec.is_dirty = False
            res[('portSpec', id)] = portSpec
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the SELECT used by get_sql_columns."""
        columns = ['id', 'name', 'type', 'optional', 'depth', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'port_spec'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Build DBPortSpec objects from pre-fetched rows (same mapping as
        get_sql_columns, minus the query)."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'varchar(255)')
            type = self.convertFromDB(row[2], 'str', 'varchar(255)')
            optional = self.convertFromDB(row[3], 'int', 'int')
            depth = self.convertFromDB(row[4], 'int', 'int')
            sort_key = self.convertFromDB(row[5], 'int', 'int')
            min_conns = self.convertFromDB(row[6], 'int', 'int')
            max_conns = self.convertFromDB(row[7], 'int', 'int')
            parentType = self.convertFromDB(row[8], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[9], 'long', 'int')
            entity_type = self.convertFromDB(row[10], 'str', 'char(16)')
            parent = self.convertFromDB(row[11], 'long', 'long')
            portSpec = DBPortSpec(name=name,
                                  type=type,
                                  optional=optional,
                                  depth=depth,
                                  sort_key=sort_key,
                                  min_conns=min_conns,
                                  max_conns=max_conns,
                                  id=id)
            portSpec.db_parentType = parentType
            portSpec.db_entity_id = entity_id
            portSpec.db_entity_type = entity_type
            portSpec.db_parent = parent
            portSpec.is_dirty = False
            res[('portSpec', id)] = portSpec
        return res
    def from_sql_fast(self, obj, all_objects):
        """Attach a loaded port spec to its parent object.

        A port spec can hang off a module, a module descriptor, or an
        add/change operation, dispatched on the stored parent type.
        """
        if obj.db_parentType == 'module':
            p = all_objects[('module', obj.db_parent)]
            p.db_add_portSpec(obj)
        elif obj.db_parentType == 'module_descriptor':
            p = all_objects[('module_descriptor', obj.db_parent)]
            p.db_add_portSpec(obj)
        elif obj.db_parentType == 'add':
            p = all_objects[('add', obj.db_parent)]
            p.db_add_data(obj)
        elif obj.db_parentType == 'change':
            p = all_objects[('change', obj.db_parent)]
            p.db_add_data(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Write this port spec's columns (INSERT when new/copying, else
        UPDATE keyed on id plus global_props)."""
        if not do_copy and not obj.is_dirty:
            return
        # NOTE(review): 'columns' is unused here (generator artifact).
        columns = ['id', 'name', 'type', 'optional', 'depth', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'port_spec'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        # Only non-None attributes are written.
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_type') and obj.db_type is not None:
            columnMap['type'] = \
                self.convertToDB(obj.db_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_optional') and obj.db_optional is not None:
            columnMap['optional'] = \
                self.convertToDB(obj.db_optional, 'int', 'int')
        if hasattr(obj, 'db_depth') and obj.db_depth is not None:
            columnMap['depth'] = \
                self.convertToDB(obj.db_depth, 'int', 'int')
        if hasattr(obj, 'db_sort_key') and obj.db_sort_key is not None:
            columnMap['sort_key'] = \
                self.convertToDB(obj.db_sort_key, 'int', 'int')
        if hasattr(obj, 'db_min_conns') and obj.db_min_conns is not None:
            columnMap['min_conns'] = \
                self.convertToDB(obj.db_min_conns, 'int', 'int')
        if hasattr(obj, 'db_max_conns') and obj.db_max_conns is not None:
            columnMap['max_conns'] = \
                self.convertToDB(obj.db_max_conns, 'int', 'int')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        # global_props override per-object values.
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for this port spec;
        returns None when the object is clean and do_copy is False."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'name', 'type', 'optional', 'depth', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'port_spec'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_type') and obj.db_type is not None:
            columnMap['type'] = \
                self.convertToDB(obj.db_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_optional') and obj.db_optional is not None:
            columnMap['optional'] = \
                self.convertToDB(obj.db_optional, 'int', 'int')
        if hasattr(obj, 'db_depth') and obj.db_depth is not None:
            columnMap['depth'] = \
                self.convertToDB(obj.db_depth, 'int', 'int')
        if hasattr(obj, 'db_sort_key') and obj.db_sort_key is not None:
            columnMap['sort_key'] = \
                self.convertToDB(obj.db_sort_key, 'int', 'int')
        if hasattr(obj, 'db_min_conns') and obj.db_min_conns is not None:
            columnMap['min_conns'] = \
                self.convertToDB(obj.db_min_conns, 'int', 'int')
        if hasattr(obj, 'db_max_conns') and obj.db_max_conns is not None:
            columnMap['max_conns'] = \
                self.convertToDB(obj.db_max_conns, 'int', 'int')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        # No post-insert bookkeeping needed for port specs.
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Stamp this port spec's id onto its child port-spec items."""
        for child in obj.db_portSpecItems:
            child.db_portSpec = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """Delete this port spec's row, keyed by id plus global_props."""
        table = 'port_spec'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBLogSQLDAOBase(SQLDAO):
    """Auto-generated DAO mapping DBLog objects to the 'log_tbl' table.

    Unlike child-entity DAOs, this one is a top-level entity DAO: loading
    or saving a log also publishes 'entity_id'/'entity_type' into
    global_props so that child DAOs can scope their queries.
    """
    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'log_tbl'
    def getDao(self, dao):
        """Return the sibling DAO registered under the given key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT matching log rows and return built DBLog objects.

        Side effect: writes 'entity_id' and 'entity_type' into
        global_props for each row read (last row wins).
        """
        columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
        table = 'log_tbl'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            # Publish this entity's key for child DAO queries.
            global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
            entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
            global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
            version = self.convertFromDB(row[2], 'str', 'char(16)')
            name = self.convertFromDB(row[3], 'str', 'varchar(255)')
            last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
            vistrail_id = self.convertFromDB(row[5], 'long', 'int')
            log = DBLog(entity_type=entity_type,
                        version=version,
                        name=name,
                        last_modified=last_modified,
                        vistrail_id=vistrail_id,
                        id=id)
            log.is_dirty = False
            res[('log', id)] = log
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the SELECT used by get_sql_columns."""
        columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
        table = 'log_tbl'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Build DBLog objects from pre-fetched rows; same side effect on
        global_props as get_sql_columns."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
            entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
            global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
            version = self.convertFromDB(row[2], 'str', 'char(16)')
            name = self.convertFromDB(row[3], 'str', 'varchar(255)')
            last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
            vistrail_id = self.convertFromDB(row[5], 'long', 'int')
            log = DBLog(entity_type=entity_type,
                        version=version,
                        name=name,
                        last_modified=last_modified,
                        vistrail_id=vistrail_id,
                        id=id)
            log.is_dirty = False
            res[('log', id)] = log
        return res
    def from_sql_fast(self, obj, all_objects):
        # A log is a root entity; it has no parent to attach to.
        pass
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Write this log's columns, capture a DB-generated id if needed,
        and publish entity_id/entity_type into global_props."""
        if not do_copy and not obj.is_dirty:
            return
        # NOTE(review): 'columns' is unused here (generator artifact).
        columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
        table = 'log_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'char(16)')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
            columnMap['last_modified'] = \
                self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
        if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None:
            columnMap['vistrail_id'] = \
                self.convertToDB(obj.db_vistrail_id, 'long', 'int')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
        # Adopt the database-generated key when inserting without an id.
        if obj.db_id is None:
            obj.db_id = lastId
            # NOTE(review): keyStr is computed but never used afterwards
            # (generator artifact).
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
        # Publish this entity's key so child DAO writes can reference it.
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for this log;
        returns None when the object is clean and do_copy is False."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
        table = 'log_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'char(16)')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
            columnMap['last_modified'] = \
                self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
        if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None:
            columnMap['vistrail_id'] = \
                self.convertToDB(obj.db_vistrail_id, 'long', 'int')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """Post-insert hook: adopt the generated id and publish the
        entity key into global_props (companion to set_sql_command)."""
        if obj.db_id is None:
            obj.db_id = lastId
            # NOTE(review): keyStr is computed but never used (generator
            # artifact), as is the trailing 'pass' below.
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Stamp this log's id onto its child workflow executions."""
        for child in obj.db_workflow_execs:
            child.db_log = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """Delete this log's row, keyed by id plus global_props."""
        table = 'log_tbl'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBLoopIterationSQLDAOBase(SQLDAO):
    """Auto-generated DAO mapping DBLoopIteration objects to the
    'loop_iteration' table."""
    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'loop_iteration'
    def getDao(self, dao):
        """Return the sibling DAO registered under the given key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT matching loop_iteration rows and return built objects."""
        columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_id', 'entity_id', 'entity_type']
        table = 'loop_iteration'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
            ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
            iteration = self.convertFromDB(row[3], 'int', 'int')
            completed = self.convertFromDB(row[4], 'int', 'int')
            error = self.convertFromDB(row[5], 'str', 'varchar(1023)')
            # NOTE(review): parent_id round-trips with python type 'str'
            # (vs 'long' elsewhere); matches the convertToDB calls below —
            # presumably a generator quirk, confirm before changing.
            parent = self.convertFromDB(row[6], 'str', 'int')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            loop_iteration = DBLoopIteration(ts_start=ts_start,
                                             ts_end=ts_end,
                                             iteration=iteration,
                                             completed=completed,
                                             error=error,
                                             id=id)
            loop_iteration.db_parent = parent
            loop_iteration.db_entity_id = entity_id
            loop_iteration.db_entity_type = entity_type
            loop_iteration.is_dirty = False
            res[('loop_iteration', id)] = loop_iteration
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the SELECT used by get_sql_columns."""
        columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_id', 'entity_id', 'entity_type']
        table = 'loop_iteration'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Build DBLoopIteration objects from pre-fetched rows."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
            ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
            iteration = self.convertFromDB(row[3], 'int', 'int')
            completed = self.convertFromDB(row[4], 'int', 'int')
            error = self.convertFromDB(row[5], 'str', 'varchar(1023)')
            parent = self.convertFromDB(row[6], 'str', 'int')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            loop_iteration = DBLoopIteration(ts_start=ts_start,
                                             ts_end=ts_end,
                                             iteration=iteration,
                                             completed=completed,
                                             error=error,
                                             id=id)
            loop_iteration.db_parent = parent
            loop_iteration.db_entity_id = entity_id
            loop_iteration.db_entity_type = entity_type
            loop_iteration.is_dirty = False
            res[('loop_iteration', id)] = loop_iteration
        return res
    def from_sql_fast(self, obj, all_objects):
        """Attach a loaded iteration to its parent loop_exec, if loaded."""
        if ('loop_exec', obj.db_parent) in all_objects:
            p = all_objects[('loop_exec', obj.db_parent)]
            p.db_add_loop_iteration(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Write this iteration's columns (INSERT when new/copying, else
        UPDATE keyed on id plus global_props)."""
        if not do_copy and not obj.is_dirty:
            return
        # NOTE(review): 'columns' is unused here (generator artifact).
        columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_id', 'entity_id', 'entity_type']
        table = 'loop_iteration'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
            columnMap['ts_start'] = \
                self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
        if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
            columnMap['ts_end'] = \
                self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
        if hasattr(obj, 'db_iteration') and obj.db_iteration is not None:
            columnMap['iteration'] = \
                self.convertToDB(obj.db_iteration, 'int', 'int')
        if hasattr(obj, 'db_completed') and obj.db_completed is not None:
            columnMap['completed'] = \
                self.convertToDB(obj.db_completed, 'int', 'int')
        if hasattr(obj, 'db_error') and obj.db_error is not None:
            columnMap['error'] = \
                self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'str', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for this iteration;
        returns None when the object is clean and do_copy is False."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_id', 'entity_id', 'entity_type']
        table = 'loop_iteration'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
            columnMap['ts_start'] = \
                self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
        if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
            columnMap['ts_end'] = \
                self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
        if hasattr(obj, 'db_iteration') and obj.db_iteration is not None:
            columnMap['iteration'] = \
                self.convertToDB(obj.db_iteration, 'int', 'int')
        if hasattr(obj, 'db_completed') and obj.db_completed is not None:
            columnMap['completed'] = \
                self.convertToDB(obj.db_completed, 'int', 'int')
        if hasattr(obj, 'db_error') and obj.db_error is not None:
            columnMap['error'] = \
                self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'str', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        # No post-insert bookkeeping needed for loop iterations.
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Stamp this iteration's type and id onto its child item execs."""
        for child in obj.db_item_execs:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """Delete this iteration's row, keyed by id plus global_props."""
        table = 'loop_iteration'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBPEParameterSQLDAOBase(SQLDAO):
    """Auto-generated DAO mapping DBPEParameter objects to the
    'pe_parameter' table."""
    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'pe_parameter'
    def getDao(self, dao):
        """Return the sibling DAO registered under the given key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT matching pe_parameter rows and return built objects."""
        columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
        table = 'pe_parameter'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            pos = self.convertFromDB(row[1], 'long', 'int')
            interpolator = self.convertFromDB(row[2], 'str', 'varchar(255)')
            value = self.convertFromDB(row[3], 'str', 'mediumtext')
            dimension = self.convertFromDB(row[4], 'long', 'int')
            parentType = self.convertFromDB(row[5], 'str', 'char(32)')
            # 'parent_id' holds the owning pe_function's id.
            pe_function = self.convertFromDB(row[6], 'long', 'int')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            pe_parameter = DBPEParameter(pos=pos,
                                         interpolator=interpolator,
                                         value=value,
                                         dimension=dimension,
                                         id=id)
            pe_parameter.db_parentType = parentType
            pe_parameter.db_pe_function = pe_function
            pe_parameter.db_entity_id = entity_id
            pe_parameter.db_entity_type = entity_type
            pe_parameter.is_dirty = False
            res[('pe_parameter', id)] = pe_parameter
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the SELECT used by get_sql_columns."""
        columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
        table = 'pe_parameter'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Build DBPEParameter objects from pre-fetched rows."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            pos = self.convertFromDB(row[1], 'long', 'int')
            interpolator = self.convertFromDB(row[2], 'str', 'varchar(255)')
            value = self.convertFromDB(row[3], 'str', 'mediumtext')
            dimension = self.convertFromDB(row[4], 'long', 'int')
            parentType = self.convertFromDB(row[5], 'str', 'char(32)')
            pe_function = self.convertFromDB(row[6], 'long', 'int')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            pe_parameter = DBPEParameter(pos=pos,
                                         interpolator=interpolator,
                                         value=value,
                                         dimension=dimension,
                                         id=id)
            pe_parameter.db_parentType = parentType
            pe_parameter.db_pe_function = pe_function
            pe_parameter.db_entity_id = entity_id
            pe_parameter.db_entity_type = entity_type
            pe_parameter.is_dirty = False
            res[('pe_parameter', id)] = pe_parameter
        return res
    def from_sql_fast(self, obj, all_objects):
        """Attach a loaded parameter to its parent pe_function, if loaded."""
        if ('pe_function', obj.db_pe_function) in all_objects:
            p = all_objects[('pe_function', obj.db_pe_function)]
            p.db_add_parameter(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Write this parameter's columns (INSERT when new/copying, else
        UPDATE keyed on id plus global_props)."""
        if not do_copy and not obj.is_dirty:
            return
        # NOTE(review): 'columns' is unused here (generator artifact).
        columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
        table = 'pe_parameter'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_pos') and obj.db_pos is not None:
            columnMap['pos'] = \
                self.convertToDB(obj.db_pos, 'long', 'int')
        if hasattr(obj, 'db_interpolator') and obj.db_interpolator is not None:
            columnMap['interpolator'] = \
                self.convertToDB(obj.db_interpolator, 'str', 'varchar(255)')
        if hasattr(obj, 'db_value') and obj.db_value is not None:
            columnMap['value'] = \
                self.convertToDB(obj.db_value, 'str', 'mediumtext')
        if hasattr(obj, 'db_dimension') and obj.db_dimension is not None:
            columnMap['dimension'] = \
                self.convertToDB(obj.db_dimension, 'long', 'int')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        # The owning pe_function maps to the 'parent_id' column.
        if hasattr(obj, 'db_pe_function') and obj.db_pe_function is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_pe_function, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for this parameter;
        returns None when the object is clean and do_copy is False."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
        table = 'pe_parameter'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_pos') and obj.db_pos is not None:
            columnMap['pos'] = \
                self.convertToDB(obj.db_pos, 'long', 'int')
        if hasattr(obj, 'db_interpolator') and obj.db_interpolator is not None:
            columnMap['interpolator'] = \
                self.convertToDB(obj.db_interpolator, 'str', 'varchar(255)')
        if hasattr(obj, 'db_value') and obj.db_value is not None:
            columnMap['value'] = \
                self.convertToDB(obj.db_value, 'str', 'mediumtext')
        if hasattr(obj, 'db_dimension') and obj.db_dimension is not None:
            columnMap['dimension'] = \
                self.convertToDB(obj.db_dimension, 'long', 'int')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_pe_function') and obj.db_pe_function is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_pe_function, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        # No post-insert bookkeeping needed for pe_parameters.
        pass
    def to_sql_fast(self, obj, do_copy=True):
        # pe_parameters are leaves; nothing to propagate to children.
        pass
    def delete_sql_column(self, db, obj, global_props):
        """Delete this parameter's row, keyed by id plus global_props."""
        table = 'pe_parameter'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBWorkflowExecSQLDAOBase(SQLDAO):
    """SQL DAO translating DBWorkflowExec objects to and from rows of the
    'workflow_exec' table.

    The generated code duplicated the column list and row-translation loop
    in get_sql_columns/process_sql_columns and the whole command build in
    set_sql_columns/set_sql_command; the read and write paths now delegate
    so each piece exists once.  Behavior is unchanged.
    """
    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'workflow_exec'
    def getDao(self, dao):
        """Look up a sibling DAO by its registry key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
        """Execute the table SELECT and return the loaded objects as a
        dict mapping ('workflow_exec', id) -> DBWorkflowExec."""
        dbCommand = self.get_sql_select(db, global_props, lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)
    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT for this table, ordered by
        id and filtered by global_props."""
        columns = ['id', 'user', 'ip', 'session', 'vt_version', 'ts_start', 'ts_end', 'parent_id', 'parent_type', 'parent_version', 'completed', 'name', 'log_id', 'entity_id', 'entity_type']
        table = 'workflow_exec'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Translate raw result rows into DBWorkflowExec objects; loaded
        objects are marked clean (is_dirty = False)."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            user = self.convertFromDB(row[1], 'str', 'varchar(255)')
            ip = self.convertFromDB(row[2], 'str', 'varchar(255)')
            session = self.convertFromDB(row[3], 'long', 'int')
            vt_version = self.convertFromDB(row[4], 'str', 'varchar(255)')
            ts_start = self.convertFromDB(row[5], 'datetime', 'datetime')
            ts_end = self.convertFromDB(row[6], 'datetime', 'datetime')
            parent_id = self.convertFromDB(row[7], 'long', 'int')
            parent_type = self.convertFromDB(row[8], 'str', 'varchar(255)')
            parent_version = self.convertFromDB(row[9], 'long', 'int')
            completed = self.convertFromDB(row[10], 'int', 'int')
            name = self.convertFromDB(row[11], 'str', 'varchar(255)')
            log = self.convertFromDB(row[12], 'long', 'int')
            entity_id = self.convertFromDB(row[13], 'long', 'int')
            entity_type = self.convertFromDB(row[14], 'str', 'char(16)')
            workflow_exec = DBWorkflowExec(user=user,
                                           ip=ip,
                                           session=session,
                                           vt_version=vt_version,
                                           ts_start=ts_start,
                                           ts_end=ts_end,
                                           parent_id=parent_id,
                                           parent_type=parent_type,
                                           parent_version=parent_version,
                                           completed=completed,
                                           name=name,
                                           id=id)
            workflow_exec.db_log = log
            workflow_exec.db_entity_id = entity_id
            workflow_exec.db_entity_type = entity_type
            workflow_exec.is_dirty = False
            res[('workflow_exec', id)] = workflow_exec
        return res
    def from_sql_fast(self, obj, all_objects):
        """Link obj to its parent log, when that log was also loaded."""
        if ('log', obj.db_log) in all_objects:
            p = all_objects[('log', obj.db_log)]
            p.db_add_workflow_exec(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Write obj to the database: INSERT for new/copied objects,
        UPDATE for dirty existing ones; no-op when obj is clean and
        do_copy is False (set_sql_command then returns None)."""
        dbCommand = self.set_sql_command(db, obj, global_props, do_copy)
        if dbCommand is not None:
            self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for obj; return
        None when obj is clean and do_copy is False."""
        if not do_copy and not obj.is_dirty:
            return None
        table = 'workflow_exec'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_user') and obj.db_user is not None:
            columnMap['user'] = self.convertToDB(obj.db_user, 'str', 'varchar(255)')
        if hasattr(obj, 'db_ip') and obj.db_ip is not None:
            columnMap['ip'] = self.convertToDB(obj.db_ip, 'str', 'varchar(255)')
        if hasattr(obj, 'db_session') and obj.db_session is not None:
            columnMap['session'] = self.convertToDB(obj.db_session, 'long', 'int')
        if hasattr(obj, 'db_vt_version') and obj.db_vt_version is not None:
            columnMap['vt_version'] = self.convertToDB(obj.db_vt_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
            columnMap['ts_start'] = self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
        if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
            columnMap['ts_end'] = self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
        if hasattr(obj, 'db_parent_id') and obj.db_parent_id is not None:
            columnMap['parent_id'] = self.convertToDB(obj.db_parent_id, 'long', 'int')
        if hasattr(obj, 'db_parent_type') and obj.db_parent_type is not None:
            columnMap['parent_type'] = self.convertToDB(obj.db_parent_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parent_version') and obj.db_parent_version is not None:
            columnMap['parent_version'] = self.convertToDB(obj.db_parent_version, 'long', 'int')
        if hasattr(obj, 'db_completed') and obj.db_completed is not None:
            columnMap['completed'] = self.convertToDB(obj.db_completed, 'int', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_log') and obj.db_log is not None:
            columnMap['log_id'] = self.convertToDB(obj.db_log, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """No post-write processing is needed for this table."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Push parent pointers down to child objects before they are
        written out."""
        for child in obj.db_annotations:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
        for child in obj.db_machines:
            child.db_workflow_exec = obj.db_id
        for child in obj.db_item_execs:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """DELETE obj's row, restricted by global_props."""
        table = 'workflow_exec'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBLocationSQLDAOBase(SQLDAO):
    """SQL DAO translating DBLocation objects to and from rows of the
    'location' table.

    Consolidated so the SELECT build, row translation, and write-command
    build each exist once (the generated code duplicated them in
    get_sql_columns and set_sql_columns).  Behavior is unchanged.
    """
    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'location'
    def getDao(self, dao):
        """Look up a sibling DAO by its registry key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
        """Execute the table SELECT and return the loaded objects as a
        dict mapping ('location', id) -> DBLocation."""
        dbCommand = self.get_sql_select(db, global_props, lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)
    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT for this table, ordered by
        id and filtered by global_props."""
        columns = ['id', 'x', 'y', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'location'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Translate raw result rows into DBLocation objects; loaded
        objects are marked clean (is_dirty = False)."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            x = self.convertFromDB(row[1], 'float', 'DECIMAL(18,12)')
            y = self.convertFromDB(row[2], 'float', 'DECIMAL(18,12)')
            parentType = self.convertFromDB(row[3], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[4], 'long', 'int')
            entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
            parent = self.convertFromDB(row[6], 'long', 'long')
            location = DBLocation(x=x,
                                  y=y,
                                  id=id)
            location.db_parentType = parentType
            location.db_entity_id = entity_id
            location.db_entity_type = entity_type
            location.db_parent = parent
            location.is_dirty = False
            res[('location', id)] = location
        return res
    def from_sql_fast(self, obj, all_objects):
        """Attach obj to its parent object (module-like parents receive it
        as a location; add/change operations receive it as data)."""
        ptype = obj.db_parentType
        if ptype in ('module', 'abstraction', 'group'):
            all_objects[(ptype, obj.db_parent)].db_add_location(obj)
        elif ptype in ('add', 'change'):
            all_objects[(ptype, obj.db_parent)].db_add_data(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Write obj to the database: INSERT for new/copied objects,
        UPDATE for dirty existing ones; no-op when obj is clean and
        do_copy is False (set_sql_command then returns None)."""
        dbCommand = self.set_sql_command(db, obj, global_props, do_copy)
        if dbCommand is not None:
            self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for obj; return
        None when obj is clean and do_copy is False."""
        if not do_copy and not obj.is_dirty:
            return None
        table = 'location'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_x') and obj.db_x is not None:
            columnMap['x'] = self.convertToDB(obj.db_x, 'float', 'DECIMAL(18,12)')
        if hasattr(obj, 'db_y') and obj.db_y is not None:
            columnMap['y'] = self.convertToDB(obj.db_y, 'float', 'DECIMAL(18,12)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """No post-write processing is needed for this table."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Locations have no child objects; nothing to propagate."""
        pass
    def delete_sql_column(self, db, obj, global_props):
        """DELETE obj's row, restricted by global_props."""
        table = 'location'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBFunctionSQLDAOBase(SQLDAO):
    """SQL DAO translating DBFunction objects to and from rows of the
    'function' table.

    Consolidated so the SELECT build, row translation, and write-command
    build each exist once (the generated code duplicated them in
    get_sql_columns and set_sql_columns).  Behavior is unchanged.
    """
    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'function'
    def getDao(self, dao):
        """Look up a sibling DAO by its registry key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
        """Execute the table SELECT and return the loaded objects as a
        dict mapping ('function', id) -> DBFunction."""
        dbCommand = self.get_sql_select(db, global_props, lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)
    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT for this table, ordered by
        id and filtered by global_props."""
        columns = ['id', 'pos', 'name', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'function'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Translate raw result rows into DBFunction objects; loaded
        objects are marked clean (is_dirty = False)."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            pos = self.convertFromDB(row[1], 'long', 'int')
            name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            parentType = self.convertFromDB(row[3], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[4], 'long', 'int')
            entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
            parent = self.convertFromDB(row[6], 'long', 'long')
            function = DBFunction(pos=pos,
                                  name=name,
                                  id=id)
            function.db_parentType = parentType
            function.db_entity_id = entity_id
            function.db_entity_type = entity_type
            function.db_parent = parent
            function.is_dirty = False
            res[('function', id)] = function
        return res
    def from_sql_fast(self, obj, all_objects):
        """Attach obj to its parent object (module-like parents receive it
        as a function; add/change operations receive it as data)."""
        ptype = obj.db_parentType
        if ptype in ('module', 'abstraction', 'group'):
            all_objects[(ptype, obj.db_parent)].db_add_function(obj)
        elif ptype in ('add', 'change'):
            all_objects[(ptype, obj.db_parent)].db_add_data(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Write obj to the database: INSERT for new/copied objects,
        UPDATE for dirty existing ones; no-op when obj is clean and
        do_copy is False (set_sql_command then returns None)."""
        dbCommand = self.set_sql_command(db, obj, global_props, do_copy)
        if dbCommand is not None:
            self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for obj; return
        None when obj is clean and do_copy is False."""
        if not do_copy and not obj.is_dirty:
            return None
        table = 'function'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_pos') and obj.db_pos is not None:
            columnMap['pos'] = self.convertToDB(obj.db_pos, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """No post-write processing is needed for this table."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Push parent pointers down to the function's parameters before
        they are written out."""
        for child in obj.db_parameters:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """DELETE obj's row, restricted by global_props."""
        table = 'function'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBActionAnnotationSQLDAOBase(SQLDAO):
    """SQL DAO translating DBActionAnnotation objects to and from rows of
    the 'action_annotation' table (the key column is named 'akey' to avoid
    a SQL reserved word).

    Consolidated so the SELECT build, row translation, and write-command
    build each exist once (the generated code duplicated them in
    get_sql_columns and set_sql_columns).  Behavior is unchanged.
    """
    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'action_annotation'
    def getDao(self, dao):
        """Look up a sibling DAO by its registry key."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
        """Execute the table SELECT and return the loaded objects as a
        dict mapping ('actionAnnotation', id) -> DBActionAnnotation."""
        dbCommand = self.get_sql_select(db, global_props, lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)
    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT for this table, ordered by
        id and filtered by global_props."""
        columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
        table = 'action_annotation'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Translate raw result rows into DBActionAnnotation objects;
        loaded objects are marked clean (is_dirty = False)."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            key = self.convertFromDB(row[1], 'str', 'varchar(255)')
            value = self.convertFromDB(row[2], 'str', 'varchar(8191)')
            action_id = self.convertFromDB(row[3], 'long', 'int')
            date = self.convertFromDB(row[4], 'datetime', 'datetime')
            user = self.convertFromDB(row[5], 'str', 'varchar(255)')
            vistrail = self.convertFromDB(row[6], 'long', 'int')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            actionAnnotation = DBActionAnnotation(key=key,
                                                  value=value,
                                                  action_id=action_id,
                                                  date=date,
                                                  user=user,
                                                  id=id)
            actionAnnotation.db_vistrail = vistrail
            actionAnnotation.db_entity_id = entity_id
            actionAnnotation.db_entity_type = entity_type
            actionAnnotation.is_dirty = False
            res[('actionAnnotation', id)] = actionAnnotation
        return res
    def from_sql_fast(self, obj, all_objects):
        """Attach obj to its parent vistrail, when that vistrail was also
        loaded."""
        if ('vistrail', obj.db_vistrail) in all_objects:
            p = all_objects[('vistrail', obj.db_vistrail)]
            p.db_add_actionAnnotation(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Write obj to the database: INSERT for new/copied objects,
        UPDATE for dirty existing ones; no-op when obj is clean and
        do_copy is False (set_sql_command then returns None)."""
        dbCommand = self.set_sql_command(db, obj, global_props, do_copy)
        if dbCommand is not None:
            self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for obj; return
        None when obj is clean and do_copy is False."""
        if not do_copy and not obj.is_dirty:
            return None
        table = 'action_annotation'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_key') and obj.db_key is not None:
            columnMap['akey'] = self.convertToDB(obj.db_key, 'str', 'varchar(255)')
        if hasattr(obj, 'db_value') and obj.db_value is not None:
            columnMap['value'] = self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
        if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
            columnMap['action_id'] = self.convertToDB(obj.db_action_id, 'long', 'int')
        if hasattr(obj, 'db_date') and obj.db_date is not None:
            columnMap['date'] = self.convertToDB(obj.db_date, 'datetime', 'datetime')
        if hasattr(obj, 'db_user') and obj.db_user is not None:
            columnMap['user'] = self.convertToDB(obj.db_user, 'str', 'varchar(255)')
        if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
            columnMap['parent_id'] = self.convertToDB(obj.db_vistrail, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """No post-write processing is needed for this table."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Action annotations have no child objects; nothing to propagate."""
        pass
    def delete_sql_column(self, db, obj, global_props):
        """DELETE obj's row, restricted by global_props."""
        table = 'action_annotation'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBControlParameterSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'control_parameter'
    def getDao(self, dao):
        # Look up a sibling DAO by its registry key (raises KeyError when
        # no DAO is registered under that key).
        return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'name', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'control_parameter'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'mediumtext')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
controlParameter = DBControlParameter(name=name,
value=value,
id=id)
controlParameter.db_parentType = parentType
controlParameter.db_entity_id = entity_id
controlParameter.db_entity_type = entity_type
controlParameter.db_parent = parent
controlParameter.is_dirty = False
res[('controlParameter', id)] = controlParameter
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'name', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'control_parameter'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'mediumtext')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
controlParameter = DBControlParameter(name=name,
value=value,
id=id)
controlParameter.db_parentType = parentType
controlParameter.db_entity_id = entity_id
controlParameter.db_entity_type = entity_type
controlParameter.db_parent = parent
controlParameter.is_dirty = False
res[('controlParameter', id)] = controlParameter
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'vistrail':
p = all_objects[('vistrail', obj.db_parent)]
p.db_add_controlParameter(obj)
elif obj.db_parentType == 'module':
p = all_objects[('module', obj.db_parent)]
p.db_add_controlParameter(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'abstraction':
p = all_objects[('abstraction', obj.db_parent)]
p.db_add_controlParameter(obj)
elif obj.db_parentType == 'group':
p = all_objects[('group', obj.db_parent)]
p.db_add_controlParameter(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'control_parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'mediumtext')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'control_parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'mediumtext')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'control_parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBPluginDataSQLDAOBase(SQLDAO):
    """SQL persistence layer for DBPluginData, mapped to the 'plugin_data' table."""

    # (column name, db_* attribute, python type, SQL type) for every column,
    # in the table's canonical order.
    _FIELDS = [('id', 'db_id', 'long', 'int'),
               ('data', 'db_data', 'str', 'varchar(8191)'),
               ('parent_type', 'db_parentType', 'str', 'char(32)'),
               ('entity_id', 'db_entity_id', 'long', 'int'),
               ('entity_type', 'db_entity_type', 'str', 'char(16)'),
               ('parent_id', 'db_parent', 'long', 'long')]

    def __init__(self, daoList):
        """Remember the registry of sibling DAOs and bind to 'plugin_data'."""
        self.daoList = daoList
        self.table = 'plugin_data'

    def getDao(self, dao):
        """Return the sibling DAO registered under `dao`."""
        return self.daoList[dao]

    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT over plugin_data rows."""
        column_names = [spec[0] for spec in self._FIELDS]
        return self.createSQLSelect('plugin_data', column_names,
                                    global_props, 'id', lock)

    def get_sql_columns(self, db, global_props, lock=False):
        """Fetch and materialize every plugin_data row matching global_props."""
        command = self.get_sql_select(db, global_props, lock)
        rows = self.executeSQL(db, command, True)
        return self.process_sql_columns(rows, global_props)

    def process_sql_columns(self, data, global_props):
        """Convert raw rows into DBPluginData objects keyed by ('plugin_data', id)."""
        res = {}
        for row in data:
            converted = {}
            for (column, attr, py_type, db_type), cell in zip(self._FIELDS, row):
                converted[attr] = self.convertFromDB(cell, py_type, db_type)
            obj = DBPluginData(data=converted['db_data'], id=converted['db_id'])
            obj.db_parentType = converted['db_parentType']
            obj.db_entity_id = converted['db_entity_id']
            obj.db_entity_type = converted['db_entity_type']
            obj.db_parent = converted['db_parent']
            obj.is_dirty = False
            res[('plugin_data', converted['db_id'])] = obj
        return res

    def from_sql_fast(self, obj, all_objects):
        """Attach obj to its already-loaded parent, keyed by (type, id)."""
        ptype = obj.db_parentType
        if ptype == 'workflow':
            all_objects[('workflow', obj.db_parent)].db_add_plugin_data(obj)
        elif ptype in ('add', 'change'):
            all_objects[(ptype, obj.db_parent)].db_add_data(obj)

    def _where_map(self, obj, global_props):
        """WHERE values: the global scope plus the primary key when known."""
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        return whereMap

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for obj; None if clean."""
        if not (do_copy or obj.is_dirty):
            return None
        columnMap = {}
        for column, attr, py_type, db_type in self._FIELDS:
            value = getattr(obj, attr, None)
            if value is not None:
                columnMap[column] = self.convertToDB(value, py_type, db_type)
        # Global properties (entity id/type) override per-object values.
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert('plugin_data', columnMap)
        return self.createSQLUpdate('plugin_data', columnMap,
                                    self._where_map(obj, global_props))

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Persist obj immediately: INSERT when new/copied, UPDATE otherwise."""
        command = self.set_sql_command(db, obj, global_props, do_copy)
        if command is not None:
            self.executeSQL(db, command, False)

    def set_sql_process(self, obj, global_props, lastId):
        """No post-write processing is required for plugin_data rows."""

    def to_sql_fast(self, obj, do_copy=True):
        """No child bookkeeping is required before writing plugin_data rows."""

    def delete_sql_column(self, db, obj, global_props):
        """DELETE obj's row from plugin_data, scoped by global_props."""
        whereMap = self._where_map(obj, global_props)
        self.executeSQL(db, self.createSQLDelete('plugin_data', whereMap), False)
class DBDeleteSQLDAOBase(SQLDAO):
    """SQL persistence layer for DBDelete operations, stored in 'delete_tbl'."""

    # (column name, db_* attribute, python type, SQL type) for every column,
    # in the table's canonical order.
    _FIELDS = [('id', 'db_id', 'long', 'int'),
               ('what', 'db_what', 'str', 'varchar(255)'),
               ('object_id', 'db_objectId', 'long', 'int'),
               ('par_obj_id', 'db_parentObjId', 'long', 'int'),
               ('par_obj_type', 'db_parentObjType', 'str', 'char(16)'),
               ('action_id', 'db_action', 'long', 'int'),
               ('entity_id', 'db_entity_id', 'long', 'int'),
               ('entity_type', 'db_entity_type', 'str', 'char(16)')]

    def __init__(self, daoList):
        """Remember the registry of sibling DAOs and bind to 'delete_tbl'."""
        self.daoList = daoList
        self.table = 'delete_tbl'

    def getDao(self, dao):
        """Return the sibling DAO registered under `dao`."""
        return self.daoList[dao]

    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT over delete_tbl rows."""
        column_names = [spec[0] for spec in self._FIELDS]
        return self.createSQLSelect('delete_tbl', column_names,
                                    global_props, 'id', lock)

    def get_sql_columns(self, db, global_props, lock=False):
        """Fetch and materialize every delete_tbl row matching global_props."""
        command = self.get_sql_select(db, global_props, lock)
        rows = self.executeSQL(db, command, True)
        return self.process_sql_columns(rows, global_props)

    def process_sql_columns(self, data, global_props):
        """Convert raw rows into DBDelete objects keyed by ('delete', id)."""
        res = {}
        for row in data:
            converted = {}
            for (column, attr, py_type, db_type), cell in zip(self._FIELDS, row):
                converted[attr] = self.convertFromDB(cell, py_type, db_type)
            delete = DBDelete(what=converted['db_what'],
                              objectId=converted['db_objectId'],
                              parentObjId=converted['db_parentObjId'],
                              parentObjType=converted['db_parentObjType'],
                              id=converted['db_id'])
            delete.db_action = converted['db_action']
            delete.db_entity_id = converted['db_entity_id']
            delete.db_entity_type = converted['db_entity_type']
            delete.is_dirty = False
            res[('delete', converted['db_id'])] = delete
        return res

    def from_sql_fast(self, obj, all_objects):
        """Attach the delete operation to its owning action, when loaded."""
        parent_key = ('action', obj.db_action)
        if parent_key in all_objects:
            all_objects[parent_key].db_add_operation(obj)

    def _where_map(self, obj, global_props):
        """WHERE values: the global scope plus the primary key when known."""
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        return whereMap

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for obj; None if clean."""
        if not (do_copy or obj.is_dirty):
            return None
        columnMap = {}
        for column, attr, py_type, db_type in self._FIELDS:
            value = getattr(obj, attr, None)
            if value is not None:
                columnMap[column] = self.convertToDB(value, py_type, db_type)
        # Global properties (entity id/type) override per-object values.
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert('delete_tbl', columnMap)
        return self.createSQLUpdate('delete_tbl', columnMap,
                                    self._where_map(obj, global_props))

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Persist obj immediately: INSERT when new/copied, UPDATE otherwise."""
        command = self.set_sql_command(db, obj, global_props, do_copy)
        if command is not None:
            self.executeSQL(db, command, False)

    def set_sql_process(self, obj, global_props, lastId):
        """No post-write processing is required for delete_tbl rows."""

    def to_sql_fast(self, obj, do_copy=True):
        """No child bookkeeping is required before writing delete_tbl rows."""

    def delete_sql_column(self, db, obj, global_props):
        """DELETE obj's row from delete_tbl, scoped by global_props."""
        whereMap = self._where_map(obj, global_props)
        self.executeSQL(db, self.createSQLDelete('delete_tbl', whereMap), False)
class DBVistrailVariableSQLDAOBase(SQLDAO):
    """SQL persistence layer for DBVistrailVariable ('vistrail_variable' table).

    Unlike most DAOs in this file, the primary key here is the textual
    'name' column rather than a numeric id.
    """

    # (column name, db_* attribute, python type, SQL type) for every column,
    # in the table's canonical order.
    _FIELDS = [('name', 'db_name', 'str', 'varchar(255)'),
               ('uuid', 'db_uuid', 'str', 'char(36)'),
               ('package', 'db_package', 'str', 'varchar(255)'),
               ('module', 'db_module', 'str', 'varchar(255)'),
               ('namespace', 'db_namespace', 'str', 'varchar(255)'),
               ('value', 'db_value', 'str', 'varchar(8191)'),
               ('parent_id', 'db_vistrail', 'long', 'int'),
               ('entity_id', 'db_entity_id', 'long', 'int'),
               ('entity_type', 'db_entity_type', 'str', 'char(16)')]

    def __init__(self, daoList):
        """Remember the registry of sibling DAOs and bind to 'vistrail_variable'."""
        self.daoList = daoList
        self.table = 'vistrail_variable'

    def getDao(self, dao):
        """Return the sibling DAO registered under `dao`."""
        return self.daoList[dao]

    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT over vistrail_variable rows."""
        column_names = [spec[0] for spec in self._FIELDS]
        return self.createSQLSelect('vistrail_variable', column_names,
                                    global_props, 'name', lock)

    def get_sql_columns(self, db, global_props, lock=False):
        """Fetch and materialize every vistrail_variable row matching global_props."""
        command = self.get_sql_select(db, global_props, lock)
        rows = self.executeSQL(db, command, True)
        return self.process_sql_columns(rows, global_props)

    def process_sql_columns(self, data, global_props):
        """Convert raw rows into DBVistrailVariable objects keyed by
        ('vistrailVariable', name)."""
        res = {}
        for row in data:
            converted = {}
            for (column, attr, py_type, db_type), cell in zip(self._FIELDS, row):
                converted[attr] = self.convertFromDB(cell, py_type, db_type)
            var = DBVistrailVariable(uuid=converted['db_uuid'],
                                     package=converted['db_package'],
                                     module=converted['db_module'],
                                     namespace=converted['db_namespace'],
                                     value=converted['db_value'],
                                     name=converted['db_name'])
            var.db_vistrail = converted['db_vistrail']
            var.db_entity_id = converted['db_entity_id']
            var.db_entity_type = converted['db_entity_type']
            var.is_dirty = False
            res[('vistrailVariable', converted['db_name'])] = var
        return res

    def from_sql_fast(self, obj, all_objects):
        """Attach the variable to its vistrail parent, when loaded."""
        parent_key = ('vistrail', obj.db_vistrail)
        if parent_key in all_objects:
            all_objects[parent_key].db_add_vistrailVariable(obj)

    def _where_map(self, obj, global_props):
        """WHERE values: the global scope plus the 'name' key when known."""
        whereMap = dict(global_props)
        if obj.db_name is not None:
            whereMap['name'] = self.convertToDB(obj.db_name, 'str',
                                                'varchar(255)')
        return whereMap

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for obj; None if clean."""
        if not (do_copy or obj.is_dirty):
            return None
        columnMap = {}
        for column, attr, py_type, db_type in self._FIELDS:
            value = getattr(obj, attr, None)
            if value is not None:
                columnMap[column] = self.convertToDB(value, py_type, db_type)
        # Global properties (entity id/type) override per-object values.
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert('vistrail_variable', columnMap)
        return self.createSQLUpdate('vistrail_variable', columnMap,
                                    self._where_map(obj, global_props))

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Persist obj immediately: INSERT when new/copied, UPDATE otherwise."""
        command = self.set_sql_command(db, obj, global_props, do_copy)
        if command is not None:
            self.executeSQL(db, command, False)

    def set_sql_process(self, obj, global_props, lastId):
        """No post-write processing is required for vistrail_variable rows."""

    def to_sql_fast(self, obj, do_copy=True):
        """No child bookkeeping is required before writing vistrail_variable rows."""

    def delete_sql_column(self, db, obj, global_props):
        """DELETE obj's row from vistrail_variable, scoped by global_props."""
        whereMap = self._where_map(obj, global_props)
        self.executeSQL(db, self.createSQLDelete('vistrail_variable', whereMap),
                        False)
class DBModuleDescriptorSQLDAOBase(SQLDAO):
    """SQL persistence for DBModuleDescriptor via the 'module_descriptor' table.

    NOTE(review): in the row-reading methods below the local name `package`
    is bound twice -- first to the textual 'package' column (row[2]), then
    to the numeric 'parent_id' column (row[7]).  The string package name is
    therefore discarded and the constructor's `package=` argument receives
    the parent package id.  `from_sql_fast` does rely on `db_package` being
    the parent id, but `set_sql_columns`/`set_sql_command` convert the same
    `db_package` value both as a varchar 'package' column and as a long
    'parent_id' column.  This looks like a code-generator quirk -- confirm
    against the schema/generator before changing.
    """
    def __init__(self, daoList):
        # Registry of sibling DAOs, keyed by type name.
        self.daoList = daoList
        self.table = 'module_descriptor'
    def getDao(self, dao):
        """Return the sibling DAO registered under `dao`."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT all module_descriptor rows matching global_props and build
        DBModuleDescriptor objects keyed by ('module_descriptor', id)."""
        columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
        table = 'module_descriptor'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'varchar(255)')
            package = self.convertFromDB(row[2], 'str', 'varchar(255)')
            namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
            package_version = self.convertFromDB(row[4], 'str', 'varchar(255)')
            version = self.convertFromDB(row[5], 'str', 'varchar(255)')
            base_descriptor_id = self.convertFromDB(row[6], 'long', 'int')
            # NOTE(review): rebinds `package` to the parent_id column; the
            # string package name read from row[2] above is never used.
            package = self.convertFromDB(row[7], 'long', 'int')
            entity_id = self.convertFromDB(row[8], 'long', 'int')
            entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
            module_descriptor = DBModuleDescriptor(name=name,
                                                   package=package,
                                                   namespace=namespace,
                                                   package_version=package_version,
                                                   version=version,
                                                   base_descriptor_id=base_descriptor_id,
                                                   id=id)
            module_descriptor.db_package = package
            module_descriptor.db_entity_id = entity_id
            module_descriptor.db_entity_type = entity_type
            module_descriptor.is_dirty = False
            res[('module_descriptor', id)] = module_descriptor
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the SELECT over module_descriptor rows."""
        columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
        table = 'module_descriptor'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Convert already-fetched rows into DBModuleDescriptor objects.

        Same row layout (and same `package` rebinding quirk) as
        get_sql_columns above.
        """
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'varchar(255)')
            package = self.convertFromDB(row[2], 'str', 'varchar(255)')
            namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
            package_version = self.convertFromDB(row[4], 'str', 'varchar(255)')
            version = self.convertFromDB(row[5], 'str', 'varchar(255)')
            base_descriptor_id = self.convertFromDB(row[6], 'long', 'int')
            # NOTE(review): rebinds `package` to the parent_id column; the
            # string package name read from row[2] above is never used.
            package = self.convertFromDB(row[7], 'long', 'int')
            entity_id = self.convertFromDB(row[8], 'long', 'int')
            entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
            module_descriptor = DBModuleDescriptor(name=name,
                                                   package=package,
                                                   namespace=namespace,
                                                   package_version=package_version,
                                                   version=version,
                                                   base_descriptor_id=base_descriptor_id,
                                                   id=id)
            module_descriptor.db_package = package
            module_descriptor.db_entity_id = entity_id
            module_descriptor.db_entity_type = entity_type
            module_descriptor.is_dirty = False
            res[('module_descriptor', id)] = module_descriptor
        return res
    def from_sql_fast(self, obj, all_objects):
        """Attach the descriptor to its parent package, when loaded.

        Here db_package is used as the parent package's id (see class note).
        """
        if ('package', obj.db_package) in all_objects:
            p = all_objects[('package', obj.db_package)]
            p.db_add_module_descriptor(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Persist obj immediately: INSERT when new/copied, UPDATE otherwise.

        Skips the write entirely when the object is clean and no copy was
        requested.
        """
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
        table = 'module_descriptor'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        # NOTE(review): db_package is converted twice below -- once as the
        # varchar 'package' column and once as the long 'parent_id' column
        # (see class note).
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['package'] = \
                self.convertToDB(obj.db_package, 'str', 'varchar(255)')
        if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
            columnMap['namespace'] = \
                self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
        if hasattr(obj, 'db_package_version') and obj.db_package_version is not None:
            columnMap['package_version'] = \
                self.convertToDB(obj.db_package_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_base_descriptor_id') and obj.db_base_descriptor_id is not None:
            columnMap['base_descriptor_id'] = \
                self.convertToDB(obj.db_base_descriptor_id, 'long', 'int')
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_package, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        # Global properties (entity id/type) override per-object values.
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE for obj.

        Returns None when the object is clean and no copy was requested.
        Mirrors set_sql_columns, including the double use of db_package.
        """
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
        table = 'module_descriptor'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['package'] = \
                self.convertToDB(obj.db_package, 'str', 'varchar(255)')
        if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
            columnMap['namespace'] = \
                self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
        if hasattr(obj, 'db_package_version') and obj.db_package_version is not None:
            columnMap['package_version'] = \
                self.convertToDB(obj.db_package_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'varchar(255)')
        if hasattr(obj, 'db_base_descriptor_id') and obj.db_base_descriptor_id is not None:
            columnMap['base_descriptor_id'] = \
                self.convertToDB(obj.db_base_descriptor_id, 'long', 'int')
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_package, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        # Global properties (entity id/type) override per-object values.
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        # No post-write processing is required for module_descriptor rows.
        pass
    def to_sql_fast(self, obj, do_copy=True):
        # Stamp each child port spec with this descriptor's type and id so
        # the child DAO can write the correct parent columns.
        for child in obj.db_portSpecs:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """DELETE obj's row from module_descriptor, scoped by global_props."""
        table = 'module_descriptor'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBTagSQLDAOBase(SQLDAO):
    """SQL persistence for DBTag objects, stored in the 'tag' table.

    The read path is get_sql_select -> executeSQL -> process_sql_columns;
    the write path builds a command in set_sql_command and executes it in
    set_sql_columns.  (Previously each pair duplicated the other's body.)
    """

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'tag'

    def getDao(self, dao):
        """Return the sibling DAO registered under the given key."""
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props, lock=False):
        """Load every tag row matching global_props and return DBTag
        objects keyed by ('tag', id)."""
        dbCommand = self.get_sql_select(db, global_props, lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)

    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT over the tag table,
        filtered by global_props and ordered by id."""
        columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
        return self.createSQLSelect('tag', columns, global_props, 'id', lock)

    def process_sql_columns(self, data, global_props):
        """Convert raw tag rows into DBTag objects keyed by ('tag', id)."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'varchar(255)')
            vistrail = self.convertFromDB(row[2], 'long', 'int')
            entity_id = self.convertFromDB(row[3], 'long', 'int')
            entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
            tag = DBTag(name=name,
                        id=id)
            tag.db_vistrail = vistrail
            tag.db_entity_id = entity_id
            tag.db_entity_type = entity_type
            tag.is_dirty = False
            res[('tag', id)] = tag
        return res

    def from_sql_fast(self, obj, all_objects):
        """Attach this tag to its parent vistrail when that parent is in
        the loaded-object map."""
        if ('vistrail', obj.db_vistrail) in all_objects:
            p = all_objects[('vistrail', obj.db_vistrail)]
            p.db_add_tag(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Write obj to the database; a clean object with do_copy=False is
        a no-op."""
        dbCommand = self.set_sql_command(db, obj, global_props, do_copy)
        if dbCommand is not None:
            self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT or UPDATE for obj.

        Returns None when obj is clean and do_copy is False (nothing to
        write).  New or copied objects become INSERTs; existing dirty
        objects become UPDATEs keyed on id plus the global properties.
        """
        if not do_copy and not obj.is_dirty:
            return None
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_vistrail, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert('tag', columnMap)
        return self.createSQLUpdate('tag', columnMap, whereMap)

    def set_sql_process(self, obj, global_props, lastId):
        """Post-write hook; tags need no post-processing."""
        pass

    def to_sql_fast(self, obj, do_copy=True):
        """Tags have no children to prepare before writing."""
        pass

    def delete_sql_column(self, db, obj, global_props):
        """Delete obj's row from the tag table."""
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        dbCommand = self.createSQLDelete('tag', whereMap)
        self.executeSQL(db, dbCommand, False)
class DBPortSpecItemSQLDAOBase(SQLDAO):
    """SQL persistence for DBPortSpecItem objects ('port_spec_item' table).

    Read path: get_sql_select -> executeSQL -> process_sql_columns.
    Write path: set_sql_command builds the statement, set_sql_columns
    executes it.  (Previously each pair duplicated the other's body.)
    """

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'port_spec_item'

    def getDao(self, dao):
        """Return the sibling DAO registered under the given key."""
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props, lock=False):
        """Load every port_spec_item row matching global_props and return
        DBPortSpecItem objects keyed by ('portSpecItem', id)."""
        dbCommand = self.get_sql_select(db, global_props, lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)

    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT over port_spec_item,
        filtered by global_props and ordered by id."""
        columns = ['id', 'pos', 'module', 'package', 'namespace', 'label',
                   '_default', '_values', 'entry_type', 'parent_id',
                   'entity_id', 'entity_type']
        return self.createSQLSelect('port_spec_item', columns, global_props,
                                    'id', lock)

    def process_sql_columns(self, data, global_props):
        """Convert raw port_spec_item rows into DBPortSpecItem objects
        keyed by ('portSpecItem', id)."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            pos = self.convertFromDB(row[1], 'long', 'int')
            module = self.convertFromDB(row[2], 'str', 'varchar(255)')
            package = self.convertFromDB(row[3], 'str', 'varchar(255)')
            namespace = self.convertFromDB(row[4], 'str', 'varchar(255)')
            label = self.convertFromDB(row[5], 'str', 'varchar(4095)')
            default = self.convertFromDB(row[6], 'str', 'varchar(4095)')
            values = self.convertFromDB(row[7], 'str', 'mediumtext')
            entry_type = self.convertFromDB(row[8], 'str', 'varchar(255)')
            portSpec = self.convertFromDB(row[9], 'long', 'int')
            entity_id = self.convertFromDB(row[10], 'long', 'int')
            entity_type = self.convertFromDB(row[11], 'str', 'char(16)')
            portSpecItem = DBPortSpecItem(pos=pos,
                                          module=module,
                                          package=package,
                                          namespace=namespace,
                                          label=label,
                                          default=default,
                                          values=values,
                                          entry_type=entry_type,
                                          id=id)
            portSpecItem.db_portSpec = portSpec
            portSpecItem.db_entity_id = entity_id
            portSpecItem.db_entity_type = entity_type
            portSpecItem.is_dirty = False
            res[('portSpecItem', id)] = portSpecItem
        return res

    def from_sql_fast(self, obj, all_objects):
        """Attach this item to its parent port spec when that parent is in
        the loaded-object map."""
        if ('portSpec', obj.db_portSpec) in all_objects:
            p = all_objects[('portSpec', obj.db_portSpec)]
            p.db_add_portSpecItem(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Write obj to the database; a clean object with do_copy=False is
        a no-op."""
        dbCommand = self.set_sql_command(db, obj, global_props, do_copy)
        if dbCommand is not None:
            self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT or UPDATE for obj.

        Returns None when obj is clean and do_copy is False.  New or
        copied objects become INSERTs; existing dirty objects become
        UPDATEs keyed on id plus the global properties.
        """
        if not do_copy and not obj.is_dirty:
            return None
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_pos') and obj.db_pos is not None:
            columnMap['pos'] = \
                self.convertToDB(obj.db_pos, 'long', 'int')
        if hasattr(obj, 'db_module') and obj.db_module is not None:
            columnMap['module'] = \
                self.convertToDB(obj.db_module, 'str', 'varchar(255)')
        if hasattr(obj, 'db_package') and obj.db_package is not None:
            columnMap['package'] = \
                self.convertToDB(obj.db_package, 'str', 'varchar(255)')
        if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
            columnMap['namespace'] = \
                self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
        if hasattr(obj, 'db_label') and obj.db_label is not None:
            columnMap['label'] = \
                self.convertToDB(obj.db_label, 'str', 'varchar(4095)')
        if hasattr(obj, 'db_default') and obj.db_default is not None:
            columnMap['_default'] = \
                self.convertToDB(obj.db_default, 'str', 'varchar(4095)')
        if hasattr(obj, 'db_values') and obj.db_values is not None:
            columnMap['_values'] = \
                self.convertToDB(obj.db_values, 'str', 'mediumtext')
        if hasattr(obj, 'db_entry_type') and obj.db_entry_type is not None:
            columnMap['entry_type'] = \
                self.convertToDB(obj.db_entry_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_portSpec') and obj.db_portSpec is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_portSpec, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert('port_spec_item', columnMap)
        return self.createSQLUpdate('port_spec_item', columnMap, whereMap)

    def set_sql_process(self, obj, global_props, lastId):
        """Post-write hook; port spec items need no post-processing."""
        pass

    def to_sql_fast(self, obj, do_copy=True):
        """Port spec items have no children to prepare before writing."""
        pass

    def delete_sql_column(self, db, obj, global_props):
        """Delete obj's row from the port_spec_item table."""
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        dbCommand = self.createSQLDelete('port_spec_item', whereMap)
        self.executeSQL(db, dbCommand, False)
class DBMashupComponentSQLDAOBase(SQLDAO):
    """SQL persistence for DBMashupComponent objects ('mashup_component').

    Read path: get_sql_select -> executeSQL -> process_sql_columns.
    Write path: set_sql_command builds the statement, set_sql_columns
    executes it.  (Previously each pair duplicated the other's body.)
    """

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'mashup_component'

    def getDao(self, dao):
        """Return the sibling DAO registered under the given key."""
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props, lock=False):
        """Load every mashup_component row matching global_props and
        return DBMashupComponent objects keyed by ('mashup_component', id)."""
        dbCommand = self.get_sql_select(db, global_props, lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)

    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT over mashup_component,
        filtered by global_props and ordered by id."""
        columns = ['id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id',
                   'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal',
                   'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq',
                   'parent', 'alias_id', 'entity_id', 'entity_type']
        return self.createSQLSelect('mashup_component', columns,
                                    global_props, 'id', lock)

    def process_sql_columns(self, data, global_props):
        """Convert raw mashup_component rows into DBMashupComponent
        objects keyed by ('mashup_component', id)."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            vtid = self.convertFromDB(row[1], 'long', 'int')
            vttype = self.convertFromDB(row[2], 'str', 'varchar(255)')
            vtparent_type = self.convertFromDB(row[3], 'str', 'char(32)')
            vtparent_id = self.convertFromDB(row[4], 'long', 'int')
            vtpos = self.convertFromDB(row[5], 'long', 'int')
            vtmid = self.convertFromDB(row[6], 'long', 'int')
            pos = self.convertFromDB(row[7], 'long', 'int')
            type = self.convertFromDB(row[8], 'str', 'varchar(255)')
            val = self.convertFromDB(row[9], 'str', 'mediumtext')
            minVal = self.convertFromDB(row[10], 'str', 'varchar(255)')
            maxVal = self.convertFromDB(row[11], 'str', 'varchar(255)')
            stepSize = self.convertFromDB(row[12], 'str', 'varchar(255)')
            strvaluelist = self.convertFromDB(row[13], 'str', 'mediumtext')
            widget = self.convertFromDB(row[14], 'str', 'varchar(255)')
            seq = self.convertFromDB(row[15], 'int', 'int')
            parent = self.convertFromDB(row[16], 'str', 'varchar(255)')
            mashup_alias = self.convertFromDB(row[17], 'long', 'int')
            entity_id = self.convertFromDB(row[18], 'long', 'int')
            entity_type = self.convertFromDB(row[19], 'str', 'char(16)')
            mashup_component = DBMashupComponent(vtid=vtid,
                                                 vttype=vttype,
                                                 vtparent_type=vtparent_type,
                                                 vtparent_id=vtparent_id,
                                                 vtpos=vtpos,
                                                 vtmid=vtmid,
                                                 pos=pos,
                                                 type=type,
                                                 val=val,
                                                 minVal=minVal,
                                                 maxVal=maxVal,
                                                 stepSize=stepSize,
                                                 strvaluelist=strvaluelist,
                                                 widget=widget,
                                                 seq=seq,
                                                 parent=parent,
                                                 id=id)
            mashup_component.db_mashup_alias = mashup_alias
            mashup_component.db_entity_id = entity_id
            mashup_component.db_entity_type = entity_type
            mashup_component.is_dirty = False
            res[('mashup_component', id)] = mashup_component
        return res

    def from_sql_fast(self, obj, all_objects):
        """Attach this component to its mashup alias when that alias is in
        the loaded-object map."""
        if ('mashup_alias', obj.db_mashup_alias) in all_objects:
            p = all_objects[('mashup_alias', obj.db_mashup_alias)]
            p.db_add_component(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Write obj to the database; a clean object with do_copy=False is
        a no-op."""
        dbCommand = self.set_sql_command(db, obj, global_props, do_copy)
        if dbCommand is not None:
            self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT or UPDATE for obj.

        Returns None when obj is clean and do_copy is False.  New or
        copied objects become INSERTs; existing dirty objects become
        UPDATEs keyed on id plus the global properties.
        """
        if not do_copy and not obj.is_dirty:
            return None
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_vtid') and obj.db_vtid is not None:
            columnMap['vtid'] = \
                self.convertToDB(obj.db_vtid, 'long', 'int')
        if hasattr(obj, 'db_vttype') and obj.db_vttype is not None:
            columnMap['vttype'] = \
                self.convertToDB(obj.db_vttype, 'str', 'varchar(255)')
        if hasattr(obj, 'db_vtparent_type') and obj.db_vtparent_type is not None:
            columnMap['vtparent_type'] = \
                self.convertToDB(obj.db_vtparent_type, 'str', 'char(32)')
        if hasattr(obj, 'db_vtparent_id') and obj.db_vtparent_id is not None:
            columnMap['vtparent_id'] = \
                self.convertToDB(obj.db_vtparent_id, 'long', 'int')
        if hasattr(obj, 'db_vtpos') and obj.db_vtpos is not None:
            columnMap['vtpos'] = \
                self.convertToDB(obj.db_vtpos, 'long', 'int')
        if hasattr(obj, 'db_vtmid') and obj.db_vtmid is not None:
            columnMap['vtmid'] = \
                self.convertToDB(obj.db_vtmid, 'long', 'int')
        if hasattr(obj, 'db_pos') and obj.db_pos is not None:
            columnMap['pos'] = \
                self.convertToDB(obj.db_pos, 'long', 'int')
        if hasattr(obj, 'db_type') and obj.db_type is not None:
            columnMap['type'] = \
                self.convertToDB(obj.db_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_val') and obj.db_val is not None:
            columnMap['val'] = \
                self.convertToDB(obj.db_val, 'str', 'mediumtext')
        if hasattr(obj, 'db_minVal') and obj.db_minVal is not None:
            columnMap['minVal'] = \
                self.convertToDB(obj.db_minVal, 'str', 'varchar(255)')
        if hasattr(obj, 'db_maxVal') and obj.db_maxVal is not None:
            columnMap['maxVal'] = \
                self.convertToDB(obj.db_maxVal, 'str', 'varchar(255)')
        if hasattr(obj, 'db_stepSize') and obj.db_stepSize is not None:
            columnMap['stepSize'] = \
                self.convertToDB(obj.db_stepSize, 'str', 'varchar(255)')
        if hasattr(obj, 'db_strvaluelist') and obj.db_strvaluelist is not None:
            columnMap['strvaluelist'] = \
                self.convertToDB(obj.db_strvaluelist, 'str', 'mediumtext')
        if hasattr(obj, 'db_widget') and obj.db_widget is not None:
            columnMap['widget'] = \
                self.convertToDB(obj.db_widget, 'str', 'varchar(255)')
        if hasattr(obj, 'db_seq') and obj.db_seq is not None:
            columnMap['seq'] = \
                self.convertToDB(obj.db_seq, 'int', 'int')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent'] = \
                self.convertToDB(obj.db_parent, 'str', 'varchar(255)')
        if hasattr(obj, 'db_mashup_alias') and obj.db_mashup_alias is not None:
            columnMap['alias_id'] = \
                self.convertToDB(obj.db_mashup_alias, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert('mashup_component', columnMap)
        return self.createSQLUpdate('mashup_component', columnMap, whereMap)

    def set_sql_process(self, obj, global_props, lastId):
        """Post-write hook; mashup components need no post-processing."""
        pass

    def to_sql_fast(self, obj, do_copy=True):
        """Mashup components have no children to prepare before writing."""
        pass

    def delete_sql_column(self, db, obj, global_props):
        """Delete obj's row from the mashup_component table."""
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        dbCommand = self.createSQLDelete('mashup_component', whereMap)
        self.executeSQL(db, dbCommand, False)
class DBMashupSQLDAOBase(SQLDAO):
    """SQL persistence for DBMashup objects, stored in the 'mashup' table.

    Read path: get_sql_select -> executeSQL -> process_sql_columns.
    Write path: set_sql_command builds the statement, set_sql_columns
    executes it.  (Previously each pair duplicated the other's body.)
    """

    def __init__(self, daoList):
        self.daoList = daoList
        self.table = 'mashup'

    def getDao(self, dao):
        """Return the sibling DAO registered under the given key."""
        return self.daoList[dao]

    def get_sql_columns(self, db, global_props, lock=False):
        """Load every mashup row matching global_props and return DBMashup
        objects keyed by ('mashup', id)."""
        dbCommand = self.get_sql_select(db, global_props, lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)

    def get_sql_select(self, db, global_props, lock=False):
        """Build (without executing) the SELECT over the mashup table,
        filtered by global_props and ordered by id."""
        columns = ['id', 'name', 'version', 'type', 'vtid', 'layout',
                   'geometry', 'has_seq', 'parent_id', 'entity_id',
                   'entity_type']
        return self.createSQLSelect('mashup', columns, global_props,
                                    'id', lock)

    def process_sql_columns(self, data, global_props):
        """Convert raw mashup rows into DBMashup objects keyed by
        ('mashup', id)."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'varchar(255)')
            version = self.convertFromDB(row[2], 'long', 'int')
            type = self.convertFromDB(row[3], 'str', 'varchar(255)')
            vtid = self.convertFromDB(row[4], 'long', 'int')
            layout = self.convertFromDB(row[5], 'str', 'mediumtext')
            geometry = self.convertFromDB(row[6], 'str', 'mediumtext')
            has_seq = self.convertFromDB(row[7], 'int', 'int')
            parent = self.convertFromDB(row[8], 'long', 'int')
            entity_id = self.convertFromDB(row[9], 'long', 'int')
            entity_type = self.convertFromDB(row[10], 'str', 'char(16)')
            mashup = DBMashup(name=name,
                              version=version,
                              type=type,
                              vtid=vtid,
                              layout=layout,
                              geometry=geometry,
                              has_seq=has_seq,
                              id=id)
            mashup.db_parent = parent
            mashup.db_entity_id = entity_id
            mashup.db_entity_type = entity_type
            mashup.is_dirty = False
            res[('mashup', id)] = mashup
        return res

    def from_sql_fast(self, obj, all_objects):
        """Attach this mashup to its parent mashup action when that parent
        is in the loaded-object map."""
        if ('mashup_action', obj.db_parent) in all_objects:
            p = all_objects[('mashup_action', obj.db_parent)]
            p.db_add_mashup(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Write obj to the database; a clean object with do_copy=False is
        a no-op."""
        dbCommand = self.set_sql_command(db, obj, global_props, do_copy)
        if dbCommand is not None:
            self.executeSQL(db, dbCommand, False)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT or UPDATE for obj.

        Returns None when obj is clean and do_copy is False.  New or
        copied objects become INSERTs; existing dirty objects become
        UPDATEs keyed on id plus the global properties.
        """
        if not do_copy and not obj.is_dirty:
            return None
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'long', 'int')
        if hasattr(obj, 'db_type') and obj.db_type is not None:
            columnMap['type'] = \
                self.convertToDB(obj.db_type, 'str', 'varchar(255)')
        if hasattr(obj, 'db_vtid') and obj.db_vtid is not None:
            columnMap['vtid'] = \
                self.convertToDB(obj.db_vtid, 'long', 'int')
        if hasattr(obj, 'db_layout') and obj.db_layout is not None:
            columnMap['layout'] = \
                self.convertToDB(obj.db_layout, 'str', 'mediumtext')
        if hasattr(obj, 'db_geometry') and obj.db_geometry is not None:
            columnMap['geometry'] = \
                self.convertToDB(obj.db_geometry, 'str', 'mediumtext')
        if hasattr(obj, 'db_has_seq') and obj.db_has_seq is not None:
            columnMap['has_seq'] = \
                self.convertToDB(obj.db_has_seq, 'int', 'int')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert('mashup', columnMap)
        return self.createSQLUpdate('mashup', columnMap, whereMap)

    def set_sql_process(self, obj, global_props, lastId):
        """Post-write hook; mashups need no post-processing."""
        pass

    def to_sql_fast(self, obj, do_copy=True):
        """Stamp each child alias with this mashup's id so the children
        carry the correct parent foreign key when written."""
        for child in obj.db_aliases:
            child.db_parent = obj.db_id

    def delete_sql_column(self, db, obj, global_props):
        """Delete obj's row from the mashup table."""
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        dbCommand = self.createSQLDelete('mashup', whereMap)
        self.executeSQL(db, dbCommand, False)
class DBMachineSQLDAOBase(SQLDAO):
    """SQL DAO mapping DBMachine objects to rows of the 'machine' table.

    The generated original repeated the column list four times and
    inlined copies of `get_sql_select`/`process_sql_columns` inside
    `get_sql_columns` (and of `set_sql_command` inside
    `set_sql_columns`).  All column handling is now driven from one
    field specification and the read/write entry points delegate, so
    the pieces cannot drift apart.
    """

    # (object attribute, SQL column, python type, SQL column type).
    # Order matters: it is the SELECT column order used by
    # process_sql_columns and the INSERT column order.
    _FIELDS = [
        ('db_id', 'id', 'long', 'int'),
        ('db_name', 'name', 'str', 'varchar(255)'),
        ('db_os', 'os', 'str', 'varchar(255)'),
        ('db_architecture', 'architecture', 'str', 'varchar(255)'),
        ('db_processor', 'processor', 'str', 'varchar(255)'),
        ('db_ram', 'ram', 'int', 'bigint'),
        ('db_vistrailId', 'vt_id', 'long', 'int'),
        ('db_workflow_exec', 'log_id', 'long', 'int'),
        ('db_entity_id', 'entity_id', 'long', 'int'),
        ('db_entity_type', 'entity_type', 'str', 'char(16)'),
    ]

    def __init__(self, daoList):
        """Keep a reference to the shared DAO registry and bind our table name."""
        self.daoList = daoList
        self.table = 'machine'

    def getDao(self, dao):
        """Return the sibling DAO registered under the key *dao*."""
        return self.daoList[dao]

    def get_sql_select(self, db, global_props, lock=False):
        """Build (but do not run) the SELECT for all machine rows."""
        columns = [col for _, col, _, _ in self._FIELDS]
        return self.createSQLSelect('machine', columns, global_props,
                                    'id', lock)

    def get_sql_columns(self, db, global_props, lock=False):
        """Run the SELECT and return {('machine', id): DBMachine}."""
        dbCommand = self.get_sql_select(db, global_props, lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)

    def process_sql_columns(self, data, global_props):
        """Convert raw SELECT rows into DBMachine objects keyed for lookup."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'varchar(255)')
            os = self.convertFromDB(row[2], 'str', 'varchar(255)')
            architecture = self.convertFromDB(row[3], 'str', 'varchar(255)')
            processor = self.convertFromDB(row[4], 'str', 'varchar(255)')
            ram = self.convertFromDB(row[5], 'int', 'bigint')
            machine = DBMachine(name=name,
                                os=os,
                                architecture=architecture,
                                processor=processor,
                                ram=ram,
                                id=id)
            # Bookkeeping fields the constructor does not accept.
            machine.db_vistrailId = self.convertFromDB(row[6], 'long', 'int')
            machine.db_workflow_exec = self.convertFromDB(row[7], 'long', 'int')
            machine.db_entity_id = self.convertFromDB(row[8], 'long', 'int')
            machine.db_entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
            machine.is_dirty = False
            res[('machine', id)] = machine
        return res

    def from_sql_fast(self, obj, all_objects):
        """Attach the machine to its parent workflow_exec, if that was loaded."""
        key = ('workflow_exec', obj.db_workflow_exec)
        if key in all_objects:
            all_objects[key].db_add_machine(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Build and execute the INSERT/UPDATE for *obj* (no-op when clean)."""
        dbCommand = self.set_sql_command(db, obj, global_props, do_copy)
        if dbCommand is None:
            return
        lastId = self.executeSQL(db, dbCommand, False)
        self.set_sql_process(obj, global_props, lastId)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Return the INSERT/UPDATE statement for *obj*, or None when clean."""
        if not do_copy and not obj.is_dirty:
            return None
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        columnMap = {}
        for attr, col, py_type, db_type in self._FIELDS:
            # Missing attribute or None value both mean "omit the column",
            # matching the original hasattr/is-not-None checks.
            value = getattr(obj, attr, None)
            if value is not None:
                columnMap[col] = self.convertToDB(value, py_type, db_type)
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert('machine', columnMap)
        return self.createSQLUpdate('machine', columnMap, whereMap)

    def set_sql_process(self, obj, global_props, lastId):
        """Post-execute hook: machines carry their own ids, nothing to do."""
        pass

    def to_sql_fast(self, obj, do_copy=True):
        """No child objects to re-parent before saving a machine."""
        pass

    def delete_sql_column(self, db, obj, global_props):
        """Delete *obj*'s row from the 'machine' table, scoped by global_props."""
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        self.executeSQL(db, self.createSQLDelete('machine', whereMap), False)
class DBOtherSQLDAOBase(SQLDAO):
    """SQL DAO mapping DBOther objects to rows of the 'other' table.

    Rewritten to drive all column handling from a single field
    specification; the read/write entry points delegate to
    `get_sql_select`/`process_sql_columns` and `set_sql_command`/
    `set_sql_process` instead of carrying inlined duplicates.
    """

    # (object attribute, SQL column, python type, SQL column type).
    # Order matters: it is the SELECT / INSERT column order.
    # Note: the key column is 'okey' ('key' is reserved in SQL).
    _FIELDS = [
        ('db_id', 'id', 'long', 'int'),
        ('db_key', 'okey', 'str', 'varchar(255)'),
        ('db_value', 'value', 'str', 'varchar(255)'),
        ('db_parentType', 'parent_type', 'str', 'char(32)'),
        ('db_entity_id', 'entity_id', 'long', 'int'),
        ('db_entity_type', 'entity_type', 'str', 'char(16)'),
        ('db_parent', 'parent_id', 'long', 'long'),
    ]

    def __init__(self, daoList):
        """Keep a reference to the shared DAO registry and bind our table name."""
        self.daoList = daoList
        self.table = 'other'

    def getDao(self, dao):
        """Return the sibling DAO registered under the key *dao*."""
        return self.daoList[dao]

    def get_sql_select(self, db, global_props, lock=False):
        """Build (but do not run) the SELECT for all 'other' rows."""
        columns = [col for _, col, _, _ in self._FIELDS]
        return self.createSQLSelect('other', columns, global_props,
                                    'id', lock)

    def get_sql_columns(self, db, global_props, lock=False):
        """Run the SELECT and return {('other', id): DBOther}."""
        dbCommand = self.get_sql_select(db, global_props, lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)

    def process_sql_columns(self, data, global_props):
        """Convert raw SELECT rows into DBOther objects keyed for lookup."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            key = self.convertFromDB(row[1], 'str', 'varchar(255)')
            value = self.convertFromDB(row[2], 'str', 'varchar(255)')
            other = DBOther(key=key,
                            value=value,
                            id=id)
            # Bookkeeping fields the constructor does not accept.
            other.db_parentType = self.convertFromDB(row[3], 'str', 'char(32)')
            other.db_entity_id = self.convertFromDB(row[4], 'long', 'int')
            other.db_entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
            other.db_parent = self.convertFromDB(row[6], 'long', 'long')
            other.is_dirty = False
            res[('other', id)] = other
        return res

    def from_sql_fast(self, obj, all_objects):
        """Re-attach the loaded object to its parent, by recorded parent type."""
        if obj.db_parentType == 'workflow':
            all_objects[('workflow', obj.db_parent)].db_add_other(obj)
        elif obj.db_parentType in ('add', 'change'):
            # Both operation kinds store the payload the same way.
            all_objects[(obj.db_parentType, obj.db_parent)].db_add_data(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Build and execute the INSERT/UPDATE for *obj* (no-op when clean)."""
        dbCommand = self.set_sql_command(db, obj, global_props, do_copy)
        if dbCommand is None:
            return
        lastId = self.executeSQL(db, dbCommand, False)
        self.set_sql_process(obj, global_props, lastId)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Return the INSERT/UPDATE statement for *obj*, or None when clean."""
        if not do_copy and not obj.is_dirty:
            return None
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        columnMap = {}
        for attr, col, py_type, db_type in self._FIELDS:
            # Missing attribute or None value both mean "omit the column".
            value = getattr(obj, attr, None)
            if value is not None:
                columnMap[col] = self.convertToDB(value, py_type, db_type)
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert('other', columnMap)
        return self.createSQLUpdate('other', columnMap, whereMap)

    def set_sql_process(self, obj, global_props, lastId):
        """Post-execute hook: nothing to do for 'other' rows."""
        pass

    def to_sql_fast(self, obj, do_copy=True):
        """No child objects to re-parent before saving."""
        pass

    def delete_sql_column(self, db, obj, global_props):
        """Delete *obj*'s row from the 'other' table, scoped by global_props."""
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        self.executeSQL(db, self.createSQLDelete('other', whereMap), False)
class DBAbstractionSQLDAOBase(SQLDAO):
    """SQL DAO mapping DBAbstraction objects to rows of the 'abstraction' table.

    Rewritten to drive all column handling from a single field
    specification; the read/write entry points delegate to
    `get_sql_select`/`process_sql_columns` and `set_sql_command`/
    `set_sql_process` instead of carrying inlined duplicates.
    """

    # (object attribute, SQL column, python type, SQL column type).
    # Order matters: it is the SELECT / INSERT column order.
    _FIELDS = [
        ('db_id', 'id', 'long', 'int'),
        ('db_cache', 'cache', 'int', 'int'),
        ('db_name', 'name', 'str', 'varchar(255)'),
        ('db_namespace', 'namespace', 'str', 'varchar(255)'),
        ('db_package', 'package', 'str', 'varchar(511)'),
        ('db_version', 'version', 'str', 'varchar(255)'),
        ('db_internal_version', 'internal_version', 'str', 'varchar(255)'),
        ('db_parentType', 'parent_type', 'str', 'char(32)'),
        ('db_entity_id', 'entity_id', 'long', 'int'),
        ('db_entity_type', 'entity_type', 'str', 'char(16)'),
        ('db_parent', 'parent_id', 'long', 'long'),
    ]

    def __init__(self, daoList):
        """Keep a reference to the shared DAO registry and bind our table name."""
        self.daoList = daoList
        self.table = 'abstraction'

    def getDao(self, dao):
        """Return the sibling DAO registered under the key *dao*."""
        return self.daoList[dao]

    def get_sql_select(self, db, global_props, lock=False):
        """Build (but do not run) the SELECT for all abstraction rows."""
        columns = [col for _, col, _, _ in self._FIELDS]
        return self.createSQLSelect('abstraction', columns, global_props,
                                    'id', lock)

    def get_sql_columns(self, db, global_props, lock=False):
        """Run the SELECT and return {('abstraction', id): DBAbstraction}."""
        dbCommand = self.get_sql_select(db, global_props, lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)

    def process_sql_columns(self, data, global_props):
        """Convert raw SELECT rows into DBAbstraction objects keyed for lookup."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            cache = self.convertFromDB(row[1], 'int', 'int')
            name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
            package = self.convertFromDB(row[4], 'str', 'varchar(511)')
            version = self.convertFromDB(row[5], 'str', 'varchar(255)')
            internal_version = self.convertFromDB(row[6], 'str', 'varchar(255)')
            abstraction = DBAbstraction(cache=cache,
                                        name=name,
                                        namespace=namespace,
                                        package=package,
                                        version=version,
                                        internal_version=internal_version,
                                        id=id)
            # Bookkeeping fields the constructor does not accept.
            abstraction.db_parentType = self.convertFromDB(row[7], 'str', 'char(32)')
            abstraction.db_entity_id = self.convertFromDB(row[8], 'long', 'int')
            abstraction.db_entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
            abstraction.db_parent = self.convertFromDB(row[10], 'long', 'long')
            abstraction.is_dirty = False
            res[('abstraction', id)] = abstraction
        return res

    def from_sql_fast(self, obj, all_objects):
        """Re-attach the loaded object to its parent, by recorded parent type."""
        if obj.db_parentType == 'workflow':
            all_objects[('workflow', obj.db_parent)].db_add_module(obj)
        elif obj.db_parentType in ('add', 'change'):
            # Both operation kinds store the payload the same way.
            all_objects[(obj.db_parentType, obj.db_parent)].db_add_data(obj)

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Build and execute the INSERT/UPDATE for *obj* (no-op when clean)."""
        dbCommand = self.set_sql_command(db, obj, global_props, do_copy)
        if dbCommand is None:
            return
        lastId = self.executeSQL(db, dbCommand, False)
        self.set_sql_process(obj, global_props, lastId)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Return the INSERT/UPDATE statement for *obj*, or None when clean."""
        if not do_copy and not obj.is_dirty:
            return None
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        columnMap = {}
        for attr, col, py_type, db_type in self._FIELDS:
            # Missing attribute or None value both mean "omit the column".
            value = getattr(obj, attr, None)
            if value is not None:
                columnMap[col] = self.convertToDB(value, py_type, db_type)
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert('abstraction', columnMap)
        return self.createSQLUpdate('abstraction', columnMap, whereMap)

    def set_sql_process(self, obj, global_props, lastId):
        """Post-execute hook: nothing to do for abstractions."""
        pass

    def to_sql_fast(self, obj, do_copy=True):
        """Stamp this module's type/id onto every child before saving."""
        children = []
        if obj.db_location is not None:
            children.append(obj.db_location)
        children.extend(obj.db_functions)
        children.extend(obj.db_annotations)
        children.extend(obj.db_controlParameters)
        for child in children:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id

    def delete_sql_column(self, db, obj, global_props):
        """Delete *obj*'s row from the 'abstraction' table, scoped by global_props."""
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        self.executeSQL(db, self.createSQLDelete('abstraction', whereMap), False)
class DBMashuptrailSQLDAOBase(SQLDAO):
    """SQL DAO mapping DBMashuptrail objects to rows of the 'mashuptrail' table.

    Rewritten to drive all column handling from a single field
    specification.  `set_sql_columns` now delegates to `set_sql_command`
    and `set_sql_process` (the original inlined a copy of the latter's
    id/entity bookkeeping); a stray trailing `pass` and an unused
    `keyStr` local in `set_sql_process` were dropped.
    """

    # (object attribute, SQL column, python type, SQL column type).
    # Order matters: it is the SELECT / INSERT column order.
    _FIELDS = [
        ('db_id', 'id', 'long', 'int'),
        ('db_name', 'name', 'str', 'char(36)'),
        ('db_version', 'version', 'str', 'char(16)'),
        ('db_vtVersion', 'vt_version', 'long', 'int'),
        ('db_last_modified', 'last_modified', 'datetime', 'datetime'),
        ('db_entity_type', 'entity_type', 'str', 'char(16)'),
    ]

    def __init__(self, daoList):
        """Keep a reference to the shared DAO registry and bind our table name."""
        self.daoList = daoList
        self.table = 'mashuptrail'

    def getDao(self, dao):
        """Return the sibling DAO registered under the key *dao*."""
        return self.daoList[dao]

    def get_sql_select(self, db, global_props, lock=False):
        """Build (but do not run) the SELECT for all mashuptrail rows."""
        columns = [col for _, col, _, _ in self._FIELDS]
        return self.createSQLSelect('mashuptrail', columns, global_props,
                                    'id', lock)

    def get_sql_columns(self, db, global_props, lock=False):
        """Run the SELECT and return {('mashuptrail', id): DBMashuptrail}."""
        dbCommand = self.get_sql_select(db, global_props, lock)
        data = self.executeSQL(db, dbCommand, True)
        return self.process_sql_columns(data, global_props)

    def process_sql_columns(self, data, global_props):
        """Convert raw SELECT rows into DBMashuptrail objects keyed for lookup.

        As each trail is loaded its id is recorded in global_props as
        'entity_id', mirroring the generated original.
        """
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
            name = self.convertFromDB(row[1], 'str', 'char(36)')
            version = self.convertFromDB(row[2], 'str', 'char(16)')
            vtVersion = self.convertFromDB(row[3], 'long', 'int')
            last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
            mashuptrail = DBMashuptrail(name=name,
                                        version=version,
                                        vtVersion=vtVersion,
                                        last_modified=last_modified,
                                        id=id)
            mashuptrail.db_entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
            mashuptrail.is_dirty = False
            res[('mashuptrail', id)] = mashuptrail
        return res

    def from_sql_fast(self, obj, all_objects):
        """Mashup trails are not re-linked to a parent after load."""
        pass

    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """Build and execute the INSERT/UPDATE for *obj* (no-op when clean)."""
        dbCommand = self.set_sql_command(db, obj, global_props, do_copy)
        if dbCommand is None:
            return
        lastId = self.executeSQL(db, dbCommand, False)
        self.set_sql_process(obj, global_props, lastId)

    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Return the INSERT/UPDATE statement for *obj*, or None when clean."""
        if not do_copy and not obj.is_dirty:
            return None
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        columnMap = {}
        for attr, col, py_type, db_type in self._FIELDS:
            # Missing attribute or None value both mean "omit the column".
            value = getattr(obj, attr, None)
            if value is not None:
                columnMap[col] = self.convertToDB(value, py_type, db_type)
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            return self.createSQLInsert('mashuptrail', columnMap)
        return self.createSQLUpdate('mashuptrail', columnMap, whereMap)

    def set_sql_process(self, obj, global_props, lastId):
        """Adopt an autogenerated key and record the trail id in global_props."""
        if obj.db_id is None:
            obj.db_id = lastId
        if obj.db_id is not None:
            global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')

    def to_sql_fast(self, obj, do_copy=True):
        """Stamp the trail's id onto all children before saving."""
        for action in obj.db_actions:
            action.db_mashuptrail = obj.db_id
        for annotation in obj.db_annotations:
            annotation.db_parentType = obj.vtType
            annotation.db_parent = obj.db_id
        for action_annotation in obj.db_actionAnnotations:
            action_annotation.db_mashuptrail = obj.db_id

    def delete_sql_column(self, db, obj, global_props):
        """Delete *obj*'s row from the 'mashuptrail' table, scoped by global_props."""
        whereMap = dict(global_props)
        if obj.db_id is not None:
            whereMap['id'] = self.convertToDB(obj.db_id, 'long', 'int')
        self.executeSQL(db, self.createSQLDelete('mashuptrail', whereMap), False)
class DBRegistrySQLDAOBase(SQLDAO):
    def __init__(self, daoList):
        """Store the shared DAO registry and bind this DAO to the 'registry' table."""
        self.daoList = daoList
        self.table = 'registry'
    def getDao(self, dao):
        """Return the sibling DAO registered under the key *dao*."""
        return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified']
table = 'registry'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
root_descriptor_id = self.convertFromDB(row[3], 'long', 'int')
name = self.convertFromDB(row[4], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[5], 'datetime', 'datetime')
registry = DBRegistry(entity_type=entity_type,
version=version,
root_descriptor_id=root_descriptor_id,
name=name,
last_modified=last_modified,
id=id)
registry.is_dirty = False
res[('registry', id)] = registry
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified']
table = 'registry'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
root_descriptor_id = self.convertFromDB(row[3], 'long', 'int')
name = self.convertFromDB(row[4], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[5], 'datetime', 'datetime')
registry = DBRegistry(entity_type=entity_type,
version=version,
root_descriptor_id=root_descriptor_id,
name=name,
last_modified=last_modified,
id=id)
registry.is_dirty = False
res[('registry', id)] = registry
return res
    def from_sql_fast(self, obj, all_objects):
        """No parent re-linking is performed for registries after load."""
        pass
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified']
table = 'registry'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_root_descriptor_id') and obj.db_root_descriptor_id is not None:
columnMap['root_descriptor_id'] = \
self.convertToDB(obj.db_root_descriptor_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE command for obj's
        'registry' row; returns None for clean objects when not copying.

        Same column mapping as set_sql_columns; execution and id adoption
        are deferred to the caller (see set_sql_process).
        """
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified']
        table = 'registry'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        # Map object attributes to their DB columns, skipping unset values.
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_version') and obj.db_version is not None:
            columnMap['version'] = \
                self.convertToDB(obj.db_version, 'str', 'char(16)')
        if hasattr(obj, 'db_root_descriptor_id') and obj.db_root_descriptor_id is not None:
            columnMap['root_descriptor_id'] = \
                self.convertToDB(obj.db_root_descriptor_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
            columnMap['last_modified'] = \
                self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
def set_sql_process(self, obj, global_props, lastId):
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
pass
    def to_sql_fast(self, obj, do_copy=True):
        """Propagate the registry's id into each child package's foreign key."""
        for child in obj.db_packages:
            child.db_registry = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """DELETE obj's row from 'registry', keyed on obj.db_id plus the
        scoping columns in global_props."""
        table = 'registry'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBAnnotationSQLDAOBase(SQLDAO):
    """SQL DAO for DBAnnotation objects stored in the 'annotation' table.

    Generated by auto_dao.py; code kept byte-identical to the generator's
    output -- only documentation has been added.  Note the object attribute
    db_key maps to DB column 'akey'.
    """
    def __init__(self, daoList):
        # daoList is the shared mapping of type name -> DAO.
        self.daoList = daoList
        self.table = 'annotation'
    def getDao(self, dao):
        """Return the sibling DAO registered under key `dao`."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT all annotation rows matching global_props and return a dict
        mapping ('annotation', id) -> DBAnnotation."""
        columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'annotation'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            key = self.convertFromDB(row[1], 'str', 'varchar(255)')
            value = self.convertFromDB(row[2], 'str', 'mediumtext')
            parentType = self.convertFromDB(row[3], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[4], 'long', 'int')
            entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
            parent = self.convertFromDB(row[6], 'long', 'long')
            annotation = DBAnnotation(key=key,
                                      value=value,
                                      id=id)
            annotation.db_parentType = parentType
            annotation.db_entity_id = entity_id
            annotation.db_entity_type = entity_type
            annotation.db_parent = parent
            annotation.is_dirty = False
            res[('annotation', id)] = annotation
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the SELECT used by get_sql_columns."""
        columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'annotation'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Build DBAnnotation objects from pre-fetched rows; same row->object
        mapping as get_sql_columns."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            key = self.convertFromDB(row[1], 'str', 'varchar(255)')
            value = self.convertFromDB(row[2], 'str', 'mediumtext')
            parentType = self.convertFromDB(row[3], 'str', 'char(32)')
            entity_id = self.convertFromDB(row[4], 'long', 'int')
            entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
            parent = self.convertFromDB(row[6], 'long', 'long')
            annotation = DBAnnotation(key=key,
                                      value=value,
                                      id=id)
            annotation.db_parentType = parentType
            annotation.db_entity_id = entity_id
            annotation.db_entity_type = entity_type
            annotation.db_parent = parent
            annotation.is_dirty = False
            res[('annotation', id)] = annotation
        return res
    def from_sql_fast(self, obj, all_objects):
        """Link obj into its already-loaded parent object, dispatching on
        db_parentType."""
        if obj.db_parentType == 'vistrail':
            p = all_objects[('vistrail', obj.db_parent)]
            p.db_add_annotation(obj)
        elif obj.db_parentType == 'workflow':
            p = all_objects[('workflow', obj.db_parent)]
            p.db_add_annotation(obj)
        elif obj.db_parentType == 'module':
            p = all_objects[('module', obj.db_parent)]
            p.db_add_annotation(obj)
        elif obj.db_parentType == 'workflow_exec':
            p = all_objects[('workflow_exec', obj.db_parent)]
            p.db_add_annotation(obj)
        elif obj.db_parentType == 'module_exec':
            p = all_objects[('module_exec', obj.db_parent)]
            p.db_add_annotation(obj)
        elif obj.db_parentType == 'group_exec':
            p = all_objects[('group_exec', obj.db_parent)]
            p.db_add_annotation(obj)
        elif obj.db_parentType == 'add':
            p = all_objects[('add', obj.db_parent)]
            # add/change operations attach the annotation as their data payload
            p.db_add_data(obj)
        elif obj.db_parentType == 'change':
            p = all_objects[('change', obj.db_parent)]
            p.db_add_data(obj)
        elif obj.db_parentType == 'action':
            p = all_objects[('action', obj.db_parent)]
            p.db_add_annotation(obj)
        elif obj.db_parentType == 'abstraction':
            p = all_objects[('abstraction', obj.db_parent)]
            p.db_add_annotation(obj)
        elif obj.db_parentType == 'mashuptrail':
            p = all_objects[('mashuptrail', obj.db_parent)]
            p.db_add_annotation(obj)
        elif obj.db_parentType == 'group':
            p = all_objects[('group', obj.db_parent)]
            p.db_add_annotation(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new/copy) or UPDATE obj's 'annotation' row; skips clean
        objects unless do_copy is set."""
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'annotation'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        # Map object attributes to their DB columns, skipping unset values.
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_key') and obj.db_key is not None:
            columnMap['akey'] = \
                self.convertToDB(obj.db_key, 'str', 'varchar(255)')
        if hasattr(obj, 'db_value') and obj.db_value is not None:
            columnMap['value'] = \
                self.convertToDB(obj.db_value, 'str', 'mediumtext')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE command for obj; returns
        None for clean objects when not copying."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
        table = 'annotation'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_key') and obj.db_key is not None:
            columnMap['akey'] = \
                self.convertToDB(obj.db_key, 'str', 'varchar(255)')
        if hasattr(obj, 'db_value') and obj.db_value is not None:
            columnMap['value'] = \
                self.convertToDB(obj.db_value, 'str', 'mediumtext')
        if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
            columnMap['parent_type'] = \
                self.convertToDB(obj.db_parentType, 'str', 'char(32)')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'long')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """No-op: annotations do not publish keys into global_props."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """No-op: annotations have no child objects to update."""
        pass
    def delete_sql_column(self, db, obj, global_props):
        """DELETE obj's row from 'annotation', keyed on obj.db_id plus the
        scoping columns in global_props."""
        table = 'annotation'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBParameterExplorationSQLDAOBase(SQLDAO):
    """SQL DAO for DBParameterExploration objects stored in the
    'parameter_exploration' table.

    Generated by auto_dao.py; code kept byte-identical to the generator's
    output -- only documentation has been added.  The object attribute
    db_vistrail maps to DB column 'parent_id'.
    """
    def __init__(self, daoList):
        # daoList is the shared mapping of type name -> DAO.
        self.daoList = daoList
        self.table = 'parameter_exploration'
    def getDao(self, dao):
        """Return the sibling DAO registered under key `dao`."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT all parameter_exploration rows matching global_props and
        return a dict mapping ('parameter_exploration', id) -> object."""
        columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type']
        table = 'parameter_exploration'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            action_id = self.convertFromDB(row[1], 'long', 'int')
            name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            date = self.convertFromDB(row[3], 'datetime', 'datetime')
            user = self.convertFromDB(row[4], 'str', 'varchar(255)')
            dims = self.convertFromDB(row[5], 'str', 'varchar(255)')
            layout = self.convertFromDB(row[6], 'str', 'varchar(255)')
            vistrail = self.convertFromDB(row[7], 'long', 'int')
            entity_id = self.convertFromDB(row[8], 'long', 'int')
            entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
            parameter_exploration = DBParameterExploration(action_id=action_id,
                                                           name=name,
                                                           date=date,
                                                           user=user,
                                                           dims=dims,
                                                           layout=layout,
                                                           id=id)
            parameter_exploration.db_vistrail = vistrail
            parameter_exploration.db_entity_id = entity_id
            parameter_exploration.db_entity_type = entity_type
            parameter_exploration.is_dirty = False
            res[('parameter_exploration', id)] = parameter_exploration
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the SELECT used by get_sql_columns."""
        columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type']
        table = 'parameter_exploration'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Build DBParameterExploration objects from pre-fetched rows; same
        row->object mapping as get_sql_columns."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            action_id = self.convertFromDB(row[1], 'long', 'int')
            name = self.convertFromDB(row[2], 'str', 'varchar(255)')
            date = self.convertFromDB(row[3], 'datetime', 'datetime')
            user = self.convertFromDB(row[4], 'str', 'varchar(255)')
            dims = self.convertFromDB(row[5], 'str', 'varchar(255)')
            layout = self.convertFromDB(row[6], 'str', 'varchar(255)')
            vistrail = self.convertFromDB(row[7], 'long', 'int')
            entity_id = self.convertFromDB(row[8], 'long', 'int')
            entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
            parameter_exploration = DBParameterExploration(action_id=action_id,
                                                           name=name,
                                                           date=date,
                                                           user=user,
                                                           dims=dims,
                                                           layout=layout,
                                                           id=id)
            parameter_exploration.db_vistrail = vistrail
            parameter_exploration.db_entity_id = entity_id
            parameter_exploration.db_entity_type = entity_type
            parameter_exploration.is_dirty = False
            res[('parameter_exploration', id)] = parameter_exploration
        return res
    def from_sql_fast(self, obj, all_objects):
        """Attach obj to its parent vistrail when that vistrail was loaded."""
        if ('vistrail', obj.db_vistrail) in all_objects:
            p = all_objects[('vistrail', obj.db_vistrail)]
            p.db_add_parameter_exploration(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new/copy) or UPDATE obj's 'parameter_exploration' row;
        skips clean objects unless do_copy is set."""
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type']
        table = 'parameter_exploration'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        # Map object attributes to their DB columns, skipping unset values.
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
            columnMap['action_id'] = \
                self.convertToDB(obj.db_action_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_date') and obj.db_date is not None:
            columnMap['date'] = \
                self.convertToDB(obj.db_date, 'datetime', 'datetime')
        if hasattr(obj, 'db_user') and obj.db_user is not None:
            columnMap['user'] = \
                self.convertToDB(obj.db_user, 'str', 'varchar(255)')
        if hasattr(obj, 'db_dims') and obj.db_dims is not None:
            columnMap['dims'] = \
                self.convertToDB(obj.db_dims, 'str', 'varchar(255)')
        if hasattr(obj, 'db_layout') and obj.db_layout is not None:
            columnMap['layout'] = \
                self.convertToDB(obj.db_layout, 'str', 'varchar(255)')
        if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_vistrail, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE command for obj; returns
        None for clean objects when not copying."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type']
        table = 'parameter_exploration'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
            columnMap['action_id'] = \
                self.convertToDB(obj.db_action_id, 'long', 'int')
        if hasattr(obj, 'db_name') and obj.db_name is not None:
            columnMap['name'] = \
                self.convertToDB(obj.db_name, 'str', 'varchar(255)')
        if hasattr(obj, 'db_date') and obj.db_date is not None:
            columnMap['date'] = \
                self.convertToDB(obj.db_date, 'datetime', 'datetime')
        if hasattr(obj, 'db_user') and obj.db_user is not None:
            columnMap['user'] = \
                self.convertToDB(obj.db_user, 'str', 'varchar(255)')
        if hasattr(obj, 'db_dims') and obj.db_dims is not None:
            columnMap['dims'] = \
                self.convertToDB(obj.db_dims, 'str', 'varchar(255)')
        if hasattr(obj, 'db_layout') and obj.db_layout is not None:
            columnMap['layout'] = \
                self.convertToDB(obj.db_layout, 'str', 'varchar(255)')
        if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_vistrail, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """No-op: parameter explorations do not publish keys into
        global_props."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """Propagate this exploration's id into its child functions'
        foreign keys."""
        for child in obj.db_functions:
            child.db_parameter_exploration = obj.db_id
    def delete_sql_column(self, db, obj, global_props):
        """DELETE obj's row from 'parameter_exploration', keyed on obj.db_id
        plus the scoping columns in global_props."""
        table = 'parameter_exploration'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBMashupActionAnnotationSQLDAOBase(SQLDAO):
    """SQL DAO for DBMashupActionAnnotation objects stored in the
    'mashup_action_annotation' table.

    Generated by auto_dao.py; code kept byte-identical to the generator's
    output -- only documentation has been added.  Attribute db_key maps to
    DB column 'akey'; db_mashuptrail maps to DB column 'parent_id'.
    """
    def __init__(self, daoList):
        # daoList is the shared mapping of type name -> DAO.
        self.daoList = daoList
        self.table = 'mashup_action_annotation'
    def getDao(self, dao):
        """Return the sibling DAO registered under key `dao`."""
        return self.daoList[dao]
    def get_sql_columns(self, db, global_props,lock=False):
        """SELECT all mashup_action_annotation rows matching global_props and
        return a dict mapping ('mashup_actionAnnotation', id) -> object."""
        columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
        table = 'mashup_action_annotation'
        whereMap = global_props
        orderBy = 'id'
        dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
        data = self.executeSQL(db, dbCommand, True)
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            key = self.convertFromDB(row[1], 'str', 'varchar(255)')
            value = self.convertFromDB(row[2], 'str', 'varchar(8191)')
            action_id = self.convertFromDB(row[3], 'long', 'int')
            date = self.convertFromDB(row[4], 'datetime', 'datetime')
            user = self.convertFromDB(row[5], 'str', 'varchar(255)')
            mashuptrail = self.convertFromDB(row[6], 'long', 'int')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            mashup_actionAnnotation = DBMashupActionAnnotation(key=key,
                                                               value=value,
                                                               action_id=action_id,
                                                               date=date,
                                                               user=user,
                                                               id=id)
            mashup_actionAnnotation.db_mashuptrail = mashuptrail
            mashup_actionAnnotation.db_entity_id = entity_id
            mashup_actionAnnotation.db_entity_type = entity_type
            mashup_actionAnnotation.is_dirty = False
            res[('mashup_actionAnnotation', id)] = mashup_actionAnnotation
        return res
    def get_sql_select(self, db, global_props,lock=False):
        """Build (without executing) the SELECT used by get_sql_columns."""
        columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
        table = 'mashup_action_annotation'
        whereMap = global_props
        orderBy = 'id'
        return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
    def process_sql_columns(self, data, global_props):
        """Build DBMashupActionAnnotation objects from pre-fetched rows; same
        row->object mapping as get_sql_columns."""
        res = {}
        for row in data:
            id = self.convertFromDB(row[0], 'long', 'int')
            key = self.convertFromDB(row[1], 'str', 'varchar(255)')
            value = self.convertFromDB(row[2], 'str', 'varchar(8191)')
            action_id = self.convertFromDB(row[3], 'long', 'int')
            date = self.convertFromDB(row[4], 'datetime', 'datetime')
            user = self.convertFromDB(row[5], 'str', 'varchar(255)')
            mashuptrail = self.convertFromDB(row[6], 'long', 'int')
            entity_id = self.convertFromDB(row[7], 'long', 'int')
            entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
            mashup_actionAnnotation = DBMashupActionAnnotation(key=key,
                                                               value=value,
                                                               action_id=action_id,
                                                               date=date,
                                                               user=user,
                                                               id=id)
            mashup_actionAnnotation.db_mashuptrail = mashuptrail
            mashup_actionAnnotation.db_entity_id = entity_id
            mashup_actionAnnotation.db_entity_type = entity_type
            mashup_actionAnnotation.is_dirty = False
            res[('mashup_actionAnnotation', id)] = mashup_actionAnnotation
        return res
    def from_sql_fast(self, obj, all_objects):
        """Attach obj to its parent mashuptrail when that trail was loaded."""
        if ('mashuptrail', obj.db_mashuptrail) in all_objects:
            p = all_objects[('mashuptrail', obj.db_mashuptrail)]
            p.db_add_actionAnnotation(obj)
    def set_sql_columns(self, db, obj, global_props, do_copy=True):
        """INSERT (new/copy) or UPDATE obj's 'mashup_action_annotation' row;
        skips clean objects unless do_copy is set."""
        if not do_copy and not obj.is_dirty:
            return
        columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
        table = 'mashup_action_annotation'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        # Map object attributes to their DB columns, skipping unset values.
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_key') and obj.db_key is not None:
            columnMap['akey'] = \
                self.convertToDB(obj.db_key, 'str', 'varchar(255)')
        if hasattr(obj, 'db_value') and obj.db_value is not None:
            columnMap['value'] = \
                self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
        if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
            columnMap['action_id'] = \
                self.convertToDB(obj.db_action_id, 'long', 'int')
        if hasattr(obj, 'db_date') and obj.db_date is not None:
            columnMap['date'] = \
                self.convertToDB(obj.db_date, 'datetime', 'datetime')
        if hasattr(obj, 'db_user') and obj.db_user is not None:
            columnMap['user'] = \
                self.convertToDB(obj.db_user, 'str', 'varchar(255)')
        if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_mashuptrail, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        lastId = self.executeSQL(db, dbCommand, False)
    def set_sql_command(self, db, obj, global_props, do_copy=True):
        """Build (without executing) the INSERT/UPDATE command for obj; returns
        None for clean objects when not copying."""
        if not do_copy and not obj.is_dirty:
            return None
        columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
        table = 'mashup_action_annotation'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        columnMap = {}
        if hasattr(obj, 'db_id') and obj.db_id is not None:
            columnMap['id'] = \
                self.convertToDB(obj.db_id, 'long', 'int')
        if hasattr(obj, 'db_key') and obj.db_key is not None:
            columnMap['akey'] = \
                self.convertToDB(obj.db_key, 'str', 'varchar(255)')
        if hasattr(obj, 'db_value') and obj.db_value is not None:
            columnMap['value'] = \
                self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
        if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
            columnMap['action_id'] = \
                self.convertToDB(obj.db_action_id, 'long', 'int')
        if hasattr(obj, 'db_date') and obj.db_date is not None:
            columnMap['date'] = \
                self.convertToDB(obj.db_date, 'datetime', 'datetime')
        if hasattr(obj, 'db_user') and obj.db_user is not None:
            columnMap['user'] = \
                self.convertToDB(obj.db_user, 'str', 'varchar(255)')
        if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_mashuptrail, 'long', 'int')
        if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
            columnMap['entity_id'] = \
                self.convertToDB(obj.db_entity_id, 'long', 'int')
        if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
            columnMap['entity_type'] = \
                self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
        columnMap.update(global_props)
        if obj.is_new or do_copy:
            dbCommand = self.createSQLInsert(table, columnMap)
        else:
            dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        return dbCommand
    def set_sql_process(self, obj, global_props, lastId):
        """No-op: mashup action annotations do not publish keys into
        global_props."""
        pass
    def to_sql_fast(self, obj, do_copy=True):
        """No-op: mashup action annotations have no child objects."""
        pass
    def delete_sql_column(self, db, obj, global_props):
        """DELETE obj's row from 'mashup_action_annotation', keyed on
        obj.db_id plus the scoping columns in global_props."""
        table = 'mashup_action_annotation'
        whereMap = {}
        whereMap.update(global_props)
        if obj.db_id is not None:
            keyStr = self.convertToDB(obj.db_id, 'long', 'int')
            whereMap['id'] = keyStr
        dbCommand = self.createSQLDelete(table, whereMap)
        self.executeSQL(db, dbCommand, False)
class DBModuleExecSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'module_exec'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module_exec'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
cached = self.convertFromDB(row[3], 'int', 'int')
module_id = self.convertFromDB(row[4], 'long', 'int')
module_name = self.convertFromDB(row[5], 'str', 'varchar(255)')
completed = self.convertFromDB(row[6], 'int', 'int')
error = self.convertFromDB(row[7], 'str', 'varchar(1023)')
machine_id = self.convertFromDB(row[8], 'long', 'int')
parentType = self.convertFromDB(row[9], 'str', 'char(32)')
entity_id = self.convertFromDB(row[10], 'long', 'int')
entity_type = self.convertFromDB(row[11], 'str', 'char(16)')
parent = self.convertFromDB(row[12], 'long', 'long')
module_exec = DBModuleExec(ts_start=ts_start,
ts_end=ts_end,
cached=cached,
module_id=module_id,
module_name=module_name,
completed=completed,
error=error,
machine_id=machine_id,
id=id)
module_exec.db_parentType = parentType
module_exec.db_entity_id = entity_id
module_exec.db_entity_type = entity_type
module_exec.db_parent = parent
module_exec.is_dirty = False
res[('module_exec', id)] = module_exec
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module_exec'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
cached = self.convertFromDB(row[3], 'int', 'int')
module_id = self.convertFromDB(row[4], 'long', 'int')
module_name = self.convertFromDB(row[5], 'str', 'varchar(255)')
completed = self.convertFromDB(row[6], 'int', 'int')
error = self.convertFromDB(row[7], 'str', 'varchar(1023)')
machine_id = self.convertFromDB(row[8], 'long', 'int')
parentType = self.convertFromDB(row[9], 'str', 'char(32)')
entity_id = self.convertFromDB(row[10], 'long', 'int')
entity_type = self.convertFromDB(row[11], 'str', 'char(16)')
parent = self.convertFromDB(row[12], 'long', 'long')
module_exec = DBModuleExec(ts_start=ts_start,
ts_end=ts_end,
cached=cached,
module_id=module_id,
module_name=module_name,
completed=completed,
error=error,
machine_id=machine_id,
id=id)
module_exec.db_parentType = parentType
module_exec.db_entity_id = entity_id
module_exec.db_entity_type = entity_type
module_exec.db_parent = parent
module_exec.is_dirty = False
res[('module_exec', id)] = module_exec
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow_exec':
p = all_objects[('workflow_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'group_exec':
p = all_objects[('group_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'loop_iteration':
p = all_objects[('loop_iteration', obj.db_parent)]
p.db_add_item_exec(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_cached') and obj.db_cached is not None:
columnMap['cached'] = \
self.convertToDB(obj.db_cached, 'int', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_module_name') and obj.db_module_name is not None:
columnMap['module_name'] = \
self.convertToDB(obj.db_module_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None:
columnMap['machine_id'] = \
self.convertToDB(obj.db_machine_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_cached') and obj.db_cached is not None:
columnMap['cached'] = \
self.convertToDB(obj.db_cached, 'int', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_module_name') and obj.db_module_name is not None:
columnMap['module_name'] = \
self.convertToDB(obj.db_module_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None:
columnMap['machine_id'] = \
self.convertToDB(obj.db_machine_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
    """Post-insert/update hook for module_exec rows.

    Intentionally a no-op for this DAO; presumably other generated DAOs use
    *lastId* to propagate database-assigned ids after an insert —
    TODO confirm against auto_dao.py.
    """
    pass
def to_sql_fast(self, obj, do_copy=True):
    """Stamp parent linkage (type and id) onto this module_exec's children.

    Both the annotation children and the loop_exec children receive the
    identical parent bookkeeping, so handle them in a single nested loop.
    """
    for child_group in (obj.db_annotations, obj.db_loop_execs):
        for child in child_group:
            child.db_parentType = obj.vtType
            child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
    """Delete this object's row from the module_exec table.

    The WHERE clause combines the caller-supplied global properties with
    the object's primary key (when it has one).
    """
    where = {}
    where.update(global_props)
    if obj.db_id is not None:
        where['id'] = self.convertToDB(obj.db_id, 'long', 'int')
    delete_cmd = self.createSQLDelete('module_exec', where)
    self.executeSQL(db, delete_cmd, False)
"""generated automatically by auto_dao.py"""
class SQLDAOListBase(dict):
    """generated automatically by auto_dao.py

    Registry mapping vistrail object-type names to their SQL DAO instances.
    Caller-supplied DAOs take precedence; every type not provided gets its
    default DAO, constructed with a reference back to this registry.
    """

    # (type name, default DAO class) pairs, registered in this order.
    _DEFAULT_DAOS = (
        ('mashup_alias', DBMashupAliasSQLDAOBase),
        ('group', DBGroupSQLDAOBase),
        ('add', DBAddSQLDAOBase),
        ('group_exec', DBGroupExecSQLDAOBase),
        ('parameter', DBParameterSQLDAOBase),
        ('vistrail', DBVistrailSQLDAOBase),
        ('module', DBModuleSQLDAOBase),
        ('port', DBPortSQLDAOBase),
        ('pe_function', DBPEFunctionSQLDAOBase),
        ('workflow', DBWorkflowSQLDAOBase),
        ('mashup_action', DBMashupActionSQLDAOBase),
        ('change', DBChangeSQLDAOBase),
        ('package', DBPackageSQLDAOBase),
        ('loop_exec', DBLoopExecSQLDAOBase),
        ('connection', DBConnectionSQLDAOBase),
        ('action', DBActionSQLDAOBase),
        ('portSpec', DBPortSpecSQLDAOBase),
        ('log', DBLogSQLDAOBase),
        ('loop_iteration', DBLoopIterationSQLDAOBase),
        ('pe_parameter', DBPEParameterSQLDAOBase),
        ('workflow_exec', DBWorkflowExecSQLDAOBase),
        ('location', DBLocationSQLDAOBase),
        ('function', DBFunctionSQLDAOBase),
        ('actionAnnotation', DBActionAnnotationSQLDAOBase),
        ('controlParameter', DBControlParameterSQLDAOBase),
        ('plugin_data', DBPluginDataSQLDAOBase),
        ('delete', DBDeleteSQLDAOBase),
        ('vistrailVariable', DBVistrailVariableSQLDAOBase),
        ('module_descriptor', DBModuleDescriptorSQLDAOBase),
        ('tag', DBTagSQLDAOBase),
        ('portSpecItem', DBPortSpecItemSQLDAOBase),
        ('mashup_component', DBMashupComponentSQLDAOBase),
        ('mashup', DBMashupSQLDAOBase),
        ('machine', DBMachineSQLDAOBase),
        ('other', DBOtherSQLDAOBase),
        ('abstraction', DBAbstractionSQLDAOBase),
        ('mashuptrail', DBMashuptrailSQLDAOBase),
        ('registry', DBRegistrySQLDAOBase),
        ('annotation', DBAnnotationSQLDAOBase),
        ('parameter_exploration', DBParameterExplorationSQLDAOBase),
        ('mashup_actionAnnotation', DBMashupActionAnnotationSQLDAOBase),
        ('module_exec', DBModuleExecSQLDAOBase),
    )

    def __init__(self, daos=None):
        if daos is not None:
            dict.update(self, daos)
        # Only instantiate a default DAO when the key is absent, exactly as
        # the original per-key checks did.
        for type_name, dao_class in self._DEFAULT_DAOS:
            if type_name not in self:
                self[type_name] = dao_class(self)
|
{
"content_hash": "f28aa190b64f8a154c61ab1755c6340f",
"timestamp": "",
"source": "github",
"line_count": 8316,
"max_line_length": 229,
"avg_line_length": 47.498075998076,
"alnum_prop": 0.5452487885891938,
"repo_name": "VisTrails/VisTrails",
"id": "fc342985c81a329ad496d61b515033c552017391",
"size": "396908",
"binary": false,
"copies": "2",
"ref": "refs/heads/v2.2",
"path": "vistrails/db/versions/v1_0_4/persistence/sql/auto_gen.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1129"
},
{
"name": "Makefile",
"bytes": "768"
},
{
"name": "Mako",
"bytes": "66613"
},
{
"name": "PHP",
"bytes": "49302"
},
{
"name": "Python",
"bytes": "19779006"
},
{
"name": "R",
"bytes": "782836"
},
{
"name": "Ruby",
"bytes": "875"
},
{
"name": "SQLPL",
"bytes": "2323"
},
{
"name": "Shell",
"bytes": "26542"
},
{
"name": "TeX",
"bytes": "147247"
},
{
"name": "XSLT",
"bytes": "1090"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: zabbix_template
short_description: Create/update/delete/dump Zabbix template
description:
- This module allows you to create, modify, delete and dump Zabbix templates.
- Multiple templates can be created or modified at once if passing JSON or XML to module.
version_added: "2.5"
author:
- "sookido (@sookido)"
- "Logan Vig (@logan2211)"
- "Dusan Matejka (@D3DeFi)"
requirements:
- "python >= 2.6"
- "zabbix-api >= 0.5.3"
options:
template_name:
description:
- Name of Zabbix template.
- Required when I(template_json) or I(template_xml) are not used.
- Mutually exclusive with I(template_json) and I(template_xml).
required: false
template_json:
description:
- JSON dump of templates to import.
- Multiple templates can be imported this way.
- Mutually exclusive with I(template_name) and I(template_xml).
required: false
type: json
template_xml:
description:
- XML dump of templates to import.
- Multiple templates can be imported this way.
- You are advised to pass XML structure matching the structure used by your version of Zabbix server.
- Custom XML structure can be imported as long as it is valid, but may not yield consistent idempotent
results on subsequent runs.
- Mutually exclusive with I(template_name) and I(template_json).
required: false
version_added: '2.9'
template_groups:
description:
- List of host groups to add template to when template is created.
- Replaces the current host groups the template belongs to if the template is already present.
- Required when creating a new template with C(state=present) and I(template_name) is used.
Not required when updating an existing template.
required: false
type: list
link_templates:
description:
- List of template names to be linked to the template.
- Templates that are not specified and are linked to the existing template will be only unlinked and not
cleared from the template.
required: false
type: list
clear_templates:
description:
- List of template names to be unlinked and cleared from the template.
- This option is ignored if template is being created for the first time.
required: false
type: list
macros:
description:
- List of user macros to create for the template.
- Macros that are not specified and are present on the existing template will be replaced.
- See examples on how to pass macros.
required: false
type: list
suboptions:
name:
description:
- Name of the macro.
- Must be specified in {$NAME} format.
value:
description:
- Value of the macro.
dump_format:
description:
- Format to use when dumping template with C(state=dump).
required: false
choices: [json, xml]
default: "json"
version_added: '2.9'
state:
description:
- Required state of the template.
- On C(state=present) template will be created/imported or updated depending if it is already present.
- On C(state=dump) template content will get dumped into required format specified in I(dump_format).
- On C(state=absent) template will be deleted.
required: false
choices: [present, absent, dump]
default: "present"
extends_documentation_fragment:
- zabbix
'''
EXAMPLES = '''
---
- name: Create a new Zabbix template linked to groups, macros and templates
local_action:
module: zabbix_template
server_url: http://127.0.0.1
login_user: username
login_password: password
template_name: ExampleHost
template_groups:
- Role
- Role2
link_templates:
- Example template1
- Example template2
macros:
- macro: '{$EXAMPLE_MACRO1}'
value: 30000
- macro: '{$EXAMPLE_MACRO2}'
value: 3
- macro: '{$EXAMPLE_MACRO3}'
value: 'Example'
state: present
- name: Unlink and clear templates from the existing Zabbix template
local_action:
module: zabbix_template
server_url: http://127.0.0.1
login_user: username
login_password: password
template_name: ExampleHost
clear_templates:
- Example template3
- Example template4
state: present
- name: Import Zabbix templates from JSON
local_action:
module: zabbix_template
server_url: http://127.0.0.1
login_user: username
login_password: password
template_json: "{{ lookup('file', 'zabbix_apache2.json') }}"
state: present
- name: Import Zabbix templates from XML
local_action:
module: zabbix_template
server_url: http://127.0.0.1
login_user: username
login_password: password
template_xml: "{{ lookup('file', 'zabbix_apache2.xml') }}"
state: present
- name: Import Zabbix template from Ansible dict variable
zabbix_template:
login_user: username
login_password: password
server_url: http://127.0.0.1
template_json:
zabbix_export:
version: '3.2'
templates:
- name: Template for Testing
description: 'Testing template import'
template: Test Template
groups:
- name: Templates
applications:
- name: Test Application
state: present
- name: Configure macros on the existing Zabbix template
local_action:
module: zabbix_template
server_url: http://127.0.0.1
login_user: username
login_password: password
template_name: Template
macros:
- macro: '{$TEST_MACRO}'
value: 'Example'
state: present
- name: Delete Zabbix template
local_action:
module: zabbix_template
server_url: http://127.0.0.1
login_user: username
login_password: password
template_name: Template
state: absent
- name: Dump Zabbix template as JSON
local_action:
module: zabbix_template
server_url: http://127.0.0.1
login_user: username
login_password: password
template_name: Template
state: dump
register: template_dump
- name: Dump Zabbix template as XML
local_action:
module: zabbix_template
server_url: http://127.0.0.1
login_user: username
login_password: password
template_name: Template
dump_format: xml
state: dump
register: template_dump
'''
RETURN = '''
---
template_json:
description: The JSON dump of the template
returned: when state is dump
type: str
sample: {
"zabbix_export":{
"date":"2017-11-29T16:37:24Z",
"templates":[{
"templates":[],
"description":"",
"httptests":[],
"screens":[],
"applications":[],
"discovery_rules":[],
"groups":[{"name":"Templates"}],
"name":"Test Template",
"items":[],
"macros":[],
"template":"test"
}],
"version":"3.2",
"groups":[{
"name":"Templates"
}]
}
}
template_xml:
description: dump of the template in XML representation
returned: when state is dump and dump_format is xml
type: str
sample: |-
<?xml version="1.0" ?>
<zabbix_export>
<version>4.2</version>
<date>2019-07-12T13:37:26Z</date>
<groups>
<group>
<name>Templates</name>
</group>
</groups>
<templates>
<template>
<template>test</template>
<name>Test Template</name>
<description/>
<groups>
<group>
<name>Templates</name>
</group>
</groups>
<applications/>
<items/>
<discovery_rules/>
<httptests/>
<macros/>
<templates/>
<screens/>
<tags/>
</template>
</templates>
</zabbix_export>
'''
import atexit
import json
import traceback
import xml.etree.ElementTree as ET
from distutils.version import LooseVersion
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils._text import to_native
try:
from zabbix_api import ZabbixAPI, ZabbixAPIException
HAS_ZABBIX_API = True
except ImportError:
ZBX_IMP_ERR = traceback.format_exc()
HAS_ZABBIX_API = False
class Template(object):
    """Thin wrapper around the Zabbix API for template CRUD operations.

    Methods report fatal errors through the AnsibleModule (``fail_json``)
    rather than raising, following Ansible module conventions.
    """

    def __init__(self, module, zbx):
        self._module = module
        self._zapi = zbx

    # check if host group exists
    def check_host_group_exist(self, group_names):
        """Fail the module unless every named host group exists; return True."""
        for group_name in group_names:
            result = self._zapi.hostgroup.get({'filter': {'name': group_name}})
            if not result:
                self._module.fail_json(msg="Hostgroup not found: %s" %
                                       group_name)
        return True

    # get group ids by group names
    def get_group_ids_by_group_names(self, group_names):
        """Resolve host-group names to ``[{'groupid': id}, ...]``.

        Returns an empty list for ``None``/empty input; fails the module if
        any name does not exist.
        """
        group_ids = []
        if group_names is None or len(group_names) == 0:
            return group_ids
        if self.check_host_group_exist(group_names):
            group_list = self._zapi.hostgroup.get(
                {'output': 'extend',
                 'filter': {'name': group_names}})
            for group in group_list:
                group_id = group['groupid']
                group_ids.append({'groupid': group_id})
        return group_ids

    def get_template_ids(self, template_list):
        """Resolve template names to template IDs, silently skipping unknowns."""
        template_ids = []
        if template_list is None or len(template_list) == 0:
            return template_ids
        for template in template_list:
            # Use a distinct name for the API result: the original code
            # rebound the iterated list itself (harmless, since the loop
            # iterator was already created, but confusing to readers).
            matches = self._zapi.template.get(
                {'output': 'extend',
                 'filter': {'host': template}})
            if len(matches) < 1:
                continue
            else:
                template_id = matches[0]['templateid']
                template_ids.append(template_id)
        return template_ids

    def add_template(self, template_name, group_ids, link_template_ids, macros):
        """Create a new template; in check mode, exit reporting a change."""
        if self._module.check_mode:
            self._module.exit_json(changed=True)
        self._zapi.template.create({'host': template_name, 'groups': group_ids, 'templates': link_template_ids,
                                    'macros': macros})

    def check_template_changed(self, template_ids, template_groups, link_templates, clear_templates,
                               template_macros, template_content, template_type):
        """Compares template parameters to already existing values if any are found.

        template_json - JSON structures are compared as deep sorted dictionaries,
        template_xml - XML structures are compared as strings, but filtered and formatted first,
        If none above is used, all the other arguments are compared to their existing counterparts
        retrieved from Zabbix API."""
        changed = False
        # Compare filtered and formatted XMLs strings for any changes. It is expected that provided
        # XML has same structure as Zabbix uses (e.g. it was optimally exported via Zabbix GUI or API)
        if template_content is not None and template_type == 'xml':
            existing_template = self.dump_template(template_ids, template_type='xml')
            if self.filter_xml_template(template_content) != self.filter_xml_template(existing_template):
                changed = True
            return changed
        existing_template = self.dump_template(template_ids, template_type='json')
        # Compare JSON objects as deep sorted python dictionaries
        if template_content is not None and template_type == 'json':
            parsed_template_json = self.load_json_template(template_content)
            if self.diff_template(parsed_template_json, existing_template):
                changed = True
            return changed
        # If neither template_json or template_xml were used, user provided all parameters via module options
        if template_groups is not None:
            existing_groups = [g['name'] for g in existing_template['zabbix_export']['groups']]
            if set(template_groups) != set(existing_groups):
                changed = True
        # Check if any new templates would be linked or any existing would be unlinked
        # (assumes the export always carries a 'templates' key on the first
        # template entry — TODO confirm across Zabbix versions)
        exist_child_templates = [t['name'] for t in existing_template['zabbix_export']['templates'][0]['templates']]
        if link_templates is not None:
            if set(link_templates) != set(exist_child_templates):
                changed = True
        # Mark that there will be changes when at least one existing template will be unlinked
        if clear_templates is not None:
            for t in clear_templates:
                if t in exist_child_templates:
                    changed = True
                    break
        if template_macros is not None:
            existing_macros = existing_template['zabbix_export']['templates'][0]['macros']
            if template_macros != existing_macros:
                changed = True
        return changed

    def update_template(self, template_ids, group_ids, link_template_ids, clear_template_ids, template_macros):
        """Apply only the sub-options the user actually provided to the template."""
        template_changes = {}
        if group_ids is not None:
            template_changes.update({'groups': group_ids})
        if link_template_ids is not None:
            template_changes.update({'templates': link_template_ids})
        if clear_template_ids is not None:
            template_changes.update({'templates_clear': clear_template_ids})
        if template_macros is not None:
            template_changes.update({'macros': template_macros})
        if template_changes:
            # If we got here we know that only one template was provided via template_name
            template_changes.update({'templateid': template_ids[0]})
            self._zapi.template.update(template_changes)

    def delete_template(self, templateids):
        """Delete the given templates; in check mode, exit reporting a change."""
        if self._module.check_mode:
            self._module.exit_json(changed=True)
        self._zapi.template.delete(templateids)

    def ordered_json(self, obj):
        # Deep sort json dicts for comparison
        if isinstance(obj, dict):
            return sorted((k, self.ordered_json(v)) for k, v in obj.items())
        if isinstance(obj, list):
            return sorted(self.ordered_json(x) for x in obj)
        else:
            return obj

    def dump_template(self, template_ids, template_type='json'):
        """Export templates via ``configuration.export``.

        JSON exports are returned parsed into a dict; XML exports are
        returned as text.
        """
        if self._module.check_mode:
            self._module.exit_json(changed=True)
        try:
            dump = self._zapi.configuration.export({'format': template_type, 'options': {'templates': template_ids}})
            if template_type == 'xml':
                xml_dump = ET.tostring(ET.fromstring(dump.encode('utf-8')), encoding='utf-8')
                # ET.tostring returns bytes on Python 3; decode so callers get
                # text rather than a "b'...'" repr (str() alone produced that).
                if isinstance(xml_dump, bytes):
                    xml_dump = xml_dump.decode('utf-8')
                return str(xml_dump)
            else:
                return self.load_json_template(dump)
        except ZabbixAPIException as e:
            self._module.fail_json(msg='Unable to export template: %s' % e)

    def diff_template(self, template_json_a, template_json_b):
        # Compare 2 zabbix templates and return True if they differ.
        template_json_a = self.filter_template(template_json_a)
        template_json_b = self.filter_template(template_json_b)
        if self.ordered_json(template_json_a) == self.ordered_json(template_json_b):
            return False
        return True

    def filter_template(self, template_json):
        # Filter the template json to contain only the keys we will update
        keep_keys = set(['graphs', 'templates', 'triggers', 'value_maps'])
        unwanted_keys = set(template_json['zabbix_export']) - keep_keys
        for unwanted_key in unwanted_keys:
            del template_json['zabbix_export'][unwanted_key]
        # Versions older than 2.4 do not support description field within template
        desc_not_supported = False
        if LooseVersion(self._zapi.api_version()).version[:2] < LooseVersion('2.4').version:
            desc_not_supported = True
        # Filter empty attributes from template object to allow accurate comparison
        for template in template_json['zabbix_export']['templates']:
            for key in list(template.keys()):
                if not template[key] or (key == 'description' and desc_not_supported):
                    template.pop(key)
        return template_json

    def filter_xml_template(self, template_xml):
        """Filters out keys from XML template that may vary between exports (e.g date or version) and
        keys that are not imported via this module.

        It is advised that provided XML template exactly matches XML structure used by Zabbix"""
        # Strip last new line and convert string to ElementTree
        parsed_xml_root = self.load_xml_template(template_xml.strip())
        keep_keys = ['graphs', 'templates', 'triggers', 'value_maps']
        # Remove unwanted XML nodes
        for node in list(parsed_xml_root):
            if node.tag not in keep_keys:
                parsed_xml_root.remove(node)
        # Filter empty attributes from template objects to allow accurate comparison
        for template in list(parsed_xml_root.find('templates')):
            for element in list(template):
                if element.text is None and len(list(element)) == 0:
                    template.remove(element)
        # Filter new lines and indentation.  ET.tostring returns bytes on
        # Python 3; the original split(str) raised TypeError there.
        raw_xml = ET.tostring(parsed_xml_root)
        if isinstance(raw_xml, bytes):
            raw_xml = raw_xml.decode('utf-8')
        xml_root_text = list(line.strip() for line in raw_xml.split('\n'))
        return ''.join(xml_root_text)

    def load_json_template(self, template_json):
        """Parse a JSON string, failing the module with details on bad input."""
        try:
            return json.loads(template_json)
        except ValueError as e:
            self._module.fail_json(msg='Invalid JSON provided', details=to_native(e), exception=traceback.format_exc())

    def load_xml_template(self, template_xml):
        """Parse an XML string, failing the module with details on bad input."""
        try:
            return ET.fromstring(template_xml)
        except ET.ParseError as e:
            self._module.fail_json(msg='Invalid XML provided', details=to_native(e), exception=traceback.format_exc())

    def import_template(self, template_content, template_type='json'):
        """Import a JSON/XML template dump via ``configuration.import``."""
        # rules schema latest version
        update_rules = {
            'applications': {
                'createMissing': True,
                'deleteMissing': True
            },
            'discoveryRules': {
                'createMissing': True,
                'updateExisting': True,
                'deleteMissing': True
            },
            'graphs': {
                'createMissing': True,
                'updateExisting': True,
                'deleteMissing': True
            },
            'httptests': {
                'createMissing': True,
                'updateExisting': True,
                'deleteMissing': True
            },
            'items': {
                'createMissing': True,
                'updateExisting': True,
                'deleteMissing': True
            },
            'templates': {
                'createMissing': True,
                'updateExisting': True
            },
            'templateLinkage': {
                'createMissing': True
            },
            'templateScreens': {
                'createMissing': True,
                'updateExisting': True,
                'deleteMissing': True
            },
            'triggers': {
                'createMissing': True,
                'updateExisting': True,
                'deleteMissing': True
            },
            'valueMaps': {
                'createMissing': True,
                'updateExisting': True
            }
        }
        try:
            # old api version support here
            api_version = self._zapi.api_version()
            # updateExisting for application removed from zabbix api after 3.2
            if LooseVersion(api_version).version[:2] <= LooseVersion('3.2').version:
                update_rules['applications']['updateExisting'] = True
            import_data = {'format': template_type, 'source': template_content, 'rules': update_rules}
            self._zapi.configuration.import_(import_data)
        except ZabbixAPIException as e:
            self._module.fail_json(msg='Unable to import template', details=to_native(e),
                                   exception=traceback.format_exc())
def main():
    """Module entry point: parse arguments and dispatch on the requested state."""
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
            template_name=dict(type='str', required=False),
            template_json=dict(type='json', required=False),
            template_xml=dict(type='str', required=False),
            template_groups=dict(type='list', required=False),
            link_templates=dict(type='list', required=False),
            clear_templates=dict(type='list', required=False),
            macros=dict(type='list', required=False),
            dump_format=dict(type='str', required=False, default='json', choices=['json', 'xml']),
            state=dict(default="present", choices=['present', 'absent', 'dump']),
            timeout=dict(type='int', default=10)
        ),
        # Exactly one of the three template sources must be given.
        required_one_of=[
            ['template_name', 'template_json', 'template_xml']
        ],
        mutually_exclusive=[
            ['template_name', 'template_json', 'template_xml']
        ],
        required_if=[
            ['state', 'absent', ['template_name']],
            ['state', 'dump', ['template_name']]
        ],
        supports_check_mode=True
    )
    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)
    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    template_name = module.params['template_name']
    template_json = module.params['template_json']
    template_xml = module.params['template_xml']
    template_groups = module.params['template_groups']
    link_templates = module.params['link_templates']
    clear_templates = module.params['clear_templates']
    template_macros = module.params['macros']
    dump_format = module.params['dump_format']
    state = module.params['state']
    timeout = module.params['timeout']
    zbx = None
    try:
        # Log in once; register logout so the API session is closed at exit.
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        atexit.register(zbx.logout)
    except ZabbixAPIException as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)
    template = Template(module, zbx)
    # Identify template names for IDs retrieval
    # Template names are expected to reside in ['zabbix_export']['templates'][*]['template'] for both data types
    template_content, template_type = None, None
    if template_json is not None:
        template_type = 'json'
        template_content = template_json
        json_parsed = template.load_json_template(template_content)
        template_names = list(t['template'] for t in json_parsed['zabbix_export']['templates'])
    elif template_xml is not None:
        template_type = 'xml'
        template_content = template_xml
        xml_parsed = template.load_xml_template(template_content)
        template_names = list(t.find('template').text for t in list(xml_parsed.find('templates')))
    else:
        template_names = [template_name]
    template_ids = template.get_template_ids(template_names)
    if state == "absent":
        if not template_ids:
            # NOTE(review): "No changed" reads awkwardly (likely meant
            # "no changes"); left untouched here since it is a runtime string.
            module.exit_json(changed=False, msg="Template not found. No changed: %s" % template_name)
        template.delete_template(template_ids)
        module.exit_json(changed=True, result="Successfully deleted template %s" % template_name)
    elif state == "dump":
        if not template_ids:
            module.fail_json(msg='Template not found: %s' % template_name)
        if dump_format == 'json':
            module.exit_json(changed=False, template_json=template.dump_template(template_ids, template_type='json'))
        elif dump_format == 'xml':
            module.exit_json(changed=False, template_xml=template.dump_template(template_ids, template_type='xml'))
    elif state == "present":
        # Load all subelements for template that were provided by user
        group_ids = None
        if template_groups is not None:
            group_ids = template.get_group_ids_by_group_names(template_groups)
        link_template_ids = None
        if link_templates is not None:
            link_template_ids = template.get_template_ids(link_templates)
        clear_template_ids = None
        if clear_templates is not None:
            clear_template_ids = template.get_template_ids(clear_templates)
        if template_macros is not None:
            # Zabbix configuration.export does not differentiate python types (numbers are returned as strings)
            for macroitem in template_macros:
                for key in macroitem:
                    macroitem[key] = str(macroitem[key])
        if not template_ids:
            # Assume new templates are being added when no ID's were found
            if template_content is not None:
                template.import_template(template_content, template_type)
                module.exit_json(changed=True, result="Template import successful")
            else:
                if group_ids is None:
                    module.fail_json(msg='template_groups are required when creating a new Zabbix template')
                template.add_template(template_name, group_ids, link_template_ids, template_macros)
                module.exit_json(changed=True, result="Successfully added template: %s" % template_name)
        else:
            changed = template.check_template_changed(template_ids, template_groups, link_templates, clear_templates,
                                                     template_macros, template_content, template_type)
            if module.check_mode:
                module.exit_json(changed=changed)
            if changed:
                if template_type is not None:
                    template.import_template(template_content, template_type)
                else:
                    template.update_template(template_ids, group_ids, link_template_ids, clear_template_ids,
                                             template_macros)
            module.exit_json(changed=changed, result="Template successfully updated")


if __name__ == '__main__':
    main()
|
{
"content_hash": "04c3069349ce18052135d8a4f5075575",
"timestamp": "",
"source": "github",
"line_count": 737,
"max_line_length": 131,
"avg_line_length": 37.92265943012212,
"alnum_prop": 0.5955848151991127,
"repo_name": "thaim/ansible",
"id": "275288d7f5512b5c8776d1527879ae518a59eb6d",
"size": "28106",
"binary": false,
"copies": "5",
"ref": "refs/heads/fix-broken-link",
"path": "lib/ansible/modules/monitoring/zabbix/zabbix_template.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "246"
}
],
"symlink_target": ""
}
|
# Censor selected words read from stdin (Python 2 script).
# Input format: a word count (unused), the words on one line, a position
# count (unused), then the 1-based positions of the words to censor.
# Output: the words, with each selected word replaced by '*' of equal length.
from sys import stdin
_ = int(stdin.readline())
words = stdin.readline().split()
_ = int(stdin.readline())
positions = [int(x) for x in stdin.readline().split()]
for pos in positions:
    # positions are 1-based, list indices 0-based
    words[pos-1] = '*' * len(words[pos-1])
# NOTE(review): str(word) is redundant (split() yields strings); Python 2
# print statement kept as-is.
print ' '.join(str(word) for word in words)
|
{
"content_hash": "df84c24eb5caf94791f0930975c94713",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 54,
"avg_line_length": 24.90909090909091,
"alnum_prop": 0.6496350364963503,
"repo_name": "AntoineAugusti/katas",
"id": "4cef8a2487b85e37de8de2ac174d1c8fe21b9361",
"size": "332",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "prologin/2015/4_censure.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "2728"
},
{
"name": "Java",
"bytes": "5700"
},
{
"name": "Python",
"bytes": "78940"
}
],
"symlink_target": ""
}
|
"""
Turtle graphics is a popular way for introducing programming to
kids. It was part of the original Logo programming language developed
by Wally Feurzig and Seymour Papert in 1966.
Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it
the command turtle.forward(15), and it moves (on-screen!) 15 pixels in
the direction it is facing, drawing a line as it moves. Give it the
command turtle.right(25), and it rotates in-place 25 degrees clockwise.
By combining together these and similar commands, intricate shapes and
pictures can easily be drawn.
----- turtle.py
This module is an extended reimplementation of turtle.py from the
Python standard distribution up to Python 2.5. (See: http://www.python.org)
It tries to keep the merits of turtle.py and to be (nearly) 100%
compatible with it. This means in the first place to enable the
learning programmer to use all the commands, classes and methods
interactively when using the module from within IDLE run with
the -n switch.
Roughly it has the following features added:
- Better animation of the turtle movements, especially of turning the
turtle. So the turtles can more easily be used as a visual feedback
instrument by the (beginning) programmer.
- Different turtle shapes, gif-images as turtle shapes, user defined
and user controllable turtle shapes, among them compound
(multicolored) shapes. Turtle shapes can be stretched and tilted, which
makes turtles very versatile geometrical objects.
- Fine control over turtle movement and screen updates via delay(),
and enhanced tracer() and speed() methods.
- Aliases for the most commonly used commands, like fd for forward etc.,
following the early Logo traditions. This reduces the boring work of
typing long sequences of commands, which often occur in a natural way
when kids try to program fancy pictures on their first encounter with
turtle graphics.
- Turtles now have an undo()-method with configurable undo-buffer.
- Some simple commands/methods for creating event driven programs
(mouse-, key-, timer-events). Especially useful for programming games.
- A scrollable Canvas class. The default scrollable Canvas can be
extended interactively as needed while playing around with the turtle(s).
- A TurtleScreen class with methods controlling background color or
background image, window and canvas size and other properties of the
TurtleScreen.
- There is a method, setworldcoordinates(), to install a user defined
coordinate-system for the TurtleScreen.
- The implementation uses a 2-vector class named Vec2D, derived from tuple.
This class is public, so it can be imported by the application programmer,
which makes certain types of computations very natural and compact.
- Appearance of the TurtleScreen and the Turtles at startup/import can be
configured by means of a turtle.cfg configuration file.
The default configuration mimics the appearance of the old turtle module.
- If configured appropriately the module reads in docstrings from a docstring
dictionary in some different language, supplied separately and replaces
the English ones by those read in. There is a utility function
write_docstringdict() to write a dictionary with the original (English)
docstrings to disc, so it can serve as a template for translations.
Behind the scenes there are some features included with possible
extensions in mind. These will be commented and documented elsewhere.
"""
# Module version string (historical; this is the Python 2.6 rewrite).
_ver = "turtle 1.0b1 - for Python 2.6 - 30. 5. 2008, 18:08"
#print _ver
import Tkinter as TK
import types
import math
import time
import os
from os.path import isfile, split, join
from copy import deepcopy
from math import * ## for compatibility with old turtle module
# Names exported by this module, grouped by category; these lists are
# also used elsewhere (e.g. to generate the module-level function layer).
_tg_classes = ['ScrolledCanvas', 'TurtleScreen', 'Screen',
        'RawTurtle', 'Turtle', 'RawPen', 'Pen', 'Shape', 'Vec2D']
_tg_screen_functions = ['addshape', 'bgcolor', 'bgpic', 'bye',
        'clearscreen', 'colormode', 'delay', 'exitonclick', 'getcanvas',
        'getshapes', 'listen', 'mode', 'onkey', 'onscreenclick', 'ontimer',
        'register_shape', 'resetscreen', 'screensize', 'setup',
        'setworldcoordinates', 'title', 'tracer', 'turtles', 'update',
        'window_height', 'window_width']
_tg_turtle_functions = ['back', 'backward', 'begin_fill', 'begin_poly', 'bk',
        'circle', 'clear', 'clearstamp', 'clearstamps', 'clone', 'color',
        'degrees', 'distance', 'dot', 'down', 'end_fill', 'end_poly', 'fd',
        'fill', 'fillcolor', 'forward', 'get_poly', 'getpen', 'getscreen',
        'getturtle', 'goto', 'heading', 'hideturtle', 'home', 'ht', 'isdown',
        'isvisible', 'left', 'lt', 'onclick', 'ondrag', 'onrelease', 'pd',
        'pen', 'pencolor', 'pendown', 'pensize', 'penup', 'pos', 'position',
        'pu', 'radians', 'right', 'reset', 'resizemode', 'rt',
        'seth', 'setheading', 'setpos', 'setposition', 'settiltangle',
        'setundobuffer', 'setx', 'sety', 'shape', 'shapesize', 'showturtle',
        'speed', 'st', 'stamp', 'tilt', 'tiltangle', 'towards', 'tracer',
        'turtlesize', 'undo', 'undobufferentries', 'up', 'width',
        'window_height', 'window_width', 'write', 'xcor', 'ycor']
_tg_utilities = ['write_docstringdict', 'done', 'mainloop']
_math_functions = ['acos', 'asin', 'atan', 'atan2', 'ceil', 'cos', 'cosh',
        'e', 'exp', 'fabs', 'floor', 'fmod', 'frexp', 'hypot', 'ldexp', 'log',
        'log10', 'modf', 'pi', 'pow', 'sin', 'sinh', 'sqrt', 'tan', 'tanh']
__all__ = (_tg_classes + _tg_screen_functions + _tg_turtle_functions +
           _tg_utilities + ['Terminator'] + _math_functions)
# Commands that are mere aliases for other commands (fd -> forward etc.);
# used when generating docstrings for the function layer.
_alias_list = ['addshape', 'backward', 'bk', 'fd', 'ht', 'lt', 'pd', 'pos',
               'pu', 'rt', 'seth', 'setpos', 'setposition', 'st',
               'turtlesize', 'up', 'width']
# Default configuration; may be overridden by a turtle.cfg file
# (see config_dict()/readconfig() below).
_CFG = {"width" : 0.5, # Screen
        "height" : 0.75,
        "canvwidth" : 400,
        "canvheight": 300,
        "leftright": None,
        "topbottom": None,
        "mode": "standard", # TurtleScreen
        "colormode": 1.0,
        "delay": 10,
        "undobuffersize": 1000, # RawTurtle
        "shape": "classic",
        "pencolor" : "black",
        "fillcolor" : "black",
        "resizemode" : "noresize",
        "visible" : True,
        "language": "english", # docstrings
        "exampleturtle": "turtle",
        "examplescreen": "screen",
        "title": "Python Turtle Graphics",
        "using_IDLE": False
       }
##print "cwd:", os.getcwd()
##print "__file__:", __file__
##
##def show(dictionary):
## print "=========================="
## for key in sorted(dictionary.keys()):
## print key, ":", dictionary[key]
## print "=========================="
## print
def config_dict(filename):
    """Convert content of config-file into dictionary.

    Each significant line must have the form  key = value.
    Blank lines and lines starting with '#' are ignored.
    Values are converted to bool/None/int/float where possible;
    anything else is kept as a string.
    """
    f = open(filename, "r")
    try:
        cfglines = f.readlines()
    finally:
        f.close()
    cfgdict = {}
    for line in cfglines:
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        try:
            # split only at the FIRST "=" so that values may themselves
            # contain "=" (e.g. a window title)
            key, value = line.split("=", 1)
        except ValueError:
            print("Bad line in config-file %s:\n%s" % (filename, line))
            continue
        key = key.strip()
        value = value.strip()
        if value in ["True", "False", "None", "''", '""']:
            value = eval(value)
        else:
            try:
                if "." in value:
                    value = float(value)
                else:
                    value = int(value)
            except ValueError:
                pass # value need not be converted
        cfgdict[key] = value
    return cfgdict
def readconfig(cfgdict):
    """Read config-files and update the configuration-dict _CFG.

    If there is a turtle.cfg file in the current working directory,
    read it. If it contains an importconfig-value, say 'myway',
    construct the filename turtle_myway.cfg, else use turtle.cfg,
    and read that file from the directory turtle.py was imported from.
    The configuration dictionary is updated first from the config-file
    in the import directory, then from the one in the current working
    directory, so local settings win.
    If no config-file is found, the default configuration is kept.
    """
    default_cfg = "turtle.cfg"
    local_settings = {}
    module_settings = {}
    if isfile(default_cfg):
        local_settings = config_dict(default_cfg)
        if "importconfig" in local_settings:
            default_cfg = "turtle_%s.cfg" % local_settings["importconfig"]
    try:
        module_dir = split(__file__)[0]
        module_cfg_path = join(module_dir, default_cfg)
    except BaseException:
        module_cfg_path = ""
    if isfile(module_cfg_path):
        module_settings = config_dict(module_cfg_path)
    # import-directory settings first, then local overrides
    _CFG.update(module_settings)
    _CFG.update(local_settings)
# Load the user's/installation's configuration at import time.
# Any failure is non-fatal: the built-in defaults in _CFG stay in place.
try:
    readconfig(_CFG)
except BaseException:
    print "No configfile read, reason unknown"
class Vec2D(tuple):
    """An immutable two-dimensional vector, derived from tuple.

    Used as a helper class for implementing turtle graphics; may
    also be useful in turtle graphics programs. Since a Vec2D is a
    tuple, it supports everything a tuple does.

    For vectors a, b and a number k the following are provided:
        a + b            vector addition
        a - b            vector subtraction
        a * b            inner (dot) product
        k * a  and  a * k   multiplication with a scalar
        abs(a)           length of a
        a.rotate(angle)  rotation by angle degrees, counterclockwise
    """
    def __new__(cls, x, y):
        return tuple.__new__(cls, (x, y))
    def __add__(self, other):
        sx, sy = self
        return Vec2D(sx + other[0], sy + other[1])
    def __mul__(self, other):
        sx, sy = self
        if isinstance(other, Vec2D):
            # vector * vector -> inner product (a number)
            return sx * other[0] + sy * other[1]
        # vector * scalar -> scaled vector
        return Vec2D(sx * other, sy * other)
    def __rmul__(self, other):
        # scalar * vector; for any other operand type this deliberately
        # falls through and yields None (historical behaviour)
        if isinstance(other, (int, float)):
            sx, sy = self
            return Vec2D(sx * other, sy * other)
    def __sub__(self, other):
        sx, sy = self
        return Vec2D(sx - other[0], sy - other[1])
    def __neg__(self):
        sx, sy = self
        return Vec2D(-sx, -sy)
    def __abs__(self):
        sx, sy = self
        return (sx**2 + sy**2)**0.5
    def rotate(self, angle):
        """Return a new vector: self rotated counterclockwise by angle
        degrees."""
        rad = angle * math.pi / 180.0
        c, s = math.cos(rad), math.sin(rad)
        sx, sy = self
        return Vec2D(sx*c + (-sy)*s, sy*c + sx*s)
    def __getnewargs__(self):
        # makes Vec2D instances picklable/copyable
        sx, sy = self
        return (sx, sy)
    def __repr__(self):
        return "(%.2f,%.2f)" % self
##############################################################################
### From here up to line : Tkinter - Interface for turtle.py ###
### May be replaced by an interface to some different graphics toolkit ###
##############################################################################
## helper functions for Scrolled Canvas, to forward Canvas-methods
## to ScrolledCanvas class
def __methodDict(cls, _dict):
    """Collect all plain functions defined on cls and its base classes
    into _dict (helper for ScrolledCanvas method forwarding).

    Bases are processed first (in reverse order), so a method defined
    on cls itself overrides one inherited from a base.
    """
    for base in reversed(cls.__bases__):
        __methodDict(base, _dict)
    for name, attr in cls.__dict__.items():
        if type(attr) == types.FunctionType:
            _dict[name] = attr
def __methods(cls):
    """Return the names of all methods of cls, including inherited ones
    (helper for ScrolledCanvas method forwarding)."""
    collected = {}
    __methodDict(cls, collected)
    return collected.keys()
# Source-code template for the forwarding methods created in
# __forwardmethods(): each generated method simply delegates to the
# method of the same name on the attribute named by %(attribute)s.
__stringBody = (
    'def %(method)s(self, *args, **kw): return ' +
    'self.%(attribute)s.%(method)s(*args, **kw)')
def __forwardmethods(fromClass, toClass, toPart, exclude = ()):
    """Helper functions for Scrolled Canvas, used to forward
    ScrolledCanvas-methods to Tkinter.Canvas class.

    For every public method of toClass that fromClass does not already
    define (and that is not listed in exclude), install on fromClass a
    generated method that delegates to the attribute named toPart.
    """
    _dict = {}
    __methodDict(toClass, _dict)
    # NOTE: dict.keys() returns a list in Python 2, so deleting entries
    # while looping over it is safe here.
    for ex in _dict.keys():
        if ex[:1] == '_' or ex[-1:] == '_':
            del _dict[ex]
    for ex in exclude:
        if ex in _dict:
            del _dict[ex]
    for ex in __methods(fromClass):
        if ex in _dict:
            del _dict[ex]
    for method, func in _dict.items():
        d = {'method': method, 'func': func}
        if type(toPart) == types.StringType:
            execString = \
                __stringBody % {'method' : method, 'attribute' : toPart}
        # compile the forwarding method in namespace d, then install it
        exec execString in d
        fromClass.__dict__[method] = d[method]
class ScrolledCanvas(TK.Frame):
    """Modeled after the scrolled canvas class from Grayons's Tkinter book.
    Used as the default canvas, which pops up automatically when
    using turtle graphics functions or the Turtle class.

    Wraps a TK.Canvas plus horizontal/vertical scrollbars; scrollbars
    are shown only while the canvas is larger than the window.
    """
    def __init__(self, master, width=500, height=350,
                 canvwidth=600, canvheight=500):
        TK.Frame.__init__(self, master, width=width, height=height)
        self._rootwindow = self.winfo_toplevel()
        self.width, self.height = width, height
        self.canvwidth, self.canvheight = canvwidth, canvheight
        self.bg = "white"
        # NOTE: canvas and scrollbars are created as children of master
        # (not of this frame); they are placed into this frame via in_
        # in the grid() calls below.
        self._canvas = TK.Canvas(master, width=width, height=height,
                                 bg=self.bg, relief=TK.SUNKEN, borderwidth=2)
        self.hscroll = TK.Scrollbar(master, command=self._canvas.xview,
                                    orient=TK.HORIZONTAL)
        self.vscroll = TK.Scrollbar(master, command=self._canvas.yview)
        self._canvas.configure(xscrollcommand=self.hscroll.set,
                               yscrollcommand=self.vscroll.set)
        self.rowconfigure(0, weight=1, minsize=0)
        self.columnconfigure(0, weight=1, minsize=0)
        self._canvas.grid(padx=1, in_ = self, pady=1, row=0,
                column=0, rowspan=1, columnspan=1, sticky='news')
        self.vscroll.grid(padx=1, in_ = self, pady=1, row=0,
                column=1, rowspan=1, columnspan=1, sticky='news')
        self.hscroll.grid(padx=1, in_ = self, pady=1, row=1,
                column=0, rowspan=1, columnspan=1, sticky='news')
        self.reset()
        self._rootwindow.bind('<Configure>', self.onResize)
    def reset(self, canvwidth=None, canvheight=None, bg = None):
        """Adjust canvas and scrollbars according to given canvas size."""
        if canvwidth:
            self.canvwidth = canvwidth
        if canvheight:
            self.canvheight = canvheight
        if bg:
            self.bg = bg
        # scrollregion is centred on (0, 0); Tkinter silently ignores
        # bg=None, so passing it here is harmless
        self._canvas.config(bg=bg,
                        scrollregion=(-self.canvwidth//2, -self.canvheight//2,
                                       self.canvwidth//2, self.canvheight//2))
        # recentre the visible part of the canvas (30 px fudge for borders)
        self._canvas.xview_moveto(0.5*(self.canvwidth - self.width + 30) /
                                                               self.canvwidth)
        self._canvas.yview_moveto(0.5*(self.canvheight- self.height + 30) /
                                                              self.canvheight)
        self.adjustScrolls()
    def adjustScrolls(self):
        """ Adjust scrollbars according to window- and canvas-size.
        """
        cwidth = self._canvas.winfo_width()
        cheight = self._canvas.winfo_height()
        self._canvas.xview_moveto(0.5*(self.canvwidth-cwidth)/self.canvwidth)
        self._canvas.yview_moveto(0.5*(self.canvheight-cheight)/self.canvheight)
        # show scrollbars only when the canvas exceeds the window size
        if cwidth < self.canvwidth or cheight < self.canvheight:
            self.hscroll.grid(padx=1, in_ = self, pady=1, row=1,
                              column=0, rowspan=1, columnspan=1, sticky='news')
            self.vscroll.grid(padx=1, in_ = self, pady=1, row=0,
                              column=1, rowspan=1, columnspan=1, sticky='news')
        else:
            self.hscroll.grid_forget()
            self.vscroll.grid_forget()
    def onResize(self, event):
        """self-explanatory"""
        self.adjustScrolls()
    def bbox(self, *args):
        """ 'forward' method, which canvas itself has inherited...
        """
        return self._canvas.bbox(*args)
    def cget(self, *args, **kwargs):
        """ 'forward' method, which canvas itself has inherited...
        """
        return self._canvas.cget(*args, **kwargs)
    def config(self, *args, **kwargs):
        """ 'forward' method, which canvas itself has inherited...
        """
        self._canvas.config(*args, **kwargs)
    def bind(self, *args, **kwargs):
        """ 'forward' method, which canvas itself has inherited...
        """
        self._canvas.bind(*args, **kwargs)
    def unbind(self, *args, **kwargs):
        """ 'forward' method, which canvas itself has inherited...
        """
        self._canvas.unbind(*args, **kwargs)
    def focus_force(self):
        """ 'forward' method, which canvas itself has inherited...
        """
        self._canvas.focus_force()
# Install forwarding methods so ScrolledCanvas supports the remaining
# TK.Canvas API by delegating to its internal '_canvas' attribute.
__forwardmethods(ScrolledCanvas, TK.Canvas, '_canvas')
class _Root(TK.Tk):
    """Root class for Screen based on Tkinter.

    Thin wrapper around TK.Tk that owns the ScrolledCanvas used by
    turtle graphics and exposes a few window-management helpers.
    """
    def __init__(self):
        TK.Tk.__init__(self)
    def setupcanvas(self, width, height, cwidth, cheight):
        """Create the ScrolledCanvas for this window and pack it."""
        self._canvas = ScrolledCanvas(self, width, height, cwidth, cheight)
        self._canvas.pack(expand=1, fill="both")
    def _getcanvas(self):
        return self._canvas
    def set_geometry(self, width, height, startx, starty):
        """Set window size and position (Tk geometry string)."""
        self.geometry("%dx%d%+d%+d"%(width, height, startx, starty))
    def ondestroy(self, destroy):
        """Call destroy when the window's close button is pressed."""
        self.wm_protocol("WM_DELETE_WINDOW", destroy)
    def win_width(self):
        # screen (not window) width in pixels
        return self.winfo_screenwidth()
    def win_height(self):
        # screen (not window) height in pixels
        return self.winfo_screenheight()
# Re-export the plain Tkinter Canvas under this module's namespace.
Canvas = TK.Canvas
class TurtleScreenBase(object):
    """Provide the basic graphics functionality.
       Interface between Tkinter and turtle.py.

       To port turtle.py to some different graphics toolkit
       a corresponding TurtleScreenBase class has to be implemented.

       Convention used throughout: turtle coordinates have y growing
       upwards, the Tk canvas has y growing downwards, so y values are
       negated (and scaled by self.yscale) at this interface.
    """
    @staticmethod
    def _blankimage():
        """return a blank image object
        """
        img = TK.PhotoImage(width=1, height=1)
        img.blank()
        return img
    @staticmethod
    def _image(filename):
        """return an image object containing the
        imagedata from a gif-file named filename.
        """
        return TK.PhotoImage(file=filename)
    def __init__(self, cv):
        self.cv = cv
        if isinstance(cv, ScrolledCanvas):
            w = self.cv.canvwidth
            h = self.cv.canvheight
        else:  # expected: ordinary TK.Canvas
            w = int(self.cv.cget("width"))
            h = int(self.cv.cget("height"))
            # centre the scrollregion on the origin
            self.cv.config(scrollregion = (-w//2, -h//2, w//2, h//2 ))
        self.canvwidth = w
        self.canvheight = h
        self.xscale = self.yscale = 1.0
    def _createpoly(self):
        """Create an invisible polygon item on canvas self.cv)
        """
        return self.cv.create_polygon((0, 0, 0, 0, 0, 0), fill="", outline="")
    def _drawpoly(self, polyitem, coordlist, fill=None,
                  outline=None, width=None, top=False):
        """Configure polygonitem polyitem according to provided
        arguments:
        coordlist is sequence of coordinates
        fill is filling color
        outline is outline color
        top is a boolean value, which specifies if polyitem
        will be put on top of the canvas' displaylist so it
        will not be covered by other items.
        """
        cl = []
        for x, y in coordlist:
            # scale and flip y for the canvas coordinate system
            cl.append(x * self.xscale)
            cl.append(-y * self.yscale)
        self.cv.coords(polyitem, *cl)
        if fill is not None:
            self.cv.itemconfigure(polyitem, fill=fill)
        if outline is not None:
            self.cv.itemconfigure(polyitem, outline=outline)
        if width is not None:
            self.cv.itemconfigure(polyitem, width=width)
        if top:
            self.cv.tag_raise(polyitem)
    def _createline(self):
        """Create an invisible line item on canvas self.cv)
        """
        return self.cv.create_line(0, 0, 0, 0, fill="", width=2,
                                   capstyle = TK.ROUND)
    def _drawline(self, lineitem, coordlist=None,
                  fill=None, width=None, top=False):
        """Configure lineitem according to provided arguments:
        coordlist is sequence of coordinates
        fill is drawing color
        width is width of drawn line.
        top is a boolean value, which specifies if polyitem
        will be put on top of the canvas' displaylist so it
        will not be covered by other items.
        """
        if coordlist is not None:
            cl = []
            for x, y in coordlist:
                # scale and flip y for the canvas coordinate system
                cl.append(x * self.xscale)
                cl.append(-y * self.yscale)
            self.cv.coords(lineitem, *cl)
        if fill is not None:
            self.cv.itemconfigure(lineitem, fill=fill)
        if width is not None:
            self.cv.itemconfigure(lineitem, width=width)
        if top:
            self.cv.tag_raise(lineitem)
    def _delete(self, item):
        """Delete graphics item from canvas.
        If item is"all" delete all graphics items.
        """
        self.cv.delete(item)
    def _update(self):
        """Redraw graphics items on canvas
        """
        self.cv.update()
    def _delay(self, delay):
        """Delay subsequent canvas actions for delay ms."""
        self.cv.after(delay)
    def _iscolorstring(self, color):
        """Check if the string color is a legal Tkinter color string.
        """
        try:
            rgb = self.cv.winfo_rgb(color)
            ok = True
        except TK.TclError:
            ok = False
        return ok
    def _bgcolor(self, color=None):
        """Set canvas' backgroundcolor if color is not None,
        else return backgroundcolor."""
        if color is not None:
            self.cv.config(bg = color)
            self._update()
        else:
            return self.cv.cget("bg")
    def _write(self, pos, txt, align, font, pencolor):
        """Write txt at pos in canvas with specified font
        and color.
        Return text item and x-coord of right bottom corner
        of text's bounding box."""
        x, y = pos
        x = x * self.xscale
        y = y * self.yscale
        # map turtle alignment to the Tk anchor of the text item
        anchor = {"left":"sw", "center":"s", "right":"se" }
        item = self.cv.create_text(x-1, -y, text = txt, anchor = anchor[align],
                                   fill = pencolor, font = font)
        x0, y0, x1, y1 = self.cv.bbox(item)
        self.cv.update()
        return item, x1-1
##    def _dot(self, pos, size, color):
##        """may be implemented for some other graphics toolkit"""
    def _onclick(self, item, fun, num=1, add=None):
        """Bind fun to mouse-click event on turtle.
        fun must be a function with two arguments, the coordinates
        of the clicked point on the canvas.
        num, the number of the mouse-button defaults to 1
        """
        if fun is None:
            self.cv.tag_unbind(item, "<Button-%s>" % num)
        else:
            def eventfun(event):
                # convert pixel position to turtle coordinates
                x, y = (self.cv.canvasx(event.x)/self.xscale,
                        -self.cv.canvasy(event.y)/self.yscale)
                fun(x, y)
            self.cv.tag_bind(item, "<Button-%s>" % num, eventfun, add)
    def _onrelease(self, item, fun, num=1, add=None):
        """Bind fun to mouse-button-release event on turtle.
        fun must be a function with two arguments, the coordinates
        of the point on the canvas where mouse button is released.
        num, the number of the mouse-button defaults to 1

        If a turtle is clicked, first _onclick-event will be performed,
        then _onscreensclick-event.
        """
        if fun is None:
            self.cv.tag_unbind(item, "<Button%s-ButtonRelease>" % num)
        else:
            def eventfun(event):
                x, y = (self.cv.canvasx(event.x)/self.xscale,
                        -self.cv.canvasy(event.y)/self.yscale)
                fun(x, y)
            self.cv.tag_bind(item, "<Button%s-ButtonRelease>" % num,
                             eventfun, add)
    def _ondrag(self, item, fun, num=1, add=None):
        """Bind fun to mouse-move-event (with pressed mouse button) on turtle.
        fun must be a function with two arguments, the coordinates of the
        actual mouse position on the canvas.
        num, the number of the mouse-button defaults to 1

        Every sequence of mouse-move-events on a turtle is preceded by a
        mouse-click event on that turtle.
        """
        if fun is None:
            self.cv.tag_unbind(item, "<Button%s-Motion>" % num)
        else:
            def eventfun(event):
                # swallow any error while dragging (e.g. window closed)
                try:
                    x, y = (self.cv.canvasx(event.x)/self.xscale,
                            -self.cv.canvasy(event.y)/self.yscale)
                    fun(x, y)
                except BaseException:
                    pass
            self.cv.tag_bind(item, "<Button%s-Motion>" % num, eventfun, add)
    def _onscreenclick(self, fun, num=1, add=None):
        """Bind fun to mouse-click event on canvas.
        fun must be a function with two arguments, the coordinates
        of the clicked point on the canvas.
        num, the number of the mouse-button defaults to 1

        If a turtle is clicked, first _onclick-event will be performed,
        then _onscreensclick-event.
        """
        if fun is None:
            self.cv.unbind("<Button-%s>" % num)
        else:
            def eventfun(event):
                x, y = (self.cv.canvasx(event.x)/self.xscale,
                        -self.cv.canvasy(event.y)/self.yscale)
                fun(x, y)
            self.cv.bind("<Button-%s>" % num, eventfun, add)
    def _onkey(self, fun, key):
        """Bind fun to key-release event of key.
        Canvas must have focus. See method listen
        """
        if fun is None:
            self.cv.unbind("<KeyRelease-%s>" % key, None)
        else:
            def eventfun(event):
                fun()
            self.cv.bind("<KeyRelease-%s>" % key, eventfun)
    def _listen(self):
        """Set focus on canvas (in order to collect key-events)
        """
        self.cv.focus_force()
    def _ontimer(self, fun, t):
        """Install a timer, which calls fun after t milliseconds.
        """
        if t == 0:
            self.cv.after_idle(fun)
        else:
            self.cv.after(t, fun)
    def _createimage(self, image):
        """Create and return image item on canvas.
        """
        return self.cv.create_image(0, 0, image=image)
    def _drawimage(self, item, pos, image):
        """Configure image item as to draw image object
        at position (x,y) on canvas)
        """
        x, y = pos
        self.cv.coords(item, (x * self.xscale, -y * self.yscale))
        self.cv.itemconfig(item, image=image)
    def _setbgpic(self, item, image):
        """Configure image item as to draw image object
        at center of canvas. Set item to the first item
        in the displaylist, so it will be drawn below
        any other item ."""
        self.cv.itemconfig(item, image=image)
        self.cv.tag_lower(item)
    def _type(self, item):
        """Return 'line' or 'polygon' or 'image' depending on
        type of item.
        """
        return self.cv.type(item)
    def _pointlist(self, item):
        """returns list of coordinate-pairs of points of item
        Example (for insiders):
        >>> from turtle import *
        >>> getscreen()._pointlist(getturtle().turtle._item)
        [(0.0, 9.9999999999999982), (0.0, -9.9999999999999982),
        (9.9999999999999982, 0.0)]
        >>> """
        cl = self.cv.coords(item)
        # canvas returns a flat list; re-pair and flip y back
        pl = [(cl[i], -cl[i+1]) for i in range(0, len(cl), 2)]
        return pl
    def _setscrollregion(self, srx1, sry1, srx2, sry2):
        # set the canvas scrollregion in raw canvas coordinates
        self.cv.config(scrollregion=(srx1, sry1, srx2, sry2))
    def _rescale(self, xscalefactor, yscalefactor):
        # rescale the raw canvas coordinates of ALL items in place;
        # used when the world coordinate system changes
        items = self.cv.find_all()
        for item in items:
            coordinates = self.cv.coords(item)
            newcoordlist = []
            while coordinates:
                x, y = coordinates[:2]
                newcoordlist.append(x * xscalefactor)
                newcoordlist.append(y * yscalefactor)
                coordinates = coordinates[2:]
            self.cv.coords(item, *newcoordlist)
    def _resize(self, canvwidth=None, canvheight=None, bg=None):
        """Resize the canvas the turtles are drawing on. Does
        not alter the drawing window.
        """
        # needs amendment
        if not isinstance(self.cv, ScrolledCanvas):
            return self.canvwidth, self.canvheight
        if canvwidth is canvheight is bg is None:
            return self.cv.canvwidth, self.cv.canvheight
        if canvwidth is not None:
            self.canvwidth = canvwidth
        if canvheight is not None:
            self.canvheight = canvheight
        self.cv.reset(canvwidth, canvheight, bg)
    def _window_size(self):
        """ Return the width and height of the turtle window.
        """
        width = self.cv.winfo_width()
        if width <= 1:  # the window isn't managed by a geometry manager
            width = self.cv['width']
        height = self.cv.winfo_height()
        if height <= 1: # the window isn't managed by a geometry manager
            height = self.cv['height']
        return width, height
##############################################################################
### End of Tkinter - interface ###
##############################################################################
class Terminator(Exception):
    """Raised in TurtleScreen.update() when _RUNNING has been set to
    False; this stops execution of a turtle graphics script.
    Main purpose: use in the Demo-Viewer turtle.Demo.py.
    """
class TurtleGraphicsError(Exception):
    """Exception raised for errors specific to turtle graphics
    (bad colors, unknown shapes, unknown modes, ...).
    """
class Shape(object):
    """Data structure modeling turtle shapes.

    Attribute _type is one of "polygon", "image", "compound".
    Attribute _data is - depending on _type - a tuple of coordinate
    pairs, an image object, or a list built up with addcomponent().
    """
    def __init__(self, type_, data=None):
        self._type = type_
        if type_ == "compound":
            # components are added later via addcomponent()
            self._data = []
            return
        if type_ == "polygon":
            self._data = tuple(data) if isinstance(data, list) else data
        elif type_ == "image":
            if isinstance(data, basestring):
                if data.lower().endswith(".gif") and isfile(data):
                    data = TurtleScreen._image(data)
            # anything else is assumed to be a ready-made Photoimage
            self._data = data
        else:
            raise TurtleGraphicsError("There is no shape type %s" % type_)

    def addcomponent(self, poly, fill, outline=None):
        """Add component to a shape of type compound.

        Arguments:
        poly    -- a polygon, i. e. a tuple of number pairs
        fill    -- the fillcolor of the component
        outline -- the outline color (defaults to the fill color)

        Example:
        >>> poly = ((0,0),(10,-5),(0,10),(-10,-5))
        >>> s = Shape("compound")
        >>> s.addcomponent(poly, "red", "blue")
        >>> # .. add more components and then use register_shape()
        """
        if self._type != "compound":
            raise TurtleGraphicsError("Cannot add component to %s Shape"
                                                                % self._type)
        self._data.append([poly, fill, fill if outline is None else outline])
class Tbuffer(object):
    """Ring buffer used as undobuffer for RawTurtle objects.

    Empty slots are marked by the list [None]. When self.cumulate is
    True, pushed items are appended to the current slot instead of
    advancing the ring pointer (used to undo compound actions at once).
    """
    def __init__(self, bufsize=10):
        self.bufsize = bufsize
        self.buffer = [[None]] * bufsize
        self.ptr = -1
        self.cumulate = False
    def reset(self, bufsize=None):
        """Empty the buffer; optionally resize it to bufsize slots."""
        if bufsize is None:
            self.buffer[:] = [[None] for _ in range(self.bufsize)]
        else:
            self.bufsize = bufsize
            self.buffer = [[None]] * bufsize
        self.ptr = -1
    def push(self, item):
        if self.bufsize <= 0:
            return
        if self.cumulate:
            self.buffer[self.ptr].append(item)
        else:
            self.ptr = (self.ptr + 1) % self.bufsize
            self.buffer[self.ptr] = item
    def pop(self):
        if self.bufsize <= 0:
            return None
        item = self.buffer[self.ptr]
        # NOTE(review): slots hold the marker [None], never None itself,
        # so this guard is practically never taken; kept for safety.
        if item is None:
            return None
        self.buffer[self.ptr] = [None]
        self.ptr = (self.ptr - 1) % self.bufsize
        return item
    def nr_of_items(self):
        return self.bufsize - self.buffer.count([None])
    def __repr__(self):
        return "%s %s" % (self.buffer, self.ptr)
class TurtleScreen(TurtleScreenBase):
    """Provides screen oriented methods like setbg etc.

    Only relies upon the methods of TurtleScreenBase and NOT
    upon components of the underlying graphics toolkit -
    which is Tkinter in this case.
    """
    # _STANDARD_DELAY = 5
    # Class-wide flag: set to False (e.g. by the demo viewer) to make the
    # next call of _incrementudc() raise Terminator and stop the script.
    _RUNNING = True
    def __init__(self, cv, mode=_CFG["mode"],
                 colormode=_CFG["colormode"], delay=_CFG["delay"]):
        # built-in shapes, keyed by name; user shapes are added via
        # register_shape()
        self._shapes = {
                   "arrow" : Shape("polygon", ((-10,0), (10,0), (0,10))),
                  "turtle" : Shape("polygon", ((0,16), (-2,14), (-1,10), (-4,7),
                              (-7,9), (-9,8), (-6,5), (-7,1), (-5,-3), (-8,-6),
                              (-6,-8), (-4,-5), (0,-7), (4,-5), (6,-8), (8,-6),
                              (5,-3), (7,1), (6,5), (9,8), (7,9), (4,7), (1,10),
                              (2,14))),
                  "circle" : Shape("polygon", ((10,0), (9.51,3.09), (8.09,5.88),
                              (5.88,8.09), (3.09,9.51), (0,10), (-3.09,9.51),
                              (-5.88,8.09), (-8.09,5.88), (-9.51,3.09), (-10,0),
                              (-9.51,-3.09), (-8.09,-5.88), (-5.88,-8.09),
                              (-3.09,-9.51), (-0.00,-10.00), (3.09,-9.51),
                              (5.88,-8.09), (8.09,-5.88), (9.51,-3.09))),
                  "square" : Shape("polygon", ((10,-10), (10,10), (-10,10),
                              (-10,-10))),
                "triangle" : Shape("polygon", ((10,-5.77), (0,11.55),
                              (-10,-5.77))),
                  "classic": Shape("polygon", ((0,0),(-5,-9),(0,-7),(5,-9))),
                   "blank" : Shape("image", self._blankimage())
                  }
        self._bgpics = {"nopic" : ""}
        TurtleScreenBase.__init__(self, cv)
        self._mode = mode
        self._delayvalue = delay
        # NOTE(review): the colormode parameter is ignored here and the
        # value is taken from _CFG instead; self.clear() below resets
        # _colormode from _CFG anyway, so fixing this would be moot.
        self._colormode = _CFG["colormode"]
        self._keys = []
        self.clear()
    def clear(self):
        """Delete all drawings and all turtles from the TurtleScreen.

        Reset empty TurtleScreen to its initial state: white background,
        no backgroundimage, no eventbindings and tracing on.

        No argument.

        Example (for a TurtleScreen instance named screen):
        >>> screen.clear()

        Note: this method is not available as function.
        """
        self._delayvalue = _CFG["delay"]
        self._colormode = _CFG["colormode"]
        self._delete("all")
        self._bgpic = self._createimage("")
        self._bgpicname = "nopic"
        self._tracing = 1
        self._updatecounter = 0
        self._turtles = []
        self.bgcolor("white")
        # unbind all mouse buttons and all previously bound keys
        for btn in 1, 2, 3:
            self.onclick(None, btn)
        for key in self._keys[:]:
            self.onkey(None, key)
        # forget the module-level anonymous turtle (Turtle is defined
        # further down in this module)
        Turtle._pen = None
def mode(self, mode=None):
"""Set turtle-mode ('standard', 'logo' or 'world') and perform reset.
Optional argument:
mode -- one of the strings 'standard', 'logo' or 'world'
Mode 'standard' is compatible with turtle.py.
Mode 'logo' is compatible with most Logo-Turtle-Graphics.
Mode 'world' uses userdefined 'worldcoordinates'. *Attention*: in
this mode angles appear distorted if x/y unit-ratio doesn't equal 1.
If mode is not given, return the current mode.
Mode Initial turtle heading positive angles
------------|-------------------------|-------------------
'standard' to the right (east) counterclockwise
'logo' upward (north) clockwise
Examples:
>>> mode('logo') # resets turtle heading to north
>>> mode()
'logo'
"""
if mode is None:
return self._mode
mode = mode.lower()
if mode not in ["standard", "logo", "world"]:
raise TurtleGraphicsError("No turtle-graphics-mode %s" % mode)
self._mode = mode
if mode in ["standard", "logo"]:
self._setscrollregion(-self.canvwidth//2, -self.canvheight//2,
self.canvwidth//2, self.canvheight//2)
self.xscale = self.yscale = 1.0
self.reset()
    def setworldcoordinates(self, llx, lly, urx, ury):
        """Set up a user defined coordinate-system.

        Arguments:
        llx -- a number, x-coordinate of lower left corner of canvas
        lly -- a number, y-coordinate of lower left corner of canvas
        urx -- a number, x-coordinate of upper right corner of canvas
        ury -- a number, y-coordinate of upper right corner of canvas

        Set up user coodinat-system and switch to mode 'world' if necessary.
        This performs a screen.reset. If mode 'world' is already active,
        all drawings are redrawn according to the new coordinates.

        But ATTENTION: in user-defined coordinatesystems angles may appear
        distorted. (see Screen.mode())

        Example (for a TurtleScreen instance named screen):
        >>> screen.setworldcoordinates(-10,-0.5,50,1.5)
        >>> for _ in range(36):
        ...     left(10)
        ...     forward(0.5)
        """
        if self.mode() != "world":
            self.mode("world")
        # float() forces true division below (Python 2 int division)
        xspan = float(urx - llx)
        yspan = float(ury - lly)
        wx, wy = self._window_size()
        self.screensize(wx-20, wy-20)
        oldxscale, oldyscale = self.xscale, self.yscale
        self.xscale = self.canvwidth / xspan
        self.yscale = self.canvheight / yspan
        srx1 = llx * self.xscale
        # canvas y-axis grows downwards, hence -ury for the top edge
        sry1 = -ury * self.yscale
        srx2 = self.canvwidth + srx1
        sry2 = self.canvheight + sry1
        self._setscrollregion(srx1, sry1, srx2, sry2)
        # redraw all existing items in the new scale
        self._rescale(self.xscale/oldxscale, self.yscale/oldyscale)
        self.update()
def register_shape(self, name, shape=None):
"""Adds a turtle shape to TurtleScreen's shapelist.
Arguments:
(1) name is the name of a gif-file and shape is None.
Installs the corresponding image shape.
!! Image-shapes DO NOT rotate when turning the turtle,
!! so they do not display the heading of the turtle!
(2) name is an arbitrary string and shape is a tuple
of pairs of coordinates. Installs the corresponding
polygon shape
(3) name is an arbitrary string and shape is a
(compound) Shape object. Installs the corresponding
compound shape.
To use a shape, you have to issue the command shape(shapename).
call: register_shape("turtle.gif")
--or: register_shape("tri", ((0,0), (10,10), (-10,10)))
Example (for a TurtleScreen instance named screen):
>>> screen.register_shape("triangle", ((5,-3),(0,5),(-5,-3)))
"""
if shape is None:
# image
if name.lower().endswith(".gif"):
shape = Shape("image", self._image(name))
else:
raise TurtleGraphicsError("Bad arguments for register_shape.\n"
+ "Use help(register_shape)" )
elif isinstance(shape, tuple):
shape = Shape("polygon", shape)
## else shape assumed to be Shape-instance
self._shapes[name] = shape
# print "shape added:" , self._shapes
def _colorstr(self, color):
"""Return color string corresponding to args.
Argument may be a string or a tuple of three
numbers corresponding to actual colormode,
i.e. in the range 0<=n<=colormode.
If the argument doesn't represent a color,
an error is raised.
"""
if len(color) == 1:
color = color[0]
if isinstance(color, basestring):
if self._iscolorstring(color) or color == "":
return color
else:
raise TurtleGraphicsError("bad color string: %s" % str(color))
try:
r, g, b = color
except (TypeError, ValueError):
raise TurtleGraphicsError("bad color arguments: %s" % str(color))
if self._colormode == 1.0:
r, g, b = [round(255.0*x) for x in (r, g, b)]
if not ((0 <= r <= 255) and (0 <= g <= 255) and (0 <= b <= 255)):
raise TurtleGraphicsError("bad color sequence: %s" % str(color))
return "#%02x%02x%02x" % (r, g, b)
def _color(self, cstr):
if not cstr.startswith("#"):
return cstr
if len(cstr) == 7:
cl = [int(cstr[i:i+2], 16) for i in (1, 3, 5)]
elif len(cstr) == 4:
cl = [16*int(cstr[h], 16) for h in cstr[1:]]
else:
raise TurtleGraphicsError("bad colorstring: %s" % cstr)
return tuple([c * self._colormode/255 for c in cl])
def colormode(self, cmode=None):
"""Return the colormode or set it to 1.0 or 255.
Optional argument:
cmode -- one of the values 1.0 or 255
r, g, b values of colortriples have to be in range 0..cmode.
Example (for a TurtleScreen instance named screen):
>>> screen.colormode()
1.0
>>> screen.colormode(255)
>>> pencolor(240,160,80)
"""
if cmode is None:
return self._colormode
if cmode == 1.0:
self._colormode = float(cmode)
elif cmode == 255:
self._colormode = int(cmode)
def reset(self):
"""Reset all Turtles on the Screen to their initial state.
No argument.
Example (for a TurtleScreen instance named screen):
>>> screen.reset()
"""
for turtle in self._turtles:
turtle._setmode(self._mode)
turtle.reset()
def turtles(self):
"""Return the list of turtles on the screen.
Example (for a TurtleScreen instance named screen):
>>> screen.turtles()
[<turtle.Turtle object at 0x00E11FB0>]
"""
return self._turtles
def bgcolor(self, *args):
"""Set or return backgroundcolor of the TurtleScreen.
Arguments (if given): a color string or three numbers
in the range 0..colormode or a 3-tuple of such numbers.
Example (for a TurtleScreen instance named screen):
>>> screen.bgcolor("orange")
>>> screen.bgcolor()
'orange'
>>> screen.bgcolor(0.5,0,0.5)
>>> screen.bgcolor()
'#800080'
"""
if args:
color = self._colorstr(args)
else:
color = None
color = self._bgcolor(color)
if color is not None:
color = self._color(color)
return color
def tracer(self, n=None, delay=None):
"""Turns turtle animation on/off and set delay for update drawings.
Optional arguments:
n -- nonnegative integer
delay -- nonnegative integer
If n is given, only each n-th regular screen update is really performed.
(Can be used to accelerate the drawing of complex graphics.)
Second arguments sets delay value (see RawTurtle.delay())
Example (for a TurtleScreen instance named screen):
>>> screen.tracer(8, 25)
>>> dist = 2
>>> for i in range(200):
... fd(dist)
... rt(90)
... dist += 2
"""
if n is None:
return self._tracing
self._tracing = int(n)
self._updatecounter = 0
if delay is not None:
self._delayvalue = int(delay)
if self._tracing:
self.update()
def delay(self, delay=None):
""" Return or set the drawing delay in milliseconds.
Optional argument:
delay -- positive integer
Example (for a TurtleScreen instance named screen):
>>> screen.delay(15)
>>> screen.delay()
15
"""
if delay is None:
return self._delayvalue
self._delayvalue = int(delay)
def _incrementudc(self):
"""Increment update counter."""
if not TurtleScreen._RUNNING:
TurtleScreen._RUNNING = True
raise Terminator
if self._tracing > 0:
self._updatecounter += 1
self._updatecounter %= self._tracing
def update(self):
"""Perform a TurtleScreen update.
"""
tracing = self._tracing
self._tracing = True
for t in self.turtles():
t._update_data()
t._drawturtle()
self._tracing = tracing
self._update()
def window_width(self):
""" Return the width of the turtle window.
Example (for a TurtleScreen instance named screen):
>>> screen.window_width()
640
"""
return self._window_size()[0]
def window_height(self):
""" Return the height of the turtle window.
Example (for a TurtleScreen instance named screen):
>>> screen.window_height()
480
"""
return self._window_size()[1]
def getcanvas(self):
"""Return the Canvas of this TurtleScreen.
No argument.
Example (for a Screen instance named screen):
>>> cv = screen.getcanvas()
>>> cv
<turtle.ScrolledCanvas instance at 0x010742D8>
"""
return self.cv
def getshapes(self):
"""Return a list of names of all currently available turtle shapes.
No argument.
Example (for a TurtleScreen instance named screen):
>>> screen.getshapes()
['arrow', 'blank', 'circle', ... , 'turtle']
"""
return sorted(self._shapes.keys())
def onclick(self, fun, btn=1, add=None):
"""Bind fun to mouse-click event on canvas.
Arguments:
fun -- a function with two arguments, the coordinates of the
clicked point on the canvas.
num -- the number of the mouse-button, defaults to 1
Example (for a TurtleScreen instance named screen
and a Turtle instance named turtle):
>>> screen.onclick(goto)
>>> # Subsequently clicking into the TurtleScreen will
>>> # make the turtle move to the clicked point.
>>> screen.onclick(None)
"""
self._onscreenclick(fun, btn, add)
def onkey(self, fun, key):
"""Bind fun to key-release event of key.
Arguments:
fun -- a function with no arguments
key -- a string: key (e.g. "a") or key-symbol (e.g. "space")
In order to be able to register key-events, TurtleScreen
must have focus. (See method listen.)
Example (for a TurtleScreen instance named screen):
>>> def f():
... fd(50)
... lt(60)
...
>>> screen.onkey(f, "Up")
>>> screen.listen()
Subsequently the turtle can be moved by repeatedly pressing
the up-arrow key, consequently drawing a hexagon
"""
if fun is None:
if key in self._keys:
self._keys.remove(key)
elif key not in self._keys:
self._keys.append(key)
self._onkey(fun, key)
def listen(self, xdummy=None, ydummy=None):
"""Set focus on TurtleScreen (in order to collect key-events)
No arguments.
Dummy arguments are provided in order
to be able to pass listen to the onclick method.
Example (for a TurtleScreen instance named screen):
>>> screen.listen()
"""
self._listen()
def ontimer(self, fun, t=0):
"""Install a timer, which calls fun after t milliseconds.
Arguments:
fun -- a function with no arguments.
t -- a number >= 0
Example (for a TurtleScreen instance named screen):
>>> running = True
>>> def f():
... if running:
... fd(50)
... lt(60)
... screen.ontimer(f, 250)
...
>>> f() # makes the turtle marching around
>>> running = False
"""
self._ontimer(fun, t)
def bgpic(self, picname=None):
"""Set background image or return name of current backgroundimage.
Optional argument:
picname -- a string, name of a gif-file or "nopic".
If picname is a filename, set the corresponding image as background.
If picname is "nopic", delete backgroundimage, if present.
If picname is None, return the filename of the current backgroundimage.
Example (for a TurtleScreen instance named screen):
>>> screen.bgpic()
'nopic'
>>> screen.bgpic("landscape.gif")
>>> screen.bgpic()
'landscape.gif'
"""
if picname is None:
return self._bgpicname
if picname not in self._bgpics:
self._bgpics[picname] = self._image(picname)
self._setbgpic(self._bgpic, self._bgpics[picname])
self._bgpicname = picname
def screensize(self, canvwidth=None, canvheight=None, bg=None):
"""Resize the canvas the turtles are drawing on.
Optional arguments:
canvwidth -- positive integer, new width of canvas in pixels
canvheight -- positive integer, new height of canvas in pixels
bg -- colorstring or color-tuple, new backgroundcolor
If no arguments are given, return current (canvaswidth, canvasheight)
Do not alter the drawing window. To observe hidden parts of
the canvas use the scrollbars. (Can make visible those parts
of a drawing, which were outside the canvas before!)
Example (for a Turtle instance named turtle):
>>> turtle.screensize(2000,1500)
>>> # e. g. to search for an erroneously escaped turtle ;-)
"""
return self._resize(canvwidth, canvheight, bg)
onscreenclick = onclick
resetscreen = reset
clearscreen = clear
addshape = register_shape
class TNavigator(object):
    """Navigation part of the RawTurtle.
    Implements methods for turtle movement.
    """
    # Start heading (unit vector) for each screen mode; in "logo" mode
    # the turtle initially points north instead of east.
    START_ORIENTATION = {
        "standard": Vec2D(1.0, 0.0),
        "world" : Vec2D(1.0, 0.0),
        "logo" : Vec2D(0.0, 1.0) }
    DEFAULT_MODE = "standard"
    DEFAULT_ANGLEOFFSET = 0
    DEFAULT_ANGLEORIENT = 1
    def __init__(self, mode=DEFAULT_MODE):
        self._angleOffset = self.DEFAULT_ANGLEOFFSET
        self._angleOrient = self.DEFAULT_ANGLEORIENT
        self._mode = mode
        self.undobuffer = None
        self.degrees()
        # Clear _mode before calling _setmode so the angle offset and
        # orientation for the requested mode are (re)computed.
        self._mode = None
        self._setmode(mode)
        TNavigator.reset(self)
    def reset(self):
        """reset turtle to its initial values

        Will be overwritten by parent class
        """
        self._position = Vec2D(0.0, 0.0)
        self._orient = TNavigator.START_ORIENTATION[self._mode]
    def _setmode(self, mode=None):
        """Set turtle-mode to 'standard', 'world' or 'logo'.

        With no argument, return the current mode; unknown mode
        strings are silently ignored.
        """
        if mode is None:
            return self._mode
        if mode not in ["standard", "logo", "world"]:
            return
        self._mode = mode
        if mode in ["standard", "world"]:
            self._angleOffset = 0
            self._angleOrient = 1
        else: # mode == "logo":
            # Logo mode: zero heading is north, angles grow clockwise.
            self._angleOffset = self._fullcircle/4.
            self._angleOrient = -1
    def _setDegreesPerAU(self, fullcircle):
        """Helper function for degrees() and radians()"""
        self._fullcircle = fullcircle
        # NOTE(review): plain division -- under Python 2 an *integer*
        # fullcircle would truncate here; callers pass floats.
        self._degreesPerAU = 360/fullcircle
        if self._mode == "standard":
            self._angleOffset = 0
        else:
            self._angleOffset = fullcircle/4.
    def degrees(self, fullcircle=360.0):
        """ Set angle measurement units to degrees.

        Optional argument:
        fullcircle - a number

        Set angle measurement units, i. e. set number
        of 'degrees' for a full circle. Default value is
        360 degrees.

        Example (for a Turtle instance named turtle):
        >>> turtle.left(90)
        >>> turtle.heading()
        90

        Change angle measurement unit to grad (also known as gon,
        grade, or gradian and equals 1/100-th of the right angle.)
        >>> turtle.degrees(400.0)
        >>> turtle.heading()
        100
        """
        self._setDegreesPerAU(fullcircle)
    def radians(self):
        """ Set the angle measurement units to radians.

        No arguments.

        Example (for a Turtle instance named turtle):
        >>> turtle.heading()
        90
        >>> turtle.radians()
        >>> turtle.heading()
        1.5707963267948966
        """
        self._setDegreesPerAU(2*math.pi)
    def _go(self, distance):
        """move turtle forward by specified distance"""
        ende = self._position + self._orient * distance
        self._goto(ende)
    def _rotate(self, angle):
        """Turn turtle counterclockwise by specified angle if angle > 0."""
        # Convert from the current angle unit to degrees before rotating.
        angle *= self._degreesPerAU
        self._orient = self._orient.rotate(angle)
    def _goto(self, end):
        """move turtle to position end."""
        self._position = end
    def forward(self, distance):
        """Move the turtle forward by the specified distance.

        Aliases: forward | fd

        Argument:
        distance -- a number (integer or float)

        Move the turtle forward by the specified distance, in the direction
        the turtle is headed.

        Example (for a Turtle instance named turtle):
        >>> turtle.position()
        (0.00, 0.00)
        >>> turtle.forward(25)
        >>> turtle.position()
        (25.00,0.00)
        >>> turtle.forward(-75)
        >>> turtle.position()
        (-50.00,0.00)
        """
        self._go(distance)
    def back(self, distance):
        """Move the turtle backward by distance.

        Aliases: back | backward | bk

        Argument:
        distance -- a number

        Move the turtle backward by distance, opposite to the direction the
        turtle is headed. Do not change the turtle's heading.

        Example (for a Turtle instance named turtle):
        >>> turtle.position()
        (0.00, 0.00)
        >>> turtle.backward(30)
        >>> turtle.position()
        (-30.00, 0.00)
        """
        self._go(-distance)
    def right(self, angle):
        """Turn turtle right by angle units.

        Aliases: right | rt

        Argument:
        angle -- a number (integer or float)

        Turn turtle right by angle units. (Units are by default degrees,
        but can be set via the degrees() and radians() functions.)
        Angle orientation depends on mode.

        Example (for a Turtle instance named turtle):
        >>> turtle.heading()
        22.0
        >>> turtle.right(45)
        >>> turtle.heading()
        337.0
        """
        self._rotate(-angle)
    def left(self, angle):
        """Turn turtle left by angle units.

        Aliases: left | lt

        Argument:
        angle -- a number (integer or float)

        Turn turtle left by angle units. (Units are by default degrees,
        but can be set via the degrees() and radians() functions.)
        Angle orientation depends on mode.

        Example (for a Turtle instance named turtle):
        >>> turtle.heading()
        22.0
        >>> turtle.left(45)
        >>> turtle.heading()
        67.0
        """
        self._rotate(angle)
    def pos(self):
        """Return the turtle's current location (x,y), as a Vec2D-vector.

        Aliases: pos | position

        No arguments.

        Example (for a Turtle instance named turtle):
        >>> turtle.pos()
        (0.00, 240.00)
        """
        return self._position
    def xcor(self):
        """ Return the turtle's x coordinate.

        No arguments.

        Example (for a Turtle instance named turtle):
        >>> reset()
        >>> turtle.left(60)
        >>> turtle.forward(100)
        >>> print(turtle.xcor())
        50.0
        """
        return self._position[0]
    def ycor(self):
        """ Return the turtle's y coordinate.

        No arguments.

        Example (for a Turtle instance named turtle):
        >>> reset()
        >>> turtle.left(60)
        >>> turtle.forward(100)
        >>> print(turtle.ycor())
        86.6025403784
        """
        return self._position[1]
    def goto(self, x, y=None):
        """Move turtle to an absolute position.

        Aliases: setpos | setposition | goto

        Arguments:
        x -- a number or a pair/vector of numbers
        y -- a number or None

        call: goto(x, y)         # two coordinates
        --or: goto((x, y))       # a pair (tuple) of coordinates
        --or: goto(vec)          # e.g. as returned by pos()

        Move turtle to an absolute position. If the pen is down,
        a line will be drawn. The turtle's orientation does not change.

        Example (for a Turtle instance named turtle):
        >>> tp = turtle.pos()
        >>> tp
        (0.00, 0.00)
        >>> turtle.setpos(60,30)
        >>> turtle.pos()
        (60.00,30.00)
        >>> turtle.setpos((20,80))
        >>> turtle.pos()
        (20.00,80.00)
        >>> turtle.setpos(tp)
        >>> turtle.pos()
        (0.00,0.00)
        """
        if y is None:
            # x is a pair/vector of coordinates.
            self._goto(Vec2D(*x))
        else:
            self._goto(Vec2D(x, y))
    def home(self):
        """Move turtle to the origin - coordinates (0,0).

        No arguments.

        Move turtle to the origin - coordinates (0,0) and set its
        heading to its start-orientation (which depends on mode).

        Example (for a Turtle instance named turtle):
        >>> turtle.home()
        """
        self.goto(0, 0)
        self.setheading(0)
    def setx(self, x):
        """Set the turtle's first coordinate to x.

        Argument:
        x -- a number (integer or float)

        Set the turtle's first coordinate to x, leave second coordinate
        unchanged.

        Example (for a Turtle instance named turtle):
        >>> turtle.position()
        (0.00, 240.00)
        >>> turtle.setx(10)
        >>> turtle.position()
        (10.00, 240.00)
        """
        self._goto(Vec2D(x, self._position[1]))
    def sety(self, y):
        """Set the turtle's second coordinate to y.

        Argument:
        y -- a number (integer or float)

        Set the turtle's second coordinate to y, first coordinate remains
        unchanged.

        Example (for a Turtle instance named turtle):
        >>> turtle.position()
        (0.00, 40.00)
        >>> turtle.sety(-10)
        >>> turtle.position()
        (0.00, -10.00)
        """
        self._goto(Vec2D(self._position[0], y))
    def distance(self, x, y=None):
        """Return the distance from the turtle to (x,y) in turtle step units.

        Arguments:
        x -- a number or a pair/vector of numbers or a turtle instance
        y -- a number or None

        call: distance(x, y)         # two coordinates
        --or: distance((x, y))       # a pair (tuple) of coordinates
        --or: distance(vec)          # e.g. as returned by pos()
        --or: distance(mypen)        # where mypen is another turtle

        Example (for a Turtle instance named turtle):
        >>> turtle.pos()
        (0.00, 0.00)
        >>> turtle.distance(30,40)
        50.0
        >>> pen = Turtle()
        >>> pen.forward(77)
        >>> turtle.distance(pen)
        77.0
        """
        if y is not None:
            pos = Vec2D(x, y)
        # The isinstance checks below deliberately take precedence over
        # a given y when x already carries the full position.
        if isinstance(x, Vec2D):
            pos = x
        elif isinstance(x, tuple):
            pos = Vec2D(*x)
        elif isinstance(x, TNavigator):
            pos = x._position
        return abs(pos - self._position)
    def towards(self, x, y=None):
        """Return the angle of the line from the turtle's position to (x, y).

        Arguments:
        x -- a number or a pair/vector of numbers or a turtle instance
        y -- a number or None

        call: towards(x, y)         # two coordinates
        --or: towards((x, y))       # a pair (tuple) of coordinates
        --or: towards(vec)          # e.g. as returned by pos()
        --or: towards(mypen)        # where mypen is another turtle

        Return the angle, between the line from turtle-position to position
        specified by x, y and the turtle's start orientation. (Depends on
        modes - "standard" or "logo")

        Example (for a Turtle instance named turtle):
        >>> turtle.pos()
        (10.00, 10.00)
        >>> turtle.towards(0,0)
        225.0
        """
        if y is not None:
            pos = Vec2D(x, y)
        if isinstance(x, Vec2D):
            pos = x
        elif isinstance(x, tuple):
            pos = Vec2D(*x)
        elif isinstance(x, TNavigator):
            pos = x._position
        x, y = pos - self._position
        # atan2 gives degrees CCW from east; round to avoid float noise,
        # then convert to the current angle unit, mode offset and direction.
        result = round(math.atan2(y, x)*180.0/math.pi, 10) % 360.0
        result /= self._degreesPerAU
        return (self._angleOffset + self._angleOrient*result) % self._fullcircle
    def heading(self):
        """ Return the turtle's current heading.

        No arguments.

        Example (for a Turtle instance named turtle):
        >>> turtle.left(67)
        >>> turtle.heading()
        67.0
        """
        x, y = self._orient
        # Same conversion as in towards(): degrees CCW from east to
        # the current unit/mode convention.
        result = round(math.atan2(y, x)*180.0/math.pi, 10) % 360.0
        result /= self._degreesPerAU
        return (self._angleOffset + self._angleOrient*result) % self._fullcircle
    def setheading(self, to_angle):
        """Set the orientation of the turtle to to_angle.

        Aliases: setheading | seth

        Argument:
        to_angle -- a number (integer or float)

        Set the orientation of the turtle to to_angle.
        Here are some common directions in degrees:

         standard - mode:          logo-mode:
        -------------------|--------------------
           0 - east                0 - north
          90 - north              90 - east
         180 - west              180 - south
         270 - south             270 - west

        Example (for a Turtle instance named turtle):
        >>> turtle.setheading(90)
        >>> turtle.heading()
        90
        """
        angle = (to_angle - self.heading())*self._angleOrient
        full = self._fullcircle
        # Normalize to [-full/2, full/2) so the turtle turns the short way.
        angle = (angle+full/2.)%full - full/2.
        self._rotate(angle)
    def circle(self, radius, extent = None, steps = None):
        """ Draw a circle with given radius.

        Arguments:
        radius -- a number
        extent (optional) -- a number
        steps (optional) -- an integer

        Draw a circle with given radius. The center is radius units left
        of the turtle; extent - an angle - determines which part of the
        circle is drawn. If extent is not given, draw the entire circle.
        If extent is not a full circle, one endpoint of the arc is the
        current pen position. Draw the arc in counterclockwise direction
        if radius is positive, otherwise in clockwise direction. Finally
        the direction of the turtle is changed by the amount of extent.

        As the circle is approximated by an inscribed regular polygon,
        steps determines the number of steps to use. If not given,
        it will be calculated automatically. May be used to draw regular
        polygons.

        call: circle(radius)                  # full circle
        --or: circle(radius, extent)          # arc
        --or: circle(radius, extent, steps)
        --or: circle(radius, steps=6)         # 6-sided polygon

        Example (for a Turtle instance named turtle):
        >>> turtle.circle(50)
        >>> turtle.circle(120, 180)  # semicircle
        """
        if self.undobuffer:
            # Record the whole arc as one undoable sequence.
            self.undobuffer.push(["seq"])
            self.undobuffer.cumulate = True
        speed = self.speed()
        if extent is None:
            extent = self._fullcircle
        if steps is None:
            # Heuristic: more segments for larger radii, capped at 59+1,
            # scaled down for partial arcs.
            frac = abs(extent)/self._fullcircle
            steps = 1+int(min(11+abs(radius)/6.0, 59.0)*frac)
        w = 1.0 * extent / steps
        w2 = 0.5 * w
        # Chord length of one polygon segment (angle converted to radians).
        l = 2.0 * radius * math.sin(w2*math.pi/180.0*self._degreesPerAU)
        if radius < 0:
            l, w, w2 = -l, -w, -w2
        tr = self.tracer()
        dl = self._delay()
        if speed == 0:
            self.tracer(0, 0)
        else:
            self.speed(0)
        # Pre-rotate by half a segment so the polygon straddles the arc.
        self._rotate(w2)
        for i in range(steps):
            self.speed(speed)
            self._go(l)
            self.speed(0)
            self._rotate(w)
        self._rotate(-w2)
        if speed == 0:
            self.tracer(tr, dl)
        self.speed(speed)
        if self.undobuffer:
            self.undobuffer.cumulate = False
    ## three dummy methods to be implemented by child class:
    def speed(self, s=0):
        """dummy method - to be overwritten by child class"""
    def tracer(self, a=None, b=None):
        """dummy method - to be overwritten by child class"""
    def _delay(self, n=None):
        """dummy method - to be overwritten by child class"""
    # Short aliases, part of the public turtle API.
    fd = forward
    bk = back
    backward = back
    rt = right
    lt = left
    position = pos
    setpos = goto
    setposition = goto
    seth = setheading
class TPen(object):
    """Drawing part of the RawTurtle.
    Implements drawing properties.
    """
    def __init__(self, resizemode=_CFG["resizemode"]):
        self._resizemode = resizemode # or "user" or "noresize"
        self.undobuffer = None
        TPen._reset(self)
    def _reset(self, pencolor=_CFG["pencolor"],
               fillcolor=_CFG["fillcolor"]):
        # Restore all pen attributes to their defaults.
        self._pensize = 1
        self._shown = True
        self._pencolor = pencolor
        self._fillcolor = fillcolor
        self._drawing = True
        self._speed = 3
        self._stretchfactor = (1, 1)
        self._tilt = 0
        self._outlinewidth = 1
        ### self.screen = None # to override by child class
    def resizemode(self, rmode=None):
        """Set resizemode to one of the values: "auto", "user", "noresize".

        (Optional) Argument:
        rmode -- one of the strings "auto", "user", "noresize"

        Different resizemodes have the following effects:
          - "auto" adapts the appearance of the turtle
                   corresponding to the value of pensize.
          - "user" adapts the appearance of the turtle according to the
                   values of stretchfactor and outlinewidth (outline),
                   which are set by shapesize()
          - "noresize" no adaptation of the turtle's appearance takes place.
        If no argument is given, return current resizemode.
        resizemode("user") is called by a call of shapesize with arguments.

        Examples (for a Turtle instance named turtle):
        >>> turtle.resizemode("noresize")
        >>> turtle.resizemode()
        'noresize'
        """
        if rmode is None:
            return self._resizemode
        rmode = rmode.lower()
        if rmode in ["auto", "user", "noresize"]:
            # Route through pen() so the change is undoable and redrawn.
            self.pen(resizemode=rmode)
    def pensize(self, width=None):
        """Set or return the line thickness.

        Aliases: pensize | width

        Argument:
        width -- positive number

        Set the line thickness to width or return it. If resizemode is set
        to "auto" and turtleshape is a polygon, that polygon is drawn with
        the same line thickness. If no argument is given, current pensize
        is returned.

        Example (for a Turtle instance named turtle):
        >>> turtle.pensize()
        1
        >>> turtle.pensize(10)   # from here on lines of width 10 are drawn
        """
        if width is None:
            return self._pensize
        self.pen(pensize=width)
    def penup(self):
        """Pull the pen up -- no drawing when moving.

        Aliases: penup | pu | up

        No argument

        Example (for a Turtle instance named turtle):
        >>> turtle.penup()
        """
        if not self._drawing:
            return
        self.pen(pendown=False)
    def pendown(self):
        """Pull the pen down -- drawing when moving.

        Aliases: pendown | pd | down

        No argument.

        Example (for a Turtle instance named turtle):
        >>> turtle.pendown()
        """
        if self._drawing:
            return
        self.pen(pendown=True)
    def isdown(self):
        """Return True if pen is down, False if it's up.

        No argument.

        Example (for a Turtle instance named turtle):
        >>> turtle.penup()
        >>> turtle.isdown()
        False
        >>> turtle.pendown()
        >>> turtle.isdown()
        True
        """
        return self._drawing
    def speed(self, speed=None):
        """ Return or set the turtle's speed.

        Optional argument:
        speed -- an integer in the range 0..10 or a speedstring (see below)

        Set the turtle's speed to an integer value in the range 0 .. 10.
        If no argument is given: return current speed.

        If input is a number greater than 10 or smaller than 0.5,
        speed is set to 0.
        Speedstrings are mapped to speedvalues in the following way:
            'fastest' :  0
            'fast'    :  10
            'normal'  :  6
            'slow'    :  3
            'slowest' :  1
        speeds from 1 to 10 enforce increasingly faster animation of
        line drawing and turtle turning.

        Attention:
        speed = 0 : *no* animation takes place. forward/back makes turtle jump
        and likewise left/right make the turtle turn instantly.

        Example (for a Turtle instance named turtle):
        >>> turtle.speed(3)
        """
        speeds = {'fastest':0, 'fast':10, 'normal':6, 'slow':3, 'slowest':1 }
        if speed is None:
            return self._speed
        if speed in speeds:
            speed = speeds[speed]
        elif 0.5 < speed < 10.5:
            speed = int(round(speed))
        else:
            speed = 0
        self.pen(speed=speed)
    def color(self, *args):
        """Return or set the pencolor and fillcolor.

        Arguments:
        Several input formats are allowed.
        They use 0, 1, 2, or 3 arguments as follows:

        color()
            Return the current pencolor and the current fillcolor
            as a pair of color specification strings as are returned
            by pencolor and fillcolor.
        color(colorstring), color((r,g,b)), color(r,g,b)
            inputs as in pencolor, set both, fillcolor and pencolor,
            to the given value.
        color(colorstring1, colorstring2),
        color((r1,g1,b1), (r2,g2,b2))
            equivalent to pencolor(colorstring1) and fillcolor(colorstring2)
            and analogously, if the other input format is used.

        If turtleshape is a polygon, outline and interior of that polygon
        is drawn with the newly set colors.
        For more info see: pencolor, fillcolor

        Example (for a Turtle instance named turtle):
        >>> turtle.color('red', 'green')
        >>> turtle.color()
        ('red', 'green')
        >>> colormode(255)
        >>> color((40, 80, 120), (160, 200, 240))
        >>> color()
        ('#285078', '#a0c8f0')
        """
        if args:
            l = len(args)
            if l == 1:
                pcolor = fcolor = args[0]
            elif l == 2:
                pcolor, fcolor = args
            elif l == 3:
                # Three numbers: one r, g, b triple for both colors.
                pcolor = fcolor = args
            pcolor = self._colorstr(pcolor)
            fcolor = self._colorstr(fcolor)
            self.pen(pencolor=pcolor, fillcolor=fcolor)
        else:
            return self._color(self._pencolor), self._color(self._fillcolor)
    def pencolor(self, *args):
        """ Return or set the pencolor.

        Arguments:
        Four input formats are allowed:
          - pencolor()
            Return the current pencolor as color specification string,
            possibly in hex-number format (see example).
            May be used as input to another color/pencolor/fillcolor call.
          - pencolor(colorstring)
            s is a Tk color specification string, such as "red" or "yellow"
          - pencolor((r, g, b))
            *a tuple* of r, g, and b, which represent, an RGB color,
            and each of r, g, and b are in the range 0..colormode,
            where colormode is either 1.0 or 255
          - pencolor(r, g, b)
            r, g, and b represent an RGB color, and each of r, g, and b
            are in the range 0..colormode

        If turtleshape is a polygon, the outline of that polygon is drawn
        with the newly set pencolor.

        Example (for a Turtle instance named turtle):
        >>> turtle.pencolor('brown')
        >>> tup = (0.2, 0.8, 0.55)
        >>> turtle.pencolor(tup)
        >>> turtle.pencolor()
        '#33cc8c'
        """
        if args:
            color = self._colorstr(args)
            if color == self._pencolor:
                # No change: avoid an unnecessary redraw/undo entry.
                return
            self.pen(pencolor=color)
        else:
            return self._color(self._pencolor)
    def fillcolor(self, *args):
        """ Return or set the fillcolor.

        Arguments:
        Four input formats are allowed:
          - fillcolor()
            Return the current fillcolor as color specification string,
            possibly in hex-number format (see example).
            May be used as input to another color/pencolor/fillcolor call.
          - fillcolor(colorstring)
            s is a Tk color specification string, such as "red" or "yellow"
          - fillcolor((r, g, b))
            *a tuple* of r, g, and b, which represent, an RGB color,
            and each of r, g, and b are in the range 0..colormode,
            where colormode is either 1.0 or 255
          - fillcolor(r, g, b)
            r, g, and b represent an RGB color, and each of r, g, and b
            are in the range 0..colormode

        If turtleshape is a polygon, the interior of that polygon is drawn
        with the newly set fillcolor.

        Example (for a Turtle instance named turtle):
        >>> turtle.fillcolor('violet')
        >>> col = turtle.pencolor()
        >>> turtle.fillcolor(col)
        >>> turtle.fillcolor(0, .5, 0)
        """
        if args:
            color = self._colorstr(args)
            if color == self._fillcolor:
                # No change: avoid an unnecessary redraw/undo entry.
                return
            self.pen(fillcolor=color)
        else:
            return self._color(self._fillcolor)
    def showturtle(self):
        """Makes the turtle visible.

        Aliases: showturtle | st

        No argument.

        Example (for a Turtle instance named turtle):
        >>> turtle.hideturtle()
        >>> turtle.showturtle()
        """
        self.pen(shown=True)
    def hideturtle(self):
        """Makes the turtle invisible.

        Aliases: hideturtle | ht

        No argument.

        It's a good idea to do this while you're in the
        middle of a complicated drawing, because hiding
        the turtle speeds up the drawing observably.

        Example (for a Turtle instance named turtle):
        >>> turtle.hideturtle()
        """
        self.pen(shown=False)
    def isvisible(self):
        """Return True if the Turtle is shown, False if it's hidden.

        No argument.

        Example (for a Turtle instance named turtle):
        >>> turtle.hideturtle()
        >>> print(turtle.isvisible())
        False
        """
        return self._shown
    def pen(self, pen=None, **pendict):
        """Return or set the pen's attributes.

        Arguments:
        pen -- a dictionary with some or all of the below listed keys.
        **pendict -- one or more keyword-arguments with the below
                     listed keys as keywords.

        Return or set the pen's attributes in a 'pen-dictionary'
        with the following key/value pairs:

           "shown"         : True/False
           "pendown"       : True/False
           "pencolor"      : color-string or color-tuple
           "fillcolor"     : color-string or color-tuple
           "pensize"       : positive number
           "speed"         : number in range 0..10
           "resizemode"    : "auto" or "user" or "noresize"
           "stretchfactor" : (positive number, positive number)
           "outline"       : positive number
           "tilt"          : number

        This dictionary can be used as argument for a subsequent
        pen()-call to restore the former pen-state. Moreover one
        or more of these attributes can be provided as keyword-arguments.
        This can be used to set several pen attributes in one statement.

        Examples (for a Turtle instance named turtle):
        >>> turtle.pen(fillcolor="black", pencolor="red", pensize=10)
        >>> turtle.pen()
        {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1,
        'pencolor': 'red', 'pendown': True, 'fillcolor': 'black',
        'stretchfactor': (1,1), 'speed': 3}
        >>> penstate=turtle.pen()
        >>> turtle.color("yellow","")
        >>> turtle.penup()
        >>> turtle.pen()
        {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1,
        'pencolor': 'yellow', 'pendown': False, 'fillcolor': '',
        'stretchfactor': (1,1), 'speed': 3}
        >>> p.pen(penstate, fillcolor="green")
        >>> p.pen()
        {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1,
        'pencolor': 'red', 'pendown': True, 'fillcolor': 'green',
        'stretchfactor': (1,1), 'speed': 3}
        """
        # Snapshot of the current state, also the getter's return value.
        _pd = {"shown" : self._shown,
               "pendown" : self._drawing,
               "pencolor" : self._pencolor,
               "fillcolor" : self._fillcolor,
               "pensize" : self._pensize,
               "speed" : self._speed,
               "resizemode" : self._resizemode,
               "stretchfactor" : self._stretchfactor,
               "outline" : self._outlinewidth,
               "tilt" : self._tilt
               }
        if not (pen or pendict):
            return _pd
        # Merge positional dict and keyword arguments; keywords win.
        if isinstance(pen, dict):
            p = pen
        else:
            p = {}
        p.update(pendict)
        # Save the previous values of the changed keys for undo().
        _p_buf = {}
        for key in p:
            _p_buf[key] = _pd[key]
        if self.undobuffer:
            self.undobuffer.push(("pen", _p_buf))
        # A change of pendown, pencolor or pensize ends the current
        # line segment before the new attributes take effect.
        newLine = False
        if "pendown" in p:
            if self._drawing != p["pendown"]:
                newLine = True
        if "pencolor" in p:
            if isinstance(p["pencolor"], tuple):
                p["pencolor"] = self._colorstr((p["pencolor"],))
            if self._pencolor != p["pencolor"]:
                newLine = True
        if "pensize" in p:
            if self._pensize != p["pensize"]:
                newLine = True
        if newLine:
            self._newLine()
        if "pendown" in p:
            self._drawing = p["pendown"]
        if "pencolor" in p:
            self._pencolor = p["pencolor"]
        if "pensize" in p:
            self._pensize = p["pensize"]
        if "fillcolor" in p:
            if isinstance(p["fillcolor"], tuple):
                p["fillcolor"] = self._colorstr((p["fillcolor"],))
            self._fillcolor = p["fillcolor"]
        if "speed" in p:
            self._speed = p["speed"]
        if "resizemode" in p:
            self._resizemode = p["resizemode"]
        if "stretchfactor" in p:
            sf = p["stretchfactor"]
            if isinstance(sf, (int, float)):
                # A single number stretches both dimensions equally.
                sf = (sf, sf)
            self._stretchfactor = sf
        if "outline" in p:
            self._outlinewidth = p["outline"]
        if "shown" in p:
            self._shown = p["shown"]
        if "tilt" in p:
            self._tilt = p["tilt"]
        self._update()
    ## three dummy methods to be implemented by child class:
    def _newLine(self, usePos = True):
        """dummy method - to be overwritten by child class"""
    def _update(self, count=True, forced=False):
        """dummy method - to be overwritten by child class"""
    def _color(self, args):
        """dummy method - to be overwritten by child class"""
    def _colorstr(self, args):
        """dummy method - to be overwritten by child class"""
    # Short aliases, part of the public turtle API.
    width = pensize
    up = penup
    pu = penup
    pd = pendown
    down = pendown
    st = showturtle
    ht = hideturtle
class _TurtleImage(object):
"""Helper class: Datatype to store Turtle attributes
"""
def __init__(self, screen, shapeIndex):
self.screen = screen
self._type = None
self._setshape(shapeIndex)
def _setshape(self, shapeIndex):
screen = self.screen # RawTurtle.screens[self.screenIndex]
self.shapeIndex = shapeIndex
if self._type == "polygon" == screen._shapes[shapeIndex]._type:
return
if self._type == "image" == screen._shapes[shapeIndex]._type:
return
if self._type in ["image", "polygon"]:
screen._delete(self._item)
elif self._type == "compound":
for item in self._item:
screen._delete(item)
self._type = screen._shapes[shapeIndex]._type
if self._type == "polygon":
self._item = screen._createpoly()
elif self._type == "image":
self._item = screen._createimage(screen._shapes["blank"]._data)
elif self._type == "compound":
self._item = [screen._createpoly() for item in
screen._shapes[shapeIndex]._data]
class RawTurtle(TPen, TNavigator):
"""Animation part of the RawTurtle.
Puts RawTurtle upon a TurtleScreen and provides tools for
its animation.
"""
screens = []
    def __init__(self, canvas=None,
                 shape=_CFG["shape"],
                 undobuffersize=_CFG["undobuffersize"],
                 visible=_CFG["visible"]):
        """Create a turtle on *canvas*.

        canvas may be a _Screen, a TurtleScreen, or a raw
        (Scrolled)Canvas; in the latter case a TurtleScreen is looked
        up or created for it and remembered in RawTurtle.screens.
        """
        if isinstance(canvas, _Screen):
            self.screen = canvas
        elif isinstance(canvas, TurtleScreen):
            if canvas not in RawTurtle.screens:
                RawTurtle.screens.append(canvas)
            self.screen = canvas
        elif isinstance(canvas, (ScrolledCanvas, Canvas)):
            # Reuse the TurtleScreen already wrapping this canvas, if any.
            for screen in RawTurtle.screens:
                if screen.cv == canvas:
                    self.screen = screen
                    break
            else:
                self.screen = TurtleScreen(canvas)
                RawTurtle.screens.append(self.screen)
        else:
            raise TurtleGraphicsError("bad canvas argument %s" % canvas)
        screen = self.screen
        # Initialize both base classes; navigation needs the screen mode.
        TNavigator.__init__(self, screen.mode())
        TPen.__init__(self)
        screen._turtles.append(self)
        self.drawingLineItem = screen._createline()
        self.turtle = _TurtleImage(screen, shape)
        self._poly = None                # vertices of polygon being recorded
        self._creatingPoly = False
        self._fillitem = self._fillpath = None
        self._shown = visible
        self._hidden_from_screen = False
        self.currentLineItem = screen._createline()
        self.currentLine = [self._position]
        self.items = [self.currentLineItem]
        self.stampItems = []
        self._undobuffersize = undobuffersize
        self.undobuffer = Tbuffer(undobuffersize)
        self._update()
    def reset(self):
        """Delete the turtle's drawings and restore its default values.

        No argument.

        Delete the turtle's drawings from the screen, re-center the turtle
        and set variables to the default values.

        Example (for a Turtle instance named turtle):
        >>> turtle.position()
        (0.00,-22.00)
        >>> turtle.heading()
        100.0
        >>> turtle.reset()
        >>> turtle.position()
        (0.00,0.00)
        >>> turtle.heading()
        0.0
        """
        # Reset navigation state, then pen state, then the drawing itself.
        TNavigator.reset(self)
        TPen._reset(self)
        self._clear()
        self._drawturtle()
        self._update()
def setundobuffer(self, size):
"""Set or disable undobuffer.
Argument:
size -- an integer or None
If size is an integer an empty undobuffer of given size is installed.
Size gives the maximum number of turtle-actions that can be undone
by the undo() function.
If size is None, no undobuffer is present.
Example (for a Turtle instance named turtle):
>>> turtle.setundobuffer(42)
"""
if size is None or size <= 0:
self.undobuffer = None
else:
self.undobuffer = Tbuffer(size)
def undobufferentries(self):
"""Return count of entries in the undobuffer.
No argument.
Example (for a Turtle instance named turtle):
>>> while undobufferentries():
... undo()
"""
if self.undobuffer is None:
return 0
return self.undobuffer.nr_of_items()
def _clear(self):
"""Delete all of pen's drawings"""
self._fillitem = self._fillpath = None
for item in self.items:
self.screen._delete(item)
self.currentLineItem = self.screen._createline()
self.currentLine = []
if self._drawing:
self.currentLine.append(self._position)
self.items = [self.currentLineItem]
self.clearstamps()
self.setundobuffer(self._undobuffersize)
def clear(self):
    """Delete the turtle's drawings from the screen. Do not move turtle.

    No arguments.

    Delete the turtle's drawings from the screen. Do not move turtle.
    State and position of the turtle as well as drawings of other
    turtles are not affected.

    Examples (for a Turtle instance named turtle):
    >>> turtle.clear()
    """
    # Wipe this pen's items, then refresh the display.
    self._clear()
    self._update()
def _update_data(self):
    """Flush the pending line segment, honoring the tracer setting."""
    # Only every n-th regular update is actually performed (tracer(n)).
    self.screen._incrementudc()
    if self.screen._updatecounter != 0:
        return
    if len(self.currentLine)>1:
        self.screen._drawline(self.currentLineItem, self.currentLine,
                              self._pencolor, self._pensize)
def _update(self):
    """Perform a Turtle-data update.
    """
    screen = self.screen
    if screen._tracing == 0:
        return                  # animation is off: draw nothing now
    elif screen._tracing == 1:
        # Normal tracing: draw immediately and honor the delay.
        self._update_data()
        self._drawturtle()
        screen._update()                  # TurtleScreenBase
        screen._delay(screen._delayvalue) # TurtleScreenBase
    else:
        # tracer(n) with n > 1: redraw all turtles only on the n-th update.
        self._update_data()
        if screen._updatecounter == 0:
            for t in screen.turtles():
                t._drawturtle()
            screen._update()
def tracer(self, flag=None, delay=None):
    """Turns turtle animation on/off and set delay for update drawings.

    Optional arguments:
    n -- nonnegative integer (passed as 'flag')
    delay -- nonnegative integer

    If n is given, only each n-th regular screen update is really performed.
    (Can be used to accelerate the drawing of complex graphics.)
    Second arguments sets delay value (see RawTurtle.delay())

    Example (for a Turtle instance named turtle):
    >>> turtle.tracer(8, 25)
    >>> dist = 2
    >>> for i in range(200):
    ...     turtle.fd(dist)
    ...     turtle.rt(90)
    ...     dist += 2
    """
    # Pure delegation to the screen's tracer.
    return self.screen.tracer(flag, delay)
def _color(self, args):
    # Delegate color-spec conversion to the screen.
    return self.screen._color(args)
def _colorstr(self, args):
    # Delegate color-string conversion to the screen.
    return self.screen._colorstr(args)
def _cc(self, args):
    """Convert colortriples to hexstrings.
    """
    if isinstance(args, basestring):   # already a color string (Python 2)
        return args
    try:
        r, g, b = args
    except (TypeError, ValueError):
        raise TurtleGraphicsError("bad color arguments: %s" % str(args))
    if self.screen._colormode == 1.0:
        # colormode 1.0: components are floats in [0, 1]; scale to 0..255.
        r, g, b = [round(255.0*x) for x in (r, g, b)]
    if not ((0 <= r <= 255) and (0 <= g <= 255) and (0 <= b <= 255)):
        raise TurtleGraphicsError("bad color sequence: %s" % str(args))
    return "#%02x%02x%02x" % (r, g, b)
def clone(self):
    """Create and return a clone of the turtle.

    No argument.

    Create and return a clone of the turtle with same position, heading
    and turtle properties.

    Example (for a Turtle instance named mick):
    mick = Turtle()
    joe = mick.clone()
    """
    screen = self.screen
    self._newLine(self._drawing)
    turtle = self.turtle
    # Detach the Tk-bound references so deepcopy can run, restore after.
    self.screen = None
    self.turtle = None  # to make self deepcopy-able
    q = deepcopy(self)
    self.screen = screen
    self.turtle = turtle
    q.screen = screen
    q.turtle = _TurtleImage(screen, self.turtle.shapeIndex)
    screen._turtles.append(q)
    ttype = screen._shapes[self.turtle.shapeIndex]._type
    # The clone needs its own canvas item(s), matching the shape type.
    if ttype == "polygon":
        q.turtle._item = screen._createpoly()
    elif ttype == "image":
        q.turtle._item = screen._createimage(screen._shapes["blank"]._data)
    elif ttype == "compound":
        q.turtle._item = [screen._createpoly() for item in
                          screen._shapes[self.turtle.shapeIndex]._data]
    q.currentLineItem = screen._createline()
    q._update()
    return q
def shape(self, name=None):
    """Set turtle shape to shape with given name / return current shapename.

    Optional argument:
    name -- a string, which is a valid shapename

    Set turtle shape to shape with given name or, if name is not given,
    return name of current shape.
    Shape with name must exist in the TurtleScreen's shape dictionary.
    Initially there are the following polygon shapes:
    'arrow', 'turtle', 'circle', 'square', 'triangle', 'classic'.
    To learn about how to deal with shapes see Screen-method register_shape.

    Example (for a Turtle instance named turtle):
    >>> turtle.shape()
    'arrow'
    >>> turtle.shape("turtle")
    >>> turtle.shape()
    'turtle'
    """
    if name is None:
        # Query mode: report the current shape's name.
        return self.turtle.shapeIndex
    if name not in self.screen.getshapes():
        raise TurtleGraphicsError("There is no shape named %s" % name)
    self.turtle._setshape(name)
    self._update()
def shapesize(self, stretch_wid=None, stretch_len=None, outline=None):
    """Set/return turtle's stretchfactors/outline. Set resizemode to "user".

    Optional arguments:
    stretch_wid : positive number
    stretch_len : positive number
    outline     : positive number

    Return or set the pen's attributes x/y-stretchfactors and/or outline.
    Set resizemode to "user".
    If and only if resizemode is set to "user", the turtle will be displayed
    stretched according to its stretchfactors:
    stretch_wid is stretchfactor perpendicular to orientation
    stretch_len is stretchfactor in direction of turtles orientation.
    outline determines the width of the shapes's outline.

    Examples (for a Turtle instance named turtle):
    >>> turtle.resizemode("user")
    >>> turtle.shapesize(5, 5, 12)
    >>> turtle.shapesize(outline=8)
    """
    if stretch_wid is None and stretch_len is None and outline is None:
        # Query mode: report current stretchfactors and outline width.
        wid, length = self._stretchfactor
        return wid, length, self._outlinewidth
    if stretch_wid is not None:
        # A single width argument stretches uniformly in both directions.
        length = stretch_wid if stretch_len is None else stretch_len
        factors = (stretch_wid, length)
    elif stretch_len is not None:
        factors = (self._stretchfactor[0], stretch_len)
    else:
        factors = self._stretchfactor
    if outline is None:
        outline = self._outlinewidth
    self.pen(resizemode="user",
             stretchfactor=factors, outline=outline)
def settiltangle(self, angle):
    """Rotate the turtleshape to point in the specified direction

    Optional argument:
    angle -- number

    Rotate the turtleshape to point in the direction specified by angle,
    regardless of its current tilt-angle. DO NOT change the turtle's
    heading (direction of movement).

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("circle")
    >>> turtle.shapesize(5,2)
    >>> turtle.settiltangle(45)
    >>> stamp()
    >>> turtle.fd(50)
    >>> turtle.settiltangle(-45)
    >>> stamp()
    >>> turtle.fd(50)
    """
    # Convert from the user's angle units to radians, normalized to
    # [0, 2*pi); the sign accounts for angle orientation/mode.
    full_turn = 2 * math.pi
    tilt_deg = -angle * self._degreesPerAU * self._angleOrient
    tilt_rad = (tilt_deg * math.pi / 180.0) % full_turn
    self.pen(resizemode="user", tilt=tilt_rad)
def tiltangle(self):
    """Return the current tilt-angle.

    No argument.

    Return the current tilt-angle, i. e. the angle between the
    orientation of the turtleshape and the heading of the turtle
    (its direction of movement).

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("circle")
    >>> turtle.shapesize(5,2)
    >>> turtle.tilt(45)
    >>> turtle.tiltangle()
    """
    # Invert the conversion done by settiltangle: radians back to the
    # user's angle units, normalized to one full circle.
    degrees = -self._tilt * (180.0/math.pi) * self._angleOrient
    return (degrees / self._degreesPerAU) % self._fullcircle
def tilt(self, angle):
    """Rotate the turtleshape by angle.

    Argument:
    angle - a number

    Rotate the turtleshape by angle from its current tilt-angle,
    but do NOT change the turtle's heading (direction of movement).

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("circle")
    >>> turtle.shapesize(5,2)
    >>> turtle.tilt(30)
    >>> turtle.fd(50)
    >>> turtle.tilt(30)
    >>> turtle.fd(50)
    """
    # Relative tilt: add the delta onto the current absolute tilt-angle.
    current = self.tiltangle()
    self.settiltangle(angle + current)
def _polytrafo(self, poly):
    """Computes transformed polygon shapes from a shape
    according to current position and heading.
    """
    screen = self.screen
    p0, p1 = self._position
    e0, e1 = self._orient
    # Correct the heading vector for anisotropic canvas scaling, then
    # normalize it to unit length.
    e = Vec2D(e0, e1 * screen.yscale / screen.xscale)
    e0, e1 = (1.0 / abs(e)) * e
    # Rotate each vertex into the turtle's frame, convert to canvas
    # coordinates and translate to the turtle's position.
    return [(p0+(e1*x+e0*y)/screen.xscale, p1+(-e0*x+e1*y)/screen.yscale)
            for (x, y) in poly]
def _drawturtle(self):
    """Manages the correct rendering of the turtle with respect to
    its shape, resizemode, stretch and tilt etc."""
    screen = self.screen
    shape = screen._shapes[self.turtle.shapeIndex]
    ttype = shape._type
    titem = self.turtle._item
    if self._shown and screen._updatecounter == 0 and screen._tracing > 0:
        # Turtle is visible and an actual redraw is due.
        self._hidden_from_screen = False
        tshape = shape._data
        if ttype == "polygon":
            if self._resizemode == "noresize":
                w = 1
                shape = tshape
            else:
                if self._resizemode == "auto":
                    # Scale the shape with the pen size.
                    lx = ly = max(1, self._pensize/5.0)
                    w = self._pensize
                    tiltangle = 0
                elif self._resizemode == "user":
                    lx, ly = self._stretchfactor
                    w = self._outlinewidth
                    tiltangle = self._tilt
                # Stretch, then rotate by the tilt angle.
                shape = [(lx*x, ly*y) for (x, y) in tshape]
                t0, t1 = math.sin(tiltangle), math.cos(tiltangle)
                shape = [(t1*x+t0*y, -t0*x+t1*y) for (x, y) in shape]
            shape = self._polytrafo(shape)
            fc, oc = self._fillcolor, self._pencolor
            screen._drawpoly(titem, shape, fill=fc, outline=oc,
                             width=w, top=True)
        elif ttype == "image":
            screen._drawimage(titem, self._position, tshape)
        elif ttype == "compound":
            # Each component polygon carries its own fill/outline colors.
            lx, ly = self._stretchfactor
            w = self._outlinewidth
            for item, (poly, fc, oc) in zip(titem, tshape):
                poly = [(lx*x, ly*y) for (x, y) in poly]
                poly = self._polytrafo(poly)
                screen._drawpoly(item, poly, fill=self._cc(fc),
                                 outline=self._cc(oc), width=w, top=True)
    else:
        # Hide the turtle by drawing degenerate/blank items once.
        if self._hidden_from_screen:
            return
        if ttype == "polygon":
            screen._drawpoly(titem, ((0, 0), (0, 0), (0, 0)), "", "")
        elif ttype == "image":
            screen._drawimage(titem, self._position,
                              screen._shapes["blank"]._data)
        elif ttype == "compound":
            for item in titem:
                screen._drawpoly(item, ((0, 0), (0, 0), (0, 0)), "", "")
        self._hidden_from_screen = True
############################## stamp stuff ###############################
def stamp(self):
    """Stamp a copy of the turtleshape onto the canvas and return its id.

    No argument.

    Stamp a copy of the turtle shape onto the canvas at the current
    turtle position. Return a stamp_id for that stamp, which can be
    used to delete it by calling clearstamp(stamp_id).

    Example (for a Turtle instance named turtle):
    >>> turtle.color("blue")
    >>> turtle.stamp()
    13
    >>> turtle.fd(50)
    """
    screen = self.screen
    shape = screen._shapes[self.turtle.shapeIndex]
    ttype = shape._type
    tshape = shape._data
    if ttype == "polygon":
        stitem = screen._createpoly()
        if self._resizemode == "noresize":
            w = 1
            shape = tshape
        else:
            if self._resizemode == "auto":
                # Scale the shape with the pen size.
                lx = ly = max(1, self._pensize/5.0)
                w = self._pensize
                tiltangle = 0
            elif self._resizemode == "user":
                lx, ly = self._stretchfactor
                w = self._outlinewidth
                tiltangle = self._tilt
            # Stretch, then rotate by the tilt angle.
            shape = [(lx*x, ly*y) for (x, y) in tshape]
            t0, t1 = math.sin(tiltangle), math.cos(tiltangle)
            shape = [(t1*x+t0*y, -t0*x+t1*y) for (x, y) in shape]
        shape = self._polytrafo(shape)
        fc, oc = self._fillcolor, self._pencolor
        screen._drawpoly(stitem, shape, fill=fc, outline=oc,
                         width=w, top=True)
    elif ttype == "image":
        stitem = screen._createimage("")
        screen._drawimage(stitem, self._position, tshape)
    elif ttype == "compound":
        # One canvas item per component polygon; the tuple of item ids
        # serves as the stamp id.
        stitem = []
        for element in tshape:
            item = screen._createpoly()
            stitem.append(item)
        stitem = tuple(stitem)
        lx, ly = self._stretchfactor
        w = self._outlinewidth
        for item, (poly, fc, oc) in zip(stitem, tshape):
            poly = [(lx*x, ly*y) for (x, y) in poly]
            poly = self._polytrafo(poly)
            screen._drawpoly(item, poly, fill=self._cc(fc),
                             outline=self._cc(oc), width=w, top=True)
    self.stampItems.append(stitem)
    # BUGFIX: guard the push -- undoing may be disabled via
    # setundobuffer(None); the unguarded call raised AttributeError.
    if self.undobuffer:
        self.undobuffer.push(("stamp", stitem))
    return stitem
def _clearstamp(self, stampid):
"""does the work for clearstamp() and clearstamps()
"""
if stampid in self.stampItems:
if isinstance(stampid, tuple):
for subitem in stampid:
self.screen._delete(subitem)
else:
self.screen._delete(stampid)
self.stampItems.remove(stampid)
# Delete stampitem from undobuffer if necessary
# if clearstamp is called directly.
item = ("stamp", stampid)
buf = self.undobuffer
if item not in buf.buffer:
return
index = buf.buffer.index(item)
buf.buffer.remove(item)
if index <= buf.ptr:
buf.ptr = (buf.ptr - 1) % buf.bufsize
buf.buffer.insert((buf.ptr+1)%buf.bufsize, [None])
def clearstamp(self, stampid):
    """Delete stamp with given stampid

    Argument:
    stampid - an integer, must be return value of previous stamp() call.

    Example (for a Turtle instance named turtle):
    >>> turtle.color("blue")
    >>> astamp = turtle.stamp()
    >>> turtle.fd(50)
    >>> turtle.clearstamp(astamp)
    """
    # Remove the stamp, then refresh the display.
    self._clearstamp(stampid)
    self._update()
def clearstamps(self, n=None):
    """Delete all or first/last n of turtle's stamps.

    Optional argument:
    n -- an integer

    If n is None, delete all of pen's stamps,
    else if n > 0 delete first n stamps
    else if n < 0 delete last n stamps.

    Example (for a Turtle instance named turtle):
    >>> for i in range(8):
    ...     turtle.stamp(); turtle.fd(30)
    ...
    >>> turtle.clearstamps(2)
    >>> turtle.clearstamps(-2)
    >>> turtle.clearstamps()
    """
    stamps = self.stampItems
    if n is None:
        victims = stamps[:]          # copy: _clearstamp mutates the list
    elif n >= 0:
        victims = stamps[:n]
    else:
        victims = stamps[n:]
    for stamp_id in victims:
        self._clearstamp(stamp_id)
    self._update()
def _goto(self, end):
    """Move the pen to the point end, thereby drawing a line
    if pen is down. All other methods for turtle movement depend
    on this one.
    """
    ## version with undo support
    # Snapshot everything needed to reverse this move in _undogoto().
    go_modes = ( self._drawing,
                 self._pencolor,
                 self._pensize,
                 isinstance(self._fillpath, list))
    screen = self.screen
    undo_entry = ("go", self._position, end, go_modes,
                  (self.currentLineItem,
                   self.currentLine[:],
                   screen._pointlist(self.currentLineItem),
                   self.items[:])
                  )
    if self.undobuffer:
        self.undobuffer.push(undo_entry)
    start = self._position
    if self._speed and screen._tracing == 1:
        # Animate: draw a temporary line in nhops intermediate steps,
        # fewer hops for faster speeds.
        diff = (end-start)
        diffsq = (diff[0]*screen.xscale)**2 + (diff[1]*screen.yscale)**2
        nhops = 1+int((diffsq**0.5)/(3*(1.1**self._speed)*self._speed))
        delta = diff * (1.0/nhops)
        for n in range(1, nhops):
            if n == 1:
                top = True
            else:
                top = False
            self._position = start + delta * n
            if self._drawing:
                screen._drawline(self.drawingLineItem,
                                 (start, self._position),
                                 self._pencolor, self._pensize, top)
            self._update()
        # Erase the temporary animation line.
        if self._drawing:
            screen._drawline(self.drawingLineItem, ((0, 0), (0, 0)),
                             fill="", width=self._pensize)
    # Turtle now at end,
    if self._drawing: # now update currentLine
        self.currentLine.append(end)
    if isinstance(self._fillpath, list):
        self._fillpath.append(end)
    ###### inheritance hook (see subclasses)
    self._position = end
    if self._creatingPoly:
        self._poly.append(end)
    if len(self.currentLine) > 42: # 42! answer to the ultimate question
        # of life, the universe and everything
        # Long lines slow down _drawline; start a fresh canvas item.
        self._newLine()
    self._update() #count=True)
def _undogoto(self, entry):
    """Reverse a _goto. Used for undo()
    """
    # entry layout: see the undo_entry tuple built in _goto().
    old, new, go_modes, coodata = entry
    drawing, pc, ps, filling = go_modes
    cLI, cL, pl, items = coodata
    screen = self.screen
    if abs(self._position - new) > 0.5:
        # Sanity check: turtle should currently be at 'new'.
        print "undogoto: HALLO-DA-STIMMT-WAS-NICHT!"
    # restore former situation
    self.currentLineItem = cLI
    self.currentLine = cL
    if pl == [(0, 0), (0, 0)]:
        # Degenerate point list marks an invisible line.
        usepc = ""
    else:
        usepc = pc
    screen._drawline(cLI, pl, fill=usepc, width=ps)
    # Delete any line items created after the snapshot was taken.
    todelete = [i for i in self.items if (i not in items) and
                (screen._type(i) == "line")]
    for i in todelete:
        screen._delete(i)
        self.items.remove(i)
    start = old
    if self._speed and screen._tracing == 1:
        # Animate the way back, mirroring the forward animation in _goto.
        diff = old - new
        diffsq = (diff[0]*screen.xscale)**2 + (diff[1]*screen.yscale)**2
        nhops = 1+int((diffsq**0.5)/(3*(1.1**self._speed)*self._speed))
        delta = diff * (1.0/nhops)
        for n in range(1, nhops):
            if n == 1:
                top = True
            else:
                top = False
            self._position = new + delta * n
            if drawing:
                screen._drawline(self.drawingLineItem,
                                 (start, self._position),
                                 pc, ps, top)
            self._update()
        if drawing:
            screen._drawline(self.drawingLineItem, ((0, 0), (0, 0)),
                             fill="", width=ps)
    # Turtle now at position old,
    self._position = old
    ## if undo is done during creating a polygon, the last vertex
    ## will be deleted. if the polygon is entirely deleted,
    ## creatingPoly will be set to False.
    ## Polygons created before the last one will not be affected by undo()
    if self._creatingPoly:
        if len(self._poly) > 0:
            self._poly.pop()
        if self._poly == []:
            self._creatingPoly = False
            self._poly = None
    if filling:
        if self._fillpath == []:
            # Should not happen: the move being undone appended a vertex.
            self._fillpath = None
            print "Unwahrscheinlich in _undogoto!"
        elif self._fillpath is not None:
            self._fillpath.pop()
    self._update() #count=True)
def _rotate(self, angle):
    """Turns pen clockwise by angle.
    """
    if self.undobuffer:
        self.undobuffer.push(("rot", angle, self._degreesPerAU))
    angle *= self._degreesPerAU
    neworient = self._orient.rotate(angle)
    tracing = self.screen._tracing
    if tracing == 1 and self._speed > 0:
        # Animate the rotation in small steps; faster speed -> fewer steps.
        anglevel = 3.0 * self._speed
        steps = 1 + int(abs(angle)/anglevel)
        delta = 1.0*angle/steps
        for _ in range(steps):
            self._orient = self._orient.rotate(delta)
            self._update()
    # Set the exact final orientation to avoid accumulated FP error.
    self._orient = neworient
    self._update()
def _newLine(self, usePos=True):
    """Closes current line item and starts a new one.
    Remark: if current line became too long, animation
    performance (via _drawline) slowed down considerably.
    """
    if len(self.currentLine) > 1:
        # Flush the finished line and allocate a fresh canvas item.
        self.screen._drawline(self.currentLineItem, self.currentLine,
                              self._pencolor, self._pensize)
        self.currentLineItem = self.screen._createline()
        self.items.append(self.currentLineItem)
    else:
        # Nothing drawn yet: reuse the existing (empty) item.
        self.screen._drawline(self.currentLineItem, top=True)
        self.currentLine = []
    if usePos:
        self.currentLine = [self._position]
def fill(self, flag=None):
    """Call fill(True) before drawing a shape to fill, fill(False) when done.

    Optional argument:
    flag -- True/False (or 1/0 respectively)

    Call fill(True) before drawing the shape you want to fill,
    and fill(False) when done.
    When used without argument: return fillstate (True if filling,
    False else)

    Example (for a Turtle instance named turtle):
    >>> turtle.fill(True)
    >>> turtle.forward(100)
    >>> turtle.left(90)
    >>> turtle.forward(100)
    >>> turtle.left(90)
    >>> turtle.forward(100)
    >>> turtle.left(90)
    >>> turtle.forward(100)
    >>> turtle.fill(False)
    """
    # A list _fillpath means a fill is currently being recorded.
    filling = isinstance(self._fillpath, list)
    if flag is None:
        return filling
    screen = self.screen
    entry1 = entry2 = ()
    if filling:
        # Close the fill in progress: draw it if it has enough vertices.
        if len(self._fillpath) > 2:
            self.screen._drawpoly(self._fillitem, self._fillpath,
                                  fill=self._fillcolor)
        entry1 = ("dofill", self._fillitem)
    if flag:
        # Start recording a new fill from the current position.
        self._fillitem = self.screen._createpoly()
        self.items.append(self._fillitem)
        self._fillpath = [self._position]
        entry2 = ("beginfill", self._fillitem) # , self._fillpath)
        self._newLine()
    else:
        self._fillitem = self._fillpath = None
    # Push one undo entry, or a "seq" pair if a fill was both closed
    # and (re)started in this call.
    if self.undobuffer:
        if entry1 == ():
            if entry2 != ():
                self.undobuffer.push(entry2)
        else:
            if entry2 == ():
                self.undobuffer.push(entry1)
            else:
                self.undobuffer.push(["seq", entry1, entry2])
    self._update()
def begin_fill(self):
    """Called just before drawing a shape to be filled.

    No argument.

    Example (for a Turtle instance named turtle):
    >>> turtle.begin_fill()
    >>> turtle.forward(100)
    >>> turtle.left(90)
    >>> turtle.forward(100)
    >>> turtle.left(90)
    >>> turtle.forward(100)
    >>> turtle.left(90)
    >>> turtle.forward(100)
    >>> turtle.end_fill()
    """
    # Convenience wrapper around the stateful fill() interface.
    self.fill(True)
def end_fill(self):
    """Fill the shape drawn after the call begin_fill().

    No argument.

    Example (for a Turtle instance named turtle):
    >>> turtle.begin_fill()
    >>> turtle.forward(100)
    >>> turtle.left(90)
    >>> turtle.forward(100)
    >>> turtle.left(90)
    >>> turtle.forward(100)
    >>> turtle.left(90)
    >>> turtle.forward(100)
    >>> turtle.end_fill()
    """
    # Convenience wrapper around the stateful fill() interface.
    self.fill(False)
def dot(self, size=None, *color):
    """Draw a dot with diameter size, using color.

    Optional arguments:
    size -- an integer >= 1 (if given)
    color -- a colorstring or a numeric color tuple

    Draw a circular dot with diameter size, using color.
    If size is not given, the maximum of pensize+4 and 2*pensize is used.

    Example (for a Turtle instance named turtle):
    >>> turtle.dot()
    >>> turtle.fd(50); turtle.dot(20, "blue"); turtle.fd(50)
    """
    if not color:
        # With a single argument, it may actually be the color
        # (dot("blue")) rather than the size.
        if isinstance(size, (basestring, tuple)):
            color = self._colorstr(size)
            size = self._pensize + max(self._pensize, 4)
        else:
            color = self._pencolor
            if not size:
                size = self._pensize + max(self._pensize, 4)
    else:
        if size is None:
            size = self._pensize + max(self._pensize, 4)
        color = self._colorstr(color)
    if hasattr(self.screen, "_dot"):
        # Screen supports dots natively.
        item = self.screen._dot(self._position, size, color)
        self.items.append(item)
        if self.undobuffer:
            self.undobuffer.push(("dot", item))
    else:
        # Fallback: fake a dot with a zero-length fat pen stroke; save
        # and restore the whole pen state around it.
        pen = self.pen()
        if self.undobuffer:
            self.undobuffer.push(["seq"])
            self.undobuffer.cumulate = True
        try:
            if self.resizemode() == 'auto':
                self.ht()
            self.pendown()
            self.pensize(size)
            self.pencolor(color)
            self.forward(0)
        finally:
            self.pen(pen)
        if self.undobuffer:
            self.undobuffer.cumulate = False
def _write(self, txt, align, font):
"""Performs the writing for write()
"""
item, end = self.screen._write(self._position, txt, align, font,
self._pencolor)
self.items.append(item)
if self.undobuffer:
self.undobuffer.push(("wri", item))
return end
def write(self, arg, move=False, align="left", font=("Arial", 8, "normal")):
    """Write text at the current turtle position.

    Arguments:
    arg -- info, which is to be written to the TurtleScreen
    move (optional) -- True/False
    align (optional) -- one of the strings "left", "center" or right"
    font (optional) -- a triple (fontname, fontsize, fonttype)

    Write text - the string representation of arg - at the current
    turtle position according to align ("left", "center" or right")
    and with the given font.
    If move is True, the pen is moved to the bottom-right corner
    of the text. By default, move is False.

    Example (for a Turtle instance named turtle):
    >>> turtle.write('Home = ', True, align="center")
    >>> turtle.write((0,0), True)
    """
    buf = self.undobuffer
    if buf:
        # Record the write (and optional move) as one undoable sequence.
        buf.push(["seq"])
        buf.cumulate = True
    end = self._write(str(arg), align.lower(), font)
    if move:
        x, y = self.pos()
        self.setpos(end, y)
    if buf:
        buf.cumulate = False
def begin_poly(self):
    """Start recording the vertices of a polygon.

    No argument.

    Start recording the vertices of a polygon. Current turtle position
    is first point of polygon.

    Example (for a Turtle instance named turtle):
    >>> turtle.begin_poly()
    """
    # The movement code (_goto) appends vertices while this flag is set.
    self._creatingPoly = True
    self._poly = [self._position]
def end_poly(self):
    """Stop recording the vertices of a polygon.

    No argument.

    Stop recording the vertices of a polygon. Current turtle position is
    last point of polygon. This will be connected with the first point.

    Example (for a Turtle instance named turtle):
    >>> turtle.end_poly()
    """
    # Vertices recorded so far stay available via get_poly().
    self._creatingPoly = False
def get_poly(self):
    """Return the lastly recorded polygon.

    No argument.

    Example (for a Turtle instance named turtle):
    >>> p = turtle.get_poly()
    >>> turtle.register_shape("myFavouriteShape", p)
    """
    # No polygon recorded yet -> None (implicit in the original).
    if self._poly is None:
        return None
    return tuple(self._poly)
def getscreen(self):
    """Return the TurtleScreen object, the turtle is drawing on.

    No argument.

    Return the TurtleScreen object, the turtle is drawing on.
    So TurtleScreen-methods can be called for that object.

    Example (for a Turtle instance named turtle):
    >>> ts = turtle.getscreen()
    >>> ts
    <turtle.TurtleScreen object at 0x0106B770>
    >>> ts.bgcolor("pink")
    """
    screen = self.screen
    return screen
def getturtle(self):
    """Return the Turtleobject itself.

    No argument.

    Only reasonable use: as a function to return the 'anonymous turtle':

    Example:
    >>> pet = getturtle()
    >>> pet.fd(50)
    >>> pet
    <turtle.Turtle object at 0x0187D810>
    >>> turtles()
    [<turtle.Turtle object at 0x0187D810>]
    """
    return self

# Historical alias.
getpen = getturtle
################################################################
### screen oriented methods recurring to methods of TurtleScreen
################################################################
def window_width(self):
    """ Returns the width of the turtle window.

    No argument.

    Example (for a TurtleScreen instance named screen):
    >>> screen.window_width()
    640
    """
    size = self.screen._window_size()
    return size[0]
def window_height(self):
    """ Return the height of the turtle window.

    No argument.

    Example (for a TurtleScreen instance named screen):
    >>> screen.window_height()
    480
    """
    size = self.screen._window_size()
    return size[1]
def _delay(self, delay=None):
    """Set delay value which determines speed of turtle animation.
    """
    # Delegates to the screen's delay(); presumably returns the current
    # value when called without argument -- see TurtleScreen.delay.
    return self.screen.delay(delay)
##### event binding methods #####
def onclick(self, fun, btn=1, add=None):
    """Bind fun to mouse-click event on this turtle on canvas.

    Arguments:
    fun -- a function with two arguments, to which will be assigned
           the coordinates of the clicked point on the canvas.
    num -- number of the mouse-button defaults to 1 (left mouse button).
    add -- True or False. If True, new binding will be added, otherwise
           it will replace a former binding.

    Example for the anonymous turtle, i. e. the procedural way:

    >>> def turn(x, y):
    ...     left(360)
    ...
    >>> onclick(turn)  # Now clicking into the turtle will turn it.
    >>> onclick(None)  # event-binding will be removed
    """
    # Bind on the turtle's own canvas item, then refresh the display.
    self.screen._onclick(self.turtle._item, fun, btn, add)
    self._update()
def onrelease(self, fun, btn=1, add=None):
    """Bind fun to mouse-button-release event on this turtle on canvas.

    Arguments:
    fun -- a function with two arguments, to which will be assigned
           the coordinates of the clicked point on the canvas.
    num -- number of the mouse-button defaults to 1 (left mouse button).

    Example (for a MyTurtle instance named joe):
    >>> class MyTurtle(Turtle):
    ...     def glow(self,x,y):
    ...         self.fillcolor("red")
    ...     def unglow(self,x,y):
    ...         self.fillcolor("")
    ...
    >>> joe = MyTurtle()
    >>> joe.onclick(joe.glow)
    >>> joe.onrelease(joe.unglow)

    Clicking on joe turns fillcolor red, unclicking turns it to
    transparent.
    """
    # Bind on the turtle's own canvas item, then refresh the display.
    self.screen._onrelease(self.turtle._item, fun, btn, add)
    self._update()
def ondrag(self, fun, btn=1, add=None):
    """Bind fun to mouse-move event on this turtle on canvas.

    Arguments:
    fun -- a function with two arguments, to which will be assigned
           the coordinates of the clicked point on the canvas.
    num -- number of the mouse-button defaults to 1 (left mouse button).

    Every sequence of mouse-move-events on a turtle is preceded by a
    mouse-click event on that turtle.

    Example (for a Turtle instance named turtle):
    >>> turtle.ondrag(turtle.goto)

    Subsequently clicking and dragging a Turtle will move it
    across the screen thereby producing handdrawings (if pen is
    down).
    """
    # Bind on the turtle's own canvas item (no redraw needed here).
    self.screen._ondrag(self.turtle._item, fun, btn, add)
def _undo(self, action, data):
    """Does the main part of the work for undo()
    """
    if self.undobuffer is None:
        return
    if action == "rot":
        # Rotating back pushes a new "rot" entry; pop it again.
        angle, degPAU = data
        self._rotate(-angle*degPAU/self._degreesPerAU)
        dummy = self.undobuffer.pop()
    elif action == "stamp":
        stitem = data[0]
        self.clearstamp(stitem)
    elif action == "go":
        self._undogoto(data)
    elif action in ["wri", "dot"]:
        # Written text and dots are single canvas items: just delete them.
        item = data[0]
        self.screen._delete(item)
        self.items.remove(item)
    elif action == "dofill":
        # Make the filled polygon invisible again.
        item = data[0]
        self.screen._drawpoly(item, ((0, 0),(0, 0),(0, 0)),
                              fill="", outline="")
    elif action == "beginfill":
        item = data[0]
        self._fillitem = self._fillpath = None
        self.screen._delete(item)
        self.items.remove(item)
    elif action == "pen":
        # Restoring pen state pushes a new "pen" entry; pop it again.
        TPen.pen(self, data[0])
        self.undobuffer.pop()
def undo(self):
    """undo (repeatedly) the last turtle action.

    No argument.

    undo (repeatedly) the last turtle action.
    Number of available undo actions is determined by the size of
    the undobuffer.

    Example (for a Turtle instance named turtle):
    >>> for i in range(4):
    ...     turtle.fd(50); turtle.lt(80)
    ...
    >>> for i in range(8):
    ...     turtle.undo()
    ...
    """
    buf = self.undobuffer
    if buf is None:
        return
    entry = buf.pop()
    action, data = entry[0], entry[1:]
    if action != "seq":
        self._undo(action, data)
        return
    # A "seq" entry bundles several sub-entries; undo them in reverse.
    while data:
        sub = data.pop()
        self._undo(sub[0], sub[1:])
turtlesize = shapesize
RawPen = RawTurtle
### Screen - Singleton ########################
def Screen():
    """Return the singleton screen object.
    If none exists at the moment, create a new one and return it,
    else return the existing one."""
    screen = Turtle._screen
    if screen is None:
        screen = _Screen()
        Turtle._screen = screen
    return screen
class _Screen(TurtleScreen):
    """Singleton TurtleScreen subclass bound to the root Tk window."""

    # Class-level window state: created once, shared by the singleton.
    _root = None
    _canvas = None
    _title = _CFG["title"]

    def __init__(self):
        # XXX there is no need for this code to be conditional,
        # as there will be only a single _Screen instance, anyway
        # XXX actually, the turtle demo is injecting root window,
        # so perhaps the conditional creation of a root should be
        # preserved (perhaps by passing it as an optional parameter)
        if _Screen._root is None:
            _Screen._root = self._root = _Root()
            self._root.title(_Screen._title)
            self._root.ondestroy(self._destroy)
        if _Screen._canvas is None:
            # Read window geometry defaults from the configuration dict.
            width = _CFG["width"]
            height = _CFG["height"]
            canvwidth = _CFG["canvwidth"]
            canvheight = _CFG["canvheight"]
            leftright = _CFG["leftright"]
            topbottom = _CFG["topbottom"]
            self._root.setupcanvas(width, height, canvwidth, canvheight)
            _Screen._canvas = self._root._getcanvas()
            TurtleScreen.__init__(self, _Screen._canvas)
            self.setup(width, height, leftright, topbottom)

    def setup(self, width=_CFG["width"], height=_CFG["height"],
              startx=_CFG["leftright"], starty=_CFG["topbottom"]):
        """ Set the size and position of the main window.

        Arguments:
        width: as integer a size in pixels, as float a fraction of the screen.
          Default is 50% of screen.
        height: as integer the height in pixels, as float a fraction of the
          screen. Default is 75% of screen.
        startx: if positive, starting position in pixels from the left
          edge of the screen, if negative from the right edge
          Default, startx=None is to center window horizontally.
        starty: if positive, starting position in pixels from the top
          edge of the screen, if negative from the bottom edge
          Default, starty=None is to center window vertically.

        Examples (for a Screen instance named screen):
        >>> screen.setup (width=200, height=200, startx=0, starty=0)

        sets window to 200x200 pixels, in upper left of screen

        >>> screen.setup(width=.75, height=0.5, startx=None, starty=None)

        sets window to 75% of screen by 50% of screen and centers
        """
        if not hasattr(self._root, "set_geometry"):
            return
        sw = self._root.win_width()
        sh = self._root.win_height()
        # Floats in [0, 1] are interpreted as fractions of the screen size.
        if isinstance(width, float) and 0 <= width <= 1:
            width = sw*width
        if startx is None:
            startx = (sw - width) / 2
        if isinstance(height, float) and 0 <= height <= 1:
            height = sh*height
        if starty is None:
            starty = (sh - height) / 2
        self._root.set_geometry(width, height, startx, starty)
        self.update()

    def title(self, titlestring):
        """Set title of turtle-window

        Argument:
        titlestring -- a string, to appear in the titlebar of the
                       turtle graphics window.

        This is a method of Screen-class. Not available for TurtleScreen-
        objects.

        Example (for a Screen instance named screen):
        >>> screen.title("Welcome to the turtle-zoo!")
        """
        if _Screen._root is not None:
            _Screen._root.title(titlestring)
        # Remember the title for a root window created later.
        _Screen._title = titlestring

    def _destroy(self):
        # Tear down all singleton state so a fresh Screen() can be created.
        root = self._root
        if root is _Screen._root:
            Turtle._pen = None
            Turtle._screen = None
            _Screen._root = None
            _Screen._canvas = None
        TurtleScreen._RUNNING = False
        root.destroy()

    def bye(self):
        """Shut the turtlegraphics window.

        Example (for a TurtleScreen instance named screen):
        >>> screen.bye()
        """
        self._destroy()

    def exitonclick(self):
        """Go into mainloop until the mouse is clicked.

        No arguments.

        Bind bye() method to mouseclick on TurtleScreen.
        If "using_IDLE" - value in configuration dictionary is False
        (default value), enter mainloop.
        If IDLE with -n switch (no subprocess) is used, this value should be
        set to True in turtle.cfg. In this case IDLE's mainloop
        is active also for the client script.

        This is a method of the Screen-class and not available for
        TurtleScreen instances.

        Example (for a Screen instance named screen):
        >>> screen.exitonclick()
        """
        def exitGracefully(x, y):
            """Screen.bye() with two dummy-parameters"""
            self.bye()
        self.onclick(exitGracefully)
        if _CFG["using_IDLE"]:
            # IDLE's own mainloop is already running.
            return
        try:
            mainloop()
        except AttributeError:
            # The underlying Tk application was already destroyed.
            exit(0)
class Turtle(RawTurtle):
    """RawTurtle auto-creating (scrolled) canvas.

    When a Turtle object is created or a function derived from some
    Turtle method is called a TurtleScreen object is automatically created.
    """
    # Class-level singletons shared by all Turtle instances.
    _pen = None
    _screen = None

    def __init__(self,
                 shape=_CFG["shape"],
                 undobuffersize=_CFG["undobuffersize"],
                 visible=_CFG["visible"]):
        # Lazily create the singleton screen on first construction.
        if Turtle._screen is None:
            Turtle._screen = Screen()
        RawTurtle.__init__(self, Turtle._screen,
                           shape=shape,
                           undobuffersize=undobuffersize,
                           visible=visible)

Pen = Turtle
def write_docstringdict(filename="turtle_docstringdict"):
    """Create and write docstring-dictionary to file.

    Optional argument:
    filename -- a string, used as filename
                default value is turtle_docstringdict

    Has to be called explicitly, (not used by the turtle-graphics classes)
    The docstring dictionary will be written to the Python script <filname>.py
    It is intended to serve as a template for translation of the docstrings
    into different languages.
    """
    docsdict = {}
    # Collect the docstrings of all documented screen- and turtle-functions
    # (eval on internally generated keys only -- no external input).
    for methodname in _tg_screen_functions:
        key = "_Screen."+methodname
        docsdict[key] = eval(key).__doc__
    for methodname in _tg_turtle_functions:
        key = "Turtle."+methodname
        docsdict[key] = eval(key).__doc__
    keys = sorted([x for x in docsdict.keys()
                   if x.split('.')[1] not in _alias_list])
    f = open("%s.py" % filename,"w")
    # BUGFIX: close the file even if writing fails (the original leaked
    # the file object on error).
    try:
        f.write('docsdict = {\n\n')
        for key in keys[:-1]:
            f.write('%s :\n' % repr(key))
            f.write(' """%s\n""",\n\n' % docsdict[key])
        key = keys[-1]
        f.write('%s :\n' % repr(key))
        f.write(' """%s\n"""\n\n' % docsdict[key])
        f.write("}\n")
    finally:
        f.close()
def read_docstrings(lang):
    """Read in docstrings from lang-specific docstring dictionary.

    Transfer docstrings, translated to lang, from a dictionary-file
    to the methods of classes Screen and Turtle and - in revised form -
    to the corresponding functions.
    """
    # e.g. lang "german" -> module "turtle_docstringdict_german"
    modname = "turtle_docstringdict_%(language)s" % {'language':lang.lower()}
    module = __import__(modname)
    docsdict = module.docsdict
    for key in docsdict:
        #print key
        try:
            # Python 2: unbound methods expose the function via im_func.
            eval(key).im_func.__doc__ = docsdict[key]
        except BaseException:
            # A key that no longer names a method is reported, not fatal.
            print "Bad docstring-entry: %s" % key
# Load translated docstrings when a non-english language is configured;
# failure to find/import the dictionary is reported but never fatal.
_LANGUAGE = _CFG["language"]
try:
    if _LANGUAGE != "english":
        read_docstrings(_LANGUAGE)
except ImportError:
    # No docstring dictionary module shipped for this language.
    print "Cannot find docsdict for", _LANGUAGE
except BaseException:
    print ("Unknown Error when trying to import %s-docstring-dictionary" %
           _LANGUAGE)
def getmethparlist(ob):
    "Get strings describing the arguments for the given object"
    # Returns a pair of parenthesised strings:
    #   argText1 - parameter list with defaults, for a `def` line
    #   argText2 - bare argument list, for the delegating call
    argText1 = argText2 = ""
    # bit of a hack for methods - turn it into a function
    # but we drop the "self" param.
    if type(ob)==types.MethodType:
        fob = ob.im_func          # Python 2: underlying function of a method
        argOffset = 1             # skip "self"
    else:
        fob = ob
        argOffset = 0
    # Try and build one for Python defined functions
    if type(fob) in [types.FunctionType, types.LambdaType]:
        try:
            counter = fob.func_code.co_argcount
            items2 = list(fob.func_code.co_varnames[argOffset:counter])
            realArgs = fob.func_code.co_varnames[argOffset:counter]
            defaults = fob.func_defaults or []
            defaults = list(map(lambda name: "=%s" % repr(name), defaults))
            # Left-pad so defaults line up with the trailing parameters.
            defaults = [""] * (len(realArgs)-len(defaults)) + defaults
            # NOTE: Python 2 map() returns a list, which .append below needs.
            items1 = map(lambda arg, dflt: arg+dflt, realArgs, defaults)
            if fob.func_code.co_flags & 0x4:      # CO_VARARGS: *args present
                items1.append("*"+fob.func_code.co_varnames[counter])
                items2.append("*"+fob.func_code.co_varnames[counter])
                counter += 1
            if fob.func_code.co_flags & 0x8:      # CO_VARKEYWORDS: **kwargs
                items1.append("**"+fob.func_code.co_varnames[counter])
                items2.append("**"+fob.func_code.co_varnames[counter])
            argText1 = ", ".join(items1)
            argText1 = "(%s)" % argText1
            argText2 = ", ".join(items2)
            argText2 = "(%s)" % argText2
        except:
            # On any introspection failure fall through with empty strings,
            # which callers treat as "signature unavailable".
            pass
    return argText1, argText2
def _turtle_docrevise(docstr):
    """To reduce docstrings from RawTurtle class for functions

    Strips the example-turtle prefix (e.g. "turtle.") and the
    "(for a ... named turtle)" parenthesis from method docstrings so
    they read naturally as function documentation.
    """
    import re
    if docstr is None:
        return None
    example = _CFG["exampleturtle"]
    revised = docstr.replace("%s." % example, "")
    paren_pattern = re.compile(r' \(.+ %s\):' % example)
    return paren_pattern.sub(":", revised)
def _screen_docrevise(docstr):
    """To reduce docstrings from TurtleScreen class for functions

    Strips the example-screen prefix (e.g. "screen.") and the
    "(for a ... named screen)" parenthesis from method docstrings so
    they read naturally as function documentation.
    """
    import re
    if docstr is None:
        return None
    example = _CFG["examplescreen"]
    revised = docstr.replace("%s." % example, "")
    paren_pattern = re.compile(r' \(.+ %s\):' % example)
    return paren_pattern.sub(":", revised)
## The following mechanism makes all methods of RawTurtle and Turtle available
## as functions. So we can enhance, change, add, delete methods to these
## classes and do not need to change anything here.
__func_body = """\
def {name}{paramslist}:
if {obj} is None:
if not TurtleScreen._RUNNING:
TurtleScreen._RUNNING = True
raise Terminator
{obj} = {init}
try:
return {obj}.{name}{argslist}
except TK.TclError:
if not TurtleScreen._RUNNING:
TurtleScreen._RUNNING = True
raise Terminator
raise
"""
def _make_global_funcs(functions, cls, obj, init, docrevise):
    # For every method name in `functions`, generate (via exec of the
    # __func_body template) a module-level function that delegates to the
    # lazily created global screen/pen object, then attach a revised
    # docstring produced by `docrevise`.
    for methodname in functions:
        method = getattr(cls, methodname)
        pl1, pl2 = getmethparlist(method)
        if pl1 == "":
            # Signature could not be reconstructed; report and skip.
            print ">>>>>>", pl1, pl2
            continue
        defstr = __func_body.format(obj=obj, init=init, name=methodname,
                                    paramslist=pl1, argslist=pl2)
        exec defstr in globals()
        globals()[methodname].__doc__ = docrevise(method.__doc__)
# Generate the module-level functional interface from the class methods.
_make_global_funcs(_tg_screen_functions, _Screen,
                   'Turtle._screen', 'Screen()', _screen_docrevise)
_make_global_funcs(_tg_turtle_functions, Turtle,
                   'Turtle._pen', 'Turtle()', _turtle_docrevise)
# done() and mainloop() are both the Tk event loop entry point.
done = mainloop = TK.mainloop
if __name__ == "__main__":
    def switchpen():
        # Toggle the pen: lift it if it is down, lower it otherwise.
        if isdown():
            pu()
        else:
            pd()
    def demo1():
        """Demo of old turtle.py - module"""
        # Uses the legacy fill(1)/fill(0) API throughout.
        reset()
        tracer(True)
        up()
        backward(100)
        down()
        # draw 3 squares; the last filled
        width(3)
        for i in range(3):
            if i == 2:
                fill(1)
            for _ in range(4):
                forward(20)
                left(90)
            if i == 2:
                color("maroon")
                fill(0)
            up()
            forward(30)
            down()
        width(1)
        color("black")
        # move out of the way
        tracer(False)
        up()
        right(90)
        forward(100)
        right(90)
        forward(100)
        right(180)
        down()
        # some text
        write("startstart", 1)
        write(u"start", 1)
        color("red")
        # staircase
        for i in range(5):
            forward(20)
            left(90)
            forward(20)
            right(90)
        # filled staircase
        tracer(True)
        fill(1)
        for i in range(5):
            forward(20)
            left(90)
            forward(20)
            right(90)
        fill(0)
        # more text
    def demo2():
        """Demo of some new features."""
        # Spiral sketch with alternating pen state, then undo everything.
        speed(1)
        st()
        pensize(3)
        setheading(towards(0, 0))
        radius = distance(0, 0)/2.0
        rt(90)
        for _ in range(18):
            switchpen()
            circle(radius, 10)
        write("wait a moment...")
        while undobufferentries():
            undo()
        reset()
        lt(90)
        colormode(255)
        laenge = 10
        pencolor("green")
        pensize(3)
        lt(180)
        # Growing filled triangles with a red->blue colour sweep.
        for i in range(-2, 16):
            if i > 0:
                begin_fill()
                fillcolor(255-15*i, 0, 15*i)
            for _ in range(3):
                fd(laenge)
                lt(120)
            laenge += 10
            lt(15)
            speed((speed()+1)%12)
        end_fill()
        lt(120)
        pu()
        fd(70)
        rt(30)
        pd()
        color("red","yellow")
        speed(0)
        fill(1)
        for _ in range(4):
            circle(50, 90)
            rt(90)
            fd(30)
            rt(90)
        fill(0)
        lt(90)
        pu()
        fd(30)
        pd()
        shape("turtle")
        # Two turtles: `turtle` flees on a curve, `tri` chases it.
        tri = getturtle()
        tri.resizemode("auto")
        turtle = Turtle()
        turtle.resizemode(u"auto")
        turtle.shape("turtle")
        turtle.reset()
        turtle.left(90)
        turtle.speed(0)
        turtle.up()
        turtle.goto(280, 40)
        turtle.lt(30)
        turtle.down()
        turtle.speed(6)
        turtle.color("blue",u"orange")
        turtle.pensize(2)
        tri.speed(6)
        setheading(towards(turtle))
        count = 1
        while tri.distance(turtle) > 4:
            turtle.fd(3.5)
            turtle.lt(0.6)
            tri.setheading(tri.towards(turtle))
            tri.fd(4)
            if count % 20 == 0:
                turtle.stamp()
                tri.stamp()
                switchpen()
            count += 1
        tri.write("CAUGHT! ", font=("Arial", 16, "bold"), align=u"right")
        tri.pencolor("black")
        tri.pencolor(u"red")
        def baba(xdummy, ydummy):
            # Click handler bound below: clear everything and quit.
            clearscreen()
            bye()
        time.sleep(2)
        while undobufferentries():
            tri.undo()
            turtle.undo()
        tri.fd(50)
        tri.write(" Click me!", font = ("Courier", 12, "bold") )
        tri.onclick(baba, 1)
demo1()
demo2()
exitonclick()
|
{
"content_hash": "9ae0e9e43f4369a4a803468d2cf773eb",
"timestamp": "",
"source": "github",
"line_count": 4010,
"max_line_length": 97,
"avg_line_length": 34.4211970074813,
"alnum_prop": 0.5458200812872657,
"repo_name": "tequa/ammisoft",
"id": "264318effc3dc2d803ea9ab7fcf9c0e9cedf80f6",
"size": "139042",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "ammimain/WinPython-64bit-2.7.13.1Zero/python-2.7.13.amd64/Lib/lib-tk/turtle.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "9595"
},
{
"name": "C",
"bytes": "715524"
},
{
"name": "C#",
"bytes": "8440"
},
{
"name": "C++",
"bytes": "59535"
},
{
"name": "CSS",
"bytes": "5382"
},
{
"name": "F#",
"bytes": "2310"
},
{
"name": "Forth",
"bytes": "506"
},
{
"name": "Fortran",
"bytes": "67146"
},
{
"name": "GLSL",
"bytes": "1040"
},
{
"name": "HTML",
"bytes": "46416"
},
{
"name": "Inno Setup",
"bytes": "1227"
},
{
"name": "JavaScript",
"bytes": "24663"
},
{
"name": "Jupyter Notebook",
"bytes": "629939"
},
{
"name": "Makefile",
"bytes": "895"
},
{
"name": "Mask",
"bytes": "969"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "PowerShell",
"bytes": "3673"
},
{
"name": "Python",
"bytes": "29200377"
},
{
"name": "Shell",
"bytes": "6905"
},
{
"name": "Tcl",
"bytes": "2124176"
},
{
"name": "Visual Basic",
"bytes": "2144"
}
],
"symlink_target": ""
}
|
from operator import itemgetter
from .enums import WEBSOCKET_UPDATE_0SECOND
from .websockets import BinanceSocketManager
class DepthCache(object):
    """Order book cache for a single symbol.

    Price levels are stored as {price-string: quantity-float} maps and
    rendered as sorted ``[price, quantity]`` float pairs on request.
    """

    def __init__(self, symbol):
        """Intialise the DepthCache

        :param symbol: Symbol to create depth cache for
        :type symbol: string
        """
        self._symbol = symbol
        # BUGFIX: _bids/_asks used to be *class* attributes, so every
        # DepthCache instance (i.e. every symbol) shared the same order
        # book dictionaries. Bind fresh per-instance dicts here instead.
        self._bids = {}
        self._asks = {}

    def add_bid(self, bid):
        """Add a bid to the cache

        :param bid: [price, quantity] pair (strings as delivered by the API)
        """
        self._bids[bid[0]] = float(bid[1])
        # A quantity string of exactly "0.00000000" removes the price level.
        if bid[1] == "0.00000000":
            del self._bids[bid[0]]

    def add_ask(self, ask):
        """Add an ask to the cache

        :param ask: [price, quantity] pair (strings as delivered by the API)
        """
        self._asks[ask[0]] = float(ask[1])
        if ask[1] == "0.00000000":
            del self._asks[ask[0]]

    def get_bids(self):
        """Get the current bids

        :return: list of [price, quantity] floats, best (highest) bid first

        .. code-block:: python

            [
                [0.0001946, 45.0],     # price, quantity
                [0.00019459, 2384.0],
            ]
        """
        return DepthCache.sort_depth(self._bids, reverse=True)

    def get_asks(self):
        """Get the current asks

        :return: list of [price, quantity] floats, best (lowest) ask first

        .. code-block:: python

            [
                [0.0001955, 57.0],     # price, quantity
                [0.00019699, 778.0],
            ]
        """
        return DepthCache.sort_depth(self._asks, reverse=False)

    @staticmethod
    def sort_depth(vals, reverse=False):
        """Sort bids or asks by price

        :param vals: {price-string: quantity-float} map
        :param reverse: True for descending order (bids), False for ascending (asks)
        :return: list of [price, quantity] float pairs
        """
        lst = [[float(price), quantity] for price, quantity in vals.items()]
        lst = sorted(lst, key=itemgetter(0), reverse=reverse)
        return lst
class DepthCacheManager(object):
    """Keeps a DepthCache for one symbol up to date from the depth stream.

    Construction primes the cache from a REST order-book snapshot and then
    subscribes to the raw depth websocket; every applied event hands the
    refreshed cache to the user-supplied callback.
    """

    _first_update_id = 0
    _client = None
    _symbol = None
    _callback = None
    _bm = None
    _depth_cache = None

    def __init__(self, client, symbol, callback):
        """Intialise the DepthCacheManager

        :param client: Binance API client
        :type client: binance.Client
        :param symbol: Symbol to create depth cache for
        :type symbol: string
        :param callback: Function to receive depth cache updates
        :type callback: function
        """
        self._client = client
        self._symbol = symbol
        self._callback = callback
        self._depth_cache = DepthCache(symbol)
        self._init_cache()
        self._start_socket()

    def _init_cache(self):
        # Prime the cache from a REST snapshot and remember its update id
        # so older websocket events can be discarded.
        snapshot = self._client.get_order_book(symbol=self._symbol, limit=10)
        self._first_update_id = snapshot['lastUpdateId']
        for level in snapshot['bids']:
            self._depth_cache.add_bid(level)
        for level in snapshot['asks']:
            self._depth_cache.add_ask(level)

    def _start_socket(self):
        # Subscribe to the raw (0-second) depth stream for this symbol.
        self._bm = BinanceSocketManager(self._client)
        self._bm.start_depth_socket(self._symbol, self._depth_event,
                                    update_time=WEBSOCKET_UPDATE_0SECOND)
        self._bm.start()

    def _depth_event(self, msg):
        """Apply one websocket depth event to the cache.

        :param msg: depth event payload
        """
        # ignore any updates before the initial update id
        if msg['u'] <= self._first_update_id:
            return
        for level in msg['b']:
            self._depth_cache.add_bid(level)
        for level in msg['a']:
            self._depth_cache.add_ask(level)
        # Hand the refreshed cache to the subscriber.
        self._callback(self._depth_cache)

    def get_depth_cache(self):
        """Get the current depth cache

        :return: DepthCache object
        """
        return self._depth_cache
|
{
"content_hash": "fe8c6241d5785ea0f2485e2b002c3cb4",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 106,
"avg_line_length": 23.936842105263157,
"alnum_prop": 0.46745822339489884,
"repo_name": "57s/BeastTools",
"id": "146bd3cbd6f5b58730ec1f510ba85e38bd1abd29",
"size": "4586",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backend/api/api_binance/depthcache.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "131324"
}
],
"symlink_target": ""
}
|
from __future__ import division, print_function
from sqlalchemy import exists
from sqlalchemy.sql import func, and_
from .tables import Run, Tract, Patch, Source
from .connect import connect, Session
__all__ = ['HugsIngest']
class HugsIngest(object):
    """Ingest hugs pipeline output (tracts, patches, catalogs) into the DB.

    Construction resolves (or creates) the named run and caches its id;
    add_tract/add_patch then track the ids of the most recently touched
    rows so subsequent catalog inserts can reference them.
    """

    def __init__(self, session, run_name):
        self.run_name = run_name
        self.session = session
        self.current_tract_id = None
        self.current_patch_id = None
        matching = self.session.query(Run).filter(Run.name == run_name)
        found = matching.count()
        if found == 1:
            self.run_id = matching.first().id
        elif found == 0:
            # Unknown run: insert it and remember the freshly assigned id.
            self.session.add(Run(name=run_name))
            self.session.commit()
            self.run_id = self._get_current_id(Run.id)
        else:
            print('Warning {} rows in run name {}'.format(found, run_name))

    def _get_current_id(self, table_id):
        # The maximum id in the column belongs to the row just committed.
        newest = self.session.query(func.max(table_id)).first()
        return newest[0]

    def add_tract(self, tract):
        matching = self.session.query(Tract).filter(
            and_(Tract.hsc_id == tract, Tract.run_id == self.run_id))
        found = matching.count()
        if found == 1:
            self.current_tract_id = matching.first().id
        elif found == 0:
            self.session.add(Tract(hsc_id=tract, run_id=self.run_id))
            self.session.commit()
            self.current_tract_id = self._get_current_id(Tract.id)
        else:
            print('Warning {} rows with tract {}'.format(found, tract))

    def add_patch(self, patch, patch_meta):
        assert self.current_tract_id is not None
        new_patch = Patch(
            hsc_id=patch,
            x0=patch_meta.x0,
            y0=patch_meta.y0,
            small_frac=patch_meta.small_frac,
            cleaned_frac=patch_meta.cleaned_frac,
            bright_obj_frac=patch_meta.bright_obj_frac,
            good_data_frac=patch_meta.good_data_frac,
            tract_id=self.current_tract_id
        )
        self.session.add(new_patch)
        self.session.commit()
        self.current_patch_id = self._get_current_id(Patch.id)

    def add_catalog(self, catalog):
        """Append the patch's source catalog to the ``source`` table."""
        assert self.current_patch_id is not None
        catalog['patch_id'] = self.current_patch_id
        catalog.to_sql('source', self.session.bind,
                       if_exists='append', index=False)

    def add_injected_synths(self, synth_ids):
        # Same pattern as add_catalog, but for injected synthetic objects.
        assert self.current_patch_id is not None
        synth_ids['patch_id'] = self.current_patch_id
        synth_ids.to_sql('synth', self.session.bind,
                         if_exists='append', index=False)

    def add_all(self, tract, patch, patch_meta, catalog):
        self.add_tract(tract)
        self.add_patch(patch, patch_meta)
        self.add_catalog(catalog)
|
{
"content_hash": "c5611d43c0fc7ae74adba4341eace36f",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 78,
"avg_line_length": 35.358024691358025,
"alnum_prop": 0.5831005586592178,
"repo_name": "johnnygreco/hugs",
"id": "3fddf9168dedb8be4496bc35d2f17486c0527bae",
"size": "2864",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hugs/database/ingest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1396968"
},
{
"name": "Python",
"bytes": "168162"
}
],
"symlink_target": ""
}
|
# Prefer setuptools; fall back to distutils on minimal installations.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Long description = README followed by the changelog (HISTORY.rst).
with open('README.rst') as readme_file:
    readme = readme_file.read()
with open('HISTORY.rst') as history_file:
    history = history_file.read().replace('.. :changelog:', '')

# Runtime dependencies.
requirements = [
    'pash>=1.1.0'
]
test_requirements = [
    # TODO: put package test requirements here
]

setup(
    name='busybees',
    version='0.2.0',
    description="An implementation of threading that abstracts away the threading.",
    long_description=readme + '\n\n' + history,
    author="Ian McFarlane",
    author_email='iansmcfarlane@gmail.com',
    url='https://github.com/iansmcf/busybees',
    packages=[
        'busybees',
    ],
    package_dir={'busybees':
                 'busybees'},
    include_package_data=True,
    install_requires=requirements,
    license="BSD",
    zip_safe=False,
    keywords='busybees',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Programming Language :: Python :: 2.7',
    ],
    test_suite='tests',
    tests_require=test_requirements
)
|
{
"content_hash": "313a1f0abbe0f45af3ec118b5e765654",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 84,
"avg_line_length": 25.708333333333332,
"alnum_prop": 0.6296596434359806,
"repo_name": "iansmcf/busybees",
"id": "99d9ec029e950c143b97a45a01889bc00745cbe6",
"size": "1282",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1700"
},
{
"name": "Python",
"bytes": "19362"
}
],
"symlink_target": ""
}
|
from django.conf import settings
from django import template
from social.backends.utils import load_backends
register = template.Library()
@register.assignment_tag
def get_social_backends(*args, **kwargs):
    """Returns a dictionary of social enabled backends, in the form: {'backend name': backend class }"""
    # NOTE(review): assignment_tag was deprecated in Django 1.8 and removed
    # in 1.9 (use simple_tag instead) — confirm the targeted Django version.
    return load_backends(settings.AUTHENTICATION_BACKENDS)
|
{
"content_hash": "3a9d6761416bec890a76cca2598edf1b",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 104,
"avg_line_length": 31.166666666666668,
"alnum_prop": 0.7700534759358288,
"repo_name": "georgeyk/quickstartup",
"id": "3b0eee89e1ceadafc490228650bdb57c24904476",
"size": "392",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "quickstartup/accounts/templatetags/get_social_backends.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1828"
},
{
"name": "HTML",
"bytes": "40670"
},
{
"name": "JavaScript",
"bytes": "147701"
},
{
"name": "Makefile",
"bytes": "1285"
},
{
"name": "Python",
"bytes": "68168"
}
],
"symlink_target": ""
}
|
from .material import *
from .emitter import *
from .absorber import *
from .lambert import *
from .dielectric import *
from .conductor import *
from .modifiers import *
|
{
"content_hash": "30fdd07a0b937326780d9a6f829c80e4",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 25,
"avg_line_length": 19.11111111111111,
"alnum_prop": 0.7441860465116279,
"repo_name": "raysect/source",
"id": "9c75b535741fb84278a32063a2ee3d62060b0bbc",
"size": "1778",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "raysect/optical/material/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Cython",
"bytes": "2044904"
},
{
"name": "Python",
"bytes": "8301227"
},
{
"name": "Shell",
"bytes": "1685"
}
],
"symlink_target": ""
}
|
"""
POLL APP
This module describes the poll model.
Classes:
PollQuestion
PollAnswer
Functions:
n/a
Created on 26 Mar 2013
@author: michael
"""
from django.conf import settings
from django.contrib.sites.models import Site
from django.db import models
from tunobase.core import models as core_models
from tunobase.poll import managers
class PollQuestion(core_models.ImageModel, core_models.StateModel):
    """Set up the poll question fields."""

    question = models.CharField(max_length=1024)
    # Whether voters may pick more than one answer.
    multiple_choice = models.BooleanField(default=False)
    # Manual ordering weight; lower values list first (see Meta.ordering).
    order = models.PositiveIntegerField(default=0, db_index=True)
    # NOTE(review): null=True has no effect on ManyToManyField in Django —
    # confirm before removing, since it may touch recorded migrations.
    sites = models.ManyToManyField(Site, blank=True, null=True)
    # Users who have answered this poll — presumably used to prevent
    # repeat voting; verify against the voting views.
    users_answered = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        related_name='polls_answered',
        blank=True,
        null=True
    )

    class Meta:
        """First order by order and then by published at date."""
        ordering = ['order', '-publish_at']

    def __unicode__(self):
        """Return the poll question."""
        return u'%s - %s' % (self.question, self.sites.all())
class PollAnswer(core_models.StateModel):
    """Set up the poll answer fields."""

    poll = models.ForeignKey(PollQuestion, related_name='answers')
    answer = models.CharField(max_length=1024)
    # Denormalised tally of votes received by this answer.
    vote_count = models.PositiveIntegerField(default=0)
    # Manual ordering weight; lower values list first (see Meta.ordering).
    order = models.PositiveIntegerField(default=0, db_index=True)
    # NOTE(review): null=True has no effect on ManyToManyField in Django.
    sites = models.ManyToManyField(Site, blank=True, null=True)
    objects = managers.PollAnswerManager()

    class Meta:
        """First order by order and then by answer alphabetically."""
        ordering = ['order', 'answer']

    def __unicode__(self):
        """Return the poll answer option."""
        return u'%s' % self.answer
|
{
"content_hash": "b1e76686e500a47d6dab587178de9cdd",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 69,
"avg_line_length": 25.91304347826087,
"alnum_prop": 0.6772930648769575,
"repo_name": "unomena/tunobase",
"id": "c05f3d340e929b09ee5554f70e8be6d9cd24af91",
"size": "1788",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tunobase/poll/models.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "47213"
},
{
"name": "Python",
"bytes": "780960"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.enums import HTTP_HEADER
__product__ = "SecureIIS Web Server Security (BeyondTrust)"
def detect(get_page):
    """Fingerprint SecureIIS by probing with an oversized request header.

    The target is flagged when a plain request succeeds (non-404) while a
    request carrying a Transfer-Encoding header longer than 1024 bytes is
    answered with 404, which this protection appears to enforce.
    Both probes are always issued, matching the original behaviour.
    """
    _, _, normal_code = get_page()
    oversized_headers = {
        HTTP_HEADER.TRANSFER_ENCODING: 'a' * 1025,
        HTTP_HEADER.ACCEPT_ENCODING: "identity",
    }
    _, _, probe_code = get_page(auxHeaders=oversized_headers)
    return normal_code != 404 and probe_code == 404
|
{
"content_hash": "fcb7ddc30afdf9c8c7cf34c995942efb",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 131,
"avg_line_length": 32.86666666666667,
"alnum_prop": 0.6957403651115619,
"repo_name": "V11/volcano",
"id": "425ebdfe581a3cb1985e9811b9e96ce897fe4ff8",
"size": "516",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "server/sqlmap/waf/secureiis.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "118"
},
{
"name": "JavaScript",
"bytes": "41"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the free-text ``occupation`` and ``orcid`` fields to Person."""

    dependencies = [
        ('workshops', '0067_person_username_regexvalidator'),
    ]

    operations = [
        migrations.AddField(
            model_name='person',
            name='occupation',
            field=models.CharField(max_length=100, blank=True, verbose_name='Current occupation/career stage', default=''),
        ),
        migrations.AddField(
            model_name='person',
            name='orcid',
            field=models.CharField(max_length=100, blank=True, verbose_name='ORCID ID', default=''),
        ),
    ]
|
{
"content_hash": "3e4d3c9ee12c1c19d76771cdeb5abbf7",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 123,
"avg_line_length": 29,
"alnum_prop": 0.6041979010494752,
"repo_name": "vahtras/amy",
"id": "62c04d899fda3fef23bab90cbab934570d04deb9",
"size": "691",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "workshops/migrations/0068_auto_20160119_0629.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4505"
},
{
"name": "HTML",
"bytes": "216300"
},
{
"name": "JavaScript",
"bytes": "16883"
},
{
"name": "Makefile",
"bytes": "2167"
},
{
"name": "Python",
"bytes": "1090706"
}
],
"symlink_target": ""
}
|
from .base import BaseTestCase
class CommitteesSearchTestCase(BaseTestCase):
    """Exercise the committee list endpoint filtered by state/chamber."""

    url_tmpl = '/api/v1/committees/'
    data = dict(state='ex', chamber='lower')

    def test_count(self):
        # The API must return exactly the rows the backing query matches.
        self.assertEquals(
            len(self.json),
            self.db.committees.find(self.data).count())

    def test_correct_keys_present(self):
        # Each serialized committee exposes exactly this key set.
        expected_keys = set([
            u'level', u'country', u'updated_at', u'parent_id',
            u'state', u'subcommittee', u'committee', u'chamber', u'id', 'all_ids'])
        self.assertEquals(set(self.json[0]), expected_keys)

    def test_status(self):
        self.assert_200()
class CommitteeLookupTestCase(BaseTestCase):
    """Exercise the single-committee detail endpoint."""

    url_tmpl = '/api/v1/committees/{committee_id}/'
    url_args = dict(committee_id='EXC000001')

    def test_state(self):
        '''Make sure the returned data has the correct
        level field value.
        '''
        self.assertEquals(self.json['state'], 'ex')

    def test_correct_keys_present(self):
        # Detail view adds 'members' on top of the list-view key set.
        expected_keys = set([
            u'members', u'level', u'country', u'updated_at',
            u'parent_id', u'state', u'subcommittee',
            u'committee', u'chamber', u'id', 'all_ids'])
        self.assertEquals(set(self.json), expected_keys)

    def test_id(self):
        self.assertEquals(self.json['id'], 'EXC000001')

    def test_status(self):
        self.assert_200()
|
{
"content_hash": "ad80d078ad2c6c92c18cfb1fb468f47f",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 83,
"avg_line_length": 30.130434782608695,
"alnum_prop": 0.6024531024531025,
"repo_name": "mileswwatkins/billy",
"id": "78afcf71652870da56500058a34f2096aef0cd9d",
"size": "1386",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "billy/web/api/tests/test_committees.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "123033"
},
{
"name": "JavaScript",
"bytes": "30612"
},
{
"name": "Python",
"bytes": "572322"
}
],
"symlink_target": ""
}
|
"""
Test suite for VMwareAPI.
"""
import urllib2
import mox
from oslo.config import cfg
from nova.compute import power_state
from nova.compute import task_states
from nova import context
from nova import db
from nova import exception
from nova import test
import nova.tests.image.fake
from nova.tests import matchers
from nova.tests import utils
from nova.tests.vmwareapi import db_fakes
from nova.tests.vmwareapi import stubs
from nova.virt.vmwareapi import driver
from nova.virt.vmwareapi import fake as vmwareapi_fake
from nova.virt.vmwareapi import vim
from nova.virt.vmwareapi import vm_util
class fake_vm_ref(object):
    """Stand-in for a VMware VirtualMachine managed-object reference."""

    def __init__(self):
        # Fixed dummy identifier and managed-object type.
        self.value = 4
        self._type = 'VirtualMachine'
class fake_http_resp(object):
    """Minimal HTTP response double: always 200 with a canned body."""

    def __init__(self):
        self.code = 200

    def read(self):
        # Canned body for console-log requests.
        return "console log"
class VMwareAPIConfTestCase(test.TestCase):
    """Unit tests for VMWare API configurations."""

    def setUp(self):
        super(VMwareAPIConfTestCase, self).setUp()

    def tearDown(self):
        super(VMwareAPIConfTestCase, self).tearDown()

    def test_configure_without_wsdl_loc_override(self):
        # Test the default configuration behavior. By default,
        # use the WSDL sitting on the host we are talking to in
        # order to bind the SOAP client.
        wsdl_loc = cfg.CONF.vmwareapi_wsdl_loc
        self.assertIsNone(wsdl_loc)
        wsdl_url = vim.Vim.get_wsdl_url("https", "www.example.com")
        url = vim.Vim.get_soap_url("https", "www.example.com")
        self.assertEqual("https://www.example.com/sdk/vimService.wsdl",
                         wsdl_url)
        self.assertEqual("https://www.example.com/sdk", url)

    def test_configure_with_wsdl_loc_override(self):
        # Use the setting vmwareapi_wsdl_loc to override the
        # default path to the WSDL.
        #
        # This is useful as a work-around for XML parsing issues
        # found when using some WSDL in combination with some XML
        # parsers.
        #
        # The wsdl_url should point to a different host than the one we
        # are actually going to send commands to.
        fake_wsdl = "https://www.test.com/sdk/foo.wsdl"
        self.flags(vmwareapi_wsdl_loc=fake_wsdl)
        wsdl_loc = cfg.CONF.vmwareapi_wsdl_loc
        self.assertIsNotNone(wsdl_loc)
        self.assertEqual(fake_wsdl, wsdl_loc)
        wsdl_url = vim.Vim.get_wsdl_url("https", "www.example.com")
        url = vim.Vim.get_soap_url("https", "www.example.com")
        # Only the WSDL location changes; the SOAP endpoint is untouched.
        self.assertEqual(fake_wsdl, wsdl_url)
        self.assertEqual("https://www.example.com/sdk", url)
class VMwareAPIVMTestCase(test.TestCase):
"""Unit tests for Vmware API connection calls."""
    def setUp(self):
        """Wire the ESX driver to the fake VMware API and fake services."""
        super(VMwareAPIVMTestCase, self).setUp()
        self.context = context.RequestContext('fake', 'fake', is_admin=False)
        self.flags(vmwareapi_host_ip='test_url',
                   vmwareapi_host_username='test_username',
                   vmwareapi_host_password='test_pass',
                   vnc_enabled=False,
                   use_linked_clone=False)
        self.user_id = 'fake'
        self.project_id = 'fake'
        # NOTE(review): this overwrites the context created above — confirm
        # the first RequestContext is really needed.
        self.context = context.RequestContext(self.user_id, self.project_id)
        vmwareapi_fake.reset()
        db_fakes.stub_out_db_instance_api(self.stubs)
        stubs.set_stubs(self.stubs)
        self.conn = driver.VMwareESXDriver(None, False)
        # NOTE(vish): none of the network plugging code is actually
        # being tested
        self.network_info = utils.get_test_network_info(legacy_model=False)
        self.image = {
            'id': 'c1c8ce3d-c2e0-4247-890c-ccf5cc1c004c',
            'disk_format': 'vhd',
            'size': 512,
        }
        nova.tests.image.fake.stub_out_image_service(self.stubs)
    def tearDown(self):
        # Reset fake API state and the fake image service between tests.
        super(VMwareAPIVMTestCase, self).tearDown()
        vmwareapi_fake.cleanup()
        nova.tests.image.fake.FakeImageService_reset()
    def _create_instance_in_the_db(self):
        # Minimal fake instance record; 'm1.large' drives the memory/vcpu
        # values that _check_vm_record/_check_vm_info verify later.
        values = {'name': 1,
                  'id': 1,
                  'project_id': self.project_id,
                  'user_id': self.user_id,
                  'image_ref': "1",
                  'kernel_id': "1",
                  'ramdisk_id': "1",
                  'mac_address': "de:ad:be:ef:be:ef",
                  'instance_type': 'm1.large',
                  }
        self.instance = db.instance_create(None, values)
    def _create_vm(self):
        """Create and spawn the VM."""
        self._create_instance_in_the_db()
        # Cache the flavor so the _check_vm_* helpers can compare against it.
        self.type_data = db.instance_type_get_by_name(None, 'm1.large')
        self.conn.spawn(self.context, self.instance, self.image,
                        injected_files=[], admin_password=None,
                        network_info=self.network_info,
                        block_device_info=None)
        self._check_vm_record()
    def _check_vm_record(self):
        """
        Check if the spawned VM's properties correspond to the instance in
        the db.
        """
        instances = self.conn.list_instances()
        self.assertEquals(len(instances), 1)
        # Get Nova record for VM
        vm_info = self.conn.get_info({'name': 1})
        # Get record for VM
        vms = vmwareapi_fake._get_objects("VirtualMachine")
        vm = vms[0]
        # Check that m1.large above turned into the right thing.
        # long() is Python 2; << 10 converts MB to KiB.
        mem_kib = long(self.type_data['memory_mb']) << 10
        vcpus = self.type_data['vcpus']
        self.assertEquals(vm_info['max_mem'], mem_kib)
        self.assertEquals(vm_info['mem'], mem_kib)
        self.assertEquals(vm.get("summary.config.numCpu"), vcpus)
        self.assertEquals(vm.get("summary.config.memorySizeMB"),
                          self.type_data['memory_mb'])
        # Check that the VM is running according to Nova
        self.assertEquals(vm_info['state'], power_state.RUNNING)
        # Check that the VM is running according to vSphere API.
        self.assertEquals(vm.get("runtime.powerState"), 'poweredOn')
    def _check_vm_info(self, info, pwr_state=power_state.RUNNING):
        """
        Check if the get_info returned values correspond to the instance
        object in the db.
        """
        # long() is Python 2; << 10 converts MB to KiB.
        mem_kib = long(self.type_data['memory_mb']) << 10
        self.assertEquals(info["state"], pwr_state)
        self.assertEquals(info["max_mem"], mem_kib)
        self.assertEquals(info["mem"], mem_kib)
        self.assertEquals(info["num_cpu"], self.type_data['vcpus'])
    def test_list_instances(self):
        # A fresh fake environment contains no instances.
        instances = self.conn.list_instances()
        self.assertEquals(len(instances), 0)
    def test_list_instances_1(self):
        # After spawning one VM, exactly one instance is listed.
        self._create_vm()
        instances = self.conn.list_instances()
        self.assertEquals(len(instances), 1)
    def test_list_interfaces(self):
        # The spawned VM exposes one interface; 4000 is the id the fake
        # VMware API assigns — confirm against vmwareapi_fake.
        self._create_vm()
        interfaces = self.conn.list_interfaces(1)
        self.assertEquals(len(interfaces), 1)
        self.assertEquals(interfaces[0], 4000)
    def test_spawn(self):
        # Spawn must leave the VM in the RUNNING power state.
        self._create_vm()
        info = self.conn.get_info({'name': 1})
        self._check_vm_info(info, power_state.RUNNING)
    def test_snapshot(self):
        # snapshot() must step the task state IMAGE_PENDING_UPLOAD ->
        # IMAGE_UPLOADING (in that order) and leave the VM running.
        expected_calls = [
            {'args': (),
             'kwargs':
                 {'task_state': task_states.IMAGE_PENDING_UPLOAD}},
            {'args': (),
             'kwargs':
                 {'task_state': task_states.IMAGE_UPLOADING,
                  'expected_state': task_states.IMAGE_PENDING_UPLOAD}}]
        func_call_matcher = matchers.FunctionCallMatcher(expected_calls)
        self._create_vm()
        info = self.conn.get_info({'name': 1})
        self._check_vm_info(info, power_state.RUNNING)
        self.conn.snapshot(self.context, self.instance, "Test-Snapshot",
                           func_call_matcher.call)
        info = self.conn.get_info({'name': 1})
        self._check_vm_info(info, power_state.RUNNING)
        # None means every expected call was observed.
        self.assertIsNone(func_call_matcher.match())
    def test_snapshot_non_existent(self):
        # Snapshotting a DB record with no backing VM raises InstanceNotFound.
        self._create_instance_in_the_db()
        self.assertRaises(exception.InstanceNotFound, self.conn.snapshot,
                          self.context, self.instance, "Test-Snapshot",
                          lambda *args, **kwargs: None)
    def test_reboot(self):
        # A soft reboot of a running VM keeps it RUNNING.
        self._create_vm()
        info = self.conn.get_info({'name': 1})
        self._check_vm_info(info, power_state.RUNNING)
        reboot_type = "SOFT"
        self.conn.reboot(self.context, self.instance, self.network_info,
                         reboot_type)
        info = self.conn.get_info({'name': 1})
        self._check_vm_info(info, power_state.RUNNING)
    def test_reboot_non_existent(self):
        # Rebooting a DB record with no backing VM raises InstanceNotFound.
        self._create_instance_in_the_db()
        self.assertRaises(exception.InstanceNotFound, self.conn.reboot,
                          self.context, self.instance, self.network_info,
                          'SOFT')
    def test_reboot_not_poweredon(self):
        # A suspended VM cannot be rebooted: InstanceRebootFailure.
        self._create_vm()
        info = self.conn.get_info({'name': 1})
        self._check_vm_info(info, power_state.RUNNING)
        self.conn.suspend(self.instance)
        info = self.conn.get_info({'name': 1})
        self._check_vm_info(info, power_state.SUSPENDED)
        self.assertRaises(exception.InstanceRebootFailure, self.conn.reboot,
                          self.context, self.instance, self.network_info,
                          'SOFT')
    def test_suspend(self):
        # Suspending a running VM moves it RUNNING -> SUSPENDED.
        self._create_vm()
        info = self.conn.get_info({'name': 1})
        self._check_vm_info(info, power_state.RUNNING)
        self.conn.suspend(self.instance)
        info = self.conn.get_info({'name': 1})
        self._check_vm_info(info, power_state.SUSPENDED)
    def test_suspend_non_existent(self):
        # Suspending a DB record with no backing VM raises InstanceNotFound.
        self._create_instance_in_the_db()
        self.assertRaises(exception.InstanceNotFound, self.conn.suspend,
                          self.instance)
    def test_resume(self):
        # Resume returns a suspended VM to RUNNING.
        self._create_vm()
        info = self.conn.get_info({'name': 1})
        self._check_vm_info(info, power_state.RUNNING)
        self.conn.suspend(self.instance)
        info = self.conn.get_info({'name': 1})
        self._check_vm_info(info, power_state.SUSPENDED)
        self.conn.resume(self.instance, self.network_info)
        info = self.conn.get_info({'name': 1})
        self._check_vm_info(info, power_state.RUNNING)
    def test_resume_non_existent(self):
        # Resuming a DB record with no backing VM raises InstanceNotFound.
        self._create_instance_in_the_db()
        self.assertRaises(exception.InstanceNotFound, self.conn.resume,
                          self.instance, self.network_info)
def test_resume_not_suspended(self):
    """Resuming a VM that is RUNNING (not suspended) raises InstanceResumeFailure."""
    self._create_vm()
    info = self.conn.get_info({'name': 1})
    self._check_vm_info(info, power_state.RUNNING)
    self.assertRaises(exception.InstanceResumeFailure, self.conn.resume,
                      self.instance, self.network_info)
def test_power_on(self):
    """Power a VM off then back on, checking SHUTDOWN and RUNNING states."""
    self._create_vm()
    info = self.conn.get_info({'name': 1})
    self._check_vm_info(info, power_state.RUNNING)
    self.conn.power_off(self.instance)
    info = self.conn.get_info({'name': 1})
    self._check_vm_info(info, power_state.SHUTDOWN)
    self.conn.power_on(self.instance)
    info = self.conn.get_info({'name': 1})
    self._check_vm_info(info, power_state.RUNNING)
def test_power_on_non_existent(self):
    """Powering on a never-spawned instance raises InstanceNotFound."""
    self._create_instance_in_the_db()
    self.assertRaises(exception.InstanceNotFound, self.conn.power_on,
                      self.instance)
def test_power_off(self):
    """Power off a running VM and verify the reported state is SHUTDOWN."""
    self._create_vm()
    info = self.conn.get_info({'name': 1})
    self._check_vm_info(info, power_state.RUNNING)
    self.conn.power_off(self.instance)
    info = self.conn.get_info({'name': 1})
    self._check_vm_info(info, power_state.SHUTDOWN)
def test_power_off_non_existent(self):
    """Powering off a never-spawned instance raises InstanceNotFound."""
    self._create_instance_in_the_db()
    self.assertRaises(exception.InstanceNotFound, self.conn.power_off,
                      self.instance)
def test_power_off_suspended(self):
    """Powering off a suspended VM raises InstancePowerOffFailure."""
    self._create_vm()
    self.conn.suspend(self.instance)
    info = self.conn.get_info({'name': 1})
    self._check_vm_info(info, power_state.SUSPENDED)
    self.assertRaises(exception.InstancePowerOffFailure,
                      self.conn.power_off, self.instance)
def test_get_info(self):
    """get_info on a freshly spawned VM reports RUNNING."""
    self._create_vm()
    info = self.conn.get_info({'name': 1})
    self._check_vm_info(info, power_state.RUNNING)
def test_destroy(self):
    """Destroying a spawned VM removes it from the instance list."""
    self._create_vm()
    info = self.conn.get_info({'name': 1})
    self._check_vm_info(info, power_state.RUNNING)
    instances = self.conn.list_instances()
    self.assertEquals(len(instances), 1)
    self.conn.destroy(self.instance, self.network_info)
    instances = self.conn.list_instances()
    self.assertEquals(len(instances), 0)
def test_destroy_non_existent(self):
    """Destroying a never-spawned instance is a no-op returning None."""
    self._create_instance_in_the_db()
    self.assertEquals(self.conn.destroy(self.instance, self.network_info),
                      None)
def test_pause(self):
    """Placeholder: pause is not exercised by this suite."""
    pass
def test_unpause(self):
    """Placeholder: unpause is not exercised by this suite."""
    pass
def test_diagnostics(self):
    """Placeholder: diagnostics is not exercised by this suite."""
    pass
def test_get_console_output(self):
    """get_console_output resolves the VM ref and fetches over HTTP.

    vm_util.get_vm_ref_from_name and urllib2.urlopen are mocked with mox,
    so the expectations below are order-sensitive.
    """
    vm_ref = fake_vm_ref()
    result = fake_http_resp()
    self._create_instance_in_the_db()
    self.mox.StubOutWithMock(vm_util, 'get_vm_ref_from_name')
    self.mox.StubOutWithMock(urllib2, 'urlopen')
    # Expect the driver to look up the ref by instance name ...
    vm_util.get_vm_ref_from_name(mox.IgnoreArg(), self.instance['name']).\
        AndReturn(vm_ref)
    # ... and then fetch the console log via an HTTP request.
    urllib2.urlopen(mox.IgnoreArg()).AndReturn(result)
    self.mox.ReplayAll()
    self.conn.get_console_output(self.instance)
class VMwareAPIHostTestCase(test.TestCase):
    """Unit tests for Vmware API host calls."""

    def setUp(self):
        super(VMwareAPIHostTestCase, self).setUp()
        # Point the driver at the stubbed, in-memory fake VMware API.
        self.flags(vmwareapi_host_ip='test_url',
                   vmwareapi_host_username='test_username',
                   vmwareapi_host_password='test_pass')
        vmwareapi_fake.reset()
        stubs.set_stubs(self.stubs)
        self.conn = driver.VMwareESXDriver(False)

    def tearDown(self):
        super(VMwareAPIHostTestCase, self).tearDown()
        vmwareapi_fake.cleanup()

    def test_host_state(self):
        # The asserted values presumably mirror the vmwareapi_fake
        # fixtures -- confirm against that module if they drift.
        stats = self.conn.get_host_stats()
        self.assertEquals(stats['vcpus'], 16)
        self.assertEquals(stats['disk_total'], 1024)
        self.assertEquals(stats['disk_available'], 500)
        self.assertEquals(stats['disk_used'], 1024 - 500)
        self.assertEquals(stats['host_memory_total'], 1024)
        self.assertEquals(stats['host_memory_free'], 1024 - 500)
        supported_instances = [('i686', 'vmware', 'hvm'),
                               ('x86_64', 'vmware', 'hvm')]
        self.assertEquals(stats['supported_instances'], supported_instances)

    def _test_host_action(self, method, action, expected=None):
        # Helper: invoke a host-level driver call and compare its result.
        result = method('host', action)
        self.assertEqual(result, expected)

    def test_host_reboot(self):
        self._test_host_action(self.conn.host_power_action, 'reboot')

    def test_host_shutdown(self):
        self._test_host_action(self.conn.host_power_action, 'shutdown')

    def test_host_startup(self):
        self._test_host_action(self.conn.host_power_action, 'startup')

    def test_host_maintenance_on(self):
        self._test_host_action(self.conn.host_maintenance_mode, True)

    def test_host_maintenance_off(self):
        self._test_host_action(self.conn.host_maintenance_mode, False)
|
{
"content_hash": "bd40c1d49b3bb91cedaa5da8099351b9",
"timestamp": "",
"source": "github",
"line_count": 412,
"max_line_length": 78,
"avg_line_length": 38.050970873786405,
"alnum_prop": 0.6058557121898323,
"repo_name": "sridevikoushik31/nova",
"id": "f63e403d6d9820f405eee1bc7910249bfdb7a812",
"size": "16411",
"binary": false,
"copies": "1",
"ref": "refs/heads/port_id_in_vif_on_devide",
"path": "nova/tests/test_vmwareapi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "9944606"
},
{
"name": "Ruby",
"bytes": "782"
},
{
"name": "Shell",
"bytes": "17522"
}
],
"symlink_target": ""
}
|
"""
Implementation of basic functions.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
from ._core import Mean
from ..kernels._distances import rescale, dist
__all__ = ['Zero', 'Constant', 'Linear', 'Quadratic']
class Zero(Mean):
    """
    Mean function that is identically zero for every input.
    """

    def get_mean(self, X):
        # One zero per input row.
        return np.zeros(len(X))

    def get_grad(self, X):
        # No hyperparameters, so the gradient iterator is empty.
        return iter(())

    def get_gradx(self, X):
        # The derivative with respect to the inputs vanishes everywhere.
        return np.zeros_like(X)
class Constant(Mean):
    """
    Mean function that returns a fixed bias value for every input.
    """

    def __init__(self, bias=0):
        super(Constant, self).__init__()
        # Register the bias as the single tunable hyperparameter.
        self._bias = self._register('bias', bias)

    def __info__(self):
        return [('bias', self._bias)]

    def get_mean(self, X):
        # The same bias, repeated once per input row.
        return np.full(len(X), self._bias)

    def get_grad(self, X):
        # d(mean)/d(bias) is one for every input.
        yield np.ones(len(X))

    def get_gradx(self, X):
        # A constant has zero derivative with respect to the inputs.
        return np.zeros_like(X)
class Linear(Mean):
    """
    Mean function for linear trends: mean(x) = slopes . x + bias.
    """

    def __init__(self, bias=0, slopes=0):
        super(Linear, self).__init__()
        self._bias = self._register('bias', bias)
        # One slope per input dimension (registered with shape ('d',)).
        self._slopes = self._register('slopes', slopes, shape=('d',))

    def __info__(self):
        info = []
        info.append(('bias', self._bias))
        info.append(('slopes', self._slopes))
        return info

    def _slopes_vector(self, ndim):
        """Return the slopes as a length-`ndim` vector, broadcasting a scalar."""
        slopes = np.asarray(self._slopes)
        if slopes.size == 1:
            # A scalar slope applies uniformly along every input dimension.
            return np.full(ndim, slopes.ravel()[0])
        return slopes

    def get_mean(self, X):
        """Evaluate the linear mean at inputs X of shape (n, d)."""
        # BUG FIX: the slope vector must match the number of input
        # dimensions X.shape[1]; the original sized it by len(X) (rows).
        return X.dot(self._slopes_vector(X.shape[1])) + self._bias

    def get_grad(self, X):
        """Yield the gradient wrt each slope: the corresponding input column."""
        # BUG FIX: the original iterated "for i in X.shape[1]" over a bare
        # int, which raises TypeError; range() is required.
        # NOTE(review): __info__ lists 'bias' before 'slopes' but no bias
        # gradient is yielded here -- confirm against Mean's grad contract.
        for i in range(X.shape[1]):
            yield X[:, i]

    def get_gradx(self, X):
        """Gradient wrt the inputs: the slope vector (constant in X)."""
        # BUG FIX: sized by X.shape[1] rather than len(X), as in get_mean.
        return self._slopes_vector(X.shape[1])
class Quadratic(Mean):
    """Quadratic mean: bias minus the squared, width-scaled distance to a centre."""

    def __init__(self, bias, centre, widths, ndim=None):
        super(Quadratic, self).__init__()
        self._bias = self._register('bias', bias)
        self._centre = self._register('centre', centre, shape=('d',))
        self._widths = self._register('widths', widths, shape=('d',))

        # FIXME: for now _iso and ndim are ignored
        self._iso = False
        self.ndim = np.size(self._widths)

    def __info__(self):
        info = []
        info.append(('bias', self._bias))
        info.append(('centre', self._centre))
        info.append(('widths', self._widths))
        return info

    def get_mean(self, X):
        # mean(x) = bias - || (x - centre) / widths ||^2
        X0 = np.array(self._centre, ndmin=2)
        X, X0 = rescale(self._widths, X, X0)
        return self._bias - dist(X, X0).ravel() ** 2

    def get_grad(self, X):
        """Yield gradients wrt bias, centre and widths (matching __info__ order)."""
        # d(mean)/d(bias) = 1 for every input.
        yield np.ones(len(X))
        # d(mean)/d(centre) = 2 (x - centre) / widths**2, one column per dim.
        X0 = np.array(self._centre, ndmin=2)
        D = 2 * (X - X0) / (self._widths ** 2)
        for Di in D.T:
            yield Di
        # d(mean)/d(widths) = 2 (x - centre)**2 / widths**3; after rescale,
        # (X - X0)**2 is (x - centre)**2 / widths**2, so K * D2 gives it.
        X, X0 = rescale(self._widths, X, X0)
        D2 = (X - X0) ** 2
        K = 2 / self._widths
        G = K * D2
        for Gi in G.T:
            yield Gi

    def get_gradx(self, X):
        """Gradient wrt the inputs X."""
        X0 = np.array(self._centre, ndmin=2)
        D = X - X0
        return -2 * D / (self._widths ** 2)
|
{
"content_hash": "b5e10be0581d877718e87caa8d6f3882",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 69,
"avg_line_length": 26.165354330708663,
"alnum_prop": 0.5356605476978634,
"repo_name": "mwhoffman/reggie",
"id": "10160f4c985ce8b7c49a79a6b05e1b96a949b011",
"size": "3323",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reggie/means/basic.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "113546"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
import pandas as pd
from sklearn.decomposition import PCA
from scipy.interpolate import splev, splrep
import logging
log = logging.getLogger(__name__)
__all__ = ["do_pca", "time_interpolate", "back_proj_interp",
"back_proj_pca", "transformations_matrix", "interp_series"]
def do_pca(trajs,
           pca=None,
           coords=['x', 'y', 'z'],
           suffix='_pca',
           append=False, return_pca=False):
    '''
    Performs a principal component analysis on the input coordinates.

    Parameters
    ----------
    trajs : :class:`pandas.DataFrame`
        Must contain the columns listed in `coords` plus a 't' column.
    pca : object, optional
        Transformer exposing ``fit_transform`` (e.g.
        :class:`sklearn.decomposition.PCA`); a fresh ``PCA()`` is built
        when None.
    coords : list of column names, default ``['x', 'y', 'z']``
        Coordinates on which the PCA is performed.
    suffix : str
        Suffix for the rotated column names; only applied when appending.
    append : bool
        When True, write the rotated coordinates back onto `trajs` as
        ``<coord><suffix>`` columns and return `trajs` itself.
    return_pca : bool
        When True, also return the `pca` object.
    '''
    if pca is None:
        pca = PCA()

    if not np.all(np.isfinite(trajs[coords])):
        log.warning('''Droping non finite values before performing PCA''')

    # Fit on the finite rows only, keeping their index so the rotated
    # values can be realigned with the original trajectories.
    clean = trajs[coords].dropna()
    # BUG FIX: name the rotated columns after `coords`. The original kept
    # the default integer column labels, so the column-aligned `.loc`
    # assignment below silently produced NaNs and the append branch raised
    # KeyError on `rotated[coord + suffix]`.
    rotated_ = pd.DataFrame(pca.fit_transform(clean),
                            index=clean.index, columns=coords)

    rotated = pd.DataFrame(columns=coords, index=trajs.index)
    rotated.loc[rotated_.index] = rotated_
    rotated['t'] = trajs.t

    if append:
        # BUG FIX: read the plain coordinate from `rotated` and store it
        # under the suffixed name on `trajs`.
        for coord in coords:
            trajs[coord + suffix] = rotated[coord]
        if return_pca:
            return trajs, pca
        else:
            return trajs

    if return_pca:
        return rotated, pca
    else:
        return rotated
def _grouped_pca(trajs, pca, coords, group_kw):
return trajs.groupby(**group_kw).apply(
lambda df: pca.fit_transform(df[coords].dropna()),
coords)
def time_interpolate(trajs, sampling=1,
                     s=0, k=3,
                     coords=['x', 'y', 'z']):
    """Interpolates each segment of the trajectories along time using `scipy.interpolate.splrep`

    Parameters
    ----------
    sampling : int,
        Must be higher or equal than 1, will add `sampling - 1` extra points
        between two consecutive original data point. Sub-sampling is not supported.
    coords : tuple of column names, default `('x', 'y', 'z')`
        the coordinates to interpolate.
    s : float
        A smoothing condition. The amount of smoothness is determined by satisfying the conditions:
        sum((w * (y - g))**2,axis=0) <= s where g(x) is the smoothed interpolation of (x,y). The
        user can use s to control the tradeoff between closeness and smoothness of fit. Larger s
        means more smoothing while smaller values of s indicate less smoothing. Recommended values
        of s depend on the weights, w. If the weights represent the inverse of the standard-
        deviation of y, then a good s value should be found in the range (m-sqrt(2*m),m+sqrt(2*m))
        where m is the number of datapoints in x, y, and w. default : s=m-sqrt(2*m) if weights are
        supplied. s = 0.0 (interpolating) if no weights are supplied.
    k : int
        The order of the spline fit. It is recommended to use cubic splines.
        Even order splines should be avoided especially with small s values.
        1 <= k <= 5

    Returns
    -------
    interpolated : a :class:`pandas.Dataframe` instance
        The interpolated values, with column names given by `coords` plus the computed speeds (first
        order derivative) and accelarations (second order derivative) if `k` > 2

    Notes
    -----
    - The returned DataFrame is NOT indexed like the input (in particular for `t_stamp`).
    - It is also NOT casted to a Trajectories instance.
    - The `s` and `k` arguments are passed to `scipy.interpolate.splrep`, see this function
    documentation for more details
    - If a segment is too short to be interpolated with the passed order `k`, the order will be
    automatically diminished.
    - Segments with only one point will be returned as is
    """
    # One spline fit per trajectory label; see _segment_interpolate_.
    interpolated = trajs.groupby(level='label').apply(_segment_interpolate_,
                                                      sampling=sampling, s=s, k=k,
                                                      coords=coords)
    # Restore the (t_stamp, label) index order of the input.
    # NOTE(review): `sortlevel` was removed in pandas 1.0; on modern pandas
    # this would need `sort_index(level=['t_stamp', 'label'])`. Kept as-is
    # for the pandas version this module targets.
    interpolated = interpolated.swaplevel(
        't_stamp', 'label').sortlevel(['t_stamp', 'label'])
    return interpolated
def _segment_interpolate_(segment, sampling, s=0, k=3,
                          coords=['x', 'y', 'z']):
    """Interpolate a single trajectory segment along time.

    Builds an up-sampled (by `sampling`) time-stamp index, fits one spline
    per coordinate via ``_spline_rep`` and evaluates position, speed and
    (for cubic fits) acceleration. Single-point segments are returned as-is
    with NaN derivatives.
    """
    # Lower the spline order until it fits the number of data points
    # (splrep requires more points than the order).
    corrected_k = k
    while segment.shape[0] <= corrected_k:
        corrected_k -= 2

    t_stamps_in = segment.index.get_level_values('t_stamp').values
    t_stamp0, t_stamp1 = t_stamps_in[0], t_stamps_in[-1]
    t0, t1 = segment.t.iloc[0], segment.t.iloc[-1]

    # FIX: `np.int` was a deprecated alias of the builtin `int` and has been
    # removed from NumPy (>= 1.24); the builtin is behaviorally identical.
    t_stamps = np.arange(t_stamp0 * sampling,
                         t_stamp1 * sampling + 1, dtype=int)
    times = np.linspace(t0, t1, t_stamps.size)

    t_stamps = pd.Index(t_stamps, dtype=int, name='t_stamp')
    tmp_df = pd.DataFrame(index=t_stamps)
    tmp_df['t'] = times

    # Single-point segments cannot be fitted: copy the point and mark the
    # derivatives as undefined.
    if segment.shape[0] < 2:
        for coord in coords:
            tmp_df[coord] = segment[coord].values
            tmp_df['v_' + coord] = np.nan
            tmp_df['a_' + coord] = np.nan
        return tmp_df

    tck = _spline_rep(segment, coords, s=s, k=corrected_k)

    for coord in coords:
        tmp_df[coord] = splev(times, tck[coord], der=0)
        tmp_df['v_' + coord] = splev(times, tck[coord], der=1)
        if k > 2:
            # Acceleration needs an actual cubic fit; when the order had to
            # be lowered below 3, fill with NaN instead.
            if corrected_k > 2:
                tmp_df['a_' + coord] = splev(times, tck[coord], der=2)
            else:
                tmp_df['a_' + coord] = times * np.nan

    return tmp_df
def _spline_rep(df, coords=('x', 'y', 'z'), s=0, k=3):
time = df.t
tcks = {}
for coord in coords:
tcks[coord] = splrep(time, df[coord].values, s=s, k=k)
return pd.DataFrame.from_dict(tcks)
def back_proj_interp(interpolated, orig, sampling):
    """Project interpolated values back onto the original time stamps.

    For each (t_stamp, label) row of `orig`, select the row
    (t_stamp * sampling, label) from `interpolated` and re-index the
    result with the original index, so that e.g.
    ``back_proj_interp(interpolated, trajs, 3).iloc[0].x == trajs.iloc[0].x``.
    """
    back_t_stamps = orig.index.get_level_values('t_stamp')
    back_labels = orig.index.get_level_values('label')
    # Index into the up-sampled frame: original stamps scaled by `sampling`.
    interp_index = pd.MultiIndex.from_arrays([back_t_stamps * sampling,
                                              back_labels],
                                             names=['t_stamp', 'label'])
    back_projected_ = interpolated.loc[interp_index]
    # Re-label the selected rows with the original (t_stamp, label) index.
    # CLEANUP: the original built this index twice; once is enough.
    back_index = pd.MultiIndex.from_arrays([back_t_stamps, back_labels],
                                           names=['t_stamp', 'label'])
    back_projected = back_projected_.set_index(back_index)
    return back_projected
def back_proj_pca(rotated, pca, coords):
    """Invert a PCA rotation and restore the (t_stamp, label) index.

    Columns of `rotated` that are not listed in `coords` are copied over
    unchanged.
    """
    restored = pca.inverse_transform(rotated[coords])
    index = pd.MultiIndex.from_arrays(
        [rotated.index.get_level_values('t_stamp'),
         rotated.index.get_level_values('label')],
        names=['t_stamp', 'label'])
    back_projected = pd.DataFrame(restored, index=index, columns=coords)
    # Carry over any extra (non-coordinate) columns untouched.
    for col in set(rotated.columns) - set(back_projected.columns):
        back_projected[col] = rotated[col]
    return back_projected
def transformations_matrix(center, vec):
    """Build transformation matrix:
        - translation : from (0, 0) to a point (center)
        - rotation : following angle between (1, 0) and vec

    Parameters
    ----------
    center : list or np.ndarray
    vec : list or np.ndarray

    Returns
    -------
    The transformation matrix, np.ndarray.
    """
    # Normalised direction of `vec`; the reference direction is the x-axis.
    origin_vec = np.array([1, 0])
    current_vec = vec / np.linalg.norm(vec)

    # Rotation angle (the first arctan2 term is zero for the x-axis but is
    # kept so the formula reads symmetrically).
    theta = (np.arctan2(origin_vec[1], origin_vec[0])
             + np.arctan2(current_vec[1], current_vec[0]))

    # Homogeneous 2-D rotation by theta.
    cos_t, sin_t = np.cos(theta), np.sin(theta)
    R = np.array([[cos_t, -sin_t, 0],
                  [sin_t, cos_t, 0],
                  [0, 0, 1]], dtype="float")

    # Homogeneous translation moving `center` to the origin.
    T = np.eye(3)
    T[0, 2] = -center[0]
    T[1, 2] = -center[1]

    # Combine both transformations into a single matrix.
    return np.dot(T.T, R)
def interp_series(series, new_index):
    """Numpy API like pandas linear interpolation.

    Parameters
    ----------
    series : :class:`pandas.Series`
        Index holds the x-coordinates and values the y-coordinates of the
        data points.
    new_index : np.array
        The x-coordinates at which to evaluate the interpolation.

    Return
    ------
    :class:`pandas.Series` of interpolated values.

    Examples
    --------
    >>> import pandas as pd
    >>> import numpy as np
    >>> series = pd.Series([0, 10, 20, 40, 50, 60], index=[0, 1, 2, 4, 5, 6])
    >>> interp_series(series, np.arange(0.5, 7.5, 1)).tolist()
    [5.0, 15.0, 25.0, 35.0, 45.0, 55.0, 60.0]
    """
    # Merge empty placeholders at the new x-positions into the data, then
    # let pandas interpolate linearly against the index values.
    placeholder = pd.Series(index=new_index)
    merged = pd.concat([series, placeholder]).sort_index()
    interpolated = merged.interpolate(method='index')
    # Keep only the requested positions.
    interpolated = interpolated.reindex(placeholder.index)
    # Defensive guard kept from the original implementation; a Series is
    # one-dimensional so this branch is not expected to trigger.
    if interpolated.ndim == 2:
        interpolated = interpolated.drop(0, axis=1)
    return interpolated
|
{
"content_hash": "49e1d5751f1b055012773f48ae7c8f3a",
"timestamp": "",
"source": "github",
"line_count": 270,
"max_line_length": 99,
"avg_line_length": 34.8,
"alnum_prop": 0.5964240102171137,
"repo_name": "hadim/spindle_tracker",
"id": "b17e16a9d908756f4e7c9bfdc40fff09dc64acfa",
"size": "9421",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spindle_tracker/trajectories/measures/transformation.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "315"
},
{
"name": "Python",
"bytes": "515124"
}
],
"symlink_target": ""
}
|
import datetime
import hashlib
import os
import csv
import codecs
import cStringIO
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.conf import settings
def _upload_path(tag):
def _upload_path_tagged(instance, filename):
now = datetime.datetime.now()
path = os.path.join(now.strftime('%Y'), now.strftime('%m'),
now.strftime('%d'))
hashed_filename = (hashlib.md5(filename +
str(now.microsecond)).hexdigest())
__, extension = os.path.splitext(filename)
return os.path.join(tag, path, hashed_filename + extension)
return _upload_path_tagged
def get_page(data):
    """Return the page number requested in `data`, falling back to 1."""
    try:
        return int(data.get('page', '1'))
    except (ValueError, TypeError):
        # Non-numeric or missing value: default to the first page.
        return 1
def get_paginator(queryset, page_number, items=settings.PAGINATOR_SIZE):
    """Return page `page_number` of `queryset` with `items` entries per page.

    Out-of-range or otherwise invalid page numbers fall back to the last
    available page.
    """
    paginator = Paginator(queryset, items)
    try:
        return paginator.page(page_number)
    except (EmptyPage, InvalidPage):
        return paginator.page(paginator.num_pages)
# In case we get unicode in the DB we use a custom reader able to handle it
class UTF8Recoder:
    """
    Iterator that reads an encoded stream and reencodes the input to UTF-8
    """
    def __init__(self, f, encoding):
        # Wrap the raw stream in a codecs reader that decodes `encoding`.
        self.reader = codecs.getreader(encoding)(f)

    def __iter__(self):
        return self

    def next(self):
        # Python 2 iterator protocol: each decoded line, re-encoded as
        # UTF-8 bytes for consumption by the csv module.
        return self.reader.next().encode("utf-8")
class UnicodeWriter:
    """
    A CSV writer which will write rows to CSV file "f",
    which is encoded in the given encoding.

    (Python 2 recipe: csv.writer cannot handle unicode directly, so each
    row is first serialized as UTF-8 into an in-memory queue and then
    transcoded to the target encoding.)
    """
    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Rows are written into this queue first, then transcoded.
        self.queue = cStringIO.StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, row):
        """Write one row of unicode strings to the target stream."""
        self.writer.writerow([s.encode("utf-8") for s in row])
        # Pull the UTF-8 output back out of the queue and re-encode it
        # into the target encoding before writing to the real stream.
        data = self.queue.getvalue().decode("utf-8")
        self.stream.write(self.encoder.encode(data))
        # Reset the queue for the next row.
        self.queue.truncate(0)

    def writerows(self, rows):
        """Write every row in `rows`."""
        for row in rows:
            self.writerow(row)
|
{
"content_hash": "f9e27e604ad8039e277e8f4e375de4e7",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 75,
"avg_line_length": 30.529411764705884,
"alnum_prop": 0.6292870905587669,
"repo_name": "mozilla/gameon",
"id": "42d4060b8b23d863007543e45296b14550cee566",
"size": "2595",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gameon/base/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "7140"
},
{
"name": "Puppet",
"bytes": "6677"
},
{
"name": "Python",
"bytes": "636706"
},
{
"name": "Ruby",
"bytes": "1462"
},
{
"name": "Shell",
"bytes": "3065"
}
],
"symlink_target": ""
}
|
"""Tests for optimizers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
def _setup_model():
  """Build a tiny graph for the optimizer tests.

  Returns a scalar float placeholder, a scalar variable initialized to 10,
  the loss |var * x| and an int64 global step initialized to 0.
  """
  inp = tf.placeholder(tf.float32, [])
  weight = tf.get_variable("test", [], initializer=tf.constant_initializer(10))
  loss = tf.abs(weight * inp)
  step = tf.get_variable(
      "global_step", [], trainable=False, dtype=tf.int64,
      initializer=tf.constant_initializer(0, dtype=tf.int64))
  return inp, weight, loss, step
def _no_op_learning_rate_decay_fn(lr, global_step):
assert lr is not None
assert global_step is not None
return lr
class OptimizersTest(tf.test.TestCase):
  """Tests for tf.contrib.layers.optimize_loss."""

  def testSGDOptimizer(self):
    # optimize_loss accepts the optimizer as a name string, a class, an
    # instance, or a callable taking the learning rate.
    optimizers = [
        "SGD", tf.train.GradientDescentOptimizer,
        tf.train.GradientDescentOptimizer(learning_rate=0.1),
        lambda lr: tf.train.GradientDescentOptimizer(learning_rate=lr)]
    for optimizer in optimizers:
      with tf.Graph().as_default() as g:
        with self.test_session(graph=g) as session:
          x, var, loss, global_step = _setup_model()
          train = tf.contrib.layers.optimize_loss(loss,
                                                  global_step,
                                                  learning_rate=0.1,
                                                  optimizer=optimizer)
          tf.initialize_all_variables().run()
          session.run(train, feed_dict={x: 5})
          var_value, global_step_value = session.run([var, global_step])
          # One SGD step: 10 - 0.1 * d|var * 5|/dvar = 10 - 0.5 = 9.5.
          self.assertEqual(var_value, 9.5)
          self.assertEqual(global_step_value, 1)

  def testNoLrCallable(self):
    # A zero-argument optimizer factory works with learning_rate=None.
    def optimizer_fn():
      return tf.train.GradientDescentOptimizer(learning_rate=0.1)
    with tf.Graph().as_default() as g:
      with self.test_session(graph=g) as session:
        x, var, loss, global_step = _setup_model()
        train = tf.contrib.layers.optimize_loss(loss,
                                                global_step,
                                                learning_rate=None,
                                                optimizer=optimizer_fn)
        tf.initialize_all_variables().run()
        session.run(train, feed_dict={x: 5})
        var_value, global_step_value = session.run([var, global_step])
        self.assertEqual(var_value, 9.5)
        self.assertEqual(global_step_value, 1)

  def testWrongOptimizer(self):
    # Unknown names and objects that are not optimizers must be rejected.
    optimizers = ["blah", tf.Variable, object(), lambda x: None]
    for optimizer in optimizers:
      with tf.Graph().as_default() as g:
        with self.test_session(graph=g):
          _, _, loss, global_step = _setup_model()
          with self.assertRaises(ValueError):
            tf.contrib.layers.optimize_loss(loss,
                                            global_step,
                                            learning_rate=0.1,
                                            optimizer=optimizer)

  def testInvalidLoss(self):
    # The loss must be a (scalar) tensor, not None or a plain list.
    with tf.Graph().as_default() as g, self.test_session(graph=g):
      _, _, _, global_step = _setup_model()
      with self.assertRaises(ValueError):
        tf.contrib.layers.optimize_loss(None,
                                        global_step,
                                        learning_rate=0.1,
                                        optimizer="SGD")
      with self.assertRaises(ValueError):
        tf.contrib.layers.optimize_loss([[1.0]],
                                        global_step,
                                        learning_rate=0.1,
                                        optimizer="SGD")

  def testInvalidGlobalStep(self):
    # global_step must be a scalar int64 variable: constants, float dtype
    # and non-scalar shapes are each rejected.
    with tf.Graph().as_default() as g, self.test_session(graph=g):
      x = tf.placeholder(tf.float32, [])
      var = tf.get_variable("test", [], initializer=tf.constant_initializer(10))
      loss = tf.abs(var * x)
      with self.assertRaises(TypeError):
        tf.contrib.layers.optimize_loss(
            loss, global_step=tf.constant(43, dtype=tf.int64),
            learning_rate=0.1, optimizer="SGD")
      with self.assertRaises(TypeError):
        tf.contrib.layers.optimize_loss(
            loss,
            global_step=tf.get_variable(
                "global_step", [], trainable=False, dtype=tf.float64,
                initializer=tf.constant_initializer(0.0, dtype=tf.float64)),
            learning_rate=0.1, optimizer="SGD")
      with self.assertRaises(ValueError):
        tf.contrib.layers.optimize_loss(
            loss,
            global_step=tf.get_variable(
                "global_step", [1], trainable=False, dtype=tf.int64,
                initializer=tf.constant_initializer([0], dtype=tf.int64)),
            learning_rate=0.1, optimizer="SGD")

  def testInvalidLearningRate(self):
    # Negative learning rates are rejected.
    with tf.Graph().as_default() as g, self.test_session(graph=g):
      _, _, loss, global_step = _setup_model()
      with self.assertRaises(ValueError):
        tf.contrib.layers.optimize_loss(loss,
                                        global_step,
                                        learning_rate=-0.1,
                                        optimizer="SGD")

  def testGradientNoise(self):
    tf.set_random_seed(42)
    with self.test_session() as session:
      x, var, loss, global_step = _setup_model()
      train = tf.contrib.layers.optimize_loss(loss,
                                              global_step,
                                              learning_rate=0.1,
                                              optimizer="SGD",
                                              gradient_noise_scale=10.0)
      tf.initialize_all_variables().run()
      session.run(train, feed_dict={x: 5})
      var_value, global_step_value = session.run([var, global_step])
      # Due to randomness the following number may change if graph is different.
      self.assertAlmostEqual(var_value, 8.5591021, 4)
      self.assertEqual(global_step_value, 1)

  def testGradientNoiseWithClipping(self):
    tf.set_random_seed(42)
    with self.test_session() as session:
      x, var, loss, global_step = _setup_model()
      train = tf.contrib.layers.optimize_loss(loss,
                                              global_step,
                                              learning_rate=0.1,
                                              optimizer="SGD",
                                              gradient_noise_scale=10.0,
                                              clip_gradients=10.0)
      tf.initialize_all_variables().run()
      session.run(train, feed_dict={x: 5})
      var_value, global_step_value = session.run([var, global_step])
      # Noise pushed the gradient beyond the clip norm; the applied update
      # is the clipped 10.0 * lr 0.1 = 1.0, so 10 - 1 = 9.
      self.assertAlmostEqual(var_value, 9.0, 4)
      self.assertEqual(global_step_value, 1)

  def testGradientClip(self):
    with self.test_session() as session:
      x, var, loss, global_step = _setup_model()
      train = tf.contrib.layers.optimize_loss(loss,
                                              global_step,
                                              learning_rate=0.1,
                                              optimizer="SGD",
                                              clip_gradients=0.1)
      tf.initialize_all_variables().run()
      session.run(train, feed_dict={x: 5})
      var_value, global_step_value = session.run([var, global_step])
      # Gradient 5 clipped to 0.1, applied with lr 0.1: 10 - 0.01 = 9.99.
      self.assertAlmostEqual(var_value, 9.98999, 4)
      self.assertEqual(global_step_value, 1)

  def testGradientMultiply(self):
    with self.test_session() as session:
      x, var, loss, global_step = _setup_model()
      train = tf.contrib.layers.optimize_loss(loss,
                                              global_step,
                                              learning_rate=0.1,
                                              optimizer="SGD",
                                              gradient_multipliers={var: 7.})
      tf.initialize_all_variables().run()
      session.run(train, feed_dict={x: 5})
      var_value, global_step_value = session.run([var, global_step])
      # var(0) = 10, x = 5, var(0)/dx = 5,
      # var(1) = var(0) - learning_rate * gradient_multiplier * var(0)/dx
      self.assertAlmostEqual(var_value, 6.5, 4)
      self.assertEqual(global_step_value, 1)

  def testIgnoreVariablesWithNoGradients(self):
    # A variable that does not feed into the loss must not break
    # noise/multiplier/clipping handling.
    _, _, loss, global_step = _setup_model()
    unused_variable = tf.get_variable("ignore_me", [])
    tf.contrib.layers.optimize_loss(
        loss, global_step, learning_rate=0.1, optimizer="SGD",
        gradient_noise_scale=10.0,
        gradient_multipliers={unused_variable: 1.},
        clip_gradients=10.0)

  def testNoGlobalStep(self):
    # Training without a global step still applies the update_ops.
    optimizers = ["SGD", tf.train.GradientDescentOptimizer,
                  tf.train.GradientDescentOptimizer(learning_rate=0.1)]
    for optimizer in optimizers:
      with tf.Graph().as_default() as g, self.test_session(graph=g) as session:
        x = tf.placeholder(tf.float32, [])
        var = tf.get_variable(
            "test", [], initializer=tf.constant_initializer(10))
        loss = tf.abs(var * x)
        update_var = tf.get_variable(
            "update", [], initializer=tf.constant_initializer(10))
        update_op = tf.assign(update_var, 20)
        train = tf.contrib.layers.optimize_loss(loss,
                                                global_step=None,
                                                learning_rate=0.1,
                                                optimizer=optimizer,
                                                update_ops=[update_op])
        tf.initialize_all_variables().run()
        session.run(train, feed_dict={x: 5})
        self.assertEqual(9.5, var.eval())
        self.assertEqual(20, update_var.eval())

  def testNoGlobalStepWithDecay(self):
    # A decay function requires a global step; omitting it is an error.
    optimizers = ["SGD", tf.train.GradientDescentOptimizer,
                  tf.train.GradientDescentOptimizer(learning_rate=0.1)]
    for optimizer in optimizers:
      with tf.Graph().as_default() as g, self.test_session(graph=g):
        x = tf.placeholder(tf.float32, [])
        var = tf.get_variable(
            "test", [], initializer=tf.constant_initializer(10))
        loss = tf.abs(var * x)
        update_var = tf.get_variable(
            "update", [], initializer=tf.constant_initializer(10))
        update_op = tf.assign(update_var, 20)
        with self.assertRaisesRegexp(
            ValueError, "global_step is required for learning_rate_decay_fn"):
          tf.contrib.layers.optimize_loss(
              loss,
              global_step=None,
              learning_rate=0.1,
              learning_rate_decay_fn=_no_op_learning_rate_decay_fn,
              optimizer=optimizer,
              update_ops=[update_op])

  def testNoGlobalStepArg(self):
    # With global_step=None but a step variable present in the graph, the
    # step is still found and incremented.
    optimizers = ["SGD", tf.train.GradientDescentOptimizer,
                  tf.train.GradientDescentOptimizer(learning_rate=0.1)]
    for optimizer in optimizers:
      with tf.Graph().as_default() as g, self.test_session(graph=g) as session:
        x, var, loss, global_step = _setup_model()
        update_var = tf.get_variable(
            "update", [], initializer=tf.constant_initializer(10))
        update_op = tf.assign(update_var, 20)
        train = tf.contrib.layers.optimize_loss(loss,
                                                global_step=None,
                                                learning_rate=0.1,
                                                optimizer=optimizer,
                                                update_ops=[update_op])
        tf.initialize_all_variables().run()
        session.run(train, feed_dict={x: 5})
        self.assertEqual(9.5, var.eval())
        self.assertEqual(20, update_var.eval())
        self.assertEqual(1, global_step.eval())

  def testUpdateOp(self):
    # Explicit update_ops run together with the training op.
    optimizers = ["SGD", tf.train.GradientDescentOptimizer,
                  tf.train.GradientDescentOptimizer(learning_rate=0.1)]
    for optimizer in optimizers:
      with tf.Graph().as_default() as g, self.test_session(graph=g) as session:
        x, var, loss, global_step = _setup_model()
        update_var = tf.get_variable(
            "update", [], initializer=tf.constant_initializer(10))
        update_op = tf.assign(update_var, 20)
        train = tf.contrib.layers.optimize_loss(loss,
                                                global_step,
                                                learning_rate=0.1,
                                                optimizer=optimizer,
                                                update_ops=[update_op])
        tf.initialize_all_variables().run()
        session.run(train, feed_dict={x: 5})
        self.assertEqual(9.5, var.eval())
        self.assertEqual(20, update_var.eval())
        self.assertEqual(1, global_step.eval())

  def testUpdateOpWithNoOpDecay(self):
    # update_ops and a (no-op) decay function work together.
    optimizers = ["SGD", tf.train.GradientDescentOptimizer,
                  tf.train.GradientDescentOptimizer(learning_rate=0.1)]
    for optimizer in optimizers:
      with tf.Graph().as_default() as g, self.test_session(graph=g) as session:
        x, var, loss, global_step = _setup_model()
        update_var = tf.get_variable(
            "update", [], initializer=tf.constant_initializer(10))
        update_op = tf.assign(update_var, 20)
        train = tf.contrib.layers.optimize_loss(
            loss,
            global_step,
            learning_rate=0.1,
            learning_rate_decay_fn=_no_op_learning_rate_decay_fn,
            optimizer=optimizer,
            update_ops=[update_op])
        tf.initialize_all_variables().run()
        session.run(train, feed_dict={x: 5})
        self.assertEqual(9.5, var.eval())
        self.assertEqual(20, update_var.eval())
        self.assertEqual(1, global_step.eval())

  def testUpdateOpFromCollection(self):
    # Ops registered in GraphKeys.UPDATE_OPS are picked up automatically.
    optimizers = ["SGD", tf.train.GradientDescentOptimizer,
                  tf.train.GradientDescentOptimizer(learning_rate=0.1)]
    for optimizer in optimizers:
      with tf.Graph().as_default() as g, self.test_session(graph=g) as session:
        x, var, loss, global_step = _setup_model()
        update_var = tf.get_variable(
            "update", [], initializer=tf.constant_initializer(10))
        update_op = tf.assign(update_var, 20)
        tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, update_op)
        train = tf.contrib.layers.optimize_loss(
            loss, global_step, learning_rate=0.1, optimizer=optimizer)
        tf.initialize_all_variables().run()
        session.run(train, feed_dict={x: 5})
        var_value, update_var_value, global_step_value = session.run(
            [var, update_var, global_step])
        self.assertEqual(var_value, 9.5)
        self.assertEqual(update_var_value, 20)
        self.assertEqual(global_step_value, 1)
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
  tf.test.main()
|
{
"content_hash": "ab04b625d54d1d6b2ac9cbb2d4145d42",
"timestamp": "",
"source": "github",
"line_count": 322,
"max_line_length": 80,
"avg_line_length": 45.67080745341615,
"alnum_prop": 0.5470556235550116,
"repo_name": "pierreg/tensorflow",
"id": "fb76fd20b4a2cdee0ff3ca2d423bca8c10db292b",
"size": "15395",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/layers/python/layers/optimizers_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "159361"
},
{
"name": "C++",
"bytes": "9799859"
},
{
"name": "CMake",
"bytes": "34638"
},
{
"name": "CSS",
"bytes": "107"
},
{
"name": "HTML",
"bytes": "784550"
},
{
"name": "Java",
"bytes": "41635"
},
{
"name": "JavaScript",
"bytes": "10875"
},
{
"name": "Jupyter Notebook",
"bytes": "1773504"
},
{
"name": "Makefile",
"bytes": "18646"
},
{
"name": "Objective-C",
"bytes": "5332"
},
{
"name": "Objective-C++",
"bytes": "45585"
},
{
"name": "Protocol Buffer",
"bytes": "116016"
},
{
"name": "Python",
"bytes": "7657184"
},
{
"name": "Shell",
"bytes": "212951"
},
{
"name": "TypeScript",
"bytes": "395465"
}
],
"symlink_target": ""
}
|
"""
classes representing meta data used in COMBINE Archives, such as the OMEX meta data
"""
from datetime import datetime
from xml.etree import ElementTree
try:
# Python 3
from urllib.parse import urlparse, urljoin
except ImportError:
# Python 2
from urlparse import urlparse, urljoin
import combinearchive as combinearchive
import utils
import exceptions
class MetaDataHolder(object):
    """
    Mixin for objects which can contain / be described by meta data.
    """

    def __init__(self):
        # List of MetaDataObject instances describing this object.
        self.description = []

    def add_description(self, meta, fragment=None):
        """
        adds a description to this meta data holder.
        Optionally you can define a fragment to specify the part, which is described
        by the MetaDataObject.

        Raises ValueError when `meta` is falsy and TypeError when it is not
        a MetaDataObject.
        """
        if not meta:
            raise ValueError('no meta data was provided to be added')
        if not isinstance(meta, MetaDataObject):
            raise TypeError('provided meta data does not inherit from MetaDataObject')

        # Wire the MetaDataObject back to this holder, then register it.
        meta.set_about(self, fragment=fragment)
        self.description.append(meta)
class Namespace(object):
    """
    class holding constants for the XML namespaces
    """
    # namespace prefixes and their URIs, as used in the OMEX RDF meta data
    DC = 'dcterms'
    DC_URI = 'http://purl.org/dc/terms/'
    VCARD = 'vCard'
    VCARD_URI = 'http://www.w3.org/2006/vcard/ns#'
    BQMODEL = 'bqmodel'
    BQMODEL_URI = 'http://biomodels.net/model-qualifiers/'
    RDF = 'rdf'
    RDF_URI = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#'

    class rdf_terms:
        # qualified tag/attribute names from the RDF core namespace
        rdf = 'rdf:RDF'
        description = 'rdf:Description'
        about = 'rdf:about'
        parse_type = 'rdf:parseType'
        bag = 'rdf:Bag'
        li = 'rdf:li'

    class dc_terms:
        # Dublin Core tag names, plus the W3CDTF date format used by OMEX
        description = 'dcterms:description'
        creator = 'dcterms:creator'
        created = 'dcterms:created'
        modified = 'dcterms:modified'
        w3cdtf = 'dcterms:W3CDTF'
        w3cdtf_dateformat = '%Y-%m-%dT%H:%M:%SZ'

    class vcard_terms:
        # vCard tag names used for creator contact details
        has_name = 'vCard:hasName'
        family_name = 'vCard:family-name'
        given_name = 'vCard:given-name'
        email = 'vCard:email'
        organization_name = 'vCard:organization-name'
class MetaDataObject(object):
    """
    Abstract base class for all meta data utilized in COMBINE archives.

    Subclasses must implement ``_try_parse()`` and ``_rebuild_xml()``.
    """

    def __init__(self, xml_element=None):
        # XML element representing the raw meta data (or None)
        self._xml_element = xml_element
        # reference to the described object; expected to use the MetaDataHolder mixin
        self.about = None
        # fragment/part of the referenced object which is described by this meta data
        self.fragment = None

        if xml_element is not None:
            # raw XML was supplied -> parse it right away
            self._try_parse()

    def set_about(self, about, fragment=None, add_to_target=False):
        """
        Bind this meta data object to ``about`` (optionally to a specific
        ``fragment`` of it). When ``add_to_target`` is True, this object also
        registers itself in ``about.description``.

        Raises:
            ValueError: if ``about`` is None
            TypeError: if ``about`` does not inherit from MetaDataHolder
        """
        if about is None:
            raise ValueError('about is not supposed to be None')
        if not isinstance(about, MetaDataHolder):
            raise TypeError('provided about object does not inherit from MetaDataHolder')

        # set according fields
        self.about = about
        self.fragment = fragment

        # auto wire, if wished
        if add_to_target:
            # BUGFIX: previously passed fragment=None, which made the nested
            # add_description() -> set_about() call reset self.fragment to None
            self.about.add_description(self, fragment=fragment)

    def _build_desc_elem(self):
        """
        Constructs the surrounding rdf:Description element (including its
        rdf:about attribute) and returns it. Useful for _rebuild_xml().
        """
        elem = ElementTree.Element(utils.extend_tag_name(Namespace.rdf_terms.description, combinearchive._XML_NS))
        if isinstance(self.about, combinearchive.CombineArchive):
            # meta data is about the archive itself
            about_url = '.'
        elif isinstance(self.about, combinearchive.ArchiveEntry):
            # meta data is about a normal archive entry
            about_url = self.about.location
            # add fragment
            if self.fragment:
                about_url = urljoin(about_url, '#{}'.format(self.fragment))
        else:
            # BUGFIX: previously fell through and raised a confusing NameError
            # on about_url below
            raise TypeError('about must be a CombineArchive or an ArchiveEntry')

        elem.attrib[utils.extend_tag_name(Namespace.rdf_terms.about, combinearchive._XML_NS)] = about_url
        return elem

    def _try_parse(self):
        """
        Tries to parse the meta data encoded in _xml_element.

        Raises:
            NotImplementedError: subclasses must override this method
        """
        # BUGFIX: was ``raise NotImplemented()`` -- NotImplemented is a
        # non-callable singleton, so that line raised a TypeError instead
        raise NotImplementedError()

    def _rebuild_xml(self, document):
        """
        Rebuilds the xml element so it can be stored again into the RDF file.

        Returns:
            the xml_element

        Raises:
            NotImplementedError: subclasses must override this method
        """
        # BUGFIX: same NotImplemented() -> NotImplementedError() fix as above
        raise NotImplementedError()
class DefaultMetaDataObject(MetaDataObject):
    """
    Default meta data representation for COMBINE archives: the raw XML
    element is kept verbatim and handed back unchanged when serializing.
    """

    def __init__(self, xml_element):
        super(DefaultMetaDataObject, self).__init__(xml_element)

    def _try_parse(self):
        # nothing to interpret -- the element itself is the data
        return self

    def _rebuild_xml(self, document):
        # hand back the stored element untouched
        return self._xml_element
class OmexMetaDataObject(MetaDataObject):
    """
    Object representing the meta data described in the original
    COMBINE Archive specification: a dcterms description, a creation date,
    a list of modification dates, and a list of VCard creators.
    """

    def __init__(self, created=None, creator=None, modified=None,
                 description=None, xml_element=None):
        # date of creation; defaults to "now" when not provided
        self.created = datetime.now() if created is None else created
        self.description = description

        # normalize creator to a real list, so parsed VCards can be appended
        if isinstance(creator, (list, tuple)):
            self.creator = list(creator)
        elif isinstance(creator, VCard):
            self.creator = [creator]
        else:
            self.creator = []

        # normalize modified to a real list of datetimes
        if isinstance(modified, (list, tuple)):
            self.modified = list(modified)
        elif isinstance(modified, datetime):
            # BUGFIX: the original checked ``creator is not None`` here, so a
            # single datetime passed as ``modified`` was silently dropped
            # whenever ``creator`` was None
            self.modified = [modified]
        else:
            self.modified = []

        super(OmexMetaDataObject, self).__init__(xml_element=xml_element)

    def _try_parse(self):
        """
        Parse description, creation date, creators and modification dates
        from ``self._xml_element``.

        Returns:
            self

        Raises:
            ValueError: when the XML cannot be interpreted
        """
        try:
            # getting the dcterms description
            desc_elem = self._xml_element.find(Namespace.dc_terms.description, combinearchive._XML_NS)
            if desc_elem is not None:
                self.description = desc_elem.text

            # parsing the date of creation
            created_elem = self._xml_element.find(Namespace.dc_terms.created, combinearchive._XML_NS)
            if created_elem is not None:
                w3cdtf = created_elem.find(Namespace.dc_terms.w3cdtf, combinearchive._XML_NS)
                self.created = self._parse_date(w3cdtf.text)

            # parsing the creator VCard
            creator_elems = self._xml_element.findall(Namespace.dc_terms.creator, combinearchive._XML_NS)
            for creator in creator_elems:
                self.creator.append(VCard.parse_xml(creator))

            # parsing all modification dates with nested W3CDFT date declaration
            modified_elems = self._xml_element.findall(Namespace.dc_terms.modified, combinearchive._XML_NS)
            for mod in modified_elems:
                w3cdtf = mod.find(Namespace.dc_terms.w3cdtf, combinearchive._XML_NS)
                self.modified.append(self._parse_date(w3cdtf.text))
        except Exception as e:
            # BUGFIX: was ``except BaseException`` (swallowed KeyboardInterrupt
            # and SystemExit) and used ``e.message``, which no longer exists in
            # Python 3
            raise ValueError('an error occurred, while parsing omex meta data {}'.format(e))
        else:
            return self

    def _rebuild_xml(self, document=None):
        """
        Serialize this object back into an rdf:Description element.

        ``document`` is accepted (and ignored) for compatibility with the
        base-class signature, which the original override broke.

        Returns:
            the rebuilt xml_element
        """
        # builds top-level rdf:Description element
        elem = self._build_desc_elem()

        # add description
        if self.description and self.description != '':
            desc_elem = ElementTree.SubElement(elem, utils.extend_tag_name(Namespace.dc_terms.description, combinearchive._XML_NS))
            desc_elem.text = self.description

        # add date of creation
        if self.created:
            created_elem = ElementTree.SubElement(elem, utils.extend_tag_name(Namespace.dc_terms.created, combinearchive._XML_NS))
            w3cdtf = ElementTree.SubElement(created_elem, utils.extend_tag_name(Namespace.dc_terms.w3cdtf, combinearchive._XML_NS))
            w3cdtf.text = self.created.strftime(Namespace.dc_terms.w3cdtf_dateformat)

        # add all modification dates
        for mod_date in self.modified:
            modified_elem = ElementTree.SubElement(elem, utils.extend_tag_name(Namespace.dc_terms.modified, combinearchive._XML_NS))
            w3cdtf = ElementTree.SubElement(modified_elem, utils.extend_tag_name(Namespace.dc_terms.w3cdtf, combinearchive._XML_NS))
            w3cdtf.text = mod_date.strftime(Namespace.dc_terms.w3cdtf_dateformat)

        # add all VCards
        for vcard in self.creator:
            creator_elem = vcard.build_xml()
            elem.append(creator_elem)

        self._xml_element = elem
        return self._xml_element

    def _parse_date(self, str_datetime):
        """
        parses the W3CDTF time format
        Returns:
            datetime object
        Raises ValueError:
            in case the date cannot be parsed
        """
        return datetime.strptime(str_datetime, Namespace.dc_terms.w3cdtf_dateformat)
class VCard(object):
    """
    Minimal vCard record (family/given name, email, organization) used to
    describe the creators of a COMBINE archive.
    """

    def __init__(self, family_name=None, given_name=None, email=None, organization=None):
        self.family_name = family_name
        self.given_name = given_name
        self.email = email
        self.organization = organization

    @staticmethod
    def parse_xml(xml_element):
        """Build a VCard from a dcterms:creator element; missing fields stay None."""
        card = VCard()

        name_node = xml_element.find(Namespace.vcard_terms.has_name, combinearchive._XML_NS)
        if name_node is not None:
            # family name lives inside the vCard:hasName node
            family_node = name_node.find(Namespace.vcard_terms.family_name, combinearchive._XML_NS)
            if family_node is not None:
                card.family_name = family_node.text
            # so does the given name
            given_node = name_node.find(Namespace.vcard_terms.given_name, combinearchive._XML_NS)
            if given_node is not None:
                card.given_name = given_node.text

        # email and organization sit directly under the creator element
        email_node = xml_element.find(Namespace.vcard_terms.email, combinearchive._XML_NS)
        if email_node is not None:
            card.email = email_node.text
        org_node = xml_element.find(Namespace.vcard_terms.organization_name, combinearchive._XML_NS)
        if org_node is not None:
            card.organization = org_node.text

        return card

    def build_xml(self):
        """Serialize this VCard into a dcterms:creator element and return it."""
        # vCards are always housed in a dcterms:creator element
        creator = ElementTree.Element(utils.extend_tag_name(Namespace.dc_terms.creator, combinearchive._XML_NS))
        creator.attrib[utils.extend_tag_name(Namespace.rdf_terms.parse_type, combinearchive._XML_NS)] = 'Resource'

        # the name sub-node is only emitted when at least one name part is set
        if (self.family_name and self.family_name != '') or (self.given_name and self.given_name != ''):
            name_node = ElementTree.SubElement(creator, utils.extend_tag_name(Namespace.vcard_terms.has_name, combinearchive._XML_NS))
            name_node.attrib[utils.extend_tag_name(Namespace.rdf_terms.parse_type, combinearchive._XML_NS)] = 'Resource'
            if self.family_name and self.family_name != '':
                family_node = ElementTree.SubElement(name_node, utils.extend_tag_name(Namespace.vcard_terms.family_name, combinearchive._XML_NS))
                family_node.text = self.family_name
            if self.given_name and self.given_name != '':
                given_node = ElementTree.SubElement(name_node, utils.extend_tag_name(Namespace.vcard_terms.given_name, combinearchive._XML_NS))
                given_node.text = self.given_name

        if self.email and self.email != '':
            email_node = ElementTree.SubElement(creator, utils.extend_tag_name(Namespace.vcard_terms.email, combinearchive._XML_NS))
            email_node.text = self.email
        if self.organization and self.organization != '':
            org_node = ElementTree.SubElement(creator, utils.extend_tag_name(Namespace.vcard_terms.organization_name, combinearchive._XML_NS))
            org_node.text = self.organization

        return creator
|
{
"content_hash": "287aa8e31535a32c99179d634cd595ad",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 144,
"avg_line_length": 36.97329376854599,
"alnum_prop": 0.6202247191011236,
"repo_name": "FreakyBytes/pyCombineArchive",
"id": "a533fd44833262a63bbbbced30830f5c9074df26",
"size": "12460",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "combinearchive/metadata.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "181100"
}
],
"symlink_target": ""
}
|
"""
UCF101 action recognition process functions.
"""
from __future__ import print_function, division
import os
import numpy as np
import progressbar
from dbcollection.datasets import BaseTask
from dbcollection.utils.file_load import load_txt
from dbcollection.utils.string_ascii import convert_str_to_ascii as str2ascii
from dbcollection.utils.pad import pad_list
from dbcollection.utils.hdf5 import hdf5_write_data
from .extract_frames import extract_video_frames
class Recognition(BaseTask):
    """UCF101 action recognition preprocessing functions."""

    # metadata filename
    filename_h5 = 'recognition'

    def load_classes(self):
        """
        Load action classes from the annotation files.

        Returns the class names (column 2 of classInd.txt), skipping blank rows.
        """
        filename = os.path.join(self.data_path, 'ucfTrainTestlist', 'classInd.txt')
        data = load_txt(filename)
        return [row_str.split(' ')[1] for row_str in data if any(row_str)]

    def load_file(self, fname):
        """
        Load data from a .txt file.

        Returns the first space-separated field of every non-blank row.
        """
        filename = os.path.join(self.data_path, 'ucfTrainTestlist', fname)
        data = load_txt(filename)
        return [row_str.split(' ')[0] for row_str in data if any(row_str)]

    def convert_to_dict(self, data):
        """
        Parse the data list into a table. (keys=classes, values=videos)

        Each entry is expected to look like '<class>/<video>.<ext>'.
        """
        out_dict = {}
        for content in data:
            str_split = content.split('/')
            class_name = str_split[0]
            # drop the file extension from the video name
            video_name = os.path.splitext(str_split[1])[0]
            try:
                out_dict[class_name].append(video_name)
            except KeyError:
                out_dict[class_name] = [video_name]
        # order video names per class
        for class_name in out_dict:
            out_dict[class_name].sort()
        return out_dict

    def load_train_test_splits(self):
        """
        Load train+test index lists from the annotation files.

        Returns a dict mapping split name (e.g. 'train01') to a
        {class: [videos]} table.
        """
        splits = [
            ['trainlist01.txt', 'train01'],
            ['trainlist02.txt', 'train02'],
            ['trainlist03.txt', 'train03'],
            ['testlist01.txt', 'test01'],
            ['testlist02.txt', 'test02'],
            ['testlist03.txt', 'test03']
        ]
        splits_idx = {}
        for names in splits:
            data = self.load_file(names[0])
            splits_idx[names[1]] = self.convert_to_dict(data)
        return splits_idx

    def get_set_data(self, set_split, class_list):
        """
        Retrieve the specific data for the set

        Args:
            set_split: dict of split name -> {class: [videos]} (see
                load_train_test_splits).
            class_list: ordered list of class names; the index is the class id.

        Returns a dict of split name -> metadata tables (video/image names,
        per-class and per-video index lists, object_ids) ready for HDF5.
        """
        # cycle all sets
        out = {}
        iset = 0
        for set_name in set_split:
            if self.verbose:
                iset += 1
                print('\n > Split ({}/{}): {}'.format(iset, len(set_split.keys()), set_name))

            # initialize lists
            object_ids = []
            videos = []
            video_filenames = []
            image_filenames = []
            total_frames = []
            list_videos_per_class = {}
            list_image_filenames_per_video = []
            source_data = {}  # stores the folder tree of the classes + videos + image files

            if self.verbose:
                total_vids = sum([len(set_split[set_name][category])
                                  for category in set_split[set_name]])
                progbar = progressbar.ProgressBar(maxval=total_vids).start()
                i = 0

            # fill the lists
            count_video, count_imgs = 0, 0
            for class_id, category in enumerate(class_list):
                source_data[category] = {}
                # class_id = class_list.index(category)
                for _, video_name in enumerate(set_split[set_name][category]):
                    videos.append(video_name)  # add video name
                    video_dir = os.path.join(self.root_dir_imgs, category, video_name)
                    video_filenames.append(os.path.join('UCF-101', category, video_name + '.avi'))

                    # fetch all files in the dir
                    images_fnames = os.listdir(video_dir)
                    # remove any file that does not have .jpg ext
                    images_fnames = [fname for fname in images_fnames if fname.endswith('.jpg')]
                    # add category + video_name to the file paths
                    images_fnames = [os.path.join(self.data_path, self.images_dir,
                                                  category, video_name, fname)
                                     for fname in images_fnames]
                    images_fnames.sort()  # sort images
                    image_filenames = image_filenames + images_fnames  # add images filenames
                    # total_frames holds the CUMULATIVE image count per video
                    count_imgs += len(images_fnames)
                    total_frames.append(count_imgs)

                    # add image filenames to source
                    source_data[category][video_name] = {
                        "images": str2ascii(images_fnames),
                        "video": str2ascii(video_filenames[-1])
                    }

                    # add to list of images per video (global image index range)
                    list_range = list(range(count_imgs - len(images_fnames), count_imgs))
                    list_image_filenames_per_video.append(list_range)

                    # add to list of videos per class
                    try:
                        list_videos_per_class[class_id].append(count_video)
                    except KeyError:
                        list_videos_per_class[class_id] = [count_video]

                    # add data to 'object_ids'
                    # [video, video_filename, list_images_per_video,
                    # class (activity), total_imgs]
                    object_ids.append(
                        [count_video, count_video, count_video, class_id, count_video])

                    # update video counter
                    count_video += 1

                    # update progress bar
                    if self.verbose:
                        i += 1
                        progbar.update(i)

            # set progressbar to 100%
            # NOTE(review): progbar is only created when self.verbose is True,
            # yet this call appears unguarded -- looks like it would raise a
            # NameError with verbose=False; confirm against upstream source
            progbar.finish()

            out[set_name] = {
                "object_fields": str2ascii(['videos', 'video_filenames',
                                            'list_image_filenames_per_video',
                                            'activities', 'total_frames']),
                "object_ids": np.array(object_ids, dtype=np.int32),
                "videos": str2ascii(videos),
                "video_filenames": str2ascii(video_filenames),
                "activities": str2ascii(class_list),
                "image_filenames": str2ascii(image_filenames),
                "total_frames": np.array(total_frames, dtype=np.int32),
                "list_videos_per_activity": np.array(pad_list(list(list_videos_per_class.values()),
                                                              -1), dtype=np.int32),
                "list_image_filenames_per_video": np.array(pad_list(list_image_filenames_per_video,
                                                                    -1), dtype=np.int32),
                "source_data": source_data
            }

        return out

    def load_data(self):
        """
        Load the data from the files.

        Generator: yields the per-split metadata tables produced by
        get_set_data(). Extracts video frames to disk first if needed.
        """
        self.images_dir = 'UCF-101-images'
        self.root_dir_imgs = os.path.join(self.data_path, self.images_dir)

        # extract images from videos into a new folder
        if not os.path.exists(self.root_dir_imgs):
            extract_video_frames(self.data_path, self.verbose)

        # load classes
        class_list = self.load_classes()

        # load train+test set splits
        set_splits_vids = self.load_train_test_splits()

        # fetch folder struct
        if self.verbose:
            print('==> Processing train/set data splits:')
        set_splits_data = self.get_set_data(set_splits_vids, class_list)

        yield set_splits_data

    def process_set_metadata(self, data, set_name):
        """
        Saves the metadata of a set.

        Writes each table of ``data`` into the HDF5 group for ``set_name``;
        string tables are stored as uint8 (fill 0), index tables as int32
        (fill -1).
        """
        hdf5_handler = self.hdf5_manager.get_group(set_name)
        hdf5_write_data(hdf5_handler, 'activities',
                        data["activities"],
                        dtype=np.uint8, fillvalue=0)
        hdf5_write_data(hdf5_handler, 'videos',
                        data["videos"],
                        dtype=np.uint8, fillvalue=0)
        hdf5_write_data(hdf5_handler, 'video_filenames',
                        data["video_filenames"],
                        dtype=np.uint8, fillvalue=0)
        hdf5_write_data(hdf5_handler, 'image_filenames',
                        data["image_filenames"],
                        dtype=np.uint8, fillvalue=0)
        hdf5_write_data(hdf5_handler, 'total_frames',
                        data["total_frames"],
                        dtype=np.int32, fillvalue=-1)
        hdf5_write_data(hdf5_handler, 'object_ids',
                        data["object_ids"],
                        dtype=np.int32, fillvalue=-1)
        hdf5_write_data(hdf5_handler, 'object_fields',
                        data["object_fields"],
                        dtype=np.uint8, fillvalue=0)
        hdf5_write_data(hdf5_handler, 'list_videos_per_activity',
                        data["list_videos_per_activity"],
                        dtype=np.int32, fillvalue=-1)
        hdf5_write_data(hdf5_handler, 'list_image_filenames_per_video',
                        data["list_image_filenames_per_video"],
                        dtype=np.int32, fillvalue=-1)
|
{
"content_hash": "4ca918795a54e5ba4196a1d8dad01044",
"timestamp": "",
"source": "github",
"line_count": 243,
"max_line_length": 99,
"avg_line_length": 39.36625514403292,
"alnum_prop": 0.5137988710014635,
"repo_name": "dbcollection/dbcollection",
"id": "e45e13b59fa93d5406bc3372df48e1960fb30abc",
"size": "9566",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "dbcollection/datasets/ucf/ucf_101/recognition.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1842"
},
{
"name": "Jupyter Notebook",
"bytes": "21468"
},
{
"name": "Makefile",
"bytes": "1692"
},
{
"name": "PowerShell",
"bytes": "2970"
},
{
"name": "Python",
"bytes": "1620582"
}
],
"symlink_target": ""
}
|
from __future__ import annotations
import unittest
from unittest import mock
from airflow.providers.amazon.aws.operators.s3 import S3ListPrefixesOperator
TASK_ID = 'test-s3-list-prefixes-operator'
BUCKET = 'test-bucket'
DELIMITER = '/'
PREFIX = 'test/'
MOCK_SUBFOLDERS = ['test/']
class TestS3ListOperator(unittest.TestCase):
    """Unit test for S3ListPrefixesOperator."""

    @mock.patch('airflow.providers.amazon.aws.operators.s3.S3Hook')
    def test_execute(self, mock_hook):
        """execute() delegates to S3Hook.list_prefixes and returns its result."""
        mock_hook.return_value.list_prefixes.return_value = MOCK_SUBFOLDERS

        operator = S3ListPrefixesOperator(
            task_id=TASK_ID,
            bucket=BUCKET,
            prefix=PREFIX,
            delimiter=DELIMITER,
        )
        result = operator.execute(None)

        # the hook must be queried exactly once, with our parameters
        mock_hook.return_value.list_prefixes.assert_called_once_with(
            bucket_name=BUCKET, prefix=PREFIX, delimiter=DELIMITER
        )
        assert result == MOCK_SUBFOLDERS
|
{
"content_hash": "d00f8e6e7be6184668a018903c124e8a",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 109,
"avg_line_length": 30.857142857142858,
"alnum_prop": 0.7268518518518519,
"repo_name": "cfei18/incubator-airflow",
"id": "967dd692069c07d6ca238d76e3fc8b56bd0c93ea",
"size": "1651",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/providers/amazon/aws/operators/test_s3_list_prefixes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25980"
},
{
"name": "Dockerfile",
"bytes": "72003"
},
{
"name": "HCL",
"bytes": "3786"
},
{
"name": "HTML",
"bytes": "173434"
},
{
"name": "JavaScript",
"bytes": "143068"
},
{
"name": "Jinja",
"bytes": "38808"
},
{
"name": "Jupyter Notebook",
"bytes": "5482"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "22660683"
},
{
"name": "R",
"bytes": "313"
},
{
"name": "Shell",
"bytes": "312715"
},
{
"name": "TypeScript",
"bytes": "472379"
}
],
"symlink_target": ""
}
|
'''OpenGL extension EXT.read_format_bgra
This module customises the behaviour of the
OpenGL.raw.GLES1.EXT.read_format_bgra to provide a more
Python-friendly API
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/EXT/read_format_bgra.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLES1 import _types, _glgets
from OpenGL.raw.GLES1.EXT.read_format_bgra import *
from OpenGL.raw.GLES1.EXT.read_format_bgra import _EXTENSION_NAME
def glInitReadFormatBgraEXT():
    """Return True when the EXT_read_format_bgra extension is available."""
    from OpenGL import extensions
    return extensions.hasGLExtension(_EXTENSION_NAME)
### END AUTOGENERATED SECTION
|
{
"content_hash": "192e725174e544fe4bd4c08a79e0cb9f",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 71,
"avg_line_length": 33.869565217391305,
"alnum_prop": 0.7907573812580231,
"repo_name": "alexus37/AugmentedRealityChess",
"id": "4a70e5d098b5a68897a8612b3b89d853b30b74ca",
"size": "779",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGL/GLES1/EXT/read_format_bgra.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "158062"
},
{
"name": "C++",
"bytes": "267993"
},
{
"name": "CMake",
"bytes": "11319"
},
{
"name": "Fortran",
"bytes": "3707"
},
{
"name": "Makefile",
"bytes": "14618"
},
{
"name": "Python",
"bytes": "12813086"
},
{
"name": "Roff",
"bytes": "3310"
},
{
"name": "Shell",
"bytes": "3855"
}
],
"symlink_target": ""
}
|
"""
.. function:: serialize(obj)
Serialize the object to JSON.
.. function:: deserialize(obj)
Deserialize JSON-encoded object to a Python object.
"""
# Try to import a module that provides json parsing and emitting, starting
# with the fastest alternative and falling back to the slower ones.
# Whichever backend wins, the exported API is identical:
#   serialize(obj)  -> JSON-encoded string
#   deserialize(s)  -> Python object decoded from JSON
try:
    # cjson is the fastest
    import cjson
    serialize = cjson.encode
    deserialize = cjson.decode
except ImportError:
    try:
        # Then try to find simplejson. Later versions has C speedups which
        # makes it pretty fast.
        import simplejson
        serialize = simplejson.dumps
        deserialize = simplejson.loads
    except ImportError:
        try:
            # Then try to find the python 2.6 stdlib json module.
            # (on any modern Python this branch always succeeds)
            import json
            serialize = json.dumps
            deserialize = json.loads
        except ImportError:
            # If all of the above fails, fallback to the simplejson
            # embedded in Django.
            from django.utils import simplejson
            serialize = simplejson.dumps
            deserialize = simplejson.loads
|
{
"content_hash": "3f314df2328b738ff92c1e9c9241b10e",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 74,
"avg_line_length": 29.342105263157894,
"alnum_prop": 0.6430493273542601,
"repo_name": "runeh/carrot",
"id": "0bdb314e1d15778b71b85d169bbbbea922ebf44a",
"size": "1115",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "carrot/serialization.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "50435"
}
],
"symlink_target": ""
}
|
"""test_utils"""
import os, sys, time, unittest
from nose.tools import eq_, assert_raises
from routes.util import controller_scan, GenerationException
from routes import *
class TestUtils(unittest.TestCase):
    def setUp(self):
        """Build a minimization-enabled Mapper and install it in the global request config."""
        m = Mapper(explicit=False)
        m.minimization = True
        m.connect('archive/:year/:month/:day', controller='blog', action='view', month=None, day=None,
                  requirements={'month':'\d{1,2}','day':'\d{1,2}'})
        m.connect('viewpost/:id', controller='post', action='view')
        m.connect(':controller/:action/:id')
        con = request_config()
        con.mapper = m
        con.host = 'www.test.com'
        con.protocol = 'http'
        # a stale environ left over from a previous test would change url_for behaviour
        if hasattr(con, 'environ'):
            del con.environ
        self.con = con
    def test_url_for_with_nongen(self):
        """url_for with a literal path: query params and anchors are appended and URL-encoded."""
        con = self.con
        con.mapper_dict = {}

        eq_('/blog', url_for('/blog'))
        eq_('/blog?q=fred&q=here%20now', url_for('/blog', q=['fred', u'here now']))
        eq_('/blog#here', url_for('/blog', anchor='here'))
    def test_url_for_with_nongen_no_encoding(self):
        """Literal-path generation still works when the mapper's encoding is disabled."""
        con = self.con
        con.mapper_dict = {}
        con.mapper.encoding = None

        eq_('/blog', url_for('/blog'))
        eq_('/blog#here', url_for('/blog', anchor='here'))
    def test_url_for_with_unicode(self):
        """Unicode route values are UTF-8 percent-encoded; a non-ASCII literal path raises."""
        con = self.con
        con.mapper_dict = {}

        eq_('/blog', url_for(controller='blog'))
        eq_('/blog/view/umulat', url_for(controller='blog', action='view', id=u'umulat'))
        eq_('/blog/view/umulat?other=%CE%B1%CF%83%CE%B4%CE%B3',
            url_for(controller='blog', action='view', id=u'umulat', other=u'\u03b1\u03c3\u03b4\u03b3'))

        url = URLGenerator(con.mapper, {})
        # both the global url_for and an explicit URLGenerator must reject it
        for urlobj in [url_for, url]:
            def raise_url():
                return urlobj(u'/some/st\xc3rng')
            assert_raises(Exception, raise_url)
    def test_url_for(self):
        """Basic generation: host/protocol overrides produce absolute and protocol-relative URLs."""
        con = self.con
        con.mapper_dict = {}
        url = URLGenerator(con.mapper, {'HTTP_HOST':'www.test.com:80'})

        for urlobj in [url_for, url]:
            eq_('/blog', urlobj(controller='blog'))
            eq_('/content', urlobj())
            eq_('https://www.test.com/viewpost', urlobj(controller='post', action='view', protocol='https'))
            eq_('http://www.test.org/content', urlobj(host='www.test.org'))
            # empty protocol -> protocol-relative URL
            eq_('//www.test.com/viewpost', urlobj(controller='post', action='view', protocol=''))
            eq_('//www.test.org/content', urlobj(host='www.test.org', protocol=''))
    def test_url_raises(self):
        """With an explicit mapper, generating from unmatched parts raises GenerationException."""
        con = self.con
        con.mapper.explicit = True
        con.mapper_dict = {}
        url = URLGenerator(con.mapper, {})
        assert_raises(GenerationException, url_for, action='juice')
        assert_raises(GenerationException, url, action='juice')
    def test_url_for_with_defaults(self):
        """Implicit generation fills missing parts from the current request's routing args."""
        con = self.con
        con.mapper_dict = {'controller':'blog','action':'view','id':4}
        url = URLGenerator(con.mapper, {'wsgiorg.routing_args':((), con.mapper_dict)})

        eq_('/blog/view/4', url_for())
        eq_('/post/index/4', url_for(controller='post'))
        eq_('/blog/view/2', url_for(id=2))
        eq_('/viewpost/4', url_for(controller='post', action='view', id=4))

        eq_('/blog/view/4', url.current())
        eq_('/post/index/4', url.current(controller='post'))
        eq_('/blog/view/2', url.current(id=2))
        eq_('/viewpost/4', url.current(controller='post', action='view', id=4))

        # switching the current request changes what gets implicitly filled in
        con.mapper_dict = {'controller':'blog','action':'view','year':2004}
        url = URLGenerator(con.mapper, {'wsgiorg.routing_args':((), con.mapper_dict)})

        eq_('/archive/2004/10', url_for(month=10))
        eq_('/archive/2004/9/2', url_for(month=9, day=2))
        eq_('/blog', url_for(controller='blog', year=None))

        eq_('/archive/2004/10', url.current(month=10))
        eq_('/archive/2004/9/2', url.current(month=9, day=2))
        eq_('/blog', url.current(controller='blog', year=None))
    def test_url_for_with_more_defaults(self):
        """Like test_url_for_with_defaults, plus minimization dropping optional parts entirely."""
        con = self.con
        con.mapper_dict = {'controller':'blog','action':'view','id':4}
        url = URLGenerator(con.mapper, {'wsgiorg.routing_args':((), con.mapper_dict)})

        eq_('/blog/view/4', url_for())
        eq_('/post/index/4', url_for(controller='post'))
        eq_('/blog/view/2', url_for(id=2))
        eq_('/viewpost/4', url_for(controller='post', action='view', id=4))

        eq_('/blog/view/4', url.current())
        eq_('/post/index/4', url.current(controller='post'))
        eq_('/blog/view/2', url.current(id=2))
        eq_('/viewpost/4', url.current(controller='post', action='view', id=4))

        con.mapper_dict = {'controller':'blog','action':'view','year':2004}
        url = URLGenerator(con.mapper, {'wsgiorg.routing_args':((), con.mapper_dict)})

        eq_('/archive/2004/10', url_for(month=10))
        eq_('/archive/2004/9/2', url_for(month=9, day=2))
        eq_('/blog', url_for(controller='blog', year=None))
        # month/day default to None, so they are minimized away
        eq_('/archive/2004', url_for())

        eq_('/archive/2004/10', url.current(month=10))
        eq_('/archive/2004/9/2', url.current(month=9, day=2))
        eq_('/blog', url.current(controller='blog', year=None))
        eq_('/archive/2004', url.current())
    def test_url_for_with_defaults_and_qualified(self):
        """qualified=True prepends scheme and host, including a non-default port from the environ."""
        m = self.con.mapper
        m.connect('home', '', controller='blog', action='splash')
        m.connect('category_home', 'category/:section', controller='blog', action='view', section='home')
        m.connect(':controller/:action/:id')
        m.create_regs(['content','blog','admin/comments'])
        self.con.environ = dict(SCRIPT_NAME='', HTTP_HOST='www.example.com', PATH_INFO='/blog/view/4')
        self.con.environ.update({'wsgiorg.routing_args':((), self.con.mapper_dict)})
        url = URLGenerator(m, self.con.environ)

        eq_('/blog/view/4', url_for())
        eq_('/post/index/4', url_for(controller='post'))
        eq_('http://www.example.com/blog/view/4', url_for(qualified=True))
        eq_('/blog/view/2', url_for(id=2))
        eq_('/viewpost/4', url_for(controller='post', action='view', id=4))

        eq_('/blog/view/4', url.current())
        eq_('/post/index/4', url.current(controller='post'))
        eq_('http://www.example.com/blog/view/4', url.current(qualified=True))
        eq_('/blog/view/2', url.current(id=2))
        eq_('/viewpost/4', url.current(controller='post', action='view', id=4))

        # without HTTP_HOST, the host is taken from SERVER_NAME + SERVER_PORT
        env = dict(SCRIPT_NAME='', SERVER_NAME='www.example.com', SERVER_PORT='8080', PATH_INFO='/blog/view/4')
        env['wsgi.url_scheme'] = 'http'
        self.con.environ = env
        self.con.environ.update({'wsgiorg.routing_args':((), self.con.mapper_dict)})
        url = URLGenerator(m, self.con.environ)

        eq_('/post/index/4', url_for(controller='post'))
        eq_('http://www.example.com:8080/blog/view/4', url_for(qualified=True))

        eq_('/post/index/4', url.current(controller='post'))
        eq_('http://www.example.com:8080/blog/view/4', url.current(qualified=True))
    def test_route_overflow(self):
        """A very long (50k-char) static route still generates correctly."""
        m = self.con.mapper
        m.create_regs(["x"*50000])
        m.connect('route-overflow', "x"*50000)
        url = URLGenerator(m, {})
        eq_("/%s" % ("x"*50000), url('route-overflow'))
    def test_with_route_names(self):
        """Named routes generate by name, with route defaults filling the remaining parts."""
        m = self.con.mapper
        self.con.mapper_dict = {}
        m.connect('home', '', controller='blog', action='splash')
        m.connect('category_home', 'category/:section', controller='blog', action='view', section='home')
        m.create_regs(['content','blog','admin/comments'])
        url = URLGenerator(m, {})

        for urlobj in [url, url_for]:
            eq_('/content/view', urlobj(controller='content', action='view'))
            eq_('/content', urlobj(controller='content'))
            eq_('/admin/comments', urlobj(controller='admin/comments'))
            eq_('/category', urlobj('category_home'))
            eq_('/category/food', urlobj('category_home', section='food'))
            eq_('/', urlobj('home'))
    def test_with_route_names_and_defaults(self):
        """Current routing args fill unnamed generation; a route name overrides them completely."""
        m = self.con.mapper
        self.con.mapper_dict = {}
        m.connect('home', '', controller='blog', action='splash')
        m.connect('category_home', 'category/:section', controller='blog', action='view', section='home')
        m.connect('building', 'building/:campus/:building/alljacks', controller='building', action='showjacks')
        m.create_regs(['content','blog','admin/comments','building'])

        self.con.mapper_dict = dict(controller='building', action='showjacks', campus='wilma', building='port')
        url = URLGenerator(m, {'wsgiorg.routing_args':((), self.con.mapper_dict)})
        eq_('/building/wilma/port/alljacks', url_for())
        eq_('/', url_for('home'))
        eq_('/building/wilma/port/alljacks', url.current())
        eq_('/', url.current('home'))
    def test_with_route_names_and_hardcode(self):
        """hardcode_names toggles whether a route name pins the exact route used for generation."""
        m = self.con.mapper
        self.con.mapper_dict = {}
        m.hardcode_names = False

        m.connect('home', '', controller='blog', action='splash')
        m.connect('category_home', 'category/:section', controller='blog', action='view', section='home')
        m.connect('building', 'building/:campus/:building/alljacks', controller='building', action='showjacks')
        m.connect('gallery_thumb', 'gallery/:(img_id)_thumbnail.jpg')
        m.connect('gallery', 'gallery/:(img_id).jpg')
        m.create_regs(['content','blog','admin/comments','building'])

        self.con.mapper_dict = dict(controller='building', action='showjacks', campus='wilma', building='port')
        url = URLGenerator(m, {'wsgiorg.routing_args':((), self.con.mapper_dict)})
        eq_('/building/wilma/port/alljacks', url_for())
        eq_('/', url_for('home'))
        # with hardcode_names off, 'gallery' falls back to the first matching route
        eq_('/gallery/home_thumbnail.jpg', url_for('gallery_thumb', img_id='home'))
        eq_('/gallery/home_thumbnail.jpg', url_for('gallery', img_id='home'))
        eq_('/building/wilma/port/alljacks', url.current())
        eq_('/', url.current('home'))
        eq_('/gallery/home_thumbnail.jpg', url.current('gallery_thumb', img_id='home'))
        eq_('/gallery/home_thumbnail.jpg', url.current('gallery', img_id='home'))

        # with hardcode_names on, each name generates from its own route
        m.hardcode_names = True
        eq_('/gallery/home_thumbnail.jpg', url_for('gallery_thumb', img_id='home'))
        eq_('/gallery/home.jpg', url_for('gallery', img_id='home'))
        eq_('/gallery/home_thumbnail.jpg', url.current('gallery_thumb', img_id='home'))
        eq_('/gallery/home.jpg', url.current('gallery', img_id='home'))
        m.hardcode_names = False
    def test_redirect_to(self):
        """redirect_to builds the URL and hands it to the config's redirect callback."""
        m = self.con.mapper
        self.con.mapper_dict = {}
        self.con.environ = dict(SCRIPT_NAME='', HTTP_HOST='www.example.com')
        result = None
        # capture the redirect target instead of actually redirecting
        def printer(echo):
            redirect_to.result = echo
        self.con.redirect = printer
        m.create_regs(['content','blog','admin/comments'])

        redirect_to(controller='content', action='view')
        eq_('/content/view', redirect_to.result)
        redirect_to(controller='content', action='lookup', id=4)
        eq_('/content/lookup/4', redirect_to.result)
        redirect_to(controller='admin/comments',action='splash')
        eq_('/admin/comments/splash', redirect_to.result)
        # absolute URLs and literal paths pass through (with query args appended)
        redirect_to('http://www.example.com/')
        eq_('http://www.example.com/', redirect_to.result)
        redirect_to('/somewhere.html', var='keyword')
        eq_('/somewhere.html?var=keyword', redirect_to.result)
    def test_redirect_to_with_route_names(self):
        """redirect_to accepts route names as well as controller/action parts."""
        m = self.con.mapper
        self.con.mapper_dict = {}
        result = None
        # capture the redirect target instead of actually redirecting
        def printer(echo):
            redirect_to.result = echo
        self.con.redirect = printer
        m.connect('home', '', controller='blog', action='splash')
        m.connect('category_home', 'category/:section', controller='blog', action='view', section='home')
        m.create_regs(['content','blog','admin/comments'])

        redirect_to(controller='content', action='view')
        eq_('/content/view', redirect_to.result)
        redirect_to(controller='content')
        eq_('/content', redirect_to.result)
        redirect_to(controller='admin/comments')
        eq_('/admin/comments', redirect_to.result)
        redirect_to('category_home')
        eq_('/category', redirect_to.result)
        redirect_to('category_home', section='food')
        eq_('/category/food', redirect_to.result)
        redirect_to('home')
        eq_('/', redirect_to.result)
def test_static_route(self):
    """_static routes generate their literal path/URL; extra kwargs become a query string."""
    m = self.con.mapper
    self.con.mapper_dict = {}
    self.con.environ = dict(SCRIPT_NAME='', HTTP_HOST='example.com')

    m.connect(':controller/:action/:id')
    m.connect('home', 'http://www.groovie.org/', _static=True)
    m.connect('space', '/nasa/images', _static=True)
    m.create_regs(['content', 'blog'])

    url = URLGenerator(m, {})
    # The global url_for and the URLGenerator instance should agree.
    for urlobj in [url_for, url]:
        eq_('http://www.groovie.org/', urlobj('home'))
        eq_('http://www.groovie.org/?s=stars', urlobj('home', s='stars'))
        eq_('/content/view', urlobj(controller='content', action='view'))
        eq_('/nasa/images?search=all', urlobj('space', search='all'))
def test_static_route_with_script(self):
    """SCRIPT_NAME is prepended to path-style static routes, not absolute URLs."""
    m = self.con.mapper
    self.con.mapper_dict = {}
    self.con.environ = dict(SCRIPT_NAME='/webapp', HTTP_HOST='example.com')

    m.connect(':controller/:action/:id')
    m.connect('home', 'http://www.groovie.org/', _static=True)
    m.connect('space', '/nasa/images', _static=True)
    m.connect('login', '/login', action='nowhereville')
    m.create_regs(['content', 'blog'])

    self.con.environ.update({'wsgiorg.routing_args':((), {})})
    url = URLGenerator(m, self.con.environ)
    for urlobj in [url_for, url]:
        # Absolute static URL: SCRIPT_NAME is not applied.
        eq_('http://www.groovie.org/', urlobj('home'))
        eq_('http://www.groovie.org/?s=stars', urlobj('home', s='stars'))
        # Path routes get the /webapp script-name prefix.
        eq_('/webapp/content/view', urlobj(controller='content', action='view'))
        eq_('/webapp/nasa/images?search=all', urlobj('space', search='all'))
        eq_('http://example.com/webapp/nasa/images', urlobj('space', protocol='http'))
        eq_('http://example.com/webapp/login', urlobj('login', qualified=True))
def test_static_route_with_vars(self):
    """Static routes with {vars} require every variable to be supplied."""
    m = self.con.mapper
    self.con.mapper_dict = {}
    self.con.environ = dict(SCRIPT_NAME='/webapp', HTTP_HOST='example.com')

    m.connect('home', 'http://{domain}.groovie.org/{location}', _static=True)
    m.connect('space', '/nasa/{location}', _static=True)
    m.create_regs(['home', 'space'])

    self.con.environ.update({'wsgiorg.routing_args':((), {})})
    url = URLGenerator(m, self.con.environ)
    for urlobj in [url_for, url]:
        # Partial variable sets raise; no defaults are declared on these routes.
        assert_raises(GenerationException, urlobj, 'home')
        assert_raises(GenerationException, urlobj, 'home', domain='fred')
        assert_raises(GenerationException, urlobj, 'home', location='index')
        eq_('http://fred.groovie.org/index', urlobj('home', domain='fred', location='index'))
        eq_('http://fred.groovie.org/index?search=all', urlobj('home', domain='fred', location='index', search='all'))
        eq_('/webapp/nasa/images?search=all', urlobj('space', location='images', search='all'))
        eq_('http://example.com/webapp/nasa/images', urlobj('space', location='images', protocol='http'))
def test_static_route_with_vars_and_defaults(self):
    """Defaults fill omitted {vars}; variables without defaults still raise."""
    m = self.con.mapper
    self.con.mapper_dict = {}
    self.con.environ = dict(SCRIPT_NAME='/webapp', HTTP_HOST='example.com')

    m.connect('home', 'http://{domain}.groovie.org/{location}', domain='routes', _static=True)
    m.connect('space', '/nasa/{location}', location='images', _static=True)
    m.create_regs(['home', 'space'])

    self.con.environ.update({'wsgiorg.routing_args':((), {})})
    url = URLGenerator(m, self.con.environ)

    # 'location' on 'home' has no default, so it is always required.
    assert_raises(GenerationException, url_for, 'home')
    assert_raises(GenerationException, url_for, 'home', domain='fred')
    eq_('http://routes.groovie.org/index', url_for('home', location='index'))
    eq_('http://fred.groovie.org/index', url_for('home', domain='fred', location='index'))
    eq_('http://routes.groovie.org/index?search=all', url_for('home', location='index', search='all'))
    eq_('http://fred.groovie.org/index?search=all', url_for('home', domain='fred', location='index', search='all'))
    eq_('/webapp/nasa/articles?search=all', url_for('space', location='articles', search='all'))
    eq_('http://example.com/webapp/nasa/articles', url_for('space', location='articles', protocol='http'))
    eq_('/webapp/nasa/images?search=all', url_for('space', search='all'))
    eq_('http://example.com/webapp/nasa/images', url_for('space', protocol='http'))

    # Same expectations through the URLGenerator instance.
    assert_raises(GenerationException, url.current, 'home')
    assert_raises(GenerationException, url.current, 'home', domain='fred')
    eq_('http://routes.groovie.org/index', url.current('home', location='index'))
    eq_('http://fred.groovie.org/index', url.current('home', domain='fred', location='index'))
    eq_('http://routes.groovie.org/index?search=all', url.current('home', location='index', search='all'))
    eq_('http://fred.groovie.org/index?search=all', url.current('home', domain='fred', location='index', search='all'))
    eq_('/webapp/nasa/articles?search=all', url.current('space', location='articles', search='all'))
    eq_('http://example.com/webapp/nasa/articles', url.current('space', location='articles', protocol='http'))
    eq_('/webapp/nasa/images?search=all', url.current('space', search='all'))
    eq_('http://example.com/webapp/nasa/images', url.current('space', protocol='http'))
def test_static_route_with_vars_and_requirements(self):
    """Values that violate a route's regex requirements raise GenerationException."""
    m = self.con.mapper
    self.con.mapper_dict = {}
    self.con.environ = dict(SCRIPT_NAME='/webapp', HTTP_HOST='example.com')

    m.connect('home', 'http://{domain}.groovie.org/{location}', requirements=dict(domain='fred|bob'), _static=True)
    m.connect('space', '/nasa/articles/{year}/{month}', requirements=dict(year=r'\d{2,4}', month=r'\d{1,2}'), _static=True)
    m.create_regs(['home', 'space'])

    self.con.environ.update({'wsgiorg.routing_args':((), {})})
    url = URLGenerator(m, self.con.environ)

    # Out-of-spec values: wrong alternation, non-digits, too few/many digits.
    assert_raises(GenerationException, url_for, 'home', domain='george', location='index')
    assert_raises(GenerationException, url_for, 'space', year='asdf', month='1')
    assert_raises(GenerationException, url_for, 'space', year='2004', month='a')
    assert_raises(GenerationException, url_for, 'space', year='1', month='1')
    assert_raises(GenerationException, url_for, 'space', year='20045', month='1')
    assert_raises(GenerationException, url_for, 'space', year='2004', month='123')
    eq_('http://fred.groovie.org/index', url_for('home', domain='fred', location='index'))
    eq_('http://bob.groovie.org/index', url_for('home', domain='bob', location='index'))
    eq_('http://fred.groovie.org/asdf', url_for('home', domain='fred', location='asdf'))
    eq_('/webapp/nasa/articles/2004/6', url_for('space', year='2004', month='6'))
    eq_('/webapp/nasa/articles/2004/12', url_for('space', year='2004', month='12'))
    eq_('/webapp/nasa/articles/89/6', url_for('space', year='89', month='6'))

    # Same expectations through the URLGenerator instance.
    assert_raises(GenerationException, url.current, 'home', domain='george', location='index')
    assert_raises(GenerationException, url.current, 'space', year='asdf', month='1')
    assert_raises(GenerationException, url.current, 'space', year='2004', month='a')
    assert_raises(GenerationException, url.current, 'space', year='1', month='1')
    assert_raises(GenerationException, url.current, 'space', year='20045', month='1')
    assert_raises(GenerationException, url.current, 'space', year='2004', month='123')
    eq_('http://fred.groovie.org/index', url.current('home', domain='fred', location='index'))
    eq_('http://bob.groovie.org/index', url.current('home', domain='bob', location='index'))
    eq_('http://fred.groovie.org/asdf', url.current('home', domain='fred', location='asdf'))
    eq_('/webapp/nasa/articles/2004/6', url.current('space', year='2004', month='6'))
    eq_('/webapp/nasa/articles/2004/12', url.current('space', year='2004', month='12'))
    eq_('/webapp/nasa/articles/89/6', url.current('space', year='89', month='6'))
def test_no_named_path(self):
    """Literal URLs and paths that match no route name pass straight through."""
    m = self.con.mapper
    self.con.mapper_dict = {}
    self.con.environ = dict(SCRIPT_NAME='', HTTP_HOST='example.com')

    m.connect(':controller/:action/:id')
    m.connect('home', 'http://www.groovie.org/', _static=True)
    m.connect('space', '/nasa/images', _static=True)
    m.create_regs(['content', 'blog'])

    url = URLGenerator(m, {})
    for urlobj in [url_for, url]:
        # Extra kwargs on a literal URL/path become a query string.
        eq_('http://www.google.com/search', urlobj('http://www.google.com/search'))
        eq_('http://www.google.com/search?q=routes', urlobj('http://www.google.com/search', q='routes'))
        eq_('/delicious.jpg', urlobj('/delicious.jpg'))
        eq_('/delicious/search?v=routes', urlobj('/delicious/search', v='routes'))
def test_append_slash(self):
    """append_slash adds a trailing slash to generated route paths (before any query string),
    but leaves literal URLs/paths untouched."""
    m = self.con.mapper
    self.con.mapper_dict = {}
    m.append_slash = True
    self.con.environ = dict(SCRIPT_NAME='', HTTP_HOST='example.com')

    m.connect(':controller/:action/:id')
    m.connect('home', 'http://www.groovie.org/', _static=True)
    m.connect('space', '/nasa/images', _static=True)
    m.create_regs(['content', 'blog'])

    url = URLGenerator(m, {})
    for urlobj in [url_for, url]:
        # Literal URLs/paths are unaffected by append_slash.
        eq_('http://www.google.com/search', urlobj('http://www.google.com/search'))
        eq_('http://www.google.com/search?q=routes', urlobj('http://www.google.com/search', q='routes'))
        eq_('/delicious.jpg', urlobj('/delicious.jpg'))
        eq_('/delicious/search?v=routes', urlobj('/delicious/search', v='routes'))
        # Generated route paths get the trailing slash.
        eq_('/content/list/', urlobj(controller='/content', action='list'))
        eq_('/content/list/?page=1', urlobj(controller='/content', action='list', page='1'))
def test_no_named_path_with_script(self):
    """SCRIPT_NAME is prepended to literal paths, but not to absolute URLs."""
    m = self.con.mapper
    self.con.mapper_dict = {}
    self.con.environ = dict(SCRIPT_NAME='/webapp', HTTP_HOST='example.com')

    m.connect(':controller/:action/:id')
    m.connect('home', 'http://www.groovie.org/', _static=True)
    m.connect('space', '/nasa/images', _static=True)
    m.create_regs(['content', 'blog'])

    url = URLGenerator(m, self.con.environ)
    for urlobj in [url_for, url]:
        eq_('http://www.google.com/search', urlobj('http://www.google.com/search'))
        eq_('http://www.google.com/search?q=routes', urlobj('http://www.google.com/search', q='routes'))
        eq_('/webapp/delicious.jpg', urlobj('/delicious.jpg'))
        eq_('/webapp/delicious/search?v=routes', urlobj('/delicious/search', v='routes'))
def test_route_filter(self):
    """A route's _filter callable can rewrite generation kwargs before the URL is built."""
    def article_filter(kargs):
        # Expand an 'article' dict into year/month/day/slug route parts,
        # falling back to fixed defaults for any missing keys.
        article = kargs.pop('article', None)
        if article is not None:
            kargs.update(
                dict(year=article.get('year', 2004),
                     month=article.get('month', 12),
                     day=article.get('day', 20),
                     slug=article.get('slug', 'default')
                )
            )
        return kargs

    self.con.mapper_dict = {}
    self.con.environ = dict(SCRIPT_NAME='', HTTP_HOST='example.com')

    m = Mapper(explicit=False)
    m.minimization = True
    m.connect(':controller/:(action)-:(id).html')
    m.connect('archives', 'archives/:year/:month/:day/:slug', controller='archives', action='view',
              _filter=article_filter)
    m.create_regs(['content','archives','admin/comments'])
    self.con.mapper = m

    url = URLGenerator(m, self.con.environ)
    for urlobj in [url_for, url]:
        # :(id) is required by the grouped-path route; omitting it raises.
        assert_raises(Exception, urlobj, controller='content', action='view')
        assert_raises(Exception, urlobj, controller='content')
        eq_('/content/view-3.html', urlobj(controller='content', action='view', id=3))
        eq_('/content/index-2.html', urlobj(controller='content', id=2))
        eq_('/archives/2005/10/5/happy',
            urlobj('archives',year=2005, month=10, day=5, slug='happy'))
        # The filter expands 'article' and supplies defaults for missing keys.
        story = dict(year=2003, month=8, day=2, slug='woopee')
        empty = {}
        eq_({'controller':'archives','action':'view','year':'2005',
             'month':'10','day':'5','slug':'happy'}, m.match('/archives/2005/10/5/happy'))
        eq_('/archives/2003/8/2/woopee', urlobj('archives', article=story))
        eq_('/archives/2004/12/20/default', urlobj('archives', article=empty))
def test_with_ssl_environ(self):
    """Under HTTPS, port 443 is omitted from generated URLs; other ports are kept."""
    base_environ = dict(SCRIPT_NAME='', HTTPS='on', SERVER_PORT='443', PATH_INFO='/',
                        HTTP_HOST='example.com', SERVER_NAME='example.com')
    self.con.mapper_dict = {}
    self.con.environ = base_environ.copy()

    m = Mapper(explicit=False)
    m.minimization = True
    m.connect(':controller/:action/:id')
    m.create_regs(['content','archives','admin/comments'])
    m.sub_domains = True
    self.con.mapper = m

    url = URLGenerator(m, self.con.environ)
    for urlobj in [url_for, url]:
        # HTTPS is on, but we're running on a different port internally
        eq_(self.con.protocol, 'https')
        eq_('/content/view', urlobj(controller='content', action='view'))
        eq_('/content/index/2', urlobj(controller='content', id=2))
        eq_('https://nowhere.com/content', urlobj(host='nowhere.com', controller='content'))

        # If HTTPS is on, but the port isn't 443, we'll need to include the port info
        environ = base_environ.copy()
        environ.update(dict(SERVER_PORT='8080'))
        self.con.environ = environ
        self.con.mapper_dict = {}
        eq_('/content/index/2', urlobj(controller='content', id=2))
        eq_('https://nowhere.com/content', urlobj(host='nowhere.com', controller='content'))
        eq_('https://nowhere.com:8080/content', urlobj(host='nowhere.com:8080', controller='content'))
        eq_('http://nowhere.com/content', urlobj(host='nowhere.com', protocol='http', controller='content'))
        eq_('http://home.com/content', urlobj(host='home.com', protocol='http', controller='content'))
def test_with_http_environ(self):
    """Plain-HTTP environ: protocol comes from wsgi.url_scheme; protocol= can override."""
    base_environ = dict(SCRIPT_NAME='', SERVER_PORT='1080', PATH_INFO='/',
                        HTTP_HOST='example.com', SERVER_NAME='example.com')
    base_environ['wsgi.url_scheme'] = 'http'
    self.con.environ = base_environ.copy()
    self.con.mapper_dict = {}

    m = Mapper(explicit=False)
    m.minimization = True
    m.connect(':controller/:action/:id')
    m.create_regs(['content','archives','admin/comments'])
    self.con.mapper = m

    url = URLGenerator(m, self.con.environ)
    for urlobj in [url_for, url]:
        eq_(self.con.protocol, 'http')
        eq_('/content/view', urlobj(controller='content', action='view'))
        eq_('/content/index/2', urlobj(controller='content', id=2))
        # Forcing https yields a fully-qualified URL.
        eq_('https://example.com/content', urlobj(protocol='https', controller='content'))
def test_subdomains(self):
    """With sub_domains on, a sub_domain kwarg produces a fully-qualified URL."""
    base_environ = dict(SCRIPT_NAME='', PATH_INFO='/', HTTP_HOST='example.com', SERVER_NAME='example.com')
    self.con.mapper_dict = {}
    self.con.environ = base_environ.copy()

    m = Mapper(explicit=False)
    m.minimization = True
    m.sub_domains = True
    m.connect(':controller/:action/:id')
    m.create_regs(['content','archives','admin/comments'])
    self.con.mapper = m

    url = URLGenerator(m, self.con.environ)
    for urlobj in [url_for, url]:
        eq_('/content/view', urlobj(controller='content', action='view'))
        eq_('/content/index/2', urlobj(controller='content', id=2))

        # Switch the current request to the 'sub' sub-domain.
        environ = base_environ.copy()
        environ.update(dict(HTTP_HOST='sub.example.com'))
        self.con.environ = environ
        self.con.mapper_dict = {'sub_domain':'sub'}
        eq_('/content/view/3', urlobj(controller='content', action='view', id=3))
        eq_('http://new.example.com/content', urlobj(controller='content', sub_domain='new'))
def test_subdomains_with_exceptions(self):
    """sub_domains_ignore: 'www' generates against the bare domain."""
    base_environ = dict(SCRIPT_NAME='', PATH_INFO='/', HTTP_HOST='example.com', SERVER_NAME='example.com')
    self.con.mapper_dict = {}
    self.con.environ = base_environ.copy()

    m = Mapper(explicit=False)
    m.minimization = True
    m.sub_domains = True
    m.sub_domains_ignore = ['www']
    m.connect(':controller/:action/:id')
    m.create_regs(['content','archives','admin/comments'])
    self.con.mapper = m

    url = URLGenerator(m, self.con.environ)
    eq_('/content/view', url_for(controller='content', action='view'))
    eq_('/content/index/2', url_for(controller='content', id=2))
    eq_('/content/view', url(controller='content', action='view'))
    eq_('/content/index/2', url(controller='content', id=2))

    # Current request matched under the 'sub' sub-domain.
    environ = base_environ.copy()
    environ.update(dict(HTTP_HOST='sub.example.com'))
    self.con.environ = environ
    self.con.mapper_dict = {'sub_domain':'sub'}
    self.con.environ.update({'wsgiorg.routing_args':((), self.con.mapper_dict)})
    url = URLGenerator(m, self.con.environ)
    eq_('/content/view/3', url_for(controller='content', action='view', id=3))
    eq_('http://new.example.com/content', url_for(controller='content', sub_domain='new'))
    # 'www' is ignored, so the bare domain is used.
    eq_('http://example.com/content', url_for(controller='content', sub_domain='www'))
    eq_('/content/view/3', url(controller='content', action='view', id=3))
    eq_('http://new.example.com/content', url(controller='content', sub_domain='new'))
    eq_('http://example.com/content', url(controller='content', sub_domain='www'))

    # Current request matched under the ignored 'www' sub-domain.
    self.con.mapper_dict = {'sub_domain':'www'}
    self.con.environ.update({'wsgiorg.routing_args':((), self.con.mapper_dict)})
    url = URLGenerator(m, self.con.environ)
    eq_('http://example.com/content/view/3', url_for(controller='content', action='view', id=3))
    eq_('http://new.example.com/content', url_for(controller='content', sub_domain='new'))
    eq_('/content', url_for(controller='content', sub_domain='sub'))
    # This requires the sub-domain, because we don't automatically go to the existing match dict
    eq_('http://example.com/content/view/3', url(controller='content', action='view', id=3, sub_domain='www'))
    eq_('http://new.example.com/content', url(controller='content', sub_domain='new'))
    eq_('/content', url(controller='content', sub_domain='sub'))
def test_subdomains_with_named_routes(self):
    """sub_domain= works with named routes; sub_domain=None targets the bare domain."""
    base_environ = dict(SCRIPT_NAME='', PATH_INFO='/', HTTP_HOST='example.com', SERVER_NAME='example.com')
    self.con.mapper_dict = {}
    self.con.environ = base_environ.copy()

    m = Mapper(explicit=False)
    m.minimization = True
    m.sub_domains = True
    m.connect(':controller/:action/:id')
    m.connect('category_home', 'category/:section', controller='blog', action='view', section='home')
    m.connect('building', 'building/:campus/:building/alljacks', controller='building', action='showjacks')
    m.create_regs(['content','blog','admin/comments','building'])
    self.con.mapper = m

    url = URLGenerator(m, self.con.environ)
    for urlobj in [url_for, url]:
        eq_('/content/view', urlobj(controller='content', action='view'))
        eq_('/content/index/2', urlobj(controller='content', id=2))
        eq_('/category', urlobj('category_home'))
        eq_('http://new.example.com/category', urlobj('category_home', sub_domain='new'))

    # Current request matched under the 'sub' sub-domain.
    environ = base_environ.copy()
    environ.update(dict(HTTP_HOST='sub.example.com'))
    self.con.environ = environ
    self.con.mapper_dict = {'sub_domain':'sub'}
    self.con.environ.update({'wsgiorg.routing_args':((), self.con.mapper_dict)})
    url = URLGenerator(m, self.con.environ)
    eq_('/content/view/3', url_for(controller='content', action='view', id=3))
    eq_('http://joy.example.com/building/west/merlot/alljacks',
        url_for('building', campus='west', building='merlot', sub_domain='joy'))
    eq_('http://example.com/category/feeds', url_for('category_home', section='feeds', sub_domain=None))
    eq_('/content/view/3', url(controller='content', action='view', id=3))
    eq_('http://joy.example.com/building/west/merlot/alljacks',
        url('building', campus='west', building='merlot', sub_domain='joy'))
    eq_('http://example.com/category/feeds', url('category_home', section='feeds', sub_domain=None))
def test_subdomains_with_ports(self):
    """Sub-domain URLs keep the port from the current HTTP_HOST."""
    base_environ = dict(SCRIPT_NAME='', PATH_INFO='/', HTTP_HOST='example.com:8000', SERVER_NAME='example.com')
    self.con.mapper_dict = {}
    self.con.environ = base_environ.copy()

    m = Mapper(explicit=False)
    m.minimization = True
    m.sub_domains = True
    m.connect(':controller/:action/:id')
    m.connect('category_home', 'category/:section', controller='blog', action='view', section='home')
    m.connect('building', 'building/:campus/:building/alljacks', controller='building', action='showjacks')
    m.create_regs(['content','blog','admin/comments','building'])
    self.con.mapper = m

    url = URLGenerator(m, self.con.environ)
    for urlobj in [url, url_for]:
        self.con.environ['HTTP_HOST'] = 'example.com:8000'
        eq_('/content/view', urlobj(controller='content', action='view'))
        eq_('/category', urlobj('category_home'))
        eq_('http://new.example.com:8000/category', urlobj('category_home', sub_domain='new'))
        eq_('http://joy.example.com:8000/building/west/merlot/alljacks',
            urlobj('building', campus='west', building='merlot', sub_domain='joy'))

        # Changing HTTP_HOST requires dropping the cached host info.
        self.con.environ['HTTP_HOST'] = 'example.com'
        del self.con.environ['routes.cached_hostinfo']
        eq_('http://new.example.com/category', urlobj('category_home', sub_domain='new'))
def test_subdomains_with_default(self):
    """A route's sub_domain default + condition constrains generation to that sub-domain."""
    base_environ = dict(SCRIPT_NAME='', PATH_INFO='/', HTTP_HOST='example.com:8000', SERVER_NAME='example.com')
    self.con.mapper_dict = {}
    self.con.environ = base_environ.copy()

    m = Mapper(explicit=False)
    m.minimization = True
    m.sub_domains = True
    m.connect(':controller/:action/:id')
    m.connect('category_home', 'category/:section', controller='blog', action='view', section='home',
              sub_domain='cat', conditions=dict(sub_domain=['cat']))
    m.connect('building', 'building/:campus/:building/alljacks', controller='building', action='showjacks')
    m.create_regs(['content','blog','admin/comments','building'])
    self.con.mapper = m

    urlobj = URLGenerator(m, self.con.environ)
    self.con.environ['HTTP_HOST'] = 'example.com:8000'
    eq_('/content/view', urlobj(controller='content', action='view'))
    # The route's default sub_domain ('cat') is applied automatically.
    eq_('http://cat.example.com:8000/category', urlobj('category_home'))

    self.con.environ['HTTP_HOST'] = 'example.com'
    del self.con.environ['routes.cached_hostinfo']
    # 'new' violates the route's sub_domain condition.
    assert_raises(GenerationException, lambda: urlobj('category_home', sub_domain='new'))
def test_controller_scan(self):
    """controller_scan() finds the three controllers under test_files/controller_files."""
    tests_root = os.path.dirname(os.path.dirname(__file__))
    scan_dir = os.path.join(tests_root, 'test_files', 'controller_files')
    found = controller_scan(scan_dir)
    expected = ['admin/users', 'content', 'users']
    eq_(len(found), 3)
    for position, name in enumerate(expected):
        eq_(found[position], name)
def test_auto_controller_scan(self):
    """With always_scan, the controller directory is rescanned so matches resolve
    without an explicit create_regs() call."""
    here_dir = os.path.dirname(__file__)
    controller_dir = os.path.join(os.path.dirname(here_dir),
                                  os.path.join('test_files', 'controller_files'))
    m = Mapper(directory=controller_dir, explicit=False)
    m.minimization = True
    m.always_scan = True
    m.connect(':controller/:action/:id')

    eq_({'action':'index', 'controller':'content','id':None}, m.match('/content'))
    eq_({'action':'index', 'controller':'users','id':None}, m.match('/users'))
    eq_({'action':'index', 'controller':'admin/users','id':None}, m.match('/admin/users'))
class TestUtilsWithExplicit(unittest.TestCase):
    """URL-generation tests against a Mapper in explicit mode, where missing
    route parts raise instead of being filled from defaults or the match dict."""

    def setUp(self):
        m = Mapper(explicit=True)
        m.minimization = True
        # NOTE(review): the '\d' escapes below are non-raw strings; they still
        # work but emit warnings on newer Pythons.
        m.connect('archive/:year/:month/:day', controller='blog', action='view', month=None, day=None,
                  requirements={'month':'\d{1,2}','day':'\d{1,2}'})
        m.connect('viewpost/:id', controller='post', action='view', id=None)
        m.connect(':controller/:action/:id')
        con = request_config()
        con.mapper = m
        con.host = 'www.test.com'
        con.protocol = 'http'
        self.con = con

    def test_url_for(self):
        """Explicit mode: url_for requires both controller and action."""
        con = self.con
        con.mapper_dict = {}

        assert_raises(Exception, url_for, controller='blog')
        assert_raises(Exception, url_for)
        eq_('/blog/view/3', url_for(controller='blog', action='view', id=3))
        eq_('https://www.test.com/viewpost', url_for(controller='post', action='view', protocol='https'))
        eq_('http://www.test.org/content/view/2', url_for(host='www.test.org', controller='content', action='view', id=2))

    def test_url_for_with_defaults(self):
        """Explicit mode does not reuse the current match dict as implicit defaults."""
        con = self.con
        con.mapper_dict = {'controller':'blog','action':'view','id':4}

        assert_raises(Exception, url_for)
        assert_raises(Exception, url_for, controller='post')
        assert_raises(Exception, url_for, id=2)
        eq_('/viewpost/4', url_for(controller='post', action='view', id=4))

        con.mapper_dict = {'controller':'blog','action':'view','year':2004}
        assert_raises(Exception, url_for, month=10)
        assert_raises(Exception, url_for, month=9, day=2)
        assert_raises(Exception, url_for, controller='blog', year=None)

    def test_url_for_with_more_defaults(self):
        """Further variations on match-dict values being ignored in explicit mode."""
        con = self.con
        con.mapper_dict = {'controller':'blog','action':'view','id':4}

        assert_raises(Exception, url_for)
        assert_raises(Exception, url_for, controller='post')
        assert_raises(Exception, url_for, id=2)
        eq_('/viewpost/4', url_for(controller='post', action='view', id=4))

        con.mapper_dict = {'controller':'blog','action':'view','year':2004}
        assert_raises(Exception, url_for, month=10)
        assert_raises(Exception, url_for)

    def test_url_for_with_defaults_and_qualified(self):
        """qualified=True builds absolute URLs from the WSGI environ host/port."""
        m = self.con.mapper
        m.connect('home', '', controller='blog', action='splash')
        m.connect('category_home', 'category/:section', controller='blog', action='view', section='home')
        m.connect(':controller/:action/:id')
        m.create_regs(['content','blog','admin/comments'])

        env = dict(SCRIPT_NAME='', SERVER_NAME='www.example.com', SERVER_PORT='80', PATH_INFO='/blog/view/4')
        env['wsgi.url_scheme'] = 'http'
        self.con.environ = env

        assert_raises(Exception, url_for)
        assert_raises(Exception, url_for, controller='post')
        assert_raises(Exception, url_for, id=2)
        assert_raises(Exception, url_for, qualified=True, controller='blog', id=4)
        eq_('http://www.example.com/blog/view/4', url_for(qualified=True, controller='blog', action='view', id=4))
        eq_('/viewpost/4', url_for(controller='post', action='view', id=4))

        # A non-default port must appear in the qualified URL.
        env = dict(SCRIPT_NAME='', SERVER_NAME='www.example.com', SERVER_PORT='8080', PATH_INFO='/blog/view/4')
        env['wsgi.url_scheme'] = 'http'
        self.con.environ = env
        assert_raises(Exception, url_for, controller='post')
        eq_('http://www.example.com:8080/blog/view/4', url_for(qualified=True, controller='blog', action='view', id=4))

    def test_with_route_names(self):
        """Named routes generate; bare controller/action kwargs raise in explicit mode."""
        m = self.con.mapper
        m.minimization = True
        self.con.mapper_dict = {}
        m.connect('home', '', controller='blog', action='splash')
        m.connect('category_home', 'category/:section', controller='blog', action='view', section='home')
        m.create_regs(['content','blog','admin/comments'])

        assert_raises(Exception, url_for, controller='content', action='view')
        assert_raises(Exception, url_for, controller='content')
        assert_raises(Exception, url_for, controller='admin/comments')
        eq_('/category', url_for('category_home'))
        eq_('/category/food', url_for('category_home', section='food'))
        assert_raises(Exception, url_for, 'home', controller='content')
        eq_('/', url_for('home'))

    def test_with_route_names_and_nomin(self):
        """With minimization off, the default section stays in the generated path."""
        m = self.con.mapper
        m.minimization = False
        self.con.mapper_dict = {}
        m.connect('home', '', controller='blog', action='splash')
        m.connect('category_home', 'category/:section', controller='blog', action='view', section='home')
        m.create_regs(['content','blog','admin/comments'])

        assert_raises(Exception, url_for, controller='content', action='view')
        assert_raises(Exception, url_for, controller='content')
        assert_raises(Exception, url_for, controller='admin/comments')
        eq_('/category/home', url_for('category_home'))
        eq_('/category/food', url_for('category_home', section='food'))
        assert_raises(Exception, url_for, 'home', controller='content')
        eq_('/', url_for('home'))

    def test_with_route_names_and_defaults(self):
        """Even a fully-populated match dict is not reused implicitly."""
        m = self.con.mapper
        self.con.mapper_dict = {}
        m.connect('home', '', controller='blog', action='splash')
        m.connect('category_home', 'category/:section', controller='blog', action='view', section='home')
        m.connect('building', 'building/:campus/:building/alljacks', controller='building', action='showjacks')
        m.create_regs(['content','blog','admin/comments','building'])

        self.con.mapper_dict = dict(controller='building', action='showjacks', campus='wilma', building='port')
        assert_raises(Exception, url_for)
        eq_('/building/wilma/port/alljacks', url_for(controller='building', action='showjacks', campus='wilma', building='port'))
        eq_('/', url_for('home'))

    def test_with_resource_route_names(self):
        """Mapper.resource() creates the full set of RESTful named routes."""
        m = Mapper()
        self.con.mapper = m
        self.con.mapper_dict = {}
        m.resource('message', 'messages', member={'mark':'GET'}, collection={'rss':'GET'})
        m.create_regs(['messages'])

        assert_raises(Exception, url_for, controller='content', action='view')
        assert_raises(Exception, url_for, controller='content')
        assert_raises(Exception, url_for, controller='admin/comments')
        eq_('/messages', url_for('messages'))
        eq_('/messages/rss', url_for('rss_messages'))
        eq_('/messages/4', url_for('message', id=4))
        eq_('/messages/4/edit', url_for('edit_message', id=4))
        eq_('/messages/4/mark', url_for('mark_message', id=4))
        eq_('/messages/new', url_for('new_message'))
        eq_('/messages.xml', url_for('formatted_messages', format='xml'))
        eq_('/messages/rss.xml', url_for('formatted_rss_messages', format='xml'))
        eq_('/messages/4.xml', url_for('formatted_message', id=4, format='xml'))
        eq_('/messages/4/edit.xml', url_for('formatted_edit_message', id=4, format='xml'))
        eq_('/messages/4/mark.xml', url_for('formatted_mark_message', id=4, format='xml'))
        eq_('/messages/new.xml', url_for('formatted_new_message', format='xml'))

    def test_with_resource_route_names_and_nomin(self):
        """resource() routes generate identically with minimization disabled."""
        m = Mapper()
        self.con.mapper = m
        self.con.mapper_dict = {}
        m.minimization = False
        m.resource('message', 'messages', member={'mark':'GET'}, collection={'rss':'GET'})
        m.create_regs(['messages'])

        assert_raises(Exception, url_for, controller='content', action='view')
        assert_raises(Exception, url_for, controller='content')
        assert_raises(Exception, url_for, controller='admin/comments')
        eq_('/messages', url_for('messages'))
        eq_('/messages/rss', url_for('rss_messages'))
        eq_('/messages/4', url_for('message', id=4))
        eq_('/messages/4/edit', url_for('edit_message', id=4))
        eq_('/messages/4/mark', url_for('mark_message', id=4))
        eq_('/messages/new', url_for('new_message'))
        eq_('/messages.xml', url_for('formatted_messages', format='xml'))
        eq_('/messages/rss.xml', url_for('formatted_rss_messages', format='xml'))
        eq_('/messages/4.xml', url_for('formatted_message', id=4, format='xml'))
        eq_('/messages/4/edit.xml', url_for('formatted_edit_message', id=4, format='xml'))
        eq_('/messages/4/mark.xml', url_for('formatted_mark_message', id=4, format='xml'))
        eq_('/messages/new.xml', url_for('formatted_new_message', format='xml'))
if __name__ == '__main__':
    unittest.main()
else:
    def bench_gen(withcache=False):
        """Benchmark URL generation over a representative route set.

        Connects a typical blog-style set of routes, generates six URLs per
        iteration for 5000 iterations, subtracts the bare loop overhead, and
        prints per-URL timing.  Pass ``withcache=True`` to enable the mapper's
        generation cache.
        """
        m = Mapper(explicit=False)
        m.connect('', controller='articles', action='index')
        m.connect('admin', controller='admin/general', action='index')
        m.connect('admin/comments/article/:article_id/:action/:id',
                  controller='admin/comments', action=None, id=None)
        m.connect('admin/trackback/article/:article_id/:action/:id',
                  controller='admin/trackback', action=None, id=None)
        m.connect('admin/content/:action/:id', controller='admin/content')
        m.connect('xml/:action/feed.xml', controller='xml')
        m.connect('xml/articlerss/:id/feed.xml', controller='xml', action='articlerss')
        m.connect('index.rdf', controller='xml', action='rss')
        m.connect('articles', controller='articles', action='index')
        # Raw strings for the \d requirements avoid invalid-escape warnings
        # on modern Python; the patterns are unchanged.
        m.connect('articles/page/:page', controller='articles', action='index',
                  requirements={'page': r'\d+'})
        m.connect('articles/:year/:month/:day/page/:page', controller='articles',
                  action='find_by_date', month=None, day=None,
                  requirements={'year': r'\d{4}', 'month': r'\d{1,2}', 'day': r'\d{1,2}'})
        m.connect('articles/category/:id', controller='articles', action='category')
        m.connect('pages/*name', controller='articles', action='view_page')

        con = Config()
        con.mapper = m
        con.host = 'www.test.com'
        con.protocol = 'http'
        con.mapper_dict = {'controller': 'xml', 'action': 'articlerss'}

        if withcache:
            m.urlcache = {}
        m._create_gens()

        n = 5000
        start = time.time()
        for x in range(1, n):
            url_for(controller='/articles', action='index', page=4)
            url_for(controller='admin/general', action='index')
            url_for(controller='admin/comments', action='show', article_id=2)
            url_for(controller='articles', action='find_by_date', year=2004, page=1)
            url_for(controller='articles', action='category', id=4)
            url_for(id=2)
        end = time.time()

        # Time an empty loop of the same size so its overhead can be subtracted.
        ts = time.time()
        for x in range(1, n * 6):
            pass
        en = time.time()

        total = end - start - (en - ts)
        per_url = total / (n * 6)
        print("Generation (%s URLs) RouteSet" % (n * 6))
        print("%s ms/url" % (per_url * 1000))
        print("%s urls/s\n" % (1.00 / per_url))
|
{
"content_hash": "ec942696627a37814dcc88960b351f2c",
"timestamp": "",
"source": "github",
"line_count": 959,
"max_line_length": 130,
"avg_line_length": 50.5140771637122,
"alnum_prop": 0.5963709927130855,
"repo_name": "bbangert/routes",
"id": "e2a6a0489f00e03ff30e44cad5e4185c2898ed65",
"size": "48443",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_functional/test_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "292746"
}
],
"symlink_target": ""
}
|
import yaml, shutil, os, sys
from collections import OrderedDict
# Placeholder region code used when particular location has no region specified.
# Shouldn't match any of subdivision codes defined by ISO 3166-2
UNKNOWN_REGION_CODE = ".NONE"
DEFAULT_OUTPUT_BASENAME = "country"
# YAML node name that carries parser hints (see PARSER_HINT_* below)
PARSER_HINT_TAG = "flags"
TODO_MARKER = "(TODO)"
PROJECT_HOMEPAGE = "https://github.com/sergeuz/locode"
VERSION_STRING = "0.2.0"
# Supported parser hints
PARSER_HINT_PRESERVE = "preserve"
PARSER_HINT_ODD = "odd" # Not recognized actually, see "Document structure"
# Global module settings
quiet = False # suppress all console output when True
verbose = False # enable extra diagnostics when True (has no effect while quiet)
def print_q(*objs):
    """Print *objs* unless the module-level ``quiet`` flag is set."""
    if quiet:
        return
    print(*objs)
def print_v(*objs):
    """Print *objs* only when verbose mode is on (still honours ``quiet``)."""
    if not verbose:
        return
    print_q(*objs)
def simplify_str(s):
    """Collapse every run of whitespace in *s* to a single space and strip ends."""
    words = s.split()
    return ' '.join(words)
def is_upper_str(s):
    """Return True when every alphabetic character of *s* is upper/title case.

    Non-alphabetic characters (digits, dots, spaces) are ignored, so codes
    like "US-3" pass.
    """
    return all(c.istitle() for c in s if c.isalpha())
def transact_copy(src_dir, dest_dir, replace=None):
    """Copy every regular file from *src_dir* into *dest_dir*, all-or-nothing.

    First verifies that every destination file is absent or writable and
    raises RuntimeError before anything is copied otherwise; only then
    performs the copies.  *replace* affects log wording only: None gives a
    generic message, truthy logs "Replacing", falsy logs "Updating"
    (destinations are always overwritten either way).
    """
    files = []
    # Pass 1: check we will be able to write every destination file.
    for name in os.listdir(src_dir):
        src_file = os.path.abspath(os.path.join(src_dir, name))
        if not os.path.isfile(src_file):
            continue  # skip subdirectories and other non-regular entries
        dest_file = os.path.abspath(os.path.join(dest_dir, name))
        if not os.path.exists(dest_file) or os.path.isfile(dest_file) and os.access(dest_file, os.W_OK):
            files.append((src_file, dest_file))
        else:
            raise RuntimeError("Unable to write to file: {}".format(dest_file))
    # Pass 2: perform the copies.
    for src_file, dest_file in files:
        if replace is not None:  # idiomatic identity check (was `!= None`)
            if os.path.exists(dest_file):
                if replace:
                    print_q("Replacing file:", dest_file)
                else:
                    print_q("Updating file:", dest_file) # Actually we always overwrite files
            else:
                print_q("Creating file:", dest_file)
        else:
            print_q("Saving file:", dest_file) # Generic message
        shutil.copyfile(src_file, dest_file)
def parse_yml_file(file_name, yml_dest, ctry_codes):
    """Parse one YAML source file and merge its "country" data into *yml_dest*.

    Emits warnings on stderr for malformed or mixed-case country/region/city
    codes.  When *ctry_codes* is non-empty, only those countries (upper-case
    codes) are merged; otherwise the whole document is merged.
    """
    print_q("Loading file:", file_name)
    # Context manager guarantees the file is closed even if parsing raises
    # (the original leaked the handle on a yaml error).
    with open(file_name, 'r') as f:
        yml_src = yaml.safe_load(f)
    if "country" not in yml_src:
        sys.stderr.write("Warning: No country data found, skipping file\n")
        return
    # Checking if all codes are provided in upper case. Helps mostly with loc2yaml
    # debugging when certain unquoted node names, such as 'NO' (Norway), are being
    # converted to other Python types by pyyaml's safe_load() parser, rather than
    # being represented as strings
    src_ctry_root = yml_src["country"]
    for ctry_code, src_ctry in src_ctry_root.items():
        if len(ctry_code) != 2:
            sys.stderr.write("Warning: Invalid country code: {}\n".format(ctry_code))
            continue
        if not is_upper_str(ctry_code):
            sys.stderr.write("Warning: Country code contains mixed case letters: {}\n".format(ctry_code))
        if "region" in src_ctry:
            for region_code, src_region in src_ctry["region"].items():
                if not is_upper_str(region_code):
                    sys.stderr.write("Warning: Region code contains mixed case letters: {}\n".format(region_code))
                if "city" in src_region:
                    for city_code in src_region["city"].keys():
                        if not is_upper_str(city_code):
                            sys.stderr.write("Warning: City code contains mixed case letters: {}\n".format(city_code))
    if ctry_codes:
        # Filtering content by country codes
        dest_ctry_root = yml_dest["country"]
        for ctry_code in src_ctry_root:
            if ctry_code in ctry_codes: # Upper-case expected
                dest_ctry_root.setdefault(ctry_code, {}).update(src_ctry_root[ctry_code])
    else:
        yml_dest.update(yml_src)
def arrange_yml_nodes(yml_node):
    """Recursively reorder mapping keys into canonical output order.

    Order: name/default/flags first, then the remaining keys sorted, then
    region/city/unknown-region last.  Non-mapping values pass through
    unchanged.
    """
    # isinstance replaces `type(yml_node) != type({})` (idiomatic, and also
    # handles dict subclasses); input from yaml.safe_load is plain dicts.
    if not isinstance(yml_node, dict):
        return yml_node
    # Nodes to appear at first place
    head = ["name", "default", PARSER_HINT_TAG]
    # And ones to be placed at the end
    tail = ["region", "city", UNKNOWN_REGION_CODE]
    res = OrderedDict()
    for key in head:
        if key in yml_node:
            res[key] = arrange_yml_nodes(yml_node[key])
    for key, val in sorted(yml_node.items()): # Other nodes come in sorted order
        if key not in head and key not in tail:
            res[key] = arrange_yml_nodes(val)
    for key in tail:
        if key in yml_node:
            res[key] = arrange_yml_nodes(yml_node[key])
    return res
def write_yml_data(yml_root, stream):
    """Serialize *yml_root* to *stream* as YAML with nodes in canonical order."""
    ordered = arrange_yml_nodes(yml_root)
    text = yaml.safe_dump(ordered, allow_unicode=True, default_flow_style=False)
    stream.write(text)
# Representer function for pyyaml used to dump OrderedDict objects. Basically a
# copy-paste of BaseRepresenter.represent_mapping() without using sort()
def represent_odict(dump, tag, mapping, flow_style = None):
    """Represent *mapping* as a YAML mapping node, preserving key order.

    Copy of BaseRepresenter.represent_mapping() minus the key sort(), so
    OrderedDict entries are dumped in insertion order.
    """
    value = []
    node = yaml.MappingNode(tag, value, flow_style=flow_style)
    if dump.alias_key is not None:
        # Register the node so later aliases to the same object resolve to it.
        dump.represented_objects[dump.alias_key] = node
    best_style = True
    if hasattr(mapping, 'items'):
        mapping = mapping.items()
    for item_key, item_value in mapping:
        node_key = dump.represent_data(item_key)
        node_value = dump.represent_data(item_value)
        # Any non-plain-scalar key or value forces block style for the mapping.
        if not (isinstance(node_key, yaml.ScalarNode) and not node_key.style):
            best_style = False
        if not (isinstance(node_value, yaml.ScalarNode) and not node_value.style):
            best_style = False
        value.append((node_key, node_value))
    if flow_style is None:
        if dump.default_flow_style is not None:
            node.flow_style = dump.default_flow_style
        else:
            node.flow_style = best_style
    return node
# NOTE(review): this guard fires when the file is *imported* under the module
# name "locode" (not when executed as a script) -- confirm that is intentional.
if __name__ == "locode":
    # Installing our custom representer for OrderedDict objects
    yaml.SafeDumper.add_representer(OrderedDict,
        lambda dump, val: represent_odict(dump, "tag:yaml.org,2002:map", val))
|
{
"content_hash": "f9cf2d816996ce6b664eccb0ac105a8a",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 118,
"avg_line_length": 36.45238095238095,
"alnum_prop": 0.6209993468321359,
"repo_name": "sergeuz/locode",
"id": "fa20f071779017edc5435ed7e938e04cbd9a3441",
"size": "6341",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "locode.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "58667"
}
],
"symlink_target": ""
}
|
"""fb
Revision ID: 0844c85671d9
Revises:
Create Date: 2017-09-05 21:42:15.234396+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0844c85671d9' # this migration's id
down_revision = None # first migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Add Facebook-publishing tracking columns and index to ``donations``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('donations', sa.Column('fb_post_id', sa.String(length=92), nullable=True))
    op.add_column('donations', sa.Column('fb_publish_attempts', sa.Integer(), nullable=False))
    op.add_column('donations', sa.Column('fb_publish_last_attempt_at', sa.DateTime(), nullable=True))
    op.add_column('donations', sa.Column('fb_published_at', sa.DateTime(), nullable=True))
    op.add_column('donations', sa.Column('fb_skip_msg', sa.SmallInteger(), nullable=False))
    op.create_index(op.f('ix_donations_fb_skip_msg'), 'donations', ['fb_skip_msg'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Reverse upgrade(): drop the Facebook-publishing columns and index."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_donations_fb_skip_msg'), table_name='donations')
    op.drop_column('donations', 'fb_skip_msg')
    op.drop_column('donations', 'fb_published_at')
    op.drop_column('donations', 'fb_publish_last_attempt_at')
    op.drop_column('donations', 'fb_publish_attempts')
    op.drop_column('donations', 'fb_post_id')
    # ### end Alembic commands ###
|
{
"content_hash": "0c8b23e2a23739cd441cd45c104ee002",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 101,
"avg_line_length": 37.26315789473684,
"alnum_prop": 0.6807909604519774,
"repo_name": "yolosec/zeman-parser",
"id": "b6145e1fd09c15f247d71f7b464c87358ad6bbd9",
"size": "1416",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "alembic/versions/20170905_2142_0844c85671d9_fb.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "64955"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import unittest
import six
import mock
from telemetry import project_config
from telemetry.internal.backends.chrome import chrome_startup_args
from telemetry.internal.browser import browser_options as browser_options_module
from telemetry.util import wpr_modes
class FakeBrowserOptions(browser_options_module.BrowserOptions):
  """BrowserOptions stand-in with fixed chrome/desktop settings for tests."""
  def __init__(self, wpr_mode=wpr_modes.WPR_OFF):
    super().__init__()
    self.browser_type = 'chrome'
    self.browser_user_agent_type = 'desktop'
    self.wpr_mode = wpr_mode
    # Leave all optional background features enabled.
    self.disable_background_networking = False
    self.disable_component_extensions_with_background_pages = False
    self.disable_default_apps = False
class FakeProjectConfig(project_config.ProjectConfig):
  """ProjectConfig stand-in whose flag hook strips every '--bar' argument."""
  def __init__(self):
    super().__init__(top_level_dir=None)

  def AdjustStartupFlags(self, args):
    # Drop '--bar' while preserving the relative order of everything else.
    kept = []
    for arg in args:
      if arg != '--bar':
        kept.append(arg)
    return kept
class StartupArgsTest(unittest.TestCase):
  """Test expected inputs for GetBrowserStartupArgs."""

  def testAdjustStartupFlagsApplied(self):
    options = FakeBrowserOptions()
    options.AppendExtraBrowserArgs(['--foo', '--bar'])
    options.environment = FakeProjectConfig()
    args = chrome_startup_args.GetFromBrowserOptions(options)
    # FakeProjectConfig.AdjustStartupFlags removes '--bar' but keeps '--foo'.
    self.assertIn('--foo', args)
    self.assertNotIn('--bar', args)
class ReplayStartupArgsTest(unittest.TestCase):
  """Test expected inputs for GetReplayArgs.

  The four "open backend" tests previously duplicated the same mock setup;
  it is hoisted into _MakeNetworkBackend (behavior unchanged).
  """
  def setUp(self):
    if six.PY3:
      # assertItemsEqual was renamed to assertCountEqual in Python 3.
      self.assertItemsEqual = self.assertCountEqual

  def _MakeNetworkBackend(self, use_live_traffic, remote_port):
    """Return a mock network backend that is open on the given port."""
    network_backend = mock.Mock()
    network_backend.is_open = True
    network_backend.use_live_traffic = use_live_traffic
    network_backend.forwarder.remote_port = remote_port
    return network_backend

  def testReplayOffGivesEmptyArgs(self):
    network_backend = mock.Mock()
    network_backend.is_open = False
    network_backend.forwarder = None
    self.assertEqual([], chrome_startup_args.GetReplayArgs(network_backend))

  def testReplayArgsBasic(self):
    network_backend = self._MakeNetworkBackend(False, 789)
    expected_args = [
        '--proxy-server=socks://127.0.0.1:789',
        '--proxy-bypass-list=<-loopback>',
        '--ignore-certificate-errors-spki-list='
        'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=']
    self.assertItemsEqual(
        expected_args,
        chrome_startup_args.GetReplayArgs(network_backend))

  def testReplayArgsNoSpkiSupport(self):
    network_backend = self._MakeNetworkBackend(False, 789)
    expected_args = [
        '--proxy-server=socks://127.0.0.1:789',
        '--proxy-bypass-list=<-loopback>',
        '--ignore-certificate-errors']
    self.assertItemsEqual(
        expected_args,
        chrome_startup_args.GetReplayArgs(network_backend, False))

  def testReplayArgsUseLiveTrafficWithSpkiSupport(self):
    network_backend = self._MakeNetworkBackend(True, 789)
    expected_args = [
        '--proxy-server=socks://127.0.0.1:789',
        '--proxy-bypass-list=<-loopback>']
    self.assertItemsEqual(
        expected_args,
        chrome_startup_args.GetReplayArgs(network_backend,
                                          supports_spki_list=True))

  def testReplayArgsUseLiveTrafficWithNoSpkiSupport(self):
    network_backend = self._MakeNetworkBackend(True, 123)
    expected_args = [
        '--proxy-server=socks://127.0.0.1:123',
        '--proxy-bypass-list=<-loopback>']
    self.assertItemsEqual(
        expected_args,
        chrome_startup_args.GetReplayArgs(network_backend,
                                          supports_spki_list=False))
|
{
"content_hash": "326b62a4e09d6f4719b989c9ce6d556b",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 80,
"avg_line_length": 34.19469026548673,
"alnum_prop": 0.6915113871635611,
"repo_name": "catapult-project/catapult",
"id": "ef0bac6f99b99f3b2066d77c16193eff5ee6ef50",
"size": "4027",
"binary": false,
"copies": "6",
"ref": "refs/heads/main",
"path": "telemetry/telemetry/internal/backends/chrome/chrome_startup_args_unittest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1324"
},
{
"name": "C++",
"bytes": "46069"
},
{
"name": "CSS",
"bytes": "23376"
},
{
"name": "Dockerfile",
"bytes": "1541"
},
{
"name": "Go",
"bytes": "114396"
},
{
"name": "HTML",
"bytes": "12394298"
},
{
"name": "JavaScript",
"bytes": "1559584"
},
{
"name": "Makefile",
"bytes": "1774"
},
{
"name": "Python",
"bytes": "6778695"
},
{
"name": "Shell",
"bytes": "2288"
}
],
"symlink_target": ""
}
|
from os.path import join, dirname, abspath
import yaml
CONFIG = join(dirname(abspath(__file__)), "config.yml")
with open(CONFIG, "r") as config:
config = yaml.load(config)
for database in config["databases"]:
print "DROP DATABASE IF EXISTS %s;" % database
print "DROP ROLE IF EXISTS %s;" % config["username"]
print "CREATE ROLE %s WITH ENCRYPTED PASSWORD '%s';" % (config["username"], config["password"])
for database in config["databases"]:
print "CREATE DATABASE %s OWNER %s;" % (database, config["username"])
|
{
"content_hash": "3bc0aeb51c1309ab7a3d7edd37b52bb2",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 95,
"avg_line_length": 28,
"alnum_prop": 0.6842105263157895,
"repo_name": "pyfarm/pyfarm-build",
"id": "fd8397891344e818648b9d3d8623693d11f2e935",
"size": "532",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "master/dbsetup/generate_postgres_sql.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "10196"
},
{
"name": "HTML",
"bytes": "3381"
},
{
"name": "Python",
"bytes": "29319"
},
{
"name": "Shell",
"bytes": "429"
}
],
"symlink_target": ""
}
|
from flask import Markup, url_for
from sqlalchemy import Column, Integer, Float, String, ForeignKey, Text
from sqlalchemy.orm import relationship
from flask.ext.appbuilder.models.mixins import ImageColumn
from flask.ext.appbuilder.filemanager import ImageManager
from flask.ext.appbuilder import Model
from flask_appbuilder.security.sqla.models import User
class ProductType(Model):
    """Lookup table of product categories."""
    id = Column(Integer, primary_key=True)
    name = Column(String(50), unique=True, nullable=False)
    def __repr__(self):
        return self.name
class Product(Model):
    """A sellable product: name, price, optional photo and a type reference."""
    id = Column(Integer, primary_key=True)
    name = Column(String(50), unique=True, nullable=False)
    price = Column(Float, nullable=False)
    photo = Column(ImageColumn)
    description = Column(Text())
    product_type_id = Column(Integer, ForeignKey('product_type.id'), nullable=False)
    product_type = relationship("ProductType")

    def photo_img(self):
        """HTML thumbnail linking to the public show view for this product."""
        im = ImageManager()
        show_url = url_for('ProductPubView.show', pk=str(self.id))
        if self.photo:
            html = ('<a href="' + show_url + '" class="thumbnail"><img src="' +
                    im.get_url(self.photo) +
                    '" alt="Photo" class="img-rounded img-responsive"></a>')
        else:
            # "//:0" is a blank image-source placeholder.
            html = ('<a href="' + show_url + '" class="thumbnail">'
                    '<img src="//:0" alt="Photo" class="img-responsive"></a>')
        return Markup(html)

    def price_label(self):
        """Price rendered as bold HTML."""
        return Markup('Price:<strong> {} </strong>'.format(self.price))

    def __repr__(self):
        return self.name
class Client(User):
    """Application user extended with one required, unique extra field."""
    extra = Column(String(50), unique=True, nullable=False)
|
{
"content_hash": "f254e7eca764b1cc3bde1f9f093f6f0e",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 145,
"avg_line_length": 36.702127659574465,
"alnum_prop": 0.6295652173913043,
"repo_name": "zhounanshu/Flask-AppBuilder",
"id": "1e942a8de3f58eedc1752c6ffd735412ada42f63",
"size": "1725",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "examples/productsale/app/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "47287"
},
{
"name": "HTML",
"bytes": "79464"
},
{
"name": "JavaScript",
"bytes": "159467"
},
{
"name": "Python",
"bytes": "434821"
},
{
"name": "Shell",
"bytes": "627"
}
],
"symlink_target": ""
}
|
from datetime import datetime
from utils.Dir import Dir
from utils.Text import Text
class DateDomainWriter(object):
    """Writes sequentially numbered text files under <base>/<MM_DD_YY>/<domain>/."""

    def __init__(self, domain, base_dump_directory='./raw_data'):
        self._str_now = datetime.now().strftime("%m_%d_%y")
        self._root_dir = '/'.join([base_dump_directory, self._str_now, domain]) + '/'
        self._file_counter = 1
        # Create the dump directory on first use.
        if not Dir.exists(self._root_dir):
            Dir.create(self._root_dir)

    def write(self, text, extension='.txt', delimiter=None):
        """Write *text* to the next numbered file, then advance the counter."""
        target = self._root_dir + str(self._file_counter) + extension
        Text.write(target, text, delimiter)
        self._file_counter += 1
|
{
"content_hash": "6d760b64341c521616f96060a3568aab",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 87,
"avg_line_length": 33.2,
"alnum_prop": 0.6144578313253012,
"repo_name": "DanielDeychakiwsky/abstraction-engine",
"id": "57fc1d8d6fee3f5fc31601dd44e87f53bff03875",
"size": "664",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/file_writers/DateDomainWriter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "10092"
}
],
"symlink_target": ""
}
|
import smbus
import time
import math
import RPi.GPIO as GPIO
import struct
import sys
debug =0
if sys.version_info<(3,0):
p_version=2
else:
p_version=3
rev = GPIO.RPI_REVISION
if rev == 2 or rev == 3:
bus = smbus.SMBus(1)
else:
bus = smbus.SMBus(0)
# I2C Address of Arduino
address = 0x04
# Command Format
# digitalRead() command format header
dRead_cmd = [1]
# digitalWrite() command format header
dWrite_cmd = [2]
# analogRead() command format header
aRead_cmd = [3]
# analogWrite() command format header
aWrite_cmd = [4]
# pinMode() command format header
pMode_cmd = [5]
# Ultrasonic read
uRead_cmd = [7]
# Get firmware version
version_cmd = [8]
# Accelerometer (+/- 1.5g) read
acc_xyz_cmd = [20]
# RTC get time
rtc_getTime_cmd = [30]
# DHT Pro sensor temperature
dht_temp_cmd = [40]
# Grove LED Bar commands
# Initialise
ledBarInit_cmd = [50]
# Set orientation
ledBarOrient_cmd = [51]
# Set level
ledBarLevel_cmd = [52]
# Set single LED
ledBarSetOne_cmd = [53]
# Toggle single LED
ledBarToggleOne_cmd = [54]
# Set all LEDs
ledBarSet_cmd = [55]
# Get current state
ledBarGet_cmd = [56]
# Grove 4 Digit Display commands
# Initialise
fourDigitInit_cmd = [70]
# Set brightness, not visible until next cmd
fourDigitBrightness_cmd = [71]
# Set numeric value without leading zeros
fourDigitValue_cmd = [72]
# Set numeric value with leading zeros
fourDigitValueZeros_cmd = [73]
# Set individual digit
fourDigitIndividualDigit_cmd = [74]
# Set individual leds of a segment
fourDigitIndividualLeds_cmd = [75]
# Set left and right values with colon
fourDigitScore_cmd = [76]
# Analog read for n seconds
fourDigitAnalogRead_cmd = [77]
# Entire display on
fourDigitAllOn_cmd = [78]
# Entire display off
fourDigitAllOff_cmd = [79]
# Grove Chainable RGB LED commands
# Store color for later use
storeColor_cmd = [90]
# Initialise
chainableRgbLedInit_cmd = [91]
# Initialise and test with a simple color
chainableRgbLedTest_cmd = [92]
# Set one or more leds to the stored color by pattern
chainableRgbLedSetPattern_cmd = [93]
# set one or more leds to the stored color by modulo
chainableRgbLedSetModulo_cmd = [94]
# sets leds similar to a bar graph, reversible
chainableRgbLedSetLevel_cmd = [95]
# Read the button from IR sensor
ir_read_cmd=[21]
# Set pin for the IR reciever
ir_recv_pin_cmd=[22]
dus_sensor_read_cmd=[10]
dust_sensor_en_cmd=[14]
dust_sensor_dis_cmd=[15]
encoder_read_cmd=[11]
encoder_en_cmd=[16]
encoder_dis_cmd=[17]
flow_read_cmd=[12]
flow_disable_cmd=[13]
flow_en_cmd=[18]
# This allows us to be more specific about which commands contain unused bytes
unused = 0
# Function declarations of the various functions used for encoding and sending
# data from RPi to Arduino
# Write I2C block
def write_i2c_block(address, block):
    """Write *block* to the I2C device at *address*; return -1 on bus error."""
    try:
        return bus.write_i2c_block_data(address, 1, block)
    except IOError:
        # Bus glitches are expected; report only in debug mode.
        if debug:
            print ("IOError")
        return -1
# Read I2C byte
def read_i2c_byte(address):
    """Read one byte from the I2C device at *address*; return -1 on bus error."""
    try:
        return bus.read_byte(address)
    except IOError:
        if debug:
            print ("IOError")
        return -1
# Read I2C block
def read_i2c_block(address):
    """Read a data block from the I2C device at *address*; -1 on bus error."""
    try:
        return bus.read_i2c_block_data(address, 1)
    except IOError:
        if debug:
            print ("IOError")
        return -1
# Arduino Digital Read
def digitalRead(pin):
    """Return the digital value of *pin* (or -1 on bus error)."""
    write_i2c_block(address, dRead_cmd + [pin, unused, unused])
    time.sleep(.1)
    return read_i2c_byte(address)
# Arduino Digital Write
def digitalWrite(pin, value):
    """Drive digital *pin* high (1) or low (0); always returns 1."""
    payload = dWrite_cmd + [pin, value, unused]
    write_i2c_block(address, payload)
    return 1
# Setting Up Pin mode on Arduino
def pinMode(pin, mode):
    """Configure *pin* as "OUTPUT" or "INPUT"; other values are ignored."""
    if mode == "OUTPUT":
        flag = 1
    elif mode == "INPUT":
        flag = 0
    else:
        return 1  # unknown mode: no command sent (matches original behavior)
    write_i2c_block(address, pMode_cmd + [pin, flag, unused])
    return 1
# Read analog value from Pin
def analogRead(pin):
    """Return the 10-bit reading (0-1023) of an Arduino analog pin."""
    # NOTE(review): talks to the bus directly with no IOError guard, unlike
    # the digital helpers -- a bus error here propagates to the caller.
    bus.write_i2c_block_data(address, 1, aRead_cmd + [pin, unused, unused])
    time.sleep(.1)
    bus.read_byte(address)  # discard the echo byte
    raw = bus.read_i2c_block_data(address, 1)
    time.sleep(.1)
    return raw[1] * 256 + raw[2]
# Write PWM
def analogWrite(pin, value):
    """Write a PWM *value* (0-255) to *pin*; always returns 1."""
    payload = aWrite_cmd + [pin, value, unused]
    write_i2c_block(address, payload)
    return 1
# Read temp in Celsius from Grove Temperature Sensor
def temp(pin, model = '1.0'):
    """Return degrees Celsius from a Grove temperature sensor on *pin*.

    Each sensor hardware revision uses a different thermistor B constant:
    v1.1/v1.2 -> 4250, anything else (v1.0) -> 3975.
    """
    b_by_model = {'1.2': 4250, '1.1': 4250}
    bValue = b_by_model.get(model, 3975)
    a = analogRead(pin)
    # Thermistor resistance from the ADC reading, then Steinhart-Hart.
    resistance = (1023.0 - a) * 10000 / a
    return 1.0 / (math.log(resistance / 10000) / bValue + 1 / 298.15) - 273.15
# Read value from Grove Ultrasonic
def ultrasonicRead(pin):
    """Return the distance measured by a Grove ultrasonic ranger on *pin*."""
    write_i2c_block(address, uRead_cmd + [pin, unused, unused])
    time.sleep(.2)
    read_i2c_byte(address)  # discard the echo byte
    data = read_i2c_block(address)
    return (data[1] * 256 + data[2])
# Read the firmware version
def version():
    """Return the GrovePi firmware version as a "major.minor.patch" string."""
    write_i2c_block(address, version_cmd + [unused, unused, unused])
    time.sleep(.1)
    read_i2c_byte(address)  # discard the echo byte
    data = read_i2c_block(address)
    return "%s.%s.%s" % (data[1], data[2], data[3])
# Read Grove Accelerometer (+/- 1.5g) XYZ value
def acc_xyz():
    """Read the Grove accelerometer (+/- 1.5g); returns an (x, y, z) tuple."""
    write_i2c_block(address, acc_xyz_cmd + [unused, unused, unused])
    time.sleep(.1)
    read_i2c_byte(address)
    number = read_i2c_block(address)
    # Raw bytes above 32 encode negative readings as (value - 224).
    for i in (1, 2, 3):
        if number[i] > 32:
            number[i] = - (number[i] - 224)
    return (number[1], number[2], number[3])
# Read from Grove RTC
def rtc_getTime():
    """Return the raw data block read from the Grove RTC."""
    write_i2c_block(address, rtc_getTime_cmd + [unused, unused, unused])
    time.sleep(.1)
    read_i2c_byte(address)  # discard the echo byte
    return read_i2c_block(address)
# Read and return temperature and humidity from Grove DHT Pro
def dht(pin, module_type):
    """Read a DHT sensor on *pin*; returns [temp_C, humidity] or -1 on failure."""
    write_i2c_block(address, dht_temp_cmd + [pin, module_type, unused])
    # Delay necessary for proper reading fron DHT sensor
    time.sleep(.6)
    try:
        read_i2c_byte(address)
        number = read_i2c_block(address)
        time.sleep(.1)
        if number == -1:
            return -1
    except (TypeError, IndexError):
        return -1
    # Temperature and humidity come back as two IEEE-754 floats, 4 bytes each
    # (bytes 1-4 and 5-8); Python 2 needs a str buffer, Python 3 a bytearray.
    if p_version == 2:
        t = round(struct.unpack('f', ''.join(chr(b) for b in number[1:5]))[0], 2)
        hum = round(struct.unpack('f', ''.join(chr(b) for b in number[5:9]))[0], 2)
    else:
        t = round(struct.unpack('f', bytearray(number[1:5]))[0], 2)
        hum = round(struct.unpack('f', bytearray(number[5:9]))[0], 2)
    return [t, hum]
# Grove LED Bar - initialise
# orientation: (0 = red to green, 1 = green to red)
def ledBar_init(pin, orientation):
    """Initialise a Grove LED bar (orientation: 0 red-to-green, 1 reversed)."""
    payload = ledBarInit_cmd + [pin, orientation, unused]
    write_i2c_block(address, payload)
    return 1
# Grove LED Bar - set orientation
# orientation: (0 = red to green, 1 = green to red)
def ledBar_orientation(pin, orientation):
    """Set LED bar orientation (0 red-to-green, 1 green-to-red)."""
    payload = ledBarOrient_cmd + [pin, orientation, unused]
    write_i2c_block(address, payload)
    return 1
# Grove LED Bar - set level
# level: (0-10)
def ledBar_setLevel(pin, level):
    """Light the first *level* LEDs (0-10) of the bar."""
    payload = ledBarLevel_cmd + [pin, level, unused]
    write_i2c_block(address, payload)
    return 1
# Grove LED Bar - set single led
# led: which led (1-10)
# state: off or on (0-1)
def ledBar_setLed(pin, led, state):
    """Set a single LED (1-10) of the bar to *state* (0 off, 1 on)."""
    payload = ledBarSetOne_cmd + [pin, led, state]
    write_i2c_block(address, payload)
    return 1
# Grove LED Bar - toggle single led
# led: which led (1-10)
def ledBar_toggleLed(pin, led):
    """Toggle a single LED (1-10) of the bar."""
    payload = ledBarToggleOne_cmd + [pin, led, unused]
    write_i2c_block(address, payload)
    return 1
# Grove LED Bar - set all leds
# state: (0-1023) or (0x00-0x3FF) or (0b0000000000-0b1111111111) or (int('0000000000',2)-int('1111111111',2))
def ledBar_setBits(pin, state):
    """Set all 10 LEDs at once from a 10-bit mask (one bit per LED)."""
    low = state & 255
    high = state >> 8
    write_i2c_block(address, ledBarSet_cmd + [pin, low, high])
    return 1
# Grove LED Bar - get current state
# state: (0-1023) a bit for each of the 10 LEDs
def ledBar_getBits(pin):
    """Return the LED bar state as a 10-bit mask (one bit per LED)."""
    write_i2c_block(address, ledBarGet_cmd + [pin, unused, unused])
    time.sleep(.2)
    # Consistency fix: read from the module-level `address` instead of the
    # hard-coded 0x04 used here (same value today, but a changed address
    # would have silently broken only this function).
    read_i2c_byte(address)
    block = read_i2c_block(address)
    return block[1] ^ (block[2] << 8)
# Grove 4 Digit Display - initialise
def fourDigit_init(pin):
    """Initialise the Grove 4-digit display on *pin*."""
    payload = fourDigitInit_cmd + [pin, unused, unused]
    write_i2c_block(address, payload)
    return 1
# Grove 4 Digit Display - set numeric value with or without leading zeros
# value: (0-65535) or (0000-FFFF)
def fourDigit_number(pin, value, leading_zero):
    """Display a numeric value (0-65535 / 0x0000-0xFFFF) on the 4-digit display."""
    # Low/high bytes so values up to 0xFFFF fit the 4-byte command limit.
    low = value & 255
    high = value >> 8
    # NOTE(review): per the command-table comments, cmd 72 is "without leading
    # zeros" and 73 "with", so this branch looks inverted -- confirm against
    # the firmware before changing; original behavior is preserved here.
    if leading_zero:
        write_i2c_block(address, fourDigitValue_cmd + [pin, low, high])
    else:
        write_i2c_block(address, fourDigitValueZeros_cmd + [pin, low, high])
    time.sleep(.05)
    return 1
# Grove 4 Digit Display - set brightness
# brightness: (0-7)
def fourDigit_brightness(pin, brightness):
    """Set display brightness (0-7); takes effect on the next display command."""
    payload = fourDigitBrightness_cmd + [pin, brightness, unused]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove 4 Digit Display - set individual segment (0-9,A-F)
# segment: (0-3)
# value: (0-15) or (0-F)
def fourDigit_digit(pin, segment, value):
    """Set one digit position (*segment* 0-3) to *value* (0-15, i.e. 0-F)."""
    payload = fourDigitIndividualDigit_cmd + [pin, segment, value]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove 4 Digit Display - set 7 individual leds of a segment
# segment: (0-3)
# leds: (0-255) or (0-0xFF) one bit per led, segment 2 is special, 8th bit is the colon
def fourDigit_segment(pin, segment, leds):
    """Set the 7 LEDs of digit *segment* (0-3) from a bitmask (segment 2's
    8th bit drives the colon)."""
    payload = fourDigitIndividualLeds_cmd + [pin, segment, leds]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove 4 Digit Display - set left and right values (0-99), with leading zeros and a colon
# left: (0-255) or (0-FF)
# right: (0-255) or (0-FF)
# colon will be lit
def fourDigit_score(pin, left, right):
    """Show *left* and *right* values with the colon lit (score display)."""
    payload = fourDigitScore_cmd + [pin, left, right]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove 4 Digit Display - display analogRead value for n seconds, 4 samples per second
# analog: analog pin to read
# duration: analog read for this many seconds
def fourDigit_monitor(pin, analog, duration):
    """Display analogRead(*analog*) on the display for *duration* seconds."""
    payload = fourDigitAnalogRead_cmd + [pin, analog, duration]
    write_i2c_block(address, payload)
    time.sleep(duration + .05)
    return 1
# Grove 4 Digit Display - turn entire display on (88:88)
def fourDigit_on(pin):
    """Turn the entire display on (shows 88:88)."""
    payload = fourDigitAllOn_cmd + [pin, unused, unused]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove 4 Digit Display - turn entire display off
def fourDigit_off(pin):
    """Turn the entire display off."""
    payload = fourDigitAllOff_cmd + [pin, unused, unused]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove Chainable RGB LED - store a color for later use
# red: 0-255
# green: 0-255
# blue: 0-255
def storeColor(red, green, blue):
    """Store an RGB color (each 0-255) for later chainable-LED commands."""
    payload = storeColor_cmd + [red, green, blue]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove Chainable RGB LED - initialise
# numLeds: how many leds do you have in the chain
def chainableRgbLed_init(pin, numLeds):
    """Initialise a chain of *numLeds* chainable RGB LEDs on *pin*."""
    payload = chainableRgbLedInit_cmd + [pin, numLeds, unused]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove Chainable RGB LED - initialise and test with a simple color
# numLeds: how many leds do you have in the chain
# testColor: (0-7) 3 bits in total - a bit for red, green and blue, eg. 0x04 == 0b100 (0bRGB) == rgb(255, 0, 0) == #FF0000 == red
# ie. 0 black, 1 blue, 2 green, 3 cyan, 4 red, 5 magenta, 6 yellow, 7 white
def chainableRgbLed_test(pin, numLeds, testColor):
    """Initialise the chain and show *testColor* (3-bit 0bRGB, 0-7)."""
    payload = chainableRgbLedTest_cmd + [pin, numLeds, testColor]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove Chainable RGB LED - set one or more leds to the stored color by pattern
# pattern: (0-3) 0 = this led only, 1 all leds except this led, 2 this led and all leds inwards, 3 this led and all leds outwards
# whichLed: index of led you wish to set counting outwards from the GrovePi, 0 = led closest to the GrovePi
def chainableRgbLed_pattern(pin, pattern, whichLed):
    """Apply the stored color by pattern (0 this LED, 1 all others,
    2 this+inwards, 3 this+outwards) relative to *whichLed*."""
    payload = chainableRgbLedSetPattern_cmd + [pin, pattern, whichLed]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove Chainable RGB LED - set one or more leds to the stored color by modulo
# offset: index of led you wish to start at, 0 = led closest to the GrovePi, counting outwards
# divisor: when 1 (default) sets stored color on all leds >= offset, when 2 sets every 2nd led >= offset and so on
def chainableRgbLed_modulo(pin, offset, divisor):
    """Apply the stored color to every *divisor*-th LED starting at *offset*."""
    payload = chainableRgbLedSetModulo_cmd + [pin, offset, divisor]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove Chainable RGB LED - sets leds similar to a bar graph, reversible
# level: (0-10) the number of leds you wish to set to the stored color
# reversible (0-1) when 0 counting outwards from GrovePi, 0 = led closest to the GrovePi, otherwise counting inwards
def chainableRgbLed_setLevel(pin, level, reverse):
    """Light *level* (0-10) LEDs bar-graph style; *reverse* flips direction."""
    payload = chainableRgbLedSetLevel_cmd + [pin, level, reverse]
    write_i2c_block(address, payload)
    time.sleep(.05)
    return 1
# Grove - Infrared Receiver- get the commands received from the Grove IR sensor
def ir_read_signal():
    """Return the 21-byte IR command block, or [-1]*21 when nothing valid."""
    try:
        write_i2c_block(address, ir_read_cmd + [unused, unused, unused])
        time.sleep(.1)
        data_back = bus.read_i2c_block_data(address, 1)[0:21]
        # Byte 1 == 255 marks "no new data".
        if data_back[1] == 255:
            return [-1] * 21
        return data_back
    except IOError:
        return [-1] * 21
# Grove - Infrared Receiver- set the pin on which the Grove IR sensor is connected
def ir_recv_pin(pin):
    """Tell the firmware which pin the Grove IR receiver is connected to."""
    payload = ir_recv_pin_cmd + [pin, unused, unused]
    write_i2c_block(address, payload)
def dust_sensor_en():
    """Enable dust-sensor sampling in the firmware."""
    write_i2c_block(address, dust_sensor_en_cmd + [unused, unused, unused])
    time.sleep(.2)
def dust_sensor_dis():
    """Disable dust-sensor sampling in the firmware."""
    write_i2c_block(address, dust_sensor_dis_cmd + [unused, unused, unused])
    time.sleep(.2)
def dustSensorRead():
    """Return [new_data_flag, low_pulse_occupancy] from the dust sensor.

    Returns [-1, -1] when the firmware reports no data (flag byte 255).
    Fix: removed the unreachable Python-2 statement ``print data_back``
    after the return (it made the whole module a SyntaxError on Python 3,
    defeating the file's own py2/py3 support), and dropped the stale
    commented-out read path.
    """
    write_i2c_block(address, dus_sensor_read_cmd + [unused, unused, unused])
    time.sleep(.2)
    data_back = bus.read_i2c_block_data(address, 1)[0:4]
    if data_back[0] != 255:
        # 24-bit low-pulse-occupancy, little-endian in bytes 1..3.
        lowpulseoccupancy = (data_back[3]*256*256 + data_back[2]*256 + data_back[1])
        return [data_back[0], lowpulseoccupancy]
    else:
        return [-1, -1]
def encoder_en():
    """Enable rotary-encoder sampling in the firmware."""
    write_i2c_block(address, encoder_en_cmd + [unused, unused, unused])
    time.sleep(.2)
def encoder_dis():
    """Disable rotary-encoder sampling in the firmware."""
    write_i2c_block(address, encoder_dis_cmd + [unused, unused, unused])
    time.sleep(.2)
def encoderRead():
    """Return [new_data_flag, position] from the encoder, or [-1, -1]."""
    write_i2c_block(address, encoder_read_cmd + [unused, unused, unused])
    time.sleep(.2)
    data = bus.read_i2c_block_data(address, 1)[0:2]
    # Flag byte 255 marks "no new data".
    if data[0] == 255:
        return [-1, -1]
    return [data[0], data[1]]
def flowDisable():
    """Disable flow-sensor sampling in the firmware."""
    write_i2c_block(address, flow_disable_cmd + [unused, unused, unused])
    time.sleep(.2)
def flowEnable():
    """Enable flow-sensor sampling in the firmware."""
    write_i2c_block(address, flow_en_cmd + [unused, unused, unused])
    time.sleep(.2)
def flowRead():
    """Return [new_data_flag, flow_count] from the flow sensor, or [-1, -1]."""
    write_i2c_block(address, flow_read_cmd + [unused, unused, unused])
    time.sleep(.2)
    data = bus.read_i2c_block_data(address, 1)[0:3]
    # Flag byte 255 marks "no new data".
    if data[0] == 255:
        return [-1, -1]
    # 16-bit count, little-endian in bytes 1..2.
    return [data[0], data[2] * 256 + data[1]]
|
{
"content_hash": "716cf26a2e82cb67d73069d9a77e12ce",
"timestamp": "",
"source": "github",
"line_count": 523,
"max_line_length": 129,
"avg_line_length": 28.90248565965583,
"alnum_prop": 0.7083223074887537,
"repo_name": "royveshovda/pifog",
"id": "c6d3feb9fb25d3445066b9d328392780378d0e11",
"size": "15138",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "source/piclient/sensorpi/grovepi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "570"
},
{
"name": "Elixir",
"bytes": "19501"
},
{
"name": "HTML",
"bytes": "3147"
},
{
"name": "JavaScript",
"bytes": "3677266"
},
{
"name": "Python",
"bytes": "63772"
},
{
"name": "Shell",
"bytes": "872"
}
],
"symlink_target": ""
}
|
from operator import mul
import numpy as np
from numpy.random import randint
from numpy.lib import Arrayterator
def test():
    """Exercise ``Arrayterator`` on a seeded random array.

    Checks three properties: every iterated block holds at most
    ``buf_size`` elements, flat iteration visits every element in order,
    and slicing the arrayterator matches slicing the underlying array.
    """
    # `reduce` was a builtin on Python 2 but lives in functools on
    # Python 3; without this import the original code raised NameError.
    from functools import reduce

    np.random.seed(np.arange(10))

    # Create a random array
    ndims = randint(5)+1
    shape = tuple(randint(10)+1 for dim in range(ndims))
    els = reduce(mul, shape)
    a = np.arange(els)
    a.shape = shape

    buf_size = randint(2*els)
    b = Arrayterator(a, buf_size)

    # Check that each block has at most ``buf_size`` elements
    # (``buf_size or els`` handles a randomly drawn buf_size of 0).
    for block in b:
        assert len(block.flat) <= (buf_size or els)

    # Check that all elements are iterated correctly
    assert list(b.flat) == list(a.flat)

    # Slice arrayterator
    start = [randint(dim) for dim in shape]
    stop = [randint(dim)+1 for dim in shape]
    step = [randint(dim)+1 for dim in shape]
    slice_ = tuple(slice(*t) for t in zip(start, stop, step))
    c = b[slice_]
    d = a[slice_]

    # Check that each block has at most ``buf_size`` elements
    for block in c:
        assert len(block.flat) <= (buf_size or els)

    # Check that the arrayterator is sliced correctly
    assert np.all(c.__array__() == d)

    # Check that all elements are iterated correctly
    assert list(c.flat) == list(d.flat)
|
{
"content_hash": "7e05274170242488d2729b57dcf81c14",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 61,
"avg_line_length": 28.302325581395348,
"alnum_prop": 0.6392769104354972,
"repo_name": "efiring/numpy-work",
"id": "4215696516c8ce44b0c90a95086c97dd1c82f826",
"size": "1217",
"binary": false,
"copies": "3",
"ref": "refs/heads/cfastma",
"path": "numpy/lib/tests/test_arrayterator.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4483665"
},
{
"name": "C++",
"bytes": "22396"
},
{
"name": "FORTRAN",
"bytes": "8946"
},
{
"name": "Python",
"bytes": "3516998"
},
{
"name": "Shell",
"bytes": "2035"
}
],
"symlink_target": ""
}
|
import werkzeug.serving
import gevent.monkey
gevent.monkey.patch_all()
@werkzeug.serving.run_with_reloader
def runServer():
    """Serve the bakery app over SocketIO on 0.0.0.0:5000 (blocks forever).

    NOTE(review): `run_with_reloader` executes the decorated function as a
    side effect when the decorator is applied, i.e. at import time — the
    __main__ guard below re-enters the same machinery; confirm intended.
    """
    from bakery.app import app, register_blueprints
    import os
    from werkzeug.wsgi import SharedDataMiddleware
    register_blueprints(app)
    # Rebind `app` to a wrapper that serves /static straight from disk.
    app = SharedDataMiddleware(app, {
        '/static': os.path.join(os.path.dirname(__file__), 'static')
    })
    from socketio.server import SocketIOServer
    # Websocket transport with xhr-polling fallback; no Flash policy server.
    SocketIOServer(('0.0.0.0', 5000), app,
        resource="socket.io", policy_server=False,
        transports=['websocket', 'xhr-polling']).serve_forever()
if __name__ == '__main__':
    runServer()
|
{
"content_hash": "6c50a32d366d35ca0794560c30917274",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 75,
"avg_line_length": 31.333333333333332,
"alnum_prop": 0.6580547112462006,
"repo_name": "vitalyvolkov/fontbakery",
"id": "4e79b3e1923128168a292bdf6173faac35531aa8",
"size": "1360",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "entry.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "8114"
},
{
"name": "JavaScript",
"bytes": "6468"
},
{
"name": "Python",
"bytes": "312438"
},
{
"name": "Shell",
"bytes": "234"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals, division, absolute_import, print_function
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import codecs
import collections
import contextlib
import logging
import logging.handlers
import sys
import threading
import uuid
import warnings
import os
from flexget import __version__
from flexget.utils.tools import io_encoding
# A level more detailed than DEBUG; registered with logging in initialize()
TRACE = 5
# A level more detailed than INFO; registered with logging in initialize()
VERBOSE = 15
# environment variables to modify rotating log parameters from defaults of 1 MB and 9 files
ENV_MAXBYTES = 'FLEXGET_LOG_MAXBYTES'
ENV_MAXCOUNT = 'FLEXGET_LOG_MAXCOUNT'
# Stores `task`, logging `session_id`, and redirected `output` stream in a thread local context
local_context = threading.local()
def get_level_no(level):
    """Return the numeric logging level for *level*.

    Accepts an int (returned unchanged) or a level name, case-insensitive,
    including the FlexGet-specific TRACE and VERBOSE levels.
    """
    if isinstance(level, int):
        return level
    # getLevelName is unreliable for this: in Python 3.4.0 it returns a string.
    name = level.upper()
    if name == 'TRACE':
        return TRACE
    if name == 'VERBOSE':
        return VERBOSE
    return getattr(logging, name)
@contextlib.contextmanager
def task_logging(task):
    """Context manager which adds task information to log messages."""
    previous_task = getattr(local_context, 'task', '')
    local_context.task = task
    try:
        yield
    finally:
        # Restore whatever task name was active before entering.
        local_context.task = previous_task
class SessionFilter(logging.Filter):
    """Pass only log records tagged with a specific capture session id."""

    def __init__(self, session_id):
        self.session_id = session_id

    def filter(self, record):
        # Records created outside a capture session lack the attribute.
        record_session = getattr(record, 'session_id', None)
        return record_session == self.session_id
@contextlib.contextmanager
def capture_output(stream, loglevel=None):
    """Context manager which captures all log and console output to given `stream` while in scope.

    Installs a root-logger StreamHandler filtered to this session's id,
    records the stream/loglevel in `local_context`, and restores all
    previous state (handler, root level, session id) on exit.
    """
    root_logger = logging.getLogger()
    old_level = root_logger.getEffectiveLevel()
    old_id = getattr(local_context, 'session_id', None)
    # Keep using current, or create one if none already set
    local_context.session_id = old_id or uuid.uuid4()
    old_output = getattr(local_context, 'output', None)
    old_loglevel = getattr(local_context, 'loglevel', None)
    streamhandler = logging.StreamHandler(stream)
    streamhandler.setFormatter(FlexGetFormatter())
    # Only records emitted under this session id reach `stream`.
    streamhandler.addFilter(SessionFilter(local_context.session_id))
    if loglevel is not None:
        loglevel = get_level_no(loglevel)
        streamhandler.setLevel(loglevel)
        # If requested loglevel is lower than the root logger is filtering for, we need to turn it down.
        # All existing handlers should have their desired level set and not be affected.
        if not root_logger.isEnabledFor(loglevel):
            root_logger.setLevel(loglevel)
    local_context.output = stream
    local_context.loglevel = loglevel
    root_logger.addHandler(streamhandler)
    try:
        yield
    finally:
        # Undo everything in reverse, even if the body raised.
        root_logger.removeHandler(streamhandler)
        root_logger.setLevel(old_level)
        local_context.session_id = old_id
        local_context.output = old_output
        local_context.loglevel = old_loglevel
def get_capture_stream():
    """Return the stream currently receiving redirected output, or None."""
    try:
        return local_context.output
    except AttributeError:
        return None
def get_capture_loglevel():
    """Return the loglevel declared for the current capture stream, or None."""
    try:
        return local_context.loglevel
    except AttributeError:
        return None
class RollingBuffer(collections.deque):
    """File-like that keeps a certain number of lines of text in memory."""
    def write(self, line):
        # deque's maxlen discards the oldest line once the buffer is full,
        # so this behaves as a rolling tail when used as a log stream.
        self.append(line)
class FlexGetLogger(logging.Logger):
    """Custom logger that adds trace and verbose logging methods, and contextual information to log records."""
    def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func, extra, *exargs):
        extra = extra or {}
        # Tag every record with the thread-local task name and capture
        # session id so SessionFilter/FlexGetFormatter can use them.
        extra.update(
            task=getattr(local_context, 'task', ''),
            session_id=getattr(local_context, 'session_id', ''))
        # Replace newlines in log messages with a literal backslash-n so
        # each record stays on a single log line.
        if isinstance(msg, str):
            msg = msg.replace('\n', '\\n')
        return logging.Logger.makeRecord(self, name, level, fn, lno, msg, args, exc_info, func, extra, *exargs)
    def trace(self, msg, *args, **kwargs):
        """Log at TRACE level (more detailed than DEBUG)."""
        self.log(TRACE, msg, *args, **kwargs)
    def verbose(self, msg, *args, **kwargs):
        """Log at VERBOSE level (displayed when FlexGet is run interactively.)"""
        self.log(VERBOSE, msg, *args, **kwargs)
class FlexGetFormatter(logging.Formatter):
    """Formatter that tolerates records made by plain loggers as well as
    FlexGetLogger (which injects a `task` attribute into each record)."""
    flexget_fmt = '%(asctime)-15s %(levelname)-8s %(name)-13s %(task)-15s %(message)s'
    def __init__(self):
        logging.Formatter.__init__(self, self.flexget_fmt, '%Y-%m-%d %H:%M')
    def format(self, record):
        # Records from non-FlexGet loggers lack `task`; default it so the
        # %(task)-15s field in the format string never raises.
        record.task = getattr(record, 'task', '')
        return logging.Formatter.format(self, record)
# True once initialize() has run (it is idempotent via this guard).
_logging_configured = False
# BufferingHandler that holds records between initialize() and start().
_buff_handler = None
# True once start() has attached the real file/console handlers.
_logging_started = False
# Stores the last 50 debug messages
debug_buffer = RollingBuffer(maxlen=50)
def initialize(unit_test=False):
    """Prepare logging.

    Registers the TRACE/VERBOSE level names and buffers all records in
    memory until start() attaches the real handlers. Idempotent.
    """
    global _logging_configured, _logging_started, _buff_handler

    if _logging_configured:
        return

    if 'dev' in __version__:
        warnings.filterwarnings('always', category=DeprecationWarning, module='flexget.*')
        warnings.simplefilter('once', append=True)
    logging.addLevelName(TRACE, 'TRACE')
    logging.addLevelName(VERBOSE, 'VERBOSE')
    _logging_configured = True

    # with unit test we want pytest to add the handlers
    if unit_test:
        _logging_started = True
        return

    # Store any log messages in a buffer until the `start` function is run
    logger = logging.getLogger()
    _buff_handler = logging.handlers.BufferingHandler(1000 * 1000)
    logger.addHandler(_buff_handler)
    logger.setLevel(logging.NOTSET)

    # Add a handler that stores the last 50 debug lines to `debug_buffer` for use in crash reports
    crash_handler = logging.StreamHandler(debug_buffer)
    crash_handler.setLevel(logging.DEBUG)
    crash_handler.setFormatter(FlexGetFormatter())
    logger.addHandler(crash_handler)
def start(filename=None, level=logging.INFO, to_console=True, to_file=True):
    """After initialization, start file logging.

    Attaches a rotating file handler (size/count tunable via the
    ENV_MAXBYTES/ENV_MAXCOUNT environment variables) and/or a console
    handler, then replays the records buffered by initialize(). Must be
    called after initialize(); idempotent.
    """
    global _logging_started

    assert _logging_configured
    if _logging_started:
        return

    # root logger
    logger = logging.getLogger()
    level = get_level_no(level)
    logger.setLevel(level)

    formatter = FlexGetFormatter()
    if to_file:
        file_handler = logging.handlers.RotatingFileHandler(filename,
                                                            maxBytes=int(os.environ.get(ENV_MAXBYTES, 1000 * 1024)),
                                                            backupCount=int(os.environ.get(ENV_MAXCOUNT, 9)))
        file_handler.setFormatter(formatter)
        file_handler.setLevel(level)
        logger.addHandler(file_handler)

    # without --cron we log to console
    if to_console:
        # Make sure we don't send any characters that the current terminal doesn't support printing
        stdout = sys.stdout
        if hasattr(stdout, 'buffer'):
            # On python 3, we need to get the buffer directly to support writing bytes
            stdout = stdout.buffer
        safe_stdout = codecs.getwriter(io_encoding)(stdout, 'replace')
        console_handler = logging.StreamHandler(safe_stdout)
        console_handler.setFormatter(formatter)
        console_handler.setLevel(level)
        logger.addHandler(console_handler)

    # flush what we have stored from the plugin initialization
    logger.removeHandler(_buff_handler)
    if _buff_handler:
        for record in _buff_handler.buffer:
            # Replay only records the now-configured logger would accept.
            if logger.isEnabledFor(record.levelno):
                logger.handle(record)
        _buff_handler.flush()
    _logging_started = True
# Set our custom logger class as default, so that every logging.getLogger()
# call anywhere in the process returns a FlexGetLogger (runs at import time).
logging.setLoggerClass(FlexGetLogger)
|
{
"content_hash": "3787cb23de097dd2b97f239587001a03",
"timestamp": "",
"source": "github",
"line_count": 236,
"max_line_length": 116,
"avg_line_length": 34.771186440677965,
"alnum_prop": 0.6715817694369973,
"repo_name": "OmgOhnoes/Flexget",
"id": "be164c090aabe10bb9ae4a1e64c468c094fb7549",
"size": "8206",
"binary": false,
"copies": "4",
"ref": "refs/heads/develop",
"path": "flexget/logger.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11875"
},
{
"name": "HTML",
"bytes": "79376"
},
{
"name": "JavaScript",
"bytes": "263723"
},
{
"name": "Python",
"bytes": "3324701"
},
{
"name": "SRecode Template",
"bytes": "3"
}
],
"symlink_target": ""
}
|
from cosmo_tester.framework.examples import get_example_deployment
from cosmo_tester.framework.util import get_resource_path
from . import validate_agent
def test_3_2_agent_install(image_based_manager, ssh_key, logger, test_config):
    """Check agent install over ssh with the 3.2 node types / 1.2 DSL."""
    deployment = get_example_deployment(
        image_based_manager, ssh_key, logger, 'agent_install_3_2',
        test_config, upload_plugin=False)
    deployment.blueprint_file = get_resource_path(
        'blueprints/compute/example_3_2.yaml')
    deployment.upload_and_verify_install(skip_plugins_validation=True)
    validate_agent(image_based_manager, deployment, test_config)
    deployment.uninstall()
|
{
"content_hash": "a8dbd5740450d70a77da5baa27e5202a",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 78,
"avg_line_length": 45.11764705882353,
"alnum_prop": 0.6792698826597132,
"repo_name": "cloudify-cosmo/cloudify-system-tests",
"id": "29c0af2046cc7d3bb7a5f9a4fd8ef25e1d8b83ee",
"size": "767",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cosmo_tester/test_suites/agent/test_back_compat.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "487590"
},
{
"name": "Shell",
"bytes": "117"
}
],
"symlink_target": ""
}
|
import numpy as np
from math import sqrt
# Fibonacci version 1 in exponential time.
# The running time of fib1(n) is proportional to 2^(0.69n) ~ (1.6)^n, so it takes 1.6 times longer to compute Fn+1 than Fn.
def fib1(n):
    """Naive doubly-recursive Fibonacci; exponential time (~1.6^n calls)."""
    if n == 0:
        return 0
    if n == 1:
        return 1
    return fib1(n - 1) + fib1(n - 2)
# Fibonacci version 2 in linear time (Polynomial)
# Why is fib1(n) so slow? because many computations are repeated during the recursion. A more sensible scheme would store the intermediate results, the value of F0.....Fn-1 to an array as soon as they are know so that they don't have to be recomputed during each recursion call.
def fib2(n):
    """Fibonacci via a memo table -- linear time.

    Uses a plain Python list instead of the original np.zeros(..., dtype=np.int):
    the `np.int` alias was removed from NumPy (1.24+) so the original raises
    AttributeError there, and Python's arbitrary-precision ints also avoid
    the int64 overflow the numpy array would hit past n = 92.
    """
    if n == 0: return 0
    f = [0] * (n + 1)  # table holding F0 .. Fn
    f[1] = 1
    for i in range(2, n + 1):  # fill f2 .. fn from the two previous entries
        f[i] = f[i - 1] + f[i - 2]
    return f[n]
# Fibonacci version 3 in constant time.
# Of course, the fastest is using Binet's Fibonacci Number Formula: http://mathworld.wolfram.com/BinetsFibonacciNumberFormula.html
def fib3(n):
    """Constant-time Fibonacci via Binet's closed-form formula (float result)."""
    root5 = sqrt(5)
    golden = 1 + root5
    conjugate = 1 - root5
    return (golden ** n - conjugate ** n) / (2 ** n * root5)
# Tests
# Parenthesized single-argument print is valid in both Python 2 (as an
# expression statement) and Python 3; the bare `print "..."` statements
# this replaces are a SyntaxError on Python 3.
print("fib1(10) = " + str(fib1(10)))
print("fib2(10) = " + str(fib2(10)))
print("fib3(10) = " + str(fib3(10)))
|
{
"content_hash": "ca3a1fdd0eb97a38bc2bdffdfcac071a",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 278,
"avg_line_length": 38.39393939393939,
"alnum_prop": 0.664561957379637,
"repo_name": "AlexOuyang/CSE-20",
"id": "1b85ed7d8c2cc7fb5d2d333dd5de3aa30aee10ef",
"size": "1267",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Python/Fibonacci.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "2725"
}
],
"symlink_target": ""
}
|
import logging
import subprocess
import sys
from rejviz import nic
from rejviz import tmp
# Module-level logging setup: INFO and above for the whole process.
logging.basicConfig(level=logging.INFO)
LOG = logging.getLogger(__name__)
def main():
    """Entry point: preprocess the CLI args in a temp dir, run virt-builder,
    and always clean the temp dir up afterwards."""
    # Create the temp dir *before* entering try/finally: if create_dir()
    # itself fails there is nothing to clean up, whereas the original code
    # hit a NameError on `tmp_dir` in the finally clause in that case.
    tmp_dir = tmp.create_dir()
    LOG.debug('Created tmp directory %s', tmp_dir)
    try:
        virt_builder_args = _process_args(sys.argv[1:], tmp_dir)
        _run_virt_builder(virt_builder_args)
    finally:
        tmp.remove_dir(tmp_dir)
        LOG.debug('Removed tmp directory %s', tmp_dir)
def _process_args(args, tmp_dir):
    """Run the nic module's argument processing over *args*, using *tmp_dir*
    for any files it needs to generate."""
    return nic.process_args(args, tmp_dir)
def _run_virt_builder(args):
    """Invoke the virt-builder CLI with *args* appended, waiting for it to exit."""
    command_line = ["virt-builder"] + args
    # Lazy %-style logging args: the join/format is skipped if INFO is disabled
    # (the original eagerly formatted the message with the % operator).
    LOG.info("Calling virt-builder: %s", " ".join(command_line))
    subprocess.call(command_line)
|
{
"content_hash": "75694e933ef46e7e6adb12b4d279e376",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 65,
"avg_line_length": 24.03125,
"alnum_prop": 0.6605981794538361,
"repo_name": "jistr/rejviz",
"id": "a09808b79e4da1b7e9a8b49d8151a839826b16aa",
"size": "1315",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rejviz/cmd/builder.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "44536"
}
],
"symlink_target": ""
}
|
# This is only needed for Python v2 but is harmless for Python v3.
#import sip
#sip.setapi('QString', 2)
import math
from PySide import QtCore, QtGui
import diagramscene_rc
class Arrow(QtGui.QGraphicsLineItem):
    """Line item connecting two DiagramItems, drawn with an arrow head at
    the end-item side. The endpoints track the items via updatePosition()."""
    def __init__(self, startItem, endItem, parent=None, scene=None):
        super(Arrow, self).__init__(parent, scene)
        self.arrowHead = QtGui.QPolygonF()
        self.myStartItem = startItem
        self.myEndItem = endItem
        self.setFlag(QtGui.QGraphicsItem.ItemIsSelectable, True)
        self.myColor = QtCore.Qt.black
        self.setPen(QtGui.QPen(self.myColor, 2, QtCore.Qt.SolidLine,
                QtCore.Qt.RoundCap, QtCore.Qt.RoundJoin))
    def setColor(self, color):
        # Takes effect on the next paint(), which rebuilds the pen/brush.
        self.myColor = color
    def startItem(self):
        return self.myStartItem
    def endItem(self):
        return self.myEndItem
    def boundingRect(self):
        # Pad the line's rect so the wide pen and arrow head repaint cleanly.
        extra = (self.pen().width() + 20) / 2.0
        p1 = self.line().p1()
        p2 = self.line().p2()
        return QtCore.QRectF(p1, QtCore.QSizeF(p2.x() - p1.x(), p2.y() - p1.y())).normalized().adjusted(-extra, -extra, extra, extra)
    def shape(self):
        # Include the arrow head so clicks on it select the arrow too.
        path = super(Arrow, self).shape()
        path.addPolygon(self.arrowHead)
        return path
    def updatePosition(self):
        # Re-anchor the line endpoints to the items' current origins.
        line = QtCore.QLineF(self.mapFromItem(self.myStartItem, 0, 0), self.mapFromItem(self.myEndItem, 0, 0))
        self.setLine(line)
    def paint(self, painter, option, widget=None):
        # Nothing sensible to draw while the two items overlap.
        if (self.myStartItem.collidesWithItem(self.myEndItem)):
            return
        myStartItem = self.myStartItem
        myEndItem = self.myEndItem
        myColor = self.myColor
        myPen = self.pen()
        myPen.setColor(self.myColor)
        arrowSize = 20.0
        painter.setPen(myPen)
        painter.setBrush(self.myColor)
        # Find where the center-to-center line crosses the end item's
        # polygon edge, so the arrow head sits on the item's border.
        centerLine = QtCore.QLineF(myStartItem.pos(), myEndItem.pos())
        endPolygon = myEndItem.polygon()
        p1 = endPolygon.at(0) + myEndItem.pos()
        intersectPoint = QtCore.QPointF()
        for i in endPolygon:
            p2 = i + myEndItem.pos()
            polyLine = QtCore.QLineF(p1, p2)
            intersectType, intersectPoint = polyLine.intersect(centerLine)
            if intersectType == QtCore.QLineF.BoundedIntersection:
                break
            p1 = p2
        self.setLine(QtCore.QLineF(intersectPoint, myStartItem.pos()))
        line = self.line()
        # Angle of the line (radians); mirror below the x-axis.
        angle = math.acos(line.dx() / line.length())
        if line.dy() >= 0:
            angle = (math.pi * 2.0) - angle
        # Two points 60 degrees either side of the line form the head.
        arrowP1 = line.p1() + QtCore.QPointF(math.sin(angle + math.pi / 3.0) * arrowSize,
                                        math.cos(angle + math.pi / 3) * arrowSize)
        arrowP2 = line.p1() + QtCore.QPointF(math.sin(angle + math.pi - math.pi / 3.0) * arrowSize,
                                        math.cos(angle + math.pi - math.pi / 3.0) * arrowSize)
        self.arrowHead.clear()
        for point in [line.p1(), arrowP1, arrowP2]:
            self.arrowHead.append(point)
        painter.drawLine(line)
        painter.drawPolygon(self.arrowHead)
        if self.isSelected():
            # Selection feedback: dashed parallels 4px either side of the line.
            painter.setPen(QtGui.QPen(myColor, 1, QtCore.Qt.DashLine))
            myLine = QtCore.QLineF(line)
            myLine.translate(0, 4.0)
            painter.drawLine(myLine)
            myLine.translate(0,-8.0)
            painter.drawLine(myLine)
class DiagramTextItem(QtGui.QGraphicsTextItem):
    """Movable, in-place-editable text item; re-emits focus loss and
    selection changes as signals for DiagramScene/MainWindow to react to."""
    lostFocus = QtCore.Signal(QtGui.QGraphicsTextItem)
    selectedChange = QtCore.Signal(QtGui.QGraphicsItem)
    def __init__(self, parent=None, scene=None):
        super(DiagramTextItem, self).__init__(parent, scene)
        self.setFlag(QtGui.QGraphicsItem.ItemIsMovable)
        self.setFlag(QtGui.QGraphicsItem.ItemIsSelectable)
    def itemChange(self, change, value):
        if change == QtGui.QGraphicsItem.ItemSelectedChange:
            self.selectedChange.emit(self)
        return value
    def focusOutEvent(self, event):
        # Leaving edit mode: make the text read-only again and notify listeners
        # (the scene deletes the item if it was left empty).
        self.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
        self.lostFocus.emit(self)
        super(DiagramTextItem, self).focusOutEvent(event)
    def mouseDoubleClickEvent(self, event):
        # Double-click switches the item into in-place edit mode.
        if self.textInteractionFlags() == QtCore.Qt.NoTextInteraction:
            self.setTextInteractionFlags(QtCore.Qt.TextEditorInteraction)
        super(DiagramTextItem, self).mouseDoubleClickEvent(event)
class DiagramItem(QtGui.QGraphicsPolygonItem):
    """Flowchart shape (step/conditional/start-end/io) that tracks the
    arrows attached to it and keeps them positioned as it moves."""
    Step, Conditional, StartEnd, Io = range(4)
    def __init__(self, diagramType, contextMenu, parent=None, scene=None):
        super(DiagramItem, self).__init__(parent, scene)
        self.arrows = []
        self.diagramType = diagramType
        self.myContextMenu = contextMenu
        path = QtGui.QPainterPath()
        if self.diagramType == self.StartEnd:
            # Rounded "terminator" shape built from four arcs.
            path.moveTo(200, 50)
            path.arcTo(150, 0, 50, 50, 0, 90)
            path.arcTo(50, 0, 50, 50, 90, 90)
            path.arcTo(50, 50, 50, 50, 180, 90)
            path.arcTo(150, 50, 50, 50, 270, 90)
            path.lineTo(200, 25)
            self.myPolygon = path.toFillPolygon()
        elif self.diagramType == self.Conditional:
            # Diamond.
            self.myPolygon = QtGui.QPolygonF([
                    QtCore.QPointF(-100, 0), QtCore.QPointF(0, 100),
                    QtCore.QPointF(100, 0), QtCore.QPointF(0, -100),
                    QtCore.QPointF(-100, 0)])
        elif self.diagramType == self.Step:
            # Square.
            self.myPolygon = QtGui.QPolygonF([
                    QtCore.QPointF(-100, -100), QtCore.QPointF(100, -100),
                    QtCore.QPointF(100, 100), QtCore.QPointF(-100, 100),
                    QtCore.QPointF(-100, -100)])
        else:
            # Io: parallelogram.
            self.myPolygon = QtGui.QPolygonF([
                    QtCore.QPointF(-120, -80), QtCore.QPointF(-70, 80),
                    QtCore.QPointF(120, 80), QtCore.QPointF(70, -80),
                    QtCore.QPointF(-120, -80)])
        self.setPolygon(self.myPolygon)
        self.setFlag(QtGui.QGraphicsItem.ItemIsMovable, True)
        self.setFlag(QtGui.QGraphicsItem.ItemIsSelectable, True)
    def removeArrow(self, arrow):
        try:
            self.arrows.remove(arrow)
        except ValueError:
            # Arrow was already detached; nothing to do.
            pass
    def removeArrows(self):
        # Iterate over a copy: removeArrow() mutates self.arrows.
        for arrow in self.arrows[:]:
            arrow.startItem().removeArrow(arrow)
            arrow.endItem().removeArrow(arrow)
            self.scene().removeItem(arrow)
    def addArrow(self, arrow):
        self.arrows.append(arrow)
    def image(self):
        # Render the outline into a pixmap, used for the toolbox button icons.
        pixmap = QtGui.QPixmap(250, 250)
        pixmap.fill(QtCore.Qt.transparent)
        painter = QtGui.QPainter(pixmap)
        painter.setPen(QtGui.QPen(QtCore.Qt.black, 8))
        painter.translate(125, 125)
        painter.drawPolyline(self.myPolygon)
        return pixmap
    def contextMenuEvent(self, event):
        self.scene().clearSelection()
        self.setSelected(True)
        self.myContextMenu.exec_(event.screenPos())
    def itemChange(self, change, value):
        if change == QtGui.QGraphicsItem.ItemPositionChange:
            # Keep attached arrows glued to the item while it moves.
            for arrow in self.arrows:
                arrow.updatePosition()
        return value
class DiagramScene(QtGui.QGraphicsScene):
    """Scene with four interaction modes: insert shape, insert arrow line,
    insert text, or move existing items. Emits signals when items land."""
    InsertItem, InsertLine, InsertText, MoveItem = range(4)
    itemInserted = QtCore.Signal(DiagramItem)
    textInserted = QtCore.Signal(QtGui.QGraphicsTextItem)
    itemSelected = QtCore.Signal(QtGui.QGraphicsItem)
    def __init__(self, itemMenu, parent=None):
        super(DiagramScene, self).__init__(parent)
        self.myItemMenu = itemMenu
        self.myMode = self.MoveItem
        self.myItemType = DiagramItem.Step
        self.line = None
        self.textItem = None
        self.myItemColor = QtCore.Qt.white
        self.myTextColor = QtCore.Qt.black
        self.myLineColor = QtCore.Qt.black
        self.myFont = QtGui.QFont()
    def setLineColor(self, color):
        # Becomes the default for new arrows; recolors the selected one too.
        self.myLineColor = color
        if self.isItemChange(Arrow):
            item = self.selectedItems()[0]
            item.setColor(self.myLineColor)
            self.update()
    def setTextColor(self, color):
        self.myTextColor = color
        if self.isItemChange(DiagramTextItem):
            item = self.selectedItems()[0]
            item.setDefaultTextColor(self.myTextColor)
    def setItemColor(self, color):
        self.myItemColor = color
        if self.isItemChange(DiagramItem):
            item = self.selectedItems()[0]
            item.setBrush(self.myItemColor)
    def setFont(self, font):
        self.myFont = font
        if self.isItemChange(DiagramTextItem):
            item = self.selectedItems()[0]
            item.setFont(self.myFont)
    def setMode(self, mode):
        self.myMode = mode
    def setItemType(self, type):
        self.myItemType = type
    def editorLostFocus(self, item):
        cursor = item.textCursor()
        cursor.clearSelection()
        item.setTextCursor(cursor)
        # Discard text items left empty after editing.
        if not item.toPlainText():
            self.removeItem(item)
            item.deleteLater()
    def mousePressEvent(self, mouseEvent):
        if (mouseEvent.button() != QtCore.Qt.LeftButton):
            return
        if self.myMode == self.InsertItem:
            item = DiagramItem(self.myItemType, self.myItemMenu)
            item.setBrush(self.myItemColor)
            self.addItem(item)
            item.setPos(mouseEvent.scenePos())
            self.itemInserted.emit(item)
        elif self.myMode == self.InsertLine:
            # Start a rubber-band line; finalized in mouseReleaseEvent.
            self.line = QtGui.QGraphicsLineItem(QtCore.QLineF(mouseEvent.scenePos(),
                                        mouseEvent.scenePos()))
            self.line.setPen(QtGui.QPen(self.myLineColor, 2))
            self.addItem(self.line)
        elif self.myMode == self.InsertText:
            textItem = DiagramTextItem()
            textItem.setFont(self.myFont)
            textItem.setTextInteractionFlags(QtCore.Qt.TextEditorInteraction)
            textItem.setZValue(1000.0)
            textItem.lostFocus.connect(self.editorLostFocus)
            textItem.selectedChange.connect(self.itemSelected)
            self.addItem(textItem)
            textItem.setDefaultTextColor(self.myTextColor)
            textItem.setPos(mouseEvent.scenePos())
            self.textInserted.emit(textItem)
        super(DiagramScene, self).mousePressEvent(mouseEvent)
    def mouseMoveEvent(self, mouseEvent):
        if self.myMode == self.InsertLine and self.line:
            newLine = QtCore.QLineF(self.line.line().p1(), mouseEvent.scenePos())
            self.line.setLine(newLine)
        elif self.myMode == self.MoveItem:
            super(DiagramScene, self).mouseMoveEvent(mouseEvent)
    def mouseReleaseEvent(self, mouseEvent):
        if self.line and self.myMode == self.InsertLine:
            # Items under each end of the rubber-band line, excluding the
            # temporary line itself.
            startItems = self.items(self.line.line().p1())
            if len(startItems) and startItems[0] == self.line:
                startItems.pop(0)
            endItems = self.items(self.line.line().p2())
            if len(endItems) and endItems[0] == self.line:
                endItems.pop(0)
            self.removeItem(self.line)
            self.line = None
            # Only connect two distinct DiagramItems.
            if len(startItems) and len(endItems) and \
                    isinstance(startItems[0], DiagramItem) and \
                    isinstance(endItems[0], DiagramItem) and \
                    startItems[0] != endItems[0]:
                startItem = startItems[0]
                endItem = endItems[0]
                arrow = Arrow(startItem, endItem)
                arrow.setColor(self.myLineColor)
                startItem.addArrow(arrow)
                endItem.addArrow(arrow)
                arrow.setZValue(-1000.0)
                self.addItem(arrow)
                arrow.updatePosition()
        self.line = None
        super(DiagramScene, self).mouseReleaseEvent(mouseEvent)
    def isItemChange(self, type):
        # True when the current selection contains an item of `type`.
        for item in self.selectedItems():
            if isinstance(item, type):
                return True
        return False
class MainWindow(QtGui.QMainWindow):
    """Main window: toolbox of shapes/backgrounds on the left, the diagram
    view on the right, plus menus and toolbars driving the scene."""
    # Button-group id reserved for the text-insertion toolbox button
    # (must not collide with the DiagramItem type ids 0-3).
    InsertTextButton = 10
    def __init__(self):
        super(MainWindow, self).__init__()
        # Actions/menus/toolbox must exist before the scene wires into them.
        self.createActions()
        self.createMenus()
        self.createToolBox()
        self.scene = DiagramScene(self.itemMenu)
        self.scene.setSceneRect(QtCore.QRectF(0, 0, 5000, 5000))
        self.scene.itemInserted.connect(self.itemInserted)
        self.scene.textInserted.connect(self.textInserted)
        self.scene.itemSelected.connect(self.itemSelected)
        self.createToolbars()
        layout = QtGui.QHBoxLayout()
        layout.addWidget(self.toolBox)
        self.view = QtGui.QGraphicsView(self.scene)
        layout.addWidget(self.view)
        self.widget = QtGui.QWidget()
        self.widget.setLayout(layout)
        self.setCentralWidget(self.widget)
        self.setWindowTitle("Diagramscene")
def backgroundButtonGroupClicked(self, button):
buttons = self.backgroundButtonGroup.buttons()
for myButton in buttons:
if myButton != button:
button.setChecked(False)
text = button.text()
if text == "Blue Grid":
self.scene.setBackgroundBrush(QtGui.QBrush(QtGui.QPixmap(':/images/background1.png')))
elif text == "White Grid":
self.scene.setBackgroundBrush(QtGui.QBrush(QtGui.QPixmap(':/images/background2.png')))
elif text == "Gray Grid":
self.scene.setBackgroundBrush(QtGui.QBrush(QtGui.QPixmap(':/images/background3.png')))
else:
self.scene.setBackgroundBrush(QtGui.QBrush(QtGui.QPixmap(':/images/background4.png')))
self.scene.update()
self.view.update()
    def buttonGroupClicked(self, id):
        # Make the toolbox buttons mutually exclusive, then switch the
        # scene into text-insert or item-insert mode. Item buttons use
        # their DiagramItem type value as the group id.
        buttons = self.buttonGroup.buttons()
        for button in buttons:
            if self.buttonGroup.button(id) != button:
                button.setChecked(False)
        if id == self.InsertTextButton:
            self.scene.setMode(DiagramScene.InsertText)
        else:
            self.scene.setItemType(id)
            self.scene.setMode(DiagramScene.InsertItem)
    def deleteItem(self):
        # Detach arrows before removing a shape so no dangling arrows remain.
        for item in self.scene.selectedItems():
            if isinstance(item, DiagramItem):
                item.removeArrows()
            self.scene.removeItem(item)
    def pointerGroupClicked(self, i):
        # Pointer-type button ids are chosen to match DiagramScene modes.
        self.scene.setMode(self.pointerTypeGroup.checkedId())
    def bringToFront(self):
        # Raise the selected item just above the highest overlapping shape.
        if not self.scene.selectedItems():
            return
        selectedItem = self.scene.selectedItems()[0]
        overlapItems = selectedItem.collidingItems()
        zValue = 0
        for item in overlapItems:
            if (item.zValue() >= zValue and isinstance(item, DiagramItem)):
                zValue = item.zValue() + 0.1
        selectedItem.setZValue(zValue)
    def sendToBack(self):
        # Lower the selected item just below the lowest overlapping shape.
        if not self.scene.selectedItems():
            return
        selectedItem = self.scene.selectedItems()[0]
        overlapItems = selectedItem.collidingItems()
        zValue = 0
        for item in overlapItems:
            if (item.zValue() <= zValue and isinstance(item, DiagramItem)):
                zValue = item.zValue() - 0.1
        selectedItem.setZValue(zValue)
    def itemInserted(self, item):
        # After an insert, fall back to move mode and release the button.
        self.pointerTypeGroup.button(DiagramScene.MoveItem).setChecked(True)
        self.scene.setMode(self.pointerTypeGroup.checkedId())
        self.buttonGroup.button(item.diagramType).setChecked(False)
    def textInserted(self, item):
        self.buttonGroup.button(self.InsertTextButton).setChecked(False)
        self.scene.setMode(self.pointerTypeGroup.checkedId())
    def currentFontChanged(self, font):
        self.handleFontChange()
    def fontSizeChanged(self, font):
        self.handleFontChange()
    def sceneScaleChanged(self, scale):
        # `scale` arrives as e.g. "150%"; keep the current pan offset.
        newScale = int(scale[:-1]) / 100.0
        oldMatrix = self.view.matrix()
        self.view.resetMatrix()
        self.view.translate(oldMatrix.dx(), oldMatrix.dy())
        self.view.scale(newScale, newScale)
    def textColorChanged(self):
        # Triggered by a color menu entry; the chosen QAction carries the
        # color in its data(). Update the button icon, then apply.
        self.textAction = self.sender()
        self.fontColorToolButton.setIcon(self.createColorToolButtonIcon(
                    ':/images/textpointer.png',
                    QtGui.QColor(self.textAction.data())))
        self.textButtonTriggered()
    def itemColorChanged(self):
        self.fillAction = self.sender()
        self.fillColorToolButton.setIcon(self.createColorToolButtonIcon(
                    ':/images/floodfill.png',
                    QtGui.QColor(self.fillAction.data())))
        self.fillButtonTriggered()
    def lineColorChanged(self):
        self.lineAction = self.sender()
        self.lineColorToolButton.setIcon(self.createColorToolButtonIcon(
                    ':/images/linecolor.png',
                    QtGui.QColor(self.lineAction.data())))
        self.lineButtonTriggered()
    def textButtonTriggered(self):
        # Apply the last-chosen color when the button face is clicked.
        self.scene.setTextColor(QtGui.QColor(self.textAction.data()))
    def fillButtonTriggered(self):
        self.scene.setItemColor(QtGui.QColor(self.fillAction.data()))
    def lineButtonTriggered(self):
        self.scene.setLineColor(QtGui.QColor(self.lineAction.data()))
    def handleFontChange(self):
        # Collect the full font state from the toolbar widgets and push it
        # to the scene (which applies it to a selected text item, if any).
        font = self.fontCombo.currentFont()
        font.setPointSize(int(self.fontSizeCombo.currentText()))
        if self.boldAction.isChecked():
            font.setWeight(QtGui.QFont.Bold)
        else:
            font.setWeight(QtGui.QFont.Normal)
        font.setItalic(self.italicAction.isChecked())
        font.setUnderline(self.underlineAction.isChecked())
        self.scene.setFont(font)
    def itemSelected(self, item):
        # Sync the font toolbar with the newly selected text item.
        font = item.font()
        color = item.defaultTextColor()
        self.fontCombo.setCurrentFont(font)
        self.fontSizeCombo.setEditText(str(font.pointSize()))
        self.boldAction.setChecked(font.weight() == QtGui.QFont.Bold)
        self.italicAction.setChecked(font.italic())
        self.underlineAction.setChecked(font.underline())
    def about(self):
        QtGui.QMessageBox.about(self, "About Diagram Scene",
                "The <b>Diagram Scene</b> example shows use of the graphics framework.")
def createToolBox(self):
self.buttonGroup = QtGui.QButtonGroup()
self.buttonGroup.setExclusive(False)
self.buttonGroup.buttonClicked[int].connect(self.buttonGroupClicked)
layout = QtGui.QGridLayout()
layout.addWidget(self.createCellWidget("Conditional", DiagramItem.Conditional),
0, 0)
layout.addWidget(self.createCellWidget("Process", DiagramItem.Step), 0,
1)
layout.addWidget(self.createCellWidget("Input/Output", DiagramItem.Io),
1, 0)
textButton = QtGui.QToolButton()
textButton.setCheckable(True)
self.buttonGroup.addButton(textButton, self.InsertTextButton)
textButton.setIcon(QtGui.QIcon(QtGui.QPixmap(':/images/textpointer.png')
.scaled(30, 30)))
textButton.setIconSize(QtCore.QSize(50, 50))
textLayout = QtGui.QGridLayout()
textLayout.addWidget(textButton, 0, 0, QtCore.Qt.AlignHCenter)
textLayout.addWidget(QtGui.QLabel("Text"), 1, 0,
QtCore.Qt.AlignCenter)
textWidget = QtGui.QWidget()
textWidget.setLayout(textLayout)
layout.addWidget(textWidget, 1, 1)
layout.setRowStretch(3, 10)
layout.setColumnStretch(2, 10)
itemWidget = QtGui.QWidget()
itemWidget.setLayout(layout)
self.backgroundButtonGroup = QtGui.QButtonGroup()
self.backgroundButtonGroup.buttonClicked.connect(self.backgroundButtonGroupClicked)
backgroundLayout = QtGui.QGridLayout()
backgroundLayout.addWidget(self.createBackgroundCellWidget("Blue Grid",
':/images/background1.png'), 0, 0)
backgroundLayout.addWidget(self.createBackgroundCellWidget("White Grid",
':/images/background2.png'), 0, 1)
backgroundLayout.addWidget(self.createBackgroundCellWidget("Gray Grid",
':/images/background3.png'), 1, 0)
backgroundLayout.addWidget(self.createBackgroundCellWidget("No Grid",
':/images/background4.png'), 1, 1)
backgroundLayout.setRowStretch(2, 10)
backgroundLayout.setColumnStretch(2, 10)
backgroundWidget = QtGui.QWidget()
backgroundWidget.setLayout(backgroundLayout)
self.toolBox = QtGui.QToolBox()
self.toolBox.setSizePolicy(QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Ignored))
self.toolBox.setMinimumWidth(itemWidget.sizeHint().width())
self.toolBox.addItem(itemWidget, "Basic Flowchart Shapes")
self.toolBox.addItem(backgroundWidget, "Backgrounds")
def createActions(self):
self.toFrontAction = QtGui.QAction(
QtGui.QIcon(':/images/bringtofront.png'), "Bring to &Front",
self, shortcut="Ctrl+F", statusTip="Bring item to front",
triggered=self.bringToFront)
self.sendBackAction = QtGui.QAction(
QtGui.QIcon(':/images/sendtoback.png'), "Send to &Back", self,
shortcut="Ctrl+B", statusTip="Send item to back",
triggered=self.sendToBack)
self.deleteAction = QtGui.QAction(QtGui.QIcon(':/images/delete.png'),
"&Delete", self, shortcut="Delete",
statusTip="Delete item from diagram",
triggered=self.deleteItem)
self.exitAction = QtGui.QAction("E&xit", self, shortcut="Ctrl+X",
statusTip="Quit Scenediagram example", triggered=self.close)
self.boldAction = QtGui.QAction(QtGui.QIcon(':/images/bold.png'),
"Bold", self, checkable=True, shortcut="Ctrl+B",
triggered=self.handleFontChange)
self.italicAction = QtGui.QAction(QtGui.QIcon(':/images/italic.png'),
"Italic", self, checkable=True, shortcut="Ctrl+I",
triggered=self.handleFontChange)
self.underlineAction = QtGui.QAction(
QtGui.QIcon(':/images/underline.png'), "Underline", self,
checkable=True, shortcut="Ctrl+U",
triggered=self.handleFontChange)
self.aboutAction = QtGui.QAction("A&bout", self, shortcut="Ctrl+B",
triggered=self.about)
def createMenus(self):
    """Build the File, Item and Help menus from the existing actions."""
    self.fileMenu = self.menuBar().addMenu("&File")
    self.fileMenu.addAction(self.exitAction)

    self.itemMenu = self.menuBar().addMenu("&Item")
    self.itemMenu.addAction(self.deleteAction)
    self.itemMenu.addSeparator()
    self.itemMenu.addAction(self.toFrontAction)
    self.itemMenu.addAction(self.sendBackAction)

    self.aboutMenu = self.menuBar().addMenu("&Help")
    self.aboutMenu.addAction(self.aboutAction)
def createToolbars(self):
    """Build the Edit, Font, Color and Pointer toolbars.

    Order matters: widgets are created and wired to their slots before
    being added to a toolbar, and the color tool buttons take their
    default action from the menu built by createColorMenu().
    """
    # Edit toolbar: delete / z-order actions.
    self.editToolBar = self.addToolBar("Edit")
    self.editToolBar.addAction(self.deleteAction)
    self.editToolBar.addAction(self.toFrontAction)
    self.editToolBar.addAction(self.sendBackAction)

    # Font family / size selectors.
    self.fontCombo = QtGui.QFontComboBox()
    self.fontCombo.currentFontChanged.connect(self.currentFontChanged)

    self.fontSizeCombo = QtGui.QComboBox()
    self.fontSizeCombo.setEditable(True)
    for i in range(8, 30, 2):
        self.fontSizeCombo.addItem(str(i))
    # Editable combo: restrict free-typed sizes to the range 2..64.
    validator = QtGui.QIntValidator(2, 64, self)
    self.fontSizeCombo.setValidator(validator)
    self.fontSizeCombo.currentIndexChanged.connect(self.fontSizeChanged)

    # Text-color button with a drop-down color menu (default: black).
    self.fontColorToolButton = QtGui.QToolButton()
    self.fontColorToolButton.setPopupMode(QtGui.QToolButton.MenuButtonPopup)
    self.fontColorToolButton.setMenu(
            self.createColorMenu(self.textColorChanged, QtCore.Qt.black))
    self.textAction = self.fontColorToolButton.menu().defaultAction()
    self.fontColorToolButton.setIcon(
            self.createColorToolButtonIcon(':/images/textpointer.png',
                    QtCore.Qt.black))
    self.fontColorToolButton.setAutoFillBackground(True)
    self.fontColorToolButton.clicked.connect(self.textButtonTriggered)

    # Fill-color button (default: white).
    self.fillColorToolButton = QtGui.QToolButton()
    self.fillColorToolButton.setPopupMode(QtGui.QToolButton.MenuButtonPopup)
    self.fillColorToolButton.setMenu(
            self.createColorMenu(self.itemColorChanged, QtCore.Qt.white))
    self.fillAction = self.fillColorToolButton.menu().defaultAction()
    self.fillColorToolButton.setIcon(
            self.createColorToolButtonIcon(':/images/floodfill.png',
                    QtCore.Qt.white))
    self.fillColorToolButton.clicked.connect(self.fillButtonTriggered)

    # Line-color button (default: black).
    self.lineColorToolButton = QtGui.QToolButton()
    self.lineColorToolButton.setPopupMode(QtGui.QToolButton.MenuButtonPopup)
    self.lineColorToolButton.setMenu(
            self.createColorMenu(self.lineColorChanged, QtCore.Qt.black))
    self.lineAction = self.lineColorToolButton.menu().defaultAction()
    self.lineColorToolButton.setIcon(
            self.createColorToolButtonIcon(':/images/linecolor.png',
                    QtCore.Qt.black))
    self.lineColorToolButton.clicked.connect(self.lineButtonTriggered)

    self.textToolBar = self.addToolBar("Font")
    self.textToolBar.addWidget(self.fontCombo)
    self.textToolBar.addWidget(self.fontSizeCombo)
    self.textToolBar.addAction(self.boldAction)
    self.textToolBar.addAction(self.italicAction)
    self.textToolBar.addAction(self.underlineAction)

    self.colorToolBar = self.addToolBar("Color")
    self.colorToolBar.addWidget(self.fontColorToolButton)
    self.colorToolBar.addWidget(self.fillColorToolButton)
    self.colorToolBar.addWidget(self.lineColorToolButton)

    # Mutually-exclusive pointer-mode buttons mapped to DiagramScene modes.
    pointerButton = QtGui.QToolButton()
    pointerButton.setCheckable(True)
    pointerButton.setChecked(True)
    pointerButton.setIcon(QtGui.QIcon(':/images/pointer.png'))

    linePointerButton = QtGui.QToolButton()
    linePointerButton.setCheckable(True)
    linePointerButton.setIcon(QtGui.QIcon(':/images/linepointer.png'))

    self.pointerTypeGroup = QtGui.QButtonGroup()
    self.pointerTypeGroup.addButton(pointerButton, DiagramScene.MoveItem)
    self.pointerTypeGroup.addButton(linePointerButton,
            DiagramScene.InsertLine)
    self.pointerTypeGroup.buttonClicked[int].connect(self.pointerGroupClicked)

    # View zoom selector; index 2 == "100%".
    self.sceneScaleCombo = QtGui.QComboBox()
    self.sceneScaleCombo.addItems(["50%", "75%", "100%", "125%", "150%"])
    self.sceneScaleCombo.setCurrentIndex(2)
    self.sceneScaleCombo.currentIndexChanged[str].connect(self.sceneScaleChanged)

    self.pointerToolbar = self.addToolBar("Pointer type")
    self.pointerToolbar.addWidget(pointerButton)
    self.pointerToolbar.addWidget(linePointerButton)
    self.pointerToolbar.addWidget(self.sceneScaleCombo)
def createBackgroundCellWidget(self, text, image):
    """Build a labelled, checkable cell button for one background choice.

    The button joins ``backgroundButtonGroup`` so the selections stay
    mutually exclusive; the returned widget stacks the button above a
    caption label.
    """
    cell_button = QtGui.QToolButton()
    cell_button.setText(text)
    cell_button.setIcon(QtGui.QIcon(image))
    cell_button.setIconSize(QtCore.QSize(50, 50))
    cell_button.setCheckable(True)
    self.backgroundButtonGroup.addButton(cell_button)

    cell_layout = QtGui.QGridLayout()
    cell_layout.addWidget(cell_button, 0, 0, QtCore.Qt.AlignHCenter)
    cell_layout.addWidget(QtGui.QLabel(text), 1, 0, QtCore.Qt.AlignCenter)

    container = QtGui.QWidget()
    container.setLayout(cell_layout)
    return container
def createCellWidget(self, text, diagramType):
    """Build a labelled, checkable cell button for one diagram shape.

    The icon comes from a throwaway DiagramItem of the requested type;
    the button is registered in ``buttonGroup`` under ``diagramType``.
    """
    template_item = DiagramItem(diagramType, self.itemMenu)
    shape_button = QtGui.QToolButton()
    shape_button.setIcon(QtGui.QIcon(template_item.image()))
    shape_button.setIconSize(QtCore.QSize(50, 50))
    shape_button.setCheckable(True)
    self.buttonGroup.addButton(shape_button, diagramType)

    cell_layout = QtGui.QGridLayout()
    cell_layout.addWidget(shape_button, 0, 0, QtCore.Qt.AlignHCenter)
    cell_layout.addWidget(QtGui.QLabel(text), 1, 0, QtCore.Qt.AlignCenter)

    container = QtGui.QWidget()
    container.setLayout(cell_layout)
    return container
def createColorMenu(self, slot, defaultColor):
    """Return a QMenu of color-choice actions, each triggering *slot*.

    The action whose color equals *defaultColor* becomes the menu's
    default action.
    """
    palette = [QtCore.Qt.black, QtCore.Qt.white, QtCore.Qt.red,
               QtCore.Qt.blue, QtCore.Qt.yellow]
    labels = ["black", "white", "red", "blue", "yellow"]

    menu = QtGui.QMenu(self)
    for shade, label in zip(palette, labels):
        entry = QtGui.QAction(self.createColorIcon(shade), label, self,
                              triggered=slot)
        # Stash the QColor on the action so the slot can retrieve it.
        entry.setData(QtGui.QColor(shade))
        menu.addAction(entry)
        if shade == defaultColor:
            menu.setDefaultAction(entry)
    return menu
def createColorToolButtonIcon(self, imageFile, color):
    """Compose a tool-button icon: *imageFile* on top, a *color* bar below.

    The pixmap is 50x80; the image is drawn into the top 50x60 region and
    the color swatch fills the area from y=60 down.

    NOTE(review): the fill rect QRect(0, 60, 50, 80) is (x, y, w, h), so
    its nominal height (80) extends past the 80px pixmap; Qt clips it, but
    QRect(0, 60, 50, 20) may be what was intended -- confirm.
    """
    pixmap = QtGui.QPixmap(50, 80)
    pixmap.fill(QtCore.Qt.transparent)
    painter = QtGui.QPainter(pixmap)
    image = QtGui.QPixmap(imageFile)
    target = QtCore.QRect(0, 0, 50, 60)
    source = QtCore.QRect(0, 0, 42, 42)
    painter.fillRect(QtCore.QRect(0, 60, 50, 80), color)
    painter.drawPixmap(target, image, source)
    painter.end()
    return QtGui.QIcon(pixmap)
def createColorIcon(self, color):
    """Return a 20x20 icon filled solidly with *color*."""
    swatch = QtGui.QPixmap(20, 20)
    painter = QtGui.QPainter(swatch)
    painter.setPen(QtCore.Qt.NoPen)
    painter.fillRect(QtCore.QRect(0, 0, 20, 20), color)
    painter.end()
    return QtGui.QIcon(swatch)
if __name__ == '__main__':
    # Launch the diagram editor as a standalone application.
    import sys

    app = QtGui.QApplication(sys.argv)
    mainWindow = MainWindow()
    mainWindow.setGeometry(100, 100, 800, 500)
    mainWindow.show()

    sys.exit(app.exec_())
|
{
"content_hash": "22289170e833f489ec3ebdf03795d5f6",
"timestamp": "",
"source": "github",
"line_count": 787,
"max_line_length": 133,
"avg_line_length": 39.0571791613723,
"alnum_prop": 0.617444205868957,
"repo_name": "cherry-wb/SideTools",
"id": "57f2ea01cbad8ba00fd6e8a780ac86c8ae245f97",
"size": "30761",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/graphicsview/diagramscene/diagramscene.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "9501"
},
{
"name": "Python",
"bytes": "4071976"
},
{
"name": "Shell",
"bytes": "182"
},
{
"name": "TypeScript",
"bytes": "25292"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from __future__ import division
# Copyright (c) 2010-2019 openpyxl
import math
#constants
DEFAULT_ROW_HEIGHT = 15. # Default row height measured in point size.
BASE_COL_WIDTH = 13 # in characters
DEFAULT_COLUMN_WIDTH = 51.85 # in points, should be characters
DEFAULT_LEFT_MARGIN = 0.7 # in inches, = right margin
DEFAULT_TOP_MARGIN = 0.7874 # in inches = bottom margin
DEFAULT_HEADER = 0.3 # in inches
# Conversion functions
"""
From the ECMA Spec (4th Edition part 1)
Page setup: "Left Page Margin in inches" p. 1647
Docs from
http://startbigthinksmall.wordpress.com/2010/01/04/points-inches-and-emus-measuring-units-in-office-open-xml/
See also http://msdn.microsoft.com/en-us/library/dd560821(v=office.12).aspx
dxa: The main unit in OOXML is a twentieth of a point. Also called twips.
pt: point. In Excel there are 72 points to an inch
hp: half-points are used to specify font sizes. A font-size of 12pt equals 24 half points
pct: Half-points are used to specify font sizes. A font-size of 12pt equals 24 half points
EMU: English Metric Unit, EMUs are used for coordinates in vector-based
drawings and embedded pictures. One inch equates to 914400 EMUs and a
centimeter is 360000. For bitmaps the default resolution is 96 dpi (known as
PixelsPerInch in Excel). Spec p. 1122
For radial geometry Excel uses integer units of 1/60000th of a degree.
"""
def inch_to_dxa(value):
    """Convert inches to dxa (twentieths of a point).

    1 inch = 72 points = 72 * 20 dxa.
    """
    twips = value * 20 * 72
    return int(twips)
def dxa_to_inch(value):
    """Convert dxa (twentieths of a point) to inches."""
    points = value / 72
    return points / 20
def dxa_to_cm(value):
    """Convert dxa (twentieths of a point) to centimetres (1 in = 2.54 cm)."""
    inches = dxa_to_inch(value)
    return 2.54 * inches
def cm_to_dxa(value):
    """Convert centimetres to dxa, going via EMU and inches."""
    as_inches = EMU_to_inch(cm_to_EMU(value))
    return inch_to_dxa(as_inches)
def pixels_to_EMU(value):
    """Convert pixels to EMU at the default 96 dpi (1 px = 9525 EMU)."""
    emu = value * 9525
    return int(emu)
def EMU_to_pixels(value):
    """Convert EMU to pixels at the default 96 dpi, rounding to nearest."""
    pixels = value / 9525
    return round(pixels)
def cm_to_EMU(value):
    """Convert centimetres to EMU (1 cm = 360000 EMU)."""
    emu = value * 360000
    return int(emu)
def EMU_to_cm(value):
    """Convert EMU to centimetres, rounded to 4 decimal places."""
    cm = value / 360000
    return round(cm, 4)
def inch_to_EMU(value):
    """Convert inches to EMU (1 inch = 914400 EMU)."""
    emu = value * 914400
    return int(emu)
def EMU_to_inch(value):
    """Convert EMU to inches, rounded to 4 decimal places."""
    inches = value / 914400
    return round(inches, 4)
def pixels_to_points(value, dpi=96):
    """Convert pixels to points (72 points per inch) at the given dpi."""
    points = value * 72
    return points / dpi
def points_to_pixels(value, dpi=96):
    """Convert points to pixels at the given dpi, rounding up to a whole pixel."""
    raw = value * dpi / 72
    return int(math.ceil(raw))
def degrees_to_angle(value):
    """Convert degrees to OOXML angle units (1 degree = 60000 units)."""
    angle = round(value * 60000)
    return int(angle)
def angle_to_degrees(value):
    """Convert OOXML angle units back to degrees (2 decimal places)."""
    degrees = value / 60000
    return round(degrees, 2)
def short_color(color):
    """Return the 6-character form of a color, dropping an alpha prefix.

    An 8-character ARGB string like 'FF123456' becomes '123456'; shorter
    strings are returned unchanged.
    """
    if len(color) <= 6:
        return color
    return color[2:]
|
{
"content_hash": "b3c466a3fc53a7f730611e919c5093c6",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 109,
"avg_line_length": 24.34259259259259,
"alnum_prop": 0.6884747052111069,
"repo_name": "kawamon/hue",
"id": "393af3e27458d578dfd3909b665e1816eac36110",
"size": "2629",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/openpyxl-2.6.4/openpyxl/utils/units.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "5786"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "Batchfile",
"bytes": "118907"
},
{
"name": "C",
"bytes": "3196521"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "308860"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "1050129"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "10981"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "7312"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "24999718"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "JSONiq",
"bytes": "4"
},
{
"name": "Java",
"bytes": "471854"
},
{
"name": "JavaScript",
"bytes": "28075556"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "Jupyter Notebook",
"bytes": "73168"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Lex",
"bytes": "264449"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1377"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "269655"
},
{
"name": "Mako",
"bytes": "3614942"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "31565"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "1412"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "3204"
},
{
"name": "Python",
"bytes": "76440000"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "95764"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "190718"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "TSQL",
"bytes": "10013"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "317058"
},
{
"name": "TypeScript",
"bytes": "1607"
},
{
"name": "VBA",
"bytes": "2884"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "521413"
},
{
"name": "Yacc",
"bytes": "2133855"
}
],
"symlink_target": ""
}
|
"""
El proposito de este experimento es analizar la convergencia del método de
Newton para diferentes magnitudes de x0 y de alphas.
Graficaremos una escala logaritimica en x 10^-n..10^n y veremos cuantas
iteraciones tarda en converger.
"""
import itertools
from copy import deepcopy
from math import sqrt
from pylab import plt, legend
from tp1 import Experimento
x0s = [10 ** n for n in range(-10, 10)]
alphas = [0.0001, 0.5, 2, 1000]
marker = itertools.cycle(('+', 'o', '*'))
class Newton(Experimento):
    # Experiment configuration: Newton's method on function 'f',
    # stopping when the relative-error criterion drops below `limite`.
    metodo = 'newton'
    funcion = 'f'
    criterio = 'relativo'
    limite = 0.0001
# Plot setup: iterations-to-converge vs starting point, log-scaled x axis.
plt.figure(1)
plt.xlabel('x0')
plt.ylabel('iteraciones')
#plt.xscale('semilog')
plt.semilogx()

# Run one Newton experiment per alpha, storing results keyed by alpha.
resultados = {}
for alpha in alphas:
    resultado_real = sqrt(alpha)
    new_x0s = deepcopy(x0s)
    mi_newton = Newton(entradas=[alpha] * len(new_x0s), x0s=new_x0s)
    mi_newton.run()
    resultados[alpha] = mi_newton.resultados

# Plot each alpha's iteration counts as a separate marker series.
# NOTE(review): marker.next() is Python 2 syntax; on Python 3 this would
# need next(marker) -- confirm the target interpreter.
for alpha in alphas:
    resultado = resultados[alpha]
    # distancia_a_resultado = [abs(x0 - resultado_real) for x0 in x0s]
    # plt.plot(distancia_a_resultado, [res['iteraciones'] for res in resultado], marker=marker.next(), linestyle='', label=u"α = %s" % alpha)
    plt.plot(x0s, [res['iteraciones'] for res in resultado], marker=marker.next(), linestyle='', label=u"α = %s" % alpha)

legend()
plt.show()
|
{
"content_hash": "75b37687dcb8670a3925dd16438c84db",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 141,
"avg_line_length": 24.678571428571427,
"alnum_prop": 0.6852387843704776,
"repo_name": "metnum/2013-1c-tp1",
"id": "4beb7c24b9018a72020c0f4367789ef664e9b8c5",
"size": "1398",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "codigo/exp_x0_f_newton.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "8972"
},
{
"name": "Python",
"bytes": "35337"
},
{
"name": "TeX",
"bytes": "51017"
}
],
"symlink_target": ""
}
|
from rest_framework import generics
from rest_framework import status
from rest_framework.response import Response
from rest_framework import permissions
from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from apps.core.serializers import *
from apps.core.models import *
from django.db.models import Q
from django.db import transaction
import decimal
import json
class CurrentUser(generics.RetrieveAPIView):
    """Return the serialized detail record of the authenticated user."""

    permission_classes = (permissions.IsAuthenticated,)

    def get(self, request, format=None):
        current = User.objects.get(id=request.user.id)
        return Response(UserDetailSerializer(current).data)
class UserLabAccounts(generics.GenericAPIView):
    """List the lab groups the authenticated user's profile belongs to."""

    permission_classes = (permissions.IsAuthenticated,)

    def get(self, request, format=None):
        groups = request.user.profile.lab_group.all()
        return Response(LabGroupSerializer(groups, many=True).data)
class MyPlants(generics.ListAPIView):
    """List plants for the current user.

    NOTE(review): despite the name, this returns *every* Plant -- the body
    is identical to AllPlants below and applies no per-user filtering.
    Confirm whether filtering by the requesting user was intended.
    """
    permission_classes = (permissions.IsAuthenticated,)

    def get(self, request, format=None):
        p = Plant.objects.all()
        plants = PlantSerializer(p, many=True)
        return Response(plants.data)
class AllPlants(generics.ListAPIView):
    """List every plant in the database."""

    permission_classes = (permissions.IsAuthenticated,)

    def get(self, request, format=None):
        queryset = Plant.objects.all()
        serialized = PlantSerializer(queryset, many=True)
        return Response(serialized.data)
class UpdateTags(generics.UpdateAPIView):
    """Replace a Submission's tag set with the tags posted by the client."""
    permission_classes = (permissions.IsAuthenticated,)

    def patch(self, request, pk, format=None):
        """Diff the submission's current tags against request.DATA['tags'].

        Existing tags missing from the posted list are removed; posted
        entries whose id is the literal string 'new' are attached by name.
        Both changes happen inside one atomic transaction.
        """
        submission = get_object_or_404(Submission, pk=pk)
        tags = submission.tags.all()
        new_tags = request.DATA['tags']
        to_add = list()
        to_delete = list()
        # Schedule removal of every existing tag absent from the posted list.
        for tag in tags:
            found = False
            for ntag in new_tags:
                if tag.id == ntag['id']:
                    found = True
            if not found:
                to_delete.append(tag)
        # Only entries explicitly flagged id == 'new' get added.
        for tag in new_tags:
            if tag['id'] == 'new':
                to_add.append(tag)
        # Apply removals and additions atomically.
        with transaction.atomic():
            for item in to_delete:
                submission.tags.remove(item)
            for item in to_add:
                submission.tags.add(item['name'])
        return Response('Tags for Submission ID "{0}" saved successfully.'.format(pk))
##REST File Upload for Submission
class SubmissionFileUpload(generics.ListCreateAPIView):
    """List and create UploadedFile records attached to a Submission <pk>."""
    permission_classes = (permissions.IsAuthenticated,)
    queryset = UploadedFile.objects.none()
    serializer_class = UploadedFileSerializer
    pk_url_kwarg = 'pk'

    def get_queryset(self):
        """
        This view returns a list of all uploaded files for a submission <pk>
        @todo: need to check for access permission so we don't give out files
        """
        pk = self.kwargs.get(self.pk_url_kwarg, None)
        if pk is not None:
            ## here we need to check if user has access or not!
            return UploadedFile.objects.filter(submissionfk__pk=pk)
        else:
            return UploadedFile.objects.none()

    def pre_save(self, obj, request):
        """
        We override our submissionfk, uploadedby & file_description here
        """
        # Attach the upload to the URL's submission and record the uploader.
        pk = self.kwargs.get(self.pk_url_kwarg, None)
        if pk is not None:
            obj.submissionfk = get_object_or_404(Submission, pk=pk)
            obj.uploaded_by_who = request.user
            obj.file_description = ""
        else:
            ## TODO: we need to give up and 404 or something
            pass

    def post(self, request, format=None, *args, **kwargs):
        """
        We need insert our own pre_save call...
        """
        # NOTE(review): mirrors the DRF 2.x create() flow, but calls the
        # custom pre_save (which takes an extra `request` arg) before save.
        serializer = self.get_serializer(data=request.DATA, files=request.FILES, partial=True)
        if serializer.is_valid():
            self.pre_save(serializer.object, request)
            self.object = serializer.save(force_insert=True)
            self.post_save(self.object, created=True)
            headers = self.get_success_headers(serializer.data)
            return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
{
"content_hash": "076373c42a3a74317fa4eeaeaaf7425c",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 94,
"avg_line_length": 34.317460317460316,
"alnum_prop": 0.6433857539315448,
"repo_name": "dparizek/newecosystems",
"id": "32078d3577f9689fa55ce6c9a2d6558f98497819",
"size": "4324",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "newecosystems/apps/core/api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "217260"
},
{
"name": "JavaScript",
"bytes": "456614"
},
{
"name": "Python",
"bytes": "51889"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from flask import session
from jinja2.filters import do_filesizeformat
from indico.core import signals
from indico.core.logger import Logger
from indico.core.settings.converters import EnumConverter
from indico.modules.events.features.base import EventFeature
from indico.modules.events.logs import EventLogKind, EventLogRealm
from indico.modules.events.models.events import EventType
from indico.modules.events.settings import EventSettingsProxy, ThemeSettingsProxy
from indico.modules.users import NameFormat
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.menu import SideMenuItem
logger = Logger.get('events.layout')

# Per-event layout settings and their defaults; `name_format` is persisted
# through the EnumConverter declared below.
layout_settings = EventSettingsProxy('layout', {
    'is_searchable': True,
    'show_nav_bar': True,
    'show_social_badges': True,
    'name_format': None,
    'show_banner': False,
    'header_text_color': '',
    'header_background_color': '',
    'announcement': None,
    'show_announcement': False,
    'use_custom_css': False,
    'theme': None,
    'timetable_theme': None,
    'timetable_theme_settings': {},
    'use_custom_menu': False,
    'timetable_by_room': False,
    'timetable_detailed': False,
}, converters={
    'name_format': EnumConverter(NameFormat)
})

theme_settings = ThemeSettingsProxy()
@signals.event.created.connect
def _event_created(event, **kwargs):
    """Apply the category's default timetable theme to a new event."""
    defaults = event.category.default_event_themes
    type_name = event.type_.name
    # Only fill in a theme when none is set and the category has a default
    # for this event type.
    if layout_settings.get(event, 'timetable_theme') or type_name not in defaults:
        return
    layout_settings.set(event, 'timetable_theme', defaults[type_name])
@signals.event.type_changed.connect
def _event_type_changed(event, **kwargs):
    """Re-sync the timetable theme with the category default for the new type."""
    new_default = event.category.default_event_themes.get(event.type_.name)
    if new_default is not None:
        layout_settings.set(event, 'timetable_theme', new_default)
    else:
        # No default for the new type: fall back to the unset state.
        layout_settings.delete(event, 'timetable_theme')
@signals.menu.items.connect_via('event-management-sidemenu')
def _extend_event_management_menu_layout(sender, event, **kwargs):
    """Yield layout-related entries for the event-management side menu.

    Only managers see any entries; the Menu entry is conference-only and
    the Images entry requires the 'images' feature to be enabled.
    """
    if not event.can_manage(session.user):
        return
    yield SideMenuItem('layout', _('Layout'), url_for('event_layout.index', event), section='customization')
    if event.type_ == EventType.conference:
        yield SideMenuItem('menu', _('Menu'), url_for('event_layout.menu', event), section='customization')
    if event.has_feature('images'):
        yield SideMenuItem('images', _('Images'), url_for('event_layout.images', event), section='customization')
@signals.event.cloned.connect
def _event_cloned(old_event, new_event, **kwargs):
    """Carry the timetable theme over to clones of meetings and lectures."""
    if old_event.type_ == EventType.conference:
        return
    # For meetings/lectures we want to keep the default timetable style
    # in all cases.
    inherited = layout_settings.get(old_event, 'timetable_theme')
    if inherited is None:
        return
    layout_settings.set(new_event, 'timetable_theme', inherited)
@signals.event_management.get_cloners.connect
def _get_cloners(sender, **kwargs):
    """Yield the layout-related cloners used when cloning an event."""
    # Local import -- presumably avoids a circular import at module load;
    # confirm before hoisting to the top of the file.
    from indico.modules.events.layout.clone import ImageCloner, LayoutCloner
    yield ImageCloner
    yield LayoutCloner
@signals.event.get_feature_definitions.connect
def _get_feature_definitions(sender, **kwargs):
    """Expose the 'images' feature definition to the feature registry."""
    return ImagesFeature
@signals.event_management.image_created.connect
def _log_image_created(image, user, **kwargs):
    """Record an image upload in the event's management log."""
    image.event.log(EventLogRealm.management, EventLogKind.positive, 'Layout',
                    'Added image "{}"'.format(image.filename), user, data={
                        'File name': image.filename,
                        'File type': image.content_type,
                        'File size': do_filesizeformat(image.size)
                    })
@signals.event_management.image_deleted.connect
def _log_image_deleted(image, user, **kwargs):
    """Record an image deletion in the event's management log."""
    image.event.log(EventLogRealm.management, EventLogKind.negative, 'Layout',
                    'Deleted image "{}"'.format(image.filename), user, data={
                        'File name': image.filename
                    })
class ImagesFeature(EventFeature):
    """Event feature toggling the image manager."""
    name = 'images'
    friendly_name = _('Image manager')
    description = _('Allows event managers to attach images to the event, which can then be used from HTML code. '
                    'Very useful for e.g. sponsor logos and conference custom pages.')

    @classmethod
    def is_default_for_event(cls, event):
        # Enabled by default only for conferences.
        return event.type_ == EventType.conference
|
{
"content_hash": "b2e14d51e9c778ce36f02e9e708b58cc",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 114,
"avg_line_length": 37.12605042016807,
"alnum_prop": 0.6933001358080579,
"repo_name": "OmeGak/indico",
"id": "d0b1893cdb5b85dfa3fac17a9b07ec787f68e5dd",
"size": "4632",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "indico/modules/events/layout/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "547418"
},
{
"name": "HTML",
"bytes": "1366687"
},
{
"name": "JavaScript",
"bytes": "1678182"
},
{
"name": "Mako",
"bytes": "1340"
},
{
"name": "Python",
"bytes": "4488419"
},
{
"name": "Shell",
"bytes": "2724"
},
{
"name": "TeX",
"bytes": "23051"
},
{
"name": "XSLT",
"bytes": "1504"
}
],
"symlink_target": ""
}
|
import sys, os
from PySide import QtCore, QtGui
class Window(QtGui.QWidget):
    """Widget showing a START button whose drop-down menu mirrors the
    directory tree rooted at *startDirectory* (a Windows Start Menu)."""

    def __init__(self, startDirectory):
        super(Window, self).__init__()
        self.init_(startDirectory)

    def init_(self, startDirectory):
        # Fixed-size START button; its menu is the recursively built tree.
        self.startButton = QtGui.QPushButton('START', self)
        self.startButton.setFixedSize(QtCore.QSize(200,100))
        self.startButton.setMenu(self.buildMenu(startDirectory))
        self.setGeometry(100, 100, 200, 100)
        self.show()

    def buildMenu(self, path):
        """Recursively mirror *path* as nested QMenus, skipping desktop.ini.

        Directories become submenus; files become actions ('.lnk' suffix
        stripped from the label).
        """
        menu = QtGui.QMenu(self)
        for fname in filter(lambda x: 'desktop.ini' != x.lower(), os.listdir(path)):
            current = os.sep.join([path, fname])
            action = QtGui.QAction(fname.partition('.lnk')[0], self)
            if os.path.isdir(current):
                action.setMenu(self.buildMenu(current))
            else:
                # default value to lambda sets value immediately
                # NOTE(review): print(...) inside a lambda needs Python 3 or
                # `from __future__ import print_function` -- confirm target.
                action.triggered.connect(lambda p=current: print(str(p))) # TODO: replace this with `wine start`
                pass
            menu.addAction(action)
        return menu
# Build the application and point it at the user's Start Menu folder.
app = QtGui.QApplication(sys.argv)
win = Window('c:\\Users\\Martin Hamlyn\\AppData\\Roaming\\Microsoft\\Windows\\Start Menu\\Programs')
app.exec_()
|
{
"content_hash": "8774b2c5db742522197e2a9b6167a08e",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 104,
"avg_line_length": 41.17857142857143,
"alnum_prop": 0.6816999132697311,
"repo_name": "stwb/winestart",
"id": "ca38b49eeadbac3128be9100a8833143d189cae6",
"size": "1176",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "winestart.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1176"
}
],
"symlink_target": ""
}
|
"""Implementation of JSONDecoder
"""
import re
import sys
import struct
from jsb.contrib.simplejson.scanner import make_scanner
def _import_c_scanstring():
    # The C speedup module is not bundled in this copy; always report it
    # unavailable so the pure-Python scanstring is used.
    return None
c_scanstring = _import_c_scanstring()
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
# The struct module in Python 2.4 would get frexp() out of range here
# when an endian is specified in the format string. Fixed in Python 2.5+
if sys.byteorder != 'big':
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
nan, inf = struct.unpack('dd', _BYTES)
return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
class JSONDecodeError(ValueError):
    """Subclass of ValueError with the following additional properties:
    msg: The unformatted error message
    doc: The JSON document being parsed
    pos: The start index of doc where parsing failed
    end: The end index of doc where parsing failed (may be None)
    lineno: The line corresponding to pos
    colno: The column corresponding to pos
    endlineno: The line corresponding to end (may be None)
    endcolno: The column corresponding to end (may be None)
    """
    def __init__(self, msg, doc, pos, end=None):
        # The formatted message (with line/column context) goes to
        # ValueError; the raw pieces are kept as attributes for callers.
        ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.end = end
        self.lineno, self.colno = linecol(doc, pos)
        if end is not None:
            self.endlineno, self.endcolno = linecol(doc, end)
        else:
            self.endlineno, self.endcolno = None, None
def linecol(doc, pos):
    """Map absolute character offset *pos* in *doc* to (line, column).

    Lines are 1-based; the column is the offset from the preceding
    newline (or from the start of the document on line 1).
    """
    newline_count = doc.count('\n', 0, pos)
    line = newline_count + 1
    if line == 1:
        return line, pos
    return line, pos - doc.rindex('\n', 0, pos)
def errmsg(msg, doc, pos, end=None):
    """Format a decode-error message with line/column context for *pos*
    (and for *end*, when given)."""
    # Note that this function is called from _speedups
    lineno, colno = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    return ('%s: line %d column %d - line %d column %d (char %d - %d)'
            % (msg, lineno, colno, endlineno, endcolno, pos, end))
_CONSTANTS = {
'-Infinity': NegInf,
'Infinity': PosInf,
'NaN': NaN,
}
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
BACKSLASH = {
'"': u'"', '\\': u'\\', '/': u'/',
'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True,
        _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote.

    NOTE(review): this implementation is Python 2 only -- it uses the
    ``unicode`` type and ``unichr``.
    """
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                #msg = "Invalid control character {0!r} at".format(terminator)
                raise JSONDecodeError(msg, s, end)
            else:
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: " + repr(esc)
                raise JSONDecodeError(msg, s, end)
            end += 1
        else:
            # Unicode escape sequence
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise JSONDecodeError(msg, s, end)
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise JSONDecodeError(msg, s, end)
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise JSONDecodeError(msg, s, end)
                uni2 = int(esc2, 16)
                # Combine high and low surrogates into one code point.
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
# Use speedup if available
scanstring = c_scanstring or py_scanstring
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook,
        object_pairs_hook, memo=None,
        _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON object starting just after the opening '{'.

    Returns (obj, end) where *obj* is a dict (or the result of
    object_hook / object_pairs_hook) and *end* is the index after the
    closing '}'.  NOTE(review): the tuple parameter ``(s, end)`` is
    Python 2-only syntax (removed in Python 3 by PEP 3113).
    """
    # Backwards compatibility
    if memo is None:
        memo = {}
    # Interning repeated keys through the memo dict saves memory.
    memo_get = memo.setdefault
    pairs = []
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            if object_pairs_hook is not None:
                result = object_pairs_hook(pairs)
                return result, end + 1
            pairs = {}
            if object_hook is not None:
                pairs = object_hook(pairs)
            return pairs, end + 1
        elif nextchar != '"':
            raise JSONDecodeError("Expecting property name", s, end)
    end += 1
    while True:
        key, end = scanstring(s, end, encoding, strict)
        key = memo_get(key, key)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise JSONDecodeError("Expecting : delimiter", s, end)
        end += 1
        # Skip whitespace after the ':' (fast path for 0 or 1 spaces).
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise JSONDecodeError("Expecting object", s, end)
        pairs.append((key, value))
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar == '}':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting , delimiter", s, end - 1)
        # After the ',' the next token must be the quoted property name.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end += 1
                nextchar = s[end]
                if nextchar in _ws:
                    end = _w(s, end + 1).end()
                    nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar != '"':
            raise JSONDecodeError("Expecting property name", s, end - 1)
    if object_pairs_hook is not None:
        result = object_pairs_hook(pairs)
        return result, end
    pairs = dict(pairs)
    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON array from ``s`` starting just after the opening '['.

    Returns ``(values, end)`` where ``end`` indexes one past the closing
    ']'.  Raises JSONDecodeError on malformed input.  Note the Python 2
    tuple-parameter syntax ``(s, end)``; this module is Python 2 only.
    """
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append  # bound method hoisted out of the loop for speed
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            # scan_once signals "no value here" via StopIteration.
            raise JSONDecodeError("Expecting object", s, end)
        _append(value)
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting , delimiter", s, end)
        try:
            # Fast path: step over one or two whitespace chars without a
            # regex match; fall back to the regex for longer runs.
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
    return values, end
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder.

    Decodes JSON objects to ``dict``, arrays to ``list``, strings to
    ``unicode``, numbers to ``int``/``long``/``float``, and ``true`` /
    ``false`` / ``null`` to ``True`` / ``False`` / ``None``.  As an
    extension beyond the JSON spec, ``NaN``, ``Infinity`` and
    ``-Infinity`` are accepted and mapped to the corresponding ``float``
    values.
    """

    def __init__(self, encoding=None, object_hook=None, parse_float=None,
                 parse_int=None, parse_constant=None, strict=True,
                 object_pairs_hook=None):
        """Create a decoder.

        :param encoding: codec used to interpret ``str`` input
            (``'utf-8'`` by default); ignored for ``unicode`` input.
            Only ASCII-superset encodings work.
        :param object_hook: called with each decoded object ``dict``;
            its return value replaces the dict (e.g. for JSON-RPC class
            hinting).
        :param object_pairs_hook: called with an ordered list of
            ``(key, value)`` pairs for each object literal; takes
            priority over *object_hook* (useful for
            ``collections.OrderedDict``).
        :param parse_float: replacement for ``float(num_str)``
            (e.g. ``decimal.Decimal``).
        :param parse_int: replacement for ``int(num_str)``.
        :param parse_constant: called with ``'-Infinity'``,
            ``'Infinity'`` or ``'NaN'``; may raise to reject them.
        :param strict: when ``True`` (default), unescaped control
            characters inside strings are parse errors.
        """
        self.encoding = encoding
        self.object_hook = object_hook
        self.object_pairs_hook = object_pairs_hook
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        self.memo = {}
        self.scan_once = make_scanner(self)

    def decode(self, s, _w=WHITESPACE.match):
        """Decode a complete JSON document ``s``; trailing non-whitespace
        data raises JSONDecodeError."""
        start = _w(s, 0).end()
        obj, end = self.raw_decode(s, idx=start)
        end = _w(s, end).end()
        if end != len(s):
            raise JSONDecodeError("Extra data", s, end, len(s))
        return obj

    def raw_decode(self, s, idx=0):
        """Decode one JSON document starting at ``s[idx:]``.

        Returns ``(obj, end)`` where ``end`` is the index just past the
        document; extraneous trailing data is permitted, which lets a
        caller pull several documents out of one string.
        """
        try:
            return self.scan_once(s, idx)
        except StopIteration:
            raise JSONDecodeError("No JSON object could be decoded", s, idx)
|
{
"content_hash": "62ed3aaa486d167526a7e2cc3c5853d9",
"timestamp": "",
"source": "github",
"line_count": 418,
"max_line_length": 78,
"avg_line_length": 35.995215311004785,
"alnum_prop": 0.5438654791971288,
"repo_name": "melmothx/jsonbot",
"id": "468b32f8e3a8434d4113f98b2f88bc9669945358",
"size": "15046",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jsb/contrib/simplejson/decoder.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "83165"
},
{
"name": "Python",
"bytes": "1443481"
},
{
"name": "Shell",
"bytes": "854"
}
],
"symlink_target": ""
}
|
"""
test.test_common
~~~~~~~~~~~~~~~~
Test common utility code.
"""
import logging
import mock
import os
import unittest
import calico.common as common
# Logger
log = logging.getLogger(__name__)
class TestCommon(unittest.TestCase):
    """Unit tests for the validation helpers in calico.common."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_validate_port(self):
        # Valid ports: 1..65535, as ints or numeric strings.
        for port in (3, 3, 65535, "3", "3", "65535"):
            self.assertTrue(common.validate_port(port))
        # Out-of-range values, ranges and garbage are all rejected.
        for port in (-1, 0, 65536, "-1", "0", "65536", "1-10", "blah"):
            self.assertFalse(common.validate_port(port))

    def test_validate_ip_addr(self):
        # (addr, valid as v4, valid as v6, valid with version=None)
        cases = [
            ("1.2.3.4", True, False, True),
            ("1.2.3.4.5", False, False, False),
            ("1.2.3.4/32", False, False, False),
            ("1.2.3", True, False, True),
            ("bloop", False, False, False),
            ("::", False, True, True),
            ("2001::abc", False, True, True),
            ("2001::a/64", False, False, False),
        ]
        for addr, ok_v4, ok_v6, ok_any in cases:
            check = self.assertTrue if ok_v4 else self.assertFalse
            check(common.validate_ip_addr(addr, 4))
            check = self.assertTrue if ok_v6 else self.assertFalse
            check(common.validate_ip_addr(addr, 6))
            check = self.assertTrue if ok_any else self.assertFalse
            check(common.validate_ip_addr(addr, None))

    def test_validate_cidr(self):
        # Like test_validate_ip_addr, but a /prefix suffix is allowed.
        # (cidr, valid as v4, valid as v6, valid with version=None)
        cases = [
            ("1.2.3.4", True, False, True),
            ("1.2.3.4.5", False, False, False),
            ("1.2.3.4/32", True, False, True),
            ("1.2.3", True, False, True),
            ("bloop", False, False, False),
            ("::", False, True, True),
            ("2001::abc", False, True, True),
            ("2001::a/64", False, True, True),
        ]
        for cidr, ok_v4, ok_v6, ok_any in cases:
            check = self.assertTrue if ok_v4 else self.assertFalse
            check(common.validate_cidr(cidr, 4))
            check = self.assertTrue if ok_v6 else self.assertFalse
            check(common.validate_cidr(cidr, 6))
            check = self.assertTrue if ok_any else self.assertFalse
            check(common.validate_cidr(cidr, None))
|
{
"content_hash": "bf6681bcf3eceefb45d620335eea7c53",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 69,
"avg_line_length": 44.04210526315789,
"alnum_prop": 0.643642447418738,
"repo_name": "fasaxc/felix",
"id": "08229e60f6dcff504a1f68220c7a278e48669998",
"size": "4791",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "calico/test/test_common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "414938"
},
{
"name": "Shell",
"bytes": "9186"
}
],
"symlink_target": ""
}
|
import os
import sys
import traceback
import webbrowser
import pyqrcode
import requests
import mimetypes
import json
import xml.dom.minidom
import urllib
import time
import re
import random
from traceback import format_exc
from requests.exceptions import ConnectionError, ReadTimeout
import HTMLParser
from datetime import datetime
# Login-status codes returned while polling the QR-code scan endpoint.
# NOTE(review): "UNKONWN"/"unkonwn" is a typo for "unknown", but the name is
# part of the module's public surface -- callers may reference it; renaming
# would be a breaking change, so it is preserved as-is.
UNKONWN = 'unkonwn'
SUCCESS = '200'  # scan confirmed, login succeeded
SCANED = '201'   # QR code scanned, waiting for confirmation on the phone
TIMEOUT = '408'  # poll timed out, retry
def map_username_batch(user_name):
    """Wrap a raw username into the request item shape expected by the
    webwxbatchgetcontact API."""
    item = {"EncryChatRoomId": "", "UserName": user_name}
    return item
def show_image(file_path):
    """Display an image file in a cross-platform way.

    macOS opens it in Preview, 'linux2' sends it via lrzsz's ``sz``, and
    every other platform falls back to the default web browser.

    :param file_path: path of the image file to display
    """
    # shlex.quote replaced pipes.quote in Python 3.3.
    if sys.version_info >= (3, 3):
        from shlex import quote
    else:
        from pipes import quote
    platform = sys.platform
    if platform == "darwin":
        os.system("open -a /Applications/Preview.app %s&" % quote(file_path))
    elif platform == 'linux2':
        os.system('sz %s' % file_path)
    else:
        webbrowser.open(os.path.join(os.getcwd(), 'temp', file_path))
class SafeSession(requests.Session):
    """requests.Session wrapper that retries each request up to three times
    before letting the final attempt's exception propagate."""

    def request(self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None,
                timeout=None, allow_redirects=True, proxies=None, hooks=None, stream=None, verify=None, cert=None,
                json=None):
        # First pass: up to 3 attempts, logging and swallowing any failure.
        for i in range(3):
            try:
                return super(SafeSession, self).request(method, url, params, data, headers, cookies, files, auth,
                                                        timeout,
                                                        allow_redirects, proxies, hooks, stream, verify, cert, json)
            except Exception as e:
                print e.message, traceback.format_exc()
                continue
        # After 3 failed retries, make one last attempt and raise its exception.
        try:
            return super(SafeSession, self).request(method, url, params, data, headers, cookies, files, auth,
                                                    timeout,
                                                    allow_redirects, proxies, hooks, stream, verify, cert, json)
        except Exception as e:
            raise e
class WXBot:
"""WXBot功能类"""
def __init__(self):
    """Initialize session state, contact caches and the local temp dir."""
    self.DEBUG = False
    self.uuid = ''
    self.base_uri = ''
    self.base_host = ''
    self.redirect_uri = ''
    self.uin = ''
    self.sid = ''
    self.skey = ''
    self.pass_ticket = ''
    self.device_id = 'e' + repr(random.random())[2:17]
    self.base_request = {}
    self.sync_key_str = ''
    self.sync_key = []
    self.sync_host = ''
    # Bot status, read by the web API (WxbotManage).
    # BUG FIX: 'status' and 'bot_conf' were previously assigned as plain
    # locals here, so self.status / self.bot_conf did not exist until
    # proc_msg() / load_conf() happened to set them.
    self.status = 'wait4login'
    # Bot configuration; passed in at web-API init time and may be
    # modified later (WxbotManage).
    self.bot_conf = {}
    self.batch_count = 50  # fetch contact info 50 members at a time
    self.full_user_name_list = []  # usernames collected when the contact list is too big to fetch directly
    self.wxid_list = []  # wxids collected alongside full_user_name_list
    self.cursor = 0  # cursor into full_user_name_list while batch-fetching
    self.is_big_contact = False  # True when the contact list is too large to fetch in one call
    # Local file cache directory.
    self.temp_pwd = os.path.join(os.getcwd(), 'temp')
    if not os.path.exists(self.temp_pwd):
        os.makedirs(self.temp_pwd)
    self.session = SafeSession()
    self.session.headers.update({'User-Agent': 'Mozilla/5.0 (X11; Linux i686; U;) Gecko/20070322 Kazehakase/0.4.5'})
    self.conf = {'qr': 'png'}
    self.my_account = {}  # the logged-in account
    # All related accounts: contacts, public accounts, groups, special accounts.
    self.member_list = []
    # Members of every group, {'group_id1': [member1, member2, ...], ...}
    self.group_members = {}
    # All accounts, {'group_member':{'id':{'type':'group_member', 'info':{}}, ...}, 'normal_member':{'id':{}, ...}}
    self.account_info = {'group_member': {}, 'normal_member': {}}
    self.contact_list = []  # contacts
    self.public_list = []   # public (official) accounts
    self.group_list = []    # group chats
    self.special_list = []  # special accounts
    # EncryChatRoomId per group; needed when fetching group-member avatars.
    self.encry_chat_room_id_list = []
    self.file_index = 0
#在未传入bot_conf的情况下尝试载入本地配置文件,WxbotManage使用
def load_conf(self, bot_conf):
    """Load the bot configuration (WxbotManage).

    If *bot_conf* is non-empty it is adopted directly; otherwise the local
    temp/bot_conf.json file is tried, falling back to an empty dict when
    the file is missing or malformed.
    """
    try:
        if bot_conf == {}:
            with open(os.path.join(self.temp_pwd, 'bot_conf.json')) as f:
                self.bot_conf = json.loads(f.read())
        else:
            # BUG FIX: a caller-supplied non-empty config was previously
            # discarded, leaving self.bot_conf unset.
            self.bot_conf = bot_conf
    except (IOError, OSError, ValueError):
        # Narrowed from a bare 'except': only missing/unreadable file or
        # invalid JSON should fall back to an empty config.
        self.bot_conf = {}
#保存配置文件,WxbotManage使用
def save_conf(self):
    """Persist self.bot_conf to temp/bot_conf.json (WxbotManage)."""
    conf_path = os.path.join(self.temp_pwd, 'bot_conf.json')
    with open(conf_path, 'w') as fp:
        fp.write(json.dumps(self.bot_conf))
@staticmethod
def to_unicode(string, encoding='utf-8'):
    """Convert a byte string to unicode (Python 2 only).

    :param string: the string to convert
    :param encoding: codec used to decode byte strings
    :return: the resulting unicode string
    :raises Exception: if *string* is neither ``str`` nor ``unicode``
    """
    if isinstance(string, str):
        return string.decode(encoding)
    elif isinstance(string, unicode):
        return string
    else:
        raise Exception('Unknown Type')
def get_contact(self):
    """Fetch all accounts related to the current user (contacts, public
    accounts, group chats, special accounts) and classify them into the
    per-category lists and self.account_info.

    :return: False when the initial HTTP call fails (typically because the
        contact list is too large -- the caller then falls back to
        get_big_contact()); True on success.
    """
    dic_list = []
    url = self.base_uri + '/webwxgetcontact?seq=0&pass_ticket=%s&skey=%s&r=%s' \
        % (self.pass_ticket, self.skey, int(time.time()))
    # With too many contacts this request fails outright.
    try:
        r = self.session.post(url, data='{}', timeout=180)
    except Exception as e:
        return False
    r.encoding = 'utf-8'
    dic = json.loads(r.text)
    dic_list.append(dic)
    # A non-zero Seq means the server has more pages of contacts.
    while int(dic["Seq"]) != 0:
        print "[INFO] Geting contacts. Get %s contacts for now" % dic["MemberCount"]
        url = self.base_uri + '/webwxgetcontact?seq=%s&pass_ticket=%s&skey=%s&r=%s' \
            % (dic["Seq"], self.pass_ticket, self.skey, int(time.time()))
        r = self.session.post(url, data='{}', timeout=180)
        r.encoding = 'utf-8'
        dic = json.loads(r.text)
        dic_list.append(dic)
    if self.DEBUG:
        with open(os.path.join(self.temp_pwd, 'contacts.json'), 'w') as f:
            f.write(json.dumps(dic_list))
    self.member_list = []
    for dic in dic_list:
        self.member_list.extend(dic['MemberList'])
    # Well-known built-in account ids that are treated as 'special'.
    special_users = ['newsapp', 'fmessage', 'filehelper', 'weibo', 'qqmail',
                     'fmessage', 'tmessage', 'qmessage', 'qqsync', 'floatbottle',
                     'lbsapp', 'shakeapp', 'medianote', 'qqfriend', 'readerapp',
                     'blogapp', 'facebookapp', 'masssendapp', 'meishiapp',
                     'feedsapp', 'voip', 'blogappweixin', 'weixin', 'brandsessionholder',
                     'weixinreminder', 'wxid_novlwrv3lqwv11', 'gh_22b87fa7cb3c',
                     'officialaccounts', 'notification_messages', 'wxid_novlwrv3lqwv11',
                     'gh_22b87fa7cb3c', 'wxitil', 'userexperience_alarm', 'notification_messages']
    self.contact_list = []
    self.public_list = []
    self.special_list = []
    self.group_list = []
    for contact in self.member_list:
        if contact['VerifyFlag'] & 8 != 0:  # public (official) account
            self.public_list.append(contact)
            self.account_info['normal_member'][contact['UserName']] = {'type': 'public', 'info': contact}
        elif contact['UserName'] in special_users:  # special account
            self.special_list.append(contact)
            self.account_info['normal_member'][contact['UserName']] = {'type': 'special', 'info': contact}
        elif contact['UserName'].find('@@') != -1:  # group chat
            self.group_list.append(contact)
            self.account_info['normal_member'][contact['UserName']] = {'type': 'group', 'info': contact}
        elif contact['UserName'] == self.my_account['UserName']:  # self
            self.account_info['normal_member'][contact['UserName']] = {'type': 'self', 'info': contact}
        else:
            self.contact_list.append(contact)
            self.account_info['normal_member'][contact['UserName']] = {'type': 'contact', 'info': contact}
    self.batch_get_group_members()
    for group in self.group_members:
        for member in self.group_members[group]:
            # NOTE(review): this tests the top-level account_info keys
            # ('group_member'/'normal_member'), so it is effectively always
            # true -- looks like it was meant to check
            # self.account_info['normal_member']; confirm before changing.
            if member['UserName'] not in self.account_info:
                self.account_info['group_member'][member['UserName']] = \
                    {'type': 'group_member', 'info': member, 'group': group}
    if self.DEBUG:
        with open(os.path.join(self.temp_pwd, 'contact_list.json'), 'w') as f:
            f.write(json.dumps(self.contact_list))
        with open(os.path.join(self.temp_pwd, 'special_list.json'), 'w') as f:
            f.write(json.dumps(self.special_list))
        with open(os.path.join(self.temp_pwd, 'group_list.json'), 'w') as f:
            f.write(json.dumps(self.group_list))
        with open(os.path.join(self.temp_pwd, 'public_list.json'), 'w') as f:
            f.write(json.dumps(self.public_list))
        with open(os.path.join(self.temp_pwd, 'member_list.json'), 'w') as f:
            f.write(json.dumps(self.member_list))
        with open(os.path.join(self.temp_pwd, 'group_users.json'), 'w') as f:
            f.write(json.dumps(self.group_members))
        with open(os.path.join(self.temp_pwd, 'account_info.json'), 'w') as f:
            f.write(json.dumps(self.account_info))
    return True
def get_big_contact(self):
    """Fetch the full contact list in batches of self.batch_count members.

    Used when the list is too large for webwxgetcontact: iterates over
    self.full_user_name_list (collected from the init message) and
    classifies every returned account.  Always returns True.
    """
    total_len = len(self.full_user_name_list)
    user_info_list = []
    # Pull 50 members at a time -- groups, public accounts and friends alike.
    while self.cursor < total_len:
        cur_batch = self.full_user_name_list[self.cursor:(self.cursor + self.batch_count)]
        self.cursor += self.batch_count
        cur_batch = map(map_username_batch, cur_batch)
        user_info_list += self.batch_get_contact(cur_batch)
        print "[INFO] Get batch contacts"
    self.member_list = user_info_list
    # Well-known built-in account ids that are treated as 'special'.
    special_users = ['newsapp', 'filehelper', 'weibo', 'qqmail',
                     'fmessage', 'tmessage', 'qmessage', 'qqsync', 'floatbottle',
                     'lbsapp', 'shakeapp', 'medianote', 'qqfriend', 'readerapp',
                     'blogapp', 'facebookapp', 'masssendapp', 'meishiapp',
                     'feedsapp', 'voip', 'blogappweixin', 'weixin', 'brandsessionholder',
                     'weixinreminder', 'wxid_novlwrv3lqwv11',
                     'officialaccounts',
                     'gh_22b87fa7cb3c', 'wxitil', 'userexperience_alarm', 'notification_messages', 'notifymessage']
    self.contact_list = []
    self.public_list = []
    self.special_list = []
    self.group_list = []
    for i, contact in enumerate(self.member_list):
        if contact['VerifyFlag'] & 8 != 0:  # public (official) account
            self.public_list.append(contact)
            self.account_info['normal_member'][contact['UserName']] = {'type': 'public', 'info': contact}
        elif contact['UserName'] in special_users or self.wxid_list[i] in special_users:  # special account
            self.special_list.append(contact)
            self.account_info['normal_member'][contact['UserName']] = {'type': 'special', 'info': contact}
        elif contact['UserName'].find('@@') != -1:  # group chat
            self.group_list.append(contact)
            self.account_info['normal_member'][contact['UserName']] = {'type': 'group', 'info': contact}
        elif contact['UserName'] == self.my_account['UserName']:  # self
            self.account_info['normal_member'][contact['UserName']] = {'type': 'self', 'info': contact}
        else:
            self.contact_list.append(contact)
            self.account_info['normal_member'][contact['UserName']] = {'type': 'contact', 'info': contact}
    # Group member lists come back embedded in the contact records here,
    # so build the caches directly instead of calling batch_get_group_members().
    group_members = {}
    encry_chat_room_id = {}
    for group in self.group_list:
        gid = group['UserName']
        members = group['MemberList']
        group_members[gid] = members
        encry_chat_room_id[gid] = group['EncryChatRoomId']
    self.group_members = group_members
    self.encry_chat_room_id_list = encry_chat_room_id
    for group in self.group_members:
        for member in self.group_members[group]:
            # NOTE(review): same oddity as get_contact() -- this checks the
            # top-level account_info keys, so it is effectively always true.
            if member['UserName'] not in self.account_info:
                self.account_info['group_member'][member['UserName']] = \
                    {'type': 'group_member', 'info': member, 'group': group}
    if self.DEBUG:
        with open(os.path.join(self.temp_pwd, 'contact_list.json'), 'w') as f:
            f.write(json.dumps(self.contact_list))
        with open(os.path.join(self.temp_pwd, 'special_list.json'), 'w') as f:
            f.write(json.dumps(self.special_list))
        with open(os.path.join(self.temp_pwd, 'group_list.json'), 'w') as f:
            f.write(json.dumps(self.group_list))
        with open(os.path.join(self.temp_pwd, 'public_list.json'), 'w') as f:
            f.write(json.dumps(self.public_list))
        with open(os.path.join(self.temp_pwd, 'member_list.json'), 'w') as f:
            f.write(json.dumps(self.member_list))
        with open(os.path.join(self.temp_pwd, 'group_users.json'), 'w') as f:
            f.write(json.dumps(self.group_members))
        with open(os.path.join(self.temp_pwd, 'account_info.json'), 'w') as f:
            f.write(json.dumps(self.account_info))
    print '[INFO] Get %d contacts' % len(self.contact_list)
    print '[INFO] Start to process messages .'
    return True
def batch_get_contact(self, cur_batch):
    """Fetch detailed info for one batch of members.

    :param cur_batch: list of {"UserName": ..., "EncryChatRoomId": ""} items
    :return: the 'ContactList' list from the webwxbatchgetcontact response
    """
    url = self.base_uri + '/webwxbatchgetcontact?type=ex&r=%s&pass_ticket=%s' % (int(time.time()), self.pass_ticket)
    payload = {
        'BaseRequest': self.base_request,
        "Count": len(cur_batch),
        "List": cur_batch
    }
    response = self.session.post(url, data=json.dumps(payload))
    response.encoding = 'utf-8'
    body = json.loads(response.text)
    return body['ContactList']
def batch_get_group_members(self):
    """Fetch the member list of every known group chat in one batch call,
    populating self.group_members and self.encry_chat_room_id_list."""
    url = self.base_uri + '/webwxbatchgetcontact?type=ex&r=%s&pass_ticket=%s' % (int(time.time()), self.pass_ticket)
    payload = {
        'BaseRequest': self.base_request,
        "Count": len(self.group_list),
        "List": [{"UserName": group['UserName'], "EncryChatRoomId": ""} for group in self.group_list]
    }
    response = self.session.post(url, data=json.dumps(payload))
    response.encoding = 'utf-8'
    parsed = json.loads(response.text)
    members_by_gid = {}
    room_ids = {}
    for entry in parsed['ContactList']:
        gid = entry['UserName']
        members_by_gid[gid] = entry['MemberList']
        room_ids[gid] = entry['EncryChatRoomId']
    self.group_members = members_by_gid
    self.encry_chat_room_id_list = room_ids
def get_group_member_name(self, gid, uid):
    """Look up the names of member *uid* inside group *gid*.

    :param gid: group id
    :param uid: group-member id
    :return: a dict containing whichever of 'remark_name', 'nickname' and
        'display_name' are set, or None if the group or member is unknown.
    """
    members = self.group_members.get(gid)
    if members is None:
        return None
    for entry in members:
        if entry['UserName'] != uid:
            continue
        names = {}
        for src_key, dst_key in (('RemarkName', 'remark_name'),
                                 ('NickName', 'nickname'),
                                 ('DisplayName', 'display_name')):
            if entry.get(src_key):
                names[dst_key] = entry[src_key]
        return names
    return None
def get_contact_info(self, uid):
    """Return the cached 'normal_member' record for *uid*, or None."""
    normal_members = self.account_info['normal_member']
    return normal_members.get(uid)
def get_group_member_info(self, uid):
    """Return the cached 'group_member' record for *uid*, or None."""
    group_members = self.account_info['group_member']
    return group_members.get(uid)
def get_contact_name(self, uid):
    """Return the known names of contact *uid*.

    :return: a dict with whichever of 'remark_name', 'nickname' and
        'display_name' are set, or None when the contact is unknown or
        has no names at all.
    """
    record = self.get_contact_info(uid)
    if record is None:
        return None
    data = record['info']
    name = {}
    for src_key, dst_key in (('RemarkName', 'remark_name'),
                             ('NickName', 'nickname'),
                             ('DisplayName', 'display_name')):
        if data.get(src_key):
            name[dst_key] = data[src_key]
    return name if name else None
@staticmethod
def get_contact_prefer_name(name):
    """Pick the best display string for a contact:
    remark name > nickname > display name."""
    if name is None:
        return None
    for key in ('remark_name', 'nickname', 'display_name'):
        if key in name:
            return name[key]
    return None
@staticmethod
def get_group_member_prefer_name(name):
    """Pick the best display string for a group member:
    remark name > in-group display name > nickname (note the order
    differs from get_contact_prefer_name)."""
    if name is None:
        return None
    for key in ('remark_name', 'display_name', 'nickname'):
        if key in name:
            return name[key]
    return None
def get_user_type(self, wx_user_id):
    """Classify the relationship of *wx_user_id* to this account.

    :param wx_user_id: account id
    :return: one of 'contact', 'public', 'special', 'group',
        'group_member' or 'unknown'
    """
    categories = (('contact', self.contact_list),
                  ('public', self.public_list),
                  ('special', self.special_list),
                  ('group', self.group_list))
    for label, accounts in categories:
        for account in accounts:
            if wx_user_id == account['UserName']:
                return label
    for members in self.group_members.values():
        for member in members:
            if member['UserName'] == wx_user_id:
                return 'group_member'
    return 'unknown'
def is_contact(self, uid):
    """True when *uid* appears in the personal contact list."""
    return any(account['UserName'] == uid for account in self.contact_list)
def is_public(self, uid):
    """True when *uid* appears in the public (official) account list."""
    return any(account['UserName'] == uid for account in self.public_list)
def is_special(self, uid):
    """True when *uid* appears in the special account list."""
    return any(account['UserName'] == uid for account in self.special_list)
def handle_msg_all(self, msg):
    """Handle every message; subclasses should override this hook.

    msg:
        msg_id -> message id
        msg_type_id -> message type id
        user -> id of the account that sent the message
        content -> message content
    :param msg: the received message
    """
    pass
@staticmethod
def proc_at_info(msg):
    """Split a group message containing @-mentions (separated by U+2005).

    :param msg: raw message text
    :return: 3-tuple of (full text with mentions rendered as '@name ',
        text with mentions stripped, list of {'type': 'str'|'at',
        'value': ...} segments)
    """
    if not msg:
        # BUG FIX: this path previously returned a 2-tuple while every
        # other path returns a 3-tuple; callers index [0], [1] and [2].
        return '', '', []
    segs = msg.split(u'\u2005')
    str_msg_all = ''
    str_msg = ''
    infos = []
    if len(segs) > 1:
        for i in range(0, len(segs) - 1):
            segs[i] += u'\u2005'
            # BUG FIX: re.search() returns None when the segment contains
            # no '@'; calling .group() on it directly raised
            # AttributeError.  Fall through to the plain-text branch.
            match = re.search(u'@.*\u2005', segs[i])
            pm = match.group() if match else None
            if pm:
                name = pm[1:-1]
                string = segs[i].replace(pm, '')
                str_msg_all += string + '@' + name + ' '
                str_msg += string
                if string:
                    infos.append({'type': 'str', 'value': string})
                infos.append({'type': 'at', 'value': name})
            else:
                infos.append({'type': 'str', 'value': segs[i]})
                str_msg_all += segs[i]
                str_msg += segs[i]
        str_msg_all += segs[-1]
        str_msg += segs[-1]
        infos.append({'type': 'str', 'value': segs[-1]})
    else:
        infos.append({'type': 'str', 'value': segs[-1]})
        str_msg_all = msg
        str_msg = msg
    return str_msg_all.replace(u'\u2005', ''), str_msg.replace(u'\u2005', ''), infos
def extract_msg_content(self, msg_type_id, msg):
    """Parse a raw message body into a normalized msg_content dict.

    content_type_id:
        0 -> Text
        1 -> Location
        3 -> Image
        4 -> Voice
        5 -> Recommend
        6 -> Animation
        7 -> Share
        8 -> Video
        9 -> VideoCall
        10 -> Redraw
        11 -> Empty
        99 -> Unknown
    :param msg_type_id: message type id (see handle_msg)
    :param msg: raw message structure
    :return: the parsed message dict
    """
    mtype = msg['MsgType']
    content = HTMLParser.HTMLParser().unescape(msg['Content'])
    msg_id = msg['MsgId']
    msg_content = {}
    if msg_type_id == 0:
        return {'type': 11, 'data': ''}
    elif msg_type_id == 2:  # File Helper
        return {'type': 0, 'data': content.replace('<br/>', '\n')}
    elif msg_type_id == 3:  # group chat: content is '<uid>:<br/>actual text'
        sp = content.find('<br/>')
        uid = content[:sp]
        content = content[sp:]
        content = content.replace('<br/>', '')
        uid = uid[:-1]  # strip the trailing ':' from the sender id
        name = self.get_contact_prefer_name(self.get_contact_name(uid))
        if not name:
            name = self.get_group_member_prefer_name(self.get_group_member_name(msg['FromUserName'], uid))
        if not name:
            name = 'unknown'
        msg_content['user'] = {'id': uid, 'name': name}
    else:  # Self, Contact, Special, Public, Unknown
        pass
    msg_prefix = (msg_content['user']['name'] + ':') if 'user' in msg_content else ''
    if mtype == 1:
        # A redirectforward URL marks a shared location rather than text.
        if content.find('http://weixin.qq.com/cgi-bin/redirectforward?args=') != -1:
            r = self.session.get(content)
            r.encoding = 'gbk'
            data = r.text
            pos = self.search_content('title', data, 'xml')
            msg_content['type'] = 1
            msg_content['data'] = pos
            msg_content['detail'] = data
            if self.DEBUG:
                print '    %s[Location] %s ' % (msg_prefix, pos)
        else:
            msg_content['type'] = 0
            if msg_type_id == 3 or (msg_type_id == 1 and msg['ToUserName'][:2] == '@@'):  # Group text message
                msg_infos = self.proc_at_info(content)
                str_msg_all = msg_infos[0]
                str_msg = msg_infos[1]
                detail = msg_infos[2]
                msg_content['data'] = str_msg_all
                msg_content['detail'] = detail
                msg_content['desc'] = str_msg
            else:
                msg_content['data'] = content
            if self.DEBUG:
                try:
                    print '    %s[Text] %s' % (msg_prefix, msg_content['data'])
                except UnicodeEncodeError:
                    print '    %s[Text] (illegal text).' % msg_prefix
    elif mtype == 3:
        msg_content['type'] = 3
        msg_content['data'] = self.get_msg_img_url(msg_id)
        msg_content['img'] = self.session.get(msg_content['data']).content.encode('hex')
        if self.DEBUG:
            image = self.get_msg_img(msg_id)
            print '    %s[Image] %s' % (msg_prefix, image)
    elif mtype == 34:
        msg_content['type'] = 4
        msg_content['data'] = self.get_voice_url(msg_id)
        msg_content['voice'] = self.session.get(msg_content['data']).content.encode('hex')
        if self.DEBUG:
            voice = self.get_voice(msg_id)
            print '    %s[Voice] %s' % (msg_prefix, voice)
    elif mtype == 37:  # friend-add notification
        msg_content['type'] = 37
        msg_content['data'] = msg['RecommendInfo']
        if self.DEBUG:
            print '    %s[useradd] %s' % (msg_prefix, msg['RecommendInfo']['NickName'])
    elif mtype == 42:  # contact card
        msg_content['type'] = 5
        info = msg['RecommendInfo']
        msg_content['data'] = {'nickname': info['NickName'],
                               'alias': info['Alias'],
                               'province': info['Province'],
                               'city': info['City'],
                               'gender': ['unknown', 'male', 'female'][info['Sex']]}
        if self.DEBUG:
            print '    %s[Recommend]' % msg_prefix
            print '    -----------------------------'
            print '    | NickName: %s' % info['NickName']
            print '    | Alias: %s' % info['Alias']
            print '    | Local: %s %s' % (info['Province'], info['City'])
            print '    | Gender: %s' % ['unknown', 'male', 'female'][info['Sex']]
            print '    -----------------------------'
    elif mtype == 47:  # animated sticker
        msg_content['type'] = 6
        msg_content['data'] = self.search_content('cdnurl', content)
        if self.DEBUG:
            print '    %s[Animation] %s' % (msg_prefix, msg_content['data'])
    elif mtype == 49:  # app message (share)
        msg_content['type'] = 7
        if msg['AppMsgType'] == 3:
            app_msg_type = 'music'
        elif msg['AppMsgType'] == 5:
            app_msg_type = 'link'
        elif msg['AppMsgType'] == 7:
            app_msg_type = 'weibo'
        else:
            app_msg_type = 'unknown'
        msg_content['data'] = {'type': app_msg_type,
                               'title': msg['FileName'],
                               'desc': self.search_content('des', content, 'xml'),
                               'url': msg['Url'],
                               'from': self.search_content('appname', content, 'xml'),
                               # Some public accounts send 3-4 links with one big
                               # image in a single message; 'Url' only carries the
                               # first link, Content holds them all.
                               'content': msg.get('Content')
                               }
        if self.DEBUG:
            print '    %s[Share] %s' % (msg_prefix, app_msg_type)
            print '    --------------------------'
            print '    | title: %s' % msg['FileName']
            print '    | desc: %s' % self.search_content('des', content, 'xml')
            print '    | link: %s' % msg['Url']
            print '    | from: %s' % self.search_content('appname', content, 'xml')
            # NOTE(review): this reads msg.get('content') (lowercase) while the
            # data dict above uses 'Content' -- likely always "unknown" here;
            # confirm against the raw message schema before changing.
            print '    | content: %s' % (msg.get('content')[:20] if msg.get('content') else "unknown")
            print '    --------------------------'
    elif mtype == 62:
        msg_content['type'] = 8
        msg_content['data'] = content
        if self.DEBUG:
            print '    %s[Video] Please check on mobiles' % msg_prefix
    elif mtype == 53:
        msg_content['type'] = 9
        msg_content['data'] = content
        if self.DEBUG:
            print '    %s[Video Call]' % msg_prefix
    elif mtype == 10002:  # message recall
        msg_content['type'] = 10
        msg_content['data'] = content
        if self.DEBUG:
            print '    %s[Redraw]' % msg_prefix
    elif mtype == 10000:  # unknown, maybe red packet, or group invite
        msg_content['type'] = 12
        msg_content['data'] = msg['Content']
        if self.DEBUG:
            print '    [Unknown]'
    elif mtype == 43:
        msg_content['type'] = 13
        msg_content['data'] = self.get_video_url(msg_id)
        if self.DEBUG:
            print '    %s[video] %s' % (msg_prefix, msg_content['data'])
    else:
        msg_content['type'] = 99
        msg_content['data'] = content
        if self.DEBUG:
            print '    %s[Unknown]' % msg_prefix
    return msg_content
def handle_msg(self, r):
    """Internal dispatcher for raw WeChat sync payloads.

    Classifies each message's sender, extracts its content via
    extract_msg_content() and forwards a normalized dict to
    handle_msg_all().

    msg_type_id:
        0 -> Init
        1 -> Self
        2 -> FileHelper
        3 -> Group
        4 -> Contact
        5 -> Public
        6 -> Special
        99 -> Unknown
    :param r: raw sync response containing 'AddMsgList'
    """
    for msg in r['AddMsgList']:
        user = {'id': msg['FromUserName'], 'name': 'unknown'}
        if msg['MsgType'] == 51 and msg['StatusNotifyCode'] == 4:  # init message
            msg_type_id = 0
            user['name'] = 'system'
            # The init message carries every contact's username and wxid.
            # It arrives three times; only the first is processed.
            if self.is_big_contact and len(self.full_user_name_list) == 0:
                self.full_user_name_list = msg['StatusNotifyUserName'].split(",")
                self.wxid_list = re.search(r"username>(.*?)</username", msg["Content"]).group(1).split(",")
                with open(os.path.join(self.temp_pwd, 'UserName.txt'), 'w') as f:
                    f.write(msg['StatusNotifyUserName'])
                with open(os.path.join(self.temp_pwd, 'wxid.txt'), 'w') as f:
                    f.write(json.dumps(self.wxid_list))
                print "[INFO] Contact list is too big. Now start to fetch member list ."
                #self.get_big_contact()
        elif msg['MsgType'] == 37:  # friend request
            msg_type_id = 37
            pass
            # content = msg['Content']
            # username = content[content.index('fromusername='): content.index('encryptusername')]
            # username = username[username.index('"') + 1: username.rindex('"')]
            # print u'[Friend Request]'
            # print u'  Nickname: ' + msg['RecommendInfo']['NickName']
            # print u'  Extra message: ' + msg['RecommendInfo']['Content']
            # # print u'Ticket:'+msg['RecommendInfo']['Ticket']  # Ticket is required when accepting the request
            # print u'  WeChat id: ' + username  # when unset, Tencent auto-generates an id that cannot be found via search
        elif msg['FromUserName'] == self.my_account['UserName']:  # Self
            msg_type_id = 1
            user['name'] = 'self'
        elif msg['ToUserName'] == 'filehelper':  # File Helper
            msg_type_id = 2
            user['name'] = 'file_helper'
        elif msg['FromUserName'][:2] == '@@':  # Group
            msg_type_id = 3
            user['name'] = self.get_contact_prefer_name(self.get_contact_name(user['id']))
        elif self.is_contact(msg['FromUserName']):  # Contact
            msg_type_id = 4
            user['name'] = self.get_contact_prefer_name(self.get_contact_name(user['id']))
        elif self.is_public(msg['FromUserName']):  # Public
            msg_type_id = 5
            user['name'] = self.get_contact_prefer_name(self.get_contact_name(user['id']))
        elif self.is_special(msg['FromUserName']):  # Special
            msg_type_id = 6
            user['name'] = self.get_contact_prefer_name(self.get_contact_name(user['id']))
        else:
            msg_type_id = 99
            user['name'] = 'unknown'
        if not user['name']:
            user['name'] = 'unknown'
        user['name'] = HTMLParser.HTMLParser().unescape(user['name'])
        if self.DEBUG and msg_type_id != 0:
            print u'[MSG] %s:' % user['name']
        content = self.extract_msg_content(msg_type_id, msg)
        message = {'msg_type_id': msg_type_id,
                   'msg_id': msg['MsgId'],
                   'content': content,
                   'to_user_id': msg['ToUserName'],
                   'user': user}
        self.handle_msg_all(message)
def schedule(self):
    """
    Hook for periodic/background work; override in a subclass if needed.

    Called between message-processing iterations of proc_msg(); do not
    block in it for a long time.
    """
    pass
def proc_msg(self):
    """
    Main receive loop: poll synccheck, pull new data with sync(), dispatch
    messages via handle_msg(), and call schedule() between polls.

    Exits when the session is logged out (retcode 1100/1101) or when
    self.status is switched to 'wait4loginout' by WxbotManage.
    """
    self.test_sync_check()
    self.status = 'loginsuccess'  # used by WxbotManage
    while True:
        if self.status == 'wait4loginout':  # used by WxbotManage
            return
        check_time = time.time()
        try:
            [retcode, selector] = self.sync_check()
            # print '[DEBUG] sync_check:', retcode, selector
            if retcode == '1100':  # logged out from the WeChat client
                break
            elif retcode == '1101':  # web WeChat was opened on another device
                break
            elif retcode == '0':
                if selector == '2':  # new message
                    r = self.sync()
                    if r is not None:
                        self.handle_msg(r)
                elif selector == '3':  # unknown
                    r = self.sync()
                    if r is not None:
                        self.handle_msg(r)
                elif selector == '4':  # contact list updated
                    r = self.sync()
                    if r is not None:
                        self.get_contact()
                elif selector == '6':  # possibly a red packet
                    r = self.sync()
                    if r is not None:
                        self.handle_msg(r)
                elif selector == '7':  # WeChat was operated on the phone
                    r = self.sync()
                    if r is not None:
                        self.handle_msg(r)
                elif selector == '0':  # no event
                    pass
                else:
                    print datetime.now().strftime('%Y-%m-%d %H:%M:%S'), ' [DEBUG] sync_check:', retcode, selector
                    r = self.sync()
                    if r is not None:
                        self.handle_msg(r)
            else:
                print datetime.now().strftime('%Y-%m-%d %H:%M:%S'), ' [DEBUG] sync_check:', retcode, selector
                time.sleep(10)
            self.schedule()
        except:
            print '[ERROR] Except in proc_msg'
            print format_exc()
        # Throttle polling to roughly once per second.
        check_time = time.time() - check_time
        if check_time < 0.8:
            time.sleep(1 - check_time)
def apply_useradd_requests(self, RecommendInfo):
    """
    Accept a pending friend request (Opcode 3).

    :param RecommendInfo: the 'RecommendInfo' dict from a friend-request
        message; its 'UserName' and 'Ticket' fields are used
    :return: True when the server replies Ret == 0, False otherwise
    """
    endpoint = self.base_uri + '/webwxverifyuser?r=' + str(int(time.time())) + '&lang=zh_CN'
    payload = {
        "BaseRequest": self.base_request,
        "Opcode": 3,
        "VerifyUserListSize": 1,
        "VerifyUserList": [{
            "Value": RecommendInfo['UserName'],
            "VerifyUserTicket": RecommendInfo['Ticket'],
        }],
        "VerifyContent": "",
        "SceneListCount": 1,
        "SceneList": [33],
        "skey": self.skey,
    }
    body = json.dumps(payload, ensure_ascii=False).encode('utf8')
    try:
        resp = self.session.post(
            endpoint, data=body,
            headers={'content-type': 'application/json; charset=UTF-8'})
    except (ConnectionError, ReadTimeout):
        return False
    return resp.json()['BaseResponse']['Ret'] == 0
def add_groupuser_to_friend_by_uid(self, uid, VerifyContent):
    """
    Proactively greet a group member by submitting a friend request
    (Opcode 2).

    :param uid: the group member's uid
    :param VerifyContent: greeting text sent with the request
    :return: True if already a contact or the request succeeds

    WARNING (from the original author, repeated three times): use this
    endpoint with extreme caution -- you bear the risk of an account ban!
    """
    if self.is_contact(uid):
        return True
    url = self.base_uri + '/webwxverifyuser?r=' + str(int(time.time())) + '&lang=zh_CN'
    params = {
        "BaseRequest": self.base_request,
        "Opcode": 2,
        "VerifyUserListSize": 1,
        "VerifyUserList": [
            {
                "Value": uid,
                "VerifyUserTicket": ""
            }
        ],
        "VerifyContent": VerifyContent,
        "SceneListCount": 1,
        "SceneList": [
            33
        ],
        "skey": self.skey
    }
    headers = {'content-type': 'application/json; charset=UTF-8'}
    data = json.dumps(params, ensure_ascii=False).encode('utf8')
    try:
        r = self.session.post(url, data=data, headers=headers)
    except (ConnectionError, ReadTimeout):
        return False
    dic = r.json()
    return dic['BaseResponse']['Ret'] == 0
def add_friend_to_group(self, uid, group_name):
    """
    Add a friend (uid) into the group named group_name.

    Uses the `addmember` API for groups of <= 100 members and the
    `invitemember` API for larger groups.
    :return: True if uid is already a member or the call succeeds.
    """
    gid = ''
    # Resolve the group id from its name; a group that was not saved to
    # the contact book cannot be resolved here.
    for group in self.group_list:
        if group['NickName'] == group_name:
            gid = group['UserName']
    if gid == '':
        return False
    # The member count decides which invite API must be used.
    group_num = len(self.group_members[gid])
    print '[DEBUG] group_name:%s group_num:%s' % (group_name, group_num)
    # Check whether uid is already in the group.
    for user in self.group_members[gid]:
        if user['UserName'] == uid:
            # already a member, nothing to do
            return True
    if group_num <= 100:
        url = self.base_uri + '/webwxupdatechatroom?fun=addmember&pass_ticket=%s' % self.pass_ticket
        params = {
            "AddMemberList": uid,
            "ChatRoomName": gid,
            "BaseRequest": self.base_request
        }
    else:
        url = self.base_uri + '/webwxupdatechatroom?fun=invitemember'
        params = {
            "InviteMemberList": uid,
            "ChatRoomName": gid,
            "BaseRequest": self.base_request
        }
    headers = {'content-type': 'application/json; charset=UTF-8'}
    data = json.dumps(params, ensure_ascii=False).encode('utf8')
    try:
        r = self.session.post(url, data=data, headers=headers)
    except (ConnectionError, ReadTimeout):
        return False
    dic = r.json()
    return dic['BaseResponse']['Ret'] == 0
def invite_friend_to_group(self, uid, group_name):
    """
    Invite a friend into a group, addressed by group name.

    Large groups require the invite API: try add_friend_to_group() first
    and fall back to this method when it fails (Ret=1).
    :return: True if uid is already a member or the invite succeeds.
    """
    # Resolve the group id from its nickname; groups that were not saved
    # to the contact book cannot be resolved.
    gid = ''
    for grp in self.group_list:
        if grp['NickName'] == group_name:
            gid = grp['UserName']
    if not gid:
        return False
    # Already a member -> nothing to do.
    for member in self.group_members[gid]:
        if member['UserName'] == uid:
            return True
    endpoint = self.base_uri + '/webwxupdatechatroom?fun=invitemember&pass_ticket=%s' % self.pass_ticket
    body = json.dumps({
        "InviteMemberList": uid,
        "ChatRoomName": gid,
        "BaseRequest": self.base_request,
    }, ensure_ascii=False).encode('utf8')
    try:
        resp = self.session.post(
            endpoint, data=body,
            headers={'content-type': 'application/json; charset=UTF-8'})
    except (ConnectionError, ReadTimeout):
        return False
    return resp.json()['BaseResponse']['Ret'] == 0
def delete_user_from_group(self, uname, gid):
    """
    Kick a member (matched by nickname) out of group gid.

    Only a group administrator has permission to do this.
    :return: True when the member was found and the call succeeds.
    """
    target = ""
    for member in self.group_members[gid]:
        if member['NickName'] == uname:
            target = member['UserName']
    if not target:
        return False
    endpoint = self.base_uri + '/webwxupdatechatroom?fun=delmember&pass_ticket=%s' % self.pass_ticket
    body = json.dumps({
        "DelMemberList": target,
        "ChatRoomName": gid,
        "BaseRequest": self.base_request,
    }, ensure_ascii=False).encode('utf8')
    try:
        resp = self.session.post(
            endpoint, data=body,
            headers={'content-type': 'application/json; charset=UTF-8'})
    except (ConnectionError, ReadTimeout):
        return False
    return resp.json()['BaseResponse']['Ret'] == 0
def set_group_name(self, gid, gname):
    """
    Rename the group chat gid to gname.

    :return: True when the server replies Ret == 0.
    """
    endpoint = self.base_uri + '/webwxupdatechatroom?fun=modtopic&pass_ticket=%s' % self.pass_ticket
    body = json.dumps({
        "NewTopic": gname,
        "ChatRoomName": gid,
        "BaseRequest": self.base_request,
    }, ensure_ascii=False).encode('utf8')
    try:
        resp = self.session.post(
            endpoint, data=body,
            headers={'content-type': 'application/json; charset=UTF-8'})
    except (ConnectionError, ReadTimeout):
        return False
    return resp.json()['BaseResponse']['Ret'] == 0
def send_msg_by_uid(self, word, dst='filehelper'):
    """
    Send a plain text message (Type 1) to the user id `dst`.

    :param word: message text (converted to unicode first)
    :param dst: target user id; defaults to the file helper
    :return: True when the server replies Ret == 0
    """
    url = self.base_uri + '/webwxsendmsg?pass_ticket=%s' % self.pass_ticket
    # Client-side message id: millisecond timestamp + 5 random digits.
    msg_id = str(int(time.time() * 1000)) + str(random.random())[:5].replace('.', '')
    word = self.to_unicode(word)
    params = {
        'BaseRequest': self.base_request,
        'Msg': {
            "Type": 1,
            "Content": word,
            "FromUserName": self.my_account['UserName'],
            "ToUserName": dst,
            "LocalID": msg_id,
            "ClientMsgId": msg_id
        }
    }
    headers = {'content-type': 'application/json; charset=UTF-8'}
    data = json.dumps(params, ensure_ascii=False).encode('utf8')
    try:
        r = self.session.post(url, data=data, headers=headers)
    except (ConnectionError, ReadTimeout):
        return False
    dic = r.json()
    return dic['BaseResponse']['Ret'] == 0
def upload_media(self, fpath, is_img=False):
    """
    Upload a local file to the WeChat media server.

    :param fpath: path of the file to upload
    :param is_img: mark the upload as a picture ('pic') instead of a
        document ('doc')
    :return: the server-side MediaId string, or None on failure
    """
    if not os.path.exists(fpath):
        print '[ERROR] File not exists.'
        return None
    url_1 = 'https://file.' + self.base_host + '/cgi-bin/mmwebwx-bin/webwxuploadmedia?f=json'
    url_2 = 'https://file2.' + self.base_host + '/cgi-bin/mmwebwx-bin/webwxuploadmedia?f=json'
    flen = str(os.path.getsize(fpath))
    ftype = mimetypes.guess_type(fpath)[0] or 'application/octet-stream'
    # Multipart form mirroring what the web client sends.
    files = {
        'id': (None, 'WU_FILE_%s' % str(self.file_index)),
        'name': (None, os.path.basename(fpath)),
        'type': (None, ftype),
        'lastModifiedDate': (None, time.strftime('%m/%d/%Y, %H:%M:%S GMT+0800 (CST)')),
        'size': (None, flen),
        'mediatype': (None, 'pic' if is_img else 'doc'),
        'uploadmediarequest': (None, json.dumps({
            'BaseRequest': self.base_request,
            'ClientMediaId': int(time.time()),
            'TotalLen': flen,
            'StartPos': 0,
            'DataLen': flen,
            'MediaType': 4,
        })),
        'webwx_data_ticket': (None, self.session.cookies['webwx_data_ticket']),
        'pass_ticket': (None, self.pass_ticket),
        'filename': (os.path.basename(fpath), open(fpath, 'rb'), ftype.split('/')[1]),
    }
    self.file_index += 1
    try:
        r = self.session.post(url_1, files=files)
        if json.loads(r.text)['BaseResponse']['Ret'] != 0:
            # Ret != 0 means the upload failed; retry on the secondary
            # file server.
            r = self.session.post(url_2, files=files)
            if json.loads(r.text)['BaseResponse']['Ret'] != 0:
                print '[ERROR] Upload media failure.'
                return None
        mid = json.loads(r.text)['MediaId']
        return mid
    except Exception,e:
        return None
def send_file_msg_by_uid(self, fpath, uid):
    """
    Upload the file at `fpath` and send it as an app message (Type 6)
    to user id `uid`.

    :return: True when the upload and the send both succeed
    """
    mid = self.upload_media(fpath)
    if mid is None or not mid:
        return False
    url = self.base_uri + '/webwxsendappmsg?fun=async&f=json&pass_ticket=' + self.pass_ticket
    # Client-side message id: millisecond timestamp + 5 random digits.
    msg_id = str(int(time.time() * 1000)) + str(random.random())[:5].replace('.', '')
    data = {
        'BaseRequest': self.base_request,
        'Msg': {
            'Type': 6,
            'Content': ("<appmsg appid='wxeb7ec651dd0aefa9' sdkver=''><title>%s</title><des></des><action></action><type>6</type><content></content><url></url><lowurl></lowurl><appattach><totallen>%s</totallen><attachid>%s</attachid><fileext>%s</fileext></appattach><extinfo></extinfo></appmsg>" % (os.path.basename(fpath).encode('utf-8'), str(os.path.getsize(fpath)), mid, fpath.split('.')[-1])).encode('utf8'),
            'FromUserName': self.my_account['UserName'],
            'ToUserName': uid,
            'LocalID': msg_id,
            'ClientMsgId': msg_id, }, }
    try:
        r = self.session.post(url, data=json.dumps(data))
        res = json.loads(r.text)
        if res['BaseResponse']['Ret'] == 0:
            return True
        else:
            return False
    except Exception,e:
        return False
def send_img_msg_by_uid(self, fpath, uid):
    """
    Upload the image at `fpath` and send it (Type 3) to user id `uid`.
    GIF files are routed through the emoticon endpoint (Type 47) instead.

    :return: True when the upload and the send both succeed
    """
    mid = self.upload_media(fpath, is_img=True)
    if mid is None:
        return False
    url = self.base_uri + '/webwxsendmsgimg?fun=async&f=json'
    data = {
        'BaseRequest': self.base_request,
        'Msg': {
            'Type': 3,
            'MediaId': mid,
            'FromUserName': self.my_account['UserName'],
            'ToUserName': uid,
            'LocalID': str(time.time() * 1e7),
            'ClientMsgId': str(time.time() * 1e7), }, }
    # Animated GIFs go through the emoticon endpoint with a different type.
    if fpath[-4:] == '.gif':
        url = self.base_uri + '/webwxsendemoticon?fun=sys'
        data['Msg']['Type'] = 47
        data['Msg']['EmojiFlag'] = 2
    try:
        r = self.session.post(url, data=json.dumps(data))
        res = json.loads(r.text)
        if res['BaseResponse']['Ret'] == 0:
            return True
        else:
            return False
    except Exception,e:
        return False
def get_user_id(self, name):
    """
    Resolve a display name to a WeChat user id.

    Contacts are searched before saved groups; for each entry the remark
    name, nickname and display name are compared in that order.

    :return: None for an empty name, the matching UserName on success,
        '' when nothing matched
    """
    if name == '':
        return None
    name = self.to_unicode(name)
    for entry in self.contact_list + self.group_list:
        for field in ('RemarkName', 'NickName', 'DisplayName'):
            if field in entry and entry[field] == name:
                return entry['UserName']
    return ''
def send_msg(self, name, word, isfile=False):
uid = self.get_user_id(name)
if uid is not None:
if isfile:
with open(word, 'r') as f:
result = True
for line in f.readlines():
line = line.replace('\n', '')
print '-> ' + name + ': ' + line
if self.send_msg_by_uid(line, uid):
pass
else:
result = False
time.sleep(1)
return result
else:
word = self.to_unicode(word)
if self.send_msg_by_uid(word, uid):
return True
else:
return False
else:
if self.DEBUG:
print '[ERROR] This user does not exist .'
return True
@staticmethod
def search_content(key, content, fmat='attr'):
if fmat == 'attr':
pm = re.search(key + '\s?=\s?"([^"<]+)"', content)
if pm:
return pm.group(1)
elif fmat == 'xml':
pm = re.search('<{0}>([^<]+)</{0}>'.format(key), content)
if pm:
return pm.group(1)
return 'unknown'
def run(self):
    """
    Top-level driver: fetch a login uuid, render the QR code, wait for
    the scan, log in, initialize the session, send the status notify,
    load contacts and enter the message loop (proc_msg).

    Sets self.status to 'loginout' on every exit path so WxbotManage can
    observe termination.
    """
    try:
        self.get_uuid()
        self.gen_qr_code(os.path.join(self.temp_pwd, 'wxqr.png'))
        print '[INFO] Please use WeChat to scan the QR code .'
        result = self.wait4login()
        if result != SUCCESS:
            print '[ERROR] Web WeChat login failed. failed code=%s' % (result,)
            self.status = 'loginout'
            return
        if self.login():
            print '[INFO] Web WeChat login succeed .'
        else:
            print '[ERROR] Web WeChat login failed .'
            self.status = 'loginout'
            return
        if self.init():
            print '[INFO] Web WeChat init succeed .'
        else:
            print '[INFO] Web WeChat init failed'
            self.status = 'loginout'
            return
        self.status_notify()
        if self.get_contact():
            print '[INFO] Get %d contacts' % len(self.contact_list)
            print '[INFO] Start to process messages .'
        self.proc_msg()
        self.status = 'loginout'
    except Exception,e:
        print '[ERROR] Web WeChat run failed --> %s' % (e)
        self.status = 'loginout'
def get_uuid(self):
    """
    Request a fresh login uuid from the WeChat QR-login endpoint and
    store it on self.uuid.

    :return: True when the endpoint answers with code 200
    """
    resp = self.session.get('https://login.weixin.qq.com/jslogin', params={
        'appid': 'wx782c26e4c19acffb',
        'fun': 'new',
        'lang': 'zh_CN',
        '_': int(time.time()) * 1000 + random.randint(1, 999),
    })
    resp.encoding = 'utf-8'
    match = re.search(
        r'window.QRLogin.code = (\d+); window.QRLogin.uuid = "(\S+?)"',
        resp.text)
    if not match:
        return False
    self.uuid = match.group(2)
    return match.group(1) == '200'
def gen_qr_code(self, qr_file_path):
    """
    Render the login QR code, either to a PNG file (and display it) or
    directly to the terminal, depending on self.conf['qr'].

    :param qr_file_path: destination path used for the 'png' mode
    """
    string = 'https://login.weixin.qq.com/l/' + self.uuid
    qr = pyqrcode.create(string)
    if self.conf['qr'] == 'png':
        qr.png(qr_file_path, scale=8)
        show_image(qr_file_path)
        # img = Image.open(qr_file_path)
        # img.show()
    elif self.conf['qr'] == 'tty':
        print(qr.terminal(quiet_zone=1))
def do_request(self, url):
    """
    GET the login-poll URL and extract the `window.code` value.

    :return: (code string, raw response body)
    """
    resp = self.session.get(url)
    resp.encoding = 'utf-8'
    body = resp.text
    code = re.search(r'window.code=(\d+);', body).group(1)
    return code, body
def wait4login(self):
    """
    http comet:
        tip=1, waiting for the user to scan the QR code,
            201: scanned
            408: timeout
        tip=0, waiting for the user to confirm the login,
            200: confirmed

    Polls up to MAX_RETRY_TIMES; on success stores redirect_uri,
    base_uri and base_host on self and returns SUCCESS, otherwise
    returns the last code seen.
    """
    LOGIN_TEMPLATE = 'https://login.weixin.qq.com/cgi-bin/mmwebwx-bin/login?tip=%s&uuid=%s&_=%s'
    tip = 1
    try_later_secs = 1
    MAX_RETRY_TIMES = 10
    code = UNKONWN
    retry_time = MAX_RETRY_TIMES
    while retry_time > 0:
        url = LOGIN_TEMPLATE % (tip, self.uuid, int(time.time()))
        code, data = self.do_request(url)
        if code == SCANED:
            print '[INFO] Please confirm to login .'
            tip = 0
        elif code == SUCCESS:  # login confirmed
            param = re.search(r'window.redirect_uri="(\S+?)";', data)
            redirect_uri = param.group(1) + '&fun=new'
            self.redirect_uri = redirect_uri
            self.base_uri = redirect_uri[:redirect_uri.rfind('/')]
            # base_host is the hostname part of base_uri (strip scheme and path).
            temp_host = self.base_uri[8:]
            self.base_host = temp_host[:temp_host.find("/")]
            return code
        elif code == TIMEOUT:
            print '[ERROR] WeChat login timeout. retry in %s secs later...' % (try_later_secs,)
            tip = 1  # reset
            retry_time -= 1
            time.sleep(try_later_secs)
        else:
            print ('[ERROR] WeChat login exception return_code=%s. retry in %s secs later...' %
                   (code, try_later_secs))
            tip = 1
            retry_time -= 1
            time.sleep(try_later_secs)
    return code
def login(self):
    """
    Exchange the redirect_uri obtained from wait4login() for the session
    credentials (skey, sid, uin, pass_ticket) and build self.base_request.

    :return: True when all four credentials were extracted
    """
    if len(self.redirect_uri) < 4:
        print '[ERROR] Login failed due to network problem, please try again.'
        return False
    r = self.session.get(self.redirect_uri)
    r.encoding = 'utf-8'
    data = r.text
    doc = xml.dom.minidom.parseString(data)
    root = doc.documentElement
    # The response is a small XML document with one element per credential.
    for node in root.childNodes:
        if node.nodeName == 'skey':
            self.skey = node.childNodes[0].data
        elif node.nodeName == 'wxsid':
            self.sid = node.childNodes[0].data
        elif node.nodeName == 'wxuin':
            self.uin = node.childNodes[0].data
        elif node.nodeName == 'pass_ticket':
            self.pass_ticket = node.childNodes[0].data
    if '' in (self.skey, self.sid, self.uin, self.pass_ticket):
        return False
    self.base_request = {
        'Uin': self.uin,
        'Sid': self.sid,
        'Skey': self.skey,
        'DeviceID': self.device_id,
    }
    return True
def init(self):
    """
    Call webwxinit to fetch the account profile (self.my_account) and the
    initial SyncKey used by subsequent synccheck/sync calls.

    :return: True when the server replies Ret == 0
    """
    url = self.base_uri + '/webwxinit?r=%i&lang=en_US&pass_ticket=%s' % (int(time.time()), self.pass_ticket)
    params = {
        'BaseRequest': self.base_request
    }
    r = self.session.post(url, data=json.dumps(params))
    r.encoding = 'utf-8'
    dic = json.loads(r.text)
    self.sync_key = dic['SyncKey']
    self.my_account = dic['User']
    # synccheck expects the key flattened as "Key_Val|Key_Val|...".
    self.sync_key_str = '|'.join([str(keyVal['Key']) + '_' + str(keyVal['Val'])
                                  for keyVal in self.sync_key['List']])
    return dic['BaseResponse']['Ret'] == 0
def status_notify(self):
    """
    Send the post-login status-notify handshake (Code 3) to the server.

    :return: True when the server replies Ret == 0
    """
    endpoint = self.base_uri + '/webwxstatusnotify?lang=zh_CN&pass_ticket=%s' % self.pass_ticket
    # From this point on the server expects Uin as an integer.
    self.base_request['Uin'] = int(self.base_request['Uin'])
    payload = {
        'BaseRequest': self.base_request,
        "Code": 3,
        "FromUserName": self.my_account['UserName'],
        "ToUserName": self.my_account['UserName'],
        "ClientMsgId": int(time.time()),
    }
    resp = self.session.post(endpoint, data=json.dumps(payload))
    resp.encoding = 'utf-8'
    return json.loads(resp.text)['BaseResponse']['Ret'] == 0
def test_sync_check(self):
    """
    Probe the webpush hosts and keep the first one whose synccheck
    answers retcode '0' in self.sync_host.

    :return: True when a working host was found
    """
    for prefix in ['webpush.', 'webpush2.']:
        self.sync_host = prefix + self.base_host
        try:
            code = self.sync_check()[0]
        except:
            code = -1
        if code == '0':
            return True
    return False
def sync_check(self):
    """
    Long-poll the synccheck endpoint on self.sync_host.

    :return: [retcode, selector] as strings on success, [-1, -1] on any
        network or parse error
    """
    params = {
        'r': int(time.time()),
        'sid': self.sid,
        'uin': self.uin,
        'skey': self.skey,
        'deviceid': self.device_id,
        'synckey': self.sync_key_str,
        '_': int(time.time()),
    }
    url = 'https://' + self.sync_host + '/cgi-bin/mmwebwx-bin/synccheck?' + urllib.urlencode(params)
    try:
        # The server holds the request open (comet), hence the long timeout.
        r = self.session.get(url, timeout=60)
        r.encoding = 'utf-8'
        data = r.text
        pm = re.search(r'window.synccheck=\{retcode:"(\d+)",selector:"(\d+)"\}', data)
        retcode = pm.group(1)
        selector = pm.group(2)
        return [retcode, selector]
    except:
        return [-1, -1]
def sync(self):
    """
    Pull pending messages with webwxsync and refresh the sync key.

    :return: the decoded response dict, or None on any error
    """
    url = self.base_uri + '/webwxsync?sid=%s&skey=%s&lang=en_US&pass_ticket=%s' \
          % (self.sid, self.skey, self.pass_ticket)
    params = {
        'BaseRequest': self.base_request,
        'SyncKey': self.sync_key,
        'rr': ~int(time.time())
    }
    try:
        r = self.session.post(url, data=json.dumps(params), timeout=60)
        r.encoding = 'utf-8'
        dic = json.loads(r.text)
        if dic['BaseResponse']['Ret'] == 0:
            # Adopt the new key so the next synccheck sees fresh state.
            self.sync_key = dic['SyncCheckKey']
            self.sync_key_str = '|'.join([str(keyVal['Key']) + '_' + str(keyVal['Val'])
                                          for keyVal in self.sync_key['List']])
        return dic
    except:
        return None
def get_icon(self, uid, gid=None):
    """
    Fetch the avatar of a contact or of a group member and save it under
    temp_pwd as icon_<uid>.jpg.

    :param uid: contact id
    :param gid: group id; when not None, fetch the member's avatar inside
        that group, otherwise fetch the contact's own avatar
    :return: the saved file name
    """
    if gid is None:
        url = self.base_uri + '/webwxgeticon?username=%s&skey=%s' % (uid, self.skey)
    else:
        url = self.base_uri + '/webwxgeticon?username=%s&skey=%s&chatroomid=%s' % (
            uid, self.skey, self.encry_chat_room_id_list[gid])
    r = self.session.get(url)
    data = r.content
    fn = 'icon_' + uid + '.jpg'
    with open(os.path.join(self.temp_pwd, fn), 'wb') as f:
        f.write(data)
    return fn
def get_head_img(self, uid):
    """
    Fetch a group's avatar and save it under temp_pwd as head_<uid>.jpg.

    :param uid: group uid
    :return: the saved file name
    """
    url = self.base_uri + '/webwxgetheadimg?username=%s&skey=%s' % (uid, self.skey)
    r = self.session.get(url)
    data = r.content
    fn = 'head_' + uid + '.jpg'
    with open(os.path.join(self.temp_pwd, fn), 'wb') as f:
        f.write(data)
    return fn
def get_msg_img_url(self, msgid):
    """Return the download URL for the image attached to message `msgid`."""
    return '%s/webwxgetmsgimg?MsgID=%s&skey=%s' % (self.base_uri, msgid, self.skey)
def get_msg_img(self, msgid):
    """
    Download the image attached to a message and save it locally.

    :param msgid: message id
    :return: the saved local file name (img_<msgid>.jpg under temp_pwd)
    """
    url = self.base_uri + '/webwxgetmsgimg?MsgID=%s&skey=%s' % (msgid, self.skey)
    r = self.session.get(url)
    data = r.content
    fn = 'img_' + msgid + '.jpg'
    with open(os.path.join(self.temp_pwd, fn), 'wb') as f:
        f.write(data)
    return fn
def get_voice_url(self, msgid):
    """Return the download URL for the voice clip of message `msgid`."""
    return '%s/webwxgetvoice?msgid=%s&skey=%s' % (self.base_uri, msgid, self.skey)
def get_voice(self, msgid):
    """
    Download a voice message and save it locally.

    :param msgid: voice message id
    :return: the saved local file name (voice_<msgid>.mp3 under temp_pwd)
    """
    url = self.base_uri + '/webwxgetvoice?msgid=%s&skey=%s' % (msgid, self.skey)
    r = self.session.get(url)
    data = r.content
    fn = 'voice_' + msgid + '.mp3'
    with open(os.path.join(self.temp_pwd, fn), 'wb') as f:
        f.write(data)
    return fn
def get_video_url(self, msgid):
    """Return the download URL for the video of message `msgid`."""
    return '%s/webwxgetvideo?msgid=%s&skey=%s' % (self.base_uri, msgid, self.skey)
def get_video(self, msgid):
    """
    Download a video message and save it locally.

    :param msgid: video message id
    :return: the saved local file name (video_<msgid>.mp4 under temp_pwd)
    """
    url = self.base_uri + '/webwxgetvideo?msgid=%s&skey=%s' % (msgid, self.skey)
    # NOTE(review): the original sends a Range header here, presumably
    # because the video endpoint expects partial-content requests -- confirm.
    headers = {'Range': 'bytes=0-'}
    r = self.session.get(url, headers=headers)
    data = r.content
    fn = 'video_' + msgid + '.mp4'
    with open(os.path.join(self.temp_pwd, fn), 'wb') as f:
        f.write(data)
    return fn
def set_remarkname(self, uid, remarkname):  # set a contact's remark name
    """
    Set the remark (alias) name of contact `uid` via webwxoplog.

    :param uid: contact id
    :param remarkname: new remark name (converted to unicode first)
    :return: the server's ErrMsg string, or None on error
    """
    url = self.base_uri + '/webwxoplog?lang=zh_CN&pass_ticket=%s' \
          % (self.pass_ticket)
    remarkname = self.to_unicode(remarkname)
    params = {
        'BaseRequest': self.base_request,
        'CmdId': 2,
        'RemarkName': remarkname,
        'UserName': uid
    }
    try:
        r = self.session.post(url, data=json.dumps(params), timeout=60)
        r.encoding = 'utf-8'
        dic = json.loads(r.text)
        return dic['BaseResponse']['ErrMsg']
    except:
        return None
|
{
"content_hash": "e83aea23be4e5a6d069eb2591fb7ebab",
"timestamp": "",
"source": "github",
"line_count": 1511,
"max_line_length": 420,
"avg_line_length": 39.651224354731966,
"alnum_prop": 0.49523475706441006,
"repo_name": "Detailscool/YHSpider",
"id": "13ad10d060cc5acc0bfc80e4c4f9ea949c4f5149",
"size": "62294",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ZhaocongReading/wxbot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1011283"
},
{
"name": "Python",
"bytes": "230070"
},
{
"name": "Shell",
"bytes": "730"
}
],
"symlink_target": ""
}
|
"""Test transaction signing using the signrawtransaction* RPCs."""
from test_framework.address import check_script, script_to_p2sh
from test_framework.key import ECKey
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, find_vout_for_address, hex_str_to_bytes
from test_framework.messages import sha256
from test_framework.script import CScript, OP_0, OP_CHECKSIG
from test_framework.script_util import key_to_p2pkh_script, script_to_p2sh_p2wsh_script, script_to_p2wsh_script
from test_framework.wallet_util import bytes_to_wif
from decimal import Decimal
class SignRawTransactionsTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def successful_signing_test(self):
"""Create and sign a valid raw transaction with one input.
Expected results:
1) The transaction has a complete set of signatures
2) No script verification error occurred"""
self.log.info("Test valid raw transaction with one input")
privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N', 'cVKpPfVKSJxKqVpE9awvXNWuLHCa5j5tiE7K6zbUSptFpTEtiFrA']
inputs = [
# Valid pay-to-pubkey scripts
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
{'txid': '83a4f6a6b73660e13ee6cb3c6063fa3759c50c9b7521d0536022961898f4fb02', 'vout': 0,
'scriptPubKey': '76a914669b857c03a5ed269d5d85a1ffac9ed5d663072788ac'},
]
outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, inputs)
# 1) The transaction has a complete set of signatures
assert rawTxSigned['complete']
# 2) No script verification error occurred
assert 'errors' not in rawTxSigned
def test_with_lock_outputs(self):
self.log.info("Test correct error reporting when trying to sign a locked output")
self.nodes[0].encryptwallet("password")
rawTx = '020000000156b958f78e3f24e0b2f4e4db1255426b0902027cb37e3ddadb52e37c3557dddb0000000000ffffffff01c0a6b929010000001600149a2ee8c77140a053f36018ac8124a6ececc1668a00000000'
assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signrawtransactionwithwallet, rawTx)
def script_verification_error_test(self):
"""Create and sign a raw transaction with valid (vin 0), invalid (vin 1) and one missing (vin 2) input script.
Expected results:
3) The transaction has no complete set of signatures
4) Two script verification errors occurred
5) Script verification errors have certain properties ("txid", "vout", "scriptSig", "sequence", "error")
6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)"""
self.log.info("Test script verification errors")
privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N']
inputs = [
# Valid pay-to-pubkey script
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0},
# Invalid script
{'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7},
# Missing scriptPubKey
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 1},
]
scripts = [
# Valid pay-to-pubkey script
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
# Invalid script
{'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7,
'scriptPubKey': 'badbadbadbad'}
]
outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
# Make sure decoderawtransaction is at least marginally sane
decodedRawTx = self.nodes[0].decoderawtransaction(rawTx)
for i, inp in enumerate(inputs):
assert_equal(decodedRawTx["vin"][i]["txid"], inp["txid"])
assert_equal(decodedRawTx["vin"][i]["vout"], inp["vout"])
# Make sure decoderawtransaction throws if there is extra data
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, rawTx + "00")
rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, scripts)
# 3) The transaction has no complete set of signatures
assert not rawTxSigned['complete']
# 4) Two script verification errors occurred
assert 'errors' in rawTxSigned
assert_equal(len(rawTxSigned['errors']), 2)
# 5) Script verification errors have certain properties
assert 'txid' in rawTxSigned['errors'][0]
assert 'vout' in rawTxSigned['errors'][0]
assert 'witness' in rawTxSigned['errors'][0]
assert 'scriptSig' in rawTxSigned['errors'][0]
assert 'sequence' in rawTxSigned['errors'][0]
assert 'error' in rawTxSigned['errors'][0]
# 6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)
assert_equal(rawTxSigned['errors'][0]['txid'], inputs[1]['txid'])
assert_equal(rawTxSigned['errors'][0]['vout'], inputs[1]['vout'])
assert_equal(rawTxSigned['errors'][1]['txid'], inputs[2]['txid'])
assert_equal(rawTxSigned['errors'][1]['vout'], inputs[2]['vout'])
assert not rawTxSigned['errors'][0]['witness']
# Now test signing failure for transaction with input witnesses
p2wpkh_raw_tx = "01000000000102fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f00000000494830450221008b9d1dc26ba6a9cb62127b02742fa9d754cd3bebf337f7a55d114c8e5cdd30be022040529b194ba3f9281a99f2b1c0a19c0489bc22ede944ccf4ecbab4cc618ef3ed01eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac000247304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee0121025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee635711000000"
rawTxSigned = self.nodes[0].signrawtransactionwithwallet(p2wpkh_raw_tx)
# 7) The transaction has no complete set of signatures
assert not rawTxSigned['complete']
# 8) Two script verification errors occurred
assert 'errors' in rawTxSigned
assert_equal(len(rawTxSigned['errors']), 2)
# 9) Script verification errors have certain properties
assert 'txid' in rawTxSigned['errors'][0]
assert 'vout' in rawTxSigned['errors'][0]
assert 'witness' in rawTxSigned['errors'][0]
assert 'scriptSig' in rawTxSigned['errors'][0]
assert 'sequence' in rawTxSigned['errors'][0]
assert 'error' in rawTxSigned['errors'][0]
# Non-empty witness checked here
assert_equal(rawTxSigned['errors'][1]['witness'], ["304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee01", "025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357"])
assert not rawTxSigned['errors'][0]['witness']
def witness_script_test(self):
self.log.info("Test signing transaction to P2SH-P2WSH addresses without wallet")
# Create a new P2SH-P2WSH 1-of-1 multisig address:
eckey = ECKey()
eckey.generate()
embedded_privkey = bytes_to_wif(eckey.get_bytes())
embedded_pubkey = eckey.get_pubkey().get_bytes().hex()
p2sh_p2wsh_address = self.nodes[1].createmultisig(1, [embedded_pubkey], "p2sh-segwit")
# send transaction to P2SH-P2WSH 1-of-1 multisig address
self.nodes[0].generate(101)
self.nodes[0].sendtoaddress(p2sh_p2wsh_address["address"], 49.999)
self.nodes[0].generate(1)
self.sync_all()
# Get the UTXO info from scantxoutset
unspent_output = self.nodes[1].scantxoutset('start', [p2sh_p2wsh_address['descriptor']])['unspents'][0]
spk = script_to_p2sh_p2wsh_script(p2sh_p2wsh_address['redeemScript']).hex()
unspent_output['witnessScript'] = p2sh_p2wsh_address['redeemScript']
unspent_output['redeemScript'] = script_to_p2wsh_script(unspent_output['witnessScript']).hex()
assert_equal(spk, unspent_output['scriptPubKey'])
# Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
spending_tx = self.nodes[0].createrawtransaction([unspent_output], {self.nodes[1].get_wallet_rpc(self.default_wallet_name).getnewaddress(): Decimal("49.998")})
spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [unspent_output])
# Check the signing completed successfully
assert 'complete' in spending_tx_signed
assert_equal(spending_tx_signed['complete'], True)
# Now test with P2PKH and P2PK scripts as the witnessScript
for tx_type in ['P2PKH', 'P2PK']: # these tests are order-independent
self.verify_txn_with_witness_script(tx_type)
def verify_txn_with_witness_script(self, tx_type):
self.log.info("Test with a {} script as the witnessScript".format(tx_type))
eckey = ECKey()
eckey.generate()
embedded_privkey = bytes_to_wif(eckey.get_bytes())
embedded_pubkey = eckey.get_pubkey().get_bytes().hex()
witness_script = {
'P2PKH': key_to_p2pkh_script(embedded_pubkey).hex(),
'P2PK': CScript([hex_str_to_bytes(embedded_pubkey), OP_CHECKSIG]).hex()
}.get(tx_type, "Invalid tx_type")
redeem_script = CScript([OP_0, sha256(check_script(witness_script))]).hex()
addr = script_to_p2sh(redeem_script)
script_pub_key = self.nodes[1].validateaddress(addr)['scriptPubKey']
# Fund that address
txid = self.nodes[0].sendtoaddress(addr, 10)
vout = find_vout_for_address(self.nodes[0], txid, addr)
self.nodes[0].generate(1)
# Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
spending_tx = self.nodes[0].createrawtransaction([{'txid': txid, 'vout': vout}], {self.nodes[1].getnewaddress(): Decimal("9.999")})
spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [{'txid': txid, 'vout': vout, 'scriptPubKey': script_pub_key, 'redeemScript': redeem_script, 'witnessScript': witness_script, 'amount': 10}])
# Check the signing completed successfully
assert 'complete' in spending_tx_signed
assert_equal(spending_tx_signed['complete'], True)
self.nodes[0].sendrawtransaction(spending_tx_signed['hex'])
    def OP_1NEGATE_test(self):
        """Sign a hand-crafted transaction containing OP_1NEGATE (0x4f)
        pushes and assert signing completes (BIP62 minimal-push rule)."""
        self.log.info("Test OP_1NEGATE (0x4f) satisfies BIP62 minimal push standardness rule")
        # Raw tx hex: the scriptSig bytes are 044F024F9C — a 4-byte script
        # consisting of OP_1NEGATE followed by a 2-byte push of the redeem
        # script (4F 9C, i.e. OP_1NEGATE OP_NUMEQUAL).
        hex_str = (
            "0200000001FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"
            "FFFFFFFF00000000044F024F9CFDFFFFFF01F0B9F5050000000023210277777777"
            "77777777777777777777777777777777777777777777777777777777AC66030000"
        )
        # Previous output metadata the signer needs for the P2SH input.
        prev_txs = [
            {
                "txid": "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
                "vout": 0,
                "scriptPubKey": "A914AE44AB6E9AA0B71F1CD2B453B69340E9BFBAEF6087",
                "redeemScript": "4F9C",
                "amount": 1,
            }
        ]
        txn = self.nodes[0].signrawtransactionwithwallet(hex_str, prev_txs)
        assert txn["complete"]
    def run_test(self):
        """Framework entry point: run every signing sub-test in sequence."""
        self.successful_signing_test()
        self.script_verification_error_test()
        self.witness_script_test()
        self.OP_1NEGATE_test()
        self.test_with_lock_outputs()
# Standard functional-test entry point.
if __name__ == '__main__':
    SignRawTransactionsTest().main()
|
{
"content_hash": "a2771af38df440ab1b0625de32f24bca",
"timestamp": "",
"source": "github",
"line_count": 233,
"max_line_length": 712,
"avg_line_length": 53.8412017167382,
"alnum_prop": 0.6953367875647668,
"repo_name": "alecalve/bitcoin",
"id": "b962e1c3a552bedb8643a91326c780fbc045496a",
"size": "12759",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/functional/rpc_signrawtransaction.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28453"
},
{
"name": "C",
"bytes": "695632"
},
{
"name": "C++",
"bytes": "6008562"
},
{
"name": "HTML",
"bytes": "21860"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "M4",
"bytes": "197255"
},
{
"name": "Makefile",
"bytes": "117105"
},
{
"name": "Objective-C",
"bytes": "123749"
},
{
"name": "Objective-C++",
"bytes": "6594"
},
{
"name": "Python",
"bytes": "1469100"
},
{
"name": "QMake",
"bytes": "756"
},
{
"name": "Shell",
"bytes": "88183"
}
],
"symlink_target": ""
}
|
import csv
import datetime
import json
import os
# my lib
import vra_helper
def get_race_name(f, rest):
    """Derive the race name from a file path by dropping the *rest* suffix
    from the basename and trimming surrounding whitespace."""
    return os.path.basename(f).replace(rest, "").strip()
def read_split_data(f):
    """
    Read the content of a Concept2 VRA split data file.

    Rows are assigned to the most recent row that named a Boat. Returns a
    dict mapping each boat name to its list of
    {'split_time', 'split_meters'} entries (floats, in file order).
    """
    splits_by_boat = {}
    with open(f) as csvfile:
        current_boat = ""
        for row in csv.DictReader(csvfile):
            # Example row:
            # {'Split_Stroke_Rate': '22', 'Split_Heart_Rate': '0',
            #  'Interval': '', 'Time': '125.5', 'Boat': '',
            #  'Split_Avg_Pace': '125.5', 'Meters': '1000.0'}
            if row['Boat'] != "":
                # A non-empty Boat cell opens that competitor's block.
                current_boat = row['Boat']
                splits_by_boat[current_boat] = []
            splits_by_boat[current_boat].append({
                'split_time': float(row['Time']),
                'split_meters': float(row['Meters']),
            })
    return splits_by_boat
def read_result_data(f):
    """
    Read the content of a Concept2 VRA race result file.

    The file is a loose CSV: lines without a comma are skipped, the first
    comma-containing line is treated as the header, and parsing stops at the
    'Detailed ...' section that follows the summary table.

    Returns a list of dicts, one per result row, keyed by header names.
    Raises IndexError if a data row has fewer fields than the header.
    """
    sep = ','
    is_headline = True
    result = []
    # Use a context manager so the file handle is always closed; the
    # previous version iterated `open(f)` directly and leaked the handle.
    with open(f) as stream:
        for line in stream:
            if line.strip().startswith('Detailed'):
                # here starts the next table in this file, so stop
                break
            if sep not in line:
                continue
            # work with line
            if is_headline:
                is_headline = False
                headline = line.split(sep)
            else:
                curr_line = line.split(sep)
                line_dict = {}
                # Strip only trailing newline chars; inner spaces are data.
                for i in range(len(headline)):
                    line_dict[headline[i].strip('\n\r')] = curr_line[i].strip('\n\r')
                result.append(line_dict)
    return result
class Race(object):
    """A single race: absolute start/end timestamps plus per-class starters
    merged from the VRA result table and the per-boat split data."""
    def __init__(self, end_timestamp, results, splits):
        # The file timestamp marks the race end; the start is derived by
        # backing off the longest recorded rowing time.
        self.end = end_timestamp
        self.start = int(self.end - self.calculate_start_offset(results))
        self.classes = self.merge(results, splits)
    def calculate_start_offset(self, data):
        """
        Get the longest time in the result data set (as FLOAT)
        """
        longest = 0
        for x in data:
            time_rowed = x['Time Rowed']
            # vra_helper converts "m:ss.t"-style strings to seconds —
            # presumably; confirm against vra_helper.convert_time_in_seconds.
            sum_time = vra_helper.convert_time_in_seconds(time_rowed)
            if sum_time > longest:
                longest = sum_time
        return longest
    def merge(self, r, s):
        # Group result rows (r) by their 'Class' column and attach the
        # matching split list from s (keyed by boat name) to each starter.
        result = {}
        for x in r:
            starter = Starter(x)
            clazz = x['Class']
            if clazz not in result:
                result[clazz] = []
            result[clazz].append(starter)
            for n in s:
                if starter.name == n:
                    starter.add_splits(s[n])
        return result
    def toJSON(self):
        # Serialize via each object's __dict__ so nested Starters encode too.
        return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)
class Starter(object):
    """One competitor's final result plus (optionally) per-split data."""
    def __init__(self, race_result_dataset):
        """Populate the final-result fields from one result-table row."""
        row = race_result_dataset
        self.name = row['Boat/Team Name']
        self.id = int(row['Bib Number'])
        self.place = int(row['Place'])
        self.final_time = row['Time Rowed'].strip()
        self.final_time_sec = vra_helper.convert_time_in_seconds(self.final_time)
        self.avg_pace = row['Avg. Pace'].strip()
        self.avg_pace_sec = vra_helper.convert_time_in_seconds(self.avg_pace)
        self.meters_rowed = int(row['Meters Rowed'])
        # Filled in later by add_splits().
        self.splits = []
    def add_splits(self, splits):
        """Append the given splits ordered by distance, accumulating the
        running total time for each split mark."""
        time_by_meters = {entry['split_meters']: entry['split_time']
                          for entry in splits}
        running_total = 0.0
        for meters in sorted(time_by_meters):
            running_total += time_by_meters[meters]
            self.splits.append({
                'meters': meters,
                'split_time': time_by_meters[meters],
                'total_time': running_total,
            })
def process_race_result(f_result, f_split, output_path):
    """Parse a result/split file pair and write the merged race as JSON.

    The output file name combines the result file's timestamp with the
    race name: 'YYYYMMDD_HHMM_<race>.rac_result.txt'.
    """
    race_name = get_race_name(f_result, 'Results.txt')
    last_mod_date = vra_helper.creation_date(f_result)
    stamp = datetime.datetime.fromtimestamp(last_mod_date).strftime('%Y%m%d_%H%M_')
    output_fname = stamp + race_name + '.rac_result.txt'
    race = Race(last_mod_date,
                read_result_data(f_result),
                read_split_data(f_split))
    output_fpath = os.path.join(output_path, output_fname)
    with open(output_fpath, 'w+') as text_file:
        print(race.toJSON(), file=text_file)
|
{
"content_hash": "390e88c8537b3c34ee446afac84a5fd1",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 122,
"avg_line_length": 32.56934306569343,
"alnum_prop": 0.5432541461228149,
"repo_name": "ruderphilipp/regatta",
"id": "5e192046d6124c626bf7dfde888d79c55810e598",
"size": "4462",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bin/VRA_parser/vra_parser.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "3605"
},
{
"name": "CSS",
"bytes": "729"
},
{
"name": "HTML",
"bytes": "83340"
},
{
"name": "JavaScript",
"bytes": "447516"
},
{
"name": "PHP",
"bytes": "316741"
},
{
"name": "Python",
"bytes": "16528"
}
],
"symlink_target": ""
}
|
"""Delegate accounts are used to provide secure access to the XMLRPC API.
They are normal Unix accounts with a shell that tunnels XMLRPC requests to the API server."""
from pwd import getpwnam
from grp import getgrnam
import logger
import tools
import accounts
class Controller(accounts.Account):
    """A delegate account: a Unix user whose login shell tunnels XMLRPC
    API calls instead of offering an interactive session."""
    SHELL = '/usr/bin/forward_api_calls' # tunneling shell
    TYPE = 'controller.Controller'
    @staticmethod
    def create(name, vref = None):
        """Create the Unix account <name> with the tunneling shell."""
        add_shell(Controller.SHELL)
        slices_gid = getgrnam("slices")[2]
        logger.log_call(['/usr/sbin/useradd', '-p', '*', '-g', str(slices_gid), '-s', Controller.SHELL, name, ])
    @staticmethod
    def destroy(name):
        """Remove the Unix account and its home directory."""
        logger.log_call(['/usr/sbin/userdel', '-r', name, ])
    def is_running(self):
        """A controller account counts as running iff its login shell is
        still the tunneling shell."""
        logger.verbose("controller: is_running: %s" % self.name)
        return getpwnam(self.name)[6] == self.SHELL
def add_shell(shell):
    """Add <shell> to /etc/shells if it's not already there."""
    # Context managers guarantee the handles are closed even on error
    # (the previous version closed manually and could leak on exception).
    with open('/etc/shells') as etc_shells:
        valid_shells = etc_shells.read().split()
    if shell not in valid_shells:
        # write() instead of the Python-2-only `print >>` statement, so this
        # works unchanged on both Python 2 and Python 3.
        with open('/etc/shells', 'a') as etc_shells:
            etc_shells.write(shell + '\n')
|
{
"content_hash": "ce282e8246d3e6d1abed20331d5b5b7f",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 107,
"avg_line_length": 32.567567567567565,
"alnum_prop": 0.6506224066390042,
"repo_name": "planetlab/NodeManager",
"id": "9979370d5398c92e2f6ea47ec48e2a8bda552594",
"size": "1207",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "controller.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "3539"
},
{
"name": "PHP",
"bytes": "1917"
},
{
"name": "Python",
"bytes": "196258"
},
{
"name": "Shell",
"bytes": "3830"
}
],
"symlink_target": ""
}
|
"""Enable Filter Select
Revision ID: f1f2d4af5b90
Revises: e46f2d27a08e
Create Date: 2016-11-23 10:27:18.517919
"""
# revision identifiers, used by Alembic.
revision = 'f1f2d4af5b90'  # this migration
down_revision = 'e46f2d27a08e'  # the migration this one applies on top of
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the boolean ``filter_select_enabled`` column (default False)
    to both the datasources and tables tables."""
    for table_name in ('datasources', 'tables'):
        op.add_column(table_name,
                      sa.Column('filter_select_enabled',
                                sa.Boolean(), default=False))
def downgrade():
    """Drop ``filter_select_enabled`` again (reverse order of upgrade)."""
    for table_name in ('tables', 'datasources'):
        op.drop_column(table_name, 'filter_select_enabled')
|
{
"content_hash": "fb360ebdcbccaddec1ff5c8c8cd03615",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 72,
"avg_line_length": 27.4,
"alnum_prop": 0.6423357664233577,
"repo_name": "alanmcruickshank/superset-dev",
"id": "36bae518ce0637fcfaa884f4ed50132ed2881fae",
"size": "685",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "superset/migrations/versions/f1f2d4af5b90_.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "65422"
},
{
"name": "HTML",
"bytes": "101728"
},
{
"name": "JavaScript",
"bytes": "783366"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "815898"
},
{
"name": "Shell",
"bytes": "1367"
}
],
"symlink_target": ""
}
|
"""Support for Meteoclimatic sensor."""
import logging
from homeassistant.components.sensor import SensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from .const import (
ATTRIBUTION,
DOMAIN,
MANUFACTURER,
MODEL,
SENSOR_TYPE_CLASS,
SENSOR_TYPE_ICON,
SENSOR_TYPE_NAME,
SENSOR_TYPE_UNIT,
SENSOR_TYPES,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities
) -> None:
    """Set up the Meteoclimatic sensor platform."""
    coordinator = hass.data[DOMAIN][entry.entry_id]
    # One entity per supported sensor type; no initial state refresh.
    entities = []
    for sensor_type in SENSOR_TYPES:
        entities.append(MeteoclimaticSensor(sensor_type, coordinator))
    async_add_entities(entities, False)
class MeteoclimaticSensor(CoordinatorEntity, SensorEntity):
    """Representation of a Meteoclimatic sensor."""

    def __init__(self, sensor_type: str, coordinator: DataUpdateCoordinator) -> None:
        """Initialize the Meteoclimatic sensor."""
        super().__init__(coordinator)
        self._type = sensor_type
        station = self.coordinator.data["station"]
        sensor_conf = SENSOR_TYPES[sensor_type]
        self._attr_device_class = sensor_conf.get(SENSOR_TYPE_CLASS)
        self._attr_icon = sensor_conf.get(SENSOR_TYPE_ICON)
        self._attr_name = f"{station.name} {sensor_conf[SENSOR_TYPE_NAME]}"
        self._attr_unique_id = f"{station.code}_{sensor_type}"
        self._attr_native_unit_of_measurement = sensor_conf.get(SENSOR_TYPE_UNIT)

    @property
    def device_info(self):
        """Return the device info."""
        info = {
            "identifiers": {(DOMAIN, self.platform.config_entry.unique_id)},
            "name": self.coordinator.name,
            "manufacturer": MANUFACTURER,
            "model": MODEL,
            "entry_type": "service",
        }
        return info

    @property
    def native_value(self):
        """Return the state of the sensor."""
        data = self.coordinator.data
        if not data:
            return None
        return getattr(data["weather"], self._type)

    @property
    def extra_state_attributes(self):
        """Return the state attributes."""
        return {ATTR_ATTRIBUTION: ATTRIBUTION}
|
{
"content_hash": "5e64ee61a854fd862044c4c78012b40b",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 88,
"avg_line_length": 30.666666666666668,
"alnum_prop": 0.644524959742351,
"repo_name": "sander76/home-assistant",
"id": "b5a07ad06e6622d798045e4a2951484e99d18816",
"size": "2484",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/meteoclimatic/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "36548768"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
}
|
from stratiform.base import NameableAWSObject, prop
from stratiform.utils import Wrapper
class Parameter(NameableAWSObject):
    """A CloudFormation template Parameter and its allowed property set."""
    class Type(Wrapper):
        """Marker type wrapping a CloudFormation parameter type string."""
        pass
    # Generic CloudFormation parameter types.
    Type.String = Type('String')
    Type.Number = Type('Number')
    Type.ListNumber = Type('List<Number>')
    Type.CommaDelimitedList = Type('CommaDelimitedList')
    # AWS-specific parameter types.
    Type.KeyPair = Type('AWS::EC2::KeyPair::KeyName')
    Type.SecurityGroup = Type('AWS::EC2::SecurityGroup::Id')
    Type.Subnet = Type('AWS::EC2::Subnet::Id')
    Type.Vpc = Type('AWS::EC2::VPC::Id')
    Type.ListSecurityGroup = Type('List<AWS::EC2::SecurityGroup::Id>')
    Type.ListSubnet = Type('List<AWS::EC2::Subnet::Id>')
    Type.ListVpc = Type('List<AWS::EC2::VPC::Id>')
    @staticmethod
    def props():
        # Properties CloudFormation accepts on a Parameter declaration.
        # NOTE(review): `basestring` implies this module targets Python 2.
        return [prop('Type', Parameter.Type),
                prop('Description', basestring),
                prop('Default'),
                prop('AllowedValues'),
                prop('AllowedPattern'),
                prop('ConstraintDescription'),
                prop('MaxLength'),
                prop('MinLength'),
                prop('MaxValue'),
                prop('MinValue'),
                prop('NoEcho')]
def bind_type(type):
    """Return a factory that builds Parameters pre-bound to *type*."""
    def make_parameter(*args, **kwargs):
        return Parameter(*args, type=type, **kwargs)
    return make_parameter
class PseudoParameter(NameableAWSObject):
    """A predefined AWS pseudo parameter (e.g. AWS::Region)."""
    @staticmethod
    def props():
        # Pseudo parameters are supplied by AWS and take no properties.
        return []
#### Public API ####
# Generic constructor plus one convenience factory per parameter type.
parameter = Parameter
string_parameter = bind_type(Parameter.Type.String)
number_parameter = bind_type(Parameter.Type.Number)
list_number_parameter = bind_type(Parameter.Type.ListNumber)
comma_delimited_list_parameter = bind_type(Parameter.Type.CommaDelimitedList)
key_pair_parameter = bind_type(Parameter.Type.KeyPair)
security_group_parameter = bind_type(Parameter.Type.SecurityGroup)
subnet_parameter = bind_type(Parameter.Type.Subnet)
vpc_parameter = bind_type(Parameter.Type.Vpc)
list_security_group_parameter = bind_type(Parameter.Type.ListSecurityGroup)
list_subnet_parameter = bind_type(Parameter.Type.ListSubnet)
list_vpc_parameter = bind_type(Parameter.Type.ListVpc)
# Singletons for the AWS pseudo parameters.
AccountId = PseudoParameter('AWS::AccountId')
NotificationArns = PseudoParameter('AWS::NotificationARNs')
NoValue = PseudoParameter('AWS::NoValue')
Region = PseudoParameter('AWS::Region')
StackId = PseudoParameter('AWS::StackId')
StackName = PseudoParameter('AWS::StackName')
# Explicit export list: factories first, then the pseudo parameters.
__all__ = ['parameter'] + \
          ['string_parameter', 'number_parameter',
           'list_number_parameter', 'comma_delimited_list_parameter',
           'key_pair_parameter', 'security_group_parameter',
           'subnet_parameter', 'vpc_parameter',
           'list_security_group_parameter', 'list_subnet_parameter',
           'list_vpc_parameter'] + \
          ['AccountId', 'NotificationArns', 'NoValue', 'Region',
           'StackId', 'StackName']
|
{
"content_hash": "eaab2d3346e7d56d79d1988f0475af7a",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 77,
"avg_line_length": 40.973333333333336,
"alnum_prop": 0.6068987959648552,
"repo_name": "drbild/stratiform",
"id": "4b86a81776bc50cd187fad969af8032c65262c9b",
"size": "3678",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stratiform/parameters.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "120105"
},
{
"name": "Shell",
"bytes": "372"
}
],
"symlink_target": ""
}
|
def get_grid_index_for_ray_point(ray, ray_idx, n_depth_pts):
    """Given a ray and a particular point along that ray, return the corresponding grid point index."""
    # Rays with mu < 0 index the grid in the same order as their own points;
    # otherwise the ray runs opposite to the grid, so mirror the index.
    return ray_idx if ray.mu < 0 else n_depth_pts - ray_idx - 1
def get_ray_index_for_grid_point(ray, grid_idx, n_depth_pts):
    """Given a ray and a particular point on the physical grid, return the index along that ray corresponding to that point."""
    if ray.mu < 0:
        # Ray and grid indices coincide for mu < 0.
        return grid_idx
    # Otherwise the ray traverses the grid in reverse order.
    return n_depth_pts - (grid_idx + 1)
|
{
"content_hash": "ce81b1ce569958ef1d68bfed01e95825",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 127,
"avg_line_length": 42.61538461538461,
"alnum_prop": 0.6389891696750902,
"repo_name": "bcfriesen/PyRT",
"id": "49dd5753c00824fa1f039a6c77ff5bb2f1b08dd9",
"size": "554",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/grid_functions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9805"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import psutil
from .base import Metric, MetricData
class Memory(Metric):
    """Collect system virtual-memory usage (via psutil) as one data point."""
    TYPE = 'memory'
    def collect(self):
        """Return a single MetricData carrying all virtual_memory fields."""
        vm = psutil.virtual_memory()
        field_names = ("total", "available", "percent", "used", "free",
                       "active", "inactive", "buffers", "cached", "shared")
        fields = {attr: getattr(vm, attr) for attr in field_names}
        return [MetricData(
            name=self.measurement,
            tags=self.tags,
            fields=fields,
        )]
|
{
"content_hash": "7157c5db8f910427f6a5f21ec5000217",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 54,
"avg_line_length": 29,
"alnum_prop": 0.5386444708680143,
"repo_name": "falgore88/grafana-metrics",
"id": "3028915402c667a45c81cc2bd8db3ed82006a06e",
"size": "856",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "grafana_metrics/metrics/memory.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "25738"
}
],
"symlink_target": ""
}
|
import base64
import os
import re
import random
import shutil
import socket
import string
import json
import ipaddress
import charms.leadership
from shutil import move
from shlex import split
from subprocess import check_call
from subprocess import check_output
from subprocess import CalledProcessError
from charms import layer
from charms.layer import snap
from charms.reactive import hook
from charms.reactive import remove_state
from charms.reactive import set_state
from charms.reactive import is_state
from charms.reactive import when, when_any, when_not, when_all
from charms.reactive.helpers import data_changed, any_file_changed
from charms.kubernetes.common import get_version
from charms.kubernetes.common import retry
from charms.kubernetes.flagmanager import FlagManager
from charmhelpers.core import hookenv
from charmhelpers.core import host
from charmhelpers.core import unitdata
from charmhelpers.core.host import service_stop
from charmhelpers.core.templating import render
from charmhelpers.fetch import apt_install
from charmhelpers.contrib.charmsupport import nrpe
# Override the default nagios shortname regex to allow periods, which we
# need because our bin names contain them (e.g. 'snap.foo.daemon'). The
# default regex in charmhelpers doesn't allow periods, but nagios itself does.
# Raw string avoids the invalid '\.' escape warning on newer Pythons; the
# resulting pattern text is unchanged.
nrpe.Check.shortname_re = r'[\.A-Za-z0-9-_]+$'
# Make snap-installed binaries (e.g. /snap/bin/kubectl) resolvable via PATH.
os.environ['PATH'] += os.pathsep + os.path.join(os.sep, 'snap', 'bin')
def service_cidr():
    ''' Return the charm's service-cidr config '''
    # Prefer the frozen value persisted in unitdata; fall back to the
    # live charm config while nothing has been frozen yet.
    frozen = unitdata.kv().get('kubernetes-master.service-cidr')
    if frozen:
        return frozen
    return hookenv.config('service-cidr')
def freeze_service_cidr():
    ''' Freeze the service CIDR. Once the apiserver has started, we can no
    longer safely change this value. '''
    unitdata.kv().set('kubernetes-master.service-cidr', service_cidr())
@hook('upgrade-charm')
def reset_states_for_delivery():
    '''An upgrade charm event was triggered by Juju, react to that here.'''
    # Move any pre-snap install to the snap layout before (re)installing.
    migrate_from_pre_snaps()
    install_snaps()
    add_rbac_roles()
    # Force the authentication artifacts to be regenerated for this revision.
    set_state('reconfigure.authentication.setup')
    remove_state('authentication.setup')
def add_rbac_roles():
    '''Update the known_tokens file with proper groups.'''
    tokens_fname = '/root/cdk/known_tokens.csv'
    tokens_backup_fname = '/root/cdk/known_tokens.csv.backup'
    # Keep the original as a backup and regenerate the live file from it.
    move(tokens_fname, tokens_backup_fname)
    with open(tokens_fname, 'w') as ftokens, \
            open(tokens_backup_fname, 'r') as stream:
        for line in stream:
            # CSV record layout: token, username, user, groups
            record = line.strip().split(',')
            if record[2] == 'admin' and len(record) == 3:
                # Grant the admin user the system:masters group.
                ftokens.write('{0},{1},{2},"{3}"\n'.format(
                    record[0], record[1], record[2], 'system:masters'))
            elif record[2] == 'kube_proxy':
                # Rename the proxy account to the upstream convention.
                ftokens.write('{0},{1},{2}\n'.format(
                    record[0], 'system:kube-proxy', 'kube-proxy'))
            elif record[2] == 'kubelet' and record[1] == 'kubelet':
                # Drop the legacy shared kubelet entry entirely.
                continue
            else:
                # Anything else passes through unchanged.
                ftokens.write('{}'.format(line))
def rename_file_idempotent(source, destination):
    """Move *source* to *destination*; silently do nothing if it is absent."""
    if not os.path.isfile(source):
        return
    os.rename(source, destination)
def migrate_from_pre_snaps():
    """One-time migration from the pre-snap (manual/deb) install layout."""
    # Retire reactive states that only existed before the snap install.
    for state in ('kubernetes.components.installed',
                  'kubernetes.dashboard.available',
                  'kube-dns.available',
                  'kubernetes-master.app_version.set'):
        remove_state(state)
    # Stop the old (non-snap) services.
    for service in ('kube-apiserver',
                    'kube-controller-manager',
                    'kube-scheduler'):
        hookenv.log('Stopping {0} service.'.format(service))
        host.service_stop(service)
    # Relocate the authentication material into /root/cdk.
    os.makedirs('/root/cdk', exist_ok=True)
    for src, dst in (('/etc/kubernetes/serviceaccount.key',
                      '/root/cdk/serviceaccount.key'),
                     ('/srv/kubernetes/basic_auth.csv',
                      '/root/cdk/basic_auth.csv'),
                     ('/srv/kubernetes/known_tokens.csv',
                      '/root/cdk/known_tokens.csv')):
        rename_file_idempotent(src, dst)
    # Delete leftover unit files, defaults and binaries of the old install.
    stale_paths = [
        "/lib/systemd/system/kube-apiserver.service",
        "/lib/systemd/system/kube-controller-manager.service",
        "/lib/systemd/system/kube-scheduler.service",
        "/etc/default/kube-defaults",
        "/etc/default/kube-apiserver.defaults",
        "/etc/default/kube-controller-manager.defaults",
        "/etc/default/kube-scheduler.defaults",
        "/srv/kubernetes",
        "/home/ubuntu/kubectl",
        "/usr/local/bin/kubectl",
        "/usr/local/bin/kube-apiserver",
        "/usr/local/bin/kube-controller-manager",
        "/usr/local/bin/kube-scheduler",
        "/etc/kubernetes"
    ]
    for path in stale_paths:
        if os.path.isdir(path):
            hookenv.log("Removing directory: " + path)
            shutil.rmtree(path)
        elif os.path.isfile(path):
            hookenv.log("Removing file: " + path)
            os.remove(path)
    # Drop any CLI flags accumulated for the old services.
    for component in ('kube-apiserver',
                      'kube-controller-manager',
                      'kube-scheduler'):
        FlagManager(component).destroy_all()
def install_snaps():
    """Install (or refresh) every master snap from the configured channel."""
    channel = hookenv.config('channel')
    # kubectl is the only snap that needs classic confinement.
    plan = (('kubectl', {'classic': True}),
            ('kube-apiserver', {}),
            ('kube-controller-manager', {}),
            ('kube-scheduler', {}),
            ('cdk-addons', {}))
    for snap_name, extra in plan:
        hookenv.status_set('maintenance',
                           'Installing {} snap'.format(snap_name))
        snap.install(snap_name, channel=channel, **extra)
    set_state('kubernetes-master.snaps.installed')
    # Newly installed snaps require the master components to be restarted.
    remove_state('kubernetes-master.components.started')
@when('config.changed.channel')
def channel_changed():
    """Reinstall/refresh all snaps whenever the snap channel config changes."""
    install_snaps()
@when('config.changed.client_password', 'leadership.is_leader')
def password_changed():
    """Handle password change via the charms config."""
    password = hookenv.config('client_password')
    if password == "":
        if is_state('client.password.initialised'):
            # password_changed is called during an upgrade. Nothing to do.
            return
        # Password not initialised yet; generate a random one.
        password = token_generator()
    setup_basic_auth(password, "admin", "admin")
    # Force the authentication handlers to rerun with the new password.
    set_state('reconfigure.authentication.setup')
    remove_state('authentication.setup')
    set_state('client.password.initialised')
@when('cni.connected')
@when_not('cni.configured')
def configure_cni(cni):
    ''' Set master configuration on the CNI relation. This lets the CNI
    subordinate know that we're the master so it can respond accordingly. '''
    # kubeconfig_path is intentionally left empty on the master side.
    cni.set_config(is_master=True, kubeconfig_path='')
@when('leadership.is_leader')
@when_not('authentication.setup')
def setup_leader_authentication():
    '''Setup basic authentication and token access for the cluster.

    Generates (or restores) the basic-auth, token and service-account key
    files, wires them into the apiserver/controller-manager flags, and
    broadcasts their contents over leadership data for the followers.
    '''
    api_opts = FlagManager('kube-apiserver')
    controller_opts = FlagManager('kube-controller-manager')
    service_key = '/root/cdk/serviceaccount.key'
    basic_auth = '/root/cdk/basic_auth.csv'
    known_tokens = '/root/cdk/known_tokens.csv'
    api_opts.add('basic-auth-file', basic_auth)
    api_opts.add('token-auth-file', known_tokens)
    hookenv.status_set('maintenance', 'Rendering authentication templates.')
    keys = [service_key, basic_auth, known_tokens]
    # Try first to fetch data from an old leadership broadcast.
    if not get_keys_from_leader(keys) \
            or is_state('reconfigure.authentication.setup'):
        # No broadcast available (or a reconfigure was requested):
        # regenerate the auth files locally.
        last_pass = get_password('basic_auth.csv', 'admin')
        setup_basic_auth(last_pass, 'admin', 'admin', 'system:masters')
        if not os.path.isfile(known_tokens):
            touch(known_tokens)
        # Generate the default service account token key
        os.makedirs('/root/cdk', exist_ok=True)
        if not os.path.isfile(service_key):
            cmd = ['openssl', 'genrsa', '-out', service_key,
                   '2048']
            check_call(cmd)
        remove_state('reconfigure.authentication.setup')
    api_opts.add('service-account-key-file', service_key)
    controller_opts.add('service-account-private-key-file', service_key)
    # read service account key for syndication
    leader_data = {}
    for f in [known_tokens, basic_auth, service_key]:
        with open(f, 'r') as fp:
            leader_data[f] = fp.read()
    # this is slightly opaque, but we are sending file contents under its file
    # path as a key.
    # eg:
    # {'/root/cdk/serviceaccount.key': 'RSA:2471731...'}
    charms.leadership.leader_set(leader_data)
    # Restart components and mark authentication as done.
    remove_state('kubernetes-master.components.started')
    set_state('authentication.setup')
@when_not('leadership.is_leader')
def setup_non_leader_authentication():
    '''Mirror the leader's authentication material and wire it into the
    local apiserver/controller-manager flags.'''
    service_key = '/root/cdk/serviceaccount.key'
    basic_auth = '/root/cdk/basic_auth.csv'
    known_tokens = '/root/cdk/known_tokens.csv'
    keys = [service_key, basic_auth, known_tokens]
    # The source of truth for non-leaders is the leader.
    # Therefore we overwrite_local with whatever the leader has.
    if not get_keys_from_leader(keys, overwrite_local=True):
        # the keys were not retrieved. Non-leaders have to retry.
        return
    if not any_file_changed(keys) and is_state('authentication.setup'):
        # No change detected and we have already setup the authentication
        return
    hookenv.status_set('maintenance', 'Rendering authentication templates.')
    api_opts = FlagManager('kube-apiserver')
    api_opts.add('basic-auth-file', basic_auth)
    api_opts.add('token-auth-file', known_tokens)
    api_opts.add('service-account-key-file', service_key)
    controller_opts = FlagManager('kube-controller-manager')
    controller_opts.add('service-account-private-key-file', service_key)
    # Restart components and mark authentication as done.
    remove_state('kubernetes-master.components.started')
    set_state('authentication.setup')
def get_keys_from_leader(keys, overwrite_local=False):
    """
    Gets the broadcasted keys from the leader and stores them in
    the corresponding files.
    Args:
        keys: list of keys. Keys are actually files on the FS.
        overwrite_local: when True, replace existing local files with the
            leader's copy even if they are already present.
    Returns: True if all key were fetched, False if not.
    """
    # This races with other codepaths, and seems to require being created first
    # This block may be extracted later, but for now seems to work as intended
    os.makedirs('/root/cdk', exist_ok=True)
    for k in keys:
        # If the path does not exist, assume we need it
        if not os.path.exists(k) or overwrite_local:
            # Fetch data from leadership broadcast
            contents = charms.leadership.leader_get(k)
            # Default to logging the warning and wait for leader data to be set
            if contents is None:
                msg = "Waiting on leaders crypto keys."
                hookenv.status_set('waiting', msg)
                hookenv.log('Missing content for file {}'.format(k))
                return False
            # Write out the file and move on to the next item
            with open(k, 'w+') as fp:
                fp.write(contents)
                fp.write('\n')
    return True
@when('kubernetes-master.snaps.installed')
def set_app_version():
    ''' Declare the application version to juju '''
    # The split keeps whatever follows the last b' v' in the --version
    # output (presumably b'Kubernetes v<semver>\n' — confirm upstream).
    raw_version = check_output(['kube-apiserver', '--version'])
    app_version = raw_version.split(b' v')[-1].rstrip()
    hookenv.application_version_set(app_version)
@when('cdk-addons.configured', 'kube-api-endpoint.available',
      'kube-control.connected')
def idle_status(kube_api, kube_control):
    ''' Signal at the end of the run that we are running. '''
    if not all_kube_system_pods_running():
        hookenv.status_set('waiting', 'Waiting for kube-system pods to start')
        return
    if hookenv.config('service-cidr') != service_cidr():
        # The CIDR is frozen once the apiserver starts; warn, don't apply.
        hookenv.status_set(
            'active',
            'WARN: cannot change service-cidr, still using ' + service_cidr())
        return
    # All services should be up and running at this point. Double-check...
    failing_services = master_services_down()
    if failing_services:
        hookenv.status_set(
            'blocked',
            'Stopped services: {}'.format(','.join(failing_services)))
    else:
        hookenv.status_set('active', 'Kubernetes master running.')
def master_services_down():
    """Ensure master services are up and running.
    Return: list of failing services"""
    monitored = ('kube-apiserver',
                 'kube-controller-manager',
                 'kube-scheduler')
    # Each component runs as a snap daemon named snap.<component>.daemon.
    return [svc for svc in monitored
            if not host.service_running('snap.{}.daemon'.format(svc))]
@when('etcd.available', 'tls_client.server.certificate.saved',
      'authentication.setup')
@when_not('kubernetes-master.components.started')
def start_master(etcd):
    '''Run the Kubernetes master components.'''
    hookenv.status_set('maintenance',
                       'Configuring the Kubernetes master services.')
    # Pin the service CIDR; it cannot change once the apiserver is up.
    freeze_service_cidr()
    if not etcd.get_connection_string():
        # etcd is not returning a connection string. This happens when
        # the master unit disconnects from etcd and is ready to terminate.
        # No point in trying to start master services and fail. Just return.
        return
    # TODO: Make sure below relation is handled on change
    # https://github.com/kubernetes/kubernetes/issues/43461
    handle_etcd_relation(etcd)
    # Add CLI options to all components
    configure_apiserver()
    configure_controller_manager()
    configure_scheduler()
    # Expose the secure apiserver port.
    hookenv.open_port(6443)
@when('etcd.available')
def etcd_data_change(etcd):
    ''' Etcd scale events block master reconfiguration due to the
    kubernetes-master.components.started state. We need a way to
    handle these events consistently only when the number of etcd
    units has actually changed '''
    # key off of the connection string
    connection_string = etcd.get_connection_string()
    # If the connection string changes, remove the started state to trigger
    # handling of the master components
    if data_changed('etcd-connect', connection_string):
        remove_state('kubernetes-master.components.started')
@when('kube-control.connected')
@when('cdk-addons.configured')
def send_cluster_dns_detail(kube_control):
    ''' Send cluster DNS info '''
    # Note that the DNS server doesn't necessarily exist at this point. We know
    # where we're going to put it, though, so let's send the info anyway.
    dns_ip = get_dns_ip()
    # Port 53 is the standard DNS port.
    kube_control.set_dns(53, hookenv.config('dns_domain'), dns_ip)
@when('kube-control.connected')
@when('snap.installed.kubectl')
@when('leadership.is_leader')
def create_service_configs(kube_control):
    """Create the users for kubelet"""
    should_restart = False
    # generate the username/pass for the requesting unit
    proxy_token = get_token('system:kube-proxy')
    if not proxy_token:
        setup_tokens(None, 'system:kube-proxy', 'kube-proxy')
        proxy_token = get_token('system:kube-proxy')
        should_restart = True
    client_token = get_token('admin')
    if not client_token:
        setup_tokens(None, 'admin', 'admin', "system:masters")
        client_token = get_token('admin')
        should_restart = True
    # Answer every pending kubelet auth request from worker units.
    requests = kube_control.auth_user()
    for request in requests:
        username = request[1]['user']
        group = request[1]['group']
        kubelet_token = get_token(username)
        if not kubelet_token and username and group:
            # Usernames have to be in the form of system:node:<nodeName>
            userid = "kubelet-{}".format(request[0].split('/')[1])
            setup_tokens(None, username, userid, group)
            kubelet_token = get_token(username)
        kube_control.sign_auth_request(request[0], username,
                                       kubelet_token, proxy_token,
                                       client_token)
        should_restart = True
    if should_restart:
        # Restart the apiserver so new tokens are picked up, then force the
        # authentication handlers to rerun/rebroadcast.
        host.service_restart('snap.kube-apiserver.daemon')
        remove_state('authentication.setup')
@when('kube-control.departed')
@when('leadership.is_leader')
def flush_auth_for_departed(kube_control):
    ''' Unit has left the cluster and needs to have its authentication
    tokens removed from the token registry '''
    token_auth_file = '/root/cdk/known_tokens.csv'
    departing_unit = kube_control.flush_departed()
    # The registry keys kubelet entries by 'kubelet-<unit number>'.
    userid = "kubelet-{}".format(departing_unit.split('/')[1])
    # Read with a context manager so the handle is always closed (the
    # previous open(...).readlines() leaked the file handle).
    with open(token_auth_file, 'r') as fp:
        known_tokens = fp.readlines()
    # Iterate over a copy so removal from the list is safe.
    for line in known_tokens[:]:
        haystack = line.split(',')
        # skip the entry if we dont have token,user,id,groups format
        if len(haystack) < 4:
            continue
        if haystack[2] == userid:
            hookenv.log('Found unit {} in token auth. Removing auth'
                        ' token.'.format(userid))
            known_tokens.remove(line)
    # rewrite the file minus any scrubbed units (note: plain truncate-and-
    # write, not actually atomic)
    hookenv.log('Rewriting token auth file: {}'.format(token_auth_file))
    with open(token_auth_file, 'w') as fp:
        fp.writelines(known_tokens)
    # Trigger rebroadcast of auth files for followers
    remove_state('authentication.setup')
@when_not('kube-control.connected')
def missing_kube_control():
    """Inform the operator master is waiting for a relation to workers.
    If deploying via bundle this won't happen, but if operator is upgrading a
    a charm in a deployment that pre-dates the kube-control relation, it'll be
    missing.
    """
    # Blocked status surfaces in `juju status` until the relation is added.
    hookenv.status_set('blocked', 'Waiting for workers.')
@when('kube-api-endpoint.available')
def push_service_data(kube_api):
    ''' Send configuration to the load balancer, and close access to the
    public interface '''
    # 6443 is the secure apiserver port configured by this charm.
    kube_api.configure(port=6443)
@when('certificates.available')
def send_data(tls):
    '''Send the data that is required to create a server certificate for
    this server.

    Builds the Common Name and the subject-alternative-name list (unit
    IPs, hostname, kubernetes service IP, standard in-cluster DNS names,
    plus operator-configured extra_sans) and requests the cert from the
    tls layer.
    '''
    # Use the public ip of this unit as the Common Name for the certificate.
    common_name = hookenv.unit_public_ip()
    # Get the SDN gateway based on the cidr address.
    kubernetes_service_ip = get_kubernetes_service_ip()
    domain = hookenv.config('dns_domain')
    # Create SANs that the tls layer will add to the server cert.
    sans = [
        hookenv.unit_public_ip(),
        hookenv.unit_private_ip(),
        socket.gethostname(),
        kubernetes_service_ip,
        'kubernetes',
        'kubernetes.{0}'.format(domain),
        'kubernetes.default',
        'kubernetes.default.svc',
        'kubernetes.default.svc.{0}'.format(domain)
    ]
    # maybe they have extra names they want as SANs
    extra_sans = hookenv.config('extra_sans')
    # A plain truthiness test covers both None and "" (the original also
    # compared against "" redundantly).
    if extra_sans:
        sans.extend(extra_sans.split())
    # Create a path safe name by removing path characters from the unit name.
    certificate_name = hookenv.local_unit().replace('/', '_')
    # Request a server cert with this information.
    tls.request_server_cert(common_name, sans, certificate_name)
@when('config.changed.extra_sans', 'certificates.available')
def update_certificate(tls):
    """Re-request the server certificate when extra_sans config changes."""
    # Using the config.changed flag instead of something more specific to
    # try and catch ip changes. Being a little spammy here is ok because
    # the cert layer checks for changes to the cert before issuing a new one.
    send_data(tls)
@when('certificates.server.cert.available',
      'kubernetes-master.components.started')
def kick_api_server(tls):
    """Schedule an apiserver restart when the server certificate changes."""
    # need to be idempotent and don't want to kick the api server
    # without need
    if data_changed('cert', tls.get_server_cert()):
        # certificate changed, so restart the api server
        hookenv.log("Certificate information changed, restarting api server")
        set_state('kube-apiserver.do-restart')
@when('kubernetes-master.components.started')
def configure_cdk_addons():
    ''' Configure CDK addons via `snap set` and apply them, retrying later
    (waiting status) if the apply does not succeed. '''
    remove_state('cdk-addons.configured')
    # Renamed from camelCase `dbEnabled` for PEP 8 consistency with the file.
    dashboard_enabled = str(hookenv.config('enable-dashboard-addons')).lower()
    args = [
        'arch=' + arch(),
        'dns-ip=' + get_dns_ip(),
        'dns-domain=' + hookenv.config('dns_domain'),
        'enable-dashboard=' + dashboard_enabled
    ]
    check_call(['snap', 'set', 'cdk-addons'] + args)
    if not addons_ready():
        hookenv.status_set('waiting', 'Waiting to retry addon deployment')
        # The configured state was already removed above and never re-set,
        # so the redundant second remove_state() was dropped.
        return
    set_state('cdk-addons.configured')
@retry(times=3, delay_secs=20)
def addons_ready():
    """
    Probe whether the cdk-addons templates applied cleanly.
    Returns: True once `cdk-addons.apply` exits successfully
    """
    try:
        check_call(['cdk-addons.apply'])
    except CalledProcessError:
        hookenv.log("Addons are not ready yet.")
        return False
    return True
@when('loadbalancer.available', 'certificates.ca.available',
      'certificates.client.cert.available', 'authentication.setup')
def loadbalancer_kubeconfig(loadbalancer, ca, client):
    """Build the admin kubeconfig pointing at the load balancer endpoint."""
    # Get the potential list of loadbalancers from the relation object.
    hosts = loadbalancer.get_addresses_ports()
    # Get the public address of loadbalancers so users can access the cluster.
    # NOTE(review): assumes hosts is non-empty whenever loadbalancer.available
    # is set — confirm the interface guarantees this.
    address = hosts[0].get('public-address')
    # Get the port of the loadbalancer so users can access the cluster.
    port = hosts[0].get('port')
    server = 'https://{0}:{1}'.format(address, port)
    build_kubeconfig(server)
@when('certificates.ca.available', 'certificates.client.cert.available',
      'authentication.setup')
@when_not('loadbalancer.available')
def create_self_config(ca, client):
    '''Create a kubernetes configuration for the master unit.'''
    # No load balancer: point the kubeconfig at this unit's own apiserver.
    server = 'https://{0}:{1}'.format(hookenv.unit_get('public-address'), 6443)
    build_kubeconfig(server)
@when('ceph-storage.available')
def ceph_state_control(ceph_admin):
    ''' Determine if we should remove the state that controls the re-render
    and execution of the ceph-relation-changed event because there
    are changes in the relationship data, and we should re-render any
    configs, keys, and/or service pre-reqs '''
    # Snapshot of everything the rendered ceph config depends on.
    ceph_relation_data = {
        'mon_hosts': ceph_admin.mon_hosts(),
        'fsid': ceph_admin.fsid(),
        'auth_supported': ceph_admin.auth(),
        'hostname': socket.gethostname(),
        'key': ceph_admin.key()
    }
    # Re-execute the rendering if the data has changed.
    if data_changed('ceph-config', ceph_relation_data):
        remove_state('ceph-storage.configured')
@when('ceph-storage.available')
@when_not('ceph-storage.configured')
def ceph_storage(ceph_admin):
    '''Ceph on kubernetes will require a few things - namely a ceph
    configuration, and the ceph secret key file used for authentication.
    This method will install the client package, and render the requisite
    files in order to consume the ceph-storage relation.'''
    ceph_context = {
        'mon_hosts': ceph_admin.mon_hosts(),
        'fsid': ceph_admin.fsid(),
        'auth_supported': ceph_admin.auth(),
        'use_syslog': "true",
        'ceph_public_network': '',
        'ceph_cluster_network': '',
        'loglevel': 1,
        'hostname': socket.gethostname(),
    }
    # Install the ceph common utilities.
    apt_install(['ceph-common'], fatal=True)
    etc_ceph_directory = '/etc/ceph'
    if not os.path.isdir(etc_ceph_directory):
        os.makedirs(etc_ceph_directory)
    charm_ceph_conf = os.path.join(etc_ceph_directory, 'ceph.conf')
    # Render the ceph configuration from the ceph conf template
    render('ceph.conf', charm_ceph_conf, ceph_context)
    # The key can rotate independently of other ceph config, so validate it
    admin_key = os.path.join(etc_ceph_directory,
                             'ceph.client.admin.keyring')
    try:
        with open(admin_key, 'w') as key_file:
            key_file.write("[client.admin]\n\tkey = {}\n".format(
                ceph_admin.key()))
    except IOError as err:
        hookenv.log("IOError writing admin.keyring: {}".format(err))
    # Enlist the ceph-admin key as a kubernetes secret
    if ceph_admin.key():
        encoded_key = base64.b64encode(ceph_admin.key().encode('utf-8'))
    else:
        # We didn't have a key, and cannot proceed. Do not set state and
        # allow this method to re-execute
        return
    context = {'secret': encoded_key.decode('ascii')}
    render('ceph-secret.yaml', '/tmp/ceph-secret.yaml', context)
    try:
        # At first glance this is deceptive. The apply stanza will create if
        # it doesn't exist, otherwise it will update the entry, ensuring our
        # ceph-secret is always reflective of what we have in /etc/ceph
        # assuming we have invoked this anytime that file would change.
        cmd = ['kubectl', 'apply', '-f', '/tmp/ceph-secret.yaml']
        check_call(cmd)
        os.remove('/tmp/ceph-secret.yaml')
    except (CalledProcessError, OSError):
        # Narrowed from a bare `except:`: check_call raises
        # CalledProcessError/OSError and os.remove raises OSError; anything
        # else (e.g. KeyboardInterrupt) should propagate.
        # The enlistment in kubernetes failed; return and prepare for re-exec.
        return
    # when complete, set a state relating to configuration of the storage
    # backend that will allow other modules to hook into this and verify we
    # have performed the necessary pre-req steps to interface with a ceph
    # deployment.
    set_state('ceph-storage.configured')
@when('nrpe-external-master.available')
@when_not('nrpe-external-master.initial-config')
def initial_nrpe_config(nagios=None):
    """Perform the first NRPE configuration pass for this unit."""
    # Mark configured first so update_nrpe_config handles future changes.
    set_state('nrpe-external-master.initial-config')
    update_nrpe_config(nagios)
@when('config.changed.authorization-mode',
      'kubernetes-master.components.started')
def switch_auth_mode():
    """Re-configure the master when the authorization-mode config changes."""
    mode = hookenv.config().get('authorization-mode')
    if data_changed('auth-mode', mode):
        # Dropping the started state re-triggers full component configuration.
        remove_state('kubernetes-master.components.started')
@when('kubernetes-master.components.started')
@when('nrpe-external-master.available')
@when_any('config.changed.nagios_context',
          'config.changed.nagios_servicegroups')
def update_nrpe_config(unused=None):
    """(Re)write NRPE service checks for the three master snap daemons."""
    services = (
        'snap.kube-apiserver.daemon',
        'snap.kube-controller-manager.daemon',
        'snap.kube-scheduler.daemon'
    )
    hostname = nrpe.get_nagios_hostname()
    current_unit = nrpe.get_nagios_unit_name()
    nrpe_setup = nrpe.NRPE(hostname=hostname)
    nrpe.add_init_service_checks(nrpe_setup, services, current_unit)
    nrpe_setup.write()
@when_not('nrpe-external-master.available')
@when('nrpe-external-master.initial-config')
def remove_nrpe_config(nagios=None):
    """Drop the NRPE checks when the nagios relation goes away."""
    remove_state('nrpe-external-master.initial-config')
    # List of systemd services for which the checks will be removed
    services = (
        'snap.kube-apiserver.daemon',
        'snap.kube-controller-manager.daemon',
        'snap.kube-scheduler.daemon'
    )
    # The current nrpe-external-master interface doesn't handle a lot of logic,
    # use the charm-helpers code for now.
    hostname = nrpe.get_nagios_hostname()
    nrpe_setup = nrpe.NRPE(hostname=hostname)
    for service in services:
        nrpe_setup.remove_check(shortname=service)
def is_privileged():
    """Return boolean indicating whether or not to set allow-privileged=true.
    """
    mode = hookenv.config('allow-privileged')
    if mode != 'auto':
        # Explicit 'true'/'false' setting wins outright.
        return mode == 'true'
    # In auto mode, privilege tracks whether GPU workers are present.
    return is_state('kubernetes-master.gpu.enabled')
@when('config.changed.allow-privileged')
@when('kubernetes-master.components.started')
def on_config_allow_privileged_change():
    """React to changed 'allow-privileged' config value.
    """
    # Dropping the started state re-triggers full component configuration.
    remove_state('kubernetes-master.components.started')
    remove_state('config.changed.allow-privileged')
@when('config.changed.api-extra-args')
@when('kubernetes-master.components.started')
def on_config_api_extra_args_change():
    """Re-render apiserver flags when api-extra-args config changes."""
    configure_apiserver()
@when('kube-control.gpu.available')
@when('kubernetes-master.components.started')
@when_not('kubernetes-master.gpu.enabled')
def on_gpu_available(kube_control):
    """The remote side (kubernetes-worker) is gpu-enabled.

    We need to run in privileged mode.
    """
    config = hookenv.config()
    if config['allow-privileged'] == "false":
        # Operator explicitly disabled privileged mode; surface a hint and
        # do not enable GPU support.
        hookenv.status_set(
            'active',
            'GPUs available. Set allow-privileged="auto" to enable.'
        )
        return
    # Re-run component configuration with gpu.enabled set.
    remove_state('kubernetes-master.components.started')
    set_state('kubernetes-master.gpu.enabled')
@when('kubernetes-master.gpu.enabled')
@when_not('kubernetes-master.privileged')
def disable_gpu_mode():
    """We were in gpu mode, but the operator has set allow-privileged="false",
    so we can't run in gpu mode anymore.
    """
    remove_state('kubernetes-master.gpu.enabled')
@hook('stop')
def shutdown():
    """ Stop the kubernetes master services
    """
    service_stop('snap.kube-apiserver.daemon')
    service_stop('snap.kube-controller-manager.daemon')
    service_stop('snap.kube-scheduler.daemon')
@when('kube-apiserver.do-restart')
def restart_apiserver():
    """Restart kube-apiserver, preserving the unit's workload status."""
    prev_state, prev_msg = hookenv.status_get()
    hookenv.status_set('maintenance', 'Restarting kube-apiserver')
    host.service_restart('snap.kube-apiserver.daemon')
    hookenv.status_set(prev_state, prev_msg)
    # Swap the request flag for the started flag (see componenets_started).
    remove_state('kube-apiserver.do-restart')
    set_state('kube-apiserver.started')
@when('kube-controller-manager.do-restart')
def restart_controller_manager():
    """Restart kube-controller-manager, preserving the workload status."""
    prev_state, prev_msg = hookenv.status_get()
    hookenv.status_set('maintenance', 'Restarting kube-controller-manager')
    host.service_restart('snap.kube-controller-manager.daemon')
    hookenv.status_set(prev_state, prev_msg)
    # Swap the request flag for the started flag (see componenets_started).
    remove_state('kube-controller-manager.do-restart')
    set_state('kube-controller-manager.started')
@when('kube-scheduler.do-restart')
def restart_scheduler():
    """Restart kube-scheduler, preserving the unit's workload status."""
    prev_state, prev_msg = hookenv.status_get()
    hookenv.status_set('maintenance', 'Restarting kube-scheduler')
    host.service_restart('snap.kube-scheduler.daemon')
    hookenv.status_set(prev_state, prev_msg)
    # Swap the request flag for the started flag (see componenets_started).
    remove_state('kube-scheduler.do-restart')
    set_state('kube-scheduler.started')
@when_all('kube-apiserver.started',
          'kube-controller-manager.started',
          'kube-scheduler.started')
@when_not('kubernetes-master.components.started')
def componenets_started():
    """Mark the master started once all three daemons have (re)started."""
    # NOTE(review): name is misspelled ("componenets"); harmless because the
    # reactive framework invokes handlers by registration, not name lookup.
    set_state('kubernetes-master.components.started')
def arch():
    '''Return the package architecture as a string. Raise an exception if the
    architecture is not supported by kubernetes.'''
    # dpkg reports the machine architecture (e.g. amd64, arm64) as bytes;
    # decode and strip the trailing newline.
    raw = check_output(['dpkg', '--print-architecture'])
    return raw.decode('utf-8').rstrip()
def build_kubeconfig(server):
    '''Gather the relevant data for Kubernetes configuration objects and create
    a config object with that information.

    :param server: https URL of the apiserver endpoint to embed in the config.
    Silently does nothing until both the CA file and the admin password exist.
    '''
    # Get the options from the tls-client layer.
    layer_options = layer.options('tls-client')
    # Get all the paths to the tls information required for kubeconfig.
    ca = layer_options.get('ca_certificate_path')
    ca_exists = ca and os.path.isfile(ca)
    client_pass = get_password('basic_auth.csv', 'admin')
    # Do we have everything we need?
    if ca_exists and client_pass:
        # Create an absolute path for the kubeconfig file.
        kubeconfig_path = os.path.join(os.sep, 'home', 'ubuntu', 'config')
        # Create the kubeconfig on this system so users can access the cluster.
        create_kubeconfig(kubeconfig_path, server, ca,
                          user='admin', password=client_pass)
        # Make the config file readable by the ubuntu users so juju scp works.
        cmd = ['chown', 'ubuntu:ubuntu', kubeconfig_path]
        check_call(cmd)
def create_kubeconfig(kubeconfig, server, ca, key=None, certificate=None,
                      user='ubuntu', context='juju-context',
                      cluster='juju-cluster', password=None, token=None):
    '''Create a configuration for Kubernetes based on path using the supplied
    arguments for values of the Kubernetes server, CA, key, certificate, user
    context and cluster.

    At least one credential must be given: client key+certificate, password,
    or token. Raises ValueError if none is given, or if both token and
    password are given (they are mutually exclusive for kubectl).
    '''
    if not key and not certificate and not password and not token:
        raise ValueError('Missing authentication mechanism.')
    # token and password are mutually exclusive. Error early if both are
    # present. The developer has requested an impossible situation.
    # see: kubectl config set-credentials --help
    if token and password:
        raise ValueError('Token and Password are mutually exclusive.')
    # Create the config file with the address of the master server.
    cmd = 'kubectl config --kubeconfig={0} set-cluster {1} ' \
          '--server={2} --certificate-authority={3} --embed-certs=true'
    check_call(split(cmd.format(kubeconfig, cluster, server, ca)))
    # Delete old users
    cmd = 'kubectl config --kubeconfig={0} unset users'
    check_call(split(cmd.format(kubeconfig)))
    # Create the credentials using the client flags.
    cmd = 'kubectl config --kubeconfig={0} ' \
          'set-credentials {1} '.format(kubeconfig, user)
    if key and certificate:
        cmd = '{0} --client-key={1} --client-certificate={2} '\
              '--embed-certs=true'.format(cmd, key, certificate)
    if password:
        cmd = "{0} --username={1} --password={2}".format(cmd, user, password)
    # This is mutually exclusive from password. They will not work together.
    if token:
        cmd = "{0} --token={1}".format(cmd, token)
    check_call(split(cmd))
    # Create a default context with the cluster.
    cmd = 'kubectl config --kubeconfig={0} set-context {1} ' \
          '--cluster={2} --user={3}'
    check_call(split(cmd.format(kubeconfig, context, cluster, user)))
    # Make the config use this new context.
    cmd = 'kubectl config --kubeconfig={0} use-context {1}'
    check_call(split(cmd.format(kubeconfig, context)))
def get_dns_ip(cidr=None):
    '''Get an IP address for the DNS server on the provided cidr.

    :param cidr: optional "a.b.c.d/n" string; defaults to the charm's
        configured service cidr (backward compatible with the old
        zero-argument form).
    :return: dotted-quad string at network address + 10, the conventional
        cluster-DNS slot.
    '''
    if cidr is None:
        cidr = service_cidr()
    interface = ipaddress.IPv4Interface(cidr)
    # Add .10 at the end of the network
    ip = interface.network.network_address + 10
    return ip.exploded
def get_kubernetes_service_ip(cidr=None):
    '''Get the IP address for the kubernetes service based on the cidr.

    :param cidr: optional "a.b.c.d/n" string; defaults to the charm's
        configured service cidr (backward compatible with the old
        zero-argument form).
    :return: dotted-quad string at network address + 1, the conventional
        kubernetes service slot.
    '''
    if cidr is None:
        cidr = service_cidr()
    interface = ipaddress.IPv4Interface(cidr)
    # Add .1 at the end of the network
    ip = interface.network.network_address + 1
    return ip.exploded
def handle_etcd_relation(reldata):
    ''' Save the client credentials and set appropriate daemon flags when
    etcd declares itself as available.

    :param reldata: etcd relation object providing the connection string
        and client TLS credentials.
    '''
    connection_string = reldata.get_connection_string()
    # Define where the etcd tls files will be kept.
    etcd_dir = '/root/cdk/etcd'
    # Create paths to the etcd client ca, key, and cert file locations.
    ca = os.path.join(etcd_dir, 'client-ca.pem')
    key = os.path.join(etcd_dir, 'client-key.pem')
    cert = os.path.join(etcd_dir, 'client-cert.pem')
    # Save the client credentials (in relation data) to the paths provided.
    reldata.save_client_credentials(key, cert, ca)
    api_opts = FlagManager('kube-apiserver')
    # Never use stale data, always prefer whats coming in during context
    # building. if its stale, its because whats in unitdata is stale
    data = api_opts.data
    if data.get('etcd-servers-strict') or data.get('etcd-servers'):
        # Clear any previously-registered etcd flags before re-adding.
        api_opts.destroy('etcd-cafile')
        api_opts.destroy('etcd-keyfile')
        api_opts.destroy('etcd-certfile')
        api_opts.destroy('etcd-servers', strict=True)
        api_opts.destroy('etcd-servers')
    # Set the apiserver flags in the options manager
    api_opts.add('etcd-cafile', ca)
    api_opts.add('etcd-keyfile', key)
    api_opts.add('etcd-certfile', cert)
    api_opts.add('etcd-servers', connection_string, strict=True)
def get_config_args():
    '''Diff operator-supplied api-extra-args against the stored copy.

    Returns a (to_add, to_remove) pair: to_add holds (name, value) tuples
    to (re)apply; to_remove holds option names that disappeared from config.
    When nothing changed, returns (current_args, []) so callers re-apply the
    same values idempotently.
    '''
    db = unitdata.kv()
    old_config_args = db.get('api-extra-args', [])
    # We have to convert them to tuples because we use sets
    old_config_args = [tuple(i) for i in old_config_args]
    new_config_args = []
    new_config_arg_names = []
    for arg in hookenv.config().get('api-extra-args', '').split():
        new_config_arg_names.append(arg.split('=', 1)[0])
        if len(arg.split('=', 1)) == 1:  # handle flags ie. --profiling
            new_config_args.append(tuple([arg, 'true']))
        else:
            new_config_args.append(tuple(arg.split('=', 1)))
    hookenv.log('Handling "api-extra-args" option.')
    hookenv.log('Old arguments: {}'.format(old_config_args))
    hookenv.log('New arguments: {}'.format(new_config_args))
    if set(new_config_args) == set(old_config_args):
        return (new_config_args, [])
    # Store new args
    db.set('api-extra-args', new_config_args)
    to_add = set(new_config_args)
    to_remove = set(old_config_args) - set(new_config_args)
    # Extract option names only; skip names that were merely re-valued,
    # since re-adding them overwrites the old value anyway.
    to_remove = [i[0] for i in to_remove if i[0] not in new_config_arg_names]
    return (to_add, to_remove)
def configure_apiserver():
    '''Render all kube-apiserver snap options and schedule a restart.

    Combines charm defaults, tls paths from the tls-client layer, the
    privileged-mode decision, admission plugins appropriate to the
    apiserver version, and operator-supplied api-extra-args, then pushes
    the flags via `snap set` and raises kube-apiserver.do-restart.
    '''
    # TODO: investigate if it's possible to use config file to store args
    # https://github.com/juju-solutions/bundle-canonical-kubernetes/issues/315
    # Handle api-extra-args config option
    to_add, to_remove = get_config_args()
    api_opts = FlagManager('kube-apiserver')
    # Remove arguments that are no longer provided as config option
    # this allows them to be reverted to charm defaults
    for arg in to_remove:
        hookenv.log('Removing option: {}'.format(arg))
        api_opts.destroy(arg)
        # We need to "unset" options by setting their value to "null" string
        cmd = ['snap', 'set', 'kube-apiserver', '{}=null'.format(arg)]
        check_call(cmd)
    # Get the tls paths from the layer data.
    layer_options = layer.options('tls-client')
    ca_cert_path = layer_options.get('ca_certificate_path')
    client_cert_path = layer_options.get('client_certificate_path')
    client_key_path = layer_options.get('client_key_path')
    server_cert_path = layer_options.get('server_certificate_path')
    server_key_path = layer_options.get('server_key_path')
    if is_privileged():
        api_opts.add('allow-privileged', 'true', strict=True)
        set_state('kubernetes-master.privileged')
    else:
        api_opts.add('allow-privileged', 'false', strict=True)
        remove_state('kubernetes-master.privileged')
    # Handle static options for now
    api_opts.add('service-cluster-ip-range', service_cidr())
    api_opts.add('min-request-timeout', '300')
    api_opts.add('v', '4')
    api_opts.add('tls-cert-file', server_cert_path)
    api_opts.add('tls-private-key-file', server_key_path)
    api_opts.add('kubelet-certificate-authority', ca_cert_path)
    api_opts.add('kubelet-client-certificate', client_cert_path)
    api_opts.add('kubelet-client-key', client_key_path)
    api_opts.add('logtostderr', 'true')
    api_opts.add('insecure-bind-address', '127.0.0.1')
    api_opts.add('insecure-port', '8080')
    api_opts.add('storage-backend', 'etcd2')  # FIXME: add etcd3 support
    admission_control = [
        'Initializers',
        'NamespaceLifecycle',
        'LimitRanger',
        'ServiceAccount',
        'ResourceQuota',
        'DefaultTolerationSeconds'
    ]
    auth_mode = hookenv.config('authorization-mode')
    if 'Node' in auth_mode:
        admission_control.append('NodeRestriction')
    api_opts.add('authorization-mode', auth_mode, strict=True)
    # Trim admission plugins unsupported by older apiserver releases.
    if get_version('kube-apiserver') < (1, 6):
        hookenv.log('Removing DefaultTolerationSeconds from admission-control')
        admission_control.remove('DefaultTolerationSeconds')
    if get_version('kube-apiserver') < (1, 7):
        hookenv.log('Removing Initializers from admission-control')
        admission_control.remove('Initializers')
    api_opts.add('admission-control', ','.join(admission_control), strict=True)
    # Add operator-provided arguments, this allows operators
    # to override defaults
    for arg in to_add:
        hookenv.log('Adding option: {} {}'.format(arg[0], arg[1]))
        # Make sure old value is gone
        api_opts.destroy(arg[0])
        api_opts.add(arg[0], arg[1])
    cmd = ['snap', 'set', 'kube-apiserver'] + api_opts.to_s().split(' ')
    check_call(cmd)
    set_state('kube-apiserver.do-restart')
def configure_controller_manager():
    '''Render kube-controller-manager snap options and schedule a restart.'''
    controller_opts = FlagManager('kube-controller-manager')
    # Get the tls paths from the layer data.
    layer_options = layer.options('tls-client')
    ca_cert_path = layer_options.get('ca_certificate_path')
    # Default to 3 minute resync. TODO: Make this configureable?
    controller_opts.add('min-resync-period', '3m')
    controller_opts.add('v', '2')
    controller_opts.add('root-ca-file', ca_cert_path)
    controller_opts.add('logtostderr', 'true')
    # Talks to the apiserver over its local insecure port.
    controller_opts.add('master', 'http://127.0.0.1:8080')
    cmd = (
        ['snap', 'set', 'kube-controller-manager'] +
        controller_opts.to_s().split(' ')
    )
    check_call(cmd)
    set_state('kube-controller-manager.do-restart')
def configure_scheduler():
    '''Render kube-scheduler snap options and schedule a restart.'''
    scheduler_opts = FlagManager('kube-scheduler')
    scheduler_opts.add('v', '2')
    scheduler_opts.add('logtostderr', 'true')
    # Talks to the apiserver over its local insecure port.
    scheduler_opts.add('master', 'http://127.0.0.1:8080')
    cmd = ['snap', 'set', 'kube-scheduler'] + scheduler_opts.to_s().split(' ')
    check_call(cmd)
    set_state('kube-scheduler.do-restart')
def setup_basic_auth(password=None, username='admin', uid='admin',
                     groups=None):
    '''Write the basic-auth csv file (password,user,uid[,"groups"]) used by
    kube-apiserver, generating a random password when none is supplied.'''
    root_cdk = '/root/cdk'
    if not os.path.isdir(root_cdk):
        os.makedirs(root_cdk)
    htaccess = os.path.join(root_cdk, 'basic_auth.csv')
    password = password or token_generator()
    fields = [password, username, uid]
    if groups:
        # Groups field is quoted so commas inside it stay one csv column.
        fields.append('"{}"'.format(groups))
    with open(htaccess, 'w') as stream:
        stream.write('{0},{1},{2}'.format(*fields) if len(fields) == 3
                     else '{0},{1},{2},{3}'.format(*fields))
def setup_tokens(token, username, user, groups=None):
    '''Append an entry (token,user,id[,"groups"]) to the known_tokens csv
    used by kube-apiserver, generating a random token when none is given.'''
    root_cdk = '/root/cdk'
    if not os.path.isdir(root_cdk):
        os.makedirs(root_cdk)
    known_tokens = os.path.join(root_cdk, 'known_tokens.csv')
    token = token or token_generator()
    fields = [token, username, user]
    if groups:
        # Groups field is quoted so commas inside it stay one csv column.
        fields.append('"{}"'.format(groups))
    with open(known_tokens, 'a') as stream:
        stream.write(','.join(fields) + '\n')
def get_password(csv_fname, user):
    '''Return the first csv field (the secret) of the line whose second
    field matches *user*, or None when the file or user is absent.'''
    tokens_fname = os.path.join('/root/cdk', csv_fname)
    if not os.path.isfile(tokens_fname):
        return None
    with open(tokens_fname, 'r') as stream:
        for line in stream:
            record = line.split(',')
            if record[1] == user:
                return record[0]
    return None
def get_token(username):
    """Grab a token from the static file if present.

    Returns the token string for *username*, or None when absent.
    """
    return get_password('known_tokens.csv', username)
def set_token(password, save_salt):
    ''' Store a token so it can be recalled later by token_generator.

    param: password - the password to be stored
    param: save_salt - the key to store the value of the token.
    Returns the stored value read back from unitdata.
    '''
    db = unitdata.kv()
    db.set(save_salt, password)
    return db.get(save_salt)
def token_generator(length=32):
    ''' Generate a random token for use in passwords and account tokens.

    param: length - the length of the token to generate'''
    # SystemRandom draws from the OS entropy pool, suitable for secrets.
    rng = random.SystemRandom()
    pool = string.ascii_letters + string.digits
    return ''.join(rng.choice(pool) for _ in range(length))
@retry(times=3, delay_secs=10)
def all_kube_system_pods_running():
    ''' Check pod status in the kube-system namespace. Returns True if all
    pods are running, False otherwise. '''
    cmd = ['kubectl', 'get', 'po', '-n', 'kube-system', '-o', 'json']
    try:
        output = check_output(cmd).decode('utf-8')
    except CalledProcessError:
        hookenv.log('failed to get kube-system pod status')
        return False
    result = json.loads(output)
    for pod in result['items']:
        # Any non-Running phase (Pending, Failed, ...) fails the whole check.
        status = pod['status']['phase']
        if status != 'Running':
            return False
    return True
def apiserverVersion():
    """Return the local kube-apiserver version as an (x, y, z) int tuple.

    NOTE(review): camelCase name is kept to avoid breaking callers.
    """
    cmd = 'kube-apiserver --version'.split()
    version_string = check_output(cmd).decode('utf-8')
    # Take the first three numeric groups, e.g. "v1.7.4" -> (1, 7, 4).
    return tuple(int(q) for q in re.findall("[0-9]+", version_string)[:3])
def touch(fname):
    """Update *fname*'s access/modification times, creating it if missing."""
    try:
        os.utime(fname, None)
    except OSError:
        # File does not exist (or cannot be utime'd); create it empty.
        with open(fname, 'a'):
            pass
|
{
"content_hash": "3c955314c25b7c005993033f2a2eed2a",
"timestamp": "",
"source": "github",
"line_count": 1233,
"max_line_length": 79,
"avg_line_length": 37.15409570154096,
"alnum_prop": 0.6531837331645238,
"repo_name": "rajatchopra/kubernetes",
"id": "37f89197812a062fc04273b3c85b7eca822d56e6",
"size": "46422",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "cluster/juju/layers/kubernetes-master/reactive/kubernetes_master.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2525"
},
{
"name": "Go",
"bytes": "42387281"
},
{
"name": "HTML",
"bytes": "1193990"
},
{
"name": "Makefile",
"bytes": "72489"
},
{
"name": "PowerShell",
"bytes": "4261"
},
{
"name": "Python",
"bytes": "2455049"
},
{
"name": "Ruby",
"bytes": "1591"
},
{
"name": "SaltStack",
"bytes": "51788"
},
{
"name": "Shell",
"bytes": "1624872"
}
],
"symlink_target": ""
}
|
# Comment out or delete your code after finishing each challenge.
# Here is a big list of names to have fun with!!
# from: http://deron.meranda.us/data/census-derived-all-first.txt
# A big sample of first names (see the census link above) for the exercises.
names = [
    "Rosanna", "Kristofer", "Yesenia", "Lovie", "Reita", "Merrilee", "Glenna", "Liz",
    "Wen", "Danille", "Josefine", "Senaida", "Susan", "Renda", "Particia", "Stephan",
    "Stormy", "Shawana", "Trinh", "Venus", "Asley", "Stanford", "Joye", "Marcelene",
    "Tanisha", "Earnest", "Noble", "Twanda", "Amparo", "Houston", "Dick",
    "Thomasena", "Latina", "Catherin", "Shana", "Fabian", "Johnna", "Francine",
    "Hollie", "Jamal", "Brigid", "Bell", "Merideth", "Cristy", "Ermelinda", "Rodger",
    "Darwin", "Barabara", "Edyth", "Elicia", "Bernarda", "Esperanza", "Angella",
    "Claris", "Gaston", "Evon", "Holley", "Carolin", "Cathern", "Wilton",
    "Ernestina", "Mariano", "Andreas", "Quincy", "Frederic", "Lyndsey", "Wynell",
    "Larisa", "Inger", "Hwa", "Malisa", "Olene", "Genevie", "Douglass", "Phuong",
    "Bobbye", "Azucena", "Ezequiel", "Anita", "Jaimee", "Fidel", "Glendora",
    "Marilyn", "Mirna", "Nada", "Columbus", "Arturo", "Ashleigh", "Lupita", "Cindie",
    "Mafalda", "Annette", "Spring", "Gustavo", "Abbey", "Garnet", "Londa", "Pedro",
    "Marybelle", "Lazaro", "Elmira", "Maricela", "Shawanna", "Bernard", "Sally",
    "Paola", "Margret", "Kallie", "Jolie", "Jeniffer", "Winona", "Ginger", "Jovan",
    "Cassaundra", "Janett", "Kory", "Dalila", "Rudolf", "Portia", "Tressie", "Keva",
    "Shelba", "Cristine", "Alena", "Nicolette", "Analisa", "Jonas", "Jolene",
    "Georgetta", "Lajuana", "Constance", "Hubert", "Ines", "Brady", "Weston",
    "Marlana", "Kenyetta", "Melaine", "Darcy", "Carlene", "Maryjane", "Margarito",
    "Thaddeus", "Charolette", "Kasha", "Joni", "Lavelle", "Gwenn", "Darlena", "Cory",
    "Rod", "Towanda", "Enid", "Bruce", "Landon", "Rubye", "Jaime", "Hisako",
    "Claude", "Leigha", "Arlyne", "Archie", "Ilene", "Hilton", "Michael", "Merle",
    "Christena", "Kathline", "Cletus", "Velma", "Martina", "Desmond", "Aisha", "Lea",
    "Leah", "Eugenie", "Flo", "Lashell", "Kanisha", "Cody", "Madie", "Barbar",
    "Alisia", "Katharyn", "Velva", "Weldon", "Tory", "Walter", "Kiera", "Denver",
    "Samatha", "Mignon", "Bradley", "Marie", "Ok", "Siobhan", "Eugene", "Raquel",
    "Tamie", "Kena",
]
# Challenge 2.1 - Print all the names in the list, one on each line.
# Next to each name, also print the length of that name, using len().
# Challenge 2.2 - Print out only the names that are three letters long.
# Challenge 2.3 - Make a new empty list (in a variable). Then add all the names
# that are three letters long to that list. Then print out that new list.
# Challenge 2.4 - Using code, figure out what is the average length of a name
# (of the names in this list) - the answer will be a decimal.
# Hint: Use code from Challenge 2.1, but know you'll need len(names) and len of
# each name!
# BONUS Challenge 2.5 - Ask the user to input a capital letter (remember input()?),
# then print out only the names that start with that inputted letter.
# Hint: Which index of a string has the first letter?
|
{
"content_hash": "54435144f5ea54c8834863b8b7f1fb9c",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 85,
"avg_line_length": 53.96666666666667,
"alnum_prop": 0.6084002470660902,
"repo_name": "google/teknowledge",
"id": "afb169eba3f1f96ae523fc1490916e6e487e57f4",
"size": "3353",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "curriculum/05_lists_01_many_names_2_DAYS/05_lists_01_many_names_day_1/05_01_02_many_names.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "120427"
}
],
"symlink_target": ""
}
|
import functools
from django import http
from django.shortcuts import get_object_or_404
from olympia.access import acl
from olympia.addons.models import Addon
def owner_or_unlisted_reviewer(request, addon):
    """True if the request user may review unlisted add-ons, or owns/develops
    *addon* (per acl.check_addon_ownership with dev=True, admin=False)."""
    return (acl.check_unlisted_addons_reviewer(request) or
            # We don't want "admins" here, because it includes anyone with the
            # "Addons:Edit" perm, we only want those with
            # "Addons:ReviewUnlisted" perm (which is checked above).
            acl.check_addon_ownership(request, addon, admin=False, dev=True))
def addon_view(f, qs=Addon.objects.all):
    """Decorator resolving addon_id (pk, guid or slug) into an Addon instance
    passed to the view; redirects numeric/guid URLs to the slug URL and 404s
    unlisted add-ons for non-owners/non-reviewers."""
    @functools.wraps(f)
    def wrapper(request, addon_id=None, *args, **kw):
        """Provides an addon instance to the view given addon_id, which can be
        an Addon pk, guid or a slug."""
        assert addon_id, 'Must provide addon id, guid or slug'
        lookup_field = Addon.get_lookup_field(addon_id)
        if lookup_field == 'slug':
            addon = get_object_or_404(qs(), slug=addon_id)
        else:
            # NOTE(review): assumes get_lookup_field only ever returns
            # 'slug', 'pk' or 'guid'; any other value would leave `addon`
            # unbound here — confirm that contract.
            try:
                if lookup_field == 'pk':
                    addon = qs().get(id=addon_id)
                elif lookup_field == 'guid':
                    addon = qs().get(guid=addon_id)
            except Addon.DoesNotExist:
                raise http.Http404
        # Don't get in an infinite loop if addon.slug.isdigit().
        if addon.slug and addon.slug != addon_id:
            url = request.path.replace(addon_id, addon.slug, 1)
            if request.GET:
                url += '?' + request.GET.urlencode()
            return http.HttpResponsePermanentRedirect(url)
        # If the addon is unlisted it needs either an owner/viewer/dev/support,
        # or an unlisted addon reviewer.
        if not (addon.has_listed_versions() or
                owner_or_unlisted_reviewer(request, addon)):
            raise http.Http404
        return f(request, addon, *args, **kw)
    return wrapper
def addon_view_factory(qs):
    """Build an @addon_view decorator bound to a specific addon queryset.

    Don't evaluate qs here or the locale will get stuck on whatever the
    server starts with. The addon_view() decorator will call qs with no
    arguments before doing anything, so lambdas are ok.
    GOOD: Addon.objects.valid
    GOOD: lambda: Addon.objects.valid().filter(type=1)
    BAD: Addon.objects.valid()
    """
    def decorator(f):
        return addon_view(f, qs=qs)
    return decorator
|
{
"content_hash": "eea2efc70f525bcf90324eb1ea65797e",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 79,
"avg_line_length": 40.186440677966104,
"alnum_prop": 0.6149304091100801,
"repo_name": "psiinon/addons-server",
"id": "735d8ef8907555ad06c46800934223fb0ab862f1",
"size": "2371",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/olympia/addons/decorators.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "752741"
},
{
"name": "Dockerfile",
"bytes": "4089"
},
{
"name": "HTML",
"bytes": "314894"
},
{
"name": "JavaScript",
"bytes": "947557"
},
{
"name": "Makefile",
"bytes": "564"
},
{
"name": "Python",
"bytes": "5192809"
},
{
"name": "Shell",
"bytes": "6712"
},
{
"name": "Smarty",
"bytes": "1418"
},
{
"name": "TSQL",
"bytes": "6926"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations
import phonenumber_field.modelfields
class Migration(migrations.Migration):
    # Adds an optional phone number to Patient.
    # NOTE(review): b'' default is a Python 2 bytes literal kept verbatim;
    # editing an already-applied migration is avoided on purpose.

    dependencies = [
        ('app', '0010_auto_20170204_1835'),
    ]
    operations = [
        migrations.AddField(
            model_name='patient',
            name='telephone',
            field=phonenumber_field.modelfields.PhoneNumberField(default=b'', max_length=128),
        ),
    ]
|
{
"content_hash": "edbb8586040d7c16eca3d3541b91fb42",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 94,
"avg_line_length": 23.63157894736842,
"alnum_prop": 0.6347438752783965,
"repo_name": "cqw1/palliassist_webportal",
"id": "d815b4f0969c23360503e1d512650195b997f200",
"size": "522",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/migrations/0011_patient_telephone.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "51748"
},
{
"name": "HTML",
"bytes": "68746"
},
{
"name": "JavaScript",
"bytes": "200087"
},
{
"name": "Python",
"bytes": "137914"
},
{
"name": "Shell",
"bytes": "88"
}
],
"symlink_target": ""
}
|
from django import template
register = template.Library()
from django.template import Node, TemplateSyntaxError, Variable
from django.core.mail import mail_admins
try:
from hashlib import md5
except ImportError:
from md5 import md5
import struct
from urlparse import urljoin
from os import remove, rename
from os.path import join, abspath, exists, split
from pprint import pformat
from subprocess import Popen, PIPE
import cairo
from django.conf import settings
# Keyword arguments understood by the {% get_text_image %} tag
# (validated in do_get_text_image below).
PARAM_KEYWORDS = set(
    ['font', 'size', 'baseline', 'height', 'color', 'background'])
# CSS named colors -> hex strings; used by convert_color() for any
# color value that does not start with '#'.
_colors = {
    'aliceblue':'#f0f8ff','antiquewhite':'#faebd7','aqua':'#00ffff',
    'aquamarine':'#7fffd4','azure':'#f0ffff','beige':'#f5f5dc',
    'bisque':'#ffe4c4','black':'#000000','blanchedalmond':'#ffebcd',
    'blue':'#0000ff','blueviolet':'#8a2be2','brown':'#a52a2a',
    'burlywood':'#deb887','cadetblue':'#5f9ea0','chartreuse':'#7fff00',
    'chocolate':'#d2691e','coral':'#ff7f50','cornflowerblue':'#6495ed',
    'cornsilk':'#fff8dc','crimson':'#dc143c','cyan':'#00ffff',
    'darkblue':'#00008b','darkcyan':'#008b8b','darkgoldenrod':'#b8860b',
    'darkgray':'#a9a9a9','darkgreen':'#006400','darkkhaki':'#bdb76b',
    'darkmagenta':'#8b008b','darkolivegreen':'#556b2f','darkorange':'#ff8c00',
    'darkorchid':'#9932cc','darkred':'#8b0000','darksalmon':'#e9967a',
    'darkseagreen':'#8fbc8f','darkslateblue':'#483d8b',
    'darkslategray':'#2f4f4f','darkturquoise':'#00ced1','darkviolet':'#9400d3',
    'deeppink':'#ff1493','deepskyblue':'#00bfff','dimgray':'#696969',
    'dodgerblue':'#1e90ff','firebrick':'#b22222','floralwhite':'#fffaf0',
    'forestgreen':'#228b22','fuchsia':'#ff00ff','gainsboro':'#dcdcdc',
    'ghostwhite':'#f8f8ff','gold':'#ffd700','goldenrod':'#daa520',
    'gray':'#808080','green':'#008000','greenyellow':'#adff2f',
    'honeydew':'#f0fff0','hotpink':'#ff69b4','indianred':'#cd5c5c',
    'indigo':'#4b0082','ivory':'#fffff0','khaki':'#f0e68c',
    'lavender':'#e6e6fa','lavenderblush':'#fff0f5','lawngreen':'#7cfc00',
    'lemonchiffon':'#fffacd','lightblue':'#add8e6','lightcoral':'#f08080',
    'lightcyan':'#e0ffff','lightgoldenrodyellow':'#fafad2',
    'lightgreen':'#90ee90','lightgrey':'#d3d3d3','lightpink':'#ffb6c1',
    'lightsalmon':'#ffa07a','lightseagreen':'#20b2aa','lightskyblue':'#87cefa',
    'lightslategray':'#778899','lightsteelblue':'#b0c4de',
    'lightyellow':'#ffffe0','lime':'#00ff00','limegreen':'#32cd32',
    'linen':'#faf0e6','magenta':'#ff00ff','maroon':'#800000',
    'mediumaquamarine':'#66cdaa','mediumblue':'#0000cd',
    'mediumorchid':'#ba55d3','mediumpurple':'#9370db',
    'mediumseagreen':'#3cb371','mediumslateblue':'#7b68ee',
    'mediumspringgreen':'#00fa9a','mediumturquoise':'#48d1cc',
    'mediumvioletred':'#c71585','midnightblue':'#191970','mintcream':'#f5fffa',
    'mistyrose':'#ffe4e1','moccasin':'#ffe4b5','navajowhite':'#ffdead',
    'navy':'#000080','oldlace':'#fdf5e6','olive':'#808000',
    'olivedrab':'#6b8e23','orange':'#ffa500','orangered':'#ff4500',
    'orchid':'#da70d6','palegoldenrod':'#eee8aa','palegreen':'#98fb98',
    'paleturquoise':'#afeeee','palevioletred':'#db7093','papayawhip':'#ffefd5',
    'peachpuff':'#ffdab9','peru':'#cd853f','pink':'#ffc0cb','plum':'#dda0dd',
    'powderblue':'#b0e0e6','purple':'#800080','red':'#ff0000',
    'rosybrown':'#bc8f8f','royalblue':'#4169e1','saddlebrown':'#8b4513',
    'salmon':'#fa8072','sandybrown':'#f4a460','seagreen':'#2e8b57',
    'seashell':'#fff5ee','sienna':'#a0522d','silver':'#c0c0c0',
    'skyblue':'#87ceeb','slateblue':'#6a5acd','slategray':'#708090',
    'snow':'#fffafa','springgreen':'#00ff7f','steelblue':'#4682b4',
    'tan':'#d2b48c','teal':'#008080','thistle':'#d8bfd8','tomato':'#ff6347',
    'turquoise':'#40e0d0','violet':'#ee82ee','wheat':'#f5deb3',
    'white':'#ffffff','whitesmoke':'#f5f5f5','yellow':'#ffff00',
    'yellowgreen':'#9acd32'}
def convert_color(s):
    """Convert a CSS color name or '#'-hex string to an RGBA float tuple.

    Non-string inputs are returned unchanged. 3/4-digit and 6/8-digit
    hex forms are accepted; a missing alpha channel defaults to 1.
    Raises ValueError for any other length. (Python 2: ``basestring``.)
    """
    if not isinstance(s, basestring):
        return s
    if not s.startswith('#'):
        s = _colors[s]
    length = len(s)
    if length in (4, 5):
        # Short form: each digit is doubled ('#abc' -> '#aabbcc').
        channels = [int(digit*2, 16)/255.0 for digit in s[1:]]
    elif length in (7, 9):
        channels = [int(s[pos:pos+2], 16)/255.0
                    for pos in range(1, length, 2)]
    else:
        raise ValueError('color %r has invalid length' % s)
    if len(channels) < 4:
        channels.append(1)
    return tuple(channels)
def read_png_chunk(pngfile):
    """Read one PNG chunk from the input file; return (tag, data).

    Layout per http://www.w3.org/TR/PNG/#5Chunk-layout: a 4-byte
    big-endian length then a 4-byte type tag, followed by the payload.
    The 4-byte CRC that trails the payload is left unread.
    """
    header = pngfile.read(8)
    length, tag = struct.unpack('!I4s', header)
    payload = pngfile.read(length)
    return tag, payload
def get_png_size(filepath):
    """Return (width, height) of the PNG file at ``filepath``.

    Python 2 only: uses the ``file`` builtin and ``except E, e`` syntax.
    Relies on IHDR being the first chunk (which the PNG spec guarantees);
    if it is not, the final ValueError fires on the very first iteration.
    """
    pngfile = file(filepath, 'rb')
    # Fixed 8-byte signature, http://www.w3.org/TR/PNG/#5PNG-file-signature
    signature = pngfile.read(8)
    if (signature != struct.pack("8B", 137, 80, 78, 71, 13, 10, 26, 10)):
        raise ValueError("Invalid PNG signature")
    while True:
        try:
            tag, data = read_png_chunk(pngfile)
        except ValueError, e:
            raise ValueError('Invalid PNG file: ' + e.args[0])
        if tag == 'IHDR': # http://www.w3.org/TR/PNG/#11IHDR
            # First two fields of IHDR are width and height (4 bytes each).
            return struct.unpack("!2I5B", data)[:2]
        raise ValueError('PNG header not found')
def render_text(text, filepath, params):
    """Render ``text`` to a PNG at ``filepath`` using cairo.

    ``params`` may supply 'size', 'font', 'baseline', 'height', 'color'
    and 'background' (see PARAM_KEYWORDS). Returns the integer
    (width, height) of the rendered image. Python 2 only (``except E, e``).
    """
    size = params.get('size', 18)
    weight = cairo.FONT_WEIGHT_NORMAL
    style = cairo.FONT_SLANT_NORMAL
    font = params.get('font', 'Sans')
    # First pass: a throwaway 1x1 surface just to measure the text.
    surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 1, 1)
    context = cairo.Context(surface)
    context.select_font_face(font, style, weight)
    context.set_font_size(size)
    extents = context.text_extents(text)
    x = -extents[0]
    baseline = params.get('baseline', -extents[1])
    # Surfaces must be at least 1px in each dimension.
    width = max(1, extents[2])
    height = max(1, params.get('height', extents[3]))
    # Second pass: the real surface sized to the measured text.
    surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, int(width), int(height))
    font_options = surface.get_font_options()
    font_options.set_antialias(cairo.ANTIALIAS_GRAY)
    context = cairo.Context(surface)
    # Paint the background, default is white.
    background = convert_color(params.get('background', (1, 1, 1, 1)))
    context.set_source_rgba(*background)
    context.paint()
    color = convert_color(params.get('color', (0, 0, 0, 1))) # black
    context.set_source_rgba(*color)
    context.select_font_face(font, style, weight)
    context.set_font_size(size)
    # We need to adjust by the text's offsets to center it.
    context.move_to(x, baseline)
    # We stroke and fill to make sure thinner parts are visible.
    context.text_path(text)
    #context.set_line_width(0.05)
    #context.stroke_preserve()
    context.fill()
    filepath = abspath(filepath)
    try:
        surface.write_to_png(filepath)
    except IOError, e:
        raise IOError("Can't save image in %r: %s\n"
                      "Text: %r\n"
                      "Parameters: %r" % (filepath, e, text, params))
    # Optionally post-process with an external optimizer (settings-driven).
    optimizer = Optimizer()
    if optimizer.is_enabled():
        optimizer.optimize(filepath)
    surface.finish()
    return int(width), int(height)
# Raised (in DEBUG) or mailed to admins when the external PNG optimizer fails.
class OptimizerError(Exception): pass
class Optimizer(object):
    """Run an external PNG optimizer command over a rendered image.

    Configured via settings.CAIROTEXT_OPTIMIZER (a %-format command
    line) and settings.CAIROTEXT_OPTIMIZED_PATH (where the optimizer
    writes its output); disabled when either is missing.
    """
    def __init__(self):
        self.cmdline_template = getattr(
            settings, 'CAIROTEXT_OPTIMIZER', None)
        self.dest_path_template = getattr(
            settings, 'CAIROTEXT_OPTIMIZED_PATH', None)
    def is_enabled(self):
        # Truthy only when both templates are configured.
        return self.cmdline_template and self.dest_path_template
    def optimize(self, filepath):
        """Optimize ``filepath`` in place via the external command.

        State (cmdline, retval, stdout/stderr, ...) is kept on self so
        that error() can report it.
        NOTE(review): shell=True with a settings-derived command line —
        safe only while the templates come from trusted configuration.
        """
        self.filepath = filepath
        params = self.get_params_for(filepath)
        self.cmdline = self.cmdline_template % params
        self.dest_path = self.dest_path_template % params
        process = Popen(self.cmdline, shell=True, stdout=PIPE, stderr=PIPE)
        self.stdout, self.stderr = process.stdout.read(), process.stderr.read()
        self.retval = process.wait()
        if self.retval:
            self.error('Cairotext external optimizer failure')
        elif not exists(self.dest_path):
            self.error('Cairotext optimized image missing')
        elif self.stdout or self.stderr:
            # libpng warnings are tolerated; anything else is reported.
            if not self.stderr.startswith('libpng warning: '):
                self.error('Cairotext optimizer output')
        if exists(self.dest_path):
            # os.rename overwrites existing destination
            rename(self.dest_path, filepath)
    def get_params_for(self, filepath):
        # Substitution values for both %-templates.
        directory, filename = split(filepath)
        name, ext = filename.rsplit('.', 1)
        return {'path': filepath,
                'directory': directory,
                'name': name,
                'ext': ext}
    def error(self, subject):
        # In DEBUG raise; in production mail the admins and carry on.
        message = (
            'Original PNG path: %s\n'
            'Optimized PNG path: %s\n'
            'Optimizer command line: %s\n'
            'Optimizer return value: %d\n%s%s' % (
                self.filepath, self.dest_path, self.cmdline,
                self.retval, self.stdout, self.stderr))
        if settings.DEBUG:
            raise OptimizerError(message)
        else:
            mail_admins(subject, message, fail_silently=True)
class TextImage(object):
    """Value object handed to templates: url/path/size of a rendered PNG."""
    def __init__(self, url, path, size):
        self.url = url
        self.path = path
        self.width, self.height = size
    def _embed(self):
        # Lazily build and cache a data: URI for inline embedding.
        # Python 2 only: relies on the 'base64' string codec; [:-1]
        # drops the trailing newline the codec appends.
        if not hasattr(self, '_base64'):
            self._base64 = ('data:image/png;base64,%s' %
                            file(self.path).read().encode('base64')[:-1])
        return self._base64
    embed = property(_embed)
class GetTextImageNode(Node):
    """Template node: render text to a cached PNG and bind a TextImage
    to ``varname`` in the context."""
    def __init__(self, base_params, text, overrides, varname):
        self.base_params = base_params
        self.text = text
        self.overrides = overrides
        self.varname = varname
    def render(self, context):
        params = {}
        if self.base_params is not None:
            params = self.base_params.resolve(context)
            # A string resolves to a named preset in settings (py2 basestring).
            if isinstance(params, basestring):
                try:
                    presets = settings.CAIROTEXT_PRESETS
                    params = dict(presets[params])
                except (AttributeError, KeyError):
                    raise KeyError('Preset "%s" not found in '
                                   'settings.CAIROTEXT_PRESETS' % params)
        # Per-tag keyword overrides beat the preset values.
        params.update(dict((key, val.resolve(context))
                           for key, val in self.overrides.items()))
        text = self.text.resolve(context)
        # Cache key: md5 over the text plus the pretty-printed params.
        name = md5(text.encode('UTF-8') + pformat(params)).hexdigest()
        render_dir = getattr(settings, 'CAIROTEXT_DIR', 'cairotext_cache')
        filename = '%s.png' % name
        fileurl = urljoin(settings.MEDIA_URL, join(render_dir, filename))
        filepath = join(settings.MEDIA_ROOT, render_dir, filename)
        size = None
        # Render only on cache miss; always read the size back from disk.
        if not exists(filepath):
            size = render_text(text, filepath, params)
        pngsize = get_png_size(filepath)
        assert size is None or size == pngsize, \
            'size mismatch: expected %rx%r, got %rx%r' % (size+pngsize)
        context[self.varname] = TextImage(fileurl, filepath, pngsize)
        return ''
def compile(parser, value):
    """Compile one tag argument: values that look numeric become plain
    Variables, everything else goes through the filter compiler.

    NOTE(review): shadows the ``compile`` builtin within this module.
    """
    looks_numeric = value[0] in '-0123456789'
    return Variable(value) if looks_numeric else parser.compile_filter(value)
def do_get_text_image(parser, token):
    """
    Render text to a PNG and bind a TextImage to a context variable.

    To use presets from settings.CAIROTEXT_PRESETS['base_params'] and
    override text color, use:

    {% get_text_image "Text" color "#aaa" font "Sans" height 20 as img %}
    <img src="{{img.url}}" width="{{img.width}}" height="{{img.height}}" />
    """
    bits = token.split_contents()
    count = len(bits)
    if count < 4:
        # Bug fix: the '%r' placeholder was never filled in, so users saw
        # a literal "%r" instead of the tag name.
        raise TemplateSyntaxError('%r expects at least 3 arguments' % bits[0])
    if bits[-2] != 'as':
        # Same fix as above: format in the tag name.
        raise TemplateSyntaxError('%r expects "as" as its '
                                  'second last argument' % bits[0])
    text = parser.compile_filter(bits[1])
    base_params = None
    # An odd bit count means an optional preset name/dict precedes the
    # keyword/value override pairs.
    if count % 2:
        base_params = parser.compile_filter(bits[2])
    # Remaining bits (before the trailing "as varname") are keyword/value pairs.
    overrides = dict((keyword, compile(parser, value))
        for keyword, value in zip(bits[2+count%2:-2:2], bits[3+count%2:-2:2]))
    varname = bits[-1]
    unknown_keywords = set(overrides.keys()).difference(PARAM_KEYWORDS)
    if unknown_keywords:
        raise TemplateSyntaxError('%r got unknown keywords %s' % (
            bits[0],
            ', '.join(unknown_keywords)))
    return GetTextImageNode(base_params, text, overrides, varname)
do_get_text_image = register.tag('get_text_image', do_get_text_image)
|
{
"content_hash": "cd56da1676d0e9d5d0295686f35f0e88",
"timestamp": "",
"source": "github",
"line_count": 298,
"max_line_length": 91,
"avg_line_length": 41.43624161073826,
"alnum_prop": 0.6145934564301911,
"repo_name": "akaihola/django-cairo-text",
"id": "4bd6cefa910f91c7ba49789a6353846e5c0d3d49",
"size": "12348",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cairotext/templatetags/cairotext.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "32383"
}
],
"symlink_target": ""
}
|
import xmpp
import feedparser
from models import *
import time,re
import urllib
# Placeholder credentials — replace before running.
login = '<Your feeder bot>'
pwd = '<your feeder bot password>'
# Log in: split the JID into node/domain and authenticate with the
# 'Home' resource. Port 5223 is presumably the legacy Google Talk SSL
# port — confirm against current server requirements.
jid=xmpp.JID(login)
user, server = jid.getNode(), jid.getDomain()
cnx = xmpp.Client(server,debug=[])
conres = cnx.connect(server=('talk.google.com',5223))
authres = cnx.auth(user,pwd, 'Home')
def send_feed():
kwds = Keyword.get()
temp_url = "http://search.twitter.com/search.atom?q=%s"
for k in kwds:
feed_url = temp_url % (urllib.quote(k.data),)
d = False
try:
d = feedparser.parse(feed_url)
except UnicodeDecodeError,e:
pass
if d:
fdate = k.updated_at
entries = d.entries
entries.reverse()
for entry in entries:
try:
dtuple = entry.updated_parsed
pdate = datetime.datetime(dtuple[0],dtuple[1],dtuple[2],dtuple[3],dtuple[4],dtuple[5])
fdate = pdate
except AttributeError:
fdate = datetime.datetime.now()
if pdate>k.updated_at:
content = entry.title
try:
content = entry.summary_detail.value
except AttributeError:
content = entry.content[0].value
content = re.sub('<([^!>]([^>]|\n)*)>', '', content)
cnx.send(xmpp.Message("<your primary>","***"+str(k.id)+"***"+content))
k.updated_at = fdate
k.save()
# Announce availability, then poll forever: Process(1) pumps incoming
# XMPP stanzas for up to a second, after which the feeds are re-checked
# roughly every 100 seconds.
cnx.sendInitPresence()
while cnx.Process(1):
    send_feed()
    time.sleep(100)
|
{
"content_hash": "ac858211b2b603e9b9a7d463d1cfd87d",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 106,
"avg_line_length": 30.944444444444443,
"alnum_prop": 0.5230400957510473,
"repo_name": "electrosocial/buzztracker",
"id": "47e2ba25906336227f25d64ff7666d987a4780cf",
"size": "1671",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "feeder.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4287"
}
],
"symlink_target": ""
}
|
from tests import *
import unittest
if __name__ == '__main__':
    # Discover every test under the current directory and run them.
    suite = unittest.TestLoader().discover('.')
    runner = unittest.TextTestRunner(verbosity=1)
    runner.run(suite)
|
{
"content_hash": "5658c9e9608d7614d1af4a1e06ae0410",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 55,
"avg_line_length": 28.666666666666668,
"alnum_prop": 0.686046511627907,
"repo_name": "ecohealthalliance/EpiTator",
"id": "abf4caa66c5bf44e089f2efe1dad9a4b974e96cb",
"size": "172",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "runtests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "329685"
}
],
"symlink_target": ""
}
|
import ibis
def test_column_ref_quoting(translate):
    """Column names containing spaces must be backtick-quoted."""
    tbl = ibis.table([('has a space', 'double')])
    assert translate(tbl['has a space']) == '`has a space`'
def test_identifier_quoting(translate):
    """Reserved words used as column names must be backtick-quoted."""
    tbl = ibis.table([('date', 'double'), ('table', 'string')])
    assert translate(tbl['date']) == '`date`'
    assert translate(tbl['table']) == '`table`'
# TODO: fix it
# def test_named_expression(alltypes, translate):
# a, b = alltypes.get_columns(['int_col', 'float_col'])
# expr = ((a - b) * a).name('expr')
# expected = '(`int_col` - `float_col`) * `int_col` AS `expr`'
# assert translate(expr) == expected
|
{
"content_hash": "5c04533f83bb009a0d7a38eed7d0ecb7",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 66,
"avg_line_length": 30,
"alnum_prop": 0.6043478260869565,
"repo_name": "cloudera/ibis",
"id": "da65a4cd0e9ce8c2f6538ed843033bf0a8cd4f66",
"size": "690",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ibis/backends/clickhouse/tests/test_identifiers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "44943"
},
{
"name": "CMake",
"bytes": "4383"
},
{
"name": "Python",
"bytes": "2570944"
},
{
"name": "Shell",
"bytes": "1989"
}
],
"symlink_target": ""
}
|
"""Configuration and hyperparameter sweeps."""
from lra_benchmarks.image.configs.cifar10 import base_cifar10_config
def get_config():
  """Get the hyperparameter configuration (longformer on CIFAR-10)."""
  config = base_cifar10_config.get_config()
  config.model_type = "longformer"
  # Small-model overrides on top of the base CIFAR-10 config.
  model_overrides = (
      ("num_layers", 4),
      ("emb_dim", 128),
      ("qkv_dim", 64),
      ("mlp_dim", 128),
      ("num_heads", 4),
      ("classifier_pool", "MEAN"),
  )
  for attr, value in model_overrides:
    setattr(config.model, attr, value)
  return config
def get_hyper(hyper):
  """Return an empty hyperparameter sweep (a product over no axes)."""
  return hyper.product([])
|
{
"content_hash": "102a59cb0a34843302c0209fadbc5d51",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 68,
"avg_line_length": 25.7,
"alnum_prop": 0.7237354085603113,
"repo_name": "google-research/long-range-arena",
"id": "8b91c7548a82bebc26bd73c2eb9b76863458bad4",
"size": "1086",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "lra_benchmarks/image/configs/cifar10/longformer_base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "507289"
}
],
"symlink_target": ""
}
|
""" astropy.cosmology contains classes and functions for cosmological
distance measures and other cosmology-related calculations.
See the `Astropy documentation
<http://docs.astropy.org/en/latest/cosmology/index.html>`_ for more
detailed usage examples and references.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from .velocities import *
|
{
"content_hash": "fc28ef0e3b647a98a097756b32731b8c",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 69,
"avg_line_length": 37.27272727272727,
"alnum_prop": 0.748780487804878,
"repo_name": "nhmc/xastropy",
"id": "ac9f99f6f60b9becd44d5f1c6fefe4639be389b0",
"size": "474",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "xastropy/relativity/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "OpenEdge ABL",
"bytes": "144038"
},
{
"name": "Python",
"bytes": "1007319"
}
],
"symlink_target": ""
}
|
import fixtures
from essential import lockutils
class LockFixture(fixtures.Fixture):
    """External locking fixture.
    This fixture is basically an alternative to the synchronized decorator with
    the external flag so that tearDowns and addCleanups will be included in
    the lock context for locking between tests. The fixture is recommended to
    be the first line in a test method, like so::
        def test_method(self):
            self.useFixture(LockFixture('lock_name'))
            ...
    or the first line in setUp if all the test methods in the class are
    required to be serialized. Something like::
        class TestCase(testtools.testcase):
            def setUp(self):
                self.useFixture(LockFixture('lock_name'))
                super(TestCase, self).setUp()
                ...
    This is because addCleanups are put on a LIFO queue that gets run after the
    test method exits. (either by completing or raising an exception)
    """
    def __init__(self, name, lock_file_prefix=None):
        # True -> external (inter-process) lock.
        self.mgr = lockutils.lock(name, lock_file_prefix, True)
    def setUp(self):
        super(LockFixture, self).setUp()
        # Release is registered before acquisition so the lock is freed
        # even if a later setUp/addCleanup step fails.
        self.addCleanup(self.mgr.__exit__, None, None, None)
        self.lock = self.mgr.__enter__()
|
{
"content_hash": "6c9cc53d246063640a56a62b78c39047",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 79,
"avg_line_length": 34.72222222222222,
"alnum_prop": 0.6528,
"repo_name": "zhangxiaolins/python_base",
"id": "fd3a97f802bd7cede248f64d083def014b46ee9d",
"size": "1887",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "essential/fixture/lockutils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1440663"
}
],
"symlink_target": ""
}
|
from sklearn import svm
import numpy as np
# Fixed seed for reproducible SVC training.
np.random.seed(1)
# Columns 6..52 of training.csv hold the 47 numeric stats used below:
# the first 46 are features (X), the last one is the label (Y).
CSV_START_COLUMN = 6
CSV_END_COLUMN = 53
DATASET = np.genfromtxt('training.csv', delimiter=',', skip_header=1,
usecols=np.arange(CSV_START_COLUMN,CSV_END_COLUMN), invalid_raise=False)
X = DATASET[:,:46]
Y = DATASET[:,46]
def combineData(values1, values2):
    """Pair two equal-length 1-D arrays into an (n, 2) feature matrix.

    Replaces the original element-by-element Python append loop with a
    single vectorized call. Also returns shape (0, 2) rather than (0,)
    for empty input, the shape classifier.fit() expects.
    """
    return np.column_stack((values1, values2))
# One shared SVC, re-fit on a different feature pair before each predict.
classifier = svm.SVC()
#offense
# #TS% vs PPG
scoring_ability = combineData(X[:,3], X[:,45])
# #FTr vs FT%  (NOTE(review): ft and passing appear unused by the models below)
ft = combineData(X[:,5], X[:,36])
# #ast vs TOV
passing = combineData(X[:,40], X[:,43])
def offense(player):
    """Score the player's offensive profile using the TS%-vs-PPG model."""
    classifier.fit(scoring_ability, Y)
    features = [[ player[1][3], player[0][23] ]]
    # 0.0 + ... keeps the original float accumulation semantics.
    rating = 0.0 + classifier.predict(features)[0]
    print("STAR Offensive Rating:", rating)
    return rating
#defense
# #STL vs BLK
stalwart = combineData(X[:,41], X[:,42])
# #DBPM vs DBPM (same column duplicated)
def_rating = combineData(X[:,19], X[:,19])
# pf vs pf  (NOTE(review): fouls appears unused by defense() below)
fouls = combineData(X[:,44], X[:,44])
def defense(player):
    """Score the player's defensive profile (STL/BLK plus DBPM models)."""
    rating = 0.0
    # STL vs BLK
    classifier.fit(stalwart, Y)
    rating += classifier.predict([[ player[0][20], player[0][19] ]])[0]
    # DBPM vs DBPM (duplicated feature)
    classifier.fit(def_rating, Y)
    rating += classifier.predict([[ player[1][21], player[1][21] ]])[0]
    print("STAR Defensive Rating:", rating)
    return rating
#overall efficiency
# #usage/PER
efficiency_set = combineData(X[:,13], X[:,2])
# #USG/TOV%
further_set = combineData(X[:,13], X[:,12])
# #PER/PER (same column duplicated)
per = combineData(X[:,2], X[:,2])
def efficiency(player):
    """Score efficiency: sum of USG/PER, USG/TOV% and PER model outputs."""
    models = (
        (efficiency_set, [ player[1][13], player[1][2] ]),   # usage vs PER
        (further_set,    [ player[1][13], player[1][12] ]),  # USG vs TOV%
        (per,            [ player[1][2],  player[1][2] ]),   # PER (duplicated)
    )
    rating = 0.0
    for training_pairs, features in models:
        classifier.fit(training_pairs, Y)
        rating += classifier.predict([features])[0]
    print("STAR Efficiency Rating:", rating)
    return rating
#durability
# #GP/MP (games played vs minutes)
play = combineData(X[:,0], X[:,1])
def durability(player):
    """Score availability using the games-played vs minutes model."""
    classifier.fit(play, Y)
    features = [[ player[1][0], player[1][1] ]]
    rating = 0.0 + classifier.predict(features)[0]
    print("STAR Durability Rating:", rating)
    return rating
#value
# NOTE(review): comment said VORP/WS but both columns are X[:,21] —
# the same column duplicated; confirm the intended pairing.
value_set = combineData(X[:,21], X[:,21])
def value(player):
    """Score overall value using the VORP-based model."""
    classifier.fit(value_set, Y)
    features = [[ player[1][23], player[1][23] ]]
    rating = 0.0 + classifier.predict(features)[0]
    print("STAR Value Rating:", rating)
    return rating
def compositePredict(player):
    """Combine the category models into one 0-10 rating (3 decimals).

    Durability is weighted 1.5x; value() is intentionally excluded.
    """
    total = offense(player)
    total += defense(player)
    total += efficiency(player)
    total += 1.5 * durability(player)
    total = 1.25 * total
    if total >= 10:
        total = 10
    return round(total, 3)
def offenseSkillWord(player):
    """True when the offense model rates the player positively.

    Idiom fix: collapses the redundant if/else into one expression;
    bool() keeps the return a plain bool (not numpy.bool_).
    """
    return bool(offense(player) > 0)
def defenseSkillWord(player):
    """True when the defense model rates the player positively.

    Idiom fix: collapses the redundant if/else into one expression;
    bool() keeps the return a plain bool (not numpy.bool_).
    """
    return bool(defense(player) > 0)
def efficiencySkillWord(player):
    """True when the efficiency model rates the player positively.

    Idiom fix: collapses the redundant if/else into one expression;
    bool() keeps the return a plain bool (not numpy.bool_).
    """
    return bool(efficiency(player) > 0)
def durabilitySkillWord(player):
    """True when the durability model rates the player positively.

    Idiom fix: collapses the redundant if/else into one expression;
    bool() keeps the return a plain bool (not numpy.bool_).
    """
    return bool(durability(player) > 0)
def sumUp(value):
    """Return True when a composite rating clears the 2.9 bar.

    Bug fix: the original fell off the end and implicitly returned None
    for values <= 2.9; it now always returns a bool. The falsy/truthy
    behavior seen by callers is unchanged.
    """
    return bool(value > 2.9)
# if __name__ == "__main__":
# scoringPredict([[0.55,11.3]])
|
{
"content_hash": "5c1821c67b8107104e133ab9317ef664",
"timestamp": "",
"source": "github",
"line_count": 167,
"max_line_length": 76,
"avg_line_length": 20.239520958083833,
"alnum_prop": 0.6556213017751479,
"repo_name": "sidharthrajaram/NBA-Predict",
"id": "2956d45b77c1001ce9b848fda693f0b31f6dcfd8",
"size": "3380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "multi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2466"
},
{
"name": "HTML",
"bytes": "13577"
},
{
"name": "Python",
"bytes": "11711"
}
],
"symlink_target": ""
}
|
# Bring up an InMoov right hand on COM7 and drive it from a Leap Motion.
inmoov = Runtime.createAndStart("inmoov","InMoov")
inmoov.startRightHand("COM7")
# Per-finger servo travel limits in degrees — presumably calibrated for
# this particular build; adjust per hand.
inmoov.rightHand.index.setMinMax(0,160)
inmoov.rightHand.thumb.setMinMax(55,135)
inmoov.rightHand.majeure.setMinMax(50,170)
inmoov.rightHand.ringFinger.setMinMax(48,145)
inmoov.rightHand.pinky.setMinMax(30,168)
# Give the servos a moment to settle before tracking starts.
sleep(1)
inmoov.rightHand.startLeapTracking()
# inmoov.rightHand.stopLeapTracking()
|
{
"content_hash": "5dc1350c1e24c9b3023382d518f75849",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 50,
"avg_line_length": 37.5,
"alnum_prop": 0.8213333333333334,
"repo_name": "MyRobotLab/pyrobotlab",
"id": "bb02c3a17966e33f1382c48dd1cf5c2cb1b3b2f8",
"size": "375",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "home/hairygael/InMoov2LeapMotion1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1827"
},
{
"name": "C",
"bytes": "126258"
},
{
"name": "C++",
"bytes": "373018"
},
{
"name": "Java",
"bytes": "156911"
},
{
"name": "Processing",
"bytes": "17022"
},
{
"name": "Python",
"bytes": "3309101"
},
{
"name": "Shell",
"bytes": "4635"
},
{
"name": "VBA",
"bytes": "11115"
}
],
"symlink_target": ""
}
|
from werkzeug.wsgi import DispatcherMiddleware
from .app import create_app
# WSGI entry point. With no mounts argument, DispatcherMiddleware simply
# wraps the production app at the root path.
application = DispatcherMiddleware(create_app(config_name='production'))
|
{
"content_hash": "a98ab7f2e1586fbadf770609ce3942d9",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 72,
"avg_line_length": 30,
"alnum_prop": 0.8266666666666667,
"repo_name": "RRCKI/prodsys-pa",
"id": "868d09d43ff1d7c03687fff67254a590b010f4fd",
"size": "580",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "prodsyspa/wsgi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "411"
},
{
"name": "PigLatin",
"bytes": "2438"
},
{
"name": "Python",
"bytes": "96817"
},
{
"name": "Shell",
"bytes": "1276"
}
],
"symlink_target": ""
}
|
import pytest
from pock.matchers import ExactValueMatcher, AnyValueMatcher, Matcher, MatchCriteria
# The abstract Matcher base must refuse every operation it declares.
def test_base_matcher():
    matcher = Matcher()
    with pytest.raises(NotImplementedError):
        matcher == 1
    with pytest.raises(NotImplementedError):
        matcher != 1
    with pytest.raises(NotImplementedError):
        hash(matcher)
    with pytest.raises(NotImplementedError):
        matcher.matches(None)
# ExactValueMatcher: equal wrapped values compare/hash equal...
def test_exact_value_matcher_equality():
    matcher1 = ExactValueMatcher(15)
    matcher2 = ExactValueMatcher(15)
    assert matcher1 == matcher2
# ...different values (or a different matcher type) compare unequal.
def test_exact_value_matcher_inequality():
    matcher1 = ExactValueMatcher(10)
    matcher2 = ExactValueMatcher(15)
    matcher3 = AnyValueMatcher()
    assert matcher1 != matcher2
    assert matcher1 != matcher3
def test_exact_value_hashes():
    matcher1 = ExactValueMatcher(15)
    matcher2 = ExactValueMatcher(15)
    assert hash(matcher1) == hash(matcher2)
# matches() accepts only the exact wrapped value.
def test_exact_value_matcher_matching():
    matcher = ExactValueMatcher(12)
    assert matcher.matches(12)
    assert not matcher.matches(13)
# AnyValueMatcher: all instances are interchangeable (equal and same hash).
def test_any_value_matcher_equality():
    matcher1 = AnyValueMatcher()
    matcher2 = AnyValueMatcher()
    assert matcher1 == matcher2
def test_any_value_matcher_inequality():
    matcher1 = AnyValueMatcher()
    matcher2 = ExactValueMatcher(10)
    assert matcher1 != matcher2
def test_any_value_hashes():
    matcher1 = AnyValueMatcher()
    matcher2 = AnyValueMatcher()
    assert hash(matcher1) == hash(matcher2)
# matches() accepts literally anything.
def test_any_value_matching():
    matcher = AnyValueMatcher()
    assert matcher.matches(10)
    assert matcher.matches('anything')
# Plain (non-Matcher) args/kwargs are wrapped into ExactValueMatchers.
def test_match_criteria_converts_non_matcher_args_to_exact_value_matchers():
    match_criteria = MatchCriteria((1, ExactValueMatcher(2)), {'a': 1, 'b': ExactValueMatcher(2)})
    assert all([isinstance(arg, ExactValueMatcher) for arg in match_criteria.arg_matchers])
    assert all([isinstance(arg, ExactValueMatcher) for arg in match_criteria.kwarg_matchers.values()])
# Criteria with identical args/kwargs compare equal and hash equal.
def test_match_criteria_equality():
    match_criteria1 = MatchCriteria((1, 5, 3), {'fd': 43, 'asd': 54})
    match_criteria2 = MatchCriteria((1, 5, 3), {'fd': 43, 'asd': 54})
    assert match_criteria1 == match_criteria2
def test_match_criteria_inequality():
    match_criteria1 = MatchCriteria((1, 5, 3), {'fd': 43, 'asd': 54})
    match_criteria2 = MatchCriteria((2, 4, 3), {'fd': 43, 'asd': 54})
    match_criteria3 = False
    assert match_criteria1 != match_criteria2
    assert match_criteria2 != match_criteria3
def test_match_criteria_hash():
    match_criteria1 = MatchCriteria((1, 5, 3), {'fd': 43, 'asd': 54})
    match_criteria2 = MatchCriteria((1, 5, 3), {'fd': 43, 'asd': 54})
    assert hash(match_criteria1) == hash(match_criteria2)
# matches() rejects wrong positional values, wrong keyword values,
# and argument-count mismatches; accepts an exact match.
def test_match_criteria_does_not_match_if_args_mismatch():
    match_criteria = MatchCriteria((1,), {})
    assert not match_criteria.matches((2,), {})
def test_match_criteria_does_not_match_if_kwargs_mismatch():
    match_criteria = MatchCriteria((), {'a': 1})
    assert not match_criteria.matches((), {'a': 2})
def test_match_criteria_returns_true_when_matching():
    match_criteria = MatchCriteria((1,), {'a': 1})
    assert match_criteria.matches((1,), {'a': 1})
def test_match_criteria_does_not_match_when_args_length_mismatch():
    match_criteria = MatchCriteria((1,), {})
    assert not match_criteria.matches((1, 2), {})
|
{
"content_hash": "cd990753fffc884635f0770c9e251146",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 102,
"avg_line_length": 26.221374045801525,
"alnum_prop": 0.6847161572052402,
"repo_name": "atbentley/pock",
"id": "8fa732953f9f253ef2f583cfd6e11385ce0d2500",
"size": "3435",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/unit/matchers_unit_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "44412"
}
],
"symlink_target": ""
}
|
from xml.dom.minidom import parseString
from xml.etree.ElementTree import tostring, SubElement, Element
from datetime import datetime
from dateutil.parser import parse
from api import XeroPrivateClient, XeroException
from api import XERO_BASE_URL, XERO_API_URL
import urllib
# HTTP-status-specific error types raised while talking to the Xero API.
class XeroException404(XeroException):
    pass
class XeroException500(XeroException):
    pass
class XeroBadRequest(XeroException):
    pass
class XeroNotImplemented(XeroException):
    pass
# Fallback for any response status not covered above.
class XeroExceptionUnknown(XeroException):
    pass
class Manager(object):
    """Wraps one Xero API collection (e.g. Invoices) with CRUD helpers."""
    # Methods wrapped by __get_data so they return parsed results.
    DECORATED_METHODS = ('get', 'save', 'filter', 'all', 'put')
    # Fields parsed with dateutil and re-serialized as "%Y-%m-%d %H:%M:%S".
    DATETIME_FIELDS = (u'UpdatedDateUTC', u'Updated', u'FullyPaidOnDate')
    DATE_FIELDS = (u'DueDate', u'Date')
    BOOLEAN_FIELDS = (u'IsSupplier', u'IsCustomer', u'HasAttachments')
    # Element names whose repeated children are collected into tuples.
    MULTI_LINES = (u'LineItem', u'Phone', u'Address', 'TaxRate')
    # Irregular singular -> element name fixups for XML generation.
    PLURAL_EXCEPTIONS = {'Addresse':'Address'}
    def __init__(self, name, client):
        """Bind this manager to a collection ``name`` and an API client."""
        self.client = client
        self.name = name
        # setup our singular variants of the name
        # only if the name ends in "s"
        if name[-1] == "s":
            self.singular = name[:len(name)-1]
        else:
            self.singular = name
        # Replace each CRUD method with a wrapper that performs the HTTP
        # request and parses the response (see __get_data).
        for method_name in self.DECORATED_METHODS:
            method = getattr(self, method_name)
            setattr(self, method_name, self.__get_data(method))
def walk_dom(self, dom):
tree_list = tuple()
for node in dom.childNodes:
tagName = getattr(node, 'tagName', None)
if tagName:
tree_list += (tagName , self.walk_dom(node),)
else:
data = node.data.strip()
if data:
tree_list += (node.data.strip(),)
return tree_list
    def convert_to_dict(self, deep_list):
        """Convert the tuple tree from walk_dom into dicts/tuples/values.

        Leaf values get type coercion (booleans; date fields reformatted
        as strings). Repeated elements (MULTI_LINES or the singular
        record name) become tuples of dicts. Python 2 only (``unicode``,
        ``has_key``).
        """
        out = {}
        if len(deep_list) > 2:
            # Interleaved (key, payload) pairs: strings are keys,
            # tuples are the matching payloads.
            lists = [l for l in deep_list if isinstance(l, tuple)]
            keys = [l for l in deep_list if isinstance(l, unicode)]
            for key, data in zip(keys, lists):
                if len(data) == 1:
                    # we're setting a value
                    # check to see if we need to apply any special
                    # formatting to the value
                    val = data[0]
                    if key in self.BOOLEAN_FIELDS:
                        val = True if val.lower() == 'true' else False
                    if key in self.DATETIME_FIELDS:
                        #Jayd hack to convert datetime object to string
                        #Allows saving in MongoDB
                        val = parse(val).strftime("%Y-%m-%d %H:%M:%S")
                    if key in self.DATE_FIELDS:
                        #Jayd hack to convert datetime object to string
                        #Allows saving in MongoDB
                        val = parse(val).strftime("%Y-%m-%d %H:%M:%S")
                    out[key] = val
                elif len(data) > 1 and ((key in self.MULTI_LINES) or (key == self.singular)):
                    # our data is a collection and needs to be handled as such
                    # NOTE(review): if out is still a non-empty dict here,
                    # `out += (...)` would raise TypeError — this appears to
                    # rely on collections arriving first; confirm.
                    if out:
                        out += (self.convert_to_dict(data),)
                    else:
                        out = (self.convert_to_dict(data),)
                elif len(data) > 1:
                    out[key] = self.convert_to_dict(data)
        elif len(deep_list) == 2:
            # Single (key, payload) pair.
            key = deep_list[0]
            data = deep_list[1]
            out[key] = self.convert_to_dict(data)
        else:
            # Bare leaf value.
            out = deep_list[0]
        return out
def dict_to_xml( self, root_elm, dict_data ):
for key in dict_data.keys():
_data = dict_data[key]
_elm = SubElement(root_elm, key)
_list_data = (isinstance(_data, list) or isinstance(_data, tuple))
_is_plural = (key[len(key)-1] == "s")
_plural_name = key[:len(key)-1]
if isinstance(_data, dict):
_elm = self.dict_to_xml(_elm, _data)
elif _list_data and not _is_plural:
for _d in _data:
__elm = self.dict_to_xml(_elm, _d)
elif _list_data:
for _d in _data:
_plural_name = self.PLURAL_EXCEPTIONS.get(_plural_name, _plural_name)
__elm = self.dict_to_xml(SubElement(_elm, _plural_name), _d)
else:
_elm.text = str(_data)
return root_elm
def __prepare_data__for_save(self, data):
if isinstance(data, list) or isinstance(data, tuple):
root_elm = Element(self.name)
for d in data:
sub_elm = SubElement(root_elm, self.singular)
self.dict_to_xml(sub_elm, d)
else:
root_elm = self.dict_to_xml(Element(self.singular), data)
return tostring(root_elm)
def __get_results(self, data):
response = data[u'Response']
result = response.get(self.name, {})
if isinstance(result, tuple):
return result
if isinstance(result, dict) and result.has_key(self.singular):
return result[self.singular]
def __get_data(self, func):
def wrapper(*args, **kwargs):
req_args = func(*args, **kwargs)
response = self.client.request(*req_args)
body = response[1]
headers = response[0]
if headers['status'] == '200':
if headers['content-type'] == 'application/pdf':
return body
dom = parseString(body)
data = self.convert_to_dict(self.walk_dom(dom))
return self.__get_results(data)
elif headers['status'] == '404':
msg = ' : '.join([str(headers['status']), body])
raise XeroException404(msg)
elif headers['status'] == '500':
msg = ' : '.join([str(headers['status']), body])
raise XeroException500(msg)
elif headers['status'] == '400' or headers['status'] == '401':
msg = ' : '.join([str(headers['status']), body])
raise XeroBadRequest(msg)
elif headers['status'] == '501':
msg = ' : '.join([str(headers['status']), body])
raise XeroNotImplemented(msg)
else:
msg = ' : '.join([str(headers['status']), body])
raise XeroExceptionUnknown(msg)
return wrapper
def get(self, id, headers=None):
uri = '/'.join([XERO_API_URL, self.name, id])
return uri, 'GET', None, headers
def save_or_put(self, data, method='post'):
headers = {
"Content-Type": "application/x-www-form-urlencoded; charset=utf-8"
}
uri = '/'.join([XERO_API_URL, self.name])
body = 'xml='+urllib.quote(self.__prepare_data__for_save(data))
return uri, method, body, headers
def save(self, data):
return self.save_or_put(data, method='post')
def put(self, data):
return self.save_or_put(data, method='PUT')
def prepare_filtering_date(self, val):
if isinstance(val, datetime):
val = val.strftime('%a, %d %b %Y %H:%M:%S GMT')
else:
val = '"%s"' % val
return {'If-Modified-Since': val}
def filter(self, **kwargs):
headers = None
uri = '/'.join([XERO_API_URL, self.name])
if kwargs:
if kwargs.has_key('Since'):
val = kwargs['Since']
headers = self.prepare_filtering_date(val)
del kwargs['Since']
def get_filter_params():
if key in self.BOOLEAN_FIELDS:
return 'true' if kwargs[key] else 'false'
elif key in self.DATETIME_FIELDS:
return kwargs[key].isoformat()
else:
return '"%s"' % str(kwargs[key])
def generate_param(key):
parts = key.split("__")
field = key.replace('_','.')
fmt = '%s==%s'
if len(parts) == 2:
# support filters:
# Name__Contains=John becomes Name.Contains("John")
if parts[1] in ["Contains", "StartsWith", "EndsWith"]:
field = parts[0]
fmt = ''.join(['%s.', parts[1], '(%s)'])
return fmt % (
field,
get_filter_params()
)
params = [generate_param(key) for key in kwargs.keys()]
if params:
uri += '?where=' + urllib.quote('&&'.join(params))
return uri, 'GET', None, headers
def all(self):
uri = '/'.join([XERO_API_URL, self.name])
return uri, 'GET', None, None
class Xero(object):
    """
    An ORM-style facade over the Xero API.
    This has only been tested with the Private API.
    """

    OBJECT_LIST = (u'Contacts', u'Accounts', u'CreditNotes',
                   u'Currencies', u'Invoices', u'Organisation', u'Overpayments',
                   u'Payments', u'TaxRates', u'TrackingCategories')

    def __init__(self, consumer_key, consumer_secret, privatekey):
        # A single private-API client is shared by every endpoint Manager.
        api_client = XeroPrivateClient(consumer_key,
                                       consumer_secret,
                                       privatekey)
        # Expose one Manager per supported endpoint as a lowercase
        # attribute (self.contacts, self.invoices, ...), each bound to
        # the shared client.
        for object_name in self.OBJECT_LIST:
            setattr(self, object_name.lower(), Manager(object_name, api_client))
|
{
"content_hash": "fd72d432f2efde05538fccc79ace5e2f",
"timestamp": "",
"source": "github",
"line_count": 277,
"max_line_length": 93,
"avg_line_length": 35.55234657039711,
"alnum_prop": 0.5115759545085297,
"repo_name": "jaydlawrence/XeroPy",
"id": "4e3b9a7095892c5c6a79741d4c726721520ac1af",
"size": "9848",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "XeroPy/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "17308"
}
],
"symlink_target": ""
}
|
from org.myrobotlab.service import Runtime
from org.myrobotlab.document.transformer import WorkflowConfiguration
from org.myrobotlab.document.transformer import StageConfiguration

# Jython script: build a DocumentPipeline service, configure a workflow of
# transformer stages (static field -> OpenNLP -> Solr), then crawl the
# MyRobotLab RSS feed through it.

# create the pipeline service
pipeline = runtime.start("docproc", "DocumentPipeline")

# create a workflow to load into that pipeline service
workflowConfig = WorkflowConfiguration();
workflowConfig.setName("default");

# stage 1: statically assign the value "MRL" to the field "table" on the document
staticFieldStageConfig = StageConfiguration();
staticFieldStageConfig.setStageClass("org.myrobotlab.document.transformer.SetStaticFieldValue");
staticFieldStageConfig.setStageName("SetTableField");
staticFieldStageConfig.setStringParam("table", "MRL");
workflowConfig.addStage(staticFieldStageConfig);

# stage 2: run OpenNLP over the document's "description" field
openNLPConfig = StageConfiguration()
openNLPConfig.setStageClass("org.myrobotlab.document.transformer.OpenNLP")
openNLPConfig.setStageName("OpenNLP")
openNLPConfig.setStringParam("textField","description")
workflowConfig.addStage(openNLPConfig)

# stage 3: send the document to Solr
sendToSolrConfig = StageConfiguration();
sendToSolrConfig.setStageClass("org.myrobotlab.document.transformer.SendToSolr")
sendToSolrConfig.setStageName("SendToSolr")
sendToSolrConfig.setStringParam("solrUrl", "http://www.skizatch.org:8983/solr/graph")
workflowConfig.addStage(sendToSolrConfig)

# set the config on the pipeline service
pipeline.setConfig(workflowConfig)
# initialize the pipeline (load the config)
# NOTE(review): "initalize" appears to be the service's actual method name;
# confirm before "fixing" the spelling.
pipeline.initalize()

# create a connector that crawls the MyRobotLab RSS url
rss = runtime.start("rss", "RSSConnector")
# Attach the output of the rss connector to the pipeline
rss.addDocumentListener(pipeline)
# tell the RSS connector to start crawling the site;
# the connector issues a flush when it's done crawling.
rss.startCrawling()
|
{
"content_hash": "35d7c56390c4e28af39472dcdee0164f",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 96,
"avg_line_length": 46.6,
"alnum_prop": 0.8267167381974249,
"repo_name": "MyRobotLab/myrobotlab",
"id": "a1fbfe7e58e669d1063ed24293fa0847ceb396fd",
"size": "1981",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/main/resources/resource/DocumentPipeline/DocumentPipeline.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1542"
},
{
"name": "C",
"bytes": "6677"
},
{
"name": "C++",
"bytes": "274868"
},
{
"name": "CSS",
"bytes": "83744"
},
{
"name": "GLSL",
"bytes": "757"
},
{
"name": "HTML",
"bytes": "374401"
},
{
"name": "Java",
"bytes": "7100082"
},
{
"name": "JavaScript",
"bytes": "1536187"
},
{
"name": "Propeller Spin",
"bytes": "14406"
},
{
"name": "Python",
"bytes": "191671"
},
{
"name": "Shell",
"bytes": "3547"
}
],
"symlink_target": ""
}
|
from django.conf.urls import include, url

from todo import views

# URL routes for the todo app: the site root renders the home view.
urlpatterns = [
    url(r'^$', views.home),
]
|
{
"content_hash": "c459fe68c9b4b26ab9a0352d5ca0e318",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 41,
"avg_line_length": 18.666666666666668,
"alnum_prop": 0.6875,
"repo_name": "gregwym/djangoProjectStructure",
"id": "4c5f0f08332877b88c07acbf72aa2b50918cdd60",
"size": "112",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "todo/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7953"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the integer ``points`` column to Badge."""

    def forwards(self, orm):
        # Adding field 'Badge.points'
        db.add_column('badges_badge', 'points', self.gf('django.db.models.fields.IntegerField')(default=0), keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Badge.points'
        db.delete_column('badges_badge', 'points')

    # Frozen ORM snapshot generated by South; used to reconstruct the model
    # state while this migration runs. Do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 6, 18, 22, 23, 8, 859563)'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 6, 18, 22, 23, 8, 858981)'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'badges.badge': {
            'Meta': {'object_name': 'Badge'},
            'award_condition': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'hint': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'label': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'theme': ('django.db.models.fields.CharField', [], {'default': "'6'", 'max_length': '1'})
        },
        'badges.badgeaward': {
            'Meta': {'object_name': 'BadgeAward'},
            'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['badges.Badge']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'profile': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['player_mgr.Profile']"})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'player_mgr.profile': {
            'Meta': {'object_name': 'Profile'},
            'completion_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'contact_carrier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'contact_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'contact_text': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'daily_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'last_visit_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'referrer_awarded': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'referring_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'referred_profiles'", 'null': 'True', 'to': "orm['auth.User']"}),
            'setup_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'setup_profile': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['team_mgr.Team']", 'null': 'True', 'blank': 'True'}),
            'theme': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"})
        },
        'team_mgr.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True', 'db_index': 'True'})
        },
        'team_mgr.team': {
            'Meta': {'object_name': 'Team'},
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['team_mgr.Group']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True', 'db_index': 'True'})
        }
    }

    complete_apps = ['badges']
|
{
"content_hash": "273815b2284449e6a829e2a3090bb242",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 182,
"avg_line_length": 73.57522123893806,
"alnum_prop": 0.5449843637238393,
"repo_name": "KendyllD/boukenda-project",
"id": "d9af5011bfafcb6d7db1bc1b4fb3c4976f8623f6",
"size": "8332",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "makahiki/apps/widgets/badges/migrations/0004_auto__add_field_badge_points.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
import django
from django.conf.urls import include, url
from django.contrib import admin

# Django 1.9 dropped the need to wrap admin.site.urls in include();
# support both spellings so this test project runs on older versions too.
if django.VERSION < (1, 9):
    admin_urls = include(admin.site.urls)
else:
    admin_urls = admin.site.urls

# Routes for the test project: only the admin site is mounted.
urlpatterns = [
    url(r'^admin/', admin_urls),
]
|
{
"content_hash": "f2a3f83d6c320a82c48ec1c4812463bc",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 41,
"avg_line_length": 19.384615384615383,
"alnum_prop": 0.6944444444444444,
"repo_name": "kbussell/django-auditlog",
"id": "c9346f253732cdabc684d84ab0d2656d8cb11e16",
"size": "252",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/auditlog_tests/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "80631"
}
],
"symlink_target": ""
}
|
import tensorflow as tf
import numpy as np
from data import *
def weight_variable(shape, name=None):
    """Create a weight tf.Variable initialised from a truncated normal (stddev 0.1).

    shape: list/tuple of ints giving the variable's shape.
    name:  optional graph name for the variable.
    """
    initial = tf.truncated_normal(shape, stddev=0.1)
    # BUG FIX: the original called tf.Variable(initial, name), passing the
    # name positionally into tf.Variable's second parameter (`trainable`),
    # so the variable was never actually named. Passing it as a keyword
    # fixes that; name=None is the default, so the two branches collapse
    # into one call.
    return tf.Variable(initial, name=name)
def bias_variable(shape, name=None):
    """Create a bias tf.Variable initialised to the constant 0.1.

    shape: list/tuple of ints giving the variable's shape.
    name:  optional graph name for the variable.
    """
    initial = tf.constant(0.1, shape=shape)
    # BUG FIX: as with weight_variable, the original passed `name`
    # positionally into tf.Variable's `trainable` parameter; pass it as a
    # keyword so the variable is actually named.
    return tf.Variable(initial, name=name)
class Implynet:
    """LSTM model that predicts a query point's true/false label from a
    sequence of (x, y, t/f) observations.

    The graph exposes one prediction per observation *prefix* (0..OBS_SIZE
    observations seen), so the model can be queried mid-sequence for active
    learning (see get_most_confuse / get_active_trace).

    NOTE(review): Python 2 / TensorFlow 0.x code (print statements,
    tf.concat(dim, values) argument order, tf.initialize_all_variables).
    """

    def gen_feed_dict(self, obs_x, obs_y, obs_tf,
                      new_ob_x, new_ob_y, new_ob_tf):
        # Map each observation placeholder and the query placeholders to
        # the supplied batch data.
        ret = {}
        for a, b in zip(self.ph_obs_x, obs_x):
            ret[a] = b
        for a, b in zip(self.ph_obs_y, obs_y):
            ret[a] = b
        for a, b in zip(self.ph_obs_tf, obs_tf):
            ret[a] = b
        ret[self.ph_new_ob_x] = new_ob_x
        ret[self.ph_new_ob_y] = new_ob_y
        ret[self.ph_new_ob_tf] = new_ob_tf
        return ret

    # load the model weights into an existing session
    def load_model(self, sess, saved_loc):
        self.saver.restore(sess, saved_loc)
        print("Model restored.")

    # make the model
    def __init__(self, name):
        with tf.variable_scope('imply') as scope:
            # set up placeholders: OBS_SIZE observation slots, each an
            # L-dim x encoding, L-dim y encoding and a 2-way t/f label
            self.ph_obs_x = [tf.placeholder(tf.float32, [N_BATCH, L],
                             name="ph_ob_x"+str(i)) for i in range(OBS_SIZE)]
            self.ph_obs_y = [tf.placeholder(tf.float32, [N_BATCH, L],
                             name="ph_ob_y"+str(j)) for j in range(OBS_SIZE)]
            self.ph_obs_tf = [tf.placeholder(tf.float32, [N_BATCH, 2],
                              name="ph_ob_tf"+str(k)) for k in range(OBS_SIZE)]
            # the query point and its true label
            self.ph_new_ob_x = tf.placeholder(tf.float32, [N_BATCH, L], name="ph_new_ob_x")
            self.ph_new_ob_y = tf.placeholder(tf.float32, [N_BATCH, L], name="ph_new_ob_y")
            self.ph_new_ob_tf = tf.placeholder(tf.float32, [N_BATCH, 2], name="ph_new_ob_tf")
            # some constants
            self.n_hidden = 1200
            self.n_pred_hidden = 1000
            # variables trained for the prediction task
            self.VAR_pred = []
            # ---------------- convolve in the observations ----------------
            # initial lstm state
            state = tf.zeros([N_BATCH, self.n_hidden])
            # stacked lstm
            lstm = tf.nn.rnn_cell.MultiRNNCell([tf.nn.rnn_cell.LSTMCell(300), tf.nn.rnn_cell.LSTMCell(300)])
            hiddens = [state]
            with tf.variable_scope("imply/LSTM") as scope:
                for i in range(OBS_SIZE):
                    if i > 0:
                        # share the LSTM weights across time steps
                        scope.reuse_variables()
                    cell_input = tf.concat(1, [self.ph_obs_x[i], self.ph_obs_y[i], self.ph_obs_tf[i]])
                    output, state = lstm(cell_input, state)
                    # keep the state after every step: one per prefix
                    hiddens.append(state)
            lstm_variables = [v for v in tf.all_variables()
                              if v.name.startswith("imply/LSTM")]
            print lstm_variables
            self.VAR_pred += lstm_variables
        # -------------------- answer the query --------------------
        with tf.variable_scope('imply') as scope:
            # 2-layer MLP mapping (query x, query y, lstm state) -> P(t/f)
            W_query1 = weight_variable([self.n_hidden + L + L, self.n_pred_hidden])
            b_query1 = bias_variable([self.n_pred_hidden])
            W_query2 = weight_variable([self.n_pred_hidden, 2])
            b_query2 = bias_variable([2])
            self.VAR_pred += [W_query1, b_query1, W_query2, b_query2]
            hidden_cat_query = [tf.nn.relu(
                tf.matmul(tf.concat(1, [self.ph_new_ob_x, self.ph_new_ob_y, hidden]), W_query1) + b_query1)
                for hidden in hiddens]
            print "hidden_cat_query shape ", show_dim(hidden_cat_query)
            # epsilon keeps the log() in the cost finite
            e2 = tf.constant(1e-10, shape=[N_BATCH, 2])
            self.query_preds = [tf.nn.softmax(tf.matmul(hcq, W_query2) + b_query2)+e2 for hcq in hidden_cat_query]
            print "query_preds shape ", show_dim(self.query_preds)
            # cross-entropy against the true label, summed over all prefixes
            query_pred_costs = [-tf.reduce_sum(self.ph_new_ob_tf * tf.log(op)) for op in self.query_preds]
            print "costs shapes ", show_dim(query_pred_costs)
            self.cost_query_pred = sum(query_pred_costs)
        # -------------------- training step --------------------
        optimizer = tf.train.RMSPropOptimizer(0.0001)
        pred_gvs = optimizer.compute_gradients(self.cost_query_pred, var_list = self.VAR_pred)
        # clip gradients to [-5, 5] for stability
        capped_pred_gvs = [(tf.clip_by_value(grad, -5., 5.), var) for grad, var in pred_gvs]
        self.train_query_pred = optimizer.apply_gradients(capped_pred_gvs)
        # Before starting, initialize the variables. We will 'run' this first.
        self.init = tf.initialize_all_variables()
        self.saver = tf.train.Saver()

    # save the model
    def save(self, sess, model_loc="model_imply.ckpt"):
        save_path = self.saver.save(sess, model_loc)
        print("Model saved in file: %s" % save_path)

    # train on a particular data batch
    def train(self, sess, data_batch):
        _, obs_x, obs_y, obs_tfs, new_ob_x, new_ob_y, new_ob_tf, _ = data_batch
        feed_dic = self.gen_feed_dict(obs_x, obs_y, obs_tfs, new_ob_x, new_ob_y, new_ob_tf)
        # log cost before/after one optimizer step to show whether it improved
        cost_query_pred_pre = sess.run([self.cost_query_pred], feed_dict=feed_dic)[0]
        sess.run([self.train_query_pred], feed_dict=feed_dic)
        cost_query_pred_post = sess.run([self.cost_query_pred], feed_dict=feed_dic)[0]
        print "train query pred ", cost_query_pred_pre, " ",\
            cost_query_pred_post, " ", True if cost_query_pred_post < cost_query_pred_pre else False

    # =========== HELPERS =============
    # build the observation part of a feed dict from a list of observations
    def get_feed_dic_obs(self, obs):
        # needing to create all the necessary feeds: start with zero-filled
        # slots for every observation position ...
        obs_x = []
        obs_y = []
        obs_tf = []
        for _ in range(OBS_SIZE):
            obs_x.append(np.zeros([N_BATCH,L]))
            obs_y.append(np.zeros([N_BATCH,L]))
            obs_tf.append(np.zeros([N_BATCH,2]))
        num_obs = len(obs)
        # ... then overwrite the first len(obs) slots with the real data,
        # tiled across the batch.
        # NOTE(review): the tile factor 50 is presumably N_BATCH -- confirm.
        for ob_idx in range(num_obs):
            ob_coord, ob_lab = obs[ob_idx]
            ob_x, ob_y = vectorize(ob_coord)
            obs_x[ob_idx] = np.tile(ob_x, [50,1])
            obs_y[ob_idx] = np.tile(ob_y, [50,1])
            obs_tf[ob_idx] = np.tile(ob_lab, [50,1])
        feed_dic = dict(zip(self.ph_obs_x + self.ph_obs_y + self.ph_obs_tf,
                            obs_x + obs_y + obs_tf))
        return feed_dic

    def get_preds_batch(self, sess, obs, batch_querys):
        """Predict t/f for N_BATCH query points at every observation prefix.

        Returns a list of OBS_SIZE+1 lists of (query, prediction) pairs.
        """
        ret = [[] for _ in range(OBS_SIZE+1)]
        feed_dic = self.get_feed_dic_obs(obs)
        assert len(batch_querys) == N_BATCH
        new_ob_x = []
        new_ob_y = []
        for q in batch_querys:
            q_x, q_y = vectorize(q)
            new_ob_x.append(q_x)
            new_ob_y.append(q_y)
        feed_dic[self.ph_new_ob_x] = np.array(new_ob_x)
        feed_dic[self.ph_new_ob_y] = np.array(new_ob_y)
        pred_tfs = sess.run(self.query_preds, feed_dict=feed_dic)
        for key_ob in range(OBS_SIZE+1):
            for q_idx, q in enumerate(batch_querys):
                ret[key_ob].append((q, pred_tfs[key_ob][q_idx]))
        return ret

    def get_all_preds_fast(self, sess, obs):
        # every grid coordinate becomes a query
        all_querys = []
        for i in range(L):
            for j in range(L):
                all_querys.append((i,j))

        # chop the queries into N_BATCH-sized chunks, padding the last
        # chunk with (0,0)
        def batch_qrys(all_qs):
            ret = []
            while len(all_qs) != 0:
                to_add = [(0,0) for _ in range(N_BATCH)]
                for idk in range(N_BATCH):
                    if len(all_qs) == 0:
                        break
                    to_add[idk] = all_qs.pop()
                ret.append(to_add)
            return ret

        ret = [[] for _ in range(OBS_SIZE+1)]
        batched_qrysss = batch_qrys(all_querys)
        for batched_q in batched_qrysss:
            ppp = self.get_preds_batch(sess, obs, batched_q)
            for ijk in range(OBS_SIZE+1):
                ret[ijk] += ppp[ijk]
        return ret

    def get_most_confuse(self, sess, obs):
        """Return the unseen coordinate whose prediction is closest to 50/50."""
        key_ob = len(obs)
        all_preds = self.get_all_preds_fast(sess, obs)
        all_pred_at_key = all_preds[key_ob]
        # get rid of already seen things
        observed_coords = [x[0] for x in obs]
        all_pred_at_key1 = filter(lambda x: x[0] not in observed_coords, all_pred_at_key)
        # smallest |P(true) - P(false)| == most uncertain
        most_confs = [(abs(x[1][0] - x[1][1]), x[0]) for x in all_pred_at_key1]
        most_conf = min(most_confs)
        return most_conf[1]

    def get_active_trace(self, sess, query, epi=0.0):
        """Actively pick OBS_SIZE observations; with probability epi pick at random."""
        obs = []
        for i in range(OBS_SIZE):
            if np.random.random() < epi:
                rand_coord = sample_coord()
                obs.append((rand_coord, query(rand_coord)))
            else:
                most_conf = self.get_most_confuse(sess, obs)
                obs.append((most_conf, query(most_conf)))
        # NOTE(review): feed_dic is computed but unused here (leftover from
        # the commented-out return below).
        feed_dic = self.get_feed_dic_obs(obs)
        # return zip([None] + obs, self.get_all_preds_fast(sess, obs))
        return obs
class Invnet:
    """Single-hidden-layer MLP that maps an L x L x 2 observation grid back
    to a distribution over X_L labels (the "inversion" model).

    NOTE(review): Python 2 / TensorFlow 0.x code (print statements,
    tf.initialize_variables, tf.GraphKeys.VARIABLES).
    """

    def gen_feed_dict(self, true_lab, obs):
        # Map the two placeholders to a batch of labels / observation grids.
        ret = dict()
        ret[self.true_label] = true_lab
        ret[self.observations] = obs
        return ret

    # load the model weights into an existing session
    def load_model(self, sess, saved_loc):
        self.saver.restore(sess, saved_loc)
        print("Inversion Model restored.")

    # save the model
    def save(self, sess, model_loc="model_invert.ckpt"):
        save_path = self.saver.save(sess, model_loc)
        print("Model saved in file: %s" % save_path)

    def __init__(self, name):
        # `name` suffixes the placeholder/variable names so several Invnets
        # can coexist in one graph.
        with tf.variable_scope('inv') as scope:
            self.true_label = tf.placeholder(tf.float32, [N_BATCH, X_L], name="true_label_"+name)
            self.observations = tf.placeholder(tf.float32, [N_BATCH, L, L, 2], name="obs_"+name)
            self.n_hidden = 1200
            # single hidden-layer MLP weights
            W_inv1 = weight_variable([L*L*2, self.n_hidden], name="W_inv1_"+name)
            b_inv1 = bias_variable([self.n_hidden], name="b_inv1_"+name)
            W_inv2 = weight_variable([self.n_hidden,X_L], name="W_inv2_"+name)
            b_inv2 = bias_variable([X_L], name="b_inv2_"+name)
            self.VARS = [W_inv1, b_inv1, W_inv2, b_inv2]
            # flatten the grid and run it through relu -> softmax
            reshape_ob = tf.reshape(self.observations, [N_BATCH, L*L*2])
            blah = tf.nn.relu(tf.matmul(reshape_ob, W_inv1) + b_inv1)
            # epsilon keeps the log() in the cost finite
            epsilon1 = tf.constant(1e-10, shape=[N_BATCH, X_L])
            self.pred = tf.nn.softmax(tf.matmul(blah, W_inv2) + b_inv2) + epsilon1
            # cross-entropy loss against the true label
            self.cost = -tf.reduce_sum(self.true_label * tf.log(self.pred))
            optimizer = tf.train.RMSPropOptimizer(0.001)
            inv_gvs = optimizer.compute_gradients(self.cost)
            self.train_inv = optimizer.apply_gradients(inv_gvs)
            # initialise only this scope's variables
            all_var_var = tf.get_collection(tf.GraphKeys.VARIABLES, scope='inv')
            self.init = tf.initialize_variables(all_var_var)
            self.saver = tf.train.Saver(self.VARS)

    # train on a particular data batch
    def train(self, sess, data_batch):
        true_lab, obss = data_batch
        feed_dic = self.gen_feed_dict(true_lab, obss)
        # log cost before/after one optimizer step to show whether it improved
        cost_pre = sess.run([self.cost], feed_dict=feed_dic)[0]
        sess.run([self.train_inv], feed_dict=feed_dic)
        cost_post = sess.run([self.cost], feed_dict=feed_dic)[0]
        print "train inv ", cost_pre, " ", cost_post, " ", True if cost_post < cost_pre else False

    # get an inversion (label distribution) from a single observation grid
    def invert(self, sess, obs):
        # tile the single observation across the batch with dummy labels,
        # then return the first row of the batch prediction
        obss = [obs for _ in range(N_BATCH)]
        fake_lab = [np.zeros(shape=[X_L]) for _ in range(N_BATCH)]
        data_in = inv_batch_obs(fake_lab, obss)
        feed_dic = self.gen_feed_dict(*data_in)
        return sess.run([self.pred], feed_dict=feed_dic)[0][0]
|
{
"content_hash": "8704c3ff890c36acebcc1c41414e0059",
"timestamp": "",
"source": "github",
"line_count": 327,
"max_line_length": 108,
"avg_line_length": 35.37003058103976,
"alnum_prop": 0.5972678540549887,
"repo_name": "evanthebouncy/nnhmm",
"id": "01bed6b445b0593cbaaa211bd64b0736b18ec589",
"size": "11566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mnist_1/model.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1212484"
}
],
"symlink_target": ""
}
|
import os
####### Database config. This assumes Postgres #######
# NOTE(review): all credentials in this file are development defaults for an
# example localsettings module -- override them in any real deployment.
INTERNAL_IPS = ['127.0.0.1']
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'commcarehq',
        'USER': 'commcarehq',
        'PASSWORD': 'commcarehq',
        'HOST': 'localhost',
        'PORT': '5432',
        'TEST': {
            'SERIALIZE': False,
        },
    }
}
# Set True to shard form processing across the p1/p2 databases configured below.
USE_PARTITIONED_DATABASE = False
if USE_PARTITIONED_DATABASE:
    PARTITION_DATABASE_CONFIG = {
        'shards': {
            'p1': [0, 1],
            'p2': [2, 3]
        },
        'groups': {
            'main': ['default'],
            'proxy': ['proxy'],
            'form_processing': ['p1', 'p2'],
        },
        'host_map': {} # allows mapping HOST in DATABASE settings to a different value for plproxy
    }
    DATABASES.update({
        'proxy': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': 'commcarehq_proxy',
            'USER': 'commcarehq',
            'PASSWORD': 'commcarehq',
            'HOST': 'localhost',
            'PORT': '5432',
            'TEST': {
                'SERIALIZE': False,
            },
        },
        'p1': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': 'commcarehq_p1',
            'USER': 'commcarehq',
            'PASSWORD': 'commcarehq',
            'HOST': 'localhost',
            'PORT': '5432',
            'TEST': {
                'SERIALIZE': False,
            },
        },
        'p2': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': 'commcarehq_p2',
            'USER': 'commcarehq',
            'PASSWORD': 'commcarehq',
            'HOST': 'localhost',
            'PORT': '5432',
            'TEST': {
                'SERIALIZE': False,
            },
        },
    })
# Custom databases can be used to configure a separate database for specific UCR data sources
# The key is what you will reference in the datasource, e.g. 'custom_ucr_database'
# The value is the sql connection string "postgresql://%(USER)s:%(PASSWORD)s@%(HOST)s:%(PORT)s/commcarehq_reporting" % DATABASES['default']
CUSTOM_DATABASES = {}
####### Couch Config ######
COUCH_HTTPS = False # recommended production value is True if enabling https
COUCH_SERVER_ROOT = '127.0.0.1:5984' #6984 for https couch
COUCH_USERNAME = 'commcarehq'
COUCH_PASSWORD = 'commcarehq'
COUCH_DATABASE_NAME = 'commcarehq'
### Public / Pre-login Site information
ENABLE_PRELOGIN_SITE = False
####### # Email setup ########
# email settings: these ones are the custom hq ones
EMAIL_LOGIN = "notifications@dimagi.com"
EMAIL_PASSWORD = "******"
EMAIL_SMTP_HOST = "smtp.gmail.com"
EMAIL_SMTP_PORT = 587
# Print emails to console so there is no danger of spamming, but you can still get registration URLs
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
ADMINS = (('HQ Dev Team', 'commcarehq-dev+www-notifications@dimagi.com'),)
BUG_REPORT_RECIPIENTS = ['commcarehq-support@dimagi.com']
NEW_DOMAIN_RECIPIENTS = ['commcarehq-dev+newdomain@dimagi.com']
EXCHANGE_NOTIFICATION_RECIPIENTS = ['commcarehq-dev+exchange@dimagi.com']
SERVER_EMAIL = 'commcarehq-noreply@dimagi.com' #the physical server emailing - differentiate if needed
DEFAULT_FROM_EMAIL = 'commcarehq-noreply@dimagi.com'
SUPPORT_EMAIL = "commcarehq-support@dimagi.com"
EMAIL_SUBJECT_PREFIX = '[commcarehq] '
SERVER_ENVIRONMENT = 'changeme' #Modify this value if you are deploying multiple environments of HQ to the same machine. Identify the target type of this running environment
####### Log/debug setup ########
DEBUG = True
# NOTE(review): TEMPLATE_DEBUG was removed in Django 1.10+ -- confirm the
# target Django version still reads it.
TEMPLATE_DEBUG = DEBUG
# log directories must exist and be writeable!
DJANGO_LOG_FILE = "/tmp/commcare-hq.django.log"
LOG_FILE = "/tmp/commcare-hq.log"
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERY_PERIODIC_QUEUE = 'celery' # change this to something else if you want a different queue for periodic tasks
CELERY_FLOWER_URL = 'http://127.0.0.1:5555'
####### Django Compressor ########
COMPRESS_ENABLED = False # this will allow less.js to compile less files on the client side
####### Bitly ########
BITLY_LOGIN = None # set to None to disable bitly app url shortening (useful offline) set to 'dimagi' if you are using the api key
BITLY_APIKEY = '*******'
####### Jar signing config ########
_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
# Uncomment below when signing the JAR
# JAR_SIGN = {
# 'key_store': os.path.join(os.path.dirname(os.path.dirname(_ROOT_DIR)), "DimagiKeyStore"),
# 'key_alias': "javarosakey",
# 'store_pass': "*******",
# 'key_pass': "*******",
# }
####### Touchforms config - for CloudCare #######
XFORMS_PLAYER_URL = 'http://127.0.0.1:4444'
# email and password for an admin django user, such as one created with
# ./manage.py bootstrap <project-name> <email> <password>
TOUCHFORMS_API_USER = 'admin@example.com'
TOUCHFORMS_API_PASSWORD = 'password'
####### Misc / HQ-specific Config ########
DEFAULT_PROTOCOL = "http" # or https
OVERRIDE_LOCATION = "https://www.commcarehq.org"
# Set to something like "192.168.1.5:8000" (with your IP address).
# See corehq/apps/builds/README.md for more information.
BASE_ADDRESS = 'localhost:8000'
# Set your analytics IDs here for GA and pingdom RUM
ANALYTICS_IDS = {
    'GOOGLE_ANALYTICS_API_ID': '*******',
    'KISSMETRICS_KEY': '*****',
    'HUBSPOT_API_KEY': '*****',
}
ANALYTICS_CONFIG = {
    "HQ_INSTANCE": '' # e.g. "www", or "india", or "staging"
}
AXES_LOCK_OUT_AT_FAILURE = False
LUCENE_ENABLED = True
# Regex matching email addresses that get previewer (early-access) features.
PREVIEWER_RE = r'^.*@dimagi\.com$'
GMAPS_API_KEY = '******'
MAPS_LAYERS = {
    'Maps': {
        'family': 'mapbox',
        'args': {
            'apikey': '*****'
        }
    },
    'Satellite': {
        'family': 'mapbox',
        'args': {
            'apikey': '*****'
        }
    },
}
FORMTRANSLATE_TIMEOUT = 5
# Extra Django apps enabled only in this environment; all entries below are
# optional development aids, commented out by default.
LOCAL_APPS = (
# 'django_coverage', # Adds `python manage.py test_coverage` (settings below)
# 'debug_toolbar', # Adds a retractable panel to every page giving profiling & debugging info
# 'couchdebugpanel', # Adds couch info to said toolbar
# 'devserver', # Adds improved dev server that also prints SQL on the console (for AJAX, etc, when you cannot use debug_toolbar)
# 'django_cpserver', # Another choice for a replacement server
# 'dimagi.utils',
# 'testapps.test_elasticsearch',
# 'testapps.test_pillowtop',
)
LOCAL_MIDDLEWARE_CLASSES = [
# 'debug_toolbar.middleware.DebugToolbarMiddleware',
]
# list of domains to enable ADM reporting on
ADM_ENABLED_PROJECTS = []
# prod settings
SOIL_DEFAULT_CACHE = "redis"
# reports cache
REPORT_CACHE = 'default' # or e.g. 'redis'
# Single redis cache definition shared by both cache aliases below.
redis_cache = {
    'BACKEND': 'django_redis.cache.RedisCache',
    'LOCATION': 'redis://127.0.0.1:6379/0',
    'OPTIONS': {},
}
CACHES = {
    'default': redis_cache,
    'redis': redis_cache,
}
# on both a local and a distributed environment this should be localhost
ELASTICSEARCH_HOST = 'localhost'
ELASTICSEARCH_PORT = 9200
# our production logstash aggregation
LOGSTASH_DEVICELOG_PORT = 10777
LOGSTASH_AUDITCARE_PORT = 10999
LOGSTASH_HOST = 'localhost'
LOCAL_PILLOWTOPS = {
    # 'my_pillows': ['some.pillow.Class', ],
    # 'and_more': []
}
####### API throttling #####
CCHQ_API_THROTTLE_REQUESTS = 200 # number of requests allowed per timeframe
                                 # Use a lower value in production. This is set
                                 # to 200 to prevent AssertionError: 429 != 200
                                 # test failures in development environments.
CCHQ_API_THROTTLE_TIMEFRAME = 10 # seconds
####### django-coverage config ########
COVERAGE_REPORT_HTML_OUTPUT_DIR='coverage-html'
COVERAGE_MODULE_EXCLUDES= ['tests$', 'settings$', 'urls$', 'locale$',
                           'common.views.test', '^django', 'management', 'migrations',
                           '^south', '^djcelery', '^debug_toolbar']
# Static reference/dropdown data. NOTE(review): consumers of this structure
# are not visible in this file -- presumably internal admin/report forms;
# confirm before changing keys.
INTERNAL_DATA = {
    "business_unit": [],
    "product": ["CommCare", "CommConnect", "CommTrack", "RapidSMS", "Custom"],
    "services": [],
    "account_types": [],
    "initiatives": [],
    "contract_type": [],
    "area": [
        {
            "name": "Health",
            "sub_areas": ["Maternal, Newborn, & Child Health", "Family Planning", "HIV/AIDS"]
        },
        {
            "name": "Other",
            "sub_areas": ["Emergency Response"]
        },
    ],
    "country": ["Afghanistan", "Albania", "Algeria", "Andorra", "Angola", "Antigua & Deps", "Argentina", "Armenia",
                "Australia", "Austria", "Azerbaijan", "Bahamas", "Bahrain", "Bangladesh", "Barbados", "Belarus",
                "Belgium", "Belize", "Benin", "Bhutan", "Bolivia", "Bosnia Herzegovina", "Botswana", "Brazil",
                "Brunei", "Bulgaria", "Burkina", "Burundi", "Cambodia", "Cameroon", "Canada", "Cape Verde",
                "Central African Rep", "Chad", "Chile", "China", "Colombia", "Comoros", "Congo",
                "Congo {Democratic Rep}", "Costa Rica", "Croatia", "Cuba", "Cyprus", "Czech Republic", "Denmark",
                "Djibouti", "Dominica", "Dominican Republic", "East Timor", "Ecuador", "Egypt", "El Salvador",
                "Equatorial Guinea", "Eritrea", "Estonia", "Ethiopia", "Fiji", "Finland", "France", "Gabon", "Gambia",
                "Georgia", "Germany", "Ghana", "Greece", "Grenada", "Guatemala", "Guinea", "Guinea-Bissau", "Guyana",
                "Haiti", "Honduras", "Hungary", "Iceland", "India", "Indonesia", "Iran", "Iraq", "Ireland {Republic}",
                "Israel", "Italy", "Ivory Coast", "Jamaica", "Japan", "Jordan", "Kazakhstan", "Kenya", "Kiribati",
                "Korea North", "Korea South", "Kosovo", "Kuwait", "Kyrgyzstan", "Laos", "Latvia", "Lebanon", "Lesotho",
                "Liberia", "Libya", "Liechtenstein", "Lithuania", "Luxembourg", "Macedonia", "Madagascar", "Malawi",
                "Malaysia", "Maldives", "Mali", "Malta", "Marshall Islands", "Mauritania", "Mauritius", "Mexico",
                "Micronesia", "Moldova", "Monaco", "Mongolia", "Montenegro", "Morocco", "Mozambique", "Myanmar, {Burma}",
                "Namibia", "Nauru", "Nepal", "Netherlands", "New Zealand", "Nicaragua", "Niger", "Nigeria", "Norway",
                "Oman", "Pakistan", "Palau", "Panama", "Papua New Guinea", "Paraguay", "Peru", "Philippines", "Poland",
                "Portugal", "Qatar", "Romania", "Russian Federation", "Rwanda", "St Kitts & Nevis", "St Lucia",
                "Saint Vincent & the Grenadines", "Samoa", "San Marino", "Sao Tome & Principe", "Saudi Arabia",
                "Senegal", "Serbia", "Seychelles", "Sierra Leone", "Singapore", "Slovakia", "Slovenia",
                "Solomon Islands", "Somalia", "South Africa", "South Sudan", "Spain", "Sri Lanka", "Sudan", "Suriname",
                "Swaziland", "Sweden", "Switzerland", "Syria", "Taiwan", "Tajikistan", "Tanzania", "Thailand", "Togo",
                "Tonga", "Trinidad & Tobago", "Tunisia", "Turkey", "Turkmenistan", "Tuvalu", "Uganda", "Ukraine",
                "United Arab Emirates", "United Kingdom", "United States", "Uruguay", "Uzbekistan", "Vanuatu",
                "Vatican City", "Venezuela", "Vietnam", "Yemen", "Zambia", "Zimbabwe"]
}
# The passcodes will print out on the console
# TWO_FACTOR_CALL_GATEWAY = 'two_factor.gateways.fake.Fake'
# TWO_FACTOR_SMS_GATEWAY = 'two_factor.gateways.fake.Fake'
|
{
"content_hash": "d7ea63b3077a9c5f709d9846c80075c1",
"timestamp": "",
"source": "github",
"line_count": 304,
"max_line_length": 173,
"avg_line_length": 37.625,
"alnum_prop": 0.5922364049659031,
"repo_name": "qedsoftware/commcare-hq",
"id": "2766c80476b4299030b31c4c91674a89cef44985",
"size": "11438",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "localsettings.example.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "508392"
},
{
"name": "HTML",
"bytes": "2869325"
},
{
"name": "JavaScript",
"bytes": "2395360"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "125298"
},
{
"name": "Python",
"bytes": "14670713"
},
{
"name": "Shell",
"bytes": "37514"
}
],
"symlink_target": ""
}
|
import struct
import zlib
def parse_from(binary):
    """ Turn a packed binary message as received from a Kafka broker into a :class:`Message`. """
    # A message. The format of an N byte message is the following:
    # 1 byte "magic" identifier to allow format changes
    # 4 byte CRC32 of the payload
    # N - 5 byte payload
    # The 4-byte length prefix preceding the message holds N, the combined
    # size of magic byte + CRC + payload.
    size = struct.unpack('>i', binary[0:4])[0]
    magic = struct.unpack('>B', binary[4:5])[0]
    checksum = struct.unpack('>i', binary[5:9])[0]
    # BUGFIX: the payload is ``size - 5`` bytes (size minus the magic byte and
    # the CRC), so it ends at offset 4 + size.  The previous slice
    # ``binary[9:9 + size]`` over-read by 5 bytes; that only worked when
    # ``binary`` contained exactly one message (the slice was clamped at the
    # end of the buffer) and returned trailing garbage when more data followed.
    payload = binary[9:4 + size]
    return Message(payload, magic, checksum)
class Message(object):
    """ A Kafka Message object.

    Python 2 module: the payload is handled as a byte string (``str``) and
    the checksum is whatever ``zlib.crc32`` returns for it -- on Python 2
    that value may be negative (signed 32-bit), which matches the signed
    ``'>i'`` unpacking done in :func:`parse_from`.
    """
    # Wire-format "magic" byte identifying this message layout version.
    MAGIC_IDENTIFIER_DEFAULT = 0
    def __init__(self, payload=None, magic=MAGIC_IDENTIFIER_DEFAULT, checksum=None):
        self.magic = magic
        self.checksum = checksum
        self.payload = None
        # Coerce any payload to a byte string; None stays None.
        if payload is not None:
            self.payload = str(payload)
        # Compute the CRC lazily only when the caller did not supply one.
        if self.payload is not None and self.checksum is None:
            self.checksum = self.calculate_checksum()
    def __str__(self):
        return self.payload
    def __eq__(self, other):
        # Value equality: same magic, payload and checksum; other types never
        # compare equal.
        if isinstance(other, self.__class__):
            return self.magic == other.magic and self.payload == other.payload and self.checksum == other.checksum
        return False
    def __ne__(self, other):
        return not self.__eq__(other)
    def calculate_checksum(self):
        """ Returns the CRC32 checksum for the payload. """
        return zlib.crc32(self.payload)
    def is_valid(self):
        """ Returns true if the stored checksum matches a freshly computed one. """
        return self.checksum == self.calculate_checksum()
    def encode(self):
        """ Encode a :class:`Message` to binary form (without the length prefix). """
        # <MAGIC_BYTE: char> <CRC32: int> <PAYLOAD: bytes>
        return struct.pack('>Bi%ds' % len(self.payload), self.magic, self.calculate_checksum(), self.payload)
|
{
"content_hash": "b2c21db760847d5195874d3e5911e7cf",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 108,
"avg_line_length": 29.6,
"alnum_prop": 0.6593468468468469,
"repo_name": "xujun10110/pykafka",
"id": "32e1d4049e5f054caf5e93a3b4bfd9d691bdda43",
"size": "1776",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "kafka/message.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17395"
}
],
"symlink_target": ""
}
|
from collections import OrderedDict
from service import api_client
import logging
LOGGER = logging.getLogger(__name__)
# Registered healthchecks: service name -> zero-argument callable performing
# the check and returning a response-like object (must expose .json()/.text).
healthchecks = OrderedDict([
    ('digital-register-api', api_client.check_health),
])
# TODO: tested through test_app - should have its own tests now
def perform_healthchecks():
    """Run every registered healthcheck and return the flattened list of
    error messages (empty when all dependent services are healthy)."""
    LOGGER.debug("STARTED: perform_healthchecks")
    per_app_results = []
    for app_name in healthchecks.keys():
        per_app_results.append(_check_application_health(app_name))
    LOGGER.debug("perform_healthchecks: {0}".format(per_app_results))
    # Flatten: each application check yields a (possibly empty) list of errors.
    error_messages = []
    for app_errors in per_app_results:
        error_messages.extend(app_errors)
    LOGGER.debug("ENDED: perform_healthchecks")
    return error_messages
def _check_application_health(application_name):
    """Run one application's healthcheck; return a list of error strings
    (empty on success).  Any exception is converted into a single error."""
    LOGGER.debug("STARTED: _check_application_health application_name: {}".format(application_name))
    try:
        response = healthchecks[application_name]()
        response_json = _get_json_from_response(response)
        if not response_json:
            LOGGER.debug("ENDED: with error: _check_application_health")
            return ['{0} health endpoint returned an invalid response: {1}'.format(
                application_name, response.text)]
        LOGGER.debug("ENDED: _check_application_health")
        return _extract_errors_from_health_response_json(response_json, application_name)
    except Exception as e:
        LOGGER.debug("ENDED: _check_application_health")
        return ['Problem talking to {0}: {1}'.format(application_name, str(e))]
def _get_json_from_response(response):
try:
return response.json()
except Exception:
return None
def _extract_errors_from_health_response_json(response_json, application_name):
if response_json.get('status') == 'ok':
return []
elif response_json.get('errors'):
return ['{0} health endpoint returned errors: {1}'.format(
application_name, response_json['errors'])]
else:
return ['{0} health endpoint returned an invalid response: {1}'.format(
application_name, response_json)]
|
{
"content_hash": "0f89a1582706395c05a0e36d9f3790fe",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 100,
"avg_line_length": 38.054545454545455,
"alnum_prop": 0.6798853320592451,
"repo_name": "LandRegistry/digital-register-frontend",
"id": "afd340426ab1e127c58abb034bd8eacb7ace003b",
"size": "2093",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "service/health_checker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "89744"
},
{
"name": "HTML",
"bytes": "77916"
},
{
"name": "JavaScript",
"bytes": "8566"
},
{
"name": "Python",
"bytes": "91491"
},
{
"name": "Shell",
"bytes": "1515"
}
],
"symlink_target": ""
}
|
import re
import os
import base64
import uuid
from pprint import pprint
import config
# Prefix character marking a line as a textnet command (see get_command below).
COMMAND = '~'
'''
This module operates like ‘textnet’ filesystem:
- finds files by set of tags
- gets data from known ‘root’ nodes
NO CACHE YET
Quite naive and messed up implementation.
Fragments will probably become classes
'''
# universal simplifier: lowercase, remove non-word characters
def simplify_name(name):
    """Normalise *name*: collapse every run of non-word characters into a
    single space, lowercase the result and trim surrounding whitespace."""
    collapsed = re.sub(r'\W+', ' ', name, flags=re.UNICODE)
    return collapsed.strip().lower()
# These encoders convert everything to textnet-compatible raw format
def encoder_txt(abs_path, ext):
    """Split a UTF-8 text file into raw fragments.

    Fragments are separated by lines of five or more dashes; a trailing
    separator is appended so the final fragment is always flushed.  Each
    non-blank fragment is prefixed with a "~ext: <ext>" command line.
    (Python 2: ``f.read().decode(...)`` works on byte strings only.)
    """
    with open(abs_path, 'r') as f:
        content = f.read().decode('UTF-8')
    # Sentinel separator guarantees the last accumulated fragment is emitted.
    lines = content.split("\n") + ["----------"]
    fragments = []
    fragment = []
    for l in lines:
        if re.match(r'^-{5,}\s*$', l):
            fragments += ["\n".join(fragment)]
            fragment = []
        else:
            fragment += [l]
    # Drop whitespace-only fragments; tag the rest with their file extension.
    fragments = ["\n".join(['~ext: %s' % ext,x]) for x in fragments if not re.match(r'^\s*$', x)]
    return fragments
def encoder_attach(abs_path, ext):
    """Encode an arbitrary (binary) file as a single base64 fragment,
    prefixed with "~ext" and "~decode: base64" command lines so readers
    know how to restore the original bytes.  (Python 2: joining the
    b64-encoded byte string with str literals is valid only on 2.x.)
    """
    with open(abs_path, 'rb') as f:
        content = f.read()
    return ["\n".join([
        '~ext: %s' % ext,
        '~decode: base64',
        base64.urlsafe_b64encode(content)
    ])]
def encoder_skip(abs_path, ext):
    """Encoder that ignores the file entirely, producing no fragments."""
    return list()
# Maps encoder id (as referenced by config.filesystem.encoders entries via
# their "id" key -- see get_raw_fragments) to the encoder function above.
registered_encoders = dict(
    txt = encoder_txt,
    attach = encoder_attach,
    skip = encoder_skip,
)
def get_raw_fragments(abs_path):
    """Encode the file at *abs_path* with the encoder chosen by its
    extension (per config), returning the list of raw fragment strings."""
    # Map ".ext" -> encoder configuration entry, as declared in the config.
    ext_to_config = {}
    for entry in config.filesystem.encoders:
        for extension in entry["ext"]:
            ext_to_config["." + extension] = entry
    default_key = "." + config.filesystem.encoders_default
    fallback_key = "." + config.filesystem.encoders_fallback
    # Pick the lookup key from the file's real extension, with default and
    # fallback substitutions.
    original_ext = os.path.splitext(abs_path)[1]
    lookup = original_ext or default_key
    if lookup not in ext_to_config:
        lookup = fallback_key
    encode = registered_encoders[ext_to_config[lookup]["id"]]
    # The encoder receives the extension without its leading dot.
    return encode(abs_path, original_ext[1:])
def get_command(line):
    """Parse a "~command: content" line.

    Returns None for lines that do not start with the command prefix;
    otherwise a dict with a simplified "command" name and, when a colon is
    present, the stripped "content" after the first colon.
    """
    if not line or line[0] != COMMAND:
        return None
    name, sep, remainder = line.partition(":")
    if not sep:
        return dict(command=simplify_name(name))
    return dict(command=simplify_name(name), content=remainder.strip())
def get_tags(tags_commands):
    """Collect the non-empty, simplified tag names from a list of "~tags:"
    command dicts (tags are '#'-separated inside each command's content)."""
    collected = []
    for cmd in tags_commands:
        for raw_tag in cmd["content"].split('#'):
            tag = simplify_name(raw_tag)
            if tag:
                collected.append(tag)
    return collected
def hash_commands(commands):
    """Index command dicts by their "command" name.

    Later duplicates overwrite earlier ones, matching the original loop's
    behaviour.  A dict comprehension also avoids shadowing the builtin
    ``hash``, which the previous implementation used as a local name.
    """
    return {command["command"]: command for command in commands}
def get_fragment_structure(fragment_content):
    """Build the structured form of one raw fragment.

    Lines starting with the command prefix become command dicts; all other
    lines form the plain content.  The result carries the raw text, a fresh
    uuid, the commands (plus a by-name index) and the extracted tags.
    """
    commands = []
    content_lines = []
    for line in fragment_content.split("\n"):
        parsed = get_command(line)
        if parsed:
            commands.append(parsed)
        else:
            content_lines.append(line)
    tag_commands = [cmd for cmd in commands if cmd["command"] == "tags"]
    return dict(
        raw=fragment_content,
        uuid=uuid.uuid4(),
        commands=commands,
        commands_hash=hash_commands(commands),
        tags=get_tags(tag_commands),
        content=u"\n".join(content_lines),
    )
def build_sources(abs_paths):
    """Walk the directories containing *abs_paths* and index every fragment.

    Returns a dict with:
      by_tag:     tag -> list of fragment dicts carrying that tag
      by_uuid:    fragment uuid -> fragment dict
      publishing: simplified publish path -> list of directories that
                  contain a fragment with a matching "~publish:" command
    (Python 2: filenames are decoded from UTF-8 byte strings and the
    per-file fragment counter is re-encoded as a byte-string tag.)
    """
    fragments_by_tag = dict()
    fragments_by_uuid = dict()
    publishing = dict()
    for abs_path in abs_paths:
        for dirName, subdirList, fileList in os.walk(os.path.dirname(abs_path)):
            for fname in fileList:
                fname = fname.decode("UTF-8")
                fragment_count = 0
                # remove all ignored
                okay = True
                for r in config.filesystem.ignore:
                    if re.search(r,fname):
                        okay = False
                        break
                if okay:
                    # get filename tags (both the full name and the name with
                    # everything after the first dot stripped)
                    fname_trimmed = re.sub(r'\..*?$', '', fname)
                    tags = [simplify_name(x) for x in (fname, fname_trimmed)]
                    # get fragments
                    fragments = [get_fragment_structure(f) for f in get_raw_fragments(os.path.join(dirName, fname))]
                    for fragment in fragments:
                        fragment_count += 1
                        # 1-based position of the fragment within its file,
                        # added as an extra tag
                        fragment["tags"] += [str(fragment_count).encode("UTF-8")]
                        fragment["tags"] += tags
                        fragments_by_uuid[fragment["uuid"]] = fragment
                        for tag in fragment["tags"]:
                            if tag not in fragments_by_tag: fragments_by_tag[tag] = []
                            fragments_by_tag[tag] += [fragment]
                    # get publish command
                    for fragment in fragments:
                        for command in fragment["commands"]:
                            if command["command"] == "publish":
                                command["publish_path"] = "/".join([simplify_name(x) for x in command["content"].split("/")])
                                if command["publish_path"] not in publishing:
                                    publishing[command["publish_path"]] = []
                                publishing[command["publish_path"]] += [dirName]
    return dict(by_tag=fragments_by_tag, by_uuid=fragments_by_uuid, publishing=publishing)
def lookup_paths(publish_path, abs_paths):
    """Return the directories published under *publish_path*, or None when
    the (simplified) path is not published at all.
    """
    sources = build_sources(abs_paths)
    # Normalise the requested path segment-by-segment, matching how
    # build_sources stores publish paths.
    publish_path = "/".join([simplify_name(x) for x in publish_path.split("/")])
    # NOTE(review): leftover Python 2 debug print -- consider removing.
    print publish_path
    if publish_path in sources["publishing"]:
        return sources["publishing"][publish_path]
def find(publish_path, tags, abs_paths):
    """Return the fragments under *publish_path* that carry all of *tags*."""
    matching_dirs = lookup_paths(publish_path, abs_paths)
    return filter(tags, matching_dirs)
def filter(tags, abs_paths):
    """Return the fragments that carry ALL of *tags* (intersection).

    NOTE: this module-level name shadows the builtin ``filter``; it is kept
    for backward compatibility with existing callers.
    """
    sources = build_sources(abs_paths)
    by_tag = sources["by_tag"]
    matched_uuids = None  # None until the first tag seeds the candidate set
    for raw_tag in tags:
        tag = simplify_name(raw_tag)
        # A single unknown tag makes the whole intersection empty.
        if tag not in by_tag:
            return []
        tagged_uuids = [fragment["uuid"] for fragment in by_tag[tag]]
        if matched_uuids is None:
            matched_uuids = tagged_uuids
        else:
            matched_uuids = [u for u in tagged_uuids if u in matched_uuids]
    if matched_uuids is None:
        matched_uuids = []
    return [sources["by_uuid"][u] for u in matched_uuids]
|
{
"content_hash": "eaae0b787af2fd48d66136b72b09cdd8",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 125,
"avg_line_length": 35.16393442622951,
"alnum_prop": 0.5620823620823621,
"repo_name": "textnet/protoserver",
"id": "cc46e691b2b4d7a419df3adc776f43758f8e9845",
"size": "6467",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "filesystem.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "403"
},
{
"name": "HTML",
"bytes": "3722"
},
{
"name": "JavaScript",
"bytes": "1515"
},
{
"name": "Python",
"bytes": "10229"
}
],
"symlink_target": ""
}
|
"""
Test for the deletion of L{BatchManholePowerup}.
"""
from axiom.batch import BatchManholePowerup
from axiom.test.historic.stubloader import StubbedTest
class BatchManholePowerupTests(StubbedTest):
    # NOTE(review): self.store is presumably opened from a historic stub by
    # StubbedTest's setUp (with the upgrade applied) -- confirm against the
    # stubloader implementation.
    def test_deletion(self):
        """
        The upgrade to schema version 2 deletes L{BatchManholePowerup}.
        """
        self.assertEqual(self.store.query(BatchManholePowerup).count(), 0)
|
{
"content_hash": "1a995ebda6b4f21595327f967bb2a6b6",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 74,
"avg_line_length": 28.857142857142858,
"alnum_prop": 0.7252475247524752,
"repo_name": "hawkowl/axiom",
"id": "2836595f9653f2799d96bb426905b20008197168",
"size": "461",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "axiom/test/historic/test_manhole1to2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "769649"
}
],
"symlink_target": ""
}
|
"""
werkzeug.wrappers
~~~~~~~~~~~~~~~~~
The wrappers are simple request and response objects which you can
subclass to do whatever you want them to do. The request object contains
the information transmitted by the client (webbrowser) and the response
object contains all the information sent back to the browser.
An important detail is that the request object is created with the WSGI
environ and will act as high-level proxy whereas the response object is an
actual WSGI application.
Like everything else in Werkzeug these objects will work correctly with
unicode data. Incoming form data parsed by the response object will be
decoded into an unicode object if possible and if it makes sense.
:copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import urlparse
from datetime import datetime, timedelta
from werkzeug.http import HTTP_STATUS_CODES, \
parse_accept_header, parse_cache_control_header, parse_etags, \
parse_date, generate_etag, is_resource_modified, unquote_etag, \
quote_etag, parse_set_header, parse_authorization_header, \
parse_www_authenticate_header, remove_entity_headers, \
parse_options_header, dump_options_header, http_date, \
parse_if_range_header, parse_cookie, dump_cookie, \
parse_range_header, parse_content_range_header, dump_header
from werkzeug.urls import url_decode, iri_to_uri
from werkzeug.formparser import FormDataParser, default_stream_factory
from werkzeug.utils import cached_property, environ_property, \
header_property, get_content_type
from werkzeug.wsgi import get_current_url, get_host, LimitedStream, \
ClosingIterator
from werkzeug.datastructures import MultiDict, CombinedMultiDict, Headers, \
EnvironHeaders, ImmutableMultiDict, ImmutableTypeConversionDict, \
ImmutableList, MIMEAccept, CharsetAccept, LanguageAccept, \
ResponseCacheControl, RequestCacheControl, CallbackDict, \
ContentRange
from werkzeug._internal import _empty_stream, _decode_unicode, \
_patch_wrapper, _get_environ
def _run_wsgi_app(*args):
    """This function replaces itself to ensure that the test module is not
    imported unless required.  DO NOT USE!
    """
    global _run_wsgi_app
    # Rebind the module-level name to the real implementation on first call;
    # every subsequent call goes straight to werkzeug.test.run_wsgi_app.
    from werkzeug.test import run_wsgi_app as _run_wsgi_app
    return _run_wsgi_app(*args)
def _warn_if_string(iterable):
    """Helper for the response objects to check if the iterable returned
    to the WSGI server is not a string.  Emits a Warning (it does not raise)
    because a string technically "works" but is sent char by char.
    """
    # Python 2: basestring covers both str and unicode response bodies.
    if isinstance(iterable, basestring):
        from warnings import warn
        warn(Warning('response iterable was set to a string. This appears '
                     'to work but means that the server will send the '
                     'data to the client char, by char. This is almost '
                     'never intended behavior, use response.data to assign '
                     'strings to the response object.'), stacklevel=2)
class BaseRequest(object):
"""Very basic request object. This does not implement advanced stuff like
entity tag parsing or cache controls. The request object is created with
the WSGI environment as first argument and will add itself to the WSGI
environment as ``'werkzeug.request'`` unless it's created with
`populate_request` set to False.
There are a couple of mixins available that add additional functionality
to the request object, there is also a class called `Request` which
subclasses `BaseRequest` and all the important mixins.
It's a good idea to create a custom subclass of the :class:`BaseRequest`
and add missing functionality either via mixins or direct implementation.
Here an example for such subclasses::
from werkzeug.wrappers import BaseRequest, ETagRequestMixin
class Request(BaseRequest, ETagRequestMixin):
pass
Request objects are **read only**. As of 0.5 modifications are not
allowed in any place. Unlike the lower level parsing functions the
request object will use immutable objects everywhere possible.
Per default the request object will assume all the text data is `utf-8`
encoded. Please refer to `the unicode chapter <unicode.txt>`_ for more
details about customizing the behavior.
Per default the request object will be added to the WSGI
environment as `werkzeug.request` to support the debugging system.
If you don't want that, set `populate_request` to `False`.
If `shallow` is `True` the environment is initialized as shallow
object around the environ. Every operation that would modify the
environ in any way (such as consuming form data) raises an exception
unless the `shallow` attribute is explicitly set to `False`. This
is useful for middlewares where you don't want to consume the form
data by accident. A shallow request is not populated to the WSGI
environment.
.. versionchanged:: 0.5
read-only mode was enforced by using immutables classes for all
data.
"""
#: the charset for the request, defaults to utf-8
charset = 'utf-8'
#: the error handling procedure for errors, defaults to 'replace'
encoding_errors = 'replace'
#: the maximum content length. This is forwarded to the form data
#: parsing function (:func:`parse_form_data`). When set and the
#: :attr:`form` or :attr:`files` attribute is accessed and the
#: parsing fails because more than the specified value is transmitted
#: a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised.
#:
#: Have a look at :ref:`dealing-with-request-data` for more details.
#:
#: .. versionadded:: 0.5
max_content_length = None
#: the maximum form field size. This is forwarded to the form data
#: parsing function (:func:`parse_form_data`). When set and the
#: :attr:`form` or :attr:`files` attribute is accessed and the
#: data in memory for post data is longer than the specified value a
#: :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised.
#:
#: Have a look at :ref:`dealing-with-request-data` for more details.
#:
#: .. versionadded:: 0.5
max_form_memory_size = None
#: the class to use for `args` and `form`. The default is an
#: :class:`~werkzeug.datastructures.ImmutableMultiDict` which supports
#: multiple values per key. alternatively it makes sense to use an
#: :class:`~werkzeug.datastructures.ImmutableOrderedMultiDict` which
#: preserves order or a :class:`~werkzeug.datastructures.ImmutableDict`
#: which is the fastest but only remembers the last key. It is also
#: possible to use mutable structures, but this is not recommended.
#:
#: .. versionadded:: 0.6
parameter_storage_class = ImmutableMultiDict
#: the type to be used for list values from the incoming WSGI environment.
#: By default an :class:`~werkzeug.datastructures.ImmutableList` is used
#: (for example for :attr:`access_list`).
#:
#: .. versionadded:: 0.6
list_storage_class = ImmutableList
#: the type to be used for dict values from the incoming WSGI environment.
#: By default an
#: :class:`~werkzeug.datastructures.ImmutableTypeConversionDict` is used
#: (for example for :attr:`cookies`).
#:
#: .. versionadded:: 0.6
dict_storage_class = ImmutableTypeConversionDict
    #: The form data parser that should be used.  Can be replaced to customize
    #: the form data parsing.
form_data_parser_class = FormDataParser
def __init__(self, environ, populate_request=True, shallow=False):
self.environ = environ
if populate_request and not shallow:
self.environ['werkzeug.request'] = self
self.shallow = shallow
def __repr__(self):
# make sure the __repr__ even works if the request was created
# from an invalid WSGI environment. If we display the request
# in a debug session we don't want the repr to blow up.
args = []
try:
args.append("'%s'" % self.url)
args.append('[%s]' % self.method)
except Exception:
args.append('(invalid WSGI environ)')
return '<%s %s>' % (
self.__class__.__name__,
' '.join(args)
)
    @property
    def url_charset(self):
        """The charset that is assumed for URLs. Defaults to the value
        of :attr:`charset`.  Override to decode URLs differently from the
        request body.

        .. versionadded:: 0.6
        """
        return self.charset
@classmethod
def from_values(cls, *args, **kwargs):
"""Create a new request object based on the values provided. If
environ is given missing values are filled from there. This method is
useful for small scripts when you need to simulate a request from an URL.
Do not use this method for unittesting, there is a full featured client
object (:class:`Client`) that allows to create multipart requests,
support for cookies etc.
This accepts the same options as the
:class:`~werkzeug.test.EnvironBuilder`.
.. versionchanged:: 0.5
This method now accepts the same arguments as
:class:`~werkzeug.test.EnvironBuilder`. Because of this the
`environ` parameter is now called `environ_overrides`.
:return: request object
"""
from werkzeug.test import EnvironBuilder
charset = kwargs.pop('charset', cls.charset)
builder = EnvironBuilder(*args, **kwargs)
try:
return builder.get_request(cls)
finally:
builder.close()
    @classmethod
    def application(cls, f):
        """Decorate a function as responder that accepts the request as first
        argument.  This works like the :func:`responder` decorator but the
        function is passed the request object as first argument::

            @Request.application
            def my_wsgi_app(request):
                return Response('Hello World!')

        :param f: the WSGI callable to decorate
        :return: a new WSGI callable
        """
        #: return a callable that wraps the -2nd argument with the request
        #: and calls the function with all the arguments up to that one and
        #: the request. The return value is then called with the latest
        #: two arguments. This makes it possible to use this decorator for
        #: both methods and standalone WSGI functions.
        # In other words: ``a[:-2]`` keeps any bound arguments (e.g. ``self``),
        # ``a[-2]`` is the WSGI environ that gets wrapped into ``cls``, and
        # ``a[-2:]`` (environ, start_response) is forwarded to the response
        # object returned by ``f``.
        return _patch_wrapper(f, lambda *a: f(*a[:-2]+(cls(a[-2]),))(*a[-2:]))
    def _get_file_stream(self, total_content_length, content_type, filename=None,
                         content_length=None):
        """Called to get a stream for the file upload.
        This must provide a file-like class with `read()`, `readline()`
        and `seek()` methods that is both writeable and readable.
        The default implementation returns a temporary file if the total
        content length is higher than 500KB. Because many browsers do not
        provide a content length for the files only the total content
        length matters.
        :param total_content_length: the total content length of all the
                                     data in the request combined. This value
                                     is guaranteed to be there.
        :param content_type: the mimetype of the uploaded file.
        :param filename: the filename of the uploaded file. May be `None`.
        :param content_length: the length of this file. This value is usually
                               not provided because webbrowsers do not provide
                               this value.
        """
        # delegate to the module level factory; override this method in a
        # subclass to customize where uploads are buffered
        return default_stream_factory(total_content_length, content_type,
                                      filename, content_length)
@property
def want_form_data_parsed(self):
"""Returns True if the request method is ``POST``, ``PUT`` or
``PATCH``. Can be overriden to support other HTTP methods that
should carry form data.
.. versionadded:: 0.8
"""
return self.environ['REQUEST_METHOD'] in ('POST', 'PUT', 'PATCH')
    def make_form_data_parser(self):
        """Creates the form data parser. Instanciates the
        :attr:`form_data_parser_class` with some parameters.
        .. versionadded:: 0.8
        """
        # forward the request's parsing configuration so subclasses can
        # tune limits, charset and storage types via class attributes only
        return self.form_data_parser_class(self._get_file_stream,
                                           self.charset,
                                           self.encoding_errors,
                                           self.max_form_memory_size,
                                           self.max_content_length,
                                           self.parameter_storage_class)
    def _load_form_data(self):
        """Method used internally to retrieve submitted data. After calling
        this sets `form` and `files` on the request object to multi dicts
        filled with the incoming form data. As a matter of fact the input
        stream will be empty afterwards.
        .. versionadded:: 0.8
        """
        # abort early if we have already consumed the stream
        if 'stream' in self.__dict__:
            return
        if self.shallow:
            raise RuntimeError('A shallow request tried to consume '
                               'form data. If you really want to do '
                               'that, set `shallow` to False.')
        # without a parser the form/files multi dicts stay empty and only
        # a (possibly length-guarded) stream is exposed
        data = None
        stream = _empty_stream
        if self.want_form_data_parsed:
            parser = self.make_form_data_parser()
            data = parser.parse_from_environ(self.environ)
        else:
            # if we have a content length header we are able to properly
            # guard the incoming stream, no matter what request method is
            # used.
            content_length = self.headers.get('content-length', type=int)
            if content_length is not None:
                stream = LimitedStream(self.environ['wsgi.input'],
                                       content_length)
        if data is None:
            data = (stream, self.parameter_storage_class(),
                    self.parameter_storage_class())
        # inject the values into the instance dict so that we bypass
        # our cached_property non-data descriptor.
        d = self.__dict__
        d['stream'], d['form'], d['files'] = data
@cached_property
    def stream(self):
        """The parsed stream if the submitted data was not multipart or
        urlencoded form data. This stream is the stream left by the form data
        parser module after parsing. This is *not* the WSGI input stream but
        a wrapper around it that ensures the caller does not accidentally
        read past `Content-Length`.
        """
        self._load_form_data()
        # _load_form_data injected 'stream' into the instance dict, so this
        # attribute access no longer reaches the cached_property descriptor
        return self.stream
input_stream = environ_property('wsgi.input', 'The WSGI input stream.\n'
'In general it\'s a bad idea to use this one because you can easily '
'read past the boundary. Use the :attr:`stream` instead.')
@cached_property
    def args(self):
        """The parsed URL parameters. By default an
        :class:`~werkzeug.datastructures.ImmutableMultiDict`
        is returned from this function. This can be changed by setting
        :attr:`parameter_storage_class` to a different type. This might
        be necessary if the order of the form data is important.
        """
        # decoded lazily from the raw QUERY_STRING on first access and
        # cached on the instance by the cached_property decorator
        return url_decode(self.environ.get('QUERY_STRING', ''),
                          self.url_charset, errors=self.encoding_errors,
                          cls=self.parameter_storage_class)
@cached_property
    def data(self):
        """This reads the buffered incoming data from the client into the
        string. Usually it's a bad idea to access :attr:`data` because a client
        could send dozens of megabytes or more to cause memory problems on the
        server.
        To circumvent that make sure to check the content length first.
        """
        # reads the whole remaining (form-parsed) stream into memory
        return self.stream.read()
@cached_property
    def form(self):
        """The form parameters. By default an
        :class:`~werkzeug.datastructures.ImmutableMultiDict`
        is returned from this function. This can be changed by setting
        :attr:`parameter_storage_class` to a different type. This might
        be necessary if the order of the form data is important.
        """
        self._load_form_data()
        # 'form' was injected into the instance dict by _load_form_data,
        # so this attribute access returns the parsed result
        return self.form
@cached_property
def values(self):
"""Combined multi dict for :attr:`args` and :attr:`form`."""
args = []
for d in self.args, self.form:
if not isinstance(d, MultiDict):
d = MultiDict(d)
args.append(d)
return CombinedMultiDict(args)
@cached_property
    def files(self):
        """:class:`~werkzeug.datastructures.MultiDict` object containing
        all uploaded files. Each key in :attr:`files` is the name from the
        ``<input type="file" name="">``. Each value in :attr:`files` is a
        Werkzeug :class:`~werkzeug.datastructures.FileStorage` object.
        Note that :attr:`files` will only contain data if the request method was
        POST, PUT or PATCH and the ``<form>`` that posted to the request had
        ``enctype="multipart/form-data"``. It will be empty otherwise.
        See the :class:`~werkzeug.datastructures.MultiDict` /
        :class:`~werkzeug.datastructures.FileStorage` documentation for
        more details about the used data structure.
        """
        self._load_form_data()
        # 'files' was injected into the instance dict by _load_form_data
        return self.files
@cached_property
    def cookies(self):
        """Read only access to the retrieved cookie values as dictionary."""
        # stored using :attr:`dict_storage_class` so subclasses can swap
        # in a different mapping type
        return parse_cookie(self.environ, self.charset,
                            cls=self.dict_storage_class)
@cached_property
    def headers(self):
        """The headers from the WSGI environ as immutable
        :class:`~werkzeug.datastructures.EnvironHeaders`.
        """
        # a live, read-only view onto self.environ
        return EnvironHeaders(self.environ)
@cached_property
def path(self):
"""Requested path as unicode. This works a bit like the regular path
info in the WSGI environment but will always include a leading slash,
even if the URL root is accessed.
"""
path = '/' + (self.environ.get('PATH_INFO') or '').lstrip('/')
return _decode_unicode(path, self.url_charset, self.encoding_errors)
@cached_property
def script_root(self):
"""The root path of the script without the trailing slash."""
path = (self.environ.get('SCRIPT_NAME') or '').rstrip('/')
return _decode_unicode(path, self.url_charset, self.encoding_errors)
@cached_property
    def url(self):
        """The reconstructed current URL"""
        # rebuilt from the WSGI environ (scheme, host, path, query string)
        return get_current_url(self.environ)
@cached_property
    def base_url(self):
        """Like :attr:`url` but without the querystring"""
        return get_current_url(self.environ, strip_querystring=True)
@cached_property
    def url_root(self):
        """The full URL root (with hostname), this is the application root."""
        # second positional argument is ``root_only``
        return get_current_url(self.environ, True)
@cached_property
    def host_url(self):
        """Just the host with scheme."""
        return get_current_url(self.environ, host_only=True)
@cached_property
    def host(self):
        """Just the host including the port if available."""
        return get_host(self.environ)
query_string = environ_property('QUERY_STRING', '', read_only=True, doc=
'''The URL parameters as raw bytestring.''')
method = environ_property('REQUEST_METHOD', 'GET', read_only=True, doc=
'''The transmission method. (For example ``'GET'`` or ``'POST'``).''')
@cached_property
def access_route(self):
"""If a forwarded header exists this is a list of all ip addresses
from the client ip to the last proxy server.
"""
if 'HTTP_X_FORWARDED_FOR' in self.environ:
addr = self.environ['HTTP_X_FORWARDED_FOR'].split(',')
return self.list_storage_class([x.strip() for x in addr])
elif 'REMOTE_ADDR' in self.environ:
return self.list_storage_class([self.environ['REMOTE_ADDR']])
return self.list_storage_class()
@property
def remote_addr(self):
"""The remote address of the client."""
return self.environ.get('REMOTE_ADDR')
remote_user = environ_property('REMOTE_USER', doc='''
If the server supports user authentication, and the script is
protected, this attribute contains the username the user has
authenticated as.''')
scheme = environ_property('wsgi.url_scheme', doc='''
URL scheme (http or https).
.. versionadded:: 0.7''')
is_xhr = property(lambda x: x.environ.get('HTTP_X_REQUESTED_WITH', '')
.lower() == 'xmlhttprequest', doc='''
True if the request was triggered via a JavaScript XMLHttpRequest.
This only works with libraries that support the `X-Requested-With`
header and set it to "XMLHttpRequest". Libraries that do that are
prototype, jQuery and Mochikit and probably some more.''')
is_secure = property(lambda x: x.environ['wsgi.url_scheme'] == 'https',
doc='`True` if the request is secure.')
is_multithread = environ_property('wsgi.multithread', doc='''
boolean that is `True` if the application is served by
a multithreaded WSGI server.''')
is_multiprocess = environ_property('wsgi.multiprocess', doc='''
boolean that is `True` if the application is served by
a WSGI server that spawns multiple processes.''')
is_run_once = environ_property('wsgi.run_once', doc='''
boolean that is `True` if the application will be executed only
once in a process lifetime. This is the case for CGI for example,
but it's not guaranteed that the exeuction only happens one time.''')
class BaseResponse(object):
    """Base response class. The most important fact about a response object
    is that it's a regular WSGI application. It's initialized with a couple
    of response parameters (headers, body, status code etc.) and will start a
    valid WSGI response when called with the environ and start response
    callable.
    Because it's a WSGI application itself processing usually ends before the
    actual response is sent to the server. This helps debugging systems
    because they can catch all the exceptions before responses are started.
    Here a small example WSGI application that takes advantage of the
    response objects::
        from werkzeug.wrappers import BaseResponse as Response
        def index():
            return Response('Index page')
        def application(environ, start_response):
            path = environ.get('PATH_INFO') or '/'
            if path == '/':
                response = index()
            else:
                response = Response('Not Found', status=404)
            return response(environ, start_response)
    Like :class:`BaseRequest` which object is lacking a lot of functionality
    implemented in mixins. This gives you a better control about the actual
    API of your response objects, so you can create subclasses and add custom
    functionality. A full featured response object is available as
    :class:`Response` which implements a couple of useful mixins.
    To enforce a new type of already existing responses you can use the
    :meth:`force_type` method. This is useful if you're working with different
    subclasses of response objects and you want to post process them with a
    know interface.
    Per default the request object will assume all the text data is `utf-8`
    encoded. Please refer to `the unicode chapter <unicode.txt>`_ for more
    details about customizing the behavior.
    Response can be any kind of iterable or string. If it's a string it's
    considered being an iterable with one item which is the string passed.
    Headers can be a list of tuples or a
    :class:`~werkzeug.datastructures.Headers` object.
    Special note for `mimetype` and `content_type`: For most mime types
    `mimetype` and `content_type` work the same, the difference affects
    only 'text' mimetypes. If the mimetype passed with `mimetype` is a
    mimetype starting with `text/` it becomes a charset parameter defined
    with the charset of the response object. In contrast the
    `content_type` parameter is always added as header unmodified.
    .. versionchanged:: 0.5
        the `direct_passthrough` parameter was added.
    :param response: a string or response iterable.
    :param status: a string with a status or an integer with the status code.
    :param headers: a list of headers or a
                    :class:`~werkzeug.datastructures.Headers` object.
    :param mimetype: the mimetype for the request. See notice above.
    :param content_type: the content type for the request. See notice above.
    :param direct_passthrough: if set to `True` :meth:`iter_encoded` is not
                               called before iteration which makes it
                               possible to pass special iterators though
                               unchanged (see :func:`wrap_file` for more
                               details.)
    """
    #: the charset of the response.
    charset = 'utf-8'
    #: the default status if none is provided.
    default_status = 200
    #: the default mimetype if none is provided.
    default_mimetype = 'text/plain'
    #: if set to `False` accessing properties on the response object will
    #: not try to consume the response iterator and convert it into a list.
    #:
    #: .. versionadded:: 0.6.2
    #:
    #: That attribute was previously called `implicit_seqence_conversion`.
    #: (Notice the typo). If you did use this feature, you have to adapt
    #: your code to the name change.
    implicit_sequence_conversion = True
    #: Should this response object correct the location header to be RFC
    #: conformant? This is true by default.
    #:
    #: .. versionadded:: 0.8
    autocorrect_location_header = True
    #: Should this response object automatically set the content-length
    #: header if possible? This is true by default.
    #:
    #: .. versionadded:: 0.8
    automatically_set_content_length = True
    def __init__(self, response=None, status=None, headers=None,
                 mimetype=None, content_type=None, direct_passthrough=False):
        if isinstance(headers, Headers):
            self.headers = headers
        elif not headers:
            self.headers = Headers()
        else:
            self.headers = Headers(headers)
        # an explicit content_type wins over mimetype; the default mimetype
        # is only applied when no Content-Type header was supplied at all
        if content_type is None:
            if mimetype is None and 'content-type' not in self.headers:
                mimetype = self.default_mimetype
            if mimetype is not None:
                mimetype = get_content_type(mimetype, self.charset)
            content_type = mimetype
        if content_type is not None:
            self.headers['Content-Type'] = content_type
        if status is None:
            status = self.default_status
        # numeric codes (py2 int/long) go through status_code which derives
        # the status line; strings are taken as the full status line
        if isinstance(status, (int, long)):
            self.status_code = status
        else:
            self.status = status
        self.direct_passthrough = direct_passthrough
        self._on_close = []
        # we set the response after the headers so that if a class changes
        # the charset attribute, the data is set in the correct charset.
        if response is None:
            self.response = []
        elif isinstance(response, basestring):
            self.data = response
        else:
            self.response = response
    def call_on_close(self, func):
        """Adds a function to the internal list of functions that should
        be called as part of closing down the response. Since 0.7 this
        function also returns the function that was passed so that this
        can be used as a decorator.
        .. versionadded:: 0.6
        """
        self._on_close.append(func)
        return func
    def __repr__(self):
        if self.is_sequence:
            body_info = '%d bytes' % sum(map(len, self.iter_encoded()))
        else:
            body_info = self.is_streamed and 'streamed' or 'likely-streamed'
        return '<%s %s [%s]>' % (
            self.__class__.__name__,
            body_info,
            self.status
        )
    @classmethod
    def force_type(cls, response, environ=None):
        """Enforce that the WSGI response is a response object of the current
        type. Werkzeug will use the :class:`BaseResponse` internally in many
        situations like the exceptions. If you call :meth:`get_response` on an
        exception you will get back a regular :class:`BaseResponse` object, even
        if you are using a custom subclass.
        This method can enforce a given response type, and it will also
        convert arbitrary WSGI callables into response objects if an environ
        is provided::
            # convert a Werkzeug response object into an instance of the
            # MyResponseClass subclass.
            response = MyResponseClass.force_type(response)
            # convert any WSGI application into a response object
            response = MyResponseClass.force_type(response, environ)
        This is especially useful if you want to post-process responses in
        the main dispatcher and use functionality provided by your subclass.
        Keep in mind that this will modify response objects in place if
        possible!
        :param response: a response object or wsgi application.
        :param environ: a WSGI environment object.
        :return: a response object.
        """
        if not isinstance(response, BaseResponse):
            if environ is None:
                raise TypeError('cannot convert WSGI application into '
                                'response objects without an environ')
            response = BaseResponse(*_run_wsgi_app(response, environ))
        # reassigning __class__ converts in place without copying state
        response.__class__ = cls
        return response
    @classmethod
    def from_app(cls, app, environ, buffered=False):
        """Create a new response object from an application output. This
        works best if you pass it an application that returns a generator all
        the time. Sometimes applications may use the `write()` callable
        returned by the `start_response` function. This tries to resolve such
        edge cases automatically. But if you don't get the expected output
        you should set `buffered` to `True` which enforces buffering.
        :param app: the WSGI application to execute.
        :param environ: the WSGI environment to execute against.
        :param buffered: set to `True` to enforce buffering.
        :return: a response object.
        """
        return cls(*_run_wsgi_app(app, environ, buffered))
    def _get_status_code(self):
        return self._status_code
    def _set_status_code(self, code):
        # keep _status (the full status line) in sync with the numeric code
        self._status_code = code
        try:
            self._status = '%d %s' % (code, HTTP_STATUS_CODES[code].upper())
        except KeyError:
            self._status = '%d UNKNOWN' % code
    status_code = property(_get_status_code, _set_status_code,
                           'The HTTP Status code as number')
    del _get_status_code, _set_status_code
    def _get_status(self):
        return self._status
    def _set_status(self, value):
        self._status = value
        try:
            self._status_code = int(self._status.split(None, 1)[0])
        except ValueError:
            # non-numeric status line; the numeric code is unknown
            self._status_code = 0
    status = property(_get_status, _set_status, 'The HTTP Status code')
    del _get_status, _set_status
    def _get_data(self):
        """The string representation of the request body. Whenever you access
        this property the request iterable is encoded and flattened. This
        can lead to unwanted behavior if you stream big data.
        This behavior can be disabled by setting
        :attr:`implicit_sequence_conversion` to `False`.
        """
        self._ensure_sequence()
        return ''.join(self.iter_encoded())
    def _set_data(self, value):
        # if an unicode string is set, it's encoded directly so that we
        # can set the content length
        if isinstance(value, unicode):
            value = value.encode(self.charset)
        self.response = [value]
        if self.automatically_set_content_length:
            self.headers['Content-Length'] = str(len(value))
    data = property(_get_data, _set_data, doc=_get_data.__doc__)
    del _get_data, _set_data
    def _ensure_sequence(self, mutable=False):
        """This method can be called by methods that need a sequence. If
        `mutable` is true, it will also ensure that the response sequence
        is a standard Python list.
        .. versionadded:: 0.6
        """
        if self.is_sequence:
            # if we need a mutable object, we ensure it's a list.
            if mutable and not isinstance(self.response, list):
                self.response = list(self.response)
            return
        if not self.implicit_sequence_conversion:
            raise RuntimeError('The response object required the iterable '
                               'to be a sequence, but the implicit '
                               'conversion was disabled. Call '
                               'make_sequence() yourself.')
        self.make_sequence()
    def make_sequence(self):
        """Converts the response iterator in a list. By default this happens
        automatically if required. If `implicit_sequence_conversion` is
        disabled, this method is not automatically called and some properties
        might raise exceptions. This also encodes all the items.
        .. versionadded:: 0.6
        """
        if not self.is_sequence:
            # if we consume an iterable we have to ensure that the close
            # method of the iterable is called if available when we tear
            # down the response
            close = getattr(self.response, 'close', None)
            self.response = list(self.iter_encoded())
            if close is not None:
                self.call_on_close(close)
    def iter_encoded(self, charset=None):
        """Iter the response encoded with the encoding of the response.
        If the response object is invoked as WSGI application the return
        value of this method is used as application iterator unless
        :attr:`direct_passthrough` was activated.
        .. versionchanged:: 0.6
           The `charset` parameter was deprecated and became a no-op.
        """
        # XXX: deprecated
        if __debug__ and charset is not None: # pragma: no cover
            from warnings import warn
            warn(DeprecationWarning('charset was deprecated and is ignored.'),
                 stacklevel=2)
        # the passed charset is intentionally discarded in favor of the
        # response charset
        charset = self.charset
        if __debug__:
            _warn_if_string(self.response)
        for item in self.response:
            if isinstance(item, unicode):
                yield item.encode(charset)
            else:
                yield str(item)
    def set_cookie(self, key, value='', max_age=None, expires=None,
                   path='/', domain=None, secure=None, httponly=False):
        """Sets a cookie. The parameters are the same as in the cookie `Morsel`
        object in the Python standard library but it accepts unicode data, too.
        :param key: the key (name) of the cookie to be set.
        :param value: the value of the cookie.
        :param max_age: should be a number of seconds, or `None` (default) if
                        the cookie should last only as long as the client's
                        browser session.
        :param expires: should be a `datetime` object or UNIX timestamp.
        :param domain: if you want to set a cross-domain cookie. For example,
                       ``domain=".example.com"`` will set a cookie that is
                       readable by the domain ``www.example.com``,
                       ``foo.example.com`` etc. Otherwise, a cookie will only
                       be readable by the domain that set it.
        :param path: limits the cookie to a given path, per default it will
                     span the whole domain.
        """
        self.headers.add('Set-Cookie', dump_cookie(key, value, max_age,
                         expires, path, domain, secure, httponly,
                         self.charset))
    def delete_cookie(self, key, path='/', domain=None):
        """Delete a cookie. Fails silently if key doesn't exist.
        :param key: the key (name) of the cookie to be deleted.
        :param path: if the cookie that should be deleted was limited to a
                     path, the path has to be defined here.
        :param domain: if the cookie that should be deleted was limited to a
                       domain, that domain has to be defined here.
        """
        # deleting works by setting an already-expired cookie
        self.set_cookie(key, expires=0, max_age=0, path=path, domain=domain)
    @property
    def header_list(self): # pragma: no cover
        # XXX: deprecated
        if __debug__:
            from warnings import warn
            warn(DeprecationWarning('header_list is deprecated'),
                 stacklevel=2)
        return self.headers.to_list(self.charset)
    @property
    def is_streamed(self):
        """If the response is streamed (the response is not an iterable with
        a length information) this property is `True`. In this case streamed
        means that there is no information about the number of iterations.
        This is usually `True` if a generator is passed to the response object.
        This is useful for checking before applying some sort of post
        filtering that should not take place for streamed responses.
        """
        try:
            len(self.response)
        except TypeError:
            return True
        return False
    @property
    def is_sequence(self):
        """If the iterator is buffered, this property will be `True`. A
        response object will consider an iterator to be buffered if the
        response attribute is a list or tuple.
        .. versionadded:: 0.6
        """
        return isinstance(self.response, (tuple, list))
    def close(self):
        """Close the wrapped response if possible."""
        if hasattr(self.response, 'close'):
            self.response.close()
        for func in self._on_close:
            func()
    def freeze(self):
        """Call this method if you want to make your response object ready for
        being pickled. This buffers the generator if there is one. It will
        also set the `Content-Length` header to the length of the body.
        .. versionchanged:: 0.6
           The `Content-Length` header is now set.
        """
        # we explicitly set the length to a list of the *encoded* response
        # iterator. Even if the implicit sequence conversion is disabled.
        self.response = list(self.iter_encoded())
        self.headers['Content-Length'] = str(sum(map(len, self.response)))
    def fix_headers(self, environ):
        # XXX: deprecated
        if __debug__:
            from warnings import warn
            warn(DeprecationWarning('called into deprecated fix_headers baseclass '
                                    'method. Use get_wsgi_headers instead.'),
                 stacklevel=2)
        self.headers[:] = self.get_wsgi_headers(environ)
    def get_wsgi_headers(self, environ):
        """This is automatically called right before the response is started
        and returns headers modified for the given environment. It returns a
        copy of the headers from the response with some modifications applied
        if necessary.
        For example the location header (if present) is joined with the root
        URL of the environment. Also the content length is automatically set
        to zero here for certain status codes.
        .. versionchanged:: 0.6
           Previously that function was called `fix_headers` and modified
           the response object in place. Also since 0.6, IRIs in location
           and content-location headers are handled properly.
           Also starting with 0.6, Werkzeug will attempt to set the content
           length if it is able to figure it out on its own. This is the
           case if all the strings in the response iterable are already
           encoded and the iterable is buffered.
        :param environ: the WSGI environment of the request.
        :return: returns a new :class:`~werkzeug.datastructures.Headers`
                 object.
        """
        headers = Headers(self.headers)
        location = None
        content_location = None
        content_length = None
        status = self.status_code
        # iterate over the headers to find all values in one go. Because
        # get_wsgi_headers is used each response that gives us a tiny
        # speedup.
        for key, value in headers:
            ikey = key.lower()
            if ikey == 'location':
                location = value
            elif ikey == 'content-location':
                content_location = value
            elif ikey == 'content-length':
                content_length = value
        # make sure the location header is an absolute URL
        if location is not None:
            old_location = location
            if isinstance(location, unicode):
                location = iri_to_uri(location)
            if self.autocorrect_location_header:
                location = urlparse.urljoin(
                    get_current_url(environ, root_only=True),
                    location
                )
            if location != old_location:
                headers['Location'] = location
        # make sure the content location is a URL
        if content_location is not None and \
           isinstance(content_location, unicode):
            headers['Content-Location'] = iri_to_uri(content_location)
        # remove entity headers and set content length to zero if needed.
        # Also update content_length accordingly so that the automatic
        # content length detection does not trigger in the following
        # code.
        if 100 <= status < 200 or status == 204:
            headers['Content-Length'] = content_length = '0'
        elif status == 304:
            remove_entity_headers(headers)
        # if we can determine the content length automatically, we
        # should try to do that. But only if this does not involve
        # flattening the iterator or encoding of unicode strings in
        # the response. We however should not do that if we have a 304
        # response.
        if self.automatically_set_content_length and \
           self.is_sequence and content_length is None and status != 304:
            try:
                content_length = sum(len(str(x)) for x in self.response)
            except UnicodeError:
                # aha, something non-bytestringy in there, too bad, we
                # can't safely figure out the length of the response.
                pass
            else:
                headers['Content-Length'] = str(content_length)
        return headers
    def get_app_iter(self, environ):
        """Returns the application iterator for the given environ. Depending
        on the request method and the current status code the return value
        might be an empty response rather than the one from the response.
        If the request method is `HEAD` or the status code is in a range
        where the HTTP specification requires an empty response, an empty
        iterable is returned.
        .. versionadded:: 0.6
        :param environ: the WSGI environment of the request.
        :return: a response iterable.
        """
        status = self.status_code
        # HEAD requests and 1xx/204/304 statuses must not carry a body
        if environ['REQUEST_METHOD'] == 'HEAD' or \
           100 <= status < 200 or status in (204, 304):
            return ()
        if self.direct_passthrough:
            if __debug__:
                _warn_if_string(self.response)
            return self.response
        return ClosingIterator(self.iter_encoded(), self.close)
    def get_wsgi_response(self, environ):
        """Returns the final WSGI response as tuple. The first item in
        the tuple is the application iterator, the second the status and
        the third the list of headers. The response returned is created
        specially for the given environment. For example if the request
        method in the WSGI environment is ``'HEAD'`` the response will
        be empty and only the headers and status code will be present.
        .. versionadded:: 0.6
        :param environ: the WSGI environment of the request.
        :return: an ``(app_iter, status, headers)`` tuple.
        """
        # XXX: code for backwards compatibility with custom fix_headers
        # methods. (``func_code`` is the Python 2 spelling of ``__code__``;
        # differing code objects mean a subclass overrode fix_headers.)
        if self.fix_headers.func_code is not \
           BaseResponse.fix_headers.func_code:
            if __debug__:
                from warnings import warn
                warn(DeprecationWarning('fix_headers changed behavior in 0.6 '
                                        'and is now called get_wsgi_headers. '
                                        'See documentation for more details.'),
                     stacklevel=2)
            self.fix_headers(environ)
            headers = self.headers
        else:
            headers = self.get_wsgi_headers(environ)
        app_iter = self.get_app_iter(environ)
        return app_iter, self.status, headers.to_list()
    def __call__(self, environ, start_response):
        """Process this response as WSGI application.
        :param environ: the WSGI environment.
        :param start_response: the response callable provided by the WSGI
                               server.
        :return: an application iterator
        """
        app_iter, status, headers = self.get_wsgi_response(environ)
        start_response(status, headers)
        return app_iter
class AcceptMixin(object):
    """A mixin for classes with an :attr:`~BaseResponse.environ` attribute
    to get all the HTTP accept headers as
    :class:`~werkzeug.datastructures.Accept` objects (or subclasses
    thereof).
    """
    @cached_property
    def accept_mimetypes(self):
        """List of mimetypes this client supports as
        :class:`~werkzeug.datastructures.MIMEAccept` object.
        """
        return parse_accept_header(self.environ.get('HTTP_ACCEPT'), MIMEAccept)
    @cached_property
    def accept_charsets(self):
        """List of charsets this client supports as
        :class:`~werkzeug.datastructures.CharsetAccept` object.
        """
        return parse_accept_header(self.environ.get('HTTP_ACCEPT_CHARSET'),
                                   CharsetAccept)
    @cached_property
    def accept_encodings(self):
        """List of encodings this client accepts. Encodings in a HTTP term
        are compression encodings such as gzip. For charsets have a look at
        :attr:`accept_charset`.
        """
        # no custom Accept subclass here; a plain Accept object is returned
        return parse_accept_header(self.environ.get('HTTP_ACCEPT_ENCODING'))
    @cached_property
    def accept_languages(self):
        """List of languages this client accepts as
        :class:`~werkzeug.datastructures.LanguageAccept` object.
        .. versionchanged 0.5
           In previous versions this was a regular
           :class:`~werkzeug.datastructures.Accept` object.
        """
        return parse_accept_header(self.environ.get('HTTP_ACCEPT_LANGUAGE'),
                                   LanguageAccept)
class ETagRequestMixin(object):
    """Add entity tag and cache descriptors to a request object or object with
    a WSGI environment available as :attr:`~BaseRequest.environ`. This not
    only provides access to etags but also to the cache control header.
    """
    @cached_property
    def cache_control(self):
        """A :class:`~werkzeug.datastructures.RequestCacheControl` object
        for the incoming cache control headers.
        """
        # on_update callback is None: the parsed object is read only
        cache_control = self.environ.get('HTTP_CACHE_CONTROL')
        return parse_cache_control_header(cache_control, None,
                                          RequestCacheControl)
    @cached_property
    def if_match(self):
        """An object containing all the etags in the `If-Match` header.
        :rtype: :class:`~werkzeug.datastructures.ETags`
        """
        return parse_etags(self.environ.get('HTTP_IF_MATCH'))
    @cached_property
    def if_none_match(self):
        """An object containing all the etags in the `If-None-Match` header.
        :rtype: :class:`~werkzeug.datastructures.ETags`
        """
        return parse_etags(self.environ.get('HTTP_IF_NONE_MATCH'))
    @cached_property
    def if_modified_since(self):
        """The parsed `If-Modified-Since` header as datetime object."""
        return parse_date(self.environ.get('HTTP_IF_MODIFIED_SINCE'))
    @cached_property
    def if_unmodified_since(self):
        """The parsed `If-Unmodified-Since` header as datetime object."""
        return parse_date(self.environ.get('HTTP_IF_UNMODIFIED_SINCE'))
    @cached_property
    def if_range(self):
        """The parsed `If-Range` header.
        .. versionadded:: 0.7
        :rtype: :class:`~werkzeug.datastructures.IfRange`
        """
        return parse_if_range_header(self.environ.get('HTTP_IF_RANGE'))
    @cached_property
    def range(self):
        """The parsed `Range` header.
        .. versionadded:: 0.7
        :rtype: :class:`~werkzeug.datastructures.Range`
        """
        return parse_range_header(self.environ.get('HTTP_RANGE'))
class UserAgentMixin(object):
    """Adds a `user_agent` attribute to the request object which contains the
    parsed user agent of the browser that triggered the request as a
    :class:`~werkzeug.useragents.UserAgent` object.
    """
    @cached_property
    def user_agent(self):
        """The current user agent."""
        # imported lazily to avoid the useragents module cost unless used
        from werkzeug.useragents import UserAgent
        return UserAgent(self.environ)
class AuthorizationMixin(object):
    """Request mixin providing an :attr:`authorization` property: the parsed
    value of the `Authorization` header as a
    :class:`~werkzeug.datastructures.Authorization` object.
    """
    @cached_property
    def authorization(self):
        """The `Authorization` object in parsed form."""
        raw_header = self.environ.get('HTTP_AUTHORIZATION')
        return parse_authorization_header(raw_header)
class ETagResponseMixin(object):
    """Adds extra functionality to a response object for etag and cache
    handling. This mixin requires an object with at least a `headers`
    object that implements a dict like interface similar to
    :class:`~werkzeug.datastructures.Headers`.
    If you want the :meth:`freeze` method to automatically add an etag, you
    have to mixin this method before the response base class. The default
    response class does not do that.
    """
    @property
    def cache_control(self):
        """The Cache-Control general-header field is used to specify
        directives that MUST be obeyed by all caching mechanisms along the
        request/response chain.
        """
        def on_update(cache_control):
            # Propagate mutations of the parsed object straight back into
            # the headers; drop the header once the object becomes empty.
            if not cache_control and 'cache-control' in self.headers:
                del self.headers['cache-control']
            elif cache_control:
                self.headers['Cache-Control'] = cache_control.to_header()
        return parse_cache_control_header(self.headers.get('cache-control'),
                                          on_update,
                                          ResponseCacheControl)
    def make_conditional(self, request_or_environ):
        """Make the response conditional to the request. This method works
        best if an etag was defined for the response already. The `add_etag`
        method can be used to do that. If called without etag just the date
        header is set.
        This does nothing if the request method in the request or environ is
        anything but GET or HEAD.
        It does not remove the body of the response because that's something
        the :meth:`__call__` function does for us automatically.
        Returns self so that you can do ``return resp.make_conditional(req)``
        but modifies the object in-place.
        :param request_or_environ: a request object or WSGI environment to be
                                   used to make the response conditional
                                   against.
        """
        environ = _get_environ(request_or_environ)
        if environ['REQUEST_METHOD'] in ('GET', 'HEAD'):
            # if the date is not in the headers, add it now. We however
            # will not override an already existing header. Unfortunately
            # this header will be overridden by many WSGI servers including
            # wsgiref.
            if 'date' not in self.headers:
                self.headers['Date'] = http_date()
            if 'content-length' not in self.headers:
                self.headers['Content-Length'] = len(self.data)
            # Answer 304 Not Modified when the client's cached copy is still
            # current according to the etag / last-modified validators.
            if not is_resource_modified(environ, self.headers.get('etag'), None,
                                        self.headers.get('last-modified')):
                self.status_code = 304
        return self
    def add_etag(self, overwrite=False, weak=False):
        """Add an etag for the current response if there is none yet.

        The etag is an MD5-style hash of the response body, so this forces
        the body to be buffered via ``self.data``.
        """
        if overwrite or 'etag' not in self.headers:
            self.set_etag(generate_etag(self.data), weak)
    def set_etag(self, etag, weak=False):
        """Set the etag, and override the old one if there was one."""
        self.headers['ETag'] = quote_etag(etag, weak)
    def get_etag(self):
        """Return a tuple in the form ``(etag, is_weak)``. If there is no
        ETag the return value is ``(None, None)``.
        """
        return unquote_etag(self.headers.get('ETag'))
    def freeze(self, no_etag=False):
        """Call this method if you want to make your response object ready for
        pickling. This buffers the generator if there is one. This also
        sets the etag unless `no_etag` is set to `True`.
        """
        if not no_etag:
            self.add_etag()
        super(ETagResponseMixin, self).freeze()
    accept_ranges = header_property('Accept-Ranges', doc='''
        The `Accept-Ranges` header. Even though the name would indicate
        that multiple values are supported, it must be one string token only.
        The values ``'bytes'`` and ``'none'`` are common.
        .. versionadded:: 0.7''')
    def _get_content_range(self):
        # Getter for the ``content_range`` property defined below.
        def on_update(rng):
            if not rng:
                del self.headers['content-range']
            else:
                self.headers['Content-Range'] = rng.to_header()
        rv = parse_content_range_header(self.headers.get('content-range'),
                                        on_update)
        # always provide a content range object to make the descriptor
        # more user friendly. It provides an unset() method that can be
        # used to remove the header quickly.
        if rv is None:
            rv = ContentRange(None, None, None, on_update=on_update)
        return rv
    def _set_content_range(self, value):
        # Setter: accepts None/falsy (delete), a raw header string, or a
        # ContentRange-like object with a to_header() method.
        if not value:
            del self.headers['content-range']
        elif isinstance(value, basestring):
            # NOTE: `basestring` — this module is Python 2 code.
            self.headers['Content-Range'] = value
        else:
            self.headers['Content-Range'] = value.to_header()
    content_range = property(_get_content_range, _set_content_range, doc='''
        The `Content-Range` header as
        :class:`~werkzeug.datastructures.ContentRange` object. Even if the
        header is not set it wil provide such an object for easier
        manipulation.
        .. versionadded:: 0.7''')
    # The raw accessors are not part of the public API; only the property is.
    del _get_content_range, _set_content_range
class ResponseStream(object):
    """A write-only, file-descriptor-like object used by
    :class:`ResponseStreamMixin` to represent the body of the stream.
    Everything written is appended directly to the response iterable of
    the wrapped response object.
    """
    mode = 'wb+'
    def __init__(self, response):
        self.response = response
        self.closed = False
    def _assert_open(self):
        # Shared guard: file-like operations must fail after close().
        if self.closed:
            raise ValueError('I/O operation on closed file')
    def write(self, value):
        self._assert_open()
        self.response._ensure_sequence(mutable=True)
        self.response.response.append(value)
    def writelines(self, seq):
        for chunk in seq:
            self.write(chunk)
    def close(self):
        self.closed = True
    def flush(self):
        # Writes go straight into the response iterable, so there is
        # nothing to flush — but the open/closed contract still applies.
        self._assert_open()
    def isatty(self):
        self._assert_open()
        return False
    @property
    def encoding(self):
        return self.response.charset
class ResponseStreamMixin(object):
    """Mixin for response classes.  Classes that inherit from this mixin
    automatically get a :attr:`stream` property providing a write-only,
    file-like interface to the response iterable.
    """
    @cached_property
    def stream(self):
        """A write-only stream wrapping the response iterable."""
        return ResponseStream(self)
class CommonRequestDescriptorsMixin(object):
    """A mixin for :class:`BaseRequest` subclasses.  Request objects that
    mix this class in automatically get descriptors for a couple of
    HTTP headers with automatic type conversion.
    .. versionadded:: 0.5
    """
    content_type = environ_property('CONTENT_TYPE', doc='''
        The Content-Type entity-header field indicates the media type of
        the entity-body sent to the recipient or, in the case of the HEAD
        method, the media type that would have been sent had the request
        been a GET.''')
    content_length = environ_property('CONTENT_LENGTH', None, int, str, doc='''
        The Content-Length entity-header field indicates the size of the
        entity-body in bytes or, in the case of the HEAD method, the size of
        the entity-body that would have been sent had the request been a
        GET.''')
    referrer = environ_property('HTTP_REFERER', doc='''
        The Referer[sic] request-header field allows the client to specify,
        for the server's benefit, the address (URI) of the resource from which
        the Request-URI was obtained (the "referrer", although the header
        field is misspelled).''')
    date = environ_property('HTTP_DATE', None, parse_date, doc='''
        The Date general-header field represents the date and time at which
        the message was originated, having the same semantics as orig-date
        in RFC 822.''')
    max_forwards = environ_property('HTTP_MAX_FORWARDS', None, int, doc='''
        The Max-Forwards request-header field provides a mechanism with the
        TRACE and OPTIONS methods to limit the number of proxies or gateways
        that can forward the request to the next inbound server.''')
    def _parse_content_type(self):
        # Parse CONTENT_TYPE once and memoize the (mimetype, params) pair
        # on the instance so both properties below share one parse.
        if not hasattr(self, '_parsed_content_type'):
            raw_header = self.environ.get('CONTENT_TYPE', '')
            self._parsed_content_type = parse_options_header(raw_header)
    @property
    def mimetype(self):
        """The content type without its parameters (eg, without charset,
        type etc.).  For ``text/html; charset=utf-8`` this is
        ``'text/html'``.
        """
        self._parse_content_type()
        mimetype, _params = self._parsed_content_type
        return mimetype
    @property
    def mimetype_params(self):
        """The mimetype parameters as dict.  For
        ``text/html; charset=utf-8`` this is ``{'charset': 'utf-8'}``.
        """
        self._parse_content_type()
        _mimetype, params = self._parsed_content_type
        return params
    @cached_property
    def pragma(self):
        """The Pragma general-header field is used to include
        implementation-specific directives that might apply to any recipient
        along the request/response chain. All pragma directives specify
        optional behavior from the viewpoint of the protocol; however, some
        systems MAY require that behavior be consistent with the directives.
        """
        raw_header = self.environ.get('HTTP_PRAGMA', '')
        return parse_set_header(raw_header)
class CommonResponseDescriptorsMixin(object):
    """A mixin for :class:`BaseResponse` subclasses. Response objects that
    mix this class in will automatically get descriptors for a couple of
    HTTP headers with automatic type conversion.
    """
    def _get_mimetype(self):
        # Only the media type part of Content-Type, parameters stripped.
        ct = self.headers.get('content-type')
        if ct:
            return ct.split(';')[0].strip()
    def _set_mimetype(self, value):
        # get_content_type appends the charset for text/* types.
        self.headers['Content-Type'] = get_content_type(value, self.charset)
    def _get_mimetype_params(self):
        def on_update(d):
            # Mutations of the returned dict re-serialize the header.
            self.headers['Content-Type'] = \
                dump_options_header(self.mimetype, d)
        d = parse_options_header(self.headers.get('content-type', ''))[1]
        return CallbackDict(d, on_update)
    mimetype = property(_get_mimetype, _set_mimetype, doc='''
        The mimetype (content type without charset etc.)''')
    mimetype_params = property(_get_mimetype_params, doc='''
        The mimetype parameters as dict. For example if the content
        type is ``text/html; charset=utf-8`` the params would be
        ``{'charset': 'utf-8'}``.
        .. versionadded:: 0.5
        ''')
    location = header_property('Location', doc='''
        The Location response-header field is used to redirect the recipient
        to a location other than the Request-URI for completion of the request
        or identification of a new resource.''')
    age = header_property('Age', None, parse_date, http_date, doc='''
        The Age response-header field conveys the sender's estimate of the
        amount of time since the response (or its revalidation) was
        generated at the origin server.
        Age values are non-negative decimal integers, representing time in
        seconds.''')
    content_type = header_property('Content-Type', doc='''
        The Content-Type entity-header field indicates the media type of the
        entity-body sent to the recipient or, in the case of the HEAD method,
        the media type that would have been sent had the request been a GET.
        ''')
    content_length = header_property('Content-Length', None, int, str, doc='''
        The Content-Length entity-header field indicates the size of the
        entity-body, in decimal number of OCTETs, sent to the recipient or,
        in the case of the HEAD method, the size of the entity-body that would
        have been sent had the request been a GET.''')
    content_location = header_property('Content-Location', doc='''
        The Content-Location entity-header field MAY be used to supply the
        resource location for the entity enclosed in the message when that
        entity is accessible from a location separate from the requested
        resource's URI.''')
    content_encoding = header_property('Content-Encoding', doc='''
        The Content-Encoding entity-header field is used as a modifier to the
        media-type. When present, its value indicates what additional content
        codings have been applied to the entity-body, and thus what decoding
        mechanisms must be applied in order to obtain the media-type
        referenced by the Content-Type header field.''')
    content_md5 = header_property('Content-MD5', doc='''
        The Content-MD5 entity-header field, as defined in RFC 1864, is an
        MD5 digest of the entity-body for the purpose of providing an
        end-to-end message integrity check (MIC) of the entity-body. (Note:
        a MIC is good for detecting accidental modification of the
        entity-body in transit, but is not proof against malicious attacks.)
        ''')
    date = header_property('Date', None, parse_date, http_date, doc='''
        The Date general-header field represents the date and time at which
        the message was originated, having the same semantics as orig-date
        in RFC 822.''')
    expires = header_property('Expires', None, parse_date, http_date, doc='''
        The Expires entity-header field gives the date/time after which the
        response is considered stale. A stale cache entry may not normally be
        returned by a cache.''')
    last_modified = header_property('Last-Modified', None, parse_date,
                                    http_date, doc='''
        The Last-Modified entity-header field indicates the date and time at
        which the origin server believes the variant was last modified.''')
    def _get_retry_after(self):
        # Retry-After can be either a delta in seconds or an HTTP date;
        # normalize both forms to a datetime for the caller.
        value = self.headers.get('retry-after')
        if value is None:
            return
        elif value.isdigit():
            # delta-seconds form: convert to an absolute (naive UTC) time
            return datetime.utcnow() + timedelta(seconds=int(value))
        return parse_date(value)
    def _set_retry_after(self, value):
        # Accepts None (delete header), a datetime (serialized as HTTP
        # date) or anything else (stringified, e.g. a seconds count).
        if value is None:
            if 'retry-after' in self.headers:
                del self.headers['retry-after']
            return
        elif isinstance(value, datetime):
            value = http_date(value)
        else:
            value = str(value)
        self.headers['Retry-After'] = value
    retry_after = property(_get_retry_after, _set_retry_after, doc='''
        The Retry-After response-header field can be used with a 503 (Service
        Unavailable) response to indicate how long the service is expected
        to be unavailable to the requesting client.
        Time in seconds until expiration or date.''')
    def _set_property(name, doc=None):
        # Factory for set-valued headers (Vary, Content-Language, Allow).
        # Runs at class-definition time and is deleted below.
        def fget(self):
            def on_update(header_set):
                if not header_set and name in self.headers:
                    del self.headers[name]
                elif header_set:
                    self.headers[name] = header_set.to_header()
            return parse_set_header(self.headers.get(name), on_update)
        def fset(self, value):
            if not value:
                del self.headers[name]
            elif isinstance(value, basestring):
                # NOTE: `basestring` — this module is Python 2 code.
                self.headers[name] = value
            else:
                self.headers[name] = dump_header(value)
        return property(fget, fset, doc=doc)
    vary = _set_property('Vary', doc='''
        The Vary field value indicates the set of request-header fields that
        fully determines, while the response is fresh, whether a cache is
        permitted to use the response to reply to a subsequent request
        without revalidation.''')
    content_language = _set_property('Content-Language', doc='''
        The Content-Language entity-header field describes the natural
        language(s) of the intended audience for the enclosed entity. Note
        that this might not be equivalent to all the languages used within
        the entity-body.''')
    allow = _set_property('Allow', doc='''
        The Allow entity-header field lists the set of methods supported
        by the resource identified by the Request-URI. The purpose of this
        field is strictly to inform the recipient of valid methods
        associated with the resource. An Allow header field MUST be
        present in a 405 (Method Not Allowed) response.''')
    # Only the properties are public API; drop the helper functions.
    del _set_property, _get_mimetype, _set_mimetype, _get_retry_after, \
        _set_retry_after
class WWWAuthenticateMixin(object):
    """Adds a :attr:`www_authenticate` property to a response object."""
    @property
    def www_authenticate(self):
        """The `WWW-Authenticate` header in a parsed form."""
        def on_update(www_auth):
            # Write mutations of the parsed object straight back to the
            # headers; drop the header entirely once the object is empty.
            if not www_auth and 'www-authenticate' in self.headers:
                del self.headers['www-authenticate']
            elif www_auth:
                self.headers['WWW-Authenticate'] = www_auth.to_header()
        raw_header = self.headers.get('www-authenticate')
        return parse_www_authenticate_header(raw_header, on_update)
class Request(BaseRequest, AcceptMixin, ETagRequestMixin,
              UserAgentMixin, AuthorizationMixin,
              CommonRequestDescriptorsMixin):
    """Full featured request object implementing the following mixins:
    - :class:`AcceptMixin` for accept header parsing
    - :class:`ETagRequestMixin` for etag and cache control handling
    - :class:`UserAgentMixin` for user agent introspection
    - :class:`AuthorizationMixin` for http auth handling
    - :class:`CommonRequestDescriptorsMixin` for common headers
    """
class Response(BaseResponse, ETagResponseMixin, ResponseStreamMixin,
               CommonResponseDescriptorsMixin,
               WWWAuthenticateMixin):
    """Full featured response object implementing the following mixins:
    - :class:`ETagResponseMixin` for etag and cache control handling
    - :class:`ResponseStreamMixin` to add support for the `stream` property
    - :class:`CommonResponseDescriptorsMixin` for various HTTP descriptors
    - :class:`WWWAuthenticateMixin` for HTTP authentication support
    """
|
{
"content_hash": "4da33cbadcf3b2d9542d33143b6c4d70",
"timestamp": "",
"source": "github",
"line_count": 1640,
"max_line_length": 83,
"avg_line_length": 43.43353658536585,
"alnum_prop": 0.6165152812679873,
"repo_name": "Glottotopia/aagd",
"id": "676e3bf3817ad47f38ea48854a65382b1af45e86",
"size": "71256",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "moin/local/moin/build/lib.linux-x86_64-2.6/MoinMoin/support/werkzeug/wrappers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "152885"
},
{
"name": "CSS",
"bytes": "454208"
},
{
"name": "ColdFusion",
"bytes": "438820"
},
{
"name": "HTML",
"bytes": "1998354"
},
{
"name": "Java",
"bytes": "510468"
},
{
"name": "JavaScript",
"bytes": "6505329"
},
{
"name": "Lasso",
"bytes": "72399"
},
{
"name": "Makefile",
"bytes": "10216"
},
{
"name": "PHP",
"bytes": "259528"
},
{
"name": "Perl",
"bytes": "137186"
},
{
"name": "Python",
"bytes": "13713475"
},
{
"name": "Shell",
"bytes": "346"
},
{
"name": "XSLT",
"bytes": "15970"
}
],
"symlink_target": ""
}
|
"""
Copyright (C) 2012 Alan J Lockett
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import pyec.optimize
from numpy import *
# Silence numpy floating-point warnings raised by the benchmark functions.
seterr(all="ignore")
def test_de_dim1():
    # Differential evolution on the 1-D sphere via both the long-form
    # entry point and its short alias; solution and fitness near zero.
    x,f = pyec.optimize.differential_evolution("sphere",dimension=1)
    print "de dim 1: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
    x,f = pyec.optimize.de("sphere",dimension=1)
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
def test_cmaes_dim1():
    # CMA-ES on the 1-D sphere; expect the optimum at the origin.
    x,f = pyec.optimize.cmaes("sphere",dimension=1)
    print "cmaes dim 1: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
def test_nm_dim1():
    # Nelder-Mead on the 1-D sphere via long form and the `nm` alias.
    x,f = pyec.optimize.nelder_mead("sphere",dimension=1, generations=2500)
    print "nm dim 1: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-4
    x,f = pyec.optimize.nm("sphere",dimension=1, generations=2500)
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-4
def test_gss_dim1():
    # Generating set search on the 1-D sphere via long form and `gss` alias.
    x,f = pyec.optimize.generating_set_search("sphere",dimension=1, generations=2500)
    print "gss dim 1: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
    x,f = pyec.optimize.gss("sphere",dimension=1, generations=2500)
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
def test_pso_dim1():
    # Particle swarm on the 1-D sphere via long form and the `pso` alias.
    x,f = pyec.optimize.particle_swarm_optimization("sphere",dimension=1, generations=100)
    print "pso dim 1: ", x,f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-4
    x,f = pyec.optimize.pso("sphere",dimension=1, generations=100)
    print "pso dim 1: ", x,f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-4
def test_evoanneal_dim1():
x,f = pyec.optimize.evolutionary_annealing("sphere",dimension=1,generations=100, learningRate=10.0)
print "evoanneal dim 1: ", x, f
assert sqrt((x**2).sum()) < 1e-1
assert abs(f) < 1e-5
x,f = pyec.optimize.evoanneal("sphere",dimension=1,generations=100, learningRate=10.0)
print "evoanneal dim 1: ", x, f
assert sqrt((x**2).sum()) < 1e-1
assert abs(f) < 1e-5
def test_sa_dim1():
x,f = pyec.optimize.simulated_annealing("sphere",dimension=1,schedule="linear",learning_rate=10., generation=1000)
print "sa dim 1: ", x, f
assert sqrt((x**2).sum()) < 1
assert abs(f) < 1
x,f = pyec.optimize.sa("sphere",dimension=1,schedule="linear",learning_rate=10., generation=1000)
print "sa dim 1: ", x, f
assert sqrt((x**2).sum()) < 1
assert abs(f) < 1
def test_de_dim5():
    # DE on the 5-D sphere with explicit crossover/differential weights.
    x,f = pyec.optimize.differential_evolution("sphere",dimension=5,CR=.5,F=.5,generations=1000)
    print "de dim 5: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
    x,f = pyec.optimize.de("sphere",dimension=5,CR=.5,F=.5,generations=1000)
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
def test_cmaes_dim5():
    # CMA-ES on the 5-D sphere with default settings.
    x,f = pyec.optimize.cmaes("sphere",dimension=5)
    print "cmaes dim 5: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
def test_nm_dim5():
    # Nelder-Mead on the 5-D sphere; looser fitness tolerance than 1-D.
    x,f = pyec.optimize.nelder_mead("sphere",dimension=5, generations=2500)
    print "nm dim 5: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-2
    x,f = pyec.optimize.nm("sphere",dimension=5, generations=2500)
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-2
def test_gss_dim5():
    # Generating set search on the 5-D sphere with default generations.
    x,f = pyec.optimize.generating_set_search("sphere",dimension=5)
    print "gss dim 5: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-2
    x,f = pyec.optimize.gss("sphere",dimension=5)
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-2
def test_sa_dim5():
    # Simulated annealing on the 5-D sphere, linear cooling schedule.
    x,f = pyec.optimize.simulated_annealing("sphere",dimension=5,schedule="linear",generations=2500, learning_rate=10.)
    print "sa dim 5: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-2
    x,f = pyec.optimize.sa("sphere",dimension=5,schedule="linear",generations=2500,learning_rate=10.)
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-2
def test_pso_dim5():
    # PSO on the 5-D sphere; larger swarm, loosest tolerances so far.
    x,f = pyec.optimize.particle_swarm_optimization("sphere",dimension=5,generations=1000, population=50)
    print "pso dim 5: ", x, f
    assert sqrt((x**2).sum()) < 5e-1
    assert abs(f) < 1e-1
    x,f = pyec.optimize.pso("sphere",dimension=5,generations=1000, population=50)
    assert sqrt((x**2).sum()) < 5e-1
    assert abs(f) < 1e-1
def test_evoanneal_dim5():
    # Evolutionary annealing on the 5-D sphere via long form and alias.
    x,f = pyec.optimize.evolutionary_annealing("sphere",dimension=5, learning_rate=1000., generations=250)
    print "evoanneal dim 5: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
    x,f = pyec.optimize.evoanneal("sphere",dimension=5, learning_rate=1000., generations=250)
    print "evoanneal dim 5: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
def test_de_dim10():
    # DE on the 10-D sphere with explicit CR/F and a longer run.
    x,f = pyec.optimize.differential_evolution("sphere",dimension=10,CR=.5,F=.5,generations=2500)
    print "de dim 10: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
    x,f = pyec.optimize.de("sphere",dimension=10,CR=.5,F=.5,generations=2500)
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
def test_cmaes_dim10():
    # CMA-ES on the 10-D sphere.
    x,f = pyec.optimize.cmaes("sphere",dimension=10)
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
    # NOTE(review): unlike the dim-1/dim-5 variants this print comes after
    # the asserts, so it is skipped when the test fails.
    print "cmaes dim 10: ", x, f
def test_nm_dim10():
    # Nelder-Mead on the 10-D sphere; print happens after the asserts.
    x,f = pyec.optimize.nelder_mead("sphere",dimension=10, generations=10000)
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
    print "nm dim 10: ", x, f
    x,f = pyec.optimize.nm("sphere",dimension=10, generations=10000)
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
def test_gss_dim10():
    # Generating set search on the 10-D sphere; print after the asserts.
    x,f = pyec.optimize.generating_set_search("sphere",dimension=10, generations=10000)
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
    print "gss dim 10: ", x, f
    x,f = pyec.optimize.gss("sphere",dimension=10, generations=10000)
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
def test_sa_dim10():
    # Simulated annealing on the 10-D sphere, loosest tolerances.
    x,f = pyec.optimize.simulated_annealing("sphere",dimension=10, schedule="linear", learning_rate=100., generations=2500)
    print "sa dim 10: ", x, f
    assert sqrt((x**2).sum()) < 5e-1
    assert abs(f) < 5e-1
    x,f = pyec.optimize.sa("sphere",dimension=10, schedule="linear", learning_rate=100., generations=2500)
    assert sqrt((x**2).sum()) < 5e-1
    assert abs(f) < 5e-1
def test_pso_dim10():
    # PSO on the 10-D sphere with a 50-particle swarm.
    x,f = pyec.optimize.particle_swarm_optimization("sphere",dimension=10, generations=1000, population=50)
    print "pso dim 10: ", x, f
    assert sqrt((x**2).sum()) < 1.0
    assert abs(f) < 5e-1
    x,f = pyec.optimize.pso("sphere",dimension=10, generations=1000, population=50)
    assert sqrt((x**2).sum()) < 1.0
    assert abs(f) < 5e-1
def test_evoanneal_dim10():
    # Evolutionary annealing on the 10-D sphere via long form and alias.
    x,f = pyec.optimize.evolutionary_annealing("sphere",dimension=10, learning_rate=10000., generations=750)
    print "evoanneal dim 10: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
    x,f = pyec.optimize.evoanneal("sphere",dimension=10, learning_rate=10000., generations=750)
    print "evoanneal dim 10: ", x, f
    assert sqrt((x**2).sum()) < 1e-1
    assert abs(f) < 1e-5
|
{
"content_hash": "567b1463715b6912e511f42ecbd37d4a",
"timestamp": "",
"source": "github",
"line_count": 221,
"max_line_length": 460,
"avg_line_length": 36.77828054298642,
"alnum_prop": 0.6434547244094488,
"repo_name": "hypernicon/pyec",
"id": "b975e5a6c8ad45d7361b438babb2c386e308c378",
"size": "8128",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyec/tests/test_optimizers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "335459"
},
{
"name": "C++",
"bytes": "693926"
},
{
"name": "CSS",
"bytes": "49305"
},
{
"name": "JavaScript",
"bytes": "160196"
},
{
"name": "Python",
"bytes": "728889"
},
{
"name": "Shell",
"bytes": "10202"
}
],
"symlink_target": ""
}
|
class TreeNode:
    """A single binary-search-tree node holding a payload and child links."""
    def __init__(self, d):
        self.left = None
        self.right = None
        self.data = d
    def __lt__(self, other):
        # Nodes order by their payload.
        return self.data < other.data
    def __repr__(self):
        # Fixed: __repr__ must return a str. Returning the raw payload
        # (an int for this file's sample data) raised
        # "TypeError: __repr__ returned non-string" whenever a node was
        # printed or repr()'d.
        return str(self.data)
class Tree:
    """Unbalanced binary search tree of TreeNode objects.

    Values equal to an existing node go to the right subtree; each
    insertion prints trace output ("Left"/"Right"/"Added").
    """
    def __init__(self):
        self.root = None
    def add(self, d):
        """Insert value ``d`` into the tree and print a trace line."""
        newNode = TreeNode(d)
        # Fixed: `== None` comparisons replaced with `is None` (PEP 8
        # E711), and Python-2-only bare print statements replaced with the
        # parenthesized single-argument form, which prints identically on
        # Python 2 and Python 3.
        if self.root is None:
            self.root = newNode
        else:
            self.insert(self.root, newNode)
        print("Added")
    def insert(self, parent, newNode):
        """Recursively place ``newNode`` below ``parent``.

        Comparison relies on TreeNode.__lt__ via reflection
        (``parent > newNode`` calls ``newNode.__lt__(parent)``).
        """
        if parent > newNode:
            print("Left")
            if parent.left is None:
                parent.left = newNode
            else:
                self.insert(parent.left, newNode)
        else:
            print("Right")
            if parent.right is None:
                parent.right = newNode
            else:
                self.insert(parent.right, newNode)
# Demo driver: build a sample tree; note the duplicate 10 at the end
# lands in the right subtree (equal keys take the "Right" branch).
t = Tree()
arr = [10, 15, 8, 12, 45, 23, 10]
for item in arr:
    t.add(item)
|
{
"content_hash": "b4e1e8bb0b84bcdb7028a6c7e6256f26",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 39,
"avg_line_length": 18.476190476190474,
"alnum_prop": 0.6237113402061856,
"repo_name": "capsci/chrome",
"id": "4ba1858be38df90967675eadde33a0f577a18c57",
"size": "776",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "practice/python/BST.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "8"
},
{
"name": "C",
"bytes": "28082"
},
{
"name": "C++",
"bytes": "346"
},
{
"name": "HTML",
"bytes": "197667"
},
{
"name": "Java",
"bytes": "12082"
},
{
"name": "JavaScript",
"bytes": "1085968"
},
{
"name": "Makefile",
"bytes": "143"
},
{
"name": "PHP",
"bytes": "1621"
},
{
"name": "Perl",
"bytes": "16913"
},
{
"name": "PostScript",
"bytes": "4690456"
},
{
"name": "Prolog",
"bytes": "958"
},
{
"name": "Python",
"bytes": "441792"
},
{
"name": "Shell",
"bytes": "630"
}
],
"symlink_target": ""
}
|
"""
Revision ID: 0258_service_postage_nullable
Revises: 0257_letter_branding_migration
Create Date: 2019-02-12 11:52:53.139383
"""
import sqlalchemy as sa
from alembic import op
# Alembic revision identifiers: this migration and the one it follows.
revision = "0258_service_postage_nullable"
down_revision = "0257_letter_branding_migration"
def upgrade():
    """Relax NOT NULL on the ``postage`` column of both the live table and
    its history table so rows may omit a postage value.
    """
    # Same change applied to the live table and its *_history shadow.
    for table_name in ("services_history", "services"):
        op.alter_column(table_name, "postage", existing_type=sa.BOOLEAN(), nullable=True)
def downgrade():
    """Restore NOT NULL on ``postage`` for both tables (inverse of
    :func:`upgrade`).  Fails if NULL postage rows exist at downgrade time.
    """
    for table_name in ("services_history", "services"):
        op.alter_column(table_name, "postage", existing_type=sa.BOOLEAN(), nullable=False)
|
{
"content_hash": "4250bfb3067693b07d55975eb89e495e",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 94,
"avg_line_length": 33.57692307692308,
"alnum_prop": 0.7033218785796106,
"repo_name": "alphagov/notifications-api",
"id": "711a6f90dea115731eb637ba1e9ae8a7b4806b56",
"size": "873",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "migrations/versions/0258_service_postage_nullable.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "719"
},
{
"name": "Jinja",
"bytes": "5543"
},
{
"name": "Makefile",
"bytes": "6627"
},
{
"name": "Mako",
"bytes": "361"
},
{
"name": "Procfile",
"bytes": "35"
},
{
"name": "Python",
"bytes": "3506225"
},
{
"name": "Shell",
"bytes": "13179"
}
],
"symlink_target": ""
}
|
"""
Created on Fri Mar 4 20:44:24 2016
@author: ddboline
"""
from __future__ import (absolute_import, division, print_function, unicode_literals)
import os
# Root of the NAS media share; switch mounts by (un)commenting.
# nasbase = '/media/dileptonnas/Documents'
nasbase = '/media/sabrent2000/Documents'
def remove_leftover_avi(run_command=False):
    """Find leftover ``.avi``/``.mkv`` files that sit next to an ``.mp4``
    of the same title under the NAS media directories, printing each one
    and optionally deleting it.

    :param run_command: when True actually delete the leftovers;
                        otherwise just print them (dry run).
    """
    naspaths = ('%s/television' % nasbase, '%s/movies' % nasbase)
    for naspath in naspaths:
        for root, dirs, files in os.walk(naspath):
            for fn_ in files:
                # Fixed: the original substring test (`'.mp4' in fn_`) also
                # matched names like 'x.mp4.part', and its
                # fname.replace('.mp4', ...) rewrote the FIRST '.mp4'
                # anywhere in the path (e.g. in a directory name).
                if not fn_.endswith('.mp4'):
                    continue
                fname = os.path.join(root, fn_)
                base = fname[:-len('.mp4')]
                # Remove any sibling container formats of the same title.
                for leftover in (base + '.avi', base + '.mkv'):
                    if os.path.exists(leftover):
                        print(leftover)
                        if run_command:
                            os.remove(leftover)
|
{
"content_hash": "20ab44d2061b7d15d874620dd9f449e8",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 84,
"avg_line_length": 33,
"alnum_prop": 0.5151515151515151,
"repo_name": "ddboline/movie_collection_app",
"id": "0809529bd777334a79e8ce77638c748da1ad5dae",
"size": "1070",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "movie_collection_app/sync_sabrent_with_nas.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "118282"
},
{
"name": "Shell",
"bytes": "1720"
}
],
"symlink_target": ""
}
|
from sfa.client.sfaserverproxy import SfaServerProxy
from sfa.util.xml import XML
# GeniLight client support is optional
try:
    from egeni.geniLight_client import *
except ImportError:
    # geniLight not installed; code below checks this sentinel before use.
    GeniClientLight = None
class Interface:
    """
    Interface to another SFA service, typically a peer, or the local aggregate
    can retrieve a xmlrpclib.ServerProxy object for issuing calls there
    """
    def __init__(self, hrn, addr, port, client_type='sfa'):
        self.hrn = hrn
        self.addr = addr
        self.port = port
        self.client_type = client_type
    def get_url(self):
        """Return the http URL for this interface; the port is spliced into
        the first (host) component of the address."""
        address_parts = self.addr.split('/')
        address_parts[0] = address_parts[0] + ":" + str(self.port)
        url = "http://%s" % "/".join(address_parts)
        return url
    def server_proxy(self, key_file, cert_file, timeout=30):
        """Return a client proxy bound to this interface's URL.

        Uses GeniClientLight when requested and available, otherwise a
        plain SfaServerProxy.
        """
        server = None
        if self.client_type == 'geniclientlight' and GeniClientLight:
            # Fixed: this branch referenced an undefined local `url` and a
            # nonexistent `self.api` (the old inline comment flagged it);
            # use this interface's own URL and the caller's credentials.
            server = GeniClientLight(self.get_url(), key_file, cert_file)
        else:
            server = SfaServerProxy(self.get_url(), key_file, cert_file, timeout)
        return server
##
# Interfaces is a dictionary of registry connections keyed on the
# registry hrn
class Interfaces(dict):
    """
    Base class for managing information on the peers we are federated
    with: maps each peer hrn to an Interface and hands out connections
    (xmlrpc or soap) to those peers.
    """
    # fields that must be specified in the config file
    default_fields = {
        'hrn': '',
        'addr': '',
        'port': '',
    }
    # defined by the class
    default_dict = {}
    def __init__(self, conf_file):
        dict.__init__(self, {})
        # load config file
        required_fields = set(self.default_fields.keys())
        self.interface_info = XML(conf_file).todict()
        for value in self.interface_info.values():
            if not isinstance(value, list):
                continue
            for record in value:
                # Only well-formed records carrying every required field
                # become interfaces.
                if not isinstance(record, dict):
                    continue
                if not required_fields.issubset(record.keys()):
                    continue
                hrn = record['hrn']
                address = record['addr']
                port = record['port']
                # sometime this is called at a very early stage with no config loaded
                # avoid to remember this instance in such a case
                if not address or not port:
                    continue
                self[hrn] = Interface(hrn, address, port)
    def server_proxy(self, hrn, key_file, cert_file, timeout=30):
        return self[hrn].server_proxy(key_file, cert_file, timeout)
|
{
"content_hash": "992dae2a7f9a6c463e516af76252f89f",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 93,
"avg_line_length": 36.413333333333334,
"alnum_prop": 0.5822043207616258,
"repo_name": "yippeecw/sfa",
"id": "2f4614618d9810918321438302b483f01a8f2067",
"size": "2731",
"binary": false,
"copies": "2",
"ref": "refs/heads/geni-v3",
"path": "sfa/server/interface.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "424"
},
{
"name": "Makefile",
"bytes": "14208"
},
{
"name": "Python",
"bytes": "1398912"
},
{
"name": "Shell",
"bytes": "19422"
},
{
"name": "XSLT",
"bytes": "15293"
}
],
"symlink_target": ""
}
|
import datetime
from django.utils import timezone
from utils import *
RCD_PER_PAGE = 8
def user_own_events(request, userid):
    """Render, as paginated JSON, the events created by the given user."""
    user_info = get_object_or_404(UserInfo, user__username=userid)
    owned = Event.objects.filter(
        topic__user__exact=user_info.user).order_by("-time_period__start")
    paginator = Paginator(owned, RCD_PER_PAGE)
    page = request.GET.get('page')
    pages = [None] * paginator.num_pages
    if page is not None:
        # flag the page currently requested (1-based in the query string)
        pages[int(page) - 1] = 1
    context = {
        'user_info': user_info,
        'pages': pages,
        'own_events': getPageContent(paginator, page),
    }
    return render(request, 'event/user_own_events.json', context,
                  content_type="application/json")
@login_required
def user_event_applications(request):
    """Render, as paginated JSON, applications to the current user's events."""
    applications = EventApplication.objects.filter(
        event__topic__user__exact=request.user).order_by("-created_on")
    paginator = Paginator(applications, RCD_PER_PAGE)
    page = request.GET.get('page')
    pages = [None] * paginator.num_pages
    if page is not None:
        # flag the page currently requested (1-based in the query string)
        pages[int(page) - 1] = 1
    context = {
        'pages': pages,
        'event_applications': getPageContent(paginator, page),
    }
    return render(request, 'event/user_event_applications.json', context,
                  content_type="application/json")
def user_participating_events(request, userid):
    """Render, as paginated JSON, the events the given user participates in."""
    user_info = get_object_or_404(UserInfo, user__username=userid)
    joined = user_info.participating_events.order_by("-time_period__start")
    paginator = Paginator(joined, RCD_PER_PAGE)
    page = request.GET.get('page')
    pages = [None] * paginator.num_pages
    if page is not None:
        # flag the page currently requested (1-based in the query string)
        pages[int(page) - 1] = 1
    context = {
        'user_info': user_info,
        'pages': pages,
        # NOTE: key spelling matches the existing template; do not rename
        'participanting_events': getPageContent(paginator, page),
    }
    return render(request, 'event/user_participating_events.json', context,
                  content_type="application/json")
@login_required
@transaction.atomic
def create_event(request, forum_name):
    """Show the creation form (GET) or create an event in the forum (POST)."""
    forum = get_object_or_404(Forum, name=forum_name)
    context = {'forum': forum}
    if request.method == 'GET':
        # default time window: starts now, ends one hour later
        later = timezone.now() + datetime.timedelta(minutes=60)
        context['form'] = EventForm(
            initial={'start': timezone.now(), 'end': later})
        return render(request, 'event/create_event.html', context)
    form = EventForm(request.POST, user=request.user, forum=forum)
    if not form.is_valid():
        context['form'] = form
        return render(request, 'event/create_event.html', context)
    new_event = form.save()
    new_event.topic.user = request.user
    new_event.topic.forum = forum
    new_event.save()
    forum_url = reverse('in_forum', kwargs={"forum_name": forum_name})
    return redirect(forum_url + "?filter_tag=Event")
@transaction.atomic
def load_events(request, forum_name):
    """Render, as JSON, the forum's events that have not yet ended.

    Bug fix: ``datetime.date.today`` was passed as a function object
    instead of being called — queryset lookups do not invoke callables,
    so the filter never compared against today's date.
    """
    forum = get_object_or_404(Forum, name=forum_name)
    events = Event.objects.filter(topic__forum=forum).filter(
        time_period__end__gte=datetime.date.today())
    return render(request, 'event/events.json', {"events": events},
                  content_type="application/json")
@login_required
@transaction.atomic
def edit_event(request, topic_id):
    """Show the edit form (GET) or apply the edits (POST) for an event."""
    event = get_object_or_404(Event, topic_id__exact=topic_id)
    # only the event owner may edit it
    if event.topic.user.username != request.user.username:
        raise PermissionDenied()
    context = {
        'topic_id': topic_id,
        'participants': event.participants.all(),
    }
    if request.method == 'GET':
        context['form'] = EventForm(event=event)
        context['participant_form'] = ParticipantForm(event=event)
        return render(request, 'event/edit_event.html', context)
    form = EventForm(request.POST, event=event)
    if form.is_valid():
        form.save()
        return redirect(reverse('in_topic', kwargs={'topic_id': topic_id}))
    context['form'] = form
    return render(request, 'event/edit_event.html', context)
@login_required
def apply_for_event(request, topic_id):
    """Handle a POST application to join a private (non-public) event.

    Bug fixes: ``datetime.date.today`` was passed uncalled to the filter
    (queryset lookups do not invoke callables); an unused ``User`` lookup
    and unused context entries were removed.
    """
    if request.method != 'POST':
        return HttpResponse()
    event = get_object_or_404(Event, topic__id__exact=topic_id)
    # an event can be applied for while it has not ended, still has room,
    # and the user has not already applied to it
    not_ended = Event.objects.filter(topic__exact=event.topic).filter(
        time_period__end__gte=datetime.date.today()).exists()
    has_room = event.participants.count() < event.max_participants
    already_applied = EventApplication.objects.filter(
        applicant__exact=request.user).filter(event__exact=event).exists()
    event_available = not_ended and has_room and not already_applied
    # public events are joined via RSVP, not application; also reject
    # existing participants and unavailable events
    if (event.public
            or event.participants.filter(user__exact=request.user).exists()
            or not event_available):
        raise PermissionDenied()
    apply_form = EventApplicationForm(request.POST)
    if not apply_form.is_valid():
        return HttpResponse()
    new_application = EventApplication(
        event=event, applicant=request.user,
        message=apply_form.cleaned_data['message'])
    new_application.save()
    # notify the event owner that a new application needs review
    notification = Notification(user=event.topic.user,
                                notification_type="event_app",
                                key=event.topic.id)
    notification.save()
    return redirect(reverse('in_topic', kwargs={'topic_id': topic_id}))
@transaction.atomic
def add_event_participant(event, new_participant, notify=False):
    """Add a user to an event's participant list.

    event           -- the Event instance to modify
    new_participant -- a User whose .userinfo is added to participants
    notify          -- when True, create an 'event_add' Notification
    Returns True (kept for caller compatibility).

    Cleanup: a ``context`` dict was built here but never used anywhere;
    that dead code has been removed.
    """
    event.participants.add(new_participant.userinfo)
    event.save()
    if notify:
        notification = Notification(user=new_participant,
                                    notification_type="event_add",
                                    key=event.topic.id)
        notification.save()
    return True
@transaction.atomic
def delete_event_participant(event, participant, notify=False):
    """Remove a user from an event's participant list.

    event       -- the Event instance to modify
    participant -- a User whose .userinfo is removed from participants
    notify      -- when True, create an 'event_del' Notification
    Returns True if the user was a participant and was removed,
    False otherwise.

    Cleanup: a ``context`` dict was built here but never used anywhere;
    that dead code has been removed, and the nesting flattened with a
    guard clause.
    """
    if not event.participants.filter(user__exact=participant).exists():
        return False
    event.participants.remove(participant.userinfo)
    event.save()
    if notify:
        notification = Notification(user=participant,
                                    notification_type="event_del",
                                    key=event.topic.id)
        notification.save()
    return True
@login_required
@transaction.atomic
def delete_participant(request, topic_id, userid):
    """Remove a participant from an event; only the event owner may do this."""
    event = get_object_or_404(Event, topic__id__exact=topic_id)
    member = get_object_or_404(User, username=userid)
    if event.topic.user != request.user:
        raise PermissionDenied()
    delete_event_participant(event, member, True)
    edit_url = reverse('edit_event', kwargs={"topic_id": topic_id})
    return redirect(edit_url)
@login_required
@transaction.atomic
def add_participant(request, topic_id):
    """Add a participant (by username) to an event owned by request.user.

    Bug fix: on GET the blank ``ParticipantForm`` was created but left in
    an unused local instead of being placed in the context (unlike the
    parallel code in edit_event), so the template rendered without it.
    """
    event = get_object_or_404(Event, topic__id__exact=topic_id)
    if event.topic.user != request.user:
        raise PermissionDenied()
    context = {}
    if request.method == 'GET':
        context['participant_form'] = ParticipantForm(event=event)
        return render(request, 'event/add_participant.json', context,
                      content_type="application/json")
    form = ParticipantForm(request.POST, event=event)
    if not form.is_valid():
        context['participant_form'] = form
        context['successful'] = False
        return render(request, 'event/add_participant.json', context,
                      content_type="application/json")
    new_participant = get_object_or_404(
        User, username__exact=form.cleaned_data['username'])
    context['successful'] = add_event_participant(event, new_participant, True)
    return render(request, 'event/add_participant.json', context,
                  content_type="application/json")
@login_required
@transaction.atomic
def rsvp_event(request, topic_id):
    """RSVP the current user to a public event.

    Bug fix: when the user was already a participant the view fell off
    the end and returned None, which is an error in Django (views must
    return an HttpResponse); it now reports ``rsvp_successful: False``.
    """
    event = get_object_or_404(Event, topic__id__exact=topic_id)
    # private events are joined by application, not RSVP
    if not event.public:
        raise PermissionDenied()
    already_in = event.participants.filter(user__exact=request.user).exists()
    has_room = event.participants.count() < event.max_participants
    if not already_in and has_room:
        add_event_participant(event, request.user, False)
        return JsonResponse({'rsvp_successful': True})
    return JsonResponse({'rsvp_successful': False})
@login_required
@transaction.atomic
def undo_rsvp_event(request, topic_id):
    """Withdraw the current user's RSVP from an event."""
    event = get_object_or_404(Event, topic__id__exact=topic_id)
    removed = delete_event_participant(event, request.user, False)
    return JsonResponse({'undo_rsvp_successful': removed})
@login_required
@transaction.atomic
def accept_event_application(request, application_id):
    """Accept an application: add the applicant, then drop the application."""
    app = get_object_or_404(EventApplication, id__exact=application_id)
    # only the event owner may moderate applications
    if app.event.topic.user != request.user:
        raise PermissionDenied()
    add_event_participant(app.event, app.applicant, True)
    app.delete()
    return JsonResponse({'successful': True})
@login_required
@transaction.atomic
def decline_event_application(request, application_id):
    """Decline (delete) an application to one of the user's events."""
    app = get_object_or_404(EventApplication, id__exact=application_id)
    # only the event owner may moderate applications
    if app.event.topic.user != request.user:
        raise PermissionDenied()
    app.delete()
    return JsonResponse({'successful': True})
|
{
"content_hash": "81e362e8123ca42d295e685f1b55ff56",
"timestamp": "",
"source": "github",
"line_count": 227,
"max_line_length": 296,
"avg_line_length": 37.36123348017621,
"alnum_prop": 0.7372951302912393,
"repo_name": "zhubw91/biyemaijianbing",
"id": "cb6a7861f75dc5ff43d768b9226fc73b6dac45e3",
"size": "8481",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/cmubbs/views/views_event.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "14636"
},
{
"name": "HTML",
"bytes": "119684"
},
{
"name": "JavaScript",
"bytes": "57868"
},
{
"name": "Python",
"bytes": "131722"
}
],
"symlink_target": ""
}
|
from django.apps import AppConfig
from django.core.management import settings
class ODM2AdminConfig(AppConfig):
    """Django application config; names are taken from project settings."""
    name = str(settings.APP_NAME)
    verbose_name = str(settings.VERBOSE_NAME)
|
{
"content_hash": "cfc768d1e935ac98ed020b0a2f131869",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 53,
"avg_line_length": 30,
"alnum_prop": 0.7476190476190476,
"repo_name": "miguelcleon/ODM2-Admin",
"id": "2a83bdf29bf73da17ac76e1333665bb7b11f230e",
"size": "210",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "odm2admin/apps.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "23546"
},
{
"name": "Dockerfile",
"bytes": "1343"
},
{
"name": "HTML",
"bytes": "216455"
},
{
"name": "JavaScript",
"bytes": "491230"
},
{
"name": "PLpgSQL",
"bytes": "270807"
},
{
"name": "Python",
"bytes": "718550"
},
{
"name": "Shell",
"bytes": "2012"
}
],
"symlink_target": ""
}
|
import os
import logging
from datetime import datetime
import typing
import jinja2
import markupsafe
import pymongo
from pymongo.errors import DuplicateKeyError
from tg import tmpl_context as c, app_globals as g
import tg
from ming import schema
from ming.orm.base import session
from ming.orm.property import (FieldProperty, RelationProperty,
ForeignIdProperty)
from ming.utils import LazyProperty
from bson import ObjectId
from allura.lib import helpers as h
from allura.lib import security
from allura.lib.security import require_access, has_access
from allura.lib import utils
from allura.model.notification import Notification, Mailbox
from .artifact import Artifact, ArtifactReference, VersionedArtifact, Snapshot, Message, Feed, ReactableArtifact
from .attachments import BaseAttachment
from .auth import User, ProjectRole, AlluraUserProperty
from .timeline import ActivityObject
from .types import MarkdownCache
if typing.TYPE_CHECKING:
from ming.odm.mapper import Query
log = logging.getLogger(__name__)
class Discussion(Artifact, ActivityObject):
    """A named container of threads attached to a tool (app).

    Owns the Thread/Post documents referencing it via ``discussion_id``
    and keeps denormalized topic/post counters (see update_stats).
    """
    class __mongometa__:
        name = 'discussion'
    query: 'Query[Discussion]'
    type_s = 'Discussion'
    parent_id = FieldProperty(schema.Deprecated)
    shortname = FieldProperty(str)
    name = FieldProperty(str)
    description = FieldProperty(str, if_missing='')
    description_cache = FieldProperty(MarkdownCache)
    # denormalized counters, refreshed by update_stats()
    num_topics = FieldProperty(int, if_missing=0)
    num_posts = FieldProperty(int, if_missing=0)
    subscriptions = FieldProperty({str: bool})
    threads = RelationProperty('Thread', via='discussion_id')
    posts = RelationProperty('Post', via='discussion_id')
    def __json__(self, limit=None, posts_limit=None, is_export=False):
        """Serialize this discussion (and up to `limit` threads) to a dict."""
        return dict(
            _id=str(self._id),
            shortname=self.shortname,
            name=self.name,
            description=self.description,
            threads=[t.__json__(limit=posts_limit, is_export=is_export) for t
                     in self.thread_class().query.find(dict(discussion_id=self._id)).limit(limit or 0)]
        )
    @property
    def activity_name(self):
        return 'discussion %s' % self.name
    @classmethod
    def thread_class(cls):
        # concrete Thread model class used by this discussion type
        return cls.threads.related
    @classmethod
    def post_class(cls):
        # concrete Post model class used by this discussion type
        return cls.posts.related
    @classmethod
    def attachment_class(cls):
        return DiscussionAttachment
    def update_stats(self):
        """Recompute the denormalized num_topics / num_posts counters."""
        self.num_topics = self.thread_class().query.find(
            dict(discussion_id=self._id)).count()
        self.num_posts = self.post_class().query.find(
            dict(discussion_id=self._id, status='ok', deleted=False)).count()
    @LazyProperty
    def last_post(self):
        """Most recent approved, non-deleted post, or None."""
        q = self.post_class().query.find(dict(
            discussion_id=self._id,
            status='ok',
            deleted=False,
        )).sort('timestamp', pymongo.DESCENDING).limit(1)
        return q.first()
    def url(self):
        return self.app.url + '_discuss/'
    def shorthand_id(self):
        return self.shortname
    def index(self):
        """Extend the base artifact search-index document."""
        result = Artifact.index(self)
        result.update(
            title=self.name,
            name_s=self.name,
            text=self.description)
        return result
    def delete(self):
        # Delete all the threads, posts, and artifacts
        self.thread_class().query.remove(dict(discussion_id=self._id))
        self.post_class().query.remove(dict(discussion_id=self._id))
        self.attachment_class().remove(dict(discussion_id=self._id))
        super().delete()
    def find_posts(self, **kw):
        """Find non-deleted posts here; kw adds extra query terms."""
        q = dict(kw, discussion_id=self._id, deleted=False)
        return self.post_class().query.find(q)
class Thread(Artifact, ActivityObject):
    """A single conversation: a tree of Posts within a Discussion.

    A thread may stand alone (forums) or point, via ``ref``, at another
    artifact (ticket, wiki page, ...) whose comments it holds.
    """
    class __mongometa__:
        name = 'thread'
        indexes = [
            ('artifact_id',),
            ('ref_id',),
            (('app_config_id', pymongo.ASCENDING),
             ('last_post_date', pymongo.DESCENDING),
             ('mod_date', pymongo.DESCENDING)),
            ('discussion_id',),
        ]
    query: 'Query[Thread]'
    type_s = 'Thread'
    # random string id rather than ObjectId (used directly in URLs)
    _id = FieldProperty(str, if_missing=lambda: h.nonce(10))
    discussion_id = ForeignIdProperty(Discussion)
    ref_id: str = ForeignIdProperty('ArtifactReference')
    subject = FieldProperty(str, if_missing='')
    # denormalized counters; num_replies is refreshed by update_stats()
    num_replies = FieldProperty(int, if_missing=0)
    num_views = FieldProperty(int, if_missing=0)
    subscriptions = FieldProperty({str: bool})
    first_post_id = ForeignIdProperty('Post')
    last_post_date = FieldProperty(datetime, if_missing=datetime(1970, 1, 1))
    artifact_reference = FieldProperty(schema.Deprecated)
    artifact_id = FieldProperty(schema.Deprecated)
    discussion = RelationProperty(Discussion)
    posts = RelationProperty('Post', via='thread_id')
    first_post = RelationProperty('Post', via='first_post_id')
    ref = RelationProperty('ArtifactReference')
    def should_update_index(self, old_doc, new_doc):
        """Skip index update if only `num_views` has changed.
        Value of `num_views` is updated whenever user loads thread page.
        This generates a lot of unnecessary `add_artifacts` tasks.
        """
        old_doc.pop('num_views', None)
        new_doc.pop('num_views', None)
        return old_doc != new_doc
    def attachment_for_export(self, page):
        """Describe `page`'s attachments for export, with on-disk paths."""
        return [dict(bytes=attach.length,
                     url=h.absurl(attach.url()),
                     path=os.path.join(
                         self.artifact.app_config.options.mount_point,
                         str(self.artifact._id),
                         self._id,
                         page.slug,
                         os.path.basename(attach.filename))
                     ) for attach in page.attachments]
    def attachments_for_json(self, page):
        """Describe `page`'s attachments for plain JSON output."""
        return [dict(bytes=attach.length,
                     url=h.absurl(attach.url())) for attach in page.attachments]
    def __json__(self, limit=None, page=None, is_export=False):
        """Serialize the thread and its 'ok' posts (chronological) to a dict."""
        return dict(
            _id=self._id,
            discussion_id=str(self.discussion_id),
            subject=self.subject,
            limit=limit,
            page=page,
            posts=[dict(slug=p.slug,
                        text=p.text,
                        subject=p.subject,
                        author=p.author().username,
                        author_icon_url=h.absurl(p.author().icon_url()),
                        timestamp=p.timestamp,
                        last_edited=p.last_edit_date,
                        attachments=self.attachment_for_export(p) if is_export else self.attachments_for_json(p))
                   for p in self.query_posts(status='ok', style='chronological', limit=limit, page=page)
                   ]
        )
    @property
    def activity_name(self):
        return 'thread %s' % self.subject
    def parent_security_context(self):
        # permissions are inherited from the owning Discussion
        return self.discussion
    @classmethod
    def new(cls, **props):
        '''Creates a new Thread instance, ensuring a unique _id.'''
        # the random nonce _id can collide; retry up to 5 times
        for i in range(5):
            try:
                thread = cls(**props)
                session(thread).flush(thread)
                return thread
            except DuplicateKeyError as err:
                log.warning(
                    'Got DuplicateKeyError: attempt #%s, trying again. %s', i, err)
                if i == 4:
                    raise
                session(thread).expunge(thread)
                continue
    @classmethod
    def discussion_class(cls):
        return cls.discussion.related
    @classmethod
    def post_class(cls):
        return cls.posts.related
    @classmethod
    def attachment_class(cls):
        return DiscussionAttachment
    @property
    def artifact(self):
        # Threads attached to a wiki page, ticket, etc will have a .ref.artifact pointing to that WikiPage etc
        # Threads that are part of a forum will not have that
        if self.ref is None:
            return self.discussion
        return self.ref.artifact
    # Use wisely - there's .num_replies also
    @property
    def post_count(self):
        """Live count of non-deleted ok/pending posts (vs cached num_replies)."""
        return Post.query.find(dict(
            discussion_id=self.discussion_id,
            thread_id=self._id,
            status={'$in': ['ok', 'pending']},
            deleted=False,
        )).count()
    def primary(self):
        """Return the artifact this thread is about, or the thread itself."""
        if self.ref is None:
            return self
        return self.ref.artifact
    def post_to_feed(self, post):
        """Publish an approved post to the owning artifact's feed."""
        if post.status == 'ok':
            Feed.post(
                self.primary(),
                title=post.subject,
                description=post.text,
                link=post.url_paginated(),
                pubdate=post.mod_date,
            )
    def add_post(self, **kw):
        """Helper function to avoid code duplication."""
        p = self.post(**kw)
        p.commit(update_stats=False)
        session(self).flush(self)
        self.update_stats()
        if not self.first_post:
            self.first_post_id = p._id
        self.post_to_feed(p)
        return p
    def include_subject_in_spam_check(self, post):
        """Whether the thread subject should be fed to the spam checker too."""
        return (post.primary() == post  # only artifacts where the discussion is the main thing i.e. ForumPost
                and
                self.num_replies == 0)  # only first post in thread
    def is_spam(self, post):
        """Run the spam checker; never act on it for project admins/devs."""
        roles = [r.name for r in c.project.named_roles]
        spam_check_text = post.text
        if self.include_subject_in_spam_check(post):
            spam_check_text = self.subject + '\n' + spam_check_text
        spammy = g.spam_checker.check(spam_check_text, artifact=post, user=c.user)
        if c.user in c.project.users_with_role(*roles):
            # always run the check, so it's logged. But don't act on it for admins/developers of their own project
            return False
        else:
            return spammy
    def post(self, text, message_id=None, parent_id=None, notify=True,
             notification_text=None, timestamp=None, ignore_security=False,
             is_meta=False, subscribe=False, **kw):
        """Create a Post in this thread and route it through moderation.

        Returns the new Post (status 'ok', 'pending' or 'spam' depending
        on permissions and the spam check).
        """
        if not ignore_security:
            require_access(self, 'post')
        if subscribe:
            self.primary().subscribe()
        if message_id is None:
            message_id = h.gen_message_id()
        parent = parent_id and self.post_class().query.get(_id=parent_id)
        slug, full_slug = self.post_class().make_slugs(parent, timestamp)
        kwargs = dict(
            discussion_id=self.discussion_id,
            full_slug=full_slug,
            slug=slug,
            thread_id=self._id,
            parent_id=parent_id,
            text=text,
            status='pending',
            is_meta=is_meta)
        if timestamp is not None:
            kwargs['timestamp'] = timestamp
        if message_id is not None:
            kwargs['_id'] = message_id
        post = self.post_class()(**kwargs)
        if ignore_security or is_meta:
            spammy = False
        else:
            spammy = self.is_spam(post)
        # unmoderated post -> autoapprove
        # unmoderated post but is spammy -> don't approve it, it goes into moderation
        # moderated post -> moderation
        # moderated post but is spammy -> mark as spam
        if ignore_security or (not spammy and has_access(self, 'unmoderated_post')):
            log.info('Auto-approving message from %s', c.user.username)
            file_info = kw.get('file_info', None)
            post.approve(file_info, notify=notify,
                         notification_text=notification_text)
        elif not has_access(self, 'unmoderated_post') and spammy:
            post.spam(submit_spam_feedback=False)  # no feedback since we're marking as spam automatically not manually
        else:
            self.notify_moderators(post)
        return post
    def notify_moderators(self, post):
        ''' Notify moderators that a post needs approval [#2963] '''
        artifact = self.artifact or self
        subject = '[{}:{}] Moderation action required'.format(
            c.project.shortname, c.app.config.options.mount_point)
        author = post.author()
        url = self.discussion_class().query.get(_id=self.discussion_id).url()
        text = ('The following submission requires approval at %s before '
                'it can be approved for posting:\n\n%s'
                % (h.absurl(url + 'moderate'), post.text))
        n = Notification(
            ref_id=artifact.index_id(),
            topic='message',
            link=artifact.url(),
            _id=artifact.url() + post._id,
            from_address=str(author._id) if author != User.anonymous()
            else None,
            reply_to_address=g.noreply,
            subject=subject,
            text=text,
            in_reply_to=post.parent_id,
            author_id=author._id,
            pubdate=datetime.utcnow())
        users = self.app_config.project.users()
        # direct-send only to subscribed users who can moderate here
        for u in users:
            if (has_access(self, 'moderate', u)
                and Mailbox.subscribed(user_id=u._id,
                                       app_config_id=post.app_config_id)):
                n.send_direct(str(u._id))
    def update_stats(self):
        """Recompute the denormalized num_replies counter."""
        self.num_replies = self.post_class().query.find(
            dict(thread_id=self._id, status='ok', deleted=False)).count()
    @LazyProperty
    def last_post(self):
        """Most recent non-deleted post (any status), or None."""
        q = self.post_class().query.find(dict(
            thread_id=self._id,
            deleted=False,
        )).sort('timestamp', pymongo.DESCENDING)
        return q.first()
    def create_post_threads(self, posts):
        """Arrange flat `posts` into a forest of {post, children} dicts,
        ordered by full_slug; orphans become roots."""
        result = []
        post_index = {}
        for p in sorted(posts, key=lambda p: p.full_slug):
            pi = dict(post=p, children=[])
            post_index[p._id] = pi
            if p.parent_id in post_index:
                post_index[p.parent_id]['children'].append(pi)
            else:
                result.append(pi)
        return result
    def query_posts(self, page=None, limit=None,
                    timestamp=None, style='threaded', status=None):
        """Build the (non-deleted) posts query for this thread.

        style 'threaded' sorts by full_slug, otherwise by timestamp;
        page/limit apply skip/limit paging.
        """
        if timestamp:
            terms = dict(discussion_id=self.discussion_id, thread_id=self._id,
                         status={'$in': ['ok', 'pending']}, timestamp=timestamp)
        else:
            terms = dict(discussion_id=self.discussion_id, thread_id=self._id,
                         status={'$in': ['ok', 'pending']})
        if status:
            terms['status'] = status
        terms['deleted'] = False
        q = self.post_class().query.find(terms)
        if style == 'threaded':
            q = q.sort('full_slug')
        else:
            q = q.sort('timestamp')
        if limit is not None:
            limit = int(limit)
            if page is not None:
                q = q.skip(page * limit)
            q = q.limit(limit)
        return q
    def find_posts(self, *args, **kwargs):
        return self.query_posts(*args, **kwargs).all()
    def url(self):
        # Can't use self.discussion because it might change during the req
        discussion = self.discussion_class().query.get(_id=self.discussion_id)
        return discussion.url() + 'thread/' + str(self._id) + '/'
    def shorthand_id(self):
        return self._id
    def index(self):
        """Extend the base artifact search-index document."""
        result = Artifact.index(self)
        result.update(
            title=self.subject or '(no subject)',
            name_s=self.subject,
            views_i=self.num_views,
            text=self.subject)
        return result
    def delete(self):
        """Delete the thread with all of its posts and attachments."""
        for p in self.post_class().query.find(dict(thread_id=self._id)):
            p.delete()
        self.attachment_class().remove(dict(thread_id=self._id))
        super().delete()
    def spam(self):
        """Mark this thread as spam."""
        for p in self.post_class().query.find(dict(thread_id=self._id)):
            p.spam()
class PostHistory(Snapshot):
    """A versioned snapshot of a Post's content."""
    class __mongometa__:
        name = 'post_history'
    query: 'Query[PostHistory]'
    artifact_id = ForeignIdProperty('Post')
    @classmethod
    def post_class(cls):
        return cls.artifact_id.related
    def original(self):
        """The live Post this snapshot belongs to (None if it is gone)."""
        return self.post_class().query.get(_id=self.artifact_id)
    def shorthand_id(self):
        original = self.original()
        if original:
            return f'{original.shorthand_id()}#{self.version}'
        else:
            return None
    def url(self):
        # URL of the live post with a ?version= selector, if it still exists
        if self.original():
            return self.original().url() + '?version=%d' % self.version
        else:
            return None
    def index(self):
        """Extend the base snapshot search-index document."""
        result = Snapshot.index(self)
        result.update(
            type_s='Post Snapshot',
            text=self.data.text)
        return result
class Post(Message, VersionedArtifact, ActivityObject, ReactableArtifact):
    """A single message within a Thread, with moderation, versioning
    (PostHistory snapshots) and reactions."""
    class __mongometa__:
        name = 'post'
        history_class = PostHistory
        indexes = [
            # used in general lookups, last_post, etc
            ('discussion_id', 'status', 'timestamp'),
            # for update_stats()
            ('discussion_id', 'deleted', 'status'),
            # for update_stats() and thread_id in general
            ('thread_id', 'status', 'deleted'),
            # for find_posts/query_posts, including full_slug sort which is useful on super big threads
            ('deleted', 'discussion_id', 'thread_id', 'full_slug'),
        ]
    query: 'Query[Post]'
    type_s = 'Post'
    thread_id = ForeignIdProperty(Thread)
    discussion_id = ForeignIdProperty(Discussion)
    subject = FieldProperty(schema.Deprecated)
    # moderation state: 'ok' (approved), 'pending', or 'spam'
    status = FieldProperty(schema.OneOf('ok', 'pending', 'spam',
                                        if_missing='pending'))
    last_edit_date = FieldProperty(datetime, if_missing=None)
    last_edit_by_id: ObjectId = AlluraUserProperty()
    edit_count = FieldProperty(int, if_missing=0)
    spam_check_id = FieldProperty(str, if_missing='')
    text_cache = FieldProperty(MarkdownCache)
    # meta comment - system generated, describes changes to an artifact
    is_meta = FieldProperty(bool, if_missing=False)
    thread = RelationProperty(Thread)
    discussion = RelationProperty(Discussion)
    def __json__(self):
        """Serialize the post to a dict."""
        author = self.author()
        return dict(
            _id=str(self._id),
            thread_id=self.thread_id,
            slug=self.slug,
            subject=self.subject,
            status=self.status,
            text=self.text,
            timestamp=self.timestamp,
            last_edited=self.last_edit_date,
            author_id=str(author._id),
            author=author.username)
    @property
    def activity_name(self):
        return 'a comment'
    @property
    def activity_url(self):
        return self.url_paginated()
    def has_activity_access(self, perm, user, activity):
        """Return True if user has perm access to this object, otherwise
        return False.
        For the purposes of activitystreams, we're saying that the user does
        not have access to a 'comment' activity unless he also has access to
        the artifact on which it was posted (if there is one).
        """
        if self.project is None or self.deleted or self.status != 'ok':
            return False
        artifact_access = True
        if self.thread.artifact:
            if self.thread.artifact.project is None:
                return False
            if self.thread.artifact.deleted:
                return False
            artifact_access = security.has_access(self.thread.artifact, perm,
                                                  user, self.thread.artifact.project)
        return artifact_access and security.has_access(self, perm, user,
                                                       self.project)
    @property
    def activity_extras(self):
        """Add a truncated, tag-stripped text summary for activity streams."""
        d = ActivityObject.activity_extras.fget(self)
        # For activity summary, convert Post text to html,
        # strip all tags, and truncate
        LEN = 500
        summary = markupsafe.Markup.escape(
            g.markdown.cached_convert(self, 'text')).striptags()
        if len(summary) > LEN:
            # prefer to cut at a word boundary after LEN chars
            split = max(summary.find(' ', LEN), LEN)
            summary = summary[:split] + '...'
        d.update(summary=summary)
        return d
    def index(self):
        """Extend the base search-index document."""
        result = super().index()
        result.update(
            title='Post by {} on {}'.format(
                self.author().username, self.subject),
            name_s=self.subject,
            type_s='Post',
            text=self.text)
        return result
    @classmethod
    def discussion_class(cls):
        return cls.discussion.related
    @classmethod
    def thread_class(cls):
        return cls.thread.related
    @classmethod
    def attachment_class(cls):
        return DiscussionAttachment
    @property
    def parent(self):
        # parent Post in the reply tree, if any
        if self.parent_id:
            return self.query.get(_id=self.parent_id)
    @property
    def subject(self):
        # shadows the deprecated FieldProperty above; derives the subject
        # from the thread, falling back to the attached artifact
        subject = None
        if self.thread:
            subject = self.thread.subject
            if not subject:
                artifact = self.thread.artifact
                if artifact:
                    subject = getattr(artifact, 'email_subject', '')
        return subject or '(no subject)'
    def add_multiple_attachments(self, file_info):
        """Attach a single file or a list of files to this post."""
        if isinstance(file_info, list):
            for fi in file_info:
                self.add_attachment(fi)
        else:
            self.add_attachment(file_info)
    def add_attachment(self, file_info):
        """Attach one uploaded file (objects without .file are ignored)."""
        if hasattr(file_info, 'file'):
            mime_type = file_info.type
            if not mime_type or '/' not in mime_type:
                # fall back to guessing the type from the filename
                mime_type = utils.guess_mime_type(file_info.filename)
            self.attach(
                file_info.filename, file_info.file, content_type=mime_type,
                post_id=self._id,
                thread_id=self.thread_id,
                discussion_id=self.discussion_id)
    def last_edit_by(self):
        return User.query.get(_id=self.last_edit_by_id) or User.anonymous()
    def primary(self):
        return self.thread.primary()
    def url(self):
        if self.thread:
            return self.thread.url() + h.urlquote(self.slug) + '/'
        else:  # pragma no cover
            return None
    def parent_artifact(self):
        """
        :return: the artifact (e.g Ticket, Wiki Page) that this Post belongs to.  May return None.
        """
        aref = ArtifactReference.query.get(_id=self.thread.ref_id)
        if aref and aref.artifact:
            return aref.artifact
        else:
            return None
    def main_url(self):
        """
        :return: the URL for the artifact (e.g Ticket, Wiki Page) that this Post belongs to,
        else the default thread URL
        """
        parent_artifact = self.parent_artifact()
        if parent_artifact:
            url = parent_artifact.url()
        else:
            url = self.thread.url()
        return url
    def url_paginated(self):
        '''Return link to the thread with a #target that points to this comment.
        Also handle pagination properly.
        '''
        if not self.thread:  # pragma no cover
            return None
        limit, p, s = g.handle_paging(None, 0)  # get paging limit
        if self.query.find(dict(thread_id=self.thread._id)).count() <= limit:
            # all posts in a single page
            page = 0
        else:
            posts = self.thread.find_posts()
            posts = self.thread.create_post_threads(posts)
            def find_i(posts):
                '''Find the index number of this post in the display order'''
                q = []
                def traverse(posts):
                    for p in posts:
                        if p['post']._id == self._id:
                            return True  # found
                        q.append(p)
                        if traverse(p['children']):
                            return True
                traverse(posts)
                return len(q)
            page = find_i(posts) // limit
        slug = h.urlquote(self.slug)
        url = self.main_url()
        if page == 0:
            return f'{url}?limit={limit}#{slug}'
        return f'{url}?limit={limit}&page={page}#{slug}'
    def shorthand_id(self):
        if self.thread:
            return f'{self.thread.shorthand_id()}#{self.slug}'
        else:  # pragma no cover
            return None
    def link_text(self):
        return self.subject
    def reply_subject(self):
        """Subject for a reply, prefixing 'Re: ' unless already present."""
        if self.subject and self.subject.lower().startswith('re:'):
            return self.subject
        else:
            return 'Re: ' + (self.subject or '(no subject)')
    def delete(self):
        # soft delete: flag rather than remove, then refresh thread stats
        self.deleted = True
        session(self).flush(self)
        self.thread.update_stats()
    def approve(self, file_info=None, notify=True, notification_text=None):
        """Approve a pending/spam post: set status 'ok', grant the author
        moderate (and possibly unmoderated_post) on this post's ACL,
        notify subscribers, and refresh thread/artifact stats."""
        if self.status == 'ok':
            return
        self.status = 'ok'
        author = self.author()
        author_role = ProjectRole.by_user(
            author, project=self.project, upsert=True)
        if not author.is_anonymous():
            security.simple_grant(
                self.acl, author_role._id, 'moderate')
        self.commit()
        if (c.app.config.options.get('PostingPolicy') == 'ApproveOnceModerated'
                and author._id is not None):
            security.simple_grant(
                self.acl, author_role._id, 'unmoderated_post')
        if notify:
            self.notify(file_info=file_info, notification_text=notification_text)
        artifact = self.thread.artifact or self.thread
        session(self).flush()
        self.thread.last_post_date = max(
            self.thread.last_post_date,
            self.mod_date)
        self.thread.update_stats()
        if hasattr(artifact, 'update_stats'):
            artifact.update_stats()
        if self.text and not self.is_meta:
            g.director.create_activity(author, 'posted', self, target=artifact,
                                       related_nodes=[self.app_config.project],
                                       tags=['comment'])
    def notify(self, file_info=None, notification_text=None):
        """Send the appropriate subscriber/monitoring notification."""
        if self.project.notifications_disabled:
            return  # notifications disabled for entire project
        artifact = self.thread.artifact or self.thread
        msg_id = artifact.url() + self._id
        notification_params = dict(
            post=self,
            text=notification_text,
            file_info=file_info)
        n = Notification.query.get(_id=msg_id)
        if n and 'Moderation action required' in n.subject:
            # Existing notification for this artifact is for moderators only,
            # this means artifact was not auto approved, and all the
            # subscribers did not receive notification. Now, moderator approved
            # artifact/post, so we should re-send actual notification
            msg_id = 'approved-' + msg_id
            n = Notification.query.get(_id=msg_id)
            if n:
                # 'approved' notification also exists, re-send
                n.fire_notification_task([artifact, self.thread], 'message')
            else:
                # 'approved' notification does not exist, create
                notification_params['message_id'] = msg_id
        if not n:
            # artifact is Forum (or artifact like WikiPage)
            n = Notification.post(artifact, 'message',
                                  additional_artifacts_to_match_subscriptions=self.thread,
                                  **notification_params)
        if not n:
            return
        if getattr(artifact, 'monitoring_email', None):
            if hasattr(artifact, 'notify_post'):
                if artifact.notify_post:
                    n.send_simple(artifact.monitoring_email)
            else:  # Send if no extra checks required
                n.send_simple(artifact.monitoring_email)
    def spam(self, submit_spam_feedback=True):
        """Mark the post as spam, optionally reporting it to the checker."""
        self.status = 'spam'
        if submit_spam_feedback:
            g.spam_checker.submit_spam(self.text, artifact=self, user=self.author())
        session(self).flush(self)
        self.thread.update_stats()
    def undo(self, prev_status):
        """Restore a spammed post back to its previous ok/pending status."""
        if prev_status in ('ok', 'pending'):
            self.status = prev_status
            session(self).flush(self)
            self.thread.update_stats()
class DiscussionAttachment(BaseAttachment):
    """A file attached to a discussion, a thread, or an individual post."""
    DiscussionClass = Discussion
    ThreadClass = Thread
    PostClass = Post
    ArtifactClass = Post
    thumbnail_size = (100, 100)

    class __mongometa__:
        polymorphic_identity = 'DiscussionAttachment'
        indexes = ['filename', 'discussion_id', 'thread_id', 'post_id']

    query: 'Query[DiscussionAttachment]'

    discussion_id = FieldProperty(schema.ObjectId)
    thread_id = FieldProperty(str)
    post_id = FieldProperty(str)
    artifact_id = FieldProperty(str)
    attachment_type = FieldProperty(str, if_missing='DiscussionAttachment')

    @property
    def discussion(self):
        """The owning Discussion document, looked up by id."""
        return self.DiscussionClass.query.get(_id=self.discussion_id)

    @property
    def thread(self):
        """The owning Thread document, looked up by id."""
        return self.ThreadClass.query.get(_id=self.thread_id)

    @property
    def post(self):
        """The owning Post document, looked up by id."""
        return self.PostClass.query.get(_id=self.post_id)

    @classmethod
    def metadata_for(cls, post):
        """Return the attachment metadata fields derived from *post*."""
        return dict(
            post_id=post._id,
            thread_id=post.thread_id,
            discussion_id=post.discussion_id,
            app_config_id=post.app_config_id)

    def url(self):
        """URL of this attachment, scoped to its most specific owner."""
        # Prefer the post, then the thread, then the discussion itself.
        if self.post_id:
            base = self.post.url()
        elif self.thread_id:
            base = self.thread.url()
        else:
            base = self.discussion.url()
        return base + 'attachment/' + h.urlquote(self.filename)
|
{
"content_hash": "19b498ebab2681fad109e3514daf4c2d",
"timestamp": "",
"source": "github",
"line_count": 860,
"max_line_length": 119,
"avg_line_length": 34.967441860465115,
"alnum_prop": 0.5725591912742751,
"repo_name": "apache/allura",
"id": "2c40db80fad3df0864e2d95883a818ccb829ab14",
"size": "30942",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Allura/allura/model/discuss.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6142"
},
{
"name": "CSS",
"bytes": "181457"
},
{
"name": "Dockerfile",
"bytes": "4748"
},
{
"name": "HTML",
"bytes": "867332"
},
{
"name": "JavaScript",
"bytes": "1191836"
},
{
"name": "Makefile",
"bytes": "6248"
},
{
"name": "Python",
"bytes": "4499987"
},
{
"name": "RAML",
"bytes": "27600"
},
{
"name": "Roff",
"bytes": "41"
},
{
"name": "Ruby",
"bytes": "1280"
},
{
"name": "SCSS",
"bytes": "27742"
},
{
"name": "Shell",
"bytes": "131207"
},
{
"name": "XSLT",
"bytes": "3357"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.