hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
794b3888cb4905fdc7ea80d46ca95c137ea50e9d
| 856
|
py
|
Python
|
internos/locations/urls.py
|
UNICEFLebanonInnovation/Staging-Neuro
|
aac1e4f335ff4ec32041f989a9c22f8581a4961a
|
[
"MIT"
] | null | null | null |
internos/locations/urls.py
|
UNICEFLebanonInnovation/Staging-Neuro
|
aac1e4f335ff4ec32041f989a9c22f8581a4961a
|
[
"MIT"
] | null | null | null |
internos/locations/urls.py
|
UNICEFLebanonInnovation/Staging-Neuro
|
aac1e4f335ff4ec32041f989a9c22f8581a4961a
|
[
"MIT"
] | null | null | null |
# URL configuration for the locations app: a DRF router for the API
# endpoints plus a handful of explicitly-routed class-based views.
from django.conf.urls import include, url
from rest_framework import routers

from . import views

app_name = 'locations'

# Router-generated endpoints for the two locations API view sets.
api = routers.SimpleRouter()
api.register(r'locations', views.LocationsViewSet, base_name='locations')
api.register(r'locations-light', views.LocationsLightViewSet, base_name='locations-light')

# Pattern order matters: the router catch-all is matched first.
urlpatterns = [
    url(regex=r'', view=include(api.urls)),
    url(
        regex=r'^locations/pcode/(?P<p_code>\w+)/$',
        view=views.LocationsViewSet.as_view({'get': 'retrieve'}),
        name='locations_detail_pcode',
    ),
    url(
        regex=r'^autocomplete/$',
        view=views.LocationQuerySetView.as_view(),
        name='locations_autocomplete',
    ),
    url(
        regex=r'^site-profile/$',
        view=views.SiteProfileView.as_view(),
        name='site_profile',
    ),
    url(
        regex=r'^Export/$',
        view=views.ExportSetView.as_view(),
        name='export',
    ),
]
| 27.612903
| 99
| 0.66472
|
794b396b21dd556d4d25300fa904680cd9641ed5
| 17,801
|
py
|
Python
|
keras/preprocessing/sequence.py
|
bmatschke/keras
|
6c392b5ad96fb47a05019e6dda42d2af1f1ec08e
|
[
"Apache-2.0"
] | null | null | null |
keras/preprocessing/sequence.py
|
bmatschke/keras
|
6c392b5ad96fb47a05019e6dda42d2af1f1ec08e
|
[
"Apache-2.0"
] | null | null | null |
keras/preprocessing/sequence.py
|
bmatschke/keras
|
6c392b5ad96fb47a05019e6dda42d2af1f1ec08e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for preprocessing sequence data.
Deprecated: `tf.keras.preprocessing.sequence` APIs are not recommended for new
code. Prefer `tf.keras.utils.timeseries_dataset_from_array` and
the `tf.data` APIs which provide a much more flexible mechanisms for dealing
with sequences. See the [tf.data guide](https://www.tensorflow.org/guide/data)
for more details.
"""
# pylint: disable=invalid-name
# pylint: disable=g-classes-have-attributes
# pylint: disable=g-direct-tensorflow-import
import json
import random
from keras.utils import data_utils
import numpy as np
from tensorflow.python.util.tf_export import keras_export
def _remove_long_seq(maxlen, seq, label):
"""Removes sequences that exceed the maximum length.
Args:
maxlen: Int, maximum length of the output sequences.
seq: List of lists, where each sublist is a sequence.
label: List where each element is an integer.
Returns:
new_seq, new_label: shortened lists for `seq` and `label`.
"""
new_seq, new_label = [], []
for x, y in zip(seq, label):
if len(x) < maxlen:
new_seq.append(x)
new_label.append(y)
return new_seq, new_label
@keras_export('keras.preprocessing.sequence.TimeseriesGenerator')
class TimeseriesGenerator(data_utils.Sequence):
    """Utility class for generating batches of temporal data.

    Deprecated: `tf.keras.preprocessing.sequence.TimeseriesGenerator` does not
    operate on tensors and is not recommended for new code. Prefer using a
    `tf.data.Dataset` which provides a more efficient and flexible mechanism for
    batching, shuffling, and windowing input. See the
    [tf.data guide](https://www.tensorflow.org/guide/data) for more details.

    This class takes in a sequence of data-points gathered at
    equal intervals, along with time series parameters such as
    stride, length of history, etc., to produce batches for
    training/validation.

    Arguments:
        data: Indexable generator (such as list or Numpy array)
            containing consecutive data points (timesteps).
            The data should be at 2D, and axis 0 is expected
            to be the time dimension.
        targets: Targets corresponding to timesteps in `data`.
            It should have same length as `data`.
        length: Length of the output sequences (in number of timesteps).
        sampling_rate: Period between successive individual timesteps
            within sequences. For rate `r`, timesteps
            `data[i]`, `data[i-r]`, ... `data[i - length]`
            are used for create a sample sequence.
        stride: Period between successive output sequences.
            For stride `s`, consecutive output samples would
            be centered around `data[i]`, `data[i+s]`, `data[i+2*s]`, etc.
        start_index: Data points earlier than `start_index` will not be used
            in the output sequences. This is useful to reserve part of the
            data for test or validation.
        end_index: Data points later than `end_index` will not be used
            in the output sequences. This is useful to reserve part of the
            data for test or validation.
        shuffle: Whether to shuffle output samples,
            or instead draw them in chronological order.
        reverse: Boolean: if `true`, timesteps in each output sample will be
            in reverse chronological order.
        batch_size: Number of timeseries samples in each batch
            (except maybe the last one).

    Returns:
        A [Sequence](
        https://www.tensorflow.org/api_docs/python/tf/keras/utils/Sequence)
        instance.

    Examples:
    ```python
    from keras.preprocessing.sequence import TimeseriesGenerator
    import numpy as np
    data = np.array([[i] for i in range(50)])
    targets = np.array([[i] for i in range(50)])
    data_gen = TimeseriesGenerator(data, targets,
                                   length=10, sampling_rate=2,
                                   batch_size=2)
    assert len(data_gen) == 20
    batch_0 = data_gen[0]
    x, y = batch_0
    assert np.array_equal(x,
                          np.array([[[0], [2], [4], [6], [8]],
                                    [[1], [3], [5], [7], [9]]]))
    assert np.array_equal(y,
                          np.array([[10], [11]]))
    ```
    """

    def __init__(self,
                 data,
                 targets,
                 length,
                 sampling_rate=1,
                 stride=1,
                 start_index=0,
                 end_index=None,
                 shuffle=False,
                 reverse=False,
                 batch_size=128):
        if len(data) != len(targets):
            raise ValueError('Data and targets have to be' + ' of same length. '
                             'Data length is {}'.format(len(data)) +
                             ' while target length is {}'.format(len(targets)))
        self.data = data
        self.targets = targets
        self.length = length
        self.sampling_rate = sampling_rate
        self.stride = stride
        # The first `length` timesteps are pure history, so the earliest
        # possible "current step" (prediction target row) is shifted forward.
        self.start_index = start_index + length
        if end_index is None:
            end_index = len(data) - 1
        self.end_index = end_index
        self.shuffle = shuffle
        self.reverse = reverse
        self.batch_size = batch_size
        if self.start_index > self.end_index:
            raise ValueError('`start_index+length=%i > end_index=%i` '
                             'is disallowed, as no part of the sequence '
                             'would be left to be used as current step.' %
                             (self.start_index, self.end_index))

    def __len__(self):
        # Ceiling-style division: number of batches needed to cover all
        # sample rows spaced `stride` apart in [start_index, end_index].
        return (self.end_index - self.start_index +
                self.batch_size * self.stride) // (
                    self.batch_size * self.stride)

    def __getitem__(self, index):
        # Choose the "current step" row for each sample in this batch.
        if self.shuffle:
            # NOTE: when shuffling, `index` is ignored and rows are drawn at
            # random (with replacement) from the full valid range.
            rows = np.random.randint(
                self.start_index, self.end_index + 1, size=self.batch_size)
        else:
            i = self.start_index + self.batch_size * self.stride * index
            rows = np.arange(
                i, min(i + self.batch_size * self.stride, self.end_index + 1),
                self.stride)
        # Each sample is the `length` timesteps of history before its row,
        # subsampled every `sampling_rate` steps; the target is the value at
        # the row itself.
        samples = np.array(
            [self.data[row - self.length:row:self.sampling_rate] for row in rows])
        targets = np.array([self.targets[row] for row in rows])
        if self.reverse:
            # Flip the time axis of every sample.
            return samples[:, ::-1, ...], targets
        return samples, targets

    def get_config(self):
        """Returns the TimeseriesGenerator configuration as Python dictionary.

        Returns:
            A Python dictionary with the TimeseriesGenerator configuration.
            Note that `data` and `targets` are embedded as JSON strings, and
            `start_index` reflects the `+ length` shift applied in `__init__`.
        """
        data = self.data
        # Numpy arrays are not JSON serializable; convert them to lists first.
        if type(self.data).__module__ == np.__name__:
            data = self.data.tolist()
        try:
            json_data = json.dumps(data)
        except TypeError as e:
            raise TypeError('Data not JSON Serializable:', data) from e
        targets = self.targets
        if type(self.targets).__module__ == np.__name__:
            targets = self.targets.tolist()
        try:
            json_targets = json.dumps(targets)
        except TypeError as e:
            raise TypeError('Targets not JSON Serializable:', targets) from e
        return {
            'data': json_data,
            'targets': json_targets,
            'length': self.length,
            'sampling_rate': self.sampling_rate,
            'stride': self.stride,
            'start_index': self.start_index,
            'end_index': self.end_index,
            'shuffle': self.shuffle,
            'reverse': self.reverse,
            'batch_size': self.batch_size
        }

    def to_json(self, **kwargs):
        """Returns a JSON string containing the timeseries generator configuration.

        Args:
            **kwargs: Additional keyword arguments
                to be passed to `json.dumps()`.

        Returns:
            A JSON string containing the tokenizer configuration.
        """
        config = self.get_config()
        timeseries_generator_config = {
            'class_name': self.__class__.__name__,
            'config': config
        }
        return json.dumps(timeseries_generator_config, **kwargs)
@keras_export('keras.utils.pad_sequences',
'keras.preprocessing.sequence.pad_sequences')
def pad_sequences(sequences, maxlen=None, dtype='int32',
                  padding='pre', truncating='pre', value=0.):
    """Pads sequences to the same length.

    This function transforms a list (of length `num_samples`)
    of sequences (lists of integers)
    into a 2D Numpy array of shape `(num_samples, num_timesteps)`.
    `num_timesteps` is either the `maxlen` argument if provided,
    or the length of the longest sequence in the list.

    Sequences that are shorter than `num_timesteps`
    are padded with `value` until they are `num_timesteps` long.
    Sequences longer than `num_timesteps` are truncated
    so that they fit the desired length.

    The position where padding or truncation happens is determined by
    the arguments `padding` and `truncating`, respectively.
    Pre-padding or removing values from the beginning of the sequence is the
    default.

    >>> sequence = [[1], [2, 3], [4, 5, 6]]
    >>> tf.keras.preprocessing.sequence.pad_sequences(sequence)
    array([[0, 0, 1],
           [0, 2, 3],
           [4, 5, 6]], dtype=int32)

    >>> tf.keras.preprocessing.sequence.pad_sequences(sequence, value=-1)
    array([[-1, -1,  1],
           [-1,  2,  3],
           [ 4,  5,  6]], dtype=int32)

    >>> tf.keras.preprocessing.sequence.pad_sequences(sequence, padding='post')
    array([[1, 0, 0],
           [2, 3, 0],
           [4, 5, 6]], dtype=int32)

    >>> tf.keras.preprocessing.sequence.pad_sequences(sequence, maxlen=2)
    array([[0, 1],
           [2, 3],
           [5, 6]], dtype=int32)

    Args:
        sequences: List of sequences (each sequence is a list of integers).
        maxlen: Optional Int, maximum length of all sequences. If not provided,
            sequences will be padded to the length of the longest individual
            sequence.
        dtype: (Optional, defaults to int32). Type of the output sequences.
            To pad sequences with variable length strings, you can use `object`.
        padding: String, 'pre' or 'post' (optional, defaults to 'pre'):
            pad either before or after each sequence.
        truncating: String, 'pre' or 'post' (optional, defaults to 'pre'):
            remove values from sequences larger than
            `maxlen`, either at the beginning or at the end of the sequences.
        value: Float or String, padding value. (Optional, defaults to 0.)

    Returns:
        Numpy array with shape `(len(sequences), maxlen)`

    Raises:
        ValueError: In case of invalid values for `truncating` or `padding`,
            or in case of invalid shape for a `sequences` entry.
    """
    if not hasattr(sequences, '__len__'):
        raise ValueError('`sequences` must be iterable.')
    num_samples = len(sequences)

    lengths = []
    sample_shape = ()
    flag = True

    # take the sample shape from the first non empty sequence
    # checking for consistency in the main loop below.
    for x in sequences:
        try:
            lengths.append(len(x))
            if flag and len(x):
                sample_shape = np.asarray(x).shape[1:]
                flag = False
        except TypeError as e:
            raise ValueError('`sequences` must be a list of iterables. '
                             'Found non-iterable: ' + str(x)) from e

    if maxlen is None:
        # `np.max` raises on an empty list; an empty `sequences` input now
        # yields an empty (0, 0) array instead of crashing.
        maxlen = np.max(lengths) if lengths else 0

    # `np.unicode_` was removed in NumPy 2.0; on Python 3, `np.str_` already
    # covers all unicode string dtypes, so a single check suffices.
    is_dtype_str = np.issubdtype(dtype, np.str_)
    if isinstance(value, str) and dtype != object and not is_dtype_str:
        raise ValueError(
            "`dtype` {} is not compatible with `value`'s type: {}\n"
            'You should set `dtype=object` for variable length strings.'.format(
                dtype, type(value)))

    # Pre-fill the output with the padding value, then overwrite each row
    # with its (possibly truncated) sequence.
    x = np.full((num_samples, maxlen) + sample_shape, value, dtype=dtype)
    for idx, s in enumerate(sequences):
        if not len(s):  # pylint: disable=g-explicit-length-test
            continue  # empty list/array was found
        if truncating == 'pre':
            trunc = s[-maxlen:]  # pylint: disable=invalid-unary-operand-type
        elif truncating == 'post':
            trunc = s[:maxlen]
        else:
            raise ValueError('Truncating type "%s" ' 'not understood' % truncating)

        # check `trunc` has expected shape
        trunc = np.asarray(trunc, dtype=dtype)
        if trunc.shape[1:] != sample_shape:
            raise ValueError('Shape of sample %s of sequence at position %s '
                             'is different from expected shape %s' %
                             (trunc.shape[1:], idx, sample_shape))

        if padding == 'post':
            x[idx, :len(trunc)] = trunc
        elif padding == 'pre':
            x[idx, -len(trunc):] = trunc
        else:
            raise ValueError('Padding type "%s" not understood' % padding)
    return x
@keras_export('keras.preprocessing.sequence.make_sampling_table')
def make_sampling_table(size, sampling_factor=1e-5):
    """Generates a word rank-based probabilistic sampling table.

    Used for generating the `sampling_table` argument for `skipgrams`.
    `sampling_table[i]` is the probability of sampling
    the word i-th most common word in a dataset
    (more common words should be sampled less frequently, for balance).

    The sampling probabilities are generated according
    to the sampling distribution used in word2vec:

    ```
    p(word) = (min(1, sqrt(word_frequency / sampling_factor) /
        (word_frequency / sampling_factor)))
    ```

    We assume that the word frequencies follow Zipf's law (s=1) to derive
    a numerical approximation of frequency(rank):

    `frequency(rank) ~ 1/(rank * (log(rank) + gamma) + 1/2 - 1/(12*rank))`
    where `gamma` is the Euler-Mascheroni constant.

    Args:
        size: Int, number of possible words to sample.
        sampling_factor: The sampling factor in the word2vec formula.

    Returns:
        A 1D Numpy array of length `size` where the ith entry
        is the probability that a word of rank i should be sampled.
    """
    euler_gamma = 0.577
    ranks = np.arange(size)
    # Rank 0 would produce log(0); clamp it to 1 (index 0 is a non-word).
    ranks[0] = 1
    # Zipf-law approximation of the inverse frequency at each rank.
    approx_inv_freq = ranks * (np.log(ranks) + euler_gamma) + 0.5 - 1. / (12. * ranks)
    scaled = sampling_factor * approx_inv_freq
    # min(1, sqrt(f)) written as f / sqrt(f) to mirror the word2vec formula.
    return np.minimum(1., scaled / np.sqrt(scaled))
@keras_export('keras.preprocessing.sequence.skipgrams')
def skipgrams(sequence,
              vocabulary_size,
              window_size=4,
              negative_samples=1.,
              shuffle=True,
              categorical=False,
              sampling_table=None,
              seed=None):
    """Generates skipgram word pairs.

    This function transforms a sequence of word indexes (list of integers)
    into tuples of words of the form:

    - (word, word in the same window), with label 1 (positive samples).
    - (word, random word from the vocabulary), with label 0 (negative samples).

    Read more about Skipgram in this gnomic paper by Mikolov et al.:
    [Efficient Estimation of Word Representations in
    Vector Space](http://arxiv.org/pdf/1301.3781v3.pdf)

    Args:
        sequence: A word sequence (sentence), encoded as a list
            of word indices (integers). If using a `sampling_table`,
            word indices are expected to match the rank
            of the words in a reference dataset (e.g. 10 would encode
            the 10-th most frequently occurring token).
            Note that index 0 is expected to be a non-word and will be skipped.
        vocabulary_size: Int, maximum possible word index + 1
        window_size: Int, size of sampling windows (technically half-window).
            The window of a word `w_i` will be
            `[i - window_size, i + window_size+1]`.
        negative_samples: Float >= 0. 0 for no negative (i.e. random) samples.
            1 for same number as positive samples.
        shuffle: Whether to shuffle the word couples before returning them.
        categorical: bool. if False, labels will be
            integers (eg. `[0, 1, 1 .. ]`),
            if `True`, labels will be categorical, e.g.
            `[[1,0],[0,1],[0,1] .. ]`.
        sampling_table: 1D array of size `vocabulary_size` where the entry i
            encodes the probability to sample a word of rank i.
        seed: Random seed.

    Returns:
        couples, labels: where `couples` are int pairs and
            `labels` are either 0 or 1.

    Note:
        By convention, index 0 in the vocabulary is
        a non-word and will be skipped.
    """
    couples = []
    labels = []
    for i, wi in enumerate(sequence):
        if not wi:
            continue  # index 0 is reserved for non-words
        if sampling_table is not None:
            # Probabilistically subsample frequent (low-rank) words.
            if sampling_table[wi] < random.random():
                continue

        window_start = max(0, i - window_size)
        window_end = min(len(sequence), i + window_size + 1)
        for j in range(window_start, window_end):
            if j != i:
                wj = sequence[j]
                if not wj:
                    continue
                couples.append([wi, wj])
                if categorical:
                    labels.append([0, 1])
                else:
                    labels.append(1)

    if negative_samples > 0:
        num_negative_samples = int(len(labels) * negative_samples)
        # Pair real center words (in shuffled order) with random vocab words.
        words = [c[0] for c in couples]
        random.shuffle(words)

        couples += [[words[i % len(words)],
                     random.randint(1, vocabulary_size - 1)]
                    for i in range(num_negative_samples)]
        if categorical:
            labels += [[1, 0]] * num_negative_samples
        else:
            labels += [0] * num_negative_samples

    if shuffle:
        if seed is None:
            # `random.randint` requires integer bounds; `10e6` is a float,
            # which is an error on Python >= 3.12, so cast explicitly.
            seed = random.randint(0, int(10e6))
        random.seed(seed)
        random.shuffle(couples)
        # Re-seed so `labels` receives the exact same permutation as `couples`.
        random.seed(seed)
        random.shuffle(labels)

    return couples, labels
| 36.107505
| 80
| 0.641818
|
794b397698a9c335aa2d6fb69c60951394ee301e
| 3,420
|
py
|
Python
|
pyJHTDB/__init__.py
|
lento234/pyJHTDB
|
9d525b790037456839ce82a88a086faabf034c67
|
[
"Apache-2.0"
] | null | null | null |
pyJHTDB/__init__.py
|
lento234/pyJHTDB
|
9d525b790037456839ce82a88a086faabf034c67
|
[
"Apache-2.0"
] | null | null | null |
pyJHTDB/__init__.py
|
lento234/pyJHTDB
|
9d525b790037456839ce82a88a086faabf034c67
|
[
"Apache-2.0"
] | null | null | null |
########################################################################
#
# Copyright 2014 Johns Hopkins University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Contact: turbulence@pha.jhu.edu
# Website: http://turbulence.pha.jhu.edu/
#
########################################################################
"""
Python tools and wrappers for the Johns Hopkins Turbulence Database
Cluster C library.
Contact: turbulence@pha.jhu.edu
Website: http://turbulence.pha.jhu.edu/
Although this particular Python wrapper is still a work in progress, it
is mature enough to be used in production work.
On first contact with this library, we recommend that you first run
"test_plain". To be more specific:
.. code:: python
>>> from pyJHTDB import test_plain
>>> test_plain()
The code that is executed can be found in "pyJHTDB/test.py", and it's
the simplest example of how to access the turbulence database.
"""
import os
import os.path
import sys
import numpy as np
import ctypes
import platform

from pkg_resources import get_distribution, DistributionNotFound

# Resolve __version__ from the installed distribution, falling back to a
# placeholder when the package has not been installed.
try:
    _dist = get_distribution('pyJHTDB')
    # Normalize case for Windows systems
    dist_loc = os.path.normcase(_dist.location)
    here = os.path.normcase(__file__)
    if not here.startswith(os.path.join(dist_loc, 'pyJHTDB')):
        # not installed, but there is another version that *is*
        raise DistributionNotFound
except DistributionNotFound:
    __version__ = 'Please install this project with setup.py'
else:
    __version__ = _dist.version

# Default (guest) token for the turbulence database web services.
auth_token = 'edu.jhu.pha.turbulence.testing-201406'
homefolder = os.path.expanduser('~')
lib_folder = os.path.join(homefolder, '.config/', 'JHTDB/')

version_info = "20200827.0"
version = str(version_info)

# check if .config/JHTDB folder exists, create it if not
#if os.path.isdir(lib_folder):
#    if os.path.exists(os.path.join(lib_folder, 'auth_token.txt')):
#        auth_token = str(open(os.path.join(lib_folder, 'auth_token.txt'), 'r').readline().split()[0])
#    else:
#        open(os.path.join(lib_folder, 'auth_token.txt'), 'w').write(auth_token)
#else:
#    os.mkdir(lib_folder)
#    open(os.path.join(lib_folder, 'auth_token.txt'), 'w').write(auth_token)

# Optional dependencies: record availability flags so downstream code can
# degrade gracefully instead of failing at import time.
try:
    import h5py
    found_h5py = True
except ImportError:
    found_h5py = False
    print('h5py not found. cutout functionality not available.')

try:
    import matplotlib
    found_matplotlib = True
except ImportError:
    found_matplotlib = False
    print('matplotlib not found. plotting functionality not available.')

try:
    import scipy
    found_scipy = True
except ImportError:
    found_scipy = False
    print('scipy not found. not all interpolation functionality available')

from .libJHTDB import *
from .test import test_plain

# Use the module-level flag directly: the previous `pyJHTDB.found_matplotlib`
# spelling only resolved if the name `pyJHTDB` leaked in through the star
# import above; the local flag has the same value and always resolves.
if found_matplotlib:
    from .test import test_misc
    from .test import test_interp_1D as test_interpolator
    from .generic_splines import main0 as test_gs
| 31.376147
| 102
| 0.708772
|
794b39b1cb7237feaedd8131505394127afc5805
| 1,664
|
py
|
Python
|
scripts/prepare_genre_data.py
|
eldrin/lyricpsych-tasks
|
10f340e0e3e83e37a9611cb64d8f0db366b3a98e
|
[
"MIT"
] | null | null | null |
scripts/prepare_genre_data.py
|
eldrin/lyricpsych-tasks
|
10f340e0e3e83e37a9611cb64d8f0db366b3a98e
|
[
"MIT"
] | null | null | null |
scripts/prepare_genre_data.py
|
eldrin/lyricpsych-tasks
|
10f340e0e3e83e37a9611cb64d8f0db366b3a98e
|
[
"MIT"
] | null | null | null |
from os.path import join, dirname
import sys
import argparse
sys.path.append(join(dirname(__file__), '..'))
import h5py
import numpy as np
from lyricpsych.data import load_mxm2msd
def main(msd_tagtraum_fn, mxm_h5_fn, out_fn):
    """Filter MSD-TagTraum genre labels down to MxM tracks and save as HDF5.

    Args:
        msd_tagtraum_fn: path to the tab-separated MSD-TagTraum genre file
            ('#'-prefixed lines are comments).
        mxm_h5_fn: path to the song-level lyric feature HDF5 file; track ids
            are read from its 'features/ids' dataset.
        out_fn: output path for the HDF5 file with 'tracks' and 'genre'
            datasets (variable-length strings, aligned by index).
    """
    # load relevant data
    mxm2msd = load_mxm2msd()
    # (the reverse msd->mxm map was computed here before, but never used)
    with h5py.File(mxm_h5_fn, 'r') as hf:
        mxm_tids = [mxm2msd[tid] for tid in hf['features']['ids'][:]]
    mxm_tids_set = set(mxm_tids)
    with open(msd_tagtraum_fn, 'r') as f:
        genre = [l.strip().split('\t') for l in f if l[0] != '#']

    # filter out songs that are included in the MxM
    genre = [(tid, g) for tid, g in genre if tid in mxm_tids_set]

    # save to the file
    genres = np.array(
        [g for tid, g in genre], dtype=h5py.special_dtype(vlen=str)
    )
    tids = np.array(
        [tid for tid, g in genre], dtype=h5py.special_dtype(vlen=str)
    )
    with h5py.File(out_fn, 'w') as hf:
        hf.create_dataset('tracks', data=tids)
        hf.create_dataset('genre', data=genres)
# CLI entry point: parse the three positional paths and run the pipeline.
if __name__ == "__main__":
    # setup argparser
    parser = argparse.ArgumentParser()
    parser.add_argument('msd_tagtraum_fn', type=str,
                        help='path where all genre (MSD-TagTraum) data is stored')
    parser.add_argument('mxm_h5_fn', type=str,
                        help='filename to the song-level lyric features')
    parser.add_argument('out_fn', type=str,
                        help='path to dump processed h5 file')
    args = parser.parse_args()

    # run
    main(args.msd_tagtraum_fn, args.mxm_h5_fn, args.out_fn)
| 32
| 82
| 0.614183
|
794b3db1e35d458e96042c68cae3b784081616ca
| 391
|
py
|
Python
|
py-data/sight2sound/problems/api-related/1/correct-usages/main.py
|
ualberta-smr/NFBugs
|
65d9ef603e9527b3d83f53af0606b1ae240513f1
|
[
"MIT"
] | 3
|
2019-10-01T19:58:24.000Z
|
2021-09-17T04:03:21.000Z
|
py-data/sight2sound/problems/api-related/1/correct-usages/main.py
|
senseconcordia/NFBugsExtended
|
60058ccbd64107018a92ede73056d08ecbdaaed2
|
[
"MIT"
] | 22
|
2018-08-23T15:15:37.000Z
|
2019-03-15T17:09:41.000Z
|
py-data/sight2sound/problems/api-related/1/correct-usages/main.py
|
senseconcordia/NFBugsExtended
|
60058ccbd64107018a92ede73056d08ecbdaaed2
|
[
"MIT"
] | 1
|
2019-02-11T18:26:36.000Z
|
2019-02-11T18:26:36.000Z
|
import hilbert_curve as hc
from PIL import Image
import math
from time import sleep
import alsaaudio
import numpy as np
from threading import Thread, Lock
def main():
    """Repeatedly load `input_file` as grayscale and sample its pixels along a Hilbert curve."""
    # NOTE(review): `input_file`, `x`, and `y` are not defined anywhere in this
    # file — presumably module-level globals set elsewhere in the project; as
    # written, the first iteration raises NameError. TODO confirm.
    while True:
        # Re-read the image each pass; "L" converts to 8-bit grayscale.
        img = Image.open(input_file).convert("L")
        pixels = img.load()
        # hc.d2xy appears to map a distance along the Hilbert curve to (x, y)
        # pixel coordinates; `output` collects pixel values in curve order.
        output = [
            pixels[hc.d2xy(math.log(x * y, 2), i)]
            for i in range(x*x)
        ]
| 21.722222
| 54
| 0.588235
|
794b3df4330f110bedc644486fd0b30108cde44b
| 3,761
|
py
|
Python
|
allennlp/training/callbacks/console_logger.py
|
jbrry/allennlp
|
d906175d953bebcc177567ec0157220c3bd1b9ad
|
[
"Apache-2.0"
] | 2
|
2022-01-02T12:15:21.000Z
|
2022-01-02T12:15:23.000Z
|
allennlp/training/callbacks/console_logger.py
|
jbrry/allennlp
|
d906175d953bebcc177567ec0157220c3bd1b9ad
|
[
"Apache-2.0"
] | 56
|
2020-03-14T21:10:07.000Z
|
2022-03-28T13:04:57.000Z
|
allennlp/training/callbacks/console_logger.py
|
jbrry/allennlp
|
d906175d953bebcc177567ec0157220c3bd1b9ad
|
[
"Apache-2.0"
] | 3
|
2020-09-22T17:35:53.000Z
|
2022-02-08T01:03:03.000Z
|
import logging
from typing import List, Dict, Any, Optional, TYPE_CHECKING
import torch
from allennlp.training.callbacks.callback import TrainerCallback
from allennlp.training.util import get_train_and_validation_metrics
from allennlp.data import TensorDict
if TYPE_CHECKING:
from allennlp.training.trainer import GradientDescentTrainer
logger = logging.getLogger(__name__)
@TrainerCallback.register("console_logger")
class ConsoleLoggerCallback(TrainerCallback):
    """Logs batch inputs (optionally, once) and per-epoch metrics to the console."""

    def __init__(
        self,
        serialization_dir: str,
        should_log_inputs: bool = False,
    ) -> None:
        super().__init__(serialization_dir)
        # When True, the inputs of the very first training batch are dumped.
        self._should_log_inputs = should_log_inputs

    def on_batch(
        self,
        trainer: "GradientDescentTrainer",
        batch_inputs: List[TensorDict],
        batch_outputs: List[Dict[str, Any]],
        batch_metrics: Dict[str, Any],
        epoch: int,
        batch_number: int,
        is_training: bool,
        is_primary: bool = True,
        batch_grad_norm: Optional[float] = None,
        **kwargs,
    ) -> None:
        if not is_primary:
            return None
        # We only want to do this for the first batch in the first epoch.
        if batch_number == 1 and epoch == 0 and self._should_log_inputs:
            logger.info("Batch inputs")
            # The enumerate index was unused; iterate the batches directly.
            for batch in batch_inputs:
                self._log_fields(batch, log_prefix="batch_input")  # type: ignore

    def _log_fields(self, fields: Dict, log_prefix: str = ""):
        """Recursively logs every field in `fields`, prefixing keys with their path."""
        for key, val in fields.items():
            key = log_prefix + "/" + key
            if isinstance(val, dict):
                self._log_fields(val, key)
            elif isinstance(val, torch.Tensor):
                # Temporarily truncate tensor printing, then restore the
                # library default (1000).
                torch.set_printoptions(threshold=2)
                logger.info("%s (Shape: %s)\n%s", key, " x ".join([str(x) for x in val.shape]), val)
                torch.set_printoptions(threshold=1000)
            elif isinstance(val, list):
                # (was `isinstance(val, typing.List)`, which is deprecated)
                # NOTE(review): assumes a non-empty list — `val[0]` would
                # raise on an empty field.
                logger.info('Field : "%s" : (Length %d of type "%s")', key, len(val), type(val[0]))
            elif isinstance(val, str):
                logger.info('Field : "{}" : "{:20.20} ..."'.format(key, val))
            else:
                logger.info('Field : "%s" : %s', key, val)

    def on_epoch(
        self,
        trainer: "GradientDescentTrainer",
        metrics: Dict[str, Any],
        epoch: int,
        is_primary: bool = True,
        **kwargs,
    ) -> None:
        if not is_primary:
            return None
        train_metrics, val_metrics = get_train_and_validation_metrics(metrics)
        metric_names = set(train_metrics.keys())
        if val_metrics is not None:
            metric_names.update(val_metrics.keys())
        val_metrics = val_metrics or {}

        # Column templates for metrics present in both / only one of the runs.
        dual_message_template = "%s | %8.3f | %8.3f"
        no_val_message_template = "%s | %8.3f | %8s"
        no_train_message_template = "%s | %8s | %8.3f"
        header_template = "%s | %-10s"
        # `default=0` keeps this from raising when there are no metrics at all.
        name_length = max((len(x) for x in metric_names), default=0)
        logger.info(header_template, "Training".rjust(name_length + 13), "Validation")
        for name in sorted(metric_names):
            train_metric = train_metrics.get(name)
            val_metric = val_metrics.get(name)
            if val_metric is not None and train_metric is not None:
                logger.info(
                    dual_message_template, name.ljust(name_length), train_metric, val_metric
                )
            elif val_metric is not None:
                logger.info(no_train_message_template, name.ljust(name_length), "N/A", val_metric)
            elif train_metric is not None:
                logger.info(no_val_message_template, name.ljust(name_length), train_metric, "N/A")
| 36.514563
| 100
| 0.604626
|
794b3e756dfdc687f3ddfcd55882df3eb12eb173
| 6,985
|
py
|
Python
|
superset/views/database/views.py
|
GrahamDownhill/incubator-superset
|
e55d8bbb26b3415e82ed2c443cac196b84a3af88
|
[
"Apache-2.0"
] | 1
|
2020-11-06T17:14:49.000Z
|
2020-11-06T17:14:49.000Z
|
superset/views/database/views.py
|
GrahamDownhill/incubator-superset
|
e55d8bbb26b3415e82ed2c443cac196b84a3af88
|
[
"Apache-2.0"
] | 2
|
2019-08-22T08:12:12.000Z
|
2020-04-27T11:38:04.000Z
|
superset/views/database/views.py
|
GrahamDownhill/incubator-superset
|
e55d8bbb26b3415e82ed2c443cac196b84a3af88
|
[
"Apache-2.0"
] | 1
|
2020-10-08T07:26:19.000Z
|
2020-10-08T07:26:19.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import tempfile
from typing import TYPE_CHECKING
from flask import flash, g, redirect
from flask_appbuilder import SimpleFormView
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import lazy_gettext as _
from wtforms.fields import StringField
from wtforms.validators import ValidationError
import superset.models.core as models
from superset import app, db
from superset.connectors.sqla.models import SqlaTable
from superset.constants import RouteMethod
from superset.exceptions import CertificateException
from superset.utils import core as utils
from superset.views.base import DeleteMixin, SupersetModelView, YamlExportMixin
from .forms import CsvToDatabaseForm
from .mixins import DatabaseMixin
from .validators import schema_allows_csv_upload, sqlalchemy_uri_validator
if TYPE_CHECKING:
from werkzeug.datastructures import FileStorage # pylint: disable=unused-import
# Module-level shortcuts into the Flask application configuration.
config = app.config
stats_logger = config["STATS_LOGGER"]
def sqlalchemy_uri_form_validator(_, field: StringField) -> None:
    """
    Check if user has submitted a valid SQLAlchemy URI
    """
    # WTForms validator signature is (form, field); the form itself is unused.
    # Delegates to the shared validator, raising ValidationError on bad URIs.
    sqlalchemy_uri_validator(field.data, exception=ValidationError)
def certificate_form_validator(_, field: StringField) -> None:
    """
    Check if user has submitted a valid SSL certificate
    """
    # WTForms validator signature is (form, field); the form itself is unused.
    if field.data:
        try:
            utils.parse_ssl_cert(field.data)
        except CertificateException as ex:
            # Chain the original exception so the underlying parse failure is
            # preserved in the traceback (explicit `from` instead of implicit
            # "during handling ..." context).
            raise ValidationError(ex.message) from ex
def upload_stream_write(form_file_field: "FileStorage", path: str):
    """Stream an uploaded file to `path`, reading it in configured-size chunks."""
    chunk_size = app.config["UPLOAD_CHUNK_SIZE"]
    with open(path, "bw") as file_description:
        # `iter` with a b"" sentinel stops as soon as the stream is exhausted.
        for chunk in iter(lambda: form_file_field.stream.read(chunk_size), b""):
            file_description.write(chunk)
class DatabaseView(
    DatabaseMixin, SupersetModelView, DeleteMixin, YamlExportMixin
):  # pylint: disable=too-many-ancestors
    """CRUD view for Database models, with YAML export support."""
    datamodel = SQLAInterface(models.Database)
    include_route_methods = RouteMethod.CRUD_SET
    add_template = "superset/models/database/add.html"
    edit_template = "superset/models/database/edit.html"
    # Per-column form validators (see validator functions above).
    validators_columns = {
        "sqlalchemy_uri": [sqlalchemy_uri_form_validator],
        "server_cert": [certificate_form_validator],
    }
    # Top-level key used when exporting databases to YAML.
    yaml_dict_key = "databases"
    def _delete(self, pk):
        # Delegate to DeleteMixin so cascading/permission logic applies.
        DeleteMixin._delete(self, pk)
class CsvToDatabaseView(SimpleFormView):
    """
    Form view that uploads a CSV file and materializes it as a table
    in a target database, registering a SqlaTable for it in Superset.
    """
    form = CsvToDatabaseForm
    form_template = "superset/form_view/csv_to_database_view/edit.html"
    form_title = _("CSV to Database configuration")
    add_columns = ["database", "schema", "table_name"]
    def form_get(self, form):
        """Pre-populate the form with pandas-style CSV parsing defaults."""
        form.sep.data = ","
        form.header.data = 0
        form.mangle_dupe_cols.data = True
        form.skipinitialspace.data = False
        form.skip_blank_lines.data = True
        form.infer_datetime_format.data = True
        form.decimal.data = "."
        form.if_exists.data = "fail"
    def form_post(self, form):
        """
        Handle the upload: validate schema permission, stream the CSV to a
        temp file, create the table, and register/refresh its SqlaTable.
        """
        database = form.con.data
        schema_name = form.schema.data or ""
        # Reject uploads to schemas the database config does not allow.
        if not schema_allows_csv_upload(database, schema_name):
            message = _(
                'Database "%(database_name)s" schema "%(schema_name)s" '
                "is not allowed for csv uploads. Please contact your Superset Admin.",
                database_name=database.database_name,
                schema_name=schema_name,
            )
            flash(message, "danger")
            return redirect("/csvtodatabaseview/form")
        csv_filename = form.csv_file.data.filename
        extension = os.path.splitext(csv_filename)[1].lower()
        # Reserve a temp file path (delete=False: we manage cleanup below).
        path = tempfile.NamedTemporaryFile(
            dir=app.config["UPLOAD_FOLDER"], suffix=extension, delete=False
        ).name
        form.csv_file.data.filename = path
        try:
            utils.ensure_path_exists(config["UPLOAD_FOLDER"])
            upload_stream_write(form.csv_file.data, path)
            table_name = form.name.data
            # NOTE(review): `form.con.data` was already the database above;
            # here the code re-reads it via form.data and calls
            # `con.data.get("id")` — confirm this attribute chain is correct
            # for the field type in use.
            con = form.data.get("con")
            database = (
                db.session.query(models.Database).filter_by(id=con.data.get("id")).one()
            )
            database.db_engine_spec.create_table_from_csv(form, database)
            # Reuse an existing SqlaTable registration if one matches.
            table = (
                db.session.query(SqlaTable)
                .filter_by(
                    table_name=table_name,
                    schema=form.schema.data,
                    database_id=database.id,
                )
                .one_or_none()
            )
            if table:
                table.fetch_metadata()
            if not table:
                table = SqlaTable(table_name=table_name)
                table.database = database
                table.database_id = database.id
                table.user_id = g.user.id
                table.schema = form.schema.data
                table.fetch_metadata()
                db.session.add(table)
            db.session.commit()
        except Exception as e:  # pylint: disable=broad-except
            # Any failure: roll back, best-effort temp-file cleanup, report.
            db.session.rollback()
            try:
                os.remove(path)
            except OSError:
                pass
            message = _(
                'Unable to upload CSV file "%(filename)s" to table '
                '"%(table_name)s" in database "%(db_name)s". '
                "Error message: %(error_msg)s",
                filename=csv_filename,
                table_name=form.name.data,
                db_name=database.database_name,
                error_msg=str(e),
            )
            flash(message, "danger")
            stats_logger.incr("failed_csv_upload")
            return redirect("/csvtodatabaseview/form")
        os.remove(path)
        # Go back to welcome page / splash screen
        message = _(
            'CSV file "%(csv_filename)s" uploaded to table "%(table_name)s" in '
            'database "%(db_name)s"',
            csv_filename=csv_filename,
            table_name=form.name.data,
            db_name=table.database.database_name,
        )
        flash(message, "info")
        stats_logger.incr("successful_csv_upload")
        return redirect("/tablemodelview/list/")
| 36.19171
| 88
| 0.647817
|
794b3e7787409906f48a6dfdb7f4ee39b4994224
| 5,511
|
py
|
Python
|
LBGM/src/data/distributor.py
|
sidsrini12/FURL_Sim
|
55b420a771858c06f1aef58f48bb68302be36621
|
[
"MIT"
] | null | null | null |
LBGM/src/data/distributor.py
|
sidsrini12/FURL_Sim
|
55b420a771858c06f1aef58f48bb68302be36621
|
[
"MIT"
] | null | null | null |
LBGM/src/data/distributor.py
|
sidsrini12/FURL_Sim
|
55b420a771858c06f1aef58f48bb68302be36621
|
[
"MIT"
] | null | null | null |
from collections import defaultdict
from networkx import is_connected
from networkx.generators.geometric import random_geometric_graph
from networkx.generators.random_graphs import erdos_renyi_graph
import numpy as np
import random
from sklearn.model_selection import train_test_split
import syft as sy
import torch
def get_cluster_sizes(total, num_parts, uniform=True):
    """
    Split `total` items into `num_parts` part sizes.

    With uniform=True the sizes are as equal as possible (remainder handed
    out one item each to the first parts); otherwise the cut points are
    drawn at random. Returns a list of `num_parts` sizes summing to total.
    """
    if uniform:
        base = total // num_parts
        sizes = [base] * num_parts
        # Distribute the remainder, one extra item per leading part.
        for idx in range(total - base * num_parts):
            sizes[idx] += 1
        return sizes
    # Random partition: sorted random cut points define the part sizes.
    cuts = [0] + sorted(random.sample(range(2, total), num_parts - 1)) + [total]
    return [cuts[k] - cuts[k - 1] for k in range(1, len(cuts))]
def assign_classes(classes, num_nodes, n):
    """
    Randomly deal `n` classes to each of `num_nodes` nodes.

    Classes are drawn round-robin from repeatedly reshuffled copies of
    `classes`. Returns (per-node sorted class lists, mapping from class
    to the list of node ids holding it).
    """
    total = n * num_nodes
    num_classes = len(classes)
    shuffled = classes[np.random.permutation(num_classes)]
    drawn = []
    cursor = 0
    # Keep drawing until every node can receive n classes; reshuffle
    # whenever the current pass is exhausted.
    while len(drawn) < total:
        drawn.append(shuffled[cursor])
        cursor += 1
        if cursor == len(shuffled):
            cursor = 0
            shuffled = shuffled[np.random.permutation(num_classes)]
    stacks = [sorted(drawn[node * n: node * n + n])
              for node in range(num_nodes)]
    mapping = defaultdict(list)
    for node_id, stack in enumerate(stacks):
        for cls in stack:
            mapping[cls].append(node_id)
    return stacks, mapping
def get_distributed_data(X_train, y_train, num_parts,
                         stratify=True, repeat=False,
                         uniform=True, shuffle=True,
                         non_iid=10, num_classes=10,
                         class_map=None):
    """
    Split a training set across `num_parts` workers.

    IID case (non_iid == num_classes or non_iid == 0, with uniform=True):
    uses iterated train_test_split. Non-IID case: assigns `non_iid`
    classes per node (via `class_map`, generated if not given) and splits
    each class's samples among the nodes that own it.

    Returns (list of X parts, list of y parts, class_map).
    """
    if shuffle:
        # Several permutation passes for extra mixing.
        for _ in range(10):
            perm = np.random.permutation(X_train.shape[0])
            X_train = X_train[perm]
            y_train = y_train[perm]
    X_trains = []
    y_trains = []
    if (non_iid == num_classes or not non_iid) and uniform:
        # IID split: peel off an equal share for each part in turn.
        for i in range(num_parts-1):
            test_size = 1/(num_parts-i)
            if stratify and non_iid:
                X_train, X_iter, y_train, y_iter = train_test_split(
                    X_train, y_train, stratify=y_train, test_size=test_size)
            else:
                X_train, X_iter, y_train, y_iter = train_test_split(
                    X_train, y_train, test_size=test_size)
            X_trains.append(X_iter)
            y_trains.append(y_iter)
        X_trains.append(X_train)
        y_trains.append(y_train)
    else:
        # Non-IID split: group samples by class first.
        X_train_class = {}
        y_train_class = {}
        # BUG FIX: was hard-coded `range(10)`, which dropped any class
        # label >= 10; iterate over all `num_classes` instead.
        for cls in range(num_classes):
            indices = torch.where(y_train == cls)
            X_train_class[cls] = X_train[indices]
            y_train_class[cls] = y_train[indices]
        if not class_map:
            _, class_map = assign_classes(
                np.unique(y_train), num_parts, non_iid)
        X_trains = [[] for _ in range(num_parts)]
        y_trains = [[] for _ in range(num_parts)]
        for cls, node_list in class_map.items():
            X_cls = X_train_class[cls]
            y_cls = y_train_class[cls]
            num_splits = len(node_list)
            if uniform:
                # Equal-size splits; spread the remainder over the
                # leading cut points.
                split_size = X_cls.shape[0]//num_splits
                crossover = np.array([_*split_size
                                      for _ in range(num_splits+1)])
                remaining = X_cls.shape[0] - crossover[-1]
                for idx in range(remaining):
                    crossover[idx:] += 1
                assert crossover[-1] == X_cls.shape[0]
            else:
                crossover = [0] + \
                    list(sorted(random.sample(
                        range(2, X_cls.shape[0]), num_splits-1))) \
                    + [X_cls.shape[0]]
            for id_ in range(len(node_list)):
                X_trains[node_list[id_]].append(
                    X_cls[crossover[id_]: crossover[id_+1]])
                y_trains[node_list[id_]].append(
                    y_cls[crossover[id_]: crossover[id_+1]])
        for id_ in range(num_parts):
            X_trains[id_] = torch.cat(X_trains[id_], dim=0)
            y_trains[id_] = torch.cat(y_trains[id_], dim=0)
    assert len(X_trains) == num_parts
    return X_trains, y_trains, class_map
def get_distributed_data_using_loader(train_loader):
    """
    Collect every batch of `train_loader` into two parallel lists:
    one of input batches, one of target batches.
    """
    inputs, targets = [], []
    for data, target in train_loader:
        inputs.append(data)
        targets.append(target)
    return inputs, targets
def get_fl_graph(hook, num_workers):
    """
    Build a two-layer star topology for federated model averaging:
    `num_workers` leaf workers on layer 0 and a single aggregator on
    layer 1. Returns (aggregation map, name->VirtualWorker dict).
    """
    workers = {}
    agg_map = {}
    # Layer 0: one leaf worker per participant.
    for idx in range(num_workers):
        leaf = 'L{}_W{}'.format(0, idx)
        workers[leaf] = sy.VirtualWorker(hook, id=leaf)
    # Layer 1: the single aggregator averaging all layer-0 workers.
    aggregator = 'L1_W0'
    workers[aggregator] = sy.VirtualWorker(hook, id=aggregator)
    agg_map[aggregator] = [name for name in workers.keys() if 'L0' in name]
    return agg_map, workers
def get_connected_graph(num_nodes, param, topology='rgg', retries=10):
    """
    Generate a random connected graph.

    topology 'rgg' uses a random geometric graph (param = radius),
    'er' an Erdos-Renyi graph (param = edge probability). Regenerates
    until connected; returns False after `retries` failed retries.

    :raises ValueError: for an unknown topology name.
    """
    if topology == 'rgg':
        generator = random_geometric_graph
    elif topology == 'er':
        generator = erdos_renyi_graph
    else:
        # Fail fast: previously an unknown topology left `generator`
        # unbound and crashed with UnboundLocalError below.
        raise ValueError('unknown topology: {}'.format(topology))
    graph = generator(num_nodes, param)
    counter = 0
    while not is_connected(graph):
        graph = generator(num_nodes, param)
        counter += 1
        if counter > retries:
            return False
    return graph
| 32.22807
| 76
| 0.58229
|
794b3f52ff9027b4a33071312ef511bc02c968af
| 8,342
|
py
|
Python
|
tests/test_misc.py
|
pl77/sabnzbd
|
7e87a0c759944966ce7318134d8ed89b569ae73f
|
[
"0BSD",
"PSF-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause"
] | null | null | null |
tests/test_misc.py
|
pl77/sabnzbd
|
7e87a0c759944966ce7318134d8ed89b569ae73f
|
[
"0BSD",
"PSF-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause"
] | null | null | null |
tests/test_misc.py
|
pl77/sabnzbd
|
7e87a0c759944966ce7318134d8ed89b569ae73f
|
[
"0BSD",
"PSF-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python3 -OO
# Copyright 2007-2019 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
tests.test_misc - Testing functions in misc.py
"""
import datetime
from sabnzbd import misc
from sabnzbd import lang
from sabnzbd.config import ConfigCat
from tests.testhelper import *
class TestMisc:
    """Unit tests for helper functions in sabnzbd.misc."""
    def assertTime(self, offset, age):
        # calc_age must agree whether or not translation is applied.
        assert offset == misc.calc_age(age, trans=True)
        assert offset == misc.calc_age(age, trans=False)
    def test_timeformat24h(self):
        assert "%H:%M:%S" == misc.time_format("%H:%M:%S")
        assert "%H:%M" == misc.time_format("%H:%M")
    @set_config({"ampm": True})
    def test_timeformatampm(self):
        # Force AM/PM support so the 12-hour conversion path is taken.
        misc.HAVE_AMPM = True
        assert "%I:%M:%S %p" == misc.time_format("%H:%M:%S")
        assert "%I:%M %p" == misc.time_format("%H:%M")
    def test_calc_age(self):
        date = datetime.datetime.now()
        m = date - datetime.timedelta(minutes=1)
        h = date - datetime.timedelta(hours=1)
        d = date - datetime.timedelta(days=1)
        self.assertTime("1m", m)
        self.assertTime("1h", h)
        self.assertTime("1d", d)
    def test_monthrange(self):
        # Dynamic dates would be a problem
        assert 12 == len(list(misc.monthrange(datetime.date(2018, 1, 1), datetime.date(2018, 12, 31))))
        assert 2 == len(list(misc.monthrange(datetime.date(2019, 1, 1), datetime.date(2019, 2, 1))))
    def test_safe_lower(self):
        # safe_lower tolerates None (returns empty string).
        assert "all caps" == misc.safe_lower("ALL CAPS")
        assert "" == misc.safe_lower(None)
    def test_cmp(self):
        assert misc.cmp(1, 2) < 0
        assert misc.cmp(2, 1) > 0
        assert misc.cmp(1, 1) == 0
    def test_cat_to_opts(self):
        # Need to create the Default category
        # Otherwise it will try to save the config
        ConfigCat("*", {})
        assert ("*", "", "Default", -100) == misc.cat_to_opts("*")
        assert ("*", "", "Default", -100) == misc.cat_to_opts("Nonsense")
        assert ("*", 1, "Default", -100) == misc.cat_to_opts("*", pp=1)
        assert ("*", 1, "test.py", -100) == misc.cat_to_opts("*", pp=1, script="test.py")
    def test_wildcard_to_re(self):
        # Regex metacharacters are escaped; '*' and '?' become '.*' / '.'
        assert "\\\\\\^\\$\\.\\[" == misc.wildcard_to_re("\\^$.[")
        assert "\\]\\(\\)\\+.\\|\\{\\}.*" == misc.wildcard_to_re("]()+?|{}*")
    def test_cat_convert(self):
        # TODO: Make test
        pass
    def test_convert_version(self):
        # Returns (numeric version, is_prerelease).
        assert (3010099, False) == misc.convert_version("3.1.0")
        assert (3010099, False) == misc.convert_version("3.1.0BlaBla")
        assert (3010001, True) == misc.convert_version("3.1.0Alpha1")
        assert (3010041, True) == misc.convert_version("3.1.0Beta1")
        assert (3010081, True) == misc.convert_version("3.1.0RC1")
        assert (3010194, True) == misc.convert_version("3.1.1RC14")
    def test_from_units(self):
        # Unit suffix is case-insensitive.
        assert 100.0 == misc.from_units("100")
        assert 1024.0 == misc.from_units("1KB")
        assert 1048576.0 == misc.from_units("1024KB")
        assert 1048576.0 == misc.from_units("1024Kb")
        assert 1048576.0 == misc.from_units("1024kB")
        assert 1048576.0 == misc.from_units("1MB")
        assert 1073741824.0 == misc.from_units("1GB")
        assert 1125899906842624.0 == misc.from_units("1P")
    def test_to_units(self):
        assert "1 K" == misc.to_units(1024)
        assert "1 KBla" == misc.to_units(1024, postfix="Bla")
        assert "1.0 M" == misc.to_units(1024 * 1024)
        assert "1.0 M" == misc.to_units(1024 * 1024 + 10)
        assert "10.0 M" == misc.to_units(1024 * 1024 * 10)
        assert "100.0 M" == misc.to_units(1024 * 1024 * 100)
        assert "9.8 G" == misc.to_units(1024 * 1024 * 10000)
        assert "1024.0 P" == misc.to_units(1024 ** 6)
    def test_unit_back_and_forth(self):
        # Round-tripping through to_units/from_units must be lossless
        # for exact powers of 1024.
        assert 100 == misc.from_units(misc.to_units(100))
        assert 1024 == misc.from_units(misc.to_units(1024))
        assert 1024 ** 3 == misc.from_units(misc.to_units(1024 ** 3))
    def test_caller_name(self):
        @set_config({"log_level": 0})
        def test_wrapper(skip):
            return misc.caller_name(skip=skip)
        @set_config({"log_level": 2})
        def test_wrapper_2(skip):
            return misc.caller_name(skip=skip)
        # No logging on lower-level
        assert "N/A" == test_wrapper(1)
        assert "N/A" == test_wrapper(2)
        assert "N/A" == test_wrapper(3)
        # Wrappers originate from the set_config-wrapper
        assert "test_wrapper_2" in test_wrapper_2(1)
        assert "wrapper_func" in test_wrapper_2(2)
    def test_split_host(self):
        # Handles missing input, optional port, and bracketed IPv6.
        assert (None, None) == misc.split_host(None)
        assert (None, None) == misc.split_host("")
        assert ("sabnzbd.org", 123) == misc.split_host("sabnzbd.org:123")
        assert ("sabnzbd.org", None) == misc.split_host("sabnzbd.org")
        assert ("127.0.0.1", 566) == misc.split_host("127.0.0.1:566")
        assert ("[::1]", 1234) == misc.split_host("[::1]:1234")
        assert ("[2001:db8::8080]", None) == misc.split_host("[2001:db8::8080]")
    @set_config({"cleanup_list": [".exe", ".nzb"]})
    def test_on_cleanup_list(self):
        # Matching is case-insensitive and only on the final extension.
        assert misc.on_cleanup_list("test.exe")
        assert misc.on_cleanup_list("TEST.EXE")
        assert misc.on_cleanup_list("longExeFIlanam.EXe")
        assert not misc.on_cleanup_list("testexe")
        assert misc.on_cleanup_list("test.nzb")
        assert not misc.on_cleanup_list("test.nzb", skip_nzb=True)
        assert not misc.on_cleanup_list("test.exe.lnk")
    def test_format_time_string(self):
        # Non-numeric / None input falls back to "0 seconds".
        assert "0 seconds" == misc.format_time_string(None)
        assert "0 seconds" == misc.format_time_string("Test")
        assert "0 seconds" == misc.format_time_string(0)
        assert "1 sec" == misc.format_time_string(1)
        assert "10 seconds" == misc.format_time_string(10)
        assert "1 min" == misc.format_time_string(60)
        assert "1 hour 1 min 1 sec" == misc.format_time_string(60 * 60 + 60 + 1)
        assert "1 day 59 seconds" == misc.format_time_string(86400 + 59)
        assert "2 days 2 hours 2 seconds" == misc.format_time_string(2 * 86400 + 2 * 60 * 60 + 2)
    def test_format_time_string_locale(self):
        # Have to set the languages, if it was compiled
        locale_dir = os.path.join(SAB_BASE_DIR, "..", sabnzbd.constants.DEF_LANGUAGE)
        if not os.path.exists(locale_dir):
            pytest.mark.skip("No language files compiled")
        lang.set_locale_info("SABnzbd", locale_dir)
        lang.set_language("de")
        assert "1 Sekunde" == misc.format_time_string(1)
        assert "10 Sekunden" == misc.format_time_string(10)
        assert "1 Minuten" == misc.format_time_string(60)
        assert "1 Stunde 1 Minuten 1 Sekunde" == misc.format_time_string(60 * 60 + 60 + 1)
        assert "1 Tag 59 Sekunden" == misc.format_time_string(86400 + 59)
        assert "2 Tage 2 Stunden 2 Sekunden" == misc.format_time_string(2 * 86400 + 2 * 60 * 60 + 2)
    def test_int_conv(self):
        # int_conv never raises: unconvertible input yields 0.
        assert 0 == misc.int_conv("0")
        assert 10 == misc.int_conv("10")
        assert 10 == misc.int_conv(10)
        assert 10 == misc.int_conv(10.0)
        assert 0 == misc.int_conv(None)
        assert 1 == misc.int_conv(True)
        assert 0 == misc.int_conv(object)
    def test_create_https_certificates(self):
        # Generates a self-signed certificate/key pair on disk.
        cert_file = "test.cert"
        key_file = "test.key"
        assert misc.create_https_certificates(cert_file, key_file)
        assert os.path.exists(cert_file)
        assert os.path.exists(key_file)
        # Remove files
        os.unlink("test.cert")
        os.unlink("test.key")
| 41.71
| 103
| 0.623831
|
794b4000d2e7b3a54f3504b74ee0e84dbe6948ef
| 3,542
|
py
|
Python
|
src/modelSuite/XTran/forTrans/mandelAgolNL.py
|
mirofedurco/PyAstronomy
|
b0e5806a18bde647654e6c9de323327803722864
|
[
"MIT"
] | 98
|
2015-01-01T12:46:05.000Z
|
2022-02-13T14:17:36.000Z
|
src/modelSuite/XTran/forTrans/mandelAgolNL.py
|
mirofedurco/PyAstronomy
|
b0e5806a18bde647654e6c9de323327803722864
|
[
"MIT"
] | 46
|
2015-02-10T19:53:38.000Z
|
2022-01-11T17:26:05.000Z
|
src/modelSuite/XTran/forTrans/mandelAgolNL.py
|
mirofedurco/PyAstronomy
|
b0e5806a18bde647654e6c9de323327803722864
|
[
"MIT"
] | 38
|
2015-01-08T17:00:34.000Z
|
2022-03-04T05:15:22.000Z
|
# -*- coding: utf-8 -*-
import numpy
import PyAstronomy.funcFit as fuf
from PyAstronomy.pyaC import pyaErrors as PE
from PyAstronomy.modelSuite.XTran import _ZList
# The transit computation relies on a compiled FORTRAN extension
# (occultnl); record whether it is available so the model class can
# raise a helpful error at construction time instead of import time.
try:
    from . import occultnl
    _importOccultnl = True
except ImportError:
    _importOccultnl = False
class MandelAgolNLLC(_ZList, fuf.OneDFit):
    """
    Calculate and fit analytical transit light-curves using the formulae
    provided by Mandel & Agol 2002. This model uses the non-linear
    limb-darkening prescription by Claret 2011:

    .. math :: \\frac{I(\mu)}{I(1)}= 1 - \\sum_{n=1}^{4}{a_n(1-\mu^{n/2})}

    .. note :: The computation of transit light curves
               is done using the external *occultnl* FORTRAN library.
               This library must be compiled manually using SciPy's f2py
               wrapper (http://www.scipy.org/F2py). Simply go to the
               *forTrans* directory of the source distribution of PyAstronomy,
               then invoke

               f2py -c occultnl.pyf occultnl.f

    *Model parameters*:
      - `p` - Radius ratio between planet and star.
      - `a` - Semi-major axis of planetary orbit [stellar radii].
      - `i` - Inclination of orbit in degrees (90 deg is *edge on* view).
      - `a1` - Non-Linear limb-darkening coefficient.
      - `a2` - Non-Linear limb-darkening coefficient.
      - `a3` - Non-Linear limb-darkening coefficient.
      - `a4` - Non-Linear limb-darkening coefficient.
      - `T0` - Time offset of transit center.
      - `per` - Period of planetary orbit.
      - `b` - Describes the flux ratio between a stellar companion and the main star (default is 0).
    """
    def __init__(self):
        # Circular-orbit z-list machinery from _ZList.
        _ZList.__init__(self, "circular")
        if not _importOccultnl:
            # NOTE(review): message mentions 'occultquad.so' although the
            # module imported above is 'occultnl' — confirm intended name.
            raise(PE.PyARequiredImport("Could not import required shared object library 'occultquad.so'",\
                  solution=["Invoke PyA's install script (setup.py) with the --with-ext option.",
                            "Go to 'forTrans' directory of PyAstronomy and invoke\n f2py -c occultnl.pyf occultnl.f"]
                  ))
        fuf.OneDFit.__init__(self, ["p", "a", "i", "a1", "a2", "a3", "a4", "T0", "per", "b"])
        # All parameters start frozen (not varied in fits).
        self.freeze(["p", "a", "i", "a1", "a2", "a3", "a4", "T0", "per", "b"])
        self.setRootName("MandelAgolNL")
        self.__zlist=None
    def evaluate(self, time):
        """
        Calculate a light curve according to the analytical models
        given by Mandel & Agol 2002.

        Parameters
        ----------
        time : array
            An array of time points at which the light curve
            shall be calculated.

        .. note:: time = 0 -> Planet is exactly in the line of sight (phase = 0).

        Returns
        -------
        Model : array
            The analytical light curve is stored in the property `lightcurve`.
        """
        # Translate the given parameters into an orbit and, finally,
        # into a projected, normalized distance (z-parameter)
        self._calcZList(time - self["T0"])
        # Use occultquad Fortran library to compute flux decrease
        result = occultnl.occultnl(self["p"],self["a1"],self["a2"],self["a3"], \
                                   self["a4"],self._zlist[self._intrans])
        df = numpy.zeros(len(time))
        # Flux deficit is only non-zero for in-transit points.
        df[self._intrans] = (1.0 - result[0])
        # Dilute the transit depth by the companion flux ratio `b`.
        self.lightcurve = (1.-df)*1./(1.+self["b"]) + self["b"]/(1.0+self["b"])
        return self.lightcurve
# Rebinned variant of the model (funcFit-generated subclass).
MandelAgolNLLC_Rebin = fuf.turnIntoRebin(MandelAgolNLLC)
| 36.895833
| 137
| 0.594862
|
794b408447b39eec70fb0902cbe3ef25c1b16ae6
| 1,447
|
py
|
Python
|
compte/models.py
|
bzg/acceslibre
|
52c7c6990dc132da71a92e856d65f4a983c3b15a
|
[
"MIT"
] | 8
|
2020-07-23T08:17:28.000Z
|
2022-03-09T22:31:36.000Z
|
compte/models.py
|
bzg/acceslibre
|
52c7c6990dc132da71a92e856d65f4a983c3b15a
|
[
"MIT"
] | 37
|
2020-07-01T08:47:33.000Z
|
2022-02-03T19:50:58.000Z
|
compte/models.py
|
bzg/acceslibre
|
52c7c6990dc132da71a92e856d65f4a983c3b15a
|
[
"MIT"
] | 4
|
2021-04-08T10:57:18.000Z
|
2022-01-31T13:16:31.000Z
|
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
class EmailToken(models.Model):
    """
    Pending e-mail address change: stores the token sent to a user so
    they can confirm the new address before it expires.
    """
    class Meta:
        ordering = ("-created_at",)
        verbose_name = "EmailToken"
        verbose_name_plural = "EmailTokens"
        indexes = [
            # Tokens are looked up by value when the confirmation link is hit.
            models.Index(fields=["activation_token"]),
        ]
    # Random token embedded in the confirmation link.
    activation_token = models.UUIDField()
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        verbose_name="Utilisateur",
        on_delete=models.CASCADE,
    )
    # Address the user wants to switch to once confirmed.
    new_email = models.EmailField()
    # Deadline after which the token is no longer valid.
    expire_at = models.DateTimeField()
    created_at = models.DateTimeField(auto_now_add=True)
class UserPreferences(models.Model):
    """Per-user notification preferences (created automatically on signup)."""
    class Meta:
        verbose_name = "UserPreferences"
        verbose_name_plural = "UsersPreferences"
    # NOTE(review): ForeignKey permits several preference rows per user;
    # a OneToOneField may have been intended — confirm with callers.
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        verbose_name="Utilisateur",
        on_delete=models.CASCADE,
        related_name="preferences",
    )
    notify_on_unpublished_erps = models.BooleanField(
        default=True,
        verbose_name="Recevoir des mails de rappel de publication",
    )
@receiver(post_save, sender=get_user_model())
def save_profile(sender, instance, created, **kwargs):
    """Create the default UserPreferences row for every new user account."""
    if not created:
        return
    UserPreferences(user=instance).save()
| 29.530612
| 67
| 0.679337
|
794b41af096b6da68ff1e0c19b6749a58c2d91ef
| 23,419
|
py
|
Python
|
tmt/steps/provision/__init__.py
|
happz/tmt
|
ff953b11204c8e1c2ed1a8ac9cf5c805a09a9ba1
|
[
"MIT"
] | null | null | null |
tmt/steps/provision/__init__.py
|
happz/tmt
|
ff953b11204c8e1c2ed1a8ac9cf5c805a09a9ba1
|
[
"MIT"
] | null | null | null |
tmt/steps/provision/__init__.py
|
happz/tmt
|
ff953b11204c8e1c2ed1a8ac9cf5c805a09a9ba1
|
[
"MIT"
] | null | null | null |
import os
import random
import re
import shlex
import string
import subprocess
import tempfile
import time
import click
import fmf
import tmt
# Timeout in seconds of waiting for a connection (4 minutes)
CONNECTION_TIMEOUT = 60 * 4
# Wait time in seconds before reconnecting when a reboot happens
SSH_INITIAL_WAIT_TIME = 5
class Provision(tmt.steps.Step):
    """ Provision an environment for testing or use localhost. """
    # Default implementation for provision is a virtual machine
    how = 'virtual'
    def __init__(self, data, plan):
        """ Initialize provision step data """
        super().__init__(data, plan)
        # List of provisioned guests and loaded guest data
        self._guests = []
        self._guest_data = {}
    def load(self, extra_keys=None):
        """ Load guest data from the workdir """
        extra_keys = extra_keys or []
        super().load(extra_keys)
        # Missing guests.yaml is normal on first run — just note it.
        try:
            self._guest_data = tmt.utils.yaml_to_dict(self.read('guests.yaml'))
        except tmt.utils.FileError:
            self.debug('Provisioned guests not found.', level=2)
    def save(self, data=None):
        """ Save guest data to the workdir """
        data = data or {}
        super().save(data)
        # Persist each guest's state keyed by its name for later wake up.
        try:
            guests = dict(
                [(guest.name, guest.save()) for guest in self.guests()])
            self.write('guests.yaml', tmt.utils.dict_to_yaml(guests))
        except tmt.utils.FileError:
            self.debug('Failed to save provisioned guests.')
    def wake(self):
        """ Wake up the step (process workdir and command line) """
        super().wake()
        # Choose the right plugin and wake it up
        for data in self.data:
            plugin = ProvisionPlugin.delegate(self, data)
            self._plugins.append(plugin)
            # If guest data loaded, perform a complete wake up
            plugin.wake(data=self._guest_data.get(plugin.name))
            if plugin.guest():
                self._guests.append(plugin.guest())
        # Nothing more to do if already done
        if self.status() == 'done':
            self.debug(
                'Provision wake up complete (already done before).', level=2)
        # Save status and step data (now we know what to do)
        else:
            self.status('todo')
            self.save()
    def show(self):
        """ Show discover details """
        for data in self.data:
            ProvisionPlugin.delegate(self, data).show()
    def summary(self):
        """ Give a concise summary of the provisioning """
        # Summary of provisioned guests
        guests = fmf.utils.listed(self.guests(), 'guest')
        self.info('summary', f'{guests} provisioned', 'green', shift=1)
        # Guest list in verbose mode
        for guest in self.guests():
            if guest.name != tmt.utils.DEFAULT_NAME:
                self.verbose(guest.name, color='red', shift=2)
    def go(self):
        """ Provision all guests"""
        super().go()
        # Nothing more to do if already done
        if self.status() == 'done':
            self.info('status', 'done', 'green', shift=1)
            self.summary()
            self.actions()
            return
        # Provision guests
        self._guests = []
        save = True
        try:
            for plugin in self.plugins():
                try:
                    plugin.go()
                    if isinstance(plugin, ProvisionPlugin):
                        plugin.guest().details()
                finally:
                    # Track the guest even when go()/details() failed so
                    # that cleanup can still reach it.
                    if isinstance(plugin, ProvisionPlugin):
                        self._guests.append(plugin.guest())
            # Give a summary, update status and save
            self.summary()
            self.status('done')
        except SystemExit as error:
            # A plugin will only raise SystemExit if the exit is really desired
            # and no other actions should be done. An example of this is
            # listing available images. In such case, the workdir is deleted
            # as it's redundant and save() would throw an error.
            save = False
            raise error
        finally:
            if save:
                self.save()
    def guests(self):
        """ Return the list of all provisioned guests """
        return self._guests
    def requires(self):
        """
        Packages required by all enabled provision plugins

        Return a list of packages which need to be installed on the
        provisioned guest so that the workdir can be synced to it.
        Used by the prepare step.
        """
        # Deduplicate across plugins via a set, return a plain list.
        requires = set()
        for plugin in self.plugins(classes=ProvisionPlugin):
            requires.update(plugin.requires())
        return list(requires)
class ProvisionPlugin(tmt.steps.Plugin):
    """ Common parent of provision plugins """
    # Default implementation for provision is a virtual machine
    how = 'virtual'
    # List of all supported methods aggregated from all plugins
    _supported_methods = []
    @classmethod
    def base_command(cls, method_class=None, usage=None):
        """ Create base click command (common for all provision plugins) """
        # Prepare general usage message for the step
        if method_class:
            usage = Provision.usage(method_overview=usage)
        # Create the command
        @click.command(cls=method_class, help=usage)
        @click.pass_context
        @click.option(
            '-h', '--how', metavar='METHOD',
            help='Use specified method for provisioning.')
        def provision(context, **kwargs):
            context.obj.steps.add('provision')
            Provision._save_context(context)
        return provision
    def wake(self, keys=None, data=None):
        """
        Wake up the plugin

        Override data with command line options.
        Wake up the guest based on provided guest data.
        """
        # Subclasses extend this to restore their Guest from `data`.
        super().wake(keys=keys)
    def guest(self):
        """
        Return provisioned guest

        Each ProvisionPlugin has to implement this method.
        Should return a provisioned Guest() instance.
        """
        raise NotImplementedError
    def requires(self):
        """ List of required packages needed for workdir sync """
        # NOTE(review): delegates to Guest.requires(), which is not visible
        # in this chunk — presumably defined further down the class.
        return Guest.requires()
    @classmethod
    def clean_images(cls, clean, dry):
        """ Remove the images of one particular plugin """
        # Intentionally a no-op here; plugins with cached images override it.
class Guest(tmt.utils.Common):
    """
    Guest provisioned for test execution

    The following keys are expected in the 'data' dictionary::

        guest ...... hostname or ip address
        port ....... port to connect to
        user ....... user name to log in
        key ........ path to the private key (str or list)
        password ... password

    These are by default imported into instance attributes (see the
    class attribute '_keys' below).
    """
    # List of supported keys
    # (used for import/export to/from attributes during load and save)
    _keys = ['guest', 'port', 'user', 'key', 'password']
    # Master ssh connection process and socket path
    # (lazily created per instance by _ssh_master_connection/_ssh_socket)
    _ssh_master_process = None
    _ssh_socket_path = None
    def __init__(self, data, name=None, parent=None):
        """ Initialize guest data (imports supported keys as attributes) """
        super().__init__(parent, name)
        self.load(data)
def _random_name(self, prefix='', length=16):
""" Generate a random name """
# Append at least 5 random characters
min_random_part = max(5, length - len(prefix))
name = prefix + ''.join(
random.choices(string.ascii_letters, k=min_random_part))
# Return tail (containing random characters) of name
return name[-length:]
    def _ssh_guest(self):
        """ Return the ssh destination in user@guest form """
        return f'{self.user}@{self.guest}'
def _ssh_socket(self):
""" Prepare path to the master connection socket """
if not self._ssh_socket_path:
socket_dir = f"/run/user/{os.getuid()}/tmt"
os.makedirs(socket_dir, exist_ok=True)
self._ssh_socket_path = tempfile.mktemp(dir=socket_dir)
return self._ssh_socket_path
    def _ssh_options(self, join=False):
        """
        Return common ssh options (list, or space-joined string if join=True)
        """
        # Non-interactive defaults: never prompt about host keys.
        options = [
            '-oStrictHostKeyChecking=no',
            '-oUserKnownHostsFile=/dev/null',
            ]
        if self.key or self.password:
            # Skip ssh-agent (it adds additional identities)
            options.extend(['-oIdentitiesOnly=yes'])
        if self.port:
            options.extend(['-p', str(self.port)])
        if self.key:
            # `key` may be a single path or a list of paths.
            keys = self.key if isinstance(self.key, list) else [self.key]
            for key in keys:
                # Quote paths only when producing a joined shell string.
                options.extend(['-i', shlex.quote(key) if join else key])
        if self.password:
            options.extend(['-oPasswordAuthentication=yes'])
        # Use the shared master connection
        options.extend(["-S", self._ssh_socket()])
        return ' '.join(options) if join else options
    def _ssh_master_connection(self, command):
        """ Check/create the master ssh connection """
        # Already established — nothing to do.
        if self._ssh_master_process:
            return
        # -M: master mode, -N: no remote command, -n: stdin from /dev/null,
        # -T: no pseudo-terminal; the process stays alive in the background
        # and later ssh calls multiplex over its socket.
        command = command + self._ssh_options() + ["-MNnT", self._ssh_guest()]
        self.debug(f"Create the master ssh connection: {' '.join(command)}")
        self._ssh_master_process = subprocess.Popen(
            command,
            stdin=subprocess.DEVNULL,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL)
    def _ssh_command(self, join=False):
        """ Prepare an ssh command line for execution (list or joined) """
        command = []
        # Password auth goes through sshpass.
        if self.password:
            password = shlex.quote(self.password) if join else self.password
            command.extend(["sshpass", "-p", password])
        command.append("ssh")
        # Check the master connection
        self._ssh_master_connection(command)
        if join:
            return " ".join(command) + " " + self._ssh_options(join=True)
        else:
            return command + self._ssh_options()
def load(self, data):
"""
Load guest data into object attributes for easy access
Called during guest object initialization. Takes care of storing
all supported keys (see class attribute _keys for the list) from
provided data to the guest object attributes. Child classes can
extend it to make additional guest attributes easily available.
Data dictionary can contain guest information from both command
line options / L2 metadata / user configuration and wake up data
stored by the save() method below.
"""
for key in self._keys:
setattr(self, key, data.get(key))
def save(self):
"""
Save guest data for future wake up
Export all essential guest data into a dictionary which will be
stored in the `guests.yaml` file for possible future wake up of
the guest. Everything needed to attach to a running instance
should be added into the data dictionary by child classes.
"""
data = dict()
for key in self._keys:
value = getattr(self, key)
if value is not None:
data[key] = value
return data
    def wake(self):
        """
        Wake up the guest

        Perform any actions necessary after step wake up to be able to
        attach to a running guest instance and execute commands. Called
        after load() is completed so all guest data should be prepared.
        """
        # Base implementation is a no-op; subclasses reconnect here.
        self.debug(f"Doing nothing to wake up guest '{self.guest}'.")
    def start(self):
        """
        Start the guest

        Get a new guest instance running. This should include preparing
        any configuration necessary to get it started. Called after
        load() is completed so all guest data should be available.
        """
        # Base implementation is a no-op; subclasses boot the machine here.
        self.debug(f"Doing nothing to start guest '{self.guest}'.")
    def details(self):
        """ Show guest details such as distro and kernel """
        # Skip distro & kernel check in dry mode
        if self.opt('dry'):
            return
        # Distro detection: try progressively older release files.
        # Distro (check os-release first)
        try:
            distro = self.execute('cat /etc/os-release')[0].strip()
            distro = re.search('PRETTY_NAME="(.*)"', distro).group(1)
        except (tmt.utils.RunError, AttributeError):
            # Check for lsb-release
            try:
                distro = self.execute('cat /etc/lsb-release')[0].strip()
                distro = re.search(
                    'DISTRIB_DESCRIPTION="(.*)"', distro).group(1)
            except (tmt.utils.RunError, AttributeError):
                # Check for redhat-release
                try:
                    distro = self.execute('cat /etc/redhat-release')[0].strip()
                except (tmt.utils.RunError, AttributeError):
                    distro = None
        # Handle standard cloud images message when connecting
        if distro is not None and 'Please login as the user' in distro:
            raise tmt.utils.GeneralError(
                f'Login to the guest failed.\n{distro}')
        if distro:
            self.info('distro', distro, 'green')
        # Kernel
        kernel = self.execute('uname -r')[0].strip()
        self.verbose('kernel', kernel, 'green')
def _ansible_verbosity(self):
""" Prepare verbose level based on the --debug option count """
if self.opt('debug') < 3:
return ''
else:
return ' -' + (self.opt('debug') - 2) * 'v'
@staticmethod
def _ansible_extra_args(extra_args):
""" Prepare extra arguments for ansible-playbook"""
return '' if extra_args is None else str(extra_args)
    def _ansible_summary(self, output):
        """ Check the output for ansible result summary numbers """
        if not output:
            return
        keys = 'ok changed unreachable failed skipped rescued ignored'.split()
        for key in keys:
            # Match recap lines like 'host : ok=2 changed=1 ...'
            # (re.M so '^...$' applies per line of the output)
            matched = re.search(rf'^.*\s:\s.*{key}=(\d+).*$', output, re.M)
            if matched and int(matched.group(1)) > 0:
                # e.g. '2 tasks' - fmf handles the singular/plural form
                tasks = fmf.utils.listed(matched.group(1), 'task')
                self.verbose(key, tasks, 'green')
    def _ansible_playbook_path(self, playbook):
        """
        Prepare full ansible playbook path

        Returns the playbook path resolved against the metadata tree root.
        """
        # Playbook paths should be relative to the metadata tree root
        self.debug(f"Applying playbook '{playbook}' on guest '{self.guest}'.")
        playbook = os.path.join(self.parent.plan.my_run.tree.root, playbook)
        self.debug(f"Playbook full path: '{playbook}'", level=2)
        return playbook
    def _export_environment(self, execute_environment=None):
        """
        Prepare shell export of environment variables

        Returns an 'export VAR=...; ' prefix string, or an empty string
        when there is nothing to export.
        """
        # Prepare environment variables so they can be correctly passed
        # to ssh's shell. Create a copy to prevent modifying source.
        environment = dict()
        environment.update(execute_environment or dict())
        # Plan environment and variables provided on the command line
        # override environment provided to execute().
        environment.update(self.parent.plan.environment)
        # Prepend with export and run as a separate command.
        if not environment:
            return ''
        return 'export {}; '.format(
            ' '.join(tmt.utils.shell_variables(environment)))
    def ansible(self, playbook, extra_args=None):
        """ Prepare guest using ansible playbook """
        playbook = self._ansible_playbook_path(playbook)
        # Run ansible-playbook locally against the guest over ssh; the
        # trailing comma after the guest address makes it an inline
        # inventory, 'stty cols' pins the terminal width for clean output
        stdout, stderr = self.run(
            f'{self._export_environment()}'
            f'stty cols {tmt.utils.OUTPUT_WIDTH}; ansible-playbook '
            f'--ssh-common-args="{self._ssh_options(join=True)}" '
            f'{self._ansible_verbosity()} '
            f'{self._ansible_extra_args(extra_args)} -i {self._ssh_guest()},'
            f' {playbook}',
            cwd=self.parent.plan.worktree)
        # Report the play recap numbers (ok/changed/failed/...) to the user
        self._ansible_summary(stdout)
    def execute(self, command, **kwargs):
        """
        Execute command on the guest

        command ... string or list of command arguments (required)
        env ....... dictionary with environment variables
        cwd ....... working directory to be entered before execution

        If the command is provided as a list, it will be space-joined.
        If necessary, quote escaping has to be handled by the caller.
        """
        # Prepare the export of environment variables
        environment = self._export_environment(kwargs.get('env', dict()))
        # Change to given directory on guest if cwd provided
        directory = kwargs.get('cwd') or ''
        if directory:
            directory = f"cd '{directory}'; "
        # Run in interactive mode if requested ('-t' allocates a tty)
        interactive = ['-t'] if kwargs.get('interactive') else []
        # Prepare command and run it
        if isinstance(command, (list, tuple)):
            command = ' '.join(command)
        self.debug(f"Execute command '{command}' on guest '{self.guest}'.")
        # The combined 'export ...; cd ...; command' string is passed as a
        # single ssh argument so it runs in one remote shell invocation
        command = (
            self._ssh_command() + interactive + [self._ssh_guest()] +
            [f'{environment}{directory}{command}'])
        return self.run(command, shell=False, **kwargs)
    def push(self, source=None, destination=None, options=None):
        """
        Push files to the guest

        By default the whole plan workdir is synced to the same location
        on the guest. Use the 'source' and 'destination' to sync custom
        location and the 'options' parameter to modify default options
        which are '-Rrz --links --safe-links --delete'.
        """
        # Prepare options
        if options is None:
            options = "-Rrz --links --safe-links --delete".split()
        if destination is None:
            destination = "/"
        if source is None:
            source = self.parent.plan.workdir
            self.debug(f"Push workdir to guest '{self.guest}'.")
        else:
            self.debug(f"Copy '{source}' to '{destination}' on the guest.")
        # Make sure rsync is present (tests can remove it) and sync
        self._check_rsync()
        try:
            # '-e' plugs our ssh command (with options) into rsync
            self.run(
                ["rsync"] + options
                + ["-e", self._ssh_command(join=True)]
                + [source, f"{self._ssh_guest()}:{destination}"],
                shell=False)
        except tmt.utils.RunError:
            # Provide a reasonable error to the user, then re-raise so the
            # caller still sees the failure
            self.fail(
                f"Failed to push workdir to the guest. This usually means "
                f"login as '{self.user}' to the test machine does not work.")
            raise
    def pull(self, source=None, destination=None, options=None):
        """
        Pull files from the guest

        By default the whole plan workdir is synced from the same
        location on the guest. Use the 'source' and 'destination' to
        sync custom location and the 'options' parameter to modify
        default options '-Rrz --links --safe-links --protect-args'.
        """
        # Prepare options
        if options is None:
            options = "-Rrz --links --safe-links --protect-args".split()
        if destination is None:
            destination = "/"
        if source is None:
            source = self.parent.plan.workdir
            self.debug(f"Pull workdir from guest '{self.guest}'.")
        else:
            self.debug(f"Copy '{source}' from the guest to '{destination}'.")
        # Make sure rsync is present (tests can remove it) and sync
        self._check_rsync()
        # '-e' plugs our ssh command (with options) into rsync
        self.run(
            ["rsync"] + options
            + ["-e", self._ssh_command(join=True)]
            + [f"{self._ssh_guest()}:{source}", destination],
            shell=False)
    def stop(self):
        """
        Stop the guest

        Shut down a running guest instance so that it does not consume
        any memory or cpu resources. If needed, perform any actions
        necessary to store the instance status to disk.
        """
        # Close the master ssh connection
        if self._ssh_master_process:
            self.debug("Close the master ssh connection.", level=3)
            try:
                self._ssh_master_process.terminate()
                # Bounded wait; a hung master process must not block stop()
                self._ssh_master_process.wait(timeout=3)
            except subprocess.TimeoutExpired:
                # Best-effort cleanup - ignore a process that will not die
                pass
        # Remove the ssh socket
        if self._ssh_socket_path and os.path.exists(self._ssh_socket_path):
            self.debug(
                f"Remove ssh socket '{self._ssh_socket_path}'.", level=3)
            try:
                os.unlink(self._ssh_socket_path)
            except OSError as error:
                # Socket removal failure is not fatal - just log it
                self.debug(f"Failed to remove the socket: {error}", level=3)
    def reboot(self, hard=False):
        """
        Reboot the guest, return True if successful

        Parameter 'hard' set to True means that guest should be
        rebooted by way which is not clean in sense that data can be
        lost. When set to False reboot should be done gracefully.
        """
        if hard:
            # Hard reboot needs provisioner-specific support (power cycle)
            raise tmt.utils.ProvisionError(
                "Method does not support hard reboot.")
        try:
            self.execute("reboot")
        except tmt.utils.RunError as error:
            # Connection can be closed by the remote host even before the
            # reboot command is completed. Let's ignore such errors.
            if error.returncode == 255:
                self.debug(
                    f"Seems the connection was closed too fast, ignoring.")
            else:
                raise
        # Success is determined by whether we can reconnect afterwards
        return self.reconnect()
def reconnect(self):
""" Ensure the connection to the guest is working after reboot """
# Try to wait for machine to really shutdown sshd
time.sleep(SSH_INITIAL_WAIT_TIME)
self.debug("Wait for a connection to the guest.")
for attempt in range(1, CONNECTION_TIMEOUT):
try:
self.execute('whoami')
break
except tmt.utils.RunError:
self.debug('Failed to connect to the guest, retrying.')
time.sleep(1)
if attempt == CONNECTION_TIMEOUT:
self.debug("Connection to guest failed after reboot.")
return False
return True
    def remove(self):
        """
        Remove the guest

        Completely remove all guest instance data so that it does not
        consume any disk resources.
        """
        # Base implementation is intentionally a no-op; provisioning
        # plugins override this to actually destroy the instance.
        self.debug(f"Doing nothing to remove guest '{self.guest}'.")
    def _check_rsync(self):
        """
        Make sure that rsync is installed on the guest

        For now works only with RHEL based distributions.
        On read-only distros install under the '/root/pkg' directory.
        """
        self.debug("Ensure that rsync is installed on the guest.", level=3)
        # Probe with 'rsync --version' first; install only when missing
        # (presence of /usr/bin/rpm-ostree marks a read-only distro)
        self.execute(
            "rsync --version --quiet || "
            # Regular yum install on read-write distros
            "if [[ ! -f /usr/bin/rpm-ostree ]]; then yum install -y rsync; "
            # Install under /root/pkg for read-only distros
            "else yum install -y --installroot=/root/pkg --releasever / rsync "
            "&& ln -sf /root/pkg/bin/rsync /usr/local/bin/rsync; fi")
    @classmethod
    def requires(cls):
        """ No extra requires needed """
        # Base guest implementation has no additional requirements
        return []
| 36.535101
| 79
| 0.589137
|
794b426d5389a80b72bf4f8ec0d109ddb0714114
| 1,024
|
py
|
Python
|
vibscore/__init__.py
|
ChocolateCircus445/VibriBot
|
3c6d9f879e6fb8b849d9fed015a101a811d4fa0b
|
[
"MIT"
] | 1
|
2021-10-31T19:31:31.000Z
|
2021-10-31T19:31:31.000Z
|
vibscore/__init__.py
|
ChocolateCircus445/VibriBot
|
3c6d9f879e6fb8b849d9fed015a101a811d4fa0b
|
[
"MIT"
] | null | null | null |
vibscore/__init__.py
|
ChocolateCircus445/VibriBot
|
3c6d9f879e6fb8b849d9fed015a101a811d4fa0b
|
[
"MIT"
] | null | null | null |
# Lookup table of binomial-style values: scorevals[row][place] is the
# amount consumed when digit 'row' is chosen for column 'place'.
scorevals = (
    [0, 0, 0, 0, 0, 0, 0],
    [1, 1, 1, 1, 1, 1, 1],
    [8, 7, 6, 5, 4, 3, 2],
    [36, 28, 21, 15, 10, 6, 3],
    [120, 84, 56, 35, 20, 10, 4],
    [330, 210, 126, 70, 35, 15, 5],
    [792, 462, 252, 126, 56, 21, 6],
    [1716, 924, 462, 210, 84, 28, 7],
    [3432, 1716, 792, 330, 120, 36, 8],
    [6435, 3003, 1287, 495, 165, 45, 9],
    [11440, 5005, 2002, 715, 220, 55, 10],
    [19448, 8008, 3003, 1001, 286, 66, 11],
    [31824, 12376, 4368, 1365, 364, 78, 12],
    [50388, 18564, 6188, 1820, 455, 91, 13],
    [77520, 27132, 8568, 2380, 560, 105, 14]
)


def calculate(num):
    """
    Decompose *num* into seven digits using the scorevals table.

    For each of the seven places (left to right) the greedy choice is
    made: the largest row index whose table entry fits into the current
    remainder is taken, its value subtracted, and the index recorded.
    Returns the digits as a space-separated string with a trailing space.
    """
    remainder = num
    digits = []
    for place in range(7):
        # Scan rows from the largest value down to zero
        for row in range(len(scorevals) - 1, -1, -1):
            if remainder >= scorevals[row][place]:
                digits.append(str(row))
                remainder -= scorevals[row][place]
                break
    return "".join(digit + " " for digit in digits)


if __name__ == "__main__":
    # Quick manual check with an example input
    print(calculate(34))
| 28.444444
| 59
| 0.491211
|
794b43487a50cab871c284c5169eaad072d4b686
| 2,551
|
py
|
Python
|
solutions/year_2020/day_5/binary_boarding.py
|
svandermeulen/advent_of_code
|
99fe936a564f8ee66f8f28aaac4e36e013ebc28d
|
[
"MIT"
] | null | null | null |
solutions/year_2020/day_5/binary_boarding.py
|
svandermeulen/advent_of_code
|
99fe936a564f8ee66f8f28aaac4e36e013ebc28d
|
[
"MIT"
] | null | null | null |
solutions/year_2020/day_5/binary_boarding.py
|
svandermeulen/advent_of_code
|
99fe936a564f8ee66f8f28aaac4e36e013ebc28d
|
[
"MIT"
] | null | null | null |
"""
-*- coding: utf-8 -*-
Written by: sme30393
Date: 05/12/2020
"""
import os
from solutions.config import Config
def get_row(row_sequence: str, row_indices: list) -> list:
    """
    Recursively halve row_indices according to row_sequence.

    'F' keeps the front half, anything else keeps the back half; stops
    once a single candidate row remains.
    """
    if len(row_indices) <= 1:
        return row_indices
    half = len(row_indices) // 2
    kept = row_indices[:half] if row_sequence[0] == "F" else row_indices[half:]
    return get_row(row_sequence=row_sequence[1:], row_indices=kept)
def get_column(col_sequence: str, col_indices: list) -> list:
    """
    Recursively halve col_indices according to col_sequence.

    'L' keeps the left half, anything else keeps the right half; stops
    once a single candidate column remains.
    """
    if len(col_indices) <= 1:
        return col_indices
    half = len(col_indices) // 2
    kept = col_indices[:half] if col_sequence[0] == "L" else col_indices[half:]
    return get_column(col_sequence=col_sequence[1:], col_indices=kept)
def get_seat_id(row: int, column: int) -> int:
    """Return the seat ID: the row shifted three bits up plus the column."""
    return (row << 3) + column
def main():
    """
    Solve Advent of Code 2020 day 5 (Binary Boarding).

    Part one: find the highest seat ID among all boarding passes.
    Part two: find our own seat - the missing ID whose neighbours exist.
    """
    config = Config(day=5)
    path_file = os.path.join(config.path_data, "boarding_passes.txt")
    with open(path_file, "r") as f:
        boarding_passes = [value.strip("\n") for value in f.readlines()]
    print(f"The number of boarding passes equals: {len(boarding_passes)}")

    # Sanity-check the decoding against the puzzle's worked examples
    boarding_passes_test = ["BFFFBBFRRR", "FFFBBBFRRR", "BBFFBBFRLL"]
    row_indices = [*range(0, 128)]
    column_indices = [*range(0, 8)]
    for boarding_pass, row_number_exp, col_number_exp in zip(boarding_passes_test, [70, 14, 102], [7, 7, 4]):
        # Trailing comma unpacks the single-element list the helpers return
        row, = get_row(row_sequence=boarding_pass[:7], row_indices=row_indices)
        assert row_number_exp == row
        column, = get_column(col_sequence=boarding_pass[7:], col_indices=column_indices)
        assert col_number_exp == column
        seat_id = get_seat_id(row=row, column=column)
        print(f"Boardingpass {boarding_pass}, sits on row {row} and column {column}, with seat id {seat_id}")

    # PART ONE: decode every pass and report the highest seat ID
    seat_ids = []
    for boarding_pass in boarding_passes:
        row, = get_row(row_sequence=boarding_pass[:7], row_indices=row_indices)
        column, = get_column(col_sequence=boarding_pass[7:], col_indices=column_indices)
        seat_ids.append(get_seat_id(row=row, column=column))
    print(f"The seat with the highest seat ID equals: {max(seat_ids)}")

    # PART TWO
    missing_seat_ids = set(range(0, max(seat_ids))).difference(set(seat_ids))
    for seat_id in missing_seat_ids:
        # Our seat is the gap whose two neighbouring IDs are occupied
        if seat_id - 1 in seat_ids and seat_id + 1 in seat_ids:
            print(f"Your seat ID: {seat_id}")
            return True
# Run the solution only when executed as a script
if __name__ == "__main__":
    main()
| 34.945205
| 109
| 0.677381
|
794b436457bb877fb5740e1b94cff013eec47732
| 953
|
py
|
Python
|
tests/test_hello_cookiecutter.py
|
diego-alves/hello_cookiecutter
|
71c6e0126b598e07702ef2a80f44bf552c799d6f
|
[
"MIT"
] | null | null | null |
tests/test_hello_cookiecutter.py
|
diego-alves/hello_cookiecutter
|
71c6e0126b598e07702ef2a80f44bf552c799d6f
|
[
"MIT"
] | null | null | null |
tests/test_hello_cookiecutter.py
|
diego-alves/hello_cookiecutter
|
71c6e0126b598e07702ef2a80f44bf552c799d6f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `hello_cookiecutter` package."""
import unittest
from click.testing import CliRunner
from hello_cookiecutter import hello_cookiecutter
from hello_cookiecutter import cli
class TestHello_cookiecutter(unittest.TestCase):
    """Tests for `hello_cookiecutter` package."""

    def setUp(self):
        """Set up test fixtures, if any."""

    def tearDown(self):
        """Tear down test fixtures, if any."""

    def test_000_something(self):
        """Test something."""

    def test_command_line_interface(self):
        """Test the CLI."""
        runner = CliRunner()
        # Default invocation should succeed and mention the entry point
        result = runner.invoke(cli.main)
        assert result.exit_code == 0
        assert 'hello_cookiecutter.cli.main' in result.output
        # --help should succeed and show the standard click help footer
        help_result = runner.invoke(cli.main, ['--help'])
        assert help_result.exit_code == 0
        assert '--help Show this message and exit.' in help_result.output
| 27.228571
| 74
| 0.66212
|
794b4389e17316c11d0e18171f2944fa249ba181
| 464
|
py
|
Python
|
xos/synchronizers/ec2/deleters/site_deployment_deleter.py
|
xmaruto/mcord
|
3678a3d10c3703c2b73f396c293faebf0c82a4f4
|
[
"Apache-2.0"
] | null | null | null |
xos/synchronizers/ec2/deleters/site_deployment_deleter.py
|
xmaruto/mcord
|
3678a3d10c3703c2b73f396c293faebf0c82a4f4
|
[
"Apache-2.0"
] | 5
|
2020-06-05T17:47:15.000Z
|
2021-09-23T23:21:27.000Z
|
xos/synchronizers/ec2/deleters/site_deployment_deleter.py
|
pan2za/xos
|
c2a4da2ccaa12360b2718be303b247866aefdfe6
|
[
"Apache-2.0"
] | null | null | null |
from core.models import Site, SiteDeployment
from synchronizers.base.deleter import Deleter
class SiteDeploymentDeleter(Deleter):
    """Deleter removing SiteDeployment objects and their backend tenant."""
    # Model name used by the deleter framework to dispatch to this class
    model='SiteDeployment'

    def call(self, pk):
        """
        Delete the SiteDeployment with the given primary key.

        When the deployment has an associated backend tenant, remove the
        tenant through the deployment's admin driver before deleting the
        database record.
        """
        site_deployment = SiteDeployment.objects.get(pk=pk)
        if site_deployment.tenant_id:
            driver = self.driver.admin_driver(deployment=site_deployment.deployment.name)
            driver.delete_tenant(site_deployment.tenant_id)
        site_deployment.delete()
| 35.692308
| 89
| 0.741379
|
794b45b61decf56d1e25cbc22d15606310a7617d
| 3,043
|
py
|
Python
|
cogs/profiles.py
|
DaiconRadishPotato/Blabber
|
fb40dfd0e20e37c1b230e3546173dd1f32de01e0
|
[
"MIT"
] | null | null | null |
cogs/profiles.py
|
DaiconRadishPotato/Blabber
|
fb40dfd0e20e37c1b230e3546173dd1f32de01e0
|
[
"MIT"
] | 1
|
2020-01-03T10:46:40.000Z
|
2020-01-03T10:46:40.000Z
|
cogs/profiles.py
|
DaiconRadishPotato/Blabber
|
fb40dfd0e20e37c1b230e3546173dd1f32de01e0
|
[
"MIT"
] | null | null | null |
# profiles.py
#
# Author: Fanny Avila (Fa-Avila)
# Contributor: Jacky Zhang (jackyeightzhang),
# Marcos Avila (DaiconV)
# Date created: 3/27/2020
# Date last modified: 6/2/2020
# Python Version: 3.8.1
# License: MIT License
from discord import Embed, Colour
from discord.ext import commands
from blabber.checks import *
class Profiles(commands.Cog):
    """
    Collection of commands for managing Blabber user settings.

    parameters:
        bot [Bot]: client object representing a Discord bot
    """
    def __init__(self, bot):
        self.voice_profiles = bot.voice_profiles
        self.prefixes = bot.prefixes

    @commands.command(name='voice', aliases=['v'])
    async def voice(self, ctx, *, alias: str=''):
        """
        Modifies/displays voice profile information of the command invoker.

        parameter:
            ctx [Context]: context object representing command invocation
            alias [str] (default=''): name of command invoker's new voice
        """
        member = ctx.author.display_name
        # Check if an alias was provided
        if not alias:
            # No alias given - display the invoker's current voice settings
            alias = self.voice_profiles[(ctx.author, ctx.channel)]
            prefix = self.prefixes[ctx.guild]
            embed = Embed(
                title=":gear: **Voice Settings**",
                colour=Colour.gold())
            embed.add_field(
                name=f"**{member}'s Current Voice:**",
                value=f"`{alias}`",
                inline=False)
            embed.add_field(
                name="**Update Voice:**",
                value=f"`{prefix}voice [New Voice]`",
                inline=False)
        elif await voice_is_valid(alias):
            # Set the command invoker's new alias
            self.voice_profiles[(ctx.author, ctx.channel)] = alias
            embed = Embed(
                title=(f":white_check_mark: **{member}'s new voice is **"
                       f"`{alias}`"),
                colour=Colour.green())
        # NOTE(review): if voice_is_valid() can return False instead of
        # raising, 'embed' would be unbound here - presumably it raises
        # VoiceNotSupported; confirm against blabber.checks.
        await ctx.send(embed=embed)

    @voice.error
    async def voice_error(self, ctx, error):
        """
        Local error handler for Blabber's voice command.

        parameters:
            ctx [Context]: context object representing command invocation
            error [Exception]: exception object raised from command function
        """
        prefix = self.prefixes[ctx.guild]
        embed = Embed(title=":x: **Unable to set voice**", colour=Colour.red())
        if isinstance(error, VoiceNotSupported):
            embed.description = (f"{error}\n\n:wrench: **Use** `{prefix}list` "
                                 "**to search for supported voices**")
        else:
            # Bug fix: the second line used to be a stray expression
            # statement (broken implicit concatenation), so the message
            # silently lost "Please contact development team".
            embed.description = ("**Unexpected Error**\n"
                                 "Please contact development team")
        await ctx.send(embed=embed)
def setup(bot):
    """
    Adds Profiles Cog to bot.

    parameter:
        bot [Bot]: client object representing a Discord bot
    """
    # Entry point used by discord.py's extension loader
    bot.add_cog(Profiles(bot))
| 31.697917
| 78
| 0.576733
|
794b45ea2ef7306e580e26c1e1a1f15ddce9c3fa
| 11,287
|
py
|
Python
|
TextHelper.py
|
DanATW/exchange-rates-tg-bot
|
cda30804fd76807e3ad6e248f6da82e262f06dbe
|
[
"MIT"
] | 10
|
2020-06-11T17:19:01.000Z
|
2022-03-25T17:52:18.000Z
|
TextHelper.py
|
DanATW/exchange-rates-tg-bot
|
cda30804fd76807e3ad6e248f6da82e262f06dbe
|
[
"MIT"
] | 3
|
2021-08-16T16:33:25.000Z
|
2022-01-13T18:30:53.000Z
|
TextHelper.py
|
DanATW/exchange-rates-tg-bot
|
cda30804fd76807e3ad6e248f6da82e262f06dbe
|
[
"MIT"
] | 6
|
2020-07-24T17:40:30.000Z
|
2021-09-16T11:29:16.000Z
|
from aiogram.types.inline_keyboard import InlineKeyboardMarkup, InlineKeyboardButton
import DBH
import ListsCache
import os
from NewPrint import Print
from Dictionaries.ButtonTexts import ButtonTexts
from Dictionaries.MessageTexts import MessageTexts
AllBigTexts = {}
ListOfNamesOfTextforBigTexts = []
def LoadTexts():
    """
    Load localized text files from the 'Texts' directory.

    File names are expected to look like '<LANG><name>.txt' where LANG is
    'UA', 'EN' or 'RU' (e.g. 'ENhelp.txt').  Fills the module-level
    AllBigTexts mapping (language code -> {text name -> text}) and the
    ListOfNamesOfTextforBigTexts list of known text names.  On failure an
    error is printed and the globals are left untouched.
    """
    global AllBigTexts
    global ListOfNamesOfTextforBigTexts
    texts_by_prefix = {"UA": {}, "RU": {}, "EN": {}}
    try:
        for file_name in os.listdir("Texts"):
            # 'with' guarantees the handle is closed even if read() fails
            # (the original opened/closed manually and leaked on errors)
            with open("Texts/" + file_name) as file_with_text:
                file_text = file_with_text.read()
            # Strip the two-letter language prefix and the '.txt' suffix
            name_of_text = file_name[2:-4]
            if name_of_text not in ListOfNamesOfTextforBigTexts:
                ListOfNamesOfTextforBigTexts.append(name_of_text)
            prefix = file_name[:2]
            if prefix in texts_by_prefix:
                texts_by_prefix[prefix][name_of_text] = file_text
        AllBigTexts['ua'] = texts_by_prefix["UA"]
        AllBigTexts['ru'] = texts_by_prefix["RU"]
        AllBigTexts['en'] = texts_by_prefix["EN"]
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # still propagate; any real loading problem is reported as before
        Print("Problem with Texts. Redownload, pls.", "E")
def DonateMarkup(chatID: str, chatType: str) -> InlineKeyboardMarkup:
    """Build the donation keyboard, optionally with a delete button."""
    lang = DBH.GetSetting(chatID, "lang", chatType)
    show_delete = DBH.GetSetting(chatID, "deleteButton", chatType)
    labels = ButtonTexts[lang]
    markup = InlineKeyboardMarkup()
    markup.add(InlineKeyboardButton(
        labels['donate'],
        url="https://secure.wayforpay.com/payment/s3641f64becae",
        callback_data="donate"))
    if show_delete:
        markup.add(InlineKeyboardButton(labels['delete'], callback_data="delete"))
    return markup
def DeleteMarkup(chatID: str, chatType: str) -> InlineKeyboardMarkup:
    """Build a keyboard containing only the delete button, when enabled."""
    markup = InlineKeyboardMarkup()
    # Language (and thus the button label) is only needed when the
    # delete button is actually enabled for this chat
    if DBH.GetSetting(chatID, "deleteButton", chatType):
        lang = DBH.GetSetting(chatID, "lang", chatType)
        markup.add(InlineKeyboardButton(
            ButtonTexts[lang]['delete'], callback_data="delete"))
    return markup
def SettingsMarkup(chatID: str, chatType: str) -> InlineKeyboardMarkup:
    """Build the main settings menu keyboard for the given chat."""
    lang = DBH.GetSetting(chatID, "lang", chatType)
    isDeleteButton = DBH.GetSetting(chatID, "deleteButton", chatType)
    dictLang = ButtonTexts[lang]
    SettingsMU = InlineKeyboardMarkup()
    SettingsMU.add(InlineKeyboardButton(dictLang['currencies'], callback_data = "cur_menu"))
    SettingsMU.add(InlineKeyboardButton(dictLang['lang'], callback_data = "lang_menu"))
    SettingsMU.add(InlineKeyboardButton(dictLang['delete_button'], callback_data = "delbut_menu"))
    SettingsMU.add(InlineKeyboardButton(dictLang['flags'], callback_data = "flags_menu"))
    # Permission settings only make sense in group chats
    if chatType != "private":
        SettingsMU.add(InlineKeyboardButton(dictLang['permisssions'], callback_data = "edit_menu"))
    if isDeleteButton:
        SettingsMU.add(InlineKeyboardButton(dictLang['delete'], callback_data = "delete"))
    return SettingsMU
def DeleteButtonMenuMarkup(chatID: str, chatType: str) -> InlineKeyboardMarkup:
    """Build the delete-button settings menu with current-state marks."""
    def RulesMark(role: str, answDict) -> str:
        # Check mark for the currently selected delete-permission role
        if answDict['deleteRules'] == role:
            return " ✅"
        else:
            return " ❌"
    lang = DBH.GetSetting(chatID, "lang", chatType)
    AllSettings = DBH.GetAllSettings(chatID, chatType)
    dictLang = ButtonTexts[lang]
    DeleteButtonMenuMU = InlineKeyboardMarkup()
    if AllSettings['deleteButton']:
        DeleteButtonMenuMU.add(InlineKeyboardButton(dictLang['delbutton'] + " ✅", callback_data = "delbut_button"))
        # Role selection is only relevant in group chats
        if chatType != "private":
            DeleteButtonMenuMU.add(InlineKeyboardButton(dictLang['creator'] + RulesMark('creator', AllSettings), callback_data = "delbut_creator"))
            DeleteButtonMenuMU.add(InlineKeyboardButton(dictLang['admins'] + RulesMark('admins', AllSettings), callback_data = "delbut_admins"))
            DeleteButtonMenuMU.add(InlineKeyboardButton(dictLang['everybody'] + RulesMark('everybody', AllSettings), callback_data = "delbut_everybody"))
    else:
        DeleteButtonMenuMU.add(InlineKeyboardButton(dictLang['delbutton'] + " ❌", callback_data = "delbut_button"))
    DeleteButtonMenuMU.add(InlineKeyboardButton(dictLang['back'], callback_data = "settings"))
    return DeleteButtonMenuMU
def LanguageMenuMarkup(chatID: str, chatType: str) -> InlineKeyboardMarkup:
    """Build the language selection menu with the current language marked."""
    def RulesMark(lang: str, answDict) -> str:
        # Check mark for the currently selected interface language
        if answDict['lang'] == lang:
            return " ✅"
        else:
            return " ❌"
    lang = DBH.GetSetting(chatID, "lang", chatType)
    AllSettings = DBH.GetAllSettings(chatID, chatType)
    dictLang = ButtonTexts[lang]
    LanguageMenuMU = InlineKeyboardMarkup()
    LanguageMenuMU.add(InlineKeyboardButton("🇬🇧EN" + RulesMark('en', AllSettings), callback_data = "lang_en"))
    LanguageMenuMU.add(InlineKeyboardButton("🇷🇺RU" + RulesMark('ru', AllSettings), callback_data = "lang_ru"))
    LanguageMenuMU.add(InlineKeyboardButton("🇺🇦UA" + RulesMark('ua', AllSettings), callback_data = "lang_ua"))
    LanguageMenuMU.add(InlineKeyboardButton(dictLang['back'], callback_data = "settings"))
    return LanguageMenuMU
def FlagsMarkup(chatID: str, chatType: str) -> InlineKeyboardMarkup:
    """Build the currency-flags toggle menu with the current state marked."""
    lang = DBH.GetSetting(chatID, "lang", chatType)
    settings = DBH.GetAllSettings(chatID, chatType)
    labels = ButtonTexts[lang]
    state_mark = " ✅" if settings['flags'] else " ❌"
    markup = InlineKeyboardMarkup()
    markup.add(InlineKeyboardButton(
        labels['flags_button'] + state_mark, callback_data="flags_button"))
    markup.add(InlineKeyboardButton(labels['back'], callback_data="settings"))
    return markup
def EditMenuMarkup(chatID: str, chatType: str) -> InlineKeyboardMarkup:
    """Build the settings-edit permission menu with the current role marked."""
    def RulesMark(role: str, answDict) -> str:
        # Check mark for the role currently allowed to edit settings
        if answDict['editSettings'] == role:
            return " ✅"
        else:
            return " ❌"
    lang = DBH.GetSetting(chatID, "lang", chatType)
    AllSettings = DBH.GetAllSettings(chatID, chatType)
    dictLang = ButtonTexts[lang]
    EditMenuMU = InlineKeyboardMarkup()
    EditMenuMU.add(InlineKeyboardButton(dictLang['creator'] + RulesMark('creator', AllSettings), callback_data = "edit_creator"))
    EditMenuMU.add(InlineKeyboardButton(dictLang['admins'] + RulesMark('admins', AllSettings), callback_data = "edit_admins"))
    EditMenuMU.add(InlineKeyboardButton(dictLang['everybody'] + RulesMark('everybody', AllSettings), callback_data = "edit_everybody"))
    EditMenuMU.add(InlineKeyboardButton(dictLang['back'], callback_data = "settings"))
    return EditMenuMU
def CurrenciesMainMenuMarkup(chatID: str, chatType: str) -> InlineKeyboardMarkup:
    """Build the top-level currencies menu (fiat / crypto / back)."""
    lang = DBH.GetSetting(chatID, "lang", chatType)
    labels = ButtonTexts[lang]
    markup = InlineKeyboardMarkup()
    # Label key and callback data for each entry, in display order
    for text_key, callback in (
            ('cur_menu', "cur_curmenu"),
            ('crypto_menu', "cur_cryptomenu"),
            ('back', "settings")):
        markup.add(InlineKeyboardButton(labels[text_key], callback_data=callback))
    return markup
def CryptoMenuMarkup(chatID: str, chatType: str) -> InlineKeyboardMarkup:
    """Build the crypto-currency toggle menu (✅ enabled / ❌ disabled)."""
    lang = DBH.GetSetting(chatID, "lang", chatType)
    dictLang = ButtonTexts[lang]
    CryptoMenuMU = InlineKeyboardMarkup()
    AllCrypto = ListsCache.GetListOfCrypto()
    TurnedOnCrypto = DBH.GetAllCrypto(chatID)
    # One toggle button per known crypto currency, marked by its state
    for i in AllCrypto:
        if i in TurnedOnCrypto:
            CryptoMenuMU.add(InlineKeyboardButton(i + " ✅", callback_data = "cur_" + i))
        else:
            CryptoMenuMU.add(InlineKeyboardButton(i + " ❌", callback_data = "cur_" + i))
    CryptoMenuMU.add(InlineKeyboardButton(dictLang['back'], callback_data = "cur_menu"))
    return CryptoMenuMU
def CurrenciesMenuMarkup(chatID: str, chatType: str) -> InlineKeyboardMarkup:
    """Build the alphabet navigation menu for fiat currencies."""
    lang = DBH.GetSetting(chatID, "lang", chatType)
    markup = InlineKeyboardMarkup()
    # Letter groups and their callback suffixes, in display order
    for label, suffix in (
            ("A", "a"), ("B", "b"), ("C", "c"), ("D-F", "df"),
            ("G-H", "gh"), ("I-J", "ij"), ("K-L", "kl"), ("M", "m"),
            ("N-Q", "nq"), ("R-S", "rs"), ("T-U", "tu"), ("V-Z", "vz")):
        markup.add(InlineKeyboardButton(label, callback_data="cur_" + suffix))
    markup.add(InlineKeyboardButton(
        ButtonTexts[lang]['back'], callback_data="cur_menu"))
    return markup
def CurrenciesSetupMarkup(chatID: str, chatType: str, letter: str) -> InlineKeyboardMarkup:
    """
    Build the on/off toggle menu for fiat currencies whose codes start
    with the given letter ('a') or inclusive letter range ('df').
    """
    lang = DBH.GetSetting(chatID, "lang", chatType)
    dictLang = ButtonTexts[lang]
    all_currencies = ListsCache.GetListOfCur()
    enabled = DBH.GetAllCurrencies(chatID)
    flags = ListsCache.GetDictOfFlags()
    markup = InlineKeyboardMarkup()
    # Expand the argument into the set of accepted first letters
    if len(letter) == 1:
        accepted = {letter.upper()}
    else:
        first, last = ord(letter[0].upper()), ord(letter[1].upper())
        accepted = {chr(code) for code in range(first, last + 1)}
    for currency in all_currencies:
        if currency[0] not in accepted:
            continue
        state_mark = " ✅" if currency in enabled else " ❌"
        markup.add(InlineKeyboardButton(
            flags[currency] + currency + state_mark,
            callback_data="cur_" + currency))
    markup.add(InlineKeyboardButton(dictLang['back'], callback_data="cur_curmenu"))
    return markup
def GetText(chatID: str, nameOfText: str, chatType: str) -> str:
    """
    Look up a localized text by name for the given chat.

    Search order: big file-based texts, then button texts, then message
    texts.  Returns an empty string when the name is unknown.
    """
    lang = DBH.GetSetting(chatID, "lang", chatType)
    for known_names, catalog in (
            (ListOfNamesOfTextforBigTexts, AllBigTexts),
            (ButtonTexts[lang], ButtonTexts),
            (MessageTexts[lang], MessageTexts)):
        if nameOfText in known_names:
            return catalog[lang][nameOfText]
    return ''
| 49.504386
| 153
| 0.694782
|
794b4693e0e4556cc8e8ce094f1406f766d4f0ab
| 7,528
|
py
|
Python
|
tests/test_processing.py
|
patymori/document-store-migracao
|
1320ef58de1484ca8383c29c1fea55c4b2d89e67
|
[
"BSD-2-Clause"
] | 1
|
2019-11-21T12:35:36.000Z
|
2019-11-21T12:35:36.000Z
|
tests/test_processing.py
|
patymori/document-store-migracao
|
1320ef58de1484ca8383c29c1fea55c4b2d89e67
|
[
"BSD-2-Clause"
] | 336
|
2019-04-01T14:06:37.000Z
|
2022-03-21T22:16:55.000Z
|
tests/test_processing.py
|
patymori/document-store-migracao
|
1320ef58de1484ca8383c29c1fea55c4b2d89e67
|
[
"BSD-2-Clause"
] | 4
|
2019-03-28T13:32:04.000Z
|
2020-04-17T18:03:19.000Z
|
import os
import unittest
import tempfile
import shutil
from lxml import etree
from unittest.mock import patch, ANY, call, Mock, MagicMock
from xylose.scielodocument import Journal, Article
from documentstore_migracao.processing import (
extracted,
conversion,
validation,
reading,
inserting,
)
from documentstore_migracao.utils import PoisonPill
from . import (
utils,
SAMPLES_PATH,
SAMPLES_JOURNAL,
SAMPLES_XML_ARTICLE,
SAMPLES_ARTICLE,
COUNT_SAMPLES_FILES,
SAMPLE_KERNEL_JOURNAL,
)
class TestProcessingExtracted(unittest.TestCase):
    """Tests for the article extraction step."""

    @patch("documentstore_migracao.processing.extracted.article.ext_article_txt")
    def test_extract_all_data(self, mk_extract_article_txt):
        # Extraction should write one XML file per PID into SOURCE_PATH
        mk_extract_article_txt.return_value = SAMPLES_XML_ARTICLE
        with utils.environ(SOURCE_PATH="/tmp"):
            try:
                extracted.extract_all_data(["S0036-36341997000100001"])
                self.assertTrue(os.path.exists("/tmp/S0036-36341997000100001.xml"))
            finally:
                # Always clean up the generated file
                os.remove("/tmp/S0036-36341997000100001.xml")
class TestProcessingConversion(unittest.TestCase):
    """Tests for converting source XML articles into SPS packages."""

    def setUp(self):
        # Fresh temporary output folder for every test
        self.conversion_path = tempfile.mkdtemp()
        self.poison_pill = PoisonPill()

    def tearDown(self):
        shutil.rmtree(self.conversion_path)

    def test_convert_article_xml_saves_xml_obj_in_conversion_path(self):
        file_xml_path = os.path.join(SAMPLES_PATH, "S0036-36341997000100001.xml")
        with utils.environ(
            SOURCE_PATH=SAMPLES_PATH, CONVERSION_PATH=self.conversion_path
        ):
            conversion.convert_article_xml(file_xml_path, self.poison_pill)
            # The converted file gets the language code in its name
            new_file_xml_path = os.path.join(
                self.conversion_path, "S0036-36341997000100001.es.xml"
            )
            self.assertTrue(os.path.exists(new_file_xml_path))

    def test_convert_article_xml_completes_pubdate(self):
        file_xml_path = os.path.join(SAMPLES_PATH, "S0036-36341997000100001.xml")
        with utils.environ(
            SOURCE_PATH=SAMPLES_PATH, CONVERSION_PATH=self.conversion_path
        ):
            conversion.convert_article_xml(file_xml_path, self.poison_pill)
            new_file_xml_path = os.path.join(
                self.conversion_path, "S0036-36341997000100001.es.xml"
            )
            xmltree = etree.parse(new_file_xml_path, etree.XMLParser())
            # Conversion must add both publication date variants
            self.assertIsNotNone(xmltree.find('.//pub-date[@date-type="pub"]'))
            self.assertIsNotNone(xmltree.find('.//pub-date[@date-type="collection"]'))
class TestReadingJournals(unittest.TestCase):
    """Tests for reading the ISIS 'title' journals JSON dump."""

    def setUp(self):
        self.journals_json_path = os.path.join(
            SAMPLES_PATH, "base-isis-sample", "title", "title.json"
        )

    def test_should_load_file_successfull(self):
        data = reading.read_json_file(self.journals_json_path)
        # Bug fix: assertTrue(type(data), list) always passed because the
        # second argument is only the failure message - assert the type.
        self.assertIsInstance(data, list)
        self.assertEqual(
            data[0].get("v140")[0]["_"],
            "Colégio Brasileiro de Cirurgia Digestiva - CBCD",
        )
        self.assertEqual(len(data), 3)
class TestConversionJournalJson(unittest.TestCase):
    """Tests converting an ISIS journal record into a kernel bundle."""

    def setUp(self):
        # Minimal ISIS record with the fields the conversion reads
        self.json_journal = {
            "v100": [{"_": "sample"}],
            "v68": [{"_": "spl"}],
            "v940": [{"_": "20190128"}],
            "v50": [{"_": "C"}],
            "v400": [{"_": "0001-3714"}],
            "v435": [{"t": "ONLIN", "_": "0001-3714"}],
        }

    def test_should_return_a_bundle(self):
        journal = conversion.conversion_journal_to_bundle(self.json_journal)
        self.assertEqual(SAMPLE_KERNEL_JOURNAL, journal)

    def test_should_return_a_list_of_bundle(self):
        # The plural helper should wrap each record conversion in a list
        journals = conversion.conversion_journals_to_kernel([self.json_journal])
        self.assertEqual([SAMPLE_KERNEL_JOURNAL], journals)
class TestProcessingValidation(unittest.TestCase):
    """Tests for validation.validate_article_xml / validate_article_ALLxml."""
    def test_validate_article_xml(self):
        """An invalid sample reports the schema error keyed by its message."""
        result = validation.validate_article_xml(
            os.path.join(SAMPLES_PATH, "S0044-59672003000300001.pt.xml")
        )
        self.assertIn(
            "Element p is not declared in p list of possible children", result.keys()
        )
    def test_validate_article_xml_not_print(self):
        """Validation with printing disabled still returns the error mapping."""
        result = validation.validate_article_xml(
            os.path.join(SAMPLES_PATH, "S0044-59672003000300001.pt.xml"), False
        )
        self.assertIn(
            "Element p is not declared in p list of possible children", result.keys()
        )
    def test_validate_article_xml_XMLSPSVersionError(self):
        """A file missing its SPS version yields the version-lookup error."""
        result = validation.validate_article_xml(
            os.path.join(SAMPLES_PATH, "S0102-86501998000100007_invalid.xml")
        )
        self.assertIn(
            "cannot get the SPS version from /article/@specific-use", result.keys()
        )
    def test_validate_article_xml_valid(self):
        """A valid article produces an empty error mapping."""
        result = validation.validate_article_xml(
            os.path.join(SAMPLES_PATH, "0034-8910-rsp-48-2-0347-valid.xml"), False
        )
        self.assertEqual({}, result)
    @patch("documentstore_migracao.processing.validation.validate_article_xml")
    def test_validate_article_ALLxml(self, mk_validate_article_xml):
        """ALLxml validates every XML in CONVERSION_PATH (order-independent)."""
        mk_validate_article_xml.return_value = {
            "Element p is not declared in p list of possible children": {
                "count": 2,
                "lineno": [410, 419],
                "filename": {
                    os.path.join(SAMPLES_PATH, "S0044-59672003000300001.pt.xml")
                },
                "message": [
                    "Element p is not declared in p" " list of possible children",
                    "Element p is not declared in p" " list of possible children",
                ],
            }
        }
        list_files_xmls = [
            "S0044-59672003000300001.pt.xml",
            "S0102-86501998000100007.xml",
            "S0102-86501998000100002.xml",
            "S0036-36341997000100003.xml",
            "S0036-36341997000100002.xml",
            "S0036-36341997000100001.xml",
        ]
        calls = [
            call(os.path.join(SAMPLES_PATH, file_xml), False)
            for file_xml in list_files_xmls
        ]
        with utils.environ(CONVERSION_PATH=SAMPLES_PATH):
            validation.validate_article_ALLxml()
            mk_validate_article_xml.assert_has_calls(calls, any_order=True)
    @patch("documentstore_migracao.processing.validation.validate_article_xml")
    def test_validate_article_ALLxml_with_exception(self, mk_validate_article_xml):
        """Exceptions raised by per-file validation propagate out of ALLxml."""
        mk_validate_article_xml.side_effect = KeyError("Test Error - Validation")
        with utils.environ(CONVERSION_PATH=SAMPLES_PATH):
            with self.assertRaises(KeyError) as cm:
                validation.validate_article_ALLxml()
            self.assertEqual("Test Error - Validation", str(cm.exception))
    @patch("documentstore_migracao.processing.validation.XMLValidator")
    def test_validation_should_fail_if_lxml_raise_an_exception(self, mk_xmlvalidator):
        """An XMLSyntaxError from the parser is folded into the error mapping."""
        mk_xmlvalidator.parse.side_effect = etree.XMLSyntaxError(
            "some error", 1, 1, 1, "fake_path/file.xml"
        )
        result = validation.validate_article_xml("fake_path/file.xml")
        self.assertEqual(
            {
                "some error (file.xml, line 1)": {
                    "count": 1,
                    "lineno": [1],
                    "message": ["some error (file.xml, line 1)"],
                    "filename": {"fake_path/file.xml"},
                }
            },
            result,
        )
| 35.677725
| 86
| 0.640276
|
794b473b6d66339c5bfc050345477c96722c4cc8
| 2,953
|
py
|
Python
|
pettingzoo/sisl/waterworld/waterworld.py
|
rodrigodelazcano/PettingZoo
|
41fe43c7da2fd92fa8c6aa5a5a28083664092aa5
|
[
"Apache-2.0"
] | null | null | null |
pettingzoo/sisl/waterworld/waterworld.py
|
rodrigodelazcano/PettingZoo
|
41fe43c7da2fd92fa8c6aa5a5a28083664092aa5
|
[
"Apache-2.0"
] | null | null | null |
pettingzoo/sisl/waterworld/waterworld.py
|
rodrigodelazcano/PettingZoo
|
41fe43c7da2fd92fa8c6aa5a5a28083664092aa5
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from pettingzoo import AECEnv
from pettingzoo.utils import agent_selector, wrappers
from pettingzoo.utils.conversions import parallel_wrapper_fn
from .waterworld_base import MAWaterWorld as _env
def env(**kwargs):
    """Build a waterworld AEC environment wrapped with the standard checks.

    All keyword arguments are forwarded to the raw environment constructor.
    """
    wrapped = raw_env(**kwargs)
    # Clip out-of-bounds continuous actions, then enforce reset/step ordering.
    for wrapper in (wrappers.ClipOutOfBoundsWrapper, wrappers.OrderEnforcingWrapper):
        wrapped = wrapper(wrapped)
    return wrapped
# Parallel-API counterpart generated from the wrapped AEC constructor above.
parallel_env = parallel_wrapper_fn(env)
class raw_env(AECEnv):
    """AEC (turn-based) wrapper around the multi-agent WaterWorld simulation."""
    metadata = {"render.modes": ["human", "rgb_array"], "name": "waterworld_v3"}
    def __init__(self, *args, **kwargs):
        super().__init__()
        # Underlying simultaneous-move simulation; all args forwarded to it.
        self.env = _env(*args, **kwargs)
        self.agents = ["pursuer_" + str(r) for r in range(self.env.num_agents)]
        self.possible_agents = self.agents[:]
        # Maps agent name -> integer index used by the underlying simulation.
        self.agent_name_mapping = dict(zip(self.agents, list(range(self.num_agents))))
        self._agent_selector = agent_selector(self.agents)
        # spaces
        self.action_spaces = dict(zip(self.agents, self.env.action_space))
        self.observation_spaces = dict(zip(self.agents, self.env.observation_space))
        # Guards close(): only tear down the simulation after a reset happened.
        self.has_reset = False
    def seed(self, seed=None):
        """Seed the underlying simulation's RNG."""
        self.env.seed(seed)
    def convert_to_dict(self, list_of_list):
        """Zip per-agent values (ordered like self.agents) into a name-keyed dict."""
        return dict(zip(self.agents, list_of_list))
    def reset(self):
        """Reset the simulation and reinitialize all per-agent bookkeeping."""
        self.has_reset = True
        self.env.reset()
        self.agents = self.possible_agents[:]
        self._agent_selector.reinit(self.agents)
        self.agent_selection = self._agent_selector.next()
        self.rewards = dict(zip(self.agents, [(0) for _ in self.agents]))
        self._cumulative_rewards = dict(zip(self.agents, [(0) for _ in self.agents]))
        self.dones = dict(zip(self.agents, [False for _ in self.agents]))
        self.infos = dict(zip(self.agents, [{} for _ in self.agents]))
    def close(self):
        if self.has_reset:
            self.env.close()
    def render(self, mode="human"):
        return self.env.render(mode)
    def step(self, action):
        """Apply one agent's action; the simulation advances on the last agent.

        Control rewards are refreshed on every call; the global rewards from
        the simulation step are added only after the last agent has acted.
        """
        if self.dones[self.agent_selection]:
            return self._was_done_step(action)
        agent = self.agent_selection
        is_last = self._agent_selector.is_last()
        self.env.step(action, self.agent_name_mapping[agent], is_last)
        for r in self.rewards:
            self.rewards[r] = self.env.control_rewards[self.agent_name_mapping[r]]
        if is_last:
            for r in self.rewards:
                self.rewards[r] += self.env.last_rewards[self.agent_name_mapping[r]]
        # Episode ends for everyone at the cycle cap; otherwise defer to the env.
        if self.env.frames >= self.env.max_cycles:
            self.dones = dict(zip(self.agents, [True for _ in self.agents]))
        else:
            self.dones = dict(zip(self.agents, self.env.last_dones))
        self._cumulative_rewards[self.agent_selection] = 0
        self.agent_selection = self._agent_selector.next()
        self._accumulate_rewards()
    def observe(self, agent):
        """Return the observation vector for *agent* from the underlying env."""
        return self.env.observe(self.agent_name_mapping[agent])
| 34.741176
| 86
| 0.657636
|
794b477ff48c583c08d33aed1a6acec62726bc99
| 4,128
|
py
|
Python
|
src/intensity_func.py
|
quangduyhcmut/Simple_Image_ProcessinUIg_G
|
546bf33ee4ad0904594dfe702e7c864472c617bc
|
[
"MIT"
] | null | null | null |
src/intensity_func.py
|
quangduyhcmut/Simple_Image_ProcessinUIg_G
|
546bf33ee4ad0904594dfe702e7c864472c617bc
|
[
"MIT"
] | null | null | null |
src/intensity_func.py
|
quangduyhcmut/Simple_Image_ProcessinUIg_G
|
546bf33ee4ad0904594dfe702e7c864472c617bc
|
[
"MIT"
] | null | null | null |
import cv2
import numpy as np
def img_neg(img):
    """Return the photographic negative of img: s = 255 - r."""
    return 255 - img
def img_thres(img, threshold):
    """Binarize img IN PLACE: pixels below threshold -> 0, others -> 255.

    Returns the same (mutated) array for convenience.
    """
    below = img < threshold
    img[below] = 0
    img[~below] = 255
    return img
def img_log(image):
    """Apply the log transform s = c * ln(1 + r), with c chosen so 255 -> ~255.

    Parameters
    ----------
    image : ndarray
        Input intensities, expected in [0, 255].

    Returns
    -------
    ndarray of uint8
        Log-compressed intensities (dark values are expanded).
    """
    # BUG FIX: np.float was removed in NumPy 1.24 (AttributeError on modern
    # NumPy); the builtin float is the same float64 alias it stood for.
    image = image.astype(float)
    c = 255 / np.log(1 + 255)
    log_image = c * (np.log(image + 1))
    log_image = np.array(log_image, dtype = np.uint8)
    return log_image
def img_invlog(image):
    """Apply the inverse-log (exponential) transform, undoing img_log.

    Computes s = exp(r / c) - 1 with c = 255 / ln(256), so 0 -> 0 and
    255 -> ~255.

    Parameters
    ----------
    image : ndarray
        Input intensities, expected in [0, 255].

    Returns
    -------
    ndarray of uint8
    """
    # BUG FIX: np.float was removed in NumPy 1.24; use the builtin float.
    image = image.astype(float)
    c = 255 / np.log(1 + 255)
    # exp(r/c) - 1 is mathematically identical to (e**r)**(1/c) - 1 but
    # avoids the huge intermediate e**255 of the original formulation.
    exp_image = np.exp(image / c) - 1
    exp_image = np.array(exp_image, dtype = np.uint8)
    return exp_image
def img_gamma_correction(img, c, gamma):
    """Power-law (gamma) transform: s = c * (r/255)**gamma, rescaled to uint8."""
    normalized = img / 255
    transformed = c * normalized ** gamma
    return np.array(transformed * 255, dtype=np.uint8)
def pix_linear(img, r1, s1, r2, s2):
    """Piecewise-linear contrast mapping of one intensity value.

    Three segments: (0,0)-(r1,s1), (r1,s1)-(r2,s2), (r2,s2)-(255,255).
    """
    if 0 <= img <= r1:
        return (s1 / r1) * img
    if r1 < img <= r2:
        return (s2 - s1) / (r2 - r1) * (img - r1) + s1
    return (255 - s2) / (255 - r2) * (img - r2) + s2
def img_linear(img, r1, s1, r2, s2):
    """Contrast-stretch an image by applying pix_linear element-wise."""
    stretch = np.vectorize(pix_linear)
    return stretch(img, r1, s1, r2, s2)
def img_bit_trans(img):
    """Decompose a grayscale uint8 image into its 8 bit planes.

    The planes are tiled into a 2x4 mosaic, most-significant plane first
    (top-left), each plane scaled by its bit weight (bit k -> 0 or 2**k),
    matching the original per-pixel binary_repr decomposition.

    Parameters
    ----------
    img : ndarray
        2-D grayscale image, values in [0, 255].

    Returns
    -------
    ndarray of uint8, shape (2*H, 4*W)
    """
    img = np.asarray(img, dtype=np.uint8)
    # ((pixel >> k) & 1) * 2**k extracts and re-weights bit k; this replaces
    # the original O(H*W) Python loop over binary_repr strings with
    # vectorized C-speed arithmetic.
    planes = [((img >> k) & 1) * np.uint8(1 << k) for k in range(7, -1, -1)]
    # np.hstack/np.vstack on 2-D arrays are equivalent to cv2.hconcat/vconcat,
    # removing the cv2 dependency from this pure-array routine.
    top = np.hstack(planes[:4])
    bottom = np.hstack(planes[4:])
    return np.vstack([top, bottom])
def arguments_parser():
    """Parse CLI options for the intensity-transform demo.

    Returns
    -------
    argparse.Namespace
        With attributes path (str), function (str), threshold (int),
        gamma (float) and save (bool).
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', '--path', default='data/Lenna.png', required=False, help='Path to image')
    parser.add_argument('-f', '--function', choices=['negative', 'threshold', 'log', 'invlog', 'gamma', 'linear', 'bitplane'], required=True, help='Choose transformation function')
    # BUG FIX: without type=, values given on the command line stay strings,
    # which breaks the numeric comparisons in img_thres and the exponent in
    # img_gamma_correction (the defaults were already int/float).
    parser.add_argument('-thr', '--threshold', type=int, default=127, required=False, help='Threshold value')
    parser.add_argument('-g', '--gamma', type=float, default=0.5, required=False, help='Gamma correction coefficient')
    parser.add_argument('-s', '--save', action='store_true', help='Save output image')
    return parser.parse_args()
def main():
    """Load an image, apply the selected intensity transform, and display it.

    Optionally saves the transformed image to output/intensity_demo.png.
    """
    args = arguments_parser()
    # Flag 1 = load as 3-channel BGR color image.
    img = cv2.imread(args.path,1)
    # NOTE(review): args.threshold / args.gamma are strings when supplied on
    # the command line (the parser declares no type=); confirm intended.
    if args.function == 'negative':
        output = img_neg(img)
    elif args.function == 'threshold':
        output = img_thres(img, args.threshold)
    elif args.function =='log':
        output = img_log(img)
    elif args.function == 'invlog':
        output = img_invlog(img)
    elif args.function == 'gamma':
        # c fixed at 1; only gamma is user-controlled.
        output = img_gamma_correction(img, 1, args.gamma)
    elif args.function == 'linear':
        # Hard-coded contrast-stretch control points.
        output = img_linear(img, r1=5, s1=10, r2=100, s2=200)
    elif args.function == 'bitplane':
        output = img_bit_trans(img)
    else:
        raise NotImplementedError
    if args.save:
        cv2.imwrite('output/intensity_demo.png', output)
    # Shows the ORIGINAL image; the transform result is only saved, not shown.
    cv2.imshow("img",img)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
if __name__ == "__main__":
    main()
| 35.895652
| 180
| 0.634205
|
794b48b86955c141eabc2a9c7285043d3045c23c
| 192
|
py
|
Python
|
submissions/arc033/a.py
|
m-star18/atcoder
|
08e475810516602fa088f87daf1eba590b4e07cc
|
[
"Unlicense"
] | 1
|
2021-05-10T01:16:28.000Z
|
2021-05-10T01:16:28.000Z
|
submissions/arc033/a.py
|
m-star18/atcoder
|
08e475810516602fa088f87daf1eba590b4e07cc
|
[
"Unlicense"
] | 3
|
2021-05-11T06:14:15.000Z
|
2021-06-19T08:18:36.000Z
|
submissions/arc033/a.py
|
m-star18/atcoder
|
08e475810516602fa088f87daf1eba590b4e07cc
|
[
"Unlicense"
] | null | null | null |
import sys
# Fast binary-stdin helpers (competitive-programming boilerplate).
read = sys.stdin.buffer.read
readline = sys.stdin.buffer.readline
readlines = sys.stdin.buffer.readlines
sys.setrecursionlimit(10 ** 7)
# The answer is the n-th triangular number: 1 + 2 + ... + n.
n = int(readline())
answer = n * (n + 1) // 2
print(answer)
| 21.333333
| 38
| 0.713542
|
794b48c7f1ef4c01ebbf256938276ccc354008c2
| 3,861
|
py
|
Python
|
todo_list_web/settings.py
|
felipe-menelau/todo-list-web
|
9b60a549dc6d5bdd88e1a584b8bb2c4f56131cb5
|
[
"MIT"
] | null | null | null |
todo_list_web/settings.py
|
felipe-menelau/todo-list-web
|
9b60a549dc6d5bdd88e1a584b8bb2c4f56131cb5
|
[
"MIT"
] | null | null | null |
todo_list_web/settings.py
|
felipe-menelau/todo-list-web
|
9b60a549dc6d5bdd88e1a584b8bb2c4f56131cb5
|
[
"MIT"
] | null | null | null |
"""
Django settings for todo_list_web project.
Generated by 'django-admin startproject' using Django 2.0.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
from dotenv import load_dotenv
from datetime import timedelta
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Environment variables are read from a .env file at the project root.
load_dotenv(os.path.join(BASE_DIR, '.env'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.getenv('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): DEBUG is hard-coded True; consider sourcing it from the
# environment before deploying -- confirm intended.
DEBUG = True
# Comma-separated host list, e.g. ALLOWED_HOSTS=example.com,api.example.com
ALLOWED_HOSTS = os.getenv('ALLOWED_HOSTS').split(',') if os.getenv('ALLOWED_HOSTS') else []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'api',
    'django_extensions',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'todo_list_web.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'todo_list_web.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
# PostgreSQL connection configured entirely from environment variables;
# PORT '' means "use the driver default" (5432).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': os.getenv('DB_NAME'),
        'USER': os.getenv('DB_USERNAME'),
        'PASSWORD': os.getenv('DB_PASSWORD'),
        'HOST': os.getenv('DB_HOST'),
        'PORT': '',
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# All API endpoints authenticate with JWT bearer tokens by default.
REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework_simplejwt.authentication.JWTAuthentication',
    )
}
SIMPLE_JWT = {
    'ACCESS_TOKEN_LIFETIME': timedelta(days=1)
}
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
USE_I18N = True
USE_L10N = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
# Email Config
# NOTE(review): os.getenv returns strings -- EMAIL_USE_TLS will be the string
# 'True'/'False' (always truthy) and EMAIL_PORT a string; confirm the intended
# types before relying on these in production.
EMAIL_USE_TLS = os.getenv('EMAIL_USE_TLS')
EMAIL_HOST = os.getenv('EMAIL_HOST')
EMAIL_HOST_USER = os.getenv('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = os.getenv('EMAIL_HOST_PASSWORD')
EMAIL_PORT = os.getenv('EMAIL_PORT')
| 27
| 91
| 0.700078
|
794b4a58ef4728ec1a1dbd9431a97d2a3160abf8
| 1,111
|
py
|
Python
|
WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/python-scripts/scripts/34_git_all_repos.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | 5
|
2021-06-02T23:44:25.000Z
|
2021-12-27T16:21:57.000Z
|
WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/python-scripts/scripts/34_git_all_repos.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | 22
|
2021-05-31T01:33:25.000Z
|
2021-10-18T18:32:39.000Z
|
WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/python-scripts/scripts/34_git_all_repos.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | 3
|
2021-06-19T03:37:47.000Z
|
2021-08-31T00:49:51.000Z
|
import sys
import os
import requests
def get_total_repos(group, name):
    """Collect every clone URL for a GitHub user/org via the paginated API.

    group is the API namespace ("users" or "orgs"); name is the account.
    Returns the list of clone URLs, or False when a request fails.
    """
    url_template = "https://api.github.com/{0}/{1}/repos?per_page=100&page={2}"
    clone_urls = []
    page = 1
    while True:
        response = requests.get(url_template.format(group, name, page))
        if response.status_code != 200:
            print(response)
            return False
        batch = response.json()
        clone_urls.extend(repo["clone_url"] for repo in batch)
        if len(batch) < 100:
            # A short page means we've reached the end of the listing.
            print("Found {0} repos.".format(len(clone_urls)))
            return clone_urls
        page += 1
def clone_repos(all_repos):
    """Shell out to `git clone` for each repository URL, logging progress."""
    total = len(all_repos)
    print("Cloning...")
    for count, repo in enumerate(all_repos, start=1):
        # BUG FIX: the binary is `git`, not `Git` -- the capitalized command
        # fails on case-sensitive systems (Linux/macOS) with "command not found".
        os.system("git clone " + repo)
        print("Completed repo #{0} of {1}".format(count, total))
if __name__ == "__main__":
    # argv[1] selects the API namespace ("users" or "orgs");
    # argv[2] is the account name.
    if len(sys.argv) > 2:
        total = get_total_repos(sys.argv[1], sys.argv[2])
        if total:
            clone_repos(total)
    else:
        # BUG FIX: the original usage line omitted the script name itself.
        print("Usage: python {0} users|orgs GITHUB_USER_OR_ORG_NAME".format(sys.argv[0]))
| 25.25
| 74
| 0.538254
|
794b4aa85994085d27f131d60f42577781a9b496
| 4,877
|
py
|
Python
|
astropy/nddata/tests/test_compat.py
|
jbkalmbach/astropy
|
88ae8c615533efd1e60de4aded204943f66f881c
|
[
"BSD-3-Clause"
] | 1
|
2022-03-02T17:07:20.000Z
|
2022-03-02T17:07:20.000Z
|
astropy/nddata/tests/test_compat.py
|
jbkalmbach/astropy
|
88ae8c615533efd1e60de4aded204943f66f881c
|
[
"BSD-3-Clause"
] | 11
|
2017-12-18T16:27:29.000Z
|
2018-08-29T14:54:22.000Z
|
astropy/nddata/tests/test_compat.py
|
jbkalmbach/astropy
|
88ae8c615533efd1e60de4aded204943f66f881c
|
[
"BSD-3-Clause"
] | 1
|
2018-08-02T09:33:21.000Z
|
2018-08-02T09:33:21.000Z
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# This module contains tests of a class equivalent to pre-1.0 NDData.
import pytest
import numpy as np
from ..nddata import NDData
from ..compat import NDDataArray
from ..nduncertainty import StdDevUncertainty
from ... import units as u
# Attributes the pre-1.0 NDData interface exposed; NDDataArray must keep them.
NDDATA_ATTRIBUTES = ['mask', 'flags', 'uncertainty', 'unit', 'shape', 'size',
                     'dtype', 'ndim', 'wcs', 'convert_unit_to']
def test_nddataarray_has_attributes_of_old_nddata():
    """Every legacy NDData attribute is still present on NDDataArray."""
    instance = NDDataArray([1, 2, 3])
    assert all(hasattr(instance, name) for name in NDDATA_ATTRIBUTES)
def test_nddata_simple():
    """Basic array construction exposes shape/size/dtype correctly."""
    arr = NDDataArray(np.zeros((10, 10)))
    assert arr.shape == (10, 10)
    assert arr.size == 100
    assert arr.dtype == np.dtype(float)
def test_nddata_parameters():
    """Regression test for issue 4620: construction via various call styles."""
    def _check(nd):
        assert nd.shape == (10, 10)
        assert nd.size == 100
        assert nd.dtype == np.dtype(float)
    # `data` passed as an explicit keyword.
    _check(NDDataArray(data=np.zeros((10, 10))))
    # Keyword order changed; `data` must still be accepted explicitly.
    _check(NDDataArray(meta={}, data=np.zeros((10, 10))))
    # Uncertainty supplied as the second positional argument.
    data = np.zeros((10, 10))
    uncertainty = StdDevUncertainty(0.1 + np.zeros_like(data))
    nd = NDDataArray(data, uncertainty)
    _check(nd)
    assert nd.uncertainty == uncertainty
def test_nddata_conversion():
    """A 2x3 integer array keeps its size and integer dtype."""
    converted = NDDataArray(np.array([[1, 2, 3], [4, 5, 6]]))
    assert converted.size == 6
    assert converted.dtype == np.dtype(int)
@pytest.mark.parametrize('flags_in', [
    np.array([True, False]),
    np.array([1, 0]),
    [True, False],
    [1, 0],
    np.array(['a', 'b']),
    ['a', 'b']])
def test_nddata_flags_init_without_np_array(flags_in):
    """Flags may be given as lists or arrays of bool/int/str alike."""
    instance = NDDataArray([1, 1], flags=flags_in)
    assert (instance.flags == flags_in).all()
@pytest.mark.parametrize(('shape'), [(10,), (5, 5), (3, 10, 10)])
def test_nddata_flags_invalid_shape(shape):
    """Mismatched flag shapes are rejected with a descriptive ValueError."""
    with pytest.raises(ValueError) as exc_info:
        NDDataArray(np.zeros((10, 10)), flags=np.ones(shape))
    assert exc_info.value.args[0] == 'dimensions of flags do not match data'
def test_convert_unit_to():
    """convert_unit_to returns a scaled copy, leaving the original untouched."""
    # convert_unit_to should return a copy of its input
    d = NDDataArray(np.ones((5, 5)))
    d.unit = 'km'
    d.uncertainty = StdDevUncertainty(0.1 + np.zeros_like(d))
    # workaround because zeros_like does not support dtype arg until v1.6
    # and NDData accepts only bool ndarray as mask
    tmp = np.zeros_like(d.data)
    d.mask = np.array(tmp, dtype=bool)
    d1 = d.convert_unit_to('m')
    # km -> m multiplies both data and uncertainty by 1000.
    assert np.all(d1.data == np.array(1000.0))
    assert np.all(d1.uncertainty.array == 1000.0 * d.uncertainty.array)
    assert d1.unit == u.m
    # changing the output mask should not change the original
    d1.mask[0, 0] = True
    assert d.mask[0, 0] != d1.mask[0, 0]
    # Conversion must also succeed when flags are present.
    d.flags = np.zeros_like(d.data)
    d1 = d.convert_unit_to('m')
# check that subclasses can require wcs and/or unit to be present and use
# _arithmetic and convert_unit_to
class SubNDData(NDDataArray):
    """
    Subclass for test initialization of subclasses in NDData._arithmetic and
    NDData.convert_unit_to
    """
    def __init__(self, *arg, **kwd):
        super().__init__(*arg, **kwd)
        # Enforce that both unit and wcs are provided at construction time,
        # so the re-initialization inside convert_unit_to is exercised.
        if self.unit is None:
            raise ValueError("Unit for subclass must be specified")
        if self.wcs is None:
            raise ValueError("WCS for subclass must be specified")
def test_init_of_subclass_in_convert_unit_to():
    """convert_unit_to re-initializes subclasses that require unit and wcs."""
    values = np.ones([10, 10])
    original = SubNDData(values, unit='m', wcs=5)
    converted = original.convert_unit_to('km')
    # m -> km divides by 1000, so original == 1000 * converted.
    np.testing.assert_array_equal(original.data, 1000 * converted.data)
# Test for issue #4129:
def test_nddataarray_from_nddataarray():
    """NDDataArray(NDDataArray) shares, not copies, the wrapped objects."""
    source = NDDataArray([1., 4., 9.],
                         uncertainty=StdDevUncertainty([1., 2., 3.]),
                         flags=[0, 1, 0])
    clone = NDDataArray(source)
    # Identity (`is`), not mere equality: documented behaviour for `data`,
    # and we want to notice any unintended change for the other attributes.
    assert clone.data is source.data
    assert clone.uncertainty is source.uncertainty
    assert clone.flags is source.flags
    assert clone.meta == source.meta
# Test for issue #4137:
def test_nddataarray_from_nddata():
    """NDDataArray built from a plain NDData shares data and uncertainty."""
    source = NDData([1., 4., 9.],
                    uncertainty=StdDevUncertainty([1., 2., 3.]))
    derived = NDDataArray(source)
    assert derived.data is source.data
    assert derived.uncertainty is source.uncertainty
    assert derived.meta == source.meta
| 33.868056
| 77
| 0.639532
|
794b4b2bb3d84ff434136e9f4ca8a8b2ee2cb96c
| 3,042
|
py
|
Python
|
build/lib/lsh_example/Phrases.py
|
huangbeidan/activeLearner_autophrase
|
c7f1a4f1c7ea57a36c29c246ba393ba31e040353
|
[
"MIT"
] | 1
|
2021-03-05T15:42:32.000Z
|
2021-03-05T15:42:32.000Z
|
build/lib/lsh_example/Phrases.py
|
huangbeidan/activeLearner_autophrase
|
c7f1a4f1c7ea57a36c29c246ba393ba31e040353
|
[
"MIT"
] | null | null | null |
build/lib/lsh_example/Phrases.py
|
huangbeidan/activeLearner_autophrase
|
c7f1a4f1c7ea57a36c29c246ba393ba31e040353
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from collections import defaultdict
class Phrase:
    """One mined phrase: index, quality score, raw token ids, surface words
    and assigned label. Fields hold sentinels (-1 / "") until populated via
    add_phrase()."""

    def __init__(self):
        # Sentinel values meaning "not yet populated".
        self.idx = -1
        self.quality = -1
        self.tokens = ""
        self.words = ""
        self.label = ""

    def add_phrase(self, idx, quality, tokens, words, label):
        """Populate all fields of the phrase in one call."""
        self.idx, self.quality = idx, quality
        self.tokens, self.words, self.label = tokens, words, label
class Phrases:
    """Loads AutoPhrase output files and builds phrase/label lookup tables."""
    def __init__(self, token_mapping_dir="input/token_mapping.txt", intermediate_labels_dir="input/intermediate_labels.txt"):
        """
        :param token_mapping_dir: Immediate output from AutoPhrase
        :param intermediate_labels_dir: Intermediate labels produced by AL-Autophrase: https://github.com/huangbeidan/AutoPhrase
        Example files have been put under input/ folder
        """
        self.phrases = []
        self.phrases_num = []
        self.phrase_labels_dict=defaultdict()
        self.word2phrase = defaultdict()
        self.token2word = defaultdict()
        self.token_mapping_dir = token_mapping_dir
        self.intermediate_labels_dir = intermediate_labels_dir
        # Populate all the containers above from the intermediate-labels file.
        self.load_content_v2(self.intermediate_labels_dir)
    def _get_phrases(self):
        """Return the accumulated list of Phrase objects."""
        return self.phrases
    def load_tokens_mapping(self):
        """Parse the token-mapping file into {token_id: word}.

        Lines are tab-separated "token<TAB>word"; unknown tokens later fall
        back to a single space via the defaultdict factory.
        """
        tokens_dict = defaultdict(lambda:' ')
        with open(self.token_mapping_dir) as content:
            for line in content:
                line = line.strip()
                cans = line.split('\t')
                if len(cans) > 1:
                    token = cans[0]
                    word = cans[1]
                    tokens_dict[token] = word
        return tokens_dict
    def load_content_v2(self, sentence_file):
        """Parse the intermediate-labels file and fill the lookup tables.

        Each data line is: idx<TAB>label<TAB>score<TAB>space-separated tokens.
        Token ids are translated to words; single-word phrases are skipped.
        """
        tokens_dict = self.load_tokens_mapping()
        with open(sentence_file) as content:
            for line in content:
                line = line.strip()
                cans = line.split('\t')
                phrase = ""
                if len(cans) > 3:
                    idx = cans[0]
                    label = cans[1]
                    score = cans[2]
                    tokens_raw = cans[3]
                    for tk in tokens_raw.split(' '):
                        # assert tk in tokens_dict.keys(), "tokens should be in tokens mapping dictionary"
                        phrase += tokens_dict[tk]
                        phrase += " "
                    # Normalize: drop commas/colons, lowercase, trim whitespace.
                    phrase_clean = phrase.replace(",", "")
                    phrase_clean = phrase_clean.replace(":", "")
                    phrase_clean = phrase_clean.lower()
                    phrase_clean = phrase_clean.strip()
                    if len(phrase_clean.split(' ')) < 2: continue
                    pp = Phrase()
                    pp.add_phrase(idx, score, tokens_raw, phrase_clean, label)
                    self.phrases.append(pp)
                    self.phrases_num.append(tokens_raw)
                    self.phrase_labels_dict[phrase_clean] = label
                    self.word2phrase[phrase_clean] = pp
                    self.token2word[tokens_raw] = pp
| 33.428571
| 128
| 0.556213
|
794b4be991a20d1e9fdc0c6d496d10fd8cc27c00
| 777
|
py
|
Python
|
medet/users/migrations/0001_initial.py
|
ayushri117/MEDET
|
11941c89a44f9e49939710ad23ff85378b89c5a9
|
[
"MIT"
] | null | null | null |
medet/users/migrations/0001_initial.py
|
ayushri117/MEDET
|
11941c89a44f9e49939710ad23ff85378b89c5a9
|
[
"MIT"
] | null | null | null |
medet/users/migrations/0001_initial.py
|
ayushri117/MEDET
|
11941c89a44f9e49939710ad23ff85378b89c5a9
|
[
"MIT"
] | 1
|
2021-08-07T15:57:40.000Z
|
2021-08-07T15:57:40.000Z
|
# Generated by Django 3.0.6 on 2021-08-01 05:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated by Django's makemigrations; avoid editing by hand.
class Migration(migrations.Migration):
    """Initial migration: creates the Profile model (one-to-one with User)."""
    initial = True
    dependencies = [
        # Depends on whichever user model the project configured (AUTH_USER_MODEL).
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Avatar image; default.jpg fallback, uploads go to profile_pics/.
                ('image', models.ImageField(default='default.jpg', upload_to='profile_pics')),
                # Deleting the user deletes the profile (CASCADE).
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 29.884615
| 121
| 0.646075
|
794b4d22e84f8221d75a584e381ee2dbd6004120
| 4,923
|
py
|
Python
|
google-cloud-sdk/lib/surface/sql/instances/describe.py
|
bopopescu/Social-Lite
|
ee05d6a7431c36ff582c8d6b58bb20a8c5f550bf
|
[
"Apache-2.0"
] | null | null | null |
google-cloud-sdk/lib/surface/sql/instances/describe.py
|
bopopescu/Social-Lite
|
ee05d6a7431c36ff582c8d6b58bb20a8c5f550bf
|
[
"Apache-2.0"
] | 4
|
2020-07-21T12:51:46.000Z
|
2022-01-22T10:29:25.000Z
|
google-cloud-sdk/lib/surface/sql/instances/describe.py
|
bopopescu/Social-Lite
|
ee05d6a7431c36ff582c8d6b58bb20a8c5f550bf
|
[
"Apache-2.0"
] | 1
|
2020-07-25T18:17:57.000Z
|
2020-07-25T18:17:57.000Z
|
# -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Retrieves information about a Cloud SQL instance."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.sql import api_util
from googlecloudsdk.api_lib.sql import exceptions
from googlecloudsdk.api_lib.sql import instances as instance_api_util
from googlecloudsdk.api_lib.sql import validate
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions as calliope_exceptions
from googlecloudsdk.command_lib.sql import flags
from googlecloudsdk.command_lib.sql import instances as instance_command_util
from googlecloudsdk.core import properties
import six
import six.moves.http_client
# Cloud SQL v1beta4 message classes, used below for enum comparisons.
messages = apis.GetMessagesModule('sql', 'v1beta4')
class DatabaseInstancePresentation(object):
  """Represents a DatabaseInstance message that is modified for user visibility."""
  def __init__(self, orig):
    # Copy only the populated fields of the API message onto this object so
    # empty values are omitted from the rendered output.
    for field in orig.all_fields():
      if field.name == 'state':
        # An instance whose activation policy is NEVER is presented as
        # STOPPED rather than its raw API state.
        if orig.settings and orig.settings.activationPolicy == messages.Settings.ActivationPolicyValueValuesEnum.NEVER:
          self.state = 'STOPPED'
        else:
          self.state = orig.state
      else:
        value = getattr(orig, field.name)
        # Skip unset fields and empty lists.
        if value is not None and not (isinstance(value, list) and not value):
          if field.name in ['currentDiskSize', 'maxDiskSize']:
            # Disk sizes rendered as text -- presumably to keep large int64
            # values display-friendly; confirm against the output format.
            setattr(self, field.name, six.text_type(value))
          else:
            setattr(self, field.name, value)
@base.ReleaseTracks(base.ReleaseTrack.GA, base.ReleaseTrack.BETA,
                    base.ReleaseTrack.ALPHA)
class Get(base.DescribeCommand):
  """Displays configuration and metadata about a Cloud SQL instance.
  Displays configuration and metadata about a Cloud SQL instance.
  Information such as instance name, IP address, region, the CA certificate
  and configuration settings will be displayed.
  """
  @staticmethod
  def Args(parser):
    """Args is called by calliope to gather arguments for this command.
    Args:
      parser: An argparse parser that you can use it to add arguments that go on
        the command line after this command. Positional arguments are allowed.
    """
    parser.add_argument(
        'instance',
        completer=flags.InstanceCompleter,
        help='Cloud SQL instance ID.')
    parser.display_info.AddFormat('{0} default'.format(
        flags.INSTANCES_USERLABELS_FORMAT))
  def Run(self, args):
    """Displays configuration and metadata about a Cloud SQL instance.
    Information such as instance name, IP address, region, the CA certificate
    and configuration settings will be displayed.
    Args:
      args: argparse.Namespace, The arguments that this command was invoked
        with.
    Returns:
      A dict object representing the instance resource if fetching the instance
      was successful.
    Raises:
      HttpException: A http error response was received while executing api
      request.
      ResourceNotFoundError: The SQL instance was not found.
    """
    client = api_util.SqlClient(api_util.API_VERSION_DEFAULT)
    sql_client = client.sql_client
    sql_messages = client.sql_messages
    validate.ValidateInstanceName(args.instance)
    # Resolve the instance name against the active project.
    instance_ref = client.resource_parser.Parse(
        args.instance,
        params={'project': properties.VALUES.core.project.GetOrFail},
        collection='sql.instances')
    try:
      instance = sql_client.instances.Get(
          sql_messages.SqlInstancesGetRequest(
              project=instance_ref.project, instance=instance_ref.instance))
      # TODO(b/122660263): Remove when V1 instances are no longer supported.
      if instance_api_util.IsInstanceV1(sql_messages, instance):
        instance_command_util.ShowV1DeprecationWarning()
      return DatabaseInstancePresentation(instance)
    except apitools_exceptions.HttpError as error:
      # 403 is surfaced as "not found or not authorized" so the message does
      # not reveal whether the instance exists.
      if error.status_code == six.moves.http_client.FORBIDDEN:
        raise exceptions.ResourceNotFoundError(
            'There was no instance found at {} or you are not authorized to '
            'access it.'.format(instance_ref.RelativeName()))
      raise calliope_exceptions.HttpException(error)
| 39.071429
| 119
| 0.73634
|
794b4d2aec483385aada3088d94b2c7a960f62e7
| 5,129
|
py
|
Python
|
easygraph/readwrite/graphviz.py
|
tddschn/Easy-Graph
|
2cdecbde0b67a0c106e2bcf82d13c6fcd0b4c5d8
|
[
"BSD-3-Clause"
] | 1
|
2022-03-19T11:29:42.000Z
|
2022-03-19T11:29:42.000Z
|
easygraph/readwrite/graphviz.py
|
tddschn/Easy-Graph
|
2cdecbde0b67a0c106e2bcf82d13c6fcd0b4c5d8
|
[
"BSD-3-Clause"
] | null | null | null |
easygraph/readwrite/graphviz.py
|
tddschn/Easy-Graph
|
2cdecbde0b67a0c106e2bcf82d13c6fcd0b4c5d8
|
[
"BSD-3-Clause"
] | null | null | null |
import easygraph as eg
__all__ = ["write_dot", "read_dot", "from_agraph", "to_agraph"]
def from_agraph(A, create_using=None):
    """Convert a PyGraphviz AGraph into an EasyGraph graph.

    Parameters
    ----------
    A : PyGraphviz AGraph
        Source graph created with PyGraphviz.
    create_using : EasyGraph graph constructor, optional (default=None)
        Graph type to create. If a graph instance, it is cleared before being
        populated. If `None`, the type is inferred from `A` (directed /
        strict).

    Examples
    --------
    >>> K5 = eg.complete_graph(5)
    >>> A = eg.to_agraph(K5)
    >>> G = eg.from_agraph(A)

    Notes
    -----
    Graphviz default attributes for the graph, its nodes and its edges are
    stored under ``N.graph["graph"]``, ``N.graph["node"]`` and
    ``N.graph["edge"]``. Edge attributes become edge data on the returned
    graph.
    """
    if create_using is None:
        # Infer the graph class: directed vs undirected, strict (no parallel
        # edges) vs multigraph.
        if A.is_directed():
            create_using = eg.DiGraph if A.is_strict() else eg.MultiDiGraph
        else:
            create_using = eg.Graph if A.is_strict() else eg.MultiGraph
    N = eg.empty_graph(0, create_using)
    if A.name is not None:
        N.name = A.name
    # Graph-level attributes copied directly.
    N.graph.update(A.graph_attr)
    # Nodes and their attributes (keys coerced to str).
    for node in A.nodes():
        N.add_node(str(node), **{str(k): v for k, v in node.attr.items()})
    # Edges with their attributes; multigraphs keep the AGraph edge name as key.
    for edge in A.edges():
        u, v = str(edge[0]), str(edge[1])
        data = {str(k): v for k, v in dict(edge.attr).items()}
        if N.is_multigraph():
            N.add_edge(u, v, key=edge.name, **data)
        else:
            if edge.name is not None:
                data["key"] = edge.name
            N.add_edge(u, v, **data)
    # Preserve the Graphviz defaults so a round-trip keeps them.
    N.graph["graph"] = dict(A.graph_attr)
    N.graph["node"] = dict(A.node_attr)
    N.graph["edge"] = dict(A.edge_attr)
    return N
def to_agraph(N):
    """Returns a pygraphviz graph from a EasyGraph graph N.

    Parameters
    ----------
    N : EasyGraph graph
        A graph created with EasyGraph

    Returns
    -------
    pygraphviz.AGraph
        A PyGraphviz graph mirroring N's nodes, edges, and attributes.

    Raises
    ------
    ImportError
        If pygraphviz is not installed.

    Examples
    --------
    >>> K5 = eg.complete_graph(5)
    >>> A = eg.to_agraph(K5)

    Notes
    -----
    If N has an dict N.graph_attr an attempt will be made first
    to copy properties attached to the graph (see from_agraph)
    and then updated with the calling arguments if any.
    """
    try:
        import pygraphviz
    except ImportError as err:
        raise ImportError("requires pygraphviz "
                          "http://pygraphviz.github.io/") from err
    directed = N.is_directed()
    # Graphviz "strict" disallows self loops and parallel edges, so only
    # graphs with neither can be exported as strict.
    strict = eg.number_of_selfloops(N) == 0 and not N.is_multigraph()
    A = pygraphviz.AGraph(name=N.name, strict=strict, directed=directed)
    # Default graph attributes: the 'graph'/'node'/'edge' entries stored by
    # from_agraph become AGraph-level defaults; all other graph-data keys
    # become plain graph attributes.
    A.graph_attr.update(N.graph.get("graph", {}))
    A.node_attr.update(N.graph.get("node", {}))
    A.edge_attr.update(N.graph.get("edge", {}))
    A.graph_attr.update((k, v) for k, v in N.graph.items()
                        if k not in ("graph", "node", "edge"))
    # Add nodes; Graphviz attribute values must be strings.
    for n, nodedata in N.nodes.items():
        A.add_node(n)
        # Add node data
        a = A.get_node(n)
        a.attr.update({k: str(v) for k, v in nodedata.items()})
    # loop over edges
    if N.is_multigraph():
        for u, v, key, edgedata in N.edges:
            str_edgedata = {
                k: str(v)
                for k, v in edgedata.items() if k != "key"
            }
            # The multi-edge key becomes the Graphviz edge name so the
            # round-trip through from_agraph preserves edge identity.
            A.add_edge(u, v, key=str(key))
            # Add edge data
            a = A.get_edge(u, v)
            a.attr.update(str_edgedata)
    else:
        for u, v, edgedata in N.edges:
            str_edgedata = {k: str(v) for k, v in edgedata.items()}
            A.add_edge(u, v)
            # Add edge data
            a = A.get_edge(u, v)
            a.attr.update(str_edgedata)
    return A
def write_dot(G, path):
    """Write EasyGraph graph G to Graphviz dot format on path.

    Parameters
    ----------
    G : graph
        A easygraph graph
    path : filename
        Filename or file handle to write
    """
    agraph = to_agraph(G)
    agraph.write(path)
    # Free the underlying Graphviz structures once written.
    agraph.clear()
def read_dot(path):
    """Returns a EasyGraph graph from a dot file on path.

    Parameters
    ----------
    path : file or string
        File name or file handle to read.

    Raises
    ------
    ImportError
        If pygraphviz is not installed.
    """
    try:
        import pygraphviz
    except ImportError as err:
        raise ImportError("read_dot() requires pygraphviz "
                          "http://pygraphviz.github.io/") from err
    agraph = pygraphviz.AGraph(file=path)
    graph = from_agraph(agraph)
    # Release the Graphviz-side resources before returning.
    agraph.clear()
    return graph
| 28.181319
| 78
| 0.578475
|
794b4d311747a7b9ed555b39798cda525d100e63
| 21,340
|
py
|
Python
|
main_tsp.py
|
cem0963/dpdp
|
86f91a1e425826a3f31bcc175724c26a349fdb96
|
[
"MIT"
] | null | null | null |
main_tsp.py
|
cem0963/dpdp
|
86f91a1e425826a3f31bcc175724c26a349fdb96
|
[
"MIT"
] | null | null | null |
main_tsp.py
|
cem0963/dpdp
|
86f91a1e425826a3f31bcc175724c26a349fdb96
|
[
"MIT"
] | null | null | null |
import os
import json
import argparse
import time
import numpy as np
import torch
from torch.autograd import Variable
import torch.nn.functional as F
import torch.nn as nn
from sklearn.utils.class_weight import compute_class_weight
from tensorboardX import SummaryWriter
from fastprogress import master_bar, progress_bar
# Remove warning
import warnings
warnings.filterwarnings("ignore", category=UserWarning)
from scipy.sparse import SparseEfficiencyWarning
warnings.simplefilter('ignore', SparseEfficiencyWarning)
from config import *
from problems.tsp.tsp_reader import TSPReader
from problems.tsptw.tsptw_reader import TSPTWReader
from models.gcn_model import ResidualGatedGCNModel
from models.sparse_wrapper import wrap_sparse
from models.prep_wrapper import PrepWrapResidualGatedGCNModel
# --- Command-line / configuration setup (runs at import time) ---
parser = argparse.ArgumentParser(description='gcn_tsp_parser')
parser.add_argument('-c','--config', type=str, default="configs/default.json")
args = parser.parse_args()
config_path = args.config
config = get_config(config_path)
print("Loaded {}:\n{}".format(config_path, config))
# Choose the dataset reader for the configured problem variant
# (TSP with time windows vs. plain TSP).
is_tsptw = config.get('problem', 'tsp') == 'tsptw'
DataReader = TSPTWReader if is_tsptw else TSPReader
# Pick tensor constructors once so the rest of the module is device-agnostic,
# and seed the RNG for reproducibility.
if torch.cuda.is_available():
    print("CUDA available, using {} GPUs".format(torch.cuda.device_count()))
    dtypeFloat = torch.cuda.FloatTensor
    dtypeLong = torch.cuda.LongTensor
    torch.cuda.manual_seed(1)
else:
    print("CUDA not available")
    dtypeFloat = torch.FloatTensor
    dtypeLong = torch.LongTensor
    torch.manual_seed(1)
def mean_tour_len_edges(x_edges_values, y_pred_edges):
    """
    Computes mean tour length for given batch prediction as edge adjacency matrices (for PyTorch tensors).

    Args:
        x_edges_values: Edge values (distance) matrix (batch_size, num_nodes, num_nodes)
        y_pred_edges: Edge predictions (batch_size, num_nodes, num_nodes, voc_edges)

    Returns:
        mean_tour_len: Mean tour length over batch
    """
    # Harden predictions: softmax over the class dimension, then argmax.
    edge_mask = F.softmax(y_pred_edges, dim=-1).argmax(dim=3)  # B x V x V
    # The distance matrix is symmetric, so each selected edge is counted
    # twice; halve the per-instance sums to compensate.
    per_instance = (edge_mask.float() * x_edges_values.float()).sum(dim=(1, 2)) / 2
    return per_instance.sum().to(dtype=torch.float).item() / per_instance.numel()
def train_one_epoch(net, optimizer, config, master_bar, dataset=None):
    """Run one training epoch with gradient accumulation.

    Args:
        net: (DataParallel-wrapped) model whose forward returns
            (edge predictions, loss, edge distance values).
        optimizer: torch optimizer stepped every `accumulation_steps` batches.
        config: settings object with num_nodes, batch_size, train filepaths, etc.
        master_bar: fastprogress master bar for progress reporting.
        dataset: optional pre-built DataReader; reshuffled if given,
            otherwise a fresh reader is created from the config paths.

    Returns:
        Tuple (elapsed_seconds, loss, err_edges, err_tour, err_tsp,
        pred_tour_len, gt_tour_len); the three err_* values are
        placeholders fixed at 0.
    """
    # Set training mode
    net.train()
    # Assign parameters
    num_nodes = config.num_nodes
    num_neighbors = config.num_neighbors
    batch_size = config.batch_size
    batches_per_epoch = config.batches_per_epoch
    accumulation_steps = config.accumulation_steps
    train_filepath = config.train_filepath
    train_target_filepath = config.train_filepath_solution
    if dataset is None:
        dataset = DataReader(num_nodes, num_neighbors, batch_size, train_filepath, train_target_filepath, do_shuffle=True, do_prep=False)
    else:
        dataset.shuffle()
    # batches_per_epoch == -1 means "use the whole dataset".
    if batches_per_epoch != -1:
        batches_per_epoch = min(batches_per_epoch, dataset.max_iter)
    else:
        batches_per_epoch = dataset.max_iter
    # Convert dataset to iterable
    dataset = iter(dataset)
    # Initially set loss class weights as None
    edge_cw = None
    # Initialize running data
    running_loss = 0.0
    # running_err_edges = 0.0
    # running_err_tour = 0.0
    # running_err_tsp = 0.0
    running_pred_tour_len = 0.0
    running_gt_tour_len = 0.0
    running_nb_data = 0
    running_nb_batch = 0
    start_epoch = time.time()
    for batch_num in progress_bar(range(batches_per_epoch), parent=master_bar):
        # Generate a batch of TSPs
        try:
            batch = next(dataset)
        except StopIteration:
            break
        # Convert batch to torch Variables
        # x_edges = Variable(torch.LongTensor(batch.edges).type(dtypeLong), requires_grad=False)
        # x_edges_values = Variable(torch.FloatTensor(batch.edges_values).type(dtypeFloat), requires_grad=False)
        # x_nodes = Variable(torch.LongTensor(batch.nodes).type(dtypeLong), requires_grad=False)
        x_nodes_coord = Variable(torch.FloatTensor(batch.nodes_coord).type(dtypeFloat), requires_grad=False)
        x_nodes_timew = Variable(torch.FloatTensor(batch.nodes_timew).type(dtypeFloat), requires_grad=False) if is_tsptw else None
        # y_edges = Variable(torch.LongTensor(batch.edges_target).type(dtypeLong), requires_grad=False)
        # y_nodes = Variable(torch.LongTensor(batch.nodes_target).type(dtypeLong), requires_grad=False)
        y_tour = Variable(torch.LongTensor(batch.tour_nodes).type(dtypeLong), requires_grad=False)
        # Compute class weights (if uncomputed). Assumes a tour target:
        # exactly 2*num_nodes of the num_nodes^2 directed edge slots are
        # "on", so the weights can be derived analytically.
        if type(edge_cw) != torch.Tensor:
            # edge_labels = y_edges.cpu().numpy().flatten()
            # edge_cw = compute_class_weight("balanced", classes=np.unique(edge_labels), y=edge_labels)
            # edge_cw = len(y_edges) / (num_edge_classes * edge_label_bincount)
            num_nodes = x_nodes_coord.size(1)
            num_edges = num_nodes * num_nodes
            num_edge_classes = 2
            # Don't make tensor since then it will mess up DataParallel, this is a parameter, not input!
            edge_label_bincount = np.array([num_edges - 2 * num_nodes, 2 * num_nodes])
            edge_cw = num_edges / (num_edge_classes * edge_label_bincount)
        # Forward pass
        # y_preds, loss = net.forward(x_edges, x_edges_values, x_nodes, x_nodes_coord, y_edges, edge_cw)
        y_preds, loss, x_edges_values = net.forward(x_nodes_coord, x_nodes_timew, y_tour, edge_cw)
        loss = loss.mean()  # Take mean of loss across multiple GPUs
        loss = loss / accumulation_steps  # Scale loss by accumulation steps
        loss.backward()
        # Backward pass: only step the optimizer every accumulation_steps
        # batches, so gradients accumulate into a larger effective batch.
        if (batch_num+1) % accumulation_steps == 0:
            optimizer.step()
            optimizer.zero_grad()
        # Compute error metrics and mean tour lengths
        # err_edges, err_tour, err_tsp, tour_err_idx, tsp_err_idx = edge_error(y_preds, y_edges, x_edges)
        pred_tour_len = mean_tour_len_edges(x_edges_values, y_preds)
        gt_tour_len = np.mean(batch.tour_len)
        # Update running data
        running_nb_data += batch_size
        running_loss += batch_size* loss.data.item()* accumulation_steps  # Re-scale loss
        # running_err_edges += batch_size* err_edges
        # running_err_tour += batch_size* err_tour
        # running_err_tsp += batch_size* err_tsp
        running_pred_tour_len += batch_size* pred_tour_len
        running_gt_tour_len += batch_size* gt_tour_len
        running_nb_batch += 1
        # Log intermediate statistics
        result = ('loss:{loss:.4f} pred_tour_len:{pred_tour_len:.3f} gt_tour_len:{gt_tour_len:.3f}'.format(
            loss=running_loss/running_nb_data,
            pred_tour_len=running_pred_tour_len/running_nb_data,
            gt_tour_len=running_gt_tour_len/running_nb_data))
        master_bar.child.comment = result
    # Compute statistics for full epoch
    loss = running_loss/ running_nb_data
    err_edges = 0 # running_err_edges/ running_nb_data
    err_tour = 0 # running_err_tour/ running_nb_data
    err_tsp = 0 # running_err_tsp/ running_nb_data
    pred_tour_len = running_pred_tour_len/ running_nb_data
    gt_tour_len = running_gt_tour_len/ running_nb_data
    return time.time()-start_epoch, loss, err_edges, err_tour, err_tsp, pred_tour_len, gt_tour_len
def metrics_to_str(epoch, time, learning_rate, loss, err_edges, err_tour, err_tsp, pred_tour_len, gt_tour_len):
    """Format one epoch's metrics as a single tab-separated summary line.

    ``time`` is given in seconds and reported in hours. The err_edges /
    err_tour / err_tsp arguments are accepted for interface compatibility
    but are not included in the output.
    """
    fields = [
        'epoch:{:0>2d}'.format(epoch),
        'time:{:.1f}h'.format(time / 3600),
        'lr:{:.2e}'.format(learning_rate),
        'loss:{:.4f}'.format(loss),
        'pred_tour_len:{:.3f}'.format(pred_tour_len),
        'gt_tour_len:{:.3f}'.format(gt_tour_len),
    ]
    return '\t'.join(fields)
def test(net, config, master_bar, mode='test'):
    """Evaluate the network on the validation or test split.

    Args:
        net: (DataParallel-wrapped) model; run under torch.no_grad().
        config: settings object providing batch sizes and split file paths.
        master_bar: fastprogress master bar for progress reporting.
        mode: 'val' or 'test' — selects which split's files to load.
            NOTE(review): any other value leaves `dataset` unbound and
            raises NameError below.

    Returns:
        Tuple (elapsed_seconds, loss, err_edges, err_tour, err_tsp,
        pred_tour_len, gt_tour_len); err_* are placeholder zeros and
        pred_tour_len is 0.0 since beamsearch decoding is commented out.
    """
    # Set evaluation mode
    net.eval()
    # Assign parameters
    num_nodes = config.num_nodes
    num_neighbors = config.num_neighbors
    batch_size = config.batch_size
    batches_per_epoch = config.batches_per_epoch
    beam_size = config.beam_size
    val_filepath = config.val_filepath
    val_target_filepath = config.val_filepath_solution
    test_filepath = config.test_filepath
    test_target_filepath = config.test_filepath_solution
    # Load TSP data
    if mode == 'val':
        dataset = DataReader(num_nodes, num_neighbors, batch_size=batch_size, filepath=val_filepath, target_filepath=val_target_filepath, do_prep=False)
    elif mode == 'test':
        dataset = DataReader(num_nodes, num_neighbors, batch_size=batch_size, filepath=test_filepath, target_filepath=test_target_filepath, do_prep=False)
    # Always evaluate the full split, regardless of config.batches_per_epoch.
    batches_per_epoch = dataset.max_iter
    # Convert dataset to iterable
    dataset = iter(dataset)
    # Initially set loss class weights as None
    edge_cw = None
    # Initialize running data
    running_loss = 0.0
    # running_err_edges = 0.0
    # running_err_tour = 0.0
    # running_err_tsp = 0.0
    running_pred_tour_len = 0.0
    running_gt_tour_len = 0.0
    running_nb_data = 0
    running_nb_batch = 0
    with torch.no_grad():
        start_test = time.time()
        for batch_num in progress_bar(range(batches_per_epoch), parent=master_bar):
            # Generate a batch of TSPs
            try:
                batch = next(dataset)
            except StopIteration:
                break
            # Convert batch to torch Variables
            # x_edges = Variable(torch.LongTensor(batch.edges).type(dtypeLong), requires_grad=False)
            # x_edges_values = Variable(torch.FloatTensor(batch.edges_values).type(dtypeFloat), requires_grad=False)
            # x_nodes = Variable(torch.LongTensor(batch.nodes).type(dtypeLong), requires_grad=False)
            x_nodes_coord = Variable(torch.FloatTensor(batch.nodes_coord).type(dtypeFloat), requires_grad=False)
            x_nodes_timew = Variable(torch.FloatTensor(batch.nodes_timew).type(dtypeFloat), requires_grad=False) if is_tsptw else None
            # y_edges = Variable(torch.LongTensor(batch.edges_target).type(dtypeLong), requires_grad=False)
            # y_nodes = Variable(torch.LongTensor(batch.nodes_target).type(dtypeLong), requires_grad=False)
            y_tour = Variable(torch.LongTensor(batch.tour_nodes).type(dtypeLong), requires_grad=False)
            # Compute class weights (if uncomputed); same analytic weights
            # as in train_one_epoch (2*num_nodes tour edges out of num_nodes^2).
            if type(edge_cw) != torch.Tensor:
                # edge_labels = y_edges.cpu().numpy().flatten()
                # edge_cw = compute_class_weight("balanced", classes=np.unique(edge_labels), y=edge_labels)
                # edge_cw = len(y_edges) / (num_edge_classes * edge_label_bincount)
                num_nodes = x_nodes_coord.size(1)
                num_edges = num_nodes * num_nodes
                num_edge_classes = 2
                # Don't make tensor since then it will mess up DataParallel, this is a parameter, not input!
                edge_label_bincount = np.array([num_edges - 2 * num_nodes, 2 * num_nodes])
                edge_cw = num_edges / (num_edge_classes * edge_label_bincount)
            # Forward pass
            # y_preds, loss = net.forward(x_edges, x_edges_values, x_nodes, x_nodes_coord, y_edges, edge_cw)
            y_preds, loss, x_edges_values = net.forward(x_nodes_coord, x_nodes_timew, y_tour, edge_cw)
            loss = loss.mean()  # Take mean of loss across multiple GPUs
            # Compute error metrics
            # err_edges, err_tour, err_tsp, tour_err_idx, tsp_err_idx = edge_error(y_preds, y_edges, x_edges)
            # Get batch beamsearch tour prediction
            # if mode == 'val': # Validation: faster 'vanilla' beamsearch
            #     bs_nodes = beamsearch_tour_nodes(
            #         y_preds, beam_size, batch_size, num_nodes, dtypeFloat, dtypeLong, probs_type='logits')
            # elif mode == 'test': # Testing: beamsearch with shortest tour heuristic
            #     bs_nodes = beamsearch_tour_nodes_shortest(
            #         y_preds, x_edges_values, beam_size, batch_size, num_nodes, dtypeFloat, dtypeLong, probs_type='logits')
            #
            # Compute mean tour length
            # pred_tour_len = mean_tour_len_nodes(x_edges_values, bs_nodes)
            gt_tour_len = np.mean(batch.tour_len)
            # Update running data
            running_nb_data += batch_size
            running_loss += batch_size* loss.data.item()
            # running_err_edges += batch_size* err_edges
            # running_err_tour += batch_size* err_tour
            # running_err_tsp += batch_size* err_tsp
            # running_pred_tour_len += batch_size* pred_tour_len
            running_gt_tour_len += batch_size* gt_tour_len
            running_nb_batch += 1
            # Log intermediate statistics
            # result = ('loss:{loss:.4f} pred_tour_len:{pred_tour_len:.3f} gt_tour_len:{gt_tour_len:.3f}'.format(
            result = ('loss:{loss:.4f} gt_tour_len:{gt_tour_len:.3f}'.format(
                loss=running_loss/running_nb_data,
                # pred_tour_len=running_pred_tour_len/running_nb_data,
                gt_tour_len=running_gt_tour_len/running_nb_data))
            master_bar.child.comment = result
    # Compute statistics for full epoch
    loss = running_loss/ running_nb_data
    err_edges = 0 # running_err_edges/ running_nb_data
    err_tour = 0 # running_err_tour/ running_nb_data
    err_tsp = 0 # running_err_tsp/ running_nb_data
    pred_tour_len = running_pred_tour_len/ running_nb_data
    gt_tour_len = running_gt_tour_len/ running_nb_data
    return time.time()-start_test, loss, err_edges, err_tour, err_tsp, pred_tour_len, gt_tour_len
def main(config):
    """Build the model, then train/validate/test for config.max_epochs.

    Creates ./logs/{expt_name}/, dumps the config, logs metrics to
    TensorBoard, decays the learning rate when validation loss stalls,
    and writes several checkpoints (best val tour length, best val loss,
    last epoch, and every 250 epochs).

    Args:
        config: settings object (see configs/default.json) with model,
            data, and optimization parameters.

    Returns:
        The trained (DataParallel-wrapped) network.
    """
    # Instantiate the network
    assert config.num_neighbors == -1, "KNN features is deprecated due to PrepWrap"
    model = ResidualGatedGCNModel(config, dtypeFloat, dtypeLong)
    if 'sparse' in config and config.sparse is not None:
        model = wrap_sparse(model, config.sparse)
    model = PrepWrapResidualGatedGCNModel(model)
    net = nn.DataParallel(model)
    if torch.cuda.is_available():
        net.cuda()
    print(net)
    # Compute number of network parameters
    nb_param = 0
    for param in net.parameters():
        nb_param += np.prod(list(param.data.size()))
    print('Number of parameters:', nb_param)
    # Create log directory
    log_dir = f"./logs/{config.expt_name}/"
    os.makedirs(log_dir, exist_ok=True)
    json.dump(config, open(f"{log_dir}/config.json", "w"), indent=4)
    writer = SummaryWriter(log_dir)  # Define Tensorboard writer
    # Training parameters
    num_nodes = config.num_nodes
    num_neighbors = config.num_neighbors
    max_epochs = config.max_epochs
    val_every = config.val_every
    test_every = config.test_every
    batch_size = config.batch_size
    batches_per_epoch = config.batches_per_epoch
    accumulation_steps = config.accumulation_steps
    learning_rate = config.learning_rate
    decay_rate = config.decay_rate
    val_loss_old = 1e6  # For decaying LR based on validation loss
    best_pred_tour_len = 1e6  # For saving checkpoints
    best_val_loss = 1e6  # For saving checkpoints
    # Define optimizer
    optimizer = torch.optim.Adam(net.parameters(), lr=learning_rate)
    print(optimizer)
    # Build the training dataset once; train_one_epoch reshuffles it
    # in place each epoch instead of re-reading the files.
    dataset = DataReader(
        config.num_nodes, config.num_neighbors, config.batch_size,
        config.train_filepath, config.train_filepath_solution,
        do_prep=False
    )
    # Optionally resume model/optimizer/epoch state from a previous run.
    if 'resume_from_dir' in config:
        if torch.cuda.is_available():
            checkpoint = torch.load(os.path.join(config.resume_from_dir, "last_train_checkpoint.tar"))
        else:
            checkpoint = torch.load(os.path.join(config.resume_from_dir, "last_train_checkpoint.tar"), map_location='cpu')
        # Load network state
        net.load_state_dict(checkpoint['model_state_dict'])
        # Load optimizer state
        optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
        # Load other training parameters
        epoch = checkpoint['epoch']
        train_loss = checkpoint['train_loss']
        val_loss = checkpoint['val_loss']
        # Note: the learning_rate was set in load_state_dict,
        # this is just to have the local variable for logging
        for param_group in optimizer.param_groups:
            learning_rate = param_group['lr']
        print(f"Loaded checkpoint from epoch {epoch}")
    else:
        epoch = -1
    epoch_bar = master_bar(range(epoch + 1, max_epochs))
    for epoch in epoch_bar:
        # Log to Tensorboard
        writer.add_scalar('learning_rate', learning_rate, epoch)
        # Train
        train_time, train_loss, train_err_edges, train_err_tour, train_err_tsp, train_pred_tour_len, train_gt_tour_len = train_one_epoch(net, optimizer, config, epoch_bar, dataset=dataset)
        epoch_bar.write('t: ' + metrics_to_str(epoch, train_time, learning_rate, train_loss, train_err_edges, train_err_tour, train_err_tsp, train_pred_tour_len, train_gt_tour_len))
        writer.add_scalar('loss/train_loss', train_loss, epoch)
        writer.add_scalar('pred_tour_len/train_pred_tour_len', train_pred_tour_len, epoch)
        writer.add_scalar('optimality_gap/train_opt_gap', train_pred_tour_len/train_gt_tour_len - 1, epoch)
        if epoch % val_every == 0 or epoch == max_epochs-1:
            # Validate
            val_time, val_loss, val_err_edges, val_err_tour, val_err_tsp, val_pred_tour_len, val_gt_tour_len = test(net, config, epoch_bar, mode='val')
            epoch_bar.write('v: ' + metrics_to_str(epoch, val_time, learning_rate, val_loss, val_err_edges, val_err_tour, val_err_tsp, val_pred_tour_len, val_gt_tour_len))
            writer.add_scalar('loss/val_loss', val_loss, epoch)
            writer.add_scalar('pred_tour_len/val_pred_tour_len', val_pred_tour_len, epoch)
            writer.add_scalar('optimality_gap/val_opt_gap', val_pred_tour_len/val_gt_tour_len - 1, epoch)
            # Save checkpoint
            if val_pred_tour_len < best_pred_tour_len:
                best_pred_tour_len = val_pred_tour_len  # Update best val predicted tour length
                torch.save({
                    'epoch': epoch,
                    'model_state_dict': net.state_dict(),
                    'optimizer_state_dict': optimizer.state_dict(),
                    'train_loss': train_loss,
                    'val_loss': val_loss,
                }, log_dir+"best_val_tourlen_checkpoint.tar")
            if val_loss < best_val_loss:
                best_val_loss = val_loss  # Update best val loss
                torch.save({
                    'epoch': epoch,
                    'model_state_dict': net.state_dict(),
                    'optimizer_state_dict': optimizer.state_dict(),
                    'train_loss': train_loss,
                    'val_loss': val_loss,
                }, log_dir+"best_val_loss_checkpoint.tar")
            # Update learning rate: decay whenever validation loss failed
            # to improve by at least 1% over the previous validation.
            if val_loss > 0.99 * val_loss_old:
                learning_rate /= decay_rate
                for param_group in optimizer.param_groups:
                    param_group['lr'] = learning_rate
            val_loss_old = val_loss  # Update old validation loss
        if epoch % test_every == 0 or epoch == max_epochs-1:
            # Test
            test_time, test_loss, test_err_edges, test_err_tour, test_err_tsp, test_pred_tour_len, test_gt_tour_len = test(net, config, epoch_bar, mode='test')
            epoch_bar.write('T: ' + metrics_to_str(epoch, test_time, learning_rate, test_loss, test_err_edges, test_err_tour, test_err_tsp, test_pred_tour_len, test_gt_tour_len))
            writer.add_scalar('loss/test_loss', test_loss, epoch)
            writer.add_scalar('pred_tour_len/test_pred_tour_len', test_pred_tour_len, epoch)
            writer.add_scalar('optimality_gap/test_opt_gap', test_pred_tour_len/test_gt_tour_len - 1, epoch)
        # Save training checkpoint at the end of epoch
        torch.save({
            'epoch': epoch,
            'model_state_dict': net.state_dict(),
            'optimizer_state_dict': optimizer.state_dict(),
            'train_loss': train_loss,
            'val_loss': val_loss,
        }, log_dir+"last_train_checkpoint.tar")
        # Save checkpoint after every 250 epochs
        if epoch != 0 and (epoch % 250 == 0 or epoch == max_epochs-1):
            torch.save({
                'epoch': epoch,
                'model_state_dict': net.state_dict(),
                'optimizer_state_dict': optimizer.state_dict(),
                'train_loss': train_loss,
                'val_loss': val_loss,
            }, log_dir+f"checkpoint_epoch{epoch}.tar")
    return net
# Entry point: train/evaluate with the config parsed at module load time.
if __name__ == "__main__":
    main(config)
| 44.365904
| 188
| 0.671368
|
794b4d7ab53e871c252ef2d705ab6909fac797b6
| 2,734
|
py
|
Python
|
QTranslatorHelper/qtrer.py
|
xyz1001/PythonToolkit
|
b8e348764051c0184643b14f5b85c46223285d98
|
[
"MIT"
] | 1
|
2022-01-11T02:20:16.000Z
|
2022-01-11T02:20:16.000Z
|
QTranslatorHelper/qtrer.py
|
xyz1001/PythonToolkit
|
b8e348764051c0184643b14f5b85c46223285d98
|
[
"MIT"
] | null | null | null |
QTranslatorHelper/qtrer.py
|
xyz1001/PythonToolkit
|
b8e348764051c0184643b14f5b85c46223285d98
|
[
"MIT"
] | 2
|
2018-11-01T08:06:40.000Z
|
2020-04-29T08:39:20.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Translate Qt ts file
Usage:
qtrer [--ts_dir=<ts_dir> --excel_dir=<excel_dir> --log_level=<log_level> --patch]
Options:
--ts_dir=<ts_dir> Qt翻译文件的目录
--excel_dir=<excel_dir> Excel翻译文件的路径
--loglevel=<log_level> log等级:NOTSET,DEBUG,INFO,WARN,ERROR,FATAL,CRITICAL
--patch 补丁翻译模式
"""
import os
import logging
import sys
import docopt
import pandas
from excel_parser import ExcelParser
from qt_ts import QtTs
from opencc_translator import OpenccTranslator
def main():
    """Translate Qt .ts files using Excel/CSV translation tables.

    Reads docopt arguments, collects translations from every .xlsx/.csv
    file in the excel directory, derives zh_TW/zh_HK from zh_CN via
    OpenCC when they are missing, then applies the translations to each
    .ts file in the ts directory.
    """
    options = docopt.docopt(__doc__)
    ts_dir = options["--ts_dir"]
    excel_dir = options["--excel_dir"]
    log_level = options["--log_level"]
    ts_dir = "./translation" if ts_dir is None else ts_dir
    excel_dir = "./doc/translation" if excel_dir is None else excel_dir
    log_level = "INFO" if log_level is None else log_level
    logging.basicConfig(level=logging._nameToLevel[log_level.upper()])
    # Gather translations from every spreadsheet in the directory.
    translation = {}
    for entry in os.listdir(excel_dir):
        path = os.path.join(excel_dir, entry)
        if not os.path.isfile(path):
            continue
        if not (path.endswith(".xlsx") or path.endswith(".csv")):
            continue
        is_temp_file = False
        if path.endswith(".csv"):
            # ExcelParser reads xlsx, so convert csv via a throwaway file.
            frame = pandas.read_csv(path)
            path = './__temp.xlsx'
            frame.to_excel(path, index=None, header=True)
            is_temp_file = True
        parser = ExcelParser()
        parser.parse(path)
        translation.update(parser.translations)
        if is_temp_file:
            os.remove(path)
    # Derive traditional-Chinese variants from zh_CN when absent.
    if "zh_TW" not in translation:
        logging.info("zh_TW not in doc, use OpenccTranslator")
        zh_TW_translator = OpenccTranslator("s2twp")
        translation["zh_TW"] = zh_TW_translator.generate(translation["zh_CN"])
    if "zh_HK" not in translation:
        logging.info("zh_HK not in doc, use OpenccTranslator")
        zh_HK_translator = OpenccTranslator("s2hk")
        translation["zh_HK"] = zh_HK_translator.generate(translation["zh_CN"])
    logging.info(translation.keys())
    # Apply the collected translations to every .ts file.
    for entry in os.listdir(ts_dir):
        path = os.path.join(ts_dir, entry)
        if not path.endswith(".ts"):
            continue
        qt_ts = QtTs(path, translation, options["--patch"])
        locale_name = os.path.splitext(os.path.basename(path))[0]
        try:
            qt_ts.tr(locale_name)
        except KeyError as e:
            logging.error(e.args)
            raise
        qt_ts.save(path)
# Entry point when run as a script.
if __name__ == "__main__":
    main()
| 31.068182
| 108
| 0.633138
|
794b4db7cc11c2d09a564deb9eaa0e0a2339e45e
| 836
|
py
|
Python
|
manage.py
|
debo0611/modern-django-
|
80ea1bea493815e32d819919aab0be8b5e9778f7
|
[
"MIT"
] | null | null | null |
manage.py
|
debo0611/modern-django-
|
80ea1bea493815e32d819919aab0be8b5e9778f7
|
[
"MIT"
] | 1
|
2022-01-13T00:49:07.000Z
|
2022-01-13T00:49:07.000Z
|
manage.py
|
debo0611/modern-django-
|
80ea1bea493815e32d819919aab0be8b5e9778f7
|
[
"MIT"
] | 1
|
2017-10-19T11:49:37.000Z
|
2017-10-19T11:49:37.000Z
|
#!/usr/bin/env python
import os
import sys
# Django management entry point (standard manage.py boilerplate).
if __name__ == "__main__":
    # Default to the local settings module unless the environment overrides it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE",
                          "config.settings.local")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
| 34.833333
| 77
| 0.623206
|
794b4dbab19da947416c7ae2d12100f52d284123
| 20,645
|
py
|
Python
|
src/lib/datasets/dataset/jde.py
|
kaushal21/FairMOTAssignment
|
ca1a94a084da41cb3ad2e697e88a1032a7735caa
|
[
"MIT"
] | null | null | null |
src/lib/datasets/dataset/jde.py
|
kaushal21/FairMOTAssignment
|
ca1a94a084da41cb3ad2e697e88a1032a7735caa
|
[
"MIT"
] | null | null | null |
src/lib/datasets/dataset/jde.py
|
kaushal21/FairMOTAssignment
|
ca1a94a084da41cb3ad2e697e88a1032a7735caa
|
[
"MIT"
] | null | null | null |
import glob
import math
import os
import os.path as osp
import random
import time
from collections import OrderedDict
import cv2
import json
import numpy as np
import torch
import copy
from torch.utils.data import Dataset
from torchvision.transforms import transforms as T
from cython_bbox import bbox_overlaps as bbox_ious
from src.lib.opts import opts
from src.lib.utils.image import gaussian_radius, draw_umich_gaussian, draw_msra_gaussian
from src.lib.utils.utils import xyxy2xywh, generate_anchors, xywh2xyxy, encode_delta
class LoadImages:  # for inference
    """Iterate over still images (a directory or a single file), yielding
    letterboxed, normalized CHW RGB float32 arrays for inference.

    Each item is a tuple (img_path, preprocessed_img, original_bgr_img).
    """

    def __init__(self, path, img_size=(1088, 608)):
        if os.path.isdir(path):
            image_format = ['.jpg', '.jpeg', '.png', '.tif']
            self.files = sorted(glob.glob('%s/*.*' % path))
            self.files = list(filter(lambda x: os.path.splitext(x)[1].lower() in image_format, self.files))
        elif os.path.isfile(path):
            self.files = [path]
        self.nF = len(self.files)  # number of image files
        self.width = img_size[0]
        self.height = img_size[1]
        self.count = 0
        assert self.nF > 0, 'No images found in ' + path

    def _load(self, img_path):
        # Shared load/preprocess path for __next__ and __getitem__
        # (previously duplicated in both methods).
        img0 = cv2.imread(img_path)  # BGR
        assert img0 is not None, 'Failed to load ' + img_path
        # Padded resize
        img, _, _, _ = letterbox(img0, height=self.height, width=self.width)
        # Normalize RGB: BGR->RGB, HWC->CHW, [0, 255] -> [0, 1]
        img = img[:, :, ::-1].transpose(2, 0, 1)
        img = np.ascontiguousarray(img, dtype=np.float32)
        img /= 255.0
        return img_path, img, img0

    def __iter__(self):
        self.count = -1
        return self

    def __next__(self):
        self.count += 1
        if self.count == self.nF:
            raise StopIteration
        return self._load(self.files[self.count])

    def __getitem__(self, idx):
        # Indexing wraps around so any non-negative idx is valid.
        return self._load(self.files[idx % self.nF])

    def __len__(self):
        return self.nF  # number of files
class LoadVideo:  # for inference
    """Iterate over the frames of a video file, yielding letterboxed,
    normalized CHW RGB float32 arrays for inference.

    Each item is a tuple (frame_index, preprocessed_img, resized_bgr_frame).
    Frames are first resized to a fixed 1920x1080 before letterboxing.
    """

    def __init__(self, path, img_size=(1088, 608)):
        self.cap = cv2.VideoCapture(path)
        self.frame_rate = int(round(self.cap.get(cv2.CAP_PROP_FPS)))
        self.vw = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        self.vh = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
        self.vn = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))
        self.width = img_size[0]
        self.height = img_size[1]
        self.count = 0
        # Fixed working resolution applied to every frame before letterbox.
        self.w, self.h = 1920, 1080
        # Fixed typo: 'Lenth' -> 'Length'.
        print('Length of the video: {:d} frames'.format(self.vn))

    def get_size(self, vw, vh, dw, dh):
        # Largest (w, h) that fits inside (dw, dh) preserving aspect ratio.
        wa, ha = float(dw) / vw, float(dh) / vh
        a = min(wa, ha)
        return int(vw * a), int(vh * a)

    def __iter__(self):
        self.count = -1
        return self

    def __next__(self):
        self.count += 1
        if self.count == len(self):
            raise StopIteration
        # Read image
        res, img0 = self.cap.read()  # BGR
        assert img0 is not None, 'Failed to load frame {:d}'.format(self.count)
        img0 = cv2.resize(img0, (self.w, self.h))
        # Padded resize
        img, _, _, _ = letterbox(img0, height=self.height, width=self.width)
        # Normalize RGB: BGR->RGB, HWC->CHW, [0, 255] -> [0, 1]
        img = img[:, :, ::-1].transpose(2, 0, 1)
        img = np.ascontiguousarray(img, dtype=np.float32)
        img /= 255.0
        return self.count, img, img0

    def __len__(self):
        return self.vn  # number of frames
class LoadImagesAndLabels:  # for training
    """Dataset over image paths listed in a text file, with per-image
    label files holding (class, id, cx, cy, w, h) rows (normalized xywh).

    __getitem__ returns (img, labels, img_path, (orig_h, orig_w)) where
    img is letterboxed (and optionally augmented) and labels are
    normalized xywh in letterboxed coordinates.
    """

    def __init__(self, path, img_size=(1088, 608), augment=False, transforms=None):
        # `path` is a text file listing one image path per line; label
        # paths are derived by substituting 'images' -> 'labels_with_ids'
        # and the image extension -> '.txt'.
        with open(path, 'r') as file:
            self.img_files = file.readlines()
            self.img_files = [x.replace('\n', '') for x in self.img_files]
            self.img_files = list(filter(lambda x: len(x) > 0, self.img_files))
        self.label_files = [x.replace('images', 'labels_with_ids').replace('.png', '.txt').replace('.jpg', '.txt')
                            for x in self.img_files]
        self.nF = len(self.img_files)  # number of image files
        self.width = img_size[0]
        self.height = img_size[1]
        self.augment = augment
        self.transforms = transforms

    def __getitem__(self, files_index):
        img_path = self.img_files[files_index]
        label_path = self.label_files[files_index]
        return self.get_data(img_path, label_path)

    def get_data(self, img_path, label_path):
        """Load one image and its labels, applying letterbox resize and
        (optionally) HSV jitter, random affine, and horizontal flip."""
        height = self.height
        width = self.width
        img = cv2.imread(img_path)  # BGR
        if img is None:
            raise ValueError('File corrupt {}'.format(img_path))
        augment_hsv = True
        if self.augment and augment_hsv:
            # SV augmentation by 50%
            fraction = 0.50
            img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
            S = img_hsv[:, :, 1].astype(np.float32)
            V = img_hsv[:, :, 2].astype(np.float32)
            a = (random.random() * 2 - 1) * fraction + 1
            S *= a
            if a > 1:
                np.clip(S, a_min=0, a_max=255, out=S)
            a = (random.random() * 2 - 1) * fraction + 1
            V *= a
            if a > 1:
                np.clip(V, a_min=0, a_max=255, out=V)
            img_hsv[:, :, 1] = S.astype(np.uint8)
            img_hsv[:, :, 2] = V.astype(np.uint8)
            cv2.cvtColor(img_hsv, cv2.COLOR_HSV2BGR, dst=img)
        h, w, _ = img.shape
        img, ratio, padw, padh = letterbox(img, height=height, width=width)
        # Load labels
        if os.path.isfile(label_path):
            labels0 = np.loadtxt(label_path, dtype=np.float32).reshape(-1, 6)
            # Normalized xywh to pixel xyxy format (letterboxed coordinates)
            labels = labels0.copy()
            labels[:, 2] = ratio * w * (labels0[:, 2] - labels0[:, 4] / 2) + padw
            labels[:, 3] = ratio * h * (labels0[:, 3] - labels0[:, 5] / 2) + padh
            labels[:, 4] = ratio * w * (labels0[:, 2] + labels0[:, 4] / 2) + padw
            labels[:, 5] = ratio * h * (labels0[:, 3] + labels0[:, 5] / 2) + padh
        else:
            labels = np.array([])
        # Augment image and labels
        if self.augment:
            img, labels, M = random_affine(img, labels, degrees=(-5, 5), translate=(0.10, 0.10), scale=(0.50, 1.20))
        plotFlag = False
        if plotFlag:
            import matplotlib
            matplotlib.use('Agg')
            import matplotlib.pyplot as plt
            plt.figure(figsize=(50, 50))
            plt.imshow(img[:, :, ::-1])
            plt.plot(labels[:, [1, 3, 3, 1, 1]].T, labels[:, [2, 2, 4, 4, 2]].T, '.-')
            plt.axis('off')
            plt.savefig('test.jpg')
            time.sleep(10)
        nL = len(labels)
        if nL > 0:
            # convert xyxy back to normalized xywh
            labels[:, 2:6] = xyxy2xywh(labels[:, 2:6].copy())  # / height
            labels[:, 2] /= width
            labels[:, 3] /= height
            labels[:, 4] /= width
            labels[:, 5] /= height
        if self.augment:
            # random left-right flip
            lr_flip = True
            if lr_flip & (random.random() > 0.5):
                img = np.fliplr(img)
                if nL > 0:
                    labels[:, 2] = 1 - labels[:, 2]
        img = np.ascontiguousarray(img[:, :, ::-1])  # BGR to RGB
        if self.transforms is not None:
            img = self.transforms(img)
        return img, labels, img_path, (h, w)

    def __len__(self):
        return self.nF  # number of batches
def letterbox(img, height=608, width=1088,
              color=(127.5, 127.5, 127.5)):
    """Resize *img* to fit inside (width, height) preserving aspect ratio,
    padding the remainder with *color*.

    Returns (padded_img, scale_ratio, width_pad, height_pad) where the pads
    are per-side (half of the total padding) in pixels.
    """
    orig_h, orig_w = img.shape[:2]
    ratio = min(float(height) / orig_h, float(width) / orig_w)
    resized_w = round(orig_w * ratio)
    resized_h = round(orig_h * ratio)
    dw = (width - resized_w) / 2  # width padding
    dh = (height - resized_h) / 2  # height padding
    # The +/- 0.1 nudges make rounding split an odd pad across both sides.
    top, bottom = round(dh - 0.1), round(dh + 0.1)
    left, right = round(dw - 0.1), round(dw + 0.1)
    img = cv2.resize(img, (resized_w, resized_h), interpolation=cv2.INTER_AREA)  # resized, no border
    img = cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color)  # padded rectangular
    return img, ratio, dw, dh
def random_affine(img, targets=None, degrees=(-10, 10), translate=(.1, .1), scale=(.9, 1.1), shear=(-2, 2),
                  borderValue=(127.5, 127.5, 127.5)):
    """Apply a random affine transform (rotation, scale, translation, shear)
    to `img`, warping the `targets` boxes along with it.

    Args:
        img: HxWxC image array.
        targets: optional Nx6 array [cls, id, x1, y1, x2, y2] in pixels;
            box coordinates are transformed with the image and boxes that
            become too small/distorted are dropped.
        degrees, translate, scale, shear: (lo, hi) sampling ranges.
        borderValue: fill color for pixels introduced by the warp.

    Returns:
        (warped_img, targets, M) when `targets` is not None, where M is the
        combined 3x3 transform; otherwise just warped_img.

    Bug fix: the original implicitly returned None when `targets` was a
    non-None *empty* array, crashing callers that unpack three values.
    """
    # https://medium.com/uruvideo/dataset-augmentation-with-random-homographies-a8f4b44830d4
    border = 0  # width of added border (optional)
    height = img.shape[0]
    width = img.shape[1]
    # Rotation and Scale
    R = np.eye(3)
    a = random.random() * (degrees[1] - degrees[0]) + degrees[0]
    s = random.random() * (scale[1] - scale[0]) + scale[0]
    R[:2] = cv2.getRotationMatrix2D(angle=a, center=(img.shape[1] / 2, img.shape[0] / 2), scale=s)
    # Translation
    T = np.eye(3)
    T[0, 2] = (random.random() * 2 - 1) * translate[0] * img.shape[0] + border  # x translation (pixels)
    T[1, 2] = (random.random() * 2 - 1) * translate[1] * img.shape[1] + border  # y translation (pixels)
    # Shear
    S = np.eye(3)
    S[0, 1] = math.tan((random.random() * (shear[1] - shear[0]) + shear[0]) * math.pi / 180)  # x shear (deg)
    S[1, 0] = math.tan((random.random() * (shear[1] - shear[0]) + shear[0]) * math.pi / 180)  # y shear (deg)
    M = S @ T @ R  # Combined rotation matrix. ORDER IS IMPORTANT HERE!!
    imw = cv2.warpPerspective(img, M, dsize=(width, height), flags=cv2.INTER_LINEAR,
                              borderValue=borderValue)  # BGR order borderValue
    if targets is None:
        return imw
    if len(targets) > 0:
        n = targets.shape[0]
        points = targets[:, 2:6].copy()
        area0 = (points[:, 2] - points[:, 0]) * (points[:, 3] - points[:, 1])
        # Warp the four corners of every box through M.
        xy = np.ones((n * 4, 3))
        xy[:, :2] = points[:, [0, 1, 2, 3, 0, 3, 2, 1]].reshape(n * 4, 2)  # x1y1, x2y2, x1y2, x2y1
        xy = (xy @ M.T)[:, :2].reshape(n, 8)
        # Axis-aligned bounding box of the warped corners.
        x = xy[:, [0, 2, 4, 6]]
        y = xy[:, [1, 3, 5, 7]]
        xy = np.concatenate((x.min(1), y.min(1), x.max(1), y.max(1))).reshape(4, n).T
        # Shrink boxes to compensate for corner expansion under rotation.
        radians = a * math.pi / 180
        reduction = max(abs(math.sin(radians)), abs(math.cos(radians))) ** 0.5
        x = (xy[:, 2] + xy[:, 0]) / 2
        y = (xy[:, 3] + xy[:, 1]) / 2
        w = (xy[:, 2] - xy[:, 0]) * reduction
        h = (xy[:, 3] - xy[:, 1]) * reduction
        xy = np.concatenate((x - w / 2, y - h / 2, x + w / 2, y + h / 2)).reshape(4, n).T
        # Drop boxes that became too small, lost >90% of their area,
        # or got an extreme aspect ratio after the warp.
        w = xy[:, 2] - xy[:, 0]
        h = xy[:, 3] - xy[:, 1]
        area = w * h
        ar = np.maximum(w / (h + 1e-16), h / (w + 1e-16))
        i = (w > 4) & (h > 4) & (area / (area0 + 1e-16) > 0.1) & (ar < 10)
        targets = targets[i]
        targets[:, 2:6] = xy[i]
    # Return even when targets is empty so callers can always unpack 3 values.
    return imw, targets, M
class JointDataset(LoadImagesAndLabels):  # for training
    """Joint training dataset over several sub-datasets.

    Each entry in `paths` maps a dataset name to a text file listing image
    paths (relative to `root`). Label files are derived from image paths by
    substituting 'images' -> 'labels_with_ids' and the image extension ->
    '.txt'. Track identities are re-indexed into one global id space by
    giving each sub-dataset a starting offset (`tid_start_index`).
    """
    default_resolution = [1088, 608]
    mean = None
    std = None
    num_classes = 1

    def __init__(self, opt, root, paths, img_size=(1088, 608), augment=False, transforms=None):
        self.opt = opt
        dataset_names = paths.keys()
        self.img_files = OrderedDict()
        self.label_files = OrderedDict()
        self.tid_num = OrderedDict()          # per-dataset identity count
        self.tid_start_index = OrderedDict()  # per-dataset global id offset
        self.num_classes = 1
        # Read the image list of every sub-dataset and derive label paths.
        for ds, path in paths.items():
            with open(path, 'r') as file:
                self.img_files[ds] = file.readlines()
                self.img_files[ds] = [osp.join(root, x.strip()) for x in self.img_files[ds]]
                self.img_files[ds] = list(filter(lambda x: len(x) > 0, self.img_files[ds]))
            self.label_files[ds] = [
                x.replace('images', 'labels_with_ids').replace('.png', '.txt').replace('.jpg', '.txt')
                for x in self.img_files[ds]]
        # Scan every label file to find the max track id per sub-dataset
        # (column 1 of each label row holds the track id).
        for ds, label_paths in self.label_files.items():
            max_index = -1
            for lp in label_paths:
                lb = np.loadtxt(lp)
                if len(lb) < 1:
                    continue
                if len(lb.shape) < 2:
                    # single-row file: loadtxt returns a 1-D array
                    img_max = lb[1]
                else:
                    img_max = np.max(lb[:, 1])
                if img_max > max_index:
                    max_index = img_max
            self.tid_num[ds] = max_index + 1
        # Cumulative offsets so ids from different sub-datasets don't clash.
        last_index = 0
        for i, (k, v) in enumerate(self.tid_num.items()):
            self.tid_start_index[k] = last_index
            last_index += v
        self.nID = int(last_index + 1)  # total number of identities
        self.nds = [len(x) for x in self.img_files.values()]          # per-dataset sizes
        self.cds = [sum(self.nds[:i]) for i in range(len(self.nds))]  # cumulative start indices
        self.nF = sum(self.nds)  # total number of images
        self.width = img_size[0]
        self.height = img_size[1]
        self.max_objs = opt.K
        self.augment = augment
        self.transforms = transforms
        print('=' * 80)
        print('dataset summary')
        print(self.tid_num)
        print('total # identities:', self.nID)
        print('start index')
        print(self.tid_start_index)
        print('=' * 80)

    def __getitem__(self, files_index):
        """Return CenterNet-style training targets for one image.

        Returns a dict with the input image, center heatmap `hm`, size
        targets `wh`, sub-pixel offsets `reg`, flat center indices `ind`,
        validity mask `reg_mask`, global track `ids` and raw `bbox` boxes.
        """
        # Locate which sub-dataset this flat index falls into: the loop
        # keeps the LAST cumulative start <= files_index.
        for i, c in enumerate(self.cds):
            if files_index >= c:
                ds = list(self.label_files.keys())[i]
                start_index = c
        img_path = self.img_files[ds][files_index - start_index]
        label_path = self.label_files[ds][files_index - start_index]
        imgs, labels, img_path, (input_h, input_w) = self.get_data(img_path, label_path)
        # Shift per-dataset track ids into the global id space (-1 = no id).
        for i, _ in enumerate(labels):
            if labels[i, 1] > -1:
                labels[i, 1] += self.tid_start_index[ds]
        output_h = imgs.shape[1] // self.opt.down_ratio
        output_w = imgs.shape[2] // self.opt.down_ratio
        num_classes = self.num_classes
        num_objs = labels.shape[0]
        hm = np.zeros((num_classes, output_h, output_w), dtype=np.float32)
        if self.opt.ltrb:
            # ltrb mode stores distances from center to all four box edges
            wh = np.zeros((self.max_objs, 4), dtype=np.float32)
        else:
            wh = np.zeros((self.max_objs, 2), dtype=np.float32)
        reg = np.zeros((self.max_objs, 2), dtype=np.float32)
        ind = np.zeros((self.max_objs, ), dtype=np.int64)
        reg_mask = np.zeros((self.max_objs, ), dtype=np.uint8)
        ids = np.zeros((self.max_objs, ), dtype=np.int64)
        bbox_xys = np.zeros((self.max_objs, 4), dtype=np.float32)
        draw_gaussian = draw_msra_gaussian if self.opt.mse_loss else draw_umich_gaussian
        for k in range(num_objs):
            label = labels[k]
            bbox = label[2:]  # normalized [cx, cy, w, h]
            cls_id = int(label[0])
            # Scale normalized coords to output (feature-map) resolution.
            bbox[[0, 2]] = bbox[[0, 2]] * output_w
            bbox[[1, 3]] = bbox[[1, 3]] * output_h
            # Amodal box: xyxy corners BEFORE clipping the center.
            bbox_amodal = copy.deepcopy(bbox)
            bbox_amodal[0] = bbox_amodal[0] - bbox_amodal[2] / 2.
            bbox_amodal[1] = bbox_amodal[1] - bbox_amodal[3] / 2.
            bbox_amodal[2] = bbox_amodal[0] + bbox_amodal[2]
            bbox_amodal[3] = bbox_amodal[1] + bbox_amodal[3]
            # Clamp the center into the feature map.
            bbox[0] = np.clip(bbox[0], 0, output_w - 1)
            bbox[1] = np.clip(bbox[1], 0, output_h - 1)
            h = bbox[3]
            w = bbox[2]
            bbox_xy = copy.deepcopy(bbox)
            bbox_xy[0] = bbox_xy[0] - bbox_xy[2] / 2
            bbox_xy[1] = bbox_xy[1] - bbox_xy[3] / 2
            bbox_xy[2] = bbox_xy[0] + bbox_xy[2]
            bbox_xy[3] = bbox_xy[1] + bbox_xy[3]
            if h > 0 and w > 0:
                radius = gaussian_radius((math.ceil(h), math.ceil(w)))
                radius = max(0, int(radius))
                # mse_loss uses a fixed gaussian radius
                radius = 6 if self.opt.mse_loss else radius
                #radius = max(1, int(radius)) if self.opt.mse_loss else radius
                ct = np.array(
                    [bbox[0], bbox[1]], dtype=np.float32)
                ct_int = ct.astype(np.int32)
                draw_gaussian(hm[cls_id], ct_int, radius)
                if self.opt.ltrb:
                    wh[k] = ct[0] - bbox_amodal[0], ct[1] - bbox_amodal[1], \
                            bbox_amodal[2] - ct[0], bbox_amodal[3] - ct[1]
                else:
                    wh[k] = 1. * w, 1. * h
                ind[k] = ct_int[1] * output_w + ct_int[0]  # flat index of the center cell
                reg[k] = ct - ct_int  # sub-pixel offset
                reg_mask[k] = 1
                ids[k] = label[1]
                bbox_xys[k] = bbox_xy
        ret = {'input': imgs, 'hm': hm, 'reg_mask': reg_mask, 'ind': ind, 'wh': wh, 'reg': reg, 'ids': ids, 'bbox': bbox_xys}
        return ret
class DetDataset(LoadImagesAndLabels):  # for training
    """Detection-only dataset over several sub-datasets.

    Same file-list / label-path convention as JointDataset: `paths` maps a
    dataset name to a text file of image paths under `root`; label paths
    are derived by substituting 'images' -> 'labels_with_ids' and the
    image extension -> '.txt'. Track ids are offset into one global space.
    """

    def __init__(self, root, paths, img_size=(1088, 608), augment=False, transforms=None):
        dataset_names = paths.keys()
        self.img_files = OrderedDict()
        self.label_files = OrderedDict()
        self.tid_num = OrderedDict()          # per-dataset identity count
        self.tid_start_index = OrderedDict()  # per-dataset global id offset
        # Read image lists and derive label paths.
        for ds, path in paths.items():
            with open(path, 'r') as file:
                self.img_files[ds] = file.readlines()
                self.img_files[ds] = [osp.join(root, x.strip()) for x in self.img_files[ds]]
                self.img_files[ds] = list(filter(lambda x: len(x) > 0, self.img_files[ds]))
            self.label_files[ds] = [
                x.replace('images', 'labels_with_ids').replace('.png', '.txt').replace('.jpg', '.txt')
                for x in self.img_files[ds]]
        # Find the max track id of each sub-dataset (label column 1).
        for ds, label_paths in self.label_files.items():
            max_index = -1
            for lp in label_paths:
                lb = np.loadtxt(lp)
                if len(lb) < 1:
                    continue
                if len(lb.shape) < 2:
                    # single-row file: loadtxt returns a 1-D array
                    img_max = lb[1]
                else:
                    img_max = np.max(lb[:, 1])
                if img_max > max_index:
                    max_index = img_max
            self.tid_num[ds] = max_index + 1
        # Cumulative offsets so ids from different sub-datasets don't clash.
        last_index = 0
        for i, (k, v) in enumerate(self.tid_num.items()):
            self.tid_start_index[k] = last_index
            last_index += v
        self.nID = int(last_index + 1)  # total number of identities
        self.nds = [len(x) for x in self.img_files.values()]          # per-dataset sizes
        self.cds = [sum(self.nds[:i]) for i in range(len(self.nds))]  # cumulative start indices
        self.nF = sum(self.nds)  # total number of images
        self.width = img_size[0]
        self.height = img_size[1]
        self.augment = augment
        self.transforms = transforms
        print('=' * 80)
        print('dataset summary')
        print(self.tid_num)
        print('total # identities:', self.nID)
        print('start index')
        print(self.tid_start_index)
        print('=' * 80)

    def __getitem__(self, files_index):
        """Return (image, raw labels, image path, (h, w)) for one sample.

        `labels0` are the raw on-disk labels (Nx6: cls, id, cx, cy, w, h);
        the augmented labels from get_data() are only used to shift track
        ids into the global id space.
        """
        # Locate which sub-dataset this flat index falls into: the loop
        # keeps the LAST cumulative start <= files_index.
        for i, c in enumerate(self.cds):
            if files_index >= c:
                ds = list(self.label_files.keys())[i]
                start_index = c
        img_path = self.img_files[ds][files_index - start_index]
        label_path = self.label_files[ds][files_index - start_index]
        if os.path.isfile(label_path):
            labels0 = np.loadtxt(label_path, dtype=np.float32).reshape(-1, 6)
        else:
            # Bug fix: labels0 was left undefined when the label file is
            # missing, raising NameError at the return below. Use an empty
            # (0, 6) array so the sample is still usable.
            labels0 = np.zeros((0, 6), dtype=np.float32)
        imgs, labels, img_path, (h, w) = self.get_data(img_path, label_path)
        # Shift per-dataset track ids into the global id space (-1 = no id).
        for i, _ in enumerate(labels):
            if labels[i, 1] > -1:
                labels[i, 1] += self.tid_start_index[ds]
        return imgs, labels0, img_path, (h, w)
| 37.950368
| 125
| 0.535335
|
794b4de296156f241940a937ee3dc3fb0ecd9a30
| 27,216
|
py
|
Python
|
ticket_to_ride/static/routes.py
|
dcastf01/API_to_ticket_ticket_to_ride
|
1d291dac9f697b9836e97f3dee97f44ca8de2755
|
[
"MIT"
] | null | null | null |
ticket_to_ride/static/routes.py
|
dcastf01/API_to_ticket_ticket_to_ride
|
1d291dac9f697b9836e97f3dee97f44ca8de2755
|
[
"MIT"
] | null | null | null |
ticket_to_ride/static/routes.py
|
dcastf01/API_to_ticket_ticket_to_ride
|
1d291dac9f697b9836e97f3dee97f44ca8de2755
|
[
"MIT"
] | null | null | null |
import itertools
all_routes=[ ('Cadiz','Lisboa',2),
('Cadiz','Madrid',3),
('Cadiz','Barcelona',5),
('Cadiz','Pamplona',6),
('Cadiz','Marseille',9),
('Cadiz','Brest',10),
('Cadiz','Dieppe',11),
('Cadiz','Paris',10),
('Cadiz','Bruxelles',12),
('Cadiz','Zurich',11),
('Cadiz','Munchen',13),
('Cadiz','Frankfurt',13),
('Cadiz','Roma',13),
('Cadiz','Venezia',13),
('Cadiz','London',13),
('Cadiz','Edinburgh',17),
('Cadiz','Amsterdam',13),
('Cadiz','Essen',15),
('Cadiz','Kobenhavn',18),
('Cadiz','Stockholm',21),
('Cadiz','Berlin',16),
('Cadiz','Wien',16),
('Cadiz','Zagrab',15),
('Cadiz','Brindisi',15),
('Cadiz','Palermo',17),
('Cadiz','Athina',19),
('Cadiz','Sarajevo',18),
('Cadiz','Budapest',17),
('Cadiz','Warszawa',20),
('Cadiz','Danzig',20),
('Cadiz','Riga',23),
('Cadiz','Petrograd',27),
('Cadiz','Wilno',23),
('Cadiz','Kyiv',24),
('Cadiz','Bucuresti',21),
('Cadiz','Sofia',20),
('Cadiz','Smyrna',21),
('Cadiz','Constantinople',23),
('Cadiz','Angora',24),
('Cadiz','Erzurum',27),
('Cadiz','Sevastopol',25),
('Cadiz','Sochi',27),
('Cadiz','Rostov',29),
('Cadiz','Kharkov',28),
('Cadiz','Moskva',28),
('Cadiz','Smolensk',26),
('Lisboa','Madrid',3),
('Lisboa','Barcelona',5),
('Lisboa','Pamplona',6),
('Lisboa','Marseille',9),
('Lisboa','Brest',10),
('Lisboa','Dieppe',11),
('Lisboa','Paris',10),
('Lisboa','Bruxelles',12),
('Lisboa','Zurich',11),
('Lisboa','Munchen',13),
('Lisboa','Frankfurt',13),
('Lisboa','Roma',13),
('Lisboa','Venezia',13),
('Lisboa','London',13),
('Lisboa','Edinburgh',17),
('Lisboa','Amsterdam',13),
('Lisboa','Essen',15),
('Lisboa','Kobenhavn',18),
('Lisboa','Stockholm',21),
('Lisboa','Berlin',16),
('Lisboa','Wien',16),
('Lisboa','Zagrab',15),
('Lisboa','Brindisi',15),
('Lisboa','Palermo',17),
('Lisboa','Athina',19),
('Lisboa','Sarajevo',18),
('Lisboa','Budapest',17),
('Lisboa','Warszawa',20),
('Lisboa','Danzig',20),
('Lisboa','Riga',23),
('Lisboa','Petrograd',27),
('Lisboa','Wilno',23),
('Lisboa','Kyiv',24),
('Lisboa','Bucuresti',21),
('Lisboa','Sofia',20),
('Lisboa','Smyrna',21),
('Lisboa','Constantinople',23),
('Lisboa','Angora',24),
('Lisboa','Erzurum',27),
('Lisboa','Sevastopol',25),
('Lisboa','Sochi',27),
('Lisboa','Rostov',29),
('Lisboa','Kharkov',28),
('Lisboa','Moskva',28),
('Lisboa','Smolensk',26),
('Madrid','Barcelona',2),
('Madrid','Pamplona',3),
('Madrid','Marseille',6),
('Madrid','Brest',7),
('Madrid','Dieppe',8),
('Madrid','Paris',7),
('Madrid','Bruxelles',9),
('Madrid','Zurich',8),
('Madrid','Munchen',10),
('Madrid','Frankfurt',10),
('Madrid','Roma',10),
('Madrid','Venezia',10),
('Madrid','London',10),
('Madrid','Edinburgh',14),
('Madrid','Amsterdam',10),
('Madrid','Essen',12),
('Madrid','Kobenhavn',15),
('Madrid','Stockholm',18),
('Madrid','Berlin',13),
('Madrid','Wien',13),
('Madrid','Zagrab',12),
('Madrid','Brindisi',12),
('Madrid','Palermo',14),
('Madrid','Athina',16),
('Madrid','Sarajevo',15),
('Madrid','Budapest',14),
('Madrid','Warszawa',17),
('Madrid','Danzig',17),
('Madrid','Riga',20),
('Madrid','Petrograd',24),
('Madrid','Wilno',20),
('Madrid','Kyiv',21),
('Madrid','Bucuresti',18),
('Madrid','Sofia',17),
('Madrid','Smyrna',18),
('Madrid','Constantinople',20),
('Madrid','Angora',21),
('Madrid','Erzurum',24),
('Madrid','Sevastopol',22),
('Madrid','Sochi',24),
('Madrid','Rostov',26),
('Madrid','Kharkov',25),
('Madrid','Moskva',25),
('Madrid','Smolensk',23),
('Barcelona','Pamplona',2),
('Barcelona','Marseille',4),
('Barcelona','Brest',6),
('Barcelona','Dieppe',7),
('Barcelona','Paris',6),
('Barcelona','Bruxelles',8),
('Barcelona','Zurich',6),
('Barcelona','Munchen',8),
('Barcelona','Frankfurt',9),
('Barcelona','Roma',8),
('Barcelona','Venezia',8),
('Barcelona','London',9),
('Barcelona','Edinburgh',13),
('Barcelona','Amsterdam',9),
('Barcelona','Essen',11),
('Barcelona','Kobenhavn',14),
('Barcelona','Stockholm',17),
('Barcelona','Berlin',12),
('Barcelona','Wien',11),
('Barcelona','Zagrab',10),
('Barcelona','Brindisi',10),
('Barcelona','Palermo',12),
('Barcelona','Athina',14),
('Barcelona','Sarajevo',13),
('Barcelona','Budapest',12),
('Barcelona','Warszawa',15),
('Barcelona','Danzig',16),
('Barcelona','Riga',19),
('Barcelona','Petrograd',22),
('Barcelona','Wilno',18),
('Barcelona','Kyiv',18),
('Barcelona','Bucuresti',16),
('Barcelona','Sofia',15),
('Barcelona','Smyrna',16),
('Barcelona','Constantinople',18),
('Barcelona','Angora',20),
('Barcelona','Erzurum',23),
('Barcelona','Sevastopol',20),
('Barcelona','Sochi',22),
('Barcelona','Rostov',24),
('Barcelona','Kharkov',22),
('Barcelona','Moskva',23),
('Barcelona','Smolensk',21),
('Pamplona','Marseille',4),
('Pamplona','Brest',4),
('Pamplona','Dieppe',5),
('Pamplona','Paris',4),
('Pamplona','Bruxelles',6),
('Pamplona','Zurich',6),
('Pamplona','Munchen',8),
('Pamplona','Frankfurt',7),
('Pamplona','Roma',8),
('Pamplona','Venezia',8),
('Pamplona','London',7),
('Pamplona','Edinburgh',11),
('Pamplona','Amsterdam',7),
('Pamplona','Essen',9),
('Pamplona','Kobenhavn',12),
('Pamplona','Stockholm',15),
('Pamplona','Berlin',10),
('Pamplona','Wien',11),
('Pamplona','Zagrab',10),
('Pamplona','Brindisi',10),
('Pamplona','Palermo',12),
('Pamplona','Athina',14),
('Pamplona','Sarajevo',13),
('Pamplona','Budapest',12),
('Pamplona','Warszawa',14),
('Pamplona','Danzig',14),
('Pamplona','Riga',17),
('Pamplona','Petrograd',21),
('Pamplona','Wilno',17),
('Pamplona','Kyiv',18),
('Pamplona','Bucuresti',16),
('Pamplona','Sofia',15),
('Pamplona','Smyrna',17),
('Pamplona','Constantinople',18),
('Pamplona','Angora',20),
('Pamplona','Erzurum',23),
('Pamplona','Sevastopol',20),
('Pamplona','Sochi',22),
('Pamplona','Rostov',24),
('Pamplona','Kharkov',22),
('Pamplona','Moskva',22),
('Pamplona','Smolensk',20),
('Marseille','Brest',7),
('Marseille','Dieppe',5),
('Marseille','Paris',4),
('Marseille','Bruxelles',6),
('Marseille','Zurich',2),
('Marseille','Munchen',4),
('Marseille','Frankfurt',6),
('Marseille','Roma',4),
('Marseille','Venezia',4),
('Marseille','London',7),
('Marseille','Edinburgh',11),
('Marseille','Amsterdam',7),
('Marseille','Essen',8),
('Marseille','Kobenhavn',11),
('Marseille','Stockholm',14),
('Marseille','Berlin',9),
('Marseille','Wien',7),
('Marseille','Zagrab',6),
('Marseille','Brindisi',6),
('Marseille','Palermo',8),
('Marseille','Athina',10),
('Marseille','Sarajevo',9),
('Marseille','Budapest',8),
('Marseille','Warszawa',11),
('Marseille','Danzig',13),
('Marseille','Riga',16),
('Marseille','Petrograd',18),
('Marseille','Wilno',14),
('Marseille','Kyiv',14),
('Marseille','Bucuresti',12),
('Marseille','Sofia',11),
('Marseille','Smyrna',12),
('Marseille','Constantinople',14),
('Marseille','Angora',15),
('Marseille','Erzurum',18),
('Marseille','Sevastopol',16),
('Marseille','Sochi',18),
('Marseille','Rostov',20),
('Marseille','Kharkov',18),
('Marseille','Moskva',19),
('Marseille','Smolensk',17),
('Brest','Dieppe',2),
('Brest','Paris',3),
('Brest','Bruxelles',4),
('Brest','Zurich',6),
('Brest','Munchen',8),
('Brest','Frankfurt',6),
('Brest','Roma',10),
('Brest','Venezia',8),
('Brest','London',4),
('Brest','Edinburgh',8),
('Brest','Amsterdam',5),
('Brest','Essen',8),
('Brest','Kobenhavn',11),
('Brest','Stockholm',14),
('Brest','Berlin',9),
('Brest','Wien',11),
('Brest','Zagrab',10),
('Brest','Brindisi',12),
('Brest','Palermo',14),
('Brest','Athina',16),
('Brest','Sarajevo',13),
('Brest','Budapest',12),
('Brest','Warszawa',13),
('Brest','Danzig',13),
('Brest','Riga',16),
('Brest','Petrograd',20),
('Brest','Wilno',16),
('Brest','Kyiv',17),
('Brest','Bucuresti',16),
('Brest','Sofia',15),
('Brest','Smyrna',18),
('Brest','Constantinople',18),
('Brest','Angora',20),
('Brest','Erzurum',23),
('Brest','Sevastopol',20),
('Brest','Sochi',23),
('Brest','Rostov',22),
('Brest','Kharkov',21),
('Brest','Moskva',21),
('Brest','Smolensk',19),
('Dieppe','Paris',1),
('Dieppe','Bruxelles',2),
('Dieppe','Zurich',4),
('Dieppe','Munchen',6),
('Dieppe','Frankfurt',4),
('Dieppe','Roma',8),
('Dieppe','Venezia',6),
('Dieppe','London',2),
('Dieppe','Edinburgh',6),
('Dieppe','Amsterdam',3),
('Dieppe','Essen',6),
('Dieppe','Kobenhavn',9),
('Dieppe','Stockholm',12),
('Dieppe','Berlin',7),
('Dieppe','Wien',9),
('Dieppe','Zagrab',8),
('Dieppe','Brindisi',10),
('Dieppe','Palermo',12),
('Dieppe','Athina',14),
('Dieppe','Sarajevo',11),
('Dieppe','Budapest',10),
('Dieppe','Warszawa',11),
('Dieppe','Danzig',11),
('Dieppe','Riga',14),
('Dieppe','Petrograd',18),
('Dieppe','Wilno',14),
('Dieppe','Kyiv',15),
('Dieppe','Bucuresti',0),
('Dieppe','Sofia',0),
('Dieppe','Smyrna',0),
('Dieppe','Constantinople',0),
('Dieppe','Angora',0),
('Dieppe','Erzurum',0),
('Dieppe','Sevastopol',0),
('Dieppe','Sochi',0),
('Dieppe','Rostov',0),
('Dieppe','Kharkov',0),
('Dieppe','Moskva',0),
('Dieppe','Smolensk',0),
('Paris','Bruxelles',0),
('Paris','Zurich',0),
('Paris','Munchen',0),
('Paris','Frankfurt',0),
('Paris','Roma',0),
('Paris','Venezia',0),
('Paris','London',0),
('Paris','Edinburgh',0),
('Paris','Amsterdam',0),
('Paris','Essen',0),
('Paris','Kobenhavn',0),
('Paris','Stockholm',0),
('Paris','Berlin',0),
('Paris','Wien',0),
('Paris','Zagrab',0),
('Paris','Brindisi',0),
('Paris','Palermo',0),
('Paris','Athina',0),
('Paris','Sarajevo',0),
('Paris','Budapest',0),
('Paris','Warszawa',0),
('Paris','Danzig',0),
('Paris','Riga',0),
('Paris','Petrograd',0),
('Paris','Wilno',0),
('Paris','Kyiv',0),
('Paris','Bucuresti',0),
('Paris','Sofia',0),
('Paris','Smyrna',0),
('Paris','Constantinople',0),
('Paris','Angora',0),
('Paris','Erzurum',0),
('Paris','Sevastopol',0),
('Paris','Sochi',0),
('Paris','Rostov',0),
('Paris','Kharkov',0),
('Paris','Moskva',0),
('Paris','Smolensk',0),
('Bruxelles','Zurich',0),
('Bruxelles','Munchen',0),
('Bruxelles','Frankfurt',0),
('Bruxelles','Roma',0),
('Bruxelles','Venezia',0),
('Bruxelles','London',0),
('Bruxelles','Edinburgh',0),
('Bruxelles','Amsterdam',0),
('Bruxelles','Essen',0),
('Bruxelles','Kobenhavn',0),
('Bruxelles','Stockholm',0),
('Bruxelles','Berlin',0),
('Bruxelles','Wien',0),
('Bruxelles','Zagrab',0),
('Bruxelles','Brindisi',0),
('Bruxelles','Palermo',0),
('Bruxelles','Athina',0),
('Bruxelles','Sarajevo',0),
('Bruxelles','Budapest',0),
('Bruxelles','Warszawa',0),
('Bruxelles','Danzig',0),
('Bruxelles','Riga',0),
('Bruxelles','Petrograd',0),
('Bruxelles','Wilno',0),
('Bruxelles','Kyiv',0),
('Bruxelles','Bucuresti',0),
('Bruxelles','Sofia',0),
('Bruxelles','Smyrna',0),
('Bruxelles','Constantinople',0),
('Bruxelles','Angora',0),
('Bruxelles','Erzurum',0),
('Bruxelles','Sevastopol',0),
('Bruxelles','Sochi',0),
('Bruxelles','Rostov',0),
('Bruxelles','Kharkov',0),
('Bruxelles','Moskva',0),
('Bruxelles','Smolensk',0),
('Zurich','Munchen',0),
('Zurich','Frankfurt',0),
('Zurich','Roma',0),
('Zurich','Venezia',0),
('Zurich','London',0),
('Zurich','Edinburgh',0),
('Zurich','Amsterdam',0),
('Zurich','Essen',0),
('Zurich','Kobenhavn',0),
('Zurich','Stockholm',0),
('Zurich','Berlin',0),
('Zurich','Wien',0),
('Zurich','Zagrab',0),
('Zurich','Brindisi',0),
('Zurich','Palermo',0),
('Zurich','Athina',0),
('Zurich','Sarajevo',0),
('Zurich','Budapest',0),
('Zurich','Warszawa',0),
('Zurich','Danzig',0),
('Zurich','Riga',0),
('Zurich','Petrograd',0),
('Zurich','Wilno',0),
('Zurich','Kyiv',0),
('Zurich','Bucuresti',0),
('Zurich','Sofia',0),
('Zurich','Smyrna',0),
('Zurich','Constantinople',0),
('Zurich','Angora',0),
('Zurich','Erzurum',0),
('Zurich','Sevastopol',0),
('Zurich','Sochi',0),
('Zurich','Rostov',0),
('Zurich','Kharkov',0),
('Zurich','Moskva',0),
('Zurich','Smolensk',0),
('Munchen','Frankfurt',0),
('Munchen','Roma',0),
('Munchen','Venezia',0),
('Munchen','London',0),
('Munchen','Edinburgh',0),
('Munchen','Amsterdam',0),
('Munchen','Essen',0),
('Munchen','Kobenhavn',0),
('Munchen','Stockholm',0),
('Munchen','Berlin',0),
('Munchen','Wien',0),
('Munchen','Zagrab',0),
('Munchen','Brindisi',0),
('Munchen','Palermo',0),
('Munchen','Athina',0),
('Munchen','Sarajevo',0),
('Munchen','Budapest',0),
('Munchen','Warszawa',0),
('Munchen','Danzig',0),
('Munchen','Riga',0),
('Munchen','Petrograd',0),
('Munchen','Wilno',0),
('Munchen','Kyiv',0),
('Munchen','Bucuresti',0),
('Munchen','Sofia',0),
('Munchen','Smyrna',0),
('Munchen','Constantinople',0),
('Munchen','Angora',0),
('Munchen','Erzurum',0),
('Munchen','Sevastopol',0),
('Munchen','Sochi',0),
('Munchen','Rostov',0),
('Munchen','Kharkov',0),
('Munchen','Moskva',0),
('Munchen','Smolensk',0),
('Frankfurt','Roma',0),
('Frankfurt','Venezia',0),
('Frankfurt','London',0),
('Frankfurt','Edinburgh',0),
('Frankfurt','Amsterdam',0),
('Frankfurt','Essen',0),
('Frankfurt','Kobenhavn',0),
('Frankfurt','Stockholm',0),
('Frankfurt','Berlin',0),
('Frankfurt','Wien',0),
('Frankfurt','Zagrab',0),
('Frankfurt','Brindisi',0),
('Frankfurt','Palermo',0),
('Frankfurt','Athina',0),
('Frankfurt','Sarajevo',0),
('Frankfurt','Budapest',0),
('Frankfurt','Warszawa',0),
('Frankfurt','Danzig',0),
('Frankfurt','Riga',0),
('Frankfurt','Petrograd',0),
('Frankfurt','Wilno',0),
('Frankfurt','Kyiv',0),
('Frankfurt','Bucuresti',0),
('Frankfurt','Sofia',0),
('Frankfurt','Smyrna',0),
('Frankfurt','Constantinople',0),
('Frankfurt','Angora',0),
('Frankfurt','Erzurum',0),
('Frankfurt','Sevastopol',0),
('Frankfurt','Sochi',0),
('Frankfurt','Rostov',0),
('Frankfurt','Kharkov',0),
('Frankfurt','Moskva',0),
('Frankfurt','Smolensk',0),
('Roma','Venezia',0),
('Roma','London',0),
('Roma','Edinburgh',0),
('Roma','Amsterdam',0),
('Roma','Essen',0),
('Roma','Kobenhavn',0),
('Roma','Stockholm',0),
('Roma','Berlin',0),
('Roma','Wien',0),
('Roma','Zagrab',0),
('Roma','Brindisi',0),
('Roma','Palermo',0),
('Roma','Athina',0),
('Roma','Sarajevo',0),
('Roma','Budapest',0),
('Roma','Warszawa',0),
('Roma','Danzig',0),
('Roma','Riga',0),
('Roma','Petrograd',0),
('Roma','Wilno',0),
('Roma','Kyiv',0),
('Roma','Bucuresti',0),
('Roma','Sofia',0),
('Roma','Smyrna',0),
('Roma','Constantinople',0),
('Roma','Angora',0),
('Roma','Erzurum',0),
('Roma','Sevastopol',0),
('Roma','Sochi',0),
('Roma','Rostov',0),
('Roma','Kharkov',0),
('Roma','Moskva',0),
('Roma','Smolensk',0),
('Venezia','London',0),
('Venezia','Edinburgh',0),
('Venezia','Amsterdam',0),
('Venezia','Essen',0),
('Venezia','Kobenhavn',0),
('Venezia','Stockholm',0),
('Venezia','Berlin',0),
('Venezia','Wien',0),
('Venezia','Zagrab',0),
('Venezia','Brindisi',0),
('Venezia','Palermo',0),
('Venezia','Athina',0),
('Venezia','Sarajevo',0),
('Venezia','Budapest',0),
('Venezia','Warszawa',0),
('Venezia','Danzig',0),
('Venezia','Riga',0),
('Venezia','Petrograd',0),
('Venezia','Wilno',0),
('Venezia','Kyiv',0),
('Venezia','Bucuresti',0),
('Venezia','Sofia',0),
('Venezia','Smyrna',0),
('Venezia','Constantinople',0),
('Venezia','Angora',0),
('Venezia','Erzurum',0),
('Venezia','Sevastopol',0),
('Venezia','Sochi',0),
('Venezia','Rostov',0),
('Venezia','Kharkov',0),
('Venezia','Moskva',0),
('Venezia','Smolensk',0),
('London','Edinburgh',0),
('London','Amsterdam',0),
('London','Essen',0),
('London','Kobenhavn',0),
('London','Stockholm',0),
('London','Berlin',0),
('London','Wien',0),
('London','Zagrab',0),
('London','Brindisi',0),
('London','Palermo',0),
('London','Athina',0),
('London','Sarajevo',0),
('London','Budapest',0),
('London','Warszawa',0),
('London','Danzig',0),
('London','Riga',0),
('London','Petrograd',0),
('London','Wilno',0),
('London','Kyiv',0),
('London','Bucuresti',0),
('London','Sofia',0),
('London','Smyrna',0),
('London','Constantinople',0),
('London','Angora',0),
('London','Erzurum',0),
('London','Sevastopol',0),
('London','Sochi',0),
('London','Rostov',0),
('London','Kharkov',0),
('London','Moskva',0),
('London','Smolensk',0),
('Edinburgh','Amsterdam',0),
('Edinburgh','Essen',0),
('Edinburgh','Kobenhavn',0),
('Edinburgh','Stockholm',0),
('Edinburgh','Berlin',0),
('Edinburgh','Wien',0),
('Edinburgh','Zagrab',0),
('Edinburgh','Brindisi',0),
('Edinburgh','Palermo',0),
('Edinburgh','Athina',0),
('Edinburgh','Sarajevo',0),
('Edinburgh','Budapest',0),
('Edinburgh','Warszawa',0),
('Edinburgh','Danzig',0),
('Edinburgh','Riga',0),
('Edinburgh','Petrograd',0),
('Edinburgh','Wilno',0),
('Edinburgh','Kyiv',0),
('Edinburgh','Bucuresti',0),
('Edinburgh','Sofia',0),
('Edinburgh','Smyrna',0),
('Edinburgh','Constantinople',0),
('Edinburgh','Angora',0),
('Edinburgh','Erzurum',0),
('Edinburgh','Sevastopol',0),
('Edinburgh','Sochi',0),
('Edinburgh','Rostov',0),
('Edinburgh','Kharkov',0),
('Edinburgh','Moskva',0),
('Edinburgh','Smolensk',0),
('Amsterdam','Essen',0),
('Amsterdam','Kobenhavn',0),
('Amsterdam','Stockholm',0),
('Amsterdam','Berlin',0),
('Amsterdam','Wien',0),
('Amsterdam','Zagrab',0),
('Amsterdam','Brindisi',0),
('Amsterdam','Palermo',0),
('Amsterdam','Athina',0),
('Amsterdam','Sarajevo',0),
('Amsterdam','Budapest',0),
('Amsterdam','Warszawa',0),
('Amsterdam','Danzig',0),
('Amsterdam','Riga',0),
('Amsterdam','Petrograd',0),
('Amsterdam','Wilno',0),
('Amsterdam','Kyiv',0),
('Amsterdam','Bucuresti',0),
('Amsterdam','Sofia',0),
('Amsterdam','Smyrna',0),
('Amsterdam','Constantinople',0),
('Amsterdam','Angora',0),
('Amsterdam','Erzurum',0),
('Amsterdam','Sevastopol',0),
('Amsterdam','Sochi',0),
('Amsterdam','Rostov',0),
('Amsterdam','Kharkov',0),
('Amsterdam','Moskva',0),
('Amsterdam','Smolensk',0),
('Essen','Kobenhavn',0),
('Essen','Stockholm',0),
('Essen','Berlin',0),
('Essen','Wien',0),
('Essen','Zagrab',0),
('Essen','Brindisi',0),
('Essen','Palermo',0),
('Essen','Athina',0),
('Essen','Sarajevo',0),
('Essen','Budapest',0),
('Essen','Warszawa',0),
('Essen','Danzig',0),
('Essen','Riga',0),
('Essen','Petrograd',0),
('Essen','Wilno',0),
('Essen','Kyiv',0),
('Essen','Bucuresti',0),
('Essen','Sofia',0),
('Essen','Smyrna',0),
('Essen','Constantinople',0),
('Essen','Angora',0),
('Essen','Erzurum',0),
('Essen','Sevastopol',0),
('Essen','Sochi',0),
('Essen','Rostov',0),
('Essen','Kharkov',0),
('Essen','Moskva',0),
('Essen','Smolensk',0),
('Kobenhavn','Stockholm',0),
('Kobenhavn','Berlin',0),
('Kobenhavn','Wien',0),
('Kobenhavn','Zagrab',0),
('Kobenhavn','Brindisi',0),
('Kobenhavn','Palermo',0),
('Kobenhavn','Athina',0),
('Kobenhavn','Sarajevo',0),
('Kobenhavn','Budapest',0),
('Kobenhavn','Warszawa',0),
('Kobenhavn','Danzig',0),
('Kobenhavn','Riga',0),
('Kobenhavn','Petrograd',0),
('Kobenhavn','Wilno',0),
('Kobenhavn','Kyiv',0),
('Kobenhavn','Bucuresti',0),
('Kobenhavn','Sofia',0),
('Kobenhavn','Smyrna',0),
('Kobenhavn','Constantinople',0),
('Kobenhavn','Angora',0),
('Kobenhavn','Erzurum',0),
('Kobenhavn','Sevastopol',0),
('Kobenhavn','Sochi',0),
('Kobenhavn','Rostov',0),
('Kobenhavn','Kharkov',0),
('Kobenhavn','Moskva',0),
('Kobenhavn','Smolensk',0),
('Stockholm','Berlin',0),
('Stockholm','Wien',0),
('Stockholm','Zagrab',0),
('Stockholm','Brindisi',0),
('Stockholm','Palermo',0),
('Stockholm','Athina',0),
('Stockholm','Sarajevo',0),
('Stockholm','Budapest',0),
('Stockholm','Warszawa',0),
('Stockholm','Danzig',0),
('Stockholm','Riga',0),
('Stockholm','Petrograd',0),
('Stockholm','Wilno',0),
('Stockholm','Kyiv',0),
('Stockholm','Bucuresti',0),
('Stockholm','Sofia',0),
('Stockholm','Smyrna',0),
('Stockholm','Constantinople',0),
('Stockholm','Angora',0),
('Stockholm','Erzurum',0),
('Stockholm','Sevastopol',0),
('Stockholm','Sochi',0),
('Stockholm','Rostov',0),
('Stockholm','Kharkov',0),
('Stockholm','Moskva',0),
('Stockholm','Smolensk',0),
('Berlin','Wien',0),
('Berlin','Zagrab',0),
('Berlin','Brindisi',0),
('Berlin','Palermo',0),
('Berlin','Athina',0),
('Berlin','Sarajevo',0),
('Berlin','Budapest',0),
('Berlin','Warszawa',0),
('Berlin','Danzig',0),
('Berlin','Riga',0),
('Berlin','Petrograd',0),
('Berlin','Wilno',0),
('Berlin','Kyiv',0),
('Berlin','Bucuresti',0),
('Berlin','Sofia',0),
('Berlin','Smyrna',0),
('Berlin','Constantinople',0),
('Berlin','Angora',0),
('Berlin','Erzurum',0),
('Berlin','Sevastopol',0),
('Berlin','Sochi',0),
('Berlin','Rostov',0),
('Berlin','Kharkov',0),
('Berlin','Moskva',0),
('Berlin','Smolensk',0),
('Wien','Zagrab',0),
('Wien','Brindisi',0),
('Wien','Palermo',0),
('Wien','Athina',0),
('Wien','Sarajevo',0),
('Wien','Budapest',0),
('Wien','Warszawa',0),
('Wien','Danzig',0),
('Wien','Riga',0),
('Wien','Petrograd',0),
('Wien','Wilno',0),
('Wien','Kyiv',0),
('Wien','Bucuresti',0),
('Wien','Sofia',0),
('Wien','Smyrna',0),
('Wien','Constantinople',0),
('Wien','Angora',0),
('Wien','Erzurum',0),
('Wien','Sevastopol',0),
('Wien','Sochi',0),
('Wien','Rostov',0),
('Wien','Kharkov',0),
('Wien','Moskva',0),
('Wien','Smolensk',0),
('Zagrab','Brindisi',0),
('Zagrab','Palermo',0),
('Zagrab','Athina',0),
('Zagrab','Sarajevo',0),
('Zagrab','Budapest',0),
('Zagrab','Warszawa',0),
('Zagrab','Danzig',0),
('Zagrab','Riga',0),
('Zagrab','Petrograd',0),
('Zagrab','Wilno',0),
('Zagrab','Kyiv',0),
('Zagrab','Bucuresti',0),
('Zagrab','Sofia',0),
('Zagrab','Smyrna',0),
('Zagrab','Constantinople',0),
('Zagrab','Angora',0),
('Zagrab','Erzurum',0),
('Zagrab','Sevastopol',0),
('Zagrab','Sochi',0),
('Zagrab','Rostov',0),
('Zagrab','Kharkov',0),
('Zagrab','Moskva',0),
('Zagrab','Smolensk',0),
('Brindisi','Palermo',0),
('Brindisi','Athina',0),
('Brindisi','Sarajevo',0),
('Brindisi','Budapest',0),
('Brindisi','Warszawa',0),
('Brindisi','Danzig',0),
('Brindisi','Riga',0),
('Brindisi','Petrograd',0),
('Brindisi','Wilno',0),
('Brindisi','Kyiv',0),
('Brindisi','Bucuresti',0),
('Brindisi','Sofia',0),
('Brindisi','Smyrna',0),
('Brindisi','Constantinople',0),
('Brindisi','Angora',0),
('Brindisi','Erzurum',0),
('Brindisi','Sevastopol',0),
('Brindisi','Sochi',0),
('Brindisi','Rostov',0),
('Brindisi','Kharkov',0),
('Brindisi','Moskva',0),
('Brindisi','Smolensk',0),
('Palermo','Athina',0),
('Palermo','Sarajevo',0),
('Palermo','Budapest',0),
('Palermo','Warszawa',0),
('Palermo','Danzig',0),
('Palermo','Riga',0),
('Palermo','Petrograd',0),
('Palermo','Wilno',0),
('Palermo','Kyiv',0),
('Palermo','Bucuresti',0),
('Palermo','Sofia',0),
('Palermo','Smyrna',0),
('Palermo','Constantinople',0),
('Palermo','Angora',0),
('Palermo','Erzurum',0),
('Palermo','Sevastopol',0),
('Palermo','Sochi',0),
('Palermo','Rostov',0),
('Palermo','Kharkov',0),
('Palermo','Moskva',0),
('Palermo','Smolensk',0),
('Athina','Sarajevo',0),
('Athina','Budapest',0),
('Athina','Warszawa',0),
('Athina','Danzig',0),
('Athina','Riga',0),
('Athina','Petrograd',0),
('Athina','Wilno',0),
('Athina','Kyiv',0),
('Athina','Bucuresti',0),
('Athina','Sofia',0),
('Athina','Smyrna',0),
('Athina','Constantinople',0),
('Athina','Angora',0),
('Athina','Erzurum',0),
('Athina','Sevastopol',0),
('Athina','Sochi',0),
('Athina','Rostov',0),
('Athina','Kharkov',0),
('Athina','Moskva',0),
('Athina','Smolensk',0),
('Sarajevo','Budapest',0),
('Sarajevo','Warszawa',0),
('Sarajevo','Danzig',0),
('Sarajevo','Riga',0),
('Sarajevo','Petrograd',0),
('Sarajevo','Wilno',0),
('Sarajevo','Kyiv',0),
('Sarajevo','Bucuresti',0),
('Sarajevo','Sofia',0),
('Sarajevo','Smyrna',0),
('Sarajevo','Constantinople',0),
('Sarajevo','Angora',0),
('Sarajevo','Erzurum',0),
('Sarajevo','Sevastopol',0),
('Sarajevo','Sochi',0),
('Sarajevo','Rostov',0),
('Sarajevo','Kharkov',0),
('Sarajevo','Moskva',0),
('Sarajevo','Smolensk',0),
('Budapest','Warszawa',0),
('Budapest','Danzig',0),
('Budapest','Riga',0),
('Budapest','Petrograd',0),
('Budapest','Wilno',0),
('Budapest','Kyiv',0),
('Budapest','Bucuresti',0),
('Budapest','Sofia',0),
('Budapest','Smyrna',0),
('Budapest','Constantinople',0),
('Budapest','Angora',0),
('Budapest','Erzurum',0),
('Budapest','Sevastopol',0),
('Budapest','Sochi',0),
('Budapest','Rostov',0),
('Budapest','Kharkov',0),
('Budapest','Moskva',0),
('Budapest','Smolensk',0),
('Warszawa','Danzig',0),
('Warszawa','Riga',0),
('Warszawa','Petrograd',0),
('Warszawa','Wilno',0),
('Warszawa','Kyiv',0),
('Warszawa','Bucuresti',0),
('Warszawa','Sofia',0),
('Warszawa','Smyrna',0),
('Warszawa','Constantinople',0),
('Warszawa','Angora',0),
('Warszawa','Erzurum',0),
('Warszawa','Sevastopol',0),
('Warszawa','Sochi',0),
('Warszawa','Rostov',0),
('Warszawa','Kharkov',0),
('Warszawa','Moskva',0),
('Warszawa','Smolensk',0),
('Danzig','Riga',3),
('Danzig','Petrograd',7),
('Danzig','Wilno',5),
('Danzig','Kyiv',6),
('Danzig','Bucuresti',10),
('Danzig','Sofia',12),
('Danzig','Smyrna',15),
('Danzig','Constantinople',13),
('Danzig','Angora',15),
('Danzig','Erzurum',17),
('Danzig','Sevastopol',14),
('Danzig','Sochi',14),
('Danzig','Rostov',12),
('Danzig','Kharkov',10),
('Danzig','Moskva',10),
('Danzig','Smolensk',8),
('Riga','Petrograd',4),
('Riga','Wilno',4),
('Riga','Kyiv',6),
('Riga','Bucuresti',10),
('Riga','Sofia',12),
('Riga','Smyrna',15),
('Riga','Constantinople',13),
('Riga','Angora',15),
('Riga','Erzurum',17),
('Riga','Sevastopol',14),
('Riga','Sochi',14),
('Riga','Rostov',12),
('Riga','Kharkov',10),
('Riga','Moskva',8),
('Riga','Smolensk',7),
('Petrograd','Wilno',4),
('Petrograd','Kyiv',6),
('Petrograd','Bucuresti',10),
('Petrograd','Sofia',12),
('Petrograd','Smyrna',15),
('Petrograd','Constantinople',13),
('Petrograd','Angora',15),
('Petrograd','Erzurum',15),
('Petrograd','Sevastopol',14),
('Petrograd','Sochi',12),
('Petrograd','Rostov',10),
('Petrograd','Kharkov',8),
('Petrograd','Moskva',4),
('Petrograd','Smolensk',6),
('Wilno','Kyiv',2),
('Wilno','Bucuresti',6),
('Wilno','Sofia',8),
('Wilno','Smyrna',11),
('Wilno','Constantinople',9),
('Wilno','Angora',11),
('Wilno','Erzurum',13),
('Wilno','Sevastopol',10),
('Wilno','Sochi',10),
('Wilno','Rostov',8),
('Wilno','Kharkov',6),
('Wilno','Moskva',5),
('Wilno','Smolensk',3),
('Kyiv','Bucuresti',4),
('Kyiv','Sofia',6),
('Kyiv','Smyrna',9),
('Kyiv','Constantinople',7),
('Kyiv','Angora',9),
('Kyiv','Erzurum',12),
('Kyiv','Sevastopol',8),
('Kyiv','Sochi',8),
('Kyiv','Rostov',6),
('Kyiv','Kharkov',4),
('Kyiv','Moskva',5),
('Kyiv','Smolensk',3),
('Bucuresti','Sofia',2),
('Bucuresti','Smyrna',5),
('Bucuresti','Constantinople',3),
('Bucuresti','Angora',5),
('Bucuresti','Erzurum',8),
('Bucuresti','Sevastopol',4),
('Bucuresti','Sochi',6),
('Bucuresti','Rostov',8),
('Bucuresti','Kharkov',8),
('Bucuresti','Moskva',9),
('Bucuresti','Smolensk',7),
('Sofia','Smyrna',5),
('Sofia','Constantinople',3),
('Sofia','Angora',5),
('Sofia','Erzurum',8),
('Sofia','Sevastopol',6),
('Sofia','Sochi',8),
('Sofia','Rostov',10),
('Sofia','Kharkov',10),
('Sofia','Moskva',16),
('Sofia','Smolensk',12),
('Smyrna','Constantinople',2),
('Smyrna','Angora',3),
('Smyrna','Erzurum',6),
('Smyrna','Sevastopol',6),
('Smyrna','Sochi',8),
('Smyrna','Rostov',10),
('Smyrna','Kharkov',12),
('Smyrna','Moskva',16),
('Smyrna','Smolensk',18),
('Constantinople','Angora',2),
('Constantinople','Erzurum',5),
('Constantinople','Sevastopol',4),
('Constantinople','Sochi',6),
('Constantinople','Rostov',8),
('Constantinople','Kharkov',10),
('Constantinople','Moskva',14),
('Constantinople','Smolensk',16),
('Angora','Erzurum',3),
('Angora','Sevastopol',6),
('Angora','Sochi',6),
('Angora','Rostov',8),
('Angora','Kharkov',10),
('Angora','Moskva',14),
('Angora','Smolensk',16),
('Erzurum','Sevastopol',4),
('Erzurum','Sochi',3),
('Erzurum','Rostov',5),
('Erzurum','Kharkov',7),
('Erzurum','Moskva',11),
('Erzurum','Smolensk',13),
('Sevastopol','Sochi',2),
('Sevastopol','Rostov',4),
('Sevastopol','Kharkov',6),
('Sevastopol','Moskva',10),
('Sevastopol','Smolensk',12),
('Sochi','Rostov',2),
('Sochi','Kharkov',4),
('Sochi','Moskva',8),
('Sochi','Smolensk',10),
('Rostov','Kharkov',2),
('Rostov','Moskva',6),
('Rostov','Smolensk',8),
('Kharkov','Moskva',4),
('Kharkov','Smolensk',6),
('Moskva','Smolensk',2)
]
| 25.060773
| 36
| 0.60244
|
794b4ed15c8f93c479bec99b65b51a11d39732e7
| 377
|
py
|
Python
|
flows/models.py
|
sergioisidoro/django-flows
|
326baa3e216a15bd7a8d13b2a09ba9752e250dbb
|
[
"BSD-2-Clause"
] | 104
|
2015-01-05T14:29:16.000Z
|
2021-11-08T11:20:24.000Z
|
flows/models.py
|
sergioisidoro/django-flows
|
326baa3e216a15bd7a8d13b2a09ba9752e250dbb
|
[
"BSD-2-Clause"
] | 4
|
2015-09-23T11:14:50.000Z
|
2020-03-21T06:08:34.000Z
|
flows/models.py
|
sergioisidoro/django-flows
|
326baa3e216a15bd7a8d13b2a09ba9752e250dbb
|
[
"BSD-2-Clause"
] | 16
|
2015-01-05T10:13:44.000Z
|
2022-02-14T05:21:23.000Z
|
# -*- coding: UTF-8 -*-
# Note: this is mainly required because using the Django test runner
# requires that apps under test have a 'models' module, even if it's
# just empty.
from flows import config
if config.FLOWS_STATE_STORE == 'flows.statestore.django_store':
from flows.statestore.django_store import StateModel #@UnusedImport only used to registed with django ORM
| 37.7
| 109
| 0.763926
|
794b503ecb29b62498206fc52e4a68f9966de75f
| 401,557
|
py
|
Python
|
opcua/ua/uaprotocol_auto.py
|
aixiwang/opcua2cloud
|
32e1e745e4939f8d4fd51892d9a51230ffdfc198
|
[
"Apache-2.0"
] | null | null | null |
opcua/ua/uaprotocol_auto.py
|
aixiwang/opcua2cloud
|
32e1e745e4939f8d4fd51892d9a51230ffdfc198
|
[
"Apache-2.0"
] | null | null | null |
opcua/ua/uaprotocol_auto.py
|
aixiwang/opcua2cloud
|
32e1e745e4939f8d4fd51892d9a51230ffdfc198
|
[
"Apache-2.0"
] | 2
|
2019-01-14T10:13:57.000Z
|
2020-02-11T15:22:14.000Z
|
'''
Autogenerate code from xml spec
'''
from datetime import datetime
from enum import IntEnum
from opcua.ua.uatypes import *
from opcua.ua.object_ids import ObjectIds
class NamingRuleType(IntEnum):
'''
:ivar Mandatory:
:vartype Mandatory: 1
:ivar Optional:
:vartype Optional: 2
:ivar Constraint:
:vartype Constraint: 3
'''
Mandatory = 1
Optional = 2
Constraint = 3
class OpenFileMode(IntEnum):
'''
:ivar Read:
:vartype Read: 1
:ivar Write:
:vartype Write: 2
:ivar EraseExisting:
:vartype EraseExisting: 4
:ivar Append:
:vartype Append: 8
'''
Read = 1
Write = 2
EraseExisting = 4
Append = 8
class IdentityCriteriaType(IntEnum):
'''
:ivar UserName:
:vartype UserName: 1
:ivar Thumbprint:
:vartype Thumbprint: 2
:ivar Role:
:vartype Role: 3
:ivar GroupId:
:vartype GroupId: 4
:ivar Anonymous:
:vartype Anonymous: 5
:ivar AuthenticatedUser:
:vartype AuthenticatedUser: 6
'''
UserName = 1
Thumbprint = 2
Role = 3
GroupId = 4
Anonymous = 5
AuthenticatedUser = 6
class TrustListMasks(IntEnum):
'''
:ivar None_:
:vartype None_: 0
:ivar TrustedCertificates:
:vartype TrustedCertificates: 1
:ivar TrustedCrls:
:vartype TrustedCrls: 2
:ivar IssuerCertificates:
:vartype IssuerCertificates: 4
:ivar IssuerCrls:
:vartype IssuerCrls: 8
:ivar All:
:vartype All: 15
'''
None_ = 0
TrustedCertificates = 1
TrustedCrls = 2
IssuerCertificates = 4
IssuerCrls = 8
All = 15
class PubSubState(IntEnum):
'''
:ivar Disabled:
:vartype Disabled: 0
:ivar Paused:
:vartype Paused: 1
:ivar Operational:
:vartype Operational: 2
:ivar Error:
:vartype Error: 3
'''
Disabled = 0
Paused = 1
Operational = 2
Error = 3
class DataSetFieldFlags(IntEnum):
'''
:ivar PromotedField:
:vartype PromotedField: 1
'''
PromotedField = 1
class DataSetFieldContentMask(IntEnum):
'''
:ivar StatusCode:
:vartype StatusCode: 1
:ivar SourceTimestamp:
:vartype SourceTimestamp: 2
:ivar ServerTimestamp:
:vartype ServerTimestamp: 4
:ivar SourcePicoSeconds:
:vartype SourcePicoSeconds: 8
:ivar ServerPicoSeconds:
:vartype ServerPicoSeconds: 16
:ivar RawDataEncoding:
:vartype RawDataEncoding: 32
'''
StatusCode = 1
SourceTimestamp = 2
ServerTimestamp = 4
SourcePicoSeconds = 8
ServerPicoSeconds = 16
RawDataEncoding = 32
class OverrideValueHandling(IntEnum):
'''
:ivar Disabled:
:vartype Disabled: 0
:ivar LastUseableValue:
:vartype LastUseableValue: 1
:ivar OverrideValue:
:vartype OverrideValue: 2
'''
Disabled = 0
LastUseableValue = 1
OverrideValue = 2
class DataSetOrderingType(IntEnum):
'''
:ivar Undefined:
:vartype Undefined: 0
:ivar AscendingWriterId:
:vartype AscendingWriterId: 1
:ivar AscendingWriterIdSingle:
:vartype AscendingWriterIdSingle: 2
'''
Undefined = 0
AscendingWriterId = 1
AscendingWriterIdSingle = 2
class UadpNetworkMessageContentMask(IntEnum):
'''
:ivar PublisherId:
:vartype PublisherId: 1
:ivar GroupHeader:
:vartype GroupHeader: 2
:ivar WriterGroupId:
:vartype WriterGroupId: 4
:ivar GroupVersion:
:vartype GroupVersion: 8
:ivar NetworkMessageNumber:
:vartype NetworkMessageNumber: 16
:ivar SequenceNumber:
:vartype SequenceNumber: 32
:ivar PayloadHeader:
:vartype PayloadHeader: 64
:ivar Timestamp:
:vartype Timestamp: 128
:ivar Picoseconds:
:vartype Picoseconds: 256
:ivar DataSetClassId:
:vartype DataSetClassId: 512
:ivar PromotedFields:
:vartype PromotedFields: 1024
'''
PublisherId = 1
GroupHeader = 2
WriterGroupId = 4
GroupVersion = 8
NetworkMessageNumber = 16
SequenceNumber = 32
PayloadHeader = 64
Timestamp = 128
Picoseconds = 256
DataSetClassId = 512
PromotedFields = 1024
class UadpDataSetMessageContentMask(IntEnum):
'''
:ivar Timestamp:
:vartype Timestamp: 1
:ivar PicoSeconds:
:vartype PicoSeconds: 2
:ivar Status:
:vartype Status: 4
:ivar MajorVersion:
:vartype MajorVersion: 8
:ivar MinorVersion:
:vartype MinorVersion: 16
:ivar SequenceNumber:
:vartype SequenceNumber: 32
'''
Timestamp = 1
PicoSeconds = 2
Status = 4
MajorVersion = 8
MinorVersion = 16
SequenceNumber = 32
class JsonNetworkMessageContentMask(IntEnum):
'''
:ivar NetworkMessageHeader:
:vartype NetworkMessageHeader: 1
:ivar DataSetMessageHeader:
:vartype DataSetMessageHeader: 2
:ivar SingleDataSetMessage:
:vartype SingleDataSetMessage: 4
:ivar PublisherId:
:vartype PublisherId: 8
:ivar DataSetClassId:
:vartype DataSetClassId: 16
:ivar ReplyTo:
:vartype ReplyTo: 32
'''
NetworkMessageHeader = 1
DataSetMessageHeader = 2
SingleDataSetMessage = 4
PublisherId = 8
DataSetClassId = 16
ReplyTo = 32
class JsonDataSetMessageContentMask(IntEnum):
'''
:ivar DataSetWriterId:
:vartype DataSetWriterId: 1
:ivar MetaDataVersion:
:vartype MetaDataVersion: 2
:ivar SequenceNumber:
:vartype SequenceNumber: 4
:ivar Timestamp:
:vartype Timestamp: 8
:ivar Status:
:vartype Status: 16
'''
DataSetWriterId = 1
MetaDataVersion = 2
SequenceNumber = 4
Timestamp = 8
Status = 16
class BrokerTransportQualityOfService(IntEnum):
'''
:ivar NotSpecified:
:vartype NotSpecified: 0
:ivar BestEffort:
:vartype BestEffort: 1
:ivar AtLeastOnce:
:vartype AtLeastOnce: 2
:ivar AtMostOnce:
:vartype AtMostOnce: 3
:ivar ExactlyOnce:
:vartype ExactlyOnce: 4
'''
NotSpecified = 0
BestEffort = 1
AtLeastOnce = 2
AtMostOnce = 3
ExactlyOnce = 4
class DiagnosticsLevel(IntEnum):
'''
:ivar Basic:
:vartype Basic: 0
:ivar Advanced:
:vartype Advanced: 1
:ivar Info:
:vartype Info: 2
:ivar Log:
:vartype Log: 3
:ivar Debug:
:vartype Debug: 4
'''
Basic = 0
Advanced = 1
Info = 2
Log = 3
Debug = 4
class PubSubDiagnosticsCounterClassification(IntEnum):
'''
:ivar Information:
:vartype Information: 0
:ivar Error:
:vartype Error: 1
'''
Information = 0
Error = 1
class IdType(IntEnum):
'''
The type of identifier used in a node id.
:ivar Numeric:
:vartype Numeric: 0
:ivar String:
:vartype String: 1
:ivar Guid:
:vartype Guid: 2
:ivar Opaque:
:vartype Opaque: 3
'''
Numeric = 0
String = 1
Guid = 2
Opaque = 3
class NodeClass(IntEnum):
'''
A mask specifying the class of the node.
:ivar Unspecified:
:vartype Unspecified: 0
:ivar Object:
:vartype Object: 1
:ivar Variable:
:vartype Variable: 2
:ivar Method:
:vartype Method: 4
:ivar ObjectType:
:vartype ObjectType: 8
:ivar VariableType:
:vartype VariableType: 16
:ivar ReferenceType:
:vartype ReferenceType: 32
:ivar DataType:
:vartype DataType: 64
:ivar View:
:vartype View: 128
'''
Unspecified = 0
Object = 1
Variable = 2
Method = 4
ObjectType = 8
VariableType = 16
ReferenceType = 32
DataType = 64
View = 128
class AccessLevelType(IntEnum):
'''
:ivar None_:
:vartype None_: 0
:ivar CurrentRead:
:vartype CurrentRead: 1
:ivar CurrentWrite:
:vartype CurrentWrite: 2
:ivar HistoryRead:
:vartype HistoryRead: 4
:ivar HistoryWrite:
:vartype HistoryWrite: 16
:ivar StatusWrite:
:vartype StatusWrite: 32
:ivar TimestampWrite:
:vartype TimestampWrite: 64
'''
None_ = 0
CurrentRead = 1
CurrentWrite = 2
HistoryRead = 4
HistoryWrite = 16
StatusWrite = 32
TimestampWrite = 64
class AccessLevelExType(IntEnum):
'''
:ivar None_:
:vartype None_: 0
:ivar CurrentRead:
:vartype CurrentRead: 1
:ivar CurrentWrite:
:vartype CurrentWrite: 2
:ivar HistoryRead:
:vartype HistoryRead: 4
:ivar HistoryWrite:
:vartype HistoryWrite: 16
:ivar StatusWrite:
:vartype StatusWrite: 32
:ivar TimestampWrite:
:vartype TimestampWrite: 64
:ivar NonatomicRead:
:vartype NonatomicRead: 65536
:ivar NonatomicWrite:
:vartype NonatomicWrite: 131072
:ivar WriteFullArrayOnly:
:vartype WriteFullArrayOnly: 262144
'''
None_ = 0
CurrentRead = 1
CurrentWrite = 2
HistoryRead = 4
HistoryWrite = 16
StatusWrite = 32
TimestampWrite = 64
NonatomicRead = 65536
NonatomicWrite = 131072
WriteFullArrayOnly = 262144
class EventNotifierType(IntEnum):
'''
:ivar None_:
:vartype None_: 0
:ivar SubscribeToEvents:
:vartype SubscribeToEvents: 1
:ivar HistoryRead:
:vartype HistoryRead: 4
:ivar HistoryWrite:
:vartype HistoryWrite: 8
'''
None_ = 0
SubscribeToEvents = 1
HistoryRead = 4
HistoryWrite = 8
class StructureType(IntEnum):
'''
:ivar Structure:
:vartype Structure: 0
:ivar StructureWithOptionalFields:
:vartype StructureWithOptionalFields: 1
:ivar Union:
:vartype Union: 2
'''
Structure = 0
StructureWithOptionalFields = 1
Union = 2
class ApplicationType(IntEnum):
'''
The types of applications.
:ivar Server:
:vartype Server: 0
:ivar Client:
:vartype Client: 1
:ivar ClientAndServer:
:vartype ClientAndServer: 2
:ivar DiscoveryServer:
:vartype DiscoveryServer: 3
'''
Server = 0
Client = 1
ClientAndServer = 2
DiscoveryServer = 3
class MessageSecurityMode(IntEnum):
'''
The type of security to use on a message.
:ivar Invalid:
:vartype Invalid: 0
:ivar None_:
:vartype None_: 1
:ivar Sign:
:vartype Sign: 2
:ivar SignAndEncrypt:
:vartype SignAndEncrypt: 3
'''
Invalid = 0
None_ = 1
Sign = 2
SignAndEncrypt = 3
class UserTokenType(IntEnum):
'''
The possible user token types.
:ivar Anonymous:
:vartype Anonymous: 0
:ivar UserName:
:vartype UserName: 1
:ivar Certificate:
:vartype Certificate: 2
:ivar IssuedToken:
:vartype IssuedToken: 3
'''
Anonymous = 0
UserName = 1
Certificate = 2
IssuedToken = 3
class SecurityTokenRequestType(IntEnum):
'''
Indicates whether a token if being created or renewed.
:ivar Issue:
:vartype Issue: 0
:ivar Renew:
:vartype Renew: 1
'''
Issue = 0
Renew = 1
class NodeAttributesMask(IntEnum):
'''
The bits used to specify default attributes for a new node.
:ivar None_:
:vartype None_: 0
:ivar AccessLevel:
:vartype AccessLevel: 1
:ivar ArrayDimensions:
:vartype ArrayDimensions: 2
:ivar BrowseName:
:vartype BrowseName: 4
:ivar ContainsNoLoops:
:vartype ContainsNoLoops: 8
:ivar DataType:
:vartype DataType: 16
:ivar Description:
:vartype Description: 32
:ivar DisplayName:
:vartype DisplayName: 64
:ivar EventNotifier:
:vartype EventNotifier: 128
:ivar Executable:
:vartype Executable: 256
:ivar Historizing:
:vartype Historizing: 512
:ivar InverseName:
:vartype InverseName: 1024
:ivar IsAbstract:
:vartype IsAbstract: 2048
:ivar MinimumSamplingInterval:
:vartype MinimumSamplingInterval: 4096
:ivar NodeClass:
:vartype NodeClass: 8192
:ivar NodeId:
:vartype NodeId: 16384
:ivar Symmetric:
:vartype Symmetric: 32768
:ivar UserAccessLevel:
:vartype UserAccessLevel: 65536
:ivar UserExecutable:
:vartype UserExecutable: 131072
:ivar UserWriteMask:
:vartype UserWriteMask: 262144
:ivar ValueRank:
:vartype ValueRank: 524288
:ivar WriteMask:
:vartype WriteMask: 1048576
:ivar Value:
:vartype Value: 2097152
:ivar DataTypeDefinition:
:vartype DataTypeDefinition: 4194304
:ivar RolePermissions:
:vartype RolePermissions: 8388608
:ivar AccessRestrictions:
:vartype AccessRestrictions: 16777216
:ivar All:
:vartype All: 33554431
:ivar BaseNode:
:vartype BaseNode: 26501220
:ivar Object:
:vartype Object: 26501348
:ivar ObjectType:
:vartype ObjectType: 26503268
:ivar Variable:
:vartype Variable: 26571383
:ivar VariableType:
:vartype VariableType: 28600438
:ivar Method:
:vartype Method: 26632548
:ivar ReferenceType:
:vartype ReferenceType: 26537060
:ivar View:
:vartype View: 26501356
'''
None_ = 0
AccessLevel = 1
ArrayDimensions = 2
BrowseName = 4
ContainsNoLoops = 8
DataType = 16
Description = 32
DisplayName = 64
EventNotifier = 128
Executable = 256
Historizing = 512
InverseName = 1024
IsAbstract = 2048
MinimumSamplingInterval = 4096
NodeClass = 8192
NodeId = 16384
Symmetric = 32768
UserAccessLevel = 65536
UserExecutable = 131072
UserWriteMask = 262144
ValueRank = 524288
WriteMask = 1048576
Value = 2097152
DataTypeDefinition = 4194304
RolePermissions = 8388608
AccessRestrictions = 16777216
All = 33554431
BaseNode = 26501220
Object = 26501348
ObjectType = 26503268
Variable = 26571383
VariableType = 28600438
Method = 26632548
ReferenceType = 26537060
View = 26501356
class AttributeWriteMask(IntEnum):
'''
Define bits used to indicate which attributes are writable.
:ivar None_:
:vartype None_: 0
:ivar AccessLevel:
:vartype AccessLevel: 1
:ivar ArrayDimensions:
:vartype ArrayDimensions: 2
:ivar BrowseName:
:vartype BrowseName: 4
:ivar ContainsNoLoops:
:vartype ContainsNoLoops: 8
:ivar DataType:
:vartype DataType: 16
:ivar Description:
:vartype Description: 32
:ivar DisplayName:
:vartype DisplayName: 64
:ivar EventNotifier:
:vartype EventNotifier: 128
:ivar Executable:
:vartype Executable: 256
:ivar Historizing:
:vartype Historizing: 512
:ivar InverseName:
:vartype InverseName: 1024
:ivar IsAbstract:
:vartype IsAbstract: 2048
:ivar MinimumSamplingInterval:
:vartype MinimumSamplingInterval: 4096
:ivar NodeClass:
:vartype NodeClass: 8192
:ivar NodeId:
:vartype NodeId: 16384
:ivar Symmetric:
:vartype Symmetric: 32768
:ivar UserAccessLevel:
:vartype UserAccessLevel: 65536
:ivar UserExecutable:
:vartype UserExecutable: 131072
:ivar UserWriteMask:
:vartype UserWriteMask: 262144
:ivar ValueRank:
:vartype ValueRank: 524288
:ivar WriteMask:
:vartype WriteMask: 1048576
:ivar ValueForVariableType:
:vartype ValueForVariableType: 2097152
:ivar DataTypeDefinition:
:vartype DataTypeDefinition: 4194304
:ivar RolePermissions:
:vartype RolePermissions: 8388608
:ivar AccessRestrictions:
:vartype AccessRestrictions: 16777216
:ivar AccessLevelEx:
:vartype AccessLevelEx: 33554432
'''
None_ = 0
AccessLevel = 1
ArrayDimensions = 2
BrowseName = 4
ContainsNoLoops = 8
DataType = 16
Description = 32
DisplayName = 64
EventNotifier = 128
Executable = 256
Historizing = 512
InverseName = 1024
IsAbstract = 2048
MinimumSamplingInterval = 4096
NodeClass = 8192
NodeId = 16384
Symmetric = 32768
UserAccessLevel = 65536
UserExecutable = 131072
UserWriteMask = 262144
ValueRank = 524288
WriteMask = 1048576
ValueForVariableType = 2097152
DataTypeDefinition = 4194304
RolePermissions = 8388608
AccessRestrictions = 16777216
AccessLevelEx = 33554432
class BrowseDirection(IntEnum):
'''
The directions of the references to return.
:ivar Forward:
:vartype Forward: 0
:ivar Inverse:
:vartype Inverse: 1
:ivar Both:
:vartype Both: 2
:ivar Invalid:
:vartype Invalid: 3
'''
Forward = 0
Inverse = 1
Both = 2
Invalid = 3
class BrowseResultMask(IntEnum):
'''
A bit mask which specifies what should be returned in a browse response.
:ivar None_:
:vartype None_: 0
:ivar ReferenceTypeId:
:vartype ReferenceTypeId: 1
:ivar IsForward:
:vartype IsForward: 2
:ivar NodeClass:
:vartype NodeClass: 4
:ivar BrowseName:
:vartype BrowseName: 8
:ivar DisplayName:
:vartype DisplayName: 16
:ivar TypeDefinition:
:vartype TypeDefinition: 32
:ivar All:
:vartype All: 63
:ivar ReferenceTypeInfo:
:vartype ReferenceTypeInfo: 3
:ivar TargetInfo:
:vartype TargetInfo: 60
'''
None_ = 0
ReferenceTypeId = 1
IsForward = 2
NodeClass = 4
BrowseName = 8
DisplayName = 16
TypeDefinition = 32
All = 63
ReferenceTypeInfo = 3
TargetInfo = 60
class FilterOperator(IntEnum):
'''
:ivar Equals:
:vartype Equals: 0
:ivar IsNull:
:vartype IsNull: 1
:ivar GreaterThan:
:vartype GreaterThan: 2
:ivar LessThan:
:vartype LessThan: 3
:ivar GreaterThanOrEqual:
:vartype GreaterThanOrEqual: 4
:ivar LessThanOrEqual:
:vartype LessThanOrEqual: 5
:ivar Like:
:vartype Like: 6
:ivar Not:
:vartype Not: 7
:ivar Between:
:vartype Between: 8
:ivar InList:
:vartype InList: 9
:ivar And:
:vartype And: 10
:ivar Or:
:vartype Or: 11
:ivar Cast:
:vartype Cast: 12
:ivar InView:
:vartype InView: 13
:ivar OfType:
:vartype OfType: 14
:ivar RelatedTo:
:vartype RelatedTo: 15
:ivar BitwiseAnd:
:vartype BitwiseAnd: 16
:ivar BitwiseOr:
:vartype BitwiseOr: 17
'''
Equals = 0
IsNull = 1
GreaterThan = 2
LessThan = 3
GreaterThanOrEqual = 4
LessThanOrEqual = 5
Like = 6
Not = 7
Between = 8
InList = 9
And = 10
Or = 11
Cast = 12
InView = 13
OfType = 14
RelatedTo = 15
BitwiseAnd = 16
BitwiseOr = 17
class TimestampsToReturn(IntEnum):
'''
:ivar Source:
:vartype Source: 0
:ivar Server:
:vartype Server: 1
:ivar Both:
:vartype Both: 2
:ivar Neither:
:vartype Neither: 3
:ivar Invalid:
:vartype Invalid: 4
'''
Source = 0
Server = 1
Both = 2
Neither = 3
Invalid = 4
class HistoryUpdateType(IntEnum):
'''
:ivar Insert:
:vartype Insert: 1
:ivar Replace:
:vartype Replace: 2
:ivar Update:
:vartype Update: 3
:ivar Delete:
:vartype Delete: 4
'''
Insert = 1
Replace = 2
Update = 3
Delete = 4
class PerformUpdateType(IntEnum):
'''
:ivar Insert:
:vartype Insert: 1
:ivar Replace:
:vartype Replace: 2
:ivar Update:
:vartype Update: 3
:ivar Remove:
:vartype Remove: 4
'''
Insert = 1
Replace = 2
Update = 3
Remove = 4
class MonitoringMode(IntEnum):
'''
:ivar Disabled:
:vartype Disabled: 0
:ivar Sampling:
:vartype Sampling: 1
:ivar Reporting:
:vartype Reporting: 2
'''
Disabled = 0
Sampling = 1
Reporting = 2
class DataChangeTrigger(IntEnum):
'''
:ivar Status:
:vartype Status: 0
:ivar StatusValue:
:vartype StatusValue: 1
:ivar StatusValueTimestamp:
:vartype StatusValueTimestamp: 2
'''
Status = 0
StatusValue = 1
StatusValueTimestamp = 2
class DeadbandType(IntEnum):
'''
:ivar None_:
:vartype None_: 0
:ivar Absolute:
:vartype Absolute: 1
:ivar Percent:
:vartype Percent: 2
'''
None_ = 0
Absolute = 1
Percent = 2
class RedundancySupport(IntEnum):
'''
:ivar None_:
:vartype None_: 0
:ivar Cold:
:vartype Cold: 1
:ivar Warm:
:vartype Warm: 2
:ivar Hot:
:vartype Hot: 3
:ivar Transparent:
:vartype Transparent: 4
:ivar HotAndMirrored:
:vartype HotAndMirrored: 5
'''
None_ = 0
Cold = 1
Warm = 2
Hot = 3
Transparent = 4
HotAndMirrored = 5
class ServerState(IntEnum):
'''
:ivar Running:
:vartype Running: 0
:ivar Failed:
:vartype Failed: 1
:ivar NoConfiguration:
:vartype NoConfiguration: 2
:ivar Suspended:
:vartype Suspended: 3
:ivar Shutdown:
:vartype Shutdown: 4
:ivar Test:
:vartype Test: 5
:ivar CommunicationFault:
:vartype CommunicationFault: 6
:ivar Unknown:
:vartype Unknown: 7
'''
Running = 0
Failed = 1
NoConfiguration = 2
Suspended = 3
Shutdown = 4
Test = 5
CommunicationFault = 6
Unknown = 7
class ModelChangeStructureVerbMask(IntEnum):
'''
:ivar NodeAdded:
:vartype NodeAdded: 1
:ivar NodeDeleted:
:vartype NodeDeleted: 2
:ivar ReferenceAdded:
:vartype ReferenceAdded: 4
:ivar ReferenceDeleted:
:vartype ReferenceDeleted: 8
:ivar DataTypeChanged:
:vartype DataTypeChanged: 16
'''
NodeAdded = 1
NodeDeleted = 2
ReferenceAdded = 4
ReferenceDeleted = 8
DataTypeChanged = 16
class AxisScaleEnumeration(IntEnum):
'''
:ivar Linear:
:vartype Linear: 0
:ivar Log:
:vartype Log: 1
:ivar Ln:
:vartype Ln: 2
'''
Linear = 0
Log = 1
Ln = 2
class ExceptionDeviationFormat(IntEnum):
'''
:ivar AbsoluteValue:
:vartype AbsoluteValue: 0
:ivar PercentOfValue:
:vartype PercentOfValue: 1
:ivar PercentOfRange:
:vartype PercentOfRange: 2
:ivar PercentOfEURange:
:vartype PercentOfEURange: 3
:ivar Unknown:
:vartype Unknown: 4
'''
AbsoluteValue = 0
PercentOfValue = 1
PercentOfRange = 2
PercentOfEURange = 3
Unknown = 4
class DataTypeDefinition(FrozenClass):
'''
'''
ua_types = [
]
def __init__(self):
self._freeze = True
def __str__(self):
return 'DataTypeDefinition(' + + ')'
__repr__ = __str__
class DiagnosticInfo(FrozenClass):
'''
A recursive structure containing diagnostic information associated with a status code.
:ivar Encoding:
:vartype Encoding: Byte
:ivar SymbolicId:
:vartype SymbolicId: Int32
:ivar NamespaceURI:
:vartype NamespaceURI: Int32
:ivar Locale:
:vartype Locale: Int32
:ivar LocalizedText:
:vartype LocalizedText: Int32
:ivar AdditionalInfo:
:vartype AdditionalInfo: String
:ivar InnerStatusCode:
:vartype InnerStatusCode: StatusCode
:ivar InnerDiagnosticInfo:
:vartype InnerDiagnosticInfo: DiagnosticInfo
'''
ua_switches = {
'SymbolicId': ('Encoding', 0),
'NamespaceURI': ('Encoding', 1),
'Locale': ('Encoding', 2),
'LocalizedText': ('Encoding', 3),
'AdditionalInfo': ('Encoding', 4),
'InnerStatusCode': ('Encoding', 5),
'InnerDiagnosticInfo': ('Encoding', 6),
}
ua_types = [
('Encoding', 'Byte'),
('SymbolicId', 'Int32'),
('NamespaceURI', 'Int32'),
('Locale', 'Int32'),
('LocalizedText', 'Int32'),
('AdditionalInfo', 'String'),
('InnerStatusCode', 'StatusCode'),
('InnerDiagnosticInfo', 'DiagnosticInfo'),
]
def __init__(self):
self.Encoding = 0
self.SymbolicId = None
self.NamespaceURI = None
self.Locale = None
self.LocalizedText = None
self.AdditionalInfo = None
self.InnerStatusCode = None
self.InnerDiagnosticInfo = None
self._freeze = True
def __str__(self):
return 'DiagnosticInfo(' + 'Encoding:' + str(self.Encoding) + ', ' + \
'SymbolicId:' + str(self.SymbolicId) + ', ' + \
'NamespaceURI:' + str(self.NamespaceURI) + ', ' + \
'Locale:' + str(self.Locale) + ', ' + \
'LocalizedText:' + str(self.LocalizedText) + ', ' + \
'AdditionalInfo:' + str(self.AdditionalInfo) + ', ' + \
'InnerStatusCode:' + str(self.InnerStatusCode) + ', ' + \
'InnerDiagnosticInfo:' + str(self.InnerDiagnosticInfo) + ')'
__repr__ = __str__
class KeyValuePair(FrozenClass):
'''
:ivar Key:
:vartype Key: QualifiedName
:ivar Value:
:vartype Value: Variant
'''
ua_types = [
('Key', 'QualifiedName'),
('Value', 'Variant'),
]
def __init__(self):
self.Key = QualifiedName()
self.Value = Variant()
self._freeze = True
def __str__(self):
return 'KeyValuePair(' + 'Key:' + str(self.Key) + ', ' + \
'Value:' + str(self.Value) + ')'
__repr__ = __str__
class EndpointType(FrozenClass):
'''
:ivar EndpointUrl:
:vartype EndpointUrl: String
:ivar SecurityMode:
:vartype SecurityMode: MessageSecurityMode
:ivar SecurityPolicyUri:
:vartype SecurityPolicyUri: String
:ivar TransportProfileUri:
:vartype TransportProfileUri: String
'''
ua_types = [
('EndpointUrl', 'String'),
('SecurityMode', 'MessageSecurityMode'),
('SecurityPolicyUri', 'String'),
('TransportProfileUri', 'String'),
]
def __init__(self):
self.EndpointUrl = None
self.SecurityMode = MessageSecurityMode(0)
self.SecurityPolicyUri = None
self.TransportProfileUri = None
self._freeze = True
def __str__(self):
return 'EndpointType(' + 'EndpointUrl:' + str(self.EndpointUrl) + ', ' + \
'SecurityMode:' + str(self.SecurityMode) + ', ' + \
'SecurityPolicyUri:' + str(self.SecurityPolicyUri) + ', ' + \
'TransportProfileUri:' + str(self.TransportProfileUri) + ')'
__repr__ = __str__
class IdentityMappingRuleType(FrozenClass):
'''
:ivar CriteriaType:
:vartype CriteriaType: IdentityCriteriaType
:ivar Criteria:
:vartype Criteria: String
'''
ua_types = [
('CriteriaType', 'IdentityCriteriaType'),
('Criteria', 'String'),
]
def __init__(self):
self.CriteriaType = IdentityCriteriaType(0)
self.Criteria = None
self._freeze = True
def __str__(self):
return 'IdentityMappingRuleType(' + 'CriteriaType:' + str(self.CriteriaType) + ', ' + \
'Criteria:' + str(self.Criteria) + ')'
__repr__ = __str__
class TrustListDataType(FrozenClass):
'''
:ivar SpecifiedLists:
:vartype SpecifiedLists: UInt32
:ivar TrustedCertificates:
:vartype TrustedCertificates: ByteString
:ivar TrustedCrls:
:vartype TrustedCrls: ByteString
:ivar IssuerCertificates:
:vartype IssuerCertificates: ByteString
:ivar IssuerCrls:
:vartype IssuerCrls: ByteString
'''
ua_types = [
('SpecifiedLists', 'UInt32'),
('TrustedCertificates', 'ListOfByteString'),
('TrustedCrls', 'ListOfByteString'),
('IssuerCertificates', 'ListOfByteString'),
('IssuerCrls', 'ListOfByteString'),
]
def __init__(self):
self.SpecifiedLists = 0
self.TrustedCertificates = []
self.TrustedCrls = []
self.IssuerCertificates = []
self.IssuerCrls = []
self._freeze = True
def __str__(self):
return 'TrustListDataType(' + 'SpecifiedLists:' + str(self.SpecifiedLists) + ', ' + \
'TrustedCertificates:' + str(self.TrustedCertificates) + ', ' + \
'TrustedCrls:' + str(self.TrustedCrls) + ', ' + \
'IssuerCertificates:' + str(self.IssuerCertificates) + ', ' + \
'IssuerCrls:' + str(self.IssuerCrls) + ')'
__repr__ = __str__
class DecimalDataType(FrozenClass):
'''
:ivar Scale:
:vartype Scale: Int16
:ivar Value:
:vartype Value: ByteString
'''
ua_types = [
('Scale', 'Int16'),
('Value', 'ByteString'),
]
def __init__(self):
self.Scale = 0
self.Value = None
self._freeze = True
def __str__(self):
return 'DecimalDataType(' + 'Scale:' + str(self.Scale) + ', ' + \
'Value:' + str(self.Value) + ')'
__repr__ = __str__
class DataTypeSchemaHeader(FrozenClass):
'''
:ivar Namespaces:
:vartype Namespaces: String
:ivar StructureDataTypes:
:vartype StructureDataTypes: StructureDescription
:ivar EnumDataTypes:
:vartype EnumDataTypes: EnumDescription
:ivar SimpleDataTypes:
:vartype SimpleDataTypes: SimpleTypeDescription
'''
ua_types = [
('Namespaces', 'ListOfString'),
('StructureDataTypes', 'ListOfStructureDescription'),
('EnumDataTypes', 'ListOfEnumDescription'),
('SimpleDataTypes', 'ListOfSimpleTypeDescription'),
]
def __init__(self):
self.Namespaces = []
self.StructureDataTypes = []
self.EnumDataTypes = []
self.SimpleDataTypes = []
self._freeze = True
def __str__(self):
return 'DataTypeSchemaHeader(' + 'Namespaces:' + str(self.Namespaces) + ', ' + \
'StructureDataTypes:' + str(self.StructureDataTypes) + ', ' + \
'EnumDataTypes:' + str(self.EnumDataTypes) + ', ' + \
'SimpleDataTypes:' + str(self.SimpleDataTypes) + ')'
__repr__ = __str__
class DataTypeDescription(FrozenClass):
'''
:ivar DataTypeId:
:vartype DataTypeId: NodeId
:ivar Name:
:vartype Name: QualifiedName
'''
ua_types = [
('DataTypeId', 'NodeId'),
('Name', 'QualifiedName'),
]
def __init__(self):
self.DataTypeId = NodeId()
self.Name = QualifiedName()
self._freeze = True
def __str__(self):
return 'DataTypeDescription(' + 'DataTypeId:' + str(self.DataTypeId) + ', ' + \
'Name:' + str(self.Name) + ')'
__repr__ = __str__
class StructureDescription(FrozenClass):
'''
:ivar DataTypeId:
:vartype DataTypeId: NodeId
:ivar Name:
:vartype Name: QualifiedName
:ivar StructureDefinition:
:vartype StructureDefinition: StructureDefinition
'''
ua_types = [
('DataTypeId', 'NodeId'),
('Name', 'QualifiedName'),
('StructureDefinition', 'StructureDefinition'),
]
def __init__(self):
self.DataTypeId = NodeId()
self.Name = QualifiedName()
self.StructureDefinition = StructureDefinition()
self._freeze = True
def __str__(self):
return 'StructureDescription(' + 'DataTypeId:' + str(self.DataTypeId) + ', ' + \
'Name:' + str(self.Name) + ', ' + \
'StructureDefinition:' + str(self.StructureDefinition) + ')'
__repr__ = __str__
class EnumDescription(FrozenClass):
    '''
    Describes an enumerated data type and the built-in type it maps to.

    :ivar DataTypeId:
    :vartype DataTypeId: NodeId
    :ivar Name:
    :vartype Name: QualifiedName
    :ivar EnumDefinition:
    :vartype EnumDefinition: EnumDefinition
    :ivar BuiltInType:
    :vartype BuiltInType: Byte
    '''
    ua_types = [
        ('DataTypeId', 'NodeId'),
        ('Name', 'QualifiedName'),
        ('EnumDefinition', 'EnumDefinition'),
        ('BuiltInType', 'Byte'),
    ]

    def __init__(self):
        self.DataTypeId = NodeId()
        self.Name = QualifiedName()
        self.EnumDefinition = EnumDefinition()
        self.BuiltInType = 0
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'EnumDescription(' + ', '.join(parts) + ')'

    __repr__ = __str__
class SimpleTypeDescription(FrozenClass):
    '''
    Describes a simple (subtype) data type, its base type and built-in type.

    :ivar DataTypeId:
    :vartype DataTypeId: NodeId
    :ivar Name:
    :vartype Name: QualifiedName
    :ivar BaseDataType:
    :vartype BaseDataType: NodeId
    :ivar BuiltInType:
    :vartype BuiltInType: Byte
    '''
    ua_types = [
        ('DataTypeId', 'NodeId'),
        ('Name', 'QualifiedName'),
        ('BaseDataType', 'NodeId'),
        ('BuiltInType', 'Byte'),
    ]

    def __init__(self):
        self.DataTypeId = NodeId()
        self.Name = QualifiedName()
        self.BaseDataType = NodeId()
        self.BuiltInType = 0
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'SimpleTypeDescription(' + ', '.join(parts) + ')'

    __repr__ = __str__
class UABinaryFileDataType(FrozenClass):
    '''
    Schema header (namespaces + type descriptions) plus a file body.

    :ivar Namespaces:
    :vartype Namespaces: list of String
    :ivar StructureDataTypes:
    :vartype StructureDataTypes: list of StructureDescription
    :ivar EnumDataTypes:
    :vartype EnumDataTypes: list of EnumDescription
    :ivar SimpleDataTypes:
    :vartype SimpleDataTypes: list of SimpleTypeDescription
    :ivar SchemaLocation:
    :vartype SchemaLocation: String
    :ivar FileHeader:
    :vartype FileHeader: list of KeyValuePair
    :ivar Body:
    :vartype Body: Variant
    '''
    ua_types = [
        ('Namespaces', 'ListOfString'),
        ('StructureDataTypes', 'ListOfStructureDescription'),
        ('EnumDataTypes', 'ListOfEnumDescription'),
        ('SimpleDataTypes', 'ListOfSimpleTypeDescription'),
        ('SchemaLocation', 'String'),
        ('FileHeader', 'ListOfKeyValuePair'),
        ('Body', 'Variant'),
    ]

    def __init__(self):
        self.Namespaces = []
        self.StructureDataTypes = []
        self.EnumDataTypes = []
        self.SimpleDataTypes = []
        self.SchemaLocation = None
        self.FileHeader = []
        self.Body = Variant()
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'UABinaryFileDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class DataSetMetaDataType(FrozenClass):
    '''
    Metadata for a published data set: schema header, name, field list,
    class id and configuration version.

    :ivar Namespaces:
    :vartype Namespaces: list of String
    :ivar StructureDataTypes:
    :vartype StructureDataTypes: list of StructureDescription
    :ivar EnumDataTypes:
    :vartype EnumDataTypes: list of EnumDescription
    :ivar SimpleDataTypes:
    :vartype SimpleDataTypes: list of SimpleTypeDescription
    :ivar Name:
    :vartype Name: String
    :ivar Description:
    :vartype Description: LocalizedText
    :ivar Fields:
    :vartype Fields: list of FieldMetaData
    :ivar DataSetClassId:
    :vartype DataSetClassId: Guid
    :ivar ConfigurationVersion:
    :vartype ConfigurationVersion: ConfigurationVersionDataType
    '''
    ua_types = [
        ('Namespaces', 'ListOfString'),
        ('StructureDataTypes', 'ListOfStructureDescription'),
        ('EnumDataTypes', 'ListOfEnumDescription'),
        ('SimpleDataTypes', 'ListOfSimpleTypeDescription'),
        ('Name', 'String'),
        ('Description', 'LocalizedText'),
        ('Fields', 'ListOfFieldMetaData'),
        ('DataSetClassId', 'Guid'),
        ('ConfigurationVersion', 'ConfigurationVersionDataType'),
    ]

    def __init__(self):
        self.Namespaces = []
        self.StructureDataTypes = []
        self.EnumDataTypes = []
        self.SimpleDataTypes = []
        self.Name = None
        self.Description = LocalizedText()
        self.Fields = []
        self.DataSetClassId = Guid()
        self.ConfigurationVersion = ConfigurationVersionDataType()
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'DataSetMetaDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class FieldMetaData(FrozenClass):
    '''
    Metadata for a single field of a data set.

    :ivar Name:
    :vartype Name: String
    :ivar Description:
    :vartype Description: LocalizedText
    :ivar FieldFlags:
    :vartype FieldFlags: DataSetFieldFlags
    :ivar BuiltInType:
    :vartype BuiltInType: Byte
    :ivar DataType:
    :vartype DataType: NodeId
    :ivar ValueRank:
    :vartype ValueRank: Int32
    :ivar ArrayDimensions:
    :vartype ArrayDimensions: list of UInt32
    :ivar MaxStringLength:
    :vartype MaxStringLength: UInt32
    :ivar DataSetFieldId:
    :vartype DataSetFieldId: Guid
    :ivar Properties:
    :vartype Properties: list of KeyValuePair
    '''
    ua_types = [
        ('Name', 'String'),
        ('Description', 'LocalizedText'),
        ('FieldFlags', 'DataSetFieldFlags'),
        ('BuiltInType', 'Byte'),
        ('DataType', 'NodeId'),
        ('ValueRank', 'Int32'),
        ('ArrayDimensions', 'ListOfUInt32'),
        ('MaxStringLength', 'UInt32'),
        ('DataSetFieldId', 'Guid'),
        ('Properties', 'ListOfKeyValuePair'),
    ]

    def __init__(self):
        self.Name = None
        self.Description = LocalizedText()
        self.FieldFlags = DataSetFieldFlags(0)
        self.BuiltInType = 0
        self.DataType = NodeId()
        self.ValueRank = 0
        self.ArrayDimensions = []
        self.MaxStringLength = 0
        self.DataSetFieldId = Guid()
        self.Properties = []
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'FieldMetaData(' + ', '.join(parts) + ')'

    __repr__ = __str__
class ConfigurationVersionDataType(FrozenClass):
    '''
    Major/minor version pair for a configuration.

    :ivar MajorVersion:
    :vartype MajorVersion: UInt32
    :ivar MinorVersion:
    :vartype MinorVersion: UInt32
    '''
    ua_types = [
        ('MajorVersion', 'UInt32'),
        ('MinorVersion', 'UInt32'),
    ]

    def __init__(self):
        self.MajorVersion = 0
        self.MinorVersion = 0
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'ConfigurationVersionDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class PublishedDataSetDataType(FrozenClass):
    '''
    Configuration of a published data set and its source.

    :ivar Name:
    :vartype Name: String
    :ivar DataSetFolder:
    :vartype DataSetFolder: list of String
    :ivar DataSetMetaData:
    :vartype DataSetMetaData: DataSetMetaDataType
    :ivar ExtensionFields:
    :vartype ExtensionFields: list of KeyValuePair
    :ivar DataSetSource:
    :vartype DataSetSource: ExtensionObject
    '''
    ua_types = [
        ('Name', 'String'),
        ('DataSetFolder', 'ListOfString'),
        ('DataSetMetaData', 'DataSetMetaDataType'),
        ('ExtensionFields', 'ListOfKeyValuePair'),
        ('DataSetSource', 'ExtensionObject'),
    ]

    def __init__(self):
        self.Name = None
        self.DataSetFolder = []
        self.DataSetMetaData = DataSetMetaDataType()
        self.ExtensionFields = []
        self.DataSetSource = ExtensionObject()
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'PublishedDataSetDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class PublishedDataSetSourceDataType(FrozenClass):
    '''
    Field-less structure (no ua_types entries).
    '''
    ua_types = [
    ]

    def __init__(self):
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Bug fix: the generated code read `'...(' + + ')'`, which applies
        # unary plus to a str and raised TypeError on every str()/repr() call.
        return 'PublishedDataSetSourceDataType()'

    __repr__ = __str__
class PublishedVariableDataType(FrozenClass):
    '''
    Configuration of a single published variable (node, attribute, sampling
    and deadband settings, substitute value).

    :ivar PublishedVariable:
    :vartype PublishedVariable: NodeId
    :ivar AttributeId:
    :vartype AttributeId: UInt32
    :ivar SamplingIntervalHint:
    :vartype SamplingIntervalHint: Double
    :ivar DeadbandType:
    :vartype DeadbandType: UInt32
    :ivar DeadbandValue:
    :vartype DeadbandValue: Double
    :ivar IndexRange:
    :vartype IndexRange: String
    :ivar SubstituteValue:
    :vartype SubstituteValue: Variant
    :ivar MetaDataProperties:
    :vartype MetaDataProperties: list of QualifiedName
    '''
    ua_types = [
        ('PublishedVariable', 'NodeId'),
        ('AttributeId', 'UInt32'),
        ('SamplingIntervalHint', 'Double'),
        ('DeadbandType', 'UInt32'),
        ('DeadbandValue', 'Double'),
        ('IndexRange', 'String'),
        ('SubstituteValue', 'Variant'),
        ('MetaDataProperties', 'ListOfQualifiedName'),
    ]

    def __init__(self):
        self.PublishedVariable = NodeId()
        self.AttributeId = 0
        self.SamplingIntervalHint = 0
        self.DeadbandType = 0
        self.DeadbandValue = 0
        self.IndexRange = None
        self.SubstituteValue = Variant()
        self.MetaDataProperties = []
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'PublishedVariableDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class PublishedDataItemsDataType(FrozenClass):
    '''
    List of published variables making up a data set.

    :ivar PublishedData:
    :vartype PublishedData: list of PublishedVariableDataType
    '''
    ua_types = [
        ('PublishedData', 'ListOfPublishedVariableDataType'),
    ]

    def __init__(self):
        self.PublishedData = []
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'PublishedDataItemsDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class PublishedEventsDataType(FrozenClass):
    '''
    Event source configuration: notifier node, selected fields and filter.

    :ivar EventNotifier:
    :vartype EventNotifier: NodeId
    :ivar SelectedFields:
    :vartype SelectedFields: list of SimpleAttributeOperand
    :ivar Filter:
    :vartype Filter: ContentFilter
    '''
    ua_types = [
        ('EventNotifier', 'NodeId'),
        ('SelectedFields', 'ListOfSimpleAttributeOperand'),
        ('Filter', 'ContentFilter'),
    ]

    def __init__(self):
        self.EventNotifier = NodeId()
        self.SelectedFields = []
        self.Filter = ContentFilter()
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'PublishedEventsDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class DataSetWriterDataType(FrozenClass):
    '''
    Configuration of a data set writer.

    :ivar Name:
    :vartype Name: String
    :ivar Enabled:
    :vartype Enabled: Boolean
    :ivar DataSetWriterId:
    :vartype DataSetWriterId: UInt16
    :ivar DataSetFieldContentMask:
    :vartype DataSetFieldContentMask: DataSetFieldContentMask
    :ivar KeyFrameCount:
    :vartype KeyFrameCount: UInt32
    :ivar DataSetName:
    :vartype DataSetName: String
    :ivar DataSetWriterProperties:
    :vartype DataSetWriterProperties: list of KeyValuePair
    :ivar TransportSettings:
    :vartype TransportSettings: ExtensionObject
    :ivar MessageSettings:
    :vartype MessageSettings: ExtensionObject
    '''
    ua_types = [
        ('Name', 'String'),
        ('Enabled', 'Boolean'),
        ('DataSetWriterId', 'UInt16'),
        ('DataSetFieldContentMask', 'DataSetFieldContentMask'),
        ('KeyFrameCount', 'UInt32'),
        ('DataSetName', 'String'),
        ('DataSetWriterProperties', 'ListOfKeyValuePair'),
        ('TransportSettings', 'ExtensionObject'),
        ('MessageSettings', 'ExtensionObject'),
    ]

    def __init__(self):
        self.Name = None
        self.Enabled = True
        self.DataSetWriterId = 0
        self.DataSetFieldContentMask = DataSetFieldContentMask(0)
        self.KeyFrameCount = 0
        self.DataSetName = None
        self.DataSetWriterProperties = []
        self.TransportSettings = ExtensionObject()
        self.MessageSettings = ExtensionObject()
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'DataSetWriterDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class DataSetWriterTransportDataType(FrozenClass):
    '''
    Field-less structure (no ua_types entries).
    '''
    ua_types = [
    ]

    def __init__(self):
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Bug fix: the generated code read `'...(' + + ')'`, which applies
        # unary plus to a str and raised TypeError on every str()/repr() call.
        return 'DataSetWriterTransportDataType()'

    __repr__ = __str__
class DataSetWriterMessageDataType(FrozenClass):
    '''
    Field-less structure (no ua_types entries).
    '''
    ua_types = [
    ]

    def __init__(self):
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Bug fix: the generated code read `'...(' + + ')'`, which applies
        # unary plus to a str and raised TypeError on every str()/repr() call.
        return 'DataSetWriterMessageDataType()'

    __repr__ = __str__
class PubSubGroupDataType(FrozenClass):
    '''
    Common configuration shared by writer and reader groups.

    :ivar Name:
    :vartype Name: String
    :ivar Enabled:
    :vartype Enabled: Boolean
    :ivar SecurityMode:
    :vartype SecurityMode: MessageSecurityMode
    :ivar SecurityGroupId:
    :vartype SecurityGroupId: String
    :ivar SecurityKeyServices:
    :vartype SecurityKeyServices: list of EndpointDescription
    :ivar MaxNetworkMessageSize:
    :vartype MaxNetworkMessageSize: UInt32
    :ivar GroupProperties:
    :vartype GroupProperties: list of KeyValuePair
    '''
    ua_types = [
        ('Name', 'String'),
        ('Enabled', 'Boolean'),
        ('SecurityMode', 'MessageSecurityMode'),
        ('SecurityGroupId', 'String'),
        ('SecurityKeyServices', 'ListOfEndpointDescription'),
        ('MaxNetworkMessageSize', 'UInt32'),
        ('GroupProperties', 'ListOfKeyValuePair'),
    ]

    def __init__(self):
        self.Name = None
        self.Enabled = True
        self.SecurityMode = MessageSecurityMode(0)
        self.SecurityGroupId = None
        self.SecurityKeyServices = []
        self.MaxNetworkMessageSize = 0
        self.GroupProperties = []
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'PubSubGroupDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class WriterGroupDataType(FrozenClass):
    '''
    Configuration of a writer group: the common group settings plus
    publishing parameters and the contained data set writers.

    :ivar Name:
    :vartype Name: String
    :ivar Enabled:
    :vartype Enabled: Boolean
    :ivar SecurityMode:
    :vartype SecurityMode: MessageSecurityMode
    :ivar SecurityGroupId:
    :vartype SecurityGroupId: String
    :ivar SecurityKeyServices:
    :vartype SecurityKeyServices: list of EndpointDescription
    :ivar MaxNetworkMessageSize:
    :vartype MaxNetworkMessageSize: UInt32
    :ivar GroupProperties:
    :vartype GroupProperties: list of KeyValuePair
    :ivar WriterGroupId:
    :vartype WriterGroupId: UInt16
    :ivar PublishingInterval:
    :vartype PublishingInterval: Double
    :ivar KeepAliveTime:
    :vartype KeepAliveTime: Double
    :ivar Priority:
    :vartype Priority: Byte
    :ivar LocaleIds:
    :vartype LocaleIds: list of String
    :ivar TransportSettings:
    :vartype TransportSettings: ExtensionObject
    :ivar MessageSettings:
    :vartype MessageSettings: ExtensionObject
    :ivar DataSetWriters:
    :vartype DataSetWriters: list of DataSetWriterDataType
    '''
    ua_types = [
        ('Name', 'String'),
        ('Enabled', 'Boolean'),
        ('SecurityMode', 'MessageSecurityMode'),
        ('SecurityGroupId', 'String'),
        ('SecurityKeyServices', 'ListOfEndpointDescription'),
        ('MaxNetworkMessageSize', 'UInt32'),
        ('GroupProperties', 'ListOfKeyValuePair'),
        ('WriterGroupId', 'UInt16'),
        ('PublishingInterval', 'Double'),
        ('KeepAliveTime', 'Double'),
        ('Priority', 'Byte'),
        ('LocaleIds', 'ListOfString'),
        ('TransportSettings', 'ExtensionObject'),
        ('MessageSettings', 'ExtensionObject'),
        ('DataSetWriters', 'ListOfDataSetWriterDataType'),
    ]

    def __init__(self):
        self.Name = None
        self.Enabled = True
        self.SecurityMode = MessageSecurityMode(0)
        self.SecurityGroupId = None
        self.SecurityKeyServices = []
        self.MaxNetworkMessageSize = 0
        self.GroupProperties = []
        self.WriterGroupId = 0
        self.PublishingInterval = 0
        self.KeepAliveTime = 0
        self.Priority = 0
        self.LocaleIds = []
        self.TransportSettings = ExtensionObject()
        self.MessageSettings = ExtensionObject()
        self.DataSetWriters = []
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'WriterGroupDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class WriterGroupTransportDataType(FrozenClass):
    '''
    Field-less structure (no ua_types entries).
    '''
    ua_types = [
    ]

    def __init__(self):
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Bug fix: the generated code read `'...(' + + ')'`, which applies
        # unary plus to a str and raised TypeError on every str()/repr() call.
        return 'WriterGroupTransportDataType()'

    __repr__ = __str__
class WriterGroupMessageDataType(FrozenClass):
    '''
    Field-less structure (no ua_types entries).
    '''
    ua_types = [
    ]

    def __init__(self):
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Bug fix: the generated code read `'...(' + + ')'`, which applies
        # unary plus to a str and raised TypeError on every str()/repr() call.
        return 'WriterGroupMessageDataType()'

    __repr__ = __str__
class PubSubConnectionDataType(FrozenClass):
    '''
    Configuration of a PubSub connection: publisher identity, transport,
    address, and the writer/reader groups it carries.

    :ivar Name:
    :vartype Name: String
    :ivar Enabled:
    :vartype Enabled: Boolean
    :ivar PublisherId:
    :vartype PublisherId: Variant
    :ivar TransportProfileUri:
    :vartype TransportProfileUri: String
    :ivar Address:
    :vartype Address: ExtensionObject
    :ivar ConnectionProperties:
    :vartype ConnectionProperties: list of KeyValuePair
    :ivar TransportSettings:
    :vartype TransportSettings: ExtensionObject
    :ivar WriterGroups:
    :vartype WriterGroups: list of WriterGroupDataType
    :ivar ReaderGroups:
    :vartype ReaderGroups: list of ReaderGroupDataType
    '''
    ua_types = [
        ('Name', 'String'),
        ('Enabled', 'Boolean'),
        ('PublisherId', 'Variant'),
        ('TransportProfileUri', 'String'),
        ('Address', 'ExtensionObject'),
        ('ConnectionProperties', 'ListOfKeyValuePair'),
        ('TransportSettings', 'ExtensionObject'),
        ('WriterGroups', 'ListOfWriterGroupDataType'),
        ('ReaderGroups', 'ListOfReaderGroupDataType'),
    ]

    def __init__(self):
        self.Name = None
        self.Enabled = True
        self.PublisherId = Variant()
        self.TransportProfileUri = None
        self.Address = ExtensionObject()
        self.ConnectionProperties = []
        self.TransportSettings = ExtensionObject()
        self.WriterGroups = []
        self.ReaderGroups = []
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'PubSubConnectionDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class ConnectionTransportDataType(FrozenClass):
    '''
    Field-less structure (no ua_types entries).
    '''
    ua_types = [
    ]

    def __init__(self):
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Bug fix: the generated code read `'...(' + + ')'`, which applies
        # unary plus to a str and raised TypeError on every str()/repr() call.
        return 'ConnectionTransportDataType()'

    __repr__ = __str__
class NetworkAddressDataType(FrozenClass):
    '''
    Network address carrying only the interface name.

    :ivar NetworkInterface:
    :vartype NetworkInterface: String
    '''
    ua_types = [
        ('NetworkInterface', 'String'),
    ]

    def __init__(self):
        self.NetworkInterface = None
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'NetworkAddressDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class NetworkAddressUrlDataType(FrozenClass):
    '''
    Network address given as interface name plus URL.

    :ivar NetworkInterface:
    :vartype NetworkInterface: String
    :ivar Url:
    :vartype Url: String
    '''
    ua_types = [
        ('NetworkInterface', 'String'),
        ('Url', 'String'),
    ]

    def __init__(self):
        self.NetworkInterface = None
        self.Url = None
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'NetworkAddressUrlDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class ReaderGroupDataType(FrozenClass):
    '''
    Configuration of a reader group: the common group settings plus
    transport/message settings and the contained data set readers.

    :ivar Name:
    :vartype Name: String
    :ivar Enabled:
    :vartype Enabled: Boolean
    :ivar SecurityMode:
    :vartype SecurityMode: MessageSecurityMode
    :ivar SecurityGroupId:
    :vartype SecurityGroupId: String
    :ivar SecurityKeyServices:
    :vartype SecurityKeyServices: list of EndpointDescription
    :ivar MaxNetworkMessageSize:
    :vartype MaxNetworkMessageSize: UInt32
    :ivar GroupProperties:
    :vartype GroupProperties: list of KeyValuePair
    :ivar TransportSettings:
    :vartype TransportSettings: ExtensionObject
    :ivar MessageSettings:
    :vartype MessageSettings: ExtensionObject
    :ivar DataSetReaders:
    :vartype DataSetReaders: list of DataSetReaderDataType
    '''
    ua_types = [
        ('Name', 'String'),
        ('Enabled', 'Boolean'),
        ('SecurityMode', 'MessageSecurityMode'),
        ('SecurityGroupId', 'String'),
        ('SecurityKeyServices', 'ListOfEndpointDescription'),
        ('MaxNetworkMessageSize', 'UInt32'),
        ('GroupProperties', 'ListOfKeyValuePair'),
        ('TransportSettings', 'ExtensionObject'),
        ('MessageSettings', 'ExtensionObject'),
        ('DataSetReaders', 'ListOfDataSetReaderDataType'),
    ]

    def __init__(self):
        self.Name = None
        self.Enabled = True
        self.SecurityMode = MessageSecurityMode(0)
        self.SecurityGroupId = None
        self.SecurityKeyServices = []
        self.MaxNetworkMessageSize = 0
        self.GroupProperties = []
        self.TransportSettings = ExtensionObject()
        self.MessageSettings = ExtensionObject()
        self.DataSetReaders = []
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'ReaderGroupDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class ReaderGroupTransportDataType(FrozenClass):
    '''
    Field-less structure (no ua_types entries).
    '''
    ua_types = [
    ]

    def __init__(self):
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Bug fix: the generated code read `'...(' + + ')'`, which applies
        # unary plus to a str and raised TypeError on every str()/repr() call.
        return 'ReaderGroupTransportDataType()'

    __repr__ = __str__
class ReaderGroupMessageDataType(FrozenClass):
    '''
    Field-less structure (no ua_types entries).
    '''
    ua_types = [
    ]

    def __init__(self):
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Bug fix: the generated code read `'...(' + + ')'`, which applies
        # unary plus to a str and raised TypeError on every str()/repr() call.
        return 'ReaderGroupMessageDataType()'

    __repr__ = __str__
class DataSetReaderDataType(FrozenClass):
    '''
    Configuration of a data set reader: publisher/writer identification,
    metadata, security, transport/message settings and subscribed data set.

    :ivar Name:
    :vartype Name: String
    :ivar Enabled:
    :vartype Enabled: Boolean
    :ivar PublisherId:
    :vartype PublisherId: Variant
    :ivar WriterGroupId:
    :vartype WriterGroupId: UInt16
    :ivar DataSetWriterId:
    :vartype DataSetWriterId: UInt16
    :ivar DataSetMetaData:
    :vartype DataSetMetaData: DataSetMetaDataType
    :ivar DataSetFieldContentMask:
    :vartype DataSetFieldContentMask: DataSetFieldContentMask
    :ivar MessageReceiveTimeout:
    :vartype MessageReceiveTimeout: Double
    :ivar SecurityMode:
    :vartype SecurityMode: MessageSecurityMode
    :ivar SecurityGroupId:
    :vartype SecurityGroupId: String
    :ivar SecurityKeyServices:
    :vartype SecurityKeyServices: list of EndpointDescription
    :ivar DataSetReaderProperties:
    :vartype DataSetReaderProperties: list of KeyValuePair
    :ivar TransportSettings:
    :vartype TransportSettings: ExtensionObject
    :ivar MessageSettings:
    :vartype MessageSettings: ExtensionObject
    :ivar SubscribedDataSet:
    :vartype SubscribedDataSet: ExtensionObject
    '''
    ua_types = [
        ('Name', 'String'),
        ('Enabled', 'Boolean'),
        ('PublisherId', 'Variant'),
        ('WriterGroupId', 'UInt16'),
        ('DataSetWriterId', 'UInt16'),
        ('DataSetMetaData', 'DataSetMetaDataType'),
        ('DataSetFieldContentMask', 'DataSetFieldContentMask'),
        ('MessageReceiveTimeout', 'Double'),
        ('SecurityMode', 'MessageSecurityMode'),
        ('SecurityGroupId', 'String'),
        ('SecurityKeyServices', 'ListOfEndpointDescription'),
        ('DataSetReaderProperties', 'ListOfKeyValuePair'),
        ('TransportSettings', 'ExtensionObject'),
        ('MessageSettings', 'ExtensionObject'),
        ('SubscribedDataSet', 'ExtensionObject'),
    ]

    def __init__(self):
        self.Name = None
        self.Enabled = True
        self.PublisherId = Variant()
        self.WriterGroupId = 0
        self.DataSetWriterId = 0
        self.DataSetMetaData = DataSetMetaDataType()
        self.DataSetFieldContentMask = DataSetFieldContentMask(0)
        self.MessageReceiveTimeout = 0
        self.SecurityMode = MessageSecurityMode(0)
        self.SecurityGroupId = None
        self.SecurityKeyServices = []
        self.DataSetReaderProperties = []
        self.TransportSettings = ExtensionObject()
        self.MessageSettings = ExtensionObject()
        self.SubscribedDataSet = ExtensionObject()
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'DataSetReaderDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class DataSetReaderTransportDataType(FrozenClass):
    '''
    Field-less structure (no ua_types entries).
    '''
    ua_types = [
    ]

    def __init__(self):
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Bug fix: the generated code read `'...(' + + ')'`, which applies
        # unary plus to a str and raised TypeError on every str()/repr() call.
        return 'DataSetReaderTransportDataType()'

    __repr__ = __str__
class DataSetReaderMessageDataType(FrozenClass):
    '''
    Field-less structure (no ua_types entries).
    '''
    ua_types = [
    ]

    def __init__(self):
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Bug fix: the generated code read `'...(' + + ')'`, which applies
        # unary plus to a str and raised TypeError on every str()/repr() call.
        return 'DataSetReaderMessageDataType()'

    __repr__ = __str__
class SubscribedDataSetDataType(FrozenClass):
    '''
    Field-less structure (no ua_types entries).
    '''
    ua_types = [
    ]

    def __init__(self):
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Bug fix: the generated code read `'...(' + + ')'`, which applies
        # unary plus to a str and raised TypeError on every str()/repr() call.
        return 'SubscribedDataSetDataType()'

    __repr__ = __str__
class TargetVariablesDataType(FrozenClass):
    '''
    List of field-to-variable target mappings.

    :ivar TargetVariables:
    :vartype TargetVariables: list of FieldTargetDataType
    '''
    ua_types = [
        ('TargetVariables', 'ListOfFieldTargetDataType'),
    ]

    def __init__(self):
        self.TargetVariables = []
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'TargetVariablesDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class FieldTargetDataType(FrozenClass):
    '''
    Maps a data set field onto a target node/attribute, with index ranges
    and override handling.

    :ivar DataSetFieldId:
    :vartype DataSetFieldId: Guid
    :ivar ReceiverIndexRange:
    :vartype ReceiverIndexRange: String
    :ivar TargetNodeId:
    :vartype TargetNodeId: NodeId
    :ivar AttributeId:
    :vartype AttributeId: UInt32
    :ivar WriteIndexRange:
    :vartype WriteIndexRange: String
    :ivar OverrideValueHandling:
    :vartype OverrideValueHandling: OverrideValueHandling
    :ivar OverrideValue:
    :vartype OverrideValue: Variant
    '''
    ua_types = [
        ('DataSetFieldId', 'Guid'),
        ('ReceiverIndexRange', 'String'),
        ('TargetNodeId', 'NodeId'),
        ('AttributeId', 'UInt32'),
        ('WriteIndexRange', 'String'),
        ('OverrideValueHandling', 'OverrideValueHandling'),
        ('OverrideValue', 'Variant'),
    ]

    def __init__(self):
        self.DataSetFieldId = Guid()
        self.ReceiverIndexRange = None
        self.TargetNodeId = NodeId()
        self.AttributeId = 0
        self.WriteIndexRange = None
        self.OverrideValueHandling = OverrideValueHandling(0)
        self.OverrideValue = Variant()
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'FieldTargetDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class SubscribedDataSetMirrorDataType(FrozenClass):
    '''
    Mirror target: parent node name plus role permissions.

    :ivar ParentNodeName:
    :vartype ParentNodeName: String
    :ivar RolePermissions:
    :vartype RolePermissions: list of RolePermissionType
    '''
    ua_types = [
        ('ParentNodeName', 'String'),
        ('RolePermissions', 'ListOfRolePermissionType'),
    ]

    def __init__(self):
        self.ParentNodeName = None
        self.RolePermissions = []
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'SubscribedDataSetMirrorDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class PubSubConfigurationDataType(FrozenClass):
    '''
    Top-level PubSub configuration: published data sets and connections.

    :ivar PublishedDataSets:
    :vartype PublishedDataSets: list of PublishedDataSetDataType
    :ivar Connections:
    :vartype Connections: list of PubSubConnectionDataType
    :ivar Enabled:
    :vartype Enabled: Boolean
    '''
    ua_types = [
        ('PublishedDataSets', 'ListOfPublishedDataSetDataType'),
        ('Connections', 'ListOfPubSubConnectionDataType'),
        ('Enabled', 'Boolean'),
    ]

    def __init__(self):
        self.PublishedDataSets = []
        self.Connections = []
        self.Enabled = True
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'PubSubConfigurationDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class UadpWriterGroupMessageDataType(FrozenClass):
    '''
    UADP-specific message settings for a writer group.

    :ivar GroupVersion:
    :vartype GroupVersion: UInt32
    :ivar DataSetOrdering:
    :vartype DataSetOrdering: DataSetOrderingType
    :ivar NetworkMessageContentMask:
    :vartype NetworkMessageContentMask: UadpNetworkMessageContentMask
    :ivar SamplingOffset:
    :vartype SamplingOffset: Double
    :ivar PublishingOffset:
    :vartype PublishingOffset: list of Double
    '''
    ua_types = [
        ('GroupVersion', 'UInt32'),
        ('DataSetOrdering', 'DataSetOrderingType'),
        ('NetworkMessageContentMask', 'UadpNetworkMessageContentMask'),
        ('SamplingOffset', 'Double'),
        ('PublishingOffset', 'ListOfDouble'),
    ]

    def __init__(self):
        self.GroupVersion = 0
        self.DataSetOrdering = DataSetOrderingType(0)
        self.NetworkMessageContentMask = UadpNetworkMessageContentMask(0)
        self.SamplingOffset = 0
        self.PublishingOffset = []
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'UadpWriterGroupMessageDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class UadpDataSetWriterMessageDataType(FrozenClass):
    '''
    UADP-specific message settings for a data set writer.

    :ivar DataSetMessageContentMask:
    :vartype DataSetMessageContentMask: UadpDataSetMessageContentMask
    :ivar ConfiguredSize:
    :vartype ConfiguredSize: UInt16
    :ivar NetworkMessageNumber:
    :vartype NetworkMessageNumber: UInt16
    :ivar DataSetOffset:
    :vartype DataSetOffset: UInt16
    '''
    ua_types = [
        ('DataSetMessageContentMask', 'UadpDataSetMessageContentMask'),
        ('ConfiguredSize', 'UInt16'),
        ('NetworkMessageNumber', 'UInt16'),
        ('DataSetOffset', 'UInt16'),
    ]

    def __init__(self):
        self.DataSetMessageContentMask = UadpDataSetMessageContentMask(0)
        self.ConfiguredSize = 0
        self.NetworkMessageNumber = 0
        self.DataSetOffset = 0
        self._freeze = True  # set last, per the FrozenClass convention used file-wide

    def __str__(self):
        # Field list comes from ua_types, which matches the declaration order.
        parts = ('%s:%s' % (field, getattr(self, field)) for field, _ in self.ua_types)
        return 'UadpDataSetWriterMessageDataType(' + ', '.join(parts) + ')'

    __repr__ = __str__
class UadpDataSetReaderMessageDataType(FrozenClass):
    """UADP DataSetReader message settings.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('GroupVersion', 'UInt32'),
        ('NetworkMessageNumber', 'UInt16'),
        ('DataSetOffset', 'UInt16'),
        ('DataSetClassId', 'Guid'),
        ('NetworkMessageContentMask', 'UadpNetworkMessageContentMask'),
        ('DataSetMessageContentMask', 'UadpDataSetMessageContentMask'),
        ('PublishingInterval', 'Double'),
        ('ReceiveOffset', 'Double'),
        ('ProcessingOffset', 'Double'),
    ]

    def __init__(self):
        # Default-construct every field, then freeze the instance.
        self.GroupVersion = 0
        self.NetworkMessageNumber = 0
        self.DataSetOffset = 0
        self.DataSetClassId = Guid()
        self.NetworkMessageContentMask = UadpNetworkMessageContentMask(0)
        self.DataSetMessageContentMask = UadpDataSetMessageContentMask(0)
        self.PublishingInterval = 0
        self.ReceiveOffset = 0
        self.ProcessingOffset = 0
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'UadpDataSetReaderMessageDataType(' + body + ')'

    __repr__ = __str__
class JsonWriterGroupMessageDataType(FrozenClass):
    """JSON WriterGroup message settings.

    Single field ``NetworkMessageContentMask`` (JsonNetworkMessageContentMask).
    """
    ua_types = [
        ('NetworkMessageContentMask', 'JsonNetworkMessageContentMask'),
    ]

    def __init__(self):
        self.NetworkMessageContentMask = JsonNetworkMessageContentMask(0)
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'JsonWriterGroupMessageDataType(' + body + ')'

    __repr__ = __str__
class JsonDataSetWriterMessageDataType(FrozenClass):
    """JSON DataSetWriter message settings.

    Single field ``DataSetMessageContentMask`` (JsonDataSetMessageContentMask).
    """
    ua_types = [
        ('DataSetMessageContentMask', 'JsonDataSetMessageContentMask'),
    ]

    def __init__(self):
        self.DataSetMessageContentMask = JsonDataSetMessageContentMask(0)
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'JsonDataSetWriterMessageDataType(' + body + ')'

    __repr__ = __str__
class JsonDataSetReaderMessageDataType(FrozenClass):
    """JSON DataSetReader message settings.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('NetworkMessageContentMask', 'JsonNetworkMessageContentMask'),
        ('DataSetMessageContentMask', 'JsonDataSetMessageContentMask'),
    ]

    def __init__(self):
        self.NetworkMessageContentMask = JsonNetworkMessageContentMask(0)
        self.DataSetMessageContentMask = JsonDataSetMessageContentMask(0)
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'JsonDataSetReaderMessageDataType(' + body + ')'

    __repr__ = __str__
class DatagramConnectionTransportDataType(FrozenClass):
    """Datagram connection transport settings.

    Single field ``DiscoveryAddress`` (ExtensionObject).
    """
    ua_types = [
        ('DiscoveryAddress', 'ExtensionObject'),
    ]

    def __init__(self):
        self.DiscoveryAddress = ExtensionObject()
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'DatagramConnectionTransportDataType(' + body + ')'

    __repr__ = __str__
class DatagramWriterGroupTransportDataType(FrozenClass):
    """Datagram WriterGroup transport settings.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('MessageRepeatCount', 'Byte'),
        ('MessageRepeatDelay', 'Double'),
    ]

    def __init__(self):
        self.MessageRepeatCount = 0
        self.MessageRepeatDelay = 0
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'DatagramWriterGroupTransportDataType(' + body + ')'

    __repr__ = __str__
class BrokerConnectionTransportDataType(FrozenClass):
    """Broker connection transport settings.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('ResourceUri', 'String'),
        ('AuthenticationProfileUri', 'String'),
    ]

    def __init__(self):
        self.ResourceUri = None
        self.AuthenticationProfileUri = None
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'BrokerConnectionTransportDataType(' + body + ')'

    __repr__ = __str__
class BrokerWriterGroupTransportDataType(FrozenClass):
    """Broker WriterGroup transport settings.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('QueueName', 'String'),
        ('ResourceUri', 'String'),
        ('AuthenticationProfileUri', 'String'),
        ('RequestedDeliveryGuarantee', 'BrokerTransportQualityOfService'),
    ]

    def __init__(self):
        self.QueueName = None
        self.ResourceUri = None
        self.AuthenticationProfileUri = None
        self.RequestedDeliveryGuarantee = BrokerTransportQualityOfService(0)
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'BrokerWriterGroupTransportDataType(' + body + ')'

    __repr__ = __str__
class BrokerDataSetWriterTransportDataType(FrozenClass):
    """Broker DataSetWriter transport settings.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('QueueName', 'String'),
        ('ResourceUri', 'String'),
        ('AuthenticationProfileUri', 'String'),
        ('MetaDataQueueName', 'String'),
        ('MetaDataUpdateTime', 'Double'),
    ]

    def __init__(self):
        self.QueueName = None
        self.ResourceUri = None
        self.AuthenticationProfileUri = None
        self.MetaDataQueueName = None
        self.MetaDataUpdateTime = 0
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'BrokerDataSetWriterTransportDataType(' + body + ')'

    __repr__ = __str__
class BrokerDataSetReaderTransportDataType(FrozenClass):
    """Broker DataSetReader transport settings.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('QueueName', 'String'),
        ('ResourceUri', 'String'),
        ('AuthenticationProfileUri', 'String'),
        ('RequestedDeliveryGuarantee', 'BrokerTransportQualityOfService'),
        ('MetaDataQueueName', 'String'),
    ]

    def __init__(self):
        self.QueueName = None
        self.ResourceUri = None
        self.AuthenticationProfileUri = None
        self.RequestedDeliveryGuarantee = BrokerTransportQualityOfService(0)
        self.MetaDataQueueName = None
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'BrokerDataSetReaderTransportDataType(' + body + ')'

    __repr__ = __str__
class RolePermissionType(FrozenClass):
    """Permissions granted to a role.

    Fields: ``RoleId`` (NodeId) and ``Permissions`` (UInt32 bit mask).
    """
    ua_types = [
        ('RoleId', 'NodeId'),
        ('Permissions', 'UInt32'),
    ]

    def __init__(self):
        self.RoleId = NodeId()
        self.Permissions = 0
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'RolePermissionType(' + body + ')'

    __repr__ = __str__
class StructureField(FrozenClass):
    """One field of a structured DataType definition.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('Name', 'String'),
        ('Description', 'LocalizedText'),
        ('DataType', 'NodeId'),
        ('ValueRank', 'Int32'),
        ('ArrayDimensions', 'ListOfUInt32'),
        ('MaxStringLength', 'UInt32'),
        ('IsOptional', 'Boolean'),
    ]

    def __init__(self):
        # Default-construct every field, then freeze the instance.
        self.Name = None
        self.Description = LocalizedText()
        self.DataType = NodeId()
        self.ValueRank = 0
        self.ArrayDimensions = []
        self.MaxStringLength = 0
        self.IsOptional = True
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'StructureField(' + body + ')'

    __repr__ = __str__
class StructureDefinition(FrozenClass):
    """Definition of a structured DataType.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('DefaultEncodingId', 'NodeId'),
        ('BaseDataType', 'NodeId'),
        ('StructureType', 'StructureType'),
        ('Fields', 'ListOfStructureField'),
    ]

    def __init__(self):
        self.DefaultEncodingId = NodeId()
        self.BaseDataType = NodeId()
        self.StructureType = StructureType(0)
        self.Fields = []
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'StructureDefinition(' + body + ')'

    __repr__ = __str__
class EnumDefinition(FrozenClass):
    """Definition of an enumerated DataType.

    Single field ``Fields`` (list of EnumField).
    """
    ua_types = [
        ('Fields', 'ListOfEnumField'),
    ]

    def __init__(self):
        self.Fields = []
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'EnumDefinition(' + body + ')'

    __repr__ = __str__
class Argument(FrozenClass):
    """An argument for a method.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('Name', 'String'),
        ('DataType', 'NodeId'),
        ('ValueRank', 'Int32'),
        ('ArrayDimensions', 'ListOfUInt32'),
        ('Description', 'LocalizedText'),
    ]

    def __init__(self):
        self.Name = None
        self.DataType = NodeId()
        self.ValueRank = 0
        self.ArrayDimensions = []
        self.Description = LocalizedText()
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'Argument(' + body + ')'

    __repr__ = __str__
class EnumValueType(FrozenClass):
    """A mapping between a value of an enumerated type and a name and description.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('Value', 'Int64'),
        ('DisplayName', 'LocalizedText'),
        ('Description', 'LocalizedText'),
    ]

    def __init__(self):
        self.Value = 0
        self.DisplayName = LocalizedText()
        self.Description = LocalizedText()
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'EnumValueType(' + body + ')'

    __repr__ = __str__
class EnumField(FrozenClass):
    """One entry of an enumerated DataType definition.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('Value', 'Int64'),
        ('DisplayName', 'LocalizedText'),
        ('Description', 'LocalizedText'),
        ('Name', 'String'),
    ]

    def __init__(self):
        self.Value = 0
        self.DisplayName = LocalizedText()
        self.Description = LocalizedText()
        self.Name = None
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'EnumField(' + body + ')'

    __repr__ = __str__
class OptionSet(FrozenClass):
    """This abstract Structured DataType is the base DataType for all DataTypes representing a bit mask.

    Fields: ``Value`` (ByteString) and ``ValidBits`` (ByteString).
    """
    ua_types = [
        ('Value', 'ByteString'),
        ('ValidBits', 'ByteString'),
    ]

    def __init__(self):
        self.Value = None
        self.ValidBits = None
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'OptionSet(' + body + ')'

    __repr__ = __str__
class Union(FrozenClass):
    """This abstract DataType is the base DataType for all union DataTypes.

    Carries no fields of its own (``ua_types`` is empty).
    """
    ua_types = [
    ]

    def __init__(self):
        self._freeze = True

    def __str__(self):
        # BUG FIX: the generated code read "return 'Union(' + + ')'", which
        # applies unary "+" to the string ')' and raises TypeError whenever
        # str()/repr() is called on a Union instance.  Return the intended
        # literal instead.
        return 'Union()'

    __repr__ = __str__
class TimeZoneDataType(FrozenClass):
    """Time-zone information.

    Fields: ``Offset`` (Int16) and ``DaylightSavingInOffset`` (Boolean).
    """
    ua_types = [
        ('Offset', 'Int16'),
        ('DaylightSavingInOffset', 'Boolean'),
    ]

    def __init__(self):
        self.Offset = 0
        self.DaylightSavingInOffset = True
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'TimeZoneDataType(' + body + ')'

    __repr__ = __str__
class ApplicationDescription(FrozenClass):
    """Describes an application and how to find it.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('ApplicationUri', 'String'),
        ('ProductUri', 'String'),
        ('ApplicationName', 'LocalizedText'),
        ('ApplicationType', 'ApplicationType'),
        ('GatewayServerUri', 'String'),
        ('DiscoveryProfileUri', 'String'),
        ('DiscoveryUrls', 'ListOfString'),
    ]

    def __init__(self):
        # Default-construct every field, then freeze the instance.
        self.ApplicationUri = None
        self.ProductUri = None
        self.ApplicationName = LocalizedText()
        self.ApplicationType = ApplicationType(0)
        self.GatewayServerUri = None
        self.DiscoveryProfileUri = None
        self.DiscoveryUrls = []
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'ApplicationDescription(' + body + ')'

    __repr__ = __str__
class RequestHeader(FrozenClass):
    """The header passed with every server request.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('AuthenticationToken', 'NodeId'),
        ('Timestamp', 'DateTime'),
        ('RequestHandle', 'UInt32'),
        ('ReturnDiagnostics', 'UInt32'),
        ('AuditEntryId', 'String'),
        ('TimeoutHint', 'UInt32'),
        ('AdditionalHeader', 'ExtensionObject'),
    ]

    def __init__(self):
        # Timestamp defaults to construction time (naive UTC, as elsewhere
        # in this generated module).
        self.AuthenticationToken = NodeId()
        self.Timestamp = datetime.utcnow()
        self.RequestHandle = 0
        self.ReturnDiagnostics = 0
        self.AuditEntryId = None
        self.TimeoutHint = 0
        self.AdditionalHeader = ExtensionObject()
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'RequestHeader(' + body + ')'

    __repr__ = __str__
class ResponseHeader(FrozenClass):
    """The header passed with every server response.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('Timestamp', 'DateTime'),
        ('RequestHandle', 'UInt32'),
        ('ServiceResult', 'StatusCode'),
        ('ServiceDiagnostics', 'DiagnosticInfo'),
        ('StringTable', 'ListOfString'),
        ('AdditionalHeader', 'ExtensionObject'),
    ]

    def __init__(self):
        # Timestamp defaults to construction time (naive UTC, as elsewhere
        # in this generated module).
        self.Timestamp = datetime.utcnow()
        self.RequestHandle = 0
        self.ServiceResult = StatusCode()
        self.ServiceDiagnostics = DiagnosticInfo()
        self.StringTable = []
        self.AdditionalHeader = ExtensionObject()
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'ResponseHeader(' + body + ')'

    __repr__ = __str__
class ServiceFault(FrozenClass):
    """The response returned by all services when there is a service level error.

    Fields: ``TypeId`` (NodeId) and ``ResponseHeader`` (ResponseHeader).
    """
    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
    ]

    def __init__(self):
        # TypeId is pre-set to this message's default-binary encoding node.
        self.TypeId = FourByteNodeId(ObjectIds.ServiceFault_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'ServiceFault(' + body + ')'

    __repr__ = __str__
class SessionlessInvokeRequestType(FrozenClass):
    """Parameters of a sessionless service invocation request.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('UrisVersion', 'ListOfUInt32'),
        ('NamespaceUris', 'ListOfString'),
        ('ServerUris', 'ListOfString'),
        ('LocaleIds', 'ListOfString'),
        ('ServiceId', 'UInt32'),
    ]

    def __init__(self):
        self.UrisVersion = []
        self.NamespaceUris = []
        self.ServerUris = []
        self.LocaleIds = []
        self.ServiceId = 0
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'SessionlessInvokeRequestType(' + body + ')'

    __repr__ = __str__
class SessionlessInvokeResponseType(FrozenClass):
    """Parameters of a sessionless service invocation response.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('NamespaceUris', 'ListOfString'),
        ('ServerUris', 'ListOfString'),
        ('ServiceId', 'UInt32'),
    ]

    def __init__(self):
        self.NamespaceUris = []
        self.ServerUris = []
        self.ServiceId = 0
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'SessionlessInvokeResponseType(' + body + ')'

    __repr__ = __str__
class FindServersParameters(FrozenClass):
    """Parameters of the FindServers service request.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('EndpointUrl', 'String'),
        ('LocaleIds', 'ListOfString'),
        ('ServerUris', 'ListOfString'),
    ]

    def __init__(self):
        self.EndpointUrl = None
        self.LocaleIds = []
        self.ServerUris = []
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'FindServersParameters(' + body + ')'

    __repr__ = __str__
class FindServersRequest(FrozenClass):
    """Finds the servers known to the discovery server.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'FindServersParameters'),
    ]

    def __init__(self):
        # TypeId is pre-set to this message's default-binary encoding node.
        self.TypeId = FourByteNodeId(ObjectIds.FindServersRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = FindServersParameters()
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'FindServersRequest(' + body + ')'

    __repr__ = __str__
class FindServersResponse(FrozenClass):
    """Finds the servers known to the discovery server.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Servers', 'ListOfApplicationDescription'),
    ]

    def __init__(self):
        # TypeId is pre-set to this message's default-binary encoding node.
        self.TypeId = FourByteNodeId(ObjectIds.FindServersResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Servers = []
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'FindServersResponse(' + body + ')'

    __repr__ = __str__
class ServerOnNetwork(FrozenClass):
    """A server discovered on the network.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('RecordId', 'UInt32'),
        ('ServerName', 'String'),
        ('DiscoveryUrl', 'String'),
        ('ServerCapabilities', 'ListOfString'),
    ]

    def __init__(self):
        self.RecordId = 0
        self.ServerName = None
        self.DiscoveryUrl = None
        self.ServerCapabilities = []
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'ServerOnNetwork(' + body + ')'

    __repr__ = __str__
class FindServersOnNetworkParameters(FrozenClass):
    """Parameters of the FindServersOnNetwork service request.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('StartingRecordId', 'UInt32'),
        ('MaxRecordsToReturn', 'UInt32'),
        ('ServerCapabilityFilter', 'ListOfString'),
    ]

    def __init__(self):
        self.StartingRecordId = 0
        self.MaxRecordsToReturn = 0
        self.ServerCapabilityFilter = []
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'FindServersOnNetworkParameters(' + body + ')'

    __repr__ = __str__
class FindServersOnNetworkRequest(FrozenClass):
    """FindServersOnNetwork service request message.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'FindServersOnNetworkParameters'),
    ]

    def __init__(self):
        # TypeId is pre-set to this message's default-binary encoding node.
        self.TypeId = FourByteNodeId(ObjectIds.FindServersOnNetworkRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = FindServersOnNetworkParameters()
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'FindServersOnNetworkRequest(' + body + ')'

    __repr__ = __str__
class FindServersOnNetworkResult(FrozenClass):
    """Result payload of the FindServersOnNetwork service.

    Fields: ``LastCounterResetTime`` (DateTime) and ``Servers``
    (list of ServerOnNetwork).
    """
    ua_types = [
        ('LastCounterResetTime', 'DateTime'),
        ('Servers', 'ListOfServerOnNetwork'),
    ]

    def __init__(self):
        # Defaults to construction time (naive UTC, as elsewhere in this
        # generated module).
        self.LastCounterResetTime = datetime.utcnow()
        self.Servers = []
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'FindServersOnNetworkResult(' + body + ')'

    __repr__ = __str__
class FindServersOnNetworkResponse(FrozenClass):
    """FindServersOnNetwork service response message.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Parameters', 'FindServersOnNetworkResult'),
    ]

    def __init__(self):
        # TypeId is pre-set to this message's default-binary encoding node.
        self.TypeId = FourByteNodeId(ObjectIds.FindServersOnNetworkResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Parameters = FindServersOnNetworkResult()
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'FindServersOnNetworkResponse(' + body + ')'

    __repr__ = __str__
class UserTokenPolicy(FrozenClass):
    """Describes a user token that can be used with a server.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('PolicyId', 'String'),
        ('TokenType', 'UserTokenType'),
        ('IssuedTokenType', 'String'),
        ('IssuerEndpointUrl', 'String'),
        ('SecurityPolicyUri', 'String'),
    ]

    def __init__(self):
        self.PolicyId = None
        self.TokenType = UserTokenType(0)
        self.IssuedTokenType = None
        self.IssuerEndpointUrl = None
        self.SecurityPolicyUri = None
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'UserTokenPolicy(' + body + ')'

    __repr__ = __str__
class EndpointDescription(FrozenClass):
    """The description of a endpoint that can be used to access a server.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('EndpointUrl', 'String'),
        ('Server', 'ApplicationDescription'),
        ('ServerCertificate', 'ByteString'),
        ('SecurityMode', 'MessageSecurityMode'),
        ('SecurityPolicyUri', 'String'),
        ('UserIdentityTokens', 'ListOfUserTokenPolicy'),
        ('TransportProfileUri', 'String'),
        ('SecurityLevel', 'Byte'),
    ]

    def __init__(self):
        # Default-construct every field, then freeze the instance.
        self.EndpointUrl = None
        self.Server = ApplicationDescription()
        self.ServerCertificate = None
        self.SecurityMode = MessageSecurityMode(0)
        self.SecurityPolicyUri = None
        self.UserIdentityTokens = []
        self.TransportProfileUri = None
        self.SecurityLevel = 0
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'EndpointDescription(' + body + ')'

    __repr__ = __str__
class GetEndpointsParameters(FrozenClass):
    """Parameters of the GetEndpoints service request.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('EndpointUrl', 'String'),
        ('LocaleIds', 'ListOfString'),
        ('ProfileUris', 'ListOfString'),
    ]

    def __init__(self):
        self.EndpointUrl = None
        self.LocaleIds = []
        self.ProfileUris = []
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'GetEndpointsParameters(' + body + ')'

    __repr__ = __str__
class GetEndpointsRequest(FrozenClass):
    """Gets the endpoints used by the server.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'GetEndpointsParameters'),
    ]

    def __init__(self):
        # TypeId is pre-set to this message's default-binary encoding node.
        self.TypeId = FourByteNodeId(ObjectIds.GetEndpointsRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = GetEndpointsParameters()
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'GetEndpointsRequest(' + body + ')'

    __repr__ = __str__
class GetEndpointsResponse(FrozenClass):
    """Gets the endpoints used by the server.

    Field names and their UA wire types are declared in ``ua_types``.
    """
    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Endpoints', 'ListOfEndpointDescription'),
    ]

    def __init__(self):
        # TypeId is pre-set to this message's default-binary encoding node.
        self.TypeId = FourByteNodeId(ObjectIds.GetEndpointsResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Endpoints = []
        self._freeze = True

    def __str__(self):
        body = ', '.join('%s:%s' % (name, getattr(self, name))
                         for name, _ in self.ua_types)
        return 'GetEndpointsResponse(' + body + ')'

    __repr__ = __str__
class RegisteredServer(FrozenClass):
'''
The information required to register a server with a discovery server.
:ivar ServerUri:
:vartype ServerUri: String
:ivar ProductUri:
:vartype ProductUri: String
:ivar ServerNames:
:vartype ServerNames: LocalizedText
:ivar ServerType:
:vartype ServerType: ApplicationType
:ivar GatewayServerUri:
:vartype GatewayServerUri: String
:ivar DiscoveryUrls:
:vartype DiscoveryUrls: String
:ivar SemaphoreFilePath:
:vartype SemaphoreFilePath: String
:ivar IsOnline:
:vartype IsOnline: Boolean
'''
ua_types = [
('ServerUri', 'String'),
('ProductUri', 'String'),
('ServerNames', 'ListOfLocalizedText'),
('ServerType', 'ApplicationType'),
('GatewayServerUri', 'String'),
('DiscoveryUrls', 'ListOfString'),
('SemaphoreFilePath', 'String'),
('IsOnline', 'Boolean'),
]
def __init__(self):
self.ServerUri = None
self.ProductUri = None
self.ServerNames = []
self.ServerType = ApplicationType(0)
self.GatewayServerUri = None
self.DiscoveryUrls = []
self.SemaphoreFilePath = None
self.IsOnline = True
self._freeze = True
def __str__(self):
return 'RegisteredServer(' + 'ServerUri:' + str(self.ServerUri) + ', ' + \
'ProductUri:' + str(self.ProductUri) + ', ' + \
'ServerNames:' + str(self.ServerNames) + ', ' + \
'ServerType:' + str(self.ServerType) + ', ' + \
'GatewayServerUri:' + str(self.GatewayServerUri) + ', ' + \
'DiscoveryUrls:' + str(self.DiscoveryUrls) + ', ' + \
'SemaphoreFilePath:' + str(self.SemaphoreFilePath) + ', ' + \
'IsOnline:' + str(self.IsOnline) + ')'
__repr__ = __str__
class RegisterServerRequest(FrozenClass):
'''
Registers a server with the discovery server.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Server:
:vartype Server: RegisteredServer
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Server', 'RegisteredServer'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.RegisterServerRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Server = RegisteredServer()
self._freeze = True
def __str__(self):
return 'RegisterServerRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Server:' + str(self.Server) + ')'
__repr__ = __str__
class RegisterServerResponse(FrozenClass):
'''
Registers a server with the discovery server.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.RegisterServerResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self._freeze = True
def __str__(self):
return 'RegisterServerResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ')'
__repr__ = __str__
class DiscoveryConfiguration(FrozenClass):
'''
A base type for discovery configuration information.
'''
ua_types = [
]
def __init__(self):
self._freeze = True
def __str__(self):
return 'DiscoveryConfiguration(' + + ')'
__repr__ = __str__
class MdnsDiscoveryConfiguration(FrozenClass):
'''
The discovery information needed for mDNS registration.
:ivar MdnsServerName:
:vartype MdnsServerName: String
:ivar ServerCapabilities:
:vartype ServerCapabilities: String
'''
ua_types = [
('MdnsServerName', 'String'),
('ServerCapabilities', 'ListOfString'),
]
def __init__(self):
self.MdnsServerName = None
self.ServerCapabilities = []
self._freeze = True
def __str__(self):
return 'MdnsDiscoveryConfiguration(' + 'MdnsServerName:' + str(self.MdnsServerName) + ', ' + \
'ServerCapabilities:' + str(self.ServerCapabilities) + ')'
__repr__ = __str__
class RegisterServer2Parameters(FrozenClass):
'''
:ivar Server:
:vartype Server: RegisteredServer
:ivar DiscoveryConfiguration:
:vartype DiscoveryConfiguration: ExtensionObject
'''
ua_types = [
('Server', 'RegisteredServer'),
('DiscoveryConfiguration', 'ListOfExtensionObject'),
]
def __init__(self):
self.Server = RegisteredServer()
self.DiscoveryConfiguration = []
self._freeze = True
def __str__(self):
return 'RegisterServer2Parameters(' + 'Server:' + str(self.Server) + ', ' + \
'DiscoveryConfiguration:' + str(self.DiscoveryConfiguration) + ')'
__repr__ = __str__
class RegisterServer2Request(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: RegisterServer2Parameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'RegisterServer2Parameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.RegisterServer2Request_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = RegisterServer2Parameters()
self._freeze = True
def __str__(self):
return 'RegisterServer2Request(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class RegisterServer2Response(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar ConfigurationResults:
:vartype ConfigurationResults: StatusCode
:ivar DiagnosticInfos:
:vartype DiagnosticInfos: DiagnosticInfo
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('ConfigurationResults', 'ListOfStatusCode'),
('DiagnosticInfos', 'ListOfDiagnosticInfo'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.RegisterServer2Response_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.ConfigurationResults = []
self.DiagnosticInfos = []
self._freeze = True
def __str__(self):
return 'RegisterServer2Response(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'ConfigurationResults:' + str(self.ConfigurationResults) + ', ' + \
'DiagnosticInfos:' + str(self.DiagnosticInfos) + ')'
__repr__ = __str__
class ChannelSecurityToken(FrozenClass):
'''
The token that identifies a set of keys for an active secure channel.
:ivar ChannelId:
:vartype ChannelId: UInt32
:ivar TokenId:
:vartype TokenId: UInt32
:ivar CreatedAt:
:vartype CreatedAt: DateTime
:ivar RevisedLifetime:
:vartype RevisedLifetime: UInt32
'''
ua_types = [
('ChannelId', 'UInt32'),
('TokenId', 'UInt32'),
('CreatedAt', 'DateTime'),
('RevisedLifetime', 'UInt32'),
]
def __init__(self):
self.ChannelId = 0
self.TokenId = 0
self.CreatedAt = datetime.utcnow()
self.RevisedLifetime = 0
self._freeze = True
def __str__(self):
return 'ChannelSecurityToken(' + 'ChannelId:' + str(self.ChannelId) + ', ' + \
'TokenId:' + str(self.TokenId) + ', ' + \
'CreatedAt:' + str(self.CreatedAt) + ', ' + \
'RevisedLifetime:' + str(self.RevisedLifetime) + ')'
__repr__ = __str__
class OpenSecureChannelParameters(FrozenClass):
'''
:ivar ClientProtocolVersion:
:vartype ClientProtocolVersion: UInt32
:ivar RequestType:
:vartype RequestType: SecurityTokenRequestType
:ivar SecurityMode:
:vartype SecurityMode: MessageSecurityMode
:ivar ClientNonce:
:vartype ClientNonce: ByteString
:ivar RequestedLifetime:
:vartype RequestedLifetime: UInt32
'''
ua_types = [
('ClientProtocolVersion', 'UInt32'),
('RequestType', 'SecurityTokenRequestType'),
('SecurityMode', 'MessageSecurityMode'),
('ClientNonce', 'ByteString'),
('RequestedLifetime', 'UInt32'),
]
def __init__(self):
self.ClientProtocolVersion = 0
self.RequestType = SecurityTokenRequestType(0)
self.SecurityMode = MessageSecurityMode(0)
self.ClientNonce = None
self.RequestedLifetime = 0
self._freeze = True
def __str__(self):
return 'OpenSecureChannelParameters(' + 'ClientProtocolVersion:' + str(self.ClientProtocolVersion) + ', ' + \
'RequestType:' + str(self.RequestType) + ', ' + \
'SecurityMode:' + str(self.SecurityMode) + ', ' + \
'ClientNonce:' + str(self.ClientNonce) + ', ' + \
'RequestedLifetime:' + str(self.RequestedLifetime) + ')'
__repr__ = __str__
class OpenSecureChannelRequest(FrozenClass):
'''
Creates a secure channel with a server.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: OpenSecureChannelParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'OpenSecureChannelParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.OpenSecureChannelRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = OpenSecureChannelParameters()
self._freeze = True
def __str__(self):
return 'OpenSecureChannelRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class OpenSecureChannelResult(FrozenClass):
'''
:ivar ServerProtocolVersion:
:vartype ServerProtocolVersion: UInt32
:ivar SecurityToken:
:vartype SecurityToken: ChannelSecurityToken
:ivar ServerNonce:
:vartype ServerNonce: ByteString
'''
ua_types = [
('ServerProtocolVersion', 'UInt32'),
('SecurityToken', 'ChannelSecurityToken'),
('ServerNonce', 'ByteString'),
]
def __init__(self):
self.ServerProtocolVersion = 0
self.SecurityToken = ChannelSecurityToken()
self.ServerNonce = None
self._freeze = True
def __str__(self):
return 'OpenSecureChannelResult(' + 'ServerProtocolVersion:' + str(self.ServerProtocolVersion) + ', ' + \
'SecurityToken:' + str(self.SecurityToken) + ', ' + \
'ServerNonce:' + str(self.ServerNonce) + ')'
__repr__ = __str__
class OpenSecureChannelResponse(FrozenClass):
'''
Creates a secure channel with a server.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Parameters:
:vartype Parameters: OpenSecureChannelResult
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('Parameters', 'OpenSecureChannelResult'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.OpenSecureChannelResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.Parameters = OpenSecureChannelResult()
self._freeze = True
def __str__(self):
return 'OpenSecureChannelResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class CloseSecureChannelRequest(FrozenClass):
'''
Closes a secure channel.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.CloseSecureChannelRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self._freeze = True
def __str__(self):
return 'CloseSecureChannelRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ')'
__repr__ = __str__
class CloseSecureChannelResponse(FrozenClass):
'''
Closes a secure channel.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.CloseSecureChannelResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self._freeze = True
def __str__(self):
return 'CloseSecureChannelResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ')'
__repr__ = __str__
class SignedSoftwareCertificate(FrozenClass):
'''
A software certificate with a digital signature.
:ivar CertificateData:
:vartype CertificateData: ByteString
:ivar Signature:
:vartype Signature: ByteString
'''
ua_types = [
('CertificateData', 'ByteString'),
('Signature', 'ByteString'),
]
def __init__(self):
self.CertificateData = None
self.Signature = None
self._freeze = True
def __str__(self):
return 'SignedSoftwareCertificate(' + 'CertificateData:' + str(self.CertificateData) + ', ' + \
'Signature:' + str(self.Signature) + ')'
__repr__ = __str__
class SignatureData(FrozenClass):
'''
A digital signature.
:ivar Algorithm:
:vartype Algorithm: String
:ivar Signature:
:vartype Signature: ByteString
'''
ua_types = [
('Algorithm', 'String'),
('Signature', 'ByteString'),
]
def __init__(self):
self.Algorithm = None
self.Signature = None
self._freeze = True
def __str__(self):
return 'SignatureData(' + 'Algorithm:' + str(self.Algorithm) + ', ' + \
'Signature:' + str(self.Signature) + ')'
__repr__ = __str__
class CreateSessionParameters(FrozenClass):
'''
:ivar ClientDescription:
:vartype ClientDescription: ApplicationDescription
:ivar ServerUri:
:vartype ServerUri: String
:ivar EndpointUrl:
:vartype EndpointUrl: String
:ivar SessionName:
:vartype SessionName: String
:ivar ClientNonce:
:vartype ClientNonce: ByteString
:ivar ClientCertificate:
:vartype ClientCertificate: ByteString
:ivar RequestedSessionTimeout:
:vartype RequestedSessionTimeout: Double
:ivar MaxResponseMessageSize:
:vartype MaxResponseMessageSize: UInt32
'''
ua_types = [
('ClientDescription', 'ApplicationDescription'),
('ServerUri', 'String'),
('EndpointUrl', 'String'),
('SessionName', 'String'),
('ClientNonce', 'ByteString'),
('ClientCertificate', 'ByteString'),
('RequestedSessionTimeout', 'Double'),
('MaxResponseMessageSize', 'UInt32'),
]
def __init__(self):
self.ClientDescription = ApplicationDescription()
self.ServerUri = None
self.EndpointUrl = None
self.SessionName = None
self.ClientNonce = None
self.ClientCertificate = None
self.RequestedSessionTimeout = 0
self.MaxResponseMessageSize = 0
self._freeze = True
def __str__(self):
return 'CreateSessionParameters(' + 'ClientDescription:' + str(self.ClientDescription) + ', ' + \
'ServerUri:' + str(self.ServerUri) + ', ' + \
'EndpointUrl:' + str(self.EndpointUrl) + ', ' + \
'SessionName:' + str(self.SessionName) + ', ' + \
'ClientNonce:' + str(self.ClientNonce) + ', ' + \
'ClientCertificate:' + str(self.ClientCertificate) + ', ' + \
'RequestedSessionTimeout:' + str(self.RequestedSessionTimeout) + ', ' + \
'MaxResponseMessageSize:' + str(self.MaxResponseMessageSize) + ')'
__repr__ = __str__
class CreateSessionRequest(FrozenClass):
'''
Creates a new session with the server.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: CreateSessionParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'CreateSessionParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.CreateSessionRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = CreateSessionParameters()
self._freeze = True
def __str__(self):
return 'CreateSessionRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class CreateSessionResult(FrozenClass):
'''
:ivar SessionId:
:vartype SessionId: NodeId
:ivar AuthenticationToken:
:vartype AuthenticationToken: NodeId
:ivar RevisedSessionTimeout:
:vartype RevisedSessionTimeout: Double
:ivar ServerNonce:
:vartype ServerNonce: ByteString
:ivar ServerCertificate:
:vartype ServerCertificate: ByteString
:ivar ServerEndpoints:
:vartype ServerEndpoints: EndpointDescription
:ivar ServerSoftwareCertificates:
:vartype ServerSoftwareCertificates: SignedSoftwareCertificate
:ivar ServerSignature:
:vartype ServerSignature: SignatureData
:ivar MaxRequestMessageSize:
:vartype MaxRequestMessageSize: UInt32
'''
ua_types = [
('SessionId', 'NodeId'),
('AuthenticationToken', 'NodeId'),
('RevisedSessionTimeout', 'Double'),
('ServerNonce', 'ByteString'),
('ServerCertificate', 'ByteString'),
('ServerEndpoints', 'ListOfEndpointDescription'),
('ServerSoftwareCertificates', 'ListOfSignedSoftwareCertificate'),
('ServerSignature', 'SignatureData'),
('MaxRequestMessageSize', 'UInt32'),
]
def __init__(self):
self.SessionId = NodeId()
self.AuthenticationToken = NodeId()
self.RevisedSessionTimeout = 0
self.ServerNonce = None
self.ServerCertificate = None
self.ServerEndpoints = []
self.ServerSoftwareCertificates = []
self.ServerSignature = SignatureData()
self.MaxRequestMessageSize = 0
self._freeze = True
def __str__(self):
return 'CreateSessionResult(' + 'SessionId:' + str(self.SessionId) + ', ' + \
'AuthenticationToken:' + str(self.AuthenticationToken) + ', ' + \
'RevisedSessionTimeout:' + str(self.RevisedSessionTimeout) + ', ' + \
'ServerNonce:' + str(self.ServerNonce) + ', ' + \
'ServerCertificate:' + str(self.ServerCertificate) + ', ' + \
'ServerEndpoints:' + str(self.ServerEndpoints) + ', ' + \
'ServerSoftwareCertificates:' + str(self.ServerSoftwareCertificates) + ', ' + \
'ServerSignature:' + str(self.ServerSignature) + ', ' + \
'MaxRequestMessageSize:' + str(self.MaxRequestMessageSize) + ')'
__repr__ = __str__
class CreateSessionResponse(FrozenClass):
'''
Creates a new session with the server.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Parameters:
:vartype Parameters: CreateSessionResult
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('Parameters', 'CreateSessionResult'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.CreateSessionResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.Parameters = CreateSessionResult()
self._freeze = True
def __str__(self):
return 'CreateSessionResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class UserIdentityToken(FrozenClass):
'''
A base type for a user identity token.
:ivar PolicyId:
:vartype PolicyId: String
'''
ua_types = [
('PolicyId', 'String'),
]
def __init__(self):
self.PolicyId = None
self._freeze = True
def __str__(self):
return 'UserIdentityToken(' + 'PolicyId:' + str(self.PolicyId) + ')'
__repr__ = __str__
class AnonymousIdentityToken(FrozenClass):
'''
A token representing an anonymous user.
:ivar PolicyId:
:vartype PolicyId: String
'''
ua_types = [
('PolicyId', 'String'),
]
def __init__(self):
self.PolicyId = None
self._freeze = True
def __str__(self):
return 'AnonymousIdentityToken(' + 'PolicyId:' + str(self.PolicyId) + ')'
__repr__ = __str__
class UserNameIdentityToken(FrozenClass):
'''
A token representing a user identified by a user name and password.
:ivar PolicyId:
:vartype PolicyId: String
:ivar UserName:
:vartype UserName: String
:ivar Password:
:vartype Password: ByteString
:ivar EncryptionAlgorithm:
:vartype EncryptionAlgorithm: String
'''
ua_types = [
('PolicyId', 'String'),
('UserName', 'String'),
('Password', 'ByteString'),
('EncryptionAlgorithm', 'String'),
]
def __init__(self):
self.PolicyId = None
self.UserName = None
self.Password = None
self.EncryptionAlgorithm = None
self._freeze = True
def __str__(self):
return 'UserNameIdentityToken(' + 'PolicyId:' + str(self.PolicyId) + ', ' + \
'UserName:' + str(self.UserName) + ', ' + \
'Password:' + str(self.Password) + ', ' + \
'EncryptionAlgorithm:' + str(self.EncryptionAlgorithm) + ')'
__repr__ = __str__
class X509IdentityToken(FrozenClass):
'''
A token representing a user identified by an X509 certificate.
:ivar PolicyId:
:vartype PolicyId: String
:ivar CertificateData:
:vartype CertificateData: ByteString
'''
ua_types = [
('PolicyId', 'String'),
('CertificateData', 'ByteString'),
]
def __init__(self):
self.PolicyId = None
self.CertificateData = None
self._freeze = True
def __str__(self):
return 'X509IdentityToken(' + 'PolicyId:' + str(self.PolicyId) + ', ' + \
'CertificateData:' + str(self.CertificateData) + ')'
__repr__ = __str__
class IssuedIdentityToken(FrozenClass):
'''
A token representing a user identified by a WS-Security XML token.
:ivar PolicyId:
:vartype PolicyId: String
:ivar TokenData:
:vartype TokenData: ByteString
:ivar EncryptionAlgorithm:
:vartype EncryptionAlgorithm: String
'''
ua_types = [
('PolicyId', 'String'),
('TokenData', 'ByteString'),
('EncryptionAlgorithm', 'String'),
]
def __init__(self):
self.PolicyId = None
self.TokenData = None
self.EncryptionAlgorithm = None
self._freeze = True
def __str__(self):
return 'IssuedIdentityToken(' + 'PolicyId:' + str(self.PolicyId) + ', ' + \
'TokenData:' + str(self.TokenData) + ', ' + \
'EncryptionAlgorithm:' + str(self.EncryptionAlgorithm) + ')'
__repr__ = __str__
class ActivateSessionParameters(FrozenClass):
'''
:ivar ClientSignature:
:vartype ClientSignature: SignatureData
:ivar ClientSoftwareCertificates:
:vartype ClientSoftwareCertificates: SignedSoftwareCertificate
:ivar LocaleIds:
:vartype LocaleIds: String
:ivar UserIdentityToken:
:vartype UserIdentityToken: ExtensionObject
:ivar UserTokenSignature:
:vartype UserTokenSignature: SignatureData
'''
ua_types = [
('ClientSignature', 'SignatureData'),
('ClientSoftwareCertificates', 'ListOfSignedSoftwareCertificate'),
('LocaleIds', 'ListOfString'),
('UserIdentityToken', 'ExtensionObject'),
('UserTokenSignature', 'SignatureData'),
]
def __init__(self):
self.ClientSignature = SignatureData()
self.ClientSoftwareCertificates = []
self.LocaleIds = []
self.UserIdentityToken = ExtensionObject()
self.UserTokenSignature = SignatureData()
self._freeze = True
def __str__(self):
return 'ActivateSessionParameters(' + 'ClientSignature:' + str(self.ClientSignature) + ', ' + \
'ClientSoftwareCertificates:' + str(self.ClientSoftwareCertificates) + ', ' + \
'LocaleIds:' + str(self.LocaleIds) + ', ' + \
'UserIdentityToken:' + str(self.UserIdentityToken) + ', ' + \
'UserTokenSignature:' + str(self.UserTokenSignature) + ')'
__repr__ = __str__
class ActivateSessionRequest(FrozenClass):
'''
Activates a session with the server.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: ActivateSessionParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'ActivateSessionParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.ActivateSessionRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = ActivateSessionParameters()
self._freeze = True
def __str__(self):
return 'ActivateSessionRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class ActivateSessionResult(FrozenClass):
'''
:ivar ServerNonce:
:vartype ServerNonce: ByteString
:ivar Results:
:vartype Results: StatusCode
:ivar DiagnosticInfos:
:vartype DiagnosticInfos: DiagnosticInfo
'''
ua_types = [
('ServerNonce', 'ByteString'),
('Results', 'ListOfStatusCode'),
('DiagnosticInfos', 'ListOfDiagnosticInfo'),
]
def __init__(self):
self.ServerNonce = None
self.Results = []
self.DiagnosticInfos = []
self._freeze = True
def __str__(self):
return 'ActivateSessionResult(' + 'ServerNonce:' + str(self.ServerNonce) + ', ' + \
'Results:' + str(self.Results) + ', ' + \
'DiagnosticInfos:' + str(self.DiagnosticInfos) + ')'
__repr__ = __str__
class ActivateSessionResponse(FrozenClass):
'''
Activates a session with the server.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Parameters:
:vartype Parameters: ActivateSessionResult
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('Parameters', 'ActivateSessionResult'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.ActivateSessionResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.Parameters = ActivateSessionResult()
self._freeze = True
def __str__(self):
return 'ActivateSessionResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class CloseSessionRequest(FrozenClass):
'''
Closes a session with the server.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar DeleteSubscriptions:
:vartype DeleteSubscriptions: Boolean
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('DeleteSubscriptions', 'Boolean'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.CloseSessionRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.DeleteSubscriptions = True
self._freeze = True
def __str__(self):
return 'CloseSessionRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'DeleteSubscriptions:' + str(self.DeleteSubscriptions) + ')'
__repr__ = __str__
class CloseSessionResponse(FrozenClass):
'''
Closes a session with the server.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.CloseSessionResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self._freeze = True
def __str__(self):
return 'CloseSessionResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ')'
__repr__ = __str__
class CancelParameters(FrozenClass):
'''
:ivar RequestHandle:
:vartype RequestHandle: UInt32
'''
ua_types = [
('RequestHandle', 'UInt32'),
]
def __init__(self):
self.RequestHandle = 0
self._freeze = True
def __str__(self):
return 'CancelParameters(' + 'RequestHandle:' + str(self.RequestHandle) + ')'
__repr__ = __str__
class CancelRequest(FrozenClass):
'''
Cancels an outstanding request.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: CancelParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'CancelParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.CancelRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = CancelParameters()
self._freeze = True
def __str__(self):
return 'CancelRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class CancelResult(FrozenClass):
'''
:ivar CancelCount:
:vartype CancelCount: UInt32
'''
ua_types = [
('CancelCount', 'UInt32'),
]
def __init__(self):
self.CancelCount = 0
self._freeze = True
def __str__(self):
return 'CancelResult(' + 'CancelCount:' + str(self.CancelCount) + ')'
__repr__ = __str__
class CancelResponse(FrozenClass):
'''
Cancels an outstanding request.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Parameters:
:vartype Parameters: CancelResult
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('Parameters', 'CancelResult'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.CancelResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.Parameters = CancelResult()
self._freeze = True
def __str__(self):
return 'CancelResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class NodeAttributes(FrozenClass):
'''
The base attributes for all nodes.
:ivar SpecifiedAttributes:
:vartype SpecifiedAttributes: UInt32
:ivar DisplayName:
:vartype DisplayName: LocalizedText
:ivar Description:
:vartype Description: LocalizedText
:ivar WriteMask:
:vartype WriteMask: UInt32
:ivar UserWriteMask:
:vartype UserWriteMask: UInt32
'''
ua_types = [
('SpecifiedAttributes', 'UInt32'),
('DisplayName', 'LocalizedText'),
('Description', 'LocalizedText'),
('WriteMask', 'UInt32'),
('UserWriteMask', 'UInt32'),
]
def __init__(self):
self.SpecifiedAttributes = 0
self.DisplayName = LocalizedText()
self.Description = LocalizedText()
self.WriteMask = 0
self.UserWriteMask = 0
self._freeze = True
def __str__(self):
return 'NodeAttributes(' + 'SpecifiedAttributes:' + str(self.SpecifiedAttributes) + ', ' + \
'DisplayName:' + str(self.DisplayName) + ', ' + \
'Description:' + str(self.Description) + ', ' + \
'WriteMask:' + str(self.WriteMask) + ', ' + \
'UserWriteMask:' + str(self.UserWriteMask) + ')'
__repr__ = __str__
class ObjectAttributes(FrozenClass):
'''
The attributes for an object node.
:ivar SpecifiedAttributes:
:vartype SpecifiedAttributes: UInt32
:ivar DisplayName:
:vartype DisplayName: LocalizedText
:ivar Description:
:vartype Description: LocalizedText
:ivar WriteMask:
:vartype WriteMask: UInt32
:ivar UserWriteMask:
:vartype UserWriteMask: UInt32
:ivar EventNotifier:
:vartype EventNotifier: Byte
'''
ua_types = [
('SpecifiedAttributes', 'UInt32'),
('DisplayName', 'LocalizedText'),
('Description', 'LocalizedText'),
('WriteMask', 'UInt32'),
('UserWriteMask', 'UInt32'),
('EventNotifier', 'Byte'),
]
def __init__(self):
self.SpecifiedAttributes = 0
self.DisplayName = LocalizedText()
self.Description = LocalizedText()
self.WriteMask = 0
self.UserWriteMask = 0
self.EventNotifier = 0
self._freeze = True
def __str__(self):
return 'ObjectAttributes(' + 'SpecifiedAttributes:' + str(self.SpecifiedAttributes) + ', ' + \
'DisplayName:' + str(self.DisplayName) + ', ' + \
'Description:' + str(self.Description) + ', ' + \
'WriteMask:' + str(self.WriteMask) + ', ' + \
'UserWriteMask:' + str(self.UserWriteMask) + ', ' + \
'EventNotifier:' + str(self.EventNotifier) + ')'
__repr__ = __str__
class VariableAttributes(FrozenClass):
'''
The attributes for a variable node.
:ivar SpecifiedAttributes:
:vartype SpecifiedAttributes: UInt32
:ivar DisplayName:
:vartype DisplayName: LocalizedText
:ivar Description:
:vartype Description: LocalizedText
:ivar WriteMask:
:vartype WriteMask: UInt32
:ivar UserWriteMask:
:vartype UserWriteMask: UInt32
:ivar Value:
:vartype Value: Variant
:ivar DataType:
:vartype DataType: NodeId
:ivar ValueRank:
:vartype ValueRank: Int32
:ivar ArrayDimensions:
:vartype ArrayDimensions: UInt32
:ivar AccessLevel:
:vartype AccessLevel: Byte
:ivar UserAccessLevel:
:vartype UserAccessLevel: Byte
:ivar MinimumSamplingInterval:
:vartype MinimumSamplingInterval: Double
:ivar Historizing:
:vartype Historizing: Boolean
'''
ua_types = [
('SpecifiedAttributes', 'UInt32'),
('DisplayName', 'LocalizedText'),
('Description', 'LocalizedText'),
('WriteMask', 'UInt32'),
('UserWriteMask', 'UInt32'),
('Value', 'Variant'),
('DataType', 'NodeId'),
('ValueRank', 'Int32'),
('ArrayDimensions', 'ListOfUInt32'),
('AccessLevel', 'Byte'),
('UserAccessLevel', 'Byte'),
('MinimumSamplingInterval', 'Double'),
('Historizing', 'Boolean'),
]
def __init__(self):
self.SpecifiedAttributes = 0
self.DisplayName = LocalizedText()
self.Description = LocalizedText()
self.WriteMask = 0
self.UserWriteMask = 0
self.Value = Variant()
self.DataType = NodeId()
self.ValueRank = 0
self.ArrayDimensions = []
self.AccessLevel = 0
self.UserAccessLevel = 0
self.MinimumSamplingInterval = 0
self.Historizing = True
self._freeze = True
def __str__(self):
return 'VariableAttributes(' + 'SpecifiedAttributes:' + str(self.SpecifiedAttributes) + ', ' + \
'DisplayName:' + str(self.DisplayName) + ', ' + \
'Description:' + str(self.Description) + ', ' + \
'WriteMask:' + str(self.WriteMask) + ', ' + \
'UserWriteMask:' + str(self.UserWriteMask) + ', ' + \
'Value:' + str(self.Value) + ', ' + \
'DataType:' + str(self.DataType) + ', ' + \
'ValueRank:' + str(self.ValueRank) + ', ' + \
'ArrayDimensions:' + str(self.ArrayDimensions) + ', ' + \
'AccessLevel:' + str(self.AccessLevel) + ', ' + \
'UserAccessLevel:' + str(self.UserAccessLevel) + ', ' + \
'MinimumSamplingInterval:' + str(self.MinimumSamplingInterval) + ', ' + \
'Historizing:' + str(self.Historizing) + ')'
__repr__ = __str__
class MethodAttributes(FrozenClass):
    """Attributes supplied when creating a method node.

    Base node attributes plus the method-specific ``Executable`` and
    ``UserExecutable`` flags.  ``ua_types`` drives binary (de)serialization.
    """

    ua_types = [
        ('SpecifiedAttributes', 'UInt32'),
        ('DisplayName', 'LocalizedText'),
        ('Description', 'LocalizedText'),
        ('WriteMask', 'UInt32'),
        ('UserWriteMask', 'UInt32'),
        ('Executable', 'Boolean'),
        ('UserExecutable', 'Boolean'),
    ]

    def __init__(self):
        self.SpecifiedAttributes = 0
        self.DisplayName = LocalizedText()
        self.Description = LocalizedText()
        self.WriteMask = 0
        self.UserWriteMask = 0
        self.Executable = True
        self.UserExecutable = True
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'MethodAttributes(' + fields + ')'

    __repr__ = __str__
class ObjectTypeAttributes(FrozenClass):
    """Attributes supplied when creating an object type node.

    Base node attributes plus the type-specific ``IsAbstract`` flag.
    ``ua_types`` drives binary (de)serialization.
    """

    ua_types = [
        ('SpecifiedAttributes', 'UInt32'),
        ('DisplayName', 'LocalizedText'),
        ('Description', 'LocalizedText'),
        ('WriteMask', 'UInt32'),
        ('UserWriteMask', 'UInt32'),
        ('IsAbstract', 'Boolean'),
    ]

    def __init__(self):
        self.SpecifiedAttributes = 0
        self.DisplayName = LocalizedText()
        self.Description = LocalizedText()
        self.WriteMask = 0
        self.UserWriteMask = 0
        self.IsAbstract = True
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'ObjectTypeAttributes(' + fields + ')'

    __repr__ = __str__
class VariableTypeAttributes(FrozenClass):
    """Attributes supplied when creating a variable type node.

    Base node attributes plus the value/data-type description and the
    ``IsAbstract`` flag.  ``ua_types`` drives binary (de)serialization.
    """

    ua_types = [
        ('SpecifiedAttributes', 'UInt32'),
        ('DisplayName', 'LocalizedText'),
        ('Description', 'LocalizedText'),
        ('WriteMask', 'UInt32'),
        ('UserWriteMask', 'UInt32'),
        ('Value', 'Variant'),
        ('DataType', 'NodeId'),
        ('ValueRank', 'Int32'),
        ('ArrayDimensions', 'ListOfUInt32'),
        ('IsAbstract', 'Boolean'),
    ]

    def __init__(self):
        self.SpecifiedAttributes = 0
        self.DisplayName = LocalizedText()
        self.Description = LocalizedText()
        self.WriteMask = 0
        self.UserWriteMask = 0
        self.Value = Variant()
        self.DataType = NodeId()
        self.ValueRank = 0
        self.ArrayDimensions = []
        self.IsAbstract = True
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'VariableTypeAttributes(' + fields + ')'

    __repr__ = __str__
class ReferenceTypeAttributes(FrozenClass):
    """Attributes supplied when creating a reference type node.

    Base node attributes plus ``IsAbstract``, ``Symmetric`` and the
    ``InverseName`` shown for backward traversal.
    """

    ua_types = [
        ('SpecifiedAttributes', 'UInt32'),
        ('DisplayName', 'LocalizedText'),
        ('Description', 'LocalizedText'),
        ('WriteMask', 'UInt32'),
        ('UserWriteMask', 'UInt32'),
        ('IsAbstract', 'Boolean'),
        ('Symmetric', 'Boolean'),
        ('InverseName', 'LocalizedText'),
    ]

    def __init__(self):
        self.SpecifiedAttributes = 0
        self.DisplayName = LocalizedText()
        self.Description = LocalizedText()
        self.WriteMask = 0
        self.UserWriteMask = 0
        self.IsAbstract = True
        self.Symmetric = True
        self.InverseName = LocalizedText()
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'ReferenceTypeAttributes(' + fields + ')'

    __repr__ = __str__
class DataTypeAttributes(FrozenClass):
    """Attributes supplied when creating a data type node.

    Base node attributes plus the ``IsAbstract`` flag.
    """

    ua_types = [
        ('SpecifiedAttributes', 'UInt32'),
        ('DisplayName', 'LocalizedText'),
        ('Description', 'LocalizedText'),
        ('WriteMask', 'UInt32'),
        ('UserWriteMask', 'UInt32'),
        ('IsAbstract', 'Boolean'),
    ]

    def __init__(self):
        self.SpecifiedAttributes = 0
        self.DisplayName = LocalizedText()
        self.Description = LocalizedText()
        self.WriteMask = 0
        self.UserWriteMask = 0
        self.IsAbstract = True
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'DataTypeAttributes(' + fields + ')'

    __repr__ = __str__
class ViewAttributes(FrozenClass):
    """Attributes supplied when creating a view node.

    Base node attributes plus ``ContainsNoLoops`` and the
    ``EventNotifier`` byte.
    """

    ua_types = [
        ('SpecifiedAttributes', 'UInt32'),
        ('DisplayName', 'LocalizedText'),
        ('Description', 'LocalizedText'),
        ('WriteMask', 'UInt32'),
        ('UserWriteMask', 'UInt32'),
        ('ContainsNoLoops', 'Boolean'),
        ('EventNotifier', 'Byte'),
    ]

    def __init__(self):
        self.SpecifiedAttributes = 0
        self.DisplayName = LocalizedText()
        self.Description = LocalizedText()
        self.WriteMask = 0
        self.UserWriteMask = 0
        self.ContainsNoLoops = True
        self.EventNotifier = 0
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'ViewAttributes(' + fields + ')'

    __repr__ = __str__
class GenericAttributeValue(FrozenClass):
    """A single (attribute id, value) pair for generic node attributes."""

    ua_types = [
        ('AttributeId', 'UInt32'),
        ('Value', 'Variant'),
    ]

    def __init__(self):
        self.AttributeId = 0
        self.Value = Variant()
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'GenericAttributeValue(' + fields + ')'

    __repr__ = __str__
class GenericAttributes(FrozenClass):
    """Base node attributes plus an open-ended list of
    :class:`GenericAttributeValue` entries."""

    ua_types = [
        ('SpecifiedAttributes', 'UInt32'),
        ('DisplayName', 'LocalizedText'),
        ('Description', 'LocalizedText'),
        ('WriteMask', 'UInt32'),
        ('UserWriteMask', 'UInt32'),
        ('AttributeValues', 'ListOfGenericAttributeValue'),
    ]

    def __init__(self):
        self.SpecifiedAttributes = 0
        self.DisplayName = LocalizedText()
        self.Description = LocalizedText()
        self.WriteMask = 0
        self.UserWriteMask = 0
        self.AttributeValues = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'GenericAttributes(' + fields + ')'

    __repr__ = __str__
class AddNodesItem(FrozenClass):
    """One node to be added to the server address space.

    Identifies the parent, the reference linking it to the parent, the
    requested id/browse name, the node class, its attribute blob and an
    optional type definition.
    """

    ua_types = [
        ('ParentNodeId', 'ExpandedNodeId'),
        ('ReferenceTypeId', 'NodeId'),
        ('RequestedNewNodeId', 'ExpandedNodeId'),
        ('BrowseName', 'QualifiedName'),
        ('NodeClass', 'NodeClass'),
        ('NodeAttributes', 'ExtensionObject'),
        ('TypeDefinition', 'ExpandedNodeId'),
    ]

    def __init__(self):
        self.ParentNodeId = ExpandedNodeId()
        self.ReferenceTypeId = NodeId()
        self.RequestedNewNodeId = ExpandedNodeId()
        self.BrowseName = QualifiedName()
        self.NodeClass = NodeClass(0)
        self.NodeAttributes = ExtensionObject()
        self.TypeDefinition = ExpandedNodeId()
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'AddNodesItem(' + fields + ')'

    __repr__ = __str__
class AddNodesResult(FrozenClass):
    """Outcome of a single AddNodes operation: status plus the id of the
    node that was created (if any)."""

    ua_types = [
        ('StatusCode', 'StatusCode'),
        ('AddedNodeId', 'NodeId'),
    ]

    def __init__(self):
        self.StatusCode = StatusCode()
        self.AddedNodeId = NodeId()
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'AddNodesResult(' + fields + ')'

    __repr__ = __str__
class AddNodesParameters(FrozenClass):
    """Payload of an AddNodes request: the list of nodes to create."""

    ua_types = [
        ('NodesToAdd', 'ListOfAddNodesItem'),
    ]

    def __init__(self):
        self.NodesToAdd = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        return 'AddNodesParameters(NodesToAdd:' + str(self.NodesToAdd) + ')'

    __repr__ = __str__
class AddNodesRequest(FrozenClass):
    """AddNodes service request: creates one or more nodes in the server
    address space."""

    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'AddNodesParameters'),
    ]

    def __init__(self):
        # Encoding id identifies this message type on the wire.
        self.TypeId = FourByteNodeId(ObjectIds.AddNodesRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = AddNodesParameters()
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'AddNodesRequest(' + fields + ')'

    __repr__ = __str__
class AddNodesResponse(FrozenClass):
    """AddNodes service response: one result (and optional diagnostics)
    per requested node."""

    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Results', 'ListOfAddNodesResult'),
        ('DiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]

    def __init__(self):
        # Encoding id identifies this message type on the wire.
        self.TypeId = FourByteNodeId(ObjectIds.AddNodesResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'AddNodesResponse(' + fields + ')'

    __repr__ = __str__
class AddReferencesItem(FrozenClass):
    """One reference to be added between two nodes, possibly pointing at a
    node hosted on a remote server (``TargetServerUri``)."""

    ua_types = [
        ('SourceNodeId', 'NodeId'),
        ('ReferenceTypeId', 'NodeId'),
        ('IsForward', 'Boolean'),
        ('TargetServerUri', 'String'),
        ('TargetNodeId', 'ExpandedNodeId'),
        ('TargetNodeClass', 'NodeClass'),
    ]

    def __init__(self):
        self.SourceNodeId = NodeId()
        self.ReferenceTypeId = NodeId()
        self.IsForward = True
        self.TargetServerUri = None
        self.TargetNodeId = ExpandedNodeId()
        self.TargetNodeClass = NodeClass(0)
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'AddReferencesItem(' + fields + ')'

    __repr__ = __str__
class AddReferencesParameters(FrozenClass):
    """Payload of an AddReferences request: the references to create."""

    ua_types = [
        ('ReferencesToAdd', 'ListOfAddReferencesItem'),
    ]

    def __init__(self):
        self.ReferencesToAdd = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        return 'AddReferencesParameters(ReferencesToAdd:' + str(self.ReferencesToAdd) + ')'

    __repr__ = __str__
class AddReferencesRequest(FrozenClass):
    """AddReferences service request: creates one or more references in
    the server address space."""

    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'AddReferencesParameters'),
    ]

    def __init__(self):
        # Encoding id identifies this message type on the wire.
        self.TypeId = FourByteNodeId(ObjectIds.AddReferencesRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = AddReferencesParameters()
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'AddReferencesRequest(' + fields + ')'

    __repr__ = __str__
class AddReferencesResponse(FrozenClass):
    """AddReferences service response: one status code (and optional
    diagnostics) per requested reference."""

    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Results', 'ListOfStatusCode'),
        ('DiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]

    def __init__(self):
        # Encoding id identifies this message type on the wire.
        self.TypeId = FourByteNodeId(ObjectIds.AddReferencesResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'AddReferencesResponse(' + fields + ')'

    __repr__ = __str__
class DeleteNodesItem(FrozenClass):
    """One node to delete; ``DeleteTargetReferences`` also removes the
    references other nodes hold to it."""

    ua_types = [
        ('NodeId', 'NodeId'),
        ('DeleteTargetReferences', 'Boolean'),
    ]

    def __init__(self):
        self.NodeId = NodeId()
        self.DeleteTargetReferences = True
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'DeleteNodesItem(' + fields + ')'

    __repr__ = __str__
class DeleteNodesParameters(FrozenClass):
    """Payload of a DeleteNodes request: the nodes to remove."""

    ua_types = [
        ('NodesToDelete', 'ListOfDeleteNodesItem'),
    ]

    def __init__(self):
        self.NodesToDelete = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        return 'DeleteNodesParameters(NodesToDelete:' + str(self.NodesToDelete) + ')'

    __repr__ = __str__
class DeleteNodesRequest(FrozenClass):
    """DeleteNodes service request: removes one or more nodes from the
    server address space."""

    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'DeleteNodesParameters'),
    ]

    def __init__(self):
        # Encoding id identifies this message type on the wire.
        self.TypeId = FourByteNodeId(ObjectIds.DeleteNodesRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = DeleteNodesParameters()
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'DeleteNodesRequest(' + fields + ')'

    __repr__ = __str__
class DeleteNodesResponse(FrozenClass):
    """DeleteNodes service response: one status code (and optional
    diagnostics) per requested deletion."""

    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Results', 'ListOfStatusCode'),
        ('DiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]

    def __init__(self):
        # Encoding id identifies this message type on the wire.
        self.TypeId = FourByteNodeId(ObjectIds.DeleteNodesResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'DeleteNodesResponse(' + fields + ')'

    __repr__ = __str__
class DeleteReferencesItem(FrozenClass):
    """One reference to delete; ``DeleteBidirectional`` also removes the
    opposite-direction reference."""

    ua_types = [
        ('SourceNodeId', 'NodeId'),
        ('ReferenceTypeId', 'NodeId'),
        ('IsForward', 'Boolean'),
        ('TargetNodeId', 'ExpandedNodeId'),
        ('DeleteBidirectional', 'Boolean'),
    ]

    def __init__(self):
        self.SourceNodeId = NodeId()
        self.ReferenceTypeId = NodeId()
        self.IsForward = True
        self.TargetNodeId = ExpandedNodeId()
        self.DeleteBidirectional = True
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'DeleteReferencesItem(' + fields + ')'

    __repr__ = __str__
class DeleteReferencesParameters(FrozenClass):
    """Payload of a DeleteReferences request: the references to remove."""

    ua_types = [
        ('ReferencesToDelete', 'ListOfDeleteReferencesItem'),
    ]

    def __init__(self):
        self.ReferencesToDelete = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        return 'DeleteReferencesParameters(ReferencesToDelete:' + str(self.ReferencesToDelete) + ')'

    __repr__ = __str__
class DeleteReferencesRequest(FrozenClass):
    """DeleteReferences service request: removes one or more references
    from the server address space."""

    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'DeleteReferencesParameters'),
    ]

    def __init__(self):
        # Encoding id identifies this message type on the wire.
        self.TypeId = FourByteNodeId(ObjectIds.DeleteReferencesRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = DeleteReferencesParameters()
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'DeleteReferencesRequest(' + fields + ')'

    __repr__ = __str__
class DeleteReferencesResult(FrozenClass):
    """Per-item status codes and diagnostics for a DeleteReferences
    operation."""

    ua_types = [
        ('Results', 'ListOfStatusCode'),
        ('DiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]

    def __init__(self):
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'DeleteReferencesResult(' + fields + ')'

    __repr__ = __str__
class DeleteReferencesResponse(FrozenClass):
    """DeleteReferences service response wrapping a
    :class:`DeleteReferencesResult`."""

    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Parameters', 'DeleteReferencesResult'),
    ]

    def __init__(self):
        # Encoding id identifies this message type on the wire.
        self.TypeId = FourByteNodeId(ObjectIds.DeleteReferencesResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Parameters = DeleteReferencesResult()
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'DeleteReferencesResponse(' + fields + ')'

    __repr__ = __str__
class ViewDescription(FrozenClass):
    """Identifies the view (and optionally a point in time / version) a
    browse operation is evaluated against."""

    ua_types = [
        ('ViewId', 'NodeId'),
        ('Timestamp', 'DateTime'),
        ('ViewVersion', 'UInt32'),
    ]

    def __init__(self):
        self.ViewId = NodeId()
        # NOTE(review): naive datetime from utcnow(); presumably the binary
        # encoder treats it as UTC — confirm before switching to tz-aware.
        self.Timestamp = datetime.utcnow()
        self.ViewVersion = 0
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'ViewDescription(' + fields + ')'

    __repr__ = __str__
class BrowseDescription(FrozenClass):
    """Describes one node to browse: direction, reference-type filter
    (optionally including subtypes), and node-class/result masks."""

    ua_types = [
        ('NodeId', 'NodeId'),
        ('BrowseDirection', 'BrowseDirection'),
        ('ReferenceTypeId', 'NodeId'),
        ('IncludeSubtypes', 'Boolean'),
        ('NodeClassMask', 'UInt32'),
        ('ResultMask', 'UInt32'),
    ]

    def __init__(self):
        self.NodeId = NodeId()
        self.BrowseDirection = BrowseDirection(0)
        self.ReferenceTypeId = NodeId()
        self.IncludeSubtypes = True
        self.NodeClassMask = 0
        self.ResultMask = 0
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'BrowseDescription(' + fields + ')'

    __repr__ = __str__
class ReferenceDescription(FrozenClass):
    """One reference returned by a browse: the reference type/direction
    plus identifying data of the target node."""

    ua_types = [
        ('ReferenceTypeId', 'NodeId'),
        ('IsForward', 'Boolean'),
        ('NodeId', 'ExpandedNodeId'),
        ('BrowseName', 'QualifiedName'),
        ('DisplayName', 'LocalizedText'),
        ('NodeClass', 'NodeClass'),
        ('TypeDefinition', 'ExpandedNodeId'),
    ]

    def __init__(self):
        self.ReferenceTypeId = NodeId()
        self.IsForward = True
        self.NodeId = ExpandedNodeId()
        self.BrowseName = QualifiedName()
        self.DisplayName = LocalizedText()
        self.NodeClass = NodeClass(0)
        self.TypeDefinition = ExpandedNodeId()
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'ReferenceDescription(' + fields + ')'

    __repr__ = __str__
class BrowseResult(FrozenClass):
    """Result of browsing one node: status, an optional continuation
    point for paging, and the references found."""

    ua_types = [
        ('StatusCode', 'StatusCode'),
        ('ContinuationPoint', 'ByteString'),
        ('References', 'ListOfReferenceDescription'),
    ]

    def __init__(self):
        self.StatusCode = StatusCode()
        self.ContinuationPoint = None
        self.References = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'BrowseResult(' + fields + ')'

    __repr__ = __str__
class BrowseParameters(FrozenClass):
    """Payload of a Browse request: the view, a per-node reference cap,
    and the nodes to browse."""

    ua_types = [
        ('View', 'ViewDescription'),
        ('RequestedMaxReferencesPerNode', 'UInt32'),
        ('NodesToBrowse', 'ListOfBrowseDescription'),
    ]

    def __init__(self):
        self.View = ViewDescription()
        self.RequestedMaxReferencesPerNode = 0
        self.NodesToBrowse = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'BrowseParameters(' + fields + ')'

    __repr__ = __str__
class BrowseRequest(FrozenClass):
    """Browse service request: discovers the references of one or more
    nodes in the server address space."""

    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'BrowseParameters'),
    ]

    def __init__(self):
        # Encoding id identifies this message type on the wire.
        self.TypeId = FourByteNodeId(ObjectIds.BrowseRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = BrowseParameters()
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'BrowseRequest(' + fields + ')'

    __repr__ = __str__
class BrowseResponse(FrozenClass):
    """Browse service response: one :class:`BrowseResult` (and optional
    diagnostics) per browsed node."""

    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Results', 'ListOfBrowseResult'),
        ('DiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]

    def __init__(self):
        # Encoding id identifies this message type on the wire.
        self.TypeId = FourByteNodeId(ObjectIds.BrowseResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'BrowseResponse(' + fields + ')'

    __repr__ = __str__
class BrowseNextParameters(FrozenClass):
    """Payload of a BrowseNext request: continuation points to resume,
    or to release when ``ReleaseContinuationPoints`` is set."""

    ua_types = [
        ('ReleaseContinuationPoints', 'Boolean'),
        ('ContinuationPoints', 'ListOfByteString'),
    ]

    def __init__(self):
        self.ReleaseContinuationPoints = True
        self.ContinuationPoints = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'BrowseNextParameters(' + fields + ')'

    __repr__ = __str__
class BrowseNextRequest(FrozenClass):
    """BrowseNext service request: continues (or releases) one or more
    paged browse operations."""

    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'BrowseNextParameters'),
    ]

    def __init__(self):
        # Encoding id identifies this message type on the wire.
        self.TypeId = FourByteNodeId(ObjectIds.BrowseNextRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = BrowseNextParameters()
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'BrowseNextRequest(' + fields + ')'

    __repr__ = __str__
class BrowseNextResult(FrozenClass):
    """Per-continuation-point results and diagnostics for a BrowseNext
    operation."""

    ua_types = [
        ('Results', 'ListOfBrowseResult'),
        ('DiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]

    def __init__(self):
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'BrowseNextResult(' + fields + ')'

    __repr__ = __str__
class BrowseNextResponse(FrozenClass):
    """BrowseNext service response wrapping a :class:`BrowseNextResult`."""

    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Parameters', 'BrowseNextResult'),
    ]

    def __init__(self):
        # Encoding id identifies this message type on the wire.
        self.TypeId = FourByteNodeId(ObjectIds.BrowseNextResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Parameters = BrowseNextResult()
        self._freeze = True  # FrozenClass marker; set last

    def __str__(self):
        fields = ', '.join(name + ':' + str(getattr(self, name))
                           for name, _ in self.ua_types)
        return 'BrowseNextResponse(' + fields + ')'

    __repr__ = __str__
class RelativePathElement(FrozenClass):
    '''
    An element in a relative path.

    :ivar ReferenceTypeId: reference type to follow (NodeId)
    :ivar IsInverse: follow the inverse reference (Boolean)
    :ivar IncludeSubtypes: also follow subtypes of the reference (Boolean)
    :ivar TargetName: browse name of the target (QualifiedName)
    '''
    ua_types = [
        ('ReferenceTypeId', 'NodeId'),
        ('IsInverse', 'Boolean'),
        ('IncludeSubtypes', 'Boolean'),
        ('TargetName', 'QualifiedName'),
    ]

    def __init__(self):
        self.ReferenceTypeId = NodeId()
        self.IsInverse = True
        self.IncludeSubtypes = True
        self.TargetName = QualifiedName()
        self._freeze = True

    def __str__(self):
        return 'RelativePathElement(ReferenceTypeId:%s, IsInverse:%s, IncludeSubtypes:%s, TargetName:%s)' % (self.ReferenceTypeId, self.IsInverse, self.IncludeSubtypes, self.TargetName)

    __repr__ = __str__
class RelativePath(FrozenClass):
    '''
    A relative path constructed from reference types and browse names.

    :ivar Elements: ordered path elements (ListOfRelativePathElement)
    '''
    ua_types = [
        ('Elements', 'ListOfRelativePathElement'),
    ]

    def __init__(self):
        self.Elements = []
        self._freeze = True

    def __str__(self):
        return 'RelativePath(Elements:%s)' % (self.Elements,)

    __repr__ = __str__
class BrowsePath(FrozenClass):
    '''
    A request to translate a path into a node id.

    :ivar StartingNode: node the path starts from (NodeId)
    :ivar RelativePath: path to follow (RelativePath)
    '''
    ua_types = [
        ('StartingNode', 'NodeId'),
        ('RelativePath', 'RelativePath'),
    ]

    def __init__(self):
        self.StartingNode = NodeId()
        self.RelativePath = RelativePath()
        self._freeze = True

    def __str__(self):
        return 'BrowsePath(StartingNode:%s, RelativePath:%s)' % (self.StartingNode, self.RelativePath)

    __repr__ = __str__
class BrowsePathTarget(FrozenClass):
    '''
    The target of the translated path.

    :ivar TargetId: resolved target node (ExpandedNodeId)
    :ivar RemainingPathIndex: index of first unprocessed element (UInt32)
    '''
    ua_types = [
        ('TargetId', 'ExpandedNodeId'),
        ('RemainingPathIndex', 'UInt32'),
    ]

    def __init__(self):
        self.TargetId = ExpandedNodeId()
        self.RemainingPathIndex = 0
        self._freeze = True

    def __str__(self):
        return 'BrowsePathTarget(TargetId:%s, RemainingPathIndex:%s)' % (self.TargetId, self.RemainingPathIndex)

    __repr__ = __str__
class BrowsePathResult(FrozenClass):
    '''
    The result of a translate operation.

    :ivar StatusCode: operation status (StatusCode)
    :ivar Targets: resolved targets (ListOfBrowsePathTarget)
    '''
    ua_types = [
        ('StatusCode', 'StatusCode'),
        ('Targets', 'ListOfBrowsePathTarget'),
    ]

    def __init__(self):
        self.StatusCode = StatusCode()
        self.Targets = []
        self._freeze = True

    def __str__(self):
        return 'BrowsePathResult(StatusCode:%s, Targets:%s)' % (self.StatusCode, self.Targets)

    __repr__ = __str__
class TranslateBrowsePathsToNodeIdsParameters(FrozenClass):
    '''
    Parameters for the TranslateBrowsePathsToNodeIds service.

    :ivar BrowsePaths: paths to translate (ListOfBrowsePath)
    '''
    ua_types = [
        ('BrowsePaths', 'ListOfBrowsePath'),
    ]

    def __init__(self):
        self.BrowsePaths = []
        self._freeze = True

    def __str__(self):
        return 'TranslateBrowsePathsToNodeIdsParameters(BrowsePaths:%s)' % (self.BrowsePaths,)

    __repr__ = __str__
class TranslateBrowsePathsToNodeIdsRequest(FrozenClass):
    '''
    Translates one or more paths in the server address space.

    :ivar TypeId: encoding node id (NodeId)
    :ivar RequestHeader: common request header (RequestHeader)
    :ivar Parameters: service parameters (TranslateBrowsePathsToNodeIdsParameters)
    '''
    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'TranslateBrowsePathsToNodeIdsParameters'),
    ]

    def __init__(self):
        # TypeId defaults to this type's default binary encoding id.
        self.TypeId = FourByteNodeId(ObjectIds.TranslateBrowsePathsToNodeIdsRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = TranslateBrowsePathsToNodeIdsParameters()
        self._freeze = True

    def __str__(self):
        return 'TranslateBrowsePathsToNodeIdsRequest(TypeId:%s, RequestHeader:%s, Parameters:%s)' % (self.TypeId, self.RequestHeader, self.Parameters)

    __repr__ = __str__
class TranslateBrowsePathsToNodeIdsResponse(FrozenClass):
    '''
    Translates one or more paths in the server address space.

    :ivar TypeId: encoding node id (NodeId)
    :ivar ResponseHeader: common response header (ResponseHeader)
    :ivar Results: per-path results (ListOfBrowsePathResult)
    :ivar DiagnosticInfos: per-path diagnostics (ListOfDiagnosticInfo)
    '''
    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Results', 'ListOfBrowsePathResult'),
        ('DiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]

    def __init__(self):
        # TypeId defaults to this type's default binary encoding id.
        self.TypeId = FourByteNodeId(ObjectIds.TranslateBrowsePathsToNodeIdsResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True

    def __str__(self):
        return 'TranslateBrowsePathsToNodeIdsResponse(TypeId:%s, ResponseHeader:%s, Results:%s, DiagnosticInfos:%s)' % (self.TypeId, self.ResponseHeader, self.Results, self.DiagnosticInfos)

    __repr__ = __str__
class RegisterNodesParameters(FrozenClass):
    '''
    Parameters for the RegisterNodes service.

    :ivar NodesToRegister: node ids to register (ListOfNodeId)
    '''
    ua_types = [
        ('NodesToRegister', 'ListOfNodeId'),
    ]

    def __init__(self):
        self.NodesToRegister = []
        self._freeze = True

    def __str__(self):
        return 'RegisterNodesParameters(NodesToRegister:%s)' % (self.NodesToRegister,)

    __repr__ = __str__
class RegisterNodesRequest(FrozenClass):
    '''
    Registers one or more nodes for repeated use within a session.

    :ivar TypeId: encoding node id (NodeId)
    :ivar RequestHeader: common request header (RequestHeader)
    :ivar Parameters: service parameters (RegisterNodesParameters)
    '''
    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'RegisterNodesParameters'),
    ]

    def __init__(self):
        # TypeId defaults to this type's default binary encoding id.
        self.TypeId = FourByteNodeId(ObjectIds.RegisterNodesRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = RegisterNodesParameters()
        self._freeze = True

    def __str__(self):
        return 'RegisterNodesRequest(TypeId:%s, RequestHeader:%s, Parameters:%s)' % (self.TypeId, self.RequestHeader, self.Parameters)

    __repr__ = __str__
class RegisterNodesResult(FrozenClass):
    '''
    Result payload of the RegisterNodes service.

    :ivar RegisteredNodeIds: ids assigned to the registered nodes (ListOfNodeId)
    '''
    ua_types = [
        ('RegisteredNodeIds', 'ListOfNodeId'),
    ]

    def __init__(self):
        self.RegisteredNodeIds = []
        self._freeze = True

    def __str__(self):
        return 'RegisterNodesResult(RegisteredNodeIds:%s)' % (self.RegisteredNodeIds,)

    __repr__ = __str__
class RegisterNodesResponse(FrozenClass):
    '''
    Registers one or more nodes for repeated use within a session.

    :ivar TypeId: encoding node id (NodeId)
    :ivar ResponseHeader: common response header (ResponseHeader)
    :ivar Parameters: service result (RegisterNodesResult)
    '''
    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Parameters', 'RegisterNodesResult'),
    ]

    def __init__(self):
        # TypeId defaults to this type's default binary encoding id.
        self.TypeId = FourByteNodeId(ObjectIds.RegisterNodesResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Parameters = RegisterNodesResult()
        self._freeze = True

    def __str__(self):
        return 'RegisterNodesResponse(TypeId:%s, ResponseHeader:%s, Parameters:%s)' % (self.TypeId, self.ResponseHeader, self.Parameters)

    __repr__ = __str__
class UnregisterNodesParameters(FrozenClass):
    '''
    Parameters for the UnregisterNodes service.

    :ivar NodesToUnregister: node ids to unregister (ListOfNodeId)
    '''
    ua_types = [
        ('NodesToUnregister', 'ListOfNodeId'),
    ]

    def __init__(self):
        self.NodesToUnregister = []
        self._freeze = True

    def __str__(self):
        return 'UnregisterNodesParameters(NodesToUnregister:%s)' % (self.NodesToUnregister,)

    __repr__ = __str__
class UnregisterNodesRequest(FrozenClass):
    '''
    Unregisters one or more previously registered nodes.

    :ivar TypeId: encoding node id (NodeId)
    :ivar RequestHeader: common request header (RequestHeader)
    :ivar Parameters: service parameters (UnregisterNodesParameters)
    '''
    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'UnregisterNodesParameters'),
    ]

    def __init__(self):
        # TypeId defaults to this type's default binary encoding id.
        self.TypeId = FourByteNodeId(ObjectIds.UnregisterNodesRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = UnregisterNodesParameters()
        self._freeze = True

    def __str__(self):
        return 'UnregisterNodesRequest(TypeId:%s, RequestHeader:%s, Parameters:%s)' % (self.TypeId, self.RequestHeader, self.Parameters)

    __repr__ = __str__
class UnregisterNodesResponse(FrozenClass):
    '''
    Unregisters one or more previously registered nodes.

    :ivar TypeId: encoding node id (NodeId)
    :ivar ResponseHeader: common response header (ResponseHeader)
    '''
    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
    ]

    def __init__(self):
        # TypeId defaults to this type's default binary encoding id.
        self.TypeId = FourByteNodeId(ObjectIds.UnregisterNodesResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self._freeze = True

    def __str__(self):
        return 'UnregisterNodesResponse(TypeId:%s, ResponseHeader:%s)' % (self.TypeId, self.ResponseHeader)

    __repr__ = __str__
class EndpointConfiguration(FrozenClass):
    '''
    Transport-level limits and timeouts for an endpoint.

    :ivar OperationTimeout: Int32
    :ivar UseBinaryEncoding: Boolean
    :ivar MaxStringLength: Int32
    :ivar MaxByteStringLength: Int32
    :ivar MaxArrayLength: Int32
    :ivar MaxMessageSize: Int32
    :ivar MaxBufferSize: Int32
    :ivar ChannelLifetime: Int32
    :ivar SecurityTokenLifetime: Int32
    '''
    ua_types = [
        ('OperationTimeout', 'Int32'),
        ('UseBinaryEncoding', 'Boolean'),
        ('MaxStringLength', 'Int32'),
        ('MaxByteStringLength', 'Int32'),
        ('MaxArrayLength', 'Int32'),
        ('MaxMessageSize', 'Int32'),
        ('MaxBufferSize', 'Int32'),
        ('ChannelLifetime', 'Int32'),
        ('SecurityTokenLifetime', 'Int32'),
    ]

    def __init__(self):
        # All numeric limits default to 0; the wire encoding defaults to binary.
        self.OperationTimeout = 0
        self.UseBinaryEncoding = True
        self.MaxStringLength = 0
        self.MaxByteStringLength = 0
        self.MaxArrayLength = 0
        self.MaxMessageSize = 0
        self.MaxBufferSize = 0
        self.ChannelLifetime = 0
        self.SecurityTokenLifetime = 0
        self._freeze = True

    def __str__(self):
        return ('EndpointConfiguration(OperationTimeout:%s, UseBinaryEncoding:%s, MaxStringLength:%s, '
                'MaxByteStringLength:%s, MaxArrayLength:%s, MaxMessageSize:%s, MaxBufferSize:%s, '
                'ChannelLifetime:%s, SecurityTokenLifetime:%s)'
                % (self.OperationTimeout, self.UseBinaryEncoding, self.MaxStringLength,
                   self.MaxByteStringLength, self.MaxArrayLength, self.MaxMessageSize,
                   self.MaxBufferSize, self.ChannelLifetime, self.SecurityTokenLifetime))

    __repr__ = __str__
class QueryDataDescription(FrozenClass):
    '''
    Describes one attribute value to return from a query.

    :ivar RelativePath: path from the type to the node (RelativePath)
    :ivar AttributeId: attribute to read (UInt32)
    :ivar IndexRange: optional array index range (String)
    '''
    ua_types = [
        ('RelativePath', 'RelativePath'),
        ('AttributeId', 'UInt32'),
        ('IndexRange', 'String'),
    ]

    def __init__(self):
        self.RelativePath = RelativePath()
        self.AttributeId = 0
        self.IndexRange = None
        self._freeze = True

    def __str__(self):
        return 'QueryDataDescription(RelativePath:%s, AttributeId:%s, IndexRange:%s)' % (self.RelativePath, self.AttributeId, self.IndexRange)

    __repr__ = __str__
class NodeTypeDescription(FrozenClass):
    '''
    Selects a type definition and the data to return for its instances.

    :ivar TypeDefinitionNode: type definition node (ExpandedNodeId)
    :ivar IncludeSubTypes: also match subtypes (Boolean)
    :ivar DataToReturn: attribute selections (ListOfQueryDataDescription)
    '''
    ua_types = [
        ('TypeDefinitionNode', 'ExpandedNodeId'),
        ('IncludeSubTypes', 'Boolean'),
        ('DataToReturn', 'ListOfQueryDataDescription'),
    ]

    def __init__(self):
        self.TypeDefinitionNode = ExpandedNodeId()
        self.IncludeSubTypes = True
        self.DataToReturn = []
        self._freeze = True

    def __str__(self):
        return 'NodeTypeDescription(TypeDefinitionNode:%s, IncludeSubTypes:%s, DataToReturn:%s)' % (self.TypeDefinitionNode, self.IncludeSubTypes, self.DataToReturn)

    __repr__ = __str__
class QueryDataSet(FrozenClass):
    '''
    One row of query results for a matched node.

    :ivar NodeId: matched node (ExpandedNodeId)
    :ivar TypeDefinitionNode: matched type definition (ExpandedNodeId)
    :ivar Values: requested attribute values (ListOfVariant)
    '''
    ua_types = [
        ('NodeId', 'ExpandedNodeId'),
        ('TypeDefinitionNode', 'ExpandedNodeId'),
        ('Values', 'ListOfVariant'),
    ]

    def __init__(self):
        self.NodeId = ExpandedNodeId()
        self.TypeDefinitionNode = ExpandedNodeId()
        self.Values = []
        self._freeze = True

    def __str__(self):
        return 'QueryDataSet(NodeId:%s, TypeDefinitionNode:%s, Values:%s)' % (self.NodeId, self.TypeDefinitionNode, self.Values)

    __repr__ = __str__
class NodeReference(FrozenClass):
    '''
    A node together with references of one type pointing from it.

    :ivar NodeId: source node (NodeId)
    :ivar ReferenceTypeId: reference type (NodeId)
    :ivar IsForward: direction of the references (Boolean)
    :ivar ReferencedNodeIds: referenced nodes (ListOfNodeId)
    '''
    ua_types = [
        ('NodeId', 'NodeId'),
        ('ReferenceTypeId', 'NodeId'),
        ('IsForward', 'Boolean'),
        ('ReferencedNodeIds', 'ListOfNodeId'),
    ]

    def __init__(self):
        self.NodeId = NodeId()
        self.ReferenceTypeId = NodeId()
        self.IsForward = True
        self.ReferencedNodeIds = []
        self._freeze = True

    def __str__(self):
        return 'NodeReference(NodeId:%s, ReferenceTypeId:%s, IsForward:%s, ReferencedNodeIds:%s)' % (self.NodeId, self.ReferenceTypeId, self.IsForward, self.ReferencedNodeIds)

    __repr__ = __str__
class ContentFilterElement(FrozenClass):
    '''
    One operator and its operands inside a content filter.

    :ivar FilterOperator: operator to apply (FilterOperator)
    :ivar FilterOperands: operand extension objects (ListOfExtensionObject)
    '''
    ua_types = [
        ('FilterOperator', 'FilterOperator'),
        ('FilterOperands', 'ListOfExtensionObject'),
    ]

    def __init__(self):
        self.FilterOperator = FilterOperator(0)
        self.FilterOperands = []
        self._freeze = True

    def __str__(self):
        return 'ContentFilterElement(FilterOperator:%s, FilterOperands:%s)' % (self.FilterOperator, self.FilterOperands)

    __repr__ = __str__
class ContentFilter(FrozenClass):
    '''
    A filter expressed as a list of operator elements.

    :ivar Elements: filter elements (ListOfContentFilterElement)
    '''
    ua_types = [
        ('Elements', 'ListOfContentFilterElement'),
    ]

    def __init__(self):
        self.Elements = []
        self._freeze = True

    def __str__(self):
        return 'ContentFilter(Elements:%s)' % (self.Elements,)

    __repr__ = __str__
class ElementOperand(FrozenClass):
    '''
    Operand referencing another element of the same content filter by index.

    :ivar Index: index into the filter's Elements (UInt32)
    '''
    ua_types = [
        ('Index', 'UInt32'),
    ]

    def __init__(self):
        self.Index = 0
        self._freeze = True

    def __str__(self):
        return 'ElementOperand(Index:%s)' % (self.Index,)

    __repr__ = __str__
class LiteralOperand(FrozenClass):
    '''
    Operand carrying a literal value.

    :ivar Value: literal value (Variant)
    '''
    ua_types = [
        ('Value', 'Variant'),
    ]

    def __init__(self):
        self.Value = Variant()
        self._freeze = True

    def __str__(self):
        return 'LiteralOperand(Value:%s)' % (self.Value,)

    __repr__ = __str__
class AttributeOperand(FrozenClass):
    '''
    Operand selecting an attribute of a node reached via a browse path.

    :ivar NodeId: starting node (NodeId)
    :ivar Alias: optional alias for the operand (String)
    :ivar BrowsePath: path from the starting node (RelativePath)
    :ivar AttributeId: attribute to read (UInt32)
    :ivar IndexRange: optional array index range (String)
    '''
    ua_types = [
        ('NodeId', 'NodeId'),
        ('Alias', 'String'),
        ('BrowsePath', 'RelativePath'),
        ('AttributeId', 'UInt32'),
        ('IndexRange', 'String'),
    ]

    def __init__(self):
        self.NodeId = NodeId()
        self.Alias = None
        self.BrowsePath = RelativePath()
        self.AttributeId = 0
        self.IndexRange = None
        self._freeze = True

    def __str__(self):
        return 'AttributeOperand(NodeId:%s, Alias:%s, BrowsePath:%s, AttributeId:%s, IndexRange:%s)' % (self.NodeId, self.Alias, self.BrowsePath, self.AttributeId, self.IndexRange)

    __repr__ = __str__
class SimpleAttributeOperand(FrozenClass):
    '''
    Simplified attribute operand using a list of browse names.

    :ivar TypeDefinitionId: type definition node (NodeId)
    :ivar BrowsePath: browse names from the type (ListOfQualifiedName)
    :ivar AttributeId: attribute to read (UInt32)
    :ivar IndexRange: optional array index range (String)
    '''
    ua_types = [
        ('TypeDefinitionId', 'NodeId'),
        ('BrowsePath', 'ListOfQualifiedName'),
        ('AttributeId', 'UInt32'),
        ('IndexRange', 'String'),
    ]

    def __init__(self):
        self.TypeDefinitionId = NodeId()
        self.BrowsePath = []
        self.AttributeId = 0
        self.IndexRange = None
        self._freeze = True

    def __str__(self):
        return 'SimpleAttributeOperand(TypeDefinitionId:%s, BrowsePath:%s, AttributeId:%s, IndexRange:%s)' % (self.TypeDefinitionId, self.BrowsePath, self.AttributeId, self.IndexRange)

    __repr__ = __str__
class ContentFilterElementResult(FrozenClass):
    '''
    Validation result for one content filter element.

    :ivar StatusCode: element status (StatusCode)
    :ivar OperandStatusCodes: per-operand statuses (ListOfStatusCode)
    :ivar OperandDiagnosticInfos: per-operand diagnostics (ListOfDiagnosticInfo)
    '''
    ua_types = [
        ('StatusCode', 'StatusCode'),
        ('OperandStatusCodes', 'ListOfStatusCode'),
        ('OperandDiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]

    def __init__(self):
        self.StatusCode = StatusCode()
        self.OperandStatusCodes = []
        self.OperandDiagnosticInfos = []
        self._freeze = True

    def __str__(self):
        return 'ContentFilterElementResult(StatusCode:%s, OperandStatusCodes:%s, OperandDiagnosticInfos:%s)' % (self.StatusCode, self.OperandStatusCodes, self.OperandDiagnosticInfos)

    __repr__ = __str__
class ContentFilterResult(FrozenClass):
    '''
    Validation result for a whole content filter.

    :ivar ElementResults: per-element results (ListOfContentFilterElementResult)
    :ivar ElementDiagnosticInfos: per-element diagnostics (ListOfDiagnosticInfo)
    '''
    ua_types = [
        ('ElementResults', 'ListOfContentFilterElementResult'),
        ('ElementDiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]

    def __init__(self):
        self.ElementResults = []
        self.ElementDiagnosticInfos = []
        self._freeze = True

    def __str__(self):
        return 'ContentFilterResult(ElementResults:%s, ElementDiagnosticInfos:%s)' % (self.ElementResults, self.ElementDiagnosticInfos)

    __repr__ = __str__
class ParsingResult(FrozenClass):
    '''
    Parsing status for a node type description.

    :ivar StatusCode: overall status (StatusCode)
    :ivar DataStatusCodes: per-data statuses (ListOfStatusCode)
    :ivar DataDiagnosticInfos: per-data diagnostics (ListOfDiagnosticInfo)
    '''
    ua_types = [
        ('StatusCode', 'StatusCode'),
        ('DataStatusCodes', 'ListOfStatusCode'),
        ('DataDiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]

    def __init__(self):
        self.StatusCode = StatusCode()
        self.DataStatusCodes = []
        self.DataDiagnosticInfos = []
        self._freeze = True

    def __str__(self):
        return 'ParsingResult(StatusCode:%s, DataStatusCodes:%s, DataDiagnosticInfos:%s)' % (self.StatusCode, self.DataStatusCodes, self.DataDiagnosticInfos)

    __repr__ = __str__
class QueryFirstParameters(FrozenClass):
    '''
    Parameters for the QueryFirst service.

    :ivar View: view to query (ViewDescription)
    :ivar NodeTypes: node type selections (ListOfNodeTypeDescription)
    :ivar Filter: where-clause filter (ContentFilter)
    :ivar MaxDataSetsToReturn: result limit, 0 = no limit requested (UInt32)
    :ivar MaxReferencesToReturn: reference limit, 0 = no limit requested (UInt32)
    '''
    ua_types = [
        ('View', 'ViewDescription'),
        ('NodeTypes', 'ListOfNodeTypeDescription'),
        ('Filter', 'ContentFilter'),
        ('MaxDataSetsToReturn', 'UInt32'),
        ('MaxReferencesToReturn', 'UInt32'),
    ]

    def __init__(self):
        self.View = ViewDescription()
        self.NodeTypes = []
        self.Filter = ContentFilter()
        self.MaxDataSetsToReturn = 0
        self.MaxReferencesToReturn = 0
        self._freeze = True

    def __str__(self):
        return 'QueryFirstParameters(View:%s, NodeTypes:%s, Filter:%s, MaxDataSetsToReturn:%s, MaxReferencesToReturn:%s)' % (self.View, self.NodeTypes, self.Filter, self.MaxDataSetsToReturn, self.MaxReferencesToReturn)

    __repr__ = __str__
class QueryFirstRequest(FrozenClass):
    '''
    Begins a query of the server address space.

    :ivar TypeId: encoding node id (NodeId)
    :ivar RequestHeader: common request header (RequestHeader)
    :ivar Parameters: service parameters (QueryFirstParameters)
    '''
    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'QueryFirstParameters'),
    ]

    def __init__(self):
        # TypeId defaults to this type's default binary encoding id.
        self.TypeId = FourByteNodeId(ObjectIds.QueryFirstRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = QueryFirstParameters()
        self._freeze = True

    def __str__(self):
        return 'QueryFirstRequest(TypeId:%s, RequestHeader:%s, Parameters:%s)' % (self.TypeId, self.RequestHeader, self.Parameters)

    __repr__ = __str__
class QueryFirstResult(FrozenClass):
    '''
    Result payload of the QueryFirst service.

    :ivar QueryDataSets: matched rows (ListOfQueryDataSet)
    :ivar ContinuationPoint: opaque token for QueryNext (ByteString)
    :ivar ParsingResults: per-node-type parsing status (ListOfParsingResult)
    :ivar DiagnosticInfos: per-node-type diagnostics (ListOfDiagnosticInfo)
    :ivar FilterResult: filter validation result (ContentFilterResult)
    '''
    ua_types = [
        ('QueryDataSets', 'ListOfQueryDataSet'),
        ('ContinuationPoint', 'ByteString'),
        ('ParsingResults', 'ListOfParsingResult'),
        ('DiagnosticInfos', 'ListOfDiagnosticInfo'),
        ('FilterResult', 'ContentFilterResult'),
    ]

    def __init__(self):
        self.QueryDataSets = []
        self.ContinuationPoint = None
        self.ParsingResults = []
        self.DiagnosticInfos = []
        self.FilterResult = ContentFilterResult()
        self._freeze = True

    def __str__(self):
        return 'QueryFirstResult(QueryDataSets:%s, ContinuationPoint:%s, ParsingResults:%s, DiagnosticInfos:%s, FilterResult:%s)' % (self.QueryDataSets, self.ContinuationPoint, self.ParsingResults, self.DiagnosticInfos, self.FilterResult)

    __repr__ = __str__
class QueryFirstResponse(FrozenClass):
    '''
    Response of the QueryFirst service.

    :ivar TypeId: encoding node id (NodeId)
    :ivar ResponseHeader: common response header (ResponseHeader)
    :ivar Parameters: service result (QueryFirstResult)
    '''
    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Parameters', 'QueryFirstResult'),
    ]

    def __init__(self):
        # TypeId defaults to this type's default binary encoding id.
        self.TypeId = FourByteNodeId(ObjectIds.QueryFirstResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Parameters = QueryFirstResult()
        self._freeze = True

    def __str__(self):
        return 'QueryFirstResponse(TypeId:%s, ResponseHeader:%s, Parameters:%s)' % (self.TypeId, self.ResponseHeader, self.Parameters)

    __repr__ = __str__
class QueryNextParameters(FrozenClass):
    '''
    Parameters for the QueryNext service.

    :ivar ReleaseContinuationPoint: free the continuation point (Boolean)
    :ivar ContinuationPoint: token returned by QueryFirst (ByteString)
    '''
    ua_types = [
        ('ReleaseContinuationPoint', 'Boolean'),
        ('ContinuationPoint', 'ByteString'),
    ]

    def __init__(self):
        self.ReleaseContinuationPoint = True
        self.ContinuationPoint = None
        self._freeze = True

    def __str__(self):
        return 'QueryNextParameters(ReleaseContinuationPoint:%s, ContinuationPoint:%s)' % (self.ReleaseContinuationPoint, self.ContinuationPoint)

    __repr__ = __str__
class QueryNextRequest(FrozenClass):
    '''
    Continues a previously started query.

    :ivar TypeId: encoding node id (NodeId)
    :ivar RequestHeader: common request header (RequestHeader)
    :ivar Parameters: service parameters (QueryNextParameters)
    '''
    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'QueryNextParameters'),
    ]

    def __init__(self):
        # TypeId defaults to this type's default binary encoding id.
        self.TypeId = FourByteNodeId(ObjectIds.QueryNextRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = QueryNextParameters()
        self._freeze = True

    def __str__(self):
        return 'QueryNextRequest(TypeId:%s, RequestHeader:%s, Parameters:%s)' % (self.TypeId, self.RequestHeader, self.Parameters)

    __repr__ = __str__
class QueryNextResult(FrozenClass):
    '''
    Result payload of the QueryNext service.

    :ivar QueryDataSets: matched rows (ListOfQueryDataSet)
    :ivar RevisedContinuationPoint: token for a further QueryNext (ByteString)
    '''
    ua_types = [
        ('QueryDataSets', 'ListOfQueryDataSet'),
        ('RevisedContinuationPoint', 'ByteString'),
    ]

    def __init__(self):
        self.QueryDataSets = []
        self.RevisedContinuationPoint = None
        self._freeze = True

    def __str__(self):
        return 'QueryNextResult(QueryDataSets:%s, RevisedContinuationPoint:%s)' % (self.QueryDataSets, self.RevisedContinuationPoint)

    __repr__ = __str__
class QueryNextResponse(FrozenClass):
    '''
    Response of the QueryNext service.

    :ivar TypeId: encoding node id (NodeId)
    :ivar ResponseHeader: common response header (ResponseHeader)
    :ivar Parameters: service result (QueryNextResult)
    '''
    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Parameters', 'QueryNextResult'),
    ]

    def __init__(self):
        # TypeId defaults to this type's default binary encoding id.
        self.TypeId = FourByteNodeId(ObjectIds.QueryNextResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Parameters = QueryNextResult()
        self._freeze = True

    def __str__(self):
        return 'QueryNextResponse(TypeId:%s, ResponseHeader:%s, Parameters:%s)' % (self.TypeId, self.ResponseHeader, self.Parameters)

    __repr__ = __str__
class ReadValueId(FrozenClass):
    '''
    Identifies one attribute value to read.

    :ivar NodeId: node to read (NodeId)
    :ivar AttributeId: attribute to read (UInt32)
    :ivar IndexRange: optional array index range (String)
    :ivar DataEncoding: requested data encoding (QualifiedName)
    '''
    ua_types = [
        ('NodeId', 'NodeId'),
        ('AttributeId', 'UInt32'),
        ('IndexRange', 'String'),
        ('DataEncoding', 'QualifiedName'),
    ]

    def __init__(self):
        self.NodeId = NodeId()
        self.AttributeId = 0
        self.IndexRange = None
        self.DataEncoding = QualifiedName()
        self._freeze = True

    def __str__(self):
        return 'ReadValueId(NodeId:%s, AttributeId:%s, IndexRange:%s, DataEncoding:%s)' % (self.NodeId, self.AttributeId, self.IndexRange, self.DataEncoding)

    __repr__ = __str__
class ReadParameters(FrozenClass):
    '''
    Parameters for the Read service.

    :ivar MaxAge: max acceptable cache age in ms (Double)
    :ivar TimestampsToReturn: which timestamps to return (TimestampsToReturn)
    :ivar NodesToRead: attribute selections (ListOfReadValueId)
    '''
    ua_types = [
        ('MaxAge', 'Double'),
        ('TimestampsToReturn', 'TimestampsToReturn'),
        ('NodesToRead', 'ListOfReadValueId'),
    ]

    def __init__(self):
        self.MaxAge = 0
        self.TimestampsToReturn = TimestampsToReturn(0)
        self.NodesToRead = []
        self._freeze = True

    def __str__(self):
        return 'ReadParameters(MaxAge:%s, TimestampsToReturn:%s, NodesToRead:%s)' % (self.MaxAge, self.TimestampsToReturn, self.NodesToRead)

    __repr__ = __str__
class ReadRequest(FrozenClass):
    '''
    Reads one or more attribute values.

    :ivar TypeId: encoding node id (NodeId)
    :ivar RequestHeader: common request header (RequestHeader)
    :ivar Parameters: service parameters (ReadParameters)
    '''
    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'ReadParameters'),
    ]

    def __init__(self):
        # TypeId defaults to this type's default binary encoding id.
        self.TypeId = FourByteNodeId(ObjectIds.ReadRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = ReadParameters()
        self._freeze = True

    def __str__(self):
        return 'ReadRequest(TypeId:%s, RequestHeader:%s, Parameters:%s)' % (self.TypeId, self.RequestHeader, self.Parameters)

    __repr__ = __str__
class ReadResponse(FrozenClass):
    '''
    Response of the Read service.

    :ivar TypeId: encoding node id (NodeId)
    :ivar ResponseHeader: common response header (ResponseHeader)
    :ivar Results: per-item values (ListOfDataValue)
    :ivar DiagnosticInfos: per-item diagnostics (ListOfDiagnosticInfo)
    '''
    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Results', 'ListOfDataValue'),
        ('DiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]

    def __init__(self):
        # TypeId defaults to this type's default binary encoding id.
        self.TypeId = FourByteNodeId(ObjectIds.ReadResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True

    def __str__(self):
        return 'ReadResponse(TypeId:%s, ResponseHeader:%s, Results:%s, DiagnosticInfos:%s)' % (self.TypeId, self.ResponseHeader, self.Results, self.DiagnosticInfos)

    __repr__ = __str__
class HistoryReadValueId(FrozenClass):
    '''
    Identifies one node whose history is to be read.

    :ivar NodeId: node to read (NodeId)
    :ivar IndexRange: optional array index range (String)
    :ivar DataEncoding: requested data encoding (QualifiedName)
    :ivar ContinuationPoint: token from a previous HistoryRead (ByteString)
    '''
    ua_types = [
        ('NodeId', 'NodeId'),
        ('IndexRange', 'String'),
        ('DataEncoding', 'QualifiedName'),
        ('ContinuationPoint', 'ByteString'),
    ]

    def __init__(self):
        self.NodeId = NodeId()
        self.IndexRange = None
        self.DataEncoding = QualifiedName()
        self.ContinuationPoint = None
        self._freeze = True

    def __str__(self):
        return 'HistoryReadValueId(NodeId:%s, IndexRange:%s, DataEncoding:%s, ContinuationPoint:%s)' % (self.NodeId, self.IndexRange, self.DataEncoding, self.ContinuationPoint)

    __repr__ = __str__
class HistoryReadResult(FrozenClass):
    '''
    Result for one node of a HistoryRead call.

    :ivar StatusCode: operation status (StatusCode)
    :ivar ContinuationPoint: token for continuing the read (ByteString)
    :ivar HistoryData: returned history payload (ExtensionObject)
    '''
    ua_types = [
        ('StatusCode', 'StatusCode'),
        ('ContinuationPoint', 'ByteString'),
        ('HistoryData', 'ExtensionObject'),
    ]

    def __init__(self):
        self.StatusCode = StatusCode()
        self.ContinuationPoint = None
        self.HistoryData = ExtensionObject()
        self._freeze = True

    def __str__(self):
        return 'HistoryReadResult(StatusCode:%s, ContinuationPoint:%s, HistoryData:%s)' % (self.StatusCode, self.ContinuationPoint, self.HistoryData)

    __repr__ = __str__
class HistoryReadDetails(FrozenClass):
    '''
    Base structure for the history-read detail types; carries no fields.
    '''
    ua_types = [
    ]
    def __init__(self):
        self._freeze = True
    def __str__(self):
        # BUG FIX: the generated code was "'HistoryReadDetails(' + + ')'",
        # which applies unary '+' to the string ')' and raises TypeError
        # whenever str()/repr() is called. With no fields, just emit the name.
        return 'HistoryReadDetails()'
    __repr__ = __str__
class ReadEventDetails(FrozenClass):
    '''
    Selects historical events to read.

    :ivar NumValuesPerNode: max events per node, 0 = no limit requested (UInt32)
    :ivar StartTime: start of the time range (DateTime)
    :ivar EndTime: end of the time range (DateTime)
    :ivar Filter: event filter to apply (EventFilter)
    '''
    ua_types = [
        ('NumValuesPerNode', 'UInt32'),
        ('StartTime', 'DateTime'),
        ('EndTime', 'DateTime'),
        ('Filter', 'EventFilter'),
    ]

    def __init__(self):
        self.NumValuesPerNode = 0
        # Both bounds default to "now" at construction time.
        self.StartTime = datetime.utcnow()
        self.EndTime = datetime.utcnow()
        self.Filter = EventFilter()
        self._freeze = True

    def __str__(self):
        return 'ReadEventDetails(NumValuesPerNode:%s, StartTime:%s, EndTime:%s, Filter:%s)' % (self.NumValuesPerNode, self.StartTime, self.EndTime, self.Filter)

    __repr__ = __str__
class ReadRawModifiedDetails(FrozenClass):
    '''
    Selects raw or modified historical values to read.

    :ivar IsReadModified: read modified values instead of raw (Boolean)
    :ivar StartTime: start of the time range (DateTime)
    :ivar EndTime: end of the time range (DateTime)
    :ivar NumValuesPerNode: max values per node, 0 = no limit requested (UInt32)
    :ivar ReturnBounds: include bounding values (Boolean)
    '''
    ua_types = [
        ('IsReadModified', 'Boolean'),
        ('StartTime', 'DateTime'),
        ('EndTime', 'DateTime'),
        ('NumValuesPerNode', 'UInt32'),
        ('ReturnBounds', 'Boolean'),
    ]

    def __init__(self):
        self.IsReadModified = True
        # Both bounds default to "now" at construction time.
        self.StartTime = datetime.utcnow()
        self.EndTime = datetime.utcnow()
        self.NumValuesPerNode = 0
        self.ReturnBounds = True
        self._freeze = True

    def __str__(self):
        return 'ReadRawModifiedDetails(IsReadModified:%s, StartTime:%s, EndTime:%s, NumValuesPerNode:%s, ReturnBounds:%s)' % (self.IsReadModified, self.StartTime, self.EndTime, self.NumValuesPerNode, self.ReturnBounds)

    __repr__ = __str__
class ReadProcessedDetails(FrozenClass):
    """OPC UA ReadProcessedDetails: StartTime, EndTime, ProcessingInterval, AggregateType, AggregateConfiguration."""
    ua_types = [('StartTime', 'DateTime'), ('EndTime', 'DateTime'),
                ('ProcessingInterval', 'Double'), ('AggregateType', 'ListOfNodeId'),
                ('AggregateConfiguration', 'AggregateConfiguration')]

    def __init__(self):
        self.StartTime = datetime.utcnow()
        self.EndTime = datetime.utcnow()
        self.ProcessingInterval = 0
        self.AggregateType = []
        self.AggregateConfiguration = AggregateConfiguration()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'ReadProcessedDetails(%s)' % fields

    __repr__ = __str__
class ReadAtTimeDetails(FrozenClass):
    """OPC UA ReadAtTimeDetails: ReqTimes (list of DateTime), UseSimpleBounds."""
    ua_types = [('ReqTimes', 'ListOfDateTime'), ('UseSimpleBounds', 'Boolean')]

    def __init__(self):
        self.ReqTimes = []
        self.UseSimpleBounds = True
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'ReadAtTimeDetails(%s)' % fields

    __repr__ = __str__
class HistoryData(FrozenClass):
    """OPC UA HistoryData: DataValues (list of DataValue)."""
    ua_types = [('DataValues', 'ListOfDataValue')]

    def __init__(self):
        self.DataValues = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'HistoryData(%s)' % fields

    __repr__ = __str__
class ModificationInfo(FrozenClass):
    """OPC UA ModificationInfo: ModificationTime, UpdateType, UserName."""
    ua_types = [('ModificationTime', 'DateTime'),
                ('UpdateType', 'HistoryUpdateType'), ('UserName', 'String')]

    def __init__(self):
        self.ModificationTime = datetime.utcnow()
        self.UpdateType = HistoryUpdateType(0)
        self.UserName = None
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'ModificationInfo(%s)' % fields

    __repr__ = __str__
class HistoryModifiedData(FrozenClass):
    """OPC UA HistoryModifiedData: DataValues, ModificationInfos."""
    ua_types = [('DataValues', 'ListOfDataValue'),
                ('ModificationInfos', 'ListOfModificationInfo')]

    def __init__(self):
        self.DataValues = []
        self.ModificationInfos = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'HistoryModifiedData(%s)' % fields

    __repr__ = __str__
class HistoryEvent(FrozenClass):
    """OPC UA HistoryEvent: Events (list of HistoryEventFieldList)."""
    ua_types = [('Events', 'ListOfHistoryEventFieldList')]

    def __init__(self):
        self.Events = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'HistoryEvent(%s)' % fields

    __repr__ = __str__
class HistoryReadParameters(FrozenClass):
    """OPC UA HistoryReadParameters: HistoryReadDetails, TimestampsToReturn, ReleaseContinuationPoints, NodesToRead."""
    ua_types = [('HistoryReadDetails', 'ExtensionObject'),
                ('TimestampsToReturn', 'TimestampsToReturn'),
                ('ReleaseContinuationPoints', 'Boolean'),
                ('NodesToRead', 'ListOfHistoryReadValueId')]

    def __init__(self):
        self.HistoryReadDetails = ExtensionObject()
        self.TimestampsToReturn = TimestampsToReturn(0)
        self.ReleaseContinuationPoints = True
        self.NodesToRead = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'HistoryReadParameters(%s)' % fields

    __repr__ = __str__
class HistoryReadRequest(FrozenClass):
    """OPC UA HistoryReadRequest message: TypeId, RequestHeader, Parameters."""
    ua_types = [('TypeId', 'NodeId'), ('RequestHeader', 'RequestHeader'),
                ('Parameters', 'HistoryReadParameters')]

    def __init__(self):
        self.TypeId = FourByteNodeId(ObjectIds.HistoryReadRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = HistoryReadParameters()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'HistoryReadRequest(%s)' % fields

    __repr__ = __str__
class HistoryReadResponse(FrozenClass):
    """OPC UA HistoryReadResponse message: TypeId, ResponseHeader, Results, DiagnosticInfos."""
    ua_types = [('TypeId', 'NodeId'), ('ResponseHeader', 'ResponseHeader'),
                ('Results', 'ListOfHistoryReadResult'),
                ('DiagnosticInfos', 'ListOfDiagnosticInfo')]

    def __init__(self):
        self.TypeId = FourByteNodeId(ObjectIds.HistoryReadResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'HistoryReadResponse(%s)' % fields

    __repr__ = __str__
class WriteValue(FrozenClass):
    """OPC UA WriteValue: NodeId, AttributeId, IndexRange, Value."""
    ua_types = [('NodeId', 'NodeId'), ('AttributeId', 'UInt32'),
                ('IndexRange', 'String'), ('Value', 'DataValue')]

    def __init__(self):
        self.NodeId = NodeId()
        self.AttributeId = 0
        self.IndexRange = None
        self.Value = DataValue()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'WriteValue(%s)' % fields

    __repr__ = __str__
class WriteParameters(FrozenClass):
    """OPC UA WriteParameters: NodesToWrite (list of WriteValue)."""
    ua_types = [('NodesToWrite', 'ListOfWriteValue')]

    def __init__(self):
        self.NodesToWrite = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'WriteParameters(%s)' % fields

    __repr__ = __str__
class WriteRequest(FrozenClass):
    """OPC UA WriteRequest message: TypeId, RequestHeader, Parameters."""
    ua_types = [('TypeId', 'NodeId'), ('RequestHeader', 'RequestHeader'),
                ('Parameters', 'WriteParameters')]

    def __init__(self):
        self.TypeId = FourByteNodeId(ObjectIds.WriteRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = WriteParameters()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'WriteRequest(%s)' % fields

    __repr__ = __str__
class WriteResponse(FrozenClass):
    """OPC UA WriteResponse message: TypeId, ResponseHeader, Results, DiagnosticInfos."""
    ua_types = [('TypeId', 'NodeId'), ('ResponseHeader', 'ResponseHeader'),
                ('Results', 'ListOfStatusCode'),
                ('DiagnosticInfos', 'ListOfDiagnosticInfo')]

    def __init__(self):
        self.TypeId = FourByteNodeId(ObjectIds.WriteResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'WriteResponse(%s)' % fields

    __repr__ = __str__
class HistoryUpdateDetails(FrozenClass):
    """OPC UA HistoryUpdateDetails: NodeId."""
    ua_types = [('NodeId', 'NodeId')]

    def __init__(self):
        self.NodeId = NodeId()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'HistoryUpdateDetails(%s)' % fields

    __repr__ = __str__
class UpdateDataDetails(FrozenClass):
    """OPC UA UpdateDataDetails: NodeId, PerformInsertReplace, UpdateValues."""
    ua_types = [('NodeId', 'NodeId'),
                ('PerformInsertReplace', 'PerformUpdateType'),
                ('UpdateValues', 'ListOfDataValue')]

    def __init__(self):
        self.NodeId = NodeId()
        self.PerformInsertReplace = PerformUpdateType(0)
        self.UpdateValues = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'UpdateDataDetails(%s)' % fields

    __repr__ = __str__
class UpdateStructureDataDetails(FrozenClass):
    """OPC UA UpdateStructureDataDetails: NodeId, PerformInsertReplace, UpdateValues."""
    ua_types = [('NodeId', 'NodeId'),
                ('PerformInsertReplace', 'PerformUpdateType'),
                ('UpdateValues', 'ListOfDataValue')]

    def __init__(self):
        self.NodeId = NodeId()
        self.PerformInsertReplace = PerformUpdateType(0)
        self.UpdateValues = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'UpdateStructureDataDetails(%s)' % fields

    __repr__ = __str__
class UpdateEventDetails(FrozenClass):
    """OPC UA UpdateEventDetails: NodeId, PerformInsertReplace, Filter, EventData."""
    ua_types = [('NodeId', 'NodeId'),
                ('PerformInsertReplace', 'PerformUpdateType'),
                ('Filter', 'EventFilter'),
                ('EventData', 'ListOfHistoryEventFieldList')]

    def __init__(self):
        self.NodeId = NodeId()
        self.PerformInsertReplace = PerformUpdateType(0)
        self.Filter = EventFilter()
        self.EventData = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'UpdateEventDetails(%s)' % fields

    __repr__ = __str__
class DeleteRawModifiedDetails(FrozenClass):
    """OPC UA DeleteRawModifiedDetails: NodeId, IsDeleteModified, StartTime, EndTime."""
    ua_types = [('NodeId', 'NodeId'), ('IsDeleteModified', 'Boolean'),
                ('StartTime', 'DateTime'), ('EndTime', 'DateTime')]

    def __init__(self):
        self.NodeId = NodeId()
        self.IsDeleteModified = True
        self.StartTime = datetime.utcnow()
        self.EndTime = datetime.utcnow()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'DeleteRawModifiedDetails(%s)' % fields

    __repr__ = __str__
class DeleteAtTimeDetails(FrozenClass):
    """OPC UA DeleteAtTimeDetails: NodeId, ReqTimes."""
    ua_types = [('NodeId', 'NodeId'), ('ReqTimes', 'ListOfDateTime')]

    def __init__(self):
        self.NodeId = NodeId()
        self.ReqTimes = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'DeleteAtTimeDetails(%s)' % fields

    __repr__ = __str__
class DeleteEventDetails(FrozenClass):
    """OPC UA DeleteEventDetails: NodeId, EventIds."""
    ua_types = [('NodeId', 'NodeId'), ('EventIds', 'ListOfByteString')]

    def __init__(self):
        self.NodeId = NodeId()
        self.EventIds = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'DeleteEventDetails(%s)' % fields

    __repr__ = __str__
class HistoryUpdateResult(FrozenClass):
    """OPC UA HistoryUpdateResult: StatusCode, OperationResults, DiagnosticInfos."""
    ua_types = [('StatusCode', 'StatusCode'),
                ('OperationResults', 'ListOfStatusCode'),
                ('DiagnosticInfos', 'ListOfDiagnosticInfo')]

    def __init__(self):
        self.StatusCode = StatusCode()
        self.OperationResults = []
        self.DiagnosticInfos = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'HistoryUpdateResult(%s)' % fields

    __repr__ = __str__
class HistoryUpdateParameters(FrozenClass):
    """OPC UA HistoryUpdateParameters: HistoryUpdateDetails (list of ExtensionObject)."""
    ua_types = [('HistoryUpdateDetails', 'ListOfExtensionObject')]

    def __init__(self):
        self.HistoryUpdateDetails = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'HistoryUpdateParameters(%s)' % fields

    __repr__ = __str__
class HistoryUpdateRequest(FrozenClass):
    """OPC UA HistoryUpdateRequest message: TypeId, RequestHeader, Parameters."""
    ua_types = [('TypeId', 'NodeId'), ('RequestHeader', 'RequestHeader'),
                ('Parameters', 'HistoryUpdateParameters')]

    def __init__(self):
        self.TypeId = FourByteNodeId(ObjectIds.HistoryUpdateRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = HistoryUpdateParameters()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'HistoryUpdateRequest(%s)' % fields

    __repr__ = __str__
class HistoryUpdateResponse(FrozenClass):
    """OPC UA HistoryUpdateResponse message: TypeId, ResponseHeader, Results, DiagnosticInfos."""
    ua_types = [('TypeId', 'NodeId'), ('ResponseHeader', 'ResponseHeader'),
                ('Results', 'ListOfHistoryUpdateResult'),
                ('DiagnosticInfos', 'ListOfDiagnosticInfo')]

    def __init__(self):
        self.TypeId = FourByteNodeId(ObjectIds.HistoryUpdateResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'HistoryUpdateResponse(%s)' % fields

    __repr__ = __str__
class CallMethodRequest(FrozenClass):
    """OPC UA CallMethodRequest: ObjectId, MethodId, InputArguments."""
    ua_types = [('ObjectId', 'NodeId'), ('MethodId', 'NodeId'),
                ('InputArguments', 'ListOfVariant')]

    def __init__(self):
        self.ObjectId = NodeId()
        self.MethodId = NodeId()
        self.InputArguments = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'CallMethodRequest(%s)' % fields

    __repr__ = __str__
class CallMethodResult(FrozenClass):
    """OPC UA CallMethodResult: StatusCode, InputArgumentResults, InputArgumentDiagnosticInfos, OutputArguments."""
    ua_types = [('StatusCode', 'StatusCode'),
                ('InputArgumentResults', 'ListOfStatusCode'),
                ('InputArgumentDiagnosticInfos', 'ListOfDiagnosticInfo'),
                ('OutputArguments', 'ListOfVariant')]

    def __init__(self):
        self.StatusCode = StatusCode()
        self.InputArgumentResults = []
        self.InputArgumentDiagnosticInfos = []
        self.OutputArguments = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'CallMethodResult(%s)' % fields

    __repr__ = __str__
class CallParameters(FrozenClass):
    """OPC UA CallParameters: MethodsToCall (list of CallMethodRequest)."""
    ua_types = [('MethodsToCall', 'ListOfCallMethodRequest')]

    def __init__(self):
        self.MethodsToCall = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'CallParameters(%s)' % fields

    __repr__ = __str__
class CallRequest(FrozenClass):
    """OPC UA CallRequest message: TypeId, RequestHeader, Parameters."""
    ua_types = [('TypeId', 'NodeId'), ('RequestHeader', 'RequestHeader'),
                ('Parameters', 'CallParameters')]

    def __init__(self):
        self.TypeId = FourByteNodeId(ObjectIds.CallRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = CallParameters()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'CallRequest(%s)' % fields

    __repr__ = __str__
class CallResponse(FrozenClass):
    """OPC UA CallResponse message: TypeId, ResponseHeader, Results, DiagnosticInfos."""
    ua_types = [('TypeId', 'NodeId'), ('ResponseHeader', 'ResponseHeader'),
                ('Results', 'ListOfCallMethodResult'),
                ('DiagnosticInfos', 'ListOfDiagnosticInfo')]

    def __init__(self):
        self.TypeId = FourByteNodeId(ObjectIds.CallResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'CallResponse(%s)' % fields

    __repr__ = __str__
class MonitoringFilter(FrozenClass):
    """Abstract base for the OPC UA monitoring filter structures; carries no fields."""
    ua_types = []

    def __init__(self):
        self._freeze = True

    def __str__(self):
        # BUGFIX: generated body was "'MonitoringFilter(' + + ')'" — the
        # stray "+ +" applies unary plus to a str, raising
        # "TypeError: bad operand type for unary +" on every str()/repr().
        return 'MonitoringFilter()'

    __repr__ = __str__
class DataChangeFilter(FrozenClass):
    """OPC UA DataChangeFilter: Trigger, DeadbandType, DeadbandValue."""
    ua_types = [('Trigger', 'DataChangeTrigger'), ('DeadbandType', 'UInt32'),
                ('DeadbandValue', 'Double')]

    def __init__(self):
        self.Trigger = DataChangeTrigger(0)
        self.DeadbandType = 0
        self.DeadbandValue = 0
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'DataChangeFilter(%s)' % fields

    __repr__ = __str__
class EventFilter(FrozenClass):
    """OPC UA EventFilter: SelectClauses, WhereClause."""
    ua_types = [('SelectClauses', 'ListOfSimpleAttributeOperand'),
                ('WhereClause', 'ContentFilter')]

    def __init__(self):
        self.SelectClauses = []
        self.WhereClause = ContentFilter()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'EventFilter(%s)' % fields

    __repr__ = __str__
class AggregateConfiguration(FrozenClass):
    """OPC UA AggregateConfiguration: UseServerCapabilitiesDefaults, TreatUncertainAsBad, PercentDataBad, PercentDataGood, UseSlopedExtrapolation."""
    ua_types = [('UseServerCapabilitiesDefaults', 'Boolean'),
                ('TreatUncertainAsBad', 'Boolean'), ('PercentDataBad', 'Byte'),
                ('PercentDataGood', 'Byte'), ('UseSlopedExtrapolation', 'Boolean')]

    def __init__(self):
        self.UseServerCapabilitiesDefaults = True
        self.TreatUncertainAsBad = True
        self.PercentDataBad = 0
        self.PercentDataGood = 0
        self.UseSlopedExtrapolation = True
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'AggregateConfiguration(%s)' % fields

    __repr__ = __str__
class AggregateFilter(FrozenClass):
    """OPC UA AggregateFilter: StartTime, AggregateType, ProcessingInterval, AggregateConfiguration."""
    ua_types = [('StartTime', 'DateTime'), ('AggregateType', 'NodeId'),
                ('ProcessingInterval', 'Double'),
                ('AggregateConfiguration', 'AggregateConfiguration')]

    def __init__(self):
        self.StartTime = datetime.utcnow()
        self.AggregateType = NodeId()
        self.ProcessingInterval = 0
        self.AggregateConfiguration = AggregateConfiguration()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'AggregateFilter(%s)' % fields

    __repr__ = __str__
class MonitoringFilterResult(FrozenClass):
    """Abstract base for the OPC UA monitoring filter result structures; carries no fields."""
    ua_types = []

    def __init__(self):
        self._freeze = True

    def __str__(self):
        # BUGFIX: generated body was "'MonitoringFilterResult(' + + ')'" —
        # the stray "+ +" applies unary plus to a str, raising
        # "TypeError: bad operand type for unary +" on every str()/repr().
        return 'MonitoringFilterResult()'

    __repr__ = __str__
class EventFilterResult(FrozenClass):
    """OPC UA EventFilterResult: SelectClauseResults, SelectClauseDiagnosticInfos, WhereClauseResult."""
    ua_types = [('SelectClauseResults', 'ListOfStatusCode'),
                ('SelectClauseDiagnosticInfos', 'ListOfDiagnosticInfo'),
                ('WhereClauseResult', 'ContentFilterResult')]

    def __init__(self):
        self.SelectClauseResults = []
        self.SelectClauseDiagnosticInfos = []
        self.WhereClauseResult = ContentFilterResult()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'EventFilterResult(%s)' % fields

    __repr__ = __str__
class AggregateFilterResult(FrozenClass):
    """OPC UA AggregateFilterResult: RevisedStartTime, RevisedProcessingInterval, RevisedAggregateConfiguration."""
    ua_types = [('RevisedStartTime', 'DateTime'),
                ('RevisedProcessingInterval', 'Double'),
                ('RevisedAggregateConfiguration', 'AggregateConfiguration')]

    def __init__(self):
        self.RevisedStartTime = datetime.utcnow()
        self.RevisedProcessingInterval = 0
        self.RevisedAggregateConfiguration = AggregateConfiguration()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'AggregateFilterResult(%s)' % fields

    __repr__ = __str__
class MonitoringParameters(FrozenClass):
    """OPC UA MonitoringParameters: ClientHandle, SamplingInterval, Filter, QueueSize, DiscardOldest."""
    ua_types = [('ClientHandle', 'UInt32'), ('SamplingInterval', 'Double'),
                ('Filter', 'ExtensionObject'), ('QueueSize', 'UInt32'),
                ('DiscardOldest', 'Boolean')]

    def __init__(self):
        self.ClientHandle = 0
        self.SamplingInterval = 0
        self.Filter = ExtensionObject()
        self.QueueSize = 0
        self.DiscardOldest = True
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'MonitoringParameters(%s)' % fields

    __repr__ = __str__
class MonitoredItemCreateRequest(FrozenClass):
    """OPC UA MonitoredItemCreateRequest: ItemToMonitor, MonitoringMode, RequestedParameters."""
    ua_types = [('ItemToMonitor', 'ReadValueId'),
                ('MonitoringMode', 'MonitoringMode'),
                ('RequestedParameters', 'MonitoringParameters')]

    def __init__(self):
        self.ItemToMonitor = ReadValueId()
        self.MonitoringMode = MonitoringMode(0)
        self.RequestedParameters = MonitoringParameters()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'MonitoredItemCreateRequest(%s)' % fields

    __repr__ = __str__
class MonitoredItemCreateResult(FrozenClass):
    """OPC UA MonitoredItemCreateResult: StatusCode, MonitoredItemId, RevisedSamplingInterval, RevisedQueueSize, FilterResult."""
    ua_types = [('StatusCode', 'StatusCode'), ('MonitoredItemId', 'UInt32'),
                ('RevisedSamplingInterval', 'Double'),
                ('RevisedQueueSize', 'UInt32'), ('FilterResult', 'ExtensionObject')]

    def __init__(self):
        self.StatusCode = StatusCode()
        self.MonitoredItemId = 0
        self.RevisedSamplingInterval = 0
        self.RevisedQueueSize = 0
        self.FilterResult = ExtensionObject()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'MonitoredItemCreateResult(%s)' % fields

    __repr__ = __str__
class CreateMonitoredItemsParameters(FrozenClass):
    """OPC UA CreateMonitoredItemsParameters: SubscriptionId, TimestampsToReturn, ItemsToCreate."""
    ua_types = [('SubscriptionId', 'UInt32'),
                ('TimestampsToReturn', 'TimestampsToReturn'),
                ('ItemsToCreate', 'ListOfMonitoredItemCreateRequest')]

    def __init__(self):
        self.SubscriptionId = 0
        self.TimestampsToReturn = TimestampsToReturn(0)
        self.ItemsToCreate = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'CreateMonitoredItemsParameters(%s)' % fields

    __repr__ = __str__
class CreateMonitoredItemsRequest(FrozenClass):
    """OPC UA CreateMonitoredItemsRequest message: TypeId, RequestHeader, Parameters."""
    ua_types = [('TypeId', 'NodeId'), ('RequestHeader', 'RequestHeader'),
                ('Parameters', 'CreateMonitoredItemsParameters')]

    def __init__(self):
        self.TypeId = FourByteNodeId(ObjectIds.CreateMonitoredItemsRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = CreateMonitoredItemsParameters()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'CreateMonitoredItemsRequest(%s)' % fields

    __repr__ = __str__
class CreateMonitoredItemsResponse(FrozenClass):
    """OPC UA CreateMonitoredItemsResponse message: TypeId, ResponseHeader, Results, DiagnosticInfos."""
    ua_types = [('TypeId', 'NodeId'), ('ResponseHeader', 'ResponseHeader'),
                ('Results', 'ListOfMonitoredItemCreateResult'),
                ('DiagnosticInfos', 'ListOfDiagnosticInfo')]

    def __init__(self):
        self.TypeId = FourByteNodeId(ObjectIds.CreateMonitoredItemsResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'CreateMonitoredItemsResponse(%s)' % fields

    __repr__ = __str__
class MonitoredItemModifyRequest(FrozenClass):
    """OPC UA MonitoredItemModifyRequest: MonitoredItemId, RequestedParameters."""
    ua_types = [('MonitoredItemId', 'UInt32'),
                ('RequestedParameters', 'MonitoringParameters')]

    def __init__(self):
        self.MonitoredItemId = 0
        self.RequestedParameters = MonitoringParameters()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'MonitoredItemModifyRequest(%s)' % fields

    __repr__ = __str__
class MonitoredItemModifyResult(FrozenClass):
    """OPC UA MonitoredItemModifyResult: StatusCode, RevisedSamplingInterval, RevisedQueueSize, FilterResult."""
    ua_types = [('StatusCode', 'StatusCode'),
                ('RevisedSamplingInterval', 'Double'),
                ('RevisedQueueSize', 'UInt32'), ('FilterResult', 'ExtensionObject')]

    def __init__(self):
        self.StatusCode = StatusCode()
        self.RevisedSamplingInterval = 0
        self.RevisedQueueSize = 0
        self.FilterResult = ExtensionObject()
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'MonitoredItemModifyResult(%s)' % fields

    __repr__ = __str__
class ModifyMonitoredItemsParameters(FrozenClass):
    """OPC UA ModifyMonitoredItemsParameters: SubscriptionId, TimestampsToReturn, ItemsToModify."""
    ua_types = [('SubscriptionId', 'UInt32'),
                ('TimestampsToReturn', 'TimestampsToReturn'),
                ('ItemsToModify', 'ListOfMonitoredItemModifyRequest')]

    def __init__(self):
        self.SubscriptionId = 0
        self.TimestampsToReturn = TimestampsToReturn(0)
        self.ItemsToModify = []
        self._freeze = True

    def __str__(self):
        fields = ', '.join('%s:%s' % (n, getattr(self, n)) for n, _ in self.ua_types)
        return 'ModifyMonitoredItemsParameters(%s)' % fields

    __repr__ = __str__
class ModifyMonitoredItemsRequest(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: ModifyMonitoredItemsParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'ModifyMonitoredItemsParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.ModifyMonitoredItemsRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = ModifyMonitoredItemsParameters()
self._freeze = True
def __str__(self):
return 'ModifyMonitoredItemsRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class ModifyMonitoredItemsResponse(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Results:
:vartype Results: MonitoredItemModifyResult
:ivar DiagnosticInfos:
:vartype DiagnosticInfos: DiagnosticInfo
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('Results', 'ListOfMonitoredItemModifyResult'),
('DiagnosticInfos', 'ListOfDiagnosticInfo'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.ModifyMonitoredItemsResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.Results = []
self.DiagnosticInfos = []
self._freeze = True
def __str__(self):
return 'ModifyMonitoredItemsResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'Results:' + str(self.Results) + ', ' + \
'DiagnosticInfos:' + str(self.DiagnosticInfos) + ')'
__repr__ = __str__
class SetMonitoringModeParameters(FrozenClass):
    '''
    Body of a SetMonitoringMode service call: which subscription to touch,
    the monitoring mode to apply, and the monitored item ids it applies to.

    :ivar SubscriptionId:
    :vartype SubscriptionId: UInt32
    :ivar MonitoringMode:
    :vartype MonitoringMode: MonitoringMode
    :ivar MonitoredItemIds:
    :vartype MonitoredItemIds: UInt32
    '''
    ua_types = [
        ('SubscriptionId', 'UInt32'),
        ('MonitoringMode', 'MonitoringMode'),
        ('MonitoredItemIds', 'ListOfUInt32'),
    ]
    def __init__(self):
        self.MonitoredItemIds = []
        self.MonitoringMode = MonitoringMode(0)
        self.SubscriptionId = 0
        self._freeze = True  # kept last; FrozenClass presumably blocks new attrs after this
    def __str__(self):
        parts = [
            'SubscriptionId:' + str(self.SubscriptionId),
            'MonitoringMode:' + str(self.MonitoringMode),
            'MonitoredItemIds:' + str(self.MonitoredItemIds),
        ]
        return 'SetMonitoringModeParameters(' + ', '.join(parts) + ')'
    __repr__ = __str__
class SetMonitoringModeRequest(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: SetMonitoringModeParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'SetMonitoringModeParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.SetMonitoringModeRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = SetMonitoringModeParameters()
self._freeze = True
def __str__(self):
return 'SetMonitoringModeRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class SetMonitoringModeResult(FrozenClass):
    '''
    Result body of a SetMonitoringMode call: per-item StatusCodes (order
    presumably matches the request's MonitoredItemIds) plus optional
    per-item diagnostics.

    :ivar Results:
    :vartype Results: StatusCode
    :ivar DiagnosticInfos:
    :vartype DiagnosticInfos: DiagnosticInfo
    '''
    ua_types = [
        ('Results', 'ListOfStatusCode'),
        ('DiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]
    def __init__(self):
        self.Results = []
        self.DiagnosticInfos = []
        # set last: FrozenClass presumably rejects new attributes once frozen
        self._freeze = True
    def __str__(self):
        return 'SetMonitoringModeResult(' + 'Results:' + str(self.Results) + ', ' + \
               'DiagnosticInfos:' + str(self.DiagnosticInfos) + ')'
    __repr__ = __str__
class SetMonitoringModeResponse(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Parameters:
:vartype Parameters: SetMonitoringModeResult
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('Parameters', 'SetMonitoringModeResult'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.SetMonitoringModeResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.Parameters = SetMonitoringModeResult()
self._freeze = True
def __str__(self):
return 'SetMonitoringModeResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class SetTriggeringParameters(FrozenClass):
'''
:ivar SubscriptionId:
:vartype SubscriptionId: UInt32
:ivar TriggeringItemId:
:vartype TriggeringItemId: UInt32
:ivar LinksToAdd:
:vartype LinksToAdd: UInt32
:ivar LinksToRemove:
:vartype LinksToRemove: UInt32
'''
ua_types = [
('SubscriptionId', 'UInt32'),
('TriggeringItemId', 'UInt32'),
('LinksToAdd', 'ListOfUInt32'),
('LinksToRemove', 'ListOfUInt32'),
]
def __init__(self):
self.SubscriptionId = 0
self.TriggeringItemId = 0
self.LinksToAdd = []
self.LinksToRemove = []
self._freeze = True
def __str__(self):
return 'SetTriggeringParameters(' + 'SubscriptionId:' + str(self.SubscriptionId) + ', ' + \
'TriggeringItemId:' + str(self.TriggeringItemId) + ', ' + \
'LinksToAdd:' + str(self.LinksToAdd) + ', ' + \
'LinksToRemove:' + str(self.LinksToRemove) + ')'
__repr__ = __str__
class SetTriggeringRequest(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: SetTriggeringParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'SetTriggeringParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.SetTriggeringRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = SetTriggeringParameters()
self._freeze = True
def __str__(self):
return 'SetTriggeringRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class SetTriggeringResult(FrozenClass):
'''
:ivar AddResults:
:vartype AddResults: StatusCode
:ivar AddDiagnosticInfos:
:vartype AddDiagnosticInfos: DiagnosticInfo
:ivar RemoveResults:
:vartype RemoveResults: StatusCode
:ivar RemoveDiagnosticInfos:
:vartype RemoveDiagnosticInfos: DiagnosticInfo
'''
ua_types = [
('AddResults', 'ListOfStatusCode'),
('AddDiagnosticInfos', 'ListOfDiagnosticInfo'),
('RemoveResults', 'ListOfStatusCode'),
('RemoveDiagnosticInfos', 'ListOfDiagnosticInfo'),
]
def __init__(self):
self.AddResults = []
self.AddDiagnosticInfos = []
self.RemoveResults = []
self.RemoveDiagnosticInfos = []
self._freeze = True
def __str__(self):
return 'SetTriggeringResult(' + 'AddResults:' + str(self.AddResults) + ', ' + \
'AddDiagnosticInfos:' + str(self.AddDiagnosticInfos) + ', ' + \
'RemoveResults:' + str(self.RemoveResults) + ', ' + \
'RemoveDiagnosticInfos:' + str(self.RemoveDiagnosticInfos) + ')'
__repr__ = __str__
class SetTriggeringResponse(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Parameters:
:vartype Parameters: SetTriggeringResult
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('Parameters', 'SetTriggeringResult'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.SetTriggeringResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.Parameters = SetTriggeringResult()
self._freeze = True
def __str__(self):
return 'SetTriggeringResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class DeleteMonitoredItemsParameters(FrozenClass):
    '''
    Body of a DeleteMonitoredItems call: the target subscription and the ids
    of the monitored items to remove from it.

    :ivar SubscriptionId:
    :vartype SubscriptionId: UInt32
    :ivar MonitoredItemIds:
    :vartype MonitoredItemIds: UInt32
    '''
    ua_types = [
        ('SubscriptionId', 'UInt32'),
        ('MonitoredItemIds', 'ListOfUInt32'),
    ]
    def __init__(self):
        self.MonitoredItemIds = []
        self.SubscriptionId = 0
        self._freeze = True  # kept last; FrozenClass presumably blocks new attrs after this
    def __str__(self):
        body = ', '.join((
            'SubscriptionId:' + str(self.SubscriptionId),
            'MonitoredItemIds:' + str(self.MonitoredItemIds),
        ))
        return 'DeleteMonitoredItemsParameters(' + body + ')'
    __repr__ = __str__
class DeleteMonitoredItemsRequest(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: DeleteMonitoredItemsParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'DeleteMonitoredItemsParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.DeleteMonitoredItemsRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = DeleteMonitoredItemsParameters()
self._freeze = True
def __str__(self):
return 'DeleteMonitoredItemsRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class DeleteMonitoredItemsResponse(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Results:
:vartype Results: StatusCode
:ivar DiagnosticInfos:
:vartype DiagnosticInfos: DiagnosticInfo
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('Results', 'ListOfStatusCode'),
('DiagnosticInfos', 'ListOfDiagnosticInfo'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.DeleteMonitoredItemsResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.Results = []
self.DiagnosticInfos = []
self._freeze = True
def __str__(self):
return 'DeleteMonitoredItemsResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'Results:' + str(self.Results) + ', ' + \
'DiagnosticInfos:' + str(self.DiagnosticInfos) + ')'
__repr__ = __str__
class CreateSubscriptionParameters(FrozenClass):
    '''
    Body of a CreateSubscription call: the client's requested timing/limits;
    the server answers with revised values in CreateSubscriptionResult.

    :ivar RequestedPublishingInterval:
    :vartype RequestedPublishingInterval: Double
    :ivar RequestedLifetimeCount:
    :vartype RequestedLifetimeCount: UInt32
    :ivar RequestedMaxKeepAliveCount:
    :vartype RequestedMaxKeepAliveCount: UInt32
    :ivar MaxNotificationsPerPublish:
    :vartype MaxNotificationsPerPublish: UInt32
    :ivar PublishingEnabled:
    :vartype PublishingEnabled: Boolean
    :ivar Priority:
    :vartype Priority: Byte
    '''
    ua_types = [
        ('RequestedPublishingInterval', 'Double'),
        ('RequestedLifetimeCount', 'UInt32'),
        ('RequestedMaxKeepAliveCount', 'UInt32'),
        ('MaxNotificationsPerPublish', 'UInt32'),
        ('PublishingEnabled', 'Boolean'),
        ('Priority', 'Byte'),
    ]
    def __init__(self):
        self.RequestedPublishingInterval = 0
        self.RequestedLifetimeCount = 0
        self.RequestedMaxKeepAliveCount = 0
        self.MaxNotificationsPerPublish = 0
        # publishing defaults to enabled; all numeric fields default to 0
        self.PublishingEnabled = True
        self.Priority = 0
        # set last: FrozenClass presumably rejects new attributes once frozen
        self._freeze = True
    def __str__(self):
        return 'CreateSubscriptionParameters(' + 'RequestedPublishingInterval:' + str(self.RequestedPublishingInterval) + ', ' + \
               'RequestedLifetimeCount:' + str(self.RequestedLifetimeCount) + ', ' + \
               'RequestedMaxKeepAliveCount:' + str(self.RequestedMaxKeepAliveCount) + ', ' + \
               'MaxNotificationsPerPublish:' + str(self.MaxNotificationsPerPublish) + ', ' + \
               'PublishingEnabled:' + str(self.PublishingEnabled) + ', ' + \
               'Priority:' + str(self.Priority) + ')'
    __repr__ = __str__
class CreateSubscriptionRequest(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: CreateSubscriptionParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'CreateSubscriptionParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.CreateSubscriptionRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = CreateSubscriptionParameters()
self._freeze = True
def __str__(self):
return 'CreateSubscriptionRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class CreateSubscriptionResult(FrozenClass):
'''
:ivar SubscriptionId:
:vartype SubscriptionId: UInt32
:ivar RevisedPublishingInterval:
:vartype RevisedPublishingInterval: Double
:ivar RevisedLifetimeCount:
:vartype RevisedLifetimeCount: UInt32
:ivar RevisedMaxKeepAliveCount:
:vartype RevisedMaxKeepAliveCount: UInt32
'''
ua_types = [
('SubscriptionId', 'UInt32'),
('RevisedPublishingInterval', 'Double'),
('RevisedLifetimeCount', 'UInt32'),
('RevisedMaxKeepAliveCount', 'UInt32'),
]
def __init__(self):
self.SubscriptionId = 0
self.RevisedPublishingInterval = 0
self.RevisedLifetimeCount = 0
self.RevisedMaxKeepAliveCount = 0
self._freeze = True
def __str__(self):
return 'CreateSubscriptionResult(' + 'SubscriptionId:' + str(self.SubscriptionId) + ', ' + \
'RevisedPublishingInterval:' + str(self.RevisedPublishingInterval) + ', ' + \
'RevisedLifetimeCount:' + str(self.RevisedLifetimeCount) + ', ' + \
'RevisedMaxKeepAliveCount:' + str(self.RevisedMaxKeepAliveCount) + ')'
__repr__ = __str__
class CreateSubscriptionResponse(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Parameters:
:vartype Parameters: CreateSubscriptionResult
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('Parameters', 'CreateSubscriptionResult'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.CreateSubscriptionResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.Parameters = CreateSubscriptionResult()
self._freeze = True
def __str__(self):
return 'CreateSubscriptionResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class ModifySubscriptionParameters(FrozenClass):
'''
:ivar SubscriptionId:
:vartype SubscriptionId: UInt32
:ivar RequestedPublishingInterval:
:vartype RequestedPublishingInterval: Double
:ivar RequestedLifetimeCount:
:vartype RequestedLifetimeCount: UInt32
:ivar RequestedMaxKeepAliveCount:
:vartype RequestedMaxKeepAliveCount: UInt32
:ivar MaxNotificationsPerPublish:
:vartype MaxNotificationsPerPublish: UInt32
:ivar Priority:
:vartype Priority: Byte
'''
ua_types = [
('SubscriptionId', 'UInt32'),
('RequestedPublishingInterval', 'Double'),
('RequestedLifetimeCount', 'UInt32'),
('RequestedMaxKeepAliveCount', 'UInt32'),
('MaxNotificationsPerPublish', 'UInt32'),
('Priority', 'Byte'),
]
def __init__(self):
self.SubscriptionId = 0
self.RequestedPublishingInterval = 0
self.RequestedLifetimeCount = 0
self.RequestedMaxKeepAliveCount = 0
self.MaxNotificationsPerPublish = 0
self.Priority = 0
self._freeze = True
def __str__(self):
return 'ModifySubscriptionParameters(' + 'SubscriptionId:' + str(self.SubscriptionId) + ', ' + \
'RequestedPublishingInterval:' + str(self.RequestedPublishingInterval) + ', ' + \
'RequestedLifetimeCount:' + str(self.RequestedLifetimeCount) + ', ' + \
'RequestedMaxKeepAliveCount:' + str(self.RequestedMaxKeepAliveCount) + ', ' + \
'MaxNotificationsPerPublish:' + str(self.MaxNotificationsPerPublish) + ', ' + \
'Priority:' + str(self.Priority) + ')'
__repr__ = __str__
class ModifySubscriptionRequest(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: ModifySubscriptionParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'ModifySubscriptionParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.ModifySubscriptionRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = ModifySubscriptionParameters()
self._freeze = True
def __str__(self):
return 'ModifySubscriptionRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class ModifySubscriptionResult(FrozenClass):
'''
:ivar RevisedPublishingInterval:
:vartype RevisedPublishingInterval: Double
:ivar RevisedLifetimeCount:
:vartype RevisedLifetimeCount: UInt32
:ivar RevisedMaxKeepAliveCount:
:vartype RevisedMaxKeepAliveCount: UInt32
'''
ua_types = [
('RevisedPublishingInterval', 'Double'),
('RevisedLifetimeCount', 'UInt32'),
('RevisedMaxKeepAliveCount', 'UInt32'),
]
def __init__(self):
self.RevisedPublishingInterval = 0
self.RevisedLifetimeCount = 0
self.RevisedMaxKeepAliveCount = 0
self._freeze = True
def __str__(self):
return 'ModifySubscriptionResult(' + 'RevisedPublishingInterval:' + str(self.RevisedPublishingInterval) + ', ' + \
'RevisedLifetimeCount:' + str(self.RevisedLifetimeCount) + ', ' + \
'RevisedMaxKeepAliveCount:' + str(self.RevisedMaxKeepAliveCount) + ')'
__repr__ = __str__
class ModifySubscriptionResponse(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Parameters:
:vartype Parameters: ModifySubscriptionResult
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('Parameters', 'ModifySubscriptionResult'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.ModifySubscriptionResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.Parameters = ModifySubscriptionResult()
self._freeze = True
def __str__(self):
return 'ModifySubscriptionResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class SetPublishingModeParameters(FrozenClass):
    '''
    Body of a SetPublishingMode call: enables or disables publishing on each
    of the listed subscriptions.

    :ivar PublishingEnabled:
    :vartype PublishingEnabled: Boolean
    :ivar SubscriptionIds:
    :vartype SubscriptionIds: UInt32
    '''
    ua_types = [
        ('PublishingEnabled', 'Boolean'),
        ('SubscriptionIds', 'ListOfUInt32'),
    ]
    def __init__(self):
        self.SubscriptionIds = []
        self.PublishingEnabled = True
        self._freeze = True  # kept last; FrozenClass presumably blocks new attrs after this
    def __str__(self):
        rendered = ', '.join([
            'PublishingEnabled:' + str(self.PublishingEnabled),
            'SubscriptionIds:' + str(self.SubscriptionIds),
        ])
        return 'SetPublishingModeParameters(' + rendered + ')'
    __repr__ = __str__
class SetPublishingModeRequest(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: SetPublishingModeParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'SetPublishingModeParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.SetPublishingModeRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = SetPublishingModeParameters()
self._freeze = True
def __str__(self):
return 'SetPublishingModeRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class SetPublishingModeResult(FrozenClass):
'''
:ivar Results:
:vartype Results: StatusCode
:ivar DiagnosticInfos:
:vartype DiagnosticInfos: DiagnosticInfo
'''
ua_types = [
('Results', 'ListOfStatusCode'),
('DiagnosticInfos', 'ListOfDiagnosticInfo'),
]
def __init__(self):
self.Results = []
self.DiagnosticInfos = []
self._freeze = True
def __str__(self):
return 'SetPublishingModeResult(' + 'Results:' + str(self.Results) + ', ' + \
'DiagnosticInfos:' + str(self.DiagnosticInfos) + ')'
__repr__ = __str__
class SetPublishingModeResponse(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Parameters:
:vartype Parameters: SetPublishingModeResult
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('Parameters', 'SetPublishingModeResult'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.SetPublishingModeResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.Parameters = SetPublishingModeResult()
self._freeze = True
def __str__(self):
return 'SetPublishingModeResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class NotificationMessage(FrozenClass):
    '''
    A message carried by Publish/Republish responses: a sequence number, the
    publish timestamp and a list of notification payloads.

    :ivar SequenceNumber:
    :vartype SequenceNumber: UInt32
    :ivar PublishTime:
    :vartype PublishTime: DateTime
    :ivar NotificationData:
    :vartype NotificationData: ExtensionObject
    '''
    ua_types = [
        ('SequenceNumber', 'UInt32'),
        ('PublishTime', 'DateTime'),
        ('NotificationData', 'ListOfExtensionObject'),
    ]
    def __init__(self):
        self.SequenceNumber = 0
        # NOTE(review): naive UTC timestamp -- datetime.utcnow() carries no
        # tzinfo; presumably overwritten with the real publish time by callers
        self.PublishTime = datetime.utcnow()
        self.NotificationData = []
        # set last: FrozenClass presumably rejects new attributes once frozen
        self._freeze = True
    def __str__(self):
        return 'NotificationMessage(' + 'SequenceNumber:' + str(self.SequenceNumber) + ', ' + \
               'PublishTime:' + str(self.PublishTime) + ', ' + \
               'NotificationData:' + str(self.NotificationData) + ')'
    __repr__ = __str__
class NotificationData(FrozenClass):
    '''
    Base payload type for NotificationMessage.NotificationData entries; it
    carries no fields of its own.
    '''
    ua_types = [
    ]
    def __init__(self):
        self._freeze = True
    def __str__(self):
        # Bug fix: the generated code read "return 'NotificationData(' + + ')'",
        # which applies unary '+' to the string ')' and raises
        # "TypeError: bad operand type for unary +: 'str'" whenever the object
        # is printed or repr()'d. Return the literal directly instead.
        return 'NotificationData()'
    __repr__ = __str__
class DataChangeNotification(FrozenClass):
    '''
    Notification payload carrying data-change samples: one
    MonitoredItemNotification per changed item, plus optional diagnostics.

    :ivar MonitoredItems:
    :vartype MonitoredItems: MonitoredItemNotification
    :ivar DiagnosticInfos:
    :vartype DiagnosticInfos: DiagnosticInfo
    '''
    ua_types = [
        ('MonitoredItems', 'ListOfMonitoredItemNotification'),
        ('DiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]
    def __init__(self):
        self.MonitoredItems = []
        self.DiagnosticInfos = []
        # set last: FrozenClass presumably rejects new attributes once frozen
        self._freeze = True
    def __str__(self):
        return 'DataChangeNotification(' + 'MonitoredItems:' + str(self.MonitoredItems) + ', ' + \
               'DiagnosticInfos:' + str(self.DiagnosticInfos) + ')'
    __repr__ = __str__
class MonitoredItemNotification(FrozenClass):
    '''
    One data-change sample: the client-side handle of the monitored item and
    the sampled DataValue.

    :ivar ClientHandle:
    :vartype ClientHandle: UInt32
    :ivar Value:
    :vartype Value: DataValue
    '''
    ua_types = [
        ('ClientHandle', 'UInt32'),
        ('Value', 'DataValue'),
    ]
    def __init__(self):
        self.Value = DataValue()
        self.ClientHandle = 0
        self._freeze = True  # kept last; FrozenClass presumably blocks new attrs after this
    def __str__(self):
        pieces = (
            'ClientHandle:' + str(self.ClientHandle),
            'Value:' + str(self.Value),
        )
        return 'MonitoredItemNotification(' + ', '.join(pieces) + ')'
    __repr__ = __str__
class EventNotificationList(FrozenClass):
    '''
    Notification payload carrying event notifications, one EventFieldList
    per fired event.

    :ivar Events:
    :vartype Events: EventFieldList
    '''
    ua_types = [
        ('Events', 'ListOfEventFieldList'),
    ]
    def __init__(self):
        self.Events = []
        # set last: FrozenClass presumably rejects new attributes once frozen
        self._freeze = True
    def __str__(self):
        return 'EventNotificationList(' + 'Events:' + str(self.Events) + ')'
    __repr__ = __str__
class EventFieldList(FrozenClass):
'''
:ivar ClientHandle:
:vartype ClientHandle: UInt32
:ivar EventFields:
:vartype EventFields: Variant
'''
ua_types = [
('ClientHandle', 'UInt32'),
('EventFields', 'ListOfVariant'),
]
def __init__(self):
self.ClientHandle = 0
self.EventFields = []
self._freeze = True
def __str__(self):
return 'EventFieldList(' + 'ClientHandle:' + str(self.ClientHandle) + ', ' + \
'EventFields:' + str(self.EventFields) + ')'
__repr__ = __str__
class HistoryEventFieldList(FrozenClass):
    '''
    One historical event row: the list of selected event field values.

    :ivar EventFields:
    :vartype EventFields: Variant
    '''
    ua_types = [
        ('EventFields', 'ListOfVariant'),
    ]
    def __init__(self):
        self.EventFields = []
        self._freeze = True  # kept last; FrozenClass presumably blocks new attrs after this
    def __str__(self):
        rendered = 'EventFields:' + str(self.EventFields)
        return 'HistoryEventFieldList(' + rendered + ')'
    __repr__ = __str__
class StatusChangeNotification(FrozenClass):
'''
:ivar Status:
:vartype Status: StatusCode
:ivar DiagnosticInfo:
:vartype DiagnosticInfo: DiagnosticInfo
'''
ua_types = [
('Status', 'StatusCode'),
('DiagnosticInfo', 'DiagnosticInfo'),
]
def __init__(self):
self.Status = StatusCode()
self.DiagnosticInfo = DiagnosticInfo()
self._freeze = True
def __str__(self):
return 'StatusChangeNotification(' + 'Status:' + str(self.Status) + ', ' + \
'DiagnosticInfo:' + str(self.DiagnosticInfo) + ')'
__repr__ = __str__
class SubscriptionAcknowledgement(FrozenClass):
    '''
    Client acknowledgement of one received notification message, identified
    by subscription id and sequence number.

    :ivar SubscriptionId:
    :vartype SubscriptionId: UInt32
    :ivar SequenceNumber:
    :vartype SequenceNumber: UInt32
    '''
    ua_types = [
        ('SubscriptionId', 'UInt32'),
        ('SequenceNumber', 'UInt32'),
    ]
    def __init__(self):
        self.SequenceNumber = 0
        self.SubscriptionId = 0
        self._freeze = True  # kept last; FrozenClass presumably blocks new attrs after this
    def __str__(self):
        fields = [
            'SubscriptionId:' + str(self.SubscriptionId),
            'SequenceNumber:' + str(self.SequenceNumber),
        ]
        return 'SubscriptionAcknowledgement(' + ', '.join(fields) + ')'
    __repr__ = __str__
class PublishParameters(FrozenClass):
    '''
    Body of a Publish call: the acknowledgements for notification messages
    the client has already received.

    :ivar SubscriptionAcknowledgements:
    :vartype SubscriptionAcknowledgements: SubscriptionAcknowledgement
    '''
    ua_types = [
        ('SubscriptionAcknowledgements', 'ListOfSubscriptionAcknowledgement'),
    ]
    def __init__(self):
        self.SubscriptionAcknowledgements = []
        # set last: FrozenClass presumably rejects new attributes once frozen
        self._freeze = True
    def __str__(self):
        return 'PublishParameters(' + 'SubscriptionAcknowledgements:' + str(self.SubscriptionAcknowledgements) + ')'
    __repr__ = __str__
class PublishRequest(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: PublishParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'PublishParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.PublishRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = PublishParameters()
self._freeze = True
def __str__(self):
return 'PublishRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class PublishResult(FrozenClass):
'''
:ivar SubscriptionId:
:vartype SubscriptionId: UInt32
:ivar AvailableSequenceNumbers:
:vartype AvailableSequenceNumbers: UInt32
:ivar MoreNotifications:
:vartype MoreNotifications: Boolean
:ivar NotificationMessage:
:vartype NotificationMessage: NotificationMessage
:ivar Results:
:vartype Results: StatusCode
:ivar DiagnosticInfos:
:vartype DiagnosticInfos: DiagnosticInfo
'''
ua_types = [
('SubscriptionId', 'UInt32'),
('AvailableSequenceNumbers', 'ListOfUInt32'),
('MoreNotifications', 'Boolean'),
('NotificationMessage', 'NotificationMessage'),
('Results', 'ListOfStatusCode'),
('DiagnosticInfos', 'ListOfDiagnosticInfo'),
]
def __init__(self):
self.SubscriptionId = 0
self.AvailableSequenceNumbers = []
self.MoreNotifications = True
self.NotificationMessage = NotificationMessage()
self.Results = []
self.DiagnosticInfos = []
self._freeze = True
def __str__(self):
return 'PublishResult(' + 'SubscriptionId:' + str(self.SubscriptionId) + ', ' + \
'AvailableSequenceNumbers:' + str(self.AvailableSequenceNumbers) + ', ' + \
'MoreNotifications:' + str(self.MoreNotifications) + ', ' + \
'NotificationMessage:' + str(self.NotificationMessage) + ', ' + \
'Results:' + str(self.Results) + ', ' + \
'DiagnosticInfos:' + str(self.DiagnosticInfos) + ')'
__repr__ = __str__
class PublishResponse(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Parameters:
:vartype Parameters: PublishResult
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('Parameters', 'PublishResult'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.PublishResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.Parameters = PublishResult()
self._freeze = True
def __str__(self):
return 'PublishResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class RepublishParameters(FrozenClass):
    '''
    Body of a Republish call: asks the server to resend the notification
    message with the given sequence number for the given subscription.

    :ivar SubscriptionId:
    :vartype SubscriptionId: UInt32
    :ivar RetransmitSequenceNumber:
    :vartype RetransmitSequenceNumber: UInt32
    '''
    ua_types = [
        ('SubscriptionId', 'UInt32'),
        ('RetransmitSequenceNumber', 'UInt32'),
    ]
    def __init__(self):
        self.RetransmitSequenceNumber = 0
        self.SubscriptionId = 0
        self._freeze = True  # kept last; FrozenClass presumably blocks new attrs after this
    def __str__(self):
        text = 'SubscriptionId:' + str(self.SubscriptionId)
        text += ', ' + 'RetransmitSequenceNumber:' + str(self.RetransmitSequenceNumber)
        return 'RepublishParameters(' + text + ')'
    __repr__ = __str__
class RepublishRequest(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: RepublishParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'RepublishParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.RepublishRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = RepublishParameters()
self._freeze = True
def __str__(self):
return 'RepublishRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class RepublishResponse(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar NotificationMessage:
:vartype NotificationMessage: NotificationMessage
'''
ua_types = [
('TypeId', 'NodeId'),
('ResponseHeader', 'ResponseHeader'),
('NotificationMessage', 'NotificationMessage'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.RepublishResponse_Encoding_DefaultBinary)
self.ResponseHeader = ResponseHeader()
self.NotificationMessage = NotificationMessage()
self._freeze = True
def __str__(self):
return 'RepublishResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'ResponseHeader:' + str(self.ResponseHeader) + ', ' + \
'NotificationMessage:' + str(self.NotificationMessage) + ')'
__repr__ = __str__
class TransferResult(FrozenClass):
    '''
    Per-subscription outcome of a TransferSubscriptions call: the transfer
    StatusCode and the sequence numbers still available for Republish.

    :ivar StatusCode:
    :vartype StatusCode: StatusCode
    :ivar AvailableSequenceNumbers:
    :vartype AvailableSequenceNumbers: UInt32
    '''
    ua_types = [
        ('StatusCode', 'StatusCode'),
        ('AvailableSequenceNumbers', 'ListOfUInt32'),
    ]
    def __init__(self):
        self.StatusCode = StatusCode()
        self.AvailableSequenceNumbers = []
        # set last: FrozenClass presumably rejects new attributes once frozen
        self._freeze = True
    def __str__(self):
        return 'TransferResult(' + 'StatusCode:' + str(self.StatusCode) + ', ' + \
               'AvailableSequenceNumbers:' + str(self.AvailableSequenceNumbers) + ')'
    __repr__ = __str__
class TransferSubscriptionsParameters(FrozenClass):
'''
:ivar SubscriptionIds:
:vartype SubscriptionIds: UInt32
:ivar SendInitialValues:
:vartype SendInitialValues: Boolean
'''
ua_types = [
('SubscriptionIds', 'ListOfUInt32'),
('SendInitialValues', 'Boolean'),
]
def __init__(self):
self.SubscriptionIds = []
self.SendInitialValues = True
self._freeze = True
def __str__(self):
return 'TransferSubscriptionsParameters(' + 'SubscriptionIds:' + str(self.SubscriptionIds) + ', ' + \
'SendInitialValues:' + str(self.SendInitialValues) + ')'
__repr__ = __str__
class TransferSubscriptionsRequest(FrozenClass):
'''
:ivar TypeId:
:vartype TypeId: NodeId
:ivar RequestHeader:
:vartype RequestHeader: RequestHeader
:ivar Parameters:
:vartype Parameters: TransferSubscriptionsParameters
'''
ua_types = [
('TypeId', 'NodeId'),
('RequestHeader', 'RequestHeader'),
('Parameters', 'TransferSubscriptionsParameters'),
]
def __init__(self):
self.TypeId = FourByteNodeId(ObjectIds.TransferSubscriptionsRequest_Encoding_DefaultBinary)
self.RequestHeader = RequestHeader()
self.Parameters = TransferSubscriptionsParameters()
self._freeze = True
def __str__(self):
return 'TransferSubscriptionsRequest(' + 'TypeId:' + str(self.TypeId) + ', ' + \
'RequestHeader:' + str(self.RequestHeader) + ', ' + \
'Parameters:' + str(self.Parameters) + ')'
__repr__ = __str__
class TransferSubscriptionsResult(FrozenClass):
'''
:ivar Results:
:vartype Results: TransferResult
:ivar DiagnosticInfos:
:vartype DiagnosticInfos: DiagnosticInfo
'''
ua_types = [
('Results', 'ListOfTransferResult'),
('DiagnosticInfos', 'ListOfDiagnosticInfo'),
]
def __init__(self):
self.Results = []
self.DiagnosticInfos = []
self._freeze = True
def __str__(self):
return 'TransferSubscriptionsResult(' + 'Results:' + str(self.Results) + ', ' + \
'DiagnosticInfos:' + str(self.DiagnosticInfos) + ')'
__repr__ = __str__
class TransferSubscriptionsResponse(FrozenClass):
    """Response message for the TransferSubscriptions service.

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Parameters', 'TransferSubscriptionsResult'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.TypeId = FourByteNodeId(ObjectIds.TransferSubscriptionsResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Parameters = TransferSubscriptionsResult()
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'TransferSubscriptionsResponse(' + body + ')'

    __repr__ = __str__
class DeleteSubscriptionsParameters(FrozenClass):
    """Service parameters for DeleteSubscriptions.

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('SubscriptionIds', 'ListOfUInt32'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.SubscriptionIds = []
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'DeleteSubscriptionsParameters(' + body + ')'

    __repr__ = __str__
class DeleteSubscriptionsRequest(FrozenClass):
    """Request message for the DeleteSubscriptions service.

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('TypeId', 'NodeId'),
        ('RequestHeader', 'RequestHeader'),
        ('Parameters', 'DeleteSubscriptionsParameters'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.TypeId = FourByteNodeId(ObjectIds.DeleteSubscriptionsRequest_Encoding_DefaultBinary)
        self.RequestHeader = RequestHeader()
        self.Parameters = DeleteSubscriptionsParameters()
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'DeleteSubscriptionsRequest(' + body + ')'

    __repr__ = __str__
class DeleteSubscriptionsResponse(FrozenClass):
    """Response message for the DeleteSubscriptions service.

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('TypeId', 'NodeId'),
        ('ResponseHeader', 'ResponseHeader'),
        ('Results', 'ListOfStatusCode'),
        ('DiagnosticInfos', 'ListOfDiagnosticInfo'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.TypeId = FourByteNodeId(ObjectIds.DeleteSubscriptionsResponse_Encoding_DefaultBinary)
        self.ResponseHeader = ResponseHeader()
        self.Results = []
        self.DiagnosticInfos = []
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'DeleteSubscriptionsResponse(' + body + ')'

    __repr__ = __str__
class BuildInfo(FrozenClass):
    """Server build and version information (product, vendor, version, build date).

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('ProductUri', 'String'),
        ('ManufacturerName', 'String'),
        ('ProductName', 'String'),
        ('SoftwareVersion', 'String'),
        ('BuildNumber', 'String'),
        ('BuildDate', 'DateTime'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.ProductUri = None
        self.ManufacturerName = None
        self.ProductName = None
        self.SoftwareVersion = None
        self.BuildNumber = None
        self.BuildDate = datetime.utcnow()  # naive UTC timestamp (generator convention)
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'BuildInfo(' + body + ')'

    __repr__ = __str__
class RedundantServerDataType(FrozenClass):
    """Describes one server in a redundant server set (id, service level, state).

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('ServerId', 'String'),
        ('ServiceLevel', 'Byte'),
        ('ServerState', 'ServerState'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.ServerId = None
        self.ServiceLevel = 0
        self.ServerState = ServerState(0)
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'RedundantServerDataType(' + body + ')'

    __repr__ = __str__
class EndpointUrlListDataType(FrozenClass):
    """A list of endpoint URLs.

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('EndpointUrlList', 'ListOfString'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.EndpointUrlList = []
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'EndpointUrlListDataType(' + body + ')'

    __repr__ = __str__
class NetworkGroupDataType(FrozenClass):
    """Network paths available for one server URI.

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('ServerUri', 'String'),
        ('NetworkPaths', 'ListOfEndpointUrlListDataType'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.ServerUri = None
        self.NetworkPaths = []
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'NetworkGroupDataType(' + body + ')'

    __repr__ = __str__
class SamplingIntervalDiagnosticsDataType(FrozenClass):
    """Diagnostics counters for one sampling interval.

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('SamplingInterval', 'Double'),
        ('MonitoredItemCount', 'UInt32'),
        ('MaxMonitoredItemCount', 'UInt32'),
        ('DisabledMonitoredItemCount', 'UInt32'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.SamplingInterval = 0
        self.MonitoredItemCount = 0
        self.MaxMonitoredItemCount = 0
        self.DisabledMonitoredItemCount = 0
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'SamplingIntervalDiagnosticsDataType(' + body + ')'

    __repr__ = __str__
class ServerDiagnosticsSummaryDataType(FrozenClass):
    """Aggregate server diagnostics counters (sessions, subscriptions, requests).

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('ServerViewCount', 'UInt32'),
        ('CurrentSessionCount', 'UInt32'),
        ('CumulatedSessionCount', 'UInt32'),
        ('SecurityRejectedSessionCount', 'UInt32'),
        ('RejectedSessionCount', 'UInt32'),
        ('SessionTimeoutCount', 'UInt32'),
        ('SessionAbortCount', 'UInt32'),
        ('CurrentSubscriptionCount', 'UInt32'),
        ('CumulatedSubscriptionCount', 'UInt32'),
        ('PublishingIntervalCount', 'UInt32'),
        ('SecurityRejectedRequestsCount', 'UInt32'),
        ('RejectedRequestsCount', 'UInt32'),
    ]

    def __init__(self):
        # Every counter starts at zero; freeze the attribute set afterwards.
        self.ServerViewCount = 0
        self.CurrentSessionCount = 0
        self.CumulatedSessionCount = 0
        self.SecurityRejectedSessionCount = 0
        self.RejectedSessionCount = 0
        self.SessionTimeoutCount = 0
        self.SessionAbortCount = 0
        self.CurrentSubscriptionCount = 0
        self.CumulatedSubscriptionCount = 0
        self.PublishingIntervalCount = 0
        self.SecurityRejectedRequestsCount = 0
        self.RejectedRequestsCount = 0
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'ServerDiagnosticsSummaryDataType(' + body + ')'

    __repr__ = __str__
class ServerStatusDataType(FrozenClass):
    """Current server status: start/current time, state, build info, shutdown info.

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('StartTime', 'DateTime'),
        ('CurrentTime', 'DateTime'),
        ('State', 'ServerState'),
        ('BuildInfo', 'BuildInfo'),
        ('SecondsTillShutdown', 'UInt32'),
        ('ShutdownReason', 'LocalizedText'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.StartTime = datetime.utcnow()  # naive UTC timestamp (generator convention)
        self.CurrentTime = datetime.utcnow()
        self.State = ServerState(0)
        self.BuildInfo = BuildInfo()
        self.SecondsTillShutdown = 0
        self.ShutdownReason = LocalizedText()
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'ServerStatusDataType(' + body + ')'

    __repr__ = __str__
class SessionDiagnosticsDataType(FrozenClass):
    """Per-session diagnostics: identity, timeouts, and one ServiceCounterDataType
    per OPC UA service invoked by the session.

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('SessionId', 'NodeId'),
        ('SessionName', 'String'),
        ('ClientDescription', 'ApplicationDescription'),
        ('ServerUri', 'String'),
        ('EndpointUrl', 'String'),
        ('LocaleIds', 'ListOfString'),
        ('ActualSessionTimeout', 'Double'),
        ('MaxResponseMessageSize', 'UInt32'),
        ('ClientConnectionTime', 'DateTime'),
        ('ClientLastContactTime', 'DateTime'),
        ('CurrentSubscriptionsCount', 'UInt32'),
        ('CurrentMonitoredItemsCount', 'UInt32'),
        ('CurrentPublishRequestsInQueue', 'UInt32'),
        ('TotalRequestCount', 'ServiceCounterDataType'),
        ('UnauthorizedRequestCount', 'UInt32'),
        ('ReadCount', 'ServiceCounterDataType'),
        ('HistoryReadCount', 'ServiceCounterDataType'),
        ('WriteCount', 'ServiceCounterDataType'),
        ('HistoryUpdateCount', 'ServiceCounterDataType'),
        ('CallCount', 'ServiceCounterDataType'),
        ('CreateMonitoredItemsCount', 'ServiceCounterDataType'),
        ('ModifyMonitoredItemsCount', 'ServiceCounterDataType'),
        ('SetMonitoringModeCount', 'ServiceCounterDataType'),
        ('SetTriggeringCount', 'ServiceCounterDataType'),
        ('DeleteMonitoredItemsCount', 'ServiceCounterDataType'),
        ('CreateSubscriptionCount', 'ServiceCounterDataType'),
        ('ModifySubscriptionCount', 'ServiceCounterDataType'),
        ('SetPublishingModeCount', 'ServiceCounterDataType'),
        ('PublishCount', 'ServiceCounterDataType'),
        ('RepublishCount', 'ServiceCounterDataType'),
        ('TransferSubscriptionsCount', 'ServiceCounterDataType'),
        ('DeleteSubscriptionsCount', 'ServiceCounterDataType'),
        ('AddNodesCount', 'ServiceCounterDataType'),
        ('AddReferencesCount', 'ServiceCounterDataType'),
        ('DeleteNodesCount', 'ServiceCounterDataType'),
        ('DeleteReferencesCount', 'ServiceCounterDataType'),
        ('BrowseCount', 'ServiceCounterDataType'),
        ('BrowseNextCount', 'ServiceCounterDataType'),
        ('TranslateBrowsePathsToNodeIdsCount', 'ServiceCounterDataType'),
        ('QueryFirstCount', 'ServiceCounterDataType'),
        ('QueryNextCount', 'ServiceCounterDataType'),
        ('RegisterNodesCount', 'ServiceCounterDataType'),
        ('UnregisterNodesCount', 'ServiceCounterDataType'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.SessionId = NodeId()
        self.SessionName = None
        self.ClientDescription = ApplicationDescription()
        self.ServerUri = None
        self.EndpointUrl = None
        self.LocaleIds = []
        self.ActualSessionTimeout = 0
        self.MaxResponseMessageSize = 0
        self.ClientConnectionTime = datetime.utcnow()  # naive UTC (generator convention)
        self.ClientLastContactTime = datetime.utcnow()
        self.CurrentSubscriptionsCount = 0
        self.CurrentMonitoredItemsCount = 0
        self.CurrentPublishRequestsInQueue = 0
        self.TotalRequestCount = ServiceCounterDataType()
        self.UnauthorizedRequestCount = 0
        self.ReadCount = ServiceCounterDataType()
        self.HistoryReadCount = ServiceCounterDataType()
        self.WriteCount = ServiceCounterDataType()
        self.HistoryUpdateCount = ServiceCounterDataType()
        self.CallCount = ServiceCounterDataType()
        self.CreateMonitoredItemsCount = ServiceCounterDataType()
        self.ModifyMonitoredItemsCount = ServiceCounterDataType()
        self.SetMonitoringModeCount = ServiceCounterDataType()
        self.SetTriggeringCount = ServiceCounterDataType()
        self.DeleteMonitoredItemsCount = ServiceCounterDataType()
        self.CreateSubscriptionCount = ServiceCounterDataType()
        self.ModifySubscriptionCount = ServiceCounterDataType()
        self.SetPublishingModeCount = ServiceCounterDataType()
        self.PublishCount = ServiceCounterDataType()
        self.RepublishCount = ServiceCounterDataType()
        self.TransferSubscriptionsCount = ServiceCounterDataType()
        self.DeleteSubscriptionsCount = ServiceCounterDataType()
        self.AddNodesCount = ServiceCounterDataType()
        self.AddReferencesCount = ServiceCounterDataType()
        self.DeleteNodesCount = ServiceCounterDataType()
        self.DeleteReferencesCount = ServiceCounterDataType()
        self.BrowseCount = ServiceCounterDataType()
        self.BrowseNextCount = ServiceCounterDataType()
        self.TranslateBrowsePathsToNodeIdsCount = ServiceCounterDataType()
        self.QueryFirstCount = ServiceCounterDataType()
        self.QueryNextCount = ServiceCounterDataType()
        self.RegisterNodesCount = ServiceCounterDataType()
        self.UnregisterNodesCount = ServiceCounterDataType()
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'SessionDiagnosticsDataType(' + body + ')'

    __repr__ = __str__
class SessionSecurityDiagnosticsDataType(FrozenClass):
    """Security-related diagnostics for a session (user id, transport, security mode).

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('SessionId', 'NodeId'),
        ('ClientUserIdOfSession', 'String'),
        ('ClientUserIdHistory', 'ListOfString'),
        ('AuthenticationMechanism', 'String'),
        ('Encoding', 'String'),
        ('TransportProtocol', 'String'),
        ('SecurityMode', 'MessageSecurityMode'),
        ('SecurityPolicyUri', 'String'),
        ('ClientCertificate', 'ByteString'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.SessionId = NodeId()
        self.ClientUserIdOfSession = None
        self.ClientUserIdHistory = []
        self.AuthenticationMechanism = None
        self.Encoding = None
        self.TransportProtocol = None
        self.SecurityMode = MessageSecurityMode(0)
        self.SecurityPolicyUri = None
        self.ClientCertificate = None
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'SessionSecurityDiagnosticsDataType(' + body + ')'

    __repr__ = __str__
class ServiceCounterDataType(FrozenClass):
    """Invocation counter for one service: total calls and calls that errored.

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('TotalCount', 'UInt32'),
        ('ErrorCount', 'UInt32'),
    ]

    def __init__(self):
        # Both counters start at zero; freeze the attribute set afterwards.
        self.TotalCount = 0
        self.ErrorCount = 0
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'ServiceCounterDataType(' + body + ')'

    __repr__ = __str__
class StatusResult(FrozenClass):
    """A status code paired with its diagnostic information.

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('StatusCode', 'StatusCode'),
        ('DiagnosticInfo', 'DiagnosticInfo'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.StatusCode = StatusCode()
        self.DiagnosticInfo = DiagnosticInfo()
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'StatusResult(' + body + ')'

    __repr__ = __str__
class SubscriptionDiagnosticsDataType(FrozenClass):
    """Per-subscription diagnostics: configuration (interval, keep-alive, lifetime)
    and runtime counters (publish, notification, republish, queue overflow).

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('SessionId', 'NodeId'),
        ('SubscriptionId', 'UInt32'),
        ('Priority', 'Byte'),
        ('PublishingInterval', 'Double'),
        ('MaxKeepAliveCount', 'UInt32'),
        ('MaxLifetimeCount', 'UInt32'),
        ('MaxNotificationsPerPublish', 'UInt32'),
        ('PublishingEnabled', 'Boolean'),
        ('ModifyCount', 'UInt32'),
        ('EnableCount', 'UInt32'),
        ('DisableCount', 'UInt32'),
        ('RepublishRequestCount', 'UInt32'),
        ('RepublishMessageRequestCount', 'UInt32'),
        ('RepublishMessageCount', 'UInt32'),
        ('TransferRequestCount', 'UInt32'),
        ('TransferredToAltClientCount', 'UInt32'),
        ('TransferredToSameClientCount', 'UInt32'),
        ('PublishRequestCount', 'UInt32'),
        ('DataChangeNotificationsCount', 'UInt32'),
        ('EventNotificationsCount', 'UInt32'),
        ('NotificationsCount', 'UInt32'),
        ('LatePublishRequestCount', 'UInt32'),
        ('CurrentKeepAliveCount', 'UInt32'),
        ('CurrentLifetimeCount', 'UInt32'),
        ('UnacknowledgedMessageCount', 'UInt32'),
        ('DiscardedMessageCount', 'UInt32'),
        ('MonitoredItemCount', 'UInt32'),
        ('DisabledMonitoredItemCount', 'UInt32'),
        ('MonitoringQueueOverflowCount', 'UInt32'),
        ('NextSequenceNumber', 'UInt32'),
        ('EventQueueOverFlowCount', 'UInt32'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.SessionId = NodeId()
        self.SubscriptionId = 0
        self.Priority = 0
        self.PublishingInterval = 0
        self.MaxKeepAliveCount = 0
        self.MaxLifetimeCount = 0
        self.MaxNotificationsPerPublish = 0
        self.PublishingEnabled = True
        self.ModifyCount = 0
        self.EnableCount = 0
        self.DisableCount = 0
        self.RepublishRequestCount = 0
        self.RepublishMessageRequestCount = 0
        self.RepublishMessageCount = 0
        self.TransferRequestCount = 0
        self.TransferredToAltClientCount = 0
        self.TransferredToSameClientCount = 0
        self.PublishRequestCount = 0
        self.DataChangeNotificationsCount = 0
        self.EventNotificationsCount = 0
        self.NotificationsCount = 0
        self.LatePublishRequestCount = 0
        self.CurrentKeepAliveCount = 0
        self.CurrentLifetimeCount = 0
        self.UnacknowledgedMessageCount = 0
        self.DiscardedMessageCount = 0
        self.MonitoredItemCount = 0
        self.DisabledMonitoredItemCount = 0
        self.MonitoringQueueOverflowCount = 0
        self.NextSequenceNumber = 0
        self.EventQueueOverFlowCount = 0
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'SubscriptionDiagnosticsDataType(' + body + ')'

    __repr__ = __str__
class ModelChangeStructureDataType(FrozenClass):
    """Describes one model change event (affected node, its type, and the verb).

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('Affected', 'NodeId'),
        ('AffectedType', 'NodeId'),
        ('Verb', 'Byte'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.Affected = NodeId()
        self.AffectedType = NodeId()
        self.Verb = 0
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'ModelChangeStructureDataType(' + body + ')'

    __repr__ = __str__
class SemanticChangeStructureDataType(FrozenClass):
    """Describes one semantic change event (affected node and its type).

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('Affected', 'NodeId'),
        ('AffectedType', 'NodeId'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.Affected = NodeId()
        self.AffectedType = NodeId()
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'SemanticChangeStructureDataType(' + body + ')'

    __repr__ = __str__
class Range(FrozenClass):
    """A numeric interval with Low and High bounds.

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('Low', 'Double'),
        ('High', 'Double'),
    ]

    def __init__(self):
        # Both bounds default to zero; freeze the attribute set afterwards.
        self.Low = 0
        self.High = 0
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'Range(' + body + ')'

    __repr__ = __str__
class EUInformation(FrozenClass):
    """Engineering-unit information: namespace, unit id, display name, description.

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('NamespaceUri', 'String'),
        ('UnitId', 'Int32'),
        ('DisplayName', 'LocalizedText'),
        ('Description', 'LocalizedText'),
    ]

    def __init__(self):
        # Default-initialise each field, then freeze the attribute set.
        self.NamespaceUri = None
        self.UnitId = 0
        self.DisplayName = LocalizedText()
        self.Description = LocalizedText()
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'EUInformation(' + body + ')'

    __repr__ = __str__
class ComplexNumberType(FrozenClass):
    """A single-precision complex number (Real, Imaginary).

    Wire-level field types are declared in ``ua_types``.
    """

    ua_types = [
        ('Real', 'Float'),
        ('Imaginary', 'Float'),
    ]

    def __init__(self):
        # Both components default to zero; freeze the attribute set afterwards.
        self.Real = 0
        self.Imaginary = 0
        self._freeze = True

    def __str__(self):
        # Render as 'Name(Field:value, ...)'; field order follows ua_types.
        body = ', '.join(n + ':' + str(getattr(self, n)) for n, _ in self.ua_types)
        return 'ComplexNumberType(' + body + ')'

    __repr__ = __str__
class DoubleComplexNumberType(FrozenClass):
    """
    OPC UA structure holding a double-precision complex number.

    :ivar Real:
    :vartype Real: Double
    :ivar Imaginary:
    :vartype Imaginary: Double
    """

    ua_types = [('Real', 'Double'), ('Imaginary', 'Double')]

    def __init__(self):
        self.Real = 0
        self.Imaginary = 0
        self._freeze = True

    def __str__(self):
        # Render every declared field in declaration order.
        body = ', '.join('{0}:{1!s}'.format(n, getattr(self, n)) for n, _ in self.ua_types)
        return 'DoubleComplexNumberType({0})'.format(body)

    __repr__ = __str__
class AxisInformation(FrozenClass):
    """
    OPC UA structure describing the axis of an array value.

    :ivar EngineeringUnits:
    :vartype EngineeringUnits: EUInformation
    :ivar EURange:
    :vartype EURange: Range
    :ivar Title:
    :vartype Title: LocalizedText
    :ivar AxisScaleType:
    :vartype AxisScaleType: AxisScaleEnumeration
    :ivar AxisSteps:
    :vartype AxisSteps: Double
    """

    ua_types = [
        ('EngineeringUnits', 'EUInformation'),
        ('EURange', 'Range'),
        ('Title', 'LocalizedText'),
        ('AxisScaleType', 'AxisScaleEnumeration'),
        ('AxisSteps', 'ListOfDouble'),
    ]

    def __init__(self):
        self.EngineeringUnits = EUInformation()
        self.EURange = Range()
        self.Title = LocalizedText()
        self.AxisScaleType = AxisScaleEnumeration(0)
        self.AxisSteps = []
        self._freeze = True

    def __str__(self):
        # Render every declared field in declaration order.
        body = ', '.join('{0}:{1!s}'.format(n, getattr(self, n)) for n, _ in self.ua_types)
        return 'AxisInformation({0})'.format(body)

    __repr__ = __str__
class XVType(FrozenClass):
    """
    OPC UA structure pairing a position on an axis with a value.

    :ivar X:
    :vartype X: Double
    :ivar Value:
    :vartype Value: Float
    """

    ua_types = [('X', 'Double'), ('Value', 'Float')]

    def __init__(self):
        self.X = 0
        self.Value = 0
        self._freeze = True

    def __str__(self):
        # Render every declared field in declaration order.
        body = ', '.join('{0}:{1!s}'.format(n, getattr(self, n)) for n, _ in self.ua_types)
        return 'XVType({0})'.format(body)

    __repr__ = __str__
class ProgramDiagnosticDataType(FrozenClass):
    """
    OPC UA structure with diagnostic information about a program invocation.

    :ivar CreateSessionId:
    :vartype CreateSessionId: NodeId
    :ivar CreateClientName:
    :vartype CreateClientName: String
    :ivar InvocationCreationTime:
    :vartype InvocationCreationTime: DateTime
    :ivar LastTransitionTime:
    :vartype LastTransitionTime: DateTime
    :ivar LastMethodCall:
    :vartype LastMethodCall: String
    :ivar LastMethodSessionId:
    :vartype LastMethodSessionId: NodeId
    :ivar LastMethodInputArguments:
    :vartype LastMethodInputArguments: Argument
    :ivar LastMethodOutputArguments:
    :vartype LastMethodOutputArguments: Argument
    :ivar LastMethodCallTime:
    :vartype LastMethodCallTime: DateTime
    :ivar LastMethodReturnStatus:
    :vartype LastMethodReturnStatus: StatusResult
    """

    ua_types = [
        ('CreateSessionId', 'NodeId'),
        ('CreateClientName', 'String'),
        ('InvocationCreationTime', 'DateTime'),
        ('LastTransitionTime', 'DateTime'),
        ('LastMethodCall', 'String'),
        ('LastMethodSessionId', 'NodeId'),
        ('LastMethodInputArguments', 'ListOfArgument'),
        ('LastMethodOutputArguments', 'ListOfArgument'),
        ('LastMethodCallTime', 'DateTime'),
        ('LastMethodReturnStatus', 'StatusResult'),
    ]

    def __init__(self):
        self.CreateSessionId = NodeId()
        self.CreateClientName = None
        self.InvocationCreationTime = datetime.utcnow()
        self.LastTransitionTime = datetime.utcnow()
        self.LastMethodCall = None
        self.LastMethodSessionId = NodeId()
        self.LastMethodInputArguments = []
        self.LastMethodOutputArguments = []
        self.LastMethodCallTime = datetime.utcnow()
        self.LastMethodReturnStatus = StatusResult()
        self._freeze = True

    def __str__(self):
        # Render every declared field in declaration order.
        body = ', '.join('{0}:{1!s}'.format(n, getattr(self, n)) for n, _ in self.ua_types)
        return 'ProgramDiagnosticDataType({0})'.format(body)

    __repr__ = __str__
class ProgramDiagnostic2DataType(FrozenClass):
    """
    OPC UA structure with extended diagnostic information about a program
    invocation, including the last input/output argument values.

    :ivar CreateSessionId:
    :vartype CreateSessionId: NodeId
    :ivar CreateClientName:
    :vartype CreateClientName: String
    :ivar InvocationCreationTime:
    :vartype InvocationCreationTime: DateTime
    :ivar LastTransitionTime:
    :vartype LastTransitionTime: DateTime
    :ivar LastMethodCall:
    :vartype LastMethodCall: String
    :ivar LastMethodSessionId:
    :vartype LastMethodSessionId: NodeId
    :ivar LastMethodInputArguments:
    :vartype LastMethodInputArguments: Argument
    :ivar LastMethodOutputArguments:
    :vartype LastMethodOutputArguments: Argument
    :ivar LastMethodInputValues:
    :vartype LastMethodInputValues: Variant
    :ivar LastMethodOutputValues:
    :vartype LastMethodOutputValues: Variant
    :ivar LastMethodCallTime:
    :vartype LastMethodCallTime: DateTime
    :ivar LastMethodReturnStatus:
    :vartype LastMethodReturnStatus: StatusResult
    """

    ua_types = [
        ('CreateSessionId', 'NodeId'),
        ('CreateClientName', 'String'),
        ('InvocationCreationTime', 'DateTime'),
        ('LastTransitionTime', 'DateTime'),
        ('LastMethodCall', 'String'),
        ('LastMethodSessionId', 'NodeId'),
        ('LastMethodInputArguments', 'ListOfArgument'),
        ('LastMethodOutputArguments', 'ListOfArgument'),
        ('LastMethodInputValues', 'ListOfVariant'),
        ('LastMethodOutputValues', 'ListOfVariant'),
        ('LastMethodCallTime', 'DateTime'),
        ('LastMethodReturnStatus', 'StatusResult'),
    ]

    def __init__(self):
        self.CreateSessionId = NodeId()
        self.CreateClientName = None
        self.InvocationCreationTime = datetime.utcnow()
        self.LastTransitionTime = datetime.utcnow()
        self.LastMethodCall = None
        self.LastMethodSessionId = NodeId()
        self.LastMethodInputArguments = []
        self.LastMethodOutputArguments = []
        self.LastMethodInputValues = []
        self.LastMethodOutputValues = []
        self.LastMethodCallTime = datetime.utcnow()
        self.LastMethodReturnStatus = StatusResult()
        self._freeze = True

    def __str__(self):
        # Render every declared field in declaration order.
        body = ', '.join('{0}:{1!s}'.format(n, getattr(self, n)) for n, _ in self.ua_types)
        return 'ProgramDiagnostic2DataType({0})'.format(body)

    __repr__ = __str__
class Annotation(FrozenClass):
    """
    OPC UA structure holding a user-supplied annotation with its timestamp.

    :ivar Message:
    :vartype Message: String
    :ivar UserName:
    :vartype UserName: String
    :ivar AnnotationTime:
    :vartype AnnotationTime: DateTime
    """

    ua_types = [
        ('Message', 'String'),
        ('UserName', 'String'),
        ('AnnotationTime', 'DateTime'),
    ]

    def __init__(self):
        self.Message = None
        self.UserName = None
        self.AnnotationTime = datetime.utcnow()
        self._freeze = True

    def __str__(self):
        # Render every declared field in declaration order.
        body = ', '.join('{0}:{1!s}'.format(n, getattr(self, n)) for n, _ in self.ua_types)
        return 'Annotation({0})'.format(body)

    __repr__ = __str__
nid = FourByteNodeId(ObjectIds.KeyValuePair_Encoding_DefaultBinary)
extension_object_classes[nid] = KeyValuePair
extension_object_ids['KeyValuePair'] = nid
nid = FourByteNodeId(ObjectIds.EndpointType_Encoding_DefaultBinary)
extension_object_classes[nid] = EndpointType
extension_object_ids['EndpointType'] = nid
nid = FourByteNodeId(ObjectIds.IdentityMappingRuleType_Encoding_DefaultBinary)
extension_object_classes[nid] = IdentityMappingRuleType
extension_object_ids['IdentityMappingRuleType'] = nid
nid = FourByteNodeId(ObjectIds.TrustListDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = TrustListDataType
extension_object_ids['TrustListDataType'] = nid
nid = FourByteNodeId(ObjectIds.DecimalDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = DecimalDataType
extension_object_ids['DecimalDataType'] = nid
nid = FourByteNodeId(ObjectIds.DataTypeSchemaHeader_Encoding_DefaultBinary)
extension_object_classes[nid] = DataTypeSchemaHeader
extension_object_ids['DataTypeSchemaHeader'] = nid
nid = FourByteNodeId(ObjectIds.DataTypeDescription_Encoding_DefaultBinary)
extension_object_classes[nid] = DataTypeDescription
extension_object_ids['DataTypeDescription'] = nid
nid = FourByteNodeId(ObjectIds.StructureDescription_Encoding_DefaultBinary)
extension_object_classes[nid] = StructureDescription
extension_object_ids['StructureDescription'] = nid
nid = FourByteNodeId(ObjectIds.EnumDescription_Encoding_DefaultBinary)
extension_object_classes[nid] = EnumDescription
extension_object_ids['EnumDescription'] = nid
nid = FourByteNodeId(ObjectIds.SimpleTypeDescription_Encoding_DefaultBinary)
extension_object_classes[nid] = SimpleTypeDescription
extension_object_ids['SimpleTypeDescription'] = nid
nid = FourByteNodeId(ObjectIds.UABinaryFileDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = UABinaryFileDataType
extension_object_ids['UABinaryFileDataType'] = nid
nid = FourByteNodeId(ObjectIds.DataSetMetaDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = DataSetMetaDataType
extension_object_ids['DataSetMetaDataType'] = nid
nid = FourByteNodeId(ObjectIds.FieldMetaData_Encoding_DefaultBinary)
extension_object_classes[nid] = FieldMetaData
extension_object_ids['FieldMetaData'] = nid
nid = FourByteNodeId(ObjectIds.ConfigurationVersionDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = ConfigurationVersionDataType
extension_object_ids['ConfigurationVersionDataType'] = nid
nid = FourByteNodeId(ObjectIds.PublishedDataSetDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = PublishedDataSetDataType
extension_object_ids['PublishedDataSetDataType'] = nid
nid = FourByteNodeId(ObjectIds.PublishedDataSetSourceDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = PublishedDataSetSourceDataType
extension_object_ids['PublishedDataSetSourceDataType'] = nid
nid = FourByteNodeId(ObjectIds.PublishedVariableDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = PublishedVariableDataType
extension_object_ids['PublishedVariableDataType'] = nid
nid = FourByteNodeId(ObjectIds.PublishedDataItemsDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = PublishedDataItemsDataType
extension_object_ids['PublishedDataItemsDataType'] = nid
nid = FourByteNodeId(ObjectIds.PublishedEventsDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = PublishedEventsDataType
extension_object_ids['PublishedEventsDataType'] = nid
nid = FourByteNodeId(ObjectIds.DataSetWriterDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = DataSetWriterDataType
extension_object_ids['DataSetWriterDataType'] = nid
nid = FourByteNodeId(ObjectIds.DataSetWriterTransportDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = DataSetWriterTransportDataType
extension_object_ids['DataSetWriterTransportDataType'] = nid
nid = FourByteNodeId(ObjectIds.DataSetWriterMessageDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = DataSetWriterMessageDataType
extension_object_ids['DataSetWriterMessageDataType'] = nid
nid = FourByteNodeId(ObjectIds.PubSubGroupDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = PubSubGroupDataType
extension_object_ids['PubSubGroupDataType'] = nid
nid = FourByteNodeId(ObjectIds.WriterGroupDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = WriterGroupDataType
extension_object_ids['WriterGroupDataType'] = nid
nid = FourByteNodeId(ObjectIds.WriterGroupTransportDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = WriterGroupTransportDataType
extension_object_ids['WriterGroupTransportDataType'] = nid
nid = FourByteNodeId(ObjectIds.WriterGroupMessageDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = WriterGroupMessageDataType
extension_object_ids['WriterGroupMessageDataType'] = nid
nid = FourByteNodeId(ObjectIds.PubSubConnectionDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = PubSubConnectionDataType
extension_object_ids['PubSubConnectionDataType'] = nid
nid = FourByteNodeId(ObjectIds.ConnectionTransportDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = ConnectionTransportDataType
extension_object_ids['ConnectionTransportDataType'] = nid
nid = FourByteNodeId(ObjectIds.NetworkAddressDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = NetworkAddressDataType
extension_object_ids['NetworkAddressDataType'] = nid
nid = FourByteNodeId(ObjectIds.NetworkAddressUrlDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = NetworkAddressUrlDataType
extension_object_ids['NetworkAddressUrlDataType'] = nid
nid = FourByteNodeId(ObjectIds.ReaderGroupDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = ReaderGroupDataType
extension_object_ids['ReaderGroupDataType'] = nid
nid = FourByteNodeId(ObjectIds.ReaderGroupTransportDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = ReaderGroupTransportDataType
extension_object_ids['ReaderGroupTransportDataType'] = nid
nid = FourByteNodeId(ObjectIds.ReaderGroupMessageDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = ReaderGroupMessageDataType
extension_object_ids['ReaderGroupMessageDataType'] = nid
nid = FourByteNodeId(ObjectIds.DataSetReaderDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = DataSetReaderDataType
extension_object_ids['DataSetReaderDataType'] = nid
nid = FourByteNodeId(ObjectIds.DataSetReaderTransportDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = DataSetReaderTransportDataType
extension_object_ids['DataSetReaderTransportDataType'] = nid
nid = FourByteNodeId(ObjectIds.DataSetReaderMessageDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = DataSetReaderMessageDataType
extension_object_ids['DataSetReaderMessageDataType'] = nid
nid = FourByteNodeId(ObjectIds.SubscribedDataSetDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = SubscribedDataSetDataType
extension_object_ids['SubscribedDataSetDataType'] = nid
nid = FourByteNodeId(ObjectIds.TargetVariablesDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = TargetVariablesDataType
extension_object_ids['TargetVariablesDataType'] = nid
nid = FourByteNodeId(ObjectIds.FieldTargetDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = FieldTargetDataType
extension_object_ids['FieldTargetDataType'] = nid
nid = FourByteNodeId(ObjectIds.SubscribedDataSetMirrorDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = SubscribedDataSetMirrorDataType
extension_object_ids['SubscribedDataSetMirrorDataType'] = nid
nid = FourByteNodeId(ObjectIds.PubSubConfigurationDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = PubSubConfigurationDataType
extension_object_ids['PubSubConfigurationDataType'] = nid
nid = FourByteNodeId(ObjectIds.UadpWriterGroupMessageDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = UadpWriterGroupMessageDataType
extension_object_ids['UadpWriterGroupMessageDataType'] = nid
nid = FourByteNodeId(ObjectIds.UadpDataSetWriterMessageDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = UadpDataSetWriterMessageDataType
extension_object_ids['UadpDataSetWriterMessageDataType'] = nid
nid = FourByteNodeId(ObjectIds.UadpDataSetReaderMessageDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = UadpDataSetReaderMessageDataType
extension_object_ids['UadpDataSetReaderMessageDataType'] = nid
nid = FourByteNodeId(ObjectIds.JsonWriterGroupMessageDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = JsonWriterGroupMessageDataType
extension_object_ids['JsonWriterGroupMessageDataType'] = nid
nid = FourByteNodeId(ObjectIds.JsonDataSetWriterMessageDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = JsonDataSetWriterMessageDataType
extension_object_ids['JsonDataSetWriterMessageDataType'] = nid
nid = FourByteNodeId(ObjectIds.JsonDataSetReaderMessageDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = JsonDataSetReaderMessageDataType
extension_object_ids['JsonDataSetReaderMessageDataType'] = nid
nid = FourByteNodeId(ObjectIds.DatagramConnectionTransportDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = DatagramConnectionTransportDataType
extension_object_ids['DatagramConnectionTransportDataType'] = nid
nid = FourByteNodeId(ObjectIds.DatagramWriterGroupTransportDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = DatagramWriterGroupTransportDataType
extension_object_ids['DatagramWriterGroupTransportDataType'] = nid
nid = FourByteNodeId(ObjectIds.BrokerConnectionTransportDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = BrokerConnectionTransportDataType
extension_object_ids['BrokerConnectionTransportDataType'] = nid
nid = FourByteNodeId(ObjectIds.BrokerWriterGroupTransportDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = BrokerWriterGroupTransportDataType
extension_object_ids['BrokerWriterGroupTransportDataType'] = nid
nid = FourByteNodeId(ObjectIds.BrokerDataSetWriterTransportDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = BrokerDataSetWriterTransportDataType
extension_object_ids['BrokerDataSetWriterTransportDataType'] = nid
nid = FourByteNodeId(ObjectIds.BrokerDataSetReaderTransportDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = BrokerDataSetReaderTransportDataType
extension_object_ids['BrokerDataSetReaderTransportDataType'] = nid
nid = FourByteNodeId(ObjectIds.RolePermissionType_Encoding_DefaultBinary)
extension_object_classes[nid] = RolePermissionType
extension_object_ids['RolePermissionType'] = nid
nid = FourByteNodeId(ObjectIds.StructureField_Encoding_DefaultBinary)
extension_object_classes[nid] = StructureField
extension_object_ids['StructureField'] = nid
nid = FourByteNodeId(ObjectIds.Argument_Encoding_DefaultBinary)
extension_object_classes[nid] = Argument
extension_object_ids['Argument'] = nid
nid = FourByteNodeId(ObjectIds.EnumValueType_Encoding_DefaultBinary)
extension_object_classes[nid] = EnumValueType
extension_object_ids['EnumValueType'] = nid
nid = FourByteNodeId(ObjectIds.EnumField_Encoding_DefaultBinary)
extension_object_classes[nid] = EnumField
extension_object_ids['EnumField'] = nid
nid = FourByteNodeId(ObjectIds.OptionSet_Encoding_DefaultBinary)
extension_object_classes[nid] = OptionSet
extension_object_ids['OptionSet'] = nid
nid = FourByteNodeId(ObjectIds.Union_Encoding_DefaultBinary)
extension_object_classes[nid] = Union
extension_object_ids['Union'] = nid
nid = FourByteNodeId(ObjectIds.TimeZoneDataType_Encoding_DefaultBinary)
extension_object_classes[nid] = TimeZoneDataType
extension_object_ids['TimeZoneDataType'] = nid
nid = FourByteNodeId(ObjectIds.ApplicationDescription_Encoding_DefaultBinary)
extension_object_classes[nid] = ApplicationDescription
extension_object_ids['ApplicationDescription'] = nid
nid = FourByteNodeId(ObjectIds.RequestHeader_Encoding_DefaultBinary)
extension_object_classes[nid] = RequestHeader
extension_object_ids['RequestHeader'] = nid
nid = FourByteNodeId(ObjectIds.ResponseHeader_Encoding_DefaultBinary)
extension_object_classes[nid] = ResponseHeader
extension_object_ids['ResponseHeader'] = nid
nid = FourByteNodeId(ObjectIds.ServiceFault_Encoding_DefaultBinary)
extension_object_classes[nid] = ServiceFault
extension_object_ids['ServiceFault'] = nid
nid = FourByteNodeId(ObjectIds.SessionlessInvokeRequestType_Encoding_DefaultBinary)
extension_object_classes[nid] = SessionlessInvokeRequestType
extension_object_ids['SessionlessInvokeRequestType'] = nid
nid = FourByteNodeId(ObjectIds.SessionlessInvokeResponseType_Encoding_DefaultBinary)
extension_object_classes[nid] = SessionlessInvokeResponseType
extension_object_ids['SessionlessInvokeResponseType'] = nid
nid = FourByteNodeId(ObjectIds.FindServersRequest_Encoding_DefaultBinary)
extension_object_classes[nid] = FindServersRequest
extension_object_ids['FindServersRequest'] = nid
nid = FourByteNodeId(ObjectIds.FindServersResponse_Encoding_DefaultBinary)
extension_object_classes[nid] = FindServersResponse
extension_object_ids['FindServersResponse'] = nid
nid = FourByteNodeId(ObjectIds.ServerOnNetwork_Encoding_DefaultBinary)
extension_object_classes[nid] = ServerOnNetwork
extension_object_ids['ServerOnNetwork'] = nid
nid = FourByteNodeId(ObjectIds.FindServersOnNetworkRequest_Encoding_DefaultBinary)
extension_object_classes[nid] = FindServersOnNetworkRequest
extension_object_ids['FindServersOnNetworkRequest'] = nid
nid = FourByteNodeId(ObjectIds.FindServersOnNetworkResponse_Encoding_DefaultBinary)
extension_object_classes[nid] = FindServersOnNetworkResponse
extension_object_ids['FindServersOnNetworkResponse'] = nid
nid = FourByteNodeId(ObjectIds.UserTokenPolicy_Encoding_DefaultBinary)
extension_object_classes[nid] = UserTokenPolicy
extension_object_ids['UserTokenPolicy'] = nid
nid = FourByteNodeId(ObjectIds.EndpointDescription_Encoding_DefaultBinary)
extension_object_classes[nid] = EndpointDescription
extension_object_ids['EndpointDescription'] = nid
nid = FourByteNodeId(ObjectIds.GetEndpointsRequest_Encoding_DefaultBinary)
extension_object_classes[nid] = GetEndpointsRequest
extension_object_ids['GetEndpointsRequest'] = nid
nid = FourByteNodeId(ObjectIds.GetEndpointsResponse_Encoding_DefaultBinary)
extension_object_classes[nid] = GetEndpointsResponse
extension_object_ids['GetEndpointsResponse'] = nid
nid = FourByteNodeId(ObjectIds.RegisteredServer_Encoding_DefaultBinary)
extension_object_classes[nid] = RegisteredServer
extension_object_ids['RegisteredServer'] = nid
nid = FourByteNodeId(ObjectIds.RegisterServerRequest_Encoding_DefaultBinary)
extension_object_classes[nid] = RegisterServerRequest
extension_object_ids['RegisterServerRequest'] = nid
nid = FourByteNodeId(ObjectIds.RegisterServerResponse_Encoding_DefaultBinary)
extension_object_classes[nid] = RegisterServerResponse
extension_object_ids['RegisterServerResponse'] = nid
nid = FourByteNodeId(ObjectIds.DiscoveryConfiguration_Encoding_DefaultBinary)
extension_object_classes[nid] = DiscoveryConfiguration
extension_object_ids['DiscoveryConfiguration'] = nid
nid = FourByteNodeId(ObjectIds.MdnsDiscoveryConfiguration_Encoding_DefaultBinary)
extension_object_classes[nid] = MdnsDiscoveryConfiguration
extension_object_ids['MdnsDiscoveryConfiguration'] = nid
nid = FourByteNodeId(ObjectIds.RegisterServer2Request_Encoding_DefaultBinary)
extension_object_classes[nid] = RegisterServer2Request
extension_object_ids['RegisterServer2Request'] = nid
nid = FourByteNodeId(ObjectIds.RegisterServer2Response_Encoding_DefaultBinary)
extension_object_classes[nid] = RegisterServer2Response
extension_object_ids['RegisterServer2Response'] = nid
nid = FourByteNodeId(ObjectIds.ChannelSecurityToken_Encoding_DefaultBinary)
extension_object_classes[nid] = ChannelSecurityToken
extension_object_ids['ChannelSecurityToken'] = nid
nid = FourByteNodeId(ObjectIds.OpenSecureChannelRequest_Encoding_DefaultBinary)
extension_object_classes[nid] = OpenSecureChannelRequest
extension_object_ids['OpenSecureChannelRequest'] = nid
nid = FourByteNodeId(ObjectIds.OpenSecureChannelResponse_Encoding_DefaultBinary)
extension_object_classes[nid] = OpenSecureChannelResponse
extension_object_ids['OpenSecureChannelResponse'] = nid
nid = FourByteNodeId(ObjectIds.CloseSecureChannelRequest_Encoding_DefaultBinary)
extension_object_classes[nid] = CloseSecureChannelRequest
extension_object_ids['CloseSecureChannelRequest'] = nid
nid = FourByteNodeId(ObjectIds.CloseSecureChannelResponse_Encoding_DefaultBinary)
extension_object_classes[nid] = CloseSecureChannelResponse
extension_object_ids['CloseSecureChannelResponse'] = nid
nid = FourByteNodeId(ObjectIds.SignedSoftwareCertificate_Encoding_DefaultBinary)
extension_object_classes[nid] = SignedSoftwareCertificate
extension_object_ids['SignedSoftwareCertificate'] = nid
nid = FourByteNodeId(ObjectIds.SignatureData_Encoding_DefaultBinary)
extension_object_classes[nid] = SignatureData
extension_object_ids['SignatureData'] = nid
nid = FourByteNodeId(ObjectIds.CreateSessionRequest_Encoding_DefaultBinary)
extension_object_classes[nid] = CreateSessionRequest
extension_object_ids['CreateSessionRequest'] = nid
nid = FourByteNodeId(ObjectIds.CreateSessionResponse_Encoding_DefaultBinary)
extension_object_classes[nid] = CreateSessionResponse
extension_object_ids['CreateSessionResponse'] = nid
nid = FourByteNodeId(ObjectIds.UserIdentityToken_Encoding_DefaultBinary)
extension_object_classes[nid] = UserIdentityToken
extension_object_ids['UserIdentityToken'] = nid
nid = FourByteNodeId(ObjectIds.AnonymousIdentityToken_Encoding_DefaultBinary)
extension_object_classes[nid] = AnonymousIdentityToken
extension_object_ids['AnonymousIdentityToken'] = nid
nid = FourByteNodeId(ObjectIds.UserNameIdentityToken_Encoding_DefaultBinary)
extension_object_classes[nid] = UserNameIdentityToken
extension_object_ids['UserNameIdentityToken'] = nid
nid = FourByteNodeId(ObjectIds.X509IdentityToken_Encoding_DefaultBinary)
extension_object_classes[nid] = X509IdentityToken
extension_object_ids['X509IdentityToken'] = nid
nid = FourByteNodeId(ObjectIds.IssuedIdentityToken_Encoding_DefaultBinary)
extension_object_classes[nid] = IssuedIdentityToken
extension_object_ids['IssuedIdentityToken'] = nid
nid = FourByteNodeId(ObjectIds.ActivateSessionRequest_Encoding_DefaultBinary)
extension_object_classes[nid] = ActivateSessionRequest
extension_object_ids['ActivateSessionRequest'] = nid
nid = FourByteNodeId(ObjectIds.ActivateSessionResponse_Encoding_DefaultBinary)
extension_object_classes[nid] = ActivateSessionResponse
extension_object_ids['ActivateSessionResponse'] = nid
nid = FourByteNodeId(ObjectIds.CloseSessionRequest_Encoding_DefaultBinary)
extension_object_classes[nid] = CloseSessionRequest
extension_object_ids['CloseSessionRequest'] = nid
nid = FourByteNodeId(ObjectIds.CloseSessionResponse_Encoding_DefaultBinary)
extension_object_classes[nid] = CloseSessionResponse
extension_object_ids['CloseSessionResponse'] = nid
nid = FourByteNodeId(ObjectIds.CancelRequest_Encoding_DefaultBinary)
extension_object_classes[nid] = CancelRequest
extension_object_ids['CancelRequest'] = nid
nid = FourByteNodeId(ObjectIds.CancelResponse_Encoding_DefaultBinary)
extension_object_classes[nid] = CancelResponse
extension_object_ids['CancelResponse'] = nid
nid = FourByteNodeId(ObjectIds.NodeAttributes_Encoding_DefaultBinary)
extension_object_classes[nid] = NodeAttributes
extension_object_ids['NodeAttributes'] = nid
nid = FourByteNodeId(ObjectIds.ObjectAttributes_Encoding_DefaultBinary)
extension_object_classes[nid] = ObjectAttributes
extension_object_ids['ObjectAttributes'] = nid
nid = FourByteNodeId(ObjectIds.VariableAttributes_Encoding_DefaultBinary)
extension_object_classes[nid] = VariableAttributes
extension_object_ids['VariableAttributes'] = nid
nid = FourByteNodeId(ObjectIds.MethodAttributes_Encoding_DefaultBinary)
extension_object_classes[nid] = MethodAttributes
extension_object_ids['MethodAttributes'] = nid
nid = FourByteNodeId(ObjectIds.ObjectTypeAttributes_Encoding_DefaultBinary)
extension_object_classes[nid] = ObjectTypeAttributes
extension_object_ids['ObjectTypeAttributes'] = nid
nid = FourByteNodeId(ObjectIds.VariableTypeAttributes_Encoding_DefaultBinary)
extension_object_classes[nid] = VariableTypeAttributes
extension_object_ids['VariableTypeAttributes'] = nid
nid = FourByteNodeId(ObjectIds.ReferenceTypeAttributes_Encoding_DefaultBinary)
extension_object_classes[nid] = ReferenceTypeAttributes
extension_object_ids['ReferenceTypeAttributes'] = nid
nid = FourByteNodeId(ObjectIds.DataTypeAttributes_Encoding_DefaultBinary)
extension_object_classes[nid] = DataTypeAttributes
extension_object_ids['DataTypeAttributes'] = nid
nid = FourByteNodeId(ObjectIds.ViewAttributes_Encoding_DefaultBinary)
extension_object_classes[nid] = ViewAttributes
extension_object_ids['ViewAttributes'] = nid
nid = FourByteNodeId(ObjectIds.GenericAttributeValue_Encoding_DefaultBinary)
extension_object_classes[nid] = GenericAttributeValue
extension_object_ids['GenericAttributeValue'] = nid
nid = FourByteNodeId(ObjectIds.GenericAttributes_Encoding_DefaultBinary)
extension_object_classes[nid] = GenericAttributes
extension_object_ids['GenericAttributes'] = nid
nid = FourByteNodeId(ObjectIds.AddNodesItem_Encoding_DefaultBinary)
extension_object_classes[nid] = AddNodesItem
extension_object_ids['AddNodesItem'] = nid
nid = FourByteNodeId(ObjectIds.AddNodesResult_Encoding_DefaultBinary)
extension_object_classes[nid] = AddNodesResult
extension_object_ids['AddNodesResult'] = nid
nid = FourByteNodeId(ObjectIds.AddNodesRequest_Encoding_DefaultBinary)
extension_object_classes[nid] = AddNodesRequest
extension_object_ids['AddNodesRequest'] = nid
nid = FourByteNodeId(ObjectIds.AddNodesResponse_Encoding_DefaultBinary)
extension_object_classes[nid] = AddNodesResponse
extension_object_ids['AddNodesResponse'] = nid
nid = FourByteNodeId(ObjectIds.AddReferencesItem_Encoding_DefaultBinary)
extension_object_classes[nid] = AddReferencesItem
extension_object_ids['AddReferencesItem'] = nid
nid = FourByteNodeId(ObjectIds.AddReferencesRequest_Encoding_DefaultBinary)
extension_object_classes[nid] = AddReferencesRequest
extension_object_ids['AddReferencesRequest'] = nid
nid = FourByteNodeId(ObjectIds.AddReferencesResponse_Encoding_DefaultBinary)
extension_object_classes[nid] = AddReferencesResponse
extension_object_ids['AddReferencesResponse'] = nid
nid = FourByteNodeId(ObjectIds.DeleteNodesItem_Encoding_DefaultBinary)
extension_object_classes[nid] = DeleteNodesItem
extension_object_ids['DeleteNodesItem'] = nid
nid = FourByteNodeId(ObjectIds.DeleteNodesRequest_Encoding_DefaultBinary)
extension_object_classes[nid] = DeleteNodesRequest
extension_object_ids['DeleteNodesRequest'] = nid
# Register the binary default encodings for the remaining auto-generated
# structures in one data-driven pass instead of repeating the same
# three-statement pattern once per type.  For every class C this performs
# exactly what the unrolled form did:
#     nid = FourByteNodeId(ObjectIds.<C>_Encoding_DefaultBinary)
#     extension_object_classes[nid] = C
#     extension_object_ids[C.__name__] = nid
# Registration order and the final value left in `nid` are unchanged.
for _extension_object_class in (
    DeleteNodesResponse,
    DeleteReferencesItem,
    DeleteReferencesRequest,
    DeleteReferencesResponse,
    ViewDescription,
    BrowseDescription,
    ReferenceDescription,
    BrowseResult,
    BrowseRequest,
    BrowseResponse,
    BrowseNextRequest,
    BrowseNextResponse,
    RelativePathElement,
    RelativePath,
    BrowsePath,
    BrowsePathTarget,
    BrowsePathResult,
    TranslateBrowsePathsToNodeIdsRequest,
    TranslateBrowsePathsToNodeIdsResponse,
    RegisterNodesRequest,
    RegisterNodesResponse,
    UnregisterNodesRequest,
    UnregisterNodesResponse,
    EndpointConfiguration,
    QueryDataDescription,
    NodeTypeDescription,
    QueryDataSet,
    NodeReference,
    ContentFilterElement,
    ContentFilter,
    ElementOperand,
    LiteralOperand,
    AttributeOperand,
    SimpleAttributeOperand,
    ContentFilterElementResult,
    ContentFilterResult,
    ParsingResult,
    QueryFirstRequest,
    QueryFirstResponse,
    QueryNextRequest,
    QueryNextResponse,
    ReadValueId,
    ReadRequest,
    ReadResponse,
    HistoryReadValueId,
    HistoryReadResult,
    HistoryReadDetails,
    ReadEventDetails,
    ReadRawModifiedDetails,
    ReadProcessedDetails,
    ReadAtTimeDetails,
    HistoryData,
    ModificationInfo,
    HistoryModifiedData,
    HistoryEvent,
    HistoryReadRequest,
    HistoryReadResponse,
    WriteValue,
    WriteRequest,
    WriteResponse,
    HistoryUpdateDetails,
    UpdateDataDetails,
    UpdateStructureDataDetails,
    UpdateEventDetails,
    DeleteRawModifiedDetails,
    DeleteAtTimeDetails,
    DeleteEventDetails,
    HistoryUpdateResult,
    HistoryUpdateRequest,
    HistoryUpdateResponse,
    CallMethodRequest,
    CallMethodResult,
    CallRequest,
    CallResponse,
    MonitoringFilter,
    DataChangeFilter,
    EventFilter,
    AggregateConfiguration,
    AggregateFilter,
    MonitoringFilterResult,
    EventFilterResult,
    AggregateFilterResult,
    MonitoringParameters,
    MonitoredItemCreateRequest,
    MonitoredItemCreateResult,
    CreateMonitoredItemsRequest,
    CreateMonitoredItemsResponse,
    MonitoredItemModifyRequest,
    MonitoredItemModifyResult,
    ModifyMonitoredItemsRequest,
    ModifyMonitoredItemsResponse,
    SetMonitoringModeRequest,
    SetMonitoringModeResponse,
    SetTriggeringRequest,
    SetTriggeringResponse,
    DeleteMonitoredItemsRequest,
    DeleteMonitoredItemsResponse,
    CreateSubscriptionRequest,
    CreateSubscriptionResponse,
    ModifySubscriptionRequest,
    ModifySubscriptionResponse,
    SetPublishingModeRequest,
    SetPublishingModeResponse,
    NotificationMessage,
    NotificationData,
    DataChangeNotification,
    MonitoredItemNotification,
    EventNotificationList,
    EventFieldList,
    HistoryEventFieldList,
    StatusChangeNotification,
    SubscriptionAcknowledgement,
    PublishRequest,
    PublishResponse,
    RepublishRequest,
    RepublishResponse,
    TransferResult,
    TransferSubscriptionsRequest,
    TransferSubscriptionsResponse,
    DeleteSubscriptionsRequest,
    DeleteSubscriptionsResponse,
    BuildInfo,
    RedundantServerDataType,
    EndpointUrlListDataType,
    NetworkGroupDataType,
    SamplingIntervalDiagnosticsDataType,
    ServerDiagnosticsSummaryDataType,
    ServerStatusDataType,
    SessionDiagnosticsDataType,
    SessionSecurityDiagnosticsDataType,
    ServiceCounterDataType,
    StatusResult,
    SubscriptionDiagnosticsDataType,
    ModelChangeStructureDataType,
    SemanticChangeStructureDataType,
    Range,
    EUInformation,
    ComplexNumberType,
    DoubleComplexNumberType,
    AxisInformation,
    XVType,
    ProgramDiagnosticDataType,
    ProgramDiagnostic2DataType,
    Annotation,
):
    _name = _extension_object_class.__name__
    nid = FourByteNodeId(getattr(ObjectIds, _name + "_Encoding_DefaultBinary"))
    extension_object_classes[nid] = _extension_object_class
    extension_object_ids[_name] = nid
| 31.526812
| 130
| 0.643612
|
794b51fc6569344eaf6df528d15588063c9c2732
| 10,767
|
py
|
Python
|
rasa/nlu/featurizers/sparse_featurizer/_regex_featurizer.py
|
hercules261188/rasa
|
6b0631f1fddf95a044c728e885e2940dd2f8f83e
|
[
"Apache-2.0"
] | 1
|
2021-05-30T16:01:12.000Z
|
2021-05-30T16:01:12.000Z
|
rasa/nlu/featurizers/sparse_featurizer/_regex_featurizer.py
|
hercules261188/rasa
|
6b0631f1fddf95a044c728e885e2940dd2f8f83e
|
[
"Apache-2.0"
] | 64
|
2021-09-24T06:44:41.000Z
|
2022-03-14T12:12:28.000Z
|
rasa/nlu/featurizers/sparse_featurizer/_regex_featurizer.py
|
hercules261188/rasa
|
6b0631f1fddf95a044c728e885e2940dd2f8f83e
|
[
"Apache-2.0"
] | null | null | null |
# flake8: noqa
# WARNING: This module will be dropped before Rasa Open Source 3.0 is released.
# This module is a workaround to defer breaking changes due to the architecture
# revamp in 3.0.
import logging
import re
from typing import Any, Dict, List, Optional, Text, Type, Tuple
from pathlib import Path
import numpy as np
import scipy.sparse
import rasa.shared.utils.io
import rasa.utils.io
import rasa.nlu.utils.pattern_utils as pattern_utils
from rasa.nlu import utils
from rasa.nlu.components import Component
from rasa.nlu.config import RasaNLUModelConfig
from rasa.nlu.constants import (
TOKENS_NAMES,
FEATURIZER_CLASS_ALIAS,
)
from rasa.shared.nlu.constants import (
TEXT,
RESPONSE,
FEATURE_TYPE_SENTENCE,
FEATURE_TYPE_SEQUENCE,
ACTION_TEXT,
)
from rasa.nlu.featurizers.featurizer import SparseFeaturizer
from rasa.shared.nlu.training_data.features import Features
from rasa.nlu.model import Metadata
from rasa.nlu.tokenizers.tokenizer import Tokenizer
from rasa.shared.nlu.training_data.training_data import TrainingData
from rasa.shared.nlu.training_data.message import Message
logger = logging.getLogger(__name__)
class RegexFeaturizer(SparseFeaturizer):
    """Creates sparse message features from regex and lookup-table matches.

    For every known pattern the featurizer produces one feature column:
    sequence-level features mark which tokens overlap a match, and
    sentence-level features mark whether the message matched at all.

    NOTE: this class lives in a deprecation-workaround module that will be
    dropped before Rasa Open Source 3.0 (see the module-level warning).
    """
    @classmethod
    def required_components(cls) -> List[Type[Component]]:
        """Components required earlier in the pipeline (tokens must exist)."""
        return [Tokenizer]
    defaults = {
        # text will be processed with case sensitive as default
        "case_sensitive": True,
        # use lookup tables to generate features
        "use_lookup_tables": True,
        # use regexes to generate features
        "use_regexes": True,
        # use match word boundaries for lookup table
        "use_word_boundaries": True,
        # Additional number of patterns to consider
        # for incremental training
        "number_additional_patterns": None,
    }
    def __init__(
        self,
        component_config: Optional[Dict[Text, Any]] = None,
        known_patterns: Optional[List[Dict[Text, Text]]] = None,
        finetune_mode: bool = False,
    ) -> None:
        """Constructs new features for regexes and lookup table using regex expressions.

        Args:
            component_config: Configuration for the component
            known_patterns: Regex Patterns the component should pre-load itself with.
            finetune_mode: Load component in finetune mode.
        """
        super().__init__(component_config)
        self.known_patterns = known_patterns if known_patterns else []
        self.case_sensitive = self.component_config["case_sensitive"]
        self.finetune_mode = finetune_mode
        # `number_additional_patterns` is accepted but ignored; warn so users
        # can remove it from their configs.
        if self.component_config["number_additional_patterns"]:
            rasa.shared.utils.io.raise_deprecation_warning(
                "The parameter `number_additional_patterns` has been deprecated "
                "since the pipeline does not create an extra buffer for new vocabulary "
                "anymore. Any value assigned to this parameter will be ignored. "
                "You can omit specifying `number_additional_patterns` in future runs."
            )
    def _merge_new_patterns(self, new_patterns: List[Dict[Text, Text]]) -> None:
        """Updates already known patterns with new patterns extracted from data.

        New patterns should always be added to the end of the existing
        patterns and the order of the existing patterns should not be disturbed,
        because the pattern index doubles as the feature column index.

        Args:
            new_patterns: Patterns extracted from training data and to be merged with
                known patterns.
        """
        pattern_name_index_map = {
            pattern["name"]: index for index, pattern in enumerate(self.known_patterns)
        }
        for extra_pattern in new_patterns:
            new_pattern_name = extra_pattern["name"]
            # Some patterns may have just new examples added
            # to them. These do not count as additional pattern.
            if new_pattern_name in pattern_name_index_map:
                self.known_patterns[pattern_name_index_map[new_pattern_name]][
                    "pattern"
                ] = extra_pattern["pattern"]
            else:
                self.known_patterns.append(extra_pattern)
    def train(
        self,
        training_data: TrainingData,
        config: Optional[RasaNLUModelConfig] = None,
        **kwargs: Any,
    ) -> None:
        """Trains the component with all patterns extracted from training data.

        Args:
            training_data: Training data consisting of training examples and patterns
                available.
            config: NLU Pipeline config
            **kwargs: Any other arguments
        """
        patterns_from_data = pattern_utils.extract_patterns(
            training_data,
            use_lookup_tables=self.component_config["use_lookup_tables"],
            use_regexes=self.component_config["use_regexes"],
            use_word_boundaries=self.component_config["use_word_boundaries"],
        )
        if self.finetune_mode:
            # Merge patterns extracted from data with known patterns
            self._merge_new_patterns(patterns_from_data)
        else:
            self.known_patterns = patterns_from_data
        # Pre-compute features for every training example and attribute.
        for example in training_data.training_examples:
            for attribute in [TEXT, RESPONSE, ACTION_TEXT]:
                self._text_features_with_regex(example, attribute)
    def process(self, message: Message, **kwargs: Any) -> None:
        """Featurizes the TEXT attribute of an incoming message at inference time."""
        self._text_features_with_regex(message, TEXT)
    def _text_features_with_regex(self, message: Message, attribute: Text) -> None:
        """Helper method to extract features and set them appropriately in the message.

        Args:
            message: Message to be featurized.
            attribute: Attribute of message to be featurized.
        """
        # Without any known patterns there is nothing to featurize.
        if self.known_patterns:
            sequence_features, sentence_features = self._features_for_patterns(
                message, attribute
            )
            if sequence_features is not None:
                final_sequence_features = Features(
                    sequence_features,
                    FEATURE_TYPE_SEQUENCE,
                    attribute,
                    self.component_config[FEATURIZER_CLASS_ALIAS],
                )
                message.add_features(final_sequence_features)
            if sentence_features is not None:
                final_sentence_features = Features(
                    sentence_features,
                    FEATURE_TYPE_SENTENCE,
                    attribute,
                    self.component_config[FEATURIZER_CLASS_ALIAS],
                )
                message.add_features(final_sentence_features)
    def _features_for_patterns(
        self, message: Message, attribute: Text
    ) -> Tuple[Optional[scipy.sparse.coo_matrix], Optional[scipy.sparse.coo_matrix]]:
        """Checks which known patterns match the message.

        Given a sentence, returns a vector of {1,0} values indicating which
        regexes did match. Furthermore, if the
        message is tokenized, the function will mark all tokens with a dict
        relating the name of the regex to whether it was matched.

        Args:
            message: Message to be featurized.
            attribute: Attribute of message to be featurized.

        Returns:
            Token and sentence level features of message attribute.
        """
        # Attribute not set (e.g. response not present)
        if not message.get(attribute):
            return None, None
        tokens = message.get(TOKENS_NAMES[attribute], [])
        if not tokens:
            # nothing to featurize
            return None, None
        flags = 0  # default flag
        if not self.case_sensitive:
            flags = re.IGNORECASE
        sequence_length = len(tokens)
        num_patterns = len(self.known_patterns)
        # One column per known pattern; rows are tokens (sequence matrix)
        # or the whole message (sentence matrix).
        sequence_features = np.zeros([sequence_length, num_patterns])
        sentence_features = np.zeros([1, num_patterns])
        for pattern_index, pattern in enumerate(self.known_patterns):
            matches = re.finditer(
                pattern["pattern"], message.get(attribute), flags=flags
            )
            matches = list(matches)
            for token_index, t in enumerate(tokens):
                patterns = t.get("pattern", default={})
                patterns[pattern["name"]] = False
                for match in matches:
                    # Token overlaps this regex match (character spans intersect).
                    if t.start < match.end() and t.end > match.start():
                        patterns[pattern["name"]] = True
                        sequence_features[token_index][pattern_index] = 1.0
                        if attribute in [RESPONSE, TEXT, ACTION_TEXT]:
                            # sentence vector should contain all patterns
                            sentence_features[0][pattern_index] = 1.0
                t.set("pattern", patterns)
        return (
            scipy.sparse.coo_matrix(sequence_features),
            scipy.sparse.coo_matrix(sentence_features),
        )
    @classmethod
    def load(
        cls,
        meta: Dict[Text, Any],
        model_dir: Text,
        model_metadata: Optional[Metadata] = None,
        cached_component: Optional["RegexFeaturizer"] = None,
        should_finetune: bool = False,
        **kwargs: Any,
    ) -> "RegexFeaturizer":
        """Loads a previously trained component.

        Args:
            meta: Configuration of trained component.
            model_dir: Path where trained pipeline is stored.
            model_metadata: Metadata for the trained pipeline.
            cached_component: Previously cached component(if any).
            should_finetune: Indicates whether to load the component for further
                finetuning.
            **kwargs: Any other arguments.
        """
        file_name = meta["file"]
        patterns_file_name = Path(model_dir) / (file_name + ".patterns.pkl")
        known_patterns = None
        if patterns_file_name.exists():
            known_patterns = rasa.shared.utils.io.read_json_file(patterns_file_name)
        # `meta` is passed as the component_config of the new instance.
        return RegexFeaturizer(
            meta, known_patterns=known_patterns, finetune_mode=should_finetune,
        )
    def persist(self, file_name: Text, model_dir: Text) -> Optional[Dict[Text, Any]]:
        """Persist this model into the passed directory.

        Args:
            file_name: Prefix to add to all files stored as part of this component.
            model_dir: Path where files should be stored.

        Returns:
            Metadata necessary to load the model again.
        """
        # NOTE: the file keeps a ".pkl" suffix for historical reasons, but
        # the content is JSON (see `load`, which reads it with read_json_file).
        patterns_file_name = file_name + ".patterns.pkl"
        regex_file = Path(model_dir) / patterns_file_name
        utils.write_json_to_file(regex_file, self.known_patterns, indent=4)
        return {"file": file_name}
| 38.180851
| 88
| 0.63611
|
794b5281ee91d6c1bcc69902ceaa2b309e1a5576
| 2,039
|
py
|
Python
|
source/preprocessing/heart_rate/heart_rate_service.py
|
marta18a/sleep_classifiers
|
75aaae37dafe752909cf56351911e246a4febb4d
|
[
"MIT"
] | 97
|
2019-02-18T20:19:21.000Z
|
2022-03-30T05:01:44.000Z
|
source/preprocessing/heart_rate/heart_rate_service.py
|
marta18a/sleep_classifiers
|
75aaae37dafe752909cf56351911e246a4febb4d
|
[
"MIT"
] | 13
|
2019-10-29T05:53:11.000Z
|
2022-03-14T21:38:49.000Z
|
source/preprocessing/heart_rate/heart_rate_service.py
|
marta18a/sleep_classifiers
|
75aaae37dafe752909cf56351911e246a4febb4d
|
[
"MIT"
] | 49
|
2019-06-03T23:22:19.000Z
|
2022-03-30T05:01:40.000Z
|
import numpy as np
import pandas as pd
from source import utils
from source.constants import Constants
from source.preprocessing.heart_rate.heart_rate_collection import HeartRateCollection
class HeartRateService(object):
    """Load, crop, and persist per-subject heart rate files."""

    @staticmethod
    def load_raw(subject_id):
        """Load the raw heart rate data for a subject, dropping repeated rows."""
        raw_path = HeartRateService.get_raw_file_path(subject_id)
        data = HeartRateService.load(raw_path, ",")
        data = utils.remove_repeats(data)
        return HeartRateCollection(subject_id=subject_id, data=data)

    @staticmethod
    def load_cropped(subject_id):
        """Load previously cropped heart rate data for a subject."""
        cropped_path = HeartRateService.get_cropped_file_path(subject_id)
        return HeartRateCollection(
            subject_id=subject_id, data=HeartRateService.load(cropped_path)
        )

    @staticmethod
    def load(hr_file, delimiter=" "):
        """Read a delimited heart rate file and return its rows as a numpy array."""
        return pd.read_csv(str(hr_file), delimiter=delimiter).values

    @staticmethod
    def write(heart_rate_collection):
        """Persist a collection's data to the subject's cropped-output file."""
        output_path = HeartRateService.get_cropped_file_path(
            heart_rate_collection.subject_id)
        np.savetxt(output_path, heart_rate_collection.data, fmt='%f')

    @staticmethod
    def crop(heart_rate_collection, interval):
        """Return a new collection restricted to [interval.start_time, interval.end_time)."""
        stamps = heart_rate_collection.timestamps
        in_window = ((stamps >= interval.start_time)
                     & (stamps < interval.end_time)).nonzero()[0]
        return HeartRateCollection(
            subject_id=heart_rate_collection.subject_id,
            data=heart_rate_collection.data[in_window, :],
        )

    @staticmethod
    def get_cropped_file_path(subject_id):
        """Path where the cropped heart rate output for this subject lives."""
        return Constants.CROPPED_FILE_PATH.joinpath(subject_id + "_cleaned_hr.out")

    @staticmethod
    def get_raw_file_path(subject_id):
        """Path to the subject's raw heart rate file under data/heart_rate/."""
        hr_dir = utils.get_project_root().joinpath('data/heart_rate/')
        return hr_dir.joinpath(subject_id + '_heartrate.txt')
| 39.211538
| 97
| 0.747916
|
794b52b75eb6200807bf5a55cb5b612a3a72f234
| 4,490
|
py
|
Python
|
benchmark/startQiskit1658.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit1658.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit1658.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
# qubit number=5
# total number=59
import cirq
import qiskit
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2,floor, sqrt, pi
import numpy as np
import networkx as nx
def build_oracle(n: int, f) -> QuantumCircuit:
    """Construct the phase oracle Z_f over n qubits.

    Flips the sign of every basis state x with f(x) == "1" by mapping the
    marked bitstring onto |1...1>, applying a multi-controlled U1(pi)
    (equivalent to a multi-controlled Z), and undoing the basis change.
    """
    controls = QuantumRegister(n, "ofc")
    oracle = QuantumCircuit(controls, name="Zf")
    for index in range(2 ** n):
        bits = np.binary_repr(index, n)
        if f(bits) != "1":
            continue
        # Positions holding '0' must be X-flipped so the controlled-Z fires.
        zero_positions = [pos for pos in range(n) if bits[pos] == "0"]
        for pos in zero_positions:
            oracle.x(controls[pos])
        if n >= 2:
            oracle.mcu1(pi, controls[1:], controls[0])
        # Undo the basis change.
        for pos in zero_positions:
            oracle.x(controls[pos])
    return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
    """Assemble the search circuit on n qubits and measure every qubit.

    Applies an initial Hadamard layer, then floor(sqrt(2^n)*pi/4) rounds of
    the Zf oracle each followed by a fixed, auto-generated gate sequence
    (the trailing "number=" tags are generator ids — order is significant).
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    classical = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classical)
    # Uniform superposition over all basis states.
    prog.h(input_qubit[0]) # number=3
    prog.h(input_qubit[1]) # number=4
    prog.h(input_qubit[2]) # number=5
    prog.h(input_qubit[3]) # number=6
    prog.h(input_qubit[4]) # number=21
    Zf = build_oracle(n, f)
    # Grover-style iteration count for a single marked item.
    repeat = floor(sqrt(2 ** n) * pi / 4)
    for i in range(repeat):
        prog.append(Zf.to_gate(), [input_qubit[i] for i in range(n)])
        prog.h(input_qubit[0]) # number=1
        prog.h(input_qubit[1]) # number=2
        prog.h(input_qubit[2]) # number=7
        prog.h(input_qubit[3]) # number=8
        prog.h(input_qubit[0]) # number=31
        prog.cz(input_qubit[1],input_qubit[0]) # number=32
        prog.h(input_qubit[1]) # number=52
        prog.h(input_qubit[0]) # number=33
        prog.h(input_qubit[1]) # number=44
        prog.cz(input_qubit[0],input_qubit[1]) # number=45
        prog.h(input_qubit[1]) # number=46
        prog.h(input_qubit[1]) # number=56
        prog.cz(input_qubit[0],input_qubit[1]) # number=57
        prog.h(input_qubit[1]) # number=58
        prog.x(input_qubit[1]) # number=54
        prog.cx(input_qubit[0],input_qubit[1]) # number=55
        prog.h(input_qubit[1]) # number=48
        prog.cz(input_qubit[0],input_qubit[1]) # number=49
        prog.h(input_qubit[1]) # number=50
        prog.x(input_qubit[0]) # number=26
        prog.cx(input_qubit[1],input_qubit[0]) # number=27
        prog.h(input_qubit[1]) # number=37
        prog.cz(input_qubit[0],input_qubit[1]) # number=38
        prog.h(input_qubit[1]) # number=39
        prog.x(input_qubit[1]) # number=35
        prog.cx(input_qubit[0],input_qubit[1]) # number=36
        prog.x(input_qubit[2]) # number=11
        prog.x(input_qubit[3]) # number=12
        prog.cx(input_qubit[3],input_qubit[2]) # number=43
        prog.cx(input_qubit[3],input_qubit[2]) # number=47
        if n>=2:
            prog.mcu1(pi,input_qubit[1:],input_qubit[0])
        prog.x(input_qubit[0]) # number=13
        prog.cx(input_qubit[0],input_qubit[1]) # number=22
        prog.x(input_qubit[1]) # number=23
        prog.cx(input_qubit[0],input_qubit[1]) # number=24
        prog.x(input_qubit[2]) # number=15
        prog.x(input_qubit[1]) # number=29
        prog.y(input_qubit[4]) # number=28
        prog.x(input_qubit[3]) # number=16
        prog.h(input_qubit[0]) # number=17
        prog.h(input_qubit[1]) # number=18
        prog.h(input_qubit[3]) # number=51
        prog.h(input_qubit[2]) # number=19
        prog.h(input_qubit[3]) # number=20
    # circuit end
    # Measure every qubit into its matching classical bit.
    for i in range(n):
        prog.measure(input_qubit[i], classical[i])
    return prog
if __name__ == '__main__':
    # Search for the single marked bitstring `key`: f(x) == "1" iff x == key.
    key = "00000"
    f = lambda rep: str(int(rep == key))
    prog = make_circuit(5, f)
    backend = BasicAer.get_backend('qasm_simulator')
    sample_shot = 7924
    # Measurement counts from the ideal simulator.
    info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
    # Re-transpile against the FakeVigo noise-model backend for depth stats.
    backend = FakeVigo()
    circuit1 = transpile(prog, backend, optimization_level=2)
    # Context manager ensures the result file is closed even if printing fails
    # (the original left the handle open on any exception before close()).
    with open("../data/startQiskit1658.csv", "w") as writefile:
        print(info, file=writefile)
        print("results end", file=writefile)
        print(circuit1.depth(), file=writefile)
        print(circuit1, file=writefile)
| 32.536232
| 82
| 0.606459
|
794b532af6e5190b98d55f1f7b560e5114615c10
| 4,329
|
py
|
Python
|
src/OOMDP.py
|
joeyginorio/ObjectOrientedMDP
|
aa75e79f0c6ab912c34a8af8499a166c1f18526b
|
[
"MIT"
] | 1
|
2020-02-24T06:01:59.000Z
|
2020-02-24T06:01:59.000Z
|
src/OOMDP.py
|
joeyginorio/ObjectOrientedMDP
|
aa75e79f0c6ab912c34a8af8499a166c1f18526b
|
[
"MIT"
] | null | null | null |
src/OOMDP.py
|
joeyginorio/ObjectOrientedMDP
|
aa75e79f0c6ab912c34a8af8499a166c1f18526b
|
[
"MIT"
] | null | null | null |
# Joey Velez-Ginorio
# --------------------
# OOMDP Implementation
# --------------------
import itertools
import numpy as np
from Object import Object
class OOMDP(object):
"""
Defines an Object-Oriented Markov Decision Process, formally defined
as a nine tuple,
< classes, attributes(c), domain(a), objects, relations, actions, transitions,
rewards, discount >, where:
classes -> is a set of object classes.
attributes(c) -> is a function that specifies attributes of a
class c.
domain(a) -> is a function that specifies the range of possible
values for an attribute a.
objects -> is a set of objects, each of which belongs to a
class, C. The state of an object is a value assignment to all
its attributes.
relations -> is a set of relations, specifying how two objects may
interact.
actions -> is a finite set of actions.
transitions -> specifies T(s'|s,a), probability an agent goes to s'
by taking action a in state s.
rewards -> specifies R(s,a), the reward an agent receives from the
environment for taking action a in state s.
discount -> is the discount factor, [0,1). The smaller lambda is,
the more the agent will favor immediate reward.
"""
def __init__(self, objects=None, relations=None, actions=None,
transitions=None, rewards=None, discount=None):
"""
Initializes parts of the nine tuple (a, t, r, lam), then
extracts c, att(c), and dom(a) from o for the fully-defined
OOMDP.
Params:
objects -> is a 3D dict mapping objects to classes to
attributes. {Object: {Class: {Attribute: Value}}}
relations -> is a 2D dict mapping a set of classes, (c_1, c_2),
to a list of boolean functions that operate over the set
of classes.
actions -> is a list of actions, e.g. [Up, Down, Left, Right]
transitions -> is a 3D Matrix, specifiyng transition dynamics
from state to state, T[s'|s,a]. This can be learned through
DOORMAX.
rewards -> is a TBD
discount -> is a discrete value [0,1), the lower it is the more
the agent prefers immediate reward.
"""
# Initialize part of the nine-tuple OOMDP, M
self.relations = relations
self.actions = actions
self.transitions = transitions
self.rewards = rewards
self.discount = discount
# Instantiate all instances of Object from 3D dict, O.
updatedObjects = list()
for obj in objects:
# Extract object name
temp = obj
# Use name to create instance of Object, initialize with
# class membership, and attributes with values
temp = Object(temp, objects[temp].keys()[0], objects[temp].values()[0])
updatedObjects.append(temp)
# O is now in a more accessible representation
self.objects = updatedObjects
def getTerms(self):
"""
Retrieves all terms in the OOMDP. Terms come from all boolean
functions, which can be from object attributes, relations defined,
over classes, and any boolean function defined over the state space
that encodes prior knowledge.
"""
terms = ""
# Extract terms from objects
for o in self.objects:
terms += o.state()
# Extract terms from relations
for key, val in self.relations.items():
terms += self.checkRelations({key:val})
# Extract terms from boolean functions encoding prior knowledge
return terms
def checkRelations(self, relation):
"""
Provided a list of boolean functions defined over classes, returns
a string of terms corresponding to the T/F value of each relation.
"""
# Figure out which classes the relations are defined over
classPair = list(relation.keys()[0])
# Collect all objects of each class
o1 = list()
o2 = list()
for o in self.objects:
if o.c == classPair[0]:
o1.append(o)
if o.c == classPair[1]:
o2.append(o)
# Construct all object pairs that will be tested on the relation
objectPairs = list(itertools.product(o1,o2))
# Test all object pairs on all relations, return string of terms
terms = ""
for rel in relation.values()[0]:
term = 0
for objPair in objectPairs:
term |= int(rel(objPair[0], objPair[1]))
terms += str(term)
return terms
def effect_att(self, att, s0, s1):
"""
Returns one effect of each type that would tranform att in state s0
to it's value in state s1.
"""
| 26.236364
| 80
| 0.678679
|
794b537cbff4cea391b49de333bead3e681df391
| 2,348
|
py
|
Python
|
varats/varats/projects/c_projects/glib.py
|
ajrox090/VaRA-Tool-Suite
|
1550d36a4049e0615afb0bacfb96b1d506a29c98
|
[
"BSD-2-Clause"
] | null | null | null |
varats/varats/projects/c_projects/glib.py
|
ajrox090/VaRA-Tool-Suite
|
1550d36a4049e0615afb0bacfb96b1d506a29c98
|
[
"BSD-2-Clause"
] | null | null | null |
varats/varats/projects/c_projects/glib.py
|
ajrox090/VaRA-Tool-Suite
|
1550d36a4049e0615afb0bacfb96b1d506a29c98
|
[
"BSD-2-Clause"
] | null | null | null |
"""Project file for glib."""
import typing as tp
import benchbuild as bb
from benchbuild.utils.cmd import ninja, meson
from benchbuild.utils.settings import get_number_of_jobs
from plumbum import local
from varats.containers.containers import get_base_image, ImageBase
from varats.paper_mgmt.paper_config import project_filter_generator
from varats.project.project_domain import ProjectDomains
from varats.project.project_util import (
ProjectBinaryWrapper,
wrap_paths_to_binaries,
BinaryType,
verify_binaries,
)
from varats.project.varats_project import VProject
from varats.utils.git_util import ShortCommitHash
from varats.utils.settings import bb_cfg
class Glib(VProject):
    """
    GLib is the low-level core library that forms the basis for projects such as
    GTK and GNOME. It provides data structure handling for C, portability
    wrappers, and interfaces for such runtime functionality as an event loop,
    threads, dynamic loading, and an object system.
    (fetched by Git)
    """

    NAME = 'glib'
    GROUP = 'c_projects'
    DOMAIN = ProjectDomains.DATA_STRUCTURES
    # Checkout of the upstream mirror; version_filter restricts revisions to
    # those selected by the active paper config.
    SOURCE = [
        bb.source.Git(
            remote="https://github.com/GNOME/glib.git",
            local="glib",
            refspec="HEAD",
            limit=None,
            shallow=False,
            version_filter=project_filter_generator("glib")
        )
    ]
    # Debian 10 base image with the meson/ninja toolchain preinstalled.
    CONTAINER = get_base_image(
        ImageBase.DEBIAN_10
    ).run('apt', 'install', '-y', 'meson', 'ninja-build')

    @staticmethod
    def binaries_for_revision(
        revision: ShortCommitHash  # pylint: disable=W0613
    ) -> tp.List[ProjectBinaryWrapper]:
        """Return the binaries this project produces (same for all revisions)."""
        return wrap_paths_to_binaries([
            ('build/glib/libglib-2.0.so', BinaryType.SHARED_LIBRARY)
        ])

    def run_tests(self) -> None:
        # GLib's test suite is intentionally not exercised by this setup.
        pass

    def compile(self) -> None:
        """Compile the project."""
        glib_source = local.path(self.source_of_primary)
        cc_compiler = bb.compiler.cc(self)
        with local.cwd(glib_source):
            with local.env(CC=str(cc_compiler)):
                # Configure with meson, then build with ninja on all jobs.
                bb.watch(meson)("build")
                bb.watch(ninja)("-j", get_number_of_jobs(bb_cfg()), "-C", "build")
            verify_binaries(self)

    @classmethod
    def get_cve_product_info(cls) -> tp.List[tp.Tuple[str, str]]:
        """(vendor, product) pairs used when querying CVE databases."""
        return [("Gnome", "Glib")]
| 29.721519
| 80
| 0.669932
|
794b538c2904836ec42985dbbefede195b4e454e
| 4,114
|
py
|
Python
|
run-tests.py
|
siraben/nixpkgs-hammering
|
f9e9674ddf0e6861d2009c25a3667aace1de67df
|
[
"MIT"
] | null | null | null |
run-tests.py
|
siraben/nixpkgs-hammering
|
f9e9674ddf0e6861d2009c25a3667aace1de67df
|
[
"MIT"
] | null | null | null |
run-tests.py
|
siraben/nixpkgs-hammering
|
f9e9674ddf0e6861d2009c25a3667aace1de67df
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import os
import subprocess
import textwrap
import unittest
script_dir = os.path.dirname(os.path.realpath(__file__))
def make_test_variant(rule, variant=None, should_match=True):
    """Build a zero-argument test callable for one rule (and optional variant).

    The callable runs nixpkgs-hammer on the corresponding test attribute and
    verifies whether the rule's explanation appears in the tool's output.
    """
    def case():
        attr_path = rule if variant is None else f'{rule}.{variant}'
        completed = subprocess.run(
            [
                os.path.join(script_dir, 'tools/nixpkgs-hammer'),
                '-f', './tests',
                attr_path
            ],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
        if completed.returncode != 0:
            raise Exception('error building the test:' + completed.stdout.decode('utf-8'))
        # The tool references the explanation file whenever the rule matched.
        matched = f'explanations/{rule}.md'.encode('utf-8') in completed.stdout
        if should_match and not matched:
            raise Exception('error matching the rule')
        if matched and not should_match:
            raise Exception('rule should not match')
    return case
def make_test_rule(rule, matching_variants=None, nonmatching_variants=()):
    """Build the test case (or suite) for one rule.

    Args:
        rule: Rule name; also the expected explanation file stem.
        matching_variants: Variant names that must trigger the rule, or
            None when the rule's test has no variants.
        nonmatching_variants: Variant names that must NOT trigger the rule.

    Returns:
        A single FunctionTestCase when there are no variants, otherwise a
        TestSuite with one case per variant.
    """
    # The default used to be a mutable `[]`; an immutable tuple avoids the
    # shared-mutable-default pitfall and iterates identically.
    if matching_variants is None:
        return FunctionTestCase(make_test_variant(rule), description=rule)
    suite = unittest.TestSuite()
    for variant in matching_variants:
        suite.addTest(FunctionTestCase(make_test_variant(rule, variant), description=f'{rule}.{variant}'))
    for variant in nonmatching_variants:
        suite.addTest(FunctionTestCase(make_test_variant(rule, variant, should_match=False), description=f'{rule}.{variant}'))
    return suite
class FunctionTestCase(unittest.FunctionTestCase):
# We do not care that the test class is unittest.case.FunctionTestCase (case)
def __str__(self):
return self._description
# Do not print description twice
def shortDescription(self):
return None
class TestSuite(unittest.TestSuite):
    """Static suite enumerating each rule with its matching and
    non-matching test variants; built lazily via __iter__."""
    # do not run _removeTestAtIndex since there is nothing in _tests
    _cleanup = False
    def __iter__(self):
        # Each yield contributes one FunctionTestCase or a nested suite.
        yield make_test_rule(
            'attribute-ordering',
            [
                'out-of-order',
            ],
            [
                'properly-ordered',
            ]
        )
        yield make_test_rule(
            'build-tools-in-build-inputs',
            [
                'cmake',
                'meson',
                'ninja',
                'pkg-config',
            ],
        )
        yield make_test_rule(
            'explicit-phases',
            [
                'configure',
                'build',
                'check',
                'install',
            ],
        )
        yield make_test_rule(
            'fixup-phase'
        )
        yield make_test_rule(
            'meson-cmake'
        )
        yield make_test_rule(
            'missing-phase-hooks',
            [
                'configure-pre',
                'configure-post',
                'configure-both',
                'build-pre',
                'build-post',
                'build-both',
                'check-pre',
                'check-post',
                'check-both',
                'install-pre',
                'install-post',
                'install-both',
            ],
        )
        yield make_test_rule(
            'patch-phase'
        )
        yield make_test_rule(
            'unnecessary-parallel-building',
            [
                'cmake',
                'meson',
                'qmake',
                'qt-derivation',
            ],
        )
        yield make_test_rule(
            'unclear-gpl',
            [
                'agpl3',
                'fdl11',
                'fdl12',
                'fdl13',
                'gpl1',
                'gpl2',
                'gpl3',
                'lgpl2',
                'lgpl21',
                'lgpl3',
            ],
            [
                'single-nonmatching-license',
            ]
        )
def load_tests(loader, tests, pattern):
    """unittest load_tests protocol hook: run the custom suite instead of discovery."""
    return TestSuite()
if __name__ == '__main__':
    # verbosity=2 prints each test's description as it runs.
    unittest.main(verbosity=2)
| 26.203822
| 130
| 0.494409
|
794b53dd13d6f9017c252f49b9ae038b26dd0589
| 796
|
py
|
Python
|
iris/setup.py
|
Babak-Ebrahimi/cloudml-sample
|
34959ec732ac8efa7fb3efdd293ac7956fa5253a
|
[
"Apache-2.0"
] | null | null | null |
iris/setup.py
|
Babak-Ebrahimi/cloudml-sample
|
34959ec732ac8efa7fb3efdd293ac7956fa5253a
|
[
"Apache-2.0"
] | null | null | null |
iris/setup.py
|
Babak-Ebrahimi/cloudml-sample
|
34959ec732ac8efa7fb3efdd293ac7956fa5253a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
# Distribution metadata for the Cloud ML trainer package.
NAME = 'trainer'
VERSION = '1.0'
if __name__ == '__main__':
    # Bundle the `trainer` package; protobuf is required at runtime.
    setuptools.setup(name=NAME, version=VERSION, packages=['trainer'],
                     install_requires=['protobuf'])
| 34.608696
| 74
| 0.734925
|
794b54f9559c14d42cb19c8eeb7faf920093e638
| 5,325
|
py
|
Python
|
codango/codango/settings/base.py
|
alexkiura/codango
|
47f94515c4e7437341ffb0d9bf09cfeb876b59e1
|
[
"MIT"
] | 1
|
2021-03-24T22:41:19.000Z
|
2021-03-24T22:41:19.000Z
|
codango/codango/settings/base.py
|
mrkiura/codango
|
47f94515c4e7437341ffb0d9bf09cfeb876b59e1
|
[
"MIT"
] | null | null | null |
codango/codango/settings/base.py
|
mrkiura/codango
|
47f94515c4e7437341ffb0d9bf09cfeb876b59e1
|
[
"MIT"
] | 1
|
2019-04-10T19:40:28.000Z
|
2019-04-10T19:40:28.000Z
|
"""
Django settings for codango project.
Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
from __future__ import absolute_import
import os
import cloudinary
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
from django.contrib.messages import constants as message_constants
from celery.schedules import crontab
BASE_DIR = os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
BOWER_INSTALLED_APPS = (
'mdi',
'moment',
'jquery',
'bootstrap',
'ace-builds',
)
BOWER_COMPONENTS_ROOT = os.path.join(BASE_DIR, 'static')
# context processor for django-endless-pagination
TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.request',
)
ENDLESS_PAGINATION_LOADING = """<img src="/static/img/ajax-loader.gif" alt="loading"/>"""
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'djangobower.finders.BowerFinder',
)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.getenv('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'account',
'resources',
'userprofile',
'comments',
'votes',
'pairprogram',
'bootstrapform',
'cloudinary',
'djangobower',
'endless_pagination',
'djcelery'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'middleware.activity.ActivityMiddleWare',
)
ROOT_URLCONF = 'codango.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
AUTHENTICATION_BACKEND = (
'django.contrib.auth.backends.ModelBackend',
)
WSGI_APPLICATION = 'codango.wsgi.application'
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en'
TIME_ZONE = 'Africa/Lagos'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_URL = '/static/'
APPEND_SLASH = False
STATIC_ROOT = 'staticfiles'
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
cloudinary.config(
cloud_name=os.getenv('cloud_name'),
api_key=os.getenv('api_key'),
api_secret=os.getenv('api_secret')
)
# custom message tag for django messaging middleware
MESSAGE_TAGS = {
message_constants.ERROR: 'danger'
}
# Custom Email
ADMIN_EMAIL = 'olufunmilade.oshodi@andela.com'
CODANGO_EMAIL = 'noreply@codango.com'
# Celery configuration
# The backend used to store task results using RabbitMQ as a broker
# This sends results back as AMQP messages
CELERY_RESULT_BACKEND = 'amqp'
# Scheduling periodic task with Celery
CELERYBEAT_SCHEDULE = {
# Executes daily at midnight
'popular-post-updates-daily': {
'task': 'resources.tasks.send_recent_posts',
'schedule': crontab(minute=0, hour=0),
'args': ['daily'],
},
# Executes every sunday at midnight
'popular-post-updates-weekly': {
'task': 'resources.tasks.send_recent_posts',
'schedule': crontab(minute=0, hour=0, day_of_week='sun'),
'args': ['weekly'],
},
# Executes every first day of the month
'popular-post-updates-monthly': {
'task': 'resources.tasks.send_recent_posts',
'schedule': crontab(0, 0, day_of_month='1'),
'args': ['monthly'],
},
}
# Celery Test Runner for unit tests
TEST_RUNNER = 'djcelery.contrib.test_runner.CeleryTestSuiteRunner'
| 26.492537
| 89
| 0.711549
|
794b556b8b7c79c5b61fc09a99a1a9979a8e207f
| 924
|
py
|
Python
|
examples/chunking/udf/ext_features.py
|
shyamalschandra/deepdive
|
56576d63fe5d7cb926a3bafa5890676bb9e5ee5b
|
[
"Apache-2.0"
] | 1
|
2015-04-06T16:20:00.000Z
|
2015-04-06T16:20:00.000Z
|
examples/chunking/udf/ext_features.py
|
shyamalschandra/deepdive
|
56576d63fe5d7cb926a3bafa5890676bb9e5ee5b
|
[
"Apache-2.0"
] | null | null | null |
examples/chunking/udf/ext_features.py
|
shyamalschandra/deepdive
|
56576d63fe5d7cb926a3bafa5890676bb9e5ee5b
|
[
"Apache-2.0"
] | null | null | null |
#! /usr/bin/env python
import fileinput
import json
import itertools
import sys
def tostr(s):
    """Normalize a field to a string, mapping database NULL markers to ''.

    In the TSV extractor '\\N' denotes NULL in psql and 'NULL' denotes NULL
    in mysql; both (and Python None) become the empty string.
    """
    # NOTE: the original literal '\N' only compiles on Python 2 (plain
    # backslash + N there); '\\N' is byte-identical on 2 and valid on 3.
    return '' if s is None or s in ('\\N', 'NULL') else str(s)
# Read TSV rows (word_id, word, pos, prev_word, prev_pos) from stdin and
# emit one (word_id, feature, NULL-weight) row per extracted feature.
# NOTE: Python 2 script — the final `print` is the py2 statement form.
# for each word
for row in sys.stdin:
    # obj = json.loads(row)
    word_id, word1, pos1, word2, pos2 = row.rstrip().split('\t')
    features = set()
    # sys.stderr.write(str(obj))
    # features
    w1_word = 'word=' + tostr(word1)
    w1_pos = 'pos=' + tostr(pos1)
    # if 'w2.word' in obj.keys():
    # The previous-word columns may carry either database NULL marker.
    if word2 != 'NULL' and word2 != '\N':
        # w2_word = 'prev_word=' + tostr(word2)
        w2_pos = 'prev_pos=' + tostr(pos2)
    else:
        # w2_word = 'prev_word='
        w2_pos = 'prev_pos='
    #w3_word = 'next_word=' + tostr(obj["words_raw.w3.word"])
    #w3_pos = 'next_pos=' + tostr(obj["words_raw.w3.pos"])
    features.add(w1_word)
    features.add(w1_pos)
    features.add(w2_pos)
    # One output row per distinct feature; '\N' marks a NULL weight column.
    for f in features:
        print '\t'.join([word_id, f, '\N'])
| 23.1
| 66
| 0.638528
|
794b56ab081bf072980f82acd289b96d2089ffee
| 2,477
|
py
|
Python
|
tests/large/test_mode_replay.py
|
hexecute/eventgen
|
9978ef0725ad63a717e0019c6b30c5a5d9086fe1
|
[
"Apache-2.0"
] | null | null | null |
tests/large/test_mode_replay.py
|
hexecute/eventgen
|
9978ef0725ad63a717e0019c6b30c5a5d9086fe1
|
[
"Apache-2.0"
] | 1
|
2019-06-28T01:40:16.000Z
|
2019-06-28T01:40:16.000Z
|
tests/large/test_mode_replay.py
|
hexecute/eventgen
|
9978ef0725ad63a717e0019c6b30c5a5d9086fe1
|
[
"Apache-2.0"
] | null | null | null |
from datetime import datetime
import re
import time
def test_mode_replay(eventgen_test_helper):
    """Test normal replay mode settings"""
    events = eventgen_test_helper('eventgen_replay.conf').get_events()
    # replay mode emits each of the sample file's 12 events exactly once
    assert len(events) == 12
    timestamp_re = re.compile(r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}")
    for event in events:
        # the @@integer token must have been substituted away
        assert "@@integer" not in event
        # every event must start with a well-formed timestamp
        assert timestamp_re.match(event) is not None
def test_mode_replay_end_1(eventgen_test_helper):
    """Test normal replay mode with end = 2 which will replay the sample twice and exit"""
    events = eventgen_test_helper('eventgen_replay_end_1.conf').get_events()
    # the 12 sample events are replayed twice
    assert len(events) == 2 * 12
def test_mode_replay_end_2(eventgen_test_helper):
    """Test normal replay mode with end = -1 which will replay the sample forever"""
    helper = eventgen_test_helper('eventgen_replay_end_2.conf')
    # after a minute the generator must still be running
    wait_seconds = 60
    time.sleep(wait_seconds)
    assert helper.is_alive()
def test_mode_replay_backfill(eventgen_test_helper):
    """Test replay mode with backfill = -5s, which is ignored since backfill < interval"""
    events = eventgen_test_helper('eventgen_replay_backfill.conf').get_events()
    # twice the 12 events of the sample file
    assert len(events) == 2 * 12
def test_mode_replay_timemultiple(eventgen_test_helper):
    """Test replay mode with timeMultiple = 0.5, which halves every replay interval"""
    start_time = datetime.now()
    events = eventgen_test_helper('eventgen_replay_timeMultiple.conf').get_events()
    timestamp_re = re.compile(r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}")
    for event in events:
        match = timestamp_re.match(event)
        assert match is not None
        event_time = datetime.strptime(match.group(), "%Y-%m-%d %H:%M:%S")
        # the event time must fall after (now - earliest), i.e. < 12s out
        assert (event_time - start_time).total_seconds() < 12
def test_mode_replay_csv(eventgen_test_helper):
    """Test replay mode with sampletype = csv, which takes _raw rows from the sample"""
    events = eventgen_test_helper('eventgen_replay_csv.conf').get_events()
    # one event per data row of the sample csv file
    assert len(events) == 10
| 41.283333
| 110
| 0.712959
|
794b57975137079218a6d8c3783c9e7cf7ecbf2e
| 14,893
|
py
|
Python
|
game.py
|
tontonlepatate/retro-gaming
|
9f9ea2def5047e94520c266e626f87dd7c892cb1
|
[
"Apache-2.0"
] | 1
|
2019-06-12T07:18:46.000Z
|
2019-06-12T07:18:46.000Z
|
game.py
|
tontonlepatate/retro-gaming
|
9f9ea2def5047e94520c266e626f87dd7c892cb1
|
[
"Apache-2.0"
] | null | null | null |
game.py
|
tontonlepatate/retro-gaming
|
9f9ea2def5047e94520c266e626f87dd7c892cb1
|
[
"Apache-2.0"
] | null | null | null |
import inspect
import os
import numpy as np
import pygame
from math import sqrt
from pygame import transform
from pygame.draw import rect
from pygame.font import SysFont
from pygame.rect import Rect
from pygame.surface import Surface
from pygame.transform import scale
from common import palette, RED, WHITE, YELLOW, BLACK, GREEN, units
from unite import Unite
scriptPATH = os.path.abspath(inspect.getsourcefile(lambda: 0)) # compatible interactive Python Shell
scriptDIR = os.path.dirname(scriptPATH)
assets = os.path.join(scriptDIR, "data/unites")
pygame.init()
pygame.display.set_caption("Wargame")
son = pygame.mixer.Sound(os.path.join(scriptDIR, "musique/hoi2-kriegsgewitter.wav"))
son.play(loops=-1)
police = pygame.font.SysFont("arial", 15, True)
base_hp = 100
taille_case = 40
plan = [
"PPPPPPPPPSEESPPPPRRR",
"PfffPPPPPSEESPPPPRBR",
"fffffffPFSEESFffVRRR",
"fffRRRRRRRRRRRRRRRFP",
"PPfRVfPPSEESPffPPRPP",
"PPPRffPPSEESPPffPRPP",
"PPPRPPCPSEESPfffVRfP",
"PPVRPPCPSEESPffffRfP",
"PPPRPCCCSEESPPfffRPP",
"PPPRPCCPSEESPPPPPRPP",
"PPPRPCCPSEESMMPCCRVP",
"PPFRVPPPSEEMMMPCCRPP",
"RRRRPPPPSMMMMPPCPRPP",
"RBRPPPPPMMMMPPPPRRPP",
"RRRPPPPMMMMPPPPPPPPP",
]
bases = [
{
"X": 0,
"Y": 0,
"hp": base_hp,
},
{
"X": 0,
"Y": 0,
"hp": base_hp,
},
]
# Scan the map for base tiles ('B') and record their grid coordinates.
x_counter = 0
y_counter = 0
base_counter = 0
for line in plan:
    for letter in line:
        if letter == "B":
            bases[base_counter]["X"] = x_counter
            bases[base_counter]["Y"] = y_counter
            # NOTE(review): this guard runs after indexing `bases`, so a
            # third base would raise IndexError before it fires — confirm.
            if base_counter > 1:
                raise Exception("Il ne peut y avoir que 2 bases par carte")
            base_counter += 1
        x_counter += 1
    x_counter = 0
    y_counter += 1
terrain_dim = [len(plan[0]), len(plan)]
WINDOW_SIZE = [taille_case * terrain_dim[0], taille_case * terrain_dim[1] + 1]
screen = pygame.display.set_mode(WINDOW_SIZE)
argent_player = [2000, 2000]
revenu = [600, 600]
clock = pygame.time.Clock()
units_id = []
terrain_units = [
Unite(WINDOW_SIZE[0], WINDOW_SIZE[1], 1, units["infanterie"]),
]
done = False
selected_unit = -1
selected_unit_create = -1
lastclick = False
tour_equipe = 0
terrain_bonus = {
'B': 3,
'E': 0.25,
'R': 0.75,
'V': 2,
'f': 1.5,
'S': 0.5,
'M': 3,
'C': 2,
'P': 1,
'F': 3,
'T': 2,
'U': 2,
'u': 2,
' ': 1,
}
def equipe_differente(unite1: int, unite2: int) -> bool:
    """True when the two unit ids belong to opposing teams."""
    premiere = terrain_units[unite1]
    seconde = terrain_units[unite2]
    return premiere.equipe != seconde.equipe
def attaque(id_unite: int, id_cible: int):
    """Resolve one attack from unit `id_unite` against unit `id_cible`.

    Damage combines soft/hard/air attack against the target's matching
    defences, scaled by the attacker's hp, the target's defensiveness and
    the defensive bonus of the target's tile. Removes the target when its
    hp drops to zero or below.
    """
    print(str(id_unite) + " attaque " + str(id_cible))
    # Mark the attacker as having attacked this turn.
    terrain_units[id_unite].att = True
    unite = terrain_units[id_unite]
    cible = terrain_units[id_cible]
    # NOTE(review): plan rows are indexed [y][x] elsewhere in this file;
    # plan[unite.X][unite.Y] looks transposed — confirm Unite's X/Y meaning.
    terrain_u = plan[unite.X][unite.Y]  # NOTE(review): computed but never used
    terrain_c = plan[cible.X][cible.Y]
    print(terrain_c)
    # Start of the attack
    cible.hp -= int(((unite.classeunite.stat["softattack"] / cible.classeunite.stat["softness"]) + (
            unite.classeunite.stat["hardattack"] / cible.classeunite.stat["toughness"]) + (
            unite.classeunite.stat["airattack"] / cible.classeunite.stat["airdefence"])) * (
            unite.classeunite.stat["hp"] / cible.classeunite.stat["defensiveness"] * (
            1 / terrain_bonus[terrain_c])))
    # print(cible.hp)
    # print(terrain_var(terrain_c))
    # A destroyed unit is removed from play immediately.
    if cible.hp <= 0:
        terrain_units.remove(cible)
        return
    # End of the attack
    terrain_units[id_unite] = unite
    terrain_units[id_cible] = cible
def attaque_range(id_unite: int, id_cible: int) -> bool:
    """Return True when ``id_cible`` lies within attack range of ``id_unite``.

    The unit class stores its reach as a ``range``-like object whose
    ``start``/``stop`` give the minimum and maximum attack distance.
    A unit can never target itself.
    """
    if id_unite == id_cible:
        return False
    # Locals renamed: the original shadowed the builtins `type`, `min`, `max`.
    classe = terrain_units[id_unite].classeunite
    dist_min = float(classe.cible.start)
    dist_max = float(classe.cible.stop)
    dist = distance(terrain_units[id_unite].X, terrain_units[id_unite].Y,
                    terrain_units[id_cible].X, terrain_units[id_cible].Y)
    return dist_min <= dist <= dist_max
def distance(x1, y1, x2, y2) -> float:
    """Euclidean distance between the points (x1, y1) and (x2, y2)."""
    dx = x1 - x2
    dy = y1 - y2
    return sqrt(dx * dx + dy * dy)
def trans_case(color, pos):
    """Blit a semi-transparent square of ``color`` over the grid cell
    ``pos`` (an (x, y) pair in tile coordinates)."""
    overlay = Surface((taille_case, taille_case))
    overlay.set_alpha(100)
    overlay.fill(color)
    screen.blit(overlay, (pos[0] * taille_case, pos[1] * taille_case))
def changer_tour():
    """End the current turn: pay the active team its income, hand control
    to the other team, clear the selection and refresh that team's units."""
    global tour_equipe
    global selected_unit
    print("TOUR SUIVANT")
    argent_player[tour_equipe] += revenu[tour_equipe]
    tour_equipe = 1 - tour_equipe
    selected_unit = -1
    for combattant in terrain_units:
        if combattant.equipe == tour_equipe:
            combattant.nouveau_tour()
def verifier_touche():
    """Poll pygame input: dispatch mouse clicks, handle window close, and
    zoom the board in/out with PageUp/PageDown."""
    global taille_case
    global lastclick
    global done
    clicked = False
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            done = True
            return
        if event.type == pygame.MOUSEBUTTONDOWN:
            click_handler()
            clicked = True
    if not clicked:
        # No button press this frame: release the click debounce.
        lastclick = False
    pygame.event.pump()
    pressed = pygame.key.get_pressed()
    if pressed[pygame.K_PAGEUP]:
        taille_case += 1
    elif pressed[pygame.K_PAGEDOWN]:
        taille_case -= 1
def click_handler():
    """Handle one mouse click: toolbar buttons, unit purchase, unit
    selection, attacks (on units or the enemy base) and movement.

    Operates entirely on module-level game state.
    """
    global lastclick
    global selected_unit
    global selected_unit_create
    global terrain
    global terrain_dim
    global bases
    # Debounce: ignore a click held down across frames.
    if lastclick:
        return
    else:
        lastclick = True
    pos = pygame.mouse.get_pos()
    x_cursor = pos[0] // taille_case
    y_cursor = pos[1] // taille_case
    # Toolbar handling (clicks below the map).
    if y_cursor >= terrain_dim[1]:
        # "Next turn" button.
        # NOTE(review): the second condition bounds y_cursor by
        # terrain_dim[0] (the map WIDTH) — looks like it should use the
        # toolbar row; confirm.
        if terrain_dim[0] - 2 <= x_cursor <= terrain_dim[0] and terrain_dim[1] - 1 <= y_cursor <= terrain_dim[0]:
            changer_tour()
        # Unit purchase (shop bar): clicking the current choice toggles it off.
        elif x_cursor == selected_unit_create:
            selected_unit_create = -1
        elif x_cursor < len(units_id):
            selected_unit_create = x_cursor
        return
    # Did the click land on a unit?
    for unite in terrain_units:
        xu = unite.X * taille_case
        yu = unite.Y * taille_case
        # Collision between the clicked cell and the unit's cell.
        rect_col = Rect(xu, yu, taille_case, taille_case)
        if rect_col.collidepoint(x_cursor * taille_case, y_cursor * taille_case):
            cible_id = terrain_units.index(unite)
            if cible_id != selected_unit:
                # A unit is already selected: try to attack, then deselect.
                if selected_unit != -1:
                    if equipe_differente(selected_unit, cible_id) and attaque_range(selected_unit, cible_id) \
                            and terrain_units[selected_unit].att is False:
                        attaque(selected_unit, cible_id)
                    selected_unit = -1
                # Nothing selected: select the clicked unit if it is ours.
                elif terrain_units[cible_id].equipe == tour_equipe:
                    selected_unit = cible_id
            else:
                selected_unit = -1
            # It was a unit, so there is nothing more to do here.
            return
    # Attack on the enemy base when in range and not yet attacked this turn.
    x_base_adv = bases[1 - tour_equipe]["X"]
    y_base_adv = bases[1 - tour_equipe]["Y"]
    # NOTE(review): when selected_unit == -1 this silently grabs the last
    # unit; the guard only comes in the `if` below — confirm this is safe.
    sunit = terrain_units[selected_unit]
    distance_base = distance(sunit.X, sunit.Y, x_base_adv, y_base_adv)
    if x_base_adv == x_cursor and y_base_adv == y_cursor and selected_unit != -1 and sunit.att is False \
            and sunit.classeunite.cible.start <= distance_base <= sunit.classeunite.cible.stop:
        bases[1 - tour_equipe]["hp"] -= sunit.classeunite.stat["softattack"]
        sunit.att = True
        return
    # Click on empty ground from here on.
    # A unit purchase is pending: place it within 2 tiles of our own base.
    x_base_all = bases[tour_equipe]["X"]
    y_base_all = bases[tour_equipe]["Y"]
    if selected_unit_create != -1 and distance(x_base_all, y_base_all, x_cursor, y_cursor) < 2:
        type_u = units[units_id[selected_unit_create]]
        cout = type_u.stat["cost"]
        if cout <= argent_player[tour_equipe]:
            argent_player[tour_equipe] -= cout
            terrain_units.append(
                Unite(int(x_cursor), int(y_cursor), tour_equipe, units[units_id[selected_unit_create]]))
        else:
            print("Pas assez d'argent pour acheter l'unité")
        selected_unit_create = -1
        return
    # A unit is selected: try to move it one orthogonal tile.
    elif selected_unit != -1:
        unite = terrain_units[selected_unit]
        x_unit = unite.X
        y_unit = unite.Y
        terrain = plan[y_cursor][x_cursor]
        cout = unite.classeunite.terrain[terrain]
        dep = unite.pts_dep
        dist = distance(x_unit, y_unit, x_cursor, y_cursor)
        # cout == -1 marks terrain impassable for this unit class.
        if dist == 1 and -1 != cout <= dep:
            terrain_units[selected_unit].deplacer(x_cursor, y_cursor, cout)
    return
def afficher_terrain():
    """Draw the map: each tile gets the plain-ground sprite as a backdrop,
    then its biome sprite; finally both bases are highlighted with a colored
    overlay plus their HP counter."""
    screen.fill(WHITE)
    # Removed: an unused `laby = np.zeros(...)` array was allocated here on
    # every frame and never read.
    for y in range(terrain_dim[1]):
        ligne = plan[y]
        ypix = taille_case * y
        for x in range(terrain_dim[0]):
            xpix = taille_case * x
            biome = ligne[x]
            # Plain-ground backdrop first, so partially transparent biome
            # sprites still have something underneath.
            image = transform.scale(palette['P'], (taille_case, taille_case))
            screen.blit(image, [xpix, ypix])
            image = transform.scale(palette[biome], (taille_case, taille_case))
            screen.blit(image, [xpix, ypix])
    trans_case(GREEN, (bases[0]["X"], bases[0]["Y"]))
    afficher_hp(bases[0]["hp"], bases[0]["X"], bases[0]["Y"])
    trans_case(RED, (bases[1]["X"], bases[1]["Y"]))
    afficher_hp(bases[1]["hp"], bases[1]["X"], bases[1]["Y"])
def afficher_unite():
    """Draw every unit: an orange frame behind the selected one, the sprite
    matching its team, then its HP counter."""
    # enumerate() replaces the original per-iteration terrain_units.index()
    # linear search (O(n^2) overall across the loop).
    for cible_id, unite in enumerate(terrain_units):
        if selected_unit == cible_id:
            select_rect = Rect(unite.X * taille_case, unite.Y * taille_case, taille_case, taille_case)
            rect(screen, [255, 100, 0], select_rect)
        image: Surface
        if unite.equipe == 1:
            image = unite.classeunite.sprite2
        else:
            image = unite.classeunite.sprite1
        # 4-pixel margin inside the tile.
        icon_unite: Surface = scale(image, (taille_case - 8, taille_case - 8))
        screen.blit(icon_unite, (int(unite.X * taille_case + 4), int(unite.Y * taille_case + 4)))
        # HP label on top of the sprite.
        afficher_hp(unite.hp, unite.X, unite.Y)
def afficher_hp(hp: int, x, y):
    """Render a translucent "HP: <n>" label over the tile at (x, y)."""
    label = police.render("HP: " + str(hp), True, WHITE)
    ratio = label.get_height() / label.get_width()
    scaled_height = int(taille_case * ratio)
    label = scale(label, (taille_case, scaled_height))
    backdrop = Surface((taille_case, scaled_height))
    backdrop.fill((0, 0, 0))
    backdrop.set_alpha(100)
    backdrop.blit(label, (0, 0))
    screen.blit(backdrop, (x * taille_case, y * taille_case))
# ---------------------------------------------------------------------------
# Main loop: input, game state and rendering, one pass per frame (30 FPS).
# ---------------------------------------------------------------------------
while not done:
    # Ordered unit-type keys drive the shop bar layout.
    units_id = []
    for unite in units:
        units_id.append(unite)
    verifier_touche()
    # Window is the map plus one extra tile row for the toolbar.
    WINDOW_SIZE = [taille_case * terrain_dim[0], taille_case * (terrain_dim[1] + 1)]
    screen = pygame.display.set_mode(WINDOW_SIZE)
    afficher_terrain()
    # Draw the units.
    afficher_unite()
    # Draw move/attack hints for the selected unit.
    # NOTE(review): `is not -1` only works thanks to CPython's small-int
    # caching; this should be `selected_unit != -1`.
    if selected_unit is not -1:
        sct_unite = terrain_units[selected_unit]
        sct_type = sct_unite.classeunite
        x_sct = sct_unite.X
        y_sct = sct_unite.Y
        equipe_sct = sct_unite.equipe
        dep = sct_unite.pts_dep
        # Movement hints on the four orthogonal neighbours.
        for case_x in range(-1, 2):
            for case_y in range(-1, 2):
                try:
                    terrain = plan[y_sct + case_y][x_sct + case_x]
                    cout = sct_type.terrain[terrain]
                    # `case_y != case_x != -case_y` excludes the centre cell
                    # and the diagonals; cout == -1 means impassable.
                    if dep >= cout != -1 and case_y != case_x != -case_y:
                        trans_case([0, 0, 255], (x_sct + case_x, y_sct + case_y))
                except:
                    # NOTE(review): bare except is meant to swallow off-map
                    # IndexErrors only, but it hides every other error too.
                    ""
        # Attack hints on every unit in range.
        for unite in terrain_units:
            x_unit = unite.X
            y_unit = unite.Y
            equipe_unit = unite.equipe
            if attaque_range(selected_unit, terrain_units.index(unite)) \
                    and terrain_units[selected_unit].att is False:
                # NOTE(review): same-team units are painted red and enemies
                # green here — this looks inverted; confirm intent.
                if equipe_unit == equipe_sct:
                    trans_case([255, 0, 0], (x_unit, y_unit))
                else:
                    trans_case([0, 255, 0], (x_unit, y_unit))
    # "Next turn" button in the toolbar.
    btn_toursuiv = Surface((taille_case * 2, taille_case))
    btn_toursuiv.fill(YELLOW)
    text = police.render("TOUR SUIVANT", True, BLACK)
    text = transform.scale(text, (taille_case * 2, taille_case))
    btn_toursuiv.blit(text, (0, 0))
    screen.blit(btn_toursuiv, ((terrain_dim[0] - 2) * taille_case, terrain_dim[1] * taille_case))
    # Current-turn banner.
    color = GREEN
    if tour_equipe == 1:
        color = RED
    text = police.render("TOUR DE L'EQUIPE " + str(tour_equipe + 1), True, color)
    text = transform.scale(text, (taille_case * 4, taille_case))
    screen.blit(text, ((terrain_dim[0] - 4) // 2 * taille_case, 0))
    # Player information (money / income), one row per message; column 0 is
    # team 1 (green, left), column 1 is team 2 (red, right).
    messages = [
        {
            "texte": "argent : ",
            "contenu": argent_player
        },
        {
            "texte": "revenu : ",
            "contenu": revenu
        }
    ]
    message_counter = 0
    for message in messages:
        y = message_counter * 20
        redtxt = police.render(message["texte"] + str(message["contenu"][1]), True, RED)
        bluetxt = police.render(message["texte"] + str(message["contenu"][0]), True, GREEN)
        screen.blit(redtxt, (0, y))
        screen.blit(bluetxt, (screen.get_width() - redtxt.get_width(), y))
        message_counter += 1
    # Shop bar: one framed sprite per purchasable unit type.
    for unite in range(0, len(units_id)):
        unite_src = units[units_id[unite]]
        sprite_unite: Surface = unite_src.sprite1
        if tour_equipe == 1:
            sprite_unite = unite_src.sprite2
        image_unite = scale(sprite_unite, (taille_case - 4, taille_case - 4))
        frame_unite = Surface((taille_case, taille_case))
        if unite == selected_unit_create:
            frame_unite.fill((255, 255, 0))
        else:
            frame_unite.fill(WHITE)
        frame_unite.blit(image_unite, (2, 2))
        screen.blit(frame_unite, (taille_case * unite, taille_case * terrain_dim[1]))
        # Grey out units the active team cannot afford.
        if unite_src.stat["cost"] > argent_player[tour_equipe]:
            trans_case(BLACK, (unite, terrain_dim[1]))
    # Victory banner once a base is destroyed.
    for base in bases:
        if base["hp"] <= 0:
            winr = SysFont("arial", 50, True).render("EQUIPE " + str(1 + (1 - bases.index(base))) + " GAGNE", True,
                                                     YELLOW)
            w, h = winr.get_size()
            screen.blit(scale(winr, (WINDOW_SIZE[0], int(h / w * WINDOW_SIZE[1]))), (0, WINDOW_SIZE[1] // 2))
    clock.tick(30)
    pygame.display.flip()
pygame.quit()
| 31.222222
| 115
| 0.608339
|
794b57cb10e681dccf1562e10f16dd7084d9bdb6
| 2,132
|
py
|
Python
|
getDataAndLabels1Subj1Filtered.py
|
WillSmithTE/arl-eegmodels
|
08cd6e98592abb8a12425cbfc2cd2d05ef1476d3
|
[
"CC0-1.0"
] | null | null | null |
getDataAndLabels1Subj1Filtered.py
|
WillSmithTE/arl-eegmodels
|
08cd6e98592abb8a12425cbfc2cd2d05ef1476d3
|
[
"CC0-1.0"
] | null | null | null |
getDataAndLabels1Subj1Filtered.py
|
WillSmithTE/arl-eegmodels
|
08cd6e98592abb8a12425cbfc2cd2d05ef1476d3
|
[
"CC0-1.0"
] | null | null | null |
import scipy.io
import numpy as np
from util import save, read
from channelPrune import takeOnlyCertainChannels
from downsample import downSample
from takeSubset import takeSubset
# Subject/session layout of the raw .mat files: keys are subject-directory
# suffixes ('01' -> Dataset1/S01), values are session indexes.  Only subject
# 01 is enabled; the other subjects are commented out.
files = {
    '01': ['1', '2', '3', '4']
    # '02': ['1', '2', '3'],
    # '03': ['1', '2', '3', '4'],
    # '04': ['1', '2', '3', '4']
}
directories = ['S01', 'S02', 'S03', 'S04']
indexes = ['1', '2', '3', '4']
# Pickle cache locations for the assembled data / label arrays.
DATA_PATH = 'pickles/datasubj1filtered.pickle'
LABELS_PATH = 'pickles/labelssubj1filtered.pickle'
def getLabels(dir, index):
    """Load the label vector for one recording session from its .mat file."""
    contents = getMatFile(dir, index, 'labels')
    return contents['labels'][0]
def getData(dir, index):
    """Load the raw EEG data array for one recording session."""
    contents = getMatFile(dir, index, 'data')
    return contents['data']
def getFilteredData():
    """Load the pre-filtered ERP signal for subject 1 from its .mat file."""
    mat = scipy.io.loadmat('filtered_erp1_subj1')
    return mat['Filtered_signal']
def getMatFile(dir, index, dataOrLabels):
    """Load one session .mat file; ``dataOrLabels`` selects the variant."""
    filename = 'Dataset1/S' + dir + '/S' + dir + '_session' + index + '_' + dataOrLabels + '.mat'
    return scipy.io.loadmat(filename)
def doStuff(dir, index, data, labels):
    """Append one session's data/labels to the running accumulators.

    ``data``/``labels`` may be None on the first call, in which case the
    session arrays become the accumulators.  Trials are concatenated along
    axis 0 for labels and axis 2 for data.
    """
    print('reading file ', dir, index)
    session_labels = getLabels(dir, index)
    labels = session_labels if labels is None else np.concatenate([labels, session_labels])
    session_data = getData(dir, index)
    data = session_data if data is None else np.concatenate([data, session_data], axis=2)
    return [data, labels]
def getDataAndLabels():
    """Return [data, labels] for the configured subject, with pickle caching.

    On a cache miss every configured session is loaded and concatenated,
    after which ``data`` is REPLACED by the pre-filtered signal and both
    arrays are cached.
    NOTE(review): the concatenated raw data is discarded in favour of
    getFilteredData(), while labels still come from the raw session files —
    confirm the two stay trial-aligned.
    """
    data = read(DATA_PATH)
    labels = read(LABELS_PATH)
    if data is None or labels is None:
        for dir in files:
            for index in files[dir]:
                [data, labels] = doStuff(dir, index, data, labels)
        data = getFilteredData()
        save(data, DATA_PATH)
        save(labels, LABELS_PATH)
    labels = transformLabels(labels)
    data = transformData(data)
    return [data, labels]
def channelsSamplesTrialKernels(data):
    """Return (channels, samples, trials, kernels=1) from a 3-D data array."""
    channels, samples, trials = data.shape[0], data.shape[1], data.shape[2]
    return channels, samples, trials, 1
def transformLabels(labels):
    """Shift 1-based class labels down to the 0-based range models expect."""
    shifted = labels - 1
    return shifted
def transformData(data):
    """Return the data unchanged; the optional preprocessing steps (channel
    pruning, downsampling, subsetting) are currently disabled."""
    # data = takeOnlyCertainChannels(data)
    # data = downSample(data)
    # data = takeSubset(data)
    return data
def getConfusionMatrixNames():
    """Class display names for the confusion-matrix axes."""
    names = ['1', '2']
    return names
def getNumClasses():
    """Number of target classes in this binary classification task."""
    num_classes = 2
    return num_classes
| 28.052632
| 111
| 0.631801
|
794b595a880bf36d27a621421a5a39b22789fc02
| 4,600
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/documentdb/v20160331/get_database_account_mongo_db_database.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 31
|
2020-09-21T09:41:01.000Z
|
2021-02-26T13:21:59.000Z
|
sdk/python/pulumi_azure_nextgen/documentdb/v20160331/get_database_account_mongo_db_database.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 231
|
2020-09-21T09:38:45.000Z
|
2021-03-01T11:16:03.000Z
|
sdk/python/pulumi_azure_nextgen/documentdb/v20160331/get_database_account_mongo_db_database.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 4
|
2020-09-29T14:14:59.000Z
|
2021-02-10T20:38:16.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'GetDatabaseAccountMongoDBDatabaseResult',
'AwaitableGetDatabaseAccountMongoDBDatabaseResult',
'get_database_account_mongo_db_database',
]
@pulumi.output_type
class GetDatabaseAccountMongoDBDatabaseResult:
    """
    An Azure Cosmos DB MongoDB database.
    """
    # NOTE: generated by the Pulumi SDK Generator — do not edit by hand;
    # regenerate from the schema instead.
    def __init__(__self__, id=None, location=None, name=None, tags=None, type=None):
        # Each argument is type-checked and stored via pulumi.set so the
        # @pulumi.output_type machinery can resolve the getters below.
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The unique resource identifier of the database account.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def location(self) -> Optional[str]:
        """
        The location of the resource group to which the resource belongs.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the database account.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        Tags are a list of key-value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). A maximum of 15 tags can be provided for a resource. Each tag must have a key no greater than 128 characters and value no greater than 256 characters. For example, the default experience for a template type is set with "defaultExperience": "Cassandra". Current "defaultExperience" values also include "Table", "Graph", "DocumentDB", and "MongoDB".
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of Azure resource.
        """
        return pulumi.get(self, "type")
class AwaitableGetDatabaseAccountMongoDBDatabaseResult(GetDatabaseAccountMongoDBDatabaseResult):
    # Generated wrapper that lets the result be awaited inside Pulumi's async
    # runtime; __await__ never actually yields and returns a plain copy.
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetDatabaseAccountMongoDBDatabaseResult(
            id=self.id,
            location=self.location,
            name=self.name,
            tags=self.tags,
            type=self.type)
def get_database_account_mongo_db_database(account_name: Optional[str] = None,
                                           database_name: Optional[str] = None,
                                           resource_group_name: Optional[str] = None,
                                           opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDatabaseAccountMongoDBDatabaseResult:
    """
    An Azure Cosmos DB MongoDB database.

    :param str account_name: Cosmos DB database account name.
    :param str database_name: Cosmos DB database name.
    :param str resource_group_name: Name of an Azure resource group.
    """
    # NOTE: generated code — the invoke token below is tied to API version
    # 2016-03-31; do not edit by hand.
    __args__ = dict()
    __args__['accountName'] = account_name
    __args__['databaseName'] = database_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Default to the SDK's own version when the caller did not pin one.
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-nextgen:documentdb/v20160331:getDatabaseAccountMongoDBDatabase', __args__, opts=opts, typ=GetDatabaseAccountMongoDBDatabaseResult).value

    return AwaitableGetDatabaseAccountMongoDBDatabaseResult(
        id=__ret__.id,
        location=__ret__.location,
        name=__ret__.name,
        tags=__ret__.tags,
        type=__ret__.type)
| 38.016529
| 505
| 0.653478
|
794b5a52fb8500153a1e5148d28751215637c292
| 431
|
py
|
Python
|
ChoreographyHive/loadout_generator.py
|
VirxEC/RLBotChoreography
|
579aa3590868a2e9fd99cd1f9dd32a4c3058c55e
|
[
"MIT"
] | null | null | null |
ChoreographyHive/loadout_generator.py
|
VirxEC/RLBotChoreography
|
579aa3590868a2e9fd99cd1f9dd32a4c3058c55e
|
[
"MIT"
] | null | null | null |
ChoreographyHive/loadout_generator.py
|
VirxEC/RLBotChoreography
|
579aa3590868a2e9fd99cd1f9dd32a4c3058c55e
|
[
"MIT"
] | null | null | null |
import random
from pathlib import Path
from rlbot.agents.base_loadout_generator import BaseLoadoutGenerator
from rlbot.matchconfig.loadout_config import LoadoutConfig
class LoadoutGenerator(BaseLoadoutGenerator):
    """Loadout generator giving bot index 60 a special config file and
    every other bot the default one."""

    def generate_loadout(self, player_index: int, team: int) -> LoadoutConfig:
        if player_index == 60:
            cfg_path = Path('bots/alpha.cfg')
        else:
            cfg_path = Path('bots/default.cfg')
        return self.load_cfg_file(cfg_path, team)
| 35.916667
| 112
| 0.786543
|
794b5a8f9dab5889b69d3a9a55873bdd27e09335
| 201
|
py
|
Python
|
mileage.py
|
pjack484/Mileage-Converter-
|
c87f86196a743fff528fe4726f747af2f41c5c01
|
[
"MIT"
] | null | null | null |
mileage.py
|
pjack484/Mileage-Converter-
|
c87f86196a743fff528fe4726f747af2f41c5c01
|
[
"MIT"
] | null | null | null |
mileage.py
|
pjack484/Mileage-Converter-
|
c87f86196a743fff528fe4726f747af2f41c5c01
|
[
"MIT"
] | null | null | null |
# Ask for a cycled distance in kilometers and echo it back in miles.
print("How many kilometers did you cycle today?")
kms = input()
# 1 mile = 1.60934 km; round the result to 2 decimal places for display.
miles = float(kms) / 1.60934
print(f"That is equal to {round(miles, 2)} miles ")
| 25.125
| 52
| 0.681592
|
794b5ae4be1c579e35258370431092b091f62123
| 1,320
|
py
|
Python
|
checkdb.py
|
anvilventures/lookinsidethebox
|
42870254d6835ba49f8859d160c60c27765188c6
|
[
"0BSD"
] | 79
|
2019-05-08T10:57:10.000Z
|
2020-11-24T02:16:01.000Z
|
checkdb.py
|
anvilsecure/lookinsidethebox
|
42870254d6835ba49f8859d160c60c27765188c6
|
[
"0BSD"
] | null | null | null |
checkdb.py
|
anvilsecure/lookinsidethebox
|
42870254d6835ba49f8859d160c60c27765188c6
|
[
"0BSD"
] | 13
|
2019-05-08T08:51:46.000Z
|
2020-09-18T16:22:09.000Z
|
#!/usr/bin/env python3
import argparse
import dis
import opcodemap
# Entry point: open an opcode-mapping database (default "opcode.db"), sanity
# check one known mapping, then print the full Dropbox <-> CPython table.
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--db")
    ns = parser.parse_args()
    if not ns.db:
        ns.db = "opcode.db"
    with opcodemap.OpcodeMapping(ns.db, False) as opc_map:
        # reverse_mapping: Dropbox opcode -> CPython opcode.
        table = opc_map.reverse_mapping()
        # Based on some manual debugging we found that earlier versions of
        # gendb didn't properly derive some opcodes such as BUILD_CONST_KEY_MAP
        # / 156. The gendb script has been changed and with this script we can
        # now quickly check whether we found this one and if the mapping is
        # still the same. Just for testing purposes of the other scripts.
        assert(opc_map.get(252) == 156)
        assert(table.get(156) == 252)
        print("mapping as defined in %s is as follows:" % ns.db)
        fmt = "| {0:<30} | {1:>7} | {2:>7} |"
        print(fmt.format("="*30, "="*7, "="*7))
        print(fmt.format("OPCODE", "PYTHON", "DROPBOX"))
        print(fmt.format("="*30, "="*7, "="*7))
        # Walk CPython's opcode names; skip opcodes with no Dropbox mapping.
        for i, opname in enumerate(dis.opname):
            db_i = table.get(i)
            if db_i is None:
                continue
            print(fmt.format(opname, i, db_i))
        print(fmt.format("="*30, "="*7, "="*7))
        print("")
| 34.736842
| 79
| 0.584848
|
794b5afb51a991df5413630f5a629aea91a99889
| 1,193
|
py
|
Python
|
gnodatapackages-1.2.3/setup.py
|
charmandersgsg/gnodatapackages
|
f8970b9ea356ee4e31f56ef52ecb3be645166b4e
|
[
"MIT"
] | null | null | null |
gnodatapackages-1.2.3/setup.py
|
charmandersgsg/gnodatapackages
|
f8970b9ea356ee4e31f56ef52ecb3be645166b4e
|
[
"MIT"
] | null | null | null |
gnodatapackages-1.2.3/setup.py
|
charmandersgsg/gnodatapackages
|
f8970b9ea356ee4e31f56ef52ecb3be645166b4e
|
[
"MIT"
] | null | null | null |
import setuptools
from os import path
# PyPI long description is taken verbatim from the README (markdown).
# def readme():
with open('README.md') as f:
    long_description = f.read()
# def readme():
#    with open('README.rst') as f:
#        return f.read()
# this_directory = path.abspath(path.dirname(__file__))
# with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
#     long_description = f.read()
setuptools.setup(
    name="gnodatapackages",  # Replace with your own username
    version="1.2.3",
    author="charmandersgsg",
    author_email="charmandersgsg@gmail.com",
    description="data def packages for gno",
    # long_description="data def packages for gno",
    long_description = long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/charmandersgsg/gnodatapackages",
    packages=['gnodatapackages'],
    include_package_data=True,
    install_requires=[
        'pandas',
        'ftfy'
    ],
    test_suite='nose.collector',
    tests_require=['nose'],
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires='>=3.6',
)
| 28.404762
| 75
| 0.648785
|
794b5b1b386ecf759cfddfa466ee4a0ee6f25b06
| 4,977
|
py
|
Python
|
msticpy/datamodel/entities/host.py
|
ekmixon/msticpy-1
|
3587ed4f604529c6f5784d51a557c3ad2379f781
|
[
"MIT"
] | 16
|
2019-02-25T01:34:49.000Z
|
2019-05-05T16:55:21.000Z
|
msticpy/datamodel/entities/host.py
|
ekmixon/msticpy-1
|
3587ed4f604529c6f5784d51a557c3ad2379f781
|
[
"MIT"
] | null | null | null |
msticpy/datamodel/entities/host.py
|
ekmixon/msticpy-1
|
3587ed4f604529c6f5784d51a557c3ad2379f781
|
[
"MIT"
] | 1
|
2019-02-27T10:26:42.000Z
|
2019-02-27T10:26:42.000Z
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""Host Entity class."""
from typing import Any, Mapping, Optional
from ..._version import VERSION
from ...common.utility import export
from .entity import Entity
from .entity_enums import OSFamily
__version__ = VERSION
__author__ = "Ian Hellen"
# pylint: disable=invalid-name, too-many-instance-attributes
@export
class Host(Entity):
    """
    Host Entity class.

    Attributes
    ----------
    DnsDomain : str
        Host DnsDomain
    NTDomain : str
        Host NTDomain
    HostName : str
        Host HostName
    NetBiosName : str
        Host NetBiosName
    AzureID : str
        Host AzureID
    OMSAgentID : str
        Host OMSAgentID
    OSFamily : str
        Host OSFamily
    OSVersion : str
        Host OSVersion
    IsDomainJoined : bool
        Host IsDomainJoined

    """

    # Properties the base Entity class uses to compute this entity's identity.
    ID_PROPERTIES = ["fqdn", "AzureID", "OMSAgentID"]

    def __init__(
        self,
        src_entity: Mapping[str, Any] = None,
        src_event: Mapping[str, Any] = None,
        **kwargs,
    ):
        """
        Create a new instance of the entity type.

        Parameters
        ----------
        src_entity : Mapping[str, Any], optional
            Create entity from existing entity or
            other mapping object that implements entity properties.
            (the default is None)
        src_event : Mapping[str, Any], optional
            Create entity from event properties
            (the default is None)

        Other Parameters
        ----------------
        kwargs : Dict[str, Any]
            Supply the entity properties as a set of
            kw arguments.

        """
        self.DnsDomain: Optional[str] = None
        self.NTDomain: Optional[str] = None
        self.HostName: Optional[str] = None
        self.NetBiosName: Optional[str] = None
        self.AzureID: Optional[str] = None
        self.OMSAgentID: Optional[str] = None
        # Defaults to Windows when the source does not specify an OS family.
        self.OSFamily: OSFamily = OSFamily.Windows
        self.OSVersion: Optional[str] = None
        self.IsDomainJoined: bool = False
        super().__init__(src_entity=src_entity, **kwargs)
        self._computer = None
        if src_event is not None:
            self._create_from_event(src_event)

    @property
    def computer(self) -> Optional[str]:
        """Return computer from source event."""
        # Prefer the raw Computer field captured from the event; fall back to
        # the constructed FQDN.
        return self._computer if self._computer is not None else self.fqdn

    @property
    def fqdn(self) -> Optional[str]:
        """Construct FQDN from host + dns."""
        if self.DnsDomain:
            return f"{self.HostName}.{self.DnsDomain}"
        return self.HostName

    @property
    def FullName(self) -> Optional[str]:  # noqa: N802
        """Return the full name of the host - either FQDN or Netbiosname."""  # noqa N802
        if self.DnsDomain:
            return f"{self.HostName or self.NetBiosName}.{self.DnsDomain}"
        if self.NTDomain:
            return f"{self.HostName or self.NetBiosName}.{self.NTDomain}"
        return self.HostName or self.NetBiosName

    @property
    def description_str(self) -> str:
        """Return Entity Description."""
        return f"{self.fqdn} ({self.OSFamily})"

    @property
    def name_str(self) -> str:
        """Return Entity Name."""
        return self.HostName or self.__class__.__name__

    def _create_from_event(self, src_event):
        # Populate naming fields from a raw event record.  "Computer" may be
        # an FQDN; it is split into host + DNS domain on the first dot.
        if "Computer" in src_event:
            self._computer = src_event["Computer"]
            if "." in src_event["Computer"]:
                self.HostName = src_event["Computer"].split(".", 1)[0]
                self.DnsDomain = src_event["Computer"].split(".", 1)[1]
            else:
                self.HostName = src_event["Computer"]
        elif "HostName" in src_event:
            self.HostName = src_event["HostName"]
            if "DnsDomain" in src_event:
                self.DnsDomain = src_event["DnsDomain"]
        # NetBIOS name mirrors the short host name.
        self.NetBiosName = self.HostName

    _entity_schema = {
        # DnsDomain (type System.String)
        "DnsDomain": None,
        # NTDomain (type System.String)
        "NTDomain": None,
        # HostName (type System.String)
        "HostName": None,
        # NetBiosName (type System.String)
        "NetBiosName": None,
        # AzureID (type System.String)
        "AzureID": None,
        # OMSAgentID (type System.String)
        "OMSAgentID": None,
        # OSFamily (type System.Nullable`1
        # [Microsoft.Azure.Security.Detection.AlertContracts.V3.Entities.OSFamily])
        "OSFamily": "OSFamily",
        # IsDomainJoined (type System.Nullable`1[System.Boolean])
        "IsDomainJoined": None,
        "TimeGenerated": None,
        "StartTime": None,
        "EndTime": None,
    }
| 31.301887
| 89
| 0.581475
|
794b5b31e3132e93612ae558674cd923edcd9731
| 661
|
py
|
Python
|
setup.py
|
UFM-Market-Trends/UFM-Market-Trends-SDK
|
12d94ca7a905caec3d8038f4df5631097cbcf1af
|
[
"CC0-1.0"
] | null | null | null |
setup.py
|
UFM-Market-Trends/UFM-Market-Trends-SDK
|
12d94ca7a905caec3d8038f4df5631097cbcf1af
|
[
"CC0-1.0"
] | null | null | null |
setup.py
|
UFM-Market-Trends/UFM-Market-Trends-SDK
|
12d94ca7a905caec3d8038f4df5631097cbcf1af
|
[
"CC0-1.0"
] | null | null | null |
import setuptools
# PyPI long description comes straight from the README (markdown).
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()
setuptools.setup(
    name='ufmtrends-sdk',
    version='0.0.5',
    author='Jose Alvarez',
    author_email='jose@tecuntecs.com',
    description='Testing installation of Package',
    long_description=long_description,
    long_description_content_type="text/markdown",
    url='https://github.com/UFM-Market-Trends/UFM-Market-Trends-SDK',
    project_urls = {
        "Bug Tracker": "https://github.com/UFM-Market-Trends/UFM-Market-Trends-SDK/issues"
    },
    license='cc.license',
    packages=['ufmtrends_sdk'],
    install_requires=['requests'],
)
| 31.47619
| 90
| 0.688351
|
794b5c461a3318b031d2ac8dad257ce6cb3cdff2
| 1,223
|
py
|
Python
|
authors/apps/articles/migrations/0001_initial.py
|
AmosWels/ah-django
|
5b8e39053d63f1dfb3c14066b163a1d37af91076
|
[
"BSD-3-Clause"
] | null | null | null |
authors/apps/articles/migrations/0001_initial.py
|
AmosWels/ah-django
|
5b8e39053d63f1dfb3c14066b163a1d37af91076
|
[
"BSD-3-Clause"
] | null | null | null |
authors/apps/articles/migrations/0001_initial.py
|
AmosWels/ah-django
|
5b8e39053d63f1dfb3c14066b163a1d37af91076
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 2.1.7 on 2019-04-05 07:52
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration for the articles app: creates the
    # Article model.  Do not edit once applied; add a new migration instead.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Article',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('slug', models.SlugField(max_length=255)),
                ('title', models.CharField(db_index=True, max_length=255)),
                ('description', models.TextField()),
                ('body', models.TextField()),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                # NOTE(review): updated_at uses auto_now_add (set once at
                # creation); auto_now is the usual choice for "updated".
                ('updated_at', models.DateTimeField(auto_now_add=True)),
                ('favorited', models.BooleanField(default=False)),
                ('favorites_count', models.IntegerField(default=0)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='articles', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 37.060606
| 145
| 0.616517
|
794b5d41366fa2bee547d0fec6926e5689b87f25
| 462
|
py
|
Python
|
bspump/declarative/expression/utility/debugexpr.py
|
LibertyAces/BitSwanPump
|
02301bfd4e807836403ce6a22030ad47058541d6
|
[
"BSD-3-Clause"
] | 17
|
2019-02-14T09:26:03.000Z
|
2022-03-11T09:23:52.000Z
|
bspump/declarative/expression/utility/debugexpr.py
|
LibertyAces/BitSwanPump
|
02301bfd4e807836403ce6a22030ad47058541d6
|
[
"BSD-3-Clause"
] | 91
|
2019-05-06T18:59:02.000Z
|
2022-01-11T06:22:32.000Z
|
bspump/declarative/expression/utility/debugexpr.py
|
LibertyAces/BitSwanPump
|
02301bfd4e807836403ce6a22030ad47058541d6
|
[
"BSD-3-Clause"
] | 10
|
2019-04-23T08:48:58.000Z
|
2022-02-13T14:24:28.000Z
|
import pprint
import logging
from ...abc import Expression, evaluate
###
L = logging.getLogger("bspump.declarative.DEBUG")
###
class DEBUG(Expression):
    """Declarative expression that evaluates its ``What`` argument, logs the
    pretty-printed result as a warning, and passes the result through
    unchanged."""

    Attributes = {
        "What": ["*"],  # TODO: This ...
    }

    def __init__(self, app, *, arg_what):
        super().__init__(app)
        self.What = arg_what

    def __call__(self, context, event, *args, **kwargs):
        result = evaluate(self.What, context, event, *args, **kwargs)
        L.warning(pprint.pformat(result))
        return result
| 17.111111
| 60
| 0.662338
|
794b5e15578331531bc2d884198d7edb81db3d72
| 5,500
|
py
|
Python
|
algs/fedavg.py
|
Dev-Jahn/Fed-Framework
|
cb6d56f17d54f771cb40c640e30cc7b498daf089
|
[
"MIT"
] | 2
|
2022-03-30T05:05:25.000Z
|
2022-03-30T05:06:14.000Z
|
algs/fedavg.py
|
Dev-Jahn/Fed-Framework
|
cb6d56f17d54f771cb40c640e30cc7b498daf089
|
[
"MIT"
] | null | null | null |
algs/fedavg.py
|
Dev-Jahn/Fed-Framework
|
cb6d56f17d54f771cb40c640e30cc7b498daf089
|
[
"MIT"
] | null | null | null |
import logging
import wandb
from torch import optim
from torch.utils.data import DataLoader
from losses import build_loss
from metrics.basic import AverageMeter, compute_accuracy
from utils import save_model
logger = logging.getLogger(__name__)
def train_local(net_id, net, trainloader, testloader, comm_round, args, device):
    """Train a single federated client's model locally.

    Trains ``net`` on ``trainloader`` for ``args.epochs`` epochs with the
    optimizer named by ``args.optimizer``, logs per-epoch loss and final
    train/test accuracy to wandb, optionally checkpoints the local model,
    and returns ``(train_acc, test_acc)``.
    """
    # train_acc = compute_accuracy(net, trainloader, device=device)
    # test_acc, conf_matrix = compute_accuracy(net, testloader, get_confusion_matrix=True, device=device)
    # logger.info(f'<< Train accuracy: {train_acc * 100:5.2f} %')
    # logger.info(f'<< Test accuracy: {test_acc * 100:5.2f} %')
    # wandb.log(
    #     data={
    #         f'Client {net_id}': {
    #             'train': {'Accuracy': train_acc},
    #             'test': {'Accuracy': test_acc},
    #         },
    #         'round': comm_round - 0.5
    #     },
    # )
    # Optimizer over trainable parameters only.
    if args.optimizer == 'adam':
        optimizer = optim.Adam(filter(lambda p: p.requires_grad, net.parameters()), lr=args.lr, weight_decay=args.reg)
    elif args.optimizer == 'amsgrad':
        optimizer = optim.Adam(filter(lambda p: p.requires_grad, net.parameters()), lr=args.lr, weight_decay=args.reg,
                               amsgrad=True)
    elif args.optimizer == 'sgd':
        optimizer = optim.SGD(filter(lambda p: p.requires_grad, net.parameters()), lr=args.lr, momentum=args.momentum,
                              weight_decay=args.reg)
    criterion = build_loss(args.loss)
    # total_loss tracks the full objective; the second meter tracks the named
    # loss component when the criterion reports it separately.
    metrics = {
        'total_loss': AverageMeter(),
        args.loss: AverageMeter(),
    }
    for epoch in range(1, args.epochs + 1):
        metrics['total_loss'].reset()
        metrics[args.loss].reset()
        for batch_idx, (x, target) in enumerate(trainloader):
            x, target = x.to(device), target.to(device)
            optimizer.zero_grad()
            x.requires_grad = True
            target.requires_grad = False
            target = target.long()
            out = net(x)
            loss, additional = criterion(out, target, model=net, decay=args.odecay)
            loss.backward()
            optimizer.step()
            # Metrics update
            # NOTE(review): `additional` (when truthy) appears to be the
            # bare loss term reported by the criterion; falls back to the
            # total loss — confirm against build_loss.
            metrics['total_loss'].update(loss, len(x))
            metrics[args.loss].update(additional if additional else loss, len(x))
        # Logging
        logger.info(f'Epoch: {epoch:>3} | Loss: {metrics["total_loss"].avg:.6f}')
        wandb.log(
            data={
                f'Client {net_id}': {
                    'train': {
                        'Loss': metrics['total_loss'].avg,
                        args.loss: metrics[args.loss].avg
                    },
                },
                'epochsum': (comm_round - 1) * args.epochs + epoch
            }
        )
        # Save local model at the configured round/epoch cadence (and always
        # at the last round / last epoch).
        cond_comm = (comm_round % args.save_round == 0) or comm_round == args.comm_round
        cond_epoch = (epoch % args.save_epoch == 0) or epoch == args.epochs
        if args.save_local and cond_comm and cond_epoch:
            save_model(net, args.name, args.modeldir, f'comm{comm_round:03}-epoch{epoch:03}-CLIENT{net_id:02}')
        # calc acc for local (optional)
        # train_acc = compute_accuracy(net, train_dataloader, device=device)
        # test_acc, conf_matrix = compute_accuracy(net, test_dataloader, get_confusion_matrix=True, device=device)
        # if epoch % 10 == 0:
        #     logger.info('Epoch: %d Loss: %f' % (epoch, epoch_loss))
        #     train_acc = compute_accuracy(net, train_dataloader, device=device)
        #     test_acc, conf_matrix = compute_accuracy(net, test_dataloader, get_confusion_matrix=True, device=device)
        #
        #     logger.info('>> Training accuracy: %f' % train_acc)
        #     logger.info('>> Test accuracy: %f' % test_acc)
    # Final accuracy after all local epochs.
    train_acc = compute_accuracy(net, trainloader, device=device)
    test_acc, conf_matrix = compute_accuracy(net, testloader, get_confusion_matrix=True, device=device)
    logger.info(f'>> Train accuracy: {train_acc * 100:5.2f} %')
    logger.info(f'>> Test accuracy: {test_acc * 100:5.2f} %')
    wandb.log(
        data={
            f'Client {net_id}': {
                'train': {'Accuracy': train_acc},
                'test': {'Accuracy': test_acc},
            },
            'round': comm_round
        },
    )
    return train_acc, test_acc
def train_nets(nets, selected, args, net_dataidx_map, loaderargs, comm_round, testargs=None, device='cuda'):
    """Run one communication round of local training for the selected clients.

    Each selected client network is moved to *device*, trained on its own
    data partition via ``train_local``, evaluated, then moved back to CPU.
    The mean test accuracy over the selected clients is logged when the
    algorithm is plain local training.

    Returns the list of all client networks (selected or not).
    """
    accuracy_sum = 0.0
    for client_id, client_net in nets.items():
        if client_id not in selected:
            continue
        data_indices = net_dataidx_map[client_id]
        logger.info('-' * 58)
        logger.info(f'Training client {client_id:>3} with {len(data_indices):>6} data')
        client_net.to(device)
        train_loader = DataLoader(**loaderargs[client_id])
        eval_loader = DataLoader(**testargs)
        _, test_accuracy = train_local(client_id, client_net, train_loader, eval_loader, comm_round, args, device=device)
        # Free the loaders and return the model to CPU before the next client.
        del train_loader
        del eval_loader
        client_net.cpu()
        accuracy_sum += test_accuracy
    logger.info('-' * 58)
    mean_test_acc = accuracy_sum / len(selected)
    if args.alg == 'local_training':
        logger.info("avg test acc %f" % mean_test_acc)
    return list(nets.values())
| 39.007092
| 119
| 0.574364
|
794b5ebc9d785b5402d417e7cac3eddf69b89e6c
| 1,205
|
py
|
Python
|
tests/test_ri-04-03.py
|
ivanbukhtiyarov/elevators
|
e7ff582bbc9a26d22880bec61bede747427430c2
|
[
"MIT"
] | 2
|
2021-03-22T16:12:56.000Z
|
2021-03-22T16:19:09.000Z
|
tests/test_ri-04-03.py
|
ivanbukhtiyarov/elevators
|
e7ff582bbc9a26d22880bec61bede747427430c2
|
[
"MIT"
] | 46
|
2021-04-01T10:25:25.000Z
|
2021-12-26T23:43:46.000Z
|
tests/test_ri-04-03.py
|
ivanbukhtiyarov/elevators
|
e7ff582bbc9a26d22880bec61bede747427430c2
|
[
"MIT"
] | 4
|
2021-04-01T10:22:46.000Z
|
2021-12-26T21:51:10.000Z
|
import pytest
from src.elevator import (
Elevator, MoveRequest,
)
from src.operator import Operator
def test_simple():
    '''Tests calls of default elevators methods from operator'''
    # Build a single elevator in a known idle state on floor 17.
    elevator = Elevator(
        tonnage=1000,
        floors_count=25,
        current_direction=0,
        current_weight=0,
        current_floor=17,
        is_light_on=True,
        is_smoked=True,
        requests=[],
        is_communication_on=False,
        is_doors_open=False,
        is_doors_blocked=False,
        is_empty=True
    )
    operator = Operator([elevator])
    # Door control round-trip.
    operator.open_doors(0)
    assert operator.elevators_list[0].is_doors_open == True
    operator.close_doors(0)
    assert operator.elevators_list[0].is_doors_open == False
    # Index 1 does not exist: the operator is expected to raise.
    with pytest.raises(Exception) as e_info:
        assert operator.open_doors(1)
    # Dispatcher call enables the intercom.
    operator.call_dispatcher(0)
    assert operator.elevators_list[0].is_communication_on == True
    # Movement, lighting and smoke-mode toggles.
    operator.move_to_floor(20, 0)
    assert operator.elevators_list[0].current_floor == 20
    operator.turn_light_off(0)
    assert operator.elevators_list[0].is_light_on == False
    operator.turn_smoke_on(0)
    assert operator.elevators_list[0].is_smoked == True
| 26.777778
| 65
| 0.690456
|
794b5f251ed2cbbce96e080d03428887bf7c1e2d
| 6,663
|
py
|
Python
|
bindings/python/ensmallen_graph/datasets/string/aromatoleumaromaticum.py
|
caufieldjh/ensmallen_graph
|
14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a
|
[
"MIT"
] | null | null | null |
bindings/python/ensmallen_graph/datasets/string/aromatoleumaromaticum.py
|
caufieldjh/ensmallen_graph
|
14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a
|
[
"MIT"
] | null | null | null |
bindings/python/ensmallen_graph/datasets/string/aromatoleumaromaticum.py
|
caufieldjh/ensmallen_graph
|
14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a
|
[
"MIT"
] | null | null | null |
"""
This file offers the methods to automatically retrieve the graph Aromatoleum aromaticum.
The graph is automatically retrieved from the STRING repository.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 18:32:42.176310
The undirected graph Aromatoleum aromaticum has 4298 nodes and 405979 weighted
edges, of which none are self-loops. The graph is dense as it has a density
of 0.04396 and has 16 connected components, where the component with most
nodes has 4260 nodes and the component with the least nodes has 2 nodes.
The graph median node degree is 166, the mean node degree is 188.92, and
the node degree mode is 3. The top 5 most central nodes are 76114.ebA791
(degree 1641), 76114.ebA3982 (degree 1630), 76114.ebA6051 (degree 1332),
76114.ebA6645 (degree 1207) and 76114.ebA2252 (degree 1164).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import AromatoleumAromaticum
# Then load the graph
graph = AromatoleumAromaticum()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
        # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def AromatoleumAromaticum(
    directed: bool = False,
    verbose: int = 2,
    cache_path: str = "graphs/string",
    **additional_graph_kwargs: Dict
) -> EnsmallenGraph:
    """Return a new instance of the Aromatoleum aromaticum graph.

    The graph is automatically retrieved from the STRING repository.

    Parameters
    -------------------
    directed: bool = False,
        Whether to load the graph as directed or undirected.
        By default false.
    verbose: int = 2,
        Whether to show loading bars during the retrieval and building
        of the graph.
    cache_path: str = "graphs",
        Where to store the downloaded graphs.
    additional_graph_kwargs: Dict,
        Additional graph kwargs forwarded to the retrieval helper.

    Returns
    -----------------------
    Instance of the Aromatoleum aromaticum graph.

    References
    ---------------------
    Please cite Szklarczyk et al., "STRING v11", Nucleic Acids Research,
    47(D1):D607--D613, 2019, if you use the data.
    """
    # Collect the retrieval configuration, then build and immediately
    # invoke the retriever (the trailing call downloads/loads the graph).
    retrieval_arguments = dict(
        graph_name="AromatoleumAromaticum",
        dataset="string",
        directed=directed,
        verbose=verbose,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs,
    )
    return AutomaticallyRetrievedGraph(**retrieval_arguments)()
| 35.253968
| 223
| 0.702686
|
794b5f4aa431a2cb150614365e19b8889651e6d9
| 64,719
|
py
|
Python
|
simple_history/tests/tests/test_models.py
|
rdurica/django-simple-history
|
84d17f40be68e9ac7744b773451be83720c4c13a
|
[
"BSD-3-Clause"
] | null | null | null |
simple_history/tests/tests/test_models.py
|
rdurica/django-simple-history
|
84d17f40be68e9ac7744b773451be83720c4c13a
|
[
"BSD-3-Clause"
] | 12
|
2021-09-24T18:20:34.000Z
|
2022-03-29T18:26:50.000Z
|
simple_history/tests/tests/test_models.py
|
rdurica/django-simple-history
|
84d17f40be68e9ac7744b773451be83720c4c13a
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
import uuid
import warnings
from datetime import datetime, timedelta
import django
from django.apps import apps
from django.contrib.auth import get_user_model
from django.core.exceptions import ObjectDoesNotExist
from django.core.files.base import ContentFile
from django.db import IntegrityError, models
from django.db.models.fields.proxy import OrderWrt
from django.test import TestCase, override_settings
from django.urls import reverse
from simple_history import register
from simple_history.exceptions import RelatedNameConflictError
from simple_history.models import HistoricalRecords, ModelChange
from simple_history.signals import pre_create_historical_record
from simple_history.tests.custom_user.models import CustomUser
from simple_history.tests.tests.utils import (
database_router_override_settings,
database_router_override_settings_history_in_diff_db,
middleware_override_settings,
)
from simple_history.utils import get_history_model_for_model, update_change_reason
from ..external.models import (
ExternalModel,
ExternalModelRegistered,
ExternalModelWithCustomUserIdField,
)
from ..models import (
AbstractBase,
AdminProfile,
BasePlace,
Book,
Bookcase,
BucketData,
BucketDataRegisterChangedBy,
BucketMember,
CharFieldChangeReasonModel,
CharFieldFileModel,
Choice,
City,
ConcreteAttr,
ConcreteExternal,
ConcreteUtil,
Contact,
ContactRegister,
Country,
CustomManagerNameModel,
DefaultTextFieldChangeReasonModel,
Document,
Employee,
ExternalModelSpecifiedWithAppParam,
ExternalModelWithAppLabel,
FileModel,
ForeignKeyToSelfModel,
HistoricalChoice,
HistoricalCustomFKError,
HistoricalPoll,
HistoricalPollWithHistoricalIPAddress,
HistoricalState,
InheritedRestaurant,
Library,
ManyToManyModelOther,
ModelWithExcludedManyToMany,
ModelWithFkToModelWithHistoryUsingBaseModelDb,
ModelWithHistoryInDifferentDb,
ModelWithHistoryUsingBaseModelDb,
MultiOneToOne,
MyOverrideModelNameRegisterMethod1,
OverrideModelNameAsCallable,
OverrideModelNameUsingBaseModel1,
Person,
Place,
Poll,
PollInfo,
PollWithExcludedFieldsWithDefaults,
PollWithExcludedFKField,
PollWithExcludeFields,
PollWithHistoricalIPAddress,
Province,
Restaurant,
SelfFK,
Series,
SeriesWork,
State,
Street,
Temperature,
UnicodeVerboseName,
UserTextFieldChangeReasonModel,
UUIDDefaultModel,
UUIDModel,
WaterLevel,
)
# Convenience aliases used throughout the test module.
get_model = apps.get_model
User = get_user_model()

# Reference dates placed far in the future so they cannot collide with
# timestamps produced by the code under test.
today = datetime(3021, 1, 1, 10, 0)
tomorrow = today + timedelta(days=1)
yesterday = today - timedelta(days=1)
def get_fake_file(filename):
    """Return an in-memory ``ContentFile`` with dummy data named *filename*."""
    content = ContentFile("file data")
    content.name = filename
    return content
class HistoricalRecordsTest(TestCase):
    """End-to-end tests for the history rows created by ``HistoricalRecords``.

    Covers create/update/delete tracking, change reasons, excluded fields,
    custom history-user models, special primary keys and record diffing.
    """

    def assertDatetimesEqual(self, time1, time2):
        # History timestamps are stamped at save time; allow a small skew.
        self.assertAlmostEqual(time1, time2, delta=timedelta(seconds=2))

    def assertRecordValues(self, record, klass, values_dict):
        """Assert *record* carries *values_dict* and that ``history_object``
        reconstructs an instance of *klass* with the same field values."""
        for key, value in values_dict.items():
            self.assertEqual(getattr(record, key), value)
        self.assertEqual(record.history_object.__class__, klass)
        for key, value in values_dict.items():
            if key not in ["history_type", "history_change_reason"]:
                self.assertEqual(getattr(record.history_object, key), value)

    def test_create(self):
        p = Poll(question="what's up?", pub_date=today)
        p.save()
        (record,) = p.history.all()
        self.assertRecordValues(
            record,
            Poll,
            {
                "question": "what's up?",
                "pub_date": today,
                "id": p.id,
                "history_type": "+",
            },
        )
        self.assertDatetimesEqual(record.history_date, datetime.now())

    def test_update(self):
        Poll.objects.create(question="what's up?", pub_date=today)
        p = Poll.objects.get()
        p.pub_date = tomorrow
        p.save()
        update_change_reason(p, "future poll")
        update_record, create_record = p.history.all()
        self.assertRecordValues(
            create_record,
            Poll,
            {
                "question": "what's up?",
                "pub_date": today,
                "id": p.id,
                "history_change_reason": None,
                "history_type": "+",
            },
        )
        self.assertRecordValues(
            update_record,
            Poll,
            {
                "question": "what's up?",
                "pub_date": tomorrow,
                "id": p.id,
                "history_change_reason": "future poll",
                "history_type": "~",
            },
        )
        self.assertDatetimesEqual(update_record.history_date, datetime.now())

    def test_delete_verify_change_reason_implicitly(self):
        # Change reason supplied via the `_change_reason` attribute.
        p = Poll.objects.create(question="what's up?", pub_date=today)
        poll_id = p.id
        p._change_reason = "wrongEntry"
        p.delete()
        delete_record, create_record = Poll.history.all()
        self.assertRecordValues(
            create_record,
            Poll,
            {
                "question": "what's up?",
                "pub_date": today,
                "id": poll_id,
                "history_change_reason": None,
                "history_type": "+",
            },
        )
        self.assertRecordValues(
            delete_record,
            Poll,
            {
                "question": "what's up?",
                "pub_date": today,
                "id": poll_id,
                "history_change_reason": "wrongEntry",
                "history_type": "-",
            },
        )

    def test_delete_verify_change_reason_explicity(self):
        # Change reason supplied after deletion via update_change_reason().
        p = Poll.objects.create(question="what's up?", pub_date=today)
        poll_id = p.id
        p.delete()
        update_change_reason(p, "wrongEntry")
        delete_record, create_record = Poll.history.all()
        self.assertRecordValues(
            create_record,
            Poll,
            {
                "question": "what's up?",
                "pub_date": today,
                "id": poll_id,
                "history_change_reason": None,
                "history_type": "+",
            },
        )
        self.assertRecordValues(
            delete_record,
            Poll,
            {
                "question": "what's up?",
                "pub_date": today,
                "id": poll_id,
                "history_change_reason": "wrongEntry",
                "history_type": "-",
            },
        )

    def test_cascade_delete_history(self):
        thames = WaterLevel.objects.create(waters="Thames", level=2.5, date=today)
        nile = WaterLevel.objects.create(waters="Nile", level=2.5, date=today)
        self.assertEqual(len(thames.history.all()), 1)
        self.assertEqual(len(nile.history.all()), 1)
        nile.delete()
        # Deleting the instance also removes its history rows.
        self.assertEqual(len(thames.history.all()), 1)
        self.assertEqual(len(nile.history.all()), 0)

    def test_save_without_historical_record(self):
        pizza_place = Restaurant.objects.create(name="Pizza Place", rating=3)
        pizza_place.rating = 4
        # This save must not produce a history row.
        pizza_place.save_without_historical_record()
        pizza_place.rating = 6
        pizza_place.save()
        update_record, create_record = Restaurant.updates.all()
        self.assertRecordValues(
            create_record,
            Restaurant,
            {
                "name": "Pizza Place",
                "rating": 3,
                "id": pizza_place.id,
                "history_type": "+",
            },
        )
        self.assertRecordValues(
            update_record,
            Restaurant,
            {
                "name": "Pizza Place",
                "rating": 6,
                "id": pizza_place.id,
                "history_type": "~",
            },
        )

    def test_save_without_historical_record_for_registered_model(self):
        model = ExternalModelSpecifiedWithAppParam.objects.create(
            name="registered model"
        )
        self.assertTrue(hasattr(model, "save_without_historical_record"))

    def test_save_raises_exception(self):
        anthony = Person(name="Anthony Gillard")
        with self.assertRaises(RuntimeError):
            anthony.save_without_historical_record()
        # The temporary skip flag must not leak after the failed save.
        self.assertFalse(hasattr(anthony, "skip_history_when_saving"))
        self.assertEqual(Person.history.count(), 0)
        anthony.save()
        self.assertEqual(Person.history.count(), 1)

    def test_foreignkey_field(self):
        why_poll = Poll.objects.create(question="why?", pub_date=today)
        how_poll = Poll.objects.create(question="how?", pub_date=today)
        choice = Choice.objects.create(poll=why_poll, votes=0)
        choice.poll = how_poll
        choice.save()
        update_record, create_record = Choice.history.all()
        self.assertRecordValues(
            create_record,
            Choice,
            {"poll_id": why_poll.id, "votes": 0, "id": choice.id, "history_type": "+"},
        )
        self.assertRecordValues(
            update_record,
            Choice,
            {"poll_id": how_poll.id, "votes": 0, "id": choice.id, "history_type": "~"},
        )

    def test_foreignkey_still_allows_reverse_lookup_via_set_attribute(self):
        lib = Library.objects.create()
        state = State.objects.create(library=lib)
        self.assertTrue(hasattr(lib, "state_set"))
        self.assertIsNone(
            state._meta.get_field("library").remote_field.related_name,
            "the '+' shouldn't leak through to the original "
            "model's field related_name",
        )

    def test_file_field(self):
        filename = str(uuid.uuid4())
        model = FileModel.objects.create(file=get_fake_file(filename))
        self.assertEqual(model.file.name, "files/{}".format(filename))
        model.file.delete()
        update_record, create_record = model.history.all()
        self.assertEqual(create_record.file, "files/{}".format(filename))
        self.assertEqual(update_record.file, "")

    def test_file_field_with_char_field_setting(self):
        # setting means history table's file field is a CharField
        file_field = CharFieldFileModel.history.model._meta.get_field("file")
        self.assertIs(type(file_field), models.CharField)
        self.assertEqual(file_field.max_length, 100)
        # file field works the same as test_file_field()
        filename = str(uuid.uuid4())
        model = CharFieldFileModel.objects.create(file=get_fake_file(filename))
        self.assertEqual(model.file.name, "files/{}".format(filename))
        model.file.delete()
        update_record, create_record = model.history.all()
        self.assertEqual(create_record.file, "files/{}".format(filename))
        self.assertEqual(update_record.file, "")

    def test_inheritance(self):
        pizza_place = Restaurant.objects.create(name="Pizza Place", rating=3)
        pizza_place.rating = 4
        pizza_place.save()
        update_record, create_record = Restaurant.updates.all()
        self.assertRecordValues(
            create_record,
            Restaurant,
            {
                "name": "Pizza Place",
                "rating": 3,
                "id": pizza_place.id,
                "history_type": "+",
            },
        )
        self.assertRecordValues(
            update_record,
            Restaurant,
            {
                "name": "Pizza Place",
                "rating": 4,
                "id": pizza_place.id,
                "history_type": "~",
            },
        )

    def test_specify_history_user(self):
        user1 = User.objects.create_user("user1", "1@example.com")
        user2 = User.objects.create_user("user2", "1@example.com")
        document = Document.objects.create(changed_by=user1)
        document.changed_by = user2
        document.save()
        document.changed_by = None
        document.save()
        # History is returned most-recent-first.
        self.assertEqual(
            [d.history_user for d in document.history.all()], [None, user2, user1]
        )

    def test_specify_history_user_self_reference_delete(self):
        user1 = User.objects.create_user("user1", "1@example.com")
        user2 = User.objects.create_user("user2", "1@example.com")
        document = Document.objects.create(changed_by=user1)
        document.changed_by = user2
        document.save()
        document.changed_by = None
        document.save()
        self.assertEqual(
            [d.history_user for d in document.history.all()], [None, user2, user1]
        )
        # Change back to user1
        document.changed_by = user1
        document.save()
        # Deleting user1 will cascade delete the document,
        # but fails when it tries to make the historical
        # record for the deleted user1.
        # This test performs differently on Postgres vs. SQLite
        # because of how the two database handle database constraints
        try:
            user1.delete()
        except IntegrityError as e:
            self.fail(e)

    def test_specify_history_date_1(self):
        temperature = Temperature.objects.create(
            location="London", temperature=14, _history_date=today
        )
        temperature.temperature = 16
        temperature._history_date = yesterday
        temperature.save()
        self.assertEqual(
            [t.history_date for t in temperature.history.all()], [today, yesterday]
        )

    def test_specify_history_date_2(self):
        river = WaterLevel.objects.create(waters="Thames", level=2.5, date=today)
        river.level = 2.6
        river.date = yesterday
        river.save()
        for t in river.history.all():
            self.assertEqual(t.date, t.history_date)

    def test_non_default_primary_key_save(self):
        book1 = Book.objects.create(isbn="1-84356-028-1")
        book2 = Book.objects.create(isbn="1-84356-028-2")
        library = Library.objects.create(book=book1)
        library.book = book2
        library.save()
        library.book = None
        library.save()
        self.assertEqual(
            [lib.book_id for lib in library.history.all()], [None, book2.pk, book1.pk]
        )

    def test_string_defined_foreign_key_save(self):
        library1 = Library.objects.create()
        library2 = Library.objects.create()
        state = State.objects.create(library=library1)
        state.library = library2
        state.save()
        state.library = None
        state.save()
        self.assertEqual(
            [s.library_id for s in state.history.all()],
            [None, library2.pk, library1.pk],
        )

    def test_self_referential_foreign_key(self):
        model = SelfFK.objects.create()
        other = SelfFK.objects.create()
        model.fk = model
        model.save()
        model.fk = other
        model.save()
        self.assertEqual(
            [m.fk_id for m in model.history.all()], [other.id, model.id, None]
        )

    def test_to_field_foreign_key_save(self):
        country = Country.objects.create(code="US")
        country2 = Country.objects.create(code="CA")
        province = Province.objects.create(country=country)
        province.country = country2
        province.save()
        # History stores the to_field value (the code), not the pk.
        self.assertEqual(
            [c.country_id for c in province.history.all()],
            [country2.code, country.code],
        )

    def test_db_column_foreign_key_save(self):
        country = Country.objects.create(code="US")
        city = City.objects.create(country=country)
        country_field = City._meta.get_field("country")
        self.assertIn(
            getattr(country_field, "db_column"), str(city.history.all().query)
        )

    def test_raw_save(self):
        # Raw saves (e.g. fixture loading) must not create history rows.
        document = Document()
        document.save_base(raw=True)
        self.assertEqual(document.history.count(), 0)
        document.save()
        self.assertRecordValues(
            document.history.get(),
            Document,
            {"changed_by_id": None, "id": document.id, "history_type": "~"},
        )

    def test_unicode_verbose_name(self):
        instance = UnicodeVerboseName()
        instance.save()
        self.assertEqual(
            "historical \u570b", instance.history.all()[0]._meta.verbose_name
        )

    def test_user_can_set_verbose_name(self):
        b = Book(isbn="54321")
        b.save()
        self.assertEqual("dead trees", b.history.all()[0]._meta.verbose_name)

    def test_historical_verbose_name_follows_model_verbose_name(self):
        library = Library()
        library.save()
        self.assertEqual(
            "historical quiet please", library.history.get()._meta.verbose_name
        )

    def test_foreignkey_primarykey(self):
        """Test saving a tracked model with a `ForeignKey` primary key."""
        poll = Poll(pub_date=today)
        poll.save()
        poll_info = PollInfo(poll=poll)
        poll_info.save()

    def test_model_with_excluded_fields(self):
        p = PollWithExcludeFields(
            question="what's up?", pub_date=today, place="The Pub"
        )
        p.save()
        history = PollWithExcludeFields.history.all()[0]
        all_fields_names = [f.name for f in history._meta.fields]
        self.assertIn("question", all_fields_names)
        self.assertNotIn("pub_date", all_fields_names)
        self.assertEqual(history.question, p.question)
        self.assertEqual(history.place, p.place)

        most_recent = p.history.most_recent()
        self.assertIn("question", all_fields_names)
        self.assertNotIn("pub_date", all_fields_names)
        self.assertEqual(most_recent.__class__, PollWithExcludeFields)
        self.assertIn("pub_date", history._history_excluded_fields)
        self.assertEqual(most_recent.question, p.question)
        self.assertEqual(most_recent.place, p.place)

    def test_user_model_override(self):
        user1 = User.objects.create_user("user1", "1@example.com")
        user2 = User.objects.create_user("user2", "1@example.com")
        member1 = BucketMember.objects.create(name="member1", user=user1)
        member2 = BucketMember.objects.create(name="member2", user=user2)
        bucket_data = BucketData.objects.create(changed_by=member1)
        bucket_data.changed_by = member2
        bucket_data.save()
        bucket_data.changed_by = None
        bucket_data.save()
        self.assertEqual(
            [d.history_user for d in bucket_data.history.all()],
            [None, member2, member1],
        )

    def test_user_model_override_registered(self):
        user1 = User.objects.create_user("user1", "1@example.com")
        user2 = User.objects.create_user("user2", "1@example.com")
        member1 = BucketMember.objects.create(name="member1", user=user1)
        member2 = BucketMember.objects.create(name="member2", user=user2)
        bucket_data = BucketDataRegisterChangedBy.objects.create(changed_by=member1)
        bucket_data.changed_by = member2
        bucket_data.save()
        bucket_data.changed_by = None
        bucket_data.save()
        self.assertEqual(
            [d.history_user for d in bucket_data.history.all()],
            [None, member2, member1],
        )

    def test_uuid_history_id(self):
        entry = UUIDModel.objects.create()
        history = entry.history.all()[0]
        self.assertTrue(isinstance(history.history_id, uuid.UUID))

    def test_uuid_default_history_id(self):
        entry = UUIDDefaultModel.objects.create()
        history = entry.history.all()[0]
        self.assertTrue(isinstance(history.history_id, uuid.UUID))

    def test_default_history_change_reason(self):
        entry = CharFieldChangeReasonModel.objects.create(greeting="what's up?")
        history = entry.history.get()
        self.assertEqual(history.history_change_reason, None)

    def test_charfield_history_change_reason(self):
        # Default CharField and length
        entry = CharFieldChangeReasonModel.objects.create(greeting="what's up?")
        entry.greeting = "what is happening?"
        entry.save()
        update_change_reason(entry, "Change greeting.")

        history = entry.history.all()[0]
        field = history._meta.get_field("history_change_reason")
        self.assertTrue(isinstance(field, models.CharField))
        self.assertTrue(field.max_length, 100)

    def test_default_textfield_history_change_reason(self):
        # TextField usage is determined by settings
        entry = DefaultTextFieldChangeReasonModel.objects.create(greeting="what's up?")
        entry.greeting = "what is happening?"
        entry.save()

        reason = "Change greeting"
        update_change_reason(entry, reason)

        history = entry.history.all()[0]
        field = history._meta.get_field("history_change_reason")
        self.assertTrue(isinstance(field, models.TextField))
        self.assertEqual(history.history_change_reason, reason)

    def test_user_textfield_history_change_reason(self):
        # TextField instance is passed in init
        entry = UserTextFieldChangeReasonModel.objects.create(greeting="what's up?")
        entry.greeting = "what is happening?"
        entry.save()

        reason = "Change greeting"
        update_change_reason(entry, reason)

        history = entry.history.all()[0]
        field = history._meta.get_field("history_change_reason")
        self.assertTrue(isinstance(field, models.TextField))
        self.assertEqual(history.history_change_reason, reason)

    def test_history_diff_includes_changed_fields(self):
        p = Poll.objects.create(question="what's up?", pub_date=today)
        p.question = "what's up, man?"
        p.save()
        new_record, old_record = p.history.all()
        delta = new_record.diff_against(old_record)
        # FIX: the expected new value previously read "what's up, man"
        # (missing the trailing "?"), disagreeing with the value actually
        # saved above; latent because only `.field` is asserted below.
        expected_change = ModelChange("question", "what's up?", "what's up, man?")
        self.assertEqual(delta.changed_fields, ["question"])
        self.assertEqual(delta.old_record, old_record)
        self.assertEqual(delta.new_record, new_record)
        self.assertEqual(expected_change.field, delta.changes[0].field)

    def test_history_diff_does_not_include_unchanged_fields(self):
        p = Poll.objects.create(question="what's up?", pub_date=today)
        p.question = "what's up, man?"
        p.save()
        new_record, old_record = p.history.all()
        delta = new_record.diff_against(old_record)
        self.assertNotIn("pub_date", delta.changed_fields)

    def test_history_diff_includes_changed_fields_of_base_model(self):
        r = InheritedRestaurant.objects.create(name="McDonna", serves_hot_dogs=False)
        # change base model field
        r.name = "DonnutsKing"
        r.save()
        new_record, old_record = r.history.all()
        delta = new_record.diff_against(old_record)
        expected_change = ModelChange("name", "McDonna", "DonnutsKing")
        self.assertEqual(delta.changed_fields, ["name"])
        self.assertEqual(delta.old_record, old_record)
        self.assertEqual(delta.new_record, new_record)
        self.assertEqual(expected_change.field, delta.changes[0].field)

    def test_history_diff_with_incorrect_type(self):
        p = Poll.objects.create(question="what's up?", pub_date=today)
        p.question = "what's up, man?"
        p.save()
        new_record, old_record = p.history.all()
        with self.assertRaises(TypeError):
            new_record.diff_against("something")

    def test_history_diff_with_excluded_fields(self):
        p = Poll.objects.create(question="what's up?", pub_date=today)
        p.question = "what's up, man?"
        p.save()
        new_record, old_record = p.history.all()
        delta = new_record.diff_against(old_record, excluded_fields=("question",))
        self.assertEqual(delta.changed_fields, [])
        self.assertEqual(delta.changes, [])
class GetPrevRecordAndNextRecordTestCase(TestCase):
def assertRecordsMatch(self, record_a, record_b):
self.assertEqual(record_a, record_b)
self.assertEqual(record_a.question, record_b.question)
def setUp(self):
self.poll = Poll(question="what's up?", pub_date=today)
self.poll.save()
def test_get_prev_record(self):
self.poll.question = "ask questions?"
self.poll.save()
self.poll.question = "eh?"
self.poll.save()
self.poll.question = "one more?"
self.poll.save()
first_record = self.poll.history.filter(question="what's up?").get()
second_record = self.poll.history.filter(question="ask questions?").get()
third_record = self.poll.history.filter(question="eh?").get()
fourth_record = self.poll.history.filter(question="one more?").get()
with self.assertNumQueries(1):
self.assertRecordsMatch(second_record.prev_record, first_record)
with self.assertNumQueries(1):
self.assertRecordsMatch(third_record.prev_record, second_record)
with self.assertNumQueries(1):
self.assertRecordsMatch(fourth_record.prev_record, third_record)
def test_get_prev_record_none_if_only(self):
self.assertEqual(self.poll.history.count(), 1)
record = self.poll.history.get()
self.assertIsNone(record.prev_record)
def test_get_prev_record_none_if_earliest(self):
self.poll.question = "ask questions?"
self.poll.save()
first_record = self.poll.history.filter(question="what's up?").get()
self.assertIsNone(first_record.prev_record)
def test_get_prev_record_with_custom_manager_name(self):
instance = CustomManagerNameModel.objects.create(name="Test name 1")
instance.name = "Test name 2"
instance.save()
first_record = instance.log.filter(name="Test name 1").get()
second_record = instance.log.filter(name="Test name 2").get()
self.assertEqual(second_record.prev_record, first_record)
def test_get_prev_record_with_excluded_field(self):
instance = PollWithExcludeFields.objects.create(
question="what's up?", pub_date=today
)
instance.question = "ask questions?"
instance.save()
first_record = instance.history.filter(question="what's up?").get()
second_record = instance.history.filter(question="ask questions?").get()
with self.assertNumQueries(1):
self.assertRecordsMatch(second_record.prev_record, first_record)
def test_get_next_record(self):
self.poll.question = "ask questions?"
self.poll.save()
self.poll.question = "eh?"
self.poll.save()
self.poll.question = "one more?"
self.poll.save()
first_record = self.poll.history.filter(question="what's up?").get()
second_record = self.poll.history.filter(question="ask questions?").get()
third_record = self.poll.history.filter(question="eh?").get()
fourth_record = self.poll.history.filter(question="one more?").get()
self.assertIsNone(fourth_record.next_record)
with self.assertNumQueries(1):
self.assertRecordsMatch(first_record.next_record, second_record)
with self.assertNumQueries(1):
self.assertRecordsMatch(second_record.next_record, third_record)
with self.assertNumQueries(1):
self.assertRecordsMatch(third_record.next_record, fourth_record)
def test_get_next_record_none_if_only(self):
self.assertEqual(self.poll.history.count(), 1)
record = self.poll.history.get()
self.assertIsNone(record.next_record)
def test_get_next_record_none_if_most_recent(self):
self.poll.question = "ask questions?"
self.poll.save()
recent_record = self.poll.history.filter(question="ask questions?").get()
self.assertIsNone(recent_record.next_record)
def test_get_next_record_with_custom_manager_name(self):
instance = CustomManagerNameModel.objects.create(name="Test name 1")
instance.name = "Test name 2"
instance.save()
first_record = instance.log.filter(name="Test name 1").get()
second_record = instance.log.filter(name="Test name 2").get()
self.assertEqual(first_record.next_record, second_record)
    def test_get_next_record_with_excluded_field(self):
        # next_record must also work on history models whose excluded fields
        # are absent from the historical table.
        instance = PollWithExcludeFields.objects.create(
            question="what's up?", pub_date=today
        )
        instance.question = "ask questions?"
        instance.save()
        first_record = instance.history.filter(question="what's up?").get()
        second_record = instance.history.filter(question="ask questions?").get()
        # Resolving next_record must cost exactly one query.
        with self.assertNumQueries(1):
            self.assertRecordsMatch(first_record.next_record, second_record)
class CreateHistoryModelTests(unittest.TestCase):
    """create_history_model() must cope with OneToOneFields targeting
    various field types without raising.

    Bug fix: the implicit string-literal concatenation in each ``self.fail``
    message lacked a separating space, producing "...one to onefields...".
    """
    def test_create_history_model_with_one_to_one_field_to_integer_field(self):
        records = HistoricalRecords()
        records.module = AdminProfile.__module__
        try:
            records.create_history_model(AdminProfile, False)
        except Exception:
            self.fail(
                "SimpleHistory should handle foreign keys to one to one "
                "fields to integer fields without throwing an exception"
            )
    def test_create_history_model_with_one_to_one_field_to_char_field(self):
        records = HistoricalRecords()
        records.module = Bookcase.__module__
        try:
            records.create_history_model(Bookcase, False)
        except Exception:
            self.fail(
                "SimpleHistory should handle foreign keys to one to one "
                "fields to char fields without throwing an exception."
            )
    def test_create_history_model_with_multiple_one_to_ones(self):
        records = HistoricalRecords()
        records.module = MultiOneToOne.__module__
        try:
            records.create_history_model(MultiOneToOne, False)
        except Exception:
            self.fail(
                "SimpleHistory should handle foreign keys to one to one "
                "fields to one to one fields without throwing an "
                "exception."
            )
class CustomModelNameTests(unittest.TestCase):
    """Tests for the ``custom_model_name`` option (string or callable).

    Bug fixes:
    - removed a stray ``{}`` placeholder from a ``self.fail`` message that
      was never formatted;
    - ``test_register_history_model_with_custom_model_name_override``
      previously caught ValueError and called ``self.assertRaises(ValueError)``
      without a ``with`` block, which asserts nothing; the test now genuinely
      requires ``register()`` to raise when the custom name equals the model
      name.
    """
    def verify_custom_model_name_feature(
        self, model, expected_class_name, expected_table_name
    ):
        # Shared assertion helper: check both the generated class name and
        # the underlying db table of the historical model.
        history_model = model.history.model
        self.assertEqual(history_model.__name__, expected_class_name)
        self.assertEqual(history_model._meta.db_table, expected_table_name)
    def test_instantiate_history_model_with_custom_model_name_as_string(self):
        try:
            from ..models import OverrideModelNameAsString
        except ImportError:
            self.fail("OverrideModelNameAsString is in wrong module")
        expected_cls_name = "MyHistoricalCustomNameModel"
        self.verify_custom_model_name_feature(
            OverrideModelNameAsString(),
            expected_cls_name,
            "tests_{}".format(expected_cls_name.lower()),
        )
    def test_register_history_model_with_custom_model_name_override(self):
        try:
            from ..models import OverrideModelNameRegisterMethod1
        except ImportError:
            self.fail("OverrideModelNameRegisterMethod1 is in wrong module")
        cls = OverrideModelNameRegisterMethod1()
        expected_cls_name = "MyOverrideModelNameRegisterMethod1"
        self.verify_custom_model_name_feature(
            cls, expected_cls_name, "tests_{}".format(expected_cls_name.lower())
        )
        from simple_history import register
        from ..models import OverrideModelNameRegisterMethod2
        # A custom_model_name identical to the model's own name is invalid
        # and must raise ValueError.
        with self.assertRaises(ValueError):
            register(
                OverrideModelNameRegisterMethod2,
                custom_model_name=lambda x: "{}".format(x),
            )
    def test_register_history_model_with_custom_model_name_from_abstract_model(self):
        cls = OverrideModelNameUsingBaseModel1
        expected_cls_name = "Audit{}".format(cls.__name__)
        self.verify_custom_model_name_feature(
            cls, expected_cls_name, "tests_" + expected_cls_name.lower()
        )
    def test_register_history_model_with_custom_model_name_from_external_model(self):
        from ..models import OverrideModelNameUsingExternalModel1
        cls = OverrideModelNameUsingExternalModel1
        expected_cls_name = "Audit{}".format(cls.__name__)
        self.verify_custom_model_name_feature(
            cls, expected_cls_name, "tests_" + expected_cls_name.lower()
        )
        from ..models import OverrideModelNameUsingExternalModel2
        cls = OverrideModelNameUsingExternalModel2
        expected_cls_name = "Audit{}".format(cls.__name__)
        self.verify_custom_model_name_feature(
            cls, expected_cls_name, "external_" + expected_cls_name.lower()
        )
class AppLabelTest(TestCase):
    """Historical models must land in the expected app and db table,
    whether the app label is explicit, default, or given at register()."""

    def get_table_name(self, manager):
        # The db table backing the manager's model.
        return manager.model._meta.db_table

    def test_explicit_app_label(self):
        self.assertEqual(
            "external_externalmodelwithapplabel",
            self.get_table_name(ExternalModelWithAppLabel.objects),
        )
        self.assertEqual(
            "external_historicalexternalmodelwithapplabel",
            self.get_table_name(ExternalModelWithAppLabel.history),
        )

    def test_default_app_label(self):
        self.assertEqual(
            "external_externalmodel", self.get_table_name(ExternalModel.objects)
        )
        self.assertEqual(
            "external_historicalexternalmodel",
            self.get_table_name(ExternalModel.history),
        )

    def test_register_app_label(self):
        # Pairs of (manager, expected db table).
        expectations = [
            (
                ExternalModelSpecifiedWithAppParam.objects,
                "tests_externalmodelspecifiedwithappparam",
            ),
            (
                ExternalModelSpecifiedWithAppParam.histories,
                "external_historicalexternalmodelspecifiedwithappparam",
            ),
            (ExternalModelRegistered.objects, "external_externalmodelregistered"),
            (
                ExternalModelRegistered.histories,
                "tests_historicalexternalmodelregistered",
            ),
            (ConcreteExternal.objects, "tests_concreteexternal"),
            (ConcreteExternal.history, "tests_historicalconcreteexternal"),
        ]
        for manager, expected_table in expectations:
            self.assertEqual(expected_table, self.get_table_name(manager))

    def test_get_model(self):
        # Pairs of (get_model arguments, expected model class).
        cases = [
            (("external", "ExternalModelWithAppLabel"), ExternalModelWithAppLabel),
            (
                ("external", "HistoricalExternalModelWithAppLabel"),
                ExternalModelWithAppLabel.history.model,
            ),
            (("external", "ExternalModel"), ExternalModel),
            (("external", "HistoricalExternalModel"), ExternalModel.history.model),
            (
                ("tests", "ExternalModelSpecifiedWithAppParam"),
                ExternalModelSpecifiedWithAppParam,
            ),
            (
                ("external", "HistoricalExternalModelSpecifiedWithAppParam"),
                ExternalModelSpecifiedWithAppParam.histories.model,
            ),
            (("external", "ExternalModelRegistered"), ExternalModelRegistered),
            (
                ("tests", "HistoricalExternalModelRegistered"),
                ExternalModelRegistered.histories.model,
            ),
            # The historical model is defined within the app of the concrete
            # model rather than the abstract base model.
            (("tests", "ConcreteExternal"), ConcreteExternal),
            (("tests", "HistoricalConcreteExternal"), ConcreteExternal.history.model),
        ]
        for args, expected_model in cases:
            self.assertEqual(expected_model, get_model(*args))
class HistoryManagerTest(TestCase):
    """Exercises the history manager API: most_recent(), as_of(), foreign-key
    tracking, abstract-base inheritance and related-model resolution."""
    def test_most_recent(self):
        poll = Poll.objects.create(question="what's up?", pub_date=today)
        poll.question = "how's it going?"
        poll.save()
        poll.question = "why?"
        poll.save()
        # "how?" is never saved, so it must not appear in the history.
        poll.question = "how?"
        most_recent = poll.history.most_recent()
        self.assertEqual(most_recent.__class__, Poll)
        self.assertEqual(most_recent.question, "why?")
    def test_get_model(self):
        self.assertEqual(get_model("tests", "poll"), Poll)
        self.assertEqual(get_model("tests", "historicalpoll"), HistoricalPoll)
    def test_most_recent_on_model_class(self):
        # most_recent() is only valid on an instance's manager, not the class.
        Poll.objects.create(question="what's up?", pub_date=today)
        self.assertRaises(TypeError, Poll.history.most_recent)
    def test_most_recent_nonexistant(self):
        # Unsaved poll
        poll = Poll(question="what's up?", pub_date=today)
        self.assertRaises(Poll.DoesNotExist, poll.history.most_recent)
        # Deleted poll
        poll.save()
        poll.delete()
        self.assertRaises(Poll.DoesNotExist, poll.history.most_recent)
    def test_as_of(self):
        poll = Poll.objects.create(question="what's up?", pub_date=today)
        poll.question = "how's it going?"
        poll.save()
        poll.question = "why?"
        poll.save()
        poll.question = "how?"
        most_recent = poll.history.most_recent()
        self.assertEqual(most_recent.question, "why?")
        # history.all() is newest-first, so times[0] is the latest snapshot.
        times = [r.history_date for r in poll.history.all()]
        def question_as_of(time):
            return poll.history.as_of(time).question
        self.assertEqual(question_as_of(times[0]), "why?")
        self.assertEqual(question_as_of(times[1]), "how's it going?")
        self.assertEqual(question_as_of(times[2]), "what's up?")
    def test_as_of_nonexistant(self):
        # Unsaved poll
        poll = Poll(question="what's up?", pub_date=today)
        time = datetime.now()
        self.assertRaises(Poll.DoesNotExist, poll.history.as_of, time)
        # Deleted poll
        poll.save()
        poll.delete()
        self.assertRaises(Poll.DoesNotExist, poll.history.as_of, time)
    def test_as_of_excluded_many_to_many_succeeds(self):
        other1 = ManyToManyModelOther.objects.create(name="test1")
        other2 = ManyToManyModelOther.objects.create(name="test2")
        m = ModelWithExcludedManyToMany.objects.create(name="test")
        m.other.add(other1, other2)
        # This will fail if the ManyToMany field is not excluded.
        self.assertEqual(m.history.as_of(datetime.now()), m)
    def test_foreignkey_field(self):
        # Historical records must track FK reassignment over time.
        why_poll = Poll.objects.create(question="why?", pub_date=today)
        how_poll = Poll.objects.create(question="how?", pub_date=today)
        choice = Choice.objects.create(poll=why_poll, votes=0)
        choice.poll = how_poll
        choice.save()
        most_recent = choice.history.most_recent()
        self.assertEqual(most_recent.poll.pk, how_poll.pk)
        times = [r.history_date for r in choice.history.all()]
        def poll_as_of(time):
            return choice.history.as_of(time).poll
        self.assertEqual(poll_as_of(times[0]).pk, how_poll.pk)
        self.assertEqual(poll_as_of(times[1]).pk, why_poll.pk)
    def test_abstract_inheritance(self):
        # Historical records of concrete subclasses inherit from the
        # abstract base passed via bases=.
        for klass in (ConcreteAttr, ConcreteUtil):
            obj = klass.objects.create()
            obj.save()
            update_record, create_record = klass.history.all()
            self.assertTrue(isinstance(update_record, AbstractBase))
            self.assertTrue(isinstance(create_record, AbstractBase))
    def test_invalid_bases(self):
        # bases= must be an iterable of model classes.
        invalid_bases = (AbstractBase, "InvalidBases")
        for bases in invalid_bases:
            self.assertRaises(TypeError, HistoricalRecords, bases=bases)
    def test_import_related(self):
        # FKs on historical models resolve to the historical counterpart.
        field_object = HistoricalChoice._meta.get_field("poll")
        related_model = field_object.remote_field.related_model
        self.assertEqual(related_model, HistoricalChoice)
    def test_string_related(self):
        # Same resolution for FKs declared as string references.
        field_object = HistoricalState._meta.get_field("library")
        related_model = field_object.remote_field.related_model
        self.assertEqual(related_model, HistoricalState)
    def test_state_serialization_of_customfk(self):
        # Must not raise during migration-state serialization.
        from django.db.migrations import state
        state.ModelState.from_model(HistoricalCustomFKError)
class TestOrderWrtField(TestCase):
    """Check behaviour of _order field added by Meta.order_with_respect_to.
    The Meta.order_with_respect_to option adds an OrderWrt field named
    "_order", where OrderWrt is a proxy class for an IntegerField that sets
    some default options.
    The simple_history strategy is:
    - Convert to a plain IntegerField in the historical record
    - When restoring a historical instance, add the old value. This may
      result in duplicate ordering values and non-deterministic ordering.
    """
    def setUp(self):
        """Create works in published order."""
        s = self.series = Series.objects.create(
            name="The Chronicles of Narnia", author="C.S. Lewis"
        )
        self.w_lion = s.works.create(title="The Lion, the Witch and the Wardrobe")
        self.w_caspian = s.works.create(title="Prince Caspian")
        self.w_voyage = s.works.create(title="The Voyage of the Dawn Treader")
        self.w_chair = s.works.create(title="The Silver Chair")
        self.w_horse = s.works.create(title="The Horse and His Boy")
        self.w_nephew = s.works.create(title="The Magician's Nephew")
        self.w_battle = s.works.create(title="The Last Battle")
    def test_order(self):
        """Confirm that works are ordered by creation."""
        order = self.series.get_serieswork_order()
        expected = [
            self.w_lion.pk,
            self.w_caspian.pk,
            self.w_voyage.pk,
            self.w_chair.pk,
            self.w_horse.pk,
            self.w_nephew.pk,
            self.w_battle.pk,
        ]
        self.assertSequenceEqual(order, expected)
        # _order values are assigned sequentially at creation time.
        self.assertEqual(0, self.w_lion._order)
        self.assertEqual(1, self.w_caspian._order)
        self.assertEqual(2, self.w_voyage._order)
        self.assertEqual(3, self.w_chair._order)
        self.assertEqual(4, self.w_horse._order)
        self.assertEqual(5, self.w_nephew._order)
        self.assertEqual(6, self.w_battle._order)
    def test_order_field_in_historical_model(self):
        # The live model uses the OrderWrt proxy; the historical model
        # converts it to a plain IntegerField.
        work_order_field = self.w_lion._meta.get_field("_order")
        self.assertEqual(type(work_order_field), OrderWrt)
        history = self.w_lion.history.all()[0]
        history_order_field = history._meta.get_field("_order")
        self.assertEqual(type(history_order_field), models.IntegerField)
    def test_history_object_has_order(self):
        # Restoring through history_object preserves the stored _order.
        history = self.w_lion.history.all()[0]
        self.assertEqual(self.w_lion._order, history.history_object._order)
    def test_restore_object_with_changed_order(self):
        # Change a title
        self.w_caspian.title = "Prince Caspian: The Return to Narnia"
        self.w_caspian.save()
        self.assertEqual(2, len(self.w_caspian.history.all()))
        self.assertEqual(1, self.w_caspian._order)
        # Switch to internal chronological order
        chronological = [
            self.w_nephew.pk,
            self.w_lion.pk,
            self.w_horse.pk,
            self.w_caspian.pk,
            self.w_voyage.pk,
            self.w_chair.pk,
            self.w_battle.pk,
        ]
        self.series.set_serieswork_order(chronological)
        self.assertSequenceEqual(self.series.get_serieswork_order(), chronological)
        # This uses an update, not a save, so no new history is created
        w_caspian = SeriesWork.objects.get(id=self.w_caspian.id)
        self.assertEqual(2, len(w_caspian.history.all()))
        self.assertEqual(1, w_caspian.history.all()[0]._order)
        self.assertEqual(1, w_caspian.history.all()[1]._order)
        self.assertEqual(3, w_caspian._order)
        # Revert to first title, old order
        old = w_caspian.history.all()[1].history_object
        old.save()
        w_caspian = SeriesWork.objects.get(id=self.w_caspian.id)
        self.assertEqual(3, len(w_caspian.history.all()))
        self.assertEqual(1, w_caspian.history.all()[0]._order)
        self.assertEqual(1, w_caspian.history.all()[1]._order)
        self.assertEqual(1, w_caspian.history.all()[2]._order)
        self.assertEqual(1, w_caspian._order)  # The order changed
        w_lion = SeriesWork.objects.get(id=self.w_lion.id)
        self.assertEqual(1, w_lion._order)  # and is identical to another order
        # New order is non-deterministic around identical IDs
        series = Series.objects.get(id=self.series.id)
        order = series.get_serieswork_order()
        self.assertEqual(order[0], self.w_nephew.pk)
        self.assertTrue(order[1] in (self.w_lion.pk, self.w_caspian.pk))
        self.assertTrue(order[2] in (self.w_lion.pk, self.w_caspian.pk))
        self.assertEqual(order[3], self.w_horse.pk)
        self.assertEqual(order[4], self.w_voyage.pk)
        self.assertEqual(order[5], self.w_chair.pk)
        self.assertEqual(order[6], self.w_battle.pk)
    def test_migrations_include_order(self):
        # The serialized migration state must contain the converted _order
        # field as a plain IntegerField.
        from django.db.migrations import state
        model_state = state.ModelState.from_model(SeriesWork.history.model)
        found = False
        # `fields` is a dict in Django 3.1
        fields = None
        if isinstance(model_state.fields, dict):
            fields = model_state.fields.items()
        else:
            fields = model_state.fields
        for name, field in fields:
            if name == "_order":
                found = True
                self.assertEqual(type(field), models.IntegerField)
        assert found, "_order not in fields " + repr(model_state.fields)
class TestLatest(TestCase):
    """Test behavior of `latest()` without any field parameters"""

    def setUp(self):
        # Produce two historical records for a single poll.
        poll = Poll.objects.create(question="Does `latest()` work?", pub_date=yesterday)
        poll.pub_date = today
        poll.save()

    def write_history(self, new_attributes):
        # Overwrite each historical record's attributes, pairing records
        # with the given attribute dicts in order.
        for record, updates in zip(HistoricalPoll.objects.all(), new_attributes):
            for field_name, field_value in updates.items():
                setattr(record, field_name, field_value)
            record.save()

    def test_ordered(self):
        self.write_history(
            [{"pk": 1, "history_date": yesterday}, {"pk": 2, "history_date": today}]
        )
        self.assertEqual(HistoricalPoll.objects.latest().pk, 2)

    def test_jumbled(self):
        self.write_history(
            [{"pk": 1, "history_date": today}, {"pk": 2, "history_date": yesterday}]
        )
        self.assertEqual(HistoricalPoll.objects.latest().pk, 1)

    def test_sameinstant(self):
        # Equal history_date values: the higher pk wins.
        self.write_history(
            [{"pk": 1, "history_date": yesterday}, {"pk": 2, "history_date": yesterday}]
        )
        self.assertEqual(HistoricalPoll.objects.latest().pk, 2)
class TestMissingOneToOne(TestCase):
    """History must remain complete and restorable after the target of a
    tracked foreign key has been deleted."""

    def setUp(self):
        self.manager1 = Employee.objects.create()
        self.manager2 = Employee.objects.create()
        self.employee = Employee.objects.create(manager=self.manager1)
        self.employee.manager = self.manager2
        self.employee.save()
        # Remember the pk, then delete the first manager row.
        self.manager1_id = self.manager1.id
        self.manager1.delete()

    def test_history_is_complete(self):
        manager_ids = list(
            self.employee.history.order_by("pk").values_list("manager_id", flat=True)
        )
        self.assertEqual([self.manager1_id, self.manager2.id], manager_ids)

    def test_restore_employee(self):
        oldest = self.employee.history.order_by("pk")[0]
        restored = oldest.instance
        self.assertEqual(self.manager1_id, restored.manager_id)
        # The related row is gone, so dereferencing the FK must raise.
        with self.assertRaises(Employee.DoesNotExist):
            restored.manager
class CustomTableNameTest1(TestCase):
    """Custom db_table names must be honoured for historical models."""

    @staticmethod
    def get_table_name(manager):
        # The db table backing the manager's model.
        return manager.model._meta.db_table

    def test_custom_table_name(self):
        self.assertEqual("contacts_history", self.get_table_name(Contact.history))

    def test_custom_table_name_from_register(self):
        self.assertEqual(
            "contacts_register_history", self.get_table_name(ContactRegister.history)
        )
class ExcludeFieldsTest(TestCase):
    """Excluded fields are absent on the historical record but filled back
    in when the instance is restored."""

    def test_restore_pollwithexclude(self):
        poll = PollWithExcludeFields.objects.create(
            question="what's up?", pub_date=today
        )
        record = poll.history.order_by("pk")[0]
        # The excluded field is not stored on the historical record.
        with self.assertRaises(AttributeError):
            record.pub_date
        restored = record.instance
        # ...but restoring recovers it from the live row.
        self.assertEqual(poll.pub_date, restored.pub_date)
class ExcludeFieldsForDeletedObjectTest(TestCase):
    """Restoring a deleted object fills excluded fields from their model
    defaults, or with None when no default exists."""

    def setUp(self):
        self.poll = PollWithExcludedFieldsWithDefaults.objects.create(
            question="what's up?", pub_date=today, max_questions=12
        )
        self.historical = self.poll.history.order_by("pk")[0]
        self.poll.delete()

    def test_restore_deleted_poll_exclude_fields(self):
        restored = self.historical.instance
        # pub_date and max_questions have no default, so both come back None.
        self.assertIsNone(restored.pub_date)
        self.assertIsNone(restored.max_questions)

    def test_restore_deleted_poll_exclude_fields_with_defaults(self):
        restored = self.historical.instance
        self.assertEqual(self.poll.expiration_time, restored.expiration_time)
        self.assertEqual(self.poll.place, restored.place)
        self.assertEqual(self.poll.min_questions, restored.min_questions)
class ExcludeForeignKeyTest(TestCase):
    # Behaviour of a foreign key listed in excluded_fields: the historical
    # record does not store it, and restoring pulls the CURRENT value from
    # the live row (so past FK values are lost).
    def setUp(self):
        self.poll = PollWithExcludedFKField.objects.create(
            question="Is it?",
            pub_date=today,
            place=Place.objects.create(name="Somewhere"),
        )
    def get_first_historical(self):
        """
        Retrieve the first (earliest) HistoricalPoll, ordered by time.
        """
        return self.poll.history.order_by("history_date")[0]
    def test_instance_fk_value(self):
        historical = self.get_first_historical()
        original = historical.instance
        self.assertEqual(original.place, self.poll.place)
    def test_history_lacks_fk(self):
        # The excluded FK is not present on the historical record at all.
        historical = self.get_first_historical()
        with self.assertRaises(AttributeError):
            historical.place
    def test_nb_queries(self):
        # Fetching the record plus restoring must cost exactly two queries.
        with self.assertNumQueries(2):
            historical = self.get_first_historical()
            historical.instance
    def test_changed_value_lost(self):
        # Because the FK is excluded, restoration yields the NEW value,
        # not the value at snapshot time.
        new_place = Place.objects.create(name="More precise")
        self.poll.place = new_place
        self.poll.save()
        historical = self.get_first_historical()
        instance = historical.instance
        self.assertEqual(instance.place, new_place)
def add_static_history_ip_address(sender, **kwargs):
    """Signal handler: stamp a fixed IP address onto the historical record."""
    kwargs["history_instance"].ip_address = "192.168.0.1"
class ExtraFieldsStaticIPAddressTestCase(TestCase):
    # Verifies that a pre_create_historical_record receiver can populate an
    # extra field (ip_address) that exists only on the historical model.
    def setUp(self):
        # Receiver registration must mirror tearDown exactly (same sender
        # and dispatch_uid) so the handler is reliably removed.
        pre_create_historical_record.connect(
            add_static_history_ip_address,
            sender=HistoricalPollWithHistoricalIPAddress,
            dispatch_uid="add_static_history_ip_address",
        )
    def tearDown(self):
        pre_create_historical_record.disconnect(
            add_static_history_ip_address,
            sender=HistoricalPollWithHistoricalIPAddress,
            dispatch_uid="add_static_history_ip_address",
        )
    def test_extra_ip_address_field_populated_on_save(self):
        poll = PollWithHistoricalIPAddress.objects.create(
            question="Will it blend?", pub_date=today
        )
        poll_history = poll.history.first()
        self.assertEqual("192.168.0.1", poll_history.ip_address)
    def test_extra_ip_address_field_not_present_on_poll(self):
        # The extra field only exists on the historical model.
        poll = PollWithHistoricalIPAddress.objects.create(
            question="Will it blend?", pub_date=today
        )
        with self.assertRaises(AttributeError):
            poll.ip_address
def add_dynamic_history_ip_address(sender, **kwargs):
    """Signal handler: copy the requesting client's IP address from the
    middleware-provided request onto the historical record."""
    request = HistoricalRecords.context.request
    kwargs["history_instance"].ip_address = request.META["REMOTE_ADDR"]
@override_settings(**middleware_override_settings)
class ExtraFieldsDynamicIPAddressTestCase(TestCase):
    # With the simple_history middleware installed, a receiver can read the
    # current request from HistoricalRecords.context and stamp request
    # metadata onto the historical record.
    def setUp(self):
        # Registration mirrors tearDown (same sender and dispatch_uid).
        pre_create_historical_record.connect(
            add_dynamic_history_ip_address,
            sender=HistoricalPollWithHistoricalIPAddress,
            dispatch_uid="add_dynamic_history_ip_address",
        )
    def tearDown(self):
        pre_create_historical_record.disconnect(
            add_dynamic_history_ip_address,
            sender=HistoricalPollWithHistoricalIPAddress,
            dispatch_uid="add_dynamic_history_ip_address",
        )
    def test_signal_is_able_to_retrieve_request_from_context(self):
        data = {"question": "Will it blend?", "pub_date": "2018-10-30"}
        self.client.post(reverse("pollip-add"), data=data)
        polls = PollWithHistoricalIPAddress.objects.all()
        self.assertEqual(1, polls.count())
        poll_history = polls[0].history.first()
        # The Django test client always connects from 127.0.0.1.
        self.assertEqual("127.0.0.1", poll_history.ip_address)
class WarningOnAbstractModelWithInheritFalseTest(TestCase):
    # HistoricalRecords on an abstract model without inherit=True is almost
    # certainly a mistake; defining such a class must emit a UserWarning.
    def test_warning_on_abstract_model_with_inherit_false(self):
        with warnings.catch_warnings(record=True) as w:
            # The warning fires at class-definition time, so the model is
            # deliberately defined inside the catch_warnings block.
            class AbstractModelWithInheritFalse(models.Model):
                string = models.CharField()
                history = HistoricalRecords()
                class Meta:
                    abstract = True
            self.assertEqual(len(w), 1)
            self.assertTrue(issubclass(w[0].category, UserWarning))
            self.assertEqual(
                str(w[0].message),
                "HistoricalRecords added to abstract model "
                "(AbstractModelWithInheritFalse) without "
                "inherit=True",
            )
class MultiDBWithUsingTest(TestCase):
    """Asserts historical manager respects `using()` and the `using`
    keyword argument in `save()`.
    """
    # Both database aliases must be declared or Django blocks the queries.
    databases = {"default", "other"}
    db_name = "other"
    def test_multidb_with_using_not_on_default(self):
        # A record created on "other" must NOT appear in default's history.
        model = ModelWithHistoryUsingBaseModelDb.objects.using(self.db_name).create(
            name="1-84356-028-1"
        )
        self.assertRaises(ObjectDoesNotExist, model.history.get, name="1-84356-028-1")
    def test_multidb_with_using_is_on_dbtwo(self):
        # ...but it must be retrievable from "other"'s history.
        model = ModelWithHistoryUsingBaseModelDb.objects.using(self.db_name).create(
            name="1-84356-028-1"
        )
        try:
            model.history.using(self.db_name).get(name="1-84356-028-1")
        except ObjectDoesNotExist:
            self.fail("ObjectDoesNotExist unexpectedly raised.")
    def test_multidb_with_using_and_fk_not_on_default(self):
        # Same contract when the tracked model carries a foreign key.
        model = ModelWithHistoryUsingBaseModelDb.objects.using(self.db_name).create(
            name="1-84356-028-1"
        )
        parent_model = ModelWithFkToModelWithHistoryUsingBaseModelDb.objects.using(
            self.db_name
        ).create(fk=model)
        self.assertRaises(ObjectDoesNotExist, parent_model.history.get, fk=model)
    def test_multidb_with_using_and_fk_on_dbtwo(self):
        model = ModelWithHistoryUsingBaseModelDb.objects.using(self.db_name).create(
            name="1-84356-028-1"
        )
        parent_model = ModelWithFkToModelWithHistoryUsingBaseModelDb.objects.using(
            self.db_name
        ).create(fk=model)
        try:
            parent_model.history.using(self.db_name).get(fk=model)
        except ObjectDoesNotExist:
            self.fail("ObjectDoesNotExist unexpectedly raised.")
    def test_multidb_with_using_keyword_in_save_not_on_default(self):
        # save(using=...) must route the history row like using() does.
        model = ModelWithHistoryUsingBaseModelDb(name="1-84356-028-1")
        model.save(using=self.db_name)
        self.assertRaises(ObjectDoesNotExist, model.history.get, name="1-84356-028-1")
    def test_multidb_with_using_keyword_in_save_on_dbtwo(self):
        model = ModelWithHistoryUsingBaseModelDb(name="1-84356-028-1")
        model.save(using=self.db_name)
        try:
            model.history.using(self.db_name).get(name="1-84356-028-1")
        except ObjectDoesNotExist:
            self.fail("ObjectDoesNotExist unexpectedly raised.")
    def test_multidb_with_using_keyword_in_save_with_fk(self):
        model = ModelWithHistoryUsingBaseModelDb(name="1-84356-028-1")
        model.save(using=self.db_name)
        parent_model = ModelWithFkToModelWithHistoryUsingBaseModelDb(fk=model)
        parent_model.save(using=self.db_name)
        # assert not created on default
        self.assertRaises(ObjectDoesNotExist, parent_model.history.get, fk=model)
        # assert created on dbtwo
        try:
            parent_model.history.using(self.db_name).get(fk=model)
        except ObjectDoesNotExist:
            self.fail("ObjectDoesNotExist unexpectedly raised.")
    def test_multidb_with_using_keyword_in_save_and_update(self):
        # create + save produce a "+" then a "~" history row on "other".
        model = ModelWithHistoryUsingBaseModelDb.objects.using(self.db_name).create(
            name="1-84356-028-1"
        )
        model.save(using=self.db_name)
        self.assertEqual(
            ["+", "~"],
            [
                obj.history_type
                for obj in model.history.using(self.db_name)
                .all()
                .order_by("history_date")
            ],
        )
    def test_multidb_with_using_keyword_in_save_and_delete(self):
        # delete(using=...) must record the "-" row on the same database.
        HistoricalModelWithHistoryUseBaseModelDb = get_history_model_for_model(
            ModelWithHistoryUsingBaseModelDb
        )
        model = ModelWithHistoryUsingBaseModelDb.objects.using(self.db_name).create(
            name="1-84356-028-1"
        )
        model.save(using=self.db_name)
        model.delete(using=self.db_name)
        self.assertEqual(
            ["+", "~", "-"],
            [
                obj.history_type
                for obj in HistoricalModelWithHistoryUseBaseModelDb.objects.using(
                    self.db_name
                )
                .all()
                .order_by("history_date")
            ],
        )
class ForeignKeyToSelfTest(TestCase):
    """Self-referential FKs on the historical model must resolve back to the
    concrete model, whether declared via the model name or 'self'."""

    def setUp(self):
        self.model = ForeignKeyToSelfModel
        self.history_model = self.model.history.model

    def test_foreign_key_to_self_using_model_str(self):
        remote = self.history_model.fk_to_self.field.remote_field.model
        self.assertEqual(remote, self.model)

    def test_foreign_key_to_self_using_self_str(self):
        remote = self.history_model.fk_to_self_using_str.field.remote_field.model
        self.assertEqual(remote, self.model)
@override_settings(**database_router_override_settings)
class MultiDBExplicitHistoryUserIDTest(TestCase):
    # With a router splitting models across databases, history_user stored
    # as a real FK breaks; an integer user-id field must work instead.
    databases = {"default", "other"}
    def setUp(self):
        self.user = get_user_model().objects.create(
            username="username", email="username@test.com", password="top_secret"
        )
    def test_history_user_with_fk_in_different_db_raises_value_error(self):
        # Cross-database FK assignment is rejected by Django.
        instance = ExternalModel(name="random_name")
        instance._history_user = self.user
        with self.assertRaises(ValueError):
            instance.save()
    def test_history_user_with_integer_field(self):
        # A plain integer user-id field works across databases.
        instance = ExternalModelWithCustomUserIdField(name="random_name")
        instance._history_user = self.user
        instance.save()
        self.assertEqual(self.user.id, instance.history.first().history_user_id)
        self.assertEqual(self.user, instance.history.first().history_user)
    def test_history_user_is_none(self):
        instance = ExternalModelWithCustomUserIdField.objects.create(name="random_name")
        self.assertIsNone(instance.history.first().history_user_id)
        self.assertIsNone(instance.history.first().history_user)
    def test_history_user_does_not_exist(self):
        # After the user row is deleted the stored id survives but the
        # history_user property degrades to None instead of raising.
        instance = ExternalModelWithCustomUserIdField(name="random_name")
        instance._history_user = self.user
        instance.save()
        self.assertEqual(self.user.id, instance.history.first().history_user_id)
        self.assertEqual(self.user, instance.history.first().history_user)
        user_id = self.user.id
        self.user.delete()
        self.assertEqual(user_id, instance.history.first().history_user_id)
        self.assertIsNone(instance.history.first().history_user)
class RelatedNameTest(TestCase):
    # Exercises the related_name option on HistoricalRecords, which lets
    # the live model be filtered through its history (history__...).
    # NOTE: the counts below depend on the exact save sequence in setUp.
    def setUp(self):
        self.user_one = get_user_model().objects.create(
            username="username_one", email="first@user.com", password="top_secret"
        )
        self.user_two = get_user_model().objects.create(
            username="username_two", email="second@user.com", password="top_secret"
        )
        # Street one: created by user_one, then edited by user_two.
        self.one = Street(name="Test Street")
        self.one._history_user = self.user_one
        self.one.save()
        # Street two: created by user_two.
        self.two = Street(name="Sesame Street")
        self.two._history_user = self.user_two
        self.two.save()
        self.one.name = "ABC Street"
        self.one._history_user = self.user_two
        self.one.save()
    def test_relation(self):
        self.assertEqual(self.one.history.count(), 2)
        self.assertEqual(self.two.history.count(), 1)
    def test_filter(self):
        # Live objects can be filtered by the history user who touched them.
        self.assertEqual(
            Street.objects.filter(history__history_user=self.user_one.pk).count(), 1
        )
        self.assertEqual(
            Street.objects.filter(history__history_user=self.user_two.pk).count(), 2
        )
    def test_name_equals_manager(self):
        # related_name may not collide with the history manager's name.
        with self.assertRaises(RelatedNameConflictError):
            register(Place, manager_name="history", related_name="history")
    def test_deletion(self):
        # Deleting adds a "-" record; history_relation=2 is street two's pk.
        self.two.delete()
        self.assertEqual(Street.log.filter(history_relation=2).count(), 2)
        self.assertEqual(Street.log.count(), 4)
    def test_revert(self):
        # Delete street one, then restore it from its last history record.
        id = self.one.pk
        self.one.delete()
        self.assertEqual(
            Street.objects.filter(history__history_user=self.user_one.pk).count(), 0
        )
        self.assertEqual(Street.objects.filter(pk=id).count(), 0)
        old = Street.log.filter(id=id).first()
        old.history_object.save()
        self.assertEqual(
            Street.objects.filter(history__history_user=self.user_one.pk).count(), 1
        )
        self.one = Street.objects.get(pk=id)
        # create + update + delete + restore = 4 history records.
        self.assertEqual(self.one.history.count(), 4)
@override_settings(**database_router_override_settings_history_in_diff_db)
class SaveHistoryInSeparateDatabaseTestCase(TestCase):
    # With a router that sends historical models to "other", live rows stay
    # on "default" while every history row lands on "other".
    databases = {"default", "other"}
    def setUp(self):
        self.model = ModelWithHistoryInDifferentDb.objects.create(name="test")
    def test_history_model_saved_in_separate_db(self):
        # No history on default; exactly one record on "other" (the default
        # history manager routing also resolves to "other").
        self.assertEqual(0, self.model.history.using("default").count())
        self.assertEqual(1, self.model.history.count())
        self.assertEqual(1, self.model.history.using("other").count())
        # The live row exists only on default.
        self.assertEqual(
            1, ModelWithHistoryInDifferentDb.objects.using("default").count()
        )
        self.assertEqual(1, ModelWithHistoryInDifferentDb.objects.count())
        self.assertEqual(
            0, ModelWithHistoryInDifferentDb.objects.using("other").count()
        )
    def test_history_model_saved_in_separate_db_on_delete(self):
        id = self.model.id
        self.model.delete()
        # Create + delete leave two history rows, all on "other".
        self.assertEqual(
            0,
            ModelWithHistoryInDifferentDb.history.using("default")
            .filter(id=id)
            .count(),
        )
        self.assertEqual(2, ModelWithHistoryInDifferentDb.history.filter(id=id).count())
        self.assertEqual(
            2,
            ModelWithHistoryInDifferentDb.history.using("other").filter(id=id).count(),
        )
        # The live row is gone from every database.
        self.assertEqual(
            0, ModelWithHistoryInDifferentDb.objects.using("default").count()
        )
        self.assertEqual(0, ModelWithHistoryInDifferentDb.objects.count())
        self.assertEqual(
            0, ModelWithHistoryInDifferentDb.objects.using("other").count()
        )
| 37.431463
| 88
| 0.652142
|
794b6019cc2cd0c6c9be6e4d9c571ef5ab3063d6
| 5,671
|
py
|
Python
|
aodh/tests/unit/evaluator/test_loadbalancer.py
|
yi-cloud/aodh
|
bf2371a1f2175b87b7769ebf76f3cc74b35d3c87
|
[
"Apache-2.0"
] | 49
|
2015-07-01T21:28:25.000Z
|
2021-04-20T05:24:43.000Z
|
aodh/tests/unit/evaluator/test_loadbalancer.py
|
yi-cloud/aodh
|
bf2371a1f2175b87b7769ebf76f3cc74b35d3c87
|
[
"Apache-2.0"
] | null | null | null |
aodh/tests/unit/evaluator/test_loadbalancer.py
|
yi-cloud/aodh
|
bf2371a1f2175b87b7769ebf76f3cc74b35d3c87
|
[
"Apache-2.0"
] | 37
|
2015-08-20T15:06:54.000Z
|
2022-01-23T07:53:32.000Z
|
# Copyright 2019 Catalyst Cloud Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
from unittest import mock
from oslo_utils import timeutils
from oslo_utils import uuidutils
from aodh import evaluator
from aodh.evaluator import loadbalancer
from aodh.storage import models
from aodh.tests import constants
from aodh.tests.unit.evaluator import base
@mock.patch('octaviaclient.api.v2.octavia.OctaviaAPI')
@mock.patch('aodh.keystone_client.get_session')
class TestLoadBalancerMemberHealthEvaluator(base.TestEvaluatorBase):
    """Tests for the Octavia load-balancer member-health alarm evaluator.

    Both patches wrap every test method.  mock.patch hands mocks over
    innermost-first, so ``mock_session`` is the patched keystone session
    factory and ``mock_octavia`` the patched Octavia API class.
    """
    # Evaluator class under test, consumed by base.TestEvaluatorBase.
    EVALUATOR = loadbalancer.LoadBalancerMemberHealthEvaluator
    def test_evaluate(self, mock_session, mock_octavia):
        """A member reported in ERROR status must drive the alarm to ALARM."""
        alarm = models.Alarm(
            name='lb_member_alarm',
            description='lb_member_alarm',
            type=loadbalancer.ALARM_TYPE,
            enabled=True,
            user_id=uuidutils.generate_uuid(),
            project_id=uuidutils.generate_uuid(dashed=False),
            alarm_id=uuidutils.generate_uuid(),
            state='insufficient data',
            state_reason='insufficient data',
            state_timestamp=constants.MIN_DATETIME,
            timestamp=constants.MIN_DATETIME,
            insufficient_data_actions=[],
            ok_actions=[],
            alarm_actions=[],
            repeat_actions=False,
            time_constraints=[],
            severity='low',
            rule=dict(
                pool_id=uuidutils.generate_uuid(),
                stack_id=uuidutils.generate_uuid(),
                autoscaling_group_id=uuidutils.generate_uuid(),
            )
        )
        mock_client = mock.MagicMock()
        mock_octavia.return_value = mock_client
        # Member created a day ago, admin-up, but operating in ERROR.
        created_at = timeutils.utcnow() - datetime.timedelta(days=1)
        mock_client.member_list.return_value = {
            'members': [
                {
                    'created_at': created_at.isoformat(),
                    'admin_state_up': True,
                    'operating_status': 'ERROR',
                }
            ]
        }
        self.evaluator.evaluate(alarm)
        self.assertEqual(evaluator.ALARM, alarm.state)
    def test_evaluate_octavia_error(self, mock_session, mock_octavia):
        """An HTTP-error response object from Octavia must yield UNKNOWN."""
        # Stand-in for an octaviaclient error response (not a dict).
        class Response(object):
            def __init__(self, status_code, content):
                self.status_code = status_code
                self.content = content
        alarm = models.Alarm(
            name='lb_member_alarm',
            description='lb_member_alarm',
            type=loadbalancer.ALARM_TYPE,
            enabled=True,
            user_id=uuidutils.generate_uuid(),
            project_id=uuidutils.generate_uuid(dashed=False),
            alarm_id=uuidutils.generate_uuid(),
            state='insufficient data',
            state_reason='insufficient data',
            state_timestamp=constants.MIN_DATETIME,
            timestamp=constants.MIN_DATETIME,
            insufficient_data_actions=[],
            ok_actions=[],
            alarm_actions=[],
            repeat_actions=False,
            time_constraints=[],
            severity='low',
            rule=dict(
                pool_id=uuidutils.generate_uuid(),
                stack_id=uuidutils.generate_uuid(),
                autoscaling_group_id=uuidutils.generate_uuid(),
            )
        )
        mock_client = mock.MagicMock()
        mock_octavia.return_value = mock_client
        msg = 'Pool NotFound'
        mock_client.member_list.return_value = Response(404, msg)
        self.evaluator.evaluate(alarm)
        # The response content becomes the alarm's state reason.
        self.assertEqual(evaluator.UNKNOWN, alarm.state)
        self.assertEqual(msg, alarm.state_reason)
    def test_evaluate_alarm_to_ok(self, mock_session, mock_octavia):
        """A healthy (ACTIVE) member flips a previously-ALARM alarm to OK."""
        alarm = models.Alarm(
            name='lb_member_alarm',
            description='lb_member_alarm',
            type=loadbalancer.ALARM_TYPE,
            enabled=True,
            user_id=uuidutils.generate_uuid(),
            project_id=uuidutils.generate_uuid(dashed=False),
            alarm_id=uuidutils.generate_uuid(),
            state=evaluator.ALARM,
            state_reason='alarm',
            state_timestamp=constants.MIN_DATETIME,
            timestamp=constants.MIN_DATETIME,
            insufficient_data_actions=[],
            ok_actions=[],
            alarm_actions=[],
            repeat_actions=False,
            time_constraints=[],
            severity='low',
            rule=dict(
                pool_id=uuidutils.generate_uuid(),
                stack_id=uuidutils.generate_uuid(),
                autoscaling_group_id=uuidutils.generate_uuid(),
            )
        )
        mock_client = mock.MagicMock()
        mock_octavia.return_value = mock_client
        created_at = timeutils.utcnow() - datetime.timedelta(days=1)
        mock_client.member_list.return_value = {
            'members': [
                {
                    'created_at': created_at.isoformat(),
                    'admin_state_up': True,
                    'operating_status': 'ACTIVE',
                }
            ]
        }
        self.evaluator.evaluate(alarm)
        self.assertEqual(evaluator.OK, alarm.state)
| 35.666667
| 77
| 0.611003
|
794b601bc3fd6f72410e7f8880f9831ea5091f74
| 389
|
py
|
Python
|
mapr/ojai/exceptions/ConditionNotClosedError.py
|
mapr/maprdb-python-client
|
ea7b7f1fb6c212e76bd799867e272eafd345f2e2
|
[
"Apache-2.0"
] | 3
|
2020-04-01T12:01:50.000Z
|
2022-03-23T01:18:36.000Z
|
mapr/ojai/exceptions/ConditionNotClosedError.py
|
mapr/maprdb-python-client
|
ea7b7f1fb6c212e76bd799867e272eafd345f2e2
|
[
"Apache-2.0"
] | 7
|
2019-02-10T19:31:09.000Z
|
2022-02-08T17:04:17.000Z
|
mapr/ojai/exceptions/ConditionNotClosedError.py
|
mapr/maprdb-python-client
|
ea7b7f1fb6c212e76bd799867e272eafd345f2e2
|
[
"Apache-2.0"
] | 3
|
2020-05-27T09:52:32.000Z
|
2021-09-07T14:16:43.000Z
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
class ConditionNotClosedError(Exception):
    """Raised when a query condition is used before being closed.

    The message is kept on ``self.message`` (original behaviour) and is
    additionally forwarded to ``Exception.__init__`` so that ``e.args``,
    ``repr(e)`` and pickling carry the message too — the original left
    ``args`` empty, which made tracebacks/repr show a bare exception.
    """

    def __init__(self, m):
        # Explicit base-class form keeps py2 (with `future` builtins)
        # and py3 both happy, matching this module's imports.
        super(ConditionNotClosedError, self).__init__(m)
        self.message = m

    def __str__(self):
        return self.message
| 27.785714
| 41
| 0.796915
|
794b6087a91a87dd8eddbc6058fe9106be379228
| 14,068
|
py
|
Python
|
lib/blockchain.py
|
Deimoscoin/deielectrum
|
3f033dc63e9b58fb872a9a0ac41912be367a774a
|
[
"MIT"
] | null | null | null |
lib/blockchain.py
|
Deimoscoin/deielectrum
|
3f033dc63e9b58fb872a9a0ac41912be367a774a
|
[
"MIT"
] | 2
|
2020-04-12T02:45:18.000Z
|
2020-04-12T02:46:01.000Z
|
lib/blockchain.py
|
Deimoscoin/deielectrum
|
3f033dc63e9b58fb872a9a0ac41912be367a774a
|
[
"MIT"
] | null | null | null |
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@ecdsa.org
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import threading
from . import util
from .bitcoin import Hash, hash_encode, int_to_hex, rev_hex
from . import constants
from .util import bfh, bh2u
# Highest (easiest) proof-of-work target accepted; used as the cap in
# get_target() and as the value for the pre-first retarget period.
MAX_TARGET = 0x00000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
class MissingHeader(Exception):
    """Raised by get_target() when a header it needs is not stored locally."""
    pass
def serialize_header(res):
    """Serialize a header dict into its 160-hex-char (80-byte) wire form.

    Field order and widths mirror the Bitcoin block-header layout; hashes
    are byte-reversed via rev_hex, integers little-endian via int_to_hex.
    """
    parts = [
        int_to_hex(res.get('version'), 4),
        rev_hex(res.get('prev_block_hash')),
        rev_hex(res.get('merkle_root')),
        int_to_hex(int(res.get('timestamp')), 4),
        int_to_hex(int(res.get('bits')), 4),
        int_to_hex(int(res.get('nonce')), 4),
    ]
    return ''.join(parts)
def deserialize_header(s, height):
    """Parse an 80-byte raw header into a dict, tagging it with *height*.

    Raises a plain Exception for empty input or a wrong-length buffer.
    """
    if not s:
        raise Exception('Invalid header: {}'.format(s))
    if len(s) != 80:
        raise Exception('Invalid header length: {}'.format(len(s)))

    def little_endian_int(raw):
        # Stored little-endian on the wire; reverse before parsing as hex.
        return int('0x' + bh2u(raw[::-1]), 16)

    return {
        'version': little_endian_int(s[0:4]),
        'prev_block_hash': hash_encode(s[4:36]),
        'merkle_root': hash_encode(s[36:68]),
        'timestamp': little_endian_int(s[68:72]),
        'bits': little_endian_int(s[72:76]),
        'nonce': little_endian_int(s[76:80]),
        'block_height': height,
    }
def hash_header(header):
    """Return the hex block hash of *header*; all-zero hash for None."""
    if header is None:
        return '0' * 64
    # NOTE(review): mutates the caller's dict — fills in a zero prev-hash
    # when absent so serialization below cannot fail.
    if header.get('prev_block_hash') is None:
        header['prev_block_hash'] = '00'*32
    return hash_encode(Hash(bfh(serialize_header(header))))
blockchains = {}  # checkpoint height -> Blockchain (0 is the main chain; forks keyed by fork point; populated by read_blockchains)
def read_blockchains(config):
    """Load the main chain plus every saved fork file into `blockchains`.

    Fork files live in <headers_dir>/forks and are named
    'fork_<parent_id>_<checkpoint>'.  A fork whose first header no longer
    connects to its parent is skipped (with a printed error), not deleted.
    Returns the populated `blockchains` dict.
    """
    blockchains[0] = Blockchain(config, 0, None)
    fdir = os.path.join(util.get_headers_dir(config), 'forks')
    util.make_dir(fdir)
    l = filter(lambda x: x.startswith('fork_'), os.listdir(fdir))
    # Sort by parent id so parents are registered before their children.
    l = sorted(l, key = lambda x: int(x.split('_')[1]))
    for filename in l:
        checkpoint = int(filename.split('_')[2])
        parent_id = int(filename.split('_')[1])
        b = Blockchain(config, checkpoint, parent_id)
        h = b.read_header(b.checkpoint)
        if b.parent().can_connect(h, check_height=False):
            blockchains[b.checkpoint] = b
        else:
            util.print_error("cannot connect", filename)
    return blockchains
def check_header(header):
    """Return the Blockchain that already contains *header*, else False.

    Anything that is not exactly a dict is rejected outright.
    """
    if type(header) is not dict:
        return False
    return next(
        (chain for chain in blockchains.values() if chain.check_header(header)),
        False,
    )
def can_connect(header):
    """Return the Blockchain that *header* directly extends, else False."""
    return next(
        (chain for chain in blockchains.values() if chain.can_connect(header)),
        False,
    )
class Blockchain(util.PrintError):
    """
    Manages blockchain headers and their verification
    """
    def __init__(self, config, checkpoint, parent_id):
        # checkpoint: absolute height of this chain's first stored header.
        # parent_id: checkpoint of the parent chain, or None for the main chain.
        self.config = config
        self.catch_up = None # interface catching up
        self.checkpoint = checkpoint
        self.checkpoints = constants.net.CHECKPOINTS
        self.parent_id = parent_id
        # Guards self._size and the header-file writes.
        self.lock = threading.Lock()
        with self.lock:
            self.update_size()
    def parent(self):
        return blockchains[self.parent_id]
    def get_max_child(self):
        # Highest fork point among chains forked directly off this one.
        children = list(filter(lambda y: y.parent_id==self.checkpoint, blockchains.values()))
        return max([x.checkpoint for x in children]) if children else None
    def get_checkpoint(self):
        mc = self.get_max_child()
        return mc if mc is not None else self.checkpoint
    def get_branch_size(self):
        return self.height() - self.get_checkpoint() + 1
    def get_name(self):
        # NOTE(review): lstrip('00') strips *all* leading '0' characters,
        # not just "00" pairs — may trim more than intended. Confirm intent.
        return self.get_hash(self.get_checkpoint()).lstrip('00')[0:10]
    def check_header(self, header):
        # True when the header at its claimed height matches our stored hash.
        header_hash = hash_header(header)
        height = header.get('block_height')
        return header_hash == self.get_hash(height)
    def fork(parent, header):
        # NOTE(review): no `self` — used as a factory whose first argument
        # is the parent chain; confirm call sites before changing.
        checkpoint = header.get('block_height')
        self = Blockchain(parent.config, checkpoint, parent.checkpoint)
        open(self.path(), 'w+').close()
        self.save_header(header)
        return self
    def height(self):
        return self.checkpoint + self.size() - 1
    def size(self):
        with self.lock:
            return self._size
    def update_size(self):
        # Size in headers: the file stores fixed 80-byte records.
        p = self.path()
        self._size = os.path.getsize(p)//80 if os.path.exists(p) else 0
    def verify_header(self, header, prev_hash, target):
        """Raise unless header links to prev_hash and satisfies *target*."""
        _hash = hash_header(header)
        if prev_hash != header.get('prev_block_hash'):
            raise Exception("prev hash mismatch: %s vs %s" % (prev_hash, header.get('prev_block_hash')))
        if constants.net.TESTNET:
            return
        bits = self.target_to_bits(target)
        if bits != header.get('bits'):
            raise Exception("bits mismatch: %s vs %s" % (bits, header.get('bits')))
        if int('0x' + _hash, 16) > target:
            raise Exception("insufficient proof of work: %s vs target %s" % (int('0x' + _hash, 16), target))
    def verify_chunk(self, index, data):
        """Verify one 2016-header chunk of raw bytes against its target."""
        num = len(data) // 80
        prev_hash = self.get_hash(index * 2016 - 1)
        target = self.get_target(index-1)
        for i in range(num):
            raw_header = data[i*80:(i+1) * 80]
            header = deserialize_header(raw_header, index*2016 + i)
            self.verify_header(header, prev_hash, target)
            prev_hash = hash_header(header)
    def path(self):
        # Main chain: 'blockchain_headers'; forks: forks/fork_<parent>_<cp>.
        d = util.get_headers_dir(self.config)
        filename = 'blockchain_headers' if self.parent_id is None else os.path.join('forks', 'fork_%d_%d'%(self.parent_id, self.checkpoint))
        return os.path.join(d, filename)
    def save_chunk(self, index, chunk):
        filename = self.path()
        # Byte offset of the chunk relative to this chain's first header.
        d = (index * 2016 - self.checkpoint) * 80
        if d < 0:
            # Chunk begins before our checkpoint: keep only the tail.
            chunk = chunk[-d:]
            d = 0
        truncate = index >= len(self.checkpoints)
        self.write(chunk, d, truncate)
        self.swap_with_parent()
    def swap_with_parent(self):
        """If this fork outgrew its parent, exchange roles (and files)."""
        if self.parent_id is None:
            return
        parent_branch_size = self.parent().height() - self.checkpoint + 1
        if parent_branch_size >= self.size():
            return
        self.print_error("swap", self.checkpoint, self.parent_id)
        parent_id = self.parent_id
        checkpoint = self.checkpoint
        parent = self.parent()
        self.assert_headers_file_available(self.path())
        with open(self.path(), 'rb') as f:
            my_data = f.read()
        self.assert_headers_file_available(parent.path())
        with open(parent.path(), 'rb') as f:
            f.seek((checkpoint - parent.checkpoint)*80)
            parent_data = f.read(parent_branch_size*80)
        self.write(parent_data, 0)
        parent.write(my_data, (checkpoint - parent.checkpoint)*80)
        # store file path
        for b in blockchains.values():
            b.old_path = b.path()
        # swap parameters
        self.parent_id = parent.parent_id; parent.parent_id = parent_id
        self.checkpoint = parent.checkpoint; parent.checkpoint = checkpoint
        self._size = parent._size; parent._size = parent_branch_size
        # move files
        for b in blockchains.values():
            if b in [self, parent]: continue
            if b.old_path != b.path():
                self.print_error("renaming", b.old_path, b.path())
                os.rename(b.old_path, b.path())
        # update pointers
        blockchains[self.checkpoint] = self
        blockchains[parent.checkpoint] = parent
    def assert_headers_file_available(self, path):
        if os.path.exists(path):
            return
        elif not os.path.exists(util.get_headers_dir(self.config)):
            raise FileNotFoundError('Electrum headers_dir does not exist. Was it deleted while running?')
        else:
            raise FileNotFoundError('Cannot find headers file but headers_dir is there. Should be at {}'.format(path))
    def write(self, data, offset, truncate=True):
        """Write raw header bytes at *offset*, fsync, and refresh _size."""
        filename = self.path()
        with self.lock:
            self.assert_headers_file_available(filename)
            with open(filename, 'rb+') as f:
                # Drop anything past the write point unless appending in place.
                if truncate and offset != self._size*80:
                    f.seek(offset)
                    f.truncate()
                f.seek(offset)
                f.write(data)
                f.flush()
                os.fsync(f.fileno())
            self.update_size()
    def save_header(self, header):
        # Only appending the next header is supported (see asserts).
        delta = header.get('block_height') - self.checkpoint
        data = bfh(serialize_header(header))
        assert delta == self.size()
        assert len(data) == 80
        self.write(data, delta*80)
        self.swap_with_parent()
    def read_header(self, height):
        """Return the header dict at *height*, delegating below-checkpoint
        reads to the parent chain; None for out-of-range or zeroed slots."""
        assert self.parent_id != self.checkpoint
        if height < 0:
            return
        if height < self.checkpoint:
            return self.parent().read_header(height)
        if height > self.height():
            return
        delta = height - self.checkpoint
        name = self.path()
        self.assert_headers_file_available(name)
        with open(name, 'rb') as f:
            f.seek(delta * 80)
            h = f.read(80)
            if len(h) < 80:
                raise Exception('Expected to read a full header. This was only {} bytes'.format(len(h)))
        if h == bytes([0])*80:
            return None
        return deserialize_header(h, height)
    def get_hash(self, height):
        if height == -1:
            return '0000000000000000000000000000000000000000000000000000000000000000'
        elif height == 0:
            return constants.net.GENESIS
        elif height < len(self.checkpoints) * 2016:
            # Inside checkpointed territory only chunk-final hashes are known.
            assert (height+1) % 2016 == 0, height
            index = height // 2016
            h, t = self.checkpoints[index]
            return h
        else:
            return hash_header(self.read_header(height))
    def get_target(self, index):
        # compute target from chunk x, used in chunk x+1
        if constants.net.TESTNET:
            return 0
        if index == -1:
            return MAX_TARGET
        if index < len(self.checkpoints):
            h, t = self.checkpoints[index]
            return t
        # new target
        first = self.read_header(index * 2016)
        last = self.read_header(index * 2016 + 2015)
        if not first or not last:
            raise MissingHeader()
        bits = last.get('bits')
        target = self.bits_to_target(bits)
        nActualTimespan = last.get('timestamp') - first.get('timestamp')
        nTargetTimespan = 14 * 24 * 60 * 60
        # Clamp the retarget adjustment to a factor of 4 either way.
        nActualTimespan = max(nActualTimespan, nTargetTimespan // 4)
        nActualTimespan = min(nActualTimespan, nTargetTimespan * 4)
        new_target = min(MAX_TARGET, (target * nActualTimespan) // nTargetTimespan)
        return new_target
    def bits_to_target(self, bits):
        """Expand a compact 'bits' value to the full integer target."""
        bitsN = (bits >> 24) & 0xff
        if not (bitsN >= 0x03 and bitsN <= 0x1d):
            raise Exception("First part of bits should be in [0x03, 0x1d]")
        bitsBase = bits & 0xffffff
        if not (bitsBase >= 0x8000 and bitsBase <= 0x7fffff):
            raise Exception("Second part of bits should be in [0x8000, 0x7fffff]")
        return bitsBase << (8 * (bitsN-3))
    def target_to_bits(self, target):
        """Compress a full integer target back to its compact 'bits' form."""
        c = ("%064x" % target)[2:]
        while c[:2] == '00' and len(c) > 6:
            c = c[2:]
        bitsN, bitsBase = len(c) // 2, int('0x' + c[:6], 16)
        if bitsBase >= 0x800000:
            bitsN += 1
            bitsBase >>= 8
        return bitsN << 24 | bitsBase
    def can_connect(self, header, check_height=True):
        """True if *header* extends this chain's tip with valid PoW."""
        if header is None:
            return False
        height = header['block_height']
        if check_height and self.height() != height - 1:
            #self.print_error("cannot connect at height", height)
            return False
        if height == 0:
            return hash_header(header) == constants.net.GENESIS
        try:
            prev_hash = self.get_hash(height - 1)
        except:
            return False
        if prev_hash != header.get('prev_block_hash'):
            return False
        try:
            target = self.get_target(height // 2016 - 1)
        except MissingHeader:
            return False
        try:
            self.verify_header(header, prev_hash, target)
        except BaseException as e:
            return False
        return True
    def connect_chunk(self, idx, hexdata):
        """Verify and persist a hex-encoded chunk; False on any failure."""
        try:
            data = bfh(hexdata)
            self.verify_chunk(idx, data)
            #self.print_error("validated chunk %d" % idx)
            self.save_chunk(idx, data)
            return True
        except BaseException as e:
            self.print_error('verify_chunk %d failed'%idx, str(e))
            return False
    def get_checkpoints(self):
        # for each chunk, store the hash of the last block and the target after the chunk
        cp = []
        n = self.height() // 2016
        for index in range(n):
            h = self.get_hash((index+1) * 2016 -1)
            target = self.get_target(index)
            cp.append((h, target))
        return cp
| 36.73107
| 140
| 0.607123
|
794b611d6f1307deb4f40044e0920cd606cf3cd9
| 1,445
|
py
|
Python
|
desktop/core/ext-py/celery-4.2.1/t/unit/contrib/test_abortable.py
|
maulikjs/hue
|
59ac879b55bb6fb26ecb4e85f4c70836fc21173f
|
[
"Apache-2.0"
] | 5,079
|
2015-01-01T03:39:46.000Z
|
2022-03-31T07:38:22.000Z
|
desktop/core/ext-py/celery-4.2.1/t/unit/contrib/test_abortable.py
|
zks888/hue
|
93a8c370713e70b216c428caa2f75185ef809deb
|
[
"Apache-2.0"
] | 1,623
|
2015-01-01T08:06:24.000Z
|
2022-03-30T19:48:52.000Z
|
desktop/core/ext-py/celery-4.2.1/t/unit/contrib/test_abortable.py
|
zks888/hue
|
93a8c370713e70b216c428caa2f75185ef809deb
|
[
"Apache-2.0"
] | 2,033
|
2015-01-04T07:18:02.000Z
|
2022-03-28T19:55:47.000Z
|
from __future__ import absolute_import, unicode_literals
from celery.contrib.abortable import AbortableAsyncResult, AbortableTask
class test_AbortableTask:
    """Unit tests for celery.contrib.abortable's AbortableTask/AsyncResult.

    ``self.app`` is provided by celery's test fixtures (pytest plugin) —
    the class itself defines no app.
    """
    def setup(self):
        # Register a trivial abortable task on the fixture app for each test.
        @self.app.task(base=AbortableTask, shared=False)
        def abortable():
            return True
        self.abortable = abortable
    def test_async_result_is_abortable(self):
        """AsyncResult for an abortable task is an AbortableAsyncResult."""
        result = self.abortable.apply_async()
        tid = result.id
        assert isinstance(
            self.abortable.AsyncResult(tid), AbortableAsyncResult)
    def test_is_not_aborted(self):
        """A freshly started task is not reported as aborted."""
        self.abortable.push_request()
        try:
            result = self.abortable.apply_async()
            tid = result.id
            assert not self.abortable.is_aborted(task_id=tid)
        finally:
            self.abortable.pop_request()
    def test_is_aborted_not_abort_result(self):
        """is_aborted() is False when AsyncResult is a plain (non-abortable) class."""
        self.abortable.AsyncResult = self.app.AsyncResult
        self.abortable.push_request()
        try:
            self.abortable.request.id = 'foo'
            assert not self.abortable.is_aborted()
        finally:
            self.abortable.pop_request()
    def test_abort_yields_aborted(self):
        """After result.abort(), the task observes its aborted state."""
        self.abortable.push_request()
        try:
            result = self.abortable.apply_async()
            result.abort()
            tid = result.id
            assert self.abortable.is_aborted(task_id=tid)
        finally:
            self.abortable.pop_request()
| 30.744681
| 72
| 0.638062
|
794b61630cf8be7ca9d4008570bd141e4675e6ca
| 972
|
py
|
Python
|
setup.py
|
azdle/exoline
|
f84ca1f988c596f33637d1179f4a0026ef7a2dcc
|
[
"BSD-2-Clause-FreeBSD",
"BSD-3-Clause"
] | null | null | null |
setup.py
|
azdle/exoline
|
f84ca1f988c596f33637d1179f4a0026ef7a2dcc
|
[
"BSD-2-Clause-FreeBSD",
"BSD-3-Clause"
] | null | null | null |
setup.py
|
azdle/exoline
|
f84ca1f988c596f33637d1179f4a0026ef7a2dcc
|
[
"BSD-2-Clause-FreeBSD",
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
from setuptools import setup
from exoline import __version__ as version
# Runtime requirements come from requirements.txt, with py2-era backports
# appended only when the stdlib module is missing.
with open('requirements.txt') as f:
    required = f.read().splitlines()
try:
    from collections import OrderedDict
except ImportError:
    required.append('ordereddict>=1.1')
try:
    import importlib
except ImportError:
    required.append('importlib>=1.0.2')
setup(
    name='exoline',
    version=version,
    url = 'http://github.com/exosite/exoline',
    author = 'Dan Weaver',
    author_email = 'danweaver@exosite.com',
    description = 'Command line interface for Exosite platform.',
    # NOTE(review): these open() handles are never closed explicitly;
    # harmless for a short-lived setup run but a `with` block would be cleaner.
    long_description = open('README.md').read() + '\n\n' +
                       open('HISTORY.md').read(),
    packages=['exoline', 'exoline.plugins'],
    package_dir={'exoline': 'exoline', 'plugins': 'exoline/plugins'},
    scripts=['bin/exo', 'bin/exoline'],
    keywords=['exosite', 'onep', 'one platform', 'm2m'],
    install_requires=required,
    zip_safe=False,
)
| 27.771429
| 69
| 0.658436
|
794b62887569a5ecebd645befdfdfca37a9a8d90
| 1,907
|
py
|
Python
|
src/LogManager.py
|
roy20021/SerraAutoLogin
|
ad502b76545343a87f824deed74e3e2f450dbdfa
|
[
"BSD-3-Clause"
] | null | null | null |
src/LogManager.py
|
roy20021/SerraAutoLogin
|
ad502b76545343a87f824deed74e3e2f450dbdfa
|
[
"BSD-3-Clause"
] | null | null | null |
src/LogManager.py
|
roy20021/SerraAutoLogin
|
ad502b76545343a87f824deed74e3e2f450dbdfa
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2014, Andrea Esposito <info@andreaesposito.org>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Andrea Esposito nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Andrea Esposito BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import datetime
def createLog():
    """Create (or truncate) app.log in the working directory.

    Writes the single marker line "Log File Created".  Uses a ``with``
    block so the handle is closed even if the write raises (the original
    leaked the handle on error).
    """
    with open('app.log', "w") as file:
        file.write("Log File Created\n")
def log(entry):
    """Append ``[<timestamp>] <entry>`` to app.log (created if missing).

    The timestamp is ``str(datetime.datetime.now())``.  A ``with`` block
    replaces the original open/close pair so the handle cannot leak.
    """
    with open('app.log', 'a') as file:
        time = str(datetime.datetime.now())
        file.write("["+time+"] "+entry+"\n")
def readLog():
    """Return all lines of app.log as a list (newlines preserved).

    Raises FileNotFoundError if app.log does not exist, as before.
    The ``with`` block guarantees the handle is closed.
    """
    with open('app.log', 'r') as file:
        return file.readlines()
| 44.348837
| 81
| 0.748296
|
794b645188f5117cd91bdfd83738fae69cdce341
| 7,164
|
py
|
Python
|
stats_lib.py
|
mcgov/yava
|
514ac060024c34ebc4b527c3ca2866cffb276824
|
[
"MIT"
] | null | null | null |
stats_lib.py
|
mcgov/yava
|
514ac060024c34ebc4b527c3ca2866cffb276824
|
[
"MIT"
] | null | null | null |
stats_lib.py
|
mcgov/yava
|
514ac060024c34ebc4b527c3ca2866cffb276824
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
#!encoding=UTF-8
import math
import numpy as np
#import lda
"""
tools for counting the statistics of things.
@authors matthew g mcgovern,
"""
def count_symbol_frequency(text):
    """Return a dict mapping each symbol in *text* to its occurrence count."""
    frequencies = {}
    for symbol in text:
        frequencies[symbol] = frequencies.get(symbol, 0) + 1
    return frequencies
def count_phrase_frequency(raw_list):
    """Count phrase occurrences in a list of token lists.

    Each inner list is flattened to a space-joined string, which becomes
    the key of the returned frequency dict.
    """
    frequencies = {}
    for phrase_tokens in raw_list:
        key = " ".join(phrase_tokens)
        frequencies[key] = frequencies.get(key, 0) + 1
    return frequencies
def calculate_symbol_entropy(frequency_dict):
    """Return the Shannon entropy (in bits per symbol) of a frequency table.

    H = -sum(p * log2(p)) over p = count/total — the standard formula
    (this resolves the original "might not be correct" doubt).
    An empty table yields 0.  Changes vs. original: removed the unused
    ``distinct_symbol_count`` local and summed a generator instead of
    materializing a list.
    """
    total = sum(frequency_dict.values())
    return -sum(
        (count / total) * math.log(count / total, 2)
        for count in frequency_dict.values()
    )
def generate_list_of_raw_phrases(split_text, up_to_length):
    """Collect every n-gram (as a token list) for n = 1 .. up_to_length.

    Grams are grouped by length (all 1-grams first, then 2-grams, ...),
    and only full-length windows are kept.
    """
    return [
        split_text[start:start + length]
        for length in range(1, up_to_length + 1)
        for start in range(len(split_text) - length + 1)
    ]
def count_phrases(text, up_to_length, logographic=False):
    """Tokenize *text* and count phrase frequencies up to *up_to_length* tokens.

    ``logographic`` is accepted for API compatibility but has no effect:
    the text is always split with split_with_selector() (the per-character
    path was already disabled in the original).
    """
    tokens = split_with_selector(text, include_dubious=False)
    raw_phrases = generate_list_of_raw_phrases(tokens, up_to_length)
    return count_phrase_frequency(raw_phrases)
def collect_ngrams(text, ngram_length=10, single_level=False):
    """Count substring n-grams of *text*.

    With ``single_level=True`` only length-``ngram_length`` windows are
    counted; otherwise lengths 1 .. ngram_length-1 (exclusive upper bound,
    preserved from the original API).  Windows that run past the end of
    the string yield shorter trailing grams, which are counted as-is —
    also original behaviour, preserved.

    Fixes vs. original: the try/except around the slice was dead code
    (Python slicing never raises IndexError, so the ``break`` was
    unreachable), and the two nearly identical loops are collapsed into one.
    """
    ngram_dict = {}
    lengths = (ngram_length,) if single_level else range(1, ngram_length)
    for length in lengths:
        for start in range(len(text)):
            gram = text[start:start + length]
            ngram_dict[gram] = ngram_dict.get(gram, 0) + 1
    return ngram_dict
def set_up_character_ranges_table():
    """Read unicode_ranges.txt into a list of [low, high, name] entries.

    Each line of the file holds two hexadecimal code points followed by
    the block name (which may contain spaces).
    """
    table = []
    with open('unicode_ranges.txt', 'r') as range_file:
        for raw_line in range_file:
            fields = raw_line.split()
            low, high = int(fields[0], 16), int(fields[1], 16)
            table.append([low, high, " ".join(fields[2:])])
    return table
def check_ranges_from_table(character, table):
    """Return the name of the table range containing *character*, else None.

    *table* rows are [low, high, name] as built by
    set_up_character_ranges_table(); bounds are inclusive.
    """
    code_point = ord(character)
    for low, high, name in table:
        if low <= code_point <= high:
            return name
    return None
#shoutout to the lda demo code.
# The triple-quoted strings below are disabled (string-literal) code kept
# for reference; they are no-op expressions at import time.
"""
def run_lda( matrix ):
    lda_model = lda.LDA(n_topics=50, n_iter=2000, random_state=1)
    lda_model.fit(matrix)
    return lda_model
def print_lda_topic_results( post_run_model, vocabulary, document_titles):
    n_top_words = 9
    for i, topic_distribution in enumerate(model.topic_word_):
        topic_words = np.array(vocabulary)[np.argsort(topic_distribution)][:-n_top_words:-1]
        print('Topics {}: {}'.format(i,' '.join(topic_words)))
def print_lda_document_topic_results( model, vocab, document_titles, how_many ):
    document_topic = model.doc_topic_
    for i in range( how_many ):
        print("{} (top topic: {})".format(titles[i], document_topic[i].argmax()))
"""
from voynich import *
if __name__ == "__main__":
    import sys
    # NOTE(review): assert is stripped under `python -O`; an explicit check
    # with sys.exit would be safer for argument validation.
    assert( len(sys.argv )> 1 )
    in_file = sys.argv[1]
    # Build one long body string from transcription 'H' lines, dropping
    # comments/filler/breaks via the voynich helpers.
    with open( in_file, 'r', encoding="latin-1") as inputfile:
        body = ""
        for line in inputfile:
            if line[0] != '#' and parse_line_header(line, transcription='H'):
                line = remove_comments(line)
                line = remove_filler(line)
                line = remove_breaks(line)
                body += remove_line_header(line).strip()
    print( body )
    symb_freq = count_symbol_frequency(body)
    print( 'symbols:', symb_freq )
    entropy = calculate_symbol_entropy(symb_freq)
    #phrases = count_phrases( body, 5, logographic=False )
    #print( phrases )
    ngrams = collect_ngrams(remove_breaks(body, remove_spaces=False), ngram_length=7)
    #print( ngrams )
    # Entropy of fixed-length n-grams for n = 1..19.
    ngram_levels = [ collect_ngrams(remove_breaks(body, remove_spaces=False), ngram_length=i, single_level=True) for i in range(1,20) ]
    ngram_entropies = [ calculate_symbol_entropy( level ) for level in ngram_levels ]
    print( "ngram_entropies(1:20)", ngram_entropies )
    print( 'entropy:', entropy )
    print("Found alphabet of %d characters"%len(symb_freq))
    """
    test_text = "Do you love me or do you love me not?" #ima be me
    test_text2 = "你爱不爱我?"
    print("SYMBOL FREQUENCY:")
    test1 = count_symbol_frequency(test_text)
    print( test1, calculate_symbol_entropy(test1) )
    test2 = count_symbol_frequency(test_text2)
    print( test2, calculate_symbol_entropy(test2 ) )
    print("phrase FREQUENCY:")
    print( count_phrases( test_text2, 5, logographic=True ) )
    print( count_phrases( test_text, 5, logographic=False ) )
    table = set_up_character_ranges_table()
    print("LANGUAGE IDENT FROM UNICODE:")
    print( check_ranges_from_table( '你' , table) )
    print( check_ranges_from_table( 'L' , table) )
    print( check_ranges_from_table( 'ᜊ' , table) )
    print( check_ranges_from_table( 'ʃ' , table) )
    print( check_ranges_from_table( 'к' , table) )
    ##import lda.datasets
    # demo = lda.datasets.load_reuters()
    # vocab = lda.datasets.load_reuters_vocab()
    # titles = lda.datasets.load_reuters_titles()
    # model = run_lda( demo )
    # print_lda_document_topic_results(model, vocab, titles, 10)
    # print_lda_topic_results(model, vocab, titles)
    #print( "woot")
    """
| 33.166667
| 139
| 0.619626
|
794b6643ae834b525388d4340144329b8dc13e23
| 6,481
|
py
|
Python
|
homeassistant/components/device_automation/toggle_entity.py
|
havivi86/core
|
4a6ca8a04e5b1f84dcd80d7cab97726e887fbff1
|
[
"Apache-2.0"
] | 1
|
2022-01-05T16:48:58.000Z
|
2022-01-05T16:48:58.000Z
|
homeassistant/components/device_automation/toggle_entity.py
|
havivi86/core
|
4a6ca8a04e5b1f84dcd80d7cab97726e887fbff1
|
[
"Apache-2.0"
] | 36
|
2020-07-19T13:24:30.000Z
|
2022-03-31T06:02:01.000Z
|
homeassistant/components/device_automation/toggle_entity.py
|
havivi86/core
|
4a6ca8a04e5b1f84dcd80d7cab97726e887fbff1
|
[
"Apache-2.0"
] | 1
|
2022-02-15T10:17:40.000Z
|
2022-02-15T10:17:40.000Z
|
"""Device automation helpers for toggle entity."""
from __future__ import annotations
import voluptuous as vol
from homeassistant.components.automation import AutomationActionType
from homeassistant.components.device_automation.const import (
CONF_IS_OFF,
CONF_IS_ON,
CONF_TOGGLE,
CONF_TURN_OFF,
CONF_TURN_ON,
CONF_TURNED_OFF,
CONF_TURNED_ON,
)
from homeassistant.components.homeassistant.triggers import state as state_trigger
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_CONDITION,
CONF_ENTITY_ID,
CONF_FOR,
CONF_PLATFORM,
CONF_TYPE,
)
from homeassistant.core import CALLBACK_TYPE, Context, HomeAssistant, callback
from homeassistant.helpers import condition, config_validation as cv
from homeassistant.helpers.entity_registry import async_entries_for_device
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from . import DEVICE_TRIGGER_BASE_SCHEMA
# mypy: allow-untyped-calls, allow-untyped-defs
# Templates for the automations every toggle-style entity offers; the
# device_id/entity_id/domain fields are filled in by _async_get_automations.
ENTITY_ACTIONS = [
    {
        # Turn entity off
        CONF_TYPE: CONF_TURN_OFF
    },
    {
        # Turn entity on
        CONF_TYPE: CONF_TURN_ON
    },
    {
        # Toggle entity
        CONF_TYPE: CONF_TOGGLE
    },
]
ENTITY_CONDITIONS = [
    {
        # True when entity is turned off
        CONF_CONDITION: "device",
        CONF_TYPE: CONF_IS_OFF,
    },
    {
        # True when entity is turned on
        CONF_CONDITION: "device",
        CONF_TYPE: CONF_IS_ON,
    },
]
ENTITY_TRIGGERS = [
    {
        # Trigger when entity is turned off
        CONF_PLATFORM: "device",
        CONF_TYPE: CONF_TURNED_OFF,
    },
    {
        # Trigger when entity is turned on
        CONF_PLATFORM: "device",
        CONF_TYPE: CONF_TURNED_ON,
    },
]
DEVICE_ACTION_TYPES = [CONF_TOGGLE, CONF_TURN_OFF, CONF_TURN_ON]
# Voluptuous schemas validating the user-supplied automation configs.
ACTION_SCHEMA = cv.DEVICE_ACTION_BASE_SCHEMA.extend(
    {
        vol.Required(CONF_ENTITY_ID): cv.entity_id,
        vol.Required(CONF_TYPE): vol.In(DEVICE_ACTION_TYPES),
    }
)
CONDITION_SCHEMA = cv.DEVICE_CONDITION_BASE_SCHEMA.extend(
    {
        vol.Required(CONF_ENTITY_ID): cv.entity_id,
        vol.Required(CONF_TYPE): vol.In([CONF_IS_OFF, CONF_IS_ON]),
        vol.Optional(CONF_FOR): cv.positive_time_period_dict,
    }
)
TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend(
    {
        vol.Required(CONF_ENTITY_ID): cv.entity_id,
        vol.Required(CONF_TYPE): vol.In([CONF_TURNED_OFF, CONF_TURNED_ON]),
        vol.Optional(CONF_FOR): cv.positive_time_period_dict,
    }
)
async def async_call_action_from_config(
    hass: HomeAssistant,
    config: ConfigType,
    variables: TemplateVarsType,
    context: Context,
    domain: str,
) -> None:
    """Change state based on configuration.

    Maps the device-automation action type onto the matching entity
    service (turn_on / turn_off, defaulting to toggle) and calls it.
    """
    service_map = {CONF_TURN_ON: "turn_on", CONF_TURN_OFF: "turn_off"}
    service = service_map.get(config[CONF_TYPE], "toggle")
    await hass.services.async_call(
        domain,
        service,
        {ATTR_ENTITY_ID: config[CONF_ENTITY_ID]},
        blocking=True,
        context=context,
    )
@callback
def async_condition_from_config(config: ConfigType) -> condition.ConditionCheckerType:
    """Evaluate state based on configuration.

    Translates the device condition into an equivalent `state` condition
    ("on" for is_on, otherwise "off"), carrying CONF_FOR through if set.
    """
    state_config = {
        condition.CONF_CONDITION: "state",
        condition.CONF_ENTITY_ID: config[CONF_ENTITY_ID],
        condition.CONF_STATE: "on" if config[CONF_TYPE] == CONF_IS_ON else "off",
    }
    if CONF_FOR in config:
        state_config[CONF_FOR] = config[CONF_FOR]
    return condition.state_from_config(state_config)
async def async_attach_trigger(
    hass: HomeAssistant,
    config: ConfigType,
    action: AutomationActionType,
    automation_info: dict,
) -> CALLBACK_TYPE:
    """Listen for state changes based on configuration."""
    # Translate the device trigger into a plain state trigger config.
    state_config = {
        CONF_PLATFORM: "state",
        state_trigger.CONF_ENTITY_ID: config[CONF_ENTITY_ID],
        state_trigger.CONF_TO: "on" if config[CONF_TYPE] == CONF_TURNED_ON else "off",
    }
    if CONF_FOR in config:
        state_config[CONF_FOR] = config[CONF_FOR]
    # Validate before delegating to the state trigger platform.
    state_config = state_trigger.TRIGGER_SCHEMA(state_config)
    return await state_trigger.async_attach_trigger(
        hass, state_config, action, automation_info, platform_type="device"
    )
async def _async_get_automations(
    hass: HomeAssistant,
    device_id: str,
    automation_templates: list[dict[str, str]],
    domain: str,
) -> list[dict[str, str]]:
    """List device automations."""
    entity_registry = await hass.helpers.entity_registry.async_get_registry()
    automations: list[dict[str, str]] = []
    # For every registry entry of *domain* on this device, stamp each
    # template with the concrete device/entity/domain identifiers.
    for entry in async_entries_for_device(entity_registry, device_id):
        if entry.domain != domain:
            continue
        for template in automation_templates:
            automation = dict(template)
            automation["device_id"] = device_id
            automation["entity_id"] = entry.entity_id
            automation["domain"] = domain
            automations.append(automation)
    return automations
async def async_get_actions(
    hass: HomeAssistant, device_id: str, domain: str
) -> list[dict[str, str]]:
    """List device actions."""
    # One entry per ENTITY_ACTIONS template per matching entity on the device.
    return await _async_get_automations(hass, device_id, ENTITY_ACTIONS, domain)
async def async_get_conditions(
    hass: HomeAssistant, device_id: str, domain: str
) -> list[dict[str, str]]:
    """List device conditions."""
    # One entry per ENTITY_CONDITIONS template per matching entity on the device.
    return await _async_get_automations(hass, device_id, ENTITY_CONDITIONS, domain)
async def async_get_triggers(
    hass: HomeAssistant, device_id: str, domain: str
) -> list[dict]:
    """List device triggers."""
    # One entry per ENTITY_TRIGGERS template per matching entity on the device.
    return await _async_get_automations(hass, device_id, ENTITY_TRIGGERS, domain)
async def async_get_condition_capabilities(
    hass: HomeAssistant, config: ConfigType
) -> dict[str, vol.Schema]:
    """List condition capabilities."""
    # Only optional capability: a minimum duration the state must have held.
    extra_fields = vol.Schema({vol.Optional(CONF_FOR): cv.positive_time_period_dict})
    return {"extra_fields": extra_fields}
async def async_get_trigger_capabilities(hass: HomeAssistant, config: dict) -> dict:
    """List trigger capabilities."""
    # Only optional capability: how long the new state must persist.
    extra_fields = vol.Schema({vol.Optional(CONF_FOR): cv.positive_time_period_dict})
    return {"extra_fields": extra_fields}
| 27.231092
| 86
| 0.677519
|
794b6737136a791e253275657be4ad783b9e93c1
| 7,495
|
py
|
Python
|
discussion/views.py
|
lehins/lehins-discussion
|
8ff8aae239c68e634fafd11a711468ab5bcda0b7
|
[
"BSD-2-Clause"
] | null | null | null |
discussion/views.py
|
lehins/lehins-discussion
|
8ff8aae239c68e634fafd11a711468ab5bcda0b7
|
[
"BSD-2-Clause"
] | null | null | null |
discussion/views.py
|
lehins/lehins-discussion
|
8ff8aae239c68e634fafd11a711468ab5bcda0b7
|
[
"BSD-2-Clause"
] | null | null | null |
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.generic import CreateView, DetailView, ListView, FormView
from django.views.generic.list import BaseListView
from django.db.models import Q
from discussion.forms import CommentForm, PostForm, SearchForm, SubscribeForm
from discussion.models import Discussion, Comment, Post
from discussion.utils import class_view_decorator
class SearchFormMixin(object):
    """Mixin that injects the basic search form into a view's context."""

    # Default initial data for the search form; override per view.
    search_initial = {}

    def get_search_initial(self):
        """Initial data used to pre-populate the search form."""
        return self.search_initial

    def get_search_form_kwargs(self):
        """Keyword arguments used to instantiate the search form."""
        return {'initial': self.get_search_initial()}

    def get_search_form(self, form_class):
        """Build the search form instance used by this view."""
        return form_class(**self.get_search_form_kwargs())

    def get_context_data(self, **kwargs):
        context = super(SearchFormMixin, self).get_context_data(**kwargs)
        context['search_form'] = self.get_search_form(SearchForm)
        return context
class DiscussionMixin(object):
    """Scope a view to a single Discussion identified by a slug URL kwarg."""

    # Name of the URL kwarg carrying the discussion slug.
    discussion_slug = 'discussion_slug'

    def dispatch(self, request, *args, **kwargs):
        # Resolve the discussion up front so every handler can rely on it.
        self.discussion = self.get_discussion(slug=kwargs[self.discussion_slug])
        return super(DiscussionMixin, self).dispatch(request, *args, **kwargs)

    def get_discussion(self, slug=None):
        """Fetch the Discussion for *slug* (defaults to the URL kwarg)."""
        lookup = self.kwargs[self.discussion_slug] if slug is None else slug
        return Discussion.objects.get(slug=lookup)

    def get_queryset(self):
        base_qs = super(DiscussionMixin, self).get_queryset()
        return base_qs.filter(discussion=self.discussion)
@class_view_decorator(login_required)
class DiscussionList(SearchFormMixin, ListView):
    """List all discussions (login required), with the search form in context."""

    model = Discussion
@class_view_decorator(login_required)
class DiscussionView(SearchFormMixin, DetailView):
    """Detail page for a discussion, with a blank PostForm in context."""

    model = Discussion

    def get_search_initial(self):
        # Pre-select the current discussion in the search form.
        initial = dict(super(DiscussionView, self).get_search_initial())
        initial['discussion'] = self.object
        return initial

    def get_context_data(self, **kwargs):
        context = super(DiscussionView, self).get_context_data(**kwargs)
        context['form'] = PostForm()
        return context

    def _render_detail(self):
        """Resolve the object and render the standard detail response."""
        self.object = self.get_object()
        return self.render_to_response(self.get_context_data(object=self.object))

    def get(self, request, *args, **kwargs):
        return self._render_detail()

    def post(self, request, *args, **kwargs):
        # NOTE(review): POST renders exactly the same page as GET here;
        # post creation itself is handled by CreatePost.
        return self._render_detail()
@class_view_decorator(login_required)
class CreatePost(DiscussionMixin, CreateView):
    """Create a new Post inside the current discussion."""

    form_class = PostForm
    model = Post

    def get_form_kwargs(self):
        kwargs = super(CreatePost, self).get_form_kwargs()
        # Pre-bind the post to the requesting user and current discussion.
        kwargs['instance'] = self.model(
            user=self.request.user, discussion=self.discussion
        )
        return kwargs

    def get_context_data(self, **kwargs):
        context = super(CreatePost, self).get_context_data(**kwargs)
        context.update({'discussion': self.discussion})
        return context
@class_view_decorator(login_required)
class PostView(DiscussionMixin, CreateView):
    """Display a post and create comments on it.

    GET renders the post detail page with an empty comment form; POST
    creates a Comment. AJAX submissions receive a rendered fragment
    (comment detail or form-with-errors) instead of a redirect.
    """

    form_class = CommentForm
    model = Comment
    template_name = 'discussion/post_detail.html'
    # Fragment templates used for AJAX responses.
    ajax_form_valid_template_name = 'discussion/_comment_detail.html'
    ajax_form_invalid_template_name = 'discussion/comment_form.html'

    def dispatch(self, request, *args, **kwargs):
        # Resolve the post once so all handlers can use self.post_obj.
        self.post_obj = self.get_post(kwargs['pk'])
        return super(PostView, self).dispatch(request, *args, **kwargs)

    def get_form_kwargs(self):
        kwargs = super(PostView, self).get_form_kwargs()
        # Pre-bind the comment to the requesting user and current post.
        instance = self.model(user=self.request.user, post=self.post_obj)
        kwargs.update({
            'instance': instance,
            # presumably keeps multiple comment forms on a page distinct — verify
            'prefix': self.post_obj.prefix,
        })
        return kwargs

    def get_context_data(self, **kwargs):
        context = super(PostView, self).get_context_data(**kwargs)
        context['post'] = self.post_obj
        return context

    def get_post(self, pk=None):
        """Fetch the Post for *pk* (defaults to the URL kwarg)."""
        if pk is None:
            pk = self.kwargs['pk']
        return Post.objects.get(pk=pk)

    def get_success_url(self):
        """Redirect back to the post detail page after a comment is created."""
        kwargs = {
            'pk': self.kwargs['pk'],
            'discussion_slug': self.kwargs[self.discussion_slug]
        }
        return reverse('discussion_post', kwargs=kwargs)

    def form_valid(self, form):
        # AJAX: return the rendered comment fragment instead of redirecting.
        if self.request.is_ajax():
            self.object = form.save()
            self.template_name = self.ajax_form_valid_template_name
            return self.render_to_response(self.get_context_data(comment=self.object))
        else:
            return super(PostView, self).form_valid(form)

    def form_invalid(self, form):
        # AJAX: re-render just the form with errors and a 400 status.
        if self.request.is_ajax():
            self.template_name = self.ajax_form_invalid_template_name
            return self.render_to_response(self.get_context_data(form=form), status=400)
        else:
            return super(PostView, self).form_invalid(form)
@class_view_decorator(login_required)
class Search(BaseListView, FormView):
    """Full-text search over posts (post bodies and their comments' bodies)."""

    form_class = SearchForm
    model = Post
    template_name = 'discussion/search.html'

    def form_invalid(self, form):
        # Re-render with the invalid form and an empty result list.
        return self.render_to_response(self.get_context_data(form=form,
                                                             object_list=self.get_queryset()))

    def form_valid(self, form):
        """Using the name field try to find any posts that match."""
        search_term = form.cleaned_data['search']
        # Match the term in either the post body or any of its comments;
        # distinct() below collapses duplicates from the comment join.
        object_list = self.model.objects.filter(Q(body__icontains=search_term) |
                                                Q(comment__body__icontains=search_term))
        if form.cleaned_data.get('discussion', None) is not None:
            # Optionally restrict results to a single discussion.
            object_list = object_list.filter(discussion=form.cleaned_data['discussion'])
        return self.render_to_response(self.get_context_data(form=form,
                                                             object_list=object_list.distinct(),
                                                             search_term=search_term))

    def get_context_data(self, **kwargs):
        # Expose the bound form under the 'search_form' context name.
        if 'form' in kwargs:
            kwargs.update({'search_form': kwargs.pop('form')})
        return super(Search, self).get_context_data(**kwargs)

    def get_queryset(self):
        """
        Don't like this currently. I don't want to display all() on a GET but
        don't want to stop subclassers from using this method themselves.
        """
        return self.model.objects.none()

    def get_success_url(self):
        """Defined in case we ever get sent to it by accident"""
        return reverse('discussion_search')

    def get_search_form_kwargs(self):
        """Returns the keyword arguments for instanciating the search form."""
        # NOTE(review): neither BaseListView nor FormView defines
        # get_search_form_kwargs, so this super() call relies on a mixin
        # (e.g. SearchFormMixin) being present in the MRO — verify usage.
        kwargs = super(Search, self).get_search_form_kwargs()
        if self.request.method in ('POST', 'PUT'):
            # Bind submitted data so validation runs on POST/PUT.
            kwargs.update({
                'data': self.request.POST,
                'files': self.request.FILES,
            })
        return kwargs
| 36.560976
| 96
| 0.661374
|
794b6841029a01257a0f289c09ef36d08fabcf1c
| 2,274
|
py
|
Python
|
azure-cognitiveservices-vision-customvision/azure/cognitiveservices/vision/customvision/training/models/image_region_create_result.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | null | null | null |
azure-cognitiveservices-vision-customvision/azure/cognitiveservices/vision/customvision/training/models/image_region_create_result.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 1
|
2018-11-29T14:46:42.000Z
|
2018-11-29T14:46:42.000Z
|
azure-cognitiveservices-vision-customvision/azure/cognitiveservices/vision/customvision/training/models/image_region_create_result.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ImageRegionCreateResult(Model):
    """Result returned when an image region is created.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar image_id:
    :vartype image_id: str
    :ivar region_id:
    :vartype region_id: str
    :ivar tag_name:
    :vartype tag_name: str
    :ivar created:
    :vartype created: datetime
    :param tag_id: Id of the tag associated with this region.
    :type tag_id: str
    :param left:
    :type left: float
    :param top:
    :type top: float
    :param width:
    :type width: float
    :param height:
    :type height: float
    """

    # Fields the server controls; client-set values are ignored on send.
    _validation = {
        'image_id': {'readonly': True},
        'region_id': {'readonly': True},
        'tag_name': {'readonly': True},
        'created': {'readonly': True},
    }

    # Python attribute -> wire (JSON) key and serialization type.
    _attribute_map = {
        'image_id': {'key': 'imageId', 'type': 'str'},
        'region_id': {'key': 'regionId', 'type': 'str'},
        'tag_name': {'key': 'tagName', 'type': 'str'},
        'created': {'key': 'created', 'type': 'iso-8601'},
        'tag_id': {'key': 'tagId', 'type': 'str'},
        'left': {'key': 'left', 'type': 'float'},
        'top': {'key': 'top', 'type': 'float'},
        'width': {'key': 'width', 'type': 'float'},
        'height': {'key': 'height', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(ImageRegionCreateResult, self).__init__(**kwargs)
        # Server-populated, read-only attributes start out unset.
        for readonly_attr in ('image_id', 'region_id', 'tag_name', 'created'):
            setattr(self, readonly_attr, None)
        # Caller-supplied tag / bounding-box attributes.
        for attr in ('tag_id', 'left', 'top', 'width', 'height'):
            setattr(self, attr, kwargs.get(attr, None))
| 32.028169
| 76
| 0.554529
|
794b68b30285bcf13272b1bf25247d26a589ca8a
| 283
|
py
|
Python
|
ping_multiplo/ping_multiplo.py
|
mauriciopicirillo/Seguran-a_da_informa-o_com_Python
|
0a439066dda42268de7338c815c4cf7872079211
|
[
"MIT"
] | null | null | null |
ping_multiplo/ping_multiplo.py
|
mauriciopicirillo/Seguran-a_da_informa-o_com_Python
|
0a439066dda42268de7338c815c4cf7872079211
|
[
"MIT"
] | null | null | null |
ping_multiplo/ping_multiplo.py
|
mauriciopicirillo/Seguran-a_da_informa-o_com_Python
|
0a439066dda42268de7338c815c4cf7872079211
|
[
"MIT"
] | null | null | null |
import os
import subprocess
import time

# Read one host/IP per line from hosts.txt and ping each in sequence.
with open('hosts.txt') as file:
    dump = file.read()

dump = dump.splitlines()

for ip in dump:
    # Skip blank lines so we don't run "ping" with no target.
    if not ip.strip():
        continue
    print('Verificando o ip: ', ip)
    print('-' * 60)
    # SECURITY FIX: os.system('ping -n 2 {}'.format(ip)) passed untrusted
    # file content through a shell (command injection). A list argv with
    # shell=False cannot be injected.
    # NOTE: '-n 2' is the Windows ping count flag; use '-c 2' on Unix.
    subprocess.run(['ping', '-n', '2', ip])
    print('-' * 60)
    time.sleep(5)
| 21.769231
| 45
| 0.522968
|
794b69e64ae775672890ac0f8ee3c75b24418261
| 2,898
|
py
|
Python
|
src/junction/markdown/info_panels.py
|
explody/Junction
|
700df9385fceda00d6830816606d8854dc9cef7b
|
[
"MIT"
] | 16
|
2020-04-28T07:03:26.000Z
|
2022-03-05T14:26:40.000Z
|
src/junction/markdown/info_panels.py
|
explody/Junction
|
700df9385fceda00d6830816606d8854dc9cef7b
|
[
"MIT"
] | 14
|
2020-03-19T04:32:18.000Z
|
2021-03-05T23:54:47.000Z
|
src/junction/markdown/info_panels.py
|
explody/Junction
|
700df9385fceda00d6830816606d8854dc9cef7b
|
[
"MIT"
] | 3
|
2021-01-19T18:39:00.000Z
|
2022-02-14T23:51:07.000Z
|
from typing import List, Any
from markdown import Markdown
from markdown.extensions import Extension
from markdown.blockprocessors import BlockProcessor
import re
import xml.etree.ElementTree as etree
class InfoPanelExtension(Extension):
    """Markdown extension rendering the Confluence info panel macros.

    Only the four "original" panels are supported: info (blue), success
    (green), warning (yellow) and error (red).

    Example:
    ```
    Normal, introductory paragraph.

    Warning: info panels like this must be isolated into their own blocks with surrounding blank lines.

    This will be a plain old paragraph, and not included in the warning above.
    ```
    """

    def extendMarkdown(self, md: Markdown) -> None:
        md.registerExtension(self)
        # (registration name, text prefix, Confluence macro name, macro id)
        panels = [
            ("info-panel", "Info:", "info", "42afc5c4-fb53-4483-9f1a-a87a7ad033e6"),
            ("success-panel", "Success:", "tip", "d60a142d-bc62-4f37-a091-7254c4472bdf"),
            ("warning-panel", "Warning:", "note", "9e14a573-943e-4691-919b-a9f6a389da71"),
            ("error-panel", "Error:", "warning", "2e759c9c-11f1-4959-82e7-901a2dc737d7"),
        ]
        for reg_name, prefix, macro_name, macro_id in panels:
            md.parser.blockprocessors.register(
                InfoPanelBlockProcessor(prefix, macro_name, macro_id, md.parser),
                reg_name,
                25,
            )
class InfoPanelBlockProcessor(BlockProcessor):
    """Turn a ``<prefix> text`` block into a Confluence structured-macro
    element whose rich-text body holds the parsed block content."""

    def __init__(
        self, prefix: str, name: str, macro_id: str, *args: Any, **kwargs: Any
    ):
        self._prefix = prefix
        # FIX: escape the prefix so regex metacharacters in it cannot
        # corrupt the pattern.
        self._block_re = re.compile(
            r"\s*{}.*".format(re.escape(prefix)),
            re.MULTILINE | re.DOTALL | re.VERBOSE,
        )
        self._name = name
        self._macro_id = macro_id
        super().__init__(*args, **kwargs)

    def test(self, parent: etree.Element, block: str) -> bool:
        return bool(self._block_re.match(block))

    def run(self, parent: etree.Element, blocks: List[str]) -> None:
        # BUG FIX: the original used blocks.pop(0).lstrip(self._prefix),
        # but str.lstrip(arg) strips any leading run of *characters in*
        # arg (a character set), not the prefix string itself — e.g. with
        # prefix "Info:" it would also eat a content word starting with
        # "of". Strip whitespace, then remove the exact prefix once.
        block = blocks.pop(0).lstrip()
        if block.startswith(self._prefix):
            block = block[len(self._prefix):]
        raw_content = block.lstrip()
        info_panel = etree.SubElement(
            parent,
            "ac:structured-macro",
            {
                "ac:name": self._name,
                "ac:schema-version": "1",
                "ac:macro-id": self._macro_id,
            },
        )
        rich_text_body = etree.SubElement(info_panel, "ac:rich-text-body")
        self.parser.parseChunk(rich_text_body, raw_content)
        info_panel.tail = "\n"
def makeExtension(**kwargs: Any) -> InfoPanelExtension:
    """Entry point used by python-markdown to instantiate the extension."""
    return InfoPanelExtension(**kwargs)
| 32.931818
| 103
| 0.596963
|
794b6a080b227eaa3846d9350b5a430ee03959e7
| 9,856
|
py
|
Python
|
competition-docker-files/model/model.py
|
mmcenta/helpful-bookworm
|
dd1dc36ccc732684bd6a3f4fa7a8b4b8c242acf7
|
[
"MIT"
] | null | null | null |
competition-docker-files/model/model.py
|
mmcenta/helpful-bookworm
|
dd1dc36ccc732684bd6a3f4fa7a8b4b8c242acf7
|
[
"MIT"
] | null | null | null |
competition-docker-files/model/model.py
|
mmcenta/helpful-bookworm
|
dd1dc36ccc732684bd6a3f4fa7a8b4b8c242acf7
|
[
"MIT"
] | null | null | null |
import pandas as pd
import os
import gzip
import argparse
import time
import re
import jieba
import pickle
import tensorflow as tf
import numpy as np
import sys, getopt
from subprocess import check_output
from keras import models
from keras.layers import Dense
from keras.layers import Dropout
from keras.layers import Embedding
from keras.layers import GlobalAveragePooling1D
from keras.optimizers import Adagrad
from keras.initializers import Constant
from keras.preprocessing import text
from keras.preprocessing import sequence
from keras.backend.tensorflow_backend import set_session
# Configure the TF1-style session shared by Keras.
config = tf.ConfigProto()
config.gpu_options.allow_growth = True  # dynamically grow the memory used on the GPU
config.log_device_placement = False  # to log device placement (on which device the operation ran)
# (nothing gets printed in Jupyter, only if you run it standalone)
sess = tf.Session(config=config)
set_session(sess)  # set this TensorFlow session as the default session for Keras
# Directory holding the gzipped fastText embedding files (see load_embedding).
EMBEDDINGS_DIR = "/app/embedding"
# Caps applied during tokenization (see get_tokenizer / preprocess_text).
MAX_SEQ_LENGTH = 500
MAX_VOCAB_SIZE = 20000  # Limit on the number of features. We use the top 20K features
# Training hyper-parameters used in Model.train.
NUM_EPOCHS = 10
BATCH_SIZE = 32
# Functions to clean instances both in English and Chinese. Heavily inspired on the Baseline 2.
# Code from https://towardsdatascience.com/multi-class-text-classification-with-lstm-1590bee1bd17
def clean_en_instances(instances):
    """Lowercase English instances and normalize punctuation.

    Characters in REPLACE_BY_SPACE_RE become spaces, any remaining
    character outside [0-9a-zA-Z #+_] is dropped, and the result is
    stripped of surrounding whitespace.

    FIX: removed the unused ``tokenization_clean`` jieba lambda, a
    copy-paste leftover from clean_zh_instances.
    """
    REPLACE_BY_SPACE_RE = re.compile('["/(){}\[\]\|@,;]')
    BAD_SYMBOLS_RE = re.compile('[^0-9a-zA-Z #+_]')
    cleaned = []
    for instance in instances:
        instance = REPLACE_BY_SPACE_RE.sub(' ', instance.lower())
        instance = BAD_SYMBOLS_RE.sub('', instance)
        cleaned.append(instance.strip())
    return cleaned
def clean_zh_instances(instances):
    """Strip listed punctuation from Chinese instances and tokenize with jieba."""
    REPLACE_BY_SPACE_RE = re.compile('[“”【】/():!~「」、|,;。"/(){}\[\]\|@,\.;]')
    cleaned = []
    for raw in instances:
        stripped = REPLACE_BY_SPACE_RE.sub(' ', raw).strip()
        # Segment into words and re-join with single spaces.
        cleaned.append(' '.join(jieba.cut(stripped, cut_all=False)))
    return cleaned
def get_tokenizer(instances, language):
    """Fit a Keras tokenizer on *instances* after language-specific cleaning.

    Returns ``(tokenizer, vocab_size, max_seq_length)`` where both the
    vocabulary size and the maximum sequence length are capped by
    MAX_VOCAB_SIZE / MAX_SEQ_LENGTH.
    """
    cleaner = clean_en_instances if language == 'EN' else clean_zh_instances
    instances = cleaner(instances)
    tokenizer = text.Tokenizer(num_words=MAX_VOCAB_SIZE)
    tokenizer.fit_on_texts(instances)
    sequences = tokenizer.texts_to_sequences(instances)
    # Longest observed sequence, capped at MAX_SEQ_LENGTH.
    max_seq_length = min(len(max(sequences, key=len)), MAX_SEQ_LENGTH)
    # +1 accounts for the reserved padding index 0.
    vocab_size = min(len(tokenizer.word_index) + 1, MAX_VOCAB_SIZE)
    return tokenizer, vocab_size, max_seq_length
def preprocess_text(instances, tokenizer, max_seq_length, language):
    """Clean, tokenize and pad *instances* to *max_seq_length*."""
    cleaner = clean_en_instances if language == 'EN' else clean_zh_instances
    token_seqs = tokenizer.texts_to_sequences(cleaner(instances))
    return sequence.pad_sequences(token_seqs, maxlen=max_seq_length)
def load_embedding(embedding_file, language):
    """Load a gzipped fastText vector file into a word -> np.float32 vector dict."""
    embedding_path = os.path.join(EMBEDDINGS_DIR, embedding_file)
    embedding = {}
    with gzip.open(embedding_path, 'rb') as f:
        for line in f.readlines():
            values = line.strip().split()
            # Chinese tokens must be decoded from raw bytes.
            word = values[0].decode('utf8') if language == 'ZH' else values[0]
            embedding[word] = np.asarray(values[1:], dtype='float32')
    print("Found {} fastText word vectors for language {}.".format(len(embedding), language))
    return embedding
def get_emb_mlp_model(vocab_size,
                      input_length,
                      num_classes,
                      embedding_matrix,
                      hidden_layer_units,
                      dropout_rate=0.5):
    """Build and compile an MLP over averaged pretrained word embeddings.

    Architecture: Embedding (initialized from *embedding_matrix*) ->
    GlobalAveragePooling1D -> [Dropout + Dense(relu)] per entry in
    *hidden_layer_units* -> Dropout + Dense(softmax).

    :param vocab_size: number of rows in the embedding matrix.
    :param input_length: padded sequence length fed to the model.
    :param num_classes: size of the softmax output layer.
    :param embedding_matrix: pretrained (vocab_size x dim) vector matrix.
    :param hidden_layer_units: iterable of hidden Dense layer widths.
    :param dropout_rate: dropout applied before every Dense layer.
    :return: compiled Keras Sequential model (Adagrad, categorical
        cross-entropy, accuracy metric).
    """
    embedding_dim = embedding_matrix.shape[1]
    # Instantiate model and embedding layer
    model = models.Sequential()
    model.add(Embedding(input_dim=vocab_size, output_dim=embedding_dim, input_length=input_length,
                        embeddings_initializer=Constant(embedding_matrix)))
    # Average the embeddings of all words per example
    model.add(GlobalAveragePooling1D())
    # Add the hidden layers
    for num_units in hidden_layer_units:
        model.add(Dropout(rate=dropout_rate))
        model.add(Dense(num_units, activation='relu'))
    # Add the final layer
    model.add(Dropout(rate=dropout_rate))
    model.add(Dense(num_classes, activation='softmax'))
    # Compile model
    optimizer = Adagrad()
    model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy'])
    return model
class Model(object):
    """Text classifier built from averaged pretrained fastText embeddings + MLP.

    NOTE(review): the original docstring described a "CNN baseline"
    (`https://aclweb.org/anthology/D14-1181`), but `train` actually builds
    the embedding + MLP network from get_emb_mlp_model — confirm intent.
    """
    def __init__(self, metadata, train_output_path="./", test_input_path="./"):
        """ Initialization for model
        :param metadata: a dict formed like:
            {"class_num": 10,
             "language": ZH,
             "train_num": 10000,
             "test_num": 1000,
             "time_budget": 300}
        """
        self.done_training = False
        self.metadata = metadata
        self.train_output_path = train_output_path
        self.test_input_path = test_input_path
        # Added attributes
        self.max_seq_length = None  # padded input length, fixed on first train()
        self.tokenizer = None  # Keras tokenizer fit on the training corpus
        self.model = None  # compiled Keras model, built lazily in train()
        self.x_train = None  # cached preprocessed training sequences
        self.x_test = None  # cached preprocessed test sequences
        # Load embeddings
        self.embedding = None
        if metadata['language'] == 'EN':
            self.embedding = load_embedding('cc.en.300.vec.gz', 'EN')
        else:
            self.embedding = load_embedding('cc.zh.300.vec.gz', 'ZH')
    def train(self, train_dataset, remaining_time_budget=None):
        """model training on train_dataset.
        :param train_dataset: tuple, (x_train, y_train)
            x_train: list of str, input training sentences.
            y_train: A `numpy.ndarray` matrix of shape (sample_count, class_num).
                here `sample_count` is the number of instances in this dataset as train
                set and `class_num` is the same as the class_num in metadata. The
                values should be binary.
        :param remaining_time_budget:
        """
        if self.done_training:
            return
        x_train, y_train = train_dataset
        if self.model is None:
            # If the model was not initialized
            num_classes = self.metadata['class_num']
            # Get the tokenizer based on the training instances
            self.tokenizer, vocab_size, self.max_seq_length = get_tokenizer(x_train, self.metadata['language'])
            # Build the embedding matrix of the vocab
            word_index = self.tokenizer.word_index
            # Infer the vector dimension from any loaded embedding entry.
            embedding_dim = len(next(iter(self.embedding.values())))
            embedding_matrix = np.zeros((vocab_size, embedding_dim))
            oov_count = 0
            for word, i in word_index.items():
                if i >= vocab_size:
                    continue
                vector = self.embedding.get(word)
                if vector is not None:
                    embedding_matrix[i] = vector
                else:
                    # Words not found in the embedding will be assigned to vectors of zeros
                    # NOTE(review): hard-codes dim 300 instead of embedding_dim;
                    # the row is already zero, so this assignment is redundant.
                    embedding_matrix[i] = np.zeros(300)
                    oov_count += 1
            print('Embedding out of vocabulary words: {}'.format(oov_count))
            # Initialize model
            self.model = get_emb_mlp_model(vocab_size,
                                           self.max_seq_length,
                                           num_classes,
                                           embedding_matrix,
                                           hidden_layer_units=[1000])
        if self.x_train is None:
            # If the training instances are not cached
            self.x_train = preprocess_text(x_train, self.tokenizer, self.max_seq_length, self.metadata['language'])
        # Train model
        history = self.model.fit(
            x=self.x_train,
            y=y_train,
            epochs=NUM_EPOCHS,
            callbacks=[tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=10)],
            validation_split=0.2,
            verbose=2,  # Logs once per epoch.
            batch_size=BATCH_SIZE,
            shuffle=True)
        # One fit call is considered a complete training run.
        self.done_training = True
    def test(self, x_test, remaining_time_budget=None):
        """
        :param x_test: list of str, input test sentences.
        :param remaining_time_budget:
        :return: A `numpy.ndarray` matrix of shape (sample_count, class_num).
            here `sample_count` is the number of instances in this dataset as test
            set and `class_num` is the same as the class_num in metadata. The
            values should be binary or in the interval [0,1].
        """
        if self.x_test is None:
            # If the test instances are not cached
            self.x_test = preprocess_text(x_test, self.tokenizer, self.max_seq_length, self.metadata['language'])
        # Evaluate model
        return self.model.predict(self.x_test)
| 36.235294
| 115
| 0.632102
|
794b6a2a17cba475ba172d378e620c1fdce0c7e0
| 23
|
py
|
Python
|
example_snippets/multimenus_snippets/Snippets/SciPy/Special functions/Gamma and Related Functions/polygamma Polygamma function which is the $n$th derivative of the digamma (psi) function.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | null | null | null |
example_snippets/multimenus_snippets/Snippets/SciPy/Special functions/Gamma and Related Functions/polygamma Polygamma function which is the $n$th derivative of the digamma (psi) function.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | null | null | null |
example_snippets/multimenus_snippets/Snippets/SciPy/Special functions/Gamma and Related Functions/polygamma Polygamma function which is the $n$th derivative of the digamma (psi) function.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | 1
|
2021-02-04T04:51:48.000Z
|
2021-02-04T04:51:48.000Z
|
# scipy.special.polygamma(n, x): nth derivative of the digamma (psi) function.
special.polygamma(n, x)
| 23
| 23
| 0.782609
|
794b6a992d6c7b8180f50f70d4f7a12e6326122e
| 21,537
|
py
|
Python
|
pgmpy/tests/test_readwrite/test_XMLBeliefNetwork.py
|
daanknoope/pgmpy
|
b1ad5ec01837dc1fc369dd542971492fae642ab4
|
[
"MIT"
] | null | null | null |
pgmpy/tests/test_readwrite/test_XMLBeliefNetwork.py
|
daanknoope/pgmpy
|
b1ad5ec01837dc1fc369dd542971492fae642ab4
|
[
"MIT"
] | 20
|
2019-02-22T09:24:57.000Z
|
2019-02-25T14:53:54.000Z
|
pgmpy/tests/test_readwrite/test_XMLBeliefNetwork.py
|
daanknoope/pgmpy
|
b1ad5ec01837dc1fc369dd542971492fae642ab4
|
[
"MIT"
] | null | null | null |
import unittest
import warnings
import numpy as np
import numpy.testing as np_test
import networkx as nx
from pgmpy.readwrite import XMLBeliefNetwork
from pgmpy.models import BayesianModel
from pgmpy.factors.discrete import TabularCPD
from pgmpy.extern import six
try:
from lxml import etree
except ImportError:
try:
import xml.etree.cElementTree as etree
except ImportError:
try:
import xml.etree.ElementTree as etree
except ImportError:
warnings.warn("Failed to import ElementTree from any known place")
class TestXBNReader(unittest.TestCase):
    def setUp(self):
        """Build two XBNReader fixtures from the same XBN document: one
        parsed from a raw string, one from a file-like object."""
        string = """<ANALYSISNOTEBOOK NAME="Notebook.Cancer Example From Neapolitan" ROOT="Cancer">
<BNMODEL NAME="Cancer">
<STATICPROPERTIES>
<FORMAT VALUE="MSR DTAS XML"/>
<VERSION VALUE="0.2"/>
<CREATOR VALUE="Microsoft Research DTAS"/>
</STATICPROPERTIES>
<VARIABLES>
<VAR NAME="a" TYPE="discrete" XPOS="13495" YPOS="10465">
<DESCRIPTION>(a) Metastatic Cancer</DESCRIPTION>
<STATENAME>Present</STATENAME>
<STATENAME>Absent</STATENAME>
</VAR>
<VAR NAME="b" TYPE="discrete" XPOS="11290" YPOS="11965">
<DESCRIPTION>(b) Serum Calcium Increase</DESCRIPTION>
<STATENAME>Present</STATENAME>
<STATENAME>Absent</STATENAME>
</VAR>
<VAR NAME="c" TYPE="discrete" XPOS="15250" YPOS="11935">
<DESCRIPTION>(c) Brain Tumor</DESCRIPTION>
<STATENAME>Present</STATENAME>
<STATENAME>Absent</STATENAME>
</VAR>
<VAR NAME="d" TYPE="discrete" XPOS="13960" YPOS="12985">
<DESCRIPTION>(d) Coma</DESCRIPTION>
<STATENAME>Present</STATENAME>
<STATENAME>Absent</STATENAME>
</VAR>
<VAR NAME="e" TYPE="discrete" XPOS="17305" YPOS="13240">
<DESCRIPTION>(e) Papilledema</DESCRIPTION>
<STATENAME>Present</STATENAME>
<STATENAME>Absent</STATENAME>
</VAR>
<VAR NAME="f" TYPE="discrete" XPOS="13440" YPOS="10489">
<DESCRIPTION>(f) Asthma</DESCRIPTION>
<STATENAME>Present</STATENAME>
<STATENAME>Absent</STATENAME>
</VAR>
</VARIABLES>
<STRUCTURE>
<ARC PARENT="a" CHILD="b"/>
<ARC PARENT="a" CHILD="c"/>
<ARC PARENT="b" CHILD="d"/>
<ARC PARENT="c" CHILD="d"/>
<ARC PARENT="c" CHILD="e"/>
</STRUCTURE>
<DISTRIBUTIONS>
<DIST TYPE="discrete">
<PRIVATE NAME="a"/>
<DPIS>
<DPI> 0.2 0.8</DPI>
</DPIS>
</DIST>
<DIST TYPE="discrete">
<CONDSET>
<CONDELEM NAME="a"/>
</CONDSET>
<PRIVATE NAME="b"/>
<DPIS>
<DPI INDEXES=" 0 "> 0.8 0.2</DPI>
<DPI INDEXES=" 1 "> 0.2 0.8</DPI>
</DPIS>
</DIST>
<DIST TYPE="discrete">
<CONDSET>
<CONDELEM NAME="a"/>
</CONDSET>
<PRIVATE NAME="c"/>
<DPIS>
<DPI INDEXES=" 0 "> 0.2 0.8</DPI>
<DPI INDEXES=" 1 "> 0.05 0.95</DPI>
</DPIS>
</DIST>
<DIST TYPE="discrete">
<CONDSET>
<CONDELEM NAME="b"/>
<CONDELEM NAME="c"/>
</CONDSET>
<PRIVATE NAME="d"/>
<DPIS>
<DPI INDEXES=" 0 0 "> 0.8 0.2</DPI>
<DPI INDEXES=" 0 1 "> 0.9 0.1</DPI>
<DPI INDEXES=" 1 0 "> 0.7 0.3</DPI>
<DPI INDEXES=" 1 1 "> 0.05 0.95</DPI>
</DPIS>
</DIST>
<DIST TYPE="discrete">
<CONDSET>
<CONDELEM NAME="c"/>
</CONDSET>
<PRIVATE NAME="e"/>
<DPIS>
<DPI INDEXES=" 0 "> 0.8 0.2</DPI>
<DPI INDEXES=" 1 "> 0.6 0.4</DPI>
</DPIS>
</DIST>
<DIST TYPE="discrete">
<PRIVATE NAME="f"/>
<DPIS>
<DPI> 0.3 0.7</DPI>
</DPIS>
</DIST>
</DISTRIBUTIONS>
</BNMODEL>
</ANALYSISNOTEBOOK>"""
        self.reader_string = XMLBeliefNetwork.XBNReader(string=string)
        self.reader_file = XMLBeliefNetwork.XBNReader(path=six.StringIO(string))
    def test_init_exception(self):
        # Constructing a reader with neither path nor string must fail.
        self.assertRaises(ValueError, XMLBeliefNetwork.XBNReader)
def test_get_analysis_notebook(self):
self.assertEqual(self.reader_string.get_analysisnotebook_values()['NAME'],
"Notebook.Cancer Example From Neapolitan")
self.assertEqual(self.reader_string.get_analysisnotebook_values()['ROOT'], "Cancer")
self.assertEqual(self.reader_file.get_analysisnotebook_values()['NAME'],
"Notebook.Cancer Example From Neapolitan")
self.assertEqual(self.reader_file.get_analysisnotebook_values()['ROOT'], "Cancer")
def test_get_bnmodel_name(self):
self.assertEqual(self.reader_string.get_bnmodel_name(), "Cancer")
self.assertEqual(self.reader_file.get_bnmodel_name(), "Cancer")
def test_get_static_properties(self):
properties = self.reader_string.get_static_properties()
self.assertEqual(properties['FORMAT'], "MSR DTAS XML")
self.assertEqual(properties['VERSION'], "0.2")
self.assertEqual(properties['CREATOR'], "Microsoft Research DTAS")
properties = self.reader_file.get_static_properties()
self.assertEqual(properties['FORMAT'], "MSR DTAS XML")
self.assertEqual(properties['VERSION'], "0.2")
self.assertEqual(properties['CREATOR'], "Microsoft Research DTAS")
def test_get_variables(self):
self.assertListEqual(sorted(list(self.reader_string.get_variables())), ['a', 'b', 'c', 'd', 'e', 'f'])
self.assertListEqual(sorted(list(self.reader_file.get_variables())), ['a', 'b', 'c', 'd', 'e', 'f'])
self.assertEqual(self.reader_string.get_variables()['a']['TYPE'], 'discrete')
self.assertEqual(self.reader_string.get_variables()['a']['XPOS'], '13495')
self.assertEqual(self.reader_string.get_variables()['a']['YPOS'], '10465')
self.assertEqual(self.reader_string.get_variables()['a']['DESCRIPTION'], '(a) Metastatic Cancer')
self.assertListEqual(self.reader_string.get_variables()['a']['STATES'], ['Present', 'Absent'])
self.assertEqual(self.reader_file.get_variables()['a']['TYPE'], 'discrete')
self.assertEqual(self.reader_file.get_variables()['a']['XPOS'], '13495')
self.assertEqual(self.reader_file.get_variables()['a']['YPOS'], '10465')
self.assertEqual(self.reader_file.get_variables()['a']['DESCRIPTION'], '(a) Metastatic Cancer')
self.assertListEqual(self.reader_file.get_variables()['a']['STATES'], ['Present', 'Absent'])
def test_get_edges(self):
self.assertListEqual(self.reader_string.get_edges(),
[('a', 'b'), ('a', 'c'), ('b', 'd'), ('c', 'd'), ('c', 'e')])
self.assertListEqual(self.reader_file.get_edges(), [('a', 'b'), ('a', 'c'), ('b', 'd'), ('c', 'd'), ('c', 'e')])
def test_get_distribution(self):
    """DPIS matrices, CONDSETs and cardinalities parse identically from
    string and file input."""
    for reader in (self.reader_string, self.reader_file):
        distribution = reader.get_distributions()
        self.assertEqual(distribution['a']['TYPE'], 'discrete')
        self.assertListEqual(distribution['b']['CONDSET'], ['a'])
        np_test.assert_array_equal(distribution['a']['DPIS'],
                                   np.array([[0.2, 0.8]]))
        np_test.assert_array_equal(distribution['f']['DPIS'],
                                   np.array([[0.3, 0.7]]))
        np_test.assert_array_equal(distribution['e']['DPIS'],
                                   np.array([[0.8, 0.2], [0.6, 0.4]]))
        np_test.assert_array_equal(distribution['e']['CARDINALITY'],
                                   np.array([2]))
        np_test.assert_array_equal(distribution['d']['DPIS'],
                                   np.array([[0.8, 0.2], [0.9, 0.1],
                                             [0.7, 0.3], [0.05, 0.95]]))
        np_test.assert_array_equal(distribution['d']['CARDINALITY'],
                                   np.array([2, 2]))
        np_test.assert_array_equal(distribution['b']['DPIS'],
                                   np.array([[0.8, 0.2], [0.2, 0.8]]))
        np_test.assert_array_equal(distribution['c']['DPIS'],
                                   np.array([[0.2, 0.8], [0.05, 0.95]]))
        np_test.assert_array_equal(distribution['c']['CARDINALITY'],
                                   np.array([2]))
def test_get_model(self):
    """get_model() builds a BayesianModel whose node attributes, CPD value
    matrices and edges all match the parsed XBN document."""
    model = self.reader_string.get_model()
    # Expected per-node metadata: states, description and canvas position.
    node_expected = {'c': {'STATES': ['Present', 'Absent'],
                           'DESCRIPTION': '(c) Brain Tumor',
                           'YPOS': '11935',
                           'XPOS': '15250',
                           'TYPE': 'discrete'},
                     'a': {'STATES': ['Present', 'Absent'],
                           'DESCRIPTION': '(a) Metastatic Cancer',
                           'YPOS': '10465',
                           'XPOS': '13495',
                           'TYPE': 'discrete'},
                     'b': {'STATES': ['Present', 'Absent'],
                           'DESCRIPTION': '(b) Serum Calcium Increase',
                           'YPOS': '11965',
                           'XPOS': '11290',
                           'TYPE': 'discrete'},
                     'e': {'STATES': ['Present', 'Absent'],
                           'DESCRIPTION': '(e) Papilledema',
                           'YPOS': '13240',
                           'XPOS': '17305',
                           'TYPE': 'discrete'},
                     'f': {'STATES': ['Present', 'Absent'],
                           'DESCRIPTION': '(f) Asthma',
                           'YPOS': '10489',
                           'XPOS': '13440',
                           'TYPE': 'discrete'},
                     'd': {'STATES': ['Present', 'Absent'],
                           'DESCRIPTION': '(d) Coma',
                           'YPOS': '12985',
                           'XPOS': '13960',
                           'TYPE': 'discrete'}}
    # Expected CPD value matrices (as returned by TabularCPD.get_values()).
    cpds_expected = {'b': np.array([[0.8, 0.2],
                                    [0.2, 0.8]]),
                     'e': np.array([[0.8, 0.2],
                                    [0.6, 0.4]]),
                     'f': np.array([[0.3],
                                    [0.7]]),
                     'c': np.array([[0.2, 0.8],
                                    [0.05, 0.95]]),
                     'a': np.array([[0.2],
                                    [0.8]]),
                     'd': np.array([[0.8, 0.2, 0.9, 0.1],
                                    [0.7, 0.3, 0.05, 0.95]])}
    for cpd in model.get_cpds():
        np_test.assert_array_equal(cpd.get_values(), cpds_expected[cpd.variable])
    self.assertListEqual(sorted(model.edges()), sorted([('b', 'd'), ('a', 'b'), ('a', 'c'),
                                                        ('c', 'd'), ('c', 'e')]))
    # NOTE(review): model.node is the networkx 1.x attribute API; confirm
    # the pinned networkx version, since 2.x removed it.
    self.assertDictEqual(dict(model.node), node_expected)
class TestXBNWriter(unittest.TestCase):
    """Serialisation test: build a BayesianModel for the classic 'cancer'
    network by hand and check XBNWriter emits the expected XBN XML."""

    def setUp(self):
        # Node metadata keyed by variable name; XBN stores display info
        # (canvas position, description) alongside the state names.
        nodes = {'c': {'STATES': ['Present', 'Absent'],
                       'DESCRIPTION': '(c) Brain Tumor',
                       'YPOS': '11935',
                       'XPOS': '15250',
                       'TYPE': 'discrete'},
                 'a': {'STATES': ['Present', 'Absent'],
                       'DESCRIPTION': '(a) Metastatic Cancer',
                       'YPOS': '10465',
                       'XPOS': '13495',
                       'TYPE': 'discrete'},
                 'b': {'STATES': ['Present', 'Absent'],
                       'DESCRIPTION': '(b) Serum Calcium Increase',
                       'YPOS': '11965',
                       'XPOS': '11290',
                       'TYPE': 'discrete'},
                 'e': {'STATES': ['Present', 'Absent'],
                       'DESCRIPTION': '(e) Papilledema',
                       'YPOS': '13240',
                       'XPOS': '17305',
                       'TYPE': 'discrete'},
                 'f': {'STATES': ['Present', 'Absent'],
                       'DESCRIPTION': '(f) Asthma',
                       'YPOS': '10489',
                       'XPOS': '13440',
                       'TYPE': 'discrete'},
                 'd': {'STATES': ['Present', 'Absent'],
                       'DESCRIPTION': '(d) Coma',
                       'YPOS': '12985',
                       'XPOS': '13960',
                       'TYPE': 'discrete'}}
        model = BayesianModel()
        model.add_nodes_from(['a', 'b', 'c', 'd', 'e', 'f'])
        model.add_edges_from([('b', 'd'), ('a', 'b'), ('a', 'c'), ('c', 'd'), ('c', 'e')])
        # Raw conditional distributions in XBN layout: each DPIS row is one
        # parent-state combination (per CONDSET/CARDINALITY).
        cpd_distribution = {'a': {'TYPE': 'discrete',
                                  'DPIS': np.array([[0.2, 0.8]])},
                            'e': {'TYPE': 'discrete',
                                  'DPIS': np.array([[0.8, 0.2],
                                                    [0.6, 0.4]]),
                                  'CONDSET': ['c'],
                                  'CARDINALITY': [2]},
                            'f': {'TYPE': 'discrete',
                                  'DPIS': np.array([[0.3, 0.7]])},
                            'b': {'TYPE': 'discrete',
                                  'DPIS': np.array([[0.8, 0.2],
                                                    [0.2, 0.8]]),
                                  'CONDSET': ['a'],
                                  'CARDINALITY': [2]},
                            'c': {'TYPE': 'discrete',
                                  'DPIS': np.array([[0.2, 0.8],
                                                    [0.05, 0.95]]),
                                  'CONDSET': ['a'],
                                  'CARDINALITY': [2]},
                            'd': {'TYPE': 'discrete',
                                  'DPIS': np.array([[0.8, 0.2],
                                                    [0.9, 0.1],
                                                    [0.7, 0.3],
                                                    [0.05, 0.95]]),
                                  'CONDSET': ['b', 'c'],
                                  'CARDINALITY': [2, 2]}}
        # Convert the raw XBN distributions into pgmpy TabularCPDs.
        tabular_cpds = []
        for var, values in cpd_distribution.items():
            evidence = values['CONDSET'] if 'CONDSET' in values else []
            cpd = values['DPIS']
            evidence_card = values['CARDINALITY'] if 'CARDINALITY' in values else []
            states = nodes[var]['STATES']
            cpd = TabularCPD(var, len(states), cpd,
                             evidence=evidence,
                             evidence_card=evidence_card)
            tabular_cpds.append(cpd)
        model.add_cpds(*tabular_cpds)
        # networkx 1.x exposes node attribute dicts via .node; 2.x via ._node.
        if nx.__version__.startswith('1'):
            for var, properties in nodes.items():
                model.node[var] = properties
        else:
            for var, properties in nodes.items():
                model._node[var] = properties
        self.maxDiff = None  # show full XML diffs on failure
        self.writer = XMLBeliefNetwork.XBNWriter(model=model)

    def test_file(self):
        """Writer output equals the reference XBN document (the writer's
        trailing newline is stripped with [:-1] before comparing)."""
        self.expected_xml = etree.XML("""<ANALYSISNOTEBOOK>
<BNMODEL>
<VARIABLES>
<VAR NAME="a" TYPE="discrete" XPOS="13495" YPOS="10465">
<DESCRIPTION DESCRIPTION="(a) Metastatic Cancer"/>
<STATENAME>Present</STATENAME>
<STATENAME>Absent</STATENAME>
</VAR>
<VAR NAME="b" TYPE="discrete" XPOS="11290" YPOS="11965">
<DESCRIPTION DESCRIPTION="(b) Serum Calcium Increase"/>
<STATENAME>Present</STATENAME>
<STATENAME>Absent</STATENAME>
</VAR>
<VAR NAME="c" TYPE="discrete" XPOS="15250" YPOS="11935">
<DESCRIPTION DESCRIPTION="(c) Brain Tumor"/>
<STATENAME>Present</STATENAME>
<STATENAME>Absent</STATENAME>
</VAR>
<VAR NAME="d" TYPE="discrete" XPOS="13960" YPOS="12985">
<DESCRIPTION DESCRIPTION="(d) Coma"/>
<STATENAME>Present</STATENAME>
<STATENAME>Absent</STATENAME>
</VAR>
<VAR NAME="e" TYPE="discrete" XPOS="17305" YPOS="13240">
<DESCRIPTION DESCRIPTION="(e) Papilledema"/>
<STATENAME>Present</STATENAME>
<STATENAME>Absent</STATENAME>
</VAR>
<VAR NAME="f" TYPE="discrete" XPOS="13440" YPOS="10489">
<DESCRIPTION DESCRIPTION="(f) Asthma"/>
<STATENAME>Present</STATENAME>
<STATENAME>Absent</STATENAME>
</VAR>
</VARIABLES>
<STRUCTURE>
<ARC CHILD="b" PARENT="a"/>
<ARC CHILD="c" PARENT="a"/>
<ARC CHILD="d" PARENT="b"/>
<ARC CHILD="d" PARENT="c"/>
<ARC CHILD="e" PARENT="c"/>
</STRUCTURE>
<DISTRIBUTIONS>
<DIST TYPE="discrete">
<PRIVATE NAME="a"/>
<DPIS>
<DPI> 0.2 0.8</DPI>
</DPIS>
</DIST>
<DIST TYPE="discrete">
<PRIVATE NAME="b"/>
<DPIS>
<DPI INDEXES=" "> 0.8 0.2 </DPI>
<DPI INDEXES=" "> 0.2 0.8 </DPI>
</DPIS>
<CONDSET>
<CONDELEM NAME="a"/>
</CONDSET>
</DIST>
<DIST TYPE="discrete">
<PRIVATE NAME="c"/>
<DPIS>
<DPI INDEXES=" "> 0.2 0.8 </DPI>
<DPI INDEXES=" "> 0.05 0.95 </DPI>
</DPIS>
<CONDSET>
<CONDELEM NAME="a"/>
</CONDSET>
</DIST>
<DIST TYPE="discrete">
<PRIVATE NAME="d"/>
<DPIS>
<DPI INDEXES=" "> 0.8 0.2 </DPI>
<DPI INDEXES=" "> 0.9 0.1 </DPI>
<DPI INDEXES=" "> 0.7 0.3 </DPI>
<DPI INDEXES=" "> 0.05 0.95 </DPI>
</DPIS>
<CONDSET>
<CONDELEM NAME="b"/>
<CONDELEM NAME="c"/>
</CONDSET>
</DIST>
<DIST TYPE="discrete">
<PRIVATE NAME="e"/>
<DPIS>
<DPI INDEXES=" "> 0.8 0.2 </DPI>
<DPI INDEXES=" "> 0.6 0.4 </DPI>
</DPIS>
<CONDSET>
<CONDELEM NAME="c"/>
</CONDSET>
</DIST>
<DIST TYPE="discrete">
<PRIVATE NAME="f"/>
<DPIS>
<DPI> 0.3 0.7</DPI>
</DPIS>
</DIST>
</DISTRIBUTIONS>
</BNMODEL>
</ANALYSISNOTEBOOK>""")
        self.assertEqual(str(self.writer.__str__()[:-1]), str(etree.tostring(self.expected_xml)))
| 48.397753
| 120
| 0.424804
|
794b6b20f0ff9ea5bf027f9d9cda0640a1ac30b0
| 4,868
|
py
|
Python
|
engine/search.py
|
chasingegg/Predator
|
ce7a2239e60a69b6b573383337b88e5739f3d1cb
|
[
"MIT"
] | null | null | null |
engine/search.py
|
chasingegg/Predator
|
ce7a2239e60a69b6b573383337b88e5739f3d1cb
|
[
"MIT"
] | null | null | null |
engine/search.py
|
chasingegg/Predator
|
ce7a2239e60a69b6b573383337b88e5739f3d1cb
|
[
"MIT"
] | null | null | null |
import sqlite3
import jieba
import operator
import configparser
import math
import time
class SearchEngine:
    """Okapi BM25 ranking over two pre-built SQLite inverted indexes
    ("baike" and "zhidao").

    Configuration (index paths, corpus sizes and BM25 constants) is read
    from an INI file; queries are segmented with jieba and every query term's
    posting list contributes to a per-document score.
    """

    def __init__(self, config_path, config_encoding):
        """Load stop words, open both index databases and cache BM25 params.

        :param config_path: path to the INI configuration file
        :param config_encoding: text encoding of that file
        """
        self.config_path = config_path
        self.config_encoding = config_encoding
        config = configparser.ConfigParser()
        config.read(config_path, config_encoding)
        with open(config['DEFAULT']['stop_words_path'],
                  encoding=config['DEFAULT']['stop_words_encoding']) as f:
            self.stop_words = set(f.read().split('\n'))
        # check_same_thread=False: the connections are shared across worker
        # threads (read-only lookups).
        self.conn_baike = sqlite3.connect(config['DEFAULT']['db_path_baike'],
                                          check_same_thread=False)
        self.conn_zhidao = sqlite3.connect(config['DEFAULT']['db_path_zhidao'],
                                           check_same_thread=False)
        # Corpus sizes (document counts) and BM25 hyper-parameters per index.
        self.N_baike = int(config['DEFAULT']['n_baike'])
        self.N_zhidao = int(config['DEFAULT']['n_zhidao'])
        self.K1_baike = float(config['DEFAULT']['k1_baike'])
        self.K1_zhidao = float(config['DEFAULT']['k1_zhidao'])
        self.B_baike = float(config['DEFAULT']['b_baike'])
        self.B_zhidao = float(config['DEFAULT']['b_zhidao'])
        self.AVG_L_baike = float(config['DEFAULT']['avg_l_baike'])
        self.AVG_L_zhidao = float(config['DEFAULT']['avg_l_zhidao'])

    def __del__(self):
        # Robust to partially-initialised instances: __init__ may have raised
        # before one or both connections were created.
        for conn in (getattr(self, 'conn_baike', None),
                     getattr(self, 'conn_zhidao', None)):
            if conn is not None:
                conn.close()

    def is_number(self, s):
        """Return True if *s* parses as a float (numeric tokens are dropped)."""
        try:
            float(s)
            return True
        except ValueError:
            return False

    def clean_list(self, seg_list):
        """Normalise segmented tokens and count term frequencies.

        Drops empty strings, numbers and stop words.

        :param seg_list: iterable of raw tokens (e.g. jieba output)
        :return: (total kept token count, {term: frequency})
        """
        cleaned_dict = {}
        n = 0
        for token in seg_list:
            token = token.strip().lower()
            if token and not self.is_number(token) and token not in self.stop_words:
                n += 1
                cleaned_dict[token] = cleaned_dict.get(token, 0) + 1
        return n, cleaned_dict

    def fetch_baike(self, keyword):
        """Return the posting-list row for *keyword* from the baike index, or None."""
        c = self.conn_baike.cursor()
        c.execute('SELECT * FROM baike_postings WHERE term=?', (keyword, ))
        data = c.fetchone()
        c.close()
        return data

    def fetch_zhidao(self, keyword):
        """Return the posting-list row for *keyword* from the zhidao index, or None."""
        c = self.conn_zhidao.cursor()
        c.execute('SELECT * FROM zhidao_postings WHERE term=?', (keyword, ))
        data = c.fetchone()
        c.close()
        return data

    def _bm25(self, sentence, fetch, n_docs, k1, b, avg_l):
        """Shared Okapi BM25 scorer for both indexes.

        :param sentence: raw query text (segmented with jieba)
        :param fetch: callable returning a (term, df, postings) row or None
        :param n_docs: number of documents in the corpus
        :param k1: BM25 term-frequency saturation constant
        :param b: BM25 length-normalisation constant
        :param avg_l: average document length in the corpus
        :return: (number of hits, [(docid, score), ...] sorted by score desc)
        """
        seg_list = jieba.lcut(sentence, cut_all=False)
        _, cleaned_dict = self.clean_list(seg_list)  # query terms
        scores = {}
        for term in cleaned_dict:
            row = fetch(term)
            if row is None:  # term not in the index
                continue
            df = row[1]  # number of documents containing this term
            idf = math.log2((n_docs - df + 0.5) / (df + 0.5))
            # Postings are newline-separated "docid \t tf \t doc_length".
            for posting in row[2].split('\n'):
                docid, tf, ld = posting.split('\t')
                docid, tf, ld = int(docid), int(tf), int(ld)
                s = (k1 * tf * idf) / (tf + k1 * (1 - b + b * ld / avg_l))
                scores[docid] = scores.get(docid, 0.0) + s
        ranked = sorted(scores.items(), key=operator.itemgetter(1), reverse=True)
        return len(ranked), ranked

    def BM25_baike(self, sentence):
        """Rank baike documents for *sentence*; see :meth:`_bm25`."""
        return self._bm25(sentence, self.fetch_baike, self.N_baike,
                          self.K1_baike, self.B_baike, self.AVG_L_baike)

    def BM25_zhidao(self, sentence):
        """Rank zhidao documents for *sentence*; see :meth:`_bm25`."""
        return self._bm25(sentence, self.fetch_zhidao, self.N_zhidao,
                          self.K1_zhidao, self.B_zhidao, self.AVG_L_zhidao)
if __name__ == '__main__':
    # Smoke test: run one sample query against the baike index and print the
    # number of hits plus the length of the returned ranking (same value).
    engine = SearchEngine("./config.ini", "utf-8")
    f, score = engine.BM25_baike("篮球")
    print(f, len(score))
| 38.330709
| 132
| 0.553205
|
794b6b7f480eaacd525359901f4f3b9b41019e28
| 895
|
py
|
Python
|
build/lib/examples/forms.py
|
kimbackdoo/Web-Cralwer
|
6a92ec00ea2273f228b8c304cd596ad9120c4709
|
[
"MIT"
] | null | null | null |
build/lib/examples/forms.py
|
kimbackdoo/Web-Cralwer
|
6a92ec00ea2273f228b8c304cd596ad9120c4709
|
[
"MIT"
] | null | null | null |
build/lib/examples/forms.py
|
kimbackdoo/Web-Cralwer
|
6a92ec00ea2273f228b8c304cd596ad9120c4709
|
[
"MIT"
] | null | null | null |
from django import forms
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
from django.contrib.auth.models import User
from bootstrap_modal_forms.mixins import PopRequestMixin, CreateUpdateAjaxMixin
from bootstrap_modal_forms.forms import BSModalForm
from .models import Book
class BookForm(BSModalForm):
    """Modal (bootstrap_modal_forms) form for creating/editing a Book."""

    # Override the default error text shown for an unparsable date.
    publication_date = forms.DateField(
        error_messages={'invalid': 'Enter a valid date in YYYY-MM-DD format.'}
    )

    class Meta:
        model = Book
        exclude = ['timestamp']  # not user-editable through this form
# Field labels would be defined here.
class CustomUserCreationForm(PopRequestMixin, CreateUpdateAjaxMixin,
                             UserCreationForm):
    """Sign-up form for use inside a bootstrap modal; the mixins add the
    request-popping and AJAX create/update behaviour."""

    class Meta:
        model = User
        fields = ['username', 'password1', 'password2']
class CustomAuthenticationForm(AuthenticationForm):
    """Login form; Meta narrows the rendered fields to username/password."""

    class Meta:
        model = User
        fields = ['username', 'password']
| 27.121212
| 79
| 0.702793
|
794b6c4b3d90b6e93927dc5f937693af1ae5ae94
| 125
|
py
|
Python
|
python/testData/quickFixes/PyRemoveUnusedLocalQuickFixTest/removeAssignmentStatementTarget_after.py
|
alexey-anufriev/intellij-community
|
ffcd46f14e630acdefcc76e2bfc7c43d2449013a
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/quickFixes/PyRemoveUnusedLocalQuickFixTest/removeAssignmentStatementTarget_after.py
|
alexey-anufriev/intellij-community
|
ffcd46f14e630acdefcc76e2bfc7c43d2449013a
|
[
"Apache-2.0"
] | null | null | null |
python/testData/quickFixes/PyRemoveUnusedLocalQuickFixTest/removeAssignmentStatementTarget_after.py
|
alexey-anufriev/intellij-community
|
ffcd46f14e630acdefcc76e2bfc7c43d2449013a
|
[
"Apache-2.0"
] | 1
|
2020-10-15T05:56:42.000Z
|
2020-10-15T05:56:42.000Z
|
class MyClass(object):
    def __init__(self, **kwargs):
        # Quick-fix fixture ("..._after.py"): the unused assignment target was
        # removed, keeping only the side-effectful .pop() call.
        kwargs.pop("stuff", None)
        print("that's all folks!")
| 31.25
| 34
| 0.6
|
794b6cad4c2b6ca93147e4924d49b118d3bb58f7
| 2,127
|
py
|
Python
|
pytket/backends/measurements.py
|
Travis-S/pytket
|
ba1b63b5b22533d9366c431b91d69bf9cf77b0d7
|
[
"Apache-2.0"
] | null | null | null |
pytket/backends/measurements.py
|
Travis-S/pytket
|
ba1b63b5b22533d9366c431b91d69bf9cf77b0d7
|
[
"Apache-2.0"
] | null | null | null |
pytket/backends/measurements.py
|
Travis-S/pytket
|
ba1b63b5b22533d9366c431b91d69bf9cf77b0d7
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Cambridge Quantum Computing
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from typing import Iterable, Tuple
from pytket import Circuit
def pauli_measurement(pauli_string:Iterable[Tuple[int,str]], circ:Circuit) :
    """Rotate each listed qubit into the measurement basis of its Pauli
    operator, then measure all of them.

    :param pauli_string: (qubit index, Pauli label) pairs to measure.
    :type pauli_string: Iterable[Tuple[int,str]]
    :param circ: circuit the basis changes and measurements are appended to.
    :type circ: Circuit
    """
    targets = []
    for qubit, pauli in pauli_string:
        targets.append(qubit)
        # Z needs no rotation; X and Y are mapped onto the Z axis first.
        if pauli == 'X':
            circ.H(qubit)
        elif pauli == 'Y':
            circ.Sdg(qubit)
            circ.H(qubit)
    for qubit in targets:
        circ.Measure(qubit)
def bin_str_2_table(strings:Iterable[str]) -> np.ndarray:
    """Convert strings of measurements to a shot table.

    Each string is one shot; spaces separate register groups and are ignored.
    Rows are flipped left-to-right so columns come out in qubit order.

    :param strings: bit strings, one per shot (any iterable, incl. generators)
    :type strings: Iterable[str]
    :return: shot table, one row per shot, columns in qubit order
    :rtype: np.ndarray
    """
    # Materialise so generators work and we can index/len below.
    strings = list(strings)
    if not strings:
        # No shots: return an empty table instead of raising IndexError.
        return np.zeros((0, 0), dtype=int)
    # Infer the number of qubits from the first space-separated bit string.
    n_qubits = len(strings[0].replace(' ', ''))
    output = np.zeros((len(strings), n_qubits), dtype=int)
    for row, string in enumerate(strings):
        col = 0
        for chunk in string.split(' '):
            width = len(chunk)
            output[row, col:col + width] = np.array(list(chunk), dtype=int)
            col += width
    return np.fliplr(output)
| 34.868852
| 97
| 0.67795
|
794b6ccbc11b475c3010d30bde53c4cee26e5e5f
| 111
|
py
|
Python
|
wsgi.py
|
Schluggi/codenames
|
094f3621ad17597fede8d6438f41e60dbb1f7057
|
[
"MIT"
] | 3
|
2020-12-13T18:16:06.000Z
|
2021-04-13T09:25:23.000Z
|
wsgi.py
|
Schluggi/codenames
|
094f3621ad17597fede8d6438f41e60dbb1f7057
|
[
"MIT"
] | 12
|
2020-06-17T18:23:05.000Z
|
2022-03-12T00:52:43.000Z
|
wsgi.py
|
Schluggi/codenames
|
094f3621ad17597fede8d6438f41e60dbb1f7057
|
[
"MIT"
] | null | null | null |
# WSGI entry point: make this directory importable, then expose the app
# object under the conventional name "application" for WSGI servers.
import os
import sys

sys.path.insert(0, os.path.dirname(__file__))

# Imported after the path tweak so the "codenames" package resolves.
from codenames import app as application
| 13.875
| 45
| 0.783784
|
794b6d78e55b2350dcd3230c4fc4a3ec00e4e166
| 16,918
|
py
|
Python
|
run.py
|
my-personal-forks/dart-sublime-bundle
|
580127c13c3a97d7cb0c9ac09152f68cb665946c
|
[
"BSD-3-Clause"
] | 182
|
2017-03-05T07:43:13.000Z
|
2022-03-15T13:09:07.000Z
|
run.py
|
kkurian/dart-sublime-bundle
|
429324e43ea487f8c05a4fb7002c5fd2c8c56b42
|
[
"BSD-3-Clause"
] | 93
|
2015-01-25T00:10:01.000Z
|
2021-02-01T12:11:31.000Z
|
run.py
|
kkurian/dart-sublime-bundle
|
429324e43ea487f8c05a4fb7002c5fd2c8c56b42
|
[
"BSD-3-Clause"
] | 45
|
2015-01-25T00:18:01.000Z
|
2021-02-25T18:09:08.000Z
|
# Copyright (c) 2014, Guillermo López-Anglada. Please see the AUTHORS file for details.
# All rights reserved. Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.)
'''Builds Dart projects intelligently.
'''
import sublime
import sublime_plugin
import os
import time
import re
from Dart.sublime_plugin_lib import PluginLogger
from Dart.lib.build.base import DartBuildCommandBase
from Dart.sublime_plugin_lib.panels import OutputPanel
from Dart.lib.pub_package import DartFile
from Dart.lib.pub_package import find_pubspec
from Dart.lib.sdk import Dartium
from Dart.lib.sdk import RunDartWithObservatory
from Dart.lib.sdk import SDK
from Dart.sublime_plugin_lib.sublime import after
from Dart.sublime_plugin_lib.subprocess import GenericBinary
from Dart.lib.sdk import PubServe
from Dart.lib.event import EventSource
from Dart.lib import ga
_logger = PluginLogger(__name__)
def plugin_unloaded():
    """Sublime Text unload hook: stop any server/Observatory we spawned."""
    # Kill any existing server.
    # FIXME(guillermooo): this doesn't manage to clean up resources when
    # ST exits.
    sublime.active_window().run_command('dart_run_file', {'kill_only': True,
                                                          'file_name': '???'})
class DartRunInObservatoryCommand(sublime_plugin.WindowCommand):
    '''Runs a server app through the Observatory.

    Note:
      - We don't need this for web apps, because in that case
        the Observatory is always available in the Dartium
        Dev Tools panel.
    '''
    def is_enabled(self):
        """Enabled only for server apps that are not also web apps."""
        # TODO(guillermooo): Fix this in pub_package.DartFile
        view = self.window.active_view()
        if not view:
            return False
        dart_view = DartFile(view)
        return (not dart_view.is_web_app) and dart_view.is_server_app

    def run(self):
        """Delegate to dart_run_file with the 'secondary' (Observatory) action."""
        # TODO(guillermooo): Document this
        view = self.window.active_view()
        self.window.run_command('dart_run_file', {
            "file_name": view.file_name(),
            "action": "secondary"
            })
class ContextProvider(sublime_plugin.EventListener):
    '''Implements the 'dart_can_do_launch' context for .sublime-keymap
    files.
    '''
    def on_query_context(self, view, key, operator, operand, match_all):
        """Answer Sublime's key-binding context queries for Dart states.

        Returns None (implicitly) for keys we don't own, so other
        listeners can answer them.
        """
        if key == 'dart_can_do_launch':
            value = DartFile(view).is_runnable
            return self._check(value, operator, operand, match_all)

        if key == 'dart_can_do_relaunch':
            # Relaunch is possible if the view is runnable or we remember a
            # previously run file.
            value = (DartFile(view).is_runnable or
                     DartSmartRunCommand.last_run_file[1])
            return self._check(value, operator, operand, match_all)

        if key == 'dart_can_show_observatory':
            value = DartRunFileCommand.observatory != None
            return self._check(value, operator, operand, match_all)

        if key == 'dart_services_running':
            # True if any of our background services is active.
            value = any((DartRunFileCommand.observatory != None,
                         DartRunFileCommand.is_server_running,
                         DartRunFileCommand.is_script_running))
            return self._check(value, operator, operand, match_all)

    def _check(self, value, operator, operand, match_all):
        """Compare a truthy state against the queried operator/operand.

        Returns None for unsupported operator/operand combinations.
        """
        if operator == sublime.OP_EQUAL:
            if operand == True:
                return value
            elif operand == False:
                return not value
        elif operator == sublime.OP_NOT_EQUAL:
            if operand == True:
                return not value
            elif operand == False:
                return value
class PubServeListener(object):
    '''
    Special listener to capture 'pub serve --port=0' port information.
    Also starts Dartium.
    '''
    def __init__(self, instance, panel, path):
        # instance: the PubServe wrapper whose .port we fill in
        # panel: output panel to echo pub serve's output to
        # path: optional URL path appended when opening Dartium
        self.instance = instance
        self.panel = panel
        self.path = path

    def on_data(self, text):
        """Scan stdout for the 'Serving ... on http://host:port' line; on the
        first match record the port and launch Dartium at that URL."""
        if not self.instance.port:
            m = re.match('^Serving .*? on http://.*?:(\d+)', text)
            if m:
                self.instance.port = int(m.groups()[0])
                _logger.debug('captured pub serve port: %d' % self.instance.port)
                _logger.debug('starting dartium...')
                self.panel.write('Starting Dartium...\n')
                url = 'http://localhost:' + str(self.instance.port)
                if self.path:
                    url += '/' + self.path
                Dartium().start(url)
        self.panel.write(text)

    def on_error(self, text):
        # stderr is echoed to the same panel.
        self.panel.write(text)
class DartSmartRunCommand(DartBuildCommandBase):
    '''
    Runs the current file in the most appropriate way.
    '''
    # (is_pubspec, file_name) of the last file we ran; used for relaunch.
    last_run_file = (None, None)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.add_event_handler(EventSource.ON_DART_RUN,
                               DartSmartRunCommand.on_dart_run)

    def run(self, action='primary', force_update=False, kill_only=False):
        '''
        @action
          One of: primary, secondary

        @force_update
          Re-detect the file to run instead of reusing last_run_file.

        @kill_only
          Passed through to dart_run_file to only stop running processes.
        '''
        self.raise_event(self, EventSource.ON_DART_RUN)
        try:
            view = self.window.active_view()
        except TypeError:
            return

        if force_update or DartSmartRunCommand.last_run_file[0] is None:
            try:
                DartSmartRunCommand.last_run_file = (
                    DartFile(view).is_pubspec,
                    view.file_name())
            except TypeError:
                return

        # pubspec.yaml files get the pub-specific command; everything else
        # goes through dart_run_file.
        if DartSmartRunCommand.last_run_file[0]:
            self.window.run_command('dart_run_pubspec', {
                'action': action,
                'file_name': DartSmartRunCommand.last_run_file[1]
                })
            return

        self.window.run_command('dart_run_file', {
            'action': action,
            'file_name': DartSmartRunCommand.last_run_file[1],
            'kill_only': kill_only,
            })

    # This class will be instantiated for each view/window, so we need to
    # ensure that only one function will be registered as event handler.
    # Therefore, we use a function whose id is the same across all instances.
    @classmethod
    def on_dart_run(cls, *args, **kwargs):
        # Analytics ping for the "Run" action.
        ga.Event(category='actions',
                 action='on_dart_run',
                 label='Running "Run" command',
                 value=1,
                 ).send()
class DartRunFileCommand(DartBuildCommandBase):
    '''Runs a file with the most appropriate action.

    Runs .dart and .html files.
    '''
    # Class-level (shared) service state: at most one Observatory / pub serve
    # instance is tracked for the whole plugin.
    observatory = None
    pub_serve = None
    is_server_running = False
    is_script_running = False

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.panel = None

    def __del__(self):
        # FIXME(guillermooo): this doesn't manage to clean up resources when
        # ST exits.
        self.stop_server_observatory()
        self.execute(kill=True)

    def observatory_port(self):
        """Return the running Observatory's port, or None on any failure."""
        try:
            return DartRunFileCommand.observatory.port
        except Exception:
            _logger.error('could not retrieve Observatory port')
            return

    @property
    def pub_serve_port(self):
        """Port of the running pub serve instance, or None on any failure."""
        try:
            return DartRunFileCommand.pub_serve.port
        except Exception:
            _logger.error('could not retrieve pub serve port')
            return

    @pub_serve_port.setter
    def pub_serve_port(self, value):
        DartRunFileCommand.pub_serve.port = value

    def _cleanup(self):
        """Stop any processes we previously started (pub serve, Observatory)."""
        # Stop up any existing processes.
        if DartRunFileCommand.is_server_running:
            self.execute(kill=True)
            self.pub_serve.stop()
            DartRunFileCommand.is_server_running = False
            if self.panel:
                self.panel.write('[pub serve stopped]\n')
        self.stop_server_observatory()

    def _kill(self):
        """Kill the exec-managed script process, if any."""
        self.window.run_command("dart_exec", {
            "kill": True
            })
        DartRunFileCommand.is_script_running = False

    def run(self, file_name=None, action='primary', kill_only=False):
        '''
        @action
          One of: [primary, secondary]

        @kill_only
          If `True`, simply kill any running processes we've started.
        '''
        assert kill_only or file_name, 'wrong call'
        self._cleanup()
        if kill_only:
            self._kill()
            return

        # Prefer the pub package root as the working directory; fall back to
        # the file's own directory when no pubspec can be found.
        working_dir = None
        try:
            working_dir = os.path.dirname(find_pubspec(file_name))
        except (TypeError, AttributeError):
            try:
                if not working_dir:
                    working_dir = os.path.dirname(file_name)
            except TypeError as e:
                _logger.debug('cannot run an unsaved file')
                _logger.debug(e)
                return
            except Exception as e:
                _logger.error('programmer error: this exception needs to be handled')
                _logger.error(e)
                return
        except Exception as e:
            _logger.error('programmer error: this exception needs to be handled')
            _logger.error(e)
            return

        dart_view = DartFile.from_path(file_name)

        if dart_view.is_server_app:
            self.run_server_app(file_name, working_dir, action)
            return

        if dart_view.is_web_app:
            self.run_web_app(dart_view, working_dir, action)
            return

        # TODO: improve detection of runnable file (for example, don't attempt
        # to run a part of a library).
        # At this point, we are looking at a file that either:
        #   - is not a .dart or .html file
        #   - is outside of a pub package
        # As a last resort, try to run the file as a script.
        if action != 'primary' or not dart_view.is_dart_file:
            print("Dart: Cannot determine best action for {}".format(
                dart_view.path
                ))
            _logger.debug("cannot determine best run action for %s",
                          dart_view.path)
            return

        self.run_server_app(file_name, working_dir, action)

    def start_default_browser(self, file_name):
        """Open the user's configured browser at the local pub serve URL."""
        sdk = SDK()

        if not sdk.path_to_default_user_browser:
            _logger.info('no default user browser defined')
            print("Dart: No default user browser defined "
                  "in Dart plugin settings")
            return

        dart_view = DartFile.from_path(file_name)
        url = 'http://localhost:8080'
        if dart_view.url_path:
            url = url + "/" + dart_view.url_path

        # TODO(guillermooo): make GUIProcess wrapper to abstract out some of
        # the stuff below?
        if sublime.platform() == 'osx':
            bin_ = GenericBinary('open', sdk.path_to_default_user_browser)
            # Delayed so the server has time to come up first.
            after(1000, lambda: bin_.start(args=[url]))
            return

        elif sublime.platform() == 'windows':
            # FIXME(guillermooo): On Windows, Firefox won't work when started
            # from the cmdline only. If it's started first from the shell, it
            # will work here as well.
            path = sdk.path_to_default_user_browser
            bin_ = GenericBinary(path)
            after(1000, lambda: bin_.start(
                args=[url],
                shell=True,
                cwd=os.path.dirname(path),
                ))
            return

        # Any other platform (e.g. Linux).
        path = sdk.path_to_default_user_browser
        bin_ = GenericBinary(path)
        after(1000, lambda: bin_.start(
            args=[url],
            shell=True,
            cwd=os.path.dirname(path),
            ))

    def run_server_app(self, file_name, working_dir, action):
        """Run a command-line Dart script; 'secondary' runs it under the
        Observatory and opens Dartium at the Observatory URL."""
        if action == 'secondary':
            # run with observatory
            # we need to do additional processing in this case, so we don't
            # use the regular .execute() method to manage the subprocess.
            self.panel = OutputPanel('dart.out')
            self.panel.write('=' * 80)
            self.panel.write('\n')
            self.panel.write('Running dart with Observatory.\n')
            self.panel.write('=' * 80)
            self.panel.write('\n')
            self.panel.show()
            DartRunFileCommand.observatory = RunDartWithObservatory(
                file_name,
                cwd=working_dir,
                listener=self)
            DartRunFileCommand.observatory.start()

            def start_dartium():
                d = Dartium()
                port = DartRunFileCommand.observatory.port
                if port is None:
                    _logger.debug('could not capture observatory port')
                    print("Dart: Cannot start Observatory "
                          "because its port couldn't be retrieved")
                    return
                d.start('http://localhost:{}'.format(port))

            # Delayed so the Observatory has time to announce its port.
            after(1000, lambda: start_dartium())
            return

        preamble = '''* Running {0}
* (output starts below the next line)
==============================================================================
'''

        # TODO(guillermooo): improve event args
        self.execute(
            cmd=[SDK().path_to_dart, '--checked', file_name],
            working_dir=working_dir,
            file_regex=r"'file:///(.+)': error: line (\d+) pos (\d+): (.*)$",
            preamble=preamble.format(file_name),
            )
        DartRunFileCommand.is_script_running = True

    def run_web_app(self, dart_view, working_dir, action):
        """Serve a web app with pub serve; 'secondary' opens the user's
        browser at :8080, default opens Dartium at the captured port."""
        sdk = SDK()

        if action == 'secondary':
            if not sdk.path_to_default_user_browser:
                print("Dart: No default browser found")
                _logger.info('no default browser found')
                return

            cmd=[sdk.path_to_pub, 'serve']
            if dart_view.is_example:
                cmd.append('example')
            self.execute(cmd=cmd, working_dir=working_dir)
            DartRunFileCommand.is_server_running = True
            self.start_default_browser(dart_view.path)
            return

        self.panel = OutputPanel('dart.out')
        self.panel.write('=' * 80)
        self.panel.write('\n')
        self.panel.write('Running pub serve...\n')
        self.panel.write('=' * 80)
        self.panel.write('\n')
        self.panel.show()
        DartRunFileCommand.pub_serve = PubServe(
            cwd=working_dir,
            is_example=dart_view.is_example,
            )
        # The listener captures the ephemeral port and launches Dartium.
        pub_serve_listener = PubServeListener(DartRunFileCommand.pub_serve,
                                              self.panel,
                                              dart_view.url_path)
        DartRunFileCommand.pub_serve.listener = pub_serve_listener
        DartRunFileCommand.pub_serve.start()
        DartRunFileCommand.is_server_running = True

    def stop_server_observatory(self):
        """Stop the Observatory process (if running) and clear its handle."""
        if DartRunFileCommand.observatory:
            DartRunFileCommand.observatory.stop()
            DartRunFileCommand.observatory = None
            if self.panel:
                self.panel.write('[Observatory stopped]\n')

    def on_data(self, text):
        # Subprocess stdout is echoed to our panel.
        self.panel.write(text)

    def on_error(self, text):
        # Subprocess stderr is echoed to the same panel.
        self.panel.write(text)
class DartRunPubspecCommand(DartBuildCommandBase):
    '''Performs actions on a pubspec.yaml file.
    '''
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    # pub subcommands offered in the quick panel for the 'secondary' action.
    PUB_CMDS = [
        'deps',
        'help',
        'upgrade',
        'version',
        ]

    def run(self, action, file_name):
        '''
        @action
          One of: primary, secondary

        @file_name
          A valid path.
        '''
        working_dir = os.path.dirname(file_name)

        # primary == `pub get`
        if action == 'primary':
            self.execute(
                cmd=[SDK().path_to_pub] + ['get'],
                working_dir=working_dir,
                preamble="Running pub...\n",
                panel_name='dart.out',
                )
            return

        if action != 'secondary':
            _logger.error('not implemented')
            return

        # secondary == let the user pick a pub subcommand.
        f = lambda i: self.on_done(i, file_name, working_dir)
        self.window.show_quick_panel(self.PUB_CMDS, f)

    def on_done(self, idx, file_name, working_dir):
        """Quick-panel callback: run the chosen pub subcommand (idx == -1
        means the panel was dismissed)."""
        if idx == -1:
            return

        self.execute(
            cmd=[SDK().path_to_pub] + [self.PUB_CMDS[idx]],
            working_dir=os.path.dirname(file_name),
            preamble="Running pub...\n",
            panel_name='dart.out',
            )
| 34.177778
| 87
| 0.56236
|
794b6dba959eb1db7d5f62c640e8fd55ff0c0786
| 15,498
|
py
|
Python
|
src/webapp/sample_app/manager/classify.py
|
Chibikuri/Quantum-Othello
|
ddbb54819181c5c0f4efdb5395fdd482e83ffccd
|
[
"Apache-2.0"
] | 2
|
2019-12-06T09:03:48.000Z
|
2020-02-22T00:27:24.000Z
|
src/webapp/sample_app/manager/classify.py
|
Chibikuri/Quantum-Othello
|
ddbb54819181c5c0f4efdb5395fdd482e83ffccd
|
[
"Apache-2.0"
] | null | null | null |
src/webapp/sample_app/manager/classify.py
|
Chibikuri/Quantum-Othello
|
ddbb54819181c5c0f4efdb5395fdd482e83ffccd
|
[
"Apache-2.0"
] | 2
|
2019-11-19T06:26:09.000Z
|
2019-11-19T06:27:56.000Z
|
# -*- coding: utf-8 -*-
import warnings
# warnings.filterwarnings('ignore')
import matplotlib
import numpy as np
import matplotlib.pyplot as plt
import sys
import os
import np
import time
import datetime
import random
import np
import pandas as pd
import multiprocessing as mul
import umap
import csv
import pandas as pd
from scipy.sparse.csgraph import connected_components
from notification import Notify
from scipy.special import expit
from multiprocessing import pool
from pprint import pprint
from sklearn import datasets
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
from sklearn.manifold import TSNE
from qiskit import IBMQ, QuantumCircuit, ClassicalRegister, QuantumRegister
from qiskit import execute, Aer, compile
from numpy import pi
from qiskit.tools.visualization import plot_histogram, circuit_drawer
from qiskit.tools.visualization import matplotlib_circuit_drawer
from numba import jit
from matplotlib.colors import ListedColormap as clp
class QVC:
def __init__(self, qubits, cbits, target, shots, l_iteration, dimension, n_class):
'''
This is initial config.
qubits, cbits: the instance of qubits, classical bits
qc: the name of circuit
num_q, num_c: the number of qubits, cbits
train, test: the directory of training data, test data
'''
self.q = QuantumRegister(qubits)
self.c = ClassicalRegister(cbits)
self.qc = QuantumCircuit(self.q, self.c)
self.num_q = qubits
self.num_c = cbits
self.target = target
self.shots = shots
self.l_iter = l_iteration
self.dim = dimension
self.n_class = n_class
def _reduce_dimension(self):
pass
def _feature_map(self, qc, S, data_angle):
'''
Quantum State Mapping
apply feature map circuit(fig1 b)
using two qubits for making reading feature map
1.applying feature map circuit to 0state. <-- in this part
2.training theta
3.measurement
4.fitting cost function
qc : circuit name
S : the number of test data set
x_angle : the angle for fitting traingin data sets
### In this paper, x_angle is artifitially generated.
'''
q = self.q
n = self.num_q
# TODO understand how to decide theta of u1
for i in range(n):
qc.h(q[i])
qc.u1(data_angle[0], q[0])
for j in range(S-1):
qc.cx(q[j], q[j+1])
qc.u1(data_angle[j], q[j+1])
qc.cx(q[j], q[j+1])
qc.u1(data_angle[S-1], q[S-1])
for i in range(n):
qc.h(q[i])
qc.u1(data_angle[0], q[0])
for j in range(S-1):
qc.cx(q[j], q[j+1])
qc.u1(data_angle[j], q[j+1])
qc.cx(q[j], q[j+1])
qc.u1(data_angle[S-1], q[S-1])
return qc
def _w_circuit(self, qc, theta_list):
'''
repeat this circuit for l times to classify
qc: The name of quantum circuit
theta : the angle for u3gate
'''
q = self.q
n = self.num_q
# TODO how to tune the theta_list
# for ini in range(n):
# qc.u3(theta_list[ini], 0, theta_list[ini], q[ini]) # FIXME This part is wrong? should be one time but this part is apply many times.
for iter in range(self.l_iter):
for j in range(1, n):
qc.cz(q[j-1], q[j])
qc.cz(q[n-1], q[0])
# TODO how to decide lambda of u3?
for m in range(n):
qc.u3(0, 0, theta_list[m], q[m])
return qc
def _R_emp(self, distribution, y, bias):
'''
this is cost fucntion for optimize theta(lambda)
theta: lambda for u3 gate
'''
a_1 = (np.sqrt(self.shots)*(1/2-(distribution-y*bias/2)))
a_2 = np.sqrt(abs(2*(1-distribution)*distribution))
sig = expit(a_1/a_2) # expit is sigmoid function
print(sig)
# FIXME 1/T should multiplied by the sum of emps?
return 1/self.dim * sig
def _multi_emp_cost(self, count, correct_class):
binlabel = self._label2binary(correct_class)
print(max(count, key=count.get))
n_c = count[binlabel]
oth_dict = count.pop(binlabel)
at = (np.sqrt(self.shots)*max(count.values())-n_c)
bt = np.sqrt(2*(self.shots-n_c)*n_c)
return expit(at/bt)
# @jit
def _multic_cost(self, val_list, correct_class):
# print(val_list)
n_c = val_list[correct_class]
_ = val_list.pop(correct_class)
at = (np.sqrt(self.shots)*max(val_list)-n_c)
bt = np.sqrt(2*(self.shots-n_c)*n_c)
return expit(at/bt)
def _label2binary(self, correct_class):
'''
maybe no need this function.
input: class label ex 3
-------------------------------------
output: binary(String) ex.'0000000100'
correct class -> binary label boolean
now:10qubit # FIXME fir for any qubits.
'''
if correct_class == 0:
return '0'*self.dim
else:
return '0'*(self.dim-correct_class)+'1'+'0'*(correct_class-1)
def _aggregate(self, count, labels):
'''
input:count
output:list(aggregate by number)
'''
values = []
for k in labels:
rc = 0
for i, j in zip(count.keys(), count.values()):
if list(i)[self.dim-1-k] == '1': # FIXME wrong?
rc += (j/self.shots)
values.append(rc)
return values
def fit(self, x_data, y_data, labels):
''' training and fitting parameter
1.applying feature map circuit to 0state.
2.training theta <-- in this part
3.measurement
4.fitting cost function
'''
initial_theta = [0.01]*self.num_q
b = list(np.arange(-1, 1, 0.1))
x_data = zip(*[iter(x_data)]*3)
y_data = zip(*[iter(y_data)]*3)
while True:
count = 0
params = []
emp_cost = [99, 99]
theta_l = [initial_theta, initial_theta]
for training_data, t_label in zip(x_data, y_data): # like(1, 3, 4)
fit_theta = self._fitting_theta(theta_l, emp_cost, count)
# print("fit!", fit_theta)
count_results = self._circuit(fit_theta, list(training_data)) # array
# print(theta_l)
theta_l.append(fit_theta)
bias = random.choice(b)
# print(t_label)
for i, j in zip(count_results, t_label):
count_vals = self._aggregate(i, labels)
empirical = self._multic_cost(count_vals, list(t_label).index(j))
emp_cost.append(empirical)
# print(emp_cost)
count += 1
print("="*25, count, "="*25)
if self.isOptimized(min(emp_cost)):
break
index = np.array(emp_cost).argmin()
# print("min 1", theta_l[-1])
return theta_l[-1]
def isOptimized(self, empirical_cost):
'''
This fucntion is for checking R_empirical is optimized or not.
empirical_cost : the value of R_emp()
'''
# return True
# if len(empirical_cost) > 3:
# if empirical_cost[-1] == min(empirical_cost):
# return True
# else:
# return False
# if len(empirical_cost) > 5:
# return True
return True
def _fitting_theta(self, theta_list, Rempirical_cost, count):
# print("fit_theta!", theta_list)
# print("emps!", Rempirical_cost)
theta_range = 2*self.dim*(self.l_iter+1)
interval = 2*pi/theta_range
index = np.mod(count, self.dim+1)
sum_list = [interval if i == index else 0 for i in range(self.dim)]
n_thetalist = np.array(theta_list[-2]) + np.array(sum_list)
theta_list.append(list(n_thetalist))
if Rempirical_cost[-1] < Rempirical_cost[-2]:
return theta_list[-1]
else:
return theta_list[-2]
def _circuit(self, theta_list, training_data):
qc = self.qc
q = self.q
c = self.c
# TODO we have to chenge the angle of feature map for each data.
# TODO multi circuit
mean = np.median(training_data, axis=0)
# feature_angle = [((mean - (training_data[i]))**2) for i in range(self.dim)]
# feature_angle = [(np.sin(training_data[0]))*(np.sin(training_data[1]))*(np.sin(training_data[2])) for i in range(3)]
qc_list = []
for data in training_data:
# print(data)
feature_angle = [(pi - 1/np.exp(i)) for i in data]
self._feature_map(qc, self.dim, feature_angle)
self._w_circuit(qc, theta_list)
qc.measure(q, c)
qc_list.append(qc)
backends = ['ibmq_20_tokyo',
'qasm_simulator',
'ibmqx_hpc_qasm_simulator',
'statevector_simulator']
backend_options = {'max_parallel_threads': 0,
'max_parallel_experiments': 0,
'max_parallel_shots': 0,
'statevector_parallel_threshold': 12}
backend = Aer.get_backend(backends[1])
qobj_list = [compile(qc, backend) for qc in qc_list]
count_list = []
job_list = [backend.run(qobj) for qobj in qobj_list]
for job in job_list:
counts = job.result().get_counts()
# print([(k,v) for k, v in counts.items() if v > 10])
count_list.append(counts)
# print(count_list)
return count_list
def _test_circuit(self, theta_list, test_data):
qc = self.qc
q = self.q
c = self.c
# TODO we have to chenge the angle of feature map for each data.
# TODO multi circuit
# mean = np.median(training_data, axis=0)
# feature_angle = [((mean - (training_data[i]))**2) for i in range(self.dim)]
# feature_angle = [(np.sin(training_data[0]))*(np.sin(training_data[1]))*(np.sin(training_data[2])) for i in range(3)]
feature_angle = [(pi - np.sin(i)) for i in test_data]
self._feature_map(qc, self.dim, feature_angle)
self._w_circuit(qc, theta_list)
qc.measure(q, c)
# qc_list.append(qc)
backends = ['ibmq_20_tokyo',
'qasm_simulator',
'ibmqx_hpc_qasm_simulator',
'statevector_simulator']
backend_options = {'max_parallel_threads': 0,
'max_parallel_experiments': 0,
'max_parallel_shots': 0,
'statevector_parallel_threshold': 12}
backend = Aer.get_backend(backends[1])
exec = execute(qc, backend, shots=self.shots, config=backend_options)
result = exec.result()
counts = result.get_counts(qc)
# print([k for k, v in counts.items() if v > 10])
return counts
def predict(self, test_data, theta_list, label):
# FIXME have to modify add testdata and
# for p in parameter:
vals = []
# for theta in theta_list:
count_results = self._test_circuit(theta_list, test_data)
test_val = self._aggregate(count_results, label)
answer = label[np.array(test_val).argmax()]
return answer
@staticmethod
def calc_accuracy(labels, test_y):
correct_answer = 0
for i, j in zip(labels, test_y):
if i == j:
correct_answer += 1
return correct_answer/len(test_y)
def visualize(self, x, y, theta, bias, resolution=0.5):
# print(x)
markers = ('o', 'x')
cmap = clp(('red', 'blue'))
x1_min, x1_max = x[:, 0].min()-1, x[:, 0].max()+1
x2_min, x2_max = x[:, 1].min()-1, x[:, 1].max()+1
x1_mesh, x2_mesh = np.meshgrid(np.arange(x1_min, x1_max, resolution),
np.arange(x2_min, x2_max, resolution))
z = self.predict(np.array([x1_mesh.ravel(), x2_mesh.ravel()]).T, theta, bias)
z = np.array(z)
z = z.reshape(x1_mesh.shape)
# print(z)
plt.contourf(x1_mesh, x2_mesh, z, alpha=0.4, cmap=cmap)
plt.xlim(x1_mesh.min(), x1_mesh.max())
plt.ylim(x2_mesh.min(), x2_mesh.max())
@staticmethod
def _sigmoid(x):
return 1/(1+np.exp(-x))
@staticmethod
def _ReLU(x):
return max(0, x)
@staticmethod
def _ELU(x):
if x > 0:
return x
else:
return np.exp(x) - 1
@staticmethod
def circle_data(r):
x = np.arange(-r, r, r/100)
print(np.sqrt((r**2)-(x**2)), -np.sqrt((r**2)-(x**2)))
return x, np.array(np.sqrt((r**2)-(x**2))), np.array(-np.sqrt((r**2)-(x**2)))
def wrapper(self, args):
return self.fit(*args)
def multi_process(self, data_list):
p = mul.Pool(8)
output = p.map(self.wrapper, data_list)
p.close()
return output
if __name__ == '__main__':
print('start')
start = time.time()
fig = plt.figure()
# mnist dataset
digits = datasets.load_digits()
x_data = digits.data[0:100]
y_d = digits.target[0:100]
labels = (2, 3, 7)
x_list = []
y_list = []
for i, j in zip(x_data, y_d):
if j in labels:
x_list.append(i)
y_list.append(j)
x_data = umap.UMAP(n_neighbors=20,
n_components=10,
min_dist=0.01,
metric='correlation').fit_transform(x_list, y=y_list)
parameters = []
sc = StandardScaler()
sc.fit(x_data)
x_data = sc.transform(x_data)
# labels = random.sample(range(10), k=3)
x_train, x_test, y_train, y_test = train_test_split(x_data,
y_list,
test_size=0.1,
shuffle=False)
dim = len(x_data[0])
theta_list = []
test = QVC(dim, dim, ["0"*dim, "1"*dim], 16384, 1, dim, max(y_d))
parameter = test.fit(x_train, y_train, labels)
# theta_list.append(parameter)
# print("theta", theta_list)
count = 1
answers = []
print("param!",parameter)
for i in x_test:
prob_all = []
print("="*25, "test", count, "="*25)
label = test.predict(i, parameter, labels)
answers.append(label)
count += 1
acc = test.calc_accuracy(answers, y_test)
Notify.notify(acc)
print(acc)
print(answers)
print(y_test)
print(parameters)
# df = pd.DataFrame([[acc], [parameters]])
# print(df)
df.to_csv('./data/%sn%s.csv' % ('237', str(sys.argv[1])))
print(time.time() - start)
| 35.142857
| 147
| 0.542199
|
794b6dc589da63878d279a4a4ec647b1006259d1
| 52,161
|
py
|
Python
|
seaborn/_core.py
|
matzegoebel/seaborn
|
b1fee86fdf1c96159ebb01a20be14ba136df3431
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
seaborn/_core.py
|
matzegoebel/seaborn
|
b1fee86fdf1c96159ebb01a20be14ba136df3431
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
seaborn/_core.py
|
matzegoebel/seaborn
|
b1fee86fdf1c96159ebb01a20be14ba136df3431
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
import warnings
import itertools
from copy import copy
from functools import partial
from collections.abc import Iterable, Sequence, Mapping
from numbers import Number
from datetime import datetime
from distutils.version import LooseVersion
import numpy as np
import pandas as pd
import matplotlib as mpl
from ._decorators import (
share_init_params_with_map,
)
from .palettes import (
QUAL_PALETTES,
color_palette,
)
from .utils import (
get_color_cycle,
remove_na,
)
class SemanticMapping:
"""Base class for mapping data values to plot attributes."""
# -- Default attributes that all SemanticMapping subclasses must set
# Whether the mapping is numeric, categorical, or datetime
map_type = None
# Ordered list of unique values in the input data
levels = None
# A mapping from the data values to corresponding plot attributes
lookup_table = None
def __init__(self, plotter):
# TODO Putting this here so we can continue to use a lot of the
# logic that's built into the library, but the idea of this class
# is to move towards semantic mappings that are agnositic about the
# kind of plot they're going to be used to draw.
# Fully achieving that is going to take some thinking.
self.plotter = plotter
def map(cls, plotter, *args, **kwargs):
# This method is assigned the __init__ docstring
method_name = "_{}_map".format(cls.__name__[:-7].lower())
setattr(plotter, method_name, cls(plotter, *args, **kwargs))
return plotter
def _lookup_single(self, key):
"""Apply the mapping to a single data value."""
return self.lookup_table[key]
def __call__(self, key, *args, **kwargs):
"""Get the attribute(s) values for the data key."""
if isinstance(key, (list, np.ndarray, pd.Series)):
return [self._lookup_single(k, *args, **kwargs) for k in key]
else:
return self._lookup_single(key, *args, **kwargs)
@share_init_params_with_map
class HueMapping(SemanticMapping):
"""Mapping that sets artist colors according to data values."""
# A specification of the colors that should appear in the plot
palette = None
# An object that normalizes data values to [0, 1] range for color mapping
norm = None
# A continuous colormap object for interpolating in a numeric context
cmap = None
def __init__(
self, plotter, palette=None, order=None, norm=None,
):
"""Map the levels of the `hue` variable to distinct colors.
Parameters
----------
# TODO add generic parameters
"""
super().__init__(plotter)
data = plotter.plot_data.get("hue", pd.Series(dtype=float))
if data.notna().any():
map_type = self.infer_map_type(
palette, norm, plotter.input_format, plotter.var_types["hue"]
)
# Our goal is to end up with a dictionary mapping every unique
# value in `data` to a color. We will also keep track of the
# metadata about this mapping we will need for, e.g., a legend
# --- Option 1: numeric mapping with a matplotlib colormap
if map_type == "numeric":
data = pd.to_numeric(data)
levels, lookup_table, norm, cmap = self.numeric_mapping(
data, palette, norm,
)
# --- Option 2: categorical mapping using seaborn palette
elif map_type == "categorical":
cmap = norm = None
levels, lookup_table = self.categorical_mapping(
data, palette, order,
)
# --- Option 3: datetime mapping
else:
# TODO this needs actual implementation
cmap = norm = None
levels, lookup_table = self.categorical_mapping(
# Casting data to list to handle differences in the way
# pandas and numpy represent datetime64 data
list(data), palette, order,
)
self.map_type = map_type
self.lookup_table = lookup_table
self.palette = palette
self.levels = levels
self.norm = norm
self.cmap = cmap
def _lookup_single(self, key):
"""Get the color for a single value, using colormap to interpolate."""
try:
# Use a value that's in the original data vector
value = self.lookup_table[key]
except KeyError:
# Use the colormap to interpolate between existing datapoints
# (e.g. in the context of making a continuous legend)
try:
normed = self.norm(key)
except TypeError as err:
if np.isnan(key):
value = (0, 0, 0, 0)
else:
raise err
else:
if np.ma.is_masked(normed):
normed = np.nan
value = self.cmap(normed)
return value
def infer_map_type(self, palette, norm, input_format, var_type):
"""Determine how to implement the mapping."""
if palette in QUAL_PALETTES:
map_type = "categorical"
elif norm is not None:
map_type = "numeric"
elif isinstance(palette, (dict, list)):
map_type = "categorical"
elif input_format == "wide":
map_type = "categorical"
else:
map_type = var_type
return map_type
def categorical_mapping(self, data, palette, order):
"""Determine colors when the hue mapping is categorical."""
# -- Identify the order and name of the levels
levels = categorical_order(data, order)
n_colors = len(levels)
# -- Identify the set of colors to use
if isinstance(palette, dict):
missing = set(levels) - set(palette)
if any(missing):
err = "The palette dictionary is missing keys: {}"
raise ValueError(err.format(missing))
lookup_table = palette
else:
if palette is None:
if n_colors <= len(get_color_cycle()):
colors = color_palette(None, n_colors)
else:
colors = color_palette("husl", n_colors)
elif isinstance(palette, list):
if len(palette) != n_colors:
err = "The palette list has the wrong number of colors."
raise ValueError(err)
colors = palette
else:
colors = color_palette(palette, n_colors)
lookup_table = dict(zip(levels, colors))
return levels, lookup_table
def numeric_mapping(self, data, palette, norm):
"""Determine colors when the hue variable is quantitative."""
if isinstance(palette, dict):
# The presence of a norm object overrides a dictionary of hues
# in specifying a numeric mapping, so we need to process it here.
levels = list(sorted(palette))
colors = [palette[k] for k in sorted(palette)]
cmap = mpl.colors.ListedColormap(colors)
lookup_table = palette.copy()
else:
# The levels are the sorted unique values in the data
levels = list(np.sort(remove_na(data.unique())))
# --- Sort out the colormap to use from the palette argument
# Default numeric palette is our default cubehelix palette
# TODO do we want to do something complicated to ensure contrast?
palette = "ch:" if palette is None else palette
if isinstance(palette, mpl.colors.Colormap):
cmap = palette
else:
cmap = color_palette(palette, as_cmap=True)
# Now sort out the data normalization
if norm is None:
norm = mpl.colors.Normalize()
elif isinstance(norm, tuple):
norm = mpl.colors.Normalize(*norm)
elif not isinstance(norm, mpl.colors.Normalize):
err = "``hue_norm`` must be None, tuple, or Normalize object."
raise ValueError(err)
if not norm.scaled():
norm(np.asarray(data.dropna()))
lookup_table = dict(zip(levels, cmap(norm(levels))))
return levels, lookup_table, norm, cmap
@share_init_params_with_map
class SizeMapping(SemanticMapping):
"""Mapping that sets artist sizes according to data values."""
# An object that normalizes data values to [0, 1] range
norm = None
def __init__(
self, plotter, sizes=None, order=None, norm=None,
):
"""Map the levels of the `size` variable to distinct values.
Parameters
----------
# TODO add generic parameters
"""
super().__init__(plotter)
data = plotter.plot_data.get("size", pd.Series(dtype=float))
if data.notna().any():
map_type = self.infer_map_type(
norm, sizes, plotter.var_types["size"]
)
# --- Option 1: numeric mapping
if map_type == "numeric":
levels, lookup_table, norm = self.numeric_mapping(
data, sizes, norm,
)
# --- Option 2: categorical mapping
elif map_type == "categorical":
levels, lookup_table = self.categorical_mapping(
data, sizes, order,
)
# --- Option 3: datetime mapping
# TODO this needs an actual implementation
else:
levels, lookup_table = self.categorical_mapping(
# Casting data to list to handle differences in the way
# pandas and numpy represent datetime64 data
list(data), sizes, order,
)
self.map_type = map_type
self.levels = levels
self.norm = norm
self.sizes = sizes
self.lookup_table = lookup_table
def infer_map_type(self, norm, sizes, var_type):
if norm is not None:
map_type = "numeric"
elif isinstance(sizes, (dict, list)):
map_type = "categorical"
else:
map_type = var_type
return map_type
def _lookup_single(self, key):
try:
value = self.lookup_table[key]
except KeyError:
normed = self.norm(key)
if np.ma.is_masked(normed):
normed = np.nan
size_values = self.lookup_table.values()
size_range = min(size_values), max(size_values)
value = size_range[0] + normed * np.ptp(size_range)
return value
def categorical_mapping(self, data, sizes, order):
levels = categorical_order(data, order)
if isinstance(sizes, dict):
# Dict inputs map existing data values to the size attribute
missing = set(levels) - set(sizes)
if any(missing):
err = f"Missing sizes for the following levels: {missing}"
raise ValueError(err)
lookup_table = sizes.copy()
elif isinstance(sizes, list):
# List inputs give size values in the same order as the levels
if len(sizes) != len(levels):
err = "The `sizes` list has the wrong number of values."
raise ValueError(err)
lookup_table = dict(zip(levels, sizes))
else:
if isinstance(sizes, tuple):
# Tuple input sets the min, max size values
if len(sizes) != 2:
err = "A `sizes` tuple must have only 2 values"
raise ValueError(err)
elif sizes is not None:
err = f"Value for `sizes` not understood: {sizes}"
raise ValueError(err)
else:
# Otherwise, we need to get the min, max size values from
# the plotter object we are attached to.
# TODO this is going to cause us trouble later, because we
# want to restructure things so that the plotter is generic
# across the visual representation of the data. But at this
# point, we don't know the visual representation. Likely we
# want to change the logic of this Mapping so that it gives
# points on a nornalized range that then gets unnormalized
# when we know what we're drawing. But given the way the
# package works now, this way is cleanest.
sizes = self.plotter._default_size_range
# For categorical sizes, use regularly-spaced linear steps
# between the minimum and maximum sizes. Then reverse the
# ramp so that the largest value is used for the first entry
# in size_order, etc. This is because "ordered" categoricals
# are often though to go in decreasing priority.
sizes = np.linspace(*sizes, len(levels))[::-1]
lookup_table = dict(zip(levels, sizes))
return levels, lookup_table
def numeric_mapping(self, data, sizes, norm):
if isinstance(sizes, dict):
# The presence of a norm object overrides a dictionary of sizes
# in specifying a numeric mapping, so we need to process it
# dictionary here
levels = list(np.sort(list(sizes)))
size_values = sizes.values()
size_range = min(size_values), max(size_values)
else:
# The levels here will be the unique values in the data
levels = list(np.sort(remove_na(data.unique())))
if isinstance(sizes, tuple):
# For numeric inputs, the size can be parametrized by
# the minimum and maximum artist values to map to. The
# norm object that gets set up next specifies how to
# do the mapping.
if len(sizes) != 2:
err = "A `sizes` tuple must have only 2 values"
raise ValueError(err)
size_range = sizes
elif sizes is not None:
err = f"Value for `sizes` not understood: {sizes}"
raise ValueError(err)
else:
# When not provided, we get the size range from the plotter
# object we are attached to. See the note in the categorical
# method about how this is suboptimal for future development.:
size_range = self.plotter._default_size_range
# Now that we know the minimum and maximum sizes that will get drawn,
# we need to map the data values that we have into that range. We will
# use a matplotlib Normalize class, which is typically used for numeric
# color mapping but works fine here too. It takes data values and maps
# them into a [0, 1] interval, potentially nonlinear-ly.
if norm is None:
# Default is a linear function between the min and max data values
norm = mpl.colors.Normalize()
elif isinstance(norm, tuple):
# It is also possible to give different limits in data space
norm = mpl.colors.Normalize(*norm)
elif not isinstance(norm, mpl.colors.Normalize):
err = f"Value for size `norm` parameter not understood: {norm}"
raise ValueError(err)
else:
# If provided with Normalize object, copy it so we can modify
norm = copy(norm)
# Set the mapping so all output values are in [0, 1]
norm.clip = True
# If the input range is not set, use the full range of the data
if not norm.scaled():
norm(levels)
# Map from data values to [0, 1] range
sizes_scaled = norm(levels)
# Now map from the scaled range into the artist units
if isinstance(sizes, dict):
lookup_table = sizes
else:
lo, hi = size_range
sizes = lo + sizes_scaled * (hi - lo)
lookup_table = dict(zip(levels, sizes))
return levels, lookup_table, norm
@share_init_params_with_map
class StyleMapping(SemanticMapping):
"""Mapping that sets artist style according to data values."""
# Style mapping is always treated as categorical
map_type = "categorical"
def __init__(
self, plotter, markers=None, dashes=None, order=None,
):
"""Map the levels of the `style` variable to distinct values.
Parameters
----------
# TODO add generic parameters
"""
super().__init__(plotter)
data = plotter.plot_data.get("style", pd.Series(dtype=float))
if data.notna().any():
# Cast to list to handle numpy/pandas datetime quirks
if variable_type(data) == "datetime":
data = list(data)
# Find ordered unique values
levels = categorical_order(data, order)
markers = self._map_attributes(
markers, levels, unique_markers(len(levels)), "markers",
)
dashes = self._map_attributes(
dashes, levels, unique_dashes(len(levels)), "dashes",
)
# Build the paths matplotlib will use to draw the markers
paths = {}
filled_markers = []
for k, m in markers.items():
if not isinstance(m, mpl.markers.MarkerStyle):
m = mpl.markers.MarkerStyle(m)
paths[k] = m.get_path().transformed(m.get_transform())
filled_markers.append(m.is_filled())
# Mixture of filled and unfilled markers will show line art markers
# in the edge color, which defaults to white. This can be handled,
# but there would be additional complexity with specifying the
# weight of the line art markers without overwhelming the filled
# ones with the edges. So for now, we will disallow mixtures.
if any(filled_markers) and not all(filled_markers):
err = "Filled and line art markers cannot be mixed"
raise ValueError(err)
lookup_table = {}
for key in levels:
lookup_table[key] = {}
if markers:
lookup_table[key]["marker"] = markers[key]
lookup_table[key]["path"] = paths[key]
if dashes:
lookup_table[key]["dashes"] = dashes[key]
self.levels = levels
self.lookup_table = lookup_table
def _lookup_single(self, key, attr=None):
"""Get attribute(s) for a given data point."""
if attr is None:
value = self.lookup_table[key]
else:
value = self.lookup_table[key][attr]
return value
def _map_attributes(self, arg, levels, defaults, attr):
"""Handle the specification for a given style attribute."""
if arg is True:
lookup_table = dict(zip(levels, defaults))
elif isinstance(arg, dict):
missing = set(levels) - set(arg)
if missing:
err = f"These `{attr}` levels are missing values: {missing}"
raise ValueError(err)
lookup_table = arg
elif isinstance(arg, Sequence):
if len(levels) != len(arg):
err = f"The `{attr}` argument has the wrong number of values"
raise ValueError(err)
lookup_table = dict(zip(levels, arg))
elif arg:
err = f"This `{attr}` argument was not understood: {arg}"
raise ValueError(err)
else:
lookup_table = {}
return lookup_table
# =========================================================================== #
class VectorPlotter:
"""Base class for objects underlying *plot functions."""
_semantic_mappings = {
"hue": HueMapping,
"size": SizeMapping,
"style": StyleMapping,
}
# TODO units is another example of a non-mapping "semantic"
# we need a general name for this and separate handling
semantics = "x", "y", "hue", "size", "style", "units", "ci_low", "ci_high"
wide_structure = {
"x": "@index", "y": "@values", "hue": "@columns", "style": "@columns",
}
flat_structure = {"x": "@index", "y": "@values"}
_default_size_range = 1, 2 # Unused but needed in tests, ugh
def __init__(self, data=None, variables={}):
self.assign_variables(data, variables)
for var, cls in self._semantic_mappings.items():
# Create the mapping function
map_func = partial(cls.map, plotter=self)
setattr(self, f"map_{var}", map_func)
# Call the mapping function to initialize with default values
getattr(self, f"map_{var}")()
self._var_levels = {}
@classmethod
def get_semantics(cls, kwargs, semantics=None):
"""Subset a dictionary` arguments with known semantic variables."""
# TODO this should be get_variables since we have included x and y
if semantics is None:
semantics = cls.semantics
variables = {}
for key, val in kwargs.items():
if key in semantics and val is not None:
variables[key] = val
return variables
@property
def has_xy_data(self):
"""Return True at least one of x or y is defined."""
return bool({"x", "y"} & set(self.variables))
@property
def var_levels(self):
"""Property interface to ordered list of variables levels.
Each time it's accessed, it updates the var_levels dictionary with the
list of levels in the current semantic mappers. But it also allows the
dictionary to persist, so it can be used to set levels by a key. This is
used to track the list of col/row levels using an attached FacetGrid
object, but it's kind of messy and ideally fixed by improving the
faceting logic so it interfaces better with the modern approach to
tracking plot variables.
"""
for var in self.variables:
try:
map_obj = getattr(self, f"_{var}_map")
self._var_levels[var] = map_obj.levels
except AttributeError:
pass
return self._var_levels
def assign_variables(self, data=None, variables={}):
"""Define plot variables, optionally using lookup from `data`."""
x = variables.get("x", None)
y = variables.get("y", None)
if x is None and y is None:
self.input_format = "wide"
plot_data, variables = self._assign_variables_wideform(
data, **variables,
)
else:
self.input_format = "long"
plot_data, variables = self._assign_variables_longform(
data, **variables,
)
self.plot_data = plot_data
self.variables = variables
self.var_types = {
v: variable_type(
plot_data[v],
boolean_type="numeric" if v in "xy" else "categorical"
)
for v in variables
}
return self
def _assign_variables_wideform(self, data=None, **kwargs):
"""Define plot variables given wide-form data.
Parameters
----------
data : flat vector or collection of vectors
Data can be a vector or mapping that is coerceable to a Series
or a sequence- or mapping-based collection of such vectors, or a
rectangular numpy array, or a Pandas DataFrame.
kwargs : variable -> data mappings
Behavior with keyword arguments is currently undefined.
Returns
-------
plot_data : :class:`pandas.DataFrame`
Long-form data object mapping seaborn variables (x, y, hue, ...)
to data vectors.
variables : dict
Keys are defined seaborn variables; values are names inferred from
the inputs (or None when no name can be determined).
"""
# Raise if semantic or other variables are assigned in wide-form mode
assigned = [k for k, v in kwargs.items() if v is not None]
if any(assigned):
s = "s" if len(assigned) > 1 else ""
err = f"The following variable{s} cannot be assigned with wide-form data: "
err += ", ".join(f"`{v}`" for v in assigned)
raise ValueError(err)
# Determine if the data object actually has any data in it
empty = data is None or not len(data)
# Then, determine if we have "flat" data (a single vector)
if isinstance(data, dict):
values = data.values()
else:
values = np.atleast_1d(np.asarray(data, dtype=object))
flat = not any(
isinstance(v, Iterable) and not isinstance(v, (str, bytes))
for v in values
)
if empty:
# Make an object with the structure of plot_data, but empty
plot_data = pd.DataFrame()
variables = {}
elif flat:
# Handle flat data by converting to pandas Series and using the
# index and/or values to define x and/or y
# (Could be accomplished with a more general to_series() interface)
flat_data = pd.Series(data).copy()
names = {
"@values": flat_data.name,
"@index": flat_data.index.name
}
plot_data = {}
variables = {}
for var in ["x", "y"]:
if var in self.flat_structure:
attr = self.flat_structure[var]
plot_data[var] = getattr(flat_data, attr[1:])
variables[var] = names[self.flat_structure[var]]
plot_data = pd.DataFrame(plot_data)
else:
# Otherwise assume we have some collection of vectors.
# Handle Python sequences such that entries end up in the columns,
# not in the rows, of the intermediate wide DataFrame.
# One way to accomplish this is to convert to a dict of Series.
if isinstance(data, Sequence):
data_dict = {}
for i, var in enumerate(data):
key = getattr(var, "name", i)
# TODO is there a safer/more generic way to ensure Series?
# sort of like np.asarray, but for pandas?
data_dict[key] = pd.Series(var)
data = data_dict
# Pandas requires that dict values either be Series objects
# or all have the same length, but we want to allow "ragged" inputs
if isinstance(data, Mapping):
data = {key: pd.Series(val) for key, val in data.items()}
# Otherwise, delegate to the pandas DataFrame constructor
# This is where we'd prefer to use a general interface that says
# "give me this data as a pandas DataFrame", so we can accept
# DataFrame objects from other libraries
wide_data = pd.DataFrame(data, copy=True)
# At this point we should reduce the dataframe to numeric cols
numeric_cols = wide_data.apply(variable_type) == "numeric"
wide_data = wide_data.loc[:, numeric_cols]
# Now melt the data to long form
melt_kws = {"var_name": "@columns", "value_name": "@values"}
use_index = "@index" in self.wide_structure.values()
if use_index:
melt_kws["id_vars"] = "@index"
try:
orig_categories = wide_data.columns.categories
orig_ordered = wide_data.columns.ordered
wide_data.columns = wide_data.columns.add_categories("@index")
except AttributeError:
category_columns = False
else:
category_columns = True
wide_data["@index"] = wide_data.index.to_series()
plot_data = wide_data.melt(**melt_kws)
if use_index and category_columns:
plot_data["@columns"] = pd.Categorical(plot_data["@columns"],
orig_categories,
orig_ordered)
# Assign names corresponding to plot semantics
for var, attr in self.wide_structure.items():
plot_data[var] = plot_data[attr]
# Define the variable names
variables = {}
for var, attr in self.wide_structure.items():
obj = getattr(wide_data, attr[1:])
variables[var] = getattr(obj, "name", None)
# Remove redundant columns from plot_data
plot_data = plot_data[list(variables)]
return plot_data, variables
def _assign_variables_longform(self, data=None, **kwargs):
    """Define plot variables given long-form data and/or vector inputs.

    Parameters
    ----------
    data : dict-like collection of vectors
        Input data where variable names map to vector values.
    kwargs : variable -> data mappings
        Keys are seaborn variables (x, y, hue, ...) and values are vectors
        in any format that can construct a :class:`pandas.DataFrame` or
        names of columns or index levels in ``data``.

    Returns
    -------
    plot_data : :class:`pandas.DataFrame`
        Long-form data object mapping seaborn variables (x, y, hue, ...)
        to data vectors.
    variables : dict
        Keys are defined seaborn variables; values are names inferred from
        the inputs (or None when no name can be determined).

    Raises
    ------
    ValueError
        When variables are strings that don't appear in ``data``.

    """
    plot_data = {}
    variables = {}

    # Data is optional; all variables can be defined as vectors
    if data is None:
        data = {}

    # TODO should we try a data.to_dict() or similar here to more
    # generally accept objects with that interface?
    # Note that dict(df) also works for pandas, and gives us what we
    # want, whereas DataFrame.to_dict() gives a nested dict instead of
    # a dict of series.

    # Variables can also be extraced from the index attribute
    # TODO is this the most general way to enable it?
    # There is no index.to_dict on multiindex, unfortunately
    try:
        index = data.index.to_frame()
    except AttributeError:
        # ``data`` has no pandas index (e.g. a plain dict); use an empty
        # mapping so the membership tests below simply fail.
        index = {}

    # The caller will determine the order of variables in plot_data
    for key, val in kwargs.items():

        if isinstance(val, (str, bytes)):
            # String inputs trigger __getitem__
            # (for a DataFrame, ``in`` tests column membership)
            if val in data:
                # First try to get an entry in the data object
                plot_data[key] = data[val]
                variables[key] = val
            elif val in index:
                # Failing that, try to get an entry in the index object
                plot_data[key] = index[val]
                variables[key] = val
            else:
                # We don't know what this name means
                err = f"Could not interpret value `{val}` for parameter `{key}`"
                raise ValueError(err)

        else:

            # Otherwise, assume the value is itself a vector of data

            # Raise when data is present and a vector can't be combined with it
            # (Series are exempt because they align on the index instead)
            if isinstance(data, pd.DataFrame) and not isinstance(val, pd.Series):
                if val is not None and len(data) != len(val):
                    val_cls = val.__class__.__name__
                    err = (
                        f"Length of {val_cls} vectors must match length of `data`"
                        f" when both are used, but `data` has length {len(data)}"
                        f" and the vector passed to `{key}` has length {len(val)}."
                    )
                    raise ValueError(err)

            plot_data[key] = val

            # Try to infer the name of the variable
            variables[key] = getattr(val, "name", None)

    # Construct a tidy plot DataFrame. This will convert a number of
    # types automatically, aligning on index in case of pandas objects
    plot_data = pd.DataFrame(plot_data)

    # Reduce the variables dictionary to fields with valid data
    # (a variable passed as None, or an all-null vector, is dropped here)
    variables = {
        var: name
        for var, name in variables.items()
        if plot_data[var].notnull().any()
    }

    return plot_data, variables
def iter_data(
    self, grouping_vars=None, reverse=False, from_comp_data=False,
):
    """Generator for getting subsets of data defined by semantic variables.

    Also injects "col" and "row" into grouping semantics.

    Parameters
    ----------
    grouping_vars : string or list of strings
        Semantic variables that define the subsets of data.
    reverse : bool, optional
        If True, reverse the order of iteration.
    from_comp_data : bool, optional
        If True, use self.comp_data rather than self.plot_data

    Yields
    ------
    sub_vars : dict
        Keys are semantic names, values are the level of that semantic.
    sub_data : :class:`pandas.DataFrame`
        Subset of ``plot_data`` for this combination of semantic values.

    """
    # TODO should this default to using all (non x/y?) semantics?
    # or define groupping vars somewhere?
    if grouping_vars is None:
        grouping_vars = []
    elif isinstance(grouping_vars, str):
        grouping_vars = [grouping_vars]
    elif isinstance(grouping_vars, tuple):
        grouping_vars = list(grouping_vars)

    # Always insert faceting variables
    facet_vars = {"col", "row"}
    grouping_vars.extend(
        facet_vars & set(self.variables) - set(grouping_vars)
    )

    # Reduce to the semantics used in this plot
    grouping_vars = [
        var for var in grouping_vars if var in self.variables
    ]

    if from_comp_data:
        data = self.comp_data
    else:
        data = self.plot_data

    if grouping_vars:

        grouped_data = data.groupby(
            grouping_vars, sort=False, as_index=False
        )

        # Iterate the full cartesian product of the assigned level orders
        # (not groupby order), so output follows the semantic level order.
        grouping_keys = []
        for var in grouping_vars:
            grouping_keys.append(self.var_levels.get(var, []))

        iter_keys = itertools.product(*grouping_keys)
        if reverse:
            iter_keys = reversed(list(iter_keys))

        for key in iter_keys:

            # Pandas fails with singleton tuple inputs
            pd_key = key[0] if len(key) == 1 else key

            try:
                data_subset = grouped_data.get_group(pd_key)
            except KeyError:
                # This level combination is absent from the data; skip it
                continue

            sub_vars = dict(zip(grouping_vars, key))

            yield sub_vars, data_subset

    else:

        # No grouping requested/applicable: yield everything at once
        yield {}, data
@property
def comp_data(self):
    """Dataframe with numeric x and y, after unit conversion and log scaling."""
    if not hasattr(self, "ax"):
        # Probably a good idea, but will need a bunch of tests updated
        # Most of these tests should just use the external interface
        # Then this can be re-enabled.
        # raise AttributeError("No Axes attached to plotter")
        return self.plot_data

    if not hasattr(self, "_comp_data"):
        # Lazily compute the converted frame once and cache it on self
        comp_data = (
            self.plot_data
            .copy(deep=False)
            .drop(["x", "y"], axis=1, errors="ignore")
        )
        for var in "yx":
            if var not in self.variables:
                continue

            # Get a corresponding axis object so that we can convert the units
            # to matplotlib's numeric representation, which we can compute on
            # This is messy and it would probably be better for VectorPlotter
            # to manage its own converters (using the matplotlib tools).
            # XXX Currently does not support unshared categorical axes!
            # (But see comment in _attach about how those don't exist)
            if self.ax is None:
                ax = self.facets.axes.flat[0]
            else:
                ax = self.ax
            axis = getattr(ax, f"{var}axis")

            comp_var = axis.convert_units(self.plot_data[var])
            if axis.get_scale() == "log":
                comp_var = np.log10(comp_var)
            # Inserting at position 0 keeps x/y as the leading columns
            comp_data.insert(0, var, comp_var)

        self._comp_data = comp_data

    return self._comp_data
def _get_axes(self, sub_vars):
    """Return the Axes to draw on, chosen by the row/col facet assignment."""
    row = sub_vars.get("row", None)
    col = sub_vars.get("col", None)
    # A facet assignment means the Axes lives in the FacetGrid lookup table.
    if row is not None:
        key = row if col is None else (row, col)
        return self.facets.axes_dict[key]
    if col is not None:
        return self.facets.axes_dict[col]
    # No faceting for this subset: fall back to the single attached Axes.
    return self.facets.ax if self.ax is None else self.ax
def _attach(self, obj, allowed_types=None, log_scale=None):
    """Associate the plotter with an Axes manager and initialize its units.

    Parameters
    ----------
    obj : :class:`matplotlib.axes.Axes` or :class:'FacetGrid`
        Structural object that we will eventually plot onto.
    allowed_types : str or list of str
        If provided, raise when either the x or y variable does not have
        one of the declared seaborn types.
    log_scale : bool, number, or pair of bools or numbers
        If not False, set the axes to use log scaling, with the given
        base or defaulting to 10. If a tuple, interpreted as separate
        arguments for the x and y axes.

    """
    from .axisgrid import FacetGrid
    if isinstance(obj, FacetGrid):
        # Attaching to a grid: there is no single Axes; remember the
        # facet level orders so iter_data can use them for grouping.
        self.ax = None
        self.facets = obj
        ax_list = obj.axes.flatten()
        if obj.col_names is not None:
            self.var_levels["col"] = obj.col_names
        if obj.row_names is not None:
            self.var_levels["row"] = obj.row_names
    else:
        # Attaching to a single Axes
        self.ax = obj
        self.facets = None
        ax_list = [obj]

    if allowed_types is None:
        allowed_types = ["numeric", "datetime", "categorical"]
    elif isinstance(allowed_types, str):
        allowed_types = [allowed_types]

    for var in set("xy").intersection(self.variables):
        # Check types of x/y variables
        var_type = self.var_types[var]
        if var_type not in allowed_types:
            err = (
                f"The {var} variable is {var_type}, but one of "
                f"{allowed_types} is required"
            )
            raise TypeError(err)

        # Register with the matplotlib unit conversion machinery
        # Perhaps cleaner to manage our own transform objects?
        # XXX Currently this does not allow "unshared" categorical axes
        # We could add metadata to a FacetGrid and set units based on that.
        # See also comment in comp_data, which only uses a single axes to do
        # its mapping, meaning that it won't handle unshared axes well either.
        for ax in ax_list:
            axis = getattr(ax, f"{var}axis")
            seed_data = self.plot_data[var]
            if var_type == "categorical":
                seed_data = categorical_order(seed_data)
            axis.update_units(seed_data)

    # For categorical y, we want the "first" level to be at the top of the axis
    if self.var_types.get("y", None) == "categorical":
        for ax in ax_list:
            try:
                ax.yaxis.set_inverted(True)
            except AttributeError:  # mpl < 3.1
                if not ax.yaxis_inverted():
                    ax.invert_yaxis()

    # Possibly log-scale one or both axes
    if log_scale is not None:
        # Allow single value or x, y tuple
        try:
            scalex, scaley = log_scale
        except TypeError:
            # Scalar value: apply to whichever of x/y this plot defines
            scalex = log_scale if "x" in self.variables else False
            scaley = log_scale if "y" in self.variables else False

        for axis, scale in zip("xy", (scalex, scaley)):
            if scale:
                for ax in ax_list:
                    set_scale = getattr(ax, f"set_{axis}scale")
                    if scale is True:
                        set_scale("log")
                    else:
                        # NOTE(review): the log-base keyword spelling changed
                        # in mpl 3.3 (basex/basey -> base); also, LooseVersion
                        # (distutils) is deprecated upstream — worth migrating.
                        if LooseVersion(mpl.__version__) >= "3.3":
                            set_scale("log", base=scale)
                        else:
                            set_scale("log", **{f"base{axis}": scale})
def _log_scaled(self, axis):
    """Return True if the given axis ("x" or "y") is log scaled on all attached axes."""
    # Gather every Axes we are drawing on: the single one, or all facets.
    if self.ax is not None:
        axes_list = [self.ax]
    else:
        axes_list = self.facets.axes.flatten()

    flags = [
        getattr(ax, f"{axis}axis").get_scale() == "log" for ax in axes_list
    ]

    # A mix of log and linear axes is ambiguous for downstream math.
    if any(flags) and not all(flags):
        raise RuntimeError("Axis scaling is not consistent")

    return any(flags)
def _add_axis_labels(self, ax, default_x="", default_y=""):
    """Label any unlabeled axis; hide the label when its tick labels are hidden."""
    # NOTE: interior facet axes are handled by FacetGrid.set_axis_labels,
    # so this only needs to deal with the Axes it is given.
    if not ax.get_xlabel():
        visible = any(tick.get_visible() for tick in ax.get_xticklabels())
        ax.set_xlabel(self.variables.get("x", default_x), visible=visible)
    if not ax.get_ylabel():
        visible = any(tick.get_visible() for tick in ax.get_yticklabels())
        ax.set_ylabel(self.variables.get("y", default_y), visible=visible)
def variable_type(vector, boolean_type="numeric"):
    """Determine whether a vector contains numeric, categorical, or datetime data.

    This function differs from the pandas typing API in two ways:

    - Python sequences or object-typed PyData objects are considered numeric if
      all of their entries are numeric.
    - String or mixed-type data are considered categorical even if not
      explicitly represented as a :class:`pandas.api.types.CategoricalDtype`.

    Parameters
    ----------
    vector : :func:`pandas.Series`, :func:`numpy.ndarray`, or Python sequence
        Input data to test.
    boolean_type : 'numeric' or 'categorical'
        Type to use for vectors containing only 0s and 1s (and NAs).

    Returns
    -------
    var_type : 'numeric', 'categorical', or 'datetime'
        Name identifying the type of data in the vector.

    """
    # Special-case all-na data, which is always "numeric"
    if pd.isna(vector).all():
        return "numeric"

    # Special-case binary/boolean data, allow caller to determine
    # This triggers a numpy warning when vector has strings/objects
    # https://github.com/numpy/numpy/issues/6784
    # Because we reduce with .all(), we are agnostic about whether the
    # comparison returns a scalar or vector, so we will ignore the warning.
    # It triggers a separate DeprecationWarning when the vector has datetimes:
    # https://github.com/numpy/numpy/issues/13548
    # This is considered a bug by numpy and will likely go away.
    with warnings.catch_warnings():
        warnings.simplefilter(
            action='ignore', category=(FutureWarning, DeprecationWarning)
        )
        if np.isin(vector, [0, 1, np.nan]).all():
            return boolean_type

    # Defer to positive pandas tests
    if pd.api.types.is_numeric_dtype(vector):
        return "numeric"

    if pd.api.types.is_categorical_dtype(vector):
        return "categorical"

    if pd.api.types.is_datetime64_dtype(vector):
        return "datetime"

    # --- If we get to here, we need to check the entries

    # A collection where every entry is a number is numeric
    if all(isinstance(x, Number) for x in vector):
        return "numeric"

    # A collection where every entry is a datetime is datetime
    if all(isinstance(x, (datetime, np.datetime64)) for x in vector):
        return "datetime"

    # Otherwise, our final fallback is to consider things categorical
    return "categorical"
def infer_orient(x=None, y=None, orient=None, require_numeric=True):
    """Determine how the plot should be oriented based on the data.

    For historical reasons, the convention is to call a plot "horizontally"
    or "vertically" oriented based on the axis representing its dependent
    variable. Practically, this is used when determining the axis for
    numerical aggregation.

    Parameters
    ----------
    x, y : Vector data or None
        Positional data vectors for the plot.
    orient : string or None
        Specified orientation, which must start with "v" or "h" if not None.
    require_numeric : bool
        If set, raise when the implied dependent variable is not numeric.

    Returns
    -------
    orient : "v" or "h"

    Raises
    ------
    ValueError: When `orient` is not None and does not start with "h" or "v"
    TypeError: When dependent variable is not numeric, with `require_numeric`

    """
    x_type = None if x is None else variable_type(x)
    y_type = None if y is None else variable_type(y)

    nonnumeric_dv_error = "{} orientation requires numeric `{}` variable."
    single_var_warning = "{} orientation ignored with only `{}` specified."

    orient_str = str(orient)

    def _check_numeric(var_type, label, name):
        # Enforce a numeric dependent variable when the caller asked for it.
        if require_numeric and var_type != "numeric":
            raise TypeError(nonnumeric_dv_error.format(label, name))

    # Only one variable given: the orientation is forced, regardless of
    # any (contradictory) `orient` request, which just earns a warning.
    if x is None:
        if orient_str.startswith("h"):
            warnings.warn(single_var_warning.format("Horizontal", "y"))
        _check_numeric(y_type, "Vertical", "y")
        return "v"

    if y is None:
        if orient_str.startswith("v"):
            warnings.warn(single_var_warning.format("Vertical", "x"))
        _check_numeric(x_type, "Horizontal", "x")
        return "h"

    # Both variables present: honor an explicit orientation request.
    if orient_str.startswith("v"):
        _check_numeric(y_type, "Vertical", "y")
        return "v"

    if orient_str.startswith("h"):
        _check_numeric(x_type, "Horizontal", "x")
        return "h"

    if orient is not None:
        raise ValueError(f"Value for `orient` not understood: {orient}")

    # No request: infer from the variable types, defaulting to vertical.
    if x_type != "numeric" and y_type == "numeric":
        return "v"
    if x_type == "numeric" and y_type != "numeric":
        return "h"
    if require_numeric and "numeric" not in (x_type, y_type):
        err = "Neither the `x` nor `y` variable appears to be numeric."
        raise TypeError(err)
    return "v"
def unique_dashes(n):
    """Build an arbitrarily long list of unique dash styles for lines.

    Parameters
    ----------
    n : int
        Number of unique dash specs to generate.

    Returns
    -------
    dashes : list of strings or tuples
        Valid arguments for the ``dashes`` parameter on
        :class:`matplotlib.lines.Line2D`. The first spec is a solid
        line (``""``), the remainder are sequences of long and short
        dashes.

    """
    # Hand-picked specs that are easy to tell apart come first.
    dashes = ["", (4, 1.5), (1, 1), (3, 1.25, 1.5, 1.25), (5, 1, 1, 1)]

    order = 3
    while len(dashes) < n:
        # Take combinations of long and short dash segments of two
        # different segment alphabets.
        long_short = itertools.combinations_with_replacement([3, 1.25], order)
        four_one = itertools.combinations_with_replacement([4, 1], order)

        # Interleave the combinations, reversing one of the streams and
        # dropping the all-same first/last entry of each.
        interleaved = itertools.chain(*zip(
            list(long_short)[1:-1][::-1],
            list(four_one)[1:-1],
        ))

        # Separate each dash segment with a gap equal to its shortest dash.
        for segments in interleaved:
            gap = min(segments)
            spec = tuple(itertools.chain(*((seg, gap) for seg in segments)))
            dashes.append(spec)

        order += 1

    return dashes[:n]
def unique_markers(n):
    """Build an arbitrarily long list of unique marker styles for points.

    Parameters
    ----------
    n : int
        Number of unique marker specs to generate.

    Returns
    -------
    markers : list of string or tuples
        Values for defining :class:`matplotlib.markers.MarkerStyle` objects.
        All markers will be filled.

    """
    # Hand-picked, well distinguishable specs come first.
    markers = [
        "o", "X", (4, 0, 45), "P", (4, 0, 0), (4, 1, 0), "^", (4, 1, 45), "v",
    ]

    # Extend with regular polygons/stars of increasing side counts.
    sides = 5
    while len(markers) < n:
        angle = 360 / (sides + 1) / 2
        markers += [
            (sides + 1, 1, angle),
            (sides + 1, 0, angle),
            (sides, 1, 0),
            (sides, 0, 0),
        ]
        sides += 1

    # Convert to MarkerStyle object, using only exactly what we need
    # markers = [mpl.markers.MarkerStyle(m) for m in markers[:n]]

    return markers[:n]
def categorical_order(vector, order=None):
    """Return a list of unique data values.

    Determine an ordered list of levels in ``values``.

    Parameters
    ----------
    vector : list, array, Categorical, or Series
        Vector of "categorical" values
    order : list-like, optional
        Desired order of category levels to override the order determined
        from the ``values`` object.

    Returns
    -------
    order : list
        Ordered list of category levels not including null values.

    """
    # An explicit order wins and is passed through unchanged.
    if order is not None:
        return list(order)

    # Derive the levels from the data itself.
    if hasattr(vector, "categories"):
        levels = vector.categories
    else:
        try:
            levels = vector.cat.categories
        except (TypeError, AttributeError):
            try:
                levels = vector.unique()
            except AttributeError:
                levels = pd.unique(vector)
            # Appearance order is arbitrary for numbers; sort them.
            if variable_type(vector) == "numeric":
                levels = np.sort(levels)

    # Drop null values from the inferred levels.
    return [level for level in levels if pd.notnull(level)]
| 35.507828
| 87
| 0.570465
|
794b6dde713220aabbb385c00496b5989c915959
| 15,483
|
py
|
Python
|
lib/Ixia/generate.py
|
ctgriffiths/twister
|
b3930549551b0104738d56f402eb9b4b90dd692c
|
[
"Apache-2.0"
] | 19
|
2015-01-29T11:02:42.000Z
|
2021-06-03T11:45:42.000Z
|
lib/Ixia/generate.py
|
ctgriffiths/twister
|
b3930549551b0104738d56f402eb9b4b90dd692c
|
[
"Apache-2.0"
] | 47
|
2015-01-02T11:39:39.000Z
|
2022-02-05T11:29:07.000Z
|
lib/Ixia/generate.py
|
ctgriffiths/twister
|
b3930549551b0104738d56f402eb9b4b90dd692c
|
[
"Apache-2.0"
] | 10
|
2015-01-12T07:24:39.000Z
|
2017-11-05T00:17:30.000Z
|
#!/usr/bin/env python
# File: generate.py ; This file is part of Twister.
# version: 2.003
# Copyright (C) 2013 , Luxoft
# Authors:
# Adrian Toader <adtoader@luxoft.com>
# Andrei Costachi <acostachi@luxoft.com>
# Andrei Toma <atoma@luxoft.com>
# Cristi Constantin <crconstantin@luxoft.com>
# Daniel Cioata <dcioata@luxoft.com>
# Mihail Tudoran <mtudoran@luxoft.com>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
This file generates Ixia Library wrapper in Python, using TCL functions from IxTclHal.
Params: - IxOS TCL lib path
- IP address of Ixia chassis
'''
from Tkinter import Tcl
from collections import OrderedDict
import re
import socket
import sys
import os
# Defaults; both values are overwritten from argv below.
chassis_ip = '10.100.100.45'
tcl_lib_path = '/home/twister/ixos/ixia/lib/ixTcl1.0'

if len(sys.argv) < 3:
    print 'Usage: python generate.py <ixos_tcl_lib_path> <chassis_ip_address>'
    exit()

tcl_lib_path = sys.argv[1]
chassis_ip = sys.argv[2]

# some minimal checks on IP address
if chassis_ip.count('.') < 3:
    # NOTE(review): this branch only prints and does NOT exit; a malformed
    # address still falls through to the inet_aton check below.
    print 'ERROR: IP address not valid !'
try:
    socket.inet_aton(chassis_ip)
except socket.error:
    print 'ERROR: IP address not valid !'
    exit()

# check if IxOS tcl lib path exists
if tcl_lib_path[-1] == '/':
    tcl_lib_path = tcl_lib_path.rstrip('/')
if not os.path.isdir(tcl_lib_path):
    print 'ERROR: IxOS tcl lib path doesn\'t exists'
    exit()

# Connect to the Ixia chassis via the Tkinter-embedded TCL interpreter;
# the generated wrapper relies on the same IxTclHal package being loadable.
tcl = Tcl()
tcl.eval('package req IxTclHal')
tcl.eval('ixConnectToTclServer ' + chassis_ip)
tcl.eval('ixConnectToChassis ' + chassis_ip)
# # # # # # # #
HEAD = """
from Tkinter import Tcl
t = Tcl()
t.eval('package req IxTclHal')
true = True
false = False
yes = True
no = False
none = None
"""
TAIL = """
def ix_exec(cmd):
# This is used for executing custom TCL commands
r = t.eval(cmd)
return r
if __name__ == "__main__":
chassis_ip = '%s'
funcs = [x[0] for x in locals().items() if callable( x[1] )]
# print sorted(funcs)
print 'Found {} functions!'.format(len(funcs))
print 'Is this UNIX?', isUNIX()
print 'Connect to TCL Server:', ixConnectToTclServer(chassis_ip)
print 'Connect to Chassis', ixConnectToChassis(chassis_ip)
print 'Config chassis...'
portList = ''
chassis('get ' + chassis_ip)
py_chassis = chassis('cget -id')
print py_chassis
print 'Config card...'
py_card = 1
card('setDefault')
card('config -txFrequencyDeviation 0')
print py_card
print 'Config port...'
py_port = 1
port('setFactoryDefaults {} {} {}'.format(py_chassis, py_card, py_port))
port('config -speed 100')
port('config -duplex full')
port('config -flowControl false')
print py_port
print 'Config stat...'
stat('setDefault')
stat('config -mode statNormal')
stat('config -enableValidStats false')
stat('config -enableProtocolServerStats true')
stat('config -enableArpStats true')
stat('config -enablePosExtendedStats true')
stat('config -enableDhcpStats false')
stat('config -enableDhcpV6Stats false')
stat('config -enableEthernetOamStats false')
print 'Done.'
print 'Config flexibleTimestamp...'
flexibleTimestamp('setDefault')
flexibleTimestamp('config -type timestampBeforeCrc')
flexibleTimestamp('config -offset 23')
print 'Done.'
print 'Config filter...'
ix_filter('setDefault')
ix_filter('config -captureTriggerDA anyAddr')
ix_filter('config -captureTriggerSA anyAddr')
ix_filter('config -captureTriggerPattern anyPattern')
ix_filter('config -captureTriggerError errAnyFrame')
print 'Done.'
print 'Config filterPallette...'
filterPallette('setDefault')
filterPallette('config -DA1 "00 00 00 00 00 00"')
filterPallette('config -DAMask1 "00 00 00 00 00 00"')
filterPallette('config -DA2 "00 00 00 00 00 00"')
filterPallette('config -DAMask2 "00 00 00 00 00 00"')
filterPallette('config -SA1 "00 00 00 00 00 00"')
filterPallette('config -SAMask1 "00 00 00 00 00 00"')
filterPallette('config -SA2 "00 00 00 00 00 00"')
filterPallette('config -SAMask2 "00 00 00 00 00 00"')
filterPallette('config -pattern1 "DE ED EF FE AC CA"')
filterPallette('config -patternMask1 "00 00 00 00 00 00"')
print 'Done.'
print 'Config capture...'
capture('setDefault')
capture('config -fullAction lock')
capture('config -sliceSize 8191')
print 'Done.'
print 'Config ipAddressTable...'
ipAddressTable('setDefault')
ipAddressTable('config -defaultGateway "0.0.0.0"')
print 'Done.'
print 'Config arpServer...'
arpServer('setDefault')
arpServer('config -retries 3')
arpServer('config -mode arpGatewayOnly')
arpServer('config -rate 208333')
arpServer('config -requestRepeatCount 3')
print 'Done.'
print 'Config interfaceTable...'
interfaceTable('setDefault')
interfaceTable('config -dhcpV4RequestRate 0')
interfaceTable('config -dhcpV6RequestRate 0')
interfaceTable('config -dhcpV4MaximumOutstandingRequests 100')
interfaceTable('config -dhcpV6MaximumOutstandingRequests 100')
#interfaceTable('config -fcoeRequestRate 500')
print 'Done.'
print 'Clear All Interfaces ...'
interfaceTable('clearAllInterfaces')
print 'Done.'
print 'Config protocolServer...'
protocolServer('setDefault')
protocolServer('config -enableArpResponse true')
protocolServer('config -enablePingResponse false')
print 'Done.'
print 'Config oamPort...'
oamPort('setDefault')
oamPort('config -enable false')
oamPort('config -macAddress "00 00 AB BA DE AD"')
oamPort('config -enableLoopback false')
oamPort('config -enableLinkEvents false')
print 'Done.'
print 'Call ixWritePortsToHardware and ixCheckLinkState ...'
# lappend portList [list $chassis $card $port] # ???
ixWritePortsToHardware(portList, None)
ixCheckLinkState(portList)
print 'Done.'
#
""" % (chassis_ip)
# # # # # # # #
def tcl_convert(variable):
    """
    This returns the TCL value converted into Python string repr.

    NOTE(review): despite taking *variable*, the value emitted below is always
    read from `vDefaultArg`, so this only behaves as intended when it is
    called with variable='vDefaultArg' — confirm before reusing elsewhere.

    The TCL classes probed with `string is` are:

    alnum
        Any Unicode alphabet or digit character.
    alpha
        Any Unicode alphabet character.
    ascii
        Any character with a value less than \u0080 (those that are in the 7-bit ascii range).
    boolean
        Any of the forms allowed to Tcl_GetBoolean.
        In the case of boolean, true and false, if the function will return 0, then the varname will always be set to 0,
        due to the varied nature of a valid boolean value.
    control
        Any Unicode control character.
    digit
        Any Unicode digit character. Note that this includes characters outside of the [0-9] range.
    double
        Any of the valid forms for a double in Tcl, with optional surrounding \
        whitespace. In case of under/overflow in the value,
        0 is returned and the varname will contain -1.
    false
        Any of the forms allowed to Tcl_GetBoolean where the value is false.
    graph
        Any Unicode printing character, except space.
    integer
        Any of the valid forms for an ordinary integer in Tcl, with optional \
        surrounding whitespace. In case of under/overflow in the value,
        0 is returned and the varname will contain -1.
    lower
        Any Unicode lower case alphabet character.
    print
        Any Unicode printing character, including space.
    punct
        Any Unicode punctuation character.
    space
        Any Unicode space character.
    true
        Any of the forms allowed to Tcl_GetBoolean where the value is true.
    upper
        Any upper case alphabet character in the Unicode character set.
    wordchar
        Any Unicode word character. That is any alphanumeric character, and \
        any Unicode connector punctuation characters (e.g. underscore).
    xdigit
        Any hexadecimal digit character ([0-9A-Fa-f]).
    """
    global tcl
    # Probe order matters: the numeric/boolean classes must come before the
    # string-ish classes, because almost anything matches `ascii`/`print`.
    types = OrderedDict([
        ['integer', int],
        ['digit', int],
        ['double', float],
        ['true', bool],
        ['false', bool],
        ['boolean', bool],
        ['xdigit', str],
        ['alnum', str],
        ['alpha', str],
        ['ascii', str],
        ['control', str],
        ['graph', str],
        ['lower', str],
        ['print', str],
        ['punct', str],
        ['space', str],
        ['upper', str],
        ['wordchar', str],
    ])
    for tcl_type, py_type in types.iteritems():
        found = tcl.eval("string is {} -strict ${}".format(tcl_type, variable))
        found = int(found)
        if found:
            value = tcl.getvar('vDefaultArg')
            value = str(value)
            print 'Converting value `{}` into TCL type `{}`.'.format(value, tcl_type)
            if value == 'false' or value == 'no':
                return False
            elif py_type == str:
                # Quote string-ish values so the generated module contains a
                # valid Python literal.
                return '"{}"'.format(value)
            else:
                return value
    # Sentinel marking a value whose TCL class could not be determined.
    return '!!!'
# # # # # # # #
# TCL argument/proc names that collide with Python keywords or builtins,
# mapped to safe aliases used in the generated wrapper module.
IX_VARS = {
    'from': 'ix_from',
    'for': 'ix_for',
    'while': 'ix_while',
    'file': 'ix_file',
    'object': 'ix_object',
    'range': 'ix_range',
    'map': 'ix_map',
    'filter': 'ix_filter',
}
def fix_tcl_var(variable):
    """Return the safe Python alias for *variable* when it collides with a
    Python keyword/builtin, or the name unchanged otherwise."""
    return IX_VARS.get(variable, variable)
def fix_tcl_func(func):
    """Map a TCL proc name to a valid Python identifier: namespace separators
    become underscores, and reserved names get their safe alias."""
    sanitized = func.replace('::', '_')
    return IX_VARS.get(sanitized, sanitized)
# # # # # # # #
'''
Get Ixia libray version from ixTclHal.tcl file
'''
ixos_version = ''
version_file = tcl_lib_path + '/ixTclHal.tcl'
try:
    with open(version_file, 'r') as v_file:
        for line in v_file:
            # e.g. "package provide IxTclHal 6.60" -> version "6.60"
            if line.startswith('package provide IxTclHal'):
                ixos_version = line.split()[-1]
                break
except:
    # NOTE(review): bare except also hides unrelated errors (permissions etc.)
    print 'ERROR: Cannot get IxOS version. Exiting !'
    exit()

# # # # # # # #
'''
Generate functions.txt file from file tclIndex
'''
FUNCTIONS = []
tcl_index_file = tcl_lib_path + '/tclIndex'
try:
    for line in open(tcl_index_file).readlines():
        # tclIndex lines look like: set auto_index(<procName>) ...
        if line.startswith('set auto_index(', 0):
            line = line.replace('set auto_index(','')
            FUNCTIONS.append(line.split(')')[0])
except:
    # A missing/unreadable tclIndex is tolerated; ixTclSetup.tcl is parsed next.
    pass
# # # # # # # #
'''
Update functions.txt file from file ixTclSetup.tcl
'''
ix_tcl_setup = tcl_lib_path + '/ixTclSetup.tcl'
try:
    file_content = open(ix_tcl_setup, 'r').read()
    # get entries from ixTclHal::noArgList
    no_arg_list = re.findall('set ixTclHal::noArgList(.*?)\{(.+?)\}',file_content, re.M | re.S)
    FUNCTIONS.extend(no_arg_list[0][1].replace('\\','').split())
    # get entries from ixTclHal::pointerList
    pointer_list = re.findall('set ixTclHal::pointerList(.*?)\{(.+?)\}',file_content, re.M | re.S)
    FUNCTIONS.extend(pointer_list[0][1].replace('\\','').split())
    # get entries from ixTclHal::command
    command_list = re.findall('set ixTclHal::commandList(.*?)\{(.+?)\}',file_content, re.M | re.S)
    FUNCTIONS.extend(command_list[0][1].replace('\\','').split())
except:
    # Any parse failure just leaves FUNCTIONS with what tclIndex provided.
    pass

# Cache the discovered proc names; the generation loop below re-reads this file.
try:
    with open('functions.txt', 'w') as OUTPUT:
        OUTPUT.write('\n'.join(FUNCTIONS))
except:
    # Best-effort cache only.
    pass
# # # # # # # #
# For every discovered TCL proc, introspect its arguments inside the TCL
# interpreter and render a Python wrapper function as source text.
FUNCTIONS = []

for line in open('functions.txt').readlines():
    if not line.strip():
        continue
    if '::' in line:
        continue

    func_name = line.strip()  # TCL Function name
    tcl_args = []
    tcl_args_long = []
    pyc_args = []
    pyc_args_long = []
    tmpl = '# Function `{}` is invalid'.format(func_name)  # Default string, sais the func is invalid
    tcl.eval('set vDefaultArg ""')
    defaultArgFound = False
    defaultArgs = []  # The list of mandatory arguments

    try:
        # Enable TCL Function. RISK excuting the function!
        try:
            tcl.eval(func_name)
        except:
            pass

        proc_args = tcl.eval('info args ' + func_name)

        for arg in proc_args.split():
            # `info default` stores the default (if any) into vDefaultArg
            has_default = tcl.eval('info default %s %s vDefaultArg' % (func_name, arg))
            arg_fixed = fix_tcl_var(arg)
            # Args for executing
            tcl_args.append(arg)
            # Args for calling the TCL function
            pyc_args.append(arg_fixed)
            # If this argument has a default value
            if int(has_default) and tcl.getvar('vDefaultArg'):
                defaultArgFound = True
                # Args for comment
                tcl_args_long.append('%s {%s}' % (arg, str(tcl.getvar('vDefaultArg'))))
                # Args for defining Python functions
                pyc_args_long.append('%s=%s' % (arg_fixed, str(tcl_convert('vDefaultArg'))))
            else:
                tcl_args_long.append(arg)
                # Once a defaulted arg was seen, later positional args must
                # also get a default (None) to keep the Python def valid.
                if not defaultArgFound:
                    pyc_args_long.append(arg_fixed)
                else:
                    defaultArgs.append(arg_fixed)
                    pyc_args_long.append('%s=None' % (arg_fixed))
            # Reset variable for the next cycle
            tcl.eval('set vDefaultArg ""')

        leng = len(tcl_args)
        tcl_args = ', '.join(tcl_args)
        tcl_args_long = ' '.join(tcl_args_long)
        pyc_args = ', '.join(pyc_args)
        pyc_args_long = ', '.join(pyc_args_long)

    except Exception, e:
        print('>>> Cannot create function `{}`, exception: `{}`!\n'.format(func_name, e))
        continue

    if defaultArgs:
        # Emit run-time guards for arguments that were forced to default None.
        defaultArgs = '\n'.join([' if {0} is None: print "TCL argument ' \
            ' `{0}` cannot be empty!"; return False'.format(x) for x in defaultArgs])
        tmpl = """
def {py}({py_arg_l}):
    '''\n Method Name : {tcl}\n Arguments : {tcl_arg_l}
{def_arg}\n '''
    r = t.eval("{tcl} {le}".format({py_arg}))
    return r
""".format(py=fix_tcl_func(func_name), tcl=func_name, py_arg=pyc_args, py_arg_l=pyc_args_long,
           tcl_arg=tcl_args, tcl_arg_l=tcl_args_long, le='{} '*leng, def_arg=defaultArgs)
    else:
        tmpl = """
def {py}({py_arg_l}):
    '''\n Method Name : {tcl}\n Arguments : {tcl_arg_l} \n '''
    r = t.eval("{tcl} {le}".format({py_arg}))
    return r
""".format(py=fix_tcl_func(func_name), tcl=func_name, py_arg=pyc_args, py_arg_l=pyc_args_long,
           tcl_arg=tcl_args, tcl_arg_l=tcl_args_long, le='{} '*leng)

    FUNCTIONS.append(tmpl)
#
# Write the generated wrapper module: static header, one def per TCL proc,
# then the self-test main block.
output_file = 'TscIxPythonLib_v{}.py'.format(ixos_version)
OUTPUT = open(output_file, 'w')
OUTPUT.write(HEAD)
OUTPUT.write('\n'.join(FUNCTIONS))
OUTPUT.write(TAIL)
OUTPUT.close()
# Eof()
| 30.478346
| 120
| 0.610799
|
794b6e041ddee4b1ef2edcb46168f1a08c03d153
| 18,254
|
py
|
Python
|
official/vision/beta/configs/maskrcnn.py
|
airman00/models
|
ea90f0c118fd39d93a1ac740a99094687f1e243a
|
[
"Apache-2.0"
] | 2
|
2021-09-03T23:47:59.000Z
|
2021-09-03T23:48:04.000Z
|
official/vision/beta/configs/maskrcnn.py
|
mendoza689/models
|
7dcd3f6b3151ae968f71b7e53b32c25750245606
|
[
"Apache-2.0"
] | null | null | null |
official/vision/beta/configs/maskrcnn.py
|
mendoza689/models
|
7dcd3f6b3151ae968f71b7e53b32c25750245606
|
[
"Apache-2.0"
] | 2
|
2021-08-17T22:07:17.000Z
|
2021-12-25T12:25:47.000Z
|
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Mask R-CNN configuration definition."""
import dataclasses
import os
from typing import List, Optional
from official.core import config_definitions as cfg
from official.core import exp_factory
from official.modeling import hyperparams
from official.modeling import optimization
from official.vision.beta.configs import common
from official.vision.beta.configs import decoders
from official.vision.beta.configs import backbones
# pylint: disable=missing-class-docstring
@dataclasses.dataclass
class Parser(hyperparams.Config):
  """Config for the Mask R-CNN data parser (decoded-example -> model inputs)."""
  num_channels: int = 3
  match_threshold: float = 0.5
  unmatched_threshold: float = 0.5
  # Augmentation knobs: random horizontal flip and scale jitter range.
  aug_rand_hflip: bool = False
  aug_scale_min: float = 1.0
  aug_scale_max: float = 1.0
  skip_crowd_during_training: bool = True
  max_num_instances: int = 100
  # RPN anchor matching / sampling parameters.
  rpn_match_threshold: float = 0.7
  rpn_unmatched_threshold: float = 0.3
  rpn_batch_size_per_im: int = 256
  rpn_fg_fraction: float = 0.5
  mask_crop_size: int = 112
@dataclasses.dataclass
class DataConfig(cfg.DataConfig):
  """Input pipeline config (used for both training and evaluation)."""
  input_path: str = ''
  global_batch_size: int = 0
  is_training: bool = False
  dtype: str = 'bfloat16'
  decoder: common.DataDecoder = common.DataDecoder()
  parser: Parser = Parser()
  shuffle_buffer_size: int = 10000
  file_type: str = 'tfrecord'
  drop_remainder: bool = True
  # Number of examples in the data set, it's used to create the annotation file.
  num_examples: int = -1
@dataclasses.dataclass
class Anchor(hyperparams.Config):
  """Anchor generation config (scales, aspect ratios, base size)."""
  num_scales: int = 1
  aspect_ratios: List[float] = dataclasses.field(
      default_factory=lambda: [0.5, 1.0, 2.0])
  anchor_size: float = 8.0
@dataclasses.dataclass
class RPNHead(hyperparams.Config):
  """Region Proposal Network head config."""
  num_convs: int = 1
  num_filters: int = 256
  use_separable_conv: bool = False
@dataclasses.dataclass
class DetectionHead(hyperparams.Config):
  """Detection (box classification / regression) head config."""
  num_convs: int = 4
  num_filters: int = 256
  use_separable_conv: bool = False
  num_fcs: int = 1
  fc_dims: int = 1024
  class_agnostic_bbox_pred: bool = False  # Has to be True for Cascade RCNN.
  # If additional IoUs are passed in 'cascade_iou_thresholds'
  # then ensemble the class probabilities from all heads.
  cascade_class_ensemble: bool = False
@dataclasses.dataclass
class ROIGenerator(hyperparams.Config):
  """Proposal (ROI) generation and NMS config.

  The `test_*` fields appear to be the evaluation-time counterparts of the
  training-time settings — confirm against the ROI generator implementation.
  """
  pre_nms_top_k: int = 2000
  pre_nms_score_threshold: float = 0.0
  pre_nms_min_size_threshold: float = 0.0
  nms_iou_threshold: float = 0.7
  num_proposals: int = 1000
  test_pre_nms_top_k: int = 1000
  test_pre_nms_score_threshold: float = 0.0
  test_pre_nms_min_size_threshold: float = 0.0
  test_nms_iou_threshold: float = 0.7
  test_num_proposals: int = 1000
  use_batched_nms: bool = False
@dataclasses.dataclass
class ROISampler(hyperparams.Config):
  """ROI sampling config for training the detection head."""
  mix_gt_boxes: bool = True
  num_sampled_rois: int = 512
  foreground_fraction: float = 0.25
  foreground_iou_threshold: float = 0.5
  background_iou_high_threshold: float = 0.5
  background_iou_low_threshold: float = 0.0
  # IoU thresholds for additional FRCNN heads in Cascade mode.
  # `foreground_iou_threshold` is the first threshold.
  cascade_iou_thresholds: Optional[List[float]] = None
@dataclasses.dataclass
class ROIAligner(hyperparams.Config):
  """ROI feature-cropping (ROIAlign) config for the detection head."""
  crop_size: int = 7
  sample_offset: float = 0.5
@dataclasses.dataclass
class DetectionGenerator(hyperparams.Config):
  """Final detection post-processing (score threshold + NMS) config."""
  apply_nms: bool = True
  pre_nms_top_k: int = 5000
  pre_nms_score_threshold: float = 0.05
  nms_iou_threshold: float = 0.5
  max_num_detections: int = 100
  use_batched_nms: bool = False
@dataclasses.dataclass
class MaskHead(hyperparams.Config):
  """Mask prediction head config (mask branch)."""
  upsample_factor: int = 2
  num_convs: int = 4
  num_filters: int = 256
  use_separable_conv: bool = False
  class_agnostic: bool = False
@dataclasses.dataclass
class MaskSampler(hyperparams.Config):
  """Sampling config for mask-branch training targets."""
  num_sampled_masks: int = 128
@dataclasses.dataclass
class MaskROIAligner(hyperparams.Config):
  """ROI feature-cropping (ROIAlign) config for the mask branch."""
  crop_size: int = 14
  sample_offset: float = 0.5
@dataclasses.dataclass
class MaskRCNN(hyperparams.Config):
  """Mask R-CNN model config: backbone, decoder, heads and post-processing.

  The `mask_*` fields may be set to None to disable the mask branch
  (see the Faster R-CNN experiment factory below).
  """
  num_classes: int = 0
  input_size: List[int] = dataclasses.field(default_factory=list)
  min_level: int = 2
  max_level: int = 6
  anchor: Anchor = Anchor()
  include_mask: bool = True
  backbone: backbones.Backbone = backbones.Backbone(
      type='resnet', resnet=backbones.ResNet())
  decoder: decoders.Decoder = decoders.Decoder(
      type='fpn', fpn=decoders.FPN())
  rpn_head: RPNHead = RPNHead()
  detection_head: DetectionHead = DetectionHead()
  roi_generator: ROIGenerator = ROIGenerator()
  roi_sampler: ROISampler = ROISampler()
  roi_aligner: ROIAligner = ROIAligner()
  detection_generator: DetectionGenerator = DetectionGenerator()
  mask_head: Optional[MaskHead] = MaskHead()
  mask_sampler: Optional[MaskSampler] = MaskSampler()
  mask_roi_aligner: Optional[MaskROIAligner] = MaskROIAligner()
  norm_activation: common.NormActivation = common.NormActivation(
      norm_momentum=0.997,
      norm_epsilon=0.0001,
      use_sync_bn=True)
@dataclasses.dataclass
class Losses(hyperparams.Config):
  """Loss hyperparameters: Huber deltas, L2 decay and per-loss weights."""
  rpn_huber_loss_delta: float = 1. / 9.
  frcnn_huber_loss_delta: float = 1.
  l2_weight_decay: float = 0.0
  rpn_score_weight: float = 1.0
  rpn_box_weight: float = 1.0
  frcnn_class_weight: float = 1.0
  frcnn_box_weight: float = 1.0
  mask_weight: float = 1.0
@dataclasses.dataclass
class MaskRCNNTask(cfg.TaskConfig):
  """Top-level Mask R-CNN task config: model, data, losses, checkpointing."""
  model: MaskRCNN = MaskRCNN()
  train_data: DataConfig = DataConfig(is_training=True)
  validation_data: DataConfig = DataConfig(is_training=False,
                                           drop_remainder=False)
  losses: Losses = Losses()
  init_checkpoint: Optional[str] = None
  init_checkpoint_modules: str = 'all'  # all or backbone
  annotation_file: Optional[str] = None
  per_category_metrics: bool = False
  # If set, we only use masks for the specified class IDs.
  allowed_mask_class_ids: Optional[List[int]] = None
# Base directory (or storage prefix) holding COCO TFRecords and annotations.
COCO_INPUT_PATH_BASE = 'coco'
@exp_factory.register_config_factory('fasterrcnn_resnetfpn_coco')
def fasterrcnn_resnetfpn_coco() -> cfg.ExperimentConfig:
  """COCO object detection with Faster R-CNN.

  The mask branch is disabled (include_mask=False and the three mask_*
  components set to None); the backbone weights are initialized from the
  resnet50_imagenet checkpoint. Trained with SGD + stepwise LR decay and a
  linear warmup.
  """
  steps_per_epoch = 500
  coco_val_samples = 5000
  train_batch_size = 64
  eval_batch_size = 8
  config = cfg.ExperimentConfig(
      runtime=cfg.RuntimeConfig(mixed_precision_dtype='bfloat16'),
      task=MaskRCNNTask(
          init_checkpoint='gs://cloud-tpu-checkpoints/vision-2.0/resnet50_imagenet/ckpt-28080',
          init_checkpoint_modules='backbone',
          annotation_file=os.path.join(COCO_INPUT_PATH_BASE,
                                       'instances_val2017.json'),
          model=MaskRCNN(
              num_classes=91,
              input_size=[1024, 1024, 3],
              include_mask=False,
              mask_head=None,
              mask_sampler=None,
              mask_roi_aligner=None),
          losses=Losses(l2_weight_decay=0.00004),
          train_data=DataConfig(
              input_path=os.path.join(COCO_INPUT_PATH_BASE, 'train*'),
              is_training=True,
              global_batch_size=train_batch_size,
              parser=Parser(
                  aug_rand_hflip=True, aug_scale_min=0.8, aug_scale_max=1.25)),
          validation_data=DataConfig(
              input_path=os.path.join(COCO_INPUT_PATH_BASE, 'val*'),
              is_training=False,
              global_batch_size=eval_batch_size,
              drop_remainder=False)),
      trainer=cfg.TrainerConfig(
          train_steps=22500,
          validation_steps=coco_val_samples // eval_batch_size,
          validation_interval=steps_per_epoch,
          steps_per_loop=steps_per_epoch,
          summary_interval=steps_per_epoch,
          checkpoint_interval=steps_per_epoch,
          optimizer_config=optimization.OptimizationConfig({
              'optimizer': {
                  'type': 'sgd',
                  'sgd': {
                      'momentum': 0.9
                  }
              },
              'learning_rate': {
                  'type': 'stepwise',
                  'stepwise': {
                      'boundaries': [15000, 20000],
                      'values': [0.12, 0.012, 0.0012],
                  }
              },
              'warmup': {
                  'type': 'linear',
                  'linear': {
                      'warmup_steps': 500,
                      'warmup_learning_rate': 0.0067
                  }
              }
          })),
      restrictions=[
          'task.train_data.is_training != None',
          'task.validation_data.is_training != None'
      ])
  return config
@exp_factory.register_config_factory('maskrcnn_resnetfpn_coco')
def maskrcnn_resnetfpn_coco() -> cfg.ExperimentConfig:
  """COCO object detection with Mask R-CNN.

  Same schedule and backbone initialization as `fasterrcnn_resnetfpn_coco`,
  but with the mask branch enabled (include_mask=True).
  """
  steps_per_epoch = 500
  coco_val_samples = 5000
  train_batch_size = 64
  eval_batch_size = 8
  config = cfg.ExperimentConfig(
      runtime=cfg.RuntimeConfig(mixed_precision_dtype='bfloat16'),
      task=MaskRCNNTask(
          init_checkpoint='gs://cloud-tpu-checkpoints/vision-2.0/resnet50_imagenet/ckpt-28080',
          init_checkpoint_modules='backbone',
          annotation_file=os.path.join(COCO_INPUT_PATH_BASE,
                                       'instances_val2017.json'),
          model=MaskRCNN(
              num_classes=91, input_size=[1024, 1024, 3], include_mask=True),
          losses=Losses(l2_weight_decay=0.00004),
          train_data=DataConfig(
              input_path=os.path.join(COCO_INPUT_PATH_BASE, 'train*'),
              is_training=True,
              global_batch_size=train_batch_size,
              parser=Parser(
                  aug_rand_hflip=True, aug_scale_min=0.8, aug_scale_max=1.25)),
          validation_data=DataConfig(
              input_path=os.path.join(COCO_INPUT_PATH_BASE, 'val*'),
              is_training=False,
              global_batch_size=eval_batch_size,
              drop_remainder=False)),
      trainer=cfg.TrainerConfig(
          train_steps=22500,
          validation_steps=coco_val_samples // eval_batch_size,
          validation_interval=steps_per_epoch,
          steps_per_loop=steps_per_epoch,
          summary_interval=steps_per_epoch,
          checkpoint_interval=steps_per_epoch,
          optimizer_config=optimization.OptimizationConfig({
              'optimizer': {
                  'type': 'sgd',
                  'sgd': {
                      'momentum': 0.9
                  }
              },
              'learning_rate': {
                  'type': 'stepwise',
                  'stepwise': {
                      'boundaries': [15000, 20000],
                      'values': [0.12, 0.012, 0.0012],
                  }
              },
              'warmup': {
                  'type': 'linear',
                  'linear': {
                      'warmup_steps': 500,
                      'warmup_learning_rate': 0.0067
                  }
              }
          })),
      restrictions=[
          'task.train_data.is_training != None',
          'task.validation_data.is_training != None'
      ])
  return config
@exp_factory.register_config_factory('maskrcnn_spinenet_coco')
def maskrcnn_spinenet_coco() -> cfg.ExperimentConfig:
  """COCO object detection with Mask R-CNN with SpineNet backbone.

  SpineNet-49 with an identity decoder (SpineNet produces multi-scale
  features itself), trained from scratch (no init_checkpoint). The
  restrictions tie the model's min/max levels to the backbone's.
  """
  steps_per_epoch = 463
  coco_val_samples = 5000
  train_batch_size = 256
  eval_batch_size = 8
  config = cfg.ExperimentConfig(
      runtime=cfg.RuntimeConfig(mixed_precision_dtype='bfloat16'),
      task=MaskRCNNTask(
          annotation_file=os.path.join(COCO_INPUT_PATH_BASE,
                                       'instances_val2017.json'),
          model=MaskRCNN(
              backbone=backbones.Backbone(
                  type='spinenet',
                  spinenet=backbones.SpineNet(
                      model_id='49',
                      min_level=3,
                      max_level=7,
                  )),
              decoder=decoders.Decoder(
                  type='identity', identity=decoders.Identity()),
              anchor=Anchor(anchor_size=3),
              norm_activation=common.NormActivation(use_sync_bn=True),
              num_classes=91,
              input_size=[640, 640, 3],
              min_level=3,
              max_level=7,
              include_mask=True),
          losses=Losses(l2_weight_decay=0.00004),
          train_data=DataConfig(
              input_path=os.path.join(COCO_INPUT_PATH_BASE, 'train*'),
              is_training=True,
              global_batch_size=train_batch_size,
              parser=Parser(
                  aug_rand_hflip=True, aug_scale_min=0.5, aug_scale_max=2.0)),
          validation_data=DataConfig(
              input_path=os.path.join(COCO_INPUT_PATH_BASE, 'val*'),
              is_training=False,
              global_batch_size=eval_batch_size,
              drop_remainder=False)),
      trainer=cfg.TrainerConfig(
          train_steps=steps_per_epoch * 350,
          validation_steps=coco_val_samples // eval_batch_size,
          validation_interval=steps_per_epoch,
          steps_per_loop=steps_per_epoch,
          summary_interval=steps_per_epoch,
          checkpoint_interval=steps_per_epoch,
          optimizer_config=optimization.OptimizationConfig({
              'optimizer': {
                  'type': 'sgd',
                  'sgd': {
                      'momentum': 0.9
                  }
              },
              'learning_rate': {
                  'type': 'stepwise',
                  'stepwise': {
                      'boundaries': [
                          steps_per_epoch * 320, steps_per_epoch * 340
                      ],
                      'values': [0.32, 0.032, 0.0032],
                  }
              },
              'warmup': {
                  'type': 'linear',
                  'linear': {
                      'warmup_steps': 2000,
                      'warmup_learning_rate': 0.0067
                  }
              }
          })),
      restrictions=[
          'task.train_data.is_training != None',
          'task.validation_data.is_training != None',
          'task.model.min_level == task.model.backbone.spinenet.min_level',
          'task.model.max_level == task.model.backbone.spinenet.max_level',
      ])
  return config
@exp_factory.register_config_factory('cascadercnn_spinenet_coco')
def cascadercnn_spinenet_coco() -> cfg.ExperimentConfig:
  """COCO object detection with Cascade R-CNN with SpineNet backbone.

  Extends the SpineNet Mask R-CNN config with cascade heads: extra ROI
  sampling IoU thresholds (0.6, 0.7), class-agnostic box prediction with
  class-probability ensembling across heads, swish activation, stronger
  scale jitter (0.1-2.5) and a longer (500-epoch) schedule.
  """
  steps_per_epoch = 463
  coco_val_samples = 5000
  train_batch_size = 256
  eval_batch_size = 8
  config = cfg.ExperimentConfig(
      runtime=cfg.RuntimeConfig(mixed_precision_dtype='bfloat16'),
      task=MaskRCNNTask(
          annotation_file=os.path.join(COCO_INPUT_PATH_BASE,
                                       'instances_val2017.json'),
          model=MaskRCNN(
              backbone=backbones.Backbone(
                  type='spinenet',
                  spinenet=backbones.SpineNet(
                      model_id='49',
                      min_level=3,
                      max_level=7,
                  )),
              decoder=decoders.Decoder(
                  type='identity', identity=decoders.Identity()),
              roi_sampler=ROISampler(cascade_iou_thresholds=[0.6, 0.7]),
              detection_head=DetectionHead(
                  class_agnostic_bbox_pred=True, cascade_class_ensemble=True),
              anchor=Anchor(anchor_size=3),
              norm_activation=common.NormActivation(
                  use_sync_bn=True, activation='swish'),
              num_classes=91,
              input_size=[640, 640, 3],
              min_level=3,
              max_level=7,
              include_mask=True),
          losses=Losses(l2_weight_decay=0.00004),
          train_data=DataConfig(
              input_path=os.path.join(COCO_INPUT_PATH_BASE, 'train*'),
              is_training=True,
              global_batch_size=train_batch_size,
              parser=Parser(
                  aug_rand_hflip=True, aug_scale_min=0.1, aug_scale_max=2.5)),
          validation_data=DataConfig(
              input_path=os.path.join(COCO_INPUT_PATH_BASE, 'val*'),
              is_training=False,
              global_batch_size=eval_batch_size,
              drop_remainder=False)),
      trainer=cfg.TrainerConfig(
          train_steps=steps_per_epoch * 500,
          validation_steps=coco_val_samples // eval_batch_size,
          validation_interval=steps_per_epoch,
          steps_per_loop=steps_per_epoch,
          summary_interval=steps_per_epoch,
          checkpoint_interval=steps_per_epoch,
          optimizer_config=optimization.OptimizationConfig({
              'optimizer': {
                  'type': 'sgd',
                  'sgd': {
                      'momentum': 0.9
                  }
              },
              'learning_rate': {
                  'type': 'stepwise',
                  'stepwise': {
                      'boundaries': [
                          steps_per_epoch * 475, steps_per_epoch * 490
                      ],
                      'values': [0.32, 0.032, 0.0032],
                  }
              },
              'warmup': {
                  'type': 'linear',
                  'linear': {
                      'warmup_steps': 2000,
                      'warmup_learning_rate': 0.0067
                  }
              }
          })),
      restrictions=[
          'task.train_data.is_training != None',
          'task.validation_data.is_training != None',
          'task.model.min_level == task.model.backbone.spinenet.min_level',
          'task.model.max_level == task.model.backbone.spinenet.max_level',
      ])
  return config
| 35.44466
| 95
| 0.614824
|
794b6e18ceabfb48cdb68a340ddc467afba233e8
| 1,171
|
py
|
Python
|
grokking-the-coding-interview/two-pointers/Dutch-National-Flag-Problem-(medium).py
|
huandrew99/LeetCode
|
aa36b48d06100ce5f0bc64c789a906ec29409440
|
[
"MIT"
] | 36
|
2021-12-23T15:44:41.000Z
|
2022-03-31T04:26:26.000Z
|
grokking-the-coding-interview/two-pointers/Dutch-National-Flag-Problem-(medium).py
|
wzy0766/LeetCode-1
|
3070e672c519e8af74966811b8058a9baef8c0bc
|
[
"MIT"
] | null | null | null |
grokking-the-coding-interview/two-pointers/Dutch-National-Flag-Problem-(medium).py
|
wzy0766/LeetCode-1
|
3070e672c519e8af74966811b8058a9baef8c0bc
|
[
"MIT"
] | 11
|
2022-02-26T22:41:26.000Z
|
2022-03-02T07:18:41.000Z
|
"""
LC 75
Given an array containing 0s, 1s and 2s, sort the array in-place. You should treat numbers of the array as objects, hence, we can’t count 0s, 1s, and 2s to recreate the array.
The flag of the Netherlands consists of three colors: red, white and blue; and since our input array also consists of three different numbers that is why it is called Dutch National Flag problem.
Example 1:
Input: [1, 0, 2, 1, 0]
Output: [0, 0, 1, 1, 2]
Example 2:
Input: [2, 2, 0, 1, 2, 0]
Output: [0, 0, 1, 2, 2, 2]
"""
def dutch_flag_sort(arr):
    """Sort an array of 0s, 1s and 2s in place (Dutch national flag).

    Single pass, O(n) time / O(1) space. `low` is the next slot for a 0,
    `high` the next slot for a 2; everything in arr[low:cur] is 1.
    Returns the same list for convenience.
    """
    low, high = 0, len(arr) - 1
    cur = 0
    while cur <= high:  # elements beyond `high` are already placed 2s
        if arr[cur] == 0:
            arr[cur], arr[low] = arr[low], arr[cur]
            low += 1
            cur += 1  # swapped-in element came from <= cur, already examined
        elif arr[cur] == 2:
            arr[cur], arr[high] = arr[high], arr[cur]
            high -= 1  # don't advance cur: the swapped-in element is new
        else:
            cur += 1
    return arr
def swap(arr, i, j):
    """Exchange the elements at indices *i* and *j* of *arr* in place."""
    tmp = arr[i]
    arr[i] = arr[j]
    arr[j] = tmp
def main():
    """Demo driver: sort two sample arrays in place and print them."""
    for sample in ([1, 0, 2, 1, 0], [2, 2, 0, 1, 2, 0]):
        dutch_flag_sort(sample)
        print(sample)
main()
"""
Time O(N)
Space O(1)
"""
| 19.847458
| 195
| 0.537148
|
794b6e2a512370e58c33c27444e248a4b24a9968
| 814
|
py
|
Python
|
examples/idioms/programs/177.2725-find-files-with-a-given-list-of-filename-extensions.py
|
laowantong/paroxython
|
4626798a60eeaa765dbfab9e63e04030c9fcb1d0
|
[
"MIT"
] | 31
|
2020-05-02T13:34:26.000Z
|
2021-06-06T17:25:52.000Z
|
examples/idioms/programs/177.2725-find-files-with-a-given-list-of-filename-extensions.py
|
laowantong/paroxython
|
4626798a60eeaa765dbfab9e63e04030c9fcb1d0
|
[
"MIT"
] | 108
|
2019-11-18T19:41:52.000Z
|
2022-03-18T13:58:17.000Z
|
examples/idioms/programs/177.2725-find-files-with-a-given-list-of-filename-extensions.py
|
laowantong/paroxython
|
4626798a60eeaa765dbfab9e63e04030c9fcb1d0
|
[
"MIT"
] | 4
|
2020-05-19T08:57:44.000Z
|
2020-09-21T08:53:46.000Z
|
"""Find files with a given list of filename extensions.
Construct a list _L that contains all filenames that have the extension ".jpg" , ".jpeg" or ".png" in directory _D and all its subdirectories.
Source: Bart
"""
# Implementation author: âne O' nym
# Created on 2019-09-26T14:12:32.0025Z
# Last modified on 2019-09-26T14:13:11.52677Z
# Version 2
# * list comprehension
# * iterate over all files and all directories in tree under _D (os module)
# * iterate over all files found
# * filter with regex matching the end of the filename (re module)
# * regex is cached, but may be compiled beforehands
import re
import os
# Walk the tree and keep "dir/name" paths whose filename ends in
# .jpg / .jpeg / .png (case-sensitive).
# NOTE(review): `D` is a free variable here (the root directory, `_D` in
# the task statement) -- it must be bound before this snippet runs.
filtered_files = [
    "{}/{}".format(dirpath, filename)
    for dirpath, _, filenames in os.walk(D)
    for filename in filenames
    if re.match(r"^.*\.(?:jpg|jpeg|png)$", filename)
]
| 29.071429
| 143
| 0.710074
|
794b6f74fe9db7b7704d3cb61494c4545c71d00b
| 320
|
py
|
Python
|
config.py
|
muhozi/WeConnect
|
46825d6ae27a7addbe8f7f2e298f5ee07cd9cf5f
|
[
"MIT"
] | 3
|
2018-04-07T07:39:19.000Z
|
2018-04-13T14:15:26.000Z
|
config.py
|
muhozi/WeConnect
|
46825d6ae27a7addbe8f7f2e298f5ee07cd9cf5f
|
[
"MIT"
] | 8
|
2018-03-01T21:11:05.000Z
|
2022-03-21T22:16:43.000Z
|
config.py
|
muhozi/WeConnect
|
46825d6ae27a7addbe8f7f2e298f5ee07cd9cf5f
|
[
"MIT"
] | 3
|
2018-04-24T03:34:44.000Z
|
2018-07-19T09:53:08.000Z
|
"""
Flask configurations and other configurations
"""
import os
from dotenv import load_dotenv
# Load Configs from .env
load_dotenv()
# App Directory
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
JSON_SORT_KEYS = False
# Configs loaded from env
SECRET_KEY = os.getenv('SECRET_KEY')
DEBUG = os.getenv('DEBUG')
| 20
| 53
| 0.7625
|
794b6f99d9ca2f94628b3511ef6ba1969e900463
| 952
|
py
|
Python
|
example/demo/frontend_views.py
|
aldslvda/django-echarts
|
835032def8c67c5ff81824be0c9a6a776334c3e9
|
[
"MIT"
] | 181
|
2017-07-30T04:37:41.000Z
|
2022-03-27T04:58:04.000Z
|
example/demo/frontend_views.py
|
aldslvda/django-echarts
|
835032def8c67c5ff81824be0c9a6a776334c3e9
|
[
"MIT"
] | 22
|
2017-07-30T11:49:34.000Z
|
2022-03-28T12:44:07.000Z
|
example/demo/frontend_views.py
|
aldslvda/django-echarts
|
835032def8c67c5ff81824be0c9a6a776334c3e9
|
[
"MIT"
] | 57
|
2017-08-12T09:47:09.000Z
|
2022-03-28T07:20:53.000Z
|
# coding=utf8
from django.views.generic.base import TemplateView
from django_echarts.views.frontend import EChartsFrontView
from .demo_data import FACTORY
class IndexView(TemplateView):
    """Renders the demo landing page (index.html)."""
    template_name = 'index.html'
class FrontendEchartsTemplate(TemplateView):
    """Renders the frontend charts demo page (frontend_charts.html)."""
    template_name = 'frontend_charts.html'
class SimpleBarView(EChartsFrontView):
    """Frontend view backed by the 'bar' chart from the demo data factory."""
    def get_echarts_instance(self, **kwargs):
        return FACTORY.create('bar')
class SimpleKLineView(EChartsFrontView):
    """Frontend view backed by the 'kline' chart from the demo data factory."""
    def get_echarts_instance(self, **kwargs):
        return FACTORY.create('kline')
class SimpleMapView(EChartsFrontView):
    """Frontend view backed by the 'map' chart from the demo data factory."""
    def get_echarts_instance(self, **kwargs):
        return FACTORY.create('map')
class SimplePieView(EChartsFrontView):
    """Frontend view backed by the 'pie' chart from the demo data factory."""
    def get_echarts_instance(self, **kwargs):
        return FACTORY.create('pie')
class WordCloudView(EChartsFrontView):
    """Frontend view backed by the 'word_cloud' chart from the demo factory."""
    # NOTE(review): signature differs from the sibling views (*args added);
    # confirm whether the base class passes positional arguments here.
    def get_echarts_instance(self, *args, **kwargs):
        return FACTORY.create('word_cloud')
| 23.219512
| 58
| 0.74895
|
794b7032cd65cfd00551ff4bea6e87ad40383356
| 1,362
|
py
|
Python
|
vsts/vsts/maven/v4_1/models/maven_pom_parent.py
|
kenkuo/azure-devops-python-api
|
9e920bd25e938fa89ff7f60153e5b9e113ca839d
|
[
"MIT"
] | null | null | null |
vsts/vsts/maven/v4_1/models/maven_pom_parent.py
|
kenkuo/azure-devops-python-api
|
9e920bd25e938fa89ff7f60153e5b9e113ca839d
|
[
"MIT"
] | null | null | null |
vsts/vsts/maven/v4_1/models/maven_pom_parent.py
|
kenkuo/azure-devops-python-api
|
9e920bd25e938fa89ff7f60153e5b9e113ca839d
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .maven_pom_gav import MavenPomGav
class MavenPomParent(MavenPomGav):
    """MavenPomParent.

    Maven POM parent reference: the parent's GAV coordinates (handled by
    the MavenPomGav base class) plus an optional relative path to the
    parent POM file.

    :param artifact_id:
    :type artifact_id: str
    :param group_id:
    :type group_id: str
    :param version:
    :type version: str
    :param relative_path:
    :type relative_path: str
    """

    # Maps Python attribute names to serialized field names and types.
    _attribute_map = {
        'artifact_id': {'key': 'artifactId', 'type': 'str'},
        'group_id': {'key': 'groupId', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'relative_path': {'key': 'relativePath', 'type': 'str'}
    }

    def __init__(self, artifact_id=None, group_id=None, version=None, relative_path=None):
        # GAV fields are initialized by the base class; only relative_path
        # is new on this subclass.
        super(MavenPomParent, self).__init__(artifact_id=artifact_id, group_id=group_id, version=version)
        self.relative_path = relative_path
| 38.914286
| 105
| 0.534508
|
794b710cfed0d731360bdc8ffd638ae52ead7b00
| 165
|
py
|
Python
|
keys.py
|
Anamay-Agnihotri/Twitter_API
|
c50fce5f7de00e6d791b3e9dfae8f16d93ae642c
|
[
"MIT"
] | null | null | null |
keys.py
|
Anamay-Agnihotri/Twitter_API
|
c50fce5f7de00e6d791b3e9dfae8f16d93ae642c
|
[
"MIT"
] | null | null | null |
keys.py
|
Anamay-Agnihotri/Twitter_API
|
c50fce5f7de00e6d791b3e9dfae8f16d93ae642c
|
[
"MIT"
] | null | null | null |
# Credential template for external APIs, keyed by service name.
# SECURITY NOTE(review): fill these in locally and never commit real
# tokens to version control.
keychain = {'twitter':{'api_secret':'' ,
                       'api_key':'' ,
                       'access_token':'' ,
                       'access_token_secret':''}}
| 33
| 49
| 0.381818
|
794b7197d5cea3708101e036ae1196c66600082f
| 4,768
|
py
|
Python
|
test/test_python_basic.py
|
kjdev/unit
|
de885e10cb895751d9994e8ab99bdb56da891d3d
|
[
"Apache-2.0"
] | 58
|
2017-09-09T23:55:33.000Z
|
2021-11-25T07:13:36.000Z
|
test/test_python_basic.py
|
kjdev/unit
|
de885e10cb895751d9994e8ab99bdb56da891d3d
|
[
"Apache-2.0"
] | 2
|
2018-10-30T08:29:41.000Z
|
2021-06-25T15:16:29.000Z
|
test/test_python_basic.py
|
kjdev/unit
|
de885e10cb895751d9994e8ab99bdb56da891d3d
|
[
"Apache-2.0"
] | 12
|
2017-09-12T02:52:10.000Z
|
2019-02-20T16:15:13.000Z
|
import unittest
import unit
class TestUnitPythonBasic(unit.TestUnitControl):
    """Configuration-API tests for Unit's Python application support."""

    @classmethod
    def setUpClass(cls):
        # FIX: setUpClass must be a classmethod taking `cls`. The original
        # bare zero-argument `def setUpClass():` only worked by accident of
        # class-attribute access and breaks if resolved via an instance.
        unit.TestUnit().check_modules('python')

    # Minimal application config reused by the tests below.
    conf_app = {
        "app": {
            "type": "python",
            "processes": { "spare": 0 },
            "path": "/app",
            "module": "wsgi"
        }
    }

    # Full config: one listener routed to the application above.
    conf_basic = {
        "listeners": {
            "*:7080": {
                "application": "app"
            }
        },
        "applications": conf_app
    }

    def test_python_get_empty(self):
        self.assertEqual(self.conf_get(),
                         {'listeners': {}, 'applications': {}}, 'empty')

    def test_python_get_prefix_listeners(self):
        self.assertEqual(self.conf_get('/listeners'), {}, 'listeners prefix')

    def test_python_get_prefix_applications(self):
        self.assertEqual(self.conf_get('/applications'), {},
                         'applications prefix')

    def test_python_get_applications(self):
        self.conf(self.conf_app, '/applications')
        conf = self.conf_get()
        self.assertEqual(conf['listeners'], {}, 'listeners')
        self.assertEqual(conf['applications'],
                         {
                             "app": {
                                 "type": "python",
                                 "processes": { "spare": 0 },
                                 "path": "/app",
                                 "module": "wsgi"
                             }
                         },
                         'applications')

    def test_python_get_applications_prefix(self):
        self.conf(self.conf_app, '/applications')
        self.assertEqual(self.conf_get('/applications'),
                         {
                             "app": {
                                 "type": "python",
                                 "processes": { "spare": 0 },
                                 "path": "/app",
                                 "module":"wsgi"
                             }
                         },
                         'applications prefix')

    def test_python_get_applications_prefix_2(self):
        self.conf(self.conf_app, '/applications')
        self.assertEqual(self.conf_get('/applications/app'),
                         {
                             "type": "python",
                             "processes": { "spare": 0 },
                             "path": "/app",
                             "module": "wsgi"
                         },
                         'applications prefix 2')

    def test_python_get_applications_prefix_3(self):
        self.conf(self.conf_app, '/applications')
        self.assertEqual(self.conf_get('/applications/app/type'), 'python',
                         'type')
        self.assertEqual(self.conf_get('/applications/app/processes/spare'), 0,
                         'spare')

    def test_python_get_listeners(self):
        self.conf(self.conf_basic)
        self.assertEqual(self.conf_get()['listeners'],
                         {"*:7080":{"application":"app"}}, 'listeners')

    def test_python_get_listeners_prefix(self):
        self.conf(self.conf_basic)
        self.assertEqual(self.conf_get('/listeners'),
                         {"*:7080":{"application":"app"}}, 'listeners prefix')

    def test_python_get_listeners_prefix_2(self):
        self.conf(self.conf_basic)
        self.assertEqual(self.conf_get('/listeners/*:7080'),
                         {"application":"app"}, 'listeners prefix 2')

    def test_python_change_listener(self):
        self.conf(self.conf_basic)
        self.conf({"*:7081":{"application":"app"}}, '/listeners')
        self.assertEqual(self.conf_get('/listeners'),
                         {"*:7081": {"application":"app"}}, 'change listener')

    def test_python_add_listener(self):
        self.conf(self.conf_basic)
        self.conf({"application":"app"}, '/listeners/*:7082')
        self.assertEqual(self.conf_get('/listeners'),
                         {
                             "*:7080": {
                                 "application": "app"
                             },
                             "*:7082": {
                                 "application": "app"
                             }
                         },
                         'add listener')

    def test_python_change_application(self):
        self.conf(self.conf_basic)
        self.conf('30', '/applications/app/processes/max')
        self.assertEqual(self.conf_get('/applications/app/processes/max'), 30,
                         'change application max')
        self.conf('"/www"', '/applications/app/path')
        self.assertEqual(self.conf_get('/applications/app/path'), '/www',
                         'change application path')

    def test_python_delete(self):
        self.conf(self.conf_basic)
        # An application referenced by a listener must not be deletable.
        self.assertIn('error', self.conf_delete('/applications/app'),
                      'delete app before listener')
        self.assertIn('success', self.conf_delete('/listeners/*:7080'),
                      'delete listener')
        self.assertIn('success', self.conf_delete('/applications/app'),
                      'delete app after listener')
        self.assertIn('error', self.conf_delete('/applications/app'),
                      'delete app again')
if __name__ == '__main__':
unittest.main()
| 31.163399
| 83
| 0.53901
|
794b71d7de7fdcdfca363e5418eb97ee659ca64d
| 82
|
py
|
Python
|
dsfaker/__init__.py
|
pajachiet/dsfaker
|
0e65ba336608c2ccc5e32a541f3b66dfad019b35
|
[
"MIT"
] | 3
|
2017-03-12T22:08:59.000Z
|
2017-05-22T16:57:17.000Z
|
dsfaker/__init__.py
|
pajachiet/dsfaker
|
0e65ba336608c2ccc5e32a541f3b66dfad019b35
|
[
"MIT"
] | 12
|
2017-03-01T10:14:08.000Z
|
2017-04-23T12:15:10.000Z
|
dsfaker/__init__.py
|
pajachiet/dsfaker
|
0e65ba336608c2ccc5e32a541f3b66dfad019b35
|
[
"MIT"
] | 2
|
2017-05-04T15:36:21.000Z
|
2018-02-07T13:49:13.000Z
|
# -*- coding: utf-8 -*-
from .exceptions import *
from . import generators, noise
| 20.5
| 31
| 0.670732
|
794b73a2f0fb491800b0412b14dfb3036b400534
| 6,594
|
py
|
Python
|
web/tests/functional/statistics/test_statistics.py
|
gkunz/codechecker
|
5adc515e83bd16b7c3c45e9ca709969db3212093
|
[
"Apache-2.0"
] | null | null | null |
web/tests/functional/statistics/test_statistics.py
|
gkunz/codechecker
|
5adc515e83bd16b7c3c45e9ca709969db3212093
|
[
"Apache-2.0"
] | null | null | null |
web/tests/functional/statistics/test_statistics.py
|
gkunz/codechecker
|
5adc515e83bd16b7c3c45e9ca709969db3212093
|
[
"Apache-2.0"
] | null | null | null |
#
# -------------------------------------------------------------------------
#
# Part of the CodeChecker project, under the Apache License v2.0 with
# LLVM Exceptions. See LICENSE for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
# -------------------------------------------------------------------------
""" statistics collector feature test. """
from distutils import util
import os
import unittest
import shlex
from libtest import env
from libtest.codechecker import call_command
NO_STATISTICS_MESSAGE = "Statistics collector checkers are not supported"
class TestSkeleton(unittest.TestCase):
    """End-to-end tests for CodeChecker's statistical-analysis options
    (--stats, --stats-collect, --stats-use)."""

    _ccClient = None

    def setUp(self):
        """Prepare the test project: detect --stats support, clean the
        project and regenerate its compilation database."""
        # TEST_WORKSPACE is automatically set by test package __init__.py .
        test_workspace = os.environ['TEST_WORKSPACE']
        test_class = self.__class__.__name__
        print('Running ' + test_class + ' tests in ' + test_workspace)
        # Get the test project configuration from the prepared test workspace.
        self._testproject_data = env.setup_test_proj_cfg(test_workspace)
        self.assertIsNotNone(self._testproject_data)
        # Get the CodeChecker cmd if needed for the tests.
        self._codechecker_cmd = env.codechecker_cmd()
        self.env = env.codechecker_env()
        # Get if the package is able to collect statistics or not.
        cmd = [self._codechecker_cmd, 'analyze', '-h']
        output, _ = call_command(cmd, cwd=test_workspace, env=self.env)
        self.stats_capable = '--stats' in output
        print("'analyze' reported statistics collector-compatibility? " +
              str(self.stats_capable))
        if not self.stats_capable:
            # Allow the environment to force-enable the stats tests.
            # NOTE(review): distutils is deprecated (removed in Python
            # 3.12); consider replacing util.strtobool.
            try:
                self.stats_capable = bool(util.strtobool(
                    os.environ['CC_TEST_FORCE_STATS_CAPABLE']))
            except (ValueError, KeyError):
                pass
        test_project_path = self._testproject_data['project_path']
        test_project_build = shlex.split(self._testproject_data['build_cmd'])
        test_project_clean = shlex.split(self._testproject_data['clean_cmd'])
        # Clean the test project before logging the compiler commands.
        output, err = call_command(test_project_clean,
                                   cwd=test_project_path,
                                   env=self.env)
        print(output)
        print(err)
        # Create compilation log used in the tests.
        log_cmd = [self._codechecker_cmd, 'log', '-o', 'compile_command.json',
                   '-b']
        log_cmd.extend(test_project_build)
        output, err = call_command(log_cmd,
                                   cwd=test_project_path,
                                   env=self.env)
        print(output)
        print(err)

    def test_stats(self):
        """
        Enable statistics collection for the analysis.
        """
        if not self.stats_capable:
            self.skipTest(NO_STATISTICS_MESSAGE)
        test_project_path = self._testproject_data['project_path']
        cmd = [self._codechecker_cmd, 'analyze', '-o', 'reports', '--stats',
               'compile_command.json']
        output, err = call_command(cmd, cwd=test_project_path, env=self.env)
        print(output)
        print(err)
        collect_msg = "Collecting data for statistical analysis."
        self.assertIn(collect_msg, output)

    def test_stats_collect(self):
        """
        Enable statistics collection.
        Without analysis.
        """
        if not self.stats_capable:
            self.skipTest(NO_STATISTICS_MESSAGE)
        test_project_path = self._testproject_data['project_path']
        stats_dir = os.path.join(test_project_path, 'stats')
        cmd = [self._codechecker_cmd, 'analyze', '--stats-collect', stats_dir,
               'compile_command.json', '-o', 'reports']
        output, err = call_command(cmd, cwd=test_project_path, env=self.env)
        print(output)
        print(err)
        # Collection-only mode must not start the actual analysis.
        analyze_msg = "Starting static analysis"
        self.assertNotIn(analyze_msg, output)
        stat_files = os.listdir(stats_dir)
        print(stat_files)
        self.assertIn('SpecialReturn.yaml', stat_files)
        self.assertIn('UncheckedReturn.yaml', stat_files)

    def test_stats_collect_params(self):
        """
        Testing collection parameters
        """
        if not self.stats_capable:
            self.skipTest(NO_STATISTICS_MESSAGE)
        test_project_path = self._testproject_data['project_path']
        stats_dir = os.path.join(test_project_path, 'stats')
        cmd = [self._codechecker_cmd, 'analyze', '--stats-collect', stats_dir,
               'compile_command.json',
               '--stats-min-sample-count', '10',
               '--stats-relevance-threshold', '0.8',
               '-o', 'reports']
        output, err = call_command(cmd, cwd=test_project_path, env=self.env)
        print(output)
        print(err)
        analyze_msg = "Starting static analysis"
        self.assertNotIn(analyze_msg, output)
        stat_files = os.listdir(stats_dir)
        print(stat_files)
        self.assertIn('SpecialReturn.yaml', stat_files)
        self.assertIn('UncheckedReturn.yaml', stat_files)
        # The collected statistics must mention the project's function.
        with open(os.path.join(stats_dir, 'UncheckedReturn.yaml'), 'r',
                  encoding="utf-8", errors="ignore") as statfile:
            unchecked_stats = statfile.read()
        self.assertIn("c:@F@readFromFile#*1C#*C#", unchecked_stats)

    def test_stats_use(self):
        """
        Use the already collected statistics for the analysis.
        """
        if not self.stats_capable:
            self.skipTest(NO_STATISTICS_MESSAGE)
        test_project_path = self._testproject_data['project_path']
        stats_dir = os.path.join(test_project_path, 'stats')
        cmd = [self._codechecker_cmd, 'analyze', '--stats-collect', stats_dir,
               'compile_command.json', '-o', 'reports']
        out, err = call_command(cmd, cwd=test_project_path, env=self.env)
        print(out)
        print(err)
        analyze_msg = "Starting static analysis"
        self.assertNotIn(analyze_msg, out)
        # A second run consuming the stats must actually analyze.
        cmd = [self._codechecker_cmd, 'analyze', '--stats-use', stats_dir,
               'compile_command.json', '-o', 'reports']
        output, err = call_command(cmd, cwd=test_project_path, env=self.env)
        print(output)
        print(err)
        self.assertIn(analyze_msg, output)
        stat_files = os.listdir(stats_dir)
        self.assertIn('SpecialReturn.yaml', stat_files)
        self.assertIn('UncheckedReturn.yaml', stat_files)
| 37.465909
| 78
| 0.614801
|
794b741dcebb245ab793391f8bba2c8436f0ec91
| 963
|
py
|
Python
|
utils/generate_processing.py
|
strakam/PyEasyGraphics
|
57a586aa92385d26725d4ec3d61b2bbbe970195d
|
[
"BSD-3-Clause"
] | 5
|
2019-09-23T05:15:47.000Z
|
2021-01-17T08:06:47.000Z
|
utils/generate_processing.py
|
strakam/PyEasyGraphics
|
57a586aa92385d26725d4ec3d61b2bbbe970195d
|
[
"BSD-3-Clause"
] | 3
|
2019-05-03T05:25:17.000Z
|
2021-04-15T04:53:16.000Z
|
utils/generate_processing.py
|
strakam/PyEasyGraphics
|
57a586aa92385d26725d4ec3d61b2bbbe970195d
|
[
"BSD-3-Clause"
] | 4
|
2019-05-04T13:42:40.000Z
|
2021-04-15T10:38:48.000Z
|
from easygraphics.processing import ProcessingWidget
from easygraphics.image import Image
import easygraphics
import inspect

# Emit delegating wrapper functions for the processing module: every public
# easygraphics function that is also an Image method gets a printed stub
# forwarding to `_widget.get_canvas()`.
image_funcs = dir(Image)

for func in easygraphics.__all__:
    if func not in image_funcs:
        continue
    if func.startswith("_"):
        continue
    # getattr() instead of eval(): same attribute lookup without evaluating
    # a constructed code string.
    fun = getattr(easygraphics, func)
    if not callable(fun):
        continue
    sig = inspect.signature(fun)
    parameters = []
    for param in sig.parameters:
        if param == 'self':
            continue
        # Re-spell variadic parameters so the generated call forwards them.
        if param == 'args':
            parameters.append('*args')
        elif param == 'kwargs':
            parameters.append('**kwargs')
        else:
            parameters.append(param)
    print(f"def {func}{sig}:")
    call = f"_widget.get_canvas().{func}({','.join(parameters)})"
    # Only emit `return` when the wrapped function declares a return type.
    if sig.return_annotation is not inspect.Signature.empty:
        print(f"    return {call}")
    else:
        print(f"    {call}")
    print("")
| 31.064516
| 80
| 0.603323
|
794b744bad2d7fbe3c96d2a840d39863819d87ff
| 540
|
py
|
Python
|
Gathered CTF writeups/ptr-yudai-writeups/2019/PragyanCTF_2019/Feed_me/solve.py
|
mihaid-b/CyberSakura
|
f60e6b6bfd6898c69b84424b080090ae98f8076c
|
[
"MIT"
] | 1
|
2022-03-27T06:00:41.000Z
|
2022-03-27T06:00:41.000Z
|
Gathered CTF writeups/ptr-yudai-writeups/2019/PragyanCTF_2019/Feed_me/solve.py
|
mihaid-b/CyberSakura
|
f60e6b6bfd6898c69b84424b080090ae98f8076c
|
[
"MIT"
] | null | null | null |
Gathered CTF writeups/ptr-yudai-writeups/2019/PragyanCTF_2019/Feed_me/solve.py
|
mihaid-b/CyberSakura
|
f60e6b6bfd6898c69b84424b080090ae98f8076c
|
[
"MIT"
] | 1
|
2022-03-27T06:01:42.000Z
|
2022-03-27T06:01:42.000Z
|
from ptrlib import *

# Read the three pairwise sums from the service.
sock = Socket("127.0.0.1", 9800)
sock.recvline()
r1 = int(sock.recvuntil(" ;").rstrip(b";"))
r2 = int(sock.recvuntil(" ;").rstrip(b";"))
r3 = int(sock.recvuntil(" ;").rstrip(b";"))
dump("(r1, r2, r3) = ({}, {}, {})".format(r1, r2, r3))

# Solve the linear system for the three unknowns.
x = (r1 + r2 - r3) // 2
y = r2 - x
val = r1 - x

# First two numbers are padded with '-' to fixed 10-character fields;
# the last one is sent as-is.
payload = "".join(str(number).ljust(10, "-") for number in (val, x)) + str(y)
_ = input()
sock.sendline(payload)
sock.interactive()
| 20.769231
| 54
| 0.581481
|
794b753a65eb463e5f0d93d06fb4aaa5e8ca1833
| 2,253
|
py
|
Python
|
pytorch_block_sparse/tests/test_basic.py
|
adbmd/pytorch_block_sparse
|
0985083851a5708cfb3adf50da19860f467e51ae
|
[
"BSD-3-Clause"
] | 1
|
2020-09-10T16:24:19.000Z
|
2020-09-10T16:24:19.000Z
|
pytorch_block_sparse/tests/test_basic.py
|
adbmd/pytorch_block_sparse
|
0985083851a5708cfb3adf50da19860f467e51ae
|
[
"BSD-3-Clause"
] | null | null | null |
pytorch_block_sparse/tests/test_basic.py
|
adbmd/pytorch_block_sparse
|
0985083851a5708cfb3adf50da19860f467e51ae
|
[
"BSD-3-Clause"
] | null | null | null |
from unittest import TestCase
import torch
from torch import tensor
import unittest
from pytorch_block_sparse import BlockSparseMatrix
class TestFun(TestCase):
    """Basic sanity checks for BlockSparseMatrix on a CUDA device."""

    def test0(self):
        """Block index structures match hand-computed references."""
        tests = [
            dict(size=[128, 64],
                 blocks=[(0, 0), (1, 0), (2, 0), (0, 1)],
                 row_start_ends_a=tensor([0, 2, 3, 4, 4]),
                 cols_a=tensor([[0, 0], [1, 1], [0, 2], [0, 3]]),
                 col_start_ends_b=tensor([0, 3, 4]),
                 rows_b=tensor([[0, 0], [1, 2], [2, 3], [0, 1]]))
        ]
        block_shape = (32, 32)
        device = "cuda"
        for info in tests:
            rows, cols = info["size"]
            bsm = BlockSparseMatrix.randn((rows, cols), None,
                                          blocks=info["blocks"],
                                          block_shape=block_shape,
                                          device=device)
            # Only the row*/col* entries of the dict are reference tensors.
            for key, reference in info.items():
                if "row" not in key and "col" not in key:
                    continue
                actual = getattr(bsm, key)
                expected = reference.to(device=device, dtype=torch.int32)
                if not (actual == expected).all():
                    raise Exception(
                        f"Non matching attribute {key}:\n{actual}"
                        f"\n!=\n{expected} (ref).")

    def test1(self):
        """to_dense / from_dense round-trip reproduces the matrix."""
        block_shape = (32, 32)
        device = "cuda"
        for size in [(32, 32), (64, 32), (32, 64), (64, 64), (256, 64)]:
            print(f"size={size}")
            block_count = size[0] * size[1] // (block_shape[0] * block_shape[1])
            bsm = BlockSparseMatrix.randn(size, block_count,
                                          block_shape=block_shape,
                                          device=device)
            dense = bsm.to_dense()
            bsm.check_with_dense(dense)
            rebuilt = BlockSparseMatrix.from_dense(dense, block_shape,
                                                   block_count=None)
            rebuilt.check_with_dense(dense)
            dense2 = rebuilt.to_dense()
            if not (dense == dense2).all():
                print((dense == dense2)[::8, ::8])
                raise Exception("Non matching matrices, "
                                "BlockSparseMatrix.from_dense is not correct.")

    def test2(self):
        """A zero-initialized matrix is hashable."""
        bsm = BlockSparseMatrix.zeros((32, 32), 1, block_shape=(32, 32),
                                      device="cuda")
        hash(bsm)
# Allow running this test module directly (outside of a test runner).
if __name__ == '__main__':
    unittest.main()
| 36.33871
| 122
| 0.510874
|
794b753e68107b20757c9e8bab2c12689447da11
| 790
|
py
|
Python
|
source/ImageGeneration/imageTest.py
|
Jacktavitt/navigate_building
|
cfa6aa2fffdf107a619b2627f9a0b0397cd1a8c2
|
[
"MIT"
] | null | null | null |
source/ImageGeneration/imageTest.py
|
Jacktavitt/navigate_building
|
cfa6aa2fffdf107a619b2627f9a0b0397cd1a8c2
|
[
"MIT"
] | null | null | null |
source/ImageGeneration/imageTest.py
|
Jacktavitt/navigate_building
|
cfa6aa2fffdf107a619b2627f9a0b0397cd1a8c2
|
[
"MIT"
] | null | null | null |
import CustomImage as ci
import numpy as np
import random
import cv2

# Build a test image in four stages: lines -> rectangles -> noise -> blur,
# each stage starting from a copy of the previous one.
background = np.full((512, 512, 3), (120, 120, 120), dtype=np.uint8)

stage_lines = ci.GeneratedImage(background, color=True)
stage_lines.random_lines(num_lines=13)

stage_rects = ci.GeneratedImage(stage_lines, copy=True)
stage_rects.random_rectangles(num_recs=4)

stage_noise = ci.GeneratedImage(stage_rects, copy=True)
stage_noise.salt_and_pepper(.03)

stage_blur = ci.GeneratedImage(stage_noise, copy=True)
stage_blur.blur()

# Concatenate all four stages horizontally, show at half size, then save.
strip = cv2.hconcat([stage_lines.image, stage_rects.image,
                     stage_noise.image, stage_blur.image])
total = ci.GeneratedImage(strip)
total.resize(percentage=0.5)
total.show()
total.save('initial_generation_test.png')
| 25.483871
| 92
| 0.737975
|
794b7540f14368e1eac2d5072798b24ed7684187
| 17,864
|
py
|
Python
|
src/python/grpcio_tests/tests/_result.py
|
danzh1989/grpc
|
c2bc59b2b0de079c64a2e544354b699685ea2853
|
[
"BSD-3-Clause"
] | 1
|
2021-04-27T20:09:23.000Z
|
2021-04-27T20:09:23.000Z
|
src/python/grpcio_tests/tests/_result.py
|
danzh1989/grpc
|
c2bc59b2b0de079c64a2e544354b699685ea2853
|
[
"BSD-3-Clause"
] | null | null | null |
src/python/grpcio_tests/tests/_result.py
|
danzh1989/grpc
|
c2bc59b2b0de079c64a2e544354b699685ea2853
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import

import collections
import io
import itertools
import traceback
import unittest
from xml.etree import ElementTree

import coverage
from six import moves

from tests import _loader
class CaseResult(
        collections.namedtuple('CaseResult', [
            'id', 'name', 'kind', 'stdout', 'stderr', 'skip_reason', 'traceback'
        ])):
    """A serializable result of a single test case.

    Attributes:
      id (object): Any serializable object used to denote the identity of this
        test case.
      name (str or None): A human-readable name of the test case.
      kind (CaseResult.Kind): The kind of test result.
      stdout (object or None): Output on stdout, or None if nothing was
        captured.
      stderr (object or None): Output on stderr, or None if nothing was
        captured.
      skip_reason (object or None): The reason the test was skipped. Must be
        something if self.kind is CaseResult.Kind.SKIP, else None.
      traceback (object or None): The traceback of the test. Must be something
        if self.kind is CaseResult.Kind.{ERROR, FAILURE, EXPECTED_FAILURE},
        else None.
    """

    class Kind:
        UNTESTED = 'untested'
        RUNNING = 'running'
        ERROR = 'error'
        FAILURE = 'failure'
        SUCCESS = 'success'
        SKIP = 'skip'
        EXPECTED_FAILURE = 'expected failure'
        UNEXPECTED_SUCCESS = 'unexpected success'

    def __new__(cls,
                id=None,
                name=None,
                kind=None,
                stdout=None,
                stderr=None,
                skip_reason=None,
                traceback=None):
        """Helper keyword constructor for the namedtuple.

        See this class' attributes for information on the arguments."""
        assert id is not None
        assert name is None or isinstance(name, str)
        # Validate that the payload required by this kind was supplied.
        # Identity comparison is deliberate: kinds are the Kind singletons.
        if kind is CaseResult.Kind.SKIP:
            assert skip_reason is not None
        elif (kind is CaseResult.Kind.ERROR or
              kind is CaseResult.Kind.FAILURE or
              kind is CaseResult.Kind.EXPECTED_FAILURE):
            assert traceback is not None
        else:
            # The remaining kinds carry no mandatory payload; any other value
            # is an invalid kind.
            assert (kind is CaseResult.Kind.UNTESTED or
                    kind is CaseResult.Kind.RUNNING or
                    kind is CaseResult.Kind.SUCCESS or
                    kind is CaseResult.Kind.UNEXPECTED_SUCCESS)
        return super(CaseResult, cls).__new__(cls, id, name, kind, stdout,
                                              stderr, skip_reason, traceback)

    def updated(self,
                name=None,
                kind=None,
                stdout=None,
                stderr=None,
                skip_reason=None,
                traceback=None):
        """Get a new validated CaseResult with the fields updated.

        See this class' attributes for information on the arguments."""
        # Passing each field through the constructor re-runs validation.
        return CaseResult(
            id=self.id,
            name=self.name if name is None else name,
            kind=self.kind if kind is None else kind,
            stdout=self.stdout if stdout is None else stdout,
            stderr=self.stderr if stderr is None else stderr,
            skip_reason=self.skip_reason if skip_reason is None else skip_reason,
            traceback=self.traceback if traceback is None else traceback)
class AugmentedResult(unittest.TestResult):
    """unittest.TestResult that keeps track of additional information.

    Stores a CaseResult for every test case seen, capturing information the
    standard unittest result object does not (e.g. captured output streams).

    Attributes:
      id_map (callable): A unary callable mapping unittest.TestCase objects to
        unique identifiers.
      cases (dict): A dictionary mapping from the identifiers returned by
        id_map to CaseResult objects corresponding to those IDs.
    """

    def __init__(self, id_map):
        """Initialize the object with an identifier mapping.

        Arguments:
          id_map (callable): Corresponds to the attribute `id_map`."""
        super(AugmentedResult, self).__init__()
        self.id_map = id_map
        self.cases = None

    def _update_case(self, test, **updates):
        # Shared bookkeeping for the add* callbacks below.
        key = self.id_map(test)
        self.cases[key] = self.cases[key].updated(**updates)

    def startTestRun(self):
        """See unittest.TestResult.startTestRun."""
        super(AugmentedResult, self).startTestRun()
        self.cases = dict()

    def stopTestRun(self):
        """See unittest.TestResult.stopTestRun."""
        super(AugmentedResult, self).stopTestRun()

    def startTest(self, test):
        """See unittest.TestResult.startTest."""
        super(AugmentedResult, self).startTest(test)
        key = self.id_map(test)
        self.cases[key] = CaseResult(
            id=key, name=test.id(), kind=CaseResult.Kind.RUNNING)

    def addError(self, test, error):
        """See unittest.TestResult.addError."""
        super(AugmentedResult, self).addError(test, error)
        self._update_case(test, kind=CaseResult.Kind.ERROR, traceback=error)

    def addFailure(self, test, error):
        """See unittest.TestResult.addFailure."""
        super(AugmentedResult, self).addFailure(test, error)
        self._update_case(test, kind=CaseResult.Kind.FAILURE, traceback=error)

    def addSuccess(self, test):
        """See unittest.TestResult.addSuccess."""
        super(AugmentedResult, self).addSuccess(test)
        self._update_case(test, kind=CaseResult.Kind.SUCCESS)

    def addSkip(self, test, reason):
        """See unittest.TestResult.addSkip."""
        super(AugmentedResult, self).addSkip(test, reason)
        self._update_case(test, kind=CaseResult.Kind.SKIP, skip_reason=reason)

    def addExpectedFailure(self, test, error):
        """See unittest.TestResult.addExpectedFailure."""
        super(AugmentedResult, self).addExpectedFailure(test, error)
        self._update_case(
            test, kind=CaseResult.Kind.EXPECTED_FAILURE, traceback=error)

    def addUnexpectedSuccess(self, test):
        """See unittest.TestResult.addUnexpectedSuccess."""
        super(AugmentedResult, self).addUnexpectedSuccess(test)
        self._update_case(test, kind=CaseResult.Kind.UNEXPECTED_SUCCESS)

    def set_output(self, test, stdout, stderr):
        """Set the output attributes for the CaseResult of a test.

        Args:
          test (unittest.TestCase): The TestCase to set the outputs of.
          stdout (bytes): Output from stdout; decoded before storing.
          stderr (bytes): Output from stderr; decoded before storing.
        """
        self._update_case(test, stdout=stdout.decode(), stderr=stderr.decode())

    def augmented_results(self, filter):
        """Convenience method to retrieve filtered case results.

        Args:
          filter (callable): A unary predicate to filter over CaseResult
            objects.
        """
        return (case for case in self.cases.values() if filter(case))
class CoverageResult(AugmentedResult):
    """Extension to AugmentedResult adding coverage.py support per test.

    Attributes:
      coverage_context (coverage.Coverage): coverage.py management object,
        live only while a test is running.
    """

    def __init__(self, id_map):
        """See AugmentedResult.__init__."""
        super(CoverageResult, self).__init__(id_map=id_map)
        self.coverage_context = None

    def startTest(self, test):
        """See unittest.TestResult.startTest.

        Additionally initializes and begins code coverage tracking."""
        super(CoverageResult, self).startTest(test)
        # data_suffix=True gives each test run its own coverage data file.
        self.coverage_context = coverage.Coverage(data_suffix=True)
        self.coverage_context.start()

    def stopTest(self, test):
        """See unittest.TestResult.stopTest.

        Additionally stops and deinitializes code coverage tracking."""
        super(CoverageResult, self).stopTest(test)
        context = self.coverage_context
        context.stop()
        context.save()
        self.coverage_context = None

    def stopTestRun(self):
        """See unittest.TestResult.stopTestRun."""
        super(CoverageResult, self).stopTestRun()
        # TODO(atash): Dig deeper into why the following line fails to
        # properly combine coverage data from the Cython plugin.
        #coverage.Coverage().combine()
class _Colors:
    """Namespaced constants for terminal color magic numbers."""
    # ANSI SGR escape sequences used to colorize terminal output.
    HEADER = '\033[95m'  # bright magenta: run header
    INFO = '\033[94m'  # bright blue: informational lines (skips, expected)
    OK = '\033[92m'  # bright green: successes
    WARN = '\033[93m'  # bright yellow: warnings
    FAIL = '\033[91m'  # bright red: errors and failures
    BOLD = '\033[1m'  # bold text
    UNDERLINE = '\033[4m'  # underlined text
    END = '\033[0m'  # reset all attributes
class TerminalResult(CoverageResult):
    """Extension to CoverageResult adding basic terminal reporting."""

    def __init__(self, out, id_map):
        """Initialize the result object.

        Args:
          out (file-like): Output file to which terminal-colored live results
            will be written.
          id_map (callable): See AugmentedResult.__init__.
        """
        super(TerminalResult, self).__init__(id_map=id_map)
        self.out = out

    def _report(self, color, tag, test):
        # One colorized status line per finished test, flushed immediately.
        self.out.write(color + '{} {}\n'.format(tag, test.id()) + _Colors.END)
        self.out.flush()

    def startTestRun(self):
        """See unittest.TestResult.startTestRun."""
        super(TerminalResult, self).startTestRun()
        self.out.write(_Colors.HEADER + 'Testing gRPC Python...\n' +
                       _Colors.END)

    def stopTestRun(self):
        """See unittest.TestResult.stopTestRun."""
        super(TerminalResult, self).stopTestRun()
        self.out.write(summary(self))
        self.out.flush()

    def addError(self, test, error):
        """See unittest.TestResult.addError."""
        super(TerminalResult, self).addError(test, error)
        self._report(_Colors.FAIL, 'ERROR', test)

    def addFailure(self, test, error):
        """See unittest.TestResult.addFailure."""
        super(TerminalResult, self).addFailure(test, error)
        self._report(_Colors.FAIL, 'FAILURE', test)

    def addSuccess(self, test):
        """See unittest.TestResult.addSuccess."""
        super(TerminalResult, self).addSuccess(test)
        self._report(_Colors.OK, 'SUCCESS', test)

    def addSkip(self, test, reason):
        """See unittest.TestResult.addSkip."""
        super(TerminalResult, self).addSkip(test, reason)
        self._report(_Colors.INFO, 'SKIP', test)

    def addExpectedFailure(self, test, error):
        """See unittest.TestResult.addExpectedFailure."""
        super(TerminalResult, self).addExpectedFailure(test, error)
        self._report(_Colors.INFO, 'FAILURE_OK', test)

    def addUnexpectedSuccess(self, test):
        """See unittest.TestResult.addUnexpectedSuccess."""
        super(TerminalResult, self).addUnexpectedSuccess(test)
        self._report(_Colors.INFO, 'UNEXPECTED_OK', test)
def _traceback_string(type, value, trace):
"""Generate a descriptive string of a Python exception traceback.
Args:
type (class): The type of the exception.
value (Exception): The value of the exception.
trace (traceback): Traceback of the exception.
Returns:
str: Formatted exception descriptive string.
"""
buffer = moves.cStringIO()
traceback.print_exception(type, value, trace, file=buffer)
return buffer.getvalue()
def summary(result):
    """A summary string of a result object.

    Args:
      result (AugmentedResult): The result object to get the summary of.

    Returns:
      str: The summary string.
    """
    assert isinstance(result, AugmentedResult)

    def of_kind(kind):
        # Identity comparison mirrors how kinds are assigned in CaseResult.
        return list(
            result.augmented_results(lambda case: case.kind is kind))

    untested = of_kind(CaseResult.Kind.UNTESTED)
    running = of_kind(CaseResult.Kind.RUNNING)
    failures = of_kind(CaseResult.Kind.FAILURE)
    errors = of_kind(CaseResult.Kind.ERROR)
    successes = of_kind(CaseResult.Kind.SUCCESS)
    skips = of_kind(CaseResult.Kind.SKIP)
    expected_failures = of_kind(CaseResult.Kind.EXPECTED_FAILURE)
    unexpected_successes = of_kind(CaseResult.Kind.UNEXPECTED_SUCCESS)
    running_names = [case.name for case in running]
    finished_count = (len(failures) + len(errors) + len(successes) +
                      len(expected_failures) + len(unexpected_successes))
    statistics = ('{finished} tests finished:\n'
                  '\t{successful} successful\n'
                  '\t{unsuccessful} unsuccessful\n'
                  '\t{skipped} skipped\n'
                  '\t{expected_fail} expected failures\n'
                  '\t{unexpected_successful} unexpected successes\n'
                  'Interrupted Tests:\n'
                  '\t{interrupted}\n'.format(
                      finished=finished_count,
                      successful=len(successes),
                      unsuccessful=(len(failures) + len(errors)),
                      skipped=len(skips),
                      expected_fail=len(expected_failures),
                      unexpected_successful=len(unexpected_successes),
                      interrupted=str(running_names)))
    # Per-failure detail: colorized name, traceback, captured output streams.
    failure_template = (
        _Colors.FAIL + '{test_name}' + _Colors.END + '\n' + _Colors.BOLD +
        'traceback:' + _Colors.END + '\n' + '{traceback}\n' + _Colors.BOLD +
        'stdout:' + _Colors.END + '\n' + '{stdout}\n' + _Colors.BOLD +
        'stderr:' + _Colors.END + '\n' + '{stderr}\n')
    tracebacks = '\n\n'.join(
        failure_template.format(
            test_name=case.name,
            traceback=_traceback_string(*case.traceback),
            stdout=case.stdout,
            stderr=case.stderr)
        for case in itertools.chain(failures, errors))
    notes = 'Unexpected successes: {}\n'.format(
        [case.name for case in unexpected_successes])
    return statistics + '\nErrors/Failures: \n' + tracebacks + '\n' + notes
def jenkins_junit_xml(result):
    """An XML tree object that when written is recognizable by Jenkins.

    Args:
      result (AugmentedResult): The result object to get the junit xml output
        of.

    Returns:
      ElementTree.ElementTree: The XML tree.
    """
    assert isinstance(result, AugmentedResult)
    root = ElementTree.Element('testsuites')
    suite = ElementTree.SubElement(root, 'testsuite', {
        'name': 'Python gRPC tests',
    })
    for case in result.cases.values():
        if case.kind is CaseResult.Kind.SUCCESS:
            ElementTree.SubElement(suite, 'testcase', {'name': case.name,})
        elif case.kind in (CaseResult.Kind.ERROR, CaseResult.Kind.FAILURE):
            case_xml = ElementTree.SubElement(suite, 'testcase', {
                'name': case.name,
            })
            error_xml = ElementTree.SubElement(case_xml, 'error', {})
            # BUG FIX: ''.format(case.stderr, case.traceback) has no
            # placeholders, so it discarded both arguments and always emitted
            # an empty <error> element. Include the captured stderr and
            # traceback so Jenkins shows the failure details.
            error_xml.text = 'stderr:\n{}\n\ntraceback:\n{}'.format(
                case.stderr, case.traceback)
    return ElementTree.ElementTree(element=root)
| 38.5
| 86
| 0.644313
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.