max_stars_repo_path
stringlengths 4
286
| max_stars_repo_name
stringlengths 5
119
| max_stars_count
int64 0
191k
| id
stringlengths 1
7
| content
stringlengths 6
1.03M
| content_cleaned
stringlengths 6
1.03M
| language
stringclasses 111
values | language_score
float64 0.03
1
| comments
stringlengths 0
556k
| edu_score
float64 0.32
5.03
| edu_int_score
int64 0
5
|
|---|---|---|---|---|---|---|---|---|---|---|
bfs/scripts/2_generate_params.py
|
KastnerRG/spector
| 45
|
6626751
|
#!/usr/bin/python
# ----------------------------------------------------------------------
# Copyright (c) 2016, The Regents of the University of California All
# rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of The Regents of the University of California
# nor the names of its contributors may be used to endorse or
# promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL REGENTS OF THE
# UNIVERSITY OF CALIFORNIA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
# ----------------------------------------------------------------------
# Filename: 2_generate_params.py
# Version: 1.0
# Description: Python script to generate unique designs for the BFS benchmark.
# Author: <NAME>
import itertools
import sys
sys.path.append("../../common/scripts")
from generateDesigns import createFolders
# --- Paths used when generating design folders (all relative to this script) ---
templateFilepath = "../src/knobs.h.template" # Knobs template file
kernelFilename = "../src/bfs_fpga.cl" # Kernel file to copy
dirToCopy = "../benchmarks/basefolder" # Directory containing the source code
outRootPath = "../benchmarks" # Name of output directory where all folders are generated
outBasename = "bfs" # Base name of generated folders
outKnobFilename = "knobs.h" # Name of generated knob file
logFilename = "params.log" # Log file to copy useful information
# ***************************************************************************
# Knobs
# ***********
# Sentinel meaning "fully unroll" — not referenced by the lists below;
# presumably consumed by the knobs.h template (TODO confirm).
FULL_UNROLL = -1
KNOB_UNROLL_FACTOR = [1, 4, 6, 8, 10, 12, 16]
KNOB_COMPUTE_UNITS_1 = [1, 2, 4, 6]
KNOB_COMPUTE_UNITS_2 = [1, 2, 4, 8, 10]
KNOB_SIMD_2 = [2, 4, 8]
KNOB_BRANCH = [0, 1]
KNOB_MASK_TYPE = [1, 2, 3]
# Cartesian product of every knob value; infeasible tuples are pruned later
# by removeCombinations(). The index comments give each knob's tuple position.
allCombinations = list(itertools.product(
    KNOB_UNROLL_FACTOR, # 0
    KNOB_COMPUTE_UNITS_1, # 1
    KNOB_COMPUTE_UNITS_2, # 2
    KNOB_SIMD_2, # 3
    KNOB_BRANCH, # 4
    KNOB_MASK_TYPE # 5
))
# ***************************************************************************
def removeCombinations(combs):
    """Return the subset of knob combinations that are considered feasible.

    A combination c is dropped when:
      * c[0] (unroll factor) >= 8 while c[1] (compute units 1) >= 2, or
      * c[1] > 1 while c[4] (branch knob) == 0.
    """
    def _keep(c):
        if c[0] >= 8 and c[1] >= 2:
            return False
        if c[1] > 1 and c[4] == 0:
            return False
        return True

    return [c for c in combs if _keep(c)]
def main():
    """Report how many feasible knob combinations exist and, when the first
    CLI argument is 1, generate one benchmark folder per combination."""
    create_requested = int(sys.argv[1]) if len(sys.argv) > 1 else 0
    finalCombinations = removeCombinations(allCombinations)
    print("Num combinations: " + str(len(finalCombinations)))
    print("vs " + str(len(allCombinations)))
    if create_requested != 1:
        # Dry run: tell the user how to actually generate the folders.
        print("\nNote: To actually create the folders, run:\n" + sys.argv[0] + " 1\n")
        return
    createFolders(
        finalCombinations,
        templateFilepath,
        kernelFilename,
        dirToCopy,
        outRootPath,
        outBasename,
        outKnobFilename,
        logFilename)


if __name__ == "__main__":
    main()
|
#!/usr/bin/python
# ----------------------------------------------------------------------
# Copyright (c) 2016, The Regents of the University of California All
# rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of The Regents of the University of California
# nor the names of its contributors may be used to endorse or
# promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL REGENTS OF THE
# UNIVERSITY OF CALIFORNIA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
# ----------------------------------------------------------------------
# Filename: 2_generate_params.py
# Version: 1.0
# Description: Python script to generate unique designs for the BFS benchmark.
# Author: <NAME>
import itertools
import sys
sys.path.append("../../common/scripts")
from generateDesigns import createFolders
# --- Paths used when generating design folders (all relative to this script) ---
templateFilepath = "../src/knobs.h.template" # Knobs template file
kernelFilename = "../src/bfs_fpga.cl" # Kernel file to copy
dirToCopy = "../benchmarks/basefolder" # Directory containing the source code
outRootPath = "../benchmarks" # Name of output directory where all folders are generated
outBasename = "bfs" # Base name of generated folders
outKnobFilename = "knobs.h" # Name of generated knob file
logFilename = "params.log" # Log file to copy useful information
# ***************************************************************************
# Knobs
# ***********
# Sentinel meaning "fully unroll" — not referenced by the lists below;
# presumably consumed by the knobs.h template (TODO confirm).
FULL_UNROLL = -1
KNOB_UNROLL_FACTOR = [1, 4, 6, 8, 10, 12, 16]
KNOB_COMPUTE_UNITS_1 = [1, 2, 4, 6]
KNOB_COMPUTE_UNITS_2 = [1, 2, 4, 8, 10]
KNOB_SIMD_2 = [2, 4, 8]
KNOB_BRANCH = [0, 1]
KNOB_MASK_TYPE = [1, 2, 3]
# Cartesian product of every knob value; infeasible tuples are pruned later
# by removeCombinations(). The index comments give each knob's tuple position.
allCombinations = list(itertools.product(
    KNOB_UNROLL_FACTOR, # 0
    KNOB_COMPUTE_UNITS_1, # 1
    KNOB_COMPUTE_UNITS_2, # 2
    KNOB_SIMD_2, # 3
    KNOB_BRANCH, # 4
    KNOB_MASK_TYPE # 5
))
# ***************************************************************************
def removeCombinations(combs):
    """Return the subset of knob combinations that are considered feasible.

    A combination c is dropped when:
      * c[0] (unroll factor) >= 8 while c[1] (compute units 1) >= 2, or
      * c[1] > 1 while c[4] (branch knob) == 0.
    """
    def _keep(c):
        if c[0] >= 8 and c[1] >= 2:
            return False
        if c[1] > 1 and c[4] == 0:
            return False
        return True

    return [c for c in combs if _keep(c)]
def main():
    """Report the number of feasible knob combinations and optionally
    generate one benchmark folder per combination."""
    # When run with "1" as the first CLI argument, actually create folders;
    # otherwise this is a dry run that only prints the counts.
    doCreateFolders = 0
    if len(sys.argv) > 1:
        doCreateFolders = int(sys.argv[1])
    # Prune infeasible knob combinations before anything is generated.
    finalCombinations = removeCombinations(allCombinations)
    print("Num combinations: " + str(len(finalCombinations)))
    print("vs " + str(len(allCombinations)))
    if doCreateFolders == 1:
        createFolders(
            finalCombinations,
            templateFilepath,
            kernelFilename,
            dirToCopy,
            outRootPath,
            outBasename,
            outKnobFilename,
            logFilename)
    else:
        print("\nNote: To actually create the folders, run:\n" + sys.argv[0] + " 1\n")
if __name__ == "__main__":
    main()
|
en
| 0.652881
|
#!/usr/bin/python # ---------------------------------------------------------------------- # Copyright (c) 2016, The Regents of the University of California All # rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # * Neither the name of The Regents of the University of California # nor the names of its contributors may be used to endorse or # promote products derived from this software without specific # prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL REGENTS OF THE # UNIVERSITY OF CALIFORNIA BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS # OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH # DAMAGE. # ---------------------------------------------------------------------- # Filename: 2_generate_params.py # Version: 1.0 # Description: Python script to generate unique designs for the BFS benchmark. 
# Author: <NAME> # Knobs template file # Kernel file to copy # Directory containing the source code # Name of output directory where all folders are generated # Base name of generated folders # Name of generated knob file # Log file to copy useful information # *************************************************************************** # Knobs # *********** # 0 # 1 # 2 # 3 # 4 # 5 # ***************************************************************************
| 1.380013
| 1
|
vi_lomas_changes/migrations/0012_auto_20200428_2032.py
|
dymaxionlabs/satlomas-back
| 1
|
6626752
|
<filename>vi_lomas_changes/migrations/0012_auto_20200428_2032.py<gh_stars>1-10
# Generated by Django 3.0.5 on 2020-04-28 20:32
from django.db import migrations
class Migration(migrations.Migration):
    """Shortens CoverageMeasurement's change_area/perc_change_area field
    names to area/perc_area and drops the now-unused ChangesMask model."""

    dependencies = [
        ('vi_lomas_changes', '0011_auto_20200428_0256'),
    ]
    operations = [
        migrations.RenameField(
            model_name='coveragemeasurement',
            old_name='change_area',
            new_name='area',
        ),
        migrations.RenameField(
            model_name='coveragemeasurement',
            old_name='perc_change_area',
            new_name='perc_area',
        ),
        # The FK to ChangesMask must be removed before the model is deleted.
        migrations.RemoveField(
            model_name='coveragemeasurement',
            name='changes_mask',
        ),
        migrations.DeleteModel(
            name='ChangesMask',
        ),
    ]
|
<filename>vi_lomas_changes/migrations/0012_auto_20200428_2032.py<gh_stars>1-10
# Generated by Django 3.0.5 on 2020-04-28 20:32
from django.db import migrations
class Migration(migrations.Migration):
    """Shortens CoverageMeasurement's change_area/perc_change_area field
    names to area/perc_area and drops the now-unused ChangesMask model."""

    dependencies = [
        ('vi_lomas_changes', '0011_auto_20200428_0256'),
    ]
    operations = [
        migrations.RenameField(
            model_name='coveragemeasurement',
            old_name='change_area',
            new_name='area',
        ),
        migrations.RenameField(
            model_name='coveragemeasurement',
            old_name='perc_change_area',
            new_name='perc_area',
        ),
        # The FK to ChangesMask must be removed before the model is deleted.
        migrations.RemoveField(
            model_name='coveragemeasurement',
            name='changes_mask',
        ),
        migrations.DeleteModel(
            name='ChangesMask',
        ),
    ]
|
en
| 0.773764
|
# Generated by Django 3.0.5 on 2020-04-28 20:32
| 1.408985
| 1
|
checkout/migrations/0002_auto_20201209_2330.py
|
sctlcd/pearl-v2
| 1
|
6626753
|
<reponame>sctlcd/pearl-v2
# Generated by Django 3.1.3 on 2020-12-09 23:30
from django.db import migrations, models
class Migration(migrations.Migration):
    """Normalizes the checkout Order address fields: every address-related
    column becomes CharField(max_length=254); county and street_address2
    remain optional (blank/null allowed)."""

    dependencies = [
        ('checkout', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='order',
            name='country',
            field=models.CharField(max_length=254),
        ),
        migrations.AlterField(
            model_name='order',
            name='county',
            field=models.CharField(blank=True, max_length=254, null=True),
        ),
        migrations.AlterField(
            model_name='order',
            name='full_name',
            field=models.CharField(max_length=254),
        ),
        migrations.AlterField(
            model_name='order',
            name='street_address1',
            field=models.CharField(max_length=254),
        ),
        migrations.AlterField(
            model_name='order',
            name='street_address2',
            field=models.CharField(blank=True, max_length=254, null=True),
        ),
        migrations.AlterField(
            model_name='order',
            name='town_or_city',
            field=models.CharField(max_length=254),
        ),
    ]
|
# Generated by Django 3.1.3 on 2020-12-09 23:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('checkout', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='order',
name='country',
field=models.CharField(max_length=254),
),
migrations.AlterField(
model_name='order',
name='county',
field=models.CharField(blank=True, max_length=254, null=True),
),
migrations.AlterField(
model_name='order',
name='full_name',
field=models.CharField(max_length=254),
),
migrations.AlterField(
model_name='order',
name='street_address1',
field=models.CharField(max_length=254),
),
migrations.AlterField(
model_name='order',
name='street_address2',
field=models.CharField(blank=True, max_length=254, null=True),
),
migrations.AlterField(
model_name='order',
name='town_or_city',
field=models.CharField(max_length=254),
),
]
|
en
| 0.836889
|
# Generated by Django 3.1.3 on 2020-12-09 23:30
| 1.750882
| 2
|
2bcmc.py
|
pxeger/2bcmc
| 0
|
6626754
|
#!/usr/bin/python
# © <NAME> <_<EMAIL>> 2021
# Licensed under the Artistic License 2.0 (https://github.com/pxeger/2bcmc/blob/main/LICENCE.txt)
# Created for Redwolf Programs' CMC (https://chat.stackexchange.com/transcript/message/59389303#59389303)
# a somewhat golf-oriented language with a two bit code page
from collections.abc import Iterator, Iterable
# Mapping from source characters to 2-bit command values ("qits").
# None entries are whitespace the interpreter skips entirely.
codepage = {
    "=": 0, # push literal integer
    "*": 1, # repeat + eval
    "-": 2, # subtract
    # TODO: op 3??
    " ": None, # ignored
    "\n": None, # ignored
}
def interpret(qits: Iterable[int]):
    """Run a full 2bcmc program and print the final stack, one value per line."""
    stack = []
    _interpret(iter(qits), stack)
    # BUG FIX: the stack holds ints, and str.join requires strings;
    # the original `"\n".join(stack)` raised TypeError for any non-empty stack.
    print("\n".join(map(str, stack)))
def _interpret(qits: Iterator[int], stack: list):
    """Execute a stream of 2-bit commands ("qits") against *stack*.

    Commands:
      0 - push a literal integer (base-3 digits follow, terminated by a 0 qit)
      1 - pop code p and count n, run p's unpacked qits n times (once if n < 0)
      2 - pop a and b, push a - b
      3 - reserved / unimplemented
    Popping from an empty stack reads a Python expression from stdin
    ("implicit input"); note this uses eval and must never see untrusted input.
    """
    def pop():
        try:
            return stack.pop()
        except IndexError:
            # implicit input on stack underflow
            return eval(input())
    for qit in qits:
        if qit == 0:
            # push literal integer: each digit is (qit - 1) in base 3,
            # terminated by a 0 qit (or end of stream)
            n = 0
            while x := next(qits, 0):
                n *= 3
                n += x - 1
            stack.append(n)
        elif qit == 1:
            # repeat + eval: unpack popped integer p into 2-bit commands,
            # least-significant pair first, then run them n times
            qits2 = []
            p = pop()
            while p:  # BUG FIX: was `while n:` — n was read before assignment
                qits2.append(p & 0b11)
                p >>= 2
            n = pop()
            for _ in range(n if n >= 0 else 1):
                # BUG FIX: wrap in iter() — the callee calls next() on its
                # argument, which rejects a plain list
                _interpret(iter(qits2), stack)
        elif qit == 2:
            # subtract
            a = pop()
            b = pop()
            stack.append(a - b)
        elif qit == 3:
            raise NotImplementedError("command 3 is unimplemented")
        elif qit is None:
            # gracefully ignore to facilitate parsing lol
            pass
        else:
            assert False
if __name__ == "__main__":
import sys
prog, args = sys.argv
if "--help" in args:
print(f"""\
Usage: {prog} (-b | -u | -c) <program>
Executes `2bcmc` code from <program>
Options:
\t-b\tread code from the file <program> in binary mode (4 commands per byte)
\t-u\tread code from the file <program> in unicode mode (using code page)
\t-c\t<program> is a literal string of code (using the code page)
More info: https://github.com/pxeger/2bcmc""")
sys.exit(0)
match args:
case ["-b", path]:
with open(path, "rb") as file:
code = file.read()
qits = reversed([(byte >> n) & 0b11 for byte in code for n in range(0, 8, 2)])
case ["-u", path]:
with open(path, encoding="utf-8") as file:
code = file.read()
try:
qits = [codepage[char] for char in code]
except KeyError:
sys.exit(f"invalid character in code")
case ["-c", code]:
qits = [codepage[char] for char in code]
case _:
sys.exit(f"{prog}: invalid options. Try `{prog} --help`")
interpret(qits)
|
#!/usr/bin/python
# © <NAME> <_<EMAIL>> 2021
# Licensed under the Artistic License 2.0 (https://github.com/pxeger/2bcmc/blob/main/LICENCE.txt)
# Created for Redwolf Programs' CMC (https://chat.stackexchange.com/transcript/message/59389303#59389303)
# a somewhat golf-oriented language with a two bit code page
from collections.abc import Iterator, Iterable
# Mapping from source characters to 2-bit command values ("qits").
# None entries are whitespace the interpreter skips entirely.
codepage = {
    "=": 0, # push literal integer
    "*": 1, # repeat + eval
    "-": 2, # subtract
    # TODO: op 3??
    " ": None, # ignored
    "\n": None, # ignored
}
def interpret(qits: Iterable[int]):
    """Run a full 2bcmc program and print the final stack, one value per line."""
    stack = []
    _interpret(iter(qits), stack)
    # BUG FIX: the stack holds ints, and str.join requires strings;
    # the original `"\n".join(stack)` raised TypeError for any non-empty stack.
    print("\n".join(map(str, stack)))
def _interpret(qits: Iterator[int], stack: list):
    """Execute a stream of 2-bit commands ("qits") against *stack*.

    Commands:
      0 - push a literal integer (base-3 digits follow, terminated by a 0 qit)
      1 - pop code p and count n, run p's unpacked qits n times (once if n < 0)
      2 - pop a and b, push a - b
      3 - reserved / unimplemented
    Popping from an empty stack reads a Python expression from stdin
    ("implicit input"); note this uses eval and must never see untrusted input.
    """
    def pop():
        try:
            return stack.pop()
        except IndexError:
            # implicit input on stack underflow
            return eval(input())
    for qit in qits:
        if qit == 0:
            # push literal integer: each digit is (qit - 1) in base 3,
            # terminated by a 0 qit (or end of stream)
            n = 0
            while x := next(qits, 0):
                n *= 3
                n += x - 1
            stack.append(n)
        elif qit == 1:
            # repeat + eval: unpack popped integer p into 2-bit commands,
            # least-significant pair first, then run them n times
            qits2 = []
            p = pop()
            while p:  # BUG FIX: was `while n:` — n was read before assignment
                qits2.append(p & 0b11)
                p >>= 2
            n = pop()
            for _ in range(n if n >= 0 else 1):
                # BUG FIX: wrap in iter() — the callee calls next() on its
                # argument, which rejects a plain list
                _interpret(iter(qits2), stack)
        elif qit == 2:
            # subtract
            a = pop()
            b = pop()
            stack.append(a - b)
        elif qit == 3:
            raise NotImplementedError("command 3 is unimplemented")
        elif qit is None:
            # gracefully ignore to facilitate parsing lol
            pass
        else:
            assert False
if __name__ == "__main__":
import sys
prog, args = sys.argv
if "--help" in args:
print(f"""\
Usage: {prog} (-b | -u | -c) <program>
Executes `2bcmc` code from <program>
Options:
\t-b\tread code from the file <program> in binary mode (4 commands per byte)
\t-u\tread code from the file <program> in unicode mode (using code page)
\t-c\t<program> is a literal string of code (using the code page)
More info: https://github.com/pxeger/2bcmc""")
sys.exit(0)
match args:
case ["-b", path]:
with open(path, "rb") as file:
code = file.read()
qits = reversed([(byte >> n) & 0b11 for byte in code for n in range(0, 8, 2)])
case ["-u", path]:
with open(path, encoding="utf-8") as file:
code = file.read()
try:
qits = [codepage[char] for char in code]
except KeyError:
sys.exit(f"invalid character in code")
case ["-c", code]:
qits = [codepage[char] for char in code]
case _:
sys.exit(f"{prog}: invalid options. Try `{prog} --help`")
interpret(qits)
|
en
| 0.633612
|
#!/usr/bin/python # © <NAME> <_<EMAIL>> 2021 # Licensed under the Artistic License 2.0 (https://github.com/pxeger/2bcmc/blob/main/LICENCE.txt) # Created for Redwolf Programs' CMC (https://chat.stackexchange.com/transcript/message/59389303#59389303) # a somewhat golf-oriented language with a two bit code page # push literal integer # repeat + eval # subtract # TODO: op 3?? # ignored # ignored # implicit input # push literal integer # repeat + eval # subtract # gracefully ignore to facilitate parsing lol \ Usage: {prog} (-b | -u | -c) <program> Executes `2bcmc` code from <program> Options: \t-b\tread code from the file <program> in binary mode (4 commands per byte) \t-u\tread code from the file <program> in unicode mode (using code page) \t-c\t<program> is a literal string of code (using the code page) More info: https://github.com/pxeger/2bcmc
| 3.297819
| 3
|
app/legacy/keras_wrappers.py
|
rahowa/workzone
| 1
|
6626755
|
<reponame>rahowa/workzone
import keras
import numpy as np
from typing import Union, Any
from app.nn_inference import BaseWrapper
from app.base_types import Image, Descriptors
class KerasBase(BaseWrapper):
    """Example wrapper around a pure Keras model for face recognition.

    Attributes:
        model: Keras model used to produce face descriptors.
        config: parsed configuration; loaded from disk when a path string
            is given, otherwise used as-is.
    """
    def __init__(self, config: Union[str, Any], model: keras.models.Model):
        super().__init__()
        self.model = model
        # A string config is treated as a path and loaded via the base class;
        # anything else is assumed to be an already-parsed config object.
        if isinstance(config, (str)):
            self.config = self.load_config(config)
        else:
            self.config = config
    def preprocess_image(self, image: Image) -> Image:
        # Scale 8-bit pixel values from [0, 255] into [0.0, 1.0].
        return image / 255.0
    def predict(self, data: Image) -> Descriptors:
        # Add a leading batch axis before inference; returns raw descriptors.
        descriptors = self.model.predict(data[np.newaxis, ...])
        return descriptors
|
import keras
import numpy as np
from typing import Union, Any
from app.nn_inference import BaseWrapper
from app.base_types import Image, Descriptors
class KerasBase(BaseWrapper):
    """Example wrapper around a pure Keras model for face recognition.

    Attributes:
        model: Keras model used to produce face descriptors.
        config: parsed configuration; loaded from disk when a path string
            is given, otherwise used as-is.
    """
    def __init__(self, config: Union[str, Any], model: keras.models.Model):
        super().__init__()
        self.model = model
        # A string config is treated as a path and loaded via the base class;
        # anything else is assumed to be an already-parsed config object.
        if isinstance(config, (str)):
            self.config = self.load_config(config)
        else:
            self.config = config
    def preprocess_image(self, image: Image) -> Image:
        # Scale 8-bit pixel values from [0, 255] into [0.0, 1.0].
        return image / 255.0
    def predict(self, data: Image) -> Descriptors:
        # Add a leading batch axis before inference; returns raw descriptors.
        descriptors = self.model.predict(data[np.newaxis, ...])
        return descriptors
|
en
| 0.373837
|
Example wrapper for pure Keras face reсognition system
| 2.495816
| 2
|
stores/migrations/0006_auto_20181002_1626.py
|
fats2005/pma-backend
| 0
|
6626756
|
<gh_stars>0
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-10-02 21:26
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Renames the Unity model to Unit and the matching Product.unity field
    to Product.unit (naming fix; no schema shape change)."""

    dependencies = [
        ('stores', '0005_auto_20181002_1446'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='Unity',
            new_name='Unit',
        ),
        migrations.RenameField(
            model_name='product',
            old_name='unity',
            new_name='unit',
        ),
    ]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-10-02 21:26
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Renames the Unity model to Unit and the matching Product.unity field
    to Product.unit (naming fix; no schema shape change)."""

    dependencies = [
        ('stores', '0005_auto_20181002_1446'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='Unity',
            new_name='Unit',
        ),
        migrations.RenameField(
            model_name='product',
            old_name='unity',
            new_name='unit',
        ),
    ]
|
en
| 0.735872
|
# -*- coding: utf-8 -*- # Generated by Django 1.10.8 on 2018-10-02 21:26
| 1.664494
| 2
|
python/bl_gob.py
|
lihebi/DAG-EQ
| 5
|
6626757
|
import os, sys
sys.path.append("/home/jovyan/DAG-EQ/python/pygobnilp")
import networkx as nx
import matplotlib.pyplot as plt
from pygobnilp.gobnilp import Gobnilp
# Single shared Gobnilp solver instance reused by every function below.
m = Gobnilp()
# Tolerance for comparing learned scores against reference values.
epsilon = 0.0001
def optimal_score_is(m, score):
    """Return True when the best learned score matches *score* within epsilon."""
    delta = m.learned_scores[0] - score
    return -epsilon < delta < epsilon
def test_bge():
    # BGe score on continuous data; reference value from a known-good run.
    m.learn('data/gaussian.dat',data_type='continuous',score='BGe',plot=False,palim=None)
    assert optimal_score_is(m, -53258.94161814058)
def test_bic():
    # Gaussian BIC score on continuous data, with and without sdresidparam
    # (presumably an extra parameter per residual std dev — confirm against
    # the pygobnilp docs); the two settings give different reference scores.
    m.learn('data/gaussian.dat',data_type='continuous',score='GaussianBIC',plot=False,palim=None)
    assert optimal_score_is(m, -53221.34568733569)
    m.learn('data/gaussian.dat',data_type='continuous',score='GaussianBIC',plot=False,palim=None,sdresidparam=False)
    assert optimal_score_is(m, -53191.53551116573)
def test_bdeu():
    # Default score on discrete data; reference value from a known-good run.
    m.learn('data/discrete.dat',plot=False,palim=None)
    assert optimal_score_is(m, -24028.0947783535)
# run it
def run_Gob(x, y, score='BGe'):
    """Learn a network structure from DataFrame *x* with Gobnilp and return
    its adjacency matrix. *y* is accepted for interface compatibility but
    is not used.
    """
    # Gobnilp requires string column names.
    x.columns = [str(col) for col in x.columns]
    m.learn(x, data_type='continuous', score=score, plot=False, palim=None)
    return nx.to_numpy_matrix(m.learned_bn)
|
import os, sys
sys.path.append("/home/jovyan/DAG-EQ/python/pygobnilp")
import networkx as nx
import matplotlib.pyplot as plt
from pygobnilp.gobnilp import Gobnilp
# Single shared Gobnilp solver instance reused by every function below.
m = Gobnilp()
# Tolerance for comparing learned scores against reference values.
epsilon = 0.0001
def optimal_score_is(m,score):
    # True when the top learned score is within epsilon of the expected value.
    return abs(m.learned_scores[0] - score) < epsilon
def test_bge():
    # BGe score on continuous data; reference value from a known-good run.
    m.learn('data/gaussian.dat',data_type='continuous',score='BGe',plot=False,palim=None)
    assert optimal_score_is(m, -53258.94161814058)
def test_bic():
    # Gaussian BIC score on continuous data, with and without sdresidparam
    # (presumably an extra parameter per residual std dev — confirm against
    # the pygobnilp docs); the two settings give different reference scores.
    m.learn('data/gaussian.dat',data_type='continuous',score='GaussianBIC',plot=False,palim=None)
    assert optimal_score_is(m, -53221.34568733569)
    m.learn('data/gaussian.dat',data_type='continuous',score='GaussianBIC',plot=False,palim=None,sdresidparam=False)
    assert optimal_score_is(m, -53191.53551116573)
def test_bdeu():
    # Default score on discrete data; reference value from a known-good run.
    m.learn('data/discrete.dat',plot=False,palim=None)
    assert optimal_score_is(m, -24028.0947783535)
# run it
def run_Gob(x, y, score='BGe'):
    """Learn a network structure from DataFrame *x* with Gobnilp and return
    its adjacency matrix. NOTE(review): *y* is unused here."""
    # Gobnilp requires string column names (comprehension-local x shadows the
    # parameter but does not rebind it outside the comprehension).
    x.columns = [str(x) for x in x.columns]
    m.learn(x, data_type='continuous',score=score,plot=False,palim=None)
    return nx.to_numpy_matrix(m.learned_bn)
|
none
| 1
| 2.584183
| 3
|
|
scripts/generate_gif_jobs.py
|
weepingwillowben/reward-surfaces
| 0
|
6626758
|
<filename>scripts/generate_gif_jobs.py<gh_stars>0
import torch
import numpy as np
from reward_surfaces.experiments import generate_plane_data
import json
import subprocess
import argparse
from reward_surfaces.utils.surface_utils import filter_normalize
from pathlib import Path
import random
import os
def main():
    """CLI entry point: parse arguments and delegate to generate_gif_jobs()."""
    parser = argparse.ArgumentParser(description='generate jobs for plane')
    parser.add_argument('train_dir', type=str)
    parser.add_argument('out_dir', type=str)
    parser.add_argument('--device', type=str, default='cpu')
    parser.add_argument('--num-steps', type=int)
    parser.add_argument('--num-episodes', type=int)
    # Omitted seed means a random one is chosen inside generate_gif_jobs().
    parser.add_argument('--seed', type=int)
    parser.add_argument('--magnitude', type=float, default=0.2, help="scales directions by given amount")
    parser.add_argument('--grid-size', type=int, default=5)
    args = parser.parse_args()
    generate_gif_jobs(
        args.train_dir,
        args.out_dir,
        num_steps=args.num_steps,
        num_episodes=args.num_episodes,
        device=args.device,
        magnitude=args.magnitude,
        grid_size=args.grid_size,
        seed=args.seed
    )
def generate_gif_jobs(train_dir, out_dir, num_steps=None, num_episodes=None, device="cpu", seed=None, grid_size=5, magnitude=0.2):
    """Generate plane-evaluation job folders for every checkpoint in *train_dir*.

    For each checkpoint (a subdirectory containing parameters.th), two random
    filter-normalized directions scaled by *magnitude* are generated and passed
    to generate_plane_data(). The per-checkpoint job lists are then concatenated
    into all_jobs.sh and multiproc_jobs.sh under *out_dir*.

    Raises FileExistsError if *out_dir* already exists (os.mkdir).
    """
    train_dir = Path(train_dir)
    out_dir = Path(out_dir)
    if seed is None:
        seed = random.randint(0, 2**31)
    os.mkdir(out_dir)
    # Record the generation settings alongside the training info.
    # BUG FIX: files are now opened via context managers / Path helpers so
    # handles are closed deterministically (the originals were never closed).
    with open(train_dir / "info.json") as f:
        train_info = json.load(f)
    train_info['magnitude'] = magnitude
    train_info['grid_size'] = grid_size
    train_info['num_steps'] = num_steps
    train_info['num_episodes'] = num_episodes
    train_info['seed'] = seed
    (out_dir / "info.json").write_text(json.dumps(train_info))
    all_gen_dirs = []
    for p_str in sorted(os.listdir(train_dir)):
        checkpoint_param_fname = train_dir / p_str / "parameters.th"
        if os.path.exists(checkpoint_param_fname):
            trained_checkpoint = train_dir / p_str
            generated_dir = out_dir / p_str
            # Re-read the unmodified training info for each checkpoint so the
            # settings injected above are not passed to generate_plane_data.
            with open(train_dir / "info.json") as f:
                train_info = json.load(f)
            # reseed so the randomly generated directions are identical on reruns
            np.random.seed(seed)
            checkpoint_params = [v.cpu().detach().numpy() for v in torch.load(checkpoint_param_fname, map_location=torch.device('cpu')).values()]
            dir1 = [filter_normalize(v) * magnitude for v in checkpoint_params]
            dir2 = [filter_normalize(v) * magnitude for v in checkpoint_params]
            generate_plane_data(trained_checkpoint, generated_dir, dir1, dir2, train_info, num_steps=num_steps, num_episodes=num_episodes, device=device, grid_size=grid_size)
            all_gen_dirs.append(generated_dir)
    # concatenate all per-checkpoint job scripts into file-level aggregates
    # (also renames the loop variable: the original shadowed builtin `dir`)
    all_job_paths = "".join((d / "jobs.sh").read_text() for d in all_gen_dirs)
    multiproc_job_paths = "\n".join(f"python3 scripts/run_jobs_multiproc.py {d / 'jobs.sh'}" for d in all_gen_dirs)
    (out_dir / "all_jobs.sh").write_text(all_job_paths)
    (out_dir / "multiproc_jobs.sh").write_text(multiproc_job_paths)
if __name__ == "__main__":
    main()
|
<filename>scripts/generate_gif_jobs.py<gh_stars>0
import torch
import numpy as np
from reward_surfaces.experiments import generate_plane_data
import json
import subprocess
import argparse
from reward_surfaces.utils.surface_utils import filter_normalize
from pathlib import Path
import random
import os
def main():
    """CLI entry point: parse arguments and delegate to generate_gif_jobs()."""
    parser = argparse.ArgumentParser(description='generate jobs for plane')
    parser.add_argument('train_dir', type=str)
    parser.add_argument('out_dir', type=str)
    parser.add_argument('--device', type=str, default='cpu')
    parser.add_argument('--num-steps', type=int)
    parser.add_argument('--num-episodes', type=int)
    # Omitted seed means a random one is chosen inside generate_gif_jobs().
    parser.add_argument('--seed', type=int)
    parser.add_argument('--magnitude', type=float, default=0.2, help="scales directions by given amount")
    parser.add_argument('--grid-size', type=int, default=5)
    args = parser.parse_args()
    generate_gif_jobs(
        args.train_dir,
        args.out_dir,
        num_steps=args.num_steps,
        num_episodes=args.num_episodes,
        device=args.device,
        magnitude=args.magnitude,
        grid_size=args.grid_size,
        seed=args.seed
    )
def generate_gif_jobs(train_dir, out_dir, num_steps=None, num_episodes=None, device="cpu", seed=None, grid_size=5, magnitude=0.2):
    """Generate plane-evaluation job folders for every checkpoint in *train_dir*.

    For each checkpoint (a subdirectory containing parameters.th), two random
    filter-normalized directions scaled by *magnitude* are generated and passed
    to generate_plane_data(). The per-checkpoint job lists are then concatenated
    into all_jobs.sh and multiproc_jobs.sh under *out_dir*.

    Raises FileExistsError if *out_dir* already exists (os.mkdir).
    """
    train_dir = Path(train_dir)
    out_dir = Path(out_dir)
    if seed is None:
        seed = random.randint(0, 2**31)
    os.mkdir(out_dir)
    # Record the generation settings alongside the training info.
    # BUG FIX: files are now opened via context managers / Path helpers so
    # handles are closed deterministically (the originals were never closed).
    with open(train_dir / "info.json") as f:
        train_info = json.load(f)
    train_info['magnitude'] = magnitude
    train_info['grid_size'] = grid_size
    train_info['num_steps'] = num_steps
    train_info['num_episodes'] = num_episodes
    train_info['seed'] = seed
    (out_dir / "info.json").write_text(json.dumps(train_info))
    all_gen_dirs = []
    for p_str in sorted(os.listdir(train_dir)):
        checkpoint_param_fname = train_dir / p_str / "parameters.th"
        if os.path.exists(checkpoint_param_fname):
            trained_checkpoint = train_dir / p_str
            generated_dir = out_dir / p_str
            # Re-read the unmodified training info for each checkpoint so the
            # settings injected above are not passed to generate_plane_data.
            with open(train_dir / "info.json") as f:
                train_info = json.load(f)
            # reseed so the randomly generated directions are identical on reruns
            np.random.seed(seed)
            checkpoint_params = [v.cpu().detach().numpy() for v in torch.load(checkpoint_param_fname, map_location=torch.device('cpu')).values()]
            dir1 = [filter_normalize(v) * magnitude for v in checkpoint_params]
            dir2 = [filter_normalize(v) * magnitude for v in checkpoint_params]
            generate_plane_data(trained_checkpoint, generated_dir, dir1, dir2, train_info, num_steps=num_steps, num_episodes=num_episodes, device=device, grid_size=grid_size)
            all_gen_dirs.append(generated_dir)
    # concatenate all per-checkpoint job scripts into file-level aggregates
    # (also renames the loop variable: the original shadowed builtin `dir`)
    all_job_paths = "".join((d / "jobs.sh").read_text() for d in all_gen_dirs)
    multiproc_job_paths = "\n".join(f"python3 scripts/run_jobs_multiproc.py {d / 'jobs.sh'}" for d in all_gen_dirs)
    (out_dir / "all_jobs.sh").write_text(all_job_paths)
    (out_dir / "multiproc_jobs.sh").write_text(multiproc_job_paths)
if __name__ == "__main__":
    main()
|
en
| 0.837593
|
# reseeds so that random generated directions are the same every time # concatenate all jobs
| 2.375332
| 2
|
yt_concate/logs.py
|
RobertChienShiba/yt_concate
| 0
|
6626759
|
<reponame>RobertChienShiba/yt_concate
import logging
def set_log(file_level, stream_level):
    """Configure this module's logger with a file handler ('youtube.log')
    filtering at *file_level* and a console handler at *stream_level*.
    The logger itself passes everything at DEBUG and above to the handlers."""
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    fmt = logging.Formatter('%(asctime)s:%(levelname)s:%(message)s')
    # Same formatter on both destinations; file handler is attached first.
    for handler, level in (
        (logging.FileHandler('youtube.log'), file_level),
        (logging.StreamHandler(), stream_level),
    ):
        handler.setLevel(level)
        handler.setFormatter(fmt)
        logger.addHandler(handler)
|
import logging
def set_log(file_level,stream_level):
    """Configure this module's logger with a file handler ('youtube.log')
    at file_level and a console handler at stream_level."""
    # The logger itself passes everything at DEBUG and above; the two
    # handlers then filter down to their own levels.
    logger=logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    # Shared timestamp:level:message format for both destinations.
    formatter=logging.Formatter('%(asctime)s:%(levelname)s:%(message)s')
    file_handler=logging.FileHandler('youtube.log')
    file_handler.setLevel(file_level)
    file_handler.setFormatter(formatter)
    stream_handler=logging.StreamHandler()
    stream_handler.setLevel(stream_level)
    stream_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    logger.addHandler(stream_handler)
|
none
| 1
| 2.627111
| 3
|
|
src/py-opentimelineio/opentimelineio/console/otioconvert.py
|
michdolan/OpenTimelineIO
| 0
|
6626760
|
#!/usr/bin/env python
#
# SPDX-License-Identifier: Apache-2.0
# Copyright Contributors to the OpenTimelineIO project
import argparse
import sys
import copy
import opentimelineio as otio
# on some python interpreters, pkg_resources is not available
try:
import pkg_resources
except ImportError:
pkg_resources = None
__doc__ = """ Python wrapper around OTIO to convert timeline files between \
formats.
Available adapters: {}
""".format(otio.adapters.available_adapter_names())
def _parsed_args():
""" parse commandline arguments with argparse """
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument(
'-i',
'--input',
type=str,
required=False,
help='path to input file',
)
parser.add_argument(
'-o',
'--output',
type=str,
required=False,
help='path to output file',
)
parser.add_argument(
'-I',
'--input-adapter',
type=str,
default=None,
help="Explicitly use this adapter for reading the input file",
)
parser.add_argument(
'-O',
'--output-adapter',
type=str,
default=None,
help="Explicitly use this adapter for writing the output file",
)
parser.add_argument(
'-T',
'--tracks',
type=str,
default=None,
help="Pick one or more tracks, by 0-based index, separated by commas.",
)
parser.add_argument(
'-m',
'--media-linker',
type=str,
default="Default",
help=(
"Specify a media linker. 'Default' means use the "
"$OTIO_DEFAULT_MEDIA_LINKER if set, 'None' or '' means explicitly "
"disable the linker, and anything else is interpreted as the name"
" of the media linker to use."
)
)
parser.add_argument(
'-H',
'--hook-function-arg',
type=str,
default=[],
action='append',
help='Extra arguments to be passed to the hook functions in the form of '
'key=value. Values are strings, numbers or Python literals: True, '
'False, etc. Can be used multiple times: -H burrito="bar" -H taco=12.'
)
parser.add_argument(
'-M',
'--media-linker-arg',
type=str,
default=[],
action='append',
help='Extra arguments to be passed to the media linker in the form of '
'key=value. Values are strings, numbers or Python literals: True, '
'False, etc. Can be used multiple times: -M burrito="bar" -M taco=12.'
)
parser.add_argument(
'-a',
'--adapter-arg',
type=str,
default=[],
action='append',
help='Extra arguments to be passed to input adapter in the form of '
'key=value. Values are strings, numbers or Python literals: True, '
'False, etc. Can be used multiple times: -a burrito="bar" -a taco=12.'
)
parser.add_argument(
'-A',
'--output-adapter-arg',
type=str,
default=[],
action='append',
help='Extra arguments to be passed to output adapter in the form of '
'key=value. Values are strings, numbers or Python literals: True, '
'False, etc. Can be used multiple times: -A burrito="bar" -A taco=12.'
)
parser.add_argument(
'--version',
default=False,
action="store_true",
help=(
"Print the otio and pkg_resource installed plugin version "
"information to the commandline and then exit."
),
)
trim_args = parser.add_argument_group(
title="Trim Arguments",
description="Arguments that allow you to trim the OTIO file."
)
trim_args.add_argument(
'--begin',
type=str,
default=None,
help=(
"Trim out everything in the timeline before this time, in the "
"global time frame of the timeline. Argument should be in the form"
' "VALUE,RATE", eg: --begin "10,24". Requires --end argument.'
),
)
trim_args.add_argument(
'--end',
type=str,
default=None,
help=(
"Trim out everything in the timeline after this time, in the "
"global time frame of the timeline. Argument should be in the form"
' "VALUE,RATE", eg: --begin "10,24". Requires --begin argument.'
),
)
result = parser.parse_args()
# print version information to the shell
if result.version:
print("OpenTimelineIO version: {}".format(otio.__version__))
if pkg_resources:
pkg_resource_plugins = list(
pkg_resources.iter_entry_points("opentimelineio.plugins")
)
if pkg_resource_plugins:
print("Plugins from pkg_resources:")
for plugin in pkg_resource_plugins:
print(" {}".format(plugin.dist))
else:
print("No pkg_resource plugins installed.")
parser.exit()
if not result.input:
parser.error("-i/--input is a required argument")
if not result.output:
parser.error("-o/--output is a required argument")
if result.begin is not None and result.end is None:
parser.error("--begin requires --end.")
if result.end is not None and result.begin is None:
parser.error("--end requires --begin.")
if result.begin is not None:
try:
value, rate = result.begin.split(",")
result.begin = otio.opentime.RationalTime(float(value), float(rate))
except ValueError:
parser.error(
"--begin argument needs to be of the form: VALUE,RATE where "
"VALUE is the (float) time value of the resulting RationalTime "
"and RATE is the (float) time rate of the resulting RationalTime,"
" not '{}'".format(result.begin)
)
if result.end is not None:
try:
value, rate = result.end.split(",")
result.end = otio.opentime.RationalTime(float(value), float(rate))
except ValueError:
parser.error(
"--end argument needs to be of the form: VALUE,RATE where "
"VALUE is the (float) time value of the resulting RationalTime "
"and RATE is the (float) time rate of the resulting RationalTime,"
" not '{}'".format(result.begin)
)
return result
def main():
    """Parse arguments and convert the files.

    Reads the input with the chosen (or filepath-inferred) adapter,
    optionally filters tracks (-T), optionally trims (--begin/--end),
    and writes the result with the output adapter.
    """
    args = _parsed_args()
    # Infer adapters from file extensions unless explicitly overridden.
    in_adapter = args.input_adapter
    if in_adapter is None:
        in_adapter = otio.adapters.from_filepath(args.input).name
    out_adapter = args.output_adapter
    if out_adapter is None:
        out_adapter = otio.adapters.from_filepath(args.output).name
    media_linker_name = otio.console.console_utils.media_linker_name(
        args.media_linker
    )
    try:
        read_adapter_arg_map = otio.console.console_utils.arg_list_to_map(
            args.adapter_arg,
            "input adapter"
        )
        hooks_args = otio.console.console_utils.arg_list_to_map(
            args.hook_function_arg,
            "hook function"
        )
        ml_args = otio.console.console_utils.arg_list_to_map(
            args.media_linker_arg,
            "media linker"
        )
    except ValueError as exc:
        sys.stderr.write("\n" + str(exc) + "\n")
        sys.exit(1)
    result_tl = otio.adapters.read_from_file(
        args.input,
        in_adapter,
        hook_function_argument_map=hooks_args,
        media_linker_name=media_linker_name,
        media_linker_argument_map=ml_args,
        **read_adapter_arg_map
    )
    if args.tracks:
        result_tracks = copy.deepcopy(otio.schema.Stack())
        del result_tracks[:]
        tokens = args.tracks.split(",")
        indices = [int(t) for t in tokens]
        # Bug fix: the original deleted tracks inside the selection loop,
        # shifting the indices of later selections (e.g. "-T 0,1" detached
        # the wrong second track). Detach highest index first so earlier
        # deletions cannot shift indices still to be removed, then append
        # in the user-requested order.
        detached = {}
        for index in sorted(set(indices), reverse=True):
            detached[index] = result_tl.tracks[index]
            del result_tl.tracks[index]
        for token, index in zip(tokens, indices):
            tr = detached[index]
            print("track {0} is of kind: '{1}'".format(token, tr.kind))
            result_tracks.append(tr)
        result_tl.tracks = result_tracks
    # handle trim arguments
    if args.begin is not None and args.end is not None:
        result_tl = otio.algorithms.timeline_trimmed_to_range(
            result_tl,
            otio.opentime.range_from_start_end_time(args.begin, args.end)
        )
    try:
        write_adapter_arg_map = otio.console.console_utils.arg_list_to_map(
            args.output_adapter_arg,
            "output adapter"
        )
    except ValueError as exc:
        sys.stderr.write("\n" + str(exc) + "\n")
        sys.exit(1)
    otio.adapters.write_to_file(
        result_tl,
        args.output,
        out_adapter,
        hook_function_argument_map=hooks_args,
        **write_adapter_arg_map
    )
if __name__ == '__main__':
try:
main()
except otio.exceptions.OTIOError as err:
sys.stderr.write("ERROR: " + str(err) + "\n")
sys.exit(1)
|
#!/usr/bin/env python
#
# SPDX-License-Identifier: Apache-2.0
# Copyright Contributors to the OpenTimelineIO project
import argparse
import sys
import copy
import opentimelineio as otio
# on some python interpreters, pkg_resources is not available
try:
import pkg_resources
except ImportError:
pkg_resources = None
__doc__ = """ Python wrapper around OTIO to convert timeline files between \
formats.
Available adapters: {}
""".format(otio.adapters.available_adapter_names())
def _parsed_args():
""" parse commandline arguments with argparse """
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument(
'-i',
'--input',
type=str,
required=False,
help='path to input file',
)
parser.add_argument(
'-o',
'--output',
type=str,
required=False,
help='path to output file',
)
parser.add_argument(
'-I',
'--input-adapter',
type=str,
default=None,
help="Explicitly use this adapter for reading the input file",
)
parser.add_argument(
'-O',
'--output-adapter',
type=str,
default=None,
help="Explicitly use this adapter for writing the output file",
)
parser.add_argument(
'-T',
'--tracks',
type=str,
default=None,
help="Pick one or more tracks, by 0-based index, separated by commas.",
)
parser.add_argument(
'-m',
'--media-linker',
type=str,
default="Default",
help=(
"Specify a media linker. 'Default' means use the "
"$OTIO_DEFAULT_MEDIA_LINKER if set, 'None' or '' means explicitly "
"disable the linker, and anything else is interpreted as the name"
" of the media linker to use."
)
)
parser.add_argument(
'-H',
'--hook-function-arg',
type=str,
default=[],
action='append',
help='Extra arguments to be passed to the hook functions in the form of '
'key=value. Values are strings, numbers or Python literals: True, '
'False, etc. Can be used multiple times: -H burrito="bar" -H taco=12.'
)
parser.add_argument(
'-M',
'--media-linker-arg',
type=str,
default=[],
action='append',
help='Extra arguments to be passed to the media linker in the form of '
'key=value. Values are strings, numbers or Python literals: True, '
'False, etc. Can be used multiple times: -M burrito="bar" -M taco=12.'
)
parser.add_argument(
'-a',
'--adapter-arg',
type=str,
default=[],
action='append',
help='Extra arguments to be passed to input adapter in the form of '
'key=value. Values are strings, numbers or Python literals: True, '
'False, etc. Can be used multiple times: -a burrito="bar" -a taco=12.'
)
parser.add_argument(
'-A',
'--output-adapter-arg',
type=str,
default=[],
action='append',
help='Extra arguments to be passed to output adapter in the form of '
'key=value. Values are strings, numbers or Python literals: True, '
'False, etc. Can be used multiple times: -A burrito="bar" -A taco=12.'
)
parser.add_argument(
'--version',
default=False,
action="store_true",
help=(
"Print the otio and pkg_resource installed plugin version "
"information to the commandline and then exit."
),
)
trim_args = parser.add_argument_group(
title="Trim Arguments",
description="Arguments that allow you to trim the OTIO file."
)
trim_args.add_argument(
'--begin',
type=str,
default=None,
help=(
"Trim out everything in the timeline before this time, in the "
"global time frame of the timeline. Argument should be in the form"
' "VALUE,RATE", eg: --begin "10,24". Requires --end argument.'
),
)
trim_args.add_argument(
'--end',
type=str,
default=None,
help=(
"Trim out everything in the timeline after this time, in the "
"global time frame of the timeline. Argument should be in the form"
' "VALUE,RATE", eg: --begin "10,24". Requires --begin argument.'
),
)
result = parser.parse_args()
# print version information to the shell
if result.version:
print("OpenTimelineIO version: {}".format(otio.__version__))
if pkg_resources:
pkg_resource_plugins = list(
pkg_resources.iter_entry_points("opentimelineio.plugins")
)
if pkg_resource_plugins:
print("Plugins from pkg_resources:")
for plugin in pkg_resource_plugins:
print(" {}".format(plugin.dist))
else:
print("No pkg_resource plugins installed.")
parser.exit()
if not result.input:
parser.error("-i/--input is a required argument")
if not result.output:
parser.error("-o/--output is a required argument")
if result.begin is not None and result.end is None:
parser.error("--begin requires --end.")
if result.end is not None and result.begin is None:
parser.error("--end requires --begin.")
if result.begin is not None:
try:
value, rate = result.begin.split(",")
result.begin = otio.opentime.RationalTime(float(value), float(rate))
except ValueError:
parser.error(
"--begin argument needs to be of the form: VALUE,RATE where "
"VALUE is the (float) time value of the resulting RationalTime "
"and RATE is the (float) time rate of the resulting RationalTime,"
" not '{}'".format(result.begin)
)
if result.end is not None:
try:
value, rate = result.end.split(",")
result.end = otio.opentime.RationalTime(float(value), float(rate))
except ValueError:
parser.error(
"--end argument needs to be of the form: VALUE,RATE where "
"VALUE is the (float) time value of the resulting RationalTime "
"and RATE is the (float) time rate of the resulting RationalTime,"
" not '{}'".format(result.begin)
)
return result
def main():
    """Parse arguments and convert the files.

    Reads the input with the chosen (or filepath-inferred) adapter,
    optionally filters tracks (-T), optionally trims (--begin/--end),
    and writes the result with the output adapter.
    """
    args = _parsed_args()
    # Infer adapters from file extensions unless explicitly overridden.
    in_adapter = args.input_adapter
    if in_adapter is None:
        in_adapter = otio.adapters.from_filepath(args.input).name
    out_adapter = args.output_adapter
    if out_adapter is None:
        out_adapter = otio.adapters.from_filepath(args.output).name
    media_linker_name = otio.console.console_utils.media_linker_name(
        args.media_linker
    )
    try:
        read_adapter_arg_map = otio.console.console_utils.arg_list_to_map(
            args.adapter_arg,
            "input adapter"
        )
        hooks_args = otio.console.console_utils.arg_list_to_map(
            args.hook_function_arg,
            "hook function"
        )
        ml_args = otio.console.console_utils.arg_list_to_map(
            args.media_linker_arg,
            "media linker"
        )
    except ValueError as exc:
        sys.stderr.write("\n" + str(exc) + "\n")
        sys.exit(1)
    result_tl = otio.adapters.read_from_file(
        args.input,
        in_adapter,
        hook_function_argument_map=hooks_args,
        media_linker_name=media_linker_name,
        media_linker_argument_map=ml_args,
        **read_adapter_arg_map
    )
    if args.tracks:
        result_tracks = copy.deepcopy(otio.schema.Stack())
        del result_tracks[:]
        tokens = args.tracks.split(",")
        indices = [int(t) for t in tokens]
        # Bug fix: the original deleted tracks inside the selection loop,
        # shifting the indices of later selections (e.g. "-T 0,1" detached
        # the wrong second track). Detach highest index first so earlier
        # deletions cannot shift indices still to be removed, then append
        # in the user-requested order.
        detached = {}
        for index in sorted(set(indices), reverse=True):
            detached[index] = result_tl.tracks[index]
            del result_tl.tracks[index]
        for token, index in zip(tokens, indices):
            tr = detached[index]
            print("track {0} is of kind: '{1}'".format(token, tr.kind))
            result_tracks.append(tr)
        result_tl.tracks = result_tracks
    # handle trim arguments
    if args.begin is not None and args.end is not None:
        result_tl = otio.algorithms.timeline_trimmed_to_range(
            result_tl,
            otio.opentime.range_from_start_end_time(args.begin, args.end)
        )
    try:
        write_adapter_arg_map = otio.console.console_utils.arg_list_to_map(
            args.output_adapter_arg,
            "output adapter"
        )
    except ValueError as exc:
        sys.stderr.write("\n" + str(exc) + "\n")
        sys.exit(1)
    otio.adapters.write_to_file(
        result_tl,
        args.output,
        out_adapter,
        hook_function_argument_map=hooks_args,
        **write_adapter_arg_map
    )
if __name__ == '__main__':
try:
main()
except otio.exceptions.OTIOError as err:
sys.stderr.write("ERROR: " + str(err) + "\n")
sys.exit(1)
|
en
| 0.431375
|
#!/usr/bin/env python # # SPDX-License-Identifier: Apache-2.0 # Copyright Contributors to the OpenTimelineIO project # on some python interpreters, pkg_resources is not available Python wrapper around OTIO to convert timeline files between \ formats. Available adapters: {} parse commandline arguments with argparse # print version information to the shell Parse arguments and convert the files. # handle trim arguments
| 2.616939
| 3
|
Packs/Devo/Integrations/Devo_v2/Devo_v2.py
|
ddi-danielsantander/content
| 1
|
6626761
|
import demistomock as demisto
from CommonServerPython import *
''' IMPORTS '''
import base64
import json
import time
import devodsconnector as ds
import concurrent.futures
import tempfile
import urllib.parse
import re
import os
from datetime import datetime
from devo.sender import Lookup, SenderConfigSSL, Sender
from typing import List, Dict, Set
''' GLOBAL VARS '''
ALLOW_INSECURE = demisto.params().get('insecure', False)
READER_ENDPOINT = demisto.params().get('reader_endpoint', None)
READER_OAUTH_TOKEN = demisto.params().get('reader_oauth_token', None)
WRITER_RELAY = demisto.params().get('writer_relay', None)
WRITER_CREDENTIALS = demisto.params().get('writer_credentials', None)
LINQ_LINK_BASE = demisto.params().get('linq_link_base', "https://us.devo.com/welcome")
FETCH_INCIDENTS_FILTER = demisto.params().get('fetch_incidents_filters', None)
FETCH_INCIDENTS_DEDUPE = demisto.params().get('fetch_incidents_deduplication', None)
HEALTHCHECK_WRITER_RECORD = [{'hello': 'world', 'from': 'demisto-integration'}]
HEALTHCHECK_WRITER_TABLE = 'test.keep.free'
RANGE_PATTERN = re.compile('^[0-9]+ [a-zA-Z]+')
TIMESTAMP_PATTERN = re.compile('^[0-9]+')
TIMESTAMP_PATTERN_MILLI = re.compile('^[0-9]+.[0-9]+')
ALERTS_QUERY = '''
from
siem.logtrust.alert.info
select
eventdate,
alertHost,
domain,
priority,
context,
category,
status,
alertId,
srcIp,
srcPort,
srcHost,
dstIp,
dstPort,
dstHost,
application,
engine,
extraData
'''
HEALTHCHECK_QUERY = '''
from
test.keep.free
select
*
'''
SEVERITY_LEVELS_MAP = {
'1': 0.5,
'2': 1,
'3': 2,
'4': 3,
'5': 4,
'informational': 0.5,
'low': 1,
'medium': 2,
'high': 3,
'critical': 4
}
''' HELPER FUNCTIONS '''
def alert_to_incident(alert):
    """Convert one Devo alert row (a dict from siem.logtrust.alert.info)
    into a Demisto incident dict.

    Severity defaults to 1 and is overridden from extraData.alertPriority
    via SEVERITY_LEVELS_MAP; name defaults to the last dot-segment of the
    alert 'context' and is overridden by extraData.alertName. Every
    non-extraData field is kept under the 'devo.metadata.alert' namespace
    and mirrored into labels; extraData fields are flattened to the top
    level of the raw JSON.

    NOTE(review): assumes alert['extraData'] has already been json-decoded
    into a dict by the caller (fetch_incidents does this) — confirm for
    any new call site.
    """
    alert_severity = float(1)
    alert_name = alert['context'].split('.')[-1]
    alert_description = None
    # eventdate arrives as an epoch value (ts_format='timestamp' query)
    alert_occurred = demisto_ISO(float(alert['eventdate']))
    alert_labels = []
    if demisto.get(alert['extraData'], 'alertPriority'):
        alert_severity = SEVERITY_LEVELS_MAP[str(alert['extraData']['alertPriority']).lower()]
    if demisto.get(alert['extraData'], 'alertName'):
        alert_name = alert['extraData']['alertName']
    if demisto.get(alert['extraData'], 'alertDescription'):
        alert_description = alert['extraData']['alertDescription']
    new_alert: Dict = {
        'devo.metadata.alert': {}
    }
    # Metadata fields: namespaced copy plus a label per field.
    for key in alert:
        if key == 'extraData':
            continue
        new_alert['devo.metadata.alert'][key] = alert[key]
        alert_labels.append({'type': f'devo.metadata.alert.{key}', 'value': str(alert[key])})
    # extraData fields: flattened to the top level plus a label per field.
    for key in alert['extraData']:
        new_alert[key] = alert['extraData'][key]
        alert_labels.append({'type': f'{key}', 'value': str(alert['extraData'][key])})
    incident = {
        'name': alert_name,
        'severity': alert_severity,
        'details': alert_description,
        'occurred': alert_occurred,
        'labels': alert_labels,
        'rawJSON': json.dumps(new_alert)
    }
    return incident
def build_link(query, start_ts_milli, end_ts_milli, mode='queryApp'):
    """Build a deep link into the Devo query application.

    The query text, mode, and millisecond date window are serialized to
    JSON, base64-encoded, and appended to LINQ_LINK_BASE as the
    targetQuery fragment parameter. Returns the URL string.
    """
    payload = json.dumps({
        'query': query,
        'mode': mode,
        'dates': {
            'from': start_ts_milli,
            'to': end_ts_milli
        }
    })
    encoded = base64.b64encode(payload.encode('ascii')).decode()
    return LINQ_LINK_BASE + f"#/verticalApp?path=apps/custom/queryApp_dev&targetQuery={encoded}"
def check_configuration():
    """Smoke-test every configured integration setting; backs 'test-module'.

    Runs a tiny read query against the reader endpoint, writes a health
    record if writer credentials are configured, and validates the shape
    of the fetch-incidents filter and deduplication JSON parameters.
    Raises (AssertionError/ValueError/connector errors) on any problem;
    returns True when everything checks out.
    """
    # Check all settings related if set
    # Basic functionality of integration
    list(ds.Reader(oauth_token=READER_OAUTH_TOKEN, end_point=READER_ENDPOINT)
         .query(HEALTHCHECK_QUERY, start=int(time.time() - 1), stop=int(time.time()), output='dict'))
    if WRITER_RELAY and WRITER_CREDENTIALS:
        creds = get_writer_creds()
        ds.Writer(key=creds['key'].name, crt=creds['crt'].name, chain=creds['chain'].name, relay=WRITER_RELAY)\
            .load(HEALTHCHECK_WRITER_RECORD, HEALTHCHECK_WRITER_TABLE, historical=False)
    if FETCH_INCIDENTS_FILTER:
        # Filter config must be {'type': 'AND'|'OR', 'filters': [{key, operator, value}, ...]}
        alert_filters = check_type(FETCH_INCIDENTS_FILTER, dict)
        assert alert_filters['type'] in ['AND', 'OR'], 'Missing key:"type" or unsupported value in fetch_incidents_filters'
        filters = check_type(alert_filters['filters'], list)
        for filt in filters:
            assert filt['key'], 'Missing key: "key" in fetch_incidents_filters.filters configuration'
            assert filt['operator'] in ['=', '/=', '>', '<', '>=', '<=', 'and', 'or', '->'], 'Missing key: "operator"'\
                ' or unsupported operator in fetch_incidents_filters.filters configuration'
            assert filt['value'], 'Missing key:"value" in fetch_incidents_filters.filters configuration'
    if FETCH_INCIDENTS_DEDUPE:
        # Dedupe config must carry a numeric 'cooldown' (seconds)
        dedupe_conf = check_type(FETCH_INCIDENTS_DEDUPE, dict)
        assert isinstance(dedupe_conf['cooldown'], (int, float)), 'Invalid fetch_incidents_deduplication configuration'
    return True
def check_type(input, tar_type):
    """Coerce *input* to *tar_type*, JSON-decoding it first if it is a str.

    Parameters:
        input: the raw value (str containing JSON, or an already-typed value).
        tar_type: the expected type (e.g. list, dict).

    Returns the (possibly decoded) value.
    Raises ValueError if the value is not an instance of tar_type.

    Fix vs. original: removed an unreachable elif/else that duplicated the
    ValueError already raised by the preceding isinstance check.
    """
    if isinstance(input, str):
        input = json.loads(input)
    if not isinstance(input, tar_type):
        raise ValueError(f'tables to query should either be a json string of a {tar_type} or a {tar_type} input')
    return input
# Format a non-negative epoch value as a UTC ISO-8601 string with
# microseconds; negative inputs are passed back unchanged.
# NOTE(review): the value is fed to utcfromtimestamp, which takes
# seconds — callers pass eventdate as-is; confirm the unit upstream.
def demisto_ISO(s_epoch):
    if s_epoch < 0:
        return s_epoch
    return datetime.utcfromtimestamp(s_epoch).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
# We will assume timestamp_from and timestamp_to will be the same format or to will be None
def get_time_range(timestamp_from, timestamp_to):
    """Normalize a (from, to) pair into epoch-seconds floats.

    Accepts numbers (passed through), strings (relative ranges like
    "2 days", numeric epoch strings, or date strings parsed by
    date_to_timestamp), or datetime objects. A None *timestamp_to*
    defaults to now, except for relative-range strings, which define
    both ends.

    Returns a (t_from, t_to) tuple.
    Raises ValueError for unsupported timestamp_from types — the
    original fell through and crashed with UnboundLocalError instead.
    """
    if isinstance(timestamp_from, (int, float)):
        t_from = timestamp_from
        t_to = time.time() if timestamp_to is None else timestamp_to
    elif isinstance(timestamp_from, str):
        if re.fullmatch(RANGE_PATTERN, timestamp_from):
            # Relative range such as "2 days": both endpoints come from it.
            t_range = parse_date_range(timestamp_from)
            t_from = t_range[0].timestamp()
            t_to = t_range[1].timestamp()
        elif re.fullmatch(TIMESTAMP_PATTERN, timestamp_from) or re.fullmatch(TIMESTAMP_PATTERN_MILLI, timestamp_from):
            # Numeric epoch passed as a string.
            t_from = float(timestamp_from)
            t_to = time.time() if timestamp_to is None else float(timestamp_to)
        else:
            # Anything else is treated as a date string (milliseconds -> seconds).
            t_from = date_to_timestamp(timestamp_from) / 1000
            t_to = time.time() if timestamp_to is None else date_to_timestamp(timestamp_to) / 1000
    elif isinstance(timestamp_from, datetime):
        t_from = timestamp_from.timestamp()
        t_to = time.time() if timestamp_to is None else timestamp_to.timestamp()
    else:
        raise ValueError(f'unsupported timestamp type: {type(timestamp_from)}')
    return (t_from, t_to)
def get_writer_creds():
    """Materialize the writer TLS credentials as named temporary files.

    Validates WRITER_RELAY / WRITER_CREDENTIALS configuration and writes
    the key/cert/chain PEM strings to NamedTemporaryFile objects,
    returning them in a dict under keys 'key', 'crt', 'chain'.

    Callers use the .name file paths; the returned handles must stay
    referenced for as long as the paths are in use, because the files
    are deleted when the NamedTemporaryFile objects are closed or
    garbage-collected.
    """
    if WRITER_RELAY is None:
        raise ValueError('writer_relay is not set in your Devo Integration')
    if WRITER_CREDENTIALS is None:
        raise ValueError('writer_credentials are not set in your Devo Integration')
    write_credentials = check_type(WRITER_CREDENTIALS, dict)
    assert write_credentials['key'], 'Required key: "key" is not present in writer credentials'
    assert write_credentials['crt'], 'Required key: "crt" is not present in writer credentials'
    assert write_credentials['chain'], 'Required key: "chain" is not present in writer credentials'
    # Limitation in Devo DS Connector SDK. Currently require filepaths for credentials.
    # Will accept file-handler type objects in the future.
    key_tmp = tempfile.NamedTemporaryFile(mode='w')
    crt_tmp = tempfile.NamedTemporaryFile(mode='w')
    chain_tmp = tempfile.NamedTemporaryFile(mode='w')
    key_tmp.write(write_credentials['key'])
    crt_tmp.write(write_credentials['crt'])
    chain_tmp.write(write_credentials['chain'])
    # flush so the SDK sees the full contents at the .name paths
    key_tmp.flush()
    crt_tmp.flush()
    chain_tmp.flush()
    creds = {
        'key': key_tmp,
        'crt': crt_tmp,
        'chain': chain_tmp
    }
    return creds
def parallel_query_helper(sub_query, append_list, timestamp_from, timestamp_to):
    """ThreadPoolExecutor worker: run *sub_query* over the given window and
    extend the shared *append_list* with the resulting dict rows
    (timestamps formatted as ISO strings).

    NOTE(review): multiple workers extend the same list concurrently;
    list.extend is relied upon to be safe under CPython's GIL — confirm
    if this integration is ever run on another interpreter.
    """
    append_list.extend(list(ds.Reader(oauth_token=READER_OAUTH_TOKEN, end_point=READER_ENDPOINT)
                            .query(sub_query, start=float(timestamp_from), stop=float(timestamp_to),
                                   output='dict', ts_format='iso')))
''' FUNCTIONS '''
def fetch_incidents():
    """Poll Devo for new alerts and create Demisto incidents.

    Uses demisto.getLastRun()['from_time'] as the window start (falling
    back to the last hour), applies the configured AND/OR filters to the
    alert query, optionally de-duplicates by alert 'context' within a
    rolling cooldown window, converts each alert to an incident, and
    persists the new window start (and dedupe state) via setLastRun.

    Returns the list of created incidents.
    """
    last_run = demisto.getLastRun()
    alert_query = ALERTS_QUERY
    to_time = time.time()
    dedupe_config = None
    alerts_list: Dict = {}
    new_last_run: Dict = {
        'from_time': to_time
    }
    if FETCH_INCIDENTS_FILTER:
        # Build the LinQ where-clause from the configured filters.
        # NOTE(review): 'AND' joins with ',' (LinQ chained-where syntax),
        # 'OR' joins with 'or'; values are URL-quoted to match the
        # quoting applied on read (unquote_plus below).
        alert_filters = check_type(FETCH_INCIDENTS_FILTER, dict)
        if alert_filters['type'] == 'AND':
            filter_string = ' , '.join([f'{filt["key"]} {filt["operator"]} "{urllib.parse.quote(filt["value"])}"'
                                        for filt in alert_filters['filters']])
        elif alert_filters['type'] == 'OR':
            filter_string = ' or '.join([f'{filt["key"]} {filt["operator"]} "{urllib.parse.quote(filt["value"])}"'
                                         for filt in alert_filters['filters']])
        alert_query = f'{alert_query} where {filter_string}'
    from_time = to_time - 3600
    if 'from_time' in last_run:
        from_time = float(last_run['from_time'])
    if FETCH_INCIDENTS_DEDUPE:
        dedupe_config = check_type(FETCH_INCIDENTS_DEDUPE, dict)
        if 'alerts_list' in last_run:
            alerts_list = last_run['alerts_list']
        # Expire entries that have aged past the cooldown window.
        alerts_list = {k: v for k, v in alerts_list.items() if alerts_list[k] >= (to_time - float(dedupe_config['cooldown']))}
    # execute the query and get the events
    # reverse the list so that the most recent event timestamp event is taken when de-duping if needed.
    events = list(ds.Reader(oauth_token=READER_OAUTH_TOKEN, end_point=READER_ENDPOINT)
                  .query(alert_query, start=float(from_time), stop=float(to_time),
                         output='dict', ts_format='timestamp'))[::-1]
    deduped_events: List[Dict] = []
    if FETCH_INCIDENTS_DEDUPE:
        # Keep at most one event per 'context' per batch, skipping any
        # context still inside the cooldown window from earlier runs.
        for event in events:
            if any(de['context'] == event['context'] for de in deduped_events):
                continue
            if event['context'] in alerts_list:
                continue
            deduped_events.append(event)
            alerts_list[event['context']] = event['eventdate']
        events = deduped_events
        new_last_run['alerts_list'] = alerts_list
    # convert the events to demisto incident
    incidents = []
    for event in events:
        # extraData arrives as a JSON string with URL-quoted values.
        event['extraData'] = json.loads(event['extraData'])
        for ed in event['extraData']:
            event['extraData'][ed] = urllib.parse.unquote_plus(event['extraData'][ed])
        inc = alert_to_incident(event)
        incidents.append(inc)
    demisto.setLastRun(new_last_run)
    # this command will create incidents in Demisto
    demisto.incidents(incidents)
    return incidents
def run_query_command():
    """Run an arbitrary LinQ query (devo-run-query).

    Reads query/from/to/writeToContext from demisto.args(), executes the
    query over the normalized time range, and returns a results entry
    plus a deep-link entry. Context output is gated on writeToContext.
    """
    to_query = demisto.args()['query']
    timestamp_from = demisto.args()['from']
    timestamp_to = demisto.args().get('to', None)
    write_context = demisto.args()['writeToContext'].lower()
    time_range = get_time_range(timestamp_from, timestamp_to)
    results = list(ds.Reader(oauth_token=READER_OAUTH_TOKEN, end_point=READER_ENDPOINT)
                   .query(to_query, start=float(time_range[0]), stop=float(time_range[1]),
                          output='dict', ts_format='iso'))
    # build_link expects millisecond timestamps
    querylink = {'DevoTableLink': build_link(to_query, int(1000 * float(time_range[0])), int(1000 * float(time_range[1])))}
    entry = {
        'Type': entryTypes['note'],
        'Contents': results,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown']
    }
    entry_linq = {
        'Type': entryTypes['note'],
        'Contents': querylink,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown']
    }
    if len(results) == 0:
        # Empty result: a single entry is returned (no separate link entry).
        # NOTE(review): these dotted keys are set on the entry itself, not
        # under 'EntryContext' — looks unintentional; confirm before relying
        # on Devo.QueryResults/Devo.QueryLink context in the empty case.
        entry['HumanReadable'] = 'No results found'
        entry['Devo.QueryResults'] = None
        entry['Devo.QueryLink'] = querylink
        return entry
    headers = list(results[0].keys())
    md = tableToMarkdown('Devo query results', results, headers)
    entry['HumanReadable'] = md
    md_linq = tableToMarkdown('Link to Devo Query', {'DevoTableLink': f'[Devo Direct Link]({querylink["DevoTableLink"]})'})
    entry_linq['HumanReadable'] = md_linq
    if write_context == 'true':
        entry['EntryContext'] = {
            'Devo.QueryResults': createContext(results)
        }
        entry_linq['EntryContext'] = {
            'Devo.QueryLink': createContext(querylink)
        }
    return [entry, entry_linq]
def get_alerts_command():
    """Query Devo alerts (devo-get-alerts).

    Like run_query_command but fixed to ALERTS_QUERY, with optional
    AND/OR filters from args and extraData JSON-decoding/unquoting on
    each row. Returns a results entry plus a deep-link entry.
    """
    timestamp_from = demisto.args()['from']
    timestamp_to = demisto.args().get('to', None)
    alert_filters = demisto.args().get('filters', None)
    write_context = demisto.args()['writeToContext'].lower()
    alert_query = ALERTS_QUERY
    time_range = get_time_range(timestamp_from, timestamp_to)
    if alert_filters:
        # Same where-clause construction as fetch_incidents: ',' for AND
        # (chained LinQ wheres), 'or' for OR; values URL-quoted.
        alert_filters = check_type(alert_filters, dict)
        if alert_filters['type'] == 'AND':
            filter_string = ', '\
                .join([f'{filt["key"]} {filt["operator"]} "{urllib.parse.quote(filt["value"])}"'
                       for filt in alert_filters['filters']])
        elif alert_filters['type'] == 'OR':
            filter_string = ' or '\
                .join([f'{filt["key"]} {filt["operator"]} "{urllib.parse.quote(filt["value"])}"'
                       for filt in alert_filters['filters']])
        alert_query = f'{alert_query} where {filter_string}'
    results = list(ds.Reader(oauth_token=READER_OAUTH_TOKEN, end_point=READER_ENDPOINT)
                   .query(alert_query, start=float(time_range[0]), stop=float(time_range[1]),
                          output='dict', ts_format='iso'))
    # build_link expects millisecond timestamps
    querylink = {'DevoTableLink': build_link(alert_query, int(1000 * float(time_range[0])), int(1000 * float(time_range[1])))}
    # extraData arrives as a JSON string with URL-quoted values.
    for res in results:
        res['extraData'] = json.loads(res['extraData'])
        for ed in res['extraData']:
            res['extraData'][ed] = urllib.parse.unquote_plus(res['extraData'][ed])
    entry = {
        'Type': entryTypes['note'],
        'Contents': results,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown']
    }
    entry_linq = {
        'Type': entryTypes['note'],
        'Contents': querylink,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown']
    }
    if len(results) == 0:
        # NOTE(review): 'Devo.AlertsResults' is set on the entry itself and
        # 'Devo.QueryLink' on the (unreturned) entry_linq — likely meant to
        # go under 'EntryContext'; confirm before depending on it.
        entry['HumanReadable'] = 'No results found'
        entry['Devo.AlertsResults'] = None
        entry_linq['Devo.QueryLink'] = querylink
        return entry
    headers = list(results[0].keys())
    md = tableToMarkdown('Devo query results', results, headers)
    entry['HumanReadable'] = md
    md_linq = tableToMarkdown('Link to Devo Query', {'DevoTableLink': f'[Devo Direct Link]({querylink["DevoTableLink"]})'})
    entry_linq['HumanReadable'] = md_linq
    if write_context == 'true':
        entry['EntryContext'] = {
            'Devo.AlertsResults': createContext(results)
        }
        entry_linq['EntryContext'] = {
            'Devo.QueryLink': createContext(querylink)
        }
    return [entry, entry_linq]
def multi_table_query_command():
    """Search a token across several Devo tables (devo-multi-table-query).

    For each table, introspects its columns via the reader's _get_types,
    builds a "str(col)->token" OR-clause over every column, then fans the
    per-table queries out over a 10-worker thread pool, merging all rows
    into one entry.
    """
    tables_to_query = check_type(demisto.args()['tables'], list)
    search_token = demisto.args()['searchToken']
    timestamp_from = demisto.args()['from']
    timestamp_to = demisto.args().get('to', None)
    write_context = demisto.args()['writeToContext'].lower()
    time_range = get_time_range(timestamp_from, timestamp_to)
    futures = []
    all_results: List[Dict] = []
    sub_queries = []
    for table in tables_to_query:
        # NOTE(review): _get_types is a private devodsconnector API —
        # subject to breakage on SDK upgrades.
        fields = ds.Reader(oauth_token=READER_OAUTH_TOKEN, end_point=READER_ENDPOINT)\
            ._get_types(f'from {table} select *', 'now', 'iso').keys()
        clauses = [f"( isnotnull({field}) and str({field})->\"" + search_token + "\")" for field in fields]
        sub_queries.append("from " + table + " where" + " or ".join(clauses) + " select *")
    # Workers append into the shared all_results list (see parallel_query_helper).
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        for q in sub_queries:
            futures.append(executor.submit(parallel_query_helper, q, all_results, time_range[0], time_range[1]))
    concurrent.futures.wait(futures)
    entry = {
        'Type': entryTypes['note'],
        'Contents': all_results,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown']
    }
    if len(all_results) == 0:
        entry['HumanReadable'] = 'No results found'
        return entry
    # Rows from different tables have different columns; union them all.
    headers: Set = set().union(*(r.keys() for r in all_results))
    md = tableToMarkdown('Devo query results', all_results, headers)
    entry['HumanReadable'] = md
    if write_context == 'true':
        entry['EntryContext'] = {
            'Devo.MultiResults': createContext(all_results)
        }
    return entry
def write_to_table_command():
    """Write records to a Devo table (devo-write-to-table).

    Loads the records through ds.Writer (linq_func returns the generated
    LinQ string) and returns a records entry plus a deep-link entry over
    the last hour so the write can be inspected.
    """
    table_name = demisto.args()['tableName']
    records = check_type(demisto.args()['records'], list)
    creds = get_writer_creds()
    linq = ds.Writer(key=creds['key'].name, crt=creds['crt'].name, chain=creds['chain'].name, relay=WRITER_RELAY)\
        .load(records, table_name, historical=False, linq_func=(lambda x: x))
    # Link window: the past hour up to now, in milliseconds.
    querylink = {'DevoTableLink': build_link(linq, int(1000 * time.time()) - 3600000, int(1000 * time.time()))}
    entry = {
        'Type': entryTypes['note'],
        'Contents': {'recordsWritten': records},
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'EntryContext': {
            'Devo.RecordsWritten': records,
            'Devo.LinqQuery': linq
        }
    }
    entry_linq = {
        'Type': entryTypes['note'],
        'Contents': querylink,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'EntryContext': {
            'Devo.QueryLink': createContext(querylink)
        }
    }
    md = tableToMarkdown('Entries to load into Devo', records)
    entry['HumanReadable'] = md
    md_linq = tableToMarkdown('Link to Devo Query', {'DevoTableLink': f'[Devo Direct Link]({querylink["DevoTableLink"]})'})
    entry_linq['HumanReadable'] = md_linq
    return [entry, entry_linq]
def write_to_lookup_table_command():
    """Write records into a Devo lookup table (devo-write-to-lookup-table).

    Opens an SSL Sender to the writer relay, streams an incremental
    (INC) lookup update bracketed by START/END control messages, and
    returns a single records entry.

    Fix vs. original: if Sender() itself raised, the finally block hit
    the unbound name 'con' and the resulting NameError masked the real
    connection error. 'con' is now pre-initialized and guarded.
    """
    lookup_table_name = demisto.args()['lookupTableName']
    headers = check_type(demisto.args()['headers'], list)
    records = check_type(demisto.args()['records'], list)
    creds = get_writer_creds()
    engine_config = SenderConfigSSL(address=(WRITER_RELAY, 443),
                                    key=creds['key'].name,
                                    cert=creds['crt'].name,
                                    chain=creds['chain'].name)
    con = None
    try:
        con = Sender(config=engine_config, timeout=60)
        lookup = Lookup(name=lookup_table_name,
                        historic_tag=None,
                        con=con)
        # Order sensitive list
        pHeaders = json.dumps(headers)
        lookup.send_control('START', pHeaders, 'INC')
        for r in records:
            lookup.send_data_line(key=r['key'], fields=r['values'])
        lookup.send_control('END', pHeaders, 'INC')
    finally:
        # Only clean up a connection that was actually established.
        if con is not None:
            con.flush_buffer()
            con.socket.shutdown(0)
    entry = {
        'Type': entryTypes['note'],
        'Contents': {'recordsWritten': records},
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'EntryContext': {
            'Devo.RecordsWritten': records
        }
    }
    md = tableToMarkdown('Entries to load into Devo', records)
    entry['HumanReadable'] = md
    return [entry]
''' EXECUTION CODE '''
# Top-level command dispatcher: runs on every invocation of the
# integration and routes demisto.command() to the matching handler.
try:
    if ALLOW_INSECURE:
        # Disable TLS verification for requests-based calls and silence
        # the resulting urllib3 warnings.
        os.environ['CURL_CA_BUNDLE'] = ''
        os.environ['PYTHONWARNINGS'] = 'ignore:Unverified HTTPS request'
    handle_proxy()
    if demisto.command() == 'test-module':
        # check_configuration raises on any misconfiguration
        check_configuration()
        demisto.results('ok')
    elif demisto.command() == 'fetch-incidents':
        # fetch_incidents reports via demisto.incidents() itself
        fetch_incidents()
    elif demisto.command() == 'devo-run-query':
        demisto.results(run_query_command())
    elif demisto.command() == 'devo-get-alerts':
        demisto.results(get_alerts_command())
    elif demisto.command() == 'devo-multi-table-query':
        demisto.results(multi_table_query_command())
    elif demisto.command() == 'devo-write-to-table':
        demisto.results(write_to_table_command())
    elif demisto.command() == 'devo-write-to-lookup-table':
        demisto.results(write_to_lookup_table_command())
except Exception as e:
    # Broad catch is deliberate at this boundary: return_error surfaces
    # the failure to the platform instead of crashing the container.
    return_error('Failed to execute command {}. Error: {}'.format(demisto.command(), str(e)))
|
import demistomock as demisto
from CommonServerPython import *
''' IMPORTS '''
import base64
import json
import time
import devodsconnector as ds
import concurrent.futures
import tempfile
import urllib.parse
import re
import os
from datetime import datetime
from devo.sender import Lookup, SenderConfigSSL, Sender
from typing import List, Dict, Set
''' GLOBAL VARS '''
ALLOW_INSECURE = demisto.params().get('insecure', False)
READER_ENDPOINT = demisto.params().get('reader_endpoint', None)
READER_OAUTH_TOKEN = demisto.params().get('reader_oauth_token', None)
WRITER_RELAY = demisto.params().get('writer_relay', None)
WRITER_CREDENTIALS = demisto.params().get('writer_credentials', None)
LINQ_LINK_BASE = demisto.params().get('linq_link_base', "https://us.devo.com/welcome")
FETCH_INCIDENTS_FILTER = demisto.params().get('fetch_incidents_filters', None)
FETCH_INCIDENTS_DEDUPE = demisto.params().get('fetch_incidents_deduplication', None)
HEALTHCHECK_WRITER_RECORD = [{'hello': 'world', 'from': 'demisto-integration'}]
HEALTHCHECK_WRITER_TABLE = 'test.keep.free'
RANGE_PATTERN = re.compile('^[0-9]+ [a-zA-Z]+')
TIMESTAMP_PATTERN = re.compile('^[0-9]+')
TIMESTAMP_PATTERN_MILLI = re.compile('^[0-9]+.[0-9]+')
ALERTS_QUERY = '''
from
siem.logtrust.alert.info
select
eventdate,
alertHost,
domain,
priority,
context,
category,
status,
alertId,
srcIp,
srcPort,
srcHost,
dstIp,
dstPort,
dstHost,
application,
engine,
extraData
'''
HEALTHCHECK_QUERY = '''
from
test.keep.free
select
*
'''
SEVERITY_LEVELS_MAP = {
'1': 0.5,
'2': 1,
'3': 2,
'4': 3,
'5': 4,
'informational': 0.5,
'low': 1,
'medium': 2,
'high': 3,
'critical': 4
}
''' HELPER FUNCTIONS '''
def alert_to_incident(alert):
    """Map one Devo alert row onto a Demisto incident dict.

    Severity defaults to 1.0 and the name to the last segment of the
    alert ``context``; both (and the description) may be overridden by
    ``extraData`` keys ``alertPriority`` / ``alertName`` /
    ``alertDescription``.
    """
    alert_severity = float(1)
    alert_name = alert['context'].split('.')[-1]
    alert_description = None
    # eventdate is numeric here (fetch_incidents queries with
    # ts_format='timestamp'); demisto_ISO treats it as epoch seconds.
    alert_occurred = demisto_ISO(float(alert['eventdate']))
    alert_labels = []
    if demisto.get(alert['extraData'], 'alertPriority'):
        alert_severity = SEVERITY_LEVELS_MAP[str(alert['extraData']['alertPriority']).lower()]
    if demisto.get(alert['extraData'], 'alertName'):
        alert_name = alert['extraData']['alertName']
    if demisto.get(alert['extraData'], 'alertDescription'):
        alert_description = alert['extraData']['alertDescription']
    new_alert: Dict = {
        'devo.metadata.alert': {}
    }
    # Alert metadata (everything except extraData) is nested under
    # 'devo.metadata.alert' and mirrored into the incident labels.
    for key in alert:
        if key == 'extraData':
            continue
        new_alert['devo.metadata.alert'][key] = alert[key]
        alert_labels.append({'type': f'devo.metadata.alert.{key}', 'value': str(alert[key])})
    # extraData fields are promoted to the top level of the raw JSON.
    for key in alert['extraData']:
        new_alert[key] = alert['extraData'][key]
        alert_labels.append({'type': f'{key}', 'value': str(alert['extraData'][key])})
    incident = {
        'name': alert_name,
        'severity': alert_severity,
        'details': alert_description,
        'occurred': alert_occurred,
        'labels': alert_labels,
        'rawJSON': json.dumps(new_alert)
    }
    return incident
def build_link(query, start_ts_milli, end_ts_milli, mode='queryApp'):
    """Build a deep link into the Devo query app.

    The query, mode and millisecond time window are JSON-encoded,
    base64-encoded, and appended to LINQ_LINK_BASE as a URL fragment.
    """
    payload = json.dumps({
        'query': query,
        'mode': mode,
        'dates': {
            'from': start_ts_milli,
            'to': end_ts_milli
        }
    })
    token = base64.b64encode(payload.encode('ascii')).decode()
    return LINQ_LINK_BASE + f"#/verticalApp?path=apps/custom/queryApp_dev&targetQuery={token}"
def check_configuration():
    """Smoke-test every configured part of the integration; raise on failure.

    * Always: run the 1-second healthcheck query against the reader endpoint.
    * If writer settings exist: write a healthcheck record via the relay.
    * If fetch filters / dedupe settings exist: validate their shape.
    """
    # Check all settings related if set
    # Basic functionality of integration
    list(ds.Reader(oauth_token=READER_OAUTH_TOKEN, end_point=READER_ENDPOINT)
         .query(HEALTHCHECK_QUERY, start=int(time.time() - 1), stop=int(time.time()), output='dict'))
    if WRITER_RELAY and WRITER_CREDENTIALS:
        creds = get_writer_creds()
        ds.Writer(key=creds['key'].name, crt=creds['crt'].name, chain=creds['chain'].name, relay=WRITER_RELAY)\
            .load(HEALTHCHECK_WRITER_RECORD, HEALTHCHECK_WRITER_TABLE, historical=False)
    if FETCH_INCIDENTS_FILTER:
        # Filters must be {'type': 'AND'|'OR', 'filters': [{key, operator, value}, ...]}
        alert_filters = check_type(FETCH_INCIDENTS_FILTER, dict)
        assert alert_filters['type'] in ['AND', 'OR'], 'Missing key:"type" or unsupported value in fetch_incidents_filters'
        filters = check_type(alert_filters['filters'], list)
        for filt in filters:
            assert filt['key'], 'Missing key: "key" in fetch_incidents_filters.filters configuration'
            assert filt['operator'] in ['=', '/=', '>', '<', '>=', '<=', 'and', 'or', '->'], 'Missing key: "operator"'\
                ' or unsupported operator in fetch_incidents_filters.filters configuration'
            assert filt['value'], 'Missing key:"value" in fetch_incidents_filters.filters configuration'
    if FETCH_INCIDENTS_DEDUPE:
        dedupe_conf = check_type(FETCH_INCIDENTS_DEDUPE, dict)
        assert isinstance(dedupe_conf['cooldown'], (int, float)), 'Invalid fetch_incidents_deduplication configuration'
    return True
def check_type(input, tar_type):
    """Coerce *input* to *tar_type*, parsing it from JSON if it is a string.

    Raises ValueError when the (possibly parsed) value is not *tar_type*.
    """
    value = json.loads(input) if isinstance(input, str) else input
    if not isinstance(value, tar_type):
        raise ValueError(f'tables to query should either be a json string of a {tar_type} or a {tar_type} input')
    return value
# Converts epoch (miliseconds) to ISO string
def demisto_ISO(s_epoch):
    """Format an epoch-seconds value as a UTC ISO-8601 string.

    Negative values are passed through unchanged (sentinel behavior).
    """
    if s_epoch < 0:
        return s_epoch
    return datetime.utcfromtimestamp(s_epoch).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
# We will assume timestamp_from and timestamp_to will be the same format or to will be None
def get_time_range(timestamp_from, timestamp_to):
    """Normalize a (from, to) pair into epoch-second floats.

    *timestamp_from* may be a number (epoch seconds), a string (a relative
    range such as "2 days", a numeric epoch, or a parseable date string),
    or a datetime. *timestamp_to* is assumed to share its format; when it
    is None the range ends "now".

    Returns:
        (t_from, t_to) as epoch-second floats.

    Raises:
        ValueError: if *timestamp_from* is of an unsupported type
            (previously this fell through and raised an opaque
            UnboundLocalError on t_from).
    """
    if isinstance(timestamp_from, (int, float)):
        t_from = timestamp_from
        if timestamp_to is None:
            t_to = time.time()
        else:
            t_to = timestamp_to
    elif isinstance(timestamp_from, str):
        if re.fullmatch(RANGE_PATTERN, timestamp_from):
            # Relative range such as "2 days": both ends come from the range.
            t_range = parse_date_range(timestamp_from)
            t_from = t_range[0].timestamp()
            t_to = t_range[1].timestamp()
        elif re.fullmatch(TIMESTAMP_PATTERN, timestamp_from) or re.fullmatch(TIMESTAMP_PATTERN_MILLI, timestamp_from):
            t_from = float(timestamp_from)
            if timestamp_to is None:
                t_to = time.time()
            else:
                t_to = float(timestamp_to)
        else:
            # Fall back to a date string; date_to_timestamp returns ms.
            t_from = date_to_timestamp(timestamp_from) / 1000
            if timestamp_to is None:
                t_to = time.time()
            else:
                t_to = date_to_timestamp(timestamp_to) / 1000
    elif isinstance(timestamp_from, datetime):
        t_from = timestamp_from.timestamp()
        if timestamp_to is None:
            t_to = time.time()
        else:
            t_to = timestamp_to.timestamp()
    else:
        raise ValueError(f'Invalid timestamp type for "from": {type(timestamp_from)}')
    return (t_from, t_to)
def get_writer_creds():
    """Materialize the writer credentials as named temp files.

    Returns a dict of *open* NamedTemporaryFile objects keyed 'key',
    'crt', 'chain'. The files are intentionally left open: callers use
    their ``.name`` paths, and closing a NamedTemporaryFile deletes it.

    Raises:
        ValueError: when writer relay/credentials are not configured.
        AssertionError: when a required credential key is missing.
    """
    if WRITER_RELAY is None:
        raise ValueError('writer_relay is not set in your Devo Integration')
    if WRITER_CREDENTIALS is None:
        raise ValueError('writer_credentials are not set in your Devo Integration')
    write_credentials = check_type(WRITER_CREDENTIALS, dict)
    assert write_credentials['key'], 'Required key: "key" is not present in writer credentials'
    assert write_credentials['crt'], 'Required key: "crt" is not present in writer credentials'
    assert write_credentials['chain'], 'Required key: "chain" is not present in writer credentials'
    # Limitation in Devo DS Connector SDK. Currently require filepaths for credentials.
    # Will accept file-handler type objects in the future.
    key_tmp = tempfile.NamedTemporaryFile(mode='w')
    crt_tmp = tempfile.NamedTemporaryFile(mode='w')
    chain_tmp = tempfile.NamedTemporaryFile(mode='w')
    key_tmp.write(write_credentials['key'])
    crt_tmp.write(write_credentials['crt'])
    chain_tmp.write(write_credentials['chain'])
    # Flush so the SDK sees complete files when it opens the paths.
    key_tmp.flush()
    crt_tmp.flush()
    chain_tmp.flush()
    creds = {
        'key': key_tmp,
        'crt': crt_tmp,
        'chain': chain_tmp
    }
    return creds
def parallel_query_helper(sub_query, append_list, timestamp_from, timestamp_to):
    """Run *sub_query* against Devo and extend *append_list* in place.

    Worker function submitted to a ThreadPoolExecutor by
    multi_table_query_command; results are materialized into a list
    before the single extend() call so concurrent workers can safely
    share *append_list*.
    """
    append_list.extend(list(ds.Reader(oauth_token=READER_OAUTH_TOKEN, end_point=READER_ENDPOINT)
                       .query(sub_query, start=float(timestamp_from), stop=float(timestamp_to),
                              output='dict', ts_format='iso')))
''' FUNCTIONS '''
def fetch_incidents():
    """Demisto fetch loop: pull new Devo alerts and create incidents.

    State carried across runs via demisto.getLastRun():
      * from_time   - epoch of the previous run (default: 1h lookback)
      * alerts_list - alert-context -> eventdate map used for dedupe
    """
    last_run = demisto.getLastRun()
    alert_query = ALERTS_QUERY
    to_time = time.time()
    dedupe_config = None
    alerts_list: Dict = {}
    new_last_run: Dict = {
        'from_time': to_time
    }
    if FETCH_INCIDENTS_FILTER:
        # Append the configured filters as a `where` clause.
        # NOTE(review): 'AND' joins with ',' while 'OR' joins with ' or ' -
        # presumably ',' means AND in Devo LINQ; confirm.
        alert_filters = check_type(FETCH_INCIDENTS_FILTER, dict)
        if alert_filters['type'] == 'AND':
            filter_string = ' , '.join([f'{filt["key"]} {filt["operator"]} "{urllib.parse.quote(filt["value"])}"'
                                        for filt in alert_filters['filters']])
        elif alert_filters['type'] == 'OR':
            filter_string = ' or '.join([f'{filt["key"]} {filt["operator"]} "{urllib.parse.quote(filt["value"])}"'
                                         for filt in alert_filters['filters']])
        alert_query = f'{alert_query} where {filter_string}'
    from_time = to_time - 3600
    if 'from_time' in last_run:
        from_time = float(last_run['from_time'])
    if FETCH_INCIDENTS_DEDUPE:
        dedupe_config = check_type(FETCH_INCIDENTS_DEDUPE, dict)
        if 'alerts_list' in last_run:
            alerts_list = last_run['alerts_list']
        # Drop dedupe entries older than the cooldown window.
        alerts_list = {k: v for k, v in alerts_list.items() if alerts_list[k] >= (to_time - float(dedupe_config['cooldown']))}
    # execute the query and get the events
    # reverse the list so that the most recent event timestamp event is taken when de-duping if needed.
    events = list(ds.Reader(oauth_token=READER_OAUTH_TOKEN, end_point=READER_ENDPOINT)
                  .query(alert_query, start=float(from_time), stop=float(to_time),
                         output='dict', ts_format='timestamp'))[::-1]
    deduped_events: List[Dict] = []
    if FETCH_INCIDENTS_DEDUPE:
        # Expire out of rolling time window events
        for event in events:
            # Skip alerts already seen in this batch or still in cooldown.
            if any(de['context'] == event['context'] for de in deduped_events):
                continue
            if event['context'] in alerts_list:
                continue
            deduped_events.append(event)
            alerts_list[event['context']] = event['eventdate']
        events = deduped_events
        new_last_run['alerts_list'] = alerts_list
    # convert the events to demisto incident
    incidents = []
    for event in events:
        # extraData is a JSON string of URL-quoted values; decode both layers.
        event['extraData'] = json.loads(event['extraData'])
        for ed in event['extraData']:
            event['extraData'][ed] = urllib.parse.unquote_plus(event['extraData'][ed])
        inc = alert_to_incident(event)
        incidents.append(inc)
    demisto.setLastRun(new_last_run)
    # this command will create incidents in Demisto
    demisto.incidents(incidents)
    return incidents
def run_query_command():
    """Run the `devo-run-query` command: execute an arbitrary LINQ query.

    Args (from demisto.args()): query, from, to (optional),
    writeToContext ('true'/'false'). Returns [results entry, link entry],
    or a single entry when the query matched nothing.
    """
    to_query = demisto.args()['query']
    timestamp_from = demisto.args()['from']
    timestamp_to = demisto.args().get('to', None)
    write_context = demisto.args()['writeToContext'].lower()
    time_range = get_time_range(timestamp_from, timestamp_to)
    results = list(ds.Reader(oauth_token=READER_OAUTH_TOKEN, end_point=READER_ENDPOINT)
                   .query(to_query, start=float(time_range[0]), stop=float(time_range[1]),
                          output='dict', ts_format='iso'))
    # The deep link wants millisecond timestamps.
    querylink = {'DevoTableLink': build_link(to_query, int(1000 * float(time_range[0])), int(1000 * float(time_range[1])))}
    entry = {
        'Type': entryTypes['note'],
        'Contents': results,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown']
    }
    entry_linq = {
        'Type': entryTypes['note'],
        'Contents': querylink,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown']
    }
    if len(results) == 0:
        entry['HumanReadable'] = 'No results found'
        # NOTE(review): these keys are set on the entry itself rather than
        # under 'EntryContext' - looks unintentional; confirm before relying
        # on Devo.QueryResults/Devo.QueryLink in the empty-result case.
        entry['Devo.QueryResults'] = None
        entry['Devo.QueryLink'] = querylink
        return entry
    headers = list(results[0].keys())
    md = tableToMarkdown('Devo query results', results, headers)
    entry['HumanReadable'] = md
    md_linq = tableToMarkdown('Link to Devo Query', {'DevoTableLink': f'[Devo Direct Link]({querylink["DevoTableLink"]})'})
    entry_linq['HumanReadable'] = md_linq
    if write_context == 'true':
        entry['EntryContext'] = {
            'Devo.QueryResults': createContext(results)
        }
        entry_linq['EntryContext'] = {
            'Devo.QueryLink': createContext(querylink)
        }
    return [entry, entry_linq]
def get_alerts_command():
    """Run the `devo-get-alerts` command: query siem.logtrust.alert.info.

    Optional `filters` arg (same shape as the fetch filters) is appended
    as a `where` clause. Returns [alerts entry, link entry], or a single
    entry when no alerts matched.
    """
    timestamp_from = demisto.args()['from']
    timestamp_to = demisto.args().get('to', None)
    alert_filters = demisto.args().get('filters', None)
    write_context = demisto.args()['writeToContext'].lower()
    alert_query = ALERTS_QUERY
    time_range = get_time_range(timestamp_from, timestamp_to)
    if alert_filters:
        alert_filters = check_type(alert_filters, dict)
        # NOTE(review): 'AND' joins with ',' - presumably Devo LINQ AND.
        if alert_filters['type'] == 'AND':
            filter_string = ', '\
                .join([f'{filt["key"]} {filt["operator"]} "{urllib.parse.quote(filt["value"])}"'
                       for filt in alert_filters['filters']])
        elif alert_filters['type'] == 'OR':
            filter_string = ' or '\
                .join([f'{filt["key"]} {filt["operator"]} "{urllib.parse.quote(filt["value"])}"'
                       for filt in alert_filters['filters']])
        alert_query = f'{alert_query} where {filter_string}'
    results = list(ds.Reader(oauth_token=READER_OAUTH_TOKEN, end_point=READER_ENDPOINT)
                   .query(alert_query, start=float(time_range[0]), stop=float(time_range[1]),
                          output='dict', ts_format='iso'))
    querylink = {'DevoTableLink': build_link(alert_query, int(1000 * float(time_range[0])), int(1000 * float(time_range[1])))}
    # extraData is a JSON string of URL-quoted values; decode both layers.
    for res in results:
        res['extraData'] = json.loads(res['extraData'])
        for ed in res['extraData']:
            res['extraData'][ed] = urllib.parse.unquote_plus(res['extraData'][ed])
    entry = {
        'Type': entryTypes['note'],
        'Contents': results,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown']
    }
    entry_linq = {
        'Type': entryTypes['note'],
        'Contents': querylink,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown']
    }
    if len(results) == 0:
        entry['HumanReadable'] = 'No results found'
        entry['Devo.AlertsResults'] = None
        # NOTE(review): link is stored on entry_linq but only `entry` is
        # returned below - the link entry is dropped; confirm intent.
        entry_linq['Devo.QueryLink'] = querylink
        return entry
    headers = list(results[0].keys())
    md = tableToMarkdown('Devo query results', results, headers)
    entry['HumanReadable'] = md
    md_linq = tableToMarkdown('Link to Devo Query', {'DevoTableLink': f'[Devo Direct Link]({querylink["DevoTableLink"]})'})
    entry_linq['HumanReadable'] = md_linq
    if write_context == 'true':
        entry['EntryContext'] = {
            'Devo.AlertsResults': createContext(results)
        }
        entry_linq['EntryContext'] = {
            'Devo.QueryLink': createContext(querylink)
        }
    return [entry, entry_linq]
def multi_table_query_command():
    """Run the `devo-multi-table-query` command: grep several tables.

    For each table, builds a query matching *searchToken* as a substring
    of any non-null column, runs all sub-queries concurrently, and merges
    every row into a single result entry.
    """
    tables_to_query = check_type(demisto.args()['tables'], list)
    search_token = demisto.args()['searchToken']
    timestamp_from = demisto.args()['from']
    timestamp_to = demisto.args().get('to', None)
    write_context = demisto.args()['writeToContext'].lower()
    time_range = get_time_range(timestamp_from, timestamp_to)
    futures = []
    all_results: List[Dict] = []
    sub_queries = []
    for table in tables_to_query:
        # NOTE(review): _get_types is a private devodsconnector API used to
        # discover the table's columns - may break on SDK upgrades.
        fields = ds.Reader(oauth_token=READER_OAUTH_TOKEN, end_point=READER_ENDPOINT)\
            ._get_types(f'from {table} select *', 'now', 'iso').keys()
        clauses = [f"( isnotnull({field}) and str({field})->\"" + search_token + "\")" for field in fields]
        sub_queries.append("from " + table + " where" + " or ".join(clauses) + " select *")
    # Workers append into the shared all_results via parallel_query_helper.
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        for q in sub_queries:
            futures.append(executor.submit(parallel_query_helper, q, all_results, time_range[0], time_range[1]))
        concurrent.futures.wait(futures)
    entry = {
        'Type': entryTypes['note'],
        'Contents': all_results,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown']
    }
    if len(all_results) == 0:
        entry['HumanReadable'] = 'No results found'
        return entry
    # Union of keys across rows, since each table has its own columns.
    headers: Set = set().union(*(r.keys() for r in all_results))
    md = tableToMarkdown('Devo query results', all_results, headers)
    entry['HumanReadable'] = md
    if write_context == 'true':
        entry['EntryContext'] = {
            'Devo.MultiResults': createContext(all_results)
        }
    return entry
def write_to_table_command():
    """Run the `devo-write-to-table` command: load records into a table.

    Writes *records* to *tableName* via the Devo writer relay and returns
    the written records plus a deep link (last-hour window) to view them.
    """
    table_name = demisto.args()['tableName']
    records = check_type(demisto.args()['records'], list)
    creds = get_writer_creds()
    # linq_func identity: presumably makes load() return the table's LINQ
    # query string, used for the link below - confirm against the SDK docs.
    linq = ds.Writer(key=creds['key'].name, crt=creds['crt'].name, chain=creds['chain'].name, relay=WRITER_RELAY)\
        .load(records, table_name, historical=False, linq_func=(lambda x: x))
    querylink = {'DevoTableLink': build_link(linq, int(1000 * time.time()) - 3600000, int(1000 * time.time()))}
    entry = {
        'Type': entryTypes['note'],
        'Contents': {'recordsWritten': records},
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'EntryContext': {
            'Devo.RecordsWritten': records,
            'Devo.LinqQuery': linq
        }
    }
    entry_linq = {
        'Type': entryTypes['note'],
        'Contents': querylink,
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'EntryContext': {
            'Devo.QueryLink': createContext(querylink)
        }
    }
    md = tableToMarkdown('Entries to load into Devo', records)
    entry['HumanReadable'] = md
    md_linq = tableToMarkdown('Link to Devo Query', {'DevoTableLink': f'[Devo Direct Link]({querylink["DevoTableLink"]})'})
    entry_linq['HumanReadable'] = md_linq
    return [entry, entry_linq]
def write_to_lookup_table_command():
    """Run the `devo-write-to-lookup-table` command.

    Streams *records* into the Devo lookup table *lookupTableName* over the
    SSL sender relay, framed by START/END control messages, and returns a
    single result entry listing the records written.

    Raises:
        Whatever Sender/Lookup raise on connection or send failure.
    """
    lookup_table_name = demisto.args()['lookupTableName']
    headers = check_type(demisto.args()['headers'], list)
    records = check_type(demisto.args()['records'], list)
    creds = get_writer_creds()
    engine_config = SenderConfigSSL(address=(WRITER_RELAY, 443),
                                    key=creds['key'].name,
                                    cert=creds['crt'].name,
                                    chain=creds['chain'].name)
    con = None
    try:
        con = Sender(config=engine_config, timeout=60)
        lookup = Lookup(name=lookup_table_name,
                        historic_tag=None,
                        con=con)
        # Order sensitive list
        pHeaders = json.dumps(headers)
        lookup.send_control('START', pHeaders, 'INC')
        for r in records:
            lookup.send_data_line(key=r['key'], fields=r['values'])
        lookup.send_control('END', pHeaders, 'INC')
    finally:
        # Bug fix: if Sender() itself raised, `con` was unbound and the
        # cleanup raised a NameError that masked the original error.
        if con is not None:
            con.flush_buffer()
            con.socket.shutdown(0)
    entry = {
        'Type': entryTypes['note'],
        'Contents': {'recordsWritten': records},
        'ContentsFormat': formats['json'],
        'ReadableContentsFormat': formats['markdown'],
        'EntryContext': {
            'Devo.RecordsWritten': records
        }
    }
    md = tableToMarkdown('Entries to load into Devo', records)
    entry['HumanReadable'] = md
    return [entry]
''' EXECUTION CODE '''
try:
    if ALLOW_INSECURE:
        # Opt-in "insecure" mode: disable TLS verification globally and
        # silence the resulting urllib3 warnings.
        os.environ['CURL_CA_BUNDLE'] = ''
        os.environ['PYTHONWARNINGS'] = 'ignore:Unverified HTTPS request'
    handle_proxy()
    # Dispatch on the Demisto command name to the matching implementation.
    if demisto.command() == 'test-module':
        check_configuration()
        demisto.results('ok')
    elif demisto.command() == 'fetch-incidents':
        fetch_incidents()
    elif demisto.command() == 'devo-run-query':
        demisto.results(run_query_command())
    elif demisto.command() == 'devo-get-alerts':
        demisto.results(get_alerts_command())
    elif demisto.command() == 'devo-multi-table-query':
        demisto.results(multi_table_query_command())
    elif demisto.command() == 'devo-write-to-table':
        demisto.results(write_to_table_command())
    elif demisto.command() == 'devo-write-to-lookup-table':
        demisto.results(write_to_lookup_table_command())
except Exception as e:
    # Report any failure to the war room instead of a raw traceback.
    return_error('Failed to execute command {}. Error: {}'.format(demisto.command(), str(e)))
|
en
| 0.707752
|
IMPORTS GLOBAL VARS from siem.logtrust.alert.info select eventdate, alertHost, domain, priority, context, category, status, alertId, srcIp, srcPort, srcHost, dstIp, dstPort, dstHost, application, engine, extraData from test.keep.free select * HELPER FUNCTIONS # Check all settings related if set # Basic functionality of integration # Converts epoch (miliseconds) to ISO string # We will assume timestamp_from and timestamp_to will be the same format or to will be None # Limitation in Devo DS Connector SDK. Currently require filepaths for credentials. # Will accept file-handler type objects in the future. FUNCTIONS # execute the query and get the events # reverse the list so that the most recent event timestamp event is taken when de-duping if needed. # Expire out of rolling time window events # convert the events to demisto incident # this command will create incidents in Demisto # Order sensitive list EXECUTION CODE
| 1.776865
| 2
|
tests/test_consultarstatusoperacional.py
|
danielgoncalves/satcfe
| 38
|
6626762
|
# -*- coding: utf-8 -*-
#
# tests/test_consultarstatusoperacional.py
#
# Copyright 2015 Base4 Sistemas Ltda ME
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from io import open
from builtins import str as text
import pytest
from satcfe.excecoes import ErroRespostaSATInvalida
from satcfe.excecoes import ExcecaoRespostaSAT
from satcfe.resposta import RespostaConsultarStatusOperacional
from satcfe.resposta.consultarstatusoperacional import DESBLOQUEADO
from satcfe.util import as_date
from satcfe.util import as_datetime
def test_resposta_consultarstatusoperacional(datadir):
    """Parse recorded success responses and verify every decoded field."""
    arquivo = text(datadir.join('respostas-de-sucesso.txt'))
    with open(arquivo, 'r', encoding='utf-8') as f:
        r_sucessos = f.read().splitlines()
    resposta = RespostaConsultarStatusOperacional.analisar(r_sucessos[0])
    assert resposta.numeroSessao == 61407
    assert resposta.EEEEE == '10000'
    assert resposta.mensagem == 'Resposta com sucesso'
    assert resposta.cod == ''
    assert resposta.mensagemSEFAZ == ''
    assert resposta.NSERIE == '900004019'
    assert resposta.TIPO_LAN == 'DHCP'
    assert resposta.LAN_IP == '10.0.0.108'
    assert resposta.LAN_MAC == '30:40:03:19:19:40'
    assert resposta.LAN_MASK == '255.255.255.0'
    assert resposta.LAN_GW == '10.0.0.1'
    assert resposta.LAN_DNS_1 == '10.0.0.1'
    assert resposta.LAN_DNS_2 == '10.0.0.1'
    assert resposta.STATUS_LAN == 'CONECTADO'
    assert resposta.NIVEL_BATERIA == 'ALTO'
    assert resposta.MT_TOTAL == '4 GB'
    assert resposta.MT_USADA == '260 MB'
    assert resposta.DH_ATUAL == as_datetime('20150912113321')
    assert resposta.VER_SB == '01.00.00'
    assert resposta.VER_LAYOUT == '00.06'
    assert resposta.ULTIMO_CF_E_SAT == '35150908723218000186599000040190000723645630'  # noqa: E501
    assert resposta.LISTA_INICIAL == '00000000000000000000000000000000000000000000'  # noqa: E501
    assert resposta.LISTA_FINAL == '00000000000000000000000000000000000000000000'  # noqa: E501
    assert resposta.DH_CFE == as_datetime('20150912104828')
    assert resposta.DH_ULTIMA == as_datetime('20150912113039')
    assert resposta.CERT_EMISSAO == as_date('20150708')
    assert resposta.CERT_VENCIMENTO == as_date('20200708')
    assert resposta.ESTADO_OPERACAO == 0
    resposta = RespostaConsultarStatusOperacional.analisar(r_sucessos[1])
    # Response from a SAT unit that has not yet transmitted any CF-e,
    # as reported at https://github.com/base4sistemas/sathub/issues/1
    # Note that fields 16 (ULTIMO_CF_E_SAT), 17 (LISTA_INICIAL),
    # 18 (LISTA_FINAL) and 19 (DH_CFE) are empty.
    assert resposta.EEEEE == '10000'
    assert resposta.ULTIMO_CF_E_SAT == ''
    assert resposta.LISTA_INICIAL == ''
    assert resposta.LISTA_FINAL == ''
    assert resposta.DH_CFE is None
    # remaining success responses, if any
    for retorno in r_sucessos[2:]:
        resposta = RespostaConsultarStatusOperacional.analisar(retorno)
        assert resposta.EEEEE == '10000'
def test_respostas_de_falha(datadir):
    """Each recorded failure response must raise ExcecaoRespostaSAT."""
    caminho = text(datadir.join('respostas-de-falha.txt'))
    with open(caminho, 'r', encoding='utf-8') as arq:
        linhas = arq.read().splitlines()
    for linha in linhas:
        with pytest.raises(ExcecaoRespostaSAT):
            RespostaConsultarStatusOperacional.analisar(linha)
def test_respostas_invalidas(datadir):
    """Each malformed response must raise ErroRespostaSATInvalida."""
    caminho = text(datadir.join('respostas-invalidas.txt'))
    with open(caminho, 'r', encoding='utf-8') as arq:
        linhas = arq.read().splitlines()
    for linha in linhas:
        with pytest.raises(ErroRespostaSATInvalida):
            RespostaConsultarStatusOperacional.analisar(linha)
@pytest.mark.acessa_sat
@pytest.mark.invoca_consultarstatusoperacional
def test_funcao_consultarstatusoperacional(clientesatlocal):
    """Invoke ConsultarStatusOperacional against a real local SAT device.

    Only runs when SAT-hardware tests are enabled via the markers above;
    the expected values match the device used by the test fixture.
    """
    resposta = clientesatlocal.consultar_status_operacional()
    assert resposta.EEEEE == '10000'
    assert resposta.TIPO_LAN == 'DHCP'
    assert resposta.STATUS_LAN == 'CONECTADO'
    assert resposta.NIVEL_BATERIA == 'ALTO'
    assert resposta.ESTADO_OPERACAO == DESBLOQUEADO
|
# -*- coding: utf-8 -*-
#
# tests/test_consultarstatusoperacional.py
#
# Copyright 2015 Base4 Sistemas Ltda ME
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from io import open
from builtins import str as text
import pytest
from satcfe.excecoes import ErroRespostaSATInvalida
from satcfe.excecoes import ExcecaoRespostaSAT
from satcfe.resposta import RespostaConsultarStatusOperacional
from satcfe.resposta.consultarstatusoperacional import DESBLOQUEADO
from satcfe.util import as_date
from satcfe.util import as_datetime
def test_resposta_consultarstatusoperacional(datadir):
    """Parse recorded success responses and verify every decoded field."""
    arquivo = text(datadir.join('respostas-de-sucesso.txt'))
    with open(arquivo, 'r', encoding='utf-8') as f:
        r_sucessos = f.read().splitlines()
    resposta = RespostaConsultarStatusOperacional.analisar(r_sucessos[0])
    assert resposta.numeroSessao == 61407
    assert resposta.EEEEE == '10000'
    assert resposta.mensagem == 'Resposta com sucesso'
    assert resposta.cod == ''
    assert resposta.mensagemSEFAZ == ''
    assert resposta.NSERIE == '900004019'
    assert resposta.TIPO_LAN == 'DHCP'
    assert resposta.LAN_IP == '10.0.0.108'
    assert resposta.LAN_MAC == '30:40:03:19:19:40'
    assert resposta.LAN_MASK == '255.255.255.0'
    assert resposta.LAN_GW == '10.0.0.1'
    assert resposta.LAN_DNS_1 == '10.0.0.1'
    assert resposta.LAN_DNS_2 == '10.0.0.1'
    assert resposta.STATUS_LAN == 'CONECTADO'
    assert resposta.NIVEL_BATERIA == 'ALTO'
    assert resposta.MT_TOTAL == '4 GB'
    assert resposta.MT_USADA == '260 MB'
    assert resposta.DH_ATUAL == as_datetime('20150912113321')
    assert resposta.VER_SB == '01.00.00'
    assert resposta.VER_LAYOUT == '00.06'
    assert resposta.ULTIMO_CF_E_SAT == '35150908723218000186599000040190000723645630'  # noqa: E501
    assert resposta.LISTA_INICIAL == '00000000000000000000000000000000000000000000'  # noqa: E501
    assert resposta.LISTA_FINAL == '00000000000000000000000000000000000000000000'  # noqa: E501
    assert resposta.DH_CFE == as_datetime('20150912104828')
    assert resposta.DH_ULTIMA == as_datetime('20150912113039')
    assert resposta.CERT_EMISSAO == as_date('20150708')
    assert resposta.CERT_VENCIMENTO == as_date('20200708')
    assert resposta.ESTADO_OPERACAO == 0
    resposta = RespostaConsultarStatusOperacional.analisar(r_sucessos[1])
    # Response from a SAT unit that has not yet transmitted any CF-e,
    # as reported at https://github.com/base4sistemas/sathub/issues/1
    # Note that fields 16 (ULTIMO_CF_E_SAT), 17 (LISTA_INICIAL),
    # 18 (LISTA_FINAL) and 19 (DH_CFE) are empty.
    assert resposta.EEEEE == '10000'
    assert resposta.ULTIMO_CF_E_SAT == ''
    assert resposta.LISTA_INICIAL == ''
    assert resposta.LISTA_FINAL == ''
    assert resposta.DH_CFE is None
    # remaining success responses, if any
    for retorno in r_sucessos[2:]:
        resposta = RespostaConsultarStatusOperacional.analisar(retorno)
        assert resposta.EEEEE == '10000'
def test_respostas_de_falha(datadir):
    """Each recorded failure response must raise ExcecaoRespostaSAT."""
    caminho = text(datadir.join('respostas-de-falha.txt'))
    with open(caminho, 'r', encoding='utf-8') as arq:
        linhas = arq.read().splitlines()
    for linha in linhas:
        with pytest.raises(ExcecaoRespostaSAT):
            RespostaConsultarStatusOperacional.analisar(linha)
def test_respostas_invalidas(datadir):
    """Each malformed response must raise ErroRespostaSATInvalida."""
    caminho = text(datadir.join('respostas-invalidas.txt'))
    with open(caminho, 'r', encoding='utf-8') as arq:
        linhas = arq.read().splitlines()
    for linha in linhas:
        with pytest.raises(ErroRespostaSATInvalida):
            RespostaConsultarStatusOperacional.analisar(linha)
@pytest.mark.acessa_sat
@pytest.mark.invoca_consultarstatusoperacional
def test_funcao_consultarstatusoperacional(clientesatlocal):
    """Invoke ConsultarStatusOperacional against a real local SAT device.

    Only runs when SAT-hardware tests are enabled via the markers above;
    the expected values match the device used by the test fixture.
    """
    resposta = clientesatlocal.consultar_status_operacional()
    assert resposta.EEEEE == '10000'
    assert resposta.TIPO_LAN == 'DHCP'
    assert resposta.STATUS_LAN == 'CONECTADO'
    assert resposta.NIVEL_BATERIA == 'ALTO'
    assert resposta.ESTADO_OPERACAO == DESBLOQUEADO
|
en
| 0.579953
|
# -*- coding: utf-8 -*- # # tests/test_consultarstatusoperacional.py # # Copyright 2015 Base4 Sistemas Ltda ME # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # noqa: E501 # noqa: E501 # noqa: E501 # Resposta de um equipamento SAT que ainda não transmitiu nenhum CF-e, # conforme ocorrência https://github.com/base4sistemas/sathub/issues/1 # Note que os campos 16 (ULTIMO_CF_E_SAT), 17 (LISTA_INICIAL), # 18 (LISTA_FINAL) e 19 (DH_CFE) estão vazios. # demais respostas de sucesso, se houverem
| 1.808927
| 2
|
chikn.py
|
Meshiest/chikn-game
| 0
|
6626763
|
<gh_stars>0
import sys, pygame, time, math, random
pygame.init()
pygame.mixer.init(frequency=22050, size=-16, channels=2, buffer=512)
# import my own code
from constants import *
# NOTE(review): the display is created before the game modules are
# imported - presumably they load images/sounds at import time, which
# requires an initialized display; keep this ordering.
screen = pygame.display.set_mode([WIDTH, HEIGHT]) #, pygame.RESIZABLE, 32)
from feather import *
from chicken import *
from utils import *
from bot import *
# --- Global game state ---
winner = None                   # controller id of the last match winner
tick = time.time()
lastTick = tick
lastSpawn = time.time()         # time of the last round (re)start
lastMenu = -1
# Menu screen identifiers
MENU_EXIT = -1
MENU_MAIN = 0
MENU_PLAY = 1
MENU_JOIN = 2
MENU_WIN = 3
menu = MENU_MAIN
mainMenu = ['Play','Quit']
mainMenuCurr = 0
mainMenuPos = 0
players = []                    # controller names currently joined
allocatedColors = {}            # controller name -> assigned color
endData = {}                    # scoreboard data from the last match
# One [target, current] sway angle per 5px grass blade across the platform
grass = [[0, 0] for i in range(0, platform_width, 5)]
def reset():
    """Start a new round: still the grass, clear effects, reset players."""
    global lastSpawn, grass
    grass = [[0, 0] for i in range(0, platform_width, 5)]
    lastSpawn = time.time()
    # Clear in place so other modules holding the same list see the reset.
    del effects[:]
    for obj in objects:
        obj.reset()
def drawGame(keys):
    """Advance and render one frame of the match (Python 2 code: note the
    integer `i/5` grass indexing).

    Updates every living chicken with the current controls, bends grass
    near grounded players, handles falling off screen (feather burst,
    kill/death bookkeeping, life loss), ends the round or match when at
    most one player remains, then draws lives, grass, effects and the
    respawn countdown.
    """
    pygame.draw.rect(screen, SKY_COLOR, (0, 0, WIDTH, HEIGHT))
    pygame.draw.rect(screen, DIRT_COLOR, ( # draw the dirt
        WIDTH/2-platform_width/2,
        HEIGHT-platform_height,
        platform_width,
        platform_height
    ))
    pygame.draw.rect(screen, GRASS_COLOR, ( # draw the grass
        WIDTH/2-platform_width/2,
        HEIGHT-platform_height,
        platform_width,
        50
    ))
    # playersLeft: players alive this round; resetLives: players that
    # still have lives left in the match.
    resetLives = playersLeft = len(objects)
    lastPlayer = False
    now = time.time()
    for obj in objects:
        if not obj.living:
            playersLeft -= 1 # remove player from living player count
            if obj.lives < 1:
                resetLives -= 1 # remove player from players with lives
            continue
        # set the last player to current obj, if resetLives == 1, this is the last player
        obj.tick(delta, getControls(obj.controller, keys))
        # Update grass around this obj
        if obj.canJump():
            playerPos = platform_width/2 - WIDTH/2.0 + obj.x
            center = int(round((playerPos) / 5)) * 5
            slamDelta = now - obj.groundTime
            # A recent landing (< 0.4s ago) bends the grass harder and wider.
            slamMod = slamDelta < 0.4 and (1 - slamDelta / 0.4) * 2 or 1
            dist = int(round(4 * slamMod) * 5)
            for i in range(max(0, center - dist), min(platform_width, center + dist), 5):
                grass[i/5][0] = max(min((playerPos - i) * slamMod, 70), -70)
        # set the player to not living if it is off screen
        if obj.y > HEIGHT + 50:
            # Burst of 10 feathers launched upward at random angles.
            for i in range(10):
                angle = -random.random() * math.pi / 2 - math.pi/4
                power = random.random() * 200 + 100
                effects.append(Feather( # spawn a new feather
                    obj.x,
                    obj.y-obj.size/2,
                    math.cos(angle)*power,
                    math.sin(angle)*power,
                    random.random() < 0.2 and obj.color or None)
                )
            obj.attacker.killData.append(("kill",obj.color))
            obj.killData.append(("death",obj.attacker.color))
            obj.lives -= 1 # remove a life and kill the player
            obj.living = False
            deathSound.play()
            playersLeft -= 1
            if obj.lives < 1:
                resetLives -= 1
        if obj.lives > 0 and obj.living:
            lastPlayer = obj
    if resetLives <= 1: # if there's one player with lives, give everyone more lives
        # Match over: build the scoreboard and switch to the win menu.
        global winner, menu, endData
        endData = {} # clear the last scoreboard
        for obj in objects: # add kills and deaths to the scoreboard
            endData[obj.controller] = obj.killData[:]
        if lastPlayer:
            lastPlayer.killData.append(("win", (0, 0, 0)))
            # the winner is the controller of the last player
            winner = lastPlayer.controller
        del objects[:]
        global menu  # NOTE(review): redundant - `menu` is declared global above
        menu = MENU_WIN # change the menu to the win menu
        return
    if playersLeft <= 1: # reset if there is one player left
        if playersLeft > 0:
            lastPlayer.killData.append(("win", (0, 0, 0)))
        reset()
        return
    for obj in effects:
        obj.tick(delta)
        # NOTE(review): removing while iterating skips the next effect;
        # cosmetic only, but worth iterating a copy instead.
        if obj.y > HEIGHT+50: # remove effects that are off screen
            effects.remove(obj)
    count = len(objects)
    part = WIDTH*2/3.0/(count) # take a fraction of two thirds of the screen for lives
    for obj in objects:
        if obj.lives < 1:
            continue
        if obj.living:
            obj.draw(screen)
        pos = objects.index(obj)
        # position where center of lives should be displayed
        centerX = WIDTH/2-(len(objects)/2.0-pos-0.5)*part
        # draw the lives of the players
        for i in range(obj.lives):
            #pygame.draw.rect(screen, (0, 0, 0), (centerX, 0, 1, HEIGHT))
            drawLife(screen, # draw a life
                centerX+part/maxLives/3*(2*i+1-obj.lives),
                HEIGHT - 20,
                obj.color
            )
    # Animate the grass: current angle chases target; target decays to 0.
    for i in range(0, platform_width, 5):
        surface = pygame.Surface((3, 20), pygame.SRCALPHA, 32)
        pygame.draw.rect(surface, GRASS_COLOR, (0, 0, 3, 10))
        grass[i/5][1] += (grass[i/5][0] - grass[i/5][1]) * 20 * delta
        grass[i/5][0] += (0 - grass[i/5][0]) * 10 * delta
        surface = pygame.transform.rotate(
            surface,
            grass[i/5][1]
        )
        screen.blit(surface, (
            WIDTH/2 - platform_width/2 + i - surface.get_width()/2.0 + 1.5,
            HEIGHT - platform_height - surface.get_height()/2 + 2,
        ))
    for obj in effects:
        obj.draw(screen)
    # Respawn countdown: pulsing white square with the remaining seconds.
    spawnDelta = spawnTimeout - time.time() + lastSpawn
    if spawnDelta > 0:
        text = playerListFont.render(str(int(round(spawnDelta))), 1, (10, 10, 10))
        rad = (((spawnDelta+0.5) % 1)*4)**2 + 20
        pygame.draw.rect(screen, (255, 255, 255), (WIDTH/2-rad, 100+text.get_height()/2-rad, rad*2, rad*2))
        screen.blit(text, (WIDTH/2-text.get_width()/2, 100))
def nextAvailableColor(color=None):
    """Return the next player color that is not already allocated.

    If *color* is one of the known palette colors, the search starts at that
    color and wraps around the palette, so repeated presses cycle through the
    remaining free colors in order.  Colors already allocated to joined
    players are skipped.

    Falls back to *color* itself (when it is a valid palette color) if every
    color is taken, and to the first palette color otherwise.
    """
    # Rotate the palette so the search begins at the requested color.
    if color in colors:
        start = colors.index(color)
        openColors = colors[start:] + colors[:start]
    else:
        openColors = colors[:]
    for player in players:  # drop colors that are already in use
        playerColor = allocatedColors[player]
        if playerColor in openColors:
            openColors.remove(playerColor)
    if openColors:  # at least one color is still free
        return openColors[0]
    elif color in colors:  # everything taken: keep the caller's color
        return color
    else:
        return colors[0]
def drawJoinMenu(keys):
    """Run one frame of the lobby: let keyboards, joysticks and bots join or
    leave, draw the player roster and the circling preview chickens, and
    start the match (or return to the main menu) on confirmation.

    `keys` is the per-frame "pressed this frame" dict built by the main loop:
    keyboard keycodes map to True, and keys['joy'] holds joystick buttons.
    """
    for i in range(len(keyboards)): # handle keyboards joining
        keyboard = keyboards[i]
        name = "kb%i" % (i)
        if players.count(name) == 1: # player is in lobby
            if keys.get(keyboard['down']): # keyboard press down
                del allocatedColors[name] # remove player
                players.remove(name)
                leaveSound.play()
            else:
                # already joined: jump/up cycles to the next available color
                if keys.get(keyboard['up']):
                    allocatedColors[name] = nextAvailableColor(allocatedColors[name])
        else:
            if keys.get(keyboard['up']): # keyboard press up
                allocatedColors["kb%i" % (i)] = nextAvailableColor()
                players.append("kb%i" % (i))
                joinSound.play()
    for i in range(pygame.joystick.get_count()): # handle controllers joining
        name = "joy%i"%(i)
        if players.count(name) == 1: # player is in lobby
            if keys['joy'].get("joy%i_%i" % (i, BTN_B)): # controller press B
                del allocatedColors[name]
                players.remove(name) # remove player
                leaveSound.play()
            else:
                # already joined: A cycles to the next available color
                if keys['joy'].get("joy%i_%i" % (i, BTN_A)):
                    allocatedColors[name] = nextAvailableColor(allocatedColors[name])
        else:
            if keys['joy'].get("joy%i_%i" % (i, BTN_A)): # controller press A
                allocatedColors[name] = nextAvailableColor()
                players.append(name)
                joinSound.play()
    # number keys 0-9 toggle AI ("bot") players in and out of the lobby
    for i in range(10):
        char = chr(i+48)
        name = "bot%s" % (char)
        if keys.get(48+i):
            if players.count(name) == 1:
                del allocatedColors[name]
                players.remove(name)
                leaveSound.play()
            else:
                allocatedColors[name] = nextAvailableColor()
                players.append(name)
                joinSound.play()
    pygame.draw.rect(screen, SKY_COLOR, (0, 0, WIDTH, HEIGHT))
    joinText = playerListFont.render("Press Jump to Join", 1, (255, 255, 255))
    screen.blit(joinText, (WIDTH/2 - joinText.get_width()/2, 10))
    # P toggles the bots' learning flag
    if keys.get(pygame.K_p):
        global LEARNING
        LEARNING = not LEARNING
        print("LEARNING is "+str(LEARNING))
    tick = time.time()  # local shadow of the global tick; drives the preview animation only
    centerX = WIDTH/2
    centerY = HEIGHT/2
    playerCount = len(players)
    for i in range(playerCount):
        # left-hand roster entry: player name plus a color swatch
        yOff = i * 30 + 80
        xOff = 10
        name = displayPlayerName(players[i])
        text = playerListFont.render(name, 1, (10, 10, 10))
        screen.blit(text, (xOff + 36, yOff-text.get_height()/2)) # draw text
        color = allocatedColors[players[i]]
        # white border
        pygame.draw.rect(screen, (255, 255, 255), (xOff, yOff-13, 26, 26))
        # colored middle
        pygame.draw.rect(screen, color, (xOff+3, yOff-10, 20, 20))
        # egg-shaped preview chickens circling the screen center
        theta = tick + math.pi*2/playerCount*i
        posX = centerX + math.cos(theta) * (playerCount * 10 + 20)
        posY = centerY + math.sin(theta) * (playerCount * 10 + 20)
        #pygame.draw.circle(screen, (255, 255, 255), (int(posX), int(posY)), 21)
        pygame.draw.ellipse(screen, color, (posX-18, posY-30, 36, 60))
        pygame.draw.rect(screen, SKY_COLOR, (posX-18, posY, 36, 30))
        pygame.draw.circle(screen, color, (int(posX), int(posY)), 18)
    playerCount = len(players)
    # Enter / Start begins the match once at least two players have joined
    if (keys.get(pygame.K_RETURN) or keys['joy'].get(BTN_START)) and playerCount > 1:
        confirmSound.play()
        remainder = playerCount % 4
        for i in range(playerCount):
            print("Adding player %s" % (players[i]))
            total = (i < playerCount - remainder and 4 or remainder) # num players on row
            part = platform_width*2/3.0/total # space between players
            centerX = WIDTH/2 - (i % 4 + 0.5 - total/2.0) * part
            posY = HEIGHT-platform_height-200 - i/4 * 60  # NOTE: relies on Python 2 integer division for the row offset
            objects.append(Chicken(
                centerX,
                posY,
                players[i],
                allocatedColors[players[i]]
            ))
        global menu
        menu = MENU_PLAY
    if keys.get(pygame.K_ESCAPE) or keys['joy'].get(BTN_SELECT):
        menu = MENU_MAIN
def drawMainMenu(delta, keys):
    """Draw and operate the title screen.

    Renders the title, eases the highlighted option toward the current
    selection, and switches menus when the player confirms Play or Quit.
    `delta` is the frame time in seconds; `keys` is the per-frame
    pressed-this-frame dict from the main loop.
    """
    global menu, mainMenu, mainMenuCurr, mainMenuPos
    pygame.draw.rect(screen, SKY_COLOR, (0, 0, WIDTH, HEIGHT))
    titleText = titleFont.render("Chikn", 1, (255, 255, 255))
    screen.blit(titleText, (WIDTH/2 - titleText.get_width()/2, 80))
    if keys.get(pygame.K_RETURN) or keys.get(pygame.K_SPACE) or keys['joy'].get(BTN_START):
        confirmSound.play()
        if mainMenu[mainMenuCurr] == 'Play':
            menu = MENU_JOIN
        elif mainMenu[mainMenuCurr] == 'Quit':
            menu = MENU_EXIT
    numOptions = len(mainMenu)
    if keys.get(pygame.K_w) or keys.get(pygame.K_UP) or keys['joy'].get(BTN_SELECT): # previous menu option
        mainMenuCurr = (mainMenuCurr + 1 + numOptions) % numOptions
        selectSound.play()
    if keys.get(pygame.K_s) or keys.get(pygame.K_DOWN): # next menu option
        mainMenuCurr = (mainMenuCurr - 1 + numOptions) % numOptions
        selectSound.play()
    mainMenuPos += (mainMenuCurr-mainMenuPos) * 10 * delta # translate towards selected option
    pygame.draw.rect(screen, (255, 255, 255), (0, HEIGHT/2-25, WIDTH, 50)) # draw a rectangle to hold the selected menu
    for i in range(numOptions):
        option = mainMenu[i]
        text = playerListFont.render(option, 1, (10, 10, 10))
        screen.blit(text, ( # draw the option
            WIDTH/2 - text.get_width()/2,
            HEIGHT/2 + (i-mainMenuPos) * 50 + - text.get_height()/2)
        )
def drawWinMenu(keys):
    """Draw the post-match scoreboard.

    Shows a giant egg in the winner's color behind one column per player
    listing their kill/death/win marks (from `endData`), then returns to the
    join menu on Enter/Space/Start or to the main menu on Escape/Select.
    """
    global menu
    winnerColor = winner and allocatedColors[winner] or SKY_COLOR
    playerCount = len(players)
    pygame.draw.rect(screen, SKY_COLOR, (0, 0, WIDTH, HEIGHT))
    scale = HEIGHT/10  # NOTE: relies on Python 2 integer division (used as a circle radius below)
    # draw large background egg
    pygame.draw.ellipse(screen, winnerColor, (WIDTH/2-3*scale, HEIGHT/2-5*scale*3/4, 6*scale, 10*scale))
    pygame.draw.rect(screen, SKY_COLOR, (WIDTH/2-3*scale, HEIGHT/2+5*scale/4, 6*scale, 5*scale))
    pygame.draw.circle(screen, winnerColor, (int(WIDTH/2), int(HEIGHT/2+5*scale/4)), 3*scale)
    part = WIDTH*2/3.0/playerCount
    vert = HEIGHT*2/3.0
    for i in range(playerCount):
        player = players[i]
        centerX = WIDTH/2 - (i + 0.5 - playerCount/2.0) * part
        left = centerX-part/3
        top = HEIGHT/2-vert/2
        width = part*2/3
        # column width rounded down to a multiple of 30 so the life icons tile evenly
        roundedWidth = (int(width-15)/30*30)
        pygame.draw.rect(screen, (255, 255 ,255), ( # draw border
            left-3,
            top-3,
            width+6,
            vert+6
        ))
        pygame.draw.rect(screen, (allocatedColors[player]), ( # draw box
            left,
            top,
            width,
            vert
        ))
        killData = endData[player]
        for k in range(len(killData)):
            data = killData[k]  # ("kill"|"death"|"win", color) tuple
            drawLife(screen,
                left + k*30 % roundedWidth + 30,
                top + k*30/roundedWidth * 30 + 30,
                data[1],
                data[0] == "death" and True or data[0] == "win" and "win" or False
            )
    if keys.get(pygame.K_RETURN) or keys.get(pygame.K_SPACE) or keys['joy'].get(BTN_START):
        menu = MENU_JOIN
    elif keys.get(pygame.K_ESCAPE) or keys['joy'].get(BTN_SELECT):
        menu = MENU_MAIN
# Main loop: compute the frame delta, collect this frame's input, dispatch to
# the active menu's draw/update function, and flip the display.
while 1:
    lastTick, tick = tick, time.time()
    delta = tick - lastTick  # seconds since the previous frame
    keys = pygame.key.get_pressed()  # held-down keyboard state (used during gameplay)
    keyPressed = {'joy': {}}  # keys/buttons newly pressed THIS frame only
    close = False
    for event in pygame.event.get():
        if event.type==pygame.QUIT:
            close = True
        if event.type == pygame.VIDEORESIZE: #not used
            WIDTH = event.dict['size'][0]
            HEIGHT = event.dict['size'][1]
            reset()
            #screen = pygame.display.set_mode((WIDTH, HEIGHT),pygame.HWSURFACE|pygame.DOUBLEBUF|pygame.RESIZABLE)
            keyPressed['joy'] = {}
        if event.type == pygame.KEYDOWN: # assign the key if it's down
            keyPressed[event.dict['key']] = True
        if event.type == pygame.JOYBUTTONDOWN: # assign the joy button twice (one for ambiguous presses)
            keyPressed['joy'][event.dict['button']] = True
            keyPressed['joy']["joy%i_%i" % (event.dict['joy'], event.dict['button'])] = True
    if close:
        saveBrain()  # persist bot state before quitting
        break
    if menu != lastMenu: # when the menu changes
        lastMenu = menu
        if menu == MENU_MAIN:
            initJoysticks()
            continue
        if menu == MENU_PLAY:
            reset()
    if menu == MENU_MAIN:
        drawMainMenu(delta, keyPressed)
    elif menu == MENU_JOIN:
        drawJoinMenu(keyPressed)
    elif menu == MENU_PLAY:
        drawGame(keys)
        if keyPressed.get(pygame.K_ESCAPE): # return to join menu if pressed
            del objects[:] # remove players
            menu = MENU_JOIN
    elif menu == MENU_WIN:
        drawWinMenu(keyPressed)
    elif menu == MENU_EXIT:
        saveBrain()
        break
    pygame.display.update()
|
import sys, pygame, time, math, random
pygame.init()
pygame.mixer.init(frequency=22050, size=-16, channels=2, buffer=512)
# import my own code
from constants import *
screen = pygame.display.set_mode([WIDTH, HEIGHT]) #, pygame.RESIZABLE, 32)
from feather import *
from chicken import *
from utils import *
from bot import *
winner = None
tick = time.time()
lastTick = tick
lastSpawn = time.time()
lastMenu = -1
MENU_EXIT = -1
MENU_MAIN = 0
MENU_PLAY = 1
MENU_JOIN = 2
MENU_WIN = 3
menu = MENU_MAIN
mainMenu = ['Play','Quit']
mainMenuCurr = 0
mainMenuPos = 0
players = []
allocatedColors = {}
endData = {}
grass = [[0, 0] for i in range(0, platform_width, 5)]
def reset():
    """Start a fresh round: flatten the grass, restart the spawn timer,
    clear lingering effects, and reset every object still in play."""
    global lastSpawn, grass
    grass = [[0, 0] for _ in range(0, platform_width, 5)]
    lastSpawn = time.time()
    del effects[:]
    for entity in objects:
        entity.reset()
def drawGame(keys):
    """Run one frame of gameplay.

    Draws the arena, ticks every chicken (handling grass deformation, falls
    off-screen, kill/death bookkeeping), detects end-of-round conditions,
    updates and draws effects/lives/grass, and shows the respawn countdown.
    `keys` is pygame's held-key state array.  Reads the module-level `delta`
    computed by the main loop.
    """
    pygame.draw.rect(screen, SKY_COLOR, (0, 0, WIDTH, HEIGHT))
    pygame.draw.rect(screen, DIRT_COLOR, ( # draw the dirt
        WIDTH/2-platform_width/2,
        HEIGHT-platform_height,
        platform_width,
        platform_height
    ))
    pygame.draw.rect(screen, GRASS_COLOR, ( # draw the grass
        WIDTH/2-platform_width/2,
        HEIGHT-platform_height,
        platform_width,
        50
    ))
    # playersLeft counts currently-living players; resetLives counts players
    # that still have at least one life remaining.
    resetLives = playersLeft = len(objects)
    lastPlayer = False
    now = time.time()
    for obj in objects:
        if not obj.living:
            playersLeft -= 1 # remove player from living player count
            if obj.lives < 1:
                resetLives -= 1 # remove player from players with lives
            continue
        # set the last player to current obj, if resetLives == 1, this is the last player
        obj.tick(delta, getControls(obj.controller, keys))
        # Update grass around this obj
        if obj.canJump():
            playerPos = platform_width/2 - WIDTH/2.0 + obj.x
            center = int(round((playerPos) / 5)) * 5
            slamDelta = now - obj.groundTime
            # a recent landing (< 0.4s ago) bends the grass harder
            slamMod = slamDelta < 0.4 and (1 - slamDelta / 0.4) * 2 or 1
            dist = int(round(4 * slamMod) * 5)
            # NOTE: i/5 relies on Python 2 integer division to index the grass list
            for i in range(max(0, center - dist), min(platform_width, center + dist), 5):
                grass[i/5][0] = max(min((playerPos - i) * slamMod, 70), -70)
        # set the player to not living if it is off screen
        if obj.y > HEIGHT + 50:
            for i in range(10):
                angle = -random.random() * math.pi / 2 - math.pi/4
                power = random.random() * 200 + 100
                effects.append(Feather( # spawn a new feather
                    obj.x,
                    obj.y-obj.size/2,
                    math.cos(angle)*power,
                    math.sin(angle)*power,
                    random.random() < 0.2 and obj.color or None)
                )
            # credit the kill to whoever hit this chicken last
            obj.attacker.killData.append(("kill",obj.color))
            obj.killData.append(("death",obj.attacker.color))
            obj.lives -= 1 # remove a life and kill the player
            obj.living = False
            deathSound.play()
            playersLeft -= 1
            if obj.lives < 1:
                resetLives -= 1
        if obj.lives > 0 and obj.living:
            lastPlayer = obj
    if resetLives <= 1: # if there's one player with lives, give everyone more lives
        global winner, menu, endData
        endData = {} # clear the last scoreboard
        for obj in objects: # add kills and deaths to the scoreboard
            endData[obj.controller] = obj.killData[:]
        if lastPlayer:
            lastPlayer.killData.append(("win", (0, 0, 0)))
            # the winner is the controller of the last player
            winner = lastPlayer.controller
        del objects[:]
        global menu
        menu = MENU_WIN # change the menu to the win menu
        return
    if playersLeft <= 1: # reset if there is one player left
        # NOTE(review): assumes lastPlayer was set when playersLeft > 0 — verify
        if playersLeft > 0:
            lastPlayer.killData.append(("win", (0, 0, 0)))
        reset()
        return
    for obj in effects:
        obj.tick(delta)
        if obj.y > HEIGHT+50: # remove effects that are off screen
            effects.remove(obj)
    count = len(objects)
    part = WIDTH*2/3.0/(count) # take a fraction of two thirds of the screen for lives
    for obj in objects:
        if obj.lives < 1:
            continue
        if obj.living:
            obj.draw(screen)
        pos = objects.index(obj)
        # position where center of lives should be displayed
        centerX = WIDTH/2-(len(objects)/2.0-pos-0.5)*part
        # draw the lives of the players
        for i in range(obj.lives):
            #pygame.draw.rect(screen, (0, 0, 0), (centerX, 0, 1, HEIGHT))
            drawLife(screen, # draw a life
                centerX+part/maxLives/3*(2*i+1-obj.lives),
                HEIGHT - 20,
                obj.color
            )
    # animate and draw each grass blade: [target angle, displayed angle]
    for i in range(0, platform_width, 5):
        surface = pygame.Surface((3, 20), pygame.SRCALPHA, 32)
        pygame.draw.rect(surface, GRASS_COLOR, (0, 0, 3, 10))
        grass[i/5][1] += (grass[i/5][0] - grass[i/5][1]) * 20 * delta
        grass[i/5][0] += (0 - grass[i/5][0]) * 10 * delta
        surface = pygame.transform.rotate(
            surface,
            grass[i/5][1]
        )
        screen.blit(surface, (
            WIDTH/2 - platform_width/2 + i - surface.get_width()/2.0 + 1.5,
            HEIGHT - platform_height - surface.get_height()/2 + 2,
        ))
    for obj in effects:
        obj.draw(screen)
    # respawn countdown bubble while players are waiting to drop in
    spawnDelta = spawnTimeout - time.time() + lastSpawn
    if spawnDelta > 0:
        text = playerListFont.render(str(int(round(spawnDelta))), 1, (10, 10, 10))
        rad = (((spawnDelta+0.5) % 1)*4)**2 + 20
        pygame.draw.rect(screen, (255, 255, 255), (WIDTH/2-rad, 100+text.get_height()/2-rad, rad*2, rad*2))
        screen.blit(text, (WIDTH/2-text.get_width()/2, 100))
def nextAvailableColor(color=None):
    """Return the next player color that is not already allocated.

    If *color* is one of the known palette colors, the search starts at that
    color and wraps around the palette, so repeated presses cycle through the
    remaining free colors in order.  Colors already allocated to joined
    players are skipped.

    Falls back to *color* itself (when it is a valid palette color) if every
    color is taken, and to the first palette color otherwise.
    """
    # Rotate the palette so the search begins at the requested color.
    if color in colors:
        start = colors.index(color)
        openColors = colors[start:] + colors[:start]
    else:
        openColors = colors[:]
    for player in players:  # drop colors that are already in use
        playerColor = allocatedColors[player]
        if playerColor in openColors:
            openColors.remove(playerColor)
    if openColors:  # at least one color is still free
        return openColors[0]
    elif color in colors:  # everything taken: keep the caller's color
        return color
    else:
        return colors[0]
def drawJoinMenu(keys):
    """Run one frame of the lobby: let keyboards, joysticks and bots join or
    leave, draw the player roster and the circling preview chickens, and
    start the match (or return to the main menu) on confirmation.

    `keys` is the per-frame "pressed this frame" dict built by the main loop:
    keyboard keycodes map to True, and keys['joy'] holds joystick buttons.
    """
    for i in range(len(keyboards)): # handle keyboards joining
        keyboard = keyboards[i]
        name = "kb%i" % (i)
        if players.count(name) == 1: # player is in lobby
            if keys.get(keyboard['down']): # keyboard press down
                del allocatedColors[name] # remove player
                players.remove(name)
                leaveSound.play()
            else:
                # already joined: jump/up cycles to the next available color
                if keys.get(keyboard['up']):
                    allocatedColors[name] = nextAvailableColor(allocatedColors[name])
        else:
            if keys.get(keyboard['up']): # keyboard press up
                allocatedColors["kb%i" % (i)] = nextAvailableColor()
                players.append("kb%i" % (i))
                joinSound.play()
    for i in range(pygame.joystick.get_count()): # handle controllers joining
        name = "joy%i"%(i)
        if players.count(name) == 1: # player is in lobby
            if keys['joy'].get("joy%i_%i" % (i, BTN_B)): # controller press B
                del allocatedColors[name]
                players.remove(name) # remove player
                leaveSound.play()
            else:
                # already joined: A cycles to the next available color
                if keys['joy'].get("joy%i_%i" % (i, BTN_A)):
                    allocatedColors[name] = nextAvailableColor(allocatedColors[name])
        else:
            if keys['joy'].get("joy%i_%i" % (i, BTN_A)): # controller press A
                allocatedColors[name] = nextAvailableColor()
                players.append(name)
                joinSound.play()
    # number keys 0-9 toggle AI ("bot") players in and out of the lobby
    for i in range(10):
        char = chr(i+48)
        name = "bot%s" % (char)
        if keys.get(48+i):
            if players.count(name) == 1:
                del allocatedColors[name]
                players.remove(name)
                leaveSound.play()
            else:
                allocatedColors[name] = nextAvailableColor()
                players.append(name)
                joinSound.play()
    pygame.draw.rect(screen, SKY_COLOR, (0, 0, WIDTH, HEIGHT))
    joinText = playerListFont.render("Press Jump to Join", 1, (255, 255, 255))
    screen.blit(joinText, (WIDTH/2 - joinText.get_width()/2, 10))
    # P toggles the bots' learning flag
    if keys.get(pygame.K_p):
        global LEARNING
        LEARNING = not LEARNING
        print("LEARNING is "+str(LEARNING))
    tick = time.time()  # local shadow of the global tick; drives the preview animation only
    centerX = WIDTH/2
    centerY = HEIGHT/2
    playerCount = len(players)
    for i in range(playerCount):
        # left-hand roster entry: player name plus a color swatch
        yOff = i * 30 + 80
        xOff = 10
        name = displayPlayerName(players[i])
        text = playerListFont.render(name, 1, (10, 10, 10))
        screen.blit(text, (xOff + 36, yOff-text.get_height()/2)) # draw text
        color = allocatedColors[players[i]]
        # white border
        pygame.draw.rect(screen, (255, 255, 255), (xOff, yOff-13, 26, 26))
        # colored middle
        pygame.draw.rect(screen, color, (xOff+3, yOff-10, 20, 20))
        # egg-shaped preview chickens circling the screen center
        theta = tick + math.pi*2/playerCount*i
        posX = centerX + math.cos(theta) * (playerCount * 10 + 20)
        posY = centerY + math.sin(theta) * (playerCount * 10 + 20)
        #pygame.draw.circle(screen, (255, 255, 255), (int(posX), int(posY)), 21)
        pygame.draw.ellipse(screen, color, (posX-18, posY-30, 36, 60))
        pygame.draw.rect(screen, SKY_COLOR, (posX-18, posY, 36, 30))
        pygame.draw.circle(screen, color, (int(posX), int(posY)), 18)
    playerCount = len(players)
    # Enter / Start begins the match once at least two players have joined
    if (keys.get(pygame.K_RETURN) or keys['joy'].get(BTN_START)) and playerCount > 1:
        confirmSound.play()
        remainder = playerCount % 4
        for i in range(playerCount):
            print("Adding player %s" % (players[i]))
            total = (i < playerCount - remainder and 4 or remainder) # num players on row
            part = platform_width*2/3.0/total # space between players
            centerX = WIDTH/2 - (i % 4 + 0.5 - total/2.0) * part
            posY = HEIGHT-platform_height-200 - i/4 * 60  # NOTE: relies on Python 2 integer division for the row offset
            objects.append(Chicken(
                centerX,
                posY,
                players[i],
                allocatedColors[players[i]]
            ))
        global menu
        menu = MENU_PLAY
    if keys.get(pygame.K_ESCAPE) or keys['joy'].get(BTN_SELECT):
        menu = MENU_MAIN
def drawMainMenu(delta, keys):
    """Draw and operate the title screen.

    Renders the title, eases the highlighted option toward the current
    selection, and switches menus when the player confirms Play or Quit.
    `delta` is the frame time in seconds; `keys` is the per-frame
    pressed-this-frame dict from the main loop.
    """
    global menu, mainMenu, mainMenuCurr, mainMenuPos
    pygame.draw.rect(screen, SKY_COLOR, (0, 0, WIDTH, HEIGHT))
    titleText = titleFont.render("Chikn", 1, (255, 255, 255))
    screen.blit(titleText, (WIDTH/2 - titleText.get_width()/2, 80))
    if keys.get(pygame.K_RETURN) or keys.get(pygame.K_SPACE) or keys['joy'].get(BTN_START):
        confirmSound.play()
        if mainMenu[mainMenuCurr] == 'Play':
            menu = MENU_JOIN
        elif mainMenu[mainMenuCurr] == 'Quit':
            menu = MENU_EXIT
    numOptions = len(mainMenu)
    if keys.get(pygame.K_w) or keys.get(pygame.K_UP) or keys['joy'].get(BTN_SELECT): # previous menu option
        mainMenuCurr = (mainMenuCurr + 1 + numOptions) % numOptions
        selectSound.play()
    if keys.get(pygame.K_s) or keys.get(pygame.K_DOWN): # next menu option
        mainMenuCurr = (mainMenuCurr - 1 + numOptions) % numOptions
        selectSound.play()
    mainMenuPos += (mainMenuCurr-mainMenuPos) * 10 * delta # translate towards selected option
    pygame.draw.rect(screen, (255, 255, 255), (0, HEIGHT/2-25, WIDTH, 50)) # draw a rectangle to hold the selected menu
    for i in range(numOptions):
        option = mainMenu[i]
        text = playerListFont.render(option, 1, (10, 10, 10))
        screen.blit(text, ( # draw the option
            WIDTH/2 - text.get_width()/2,
            HEIGHT/2 + (i-mainMenuPos) * 50 + - text.get_height()/2)
        )
def drawWinMenu(keys):
    """Draw the post-match scoreboard.

    Shows a giant egg in the winner's color behind one column per player
    listing their kill/death/win marks (from `endData`), then returns to the
    join menu on Enter/Space/Start or to the main menu on Escape/Select.
    """
    global menu
    winnerColor = winner and allocatedColors[winner] or SKY_COLOR
    playerCount = len(players)
    pygame.draw.rect(screen, SKY_COLOR, (0, 0, WIDTH, HEIGHT))
    scale = HEIGHT/10  # NOTE: relies on Python 2 integer division (used as a circle radius below)
    # draw large background egg
    pygame.draw.ellipse(screen, winnerColor, (WIDTH/2-3*scale, HEIGHT/2-5*scale*3/4, 6*scale, 10*scale))
    pygame.draw.rect(screen, SKY_COLOR, (WIDTH/2-3*scale, HEIGHT/2+5*scale/4, 6*scale, 5*scale))
    pygame.draw.circle(screen, winnerColor, (int(WIDTH/2), int(HEIGHT/2+5*scale/4)), 3*scale)
    part = WIDTH*2/3.0/playerCount
    vert = HEIGHT*2/3.0
    for i in range(playerCount):
        player = players[i]
        centerX = WIDTH/2 - (i + 0.5 - playerCount/2.0) * part
        left = centerX-part/3
        top = HEIGHT/2-vert/2
        width = part*2/3
        # column width rounded down to a multiple of 30 so the life icons tile evenly
        roundedWidth = (int(width-15)/30*30)
        pygame.draw.rect(screen, (255, 255 ,255), ( # draw border
            left-3,
            top-3,
            width+6,
            vert+6
        ))
        pygame.draw.rect(screen, (allocatedColors[player]), ( # draw box
            left,
            top,
            width,
            vert
        ))
        killData = endData[player]
        for k in range(len(killData)):
            data = killData[k]  # ("kill"|"death"|"win", color) tuple
            drawLife(screen,
                left + k*30 % roundedWidth + 30,
                top + k*30/roundedWidth * 30 + 30,
                data[1],
                data[0] == "death" and True or data[0] == "win" and "win" or False
            )
    if keys.get(pygame.K_RETURN) or keys.get(pygame.K_SPACE) or keys['joy'].get(BTN_START):
        menu = MENU_JOIN
    elif keys.get(pygame.K_ESCAPE) or keys['joy'].get(BTN_SELECT):
        menu = MENU_MAIN
# Main loop: compute the frame delta, collect this frame's input, dispatch to
# the active menu's draw/update function, and flip the display.
while 1:
    lastTick, tick = tick, time.time()
    delta = tick - lastTick  # seconds since the previous frame
    keys = pygame.key.get_pressed()  # held-down keyboard state (used during gameplay)
    keyPressed = {'joy': {}}  # keys/buttons newly pressed THIS frame only
    close = False
    for event in pygame.event.get():
        if event.type==pygame.QUIT:
            close = True
        if event.type == pygame.VIDEORESIZE: #not used
            WIDTH = event.dict['size'][0]
            HEIGHT = event.dict['size'][1]
            reset()
            #screen = pygame.display.set_mode((WIDTH, HEIGHT),pygame.HWSURFACE|pygame.DOUBLEBUF|pygame.RESIZABLE)
            keyPressed['joy'] = {}
        if event.type == pygame.KEYDOWN: # assign the key if it's down
            keyPressed[event.dict['key']] = True
        if event.type == pygame.JOYBUTTONDOWN: # assign the joy button twice (one for ambiguous presses)
            keyPressed['joy'][event.dict['button']] = True
            keyPressed['joy']["joy%i_%i" % (event.dict['joy'], event.dict['button'])] = True
    if close:
        saveBrain()  # persist bot state before quitting
        break
    if menu != lastMenu: # when the menu changes
        lastMenu = menu
        if menu == MENU_MAIN:
            initJoysticks()
            continue
        if menu == MENU_PLAY:
            reset()
    if menu == MENU_MAIN:
        drawMainMenu(delta, keyPressed)
    elif menu == MENU_JOIN:
        drawJoinMenu(keyPressed)
    elif menu == MENU_PLAY:
        drawGame(keys)
        if keyPressed.get(pygame.K_ESCAPE): # return to join menu if pressed
            del objects[:] # remove players
            menu = MENU_JOIN
    elif menu == MENU_WIN:
        drawWinMenu(keyPressed)
    elif menu == MENU_EXIT:
        saveBrain()
        break
    pygame.display.update()
|
en
| 0.842833
|
# import my own code #, pygame.RESIZABLE, 32) # draw the dirt # draw the grass # remove player from living player count # remove player from players with lives # set the last player to current obj, if resetLives == 1, this is the last player # Update grass around this obj # set the player to not living if it is off screen # spawn a new feather # remove a life and kill the player # if there's one player with lives, give everyone more lives # clear the last scoreboard # add kills and deaths to the scoreboard # the winner is the controller of the last player # change the menu to the win menu # reset if there is one player left # remove effects that are off screen # take a fraction of two thirds of the screen for lives # position where center of lives should be displayed # draw the lives of the players #pygame.draw.rect(screen, (0, 0, 0), (centerX, 0, 1, HEIGHT)) # draw a life # if a color in the list is given, shift the colors to that index, otherwise use all colors # go through all players and remove colors that are in use # if there are any more open colors # if the original color exists # handle keyboards joining # player is in lobby # keyboard press down # remove player # keyboard press up # handle controllers joining # player is in lobby # controller press B # remove player # controller press A # draw text # white border # colored middle #pygame.draw.circle(screen, (255, 255, 255), (int(posX), int(posY)), 21) # num players on row # space between players # previous menu option # next menu option # translate towards selected option # draw a rectangle to hold the selected menu # draw the option # draw large background egg # draw border # draw box #not used #screen = pygame.display.set_mode((WIDTH, HEIGHT),pygame.HWSURFACE|pygame.DOUBLEBUF|pygame.RESIZABLE) # assign the key if it's down # assign the joy button twice (one for ambiguous presses) # when the menu changes # return to join menu if pressed # remove players
| 2.465173
| 2
|
mmd_scripting/scripts_for_gui/file_recompress_images.py
|
Nuthouse01/PMX-VMD-Scripting-Tools
| 54
|
6626764
|
import os
import shutil
import mmd_scripting.core.nuthouse01_core as core
import mmd_scripting.core.nuthouse01_io as io
import mmd_scripting.core.nuthouse01_pmx_parser as pmxlib
from mmd_scripting.scripts_for_gui import file_sort_textures
_SCRIPT_VERSION = "Script version: Nuthouse01 - v1.07.03 - 8/9/2021"
# This code is free to use and re-distribute, but I cannot be held responsible for damages that it may or may not cause.
#####################
# Pillow is an optional dependency: when it is missing, `Image` stays None so
# main() can detect that and abort with an explanatory message.
Image = None
# NOTE: i comment this block before compiling the EXE cuz the Pillow library is gigantic & makes the exe version like 200K
try:
    from PIL import Image
except ImportError:
    Image = None
# print extra messages when certain things fail in certain ways
DEBUG = False
# working location used while re-encoding, relative to the model folder
TEMPORARY_RECOMPRESS_LOCATION = "../TEMPORARY_RECOMPRESS_LOCATION/"
# this is recommended true, for obvious reasons
MAKE_BACKUP_ZIPFILE = True
# note: zipper automatically appends .zip onto whatever output name i give it, so dont give it a .zip suffix here
BACKUP_SUFFIX = "beforePNG"
# Pillow format names that are unconditionally re-encoded to PNG
IM_FORMAT_ALWAYS_CONVERT = ("DDS", "TIFF", "TGA")
# Pillow format names that are never re-encoded (see comments at bottom)
IM_FORMAT_ALWAYS_SKIP = ("JPEG", "GIF")
# these are rare BMP formats that are known to be incompatible with MocuMocuDance
KNOWN_BAD_FORMATS = ("BGR;15", "BGR;16")
# if recompression saves less than XXX KB, then don't save the result
REQUIRED_COMPRESSION_AMOUNT_KB = 100
# how PIL reads things:
# PNG, JPEG, BMP, DDS, TIFF, GIF
# re-exported lookup tables shared with the texture-sorting script
IMG_TYPE_TO_EXT = file_sort_textures.IMG_TYPE_TO_EXT
IMG_EXT = file_sort_textures.IMG_EXT
helptext = '''=================================================
file_recompress_images:
This tool will try to re-compress all image files in the file tree.
Generally this means converting BMP/TGA/other images to PNG format, for maximum lossless image compression.
JPEG image compression is more aggressive than PNG, so JPEG images will stay as JPEG. GIFs are weird so they are also not modified.
This requires a PMX file to use as a root so it knows where to start reading files from.
Before actually changing anything, it will list all proposed file renames and ask for final confirmation.
It also creates a zipfile backup of the entire folder, just in case.
Bonus: this can process all "neighbor" pmx files in addition to the target, this highly recommended because neighbors usually reference similar sets of files.
Note: this script requires the Python library 'Pillow' to be installed.
Note: unlike my other scripts, this overwrites the original input PMX file(s) instead of creating a new file with a suffix. This is because I already create a zipfile that contains the original input PMX, so that serves as a good backup.
'''
# dds/tga/tiff will always be converted to png
# jpeg/gif will always be skipped (jpeg is already lossy & therefore compresses better than png, gif is animated & complex)
# bmp will be re-compressed to png if the original bmp is in 15-bit or 16-bit encoding (mocumocudance compatability)
# other image types are re-compressed to png if doing so saves 100kb or more
# also, all images are renamed so that the file extension matches the actual image data format
def main(moreinfo: bool = False) -> None:
	"""Re-compress all image files in a PMX model's folder tree to PNG.

	Workflow (interactive, heavy disk I/O):
	1. Prompt for a PMX file; walk the file tree rooted at its folder.
	2. Optionally include "neighbor" PMX files at the same top level.
	3. For each existing image: open with Pillow, re-save as optimized PNG into a
	   temp folder, and keep the PNG only if it is sufficiently smaller, or the
	   source format is always-convert (DDS/TIFF/TGA), or it is a known-bad BMP.
	   Images not re-saved may still be renamed so the extension matches the
	   actual format.
	4. Show all proposed renames/savings, ask for confirmation, make a zip
	   backup, then delete originals, move new files into place, and rewrite
	   texture references in the PMX file(s), OVERWRITING the original PMXes.

	:param moreinfo: if True, print permanent per-file lines instead of an
		overwriting progress printout, and pass verbose flags to sub-calls
	:return: always None
	"""
	# step zero: verify that Pillow exists
	if Image is None:
		core.MY_PRINT_FUNC("ERROR: Python library 'Pillow' not found. This script requires this library to run!")
		core.MY_PRINT_FUNC("This script cannot be ran from the EXE version, the Pillow library is too large to package into the executable.")
		core.MY_PRINT_FUNC("To install Pillow, please use the command 'pip install Pillow' in the Windows command prompt and then run the Python scripts directly.")
		return None
	# print pillow version just cuz
	core.MY_PRINT_FUNC("Using Pillow version '%s'" % Image.__version__)
	core.MY_PRINT_FUNC("Please enter name of PMX model file:")
	input_filename_pmx = core.MY_FILEPROMPT_FUNC("PMX file", ".pmx")
	# absolute path to directory holding the pmx
	input_filename_pmx_abs = os.path.normpath(os.path.abspath(input_filename_pmx))
	startpath, input_filename_pmx_rel = os.path.split(input_filename_pmx_abs)
	# =========================================================================================================
	# =========================================================================================================
	# =========================================================================================================
	# first, build the list of ALL files that actually exist, then filter it down to neighbor PMXs and relevant files
	relative_all_exist_files = file_sort_textures.walk_filetree_from_root(startpath)
	core.MY_PRINT_FUNC("ALL EXISTING FILES:", len(relative_all_exist_files))
	# now fill "neighbor_pmx" by finding files without path separator that end in PMX
	# these are relative paths tho
	neighbor_pmx = [f for f in relative_all_exist_files if
					(f.lower().endswith(".pmx")) and
					(os.path.sep not in f) and
					f != input_filename_pmx_rel]
	core.MY_PRINT_FUNC("NEIGHBOR PMX FILES:", len(neighbor_pmx))
	# filter down to just image files
	relevant_exist_files = [f for f in relative_all_exist_files if f.lower().endswith(IMG_EXT)]
	core.MY_PRINT_FUNC("RELEVANT EXISTING FILES:", len(relevant_exist_files))
	# =========================================================================================================
	# =========================================================================================================
	# =========================================================================================================
	# now ask if I care about the neighbors and read the PMXes into memory
	pmx_filenames = [input_filename_pmx_rel]
	if neighbor_pmx:
		core.MY_PRINT_FUNC("")
		info = [
			"Detected %d top-level neighboring PMX files, these probably share the same filebase as the target." % len(neighbor_pmx),
			"If files are moved/renamed but the neighbors are not processed, the neighbor texture references will probably break.",
			"Do you want to process all neighbors in addition to the target? (highly recommended)",
			"1 = Yes, 2 = No"]
		r = core.MY_SIMPLECHOICE_FUNC((1, 2), info)
		if r == 1:
			core.MY_PRINT_FUNC("Processing target + all neighbor files")
			# append neighbor PMX files onto the list of files to be processed
			pmx_filenames += neighbor_pmx
		else:
			core.MY_PRINT_FUNC("WARNING: Processing only target, ignoring %d neighbor PMX files" % len(neighbor_pmx))
	# now read all the PMX objects & store in dict alongside the relative name
	# dictionary where keys are filename and values are resulting pmx objects
	all_pmx_obj = {}
	for this_pmx_name in pmx_filenames:
		this_pmx_obj = pmxlib.read_pmx(os.path.join(startpath, this_pmx_name), moreinfo=moreinfo)
		all_pmx_obj[this_pmx_name] = this_pmx_obj
	# =========================================================================================================
	# =========================================================================================================
	# =========================================================================================================
	# for each pmx, for each file on disk, match against files used in textures (case-insensitive) and replace with canonical name-on-disk
	# also fill out how much and how each file is used, and unify dupes between files, all that good stuff
	filerecord_list = file_sort_textures.build_filerecord_list(all_pmx_obj, relevant_exist_files, moreinfo)
	# =========================================================================================================
	# =========================================================================================================
	# =========================================================================================================
	# DETERMINE NEW NAMES FOR FILES
	# note: need to put this tempdire ONE LEVEL UP or else it will be included in the zip! lol
	tempdir = os.path.join(startpath, TEMPORARY_RECOMPRESS_LOCATION)
	tempdir = os.path.normpath(tempdir)
	os.makedirs(tempdir, exist_ok=True)
	pil_cannot_inspect_list = []  # names of files PIL could not open/inspect
	pil_imgext_mismatch = 0       # count of files renamed only to fix a wrong extension
	num_recompressed = 0          # count of files whose recompressed PNG was kept
	# list of memory saved by recompressing each file. same order/length as "image_filerecords"
	mem_saved = []
	mem_original = []
	# only iterate over images that exist, obviously
	image_filerecords = [f for f in filerecord_list if f.exists]
	# set of names used to simulate renames and detect collisions without touching disk
	virtual_nameset = set([f.name for f in image_filerecords])
	# iterate over the images
	for i, p in enumerate(image_filerecords):
		abspath = os.path.join(startpath, p.name)
		orig_size = os.path.getsize(abspath)
		mem_original.append(orig_size)
		mem_saved.append(0)  # if i succesfully recompress this image, I will overwrite this 0
		# if not moreinfo, then each line overwrites the previous like a progress printout does
		# if moreinfo, then each line is printed permanently
		core.MY_PRINT_FUNC("...analyzing {:>3}/{:>3}, file='{}', size={}                ".format(
			i+1, len(image_filerecords), p.name, core.prettyprint_file_size(orig_size)), is_progress=(not moreinfo))
		# open the image & catch all possible errors
		try:
			im = Image.open(abspath)
		except FileNotFoundError as eeee:
			core.MY_PRINT_FUNC("FILESYSTEM MALFUNCTION!!", eeee.__class__.__name__, eeee)
			core.MY_PRINT_FUNC("os.walk created a list of all filenames on disk, but then this filename doesn't exist when i try to open it?")
			pil_cannot_inspect_list.append(p.name)
			continue
		except OSError as eeee:
			# this has 2 causes, "Unsupported BMP bitfields layout" or "cannot identify image file"
			if DEBUG:
				print("CANNOT INSPECT!1", eeee.__class__.__name__, eeee, p.name)
			pil_cannot_inspect_list.append(p.name)
			continue
		except NotImplementedError as eeee:
			# this is because there's some DDS format it can't make sense of
			if DEBUG:
				print("CANNOT INSPECT!2", eeee.__class__.__name__, eeee, p.name)
			pil_cannot_inspect_list.append(p.name)
			continue
		# NOTE(review): 'im' is never closed inside the loop; only the final image is
		# closed after the loop ends, so file handles stay open until then — consider
		# per-iteration close in a future revision.
		if im.format not in IMG_TYPE_TO_EXT:
			core.MY_PRINT_FUNC("WARNING: file '%s' has unusual image format '%s', attempting to continue" % (p.name, im.format))
		##################################################
		# now the image is successfully opened!
		base, currext = os.path.splitext(p.name)
		newname_as_png = base + ".png"
		if p.name != newname_as_png:
			# if the newname is going to be the same, it came from the disk so it's already guaranteed unique
			# if the newname is going to be different, it might collide with something else getting renamed to png!
			# i might not end up going thru with the rename, but I should get the unique name figured out now.
			# since I am simulating a rename, remove the original from the list.
			virtual_nameset.remove(p.name)
			newname_as_png = core.filepath_get_unused_name(newname_as_png, checkdisk=False, namelist=virtual_nameset)
		newname_as_png_full = os.path.join(tempdir, newname_as_png)
		# 1, depending on image format, attempt to re-save as PNG
		if im.format not in IM_FORMAT_ALWAYS_SKIP:
			try:
				# create all needed subfolders for the destination
				os.makedirs(os.path.dirname(newname_as_png_full), exist_ok=True)
			except OSError as e:
				core.MY_PRINT_FUNC(e.__class__.__name__, e)
				core.MY_PRINT_FUNC("ERROR1: failed to create intermediate folders for '%s'" % newname_as_png_full)
				virtual_nameset.add(p.name)  # aborted the rename, so put the original name back!
				continue
			try:
				# save to tempfilename with png format, use optimize=true
				im.save(newname_as_png_full, format="PNG", optimize=True)
			except OSError as e:
				core.MY_PRINT_FUNC(e.__class__.__name__, e)
				core.MY_PRINT_FUNC("ERROR2: failed to re-compress image '%s', original not modified" % p.name)
				virtual_nameset.add(p.name)  # aborted the rename, so put the original name back!
				continue
			##################################################################
			# now i have succesfully re-saved the image as a PNG!
			# next question, do I want to keep this result or the original?
			# the new version is kept if:
			# 1) the new version is sufficiently smaller,
			# 2) the old version is a filetype I specifically hate (dds, tga, tiff)
			# 3) the old version is a known-bad BMP type,
			# measure & compare file size
			new_size = os.path.getsize(newname_as_png_full)
			diff = orig_size - new_size
			is_sufficiently_smaller = (diff > (REQUIRED_COMPRESSION_AMOUNT_KB * 1024))
			is_alwaysconvert_format = (im.format in IM_FORMAT_ALWAYS_CONVERT)
			# if using a 16-bit BMP format, i want to re-compress it
			is_bad_bmp = False
			if im.format == "BMP":
				try:
					# this might fail, images are weird, sometimes they don't have the attributes i expect
					# im.tile[0][3][0] is the raw-mode string of the first tile descriptor — TODO confirm across Pillow versions
					if im.tile[0][3][0] in KNOWN_BAD_FORMATS:
						is_bad_bmp = True
				except Exception as e:
					if DEBUG:
						print(e.__class__.__name__, e, "BMP CHECK FAILED", p.name, im.tile)
			if is_sufficiently_smaller or is_bad_bmp or is_alwaysconvert_format:
				# if any of these 3 is true, then I am going to keep it!
				num_recompressed += 1
				p.newname = newname_as_png
				virtual_nameset.add(newname_as_png)  # i'm keeping this rename, so add the new name to the set
				mem_saved[-1] = diff  # overwrite the 0 at the end of the list with the correct value
				continue  # if succesfully re-saved, do not do the extension-checking below
			# # if this is not sufficiently compressed, do not use "continue", DO hit the extension-checking below
		# 2, if the file extension doesn't match with the image type, then make it match
		# this only happens if the image was not re-saved above
		if im.format in IMG_TYPE_TO_EXT and currext not in IMG_TYPE_TO_EXT[im.format]:
			newname = base + IMG_TYPE_TO_EXT[im.format][0]
			# resolve potential collisions by adding numbers suffix to file names
			newname = core.filepath_get_unused_name(newname, checkdisk=False, namelist=virtual_nameset)
			pil_imgext_mismatch += 1
			p.newname = newname
			virtual_nameset.add(newname)  # i'm keeping this rename, so add the new name to the set
			continue
		# since i didn't commit to any of the rename ideas, put the original name back
		virtual_nameset.add(p.name)
		pass
	# these must be the same length after iterating
	assert len(mem_saved) == len(image_filerecords)
	# if the image is still open, close it
	# NOTE(review): 'im' is only bound inside the loop; if image_filerecords is
	# empty this would raise NameError — presumably never hit in practice, verify.
	if im is not None:
		im.close()
	# =========================================================================================================
	# =========================================================================================================
	# =========================================================================================================
	# are there any with proposed renaming?
	if not any(u.newname is not None for u in image_filerecords):
		core.MY_PRINT_FUNC("No proposed file changes")
		core.MY_PRINT_FUNC("Aborting: no files were changed")
		# also delete the tempspace!
		try:
			shutil.rmtree(tempdir)
		except OSError as e:
			core.MY_PRINT_FUNC(e.__class__.__name__, e)
			core.MY_PRINT_FUNC("ERROR3: failed to delete temporary folder '%s'" % tempdir)
		return None
	# =========================================================================================================
	# =========================================================================================================
	# =========================================================================================================
	# now, display all the proposed changes...
	mem_new = [original - saved for original, saved in zip(mem_original, mem_saved)]
	# attach the mem-savings to the name and stuff
	filerecord_with_savings = list(zip(image_filerecords, mem_saved))
	# sort descending by savings, most savings first
	filerecord_with_savings.sort(key=core.get2nd, reverse=True)
	# filter it
	changed_files = [u for u in filerecord_with_savings if u[0].newname is not None]
	core.MY_PRINT_FUNC("="*60)
	if pil_cannot_inspect_list:
		core.MY_PRINT_FUNC("WARNING: failed to inspect %d image files, these must be handled manually" % len(pil_cannot_inspect_list))
		core.MY_PRINT_FUNC(pil_cannot_inspect_list)
	if num_recompressed:
		core.MY_PRINT_FUNC("Recompressed %d images! %s of disk space has been freed" % (num_recompressed, core.prettyprint_file_size(sum(mem_saved))))
		core.MY_PRINT_FUNC("Reduction = {:.1%}... initial size = {:s}, new size = {:s}".format(
			sum(mem_saved)/sum(mem_original),
			core.prettyprint_file_size(sum(mem_original)),
			core.prettyprint_file_size(sum(mem_new)),))
	if pil_imgext_mismatch:
		core.MY_PRINT_FUNC("Renamed %d images that had incorrect extensions (included below)" % pil_imgext_mismatch)
	# build three parallel columns: old name, new name, and human-readable size delta
	oldname_list = []
	newname_list = []
	savings_list = []
	for C,saved in changed_files:
		oldname_list.append(C.name)
		if saved == 0: savings_list.append("")
		elif saved > 0: savings_list.append("reduced " + core.prettyprint_file_size(saved))
		else: savings_list.append("increased " + core.prettyprint_file_size(abs(saved)))
		# if newname == oldname, then just display "SAME-NAME" instead
		if C.newname == C.name: newname_list.append("SAME-NAME")
		else: newname_list.append(C.newname)
	# justify the first 2 columns
	oldname_list_j = core.MY_JUSTIFY_STRINGLIST(oldname_list)
	newname_list_j = core.MY_JUSTIFY_STRINGLIST(newname_list)
	# zip everything for easy iter
	zipped = zip(oldname_list_j, newname_list_j, savings_list)
	for o,n,s in zipped:
		# print 'from' with the case/separator it uses in the PMX
		core.MY_PRINT_FUNC("   {:s} --> {:s} | {:s}".format(o, n, s))
	info = ["Do you accept these new names/locations?",
			"1 = Yes, 2 = No (abort)"]
	r = core.MY_SIMPLECHOICE_FUNC((1, 2), info)
	if r == 2:
		core.MY_PRINT_FUNC("Aborting: no files were changed")
		# also delete the tempspace!
		try:
			shutil.rmtree(tempdir)
		except OSError as e:
			core.MY_PRINT_FUNC(e.__class__.__name__, e)
			core.MY_PRINT_FUNC("ERROR4: failed to delete temporary folder '%s'" % tempdir)
		return None
	# =========================================================================================================
	# =========================================================================================================
	# =========================================================================================================
	# NOW do the actual renaming!
	# first, create a backup of the folder
	if MAKE_BACKUP_ZIPFILE:
		r = file_sort_textures.make_zipfile_backup(startpath, BACKUP_SUFFIX)
		if not r:
			# this happens if the backup failed somehow AND the user decided to quit
			core.MY_PRINT_FUNC("Aborting: no files were changed")
			# also delete the tempspace!
			try:
				shutil.rmtree(tempdir)
			except OSError as e:
				core.MY_PRINT_FUNC(e.__class__.__name__, e)
				core.MY_PRINT_FUNC("ERROR6: failed to delete temporary folder '%s'" % tempdir)
			return None
	# then, replace the original images with the new versions
	core.MY_PRINT_FUNC("...renaming files on disk...")
	for C,saved in changed_files:
		# if this file exists on disk and there is a new name for this file,
		if C.exists and C.newname is not None:
			path_original = os.path.join(startpath, C.name)
			path_newfrom = os.path.join(tempdir, C.newname)
			path_newto = os.path.join(startpath, C.newname)
			# 1. delete C.name
			try:
				io.check_and_fix_readonly(path_original)
				os.remove(path_original)
			except OSError as e:
				core.MY_PRINT_FUNC(e.__class__.__name__, e)
				core.MY_PRINT_FUNC("ERROR: failed to delete original image file '%s'" % path_original)
				core.MY_PRINT_FUNC("I will try to continue.")
			# 2. move C.newname from tempdir to proper dir
			try:
				# os.renames creates all necessary intermediate folders needed for the destination
				# it also deletes the source folders if they become empty after the rename operation
				os.renames(path_newfrom, path_newto)
			except OSError as e:
				# ending the operation halfway through is unacceptable! attempt to continue
				core.MY_PRINT_FUNC(e.__class__.__name__, e)
				core.MY_PRINT_FUNC("ERROR: failed to move newly-compressed file '%s' to location '%s'" % (path_newfrom, path_newto))
				core.MY_PRINT_FUNC("I will try to continue.")
				# change this to empty to signify that it didn't actually get moved, check this before changing PMX paths
				C.newname = None
	# lastly, do all renaming in PMXes, but only if some of the names changed!
	# if i renamed a few .pngs to the same names, no point in re-writing the PMXs
	if any((u.newname != u.name and u.newname is not None)for u in image_filerecords):
		file_sort_textures.apply_file_renaming(all_pmx_obj, image_filerecords, startpath, skipdiskrename=True)
		# write out
		for this_pmx_name, this_pmx_obj in all_pmx_obj.items():
			# NOTE: this is OVERWRITING THE PREVIOUS PMX FILE, NOT CREATING A NEW ONE
			# because I make a zipfile backup I don't need to feel worried about preserving the old version
			output_filename_pmx = os.path.join(startpath, this_pmx_name)
			# output_filename_pmx = core.filepath_get_unused_name(output_filename_pmx)
			pmxlib.write_pmx(output_filename_pmx, this_pmx_obj, moreinfo=moreinfo)
	else:
		core.MY_PRINT_FUNC("No names were changed, no need to re-write the PMX.")
	# also delete the tempspace!
	try:
		shutil.rmtree(tempdir)
	except OSError as e:
		core.MY_PRINT_FUNC(e.__class__.__name__, e)
		core.MY_PRINT_FUNC("ERROR5: failed to delete temporary folder '%s'" % tempdir)
	core.MY_PRINT_FUNC("Done!")
	return None
# script entry point: print version + help, then run main() with the project's
# traceback-catching wrapper so errors are shown to the user instead of crashing silently
if __name__ == '__main__':
	core.MY_PRINT_FUNC(_SCRIPT_VERSION)
	core.MY_PRINT_FUNC(helptext)
	core.RUN_WITH_TRACEBACK(main)
# NOTE(review): everything below this line is a duplicate copy of this same script
# (file-concatenation artifact); a stray '|' separator stood here, which was a
# Python syntax error, so it has been replaced with this comment.
import os
import shutil
import mmd_scripting.core.nuthouse01_core as core
import mmd_scripting.core.nuthouse01_io as io
import mmd_scripting.core.nuthouse01_pmx_parser as pmxlib
from mmd_scripting.scripts_for_gui import file_sort_textures
# version string printed at startup
_SCRIPT_VERSION = "Script version:  Nuthouse01 - v1.07.03 - 8/9/2021"
# This code is free to use and re-distribute, but I cannot be held responsible for damages that it may or may not cause.
#####################
# default to None so the rest of the module can test "Image is None" to detect a missing Pillow
Image = None
# NOTE: i comment this block before compiling the EXE cuz the Pillow library is gigantic & makes the exe version like 200K
try:
	from PIL import Image
except ImportError:
	Image = None
# print extra messages when certain things fail in certain ways
DEBUG = False
# temp output folder is placed one level ABOVE the model folder so the zip backup doesn't include it
TEMPORARY_RECOMPRESS_LOCATION = "../TEMPORARY_RECOMPRESS_LOCATION/"
# this is recommended true, for obvious reasons
MAKE_BACKUP_ZIPFILE = True
# note: zipper automatically appends .zip onto whatever output name i give it, so dont give it a .zip suffix here
BACKUP_SUFFIX = "beforePNG"
# PIL format names that are always converted to PNG regardless of size savings
IM_FORMAT_ALWAYS_CONVERT = ("DDS", "TIFF", "TGA")
# PIL format names that are never re-encoded to PNG (already lossy / animated)
IM_FORMAT_ALWAYS_SKIP = ("JPEG", "GIF")
# these are rare BMP formats that are known to be incompatible with MocuMocuDance
KNOWN_BAD_FORMATS = ("BGR;15", "BGR;16")
# if recompression saves less than XXX KB, then don't save the result
REQUIRED_COMPRESSION_AMOUNT_KB = 100
# how PIL reads things:
# PNG, JPEG, BMP, DDS, TIFF, GIF
# shared lookup tables from file_sort_textures: format-name -> extensions, and all image extensions
IMG_TYPE_TO_EXT = file_sort_textures.IMG_TYPE_TO_EXT
IMG_EXT = file_sort_textures.IMG_EXT
# user-facing help text printed before the script runs (runtime string, do not edit casually)
helptext = '''=================================================
file_recompress_images:
This tool will try to re-compress all image files in the file tree.
Generally this means converting BMP/TGA/other images to PNG format, for maximum lossless image compression.
JPEG image compression is more aggressive than PNG, so JPEG images will stay as JPEG. GIFs are weird so they are also not modified.
This requires a PMX file to use as a root so it knows where to start reading files from.
Before actually changing anything, it will list all proposed file renames and ask for final confirmation.
It also creates a zipfile backup of the entire folder, just in case.
Bonus: this can process all "neighbor" pmx files in addition to the target, this highly recommended because neighbors usually reference similar sets of files.
Note: this script requires the Python library 'Pillow' to be installed.
Note: unlike my other scripts, this overwrites the original input PMX file(s) instead of creating a new file with a suffix. This is because I already create a zipfile that contains the original input PMX, so that serves as a good backup.
'''
# dds/tga/tiff will always be converted to png
# jpeg/gif will always be skipped (jpeg is already lossy & therefore compresses better than png, gif is animated & complex)
# bmp will be re-compressed to png if the original bmp is in 15-bit or 16-bit encoding (mocumocudance compatability)
# other image types are re-compressed to png if doing so saves 100kb or more
# also, all images are renamed so that the file extension matches the actual image data format
def main(moreinfo=False):
# step zero: verify that Pillow exists
if Image is None:
core.MY_PRINT_FUNC("ERROR: Python library 'Pillow' not found. This script requires this library to run!")
core.MY_PRINT_FUNC("This script cannot be ran from the EXE version, the Pillow library is too large to package into the executable.")
core.MY_PRINT_FUNC("To install Pillow, please use the command 'pip install Pillow' in the Windows command prompt and then run the Python scripts directly.")
return None
# print pillow version just cuz
core.MY_PRINT_FUNC("Using Pillow version '%s'" % Image.__version__)
core.MY_PRINT_FUNC("Please enter name of PMX model file:")
input_filename_pmx = core.MY_FILEPROMPT_FUNC("PMX file", ".pmx")
# absolute path to directory holding the pmx
input_filename_pmx_abs = os.path.normpath(os.path.abspath(input_filename_pmx))
startpath, input_filename_pmx_rel = os.path.split(input_filename_pmx_abs)
# =========================================================================================================
# =========================================================================================================
# =========================================================================================================
# first, build the list of ALL files that actually exist, then filter it down to neighbor PMXs and relevant files
relative_all_exist_files = file_sort_textures.walk_filetree_from_root(startpath)
core.MY_PRINT_FUNC("ALL EXISTING FILES:", len(relative_all_exist_files))
# now fill "neighbor_pmx" by finding files without path separator that end in PMX
# these are relative paths tho
neighbor_pmx = [f for f in relative_all_exist_files if
(f.lower().endswith(".pmx")) and
(os.path.sep not in f) and
f != input_filename_pmx_rel]
core.MY_PRINT_FUNC("NEIGHBOR PMX FILES:", len(neighbor_pmx))
# filter down to just image files
relevant_exist_files = [f for f in relative_all_exist_files if f.lower().endswith(IMG_EXT)]
core.MY_PRINT_FUNC("RELEVANT EXISTING FILES:", len(relevant_exist_files))
# =========================================================================================================
# =========================================================================================================
# =========================================================================================================
# now ask if I care about the neighbors and read the PMXes into memory
pmx_filenames = [input_filename_pmx_rel]
if neighbor_pmx:
core.MY_PRINT_FUNC("")
info = [
"Detected %d top-level neighboring PMX files, these probably share the same filebase as the target." % len(neighbor_pmx),
"If files are moved/renamed but the neighbors are not processed, the neighbor texture references will probably break.",
"Do you want to process all neighbors in addition to the target? (highly recommended)",
"1 = Yes, 2 = No"]
r = core.MY_SIMPLECHOICE_FUNC((1, 2), info)
if r == 1:
core.MY_PRINT_FUNC("Processing target + all neighbor files")
# append neighbor PMX files onto the list of files to be processed
pmx_filenames += neighbor_pmx
else:
core.MY_PRINT_FUNC("WARNING: Processing only target, ignoring %d neighbor PMX files" % len(neighbor_pmx))
# now read all the PMX objects & store in dict alongside the relative name
# dictionary where keys are filename and values are resulting pmx objects
all_pmx_obj = {}
for this_pmx_name in pmx_filenames:
this_pmx_obj = pmxlib.read_pmx(os.path.join(startpath, this_pmx_name), moreinfo=moreinfo)
all_pmx_obj[this_pmx_name] = this_pmx_obj
# =========================================================================================================
# =========================================================================================================
# =========================================================================================================
# for each pmx, for each file on disk, match against files used in textures (case-insensitive) and replace with canonical name-on-disk
# also fill out how much and how each file is used, and unify dupes between files, all that good stuff
filerecord_list = file_sort_textures.build_filerecord_list(all_pmx_obj, relevant_exist_files, moreinfo)
# =========================================================================================================
# =========================================================================================================
# =========================================================================================================
# DETERMINE NEW NAMES FOR FILES
# note: need to put this tempdire ONE LEVEL UP or else it will be included in the zip! lol
tempdir = os.path.join(startpath, TEMPORARY_RECOMPRESS_LOCATION)
tempdir = os.path.normpath(tempdir)
os.makedirs(tempdir, exist_ok=True)
pil_cannot_inspect_list = []
pil_imgext_mismatch = 0
num_recompressed = 0
# list of memory saved by recompressing each file. same order/length as "image_filerecords"
mem_saved = []
mem_original = []
# only iterate over images that exist, obviously
image_filerecords = [f for f in filerecord_list if f.exists]
virtual_nameset = set([f.name for f in image_filerecords])
# iterate over the images
for i, p in enumerate(image_filerecords):
abspath = os.path.join(startpath, p.name)
orig_size = os.path.getsize(abspath)
mem_original.append(orig_size)
mem_saved.append(0) # if i succesfully recompress this image, I will overwrite this 0
# if not moreinfo, then each line overwrites the previous like a progress printout does
# if moreinfo, then each line is printed permanently
core.MY_PRINT_FUNC("...analyzing {:>3}/{:>3}, file='{}', size={} ".format(
i+1, len(image_filerecords), p.name, core.prettyprint_file_size(orig_size)), is_progress=(not moreinfo))
# open the image & catch all possible errors
try:
im = Image.open(abspath)
except FileNotFoundError as eeee:
core.MY_PRINT_FUNC("FILESYSTEM MALFUNCTION!!", eeee.__class__.__name__, eeee)
core.MY_PRINT_FUNC("os.walk created a list of all filenames on disk, but then this filename doesn't exist when i try to open it?")
pil_cannot_inspect_list.append(p.name)
continue
except OSError as eeee:
# this has 2 causes, "Unsupported BMP bitfields layout" or "cannot identify image file"
if DEBUG:
print("CANNOT INSPECT!1", eeee.__class__.__name__, eeee, p.name)
pil_cannot_inspect_list.append(p.name)
continue
except NotImplementedError as eeee:
# this is because there's some DDS format it can't make sense of
if DEBUG:
print("CANNOT INSPECT!2", eeee.__class__.__name__, eeee, p.name)
pil_cannot_inspect_list.append(p.name)
continue
if im.format not in IMG_TYPE_TO_EXT:
core.MY_PRINT_FUNC("WARNING: file '%s' has unusual image format '%s', attempting to continue" % (p.name, im.format))
##################################################
# now the image is successfully opened!
base, currext = os.path.splitext(p.name)
newname_as_png = base + ".png"
if p.name != newname_as_png:
# if the newname is going to be the same, it came from the disk so it's already guaranteed unique
# if the newname is going to be different, it might collide with something else getting renamed to png!
# i might not end up going thru with the rename, but I should get the unique name figured out now.
# since I am simulating a rename, remove the original from the list.
virtual_nameset.remove(p.name)
newname_as_png = core.filepath_get_unused_name(newname_as_png, checkdisk=False, namelist=virtual_nameset)
newname_as_png_full = os.path.join(tempdir, newname_as_png)
# 1, depending on image format, attempt to re-save as PNG
if im.format not in IM_FORMAT_ALWAYS_SKIP:
try:
# create all needed subfolders for the destination
os.makedirs(os.path.dirname(newname_as_png_full), exist_ok=True)
except OSError as e:
core.MY_PRINT_FUNC(e.__class__.__name__, e)
core.MY_PRINT_FUNC("ERROR1: failed to create intermediate folders for '%s'" % newname_as_png_full)
virtual_nameset.add(p.name) # aborted the rename, so put the original name back!
continue
try:
# save to tempfilename with png format, use optimize=true
im.save(newname_as_png_full, format="PNG", optimize=True)
except OSError as e:
core.MY_PRINT_FUNC(e.__class__.__name__, e)
core.MY_PRINT_FUNC("ERROR2: failed to re-compress image '%s', original not modified" % p.name)
virtual_nameset.add(p.name) # aborted the rename, so put the original name back!
continue
##################################################################
# now i have succesfully re-saved the image as a PNG!
# next question, do I want to keep this result or the original?
# the new version is kept if:
# 1) the new version is sufficiently smaller,
# 2) the old version is a filetype I specifically hate (dds, tga, tiff)
# 3) the old version is a known-bad BMP type,
# measure & compare file size
new_size = os.path.getsize(newname_as_png_full)
diff = orig_size - new_size
is_sufficiently_smaller = (diff > (REQUIRED_COMPRESSION_AMOUNT_KB * 1024))
is_alwaysconvert_format = (im.format in IM_FORMAT_ALWAYS_CONVERT)
# if using a 16-bit BMP format, i want to re-compress it
is_bad_bmp = False
if im.format == "BMP":
try:
# this might fail, images are weird, sometimes they don't have the attributes i expect
if im.tile[0][3][0] in KNOWN_BAD_FORMATS:
is_bad_bmp = True
except Exception as e:
if DEBUG:
print(e.__class__.__name__, e, "BMP CHECK FAILED", p.name, im.tile)
if is_sufficiently_smaller or is_bad_bmp or is_alwaysconvert_format:
# if any of these 3 is true, then I am going to keep it!
num_recompressed += 1
p.newname = newname_as_png
virtual_nameset.add(newname_as_png) # i'm keeping this rename, so add the new name to the set
mem_saved[-1] = diff # overwrite the 0 at the end of the list with the correct value
continue # if succesfully re-saved, do not do the extension-checking below
# # if this is not sufficiently compressed, do not use "continue", DO hit the extension-checking below
# 2, if the file extension doesn't match with the image type, then make it match
# this only happens if the image was not re-saved above
if im.format in IMG_TYPE_TO_EXT and currext not in IMG_TYPE_TO_EXT[im.format]:
newname = base + IMG_TYPE_TO_EXT[im.format][0]
# resolve potential collisions by adding numbers suffix to file names
newname = core.filepath_get_unused_name(newname, checkdisk=False, namelist=virtual_nameset)
pil_imgext_mismatch += 1
p.newname = newname
virtual_nameset.add(newname) # i'm keeping this rename, so add the new name to the set
continue
# since i didn't commit to any of the rename ideas, put the original name back
virtual_nameset.add(p.name)
pass
# these must be the same length after iterating
assert len(mem_saved) == len(image_filerecords)
# if the image is still open, close it
if im is not None:
im.close()
# =========================================================================================================
# =========================================================================================================
# =========================================================================================================
# are there any with proposed renaming?
if not any(u.newname is not None for u in image_filerecords):
core.MY_PRINT_FUNC("No proposed file changes")
core.MY_PRINT_FUNC("Aborting: no files were changed")
# also delete the tempspace!
try:
shutil.rmtree(tempdir)
except OSError as e:
core.MY_PRINT_FUNC(e.__class__.__name__, e)
core.MY_PRINT_FUNC("ERROR3: failed to delete temporary folder '%s'" % tempdir)
return None
# =========================================================================================================
# =========================================================================================================
# =========================================================================================================
# now, display all the proposed changes...
mem_new = [original - saved for original, saved in zip(mem_original, mem_saved)]
# attach the mem-savings to the name and stuff
filerecord_with_savings = list(zip(image_filerecords, mem_saved))
# sort descending by savings, most savings first
filerecord_with_savings.sort(key=core.get2nd, reverse=True)
# filter it
changed_files = [u for u in filerecord_with_savings if u[0].newname is not None]
core.MY_PRINT_FUNC("="*60)
if pil_cannot_inspect_list:
core.MY_PRINT_FUNC("WARNING: failed to inspect %d image files, these must be handled manually" % len(pil_cannot_inspect_list))
core.MY_PRINT_FUNC(pil_cannot_inspect_list)
if num_recompressed:
core.MY_PRINT_FUNC("Recompressed %d images! %s of disk space has been freed" % (num_recompressed, core.prettyprint_file_size(sum(mem_saved))))
core.MY_PRINT_FUNC("Reduction = {:.1%}... initial size = {:s}, new size = {:s}".format(
sum(mem_saved)/sum(mem_original),
core.prettyprint_file_size(sum(mem_original)),
core.prettyprint_file_size(sum(mem_new)),))
if pil_imgext_mismatch:
core.MY_PRINT_FUNC("Renamed %d images that had incorrect extensions (included below)" % pil_imgext_mismatch)
oldname_list = []
newname_list = []
savings_list = []
for C,saved in changed_files:
oldname_list.append(C.name)
if saved == 0: savings_list.append("")
elif saved > 0: savings_list.append("reduced " + core.prettyprint_file_size(saved))
else: savings_list.append("increased " + core.prettyprint_file_size(abs(saved)))
# if newname == oldname, then just display "SAME-NAME" instead
if C.newname == C.name: newname_list.append("SAME-NAME")
else: newname_list.append(C.newname)
# justify the first 2 columns
oldname_list_j = core.MY_JUSTIFY_STRINGLIST(oldname_list)
newname_list_j = core.MY_JUSTIFY_STRINGLIST(newname_list)
# zip everything for easy iter
zipped = zip(oldname_list_j, newname_list_j, savings_list)
for o,n,s in zipped:
# print 'from' with the case/separator it uses in the PMX
core.MY_PRINT_FUNC(" {:s} --> {:s} | {:s}".format(o, n, s))
info = ["Do you accept these new names/locations?",
"1 = Yes, 2 = No (abort)"]
r = core.MY_SIMPLECHOICE_FUNC((1, 2), info)
if r == 2:
core.MY_PRINT_FUNC("Aborting: no files were changed")
# also delete the tempspace!
try:
shutil.rmtree(tempdir)
except OSError as e:
core.MY_PRINT_FUNC(e.__class__.__name__, e)
core.MY_PRINT_FUNC("ERROR4: failed to delete temporary folder '%s'" % tempdir)
return None
# =========================================================================================================
# =========================================================================================================
# =========================================================================================================
# NOW do the actual renaming!
# first, create a backup of the folder
if MAKE_BACKUP_ZIPFILE:
r = file_sort_textures.make_zipfile_backup(startpath, BACKUP_SUFFIX)
if not r:
# this happens if the backup failed somehow AND the user decided to quit
core.MY_PRINT_FUNC("Aborting: no files were changed")
# also delete the tempspace!
try:
shutil.rmtree(tempdir)
except OSError as e:
core.MY_PRINT_FUNC(e.__class__.__name__, e)
core.MY_PRINT_FUNC("ERROR6: failed to delete temporary folder '%s'" % tempdir)
return None
# then, replace the original images with the new versions
core.MY_PRINT_FUNC("...renaming files on disk...")
for C,saved in changed_files:
# if this file exists on disk and there is a new name for this file,
if C.exists and C.newname is not None:
path_original = os.path.join(startpath, C.name)
path_newfrom = os.path.join(tempdir, C.newname)
path_newto = os.path.join(startpath, C.newname)
# 1. delete C.name
try:
io.check_and_fix_readonly(path_original)
os.remove(path_original)
except OSError as e:
core.MY_PRINT_FUNC(e.__class__.__name__, e)
core.MY_PRINT_FUNC("ERROR: failed to delete original image file '%s'" % path_original)
core.MY_PRINT_FUNC("I will try to continue.")
# 2. move C.newname from tempdir to proper dir
try:
# os.renames creates all necessary intermediate folders needed for the destination
# it also deletes the source folders if they become empty after the rename operation
os.renames(path_newfrom, path_newto)
except OSError as e:
# ending the operation halfway through is unacceptable! attempt to continue
core.MY_PRINT_FUNC(e.__class__.__name__, e)
core.MY_PRINT_FUNC("ERROR: failed to move newly-compressed file '%s' to location '%s'" % (path_newfrom, path_newto))
core.MY_PRINT_FUNC("I will try to continue.")
# change this to empty to signify that it didn't actually get moved, check this before changing PMX paths
C.newname = None
# lastly, do all renaming in PMXes, but only if some of the names changed!
# if i renamed a few .pngs to the same names, no point in re-writing the PMXs
if any((u.newname != u.name and u.newname is not None)for u in image_filerecords):
file_sort_textures.apply_file_renaming(all_pmx_obj, image_filerecords, startpath, skipdiskrename=True)
# write out
for this_pmx_name, this_pmx_obj in all_pmx_obj.items():
# NOTE: this is OVERWRITING THE PREVIOUS PMX FILE, NOT CREATING A NEW ONE
# because I make a zipfile backup I don't need to feel worried about preserving the old version
output_filename_pmx = os.path.join(startpath, this_pmx_name)
# output_filename_pmx = core.filepath_get_unused_name(output_filename_pmx)
pmxlib.write_pmx(output_filename_pmx, this_pmx_obj, moreinfo=moreinfo)
else:
core.MY_PRINT_FUNC("No names were changed, no need to re-write the PMX.")
# also delete the tempspace!
try:
shutil.rmtree(tempdir)
except OSError as e:
core.MY_PRINT_FUNC(e.__class__.__name__, e)
core.MY_PRINT_FUNC("ERROR5: failed to delete temporary folder '%s'" % tempdir)
core.MY_PRINT_FUNC("Done!")
return None
# Script entry point: print the version and help banners, then run main()
# inside core's traceback-catching wrapper so errors are shown to the user.
if __name__ == '__main__':
    core.MY_PRINT_FUNC(_SCRIPT_VERSION)
    core.MY_PRINT_FUNC(helptext)
    core.RUN_WITH_TRACEBACK(main)
|
en
| 0.807956
|
# This code is free to use and re-distribute, but I cannot be held responsible for damages that it may or may not cause. ##################### # NOTE: i comment this block before compiling the EXE cuz the Pillow library is gigantic & makes the exe version like 200K # print extra messages when certain things fail in certain ways # this is recommended true, for obvious reasons # note: zipper automatically appends .zip onto whatever output name i give it, so dont give it a .zip suffix here # these are rare BMP formats that are known to be incompatible with MocuMocuDance # if recompression saves less than XXX KB, then don't save the result # how PIL reads things: # PNG, JPEG, BMP, DDS, TIFF, GIF ================================================= file_recompress_images: This tool will try to re-compress all image files in the file tree. Generally this means converting BMP/TGA/other images to PNG format, for maximum lossless image compression. JPEG image compression is more aggressive than PNG, so JPEG images will stay as JPEG. GIFs are weird so they are also not modified. This requires a PMX file to use as a root so it knows where to start reading files from. Before actually changing anything, it will list all proposed file renames and ask for final confirmation. It also creates a zipfile backup of the entire folder, just in case. Bonus: this can process all "neighbor" pmx files in addition to the target, this highly recommended because neighbors usually reference similar sets of files. Note: this script requires the Python library 'Pillow' to be installed. Note: unlike my other scripts, this overwrites the original input PMX file(s) instead of creating a new file with a suffix. This is because I already create a zipfile that contains the original input PMX, so that serves as a good backup. 
# dds/tga/tiff will always be converted to png # jpeg/gif will always be skipped (jpeg is already lossy & therefore compresses better than png, gif is animated & complex) # bmp will be re-compressed to png if the original bmp is in 15-bit or 16-bit encoding (mocumocudance compatability) # other image types are re-compressed to png if doing so saves 100kb or more # also, all images are renamed so that the file extension matches the actual image data format # step zero: verify that Pillow exists # print pillow version just cuz # absolute path to directory holding the pmx # ========================================================================================================= # ========================================================================================================= # ========================================================================================================= # first, build the list of ALL files that actually exist, then filter it down to neighbor PMXs and relevant files # now fill "neighbor_pmx" by finding files without path separator that end in PMX # these are relative paths tho # filter down to just image files # ========================================================================================================= # ========================================================================================================= # ========================================================================================================= # now ask if I care about the neighbors and read the PMXes into memory # append neighbor PMX files onto the list of files to be processed # now read all the PMX objects & store in dict alongside the relative name # dictionary where keys are filename and values are resulting pmx objects # ========================================================================================================= # ========================================================================================================= # 
========================================================================================================= # for each pmx, for each file on disk, match against files used in textures (case-insensitive) and replace with canonical name-on-disk # also fill out how much and how each file is used, and unify dupes between files, all that good stuff # ========================================================================================================= # ========================================================================================================= # ========================================================================================================= # DETERMINE NEW NAMES FOR FILES # note: need to put this tempdire ONE LEVEL UP or else it will be included in the zip! lol # list of memory saved by recompressing each file. same order/length as "image_filerecords" # only iterate over images that exist, obviously # iterate over the images # if i succesfully recompress this image, I will overwrite this 0 # if not moreinfo, then each line overwrites the previous like a progress printout does # if moreinfo, then each line is printed permanently # open the image & catch all possible errors # this has 2 causes, "Unsupported BMP bitfields layout" or "cannot identify image file" # this is because there's some DDS format it can't make sense of ################################################## # now the image is successfully opened! # if the newname is going to be the same, it came from the disk so it's already guaranteed unique # if the newname is going to be different, it might collide with something else getting renamed to png! # i might not end up going thru with the rename, but I should get the unique name figured out now. # since I am simulating a rename, remove the original from the list. # 1, depending on image format, attempt to re-save as PNG # create all needed subfolders for the destination # aborted the rename, so put the original name back! 
# save to tempfilename with png format, use optimize=true # aborted the rename, so put the original name back! ################################################################## # now i have succesfully re-saved the image as a PNG! # next question, do I want to keep this result or the original? # the new version is kept if: # 1) the new version is sufficiently smaller, # 2) the old version is a filetype I specifically hate (dds, tga, tiff) # 3) the old version is a known-bad BMP type, # measure & compare file size # if using a 16-bit BMP format, i want to re-compress it # this might fail, images are weird, sometimes they don't have the attributes i expect # if any of these 3 is true, then I am going to keep it! # i'm keeping this rename, so add the new name to the set # overwrite the 0 at the end of the list with the correct value # if succesfully re-saved, do not do the extension-checking below # # if this is not sufficiently compressed, do not use "continue", DO hit the extension-checking below # 2, if the file extension doesn't match with the image type, then make it match # this only happens if the image was not re-saved above # resolve potential collisions by adding numbers suffix to file names # i'm keeping this rename, so add the new name to the set # since i didn't commit to any of the rename ideas, put the original name back # these must be the same length after iterating # if the image is still open, close it # ========================================================================================================= # ========================================================================================================= # ========================================================================================================= # are there any with proposed renaming? # also delete the tempspace! 
# ========================================================================================================= # ========================================================================================================= # ========================================================================================================= # now, display all the proposed changes... # attach the mem-savings to the name and stuff # sort descending by savings, most savings first # filter it # if newname == oldname, then just display "SAME-NAME" instead # justify the first 2 columns # zip everything for easy iter # print 'from' with the case/separator it uses in the PMX # also delete the tempspace! # ========================================================================================================= # ========================================================================================================= # ========================================================================================================= # NOW do the actual renaming! # first, create a backup of the folder # this happens if the backup failed somehow AND the user decided to quit # also delete the tempspace! # then, replace the original images with the new versions # if this file exists on disk and there is a new name for this file, # 1. delete C.name # 2. move C.newname from tempdir to proper dir # os.renames creates all necessary intermediate folders needed for the destination # it also deletes the source folders if they become empty after the rename operation # ending the operation halfway through is unacceptable! attempt to continue # change this to empty to signify that it didn't actually get moved, check this before changing PMX paths # lastly, do all renaming in PMXes, but only if some of the names changed! 
# if i renamed a few .pngs to the same names, no point in re-writing the PMXs # write out # NOTE: this is OVERWRITING THE PREVIOUS PMX FILE, NOT CREATING A NEW ONE # because I make a zipfile backup I don't need to feel worried about preserving the old version # output_filename_pmx = core.filepath_get_unused_name(output_filename_pmx) # also delete the tempspace!
| 2.100512
| 2
|
ajaxuploader/backends/s3.py
|
ixc/django-ajax-uploader
| 1
|
6626765
|
from multiprocessing import Pool
from StringIO import StringIO
import boto
from django.conf import settings
from ajaxuploader.backends.base import AbstractUploadBackend
class S3UploadBackend(AbstractUploadBackend):
    """Upload backend that streams incoming chunks to S3 as parts of a
    multipart upload, dispatching each part to a worker pool."""

    # Number of pool workers used to upload parts concurrently.
    NUM_PARALLEL_PROCESSES = 4

    def upload_chunk(self, chunk):
        """Queue one chunk of the incoming file as the next S3 part.

        S3 part numbers are 1-based, so the counter is incremented first.
        """
        self._counter += 1
        buffer = StringIO()
        buffer.write(chunk)
        # Rewind so the upload reads the chunk from the beginning;
        # otherwise the part body would be empty (read starts at EOF).
        buffer.seek(0)
        # BUGFIX: the original code called upload_part_from_file(...)
        # immediately and handed its *result* to apply_async, so the upload
        # ran synchronously in this process and the pool then tried to call
        # a non-callable. Pass the callable and its arguments instead.
        # NOTE(review): bound methods of boto objects may not pickle across
        # processes -- confirm pool dispatch works in deployment.
        self._pool.apply_async(
            self._mp.upload_part_from_file, (buffer, self._counter))
        # Do NOT close the buffer here: the async worker still needs it.

    def setup(self, filename):
        """Open the configured S3 bucket and start a multipart upload."""
        self._bucket = boto.connect_s3(settings.AWS_ACCESS_KEY_ID,
                                       settings.AWS_SECRET_ACCESS_KEY)\
                           .lookup(settings.AWS_BUCKET_NAME)
        self._mp = self._bucket.initiate_multipart_upload(filename)
        self._pool = Pool(processes=self.NUM_PARALLEL_PROCESSES)
        self._counter = 0  # part numbers are 1-based; first part uses 1

    def upload_complete(self, request, filename):
        # Tie up loose ends, and finish the upload
        self._pool.close()
        self._pool.join()
        self._mp.complete_upload()
|
from multiprocessing import Pool
from StringIO import StringIO
import boto
from django.conf import settings
from ajaxuploader.backends.base import AbstractUploadBackend
class S3UploadBackend(AbstractUploadBackend):
    """Upload backend that streams incoming chunks to S3 as parts of a
    multipart upload, dispatching each part to a worker pool."""

    # Number of pool workers used to upload parts concurrently.
    NUM_PARALLEL_PROCESSES = 4

    def upload_chunk(self, chunk):
        """Queue one chunk of the incoming file as the next S3 part.

        S3 part numbers are 1-based, so the counter is incremented first.
        """
        self._counter += 1
        buffer = StringIO()
        buffer.write(chunk)
        # Rewind so the upload reads the chunk from the beginning;
        # otherwise the part body would be empty (read starts at EOF).
        buffer.seek(0)
        # BUGFIX: the original code called upload_part_from_file(...)
        # immediately and handed its *result* to apply_async, so the upload
        # ran synchronously in this process and the pool then tried to call
        # a non-callable. Pass the callable and its arguments instead.
        # NOTE(review): bound methods of boto objects may not pickle across
        # processes -- confirm pool dispatch works in deployment.
        self._pool.apply_async(
            self._mp.upload_part_from_file, (buffer, self._counter))
        # Do NOT close the buffer here: the async worker still needs it.

    def setup(self, filename):
        """Open the configured S3 bucket and start a multipart upload."""
        self._bucket = boto.connect_s3(settings.AWS_ACCESS_KEY_ID,
                                       settings.AWS_SECRET_ACCESS_KEY)\
                           .lookup(settings.AWS_BUCKET_NAME)
        self._mp = self._bucket.initiate_multipart_upload(filename)
        self._pool = Pool(processes=self.NUM_PARALLEL_PROCESSES)
        self._counter = 0  # part numbers are 1-based; first part uses 1

    def upload_complete(self, request, filename):
        # Tie up loose ends, and finish the upload
        self._pool.close()
        self._pool.join()
        self._mp.complete_upload()
|
en
| 0.907176
|
# Tie up loose ends, and finish the upload
| 2.203517
| 2
|
tests/test.py
|
bbitarello/ldpred
| 89
|
6626766
|
"""
A test file for LDpred.
Examples
--------
To run all tests:
$ python -m tests.test
To run a specific test:
$ python -m unittest tests.test.SimpleTests.test_ldpred_inf
"""
import filecmp
import gzip
import itertools
import os
import pickle
import sys
import tempfile
import unittest

import h5py
import numpy as np

from ldpred import coord_genotypes
from ldpred import ld
from ldpred import run
from ldpred import sum_stats_parsers
# Widen numpy's print width to the terminal width for readable array diffs.
np.set_printoptions(linewidth=int(os.environ.get('COLUMNS', 100)))
# Absolute path of the directory holding this test file; test data and
# golden file paths are resolved relative to it.
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
def run_test(mesg, cmd_str, error_mesg, *actual_and_golden_outputs):
    """Run one LDpred command line and diff its outputs against goldens.

    Args:
        mesg: human-readable description printed before the run.
        cmd_str: the full LDpred command line; split on whitespace.
        error_mesg: message printed if the run or any diff fails.
        *actual_and_golden_outputs: alternating (actual_path, golden_path)
            pairs; golden paths are relative to TEST_DIR.

    Raises:
        Whatever the LDpred run or the file comparison raises; the
        exception is re-raised after printing error_mesg.
    """
    print(mesg)
    print(cmd_str + '\n')
    cmd_args = cmd_str.split()
    try:
        run.main_with_args(cmd_args)
        for i in range(0, len(actual_and_golden_outputs), 2):
            actual_output = actual_and_golden_outputs[i]
            golden_output = os.path.join(TEST_DIR, actual_and_golden_outputs[i + 1])
            print('Diffing actual (%s) vs. golden (%s) outputs...' % (actual_output, golden_output))
            assert_files_equal(actual_output, golden_output)
            print('Diff passed!')
    except BaseException:
        # Explicit BaseException (equivalent to the original bare `except:`)
        # so the failure message prints even on KeyboardInterrupt, then the
        # original exception propagates to the test framework.
        print(error_mesg + '\n')
        raise
def h5_node_walker(h5_node, key_prefix=''):
    """Generator function that walks an hdf5 File or Group object.

    Args:
        h5_node: an h5py.File or h5py.Group object
        key_prefix: the '/' delimited string representing the name path of
            the node within the .hdf5 file.
    Yields:
        (child_key, child_value)
    """
    for child_name, child in h5_node.items():
        child_path = key_prefix + '/' + child_name
        if type(child) == h5py.Group:
            # Recurse into sub-groups, threading the accumulated path.
            yield from h5_node_walker(child, child_path)
        elif type(child) == h5py.Dataset:
            # Leaf: materialize the dataset's contents.
            yield child_path, child[...]
        else:
            assert False, 'Unexpected v_type: %s' % type(child)
def h5_file_walker(h5_file):
    """Generator function that walks an hdf5 file.

    Args:
        h5_file: a string, the name of the .hdf5 file to walk.
    Yields:
        (child_key, child_value)
    """
    # Open read-only and delegate the traversal to the node walker.
    with h5py.File(h5_file, 'r') as root:
        yield from h5_node_walker(root)
def pkl_node_walker(pkl_node, key_prefix=''):
    """Generator function that walks a Python pickle node (i.e. a dict).

    Args:
        pkl_node: A dict coming from a depickled object.
        key_prefix: the '/' delimited string representing the name path of
            the node within the pickle file.
    Yields:
        (child_key, child_value)
    """
    # Scalar/array types that are passed through unchanged.
    passthrough = (float, np.float64, np.float32, int, str, np.ndarray)
    for key in sorted(pkl_node.keys()):
        value = pkl_node[key]
        child_path = '%s/%s' % (key_prefix, key)
        if type(value) == dict:
            # Recurse into nested dicts, threading the accumulated path.
            yield from pkl_node_walker(value, child_path)
        elif type(value) == list:
            # Convert Python list to Numpy ndarray for assert_deep_equals.
            yield child_path, np.array(value)
        elif type(value) in passthrough:
            yield child_path, value
        else:
            assert False, 'Unexpected v_type: %s' % type(value)
def pkl_file_walker(pkl_file):
    """Generator function that walks a Python pickle file.

    Args:
        pkl_file: a string, the name of the .pkl.gz file to walk.
    Yields:
        (child_key, child_value)
    """
    try:
        with gzip.open(pkl_file) as f:
            root = pickle.load(f)
    except UnicodeDecodeError:
        # Pickles written by Python 2 need an explicit byte encoding.
        with gzip.open(pkl_file) as f:
            root = pickle.load(f, encoding='latin1')
    except Exception as e:
        print('Unable to load data ', pkl_file, ':', e)
        raise
    yield from pkl_node_walker(root)
def assert_deep_equals(walker1, walker2):
    """Test function that does a deep comparison of two structure walkers.

    Raises AssertionError on the first key, type, or value mismatch.
    BUGFIX: the original used zip(), which silently truncates at the
    shorter walker, so one structure having extra keys went undetected.
    zip_longest with a sentinel now catches length mismatches too.
    """
    _missing = (object(), object())  # sentinel (key, value) for exhausted walkers
    for (k1, v1), (k2, v2) in itertools.zip_longest(walker1, walker2, fillvalue=_missing):
        assert v1 is not _missing[1], 'Walker 1 exhausted before walker 2 (next key: %s)' % k2
        assert v2 is not _missing[1], 'Walker 2 exhausted before walker 1 (next key: %s)' % k1
        assert k1 == k2, 'Key mismatch: %s vs. %s' % (k1, k2)
        assert type(v1) == type(v2), 'Type mismatch: %s vs. %s' % (type(v1), type(v2))
        if isinstance(v1, str) or isinstance(v1, int):
            # Exact comparison for strings and ints.
            assert v1 == v2, 'Value mismatch: %s vs. %s' % (v1, v2)
        elif isinstance(v1, float) or isinstance(v1, np.float32):
            # Tolerant comparison for floats (covers np.float64 via float).
            assert np.isclose(v1, v2), 'Float mismatch: %s vs. %s' % (v1, v2)
        elif isinstance(v1, np.ndarray):
            assert v1.dtype == v2.dtype, 'dtype mismatch: %s vs. %s' % (v1.dtype, v2.dtype)
            if np.issubdtype(v1.dtype, np.number):
                assert np.allclose(v1, v2), 'ndarray number mismatch in key %s' % k1
            else:
                assert np.array_equal(v1, v2), 'ndarray non-number mismatch in key %s: v1=%s ; v2=%s' % (k1, str(v1), str(v2))
def assert_files_equal(file1, file2):
    """Compare two output files, dispatching on file1's extension.

    hdf5 and gzipped-pickle files get a deep structural comparison;
    anything else is compared via filecmp.
    """
    if file1.endswith('.hdf5'):
        assert_deep_equals(h5_file_walker(file1), h5_file_walker(file2))
        return
    if file1.endswith('.pkl.gz'):
        assert_deep_equals(pkl_file_walker(file1), pkl_file_walker(file2))
        return
    assert filecmp.cmp(file1, file2), "Mismatch between: %s and %s" % (file1, file2)
def make_p_dict(*args):
    """Parse LDpred command-line arguments into a plain parameter dict."""
    namespace = run.parser.parse_args(args)
    return vars(namespace)
class SimpleTests(unittest.TestCase):
    """Unit tests for the individual LDpred subcommands (coord, inf, fast,
    gibbs, p+t, score), diffing outputs against golden files where available.
    """

    @classmethod
    def setUpClass(cls):
        # One-time banner printed before the suite runs.
        print('Testing LDpred.\n')
        print('Note that this test currently only tests the core functionality of LDpred.')
        print('Please report bugs on github (https://github.com/bvilhjal/ldpred) or to <NAME> (<EMAIL>).\n')

    def setUp(self):
        # Fresh random file prefix per test so outputs never collide.
        # NOTE(review): tempfile._get_candidate_names() is a private API;
        # confirm it still exists on the Python versions in use.
        self.tf = tempfile.NamedTemporaryFile()
        self.tmp_file_prefix = next(tempfile._get_candidate_names())

    def tearDown(self):
        # Remove all files produced under this test's prefix via the shell.
        print('Cleaning up files: %s* ' % self.tmp_file_prefix)
        cmd_str = 'rm -f %s*' % self.tmp_file_prefix
        print(cmd_str + '\n')
        assert os.system(cmd_str) == 0, 'Problems cleaning up test files! Testing stopped'

    def test_parse_sum_stats(self):
        """Parse LDPRED-format summary stats, with and without z-from-se."""
        # Case 1: explicit sample size (N), linear-regression effects.
        p_dict = {
            'ssf': os.path.join(TEST_DIR, 'test_data/sim1_0_ss.txt'),
            'ssf_format': 'LDPRED',
            'only_hm3': False,
            'N': 10000,
            'debug': True,
            'z_from_se':False,
            'match_genomic_pos': False,
            'eff_type':'LINREG'}
        bimfile = os.path.join(TEST_DIR, 'test_data/sim1_0_test.bim')
        summary_dict = {}
        out = '%s_parse_sum_stats.hdf5' % self.tmp_file_prefix
        with h5py.File(out, 'w') as h5f:
            sum_stats_parsers.parse_sum_stats(h5f, p_dict, bimfile, summary_dict)
            self.assertEqual(len(h5f['sum_stats']['chrom_1']['betas']), 2000)
        # Case 2: no N; z-scores derived from standard errors instead.
        p_dict = {
            'ssf': os.path.join(TEST_DIR, 'test_data/sim4_0_ss.txt'),
            'ssf_format': 'LDPRED',
            'only_hm3': False,
            'N': None,
            'debug': True,
            'z_from_se':True,
            'match_genomic_pos': False,}
        bimfile = os.path.join(TEST_DIR, 'test_data/sim4_0_test.bim')
        summary_dict = {}
        out = '%s_parse_sum_stats.hdf5' % self.tmp_file_prefix
        with h5py.File(out, 'w') as h5f:
            sum_stats_parsers.parse_sum_stats(h5f, p_dict, bimfile, summary_dict)
            self.assertEqual(len(h5f['sum_stats']['chrom_1']['betas']), 2000)

    def test_coord_genotypes(self):
        """Coordinate genotypes + sum stats; expect zero nucleotide mismatches."""
        p_dict = make_p_dict(
            '--debug',
            'coord',
            '--gf=%s/test_data/sim1_0_test' % TEST_DIR,
            '--vgf=%s/test_data/sim1_0_test' % TEST_DIR,
            '--ssf=%s/test_data/sim1_0_ss.txt' % TEST_DIR,
            '--ssf-format=LDPRED',
            '--out=%s_coord_genotypes.hdf5' % self.tmp_file_prefix,
        )
        summary_dict = coord_genotypes.main(p_dict)
        # summary_dict[11]['value'], if present, is the count of non-matching nts.
        # It should be 0.
        self.assertEqual(summary_dict.get(11, {}).get('value', 0), 0)
        with h5py.File(p_dict['out'], 'r') as h5f:
            self.assertEqual(len(h5f['sum_stats']['chrom_1']['betas']), 2000)

    def test_ld_calculation(self):
        """Compute an LD matrix for 10 SNPs and compare to the golden matrix."""
        df = h5py.File('%s/test_data/goldens/golden.coord0.hdf5' % TEST_DIR, 'r')
        g = df['cord_data']['chrom_1']
        snps, n_raw_snps, n_snps = ld.extract_snps_from_cord_data_chrom(g)
        first_10_snps = snps[:10]
        self.assertEqual(len(first_10_snps), 10)
        ld_dict_and_scores = ld.get_LDpred_ld_tables(first_10_snps)
        ld_dict = ld_dict_and_scores['ld_dict']
        ld_mat = np.vstack([ld_dict[i] for i in range(10)])
        # np.savez(os.path.join(TEST_DIR, 'test_data/goldens/ld_data'),ld=ld_mat)
        golden_ld_mat = np.load(os.path.join(TEST_DIR, 'test_data/goldens/ld_data.npz'))['ld']
        self.assertTrue(np.allclose(ld_mat, golden_ld_mat))

    def test_get_chromosome_herits(self):
        """Estimate per-chromosome heritability from LD scores."""
        p_dict = make_p_dict(
            '--debug',
            'inf',
            '--cf=%s/test_data/goldens/golden.coord.hdf5' % TEST_DIR,
            '--ldr=100',
            '--ldf=' + self.tmp_file_prefix,
            '--N=4000',
            '--out=' + self.tmp_file_prefix,
        )
        summary_dict = {}
        ld_dict = ld.get_ld_dict_using_p_dict(p_dict, summary_dict)
        coord_file = os.path.join(TEST_DIR, 'test_data/goldens/golden.coord.hdf5')
        df = h5py.File(coord_file, 'r')
        herit_dict = ld.get_chromosome_herits(df['cord_data'], ld_dict['ld_scores_dict'], n=p_dict['N'])
        print(herit_dict)
        # Both the per-chromosome and genome-wide estimates should match
        # the previously recorded value for this fixture.
        self.assertAlmostEqual(herit_dict['chrom_1']['h2'], 0.10640501626651437)
        self.assertAlmostEqual(herit_dict['gw_h2_ld_score_est'], 0.10640501626651437)

    def test_ldpred_coord0(self):
        """coord with --vgf validation genotypes; diff against golden.coord0."""
        coord_file = self.tmp_file_prefix + '.coord0.hdf5'
        run_test(
            'Coordinating test data into file %s' % coord_file,
            'coord --gf=%s/test_data/sim1_0_test --vgf=%s/test_data/sim1_0_test --ssf=%s/test_data/sim1_0_ss.txt --ssf-format=LDPRED --eff_type LINREG --out=%s' % (TEST_DIR, TEST_DIR, TEST_DIR, coord_file),
            'Problems when coordinating data!',
            coord_file,
            'test_data/goldens/golden.coord0.hdf5'
        )

    def test_ldpred_coord(self):
        """coord with --vbim validation bim; diff against golden.coord."""
        coord_file = self.tmp_file_prefix + '.coord.hdf5'
        run_test(
            'Coordinating test data into file %s' % coord_file,
            '--debug coord --gf=%s/test_data/sim2_0_test --vbim=%s/test_data/sim2_0_test.bim --ssf=%s/test_data/sim2_0_ss.txt --ssf-format=LDPRED --eff_type LINREG --out=%s' % (TEST_DIR, TEST_DIR, TEST_DIR, coord_file),
            'Problems when coordinating data!',
            coord_file,
            'test_data/goldens/golden.coord.hdf5')

    def test_ldpred_inf(self):
        """LDpred-inf; diff the LD pickle against the golden pickle."""
        run_test(
            'Running LDpred-inf with output file prefix: %s ' % self.tmp_file_prefix,
            '--debug inf --cf=%s/test_data/goldens/golden.coord.hdf5 --ldr=100 --ldf=%s --out=%s' % (TEST_DIR, self.tmp_file_prefix, self.tmp_file_prefix),
            'Problems when running LDpred_inf!',
            self.tmp_file_prefix + '_ldradius100.pkl.gz',
            'test_data/goldens/golden_inf_ldradius100.pkl.gz')

    def test_ldpred_fast(self):
        """LDpred-fast smoke test (no golden outputs to diff)."""
        # NOTE(review): the message says "LDpred-inf" but this runs `fast`.
        run_test(
            'Running LDpred-inf with output file prefix: %s ' % self.tmp_file_prefix,
            '--debug fast --cf=%s/test_data/goldens/golden.coord.hdf5 --f 0.3 0.1 0.03 0.01 --ldr=100 --ldf=%s --out=%s' % (TEST_DIR, self.tmp_file_prefix, self.tmp_file_prefix),
            'Problems when running LDpred_fast!')

    def test_ldpred_gibbs(self):
        """LDpred Gibbs-sampler smoke test (no golden outputs to diff)."""
        run_test(
            'Running LDpred with output file prefix: %s ' % self.tmp_file_prefix,
            '--debug gibbs --cf=%s/test_data/goldens/golden.coord.hdf5 --ldr=100 --ldf=%s --f=0.001 --out=%s' % (TEST_DIR, self.tmp_file_prefix, self.tmp_file_prefix),
            'Problems when running LDpred!')

    def test_ldpred_p_plus_t(self):
        """P+T (pruning + thresholding); diff against the golden weights."""
        run_test(
            'Running P+T with coordinated file prefix: %s ' % self.tmp_file_prefix,
            '--debug p+t --cf=%s/test_data/goldens/golden.coord.hdf5 --ldr=100 --p=0.001 --out=%s' % (TEST_DIR, self.tmp_file_prefix),
            'Problems when running P+T!',
            self.tmp_file_prefix + '_P+T_r0.20_p1.0000e-03.txt',
            'test_data/goldens/golden_P+T_r0.20_p1.0000e-03.txt')

    def test_ldpred_score_1(self):
        """score with default weight format; diff inf and gibbs PRS files."""
        prs_file_prefix = self.tmp_file_prefix
        run_test(
            'Validating results with output file prefix: %s' % prs_file_prefix,
            '--debug score --gf=%s/test_data/sim2_0_test --rf=%s/test_data/goldens/golden --out=%s' % (TEST_DIR, TEST_DIR, prs_file_prefix),
            'Problems with the validation step!',
            prs_file_prefix + '_LDpred-inf.txt',
            'test_data/goldens/goldenprs_LDpred-inf.txt',
            prs_file_prefix + '_LDpred_p1.0000e-03.txt',
            'test_data/goldens/goldenprs_LDpred_p1.0000e-03.txt')

    def test_ldpred_score_2(self):
        """score with explicit --rf-format LDPRED."""
        prs_file_prefix = self.tmp_file_prefix
        run_test(
            'Validating results with output file prefix: %s' % self.tmp_file_prefix,
            'score --gf=%s/test_data/sim2_0_test --rf-format LDPRED --rf=%s/test_data/goldens/golden --out=%s' % (TEST_DIR, TEST_DIR, prs_file_prefix),
            'Problems with the validation step!',
            prs_file_prefix + '_LDpred-inf.txt',
            'test_data/goldens/goldenprs_LDpred-inf.txt',
            prs_file_prefix + '_LDpred_p1.0000e-03.txt',
            'test_data/goldens/goldenprs_LDpred_p1.0000e-03.txt',)

    def test_ldpred_score_3(self):
        """score P+T weights with --only-score (no phenotype evaluation)."""
        prs_file_prefix = self.tmp_file_prefix
        run_test(
            'Validating results with output file prefix: %s' % self.tmp_file_prefix,
            'score --gf=%s/test_data/sim2_0_test --only-score --rf=%s/test_data/goldens/golden --rf-format=P+T --out=%s' % (TEST_DIR, TEST_DIR, prs_file_prefix),
            'Problems with the P+T validation step!',
            prs_file_prefix + '_P+T_r0.20_p1.0000e-03.txt',
            'test_data/goldens/goldenprs_only_score_P+T_r0.20_p1.0000e-03.txt')

    def test_ldpred_score_4(self):
        """score P+T weights with phenotype evaluation."""
        prs_file_prefix = self.tmp_file_prefix
        run_test(
            'Validating results with output file prefix: %s' % self.tmp_file_prefix,
            'score --gf=%s/test_data/sim2_0_test --rf=%s/test_data/goldens/golden --rf-format=P+T --out=%s' % (TEST_DIR, TEST_DIR, prs_file_prefix),
            'Problems with the P+T validation step!',
            prs_file_prefix + '_P+T_r0.20_p1.0000e-03.txt',
            'test_data/goldens/goldenprs_P+T_r0.20_p1.0000e-03.txt')
class ComplexTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
print('Testing LDpred: Integration tests.\n')
print('Note that this test currently only tests the core functionality of LDpred.')
print('Please report bugs on github (https://github.com/bvilhjal/ldpred) or to <NAME> (<EMAIL>).\n')
def setUp(self):
self.tf = tempfile.NamedTemporaryFile()
self.tmp_file_prefix = next(tempfile._get_candidate_names())
def tearDown(self):
print('Cleaning up files: %s* ' % self.tmp_file_prefix)
cmd_str = 'rm -f %s*' % self.tmp_file_prefix
print(cmd_str + '\n')
assert os.system(cmd_str) == 0, 'Problems cleaning up test files! Testing stopped'
def test_mix1(self):
t_i = 0
label='mix1'
for sim_i in range(1,6):
td = '%s/test_data/sim%d'%(TEST_DIR, sim_i)
file_prefix = '%s_%s_sim%d_%d'%(self.tmp_file_prefix,label,sim_i,t_i)
df_prefix = '%s_%d'%(td,t_i)
coord_file = file_prefix+'.hdf5'
run_test(
'Validating results with output file prefix: %s' % self.tmp_file_prefix,
'coord --gf=%s_test --vbim=%s_test.bim --ssf=%s_ss.txt --ssf-format=LDPRED --out=%s' % (df_prefix,df_prefix,df_prefix,coord_file),
'Problems when coordinating data!')
ld_file = file_prefix+'.ld'
weights_file = file_prefix+'.weights'
run_test(
'Running LDpred-fast with coordinated file prefix: %s ' % coord_file,
'--debug fast --cf=%s --f 0.3 0.1 0.03 0.01 --ldr=100 --ldf=%s --out=%s' % (coord_file, ld_file, weights_file),
'Problems when running LDpred_fast!')
run_test(
'Running LDpred with coordinated file prefix: %s ' % coord_file,
'gibbs --N 5500 --use-gw-h2 --n-burn-in 5 --n-iter 50 --cf=%s --ldr=100 --ldf=%s --f 1 0.3 0.1 --out=%s' % (coord_file, ld_file, weights_file),
'Problems when running LDpred!')
run_test(
'Running P+T with coordinated file prefix: %s ' % coord_file,
'p+t --cf=%s --ldr=100 --p 1 0.3 0.1 --out=%s' % (coord_file, weights_file),
'Problems when running P+T!')
prs_file_prefix = file_prefix+'.prs'
golden_prs_prefix = '%s/test_data/goldens/golden_%s_prs_%i_%i'%(TEST_DIR,label,sim_i,t_i)
golden_summary_file = '%s.summary.txt'%golden_prs_prefix
summary_file = file_prefix+'.summary.txt'
run_test(
'Validating results with output file prefix: %s' % prs_file_prefix,
'score --gf=%s_test --rf=%s --out=%s --summary-file=%s' % (df_prefix, weights_file, prs_file_prefix, summary_file),
'Problems with the validation step!',
summary_file,golden_summary_file)
def test_mix2(self):
    """Like test_mix1 but with --z-from-se, a larger LD radius (150), and
    denser p/r2 grids; summaries are diffed against 'mix2' goldens.
    """
    t_i = 0
    label='mix2'
    for sim_i in range(1,6):
        # Per-simulation input/output file names.
        td = '%s/test_data/sim%d'%(TEST_DIR, sim_i)
        file_prefix = '%s_%s_sim%d_%d'%(self.tmp_file_prefix,label,sim_i,t_i)
        df_prefix = '%s_%d'%(td,t_i)
        coord_file = file_prefix+'.hdf5'
        # Step 1: coordinate data, deriving Z scores from standard errors.
        run_test(
            'Validating results with output file prefix: %s' % self.tmp_file_prefix,
            'coord --gf=%s_test --vbim=%s_test.bim --z-from-se --ssf=%s_ss.txt --ssf-format=LDPRED --out=%s' % (df_prefix,df_prefix,df_prefix,coord_file),
            'Problems when coordinating data!')
        ld_file = file_prefix+'.ld'
        weights_file = file_prefix+'.weights'
        # Step 2: LDpred-fast.
        run_test(
            'Running LDpred-fast with coordinated file prefix: %s ' % coord_file,
            '--debug fast --cf=%s --f 0.3 0.1 0.03 0.01 0.001 --ldr=150 --ldf=%s --out=%s' % (coord_file, ld_file, weights_file),
            'Problems when running LDpred_fast!')
        # Step 3: LDpred Gibbs sampler.
        run_test(
            'Running LDpred with coordinated file prefix: %s ' % coord_file,
            'gibbs --n-burn-in 5 --n-iter 50 --cf=%s --ldr=150 --ldf=%s --f 1 0.1 0.01 0.001 --out=%s' % (coord_file, ld_file, weights_file),
            'Problems when running LDpred!')
        # Step 4: P+T over several r2 and p thresholds.
        run_test(
            'Running P+T with coordinated file prefix: %s ' % coord_file,
            'p+t --cf=%s --ldr=150 --r2 0.5 0.2 0.1 --p 1 0.3 0.1 0.03 0.01 0.003 0.001 0.0003 0.0001 0.00001 --out=%s' % (coord_file, weights_file),
            'Problems when running P+T!')
        prs_file_prefix = file_prefix+'.prs'
        golden_prs_prefix = '%s/test_data/goldens/golden_%s_prs_%i_%i'%(TEST_DIR,label,sim_i,t_i)
        golden_summary_file = '%s.summary.txt'%golden_prs_prefix
        summary_file = file_prefix+'.summary.txt'
        # Step 5: score with matching thresholds; diff summary vs. golden.
        run_test(
            'Validating results with output file prefix: %s' % prs_file_prefix,
            'score --gf=%s_test --r2 0.5 0.2 0.1 --p 1 0.3 0.1 0.03 0.01 0.003 0.001 0.0003 0.0001 0.00001 --rf=%s --out=%s --summary-file=%s' % (df_prefix, weights_file,
                                                                                                                                                  prs_file_prefix, summary_file),
            'Problems with the validation step!',
            summary_file,golden_summary_file)
def update_golden_files_mix1():
    """Regenerate the 'mix1' golden scoring summaries for simulations 1-5.

    Runs the full pipeline (coord, fast, gibbs, p+t, score) through
    ``python -m ldpred`` subprocesses and writes the scoring summary of
    each simulation into test_data/goldens/.  All intermediate files are
    removed at the end.
    """
    def _run(cmd_str, error_mesg):
        # Echo and execute one shell command, aborting on failure.
        print(cmd_str + '\n')
        assert os.system(cmd_str) == 0, error_mesg
    label = 'mix1'
    # NOTE: tempfile._get_candidate_names() is a private API; it is used only
    # to obtain a unique file-name prefix for the intermediate files.
    # (The previously created, unused NamedTemporaryFile handle was removed.)
    tmp_file_prefix = next(tempfile._get_candidate_names())
    for sim_i in range(1,6):
        print('Updating golden results')
        coord_file = '%s_%i_coord.hdf5'%(tmp_file_prefix,sim_i)
        _run('python -m ldpred --debug coord --gf %s/test_data/sim%i_0_test --vbim %s/test_data/sim%i_0_test.bim --ssf %s/test_data/sim%i_0_ss.txt --ssf-format LDPRED --out=%s' % (TEST_DIR,sim_i,TEST_DIR,sim_i,TEST_DIR,sim_i,coord_file),
             'Problems when updating golden files')
        weights_prefix = '%s_%i_weights'%(tmp_file_prefix,sim_i)
        ld_prefix = '%s_%i'%(tmp_file_prefix,sim_i)
        _run('python -m ldpred fast --cf %s --ldr 100 --f 0.3 0.1 0.03 0.01 --ldf %s --out %s' % (coord_file,ld_prefix,weights_prefix),
             'Problems when updating golden files')
        _run('python -m ldpred gibbs --N 5500 --use-gw-h2 --n-burn-in 5 --n-iter 50 --cf %s --ldr 100 --ldf %s --f 1 0.3 0.1 --out %s' % (coord_file,ld_prefix,weights_prefix),
             'Problems when updating golden files')
        _run('python -m ldpred p+t --cf %s --ldr 100 --p 1 0.3 0.1 --out %s' % (coord_file,weights_prefix),
             'Problems when updating golden files')
        prs_prefix = '%s_prs_%i_0'%(tmp_file_prefix,sim_i)
        golden_summary_file = '%s/test_data/goldens/golden_%s_prs_%i_0.summary.txt'%(TEST_DIR, label,sim_i)
        _run('python -m ldpred --debug score --gf %s/test_data/sim%i_0_test --rf %s --out %s --summary-file %s' % (TEST_DIR, sim_i,weights_prefix,prs_prefix, golden_summary_file),
             'Problems when updating golden files')
    print('Cleaning up files.')
    _run('rm %s*' % tmp_file_prefix, 'Problems cleaning up test files! Testing stopped')
def update_golden_files_mix2():
    """Regenerate the 'mix2' golden scoring summaries for simulations 1-5.

    Same pipeline as update_golden_files_mix1 but coordinates with
    --z-from-se, uses LD radius 150, and sweeps denser p/r2 grids.
    """
    def _run(cmd_str, error_mesg):
        # Echo and execute one shell command, aborting on failure.
        print(cmd_str + '\n')
        assert os.system(cmd_str) == 0, error_mesg
    label = 'mix2'
    # NOTE: tempfile._get_candidate_names() is a private API; it is used only
    # to obtain a unique file-name prefix for the intermediate files.
    # (The previously created, unused NamedTemporaryFile handle was removed.)
    tmp_file_prefix = next(tempfile._get_candidate_names())
    for sim_i in range(1,6):
        print('Updating golden results')
        coord_file = '%s_%i_coord.hdf5'%(tmp_file_prefix,sim_i)
        _run('python -m ldpred coord --gf %s/test_data/sim%i_0_test --vbim %s/test_data/sim%i_0_test.bim --z-from-se --ssf %s/test_data/sim%i_0_ss.txt --ssf-format LDPRED --out=%s' % (TEST_DIR,sim_i,TEST_DIR,sim_i,TEST_DIR,sim_i,coord_file),
             'Problems when updating golden files')
        weights_prefix = '%s_%i_weights'%(tmp_file_prefix,sim_i)
        ld_prefix = '%s_%i'%(tmp_file_prefix,sim_i)
        _run('python -m ldpred fast --cf %s --ldr 150 --f 0.3 0.1 0.03 0.01 0.001 --ldf %s --out %s' % (coord_file,ld_prefix,weights_prefix),
             'Problems when updating golden files')
        _run('python -m ldpred gibbs --n-burn-in 5 --n-iter 50 --cf %s --ldr 150 --ldf %s --f 1 0.1 0.01 0.001 --out %s' % (coord_file,ld_prefix,weights_prefix),
             'Problems when updating golden files')
        _run('python -m ldpred p+t --cf %s --ldr 150 --r2 0.5 0.2 0.1 --p 1 0.3 0.1 0.03 0.01 0.003 0.001 0.0003 0.0001 0.00001 --out %s' % (coord_file,weights_prefix),
             'Problems when updating golden files')
        prs_prefix = '%s_prs_%i_0'%(tmp_file_prefix,sim_i)
        golden_summary_file = '%s/test_data/goldens/golden_%s_prs_%i_0.summary.txt'%(TEST_DIR, label,sim_i)
        _run('python -m ldpred score --gf %s/test_data/sim%i_0_test --r2 0.5 0.2 0.1 --p 1 0.3 0.1 0.03 0.01 0.003 0.001 0.0003 0.0001 0.00001 --rf %s --out %s --summary-file %s' % (TEST_DIR,sim_i,weights_prefix,prs_prefix, golden_summary_file),
             'Problems when updating golden files')
    print('Cleaning up files.')
    _run('rm %s*' % tmp_file_prefix, 'Problems cleaning up test files! Testing stopped')
def run_integration_tests():
    """Run only the integration-level (ComplexTests) suite."""
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromTestCase(ComplexTests)
    unittest.TextTestRunner().run(suite)
def run_unit_tests():
    """Run only the unit-level (SimpleTests) suite."""
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromTestCase(SimpleTests)
    unittest.TextTestRunner().run(suite)
if __name__ == '__main__':
    # Running this module directly executes the full unittest suite.
    unittest.main()
|
"""
A test file for LDpred.
Examples
--------
To run all tests:
$ python -m tests.test
To run a specific test:
$ python -m unittest tests.test.SimpleTests.test_ldpred_inf
"""
import filecmp
import gzip
import itertools
import os
import pickle
import sys
import tempfile
import unittest

import h5py
import numpy as np

from ldpred import coord_genotypes
from ldpred import ld
from ldpred import run
from ldpred import sum_stats_parsers
# Widen numpy's print width to the terminal width (default 100 columns).
np.set_printoptions(linewidth=int(os.environ.get('COLUMNS', 100)))
# Directory containing this test file; used to locate test_data/.
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
def run_test(mesg, cmd_str, error_mesg, *actual_and_golden_outputs):
    """Execute one ldpred command line and diff outputs against goldens.

    Args:
        mesg: human-readable description printed before the run.
        cmd_str: the ldpred command line (split on whitespace).
        error_mesg: message printed when the command or a diff fails.
        *actual_and_golden_outputs: alternating actual/golden file names;
            golden names are resolved relative to TEST_DIR.
    """
    print(mesg)
    print(cmd_str + '\n')
    arg_list = cmd_str.split()
    try:
        run.main_with_args(arg_list)
        for idx in range(0, len(actual_and_golden_outputs), 2):
            produced = actual_and_golden_outputs[idx]
            golden = os.path.join(TEST_DIR, actual_and_golden_outputs[idx + 1])
            print('Diffing actual (%s) vs. golden (%s) outputs...' % (produced, golden))
            assert_files_equal(produced, golden)
            print('Diff passed!')
    except:
        # Print context before re-raising so the unittest failure is visible.
        print(error_mesg + '\n')
        raise
def h5_node_walker(h5_node, key_prefix=''):
    """Generator function that walks an hdf5 File or Group object.

    Args:
        h5_node: an h5py.File or h5py.Group object
        key_prefix: the '/' delimited string representing the name path of the
            node within the .hdf5 file.

    Yields:
        (child_key, child_value)
    """
    for k, v in h5_node.items():
        v_path = key_prefix + '/' + k
        # isinstance (rather than exact type equality) also accepts h5py
        # subclasses of Group/Dataset, which exact type checks would reject.
        if isinstance(v, h5py.Group):
            for nested_key, nested_value in h5_node_walker(v, v_path):
                yield nested_key, nested_value
        elif isinstance(v, h5py.Dataset):
            # v[...] reads the entire dataset into memory as a numpy array.
            yield v_path, v[...]
        else:
            assert False, 'Unexpected v_type: %s' % type(v)
def h5_file_walker(h5_file):
    """Generator function that walks an hdf5 file.

    Args:
        h5_file: a string, the name of the .hdf5 file to walk.

    Yields:
        (child_key, child_value)
    """
    with h5py.File(h5_file, 'r') as h5_root_node:
        # Delegate the recursive traversal to the node walker.
        yield from h5_node_walker(h5_root_node)
def pkl_node_walker(pkl_node, key_prefix=''):
    """Generator function that walks a Python pickle node (i.e. a dict).

    Dict keys are visited in sorted order so traversal is deterministic;
    list leaves are converted to numpy arrays so downstream comparison can
    treat them numerically.

    Args:
        pkl_node: A dict coming from a depickled object.
        key_prefix: the '/' delimited string representing the name path of the
            node within the pickle file.

    Yields:
        (child_key, child_value)
    """
    for key in sorted(pkl_node.keys()):
        value = pkl_node[key]
        value_type = type(value)
        child_path = key_prefix + '/' + str(key)
        if value_type == dict:
            for sub_path, sub_value in pkl_node_walker(value, child_path):
                yield sub_path, sub_value
        elif value_type == list:
            # Convert Python list to Numpy ndarray for numeric comparison.
            yield child_path, np.array(value)
        elif value_type in (float, np.float64, np.float32, int, str, np.ndarray):
            yield child_path, value
        else:
            assert False, 'Unexpected v_type: %s' % value_type
def pkl_file_walker(pkl_file):
    """Generator function that walks a gzipped Python pickle file.

    Args:
        pkl_file: a string, the name of the .pkl.gz file to walk.

    Yields:
        (child_key, child_value)
    """
    try:
        with gzip.open(pkl_file) as f:
            root_node = pickle.load(f)
    except UnicodeDecodeError:
        # Pickles written under Python 2 need latin1 decoding on Python 3.
        with gzip.open(pkl_file) as f:
            root_node = pickle.load(f, encoding='latin1')
    except Exception as e:
        print('Unable to load data ', pkl_file, ':', e)
        raise
    yield from pkl_node_walker(root_node)
def assert_deep_equals(walker1, walker2):
    """Deep-compare two (key, value) structure walkers.

    Raises AssertionError on the first key, type, or value mismatch, and —
    unlike a plain zip, which silently truncates — also when one walker
    yields more entries than the other.
    """
    sentinel = object()
    for item1, item2 in itertools.zip_longest(walker1, walker2, fillvalue=sentinel):
        # A sentinel here means one walker was exhausted before the other.
        assert item1 is not sentinel and item2 is not sentinel, \
            'Structure length mismatch: %s vs. %s' % (item1, item2)
        k1, v1 = item1
        k2, v2 = item2
        assert k1 == k2, 'Key mismatch: %s vs. %s' % (k1, k2)
        assert type(v1) == type(v2), 'Type mismatch: %s vs. %s' % (type(v1), type(v2))
        if isinstance(v1, str) or isinstance(v1, int):
            assert v1 == v2, 'Value mismatch: %s vs. %s' % (v1, v2)
        elif isinstance(v1, float) or isinstance(v1, np.float32):
            # Floats compare approximately to tolerate serialization noise.
            assert np.isclose(v1, v2), 'Float mismatch: %s vs. %s' % (v1, v2)
        elif isinstance(v1, np.ndarray):
            assert v1.dtype == v2.dtype, 'dtype mismatch: %s vs. %s' % (v1.dtype, v2.dtype)
            if np.issubdtype(v1.dtype, np.number):
                assert np.allclose(v1, v2), 'ndarray number mismatch in key %s' % k1
            else:
                assert np.array_equal(v1, v2), 'ndarray non-number mismatch in key %s: v1=%s ; v2=%s' % (k1,str(v1),str(v2))
def assert_files_equal(file1, file2):
    """Assert that two files have equal content, dispatching on extension.

    .hdf5 files are compared structurally via h5_file_walker, .pkl.gz files
    via pkl_file_walker, and all other files byte-for-byte.

    Raises:
        AssertionError: if the files differ.
    """
    if file1.endswith('.hdf5'):
        assert_deep_equals(h5_file_walker(file1), h5_file_walker(file2))
    elif file1.endswith('.pkl.gz'):
        assert_deep_equals(pkl_file_walker(file1), pkl_file_walker(file2))
    else:
        # shallow=False forces a byte-by-byte comparison; the default shallow
        # mode may report equality from matching os.stat() signatures
        # (size + mtime) without ever reading the file contents.
        assert filecmp.cmp(file1, file2, shallow=False), "Mismatch between: %s and %s" % (file1, file2)
def make_p_dict(*args):
    """Parse command-line style arguments into an ldpred parameter dict."""
    parsed = run.parser.parse_args(args)
    return vars(parsed)
class SimpleTests(unittest.TestCase):
    """Unit tests for the core LDpred steps: parsing, coordination, LD
    calculation, the inf/fast/gibbs/P+T models, and scoring."""
    @classmethod
    def setUpClass(cls):
        # Printed once before the whole suite runs.
        print('Testing LDpred.\n')
        print('Note that this test currently only tests the core functionality of LDpred.')
        print('Please report bugs on github (https://github.com/bvilhjal/ldpred) or to <NAME> (<EMAIL>).\n')
    def setUp(self):
        # Unique per-test prefix for all temporary output files.
        # NOTE(review): tempfile._get_candidate_names() is a private API.
        self.tf = tempfile.NamedTemporaryFile()
        self.tmp_file_prefix = next(tempfile._get_candidate_names())
    def tearDown(self):
        # Remove everything written under the temporary prefix.
        print('Cleaning up files: %s* ' % self.tmp_file_prefix)
        cmd_str = 'rm -f %s*' % self.tmp_file_prefix
        print(cmd_str + '\n')
        assert os.system(cmd_str) == 0, 'Problems cleaning up test files! Testing stopped'
    def test_parse_sum_stats(self):
        """Parse LDPRED-format summary stats, with and without --z-from-se."""
        p_dict = {
            'ssf': os.path.join(TEST_DIR, 'test_data/sim1_0_ss.txt'),
            'ssf_format': 'LDPRED',
            'only_hm3': False,
            'N': 10000,
            'debug': True,
            'z_from_se':False,
            'match_genomic_pos': False,
            'eff_type':'LINREG'}
        bimfile = os.path.join(TEST_DIR, 'test_data/sim1_0_test.bim')
        summary_dict = {}
        out = '%s_parse_sum_stats.hdf5' % self.tmp_file_prefix
        with h5py.File(out, 'w') as h5f:
            sum_stats_parsers.parse_sum_stats(h5f, p_dict, bimfile, summary_dict)
            # All 2000 simulated SNPs on chromosome 1 should be parsed.
            self.assertEqual(len(h5f['sum_stats']['chrom_1']['betas']), 2000)
        # Second pass: derive Z scores from standard errors (no N given).
        p_dict = {
            'ssf': os.path.join(TEST_DIR, 'test_data/sim4_0_ss.txt'),
            'ssf_format': 'LDPRED',
            'only_hm3': False,
            'N': None,
            'debug': True,
            'z_from_se':True,
            'match_genomic_pos': False,}
        bimfile = os.path.join(TEST_DIR, 'test_data/sim4_0_test.bim')
        summary_dict = {}
        out = '%s_parse_sum_stats.hdf5' % self.tmp_file_prefix
        with h5py.File(out, 'w') as h5f:
            sum_stats_parsers.parse_sum_stats(h5f, p_dict, bimfile, summary_dict)
            self.assertEqual(len(h5f['sum_stats']['chrom_1']['betas']), 2000)
    def test_coord_genotypes(self):
        """Coordinate genotypes with summary stats; verify SNP count and
        that no nucleotides failed to match."""
        p_dict = make_p_dict(
            '--debug',
            'coord',
            '--gf=%s/test_data/sim1_0_test' % TEST_DIR,
            '--vgf=%s/test_data/sim1_0_test' % TEST_DIR,
            '--ssf=%s/test_data/sim1_0_ss.txt' % TEST_DIR,
            '--ssf-format=LDPRED',
            '--out=%s_coord_genotypes.hdf5' % self.tmp_file_prefix,
        )
        summary_dict = coord_genotypes.main(p_dict)
        # summary_dict[11]['value'], if present, is the count of non-matching nts.
        # It should be 0.
        self.assertEqual(summary_dict.get(11, {}).get('value', 0), 0)
        with h5py.File(p_dict['out'], 'r') as h5f:
            self.assertEqual(len(h5f['sum_stats']['chrom_1']['betas']), 2000)
    def test_ld_calculation(self):
        """LD table of the first 10 SNPs matches the stored golden LD matrix."""
        df = h5py.File('%s/test_data/goldens/golden.coord0.hdf5' % TEST_DIR, 'r')
        g = df['cord_data']['chrom_1']
        snps, n_raw_snps, n_snps = ld.extract_snps_from_cord_data_chrom(g)
        first_10_snps = snps[:10]
        self.assertEqual(len(first_10_snps), 10)
        ld_dict_and_scores = ld.get_LDpred_ld_tables(first_10_snps)
        ld_dict = ld_dict_and_scores['ld_dict']
        # Stack the per-SNP LD rows into a single 10xN matrix.
        ld_mat = np.vstack([ld_dict[i] for i in range(10)])
        # np.savez(os.path.join(TEST_DIR, 'test_data/goldens/ld_data'),ld=ld_mat)
        golden_ld_mat = np.load(os.path.join(TEST_DIR, 'test_data/goldens/ld_data.npz'))['ld']
        self.assertTrue(np.allclose(ld_mat, golden_ld_mat))
    def test_get_chromosome_herits(self):
        """Heritability estimates from LD scores equal known golden values."""
        p_dict = make_p_dict(
            '--debug',
            'inf',
            '--cf=%s/test_data/goldens/golden.coord.hdf5' % TEST_DIR,
            '--ldr=100',
            '--ldf=' + self.tmp_file_prefix,
            '--N=4000',
            '--out=' + self.tmp_file_prefix,
        )
        summary_dict = {}
        ld_dict = ld.get_ld_dict_using_p_dict(p_dict, summary_dict)
        coord_file = os.path.join(TEST_DIR, 'test_data/goldens/golden.coord.hdf5')
        df = h5py.File(coord_file, 'r')
        herit_dict = ld.get_chromosome_herits(df['cord_data'], ld_dict['ld_scores_dict'], n=p_dict['N'])
        print(herit_dict)
        self.assertAlmostEqual(herit_dict['chrom_1']['h2'], 0.10640501626651437)
        self.assertAlmostEqual(herit_dict['gw_h2_ld_score_est'], 0.10640501626651437)
    def test_ldpred_coord0(self):
        """coord with --vgf validation genotypes; diff vs. golden.coord0."""
        coord_file = self.tmp_file_prefix + '.coord0.hdf5'
        run_test(
            'Coordinating test data into file %s' % coord_file,
            'coord --gf=%s/test_data/sim1_0_test --vgf=%s/test_data/sim1_0_test --ssf=%s/test_data/sim1_0_ss.txt --ssf-format=LDPRED --eff_type LINREG --out=%s' % (TEST_DIR, TEST_DIR, TEST_DIR, coord_file),
            'Problems when coordinating data!',
            coord_file,
            'test_data/goldens/golden.coord0.hdf5'
        )
    def test_ldpred_coord(self):
        """coord with --vbim validation bim file; diff vs. golden.coord."""
        coord_file = self.tmp_file_prefix + '.coord.hdf5'
        run_test(
            'Coordinating test data into file %s' % coord_file,
            '--debug coord --gf=%s/test_data/sim2_0_test --vbim=%s/test_data/sim2_0_test.bim --ssf=%s/test_data/sim2_0_ss.txt --ssf-format=LDPRED --eff_type LINREG --out=%s' % (TEST_DIR, TEST_DIR, TEST_DIR, coord_file),
            'Problems when coordinating data!',
            coord_file,
            'test_data/goldens/golden.coord.hdf5')
    def test_ldpred_inf(self):
        """LDpred-inf; LD file diffed against the golden pickle."""
        run_test(
            'Running LDpred-inf with output file prefix: %s ' % self.tmp_file_prefix,
            '--debug inf --cf=%s/test_data/goldens/golden.coord.hdf5 --ldr=100 --ldf=%s --out=%s' % (TEST_DIR, self.tmp_file_prefix, self.tmp_file_prefix),
            'Problems when running LDpred_inf!',
            self.tmp_file_prefix + '_ldradius100.pkl.gz',
            'test_data/goldens/golden_inf_ldradius100.pkl.gz')
    def test_ldpred_fast(self):
        """LDpred-fast smoke test (no golden diff)."""
        # NOTE(review): the progress message says 'LDpred-inf' but this runs
        # the fast model.
        run_test(
            'Running LDpred-inf with output file prefix: %s ' % self.tmp_file_prefix,
            '--debug fast --cf=%s/test_data/goldens/golden.coord.hdf5 --f 0.3 0.1 0.03 0.01 --ldr=100 --ldf=%s --out=%s' % (TEST_DIR, self.tmp_file_prefix, self.tmp_file_prefix),
            'Problems when running LDpred_fast!')
    def test_ldpred_gibbs(self):
        """LDpred Gibbs-sampler smoke test (no golden diff)."""
        run_test(
            'Running LDpred with output file prefix: %s ' % self.tmp_file_prefix,
            '--debug gibbs --cf=%s/test_data/goldens/golden.coord.hdf5 --ldr=100 --ldf=%s --f=0.001 --out=%s' % (TEST_DIR, self.tmp_file_prefix, self.tmp_file_prefix),
            'Problems when running LDpred!')
    def test_ldpred_p_plus_t(self):
        """P+T; resulting weights file diffed against the golden."""
        run_test(
            'Running P+T with coordinated file prefix: %s ' % self.tmp_file_prefix,
            '--debug p+t --cf=%s/test_data/goldens/golden.coord.hdf5 --ldr=100 --p=0.001 --out=%s' % (TEST_DIR, self.tmp_file_prefix),
            'Problems when running P+T!',
            self.tmp_file_prefix + '_P+T_r0.20_p1.0000e-03.txt',
            'test_data/goldens/golden_P+T_r0.20_p1.0000e-03.txt')
    def test_ldpred_score_1(self):
        """Score with auto-detected weight format; diff PRS files vs. goldens."""
        prs_file_prefix = self.tmp_file_prefix
        run_test(
            'Validating results with output file prefix: %s' % prs_file_prefix,
            '--debug score --gf=%s/test_data/sim2_0_test --rf=%s/test_data/goldens/golden --out=%s' % (TEST_DIR, TEST_DIR, prs_file_prefix),
            'Problems with the validation step!',
            prs_file_prefix + '_LDpred-inf.txt',
            'test_data/goldens/goldenprs_LDpred-inf.txt',
            prs_file_prefix + '_LDpred_p1.0000e-03.txt',
            'test_data/goldens/goldenprs_LDpred_p1.0000e-03.txt')
    def test_ldpred_score_2(self):
        """Score with explicit --rf-format LDPRED; diff PRS files vs. goldens."""
        prs_file_prefix = self.tmp_file_prefix
        run_test(
            'Validating results with output file prefix: %s' % self.tmp_file_prefix,
            'score --gf=%s/test_data/sim2_0_test --rf-format LDPRED --rf=%s/test_data/goldens/golden --out=%s' % (TEST_DIR, TEST_DIR, prs_file_prefix),
            'Problems with the validation step!',
            prs_file_prefix + '_LDpred-inf.txt',
            'test_data/goldens/goldenprs_LDpred-inf.txt',
            prs_file_prefix + '_LDpred_p1.0000e-03.txt',
            'test_data/goldens/goldenprs_LDpred_p1.0000e-03.txt',)
    def test_ldpred_score_3(self):
        """Score P+T weights with --only-score; diff vs. only-score golden."""
        prs_file_prefix = self.tmp_file_prefix
        run_test(
            'Validating results with output file prefix: %s' % self.tmp_file_prefix,
            'score --gf=%s/test_data/sim2_0_test --only-score --rf=%s/test_data/goldens/golden --rf-format=P+T --out=%s' % (TEST_DIR, TEST_DIR, prs_file_prefix),
            'Problems with the P+T validation step!',
            prs_file_prefix + '_P+T_r0.20_p1.0000e-03.txt',
            'test_data/goldens/goldenprs_only_score_P+T_r0.20_p1.0000e-03.txt')
    def test_ldpred_score_4(self):
        """Score P+T weights (full output); diff vs. golden PRS file."""
        prs_file_prefix = self.tmp_file_prefix
        run_test(
            'Validating results with output file prefix: %s' % self.tmp_file_prefix,
            'score --gf=%s/test_data/sim2_0_test --rf=%s/test_data/goldens/golden --rf-format=P+T --out=%s' % (TEST_DIR, TEST_DIR, prs_file_prefix),
            'Problems with the P+T validation step!',
            prs_file_prefix + '_P+T_r0.20_p1.0000e-03.txt',
            'test_data/goldens/goldenprs_P+T_r0.20_p1.0000e-03.txt')
class ComplexTests(unittest.TestCase):
    """Integration tests: full LDpred pipelines over all five simulated
    data sets, diffing scoring summaries against golden files."""
    @classmethod
    def setUpClass(cls):
        # Printed once before the whole suite runs.
        print('Testing LDpred: Integration tests.\n')
        print('Note that this test currently only tests the core functionality of LDpred.')
        print('Please report bugs on github (https://github.com/bvilhjal/ldpred) or to <NAME> (<EMAIL>).\n')
    def setUp(self):
        # Unique per-test prefix for all temporary output files.
        # NOTE(review): tempfile._get_candidate_names() is a private API.
        self.tf = tempfile.NamedTemporaryFile()
        self.tmp_file_prefix = next(tempfile._get_candidate_names())
    def tearDown(self):
        # Remove everything written under the temporary prefix.
        print('Cleaning up files: %s* ' % self.tmp_file_prefix)
        cmd_str = 'rm -f %s*' % self.tmp_file_prefix
        print(cmd_str + '\n')
        assert os.system(cmd_str) == 0, 'Problems cleaning up test files! Testing stopped'
    def test_mix1(self):
        """End-to-end pipeline (coord, fast, gibbs, P+T, score) over sims 1-5;
        summaries are diffed against 'mix1' goldens."""
        t_i = 0
        label='mix1'
        for sim_i in range(1,6):
            # Per-simulation input/output file names.
            td = '%s/test_data/sim%d'%(TEST_DIR, sim_i)
            file_prefix = '%s_%s_sim%d_%d'%(self.tmp_file_prefix,label,sim_i,t_i)
            df_prefix = '%s_%d'%(td,t_i)
            coord_file = file_prefix+'.hdf5'
            # Step 1: coordinate genotypes with the summary statistics.
            run_test(
                'Validating results with output file prefix: %s' % self.tmp_file_prefix,
                'coord --gf=%s_test --vbim=%s_test.bim --ssf=%s_ss.txt --ssf-format=LDPRED --out=%s' % (df_prefix,df_prefix,df_prefix,coord_file),
                'Problems when coordinating data!')
            ld_file = file_prefix+'.ld'
            weights_file = file_prefix+'.weights'
            # Step 2: LDpred-fast.
            run_test(
                'Running LDpred-fast with coordinated file prefix: %s ' % coord_file,
                '--debug fast --cf=%s --f 0.3 0.1 0.03 0.01 --ldr=100 --ldf=%s --out=%s' % (coord_file, ld_file, weights_file),
                'Problems when running LDpred_fast!')
            # Step 3: LDpred Gibbs sampler (short burn-in for test speed).
            run_test(
                'Running LDpred with coordinated file prefix: %s ' % coord_file,
                'gibbs --N 5500 --use-gw-h2 --n-burn-in 5 --n-iter 50 --cf=%s --ldr=100 --ldf=%s --f 1 0.3 0.1 --out=%s' % (coord_file, ld_file, weights_file),
                'Problems when running LDpred!')
            # Step 4: pruning + thresholding.
            run_test(
                'Running P+T with coordinated file prefix: %s ' % coord_file,
                'p+t --cf=%s --ldr=100 --p 1 0.3 0.1 --out=%s' % (coord_file, weights_file),
                'Problems when running P+T!')
            prs_file_prefix = file_prefix+'.prs'
            golden_prs_prefix = '%s/test_data/goldens/golden_%s_prs_%i_%i'%(TEST_DIR,label,sim_i,t_i)
            golden_summary_file = '%s.summary.txt'%golden_prs_prefix
            summary_file = file_prefix+'.summary.txt'
            # Step 5: score, then diff the summary against the golden file.
            run_test(
                'Validating results with output file prefix: %s' % prs_file_prefix,
                'score --gf=%s_test --rf=%s --out=%s --summary-file=%s' % (df_prefix, weights_file, prs_file_prefix, summary_file),
                'Problems with the validation step!',
                summary_file,golden_summary_file)
    def test_mix2(self):
        """Like test_mix1 but with --z-from-se, LD radius 150, and denser
        p/r2 grids; summaries are diffed against 'mix2' goldens."""
        t_i = 0
        label='mix2'
        for sim_i in range(1,6):
            # Per-simulation input/output file names.
            td = '%s/test_data/sim%d'%(TEST_DIR, sim_i)
            file_prefix = '%s_%s_sim%d_%d'%(self.tmp_file_prefix,label,sim_i,t_i)
            df_prefix = '%s_%d'%(td,t_i)
            coord_file = file_prefix+'.hdf5'
            # Step 1: coordinate, deriving Z scores from standard errors.
            run_test(
                'Validating results with output file prefix: %s' % self.tmp_file_prefix,
                'coord --gf=%s_test --vbim=%s_test.bim --z-from-se --ssf=%s_ss.txt --ssf-format=LDPRED --out=%s' % (df_prefix,df_prefix,df_prefix,coord_file),
                'Problems when coordinating data!')
            ld_file = file_prefix+'.ld'
            weights_file = file_prefix+'.weights'
            # Step 2: LDpred-fast.
            run_test(
                'Running LDpred-fast with coordinated file prefix: %s ' % coord_file,
                '--debug fast --cf=%s --f 0.3 0.1 0.03 0.01 0.001 --ldr=150 --ldf=%s --out=%s' % (coord_file, ld_file, weights_file),
                'Problems when running LDpred_fast!')
            # Step 3: LDpred Gibbs sampler.
            run_test(
                'Running LDpred with coordinated file prefix: %s ' % coord_file,
                'gibbs --n-burn-in 5 --n-iter 50 --cf=%s --ldr=150 --ldf=%s --f 1 0.1 0.01 0.001 --out=%s' % (coord_file, ld_file, weights_file),
                'Problems when running LDpred!')
            # Step 4: P+T over several r2 and p thresholds.
            run_test(
                'Running P+T with coordinated file prefix: %s ' % coord_file,
                'p+t --cf=%s --ldr=150 --r2 0.5 0.2 0.1 --p 1 0.3 0.1 0.03 0.01 0.003 0.001 0.0003 0.0001 0.00001 --out=%s' % (coord_file, weights_file),
                'Problems when running P+T!')
            prs_file_prefix = file_prefix+'.prs'
            golden_prs_prefix = '%s/test_data/goldens/golden_%s_prs_%i_%i'%(TEST_DIR,label,sim_i,t_i)
            golden_summary_file = '%s.summary.txt'%golden_prs_prefix
            summary_file = file_prefix+'.summary.txt'
            # Step 5: score with matching thresholds; diff summary vs. golden.
            run_test(
                'Validating results with output file prefix: %s' % prs_file_prefix,
                'score --gf=%s_test --r2 0.5 0.2 0.1 --p 1 0.3 0.1 0.03 0.01 0.003 0.001 0.0003 0.0001 0.00001 --rf=%s --out=%s --summary-file=%s' % (df_prefix, weights_file,
                                                                                                                                                      prs_file_prefix, summary_file),
                'Problems with the validation step!',
                summary_file,golden_summary_file)
def update_golden_files_mix1():
    """Regenerate the 'mix1' golden scoring summaries for simulations 1-5.

    Runs the full pipeline (coord, fast, gibbs, p+t, score) through
    ``python -m ldpred`` subprocesses and writes the scoring summary of
    each simulation into test_data/goldens/.  All intermediate files are
    removed at the end.
    """
    def _run(cmd_str, error_mesg):
        # Echo and execute one shell command, aborting on failure.
        print(cmd_str + '\n')
        assert os.system(cmd_str) == 0, error_mesg
    label = 'mix1'
    # NOTE: tempfile._get_candidate_names() is a private API; it is used only
    # to obtain a unique file-name prefix for the intermediate files.
    # (The previously created, unused NamedTemporaryFile handle was removed.)
    tmp_file_prefix = next(tempfile._get_candidate_names())
    for sim_i in range(1,6):
        print('Updating golden results')
        coord_file = '%s_%i_coord.hdf5'%(tmp_file_prefix,sim_i)
        _run('python -m ldpred --debug coord --gf %s/test_data/sim%i_0_test --vbim %s/test_data/sim%i_0_test.bim --ssf %s/test_data/sim%i_0_ss.txt --ssf-format LDPRED --out=%s' % (TEST_DIR,sim_i,TEST_DIR,sim_i,TEST_DIR,sim_i,coord_file),
             'Problems when updating golden files')
        weights_prefix = '%s_%i_weights'%(tmp_file_prefix,sim_i)
        ld_prefix = '%s_%i'%(tmp_file_prefix,sim_i)
        _run('python -m ldpred fast --cf %s --ldr 100 --f 0.3 0.1 0.03 0.01 --ldf %s --out %s' % (coord_file,ld_prefix,weights_prefix),
             'Problems when updating golden files')
        _run('python -m ldpred gibbs --N 5500 --use-gw-h2 --n-burn-in 5 --n-iter 50 --cf %s --ldr 100 --ldf %s --f 1 0.3 0.1 --out %s' % (coord_file,ld_prefix,weights_prefix),
             'Problems when updating golden files')
        _run('python -m ldpred p+t --cf %s --ldr 100 --p 1 0.3 0.1 --out %s' % (coord_file,weights_prefix),
             'Problems when updating golden files')
        prs_prefix = '%s_prs_%i_0'%(tmp_file_prefix,sim_i)
        golden_summary_file = '%s/test_data/goldens/golden_%s_prs_%i_0.summary.txt'%(TEST_DIR, label,sim_i)
        _run('python -m ldpred --debug score --gf %s/test_data/sim%i_0_test --rf %s --out %s --summary-file %s' % (TEST_DIR, sim_i,weights_prefix,prs_prefix, golden_summary_file),
             'Problems when updating golden files')
    print('Cleaning up files.')
    _run('rm %s*' % tmp_file_prefix, 'Problems cleaning up test files! Testing stopped')
def update_golden_files_mix2():
    """Regenerate the 'mix2' golden scoring summaries for simulations 1-5.

    Same pipeline as update_golden_files_mix1 but coordinates with
    --z-from-se, uses LD radius 150, and sweeps denser p/r2 grids.
    """
    def _run(cmd_str, error_mesg):
        # Echo and execute one shell command, aborting on failure.
        print(cmd_str + '\n')
        assert os.system(cmd_str) == 0, error_mesg
    label = 'mix2'
    # NOTE: tempfile._get_candidate_names() is a private API; it is used only
    # to obtain a unique file-name prefix for the intermediate files.
    # (The previously created, unused NamedTemporaryFile handle was removed.)
    tmp_file_prefix = next(tempfile._get_candidate_names())
    for sim_i in range(1,6):
        print('Updating golden results')
        coord_file = '%s_%i_coord.hdf5'%(tmp_file_prefix,sim_i)
        _run('python -m ldpred coord --gf %s/test_data/sim%i_0_test --vbim %s/test_data/sim%i_0_test.bim --z-from-se --ssf %s/test_data/sim%i_0_ss.txt --ssf-format LDPRED --out=%s' % (TEST_DIR,sim_i,TEST_DIR,sim_i,TEST_DIR,sim_i,coord_file),
             'Problems when updating golden files')
        weights_prefix = '%s_%i_weights'%(tmp_file_prefix,sim_i)
        ld_prefix = '%s_%i'%(tmp_file_prefix,sim_i)
        _run('python -m ldpred fast --cf %s --ldr 150 --f 0.3 0.1 0.03 0.01 0.001 --ldf %s --out %s' % (coord_file,ld_prefix,weights_prefix),
             'Problems when updating golden files')
        _run('python -m ldpred gibbs --n-burn-in 5 --n-iter 50 --cf %s --ldr 150 --ldf %s --f 1 0.1 0.01 0.001 --out %s' % (coord_file,ld_prefix,weights_prefix),
             'Problems when updating golden files')
        _run('python -m ldpred p+t --cf %s --ldr 150 --r2 0.5 0.2 0.1 --p 1 0.3 0.1 0.03 0.01 0.003 0.001 0.0003 0.0001 0.00001 --out %s' % (coord_file,weights_prefix),
             'Problems when updating golden files')
        prs_prefix = '%s_prs_%i_0'%(tmp_file_prefix,sim_i)
        golden_summary_file = '%s/test_data/goldens/golden_%s_prs_%i_0.summary.txt'%(TEST_DIR, label,sim_i)
        _run('python -m ldpred score --gf %s/test_data/sim%i_0_test --r2 0.5 0.2 0.1 --p 1 0.3 0.1 0.03 0.01 0.003 0.001 0.0003 0.0001 0.00001 --rf %s --out %s --summary-file %s' % (TEST_DIR,sim_i,weights_prefix,prs_prefix, golden_summary_file),
             'Problems when updating golden files')
    print('Cleaning up files.')
    _run('rm %s*' % tmp_file_prefix, 'Problems cleaning up test files! Testing stopped')
def run_integration_tests():
    """Run only the integration-level (ComplexTests) suite."""
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromTestCase(ComplexTests)
    unittest.TextTestRunner().run(suite)
def run_unit_tests():
    """Run only the unit-level (SimpleTests) suite."""
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromTestCase(SimpleTests)
    unittest.TextTestRunner().run(suite)
if __name__ == '__main__':
    # Running this module directly executes the full test suite.
    unittest.main()
|
en
| 0.639184
|
A test file for LDpred. Examples -------- To run all tests: $ python -m tests.test To run a specific test: $ python -m unittest tests.test.SimpleTests.test_ldpred_inf Generator function that walks an hdf5 File or Group object. Args: h5_node: an h5py.File or h5py.Group object key_prefix: the '/' delimited string representing the name path of the node within the .hdf5 file. Yields: (child_key, child_value) Generator function that walks an hdf5 file. Args: h5_file: a string, the name of the .hdf5 file to walk. Yields: (child_key, child_value) Generator function that walks a Python pickle node (i.e. a dict). Args: pkl_node: A dict coming from a depickled object. key_prefix: the '/' delimited string representing the name path of the node within the pickle file. Yields: (child_key, child_value) # Convert Python list to Numpy ndarray for assert_deep_equals. Generator function that walks a Python pickle file. Args: pkl_file: a string, the name of the .pkl.gz file to walk. Yields: (child_key, child_value) Test function that does a deep comparison of two structure walkers. # summary_dict[11]['value'], if present, is the count of non-matching nts. # It should be 0. # np.savez(os.path.join(TEST_DIR, 'test_data/goldens/ld_data'),ld=ld_mat)
| 2.84602
| 3
|
apfv/utils/mAP.py
|
anonymous0120/attentional_pervasive_fabricate_vanish_attack
| 0
|
6626767
|
import glob
import json
import os
import shutil
import operator
import sys
import argparse
import math
import numpy as np
import pdb
def save_detection_to_file(input_dic, output_file, task):
    """Write detections or ground-truth boxes to a comma-separated text file.

    Args:
        input_dic: dict with parallel lists under 'classes', 'boxes', and
            'scores'; each box is (top, left, bottom, right).  An empty or
            falsy dict produces an empty output file.
        output_file: path of the text file to create (always created).
        task: 'ground_truth' writes "class,left,top,right,bottom";
            'detection' writes "class,score,left,top,right,bottom".

    Raises:
        ValueError: if task is neither 'ground_truth' nor 'detection'.
    """
    # Validate up front: the original raised only inside the loop, after
    # some lines had already been written (and never for empty input).
    if task not in ('ground_truth', 'detection'):
        raise ValueError('Invalid task.')
    with open(output_file, 'w') as txt_file:
        if not input_dic:
            return
        rows = zip(input_dic['classes'], input_dic['boxes'], input_dic['scores'])
        for temp_class, temp_bbox, temp_score in rows:
            # Boxes are stored (top, left, bottom, right) but written
            # (left, top, right, bottom).
            top, left, bottom, right = temp_bbox
            coords = '%s,%s,%s,%s' % (int(left), int(top), int(right), int(bottom))
            if task == 'ground_truth':
                txt_file.write('%s,%s\n' % (temp_class, coords))
            else:
                txt_file.write('%s,%s,%s\n' % (temp_class, temp_score, coords))
def calculate_mAP_from_files(gt_dir: str, pd_dir: str, min_overlap: float = 0.5) -> float:
    """Compute mean Average Precision (mAP) from ground-truth and detection files.

    Both directories must hold one ``.txt`` file per image with comma-separated
    rows: ``class,left,top,right,bottom`` for ground truth and
    ``class,score,left,top,right,bottom`` for detections.  Intermediate JSON
    files are staged in a local ``.temp_files`` directory which is deleted at
    the end.

    Args:
        gt_dir: directory containing the ground-truth ``.txt`` files.
        pd_dir: directory containing the detection-result ``.txt`` files.
        min_overlap: IoU threshold above which a detection counts as a
            true positive.

    Returns:
        The mAP averaged over all classes (also printed to stdout).
    """
    TEMP_FILES_PATH = ".temp_files"
    if not os.path.exists(TEMP_FILES_PATH):  # if it doesn't exist already
        os.makedirs(TEMP_FILES_PATH)
    """
    ground-truth
        Load each of the ground-truth files into a temporary ".json" file.
        Create a list of all the class names present in the ground-truth (gt_classes).
    """
    # get a list with the ground-truth files
    ground_truth_files_list = glob.glob(gt_dir + '/*.txt')
    ground_truth_files_list.sort()
    # dictionary with counter per class
    gt_counter_per_class = {}
    for txt_file in ground_truth_files_list:
        file_id = txt_file.split(".txt", 1)[0]
        file_id = os.path.basename(os.path.normpath(file_id))
        lines_list = _file_lines_to_list(txt_file)
        # create ground-truth dictionary
        bounding_boxes = []
        already_seen_classes = []
        for line in lines_list:
            class_name, left, top, right, bottom = line.split(',')
            bbox = left + " " + top + " " + right + " " + bottom
            bounding_boxes.append({"class_name" : class_name, "bbox" : bbox, "used" : False})
            # count that object
            if class_name in gt_counter_per_class:
                gt_counter_per_class[class_name] += 1
            else:
                gt_counter_per_class[class_name] = 1
            if class_name not in already_seen_classes:
                already_seen_classes.append(class_name)
        # dump bounding_boxes into a ".json" file (one file per image)
        with open(TEMP_FILES_PATH + "/" + file_id + "_ground_truth.json", 'w') as outfile:
            json.dump(bounding_boxes, outfile)
    # get a list with the detection-results files
    dr_files_list = glob.glob(pd_dir + '/*.txt')
    dr_files_list.sort()
    # Classes that appear only in detections still need a (zero) GT count
    # so they contribute an AP of 0 to the mean.
    for txt_file_pd in dr_files_list:
        lines = _file_lines_to_list(txt_file_pd)
        for line in lines:
            tmp_class_name, confidence, left, top, right, bottom = line.split(',')
            if tmp_class_name not in list(gt_counter_per_class.keys()):
                gt_counter_per_class[tmp_class_name] = 0
    gt_classes = list(gt_counter_per_class.keys())
    gt_classes = sorted(gt_classes)
    n_classes = len(gt_classes)
    """
    detection-results
        Load each of the detection-results files into a temporary ".json" file.
    """
    for _, class_name in enumerate(gt_classes):
        bounding_boxes = []
        for txt_file in dr_files_list:
            file_id = txt_file.split(".txt",1)[0]
            file_id = os.path.basename(os.path.normpath(file_id))
            lines = _file_lines_to_list(txt_file)
            for line in lines:
                tmp_class_name, confidence, left, top, right, bottom = line.split(',')
                if tmp_class_name == class_name:
                    bbox = left + " " + top + " " + right + " " + bottom
                    bounding_boxes.append({"confidence" : confidence, "file_id" : file_id, "bbox" : bbox})
        # sort detection-results by decreasing confidence
        bounding_boxes.sort(key=lambda x : float(x['confidence']), reverse=True)
        with open(TEMP_FILES_PATH + "/" + class_name + "_dr.json", 'w') as outfile:
            json.dump(bounding_boxes, outfile)
    """
    Calculate the AP for each class
    """
    sum_AP = 0.0
    # open file to store the results
    count_true_positives = {}
    for _, class_name in enumerate(gt_classes):
        count_true_positives[class_name] = 0
        """
        Load detection-results of that class
        """
        dr_file = TEMP_FILES_PATH + "/" + class_name + "_dr.json"
        # NOTE(review): json.load(open(...)) leaves the handle for the GC to
        # close; a `with` block would close it deterministically.
        dr_data = json.load(open(dr_file))
        """
        Assign detection-results to ground-truth objects
        """
        nd = len(dr_data)
        tp = [0] * nd  # creates an array of zeros of size nd
        fp = [0] * nd
        for idx, detection in enumerate(dr_data):
            file_id = detection["file_id"]
            # assign detection-results to ground truth object if any
            # open ground-truth with that file_id
            gt_file = TEMP_FILES_PATH + "/" + file_id + "_ground_truth.json"
            ground_truth_data = json.load(open(gt_file))
            ovmax = -1
            gt_match = -1
            # load detected object bounding-box
            bb = [ float(x) for x in detection["bbox"].split() ]
            for obj in ground_truth_data:
                # look for a class_name match
                if obj["class_name"] == class_name:
                    bbgt = [ float(x) for x in obj["bbox"].split() ]
                    # intersection rectangle; the +1 terms treat coordinates
                    # as inclusive pixel indices
                    bi = [max(bb[0],bbgt[0]), max(bb[1],bbgt[1]), min(bb[2],bbgt[2]), min(bb[3],bbgt[3])]
                    iw = bi[2] - bi[0] + 1
                    ih = bi[3] - bi[1] + 1
                    if iw > 0 and ih > 0:
                        # compute overlap (IoU) = area of intersection / area of union
                        ua = (bb[2] - bb[0] + 1) * (bb[3] - bb[1] + 1) + (bbgt[2] - bbgt[0]
                            + 1) * (bbgt[3] - bbgt[1] + 1) - iw * ih
                        ov = iw * ih / ua
                        if ov > ovmax:
                            ovmax = ov
                            gt_match = obj
            # set minimum overlap
            min_overlap = min_overlap  # NOTE(review): no-op self-assignment kept as-is
            if ovmax >= min_overlap:
                if "difficult" not in gt_match:
                    if not bool(gt_match["used"]):
                        # true positive; mark the GT box as consumed
                        tp[idx] = 1
                        gt_match["used"] = True
                        count_true_positives[class_name] += 1
                        # update the ".json" file so later detections of the
                        # same GT box count as duplicates
                        with open(gt_file, 'w') as f:
                            f.write(json.dumps(ground_truth_data))
                    else:
                        # false positive (multiple detection)
                        fp[idx] = 1
            else:
                # false positive
                fp[idx] = 1
        # compute precision/recall from cumulative TP/FP counts
        cumsum = 0
        for idx, val in enumerate(fp):
            fp[idx] += cumsum
            cumsum += val
        cumsum = 0
        for idx, val in enumerate(tp):
            tp[idx] += cumsum
            cumsum += val
        rec = tp[:]
        for idx, val in enumerate(tp):
            if gt_counter_per_class[class_name] == 0:
                rec[idx] = 0
            else:
                rec[idx] = float(tp[idx]) / gt_counter_per_class[class_name]
        prec = tp[:]
        for idx, val in enumerate(tp):
            prec[idx] = float(tp[idx]) / (fp[idx] + tp[idx])
        ap, _, _ = voc_ap(rec[:], prec[:])
        sum_AP += ap
    mAP = sum_AP / n_classes
    text = "mAP = {0:.2f}%".format(mAP*100)
    print(text)
    # remove the temp_files directory
    shutil.rmtree(TEMP_FILES_PATH)
    return mAP
def _file_lines_to_list(path):
# open txt file lines to a list
with open(path) as f:
content = f.readlines()
# remove whitespace characters like `\n` at the end of each line
content = [x.strip() for x in content]
return content
def voc_ap(rec, prec):
    """Compute VOC-style Average Precision from recall/precision curves.

    Port of the official VOC2012 MATLAB code:
        mrec=[0 ; rec ; 1];
        mpre=[0 ; prec ; 0];
        for i=numel(mpre)-1:-1:1
            mpre(i)=max(mpre(i),mpre(i+1));
        end
        i=find(mrec(2:end)~=mrec(1:end-1))+1;
        ap=sum((mrec(i)-mrec(i-1)).*mpre(i));

    Args:
        rec: recall values, monotonically non-decreasing.
        prec: precision values aligned with ``rec``.

    Returns:
        (ap, mrec, mpre): the average precision, the sentinel-padded recall
        list, and the monotonically-decreasing precision envelope.

    Unlike the original implementation, the input lists are NOT mutated.
    """
    # Pad with sentinels by building new lists instead of insert/append,
    # so the caller's lists are left untouched (the original mutated them).
    mrec = [0.0] + list(rec) + [1.0]
    mpre = [0.0] + list(prec) + [0.0]
    # Make the precision envelope monotonically decreasing, scanning from
    # the end to the beginning (mirrors the MATLAB loop above; Python is
    # 0-indexed, hence range(len-2, -1, -1)).
    for i in range(len(mpre) - 2, -1, -1):
        mpre[i] = max(mpre[i], mpre[i + 1])
    # AP is the area under the stepwise precision/recall curve: sum the
    # rectangle at every index where the recall changes.
    ap = 0.0
    for i in range(1, len(mrec)):
        if mrec[i] != mrec[i - 1]:
            ap += (mrec[i] - mrec[i - 1]) * mpre[i]
    return ap, mrec, mpre
if __name__ == "__main__":
    # Example invocation: score the DispersionAttack experiment output
    # (paths are relative to the current working directory).
    calculate_mAP_from_files('out/DispersionAttack_opt_det_out/gt', 'out/DispersionAttack_opt_det_out/pd')
|
import glob
import json
import os
import shutil
import operator
import sys
import argparse
import math
import numpy as np
import pdb
def save_detection_to_file(input_dic, output_file, task):
    """Write boxes from *input_dic* to *output_file*, one CSV row per box.

    For ``task == 'ground_truth'`` rows are ``class,left,top,right,bottom``;
    for ``task == 'detection'`` rows are ``class,score,left,top,right,bottom``.
    Coordinates are truncated to ints.  A falsy *input_dic* still truncates
    the output file but writes nothing.
    """
    with open(output_file, 'w') as txt_file:
        if not input_dic:
            return
        rows = zip(input_dic['classes'], input_dic['boxes'], input_dic['scores'])
        for cls, box, score in rows:
            # Boxes are stored (top, left, bottom, right); files use l/t/r/b order.
            top, left, bottom, right = box
            if task == 'ground_truth':
                txt_file.write(f"{cls},{int(left)},{int(top)},{int(right)},{int(bottom)}\n")
            elif task == 'detection':
                txt_file.write(f"{cls},{score},{int(left)},{int(top)},{int(right)},{int(bottom)}\n")
            else:
                raise ValueError('Invalid task.')
def calculate_mAP_from_files(gt_dir: str, pd_dir: str, min_overlap: float = 0.5) -> float:
    """Compute mean Average Precision (mAP) from ground-truth and detection files.

    Both directories must hold one ``.txt`` file per image with comma-separated
    rows: ``class,left,top,right,bottom`` for ground truth and
    ``class,score,left,top,right,bottom`` for detections.  Intermediate JSON
    files are staged in a local ``.temp_files`` directory which is deleted at
    the end.

    Args:
        gt_dir: directory containing the ground-truth ``.txt`` files.
        pd_dir: directory containing the detection-result ``.txt`` files.
        min_overlap: IoU threshold above which a detection counts as a
            true positive.

    Returns:
        The mAP averaged over all classes (also printed to stdout).
    """
    TEMP_FILES_PATH = ".temp_files"
    if not os.path.exists(TEMP_FILES_PATH):  # if it doesn't exist already
        os.makedirs(TEMP_FILES_PATH)
    """
    ground-truth
        Load each of the ground-truth files into a temporary ".json" file.
        Create a list of all the class names present in the ground-truth (gt_classes).
    """
    # get a list with the ground-truth files
    ground_truth_files_list = glob.glob(gt_dir + '/*.txt')
    ground_truth_files_list.sort()
    # dictionary with counter per class
    gt_counter_per_class = {}
    for txt_file in ground_truth_files_list:
        file_id = txt_file.split(".txt", 1)[0]
        file_id = os.path.basename(os.path.normpath(file_id))
        lines_list = _file_lines_to_list(txt_file)
        # create ground-truth dictionary
        bounding_boxes = []
        already_seen_classes = []
        for line in lines_list:
            class_name, left, top, right, bottom = line.split(',')
            bbox = left + " " + top + " " + right + " " + bottom
            bounding_boxes.append({"class_name" : class_name, "bbox" : bbox, "used" : False})
            # count that object
            if class_name in gt_counter_per_class:
                gt_counter_per_class[class_name] += 1
            else:
                gt_counter_per_class[class_name] = 1
            if class_name not in already_seen_classes:
                already_seen_classes.append(class_name)
        # dump bounding_boxes into a ".json" file (one file per image)
        with open(TEMP_FILES_PATH + "/" + file_id + "_ground_truth.json", 'w') as outfile:
            json.dump(bounding_boxes, outfile)
    # get a list with the detection-results files
    dr_files_list = glob.glob(pd_dir + '/*.txt')
    dr_files_list.sort()
    # Classes that appear only in detections still need a (zero) GT count
    # so they contribute an AP of 0 to the mean.
    for txt_file_pd in dr_files_list:
        lines = _file_lines_to_list(txt_file_pd)
        for line in lines:
            tmp_class_name, confidence, left, top, right, bottom = line.split(',')
            if tmp_class_name not in list(gt_counter_per_class.keys()):
                gt_counter_per_class[tmp_class_name] = 0
    gt_classes = list(gt_counter_per_class.keys())
    gt_classes = sorted(gt_classes)
    n_classes = len(gt_classes)
    """
    detection-results
        Load each of the detection-results files into a temporary ".json" file.
    """
    for _, class_name in enumerate(gt_classes):
        bounding_boxes = []
        for txt_file in dr_files_list:
            file_id = txt_file.split(".txt",1)[0]
            file_id = os.path.basename(os.path.normpath(file_id))
            lines = _file_lines_to_list(txt_file)
            for line in lines:
                tmp_class_name, confidence, left, top, right, bottom = line.split(',')
                if tmp_class_name == class_name:
                    bbox = left + " " + top + " " + right + " " + bottom
                    bounding_boxes.append({"confidence" : confidence, "file_id" : file_id, "bbox" : bbox})
        # sort detection-results by decreasing confidence
        bounding_boxes.sort(key=lambda x : float(x['confidence']), reverse=True)
        with open(TEMP_FILES_PATH + "/" + class_name + "_dr.json", 'w') as outfile:
            json.dump(bounding_boxes, outfile)
    """
    Calculate the AP for each class
    """
    sum_AP = 0.0
    # open file to store the results
    count_true_positives = {}
    for _, class_name in enumerate(gt_classes):
        count_true_positives[class_name] = 0
        """
        Load detection-results of that class
        """
        dr_file = TEMP_FILES_PATH + "/" + class_name + "_dr.json"
        # NOTE(review): json.load(open(...)) leaves the handle for the GC to
        # close; a `with` block would close it deterministically.
        dr_data = json.load(open(dr_file))
        """
        Assign detection-results to ground-truth objects
        """
        nd = len(dr_data)
        tp = [0] * nd  # creates an array of zeros of size nd
        fp = [0] * nd
        for idx, detection in enumerate(dr_data):
            file_id = detection["file_id"]
            # assign detection-results to ground truth object if any
            # open ground-truth with that file_id
            gt_file = TEMP_FILES_PATH + "/" + file_id + "_ground_truth.json"
            ground_truth_data = json.load(open(gt_file))
            ovmax = -1
            gt_match = -1
            # load detected object bounding-box
            bb = [ float(x) for x in detection["bbox"].split() ]
            for obj in ground_truth_data:
                # look for a class_name match
                if obj["class_name"] == class_name:
                    bbgt = [ float(x) for x in obj["bbox"].split() ]
                    # intersection rectangle; the +1 terms treat coordinates
                    # as inclusive pixel indices
                    bi = [max(bb[0],bbgt[0]), max(bb[1],bbgt[1]), min(bb[2],bbgt[2]), min(bb[3],bbgt[3])]
                    iw = bi[2] - bi[0] + 1
                    ih = bi[3] - bi[1] + 1
                    if iw > 0 and ih > 0:
                        # compute overlap (IoU) = area of intersection / area of union
                        ua = (bb[2] - bb[0] + 1) * (bb[3] - bb[1] + 1) + (bbgt[2] - bbgt[0]
                            + 1) * (bbgt[3] - bbgt[1] + 1) - iw * ih
                        ov = iw * ih / ua
                        if ov > ovmax:
                            ovmax = ov
                            gt_match = obj
            # set minimum overlap
            min_overlap = min_overlap  # NOTE(review): no-op self-assignment kept as-is
            if ovmax >= min_overlap:
                if "difficult" not in gt_match:
                    if not bool(gt_match["used"]):
                        # true positive; mark the GT box as consumed
                        tp[idx] = 1
                        gt_match["used"] = True
                        count_true_positives[class_name] += 1
                        # update the ".json" file so later detections of the
                        # same GT box count as duplicates
                        with open(gt_file, 'w') as f:
                            f.write(json.dumps(ground_truth_data))
                    else:
                        # false positive (multiple detection)
                        fp[idx] = 1
            else:
                # false positive
                fp[idx] = 1
        # compute precision/recall from cumulative TP/FP counts
        cumsum = 0
        for idx, val in enumerate(fp):
            fp[idx] += cumsum
            cumsum += val
        cumsum = 0
        for idx, val in enumerate(tp):
            tp[idx] += cumsum
            cumsum += val
        rec = tp[:]
        for idx, val in enumerate(tp):
            if gt_counter_per_class[class_name] == 0:
                rec[idx] = 0
            else:
                rec[idx] = float(tp[idx]) / gt_counter_per_class[class_name]
        prec = tp[:]
        for idx, val in enumerate(tp):
            prec[idx] = float(tp[idx]) / (fp[idx] + tp[idx])
        ap, _, _ = voc_ap(rec[:], prec[:])
        sum_AP += ap
    mAP = sum_AP / n_classes
    text = "mAP = {0:.2f}%".format(mAP*100)
    print(text)
    # remove the temp_files directory
    shutil.rmtree(TEMP_FILES_PATH)
    return mAP
def _file_lines_to_list(path):
# open txt file lines to a list
with open(path) as f:
content = f.readlines()
# remove whitespace characters like `\n` at the end of each line
content = [x.strip() for x in content]
return content
def voc_ap(rec, prec):
    """Compute VOC-style Average Precision from recall/precision curves.

    Port of the official VOC2012 MATLAB code:
        mrec=[0 ; rec ; 1];
        mpre=[0 ; prec ; 0];
        for i=numel(mpre)-1:-1:1
            mpre(i)=max(mpre(i),mpre(i+1));
        end
        i=find(mrec(2:end)~=mrec(1:end-1))+1;
        ap=sum((mrec(i)-mrec(i-1)).*mpre(i));

    Args:
        rec: recall values, monotonically non-decreasing.
        prec: precision values aligned with ``rec``.

    Returns:
        (ap, mrec, mpre): the average precision, the sentinel-padded recall
        list, and the monotonically-decreasing precision envelope.

    Unlike the original implementation, the input lists are NOT mutated.
    """
    # Pad with sentinels by building new lists instead of insert/append,
    # so the caller's lists are left untouched (the original mutated them).
    mrec = [0.0] + list(rec) + [1.0]
    mpre = [0.0] + list(prec) + [0.0]
    # Make the precision envelope monotonically decreasing, scanning from
    # the end to the beginning (mirrors the MATLAB loop above; Python is
    # 0-indexed, hence range(len-2, -1, -1)).
    for i in range(len(mpre) - 2, -1, -1):
        mpre[i] = max(mpre[i], mpre[i + 1])
    # AP is the area under the stepwise precision/recall curve: sum the
    # rectangle at every index where the recall changes.
    ap = 0.0
    for i in range(1, len(mrec)):
        if mrec[i] != mrec[i - 1]:
            ap += (mrec[i] - mrec[i - 1]) * mpre[i]
    return ap, mrec, mpre
if __name__ == "__main__":
    # Example invocation: score the DispersionAttack experiment output
    # (paths are relative to the current working directory).
    calculate_mAP_from_files('out/DispersionAttack_opt_det_out/gt', 'out/DispersionAttack_opt_det_out/pd')
|
en
| 0.68565
|
Create a ".temp_files/" and "results/" directory # if it doesn't exist already ground-truth Load each of the ground-truth files into a temporary ".json" file. Create a list of all the class names present in the ground-truth (gt_classes). # get a list with the ground-truth files # dictionary with counter per class # create ground-truth dictionary # count that object # dump bounding_boxes into a ".json" file # get a list with the detection-results files detection-results Load each of the detection-results files into a temporary ".json" file. # sort detection-results by decreasing confidence Calculate the AP for each class # open file to store the results Load detection-results of that class Assign detection-results to ground-truth objects # creates an array of zeros of size nd # assign detection-results to ground truth object if any # open ground-truth with that file_id # load detected object bounding-box # look for a class_name match # compute overlap (IoU) = area of intersection / area of union # set minimum overlap # true positive # update the ".json" file # false positive (multiple detection) # false positive # compute precision/recall # remove the temp_files directory # open txt file lines to a list # remove whitespace characters like `\n` at the end of each line --- Official matlab code VOC2012--- mrec=[0 ; rec ; 1]; mpre=[0 ; prec ; 0]; for i=numel(mpre)-1:-1:1 mpre(i)=max(mpre(i),mpre(i+1)); end i=find(mrec(2:end)~=mrec(1:end-1))+1; ap=sum((mrec(i)-mrec(i-1)).*mpre(i)); # insert 0.0 at begining of list # insert 1.0 at end of list # insert 0.0 at begining of list # insert 0.0 at end of list This part makes the precision monotonically decreasing (goes from the end to the beginning) matlab: for i=numel(mpre)-1:-1:1 mpre(i)=max(mpre(i),mpre(i+1)); # matlab indexes start in 1 but python in 0, so I have to do: # range(start=(len(mpre) - 2), end=0, step=-1) # also the python function range excludes the end, resulting in: # range(start=(len(mpre) - 2), end=-1, 
step=-1) This part creates a list of indexes where the recall changes matlab: i=find(mrec(2:end)~=mrec(1:end-1))+1; # if it was matlab would be i + 1 The Average Precision (AP) is the area under the curve (numerical integration) matlab: ap=sum((mrec(i)-mrec(i-1)).*mpre(i));
| 2.704206
| 3
|
unionchan/main.py
|
moepoi/union-chan
| 1
|
6626768
|
<reponame>moepoi/union-chan
# Discord chatbot entry point: every incoming message is classified by a
# tflearn intent model and answered with a canned response for the
# predicted intent.
from discord.ext import commands
from src.convertdata import sekantung_kata, kata, label, data, net
import random
import numpy
import tflearn
import sys
# Run mode from the command line: '--debug' or '--start'.
# NOTE(review): raises IndexError when launched without an argument.
args = sys.argv[1]
# Load the pre-trained intent classifier from disk.
model = tflearn.DNN(net)
model.load('./src/model/model.tfl')
client = commands.Bot(command_prefix='!union ')
global is_on  # no-op at module level; kept as in the original
is_on = False
@client.event
async def on_message(msg):
    # Classify the message content into an intent tag.
    author = msg.author
    content = msg.content
    channel = msg.channel
    sender = channel
    debugchannel = client.get_channel("debug channel id")  # placeholder channel id
    results = model.predict([sekantung_kata(content, kata)])
    results_index = numpy.argmax(results)
    tag = label[results_index]
    # Pick the response list for the predicted tag.
    # NOTE(review): if no intent matches, `responses` stays unbound and the
    # send below raises NameError -- confirm every label has an intent entry.
    for tg in data["intents"]:
        if tg['tag'] == tag:
            responses = tg['responses']
    if "!union" in content:
        # Explicit bot commands go to the command framework.
        await client.process_commands(msg)
    else:
        if is_on:
            if args == '--debug':
                # Debug mode: only answer humans in the debug channel.
                if not author.bot and channel.id == debugchannel.id:
                    await sender.send(random.choice(responses))
            elif args == '--start':
                # Normal mode: answer any non-bot author in any channel.
                if not author.bot:
                    await sender.send(random.choice(responses))
        else:
            pass
@client.command()
async def nyalakan(ctx):
    # "Turn on" command: enable auto-replies.
    global is_on
    is_on = True
    await ctx.send("Menyalakan bot...")
@client.command()
async def matikan(ctx):
    # "Turn off" command: disable auto-replies.
    global is_on
    is_on = False
    await ctx.send("Mematikan bot...")
client.run('TOKEN')  # placeholder token
|
# Discord chatbot entry point: every incoming message is classified by a
# tflearn intent model and answered with a canned response for the
# predicted intent.
from discord.ext import commands
from src.convertdata import sekantung_kata, kata, label, data, net
import random
import numpy
import tflearn
import sys
# Run mode from the command line: '--debug' or '--start'.
# NOTE(review): raises IndexError when launched without an argument.
args = sys.argv[1]
# Load the pre-trained intent classifier from disk.
model = tflearn.DNN(net)
model.load('./src/model/model.tfl')
client = commands.Bot(command_prefix='!union ')
global is_on  # no-op at module level; kept as in the original
is_on = False
@client.event
async def on_message(msg):
    # Classify the message content into an intent tag.
    author = msg.author
    content = msg.content
    channel = msg.channel
    sender = channel
    debugchannel = client.get_channel("debug channel id")  # placeholder channel id
    results = model.predict([sekantung_kata(content, kata)])
    results_index = numpy.argmax(results)
    tag = label[results_index]
    # Pick the response list for the predicted tag.
    # NOTE(review): if no intent matches, `responses` stays unbound and the
    # send below raises NameError -- confirm every label has an intent entry.
    for tg in data["intents"]:
        if tg['tag'] == tag:
            responses = tg['responses']
    if "!union" in content:
        # Explicit bot commands go to the command framework.
        await client.process_commands(msg)
    else:
        if is_on:
            if args == '--debug':
                # Debug mode: only answer humans in the debug channel.
                if not author.bot and channel.id == debugchannel.id:
                    await sender.send(random.choice(responses))
            elif args == '--start':
                # Normal mode: answer any non-bot author in any channel.
                if not author.bot:
                    await sender.send(random.choice(responses))
        else:
            pass
@client.command()
async def nyalakan(ctx):
    # "Turn on" command: enable auto-replies.
    global is_on
    is_on = True
    await ctx.send("Menyalakan bot...")
@client.command()
async def matikan(ctx):
    # "Turn off" command: disable auto-replies.
    global is_on
    is_on = False
    await ctx.send("Mematikan bot...")
client.run('TOKEN')  # placeholder token
|
none
| 1
| 2.248025
| 2
|
|
tests/test_rrset.py
|
balabit-deps/balabit-os-6-dnspython
| 1
|
6626769
|
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.rrset
class RRsetTestCase(unittest.TestCase):
    """Tests for dns.rrset equality.

    The cases below exercise that RRset comparison ignores owner-name case
    (``foo`` vs ``FOO``) and the TTL, while differing names or differing
    rdata sets compare unequal.

    ``self.failUnless`` was deprecated in Python 2.7 and removed in
    Python 3.12; the assertions below use the modern ``assertEqual`` /
    ``assertNotEqual`` equivalents, which also give better failure messages.
    """

    def testEqual1(self):
        # Same rdatas in a different order, name differing only in case.
        r1 = dns.rrset.from_text('foo', 300, 'in', 'a', '10.0.0.1', '10.0.0.2')
        r2 = dns.rrset.from_text('FOO', 300, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertEqual(r1, r2)

    def testEqual2(self):
        # A different TTL (300 vs 600) must not affect equality.
        r1 = dns.rrset.from_text('foo', 300, 'in', 'a', '10.0.0.1', '10.0.0.2')
        r2 = dns.rrset.from_text('FOO', 600, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertEqual(r1, r2)

    def testNotEqual1(self):
        # Different owner names ('fooa' vs 'foo').
        r1 = dns.rrset.from_text('fooa', 30, 'in', 'a', '10.0.0.1', '10.0.0.2')
        r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertNotEqual(r1, r2)

    def testNotEqual2(self):
        # Different rdata values.
        r1 = dns.rrset.from_text('foo', 30, 'in', 'a', '10.0.0.1', '10.0.0.3')
        r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertNotEqual(r1, r2)

    def testNotEqual3(self):
        # r1 carries an extra rdata.
        r1 = dns.rrset.from_text('foo', 30, 'in', 'a', '10.0.0.1', '10.0.0.2',
                                 '10.0.0.3')
        r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertNotEqual(r1, r2)

    def testNotEqual4(self):
        # r1's rdatas are a strict subset of r2's.
        r1 = dns.rrset.from_text('foo', 30, 'in', 'a', '10.0.0.1')
        r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertNotEqual(r1, r2)
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    unittest.main()
|
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import dns.rrset
class RRsetTestCase(unittest.TestCase):
    """Tests for dns.rrset equality.

    The cases below exercise that RRset comparison ignores owner-name case
    (``foo`` vs ``FOO``) and the TTL, while differing names or differing
    rdata sets compare unequal.

    ``self.failUnless`` was deprecated in Python 2.7 and removed in
    Python 3.12; the assertions below use the modern ``assertEqual`` /
    ``assertNotEqual`` equivalents, which also give better failure messages.
    """

    def testEqual1(self):
        # Same rdatas in a different order, name differing only in case.
        r1 = dns.rrset.from_text('foo', 300, 'in', 'a', '10.0.0.1', '10.0.0.2')
        r2 = dns.rrset.from_text('FOO', 300, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertEqual(r1, r2)

    def testEqual2(self):
        # A different TTL (300 vs 600) must not affect equality.
        r1 = dns.rrset.from_text('foo', 300, 'in', 'a', '10.0.0.1', '10.0.0.2')
        r2 = dns.rrset.from_text('FOO', 600, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertEqual(r1, r2)

    def testNotEqual1(self):
        # Different owner names ('fooa' vs 'foo').
        r1 = dns.rrset.from_text('fooa', 30, 'in', 'a', '10.0.0.1', '10.0.0.2')
        r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertNotEqual(r1, r2)

    def testNotEqual2(self):
        # Different rdata values.
        r1 = dns.rrset.from_text('foo', 30, 'in', 'a', '10.0.0.1', '10.0.0.3')
        r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertNotEqual(r1, r2)

    def testNotEqual3(self):
        # r1 carries an extra rdata.
        r1 = dns.rrset.from_text('foo', 30, 'in', 'a', '10.0.0.1', '10.0.0.2',
                                 '10.0.0.3')
        r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertNotEqual(r1, r2)

    def testNotEqual4(self):
        # r1's rdatas are a strict subset of r2's.
        r1 = dns.rrset.from_text('foo', 30, 'in', 'a', '10.0.0.1')
        r2 = dns.rrset.from_text('FOO', 30, 'in', 'a', '10.0.0.2', '10.0.0.1')
        self.assertNotEqual(r1, r2)
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    unittest.main()
|
en
| 0.622654
|
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose with or without fee is hereby granted, # provided that the above copyright notice and this permission notice # appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
| 2.243012
| 2
|
test/test_numpy_binding.py
|
Saad-K/snowflake-connector-python
| 1
|
6626770
|
<gh_stars>1-10
import datetime
import time
import numpy as np
def test_numpy_datatype_binding(conn_cnx, db_parameters):
    """Integration test: numpy scalar types round-trip through the connector.

    For several session time zones, inserts numpy ints, floats, datetime64
    and bool_ values into a table whose columns cover the matching Snowflake
    types, reads the row back with numpy result conversion enabled
    (``conn_cnx(numpy=True)``) and asserts the values survive the round trip.

    Args:
        conn_cnx: fixture returning a connection context manager.
        db_parameters: fixture dict; only the 'name' key (table name) is used.
    """
    epoch_time = time.time()
    current_datetime = datetime.datetime.fromtimestamp(epoch_time)
    current_datetime64 = np.datetime64(current_datetime)
    # One scenario per session time zone; 'expected_specific_date' is the
    # plain-datetime equivalent of the numpy 'specific_date' value.
    all_data = [{
        'tz': 'America/Los_Angeles',
        'float': '1.79769313486e+308',
        'numpy_bool': np.True_,
        'epoch_time': epoch_time,
        'current_time': current_datetime64,
        'specific_date': np.datetime64('2005-02-25T03:30'),
        'expected_specific_date': np.datetime64('2005-02-25T03:30').astype(datetime.datetime)
    }, {
        'tz': 'Asia/Tokyo',
        'float': '-1.79769313486e+308',
        'numpy_bool': np.False_,
        'epoch_time': epoch_time,
        'current_time': current_datetime64,
        'specific_date': np.datetime64('1970-12-31T05:00:00'),
        'expected_specific_date': np.datetime64('1970-12-31T05:00:00').astype(datetime.datetime)
    }, {
        'tz': 'America/New_York',
        'float': '-1.79769313486e+308',
        'numpy_bool': np.True_,
        'epoch_time': epoch_time,
        'current_time': current_datetime64,
        'specific_date': np.datetime64('1969-12-31T05:00:00'),
        'expected_specific_date': np.datetime64('1969-12-31T05:00:00').astype(datetime.datetime)
    }, {
        'tz': 'UTC',
        'float': '-1.79769313486e+308',
        'numpy_bool': np.False_,
        'epoch_time': epoch_time,
        'current_time': current_datetime64,
        'specific_date': np.datetime64('1968-11-12T07:00:00.123'),
        'expected_specific_date': np.datetime64('1968-11-12T07:00:00.123').astype(datetime.datetime)
    }]
    try:
        with conn_cnx(numpy=True) as cnx:
            # One column per numpy type under test (see trailing SQL comments).
            cnx.cursor().execute("""
CREATE OR REPLACE TABLE {name} (
    c1 integer, -- int8
    c2 integer, -- int16
    c3 integer, -- int32
    c4 integer, -- int64
    c5 float, -- float16
    c6 float, -- float32
    c7 float, -- float64
    c8 timestamp_ntz, -- datetime64
    c9 date, -- datetime64
    c10 timestamp_ltz, -- datetime64,
    c11 timestamp_tz, -- datetime64
    c12 boolean) -- numpy.bool_
""".format(name=db_parameters['name']))
            for data in all_data:
                # The session time zone affects how timestamp_ltz/tz round-trip.
                cnx.cursor().execute("""
ALTER SESSION SET timezone='{tz}'""".format(tz=data['tz']))
                cnx.cursor().execute("""
INSERT INTO {name}(
    c1,
    c2,
    c3,
    c4,
    c5,
    c6,
    c7,
    c8,
    c9,
    c10,
    c11,
    c12
)
VALUES(
    %s,
    %s,
    %s,
    %s,
    %s,
    %s,
    %s,
    %s,
    %s,
    %s,
    %s,
    %s)""".format(
                    name=db_parameters['name']), (
                    np.iinfo(np.int8).max,
                    np.iinfo(np.int16).max,
                    np.iinfo(np.int32).max,
                    np.iinfo(np.int64).max,
                    np.finfo(np.float16).max,
                    np.finfo(np.float32).max,
                    np.float64(data['float']),
                    data['current_time'],
                    data['current_time'],
                    data['current_time'],
                    data['specific_date'],
                    data['numpy_bool']
                ))
                rec = cnx.cursor().execute("""
SELECT
    c1,
    c2,
    c3,
    c4,
    c5,
    c6,
    c7,
    c8,
    c9,
    c10,
    c11,
    c12
FROM {name}""".format(
                    name=db_parameters['name'])).fetchone()
                # Integer/float columns: converting the result back to the
                # original numpy type must reproduce the max value inserted.
                assert np.int8(rec[0]) == np.iinfo(np.int8).max
                assert np.int16(rec[1]) == np.iinfo(np.int16).max
                assert np.int32(rec[2]) == np.iinfo(np.int32).max
                assert np.int64(rec[3]) == np.iinfo(np.int64).max
                assert np.float16(rec[4]) == np.finfo(np.float16).max
                assert np.float32(rec[5]) == np.finfo(np.float32).max
                assert rec[6] == np.float64(data['float'])
                assert rec[7] == data['current_time']
                # DATE column: compare only the 'YYYY-MM-DD' prefix of the
                # inserted datetime64's string form.
                assert str(rec[8]) == str(data['current_time'])[0:10]
                # timestamp_ltz/tz columns: compare in the returned value's
                # own time zone.
                assert rec[9] == datetime.datetime.fromtimestamp(epoch_time, rec[9].tzinfo)
                assert rec[10] == data['expected_specific_date'].replace(tzinfo=rec[10].tzinfo)
                # Boolean column must come back as a plain Python bool that
                # also equals the numpy bool inserted.
                assert isinstance(rec[11], bool) and rec[11] == data['numpy_bool'] and np.bool_(rec[11]) == data['numpy_bool']
                cnx.cursor().execute("""
DELETE FROM {name}""".format(name=db_parameters['name']))
    finally:
        # Always drop the scratch table, even when an assertion fails.
        with conn_cnx() as cnx:
            cnx.cursor().execute("""
DROP TABLE IF EXISTS {name}
""".format(name=db_parameters['name']))
|
import datetime
import time
import numpy as np
def test_numpy_datatype_binding(conn_cnx, db_parameters):
    """Integration test: numpy scalar types round-trip through the connector.

    For several session time zones, inserts numpy ints, floats, datetime64
    and bool_ values into a table whose columns cover the matching Snowflake
    types, reads the row back with numpy result conversion enabled
    (``conn_cnx(numpy=True)``) and asserts the values survive the round trip.

    Args:
        conn_cnx: fixture returning a connection context manager.
        db_parameters: fixture dict; only the 'name' key (table name) is used.
    """
    epoch_time = time.time()
    current_datetime = datetime.datetime.fromtimestamp(epoch_time)
    current_datetime64 = np.datetime64(current_datetime)
    # One scenario per session time zone; 'expected_specific_date' is the
    # plain-datetime equivalent of the numpy 'specific_date' value.
    all_data = [{
        'tz': 'America/Los_Angeles',
        'float': '1.79769313486e+308',
        'numpy_bool': np.True_,
        'epoch_time': epoch_time,
        'current_time': current_datetime64,
        'specific_date': np.datetime64('2005-02-25T03:30'),
        'expected_specific_date': np.datetime64('2005-02-25T03:30').astype(datetime.datetime)
    }, {
        'tz': 'Asia/Tokyo',
        'float': '-1.79769313486e+308',
        'numpy_bool': np.False_,
        'epoch_time': epoch_time,
        'current_time': current_datetime64,
        'specific_date': np.datetime64('1970-12-31T05:00:00'),
        'expected_specific_date': np.datetime64('1970-12-31T05:00:00').astype(datetime.datetime)
    }, {
        'tz': 'America/New_York',
        'float': '-1.79769313486e+308',
        'numpy_bool': np.True_,
        'epoch_time': epoch_time,
        'current_time': current_datetime64,
        'specific_date': np.datetime64('1969-12-31T05:00:00'),
        'expected_specific_date': np.datetime64('1969-12-31T05:00:00').astype(datetime.datetime)
    }, {
        'tz': 'UTC',
        'float': '-1.79769313486e+308',
        'numpy_bool': np.False_,
        'epoch_time': epoch_time,
        'current_time': current_datetime64,
        'specific_date': np.datetime64('1968-11-12T07:00:00.123'),
        'expected_specific_date': np.datetime64('1968-11-12T07:00:00.123').astype(datetime.datetime)
    }]
    try:
        with conn_cnx(numpy=True) as cnx:
            # One column per numpy type under test (see trailing SQL comments).
            cnx.cursor().execute("""
CREATE OR REPLACE TABLE {name} (
    c1 integer, -- int8
    c2 integer, -- int16
    c3 integer, -- int32
    c4 integer, -- int64
    c5 float, -- float16
    c6 float, -- float32
    c7 float, -- float64
    c8 timestamp_ntz, -- datetime64
    c9 date, -- datetime64
    c10 timestamp_ltz, -- datetime64,
    c11 timestamp_tz, -- datetime64
    c12 boolean) -- numpy.bool_
""".format(name=db_parameters['name']))
            for data in all_data:
                # The session time zone affects how timestamp_ltz/tz round-trip.
                cnx.cursor().execute("""
ALTER SESSION SET timezone='{tz}'""".format(tz=data['tz']))
                cnx.cursor().execute("""
INSERT INTO {name}(
    c1,
    c2,
    c3,
    c4,
    c5,
    c6,
    c7,
    c8,
    c9,
    c10,
    c11,
    c12
)
VALUES(
    %s,
    %s,
    %s,
    %s,
    %s,
    %s,
    %s,
    %s,
    %s,
    %s,
    %s,
    %s)""".format(
                    name=db_parameters['name']), (
                    np.iinfo(np.int8).max,
                    np.iinfo(np.int16).max,
                    np.iinfo(np.int32).max,
                    np.iinfo(np.int64).max,
                    np.finfo(np.float16).max,
                    np.finfo(np.float32).max,
                    np.float64(data['float']),
                    data['current_time'],
                    data['current_time'],
                    data['current_time'],
                    data['specific_date'],
                    data['numpy_bool']
                ))
                rec = cnx.cursor().execute("""
SELECT
    c1,
    c2,
    c3,
    c4,
    c5,
    c6,
    c7,
    c8,
    c9,
    c10,
    c11,
    c12
FROM {name}""".format(
                    name=db_parameters['name'])).fetchone()
                # Integer/float columns: converting the result back to the
                # original numpy type must reproduce the max value inserted.
                assert np.int8(rec[0]) == np.iinfo(np.int8).max
                assert np.int16(rec[1]) == np.iinfo(np.int16).max
                assert np.int32(rec[2]) == np.iinfo(np.int32).max
                assert np.int64(rec[3]) == np.iinfo(np.int64).max
                assert np.float16(rec[4]) == np.finfo(np.float16).max
                assert np.float32(rec[5]) == np.finfo(np.float32).max
                assert rec[6] == np.float64(data['float'])
                assert rec[7] == data['current_time']
                # DATE column: compare only the 'YYYY-MM-DD' prefix of the
                # inserted datetime64's string form.
                assert str(rec[8]) == str(data['current_time'])[0:10]
                # timestamp_ltz/tz columns: compare in the returned value's
                # own time zone.
                assert rec[9] == datetime.datetime.fromtimestamp(epoch_time, rec[9].tzinfo)
                assert rec[10] == data['expected_specific_date'].replace(tzinfo=rec[10].tzinfo)
                # Boolean column must come back as a plain Python bool that
                # also equals the numpy bool inserted.
                assert isinstance(rec[11], bool) and rec[11] == data['numpy_bool'] and np.bool_(rec[11]) == data['numpy_bool']
                cnx.cursor().execute("""
DELETE FROM {name}""".format(name=db_parameters['name']))
    finally:
        # Always drop the scratch table, even when an assertion fails.
        with conn_cnx() as cnx:
            cnx.cursor().execute("""
DROP TABLE IF EXISTS {name}
""".format(name=db_parameters['name']))
|
en
| 0.26479
|
Tests numpy data type binding CREATE OR REPLACE TABLE {name} ( c1 integer, -- int8 c2 integer, -- int16 c3 integer, -- int32 c4 integer, -- int64 c5 float, -- float16 c6 float, -- float32 c7 float, -- float64 c8 timestamp_ntz, -- datetime64 c9 date, -- datetime64 c10 timestamp_ltz, -- datetime64, c11 timestamp_tz, -- datetime64 c12 boolean) -- numpy.bool_ ALTER SESSION SET timezone='{tz}' INSERT INTO {name}( c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12 ) VALUES( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) SELECT c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12 FROM {name} DELETE FROM {name} DROP TABLE IF EXISTS {name}
| 2.496908
| 2
|
os.path.py
|
sicilyChen/python
| 0
|
6626771
|
<reponame>sicilyChen/python<gh_stars>0
# Demonstrates common os.path helpers by printing each result.
import os.path
print(os.path.abspath('hello.py'))#Return the absolute path of a file or directory
print(os.path.exists('hello.py'),os.path.exists('1.py'))#True if the path exists on disk, False otherwise
print(os.path.join('D:\\python程序练习','hello.py'))#Join directory and file-name components into one path
print(os.path.split('D:\\python程序练习\\hello.py'))#Split into (directory, file name); use splitext for the extension
print(os.path.basename('D:\\python程序练习\\hello.py'))#Final path component (the file name only)
print(os.path.dirname('D:\\python程序练习\\hello.py'))#Directory part of the path, without the file name
print(os.path.isdir('D:\\python程序练习\\hello.py'))#True only if the path is an existing directory
|
# Demonstrates common os.path helpers by printing each result.
import os.path
print(os.path.abspath('hello.py'))#Return the absolute path of a file or directory
print(os.path.exists('hello.py'),os.path.exists('1.py'))#True if the path exists on disk, False otherwise
print(os.path.join('D:\\python程序练习','hello.py'))#Join directory and file-name components into one path
print(os.path.split('D:\\python程序练习\\hello.py'))#Split into (directory, file name); use splitext for the extension
print(os.path.basename('D:\\python程序练习\\hello.py'))#Final path component (the file name only)
print(os.path.dirname('D:\\python程序练习\\hello.py'))#Directory part of the path, without the file name
print(os.path.isdir('D:\\python程序练习\\hello.py'))#True only if the path is an existing directory
|
zh
| 0.994901
|
#用于获取文件或目录的绝对路径 #用于判断文件或目录是否存在,如果存在返回True,否则返回False #将目录与目录或者文件名拼接起来 #文件名和扩展名 #分离文件名和扩展名 #从一个路径中提取文件路径,不包括文件名 #用于判断是否为路径
| 3.413546
| 3
|
matminer/featurizers/site.py
|
AmalieT/matminer
| 0
|
6626772
|
from __future__ import division
import copy
from functools import lru_cache
from matminer.featurizers.utils.grdf import Gaussian, Histogram
from matminer.utils.caching import get_nearest_neighbors
from matminer.utils.data import MagpieData
"""
Features that describe the local environment of a single atom. Note that
structural features can be constructed from a combination of site features from
every site in the structure.
The `featurize` function takes two arguments:
struct (Structure): Object representing the structure containing the site
of interest
idx (int): Index of the site to be featurized
We have to use two parameters because the Site object does not hold a pointer
back to its structure and often information on neighbors is required. To run
:code:`featurize_dataframe`, you must pass the column names for both the site
index and the structure. For example:
.. code:: python
f = AGNIFingerprints()
f.featurize_dataframe(data, ['structure', 'site_idx'])
"""
import os
import warnings
import ruamel.yaml as yaml
import itertools
import numpy as np
import scipy.integrate as integrate
from matminer.featurizers.base import BaseFeaturizer
from math import pi
from scipy.special import sph_harm
from sympy.physics.wigner import wigner_3j
from pymatgen import Structure
from pymatgen.core.periodic_table import Element
from pymatgen.analysis.local_env import LocalStructOrderParams, \
VoronoiNN, CrystalNN
import pymatgen.analysis
from pymatgen.analysis.ewald import EwaldSummation
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometry_finder \
import LocalGeometryFinder
from pymatgen.analysis.chemenv.coordination_environments.chemenv_strategies \
import SimplestChemenvStrategy, MultiWeightsChemenvStrategy
from matminer.featurizers.utils.stats import PropertyStats
from sklearn.utils.validation import check_is_fitted
# Load pymatgen's order-parameter presets: maps coordination number ->
# {motif name: [OP type, optional parameter dict]}.
cn_motif_op_params = {}
with open(os.path.join(os.path.dirname(
        pymatgen.analysis.__file__), 'cn_opt_params.yaml'), 'r') as f:
    cn_motif_op_params = yaml.safe_load(f)
# Load matminer's default target motifs: maps coordination number ->
# list of target OP/motif names used by the featurizers below.
cn_target_motif_op = {}
with open(os.path.join(os.path.dirname(
        __file__), 'cn_target_motif_op.yaml'), 'r') as f:
    cn_target_motif_op = yaml.safe_load(f)
class AGNIFingerprints(BaseFeaturizer):
    r"""
    Product integral of RDF and Gaussian window function, from `Botu et al <http://pubs.acs.org/doi/abs/10.1021/acs.jpcc.6b10908>`_.
    Integral of the product of the radial distribution function and a
    Gaussian window function. Originally used by
    `Botu et al <http://pubs.acs.org/doi/abs/10.1021/acs.jpcc.6b10908>`_ to fit empiricial
    potentials. These features come in two forms: atomic fingerprints and
    direction-resolved fingerprints.
    Atomic fingerprints describe the local environment of an atom and are
    computed using the function:
    :math:`A_i(\eta) = \sum\limits_{i \ne j} e^{-(\frac{r_{ij}}{\eta})^2} f(r_{ij})`
    where :math:`i` is the index of the atom, :math:`j` is the index of a neighboring atom, :math:`\eta` is a scaling function,
    :math:`r_{ij}` is the distance between atoms :math:`i` and :math:`j`, and :math:`f(r)` is a cutoff function where
    :math:`f(r) = 0.5[\cos(\frac{\pi r_{ij}}{R_c}) + 1]` if :math:`r < R_c` and :math:`0` otherwise.
    The direction-resolved fingerprints are computed using
    :math:`V_i^k(\eta) = \sum\limits_{i \ne j} \frac{r_{ij}^k}{r_{ij}} e^{-(\frac{r_{ij}}{\eta})^2} f(r_{ij})`
    where :math:`r_{ij}^k` is the :math:`k^{th}` component of :math:`\bold{r}_i - \bold{r}_j`.
    Parameters:
    TODO: Differentiate between different atom types (maybe as another class)
    """

    def __init__(self, directions=(None, 'x', 'y', 'z'), etas=None,
                 cutoff=8):
        """
        Args:
            directions (iterable): List of directions for the fingerprints. Can
                be one or more of 'None`, 'x', 'y', or 'z'
            etas (iterable of floats): List of which window widths to compute
            cutoff (float): Cutoff distance (Angstroms)
        """
        self.directions = directions
        self.etas = etas
        if self.etas is None:
            # Default window widths: 8 values log-spaced between 0.8 and 16
            self.etas = np.logspace(np.log10(0.8), np.log10(16), 8)
        self.cutoff = cutoff

    def featurize(self, struct, idx):
        """Compute AGNI fingerprints for the site at index ``idx``.

        Args:
            struct (Structure): pymatgen Structure containing the site.
            idx (int): index of the target site in ``struct``.
        Returns:
            (np.ndarray): fingerprints, ordered direction-major, then by eta.
        Raises:
            ValueError: if a direction other than None/'x'/'y'/'z' was
                configured.
        """
        # Unit projection vectors for the direction-resolved fingerprints
        projections = {'x': [1., 0., 0.],
                       'y': [0., 1., 0.],
                       'z': [0., 0., 1.]}
        # Get all neighbors of this site within the cutoff radius
        my_site = struct[idx]
        sites, dists = zip(*struct.get_neighbors(my_site, self.cutoff))
        dists = np.array(dists)
        # Unit displacement vectors (r_i - r_j) / r_ij are only needed when a
        # direction-resolved fingerprint was requested
        if any([x in self.directions for x in ['x', 'y', 'z']]):
            disps = np.array(
                [my_site.coords - s.coords for s in sites]) / dists[:,
                                                              np.newaxis]
        # Smooth cutoff: f(r) = 0.5 * (cos(pi * r / Rc) + 1)
        cutoff_func = 0.5 * (np.cos(np.pi * dists / self.cutoff) + 1)
        # Gaussian window times cutoff function, one column per eta
        windowed = np.zeros((len(dists), len(self.etas)))
        for i, eta in enumerate(self.etas):
            windowed[:, i] = np.multiply(
                np.exp(-1 * np.power(np.true_divide(dists, eta), 2)),
                cutoff_func)
        # Sum over neighbors; directional features weight each neighbor by
        # the projection of its unit displacement onto the axis
        output = []
        for d in self.directions:
            if d is None:
                output.append(np.sum(windowed, axis=0))
            elif d in projections:
                output.append(
                    np.sum(windowed * np.dot(disps, projections[d])[:,
                                             np.newaxis],
                           axis=0))
            else:
                # Was a bare Exception; ValueError is more precise and is
                # still caught by existing `except Exception` handlers.
                raise ValueError('Unrecognized direction')
        return np.hstack(output)

    def feature_labels(self):
        """Return one label per (direction, eta) pair, in featurize order."""
        labels = []
        for d in self.directions:
            for e in self.etas:
                if d is None:
                    labels.append('AGNI eta=%.2e' % e)
                else:
                    labels.append('AGNI dir=%s eta=%.2e' % (d, e))
        return labels

    def citations(self):
        return ["@article{Botu2015, author = {<NAME> and <NAME>},doi = {10.1002/qua.24836}," \
                "journal = {International Journal of Quantum Chemistry},number = {16},pages = {1074--1083}," \
                "title = {{Adaptive machine learning framework to accelerate ab initio molecular dynamics}}," \
                "volume = {115},year = {2015}}"]

    def implementors(self):
        return ['<NAME>']
class OPSiteFingerprint(BaseFeaturizer):
    """
    Local structure order parameters computed from a site's neighbor env.
    For each order parameter, we determine
    the neighbor shell that complies with the expected
    coordination number. For example, we find the 4 nearest
    neighbors for the tetrahedral OP, the 6 nearest for the
    octahedral OP, and the 8 nearest neighbors for the bcc OP.
    If we don't find such a shell, the OP is either set to zero
    or evaluated with the shell of the next largest observed
    coordination number.
    Args:
        target_motifs (dict): target op or motif type where keys
                              are corresponding coordination numbers
                              (e.g., {4: "tetrahedral"}).
        dr (float): width for binning neighbors in unit of relative
                    distances (= distance/nearest neighbor
                    distance). The binning is necessary to make the
                    neighbor-finding step robust against small numerical
                    variations in neighbor distances (default: 0.1).
        ddr (float): variation of width for finding stable OP values.
        ndr (int): number of width variations for each variation direction
                   (e.g., ndr = 0 only uses the input dr, whereas
                   ndr=1 tests dr = dr - ddr, dr, and dr + ddr.
        dop (float): binning width to compute histogram for each OP
                     if ndr > 0.
        dist_exp (boolean): exponent for distance factor to multiply
                            order parameters with that penalizes (large)
                            variations in distances in a given motif.
                            0 will switch the option off
                            (default: 2).
        zero_ops (boolean): set an OP to zero if there is no neighbor
                            shell that complies with the expected
                            coordination number of a given OP
                            (e.g., CN=4 for tetrahedron;
                            default: True).
    """
    def __init__(self, target_motifs=None, dr=0.1, ddr=0.01, ndr=1, dop=0.001,
                 dist_exp=2, zero_ops=True):
        # Fall back to the module-level yaml presets when no explicit
        # target motifs are given; deepcopy so instances never share state.
        self.cn_target_motif_op = copy.deepcopy(cn_target_motif_op) \
            if target_motifs is None else copy.deepcopy(target_motifs)
        self.dr = dr
        self.ddr = ddr
        self.ndr = ndr
        self.dop = dop
        self.dist_exp = dist_exp
        self.zero_ops = zero_ops
        # Build one LocalStructOrderParams object per (CN, motif),
        # resolving OP type and optional parameters from the preset table.
        self.ops = {}
        for cn, t_list in self.cn_target_motif_op.items():
            self.ops[cn] = []
            for t in t_list:
                ot = t
                p = None
                if cn in cn_motif_op_params.keys():
                    if t in cn_motif_op_params[cn].keys():
                        ot = cn_motif_op_params[cn][t][0]
                        if len(cn_motif_op_params[cn][t]) > 1:
                            p = cn_motif_op_params[cn][t][1]
                self.ops[cn].append(LocalStructOrderParams([ot], parameters=[p]))
    def featurize(self, struct, idx):
        """
        Get OP fingerprint of site with given index in input
        structure.
        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure.
        Returns:
            opvals (numpy array): order parameters of target site.
        """
        idop = 1.0 / self.dop
        opvals = {}
        s = struct.sites[idx]
        # Grow the search radius (starting above 6) until at least 12
        # neighbors are found.
        neigh_dist = []
        r = 6
        while len(neigh_dist) < 12:
            r += 1.0
            neigh_dist = struct.get_neighbors(s, r)
        # Smoothen distance, but use relative distances.
        dmin = min([d for n, d in neigh_dist])
        neigh_dist = [[n, d / dmin] for n, d in neigh_dist]
        # For each binning width (dr - ndr*ddr ... dr + ndr*ddr), snap the
        # relative distances to bin centers and collect the distinct sorted
        # shell distances.
        neigh_dist_alldrs = {}
        d_sorted_alldrs = {}
        for i in range(-self.ndr, self.ndr + 1):
            opvals[i] = []
            this_dr = self.dr + float(i) * self.ddr
            this_idr = 1.0 / this_dr
            neigh_dist_alldrs[i] = []
            for j in range(len(neigh_dist)):
                neigh_dist_alldrs[i].append([neigh_dist[j][0],
                                             (float(
                                                 int(neigh_dist[j][1] * this_idr \
                                                     + 0.5)) + 0.5) * this_dr])
            d_sorted_alldrs[i] = []
            for n, d in neigh_dist_alldrs[i]:
                if d not in d_sorted_alldrs[i]:
                    d_sorted_alldrs[i].append(d)
            d_sorted_alldrs[i] = sorted(d_sorted_alldrs[i])
        # Do q_sgl_bd separately.
        #if self.optypes[1][0] == "sgl_bd":
        if self.cn_target_motif_op[1][0] == "sgl_bd":
            for i in range(-self.ndr, self.ndr + 1):
                site_list = [s]
                for n, dn in neigh_dist_alldrs[i]:
                    site_list.append(n)
                opval = self.ops[1][0].get_order_parameters(
                    site_list, 0,
                    indices_neighs=[j for j in range(1, len(site_list))])
                opvals[i].append(opval[0])
        # Evaluate the remaining OPs shell by shell, weighting by the mean
        # inverse relative distance raised to dist_exp.
        for i in range(-self.ndr, self.ndr + 1):
            prev_cn = 0
            for d in d_sorted_alldrs[i]:
                this_cn = 0
                site_list = [s]
                this_av_inv_drel = 0.0
                for j, [n, dn] in enumerate(neigh_dist_alldrs[i]):
                    if dn <= d:
                        this_cn += 1
                        site_list.append(n)
                        this_av_inv_drel += (1.0 / (neigh_dist[j][1]))
                this_av_inv_drel = this_av_inv_drel / float(this_cn)
                d_fac = this_av_inv_drel ** self.dist_exp
                for cn in range(max(2, prev_cn + 1), min(this_cn + 1, 13)):
                    # Set all OPs of non-CN-complying neighbor environments
                    # to zero if applicable.
                    if self.zero_ops and cn != this_cn:
                        for it in range(len(self.cn_target_motif_op[cn])):
                            opvals[i].append(0)
                        continue
                    # Set all (remaining) OPs.
                    for it in range(len(self.cn_target_motif_op[cn])):
                        opval = self.ops[cn][it].get_order_parameters(
                            site_list, 0,
                            indices_neighs=[j for j in
                                            range(1, len(site_list))])
                        if opval[0] is None:
                            opval[0] = 0
                        else:
                            opval[0] = d_fac * opval[0]
                        opvals[i].append(opval[0])
                prev_cn = this_cn
                if prev_cn >= 12:
                    break
        # Pick, for each OP, the most frequent value across the dr
        # variations via a histogram peak search.
        opvals_out = []
        for j in range(len(opvals[0])):
            # Compute histogram, determine peak, and location
            # of peak value.
            op_tmp = [opvals[i][j] for i in range(-self.ndr, self.ndr + 1)]
            minval = float(int(min(op_tmp) * idop - 1.5)) * self.dop
            # print(minval)
            if minval < 0.0:
                minval = 0.0
            if minval > 1.0:
                minval = 1.0
            # print(minval)
            maxval = float(int(max(op_tmp) * idop + 1.5)) * self.dop
            # print(maxval)
            if maxval < 0.0:
                maxval = 0.0
            if maxval > 1.0:
                maxval = 1.0
            # print(maxval)
            if minval == maxval:
                minval = minval - self.dop
                maxval = maxval + self.dop
            # print(minval)
            # print(maxval)
            nbins = int((maxval - minval) * idop)
            # print('{} {} {}'.format(minval, maxval, nbins))
            hist, bin_edges = np.histogram(
                op_tmp, bins=nbins, range=(minval, maxval),
                weights=None, density=False)
            max_hist = max(hist)
            op_peaks = []
            for i, h in enumerate(hist):
                if h == max_hist:
                    op_peaks.append(
                        [i, 0.5 * (bin_edges[i] + bin_edges[i + 1])])
            # Address problem that 2 OP values can be close to a bin edge.
            hist2 = []
            op_peaks2 = []
            i = 0
            while i < len(op_peaks):
                if i < len(op_peaks) - 1:
                    if op_peaks[i + 1][0] - op_peaks[i][0] == 1:
                        # Adjacent peak bins: merge them into one peak.
                        op_peaks2.append(
                            0.5 * (op_peaks[i][1] + op_peaks[i + 1][1]))
                        hist2.append(
                            hist[op_peaks[i][0]] + hist[op_peaks[i + 1][0]])
                        i += 1
                    else:
                        op_peaks2.append(op_peaks[i][1])
                        hist2.append(hist[op_peaks[i][0]])
                else:
                    op_peaks2.append(op_peaks[i][1])
                    hist2.append(hist[op_peaks[i][0]])
                i += 1
            opvals_out.append(op_peaks2[list(hist2).index(max(hist2))])
        return np.array(opvals_out)
    def feature_labels(self):
        """Return '<op name> CN_<coordination number>' labels in featurize order."""
        labels = []
        for cn, li in self.cn_target_motif_op.items():
            for e in li:
                labels.append('{} CN_{}'.format(e, cn))
        return labels
    def citations(self):
        return ['@article{zimmermann_jain_2017, title={Applications of order'
                ' parameter feature vectors}, journal={in progress}, author={'
                '<NAME>. and <NAME>.}, year={2017}}']
    def implementors(self):
        return ['<NAME>']
class CrystalNNFingerprint(BaseFeaturizer):
    """
    A local order parameter fingerprint for periodic crystals.
    The fingerprint represents the value of various order parameters for the
    site. The "wt" order parameter describes how consistent a site is with a
    certain coordination number. The remaining order parameters are computed
    by multiplying the "wt" for that coordination number with the OP value.
    The chem_info parameter can be used to also get chemical descriptors that
    describe differences in some chemical parameter (e.g., electronegativity)
    between the central site and the site neighbors.
    """
    @staticmethod
    def from_preset(preset, **kwargs):
        """
        Use preset parameters to get the fingerprint
        Args:
            preset (str): name of preset ("cn" or "ops")
            **kwargs: other settings to be passed into CrystalNN class
        Raises:
            RuntimeError: if the preset name is not recognized.
        """
        if preset == "cn":
            # Coordination-weight-only fingerprint for CN 1..24.
            op_types = dict([(k + 1, ["wt"]) for k in range(24)])
            return CrystalNNFingerprint(op_types, **kwargs)
        elif preset == "ops":
            # Default motif OPs with "wt" prepended for every CN 1..24.
            op_types = copy.deepcopy(cn_target_motif_op)
            for k in range(24):
                if k + 1 in op_types:
                    op_types[k + 1].insert(0, "wt")
                else:
                    op_types[k + 1] = ["wt"]
            return CrystalNNFingerprint(op_types, chem_info=None, **kwargs)
        else:
            raise RuntimeError('preset "{}" is not supported in '
                               'CrystalNNFingerprint'.format(preset))
    def __init__(self, op_types, chem_info=None, **kwargs):
        """
        Initialize the CrystalNNFingerprint. Use the from_preset() function to
        use default params.
        Args:
            op_types (dict): a dict of coordination number (int) to a list of str
                representing the order parameter types
            chem_info (dict): a dict of chemical properties (e.g., atomic mass)
                to dictionaries that map an element to a value
                (e.g., chem_info["Pauling scale"]["O"] = 3.44)
            **kwargs: other settings to be passed into CrystalNN class
        """
        self.op_types = copy.deepcopy(op_types)
        self.cnn = CrystalNN(**kwargs)
        if chem_info is not None:
            self.chem_info = copy.deepcopy(chem_info)
            self.chem_props = list(chem_info.keys())
        else:
            self.chem_info = None
        self.ops = {}  # load order parameter objects & parameters
        for cn, t_list in self.op_types.items():
            self.ops[cn] = []
            for t in t_list:
                if t == "wt":
                    # "wt" is handled specially in featurize, no OP object.
                    self.ops[cn].append(t)
                else:
                    # Resolve OP type and optional parameters from the
                    # pymatgen preset table for this (CN, motif).
                    ot = t
                    p = None
                    if cn in cn_motif_op_params.keys():
                        if t in cn_motif_op_params[cn].keys():
                            ot = cn_motif_op_params[cn][t][0]
                            if len(cn_motif_op_params[cn][t]) > 1:
                                p = cn_motif_op_params[cn][t][1]
                    self.ops[cn].append(LocalStructOrderParams([ot], parameters=[p]))
    def featurize(self, struct, idx):
        """
        Get crystal fingerprint of site with given index in input
        structure.
        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure.
        Returns:
            list of weighted order parameters of target site.
        """
        nndata = self.cnn.get_nn_data(struct, idx)
        max_cn = sorted(self.op_types)[-1]
        cn_fingerprint = []
        if self.chem_info is not None:
            prop_delta = {}  # dictionary of chemical property to final value
            for prop in self.chem_props:
                prop_delta[prop] = 0
            sum_wt = 0
            elem_central = struct.sites[idx].specie.symbol
            specie_central = str(struct.sites[idx].specie)
        for k in range(max_cn):
            cn = k + 1
            wt = nndata.cn_weights.get(cn, 0)
            if cn in self.ops:
                for op in self.ops[cn]:
                    if op == "wt":
                        cn_fingerprint.append(wt)
                        if self.chem_info is not None and wt != 0:
                            # Compute additional chemistry-related features
                            sum_wt += wt
                            neigh_sites = [d["site"] for d in
                                           nndata.cn_nninfo[cn]]
                            for prop in self.chem_props:
                                # get the value for specie, if not fall back to
                                # value defined for element
                                prop_central = self.chem_info[prop].get(
                                    specie_central, self.chem_info[prop].get(
                                        elem_central))
                                for neigh in neigh_sites:
                                    elem_neigh = neigh.specie.symbol
                                    specie_neigh = str(neigh.specie)
                                    prop_neigh = self.chem_info[prop].get(
                                        specie_neigh,
                                        self.chem_info[prop].get(
                                            elem_neigh))
                                    prop_delta[prop] += wt * \
                                                        (prop_neigh -
                                                         prop_central) / cn
                    elif wt == 0:
                        # Zero weight means zero for every OP at this CN.
                        cn_fingerprint.append(wt)
                    else:
                        neigh_sites = [d["site"] for d in nndata.cn_nninfo[cn]]
                        opval = op.get_order_parameters(
                            [struct[idx]] + neigh_sites, 0,
                            indices_neighs=[i for i in
                                            range(1, len(neigh_sites) + 1)])[0]
                        opval = opval or 0  # handles None
                        cn_fingerprint.append(wt * opval)
        chem_fingerprint = []
        if self.chem_info is not None:
            # Normalize each accumulated property difference by total weight.
            for val in prop_delta.values():
                chem_fingerprint.append(val / sum_wt)
        return cn_fingerprint + chem_fingerprint
    def feature_labels(self):
        """Return '<op> CN_<cn>' labels plus '<prop> local diff' labels."""
        labels = []
        max_cn = sorted(self.op_types)[-1]
        for k in range(max_cn):
            cn = k + 1
            if cn in list(self.ops.keys()):
                for op in self.op_types[cn]:
                    labels.append("{} CN_{}".format(op, cn))
        if self.chem_info is not None:
            for prop in self.chem_props:
                labels.append("{} local diff".format(prop))
        return labels
    def citations(self):
        return []
    def implementors(self):
        return ['<NAME>', '<NAME>']
class VoronoiFingerprint(BaseFeaturizer):
    """
    Voronoi tessellation-based features around target site.
    Calculate the following sets of features based on Voronoi tessellation
    analysis around the target site:
    Voronoi indices
        n_i denotes the number of i-edged facets, and i is in the range of 3-10.
        e.g.
        for bcc lattice, the Voronoi indices are [0,6,0,8,...];
        for fcc/hcp lattice, the Voronoi indices are [0,12,0,0,...];
        for icosahedra, the Voronoi indices are [0,0,12,0,...];
    i-fold symmetry indices
        computed as n_i/sum(n_i), and i is in the range of 3-10.
        reflect the strength of i-fold symmetry in local sites.
        e.g.
        for bcc lattice, the i-fold symmetry indices are [0,6/14,0,8/14,...]
            indicating both 4-fold and a stronger 6-fold symmetries are present;
        for fcc/hcp lattice, the i-fold symmetry factors are [0,1,0,0,...],
            indicating only 4-fold symmetry is present;
        for icosahedra, the Voronoi indices are [0,0,1,0,...],
            indicating only 5-fold symmetry is present;
    Weighted i-fold symmetry indices
        if use_weights = True
    Voronoi volume
        total volume of the Voronoi polyhedron around the target site
    Voronoi volume statistics of sub_polyhedra formed by each facet + center
        stats_vol = ['mean', 'std_dev', 'minimum', 'maximum']
    Voronoi area
        total area of the Voronoi polyhedron around the target site
    Voronoi area statistics of the facets
        stats_area = ['mean', 'std_dev', 'minimum', 'maximum']
    Voronoi nearest-neighboring distance statistics
        stats_dist = ['mean', 'std_dev', 'minimum', 'maximum']
    Args:
        cutoff (float): cutoff distance in determining the potential
                        neighbors for Voronoi tessellation analysis.
                        (default: 6.5)
        use_symm_weights(bool): whether to use weights to derive weighted
                                i-fold symmetry indices.
        symm_weights(str): weights to be used in weighted i-fold symmetry
                           indices.
                           Supported options: 'solid_angle', 'area', 'volume',
                           'face_dist'. (default: 'solid_angle')
        stats_vol (list of str): volume statistics types.
        stats_area (list of str): area statistics types.
        stats_dist (list of str): neighboring distance statistics types.
    """
    def __init__(self, cutoff=6.5,
                 use_symm_weights=False, symm_weights='solid_angle',
                 stats_vol=None, stats_area=None, stats_dist=None):
        self.cutoff = cutoff
        self.use_symm_weights = use_symm_weights
        self.symm_weights = symm_weights
        # Deepcopy caller-supplied stat lists so instances never share them.
        self.stats_vol = ['mean', 'std_dev', 'minimum', 'maximum'] \
            if stats_vol is None else copy.deepcopy(stats_vol)
        self.stats_area = ['mean', 'std_dev', 'minimum', 'maximum'] \
            if stats_area is None else copy.deepcopy(stats_area)
        self.stats_dist = ['mean', 'std_dev', 'minimum', 'maximum'] \
            if stats_dist is None else copy.deepcopy(stats_dist)
    def featurize(self, struct, idx):
        """
        Get Voronoi fingerprints of site with given index in input structure.
        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure.
        Returns:
            (list of floats): Voronoi fingerprints.
                -Voronoi indices
                -i-fold symmetry indices
                -weighted i-fold symmetry indices (if use_symm_weights = True)
                -Voronoi volume
                -Voronoi volume statistics
                -Voronoi area
                -Voronoi area statistics
                -Voronoi dist statistics
        """
        # Get the nearest neighbors using a Voronoi tessellation
        n_w = get_nearest_neighbors(VoronoiNN(cutoff=self.cutoff), struct, idx)
        # Prepare storage for the Voronoi indices (facets with 3-10 edges)
        voro_idx_list = np.zeros(8, int)
        voro_idx_weights = np.zeros(8)
        vol_list = []
        area_list = []
        dist_list = []
        # Get statistics
        for nn in n_w:
            if nn['poly_info']['n_verts'] <= 10:
                # If a facet has more than 10 edges, it's skipped here.
                voro_idx_list[nn['poly_info']['n_verts'] - 3] += 1
                vol_list.append(nn['poly_info']['volume'])
                area_list.append(nn['poly_info']['area'])
                dist_list.append(nn['poly_info']['face_dist'] * 2)
                if self.use_symm_weights:
                    voro_idx_weights[nn['poly_info']['n_verts'] - 3] += \
                        nn['poly_info'][self.symm_weights]
        # Normalize counts (and weights) into symmetry indices
        symm_idx_list = voro_idx_list / sum(voro_idx_list)
        if self.use_symm_weights:
            symm_wt_list = voro_idx_weights / sum(voro_idx_weights)
            voro_fps = list(np.concatenate((voro_idx_list, symm_idx_list,
                                            symm_wt_list), axis=0))
        else:
            voro_fps = list(np.concatenate((voro_idx_list,
                                            symm_idx_list), axis=0))
        # Totals followed by the configured per-facet statistics
        voro_fps.append(sum(vol_list))
        voro_fps.append(sum(area_list))
        voro_fps += [PropertyStats().calc_stat(vol_list, stat_vol)
                     for stat_vol in self.stats_vol]
        voro_fps += [PropertyStats().calc_stat(area_list, stat_area)
                     for stat_area in self.stats_area]
        voro_fps += [PropertyStats().calc_stat(dist_list, stat_dist)
                     for stat_dist in self.stats_dist]
        return voro_fps
    def feature_labels(self):
        """Return labels matching the featurize output order."""
        labels = ['Voro_index_%d' % i for i in range(3, 11)]
        labels += ['Symmetry_index_%d' % i for i in range(3, 11)]
        if self.use_symm_weights:
            labels += ['Symmetry_weighted_index_%d' % i for i in range(3, 11)]
        labels.append('Voro_vol_sum')
        labels.append('Voro_area_sum')
        labels += ['Voro_vol_%s' % stat_vol for stat_vol in self.stats_vol]
        labels += ['Voro_area_%s' % stat_area for stat_area in self.stats_area]
        labels += ['Voro_dist_%s' % stat_dist for stat_dist in self.stats_dist]
        return labels
    def citations(self):
        citation = ['@book{okabe1992spatial, '
                    'title = {Spatial tessellations}, '
                    'author = {<NAME>}, '
                    'year = {1992}, '
                    'publisher = {Wiley Online Library}}']
        return citation
    def implementors(self):
        return ['<NAME>']
class ChemicalSRO(BaseFeaturizer):
    """
    Chemical short range ordering, deviation of local site and nominal structure compositions
    Chemical SRO features to evaluate the deviation
    of local chemistry with the nominal composition of the structure.
    A local bonding preference is computed using
    f_el = N_el/(sum of N_el) - c_el,
    where N_el is the number of each element type in the neighbors around
    the target site, sum of N_el is the sum of all possible element types
    (coordination number), and c_el is the composition of the specific
    element in the entire structure.
    A positive f_el indicates the "bonding" with the specific element
    is favored, at least in the target site;
    A negative f_el indicates the "bonding" is not favored, at least
    in the target site.
    Note that ChemicalSRO is only featurized for elements identified by
    "fit" (see following), thus "fit" must be called before "featurize",
    or else an error will be raised.
    Features:
        CSRO__[nn method]_[element] - The Chemical SRO of a site computed based
            on neighbors determined with a certain  NN-detection method for
            a certain element.
    """
    def __init__(self, nn, includes=None, excludes=None, sort=True):
        """Initialize the featurizer
        Args:
            nn (NearestNeighbor): instance of one of pymatgen's NearestNeighbor
                                  classes.
            includes (array-like or str): elements included to calculate CSRO.
            excludes (array-like or str): elements excluded to calculate CSRO.
            sort (bool): whether to sort elements by mendeleev number."""
        self.nn = nn
        # Normalize includes/excludes to lists of element symbols; a falsy
        # value (None or empty) disables the corresponding filter.
        self.includes = includes
        if self.includes:
            self.includes = [Element(el).symbol
                             for el in np.atleast_1d(self.includes)]
        self.excludes = excludes
        if self.excludes:
            self.excludes = [Element(el).symbol
                             for el in np.atleast_1d(self.excludes)]
        self.sort = sort
        # Fitted state, populated by fit(); trailing underscore follows the
        # scikit-learn convention checked by check_is_fitted.
        self.el_list_ = None
        self.el_amt_dict_ = None
    @staticmethod
    def from_preset(preset, **kwargs):
        """
        Use one of the standard instances of a given NearNeighbor class.
        Args:
            preset (str): preset type ("VoronoiNN", "JmolNN",
                          "MiniumDistanceNN", "MinimumOKeeffeNN",
                          or "MinimumVIRENN").
            **kwargs: allow to pass args to the NearNeighbor class.
        Returns:
            ChemicalSRO from a preset.
        """
        nn_ = getattr(pymatgen.analysis.local_env, preset)
        return ChemicalSRO(nn_(**kwargs))
    def fit(self, X, y=None):
        """
        Identify elements to be included in the following featurization,
        by intersecting the elements present in the passed structures with
        those explicitly included (or excluded) in __init__. Only elements
        in the self.el_list_ will be featurized.
        Besides, compositions of the passed structures will also be "stored"
        in a dict of self.el_amt_dict_, avoiding repeated calculation of
        composition when featurizing multiple sites in the same structure.
        Args:
            X (array-like): containing Pymatgen structures and sites, supports
                            multiple choices:
                            -2D array-like object:
                             e.g. [[struct, site], [struct, site], …]
                                  np.array([[struct, site], [struct, site], …])
                            -Pandas dataframe:
                             e.g. df[['struct', 'site']]
            y : unused (added for consistency with overridden method signature)
        Returns:
            self
        """
        structs = np.atleast_2d(X)[:, 0]
        if not all([isinstance(struct, Structure) for struct in structs]):
            raise TypeError("This fit requires an array-like input of Pymatgen "
                            "Structures and sites!")
        self.el_amt_dict_ = {}
        el_set_ = set()
        for s in structs:
            # str(s) is used as a cache key so repeated structures are
            # only analyzed once.
            if str(s) not in self.el_amt_dict_.keys():
                el_amt_ = s.composition.fractional_composition.get_el_amt_dict()
                els_ = set(el_amt_.keys()) if self.includes is None \
                    else set([el for el in el_amt_.keys()
                              if el in self.includes])
                els_ = els_ if self.excludes is None \
                    else els_ - set(self.excludes)
                if els_:
                    self.el_amt_dict_[str(s)] = el_amt_
                el_set_ = el_set_ | els_
        self.el_list_ = sorted(list(el_set_), key=lambda el:
                               Element(el).mendeleev_no) if self.sort else list(el_set_)
        return self
    def featurize(self, struct, idx):
        """
        Get CSRO features of site with given index in input structure.
        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure.
        Returns:
            (list of floats): Chemical SRO features for each element.
        """
        check_is_fitted(self, ['el_amt_dict_', 'el_list_'])
        # Default to 0 for elements not present around this structure.
        csro = [0.]*len(self.el_list_)
        if str(struct) in self.el_amt_dict_.keys():
            el_amt = self.el_amt_dict_[str(struct)]
            nn_el_amt = dict.fromkeys(el_amt, 0)
            nn_list = self.nn.get_nn(struct, idx)
            for nn in nn_list:
                if str(nn.specie.symbol) in self.el_list_:
                    nn_el_amt[str(nn.specie.symbol)] += 1/len(nn_list)
            # f_el = local neighbor fraction minus nominal composition.
            for el in el_amt.keys():
                if el in self.el_list_:
                    csro[self.el_list_.index(el)] = nn_el_amt[el] - el_amt[el]
        return csro
    def feature_labels(self):
        """Return 'CSRO_<element>_<NN class>' labels; requires a prior fit()."""
        check_is_fitted(self, ['el_amt_dict_', 'el_list_'])
        return ['CSRO_{}_{}'.format(el, self.nn.__class__.__name__)
                for el in self.el_list_]
    def citations(self):
        citations = []
        if self.nn.__class__.__name__ == 'VoronoiNN':
            citations.append('@article{voronoi_jreineangewmath_1908, title={'
                'Nouvelles applications des param\\`{e}tres continus \\`{a} la '
                'th\'{e}orie des formes quadratiques. Sur quelques '
                'propri\'{e}t\'{e}s des formes quadratiques positives'
                ' parfaites}, journal={Journal f\"ur die reine und angewandte '
                'Mathematik}, number={133}, pages={97-178}, year={1908}}')
            citations.append('@article{dirichlet_jreineangewmath_1850, title={'
                '\"{U}ber die Reduction der positiven quadratischen Formen '
                'mit drei unbestimmten ganzen Zahlen}, journal={Journal '
                'f\"ur die reine und angewandte Mathematik}, number={40}, '
                'pages={209-227}, doi={10.1515/crll.1850.40.209}, year={1850}}')
        if self.nn.__class__.__name__ == 'JmolNN':
            citations.append('@misc{jmol, title = {Jmol: an open-source Java '
                'viewer for chemical structures in 3D}, howpublished = {'
                '\\url{http://www.jmol.org/}}}')
        if self.nn.__class__.__name__ == 'MinimumOKeeffeNN':
            citations.append('@article{okeeffe_jamchemsoc_1991, title={Atom '
                'sizes and bond lengths in molecules and crystals}, journal='
                '{Journal of the American Chemical Society}, author={'
                '<NAME>. and <NAME>.}, number={113}, pages={'
                '3226-3229}, doi={doi:10.1021/ja00009a002}, year={1991}}')
        if self.nn.__class__.__name__ == 'MinimumVIRENN':
            citations.append('@article{shannon_actacryst_1976, title={'
                'Revised effective ionic radii and systematic studies of '
                'interatomic distances in halides and chalcogenides}, '
                'journal={Acta Crystallographica}, author={<NAME>.}, '
                'number={A32}, pages={751-767}, doi={'
                '10.1107/S0567739476001551}, year={1976}')
        if self.nn.__class__.__name__ in [
                'MinimumDistanceNN', 'MinimumOKeeffeNN', 'MinimumVIRENN']:
            citations.append('@article{zimmermann_frontmater_2017, '
                'title={Assessing local structure motifs using order '
                'parameters for motif recognition, interstitial '
                'identification, and diffusion path characterization}, '
                'journal={Frontiers in Materials}, author={Zimmermann, '
                '<NAME>. and <NAME>. and <NAME>. and <NAME>.}, '
                'number={4:34}, doi={10.3389/fmats.2017.00034}, year={2017}}')
        return citations
    def implementors(self):
        return ['Qi Wang']
class GaussianSymmFunc(BaseFeaturizer):
    """
    Gaussian symmetry function features suggested by Behler et al.

    The functions are based on pair distances and angles and approximate the
    functional dependence of local energies on the local atomic environment,
    as originally used in the fitting of machine-learning interatomic
    potentials. The symmetry functions are divided into a set of radial
    functions (g2) and a set of angular functions (g4). The number of
    features returned is
    ``len(etas_g2) + len(etas_g4) * len(zetas_g4) * len(gammas_g4)``.
    See the original paper for more details:
    "Atom-centered symmetry functions for constructing high-dimensional
    neural network potentials", J Chem Phys 134, 074106 (2011).
    The cutoff function is taken as the polynomial form (cosine_cutoff)
    to give a smoothed truncation.
    A Fortran and a different Python version can be found in the code
    Amp: Atomistic Machine-learning Package
    (https://bitbucket.org/andrewpeterson/amp).
    Args:
        etas_g2 (list of floats): etas used in radial functions.
            (default: [0.05, 4., 20., 80.])
        etas_g4 (list of floats): etas used in angular functions.
            (default: [0.005])
        zetas_g4 (list of floats): zetas used in angular functions.
            (default: [1., 4.])
        gammas_g4 (list of floats): gammas used in angular functions.
            (default: [+1., -1.])
        cutoff (float): cutoff distance. (default: 6.5)
    """
    def __init__(self, etas_g2=None, etas_g4=None, zetas_g4=None,
                 gammas_g4=None, cutoff=6.5):
        # NOTE: falsy inputs (e.g. an empty list) also fall back to defaults.
        self.etas_g2 = etas_g2 if etas_g2 else [0.05, 4., 20., 80.]
        self.etas_g4 = etas_g4 if etas_g4 else [0.005]
        self.zetas_g4 = zetas_g4 if zetas_g4 else [1., 4.]
        self.gammas_g4 = gammas_g4 if gammas_g4 else [+1., -1.]
        self.cutoff = cutoff
    @staticmethod
    def cosine_cutoff(rs, cutoff):
        """
        Polynomial cutoff function to give a smoothed truncation of the Gaussian
        symmetry functions.
        Args:
            rs (ndarray): distances to neighboring elements.
            cutoff (float): cutoff distance.
        Returns:
            (ndarray) cutoff function values; 1 at r=0, smoothly decaying to 0
            at r=cutoff, and exactly 0 beyond the cutoff.
        """
        # 0.5 * (cos(pi*r/r_c) + 1) decays smoothly from 1 to 0 over [0, r_c]
        cutoff_fun = 0.5 * (np.cos(np.pi * rs / cutoff) + 1.)
        # Zero out any contribution from atoms beyond the cutoff radius
        cutoff_fun[rs > cutoff] = 0
        return cutoff_fun
    @staticmethod
    def g2(eta, rs, cutoff):
        """
        Gaussian radial symmetry function of the center atom,
        given an eta parameter.
        Args:
            eta (float): radial function parameter.
            rs (ndarray): distances from the central atom to each neighbor.
            cutoff (float): cutoff distance.
        Returns:
            (float) Gaussian radial symmetry function (sum over neighbors).
        """
        # Gaussian of each distance, damped by the smooth cutoff, summed over
        # all neighbors. Note eta is scaled by cutoff**2 (dimensionless form).
        ridge = (np.exp(-eta * (rs ** 2.) / (cutoff ** 2.)) *
                 GaussianSymmFunc.cosine_cutoff(rs, cutoff))
        return ridge.sum()
    @staticmethod
    def g4(etas, zetas, gammas, neigh_dist, neigh_coords, cutoff):
        """
        Gaussian angular symmetry functions of the center atom,
        for all combinations of eta, zeta and gamma parameters.
        Args:
            etas ([float]): angular function parameters.
            zetas ([float]): angular function parameters.
            gammas ([float]): angular function parameters.
            neigh_dist (ndarray): distances from the central atom to each
                neighbor (same ordering as ``neigh_coords``).
            neigh_coords (ndarray): coordinates of neighboring atoms, with
                respect to the central atom.
            cutoff (float): cutoff parameter.
        Returns:
            (ndarray) Gaussian angular symmetry function for every
            (eta, zeta, gamma) combination, ordered eta-major then zeta then
            gamma — the same ordering used by ``feature_labels``.
        """
        output = np.zeros((len(etas)*len(zetas)*len(gammas),))
        # Loop over each neighbor j; pairs (j, k) are enumerated with k > j so
        # each unordered pair is visited exactly once.
        for j, neigh_j in enumerate(neigh_coords):
            # Compute the distance of each neighbor (k) to r
            r_ij = neigh_dist[j]
            d_jk = neigh_coords[(j+1):] - neigh_coords[j]
            r_jk = np.linalg.norm(d_jk, 2, axis=1)
            r_ik = neigh_dist[(j+1):]
            # Compute the cosine of the angle at the central atom between the
            # i->j and i->k bond vectors (vectorized over all k > j)
            cos_theta = np.dot(neigh_coords[(j + 1):], neigh_coords[j]) / r_ij / r_ik
            # Compute the cutoff function (independent of eta/zeta/gamma)
            cutoff_fun = GaussianSymmFunc.cosine_cutoff(np.array([r_ij]), cutoff) * \
                         GaussianSymmFunc.cosine_cutoff(r_ik, cutoff) * \
                         GaussianSymmFunc.cosine_cutoff(r_jk, cutoff)
            # Compute the g4 for each combination of eta/gamma/zeta
            ind = 0
            for eta in etas:
                # The eta term depends only on distances, so hoist it out of
                # the zeta/gamma loops
                eta_term = np.exp(-eta * (r_ij ** 2. + r_ik ** 2. + r_jk ** 2.) /
                                  (cutoff ** 2.)) * cutoff_fun
                for zeta in zetas:
                    for gamma in gammas:
                        term = (1. + gamma * cos_theta) ** zeta * eta_term
                        # 2**(1-zeta) is the conventional normalization factor
                        output[ind] += term.sum() * 2. ** (1. - zeta)
                        ind += 1
        return output
    def featurize(self, struct, idx):
        """
        Get Gaussian symmetry function features of site with given index
        in input structure.
        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure.
        Returns:
            (list of floats): Gaussian symmetry function features, all G2
            values followed by all G4 values.
        """
        gaussian_funcs = []
        # Get the neighbors within the cutoff
        neighbors = struct.get_neighbors(struct[idx], self.cutoff)
        # Get coordinates of the neighbors, relative to the central atom
        neigh_coords = np.subtract([neigh[0].coords for neigh in neighbors], struct[idx].coords)
        # Get the distances for later use
        neigh_dists = np.array([neigh[1] for neigh in neighbors])
        # Compute all G2 (radial) features, one per eta
        for eta_g2 in self.etas_g2:
            gaussian_funcs.append(self.g2(eta_g2, neigh_dists, self.cutoff))
        # Compute all G4s (angular) features in one vectorized pass
        gaussian_funcs.extend(GaussianSymmFunc.g4(self.etas_g4, self.zetas_g4, self.gammas_g4,
                                                  neigh_dists, neigh_coords, self.cutoff))
        return gaussian_funcs
    def feature_labels(self):
        # Label ordering must mirror featurize(): G2 per eta, then G4 in
        # eta-major / zeta / gamma order (same nesting as in g4()).
        return ['G2_{}'.format(eta_g2) for eta_g2 in self.etas_g2] + \
               ['G4_{}_{}_{}'.format(eta_g4, zeta_g4, gamma_g4)
                for eta_g4 in self.etas_g4
                for zeta_g4 in self.zetas_g4
                for gamma_g4 in self.gammas_g4]
    def citations(self):
        gsf_citation = (
            '@Article{Behler2011, author = {<NAME>}, '
            'title = {Atom-centered symmetry functions for constructing '
            'high-dimensional neural network potentials}, '
            'journal = {The Journal of Chemical Physics}, year = {2011}, '
            'volume = {134}, number = {7}, pages = {074106}, '
            'doi = {10.1063/1.3553717}}')
        amp_citation = (
            '@Article{Khorshidi2016, '
            'author = {<NAME> and <NAME>}, '
            'title = {Amp : A modular approach to machine learning in '
            'atomistic simulations}, '
            'journal = {Computer Physics Communications}, year = {2016}, '
            'volume = {207}, pages = {310--324}, '
            'doi = {10.1016/j.cpc.2016.05.010}}')
        return [gsf_citation, amp_citation]
    def implementors(self):
        return ['<NAME>']
class EwaldSiteEnergy(BaseFeaturizer):
    """
    Compute site energy from Coulombic interactions

    User notes:
        - This class uses the charges that are already defined for the structure.
        - Ewald summations can be expensive. If you are evaluating every site in
          many large structures, run all of the sites for each structure at the
          same time. The Ewald result for the most-recently-seen structure is
          cached, so looping over sites and then structures is faster than
          looping over structures and then sites.
    Features:
        ewald_site_energy - Energy for the site computed from Coulombic interactions"""
    def __init__(self, accuracy=None):
        """
        Args:
            accuracy (int): Accuracy of Ewald summation, number of decimal places
        """
        self.accuracy = accuracy
        # Single-entry cache: the last structure seen and its Ewald summation
        self._cached_structure = None
        self._cached_ewald = None
    def featurize(self, strc, idx):
        """
        Args:
            strc (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure.
        Returns:
            ([float]) - Electrostatic energy of the site
        """
        # Identity check ('is') rather than structure equality for speed;
        # recompute the summation only when a new structure object arrives.
        if strc is not self._cached_structure:
            self._cached_structure = strc
            self._cached_ewald = EwaldSummation(strc, acc_factor=self.accuracy)
        return [self._cached_ewald.get_site_energy(idx)]
    def feature_labels(self):
        return ["ewald_site_energy"]
    def implementors(self):
        return ["<NAME>"]
    def citations(self):
        return ["@Article{Ewald1921,"
                "author = {<NAME>.},"
                "doi = {10.1002/andp.19213690304},"
                "issn = {00033804},"
                "journal = {Annalen der Physik},"
                "number = {3},"
                "pages = {253--287},"
                "title = {{Die Berechnung optischer und elektrostatischer Gitterpotentiale}},"
                "url = {http://doi.wiley.com/10.1002/andp.19213690304},"
                "volume = {369},"
                "year = {1921}"
                "}"]
class ChemEnvSiteFingerprint(BaseFeaturizer):
    """
    Resemblance of given sites to ideal environments

    Site fingerprint computed from pymatgen's ChemEnv package
    that provides resemblance percentages of a given site
    to ideal environments.
    Args:
        cetypes ([str]): chemical environments (CEs) to be
            considered.
        strategy (ChemenvStrategy): ChemEnv neighbor-finding strategy.
        geom_finder (LocalGeometryFinder): ChemEnv local geometry finder.
        max_csm (float): maximum continuous symmetry measure (CSM;
            default of 8 taken from chemenv). Note that any CSM
            larger than max_csm will be set to max_csm in order
            to avoid negative values (i.e., all features are
            constrained to be between 0 and 1).
        max_dist_fac (float): maximum distance factor (default: 1.41).
    """
    @staticmethod
    def from_preset(preset):
        """
        Use a standard collection of CE types and
        choose your ChemEnv neighbor-finding strategy.
        Args:
            preset (str): preset types ("simple" or
                "multi_weights").
        Returns:
            ChemEnvSiteFingerprint object from a preset.
        """
        cetypes = [
            'S:1', 'L:2', 'A:2', 'TL:3', 'TY:3', 'TS:3', 'T:4', 'S:4',
            'SY:4', 'SS:4', 'PP:5', 'S:5', 'T:5', 'O:6', 'T:6', 'PP:6',
            'PB:7', 'ST:7', 'ET:7', 'FO:7', 'C:8', 'SA:8', 'SBT:8',
            'TBT:8', 'DD:8', 'DDPN:8', 'HB:8', 'BO_1:8', 'BO_2:8',
            'BO_3:8', 'TC:9', 'TT_1:9', 'TT_2:9', 'TT_3:9', 'HD:9',
            'TI:9', 'SMA:9', 'SS:9', 'TO_1:9', 'TO_2:9', 'TO_3:9',
            'PP:10', 'PA:10', 'SBSA:10', 'MI:10', 'S:10', 'H:10',
            'BS_1:10', 'BS_2:10', 'TBSA:10', 'PCPA:11', 'H:11',
            'SH:11', 'CO:11', 'DI:11', 'I:12', 'PBP:12', 'TT:12',
            'C:12', 'AC:12', 'SC:12', 'S:12', 'HP:12', 'HA:12',
            'SH:13', 'DD:20']
        lgf = LocalGeometryFinder()
        lgf.setup_parameters(
            centering_type='centroid',
            include_central_site_in_centroid=True,
            structure_refinement=lgf.STRUCTURE_REFINEMENT_NONE)
        # Pick the neighbor-finding strategy matching the requested preset
        if preset == "simple":
            strategy = SimplestChemenvStrategy(
                distance_cutoff=1.4, angle_cutoff=0.3)
        elif preset == "multi_weights":
            strategy = MultiWeightsChemenvStrategy.stats_article_weights_parameters()
        else:
            raise RuntimeError('unknown neighbor-finding strategy preset.')
        return ChemEnvSiteFingerprint(cetypes, strategy, lgf)
    def __init__(self, cetypes, strategy, geom_finder, max_csm=8,
                 max_dist_fac=1.41):
        self.cetypes = tuple(cetypes)
        self.strat = strategy
        self.lgf = geom_finder
        self.max_csm = max_csm
        self.max_dist_fac = max_dist_fac
    def featurize(self, struct, idx):
        """
        Get ChemEnv fingerprint of site with given index in input
        structure.
        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure struct.
        Returns:
            (numpy array): resemblance fraction of target site to ideal
                local environments.
        """
        self.lgf.setup_structure(structure=struct)
        env_data = self.lgf.compute_structure_environments(
            only_indices=[idx],
            maximum_distance_factor=self.max_dist_fac)
        resemblances = []
        for ce in self.cetypes:
            try:
                csms = env_data.get_csms(idx, ce)
                # No CSM available for this CE type -> worst-case value
                csm = csms[0]['symmetry_measure'] if len(csms) != 0 \
                    else self.max_csm
                # Clamp to max_csm so the resemblance stays in [0, 1]
                csm = csm if csm < self.max_csm else self.max_csm
                resemblances.append(1 - csm / self.max_csm)
            except IndexError:
                resemblances.append(0)
        return np.array(resemblances)
    def feature_labels(self):
        return list(self.cetypes)
    def citations(self):
        return ['@article{waroquiers_chemmater_2017, '
                'title={Statistical analysis of coordination environments '
                'in oxides}, journal={Chemistry of Materials},'
                'author={<NAME>. and <NAME>.'
                'and <NAME>. and <NAME>. and <NAME>. '
                'and <NAME>. and <NAME>. and <NAME>. '
                'and <NAME>. and <NAME>.}, year={2017}}']
    def implementors(self):
        return ['<NAME>. <NAME>']
class CoordinationNumber(BaseFeaturizer):
    """
    Number of first nearest neighbors of a site.

    Determines the number of nearest neighbors of a site using one of
    pymatgen's NearNeighbor classes. These nearest neighbor calculators
    can return weights related to the proximity of each neighbor to this
    site. It is possible to take these weights into account to prevent
    the coordination number from changing discontinuously with small
    perturbations of a structure, either by summing the total weights
    or using the normalization method presented by
    [Ward et al.](http://link.aps.org/doi/10.1103/PhysRevB.96.014107)
    Features:
        CN_[method] - Coordination number computed using a certain method
            for calculating nearest neighbors.
    """
    @staticmethod
    def from_preset(preset, **kwargs):
        """
        Use one of the standard instances of a given NearNeighbor class.
        Args:
            preset (str): preset type ("VoronoiNN", "JmolNN",
                "MiniumDistanceNN", "MinimumOKeeffeNN",
                or "MinimumVIRENN").
            **kwargs: allow to pass args to the NearNeighbor class.
        Returns:
            CoordinationNumber from a preset.
        """
        # Look the class up by name in pymatgen's local_env module
        nn_ = getattr(pymatgen.analysis.local_env, preset)
        return CoordinationNumber(nn_(**kwargs))
    def __init__(self, nn=None, use_weights='none'):
        """Initialize the featurizer
        Args:
            nn (NearestNeighbor) - Method used to determine coordination number
            use_weights (string) - Method used to account for weights of neighbors:
                'none' - Do not use weights when computing coordination number
                'sum' - Use sum of weights as the coordination number
                'effective' - Compute the 'effective coordination number', which
                    is computed as :math:`\\frac{(\\sum_n w_n)^2}{\\sum_n w_n^2}`
        """
        self.nn = nn or VoronoiNN()
        self.use_weights = use_weights
    def featurize(self, struct, idx):
        """
        Get coordination number of site with given index in input
        structure.
        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure struct.
        Returns:
            [float] - Coordination number
        Raises:
            ValueError: if ``use_weights`` is not a recognized option.
        """
        if self.use_weights is None or self.use_weights == 'none':
            return [self.nn.get_cn(struct, idx, use_weights=False)]
        elif self.use_weights == 'sum':
            return [self.nn.get_cn(struct, idx, use_weights=True)]
        elif self.use_weights == 'effective':
            # TODO: Should this weighting code go in pymatgen? I'm not sure if it even necessary to distinguish it from the 'sum' method -lw
            nns = get_nearest_neighbors(self.nn, struct, idx)
            weights = [n['weight'] for n in nns]
            # Effective CN of Ward et al.: (sum w)^2 / sum(w^2)
            return [np.sum(weights) ** 2 / np.sum(np.power(weights, 2))]
        else:
            raise ValueError('Weighting method not recognized: ' + str(self.use_weights))
    def feature_labels(self):
        # TODO: Should names contain weighting scheme? -lw
        return ['CN_{}'.format(self.nn.__class__.__name__)]
    def citations(self):
        # Citations depend on which nearest-neighbor method is in use
        citations = []
        if self.nn.__class__.__name__ == 'VoronoiNN':
            citations.append('@article{voronoi_jreineangewmath_1908, title={'
                'Nouvelles applications des param\\`{e}tres continus \\`{a} la '
                'th\'{e}orie des formes quadratiques. Sur quelques '
                'propri\'{e}t\'{e}s des formes quadratiques positives'
                ' parfaites}, journal={Journal f\"ur die reine und angewandte '
                'Mathematik}, number={133}, pages={97-178}, year={1908}}')
            citations.append('@article{dirichlet_jreineangewmath_1850, title={'
                '\"{U}ber die Reduction der positiven quadratischen Formen '
                'mit drei unbestimmten ganzen Zahlen}, journal={Journal '
                'f\"ur die reine und angewandte Mathematik}, number={40}, '
                'pages={209-227}, doi={10.1515/crll.1850.40.209}, year={1850}}')
        if self.nn.__class__.__name__ == 'JmolNN':
            citations.append('@misc{jmol, title = {Jmol: an open-source Java '
                'viewer for chemical structures in 3D}, howpublished = {'
                '\\url{http://www.jmol.org/}}}')
        if self.nn.__class__.__name__ == 'MinimumOKeeffeNN':
            citations.append('@article{okeeffe_jamchemsoc_1991, title={Atom '
                'sizes and bond lengths in molecules and crystals}, journal='
                '{Journal of the American Chemical Society}, author={'
                '<NAME>. and <NAME>.}, number={113}, pages={'
                '3226-3229}, doi={doi:10.1021/ja00009a002}, year={1991}}')
        if self.nn.__class__.__name__ == 'MinimumVIRENN':
            # Fixed: the entry previously lacked its closing brace,
            # producing malformed BibTeX
            citations.append('@article{shannon_actacryst_1976, title={'
                'Revised effective ionic radii and systematic studies of '
                'interatomic distances in halides and chalcogenides}, '
                'journal={Acta Crystallographica}, author={<NAME>.}, '
                'number={A32}, pages={751-767}, doi={'
                '10.1107/S0567739476001551}, year={1976}}')
        if self.nn.__class__.__name__ in [
                'MinimumDistanceNN', 'MinimumOKeeffeNN', 'MinimumVIRENN']:
            citations.append('@article{zimmermann_frontmater_2017, '
                'title={Assessing local structure motifs using order '
                'parameters for motif recognition, interstitial '
                'identification, and diffusion path characterization}, '
                'journal={Frontiers in Materials}, author={Zimmermann, '
                '<NAME>. and <NAME>. and <NAME>. and <NAME>.}, '
                'number={4:34}, doi={10.3389/fmats.2017.00034}, year={2017}}')
        return citations
    def implementors(self):
        return ['<NAME>', '<NAME>']
class GeneralizedRadialDistributionFunction(BaseFeaturizer):
    """
    Compute the general radial distribution function (GRDF) for a site.

    The GRDF is a radial measure of crystal order around a site. There are two
    featurizing modes:
    1. GRDF: (recommended) - n_bins length vector
        In GRDF mode, The GRDF is computed by considering all sites around a
        central site (i.e., no sites are omitted when computing the GRDF). The
        features output from this mode will be vectors with length n_bins.
    2. pairwise GRDF: (advanced users) - n_bins x n_sites matrix
        In this mode, GRDFs are still computed around a central site, but
        only one other site (and their translational equivalents) are used to
        compute a GRDF (e.g. site 1 with site 2 and the translational
        equivalents of site 2). This results in a n_sites x n_bins matrix of
        features. Requires `fit` for determining the max number of sites.
    The GRDF is a generalization of the partial radial distribution function
    (PRDF). In contrast with the PRDF, the bins of the GRDF are not mutually-
    exclusive and need not carry a constant weight of 1. The PRDF is a case of
    the GRDF when the bins are rectangular functions. Examples of other
    functions to use with the GRDF are Gaussian, trig, and Bessel functions.
    See :func:`~matminer.featurizers.utils.grdf` for a full list of available binning functions.
    There are two preset conditions:
        gaussian: bin functions are gaussians
        histogram: bin functions are rectangular functions
    Args:
        bins: ([AbstractPairwise]) List of pairwise binning functions. Each of these functions
            must implement the AbstractPairwise class.
        cutoff: (float) maximum distance to look for neighbors
        mode: (str) the featurizing mode. supported options are:
            'GRDF' and 'pairwise_GRDF'
    """
    def __init__(self, bins, cutoff=20.0, mode='GRDF'):
        self.bins = bins
        self.cutoff = cutoff
        if mode not in ['GRDF', 'pairwise_GRDF']:
            raise AttributeError('{} is not a valid GRDF mode. try '
                                 '"GRDF" or "pairwise_GRDF"'.format(mode))
        else:
            self.mode = mode
        self.fit_labels = None
    def fit(self, X, y=None, **fit_kwargs):
        """
        Determine the maximum number of sites in X to assign correct feature
        labels
        Args:
            X - [list of tuples], training data
                tuple values should be (struc, idx)
        Returns:
            self
        """
        max_sites = max(len(entry[0]._sites) for entry in X)
        # Labels are ordered site-major then bin, to match the order in which
        # featurize() emits its values (per-site bin counts, extended site by
        # site). The previous bin-major ordering misaligned labels & features.
        self.fit_labels = ['site2 {} {}'.format(i, bin_f.name())
                           for i in range(max_sites)
                           for bin_f in self.bins]
        return self
    def featurize(self, struct, idx):
        """
        Get GRDF of the input structure.
        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure struct.
        Returns:
            Flattened list of GRDF values. For each run mode the list order is:
                GRDF:          bin#
                pairwise GRDF: site2# bin#
            The site2# corresponds to a pymatgen site index and bin#
            corresponds to one of the bin functions
        """
        if not struct.is_ordered:
            raise ValueError("Disordered structure support not built yet")
        # Get list of neighbors by site
        # Indexing is [site#][neighbor#][pymatgen Site, distance, site index]
        sites = struct._sites
        central_site = sites[idx]
        neighbors_lst = struct.get_neighbors(central_site, self.cutoff,
                                             include_index=True)
        site_indices = range(len(sites))
        # Generate lists of pairwise distances according to run mode
        if self.mode == 'GRDF':
            # Make a single distance collection
            distance_collection = [[neighbor[1] for neighbor in neighbors_lst]]
        else:
            # Make pairwise distance collections for pairwise GRDF
            distance_collection = [
                [neighbor[1] for neighbor in neighbors_lst
                 if neighbor[2] == site_idx] for site_idx in site_indices]
        # Compute bin counts for each list of pairwise distances
        # (bin_f is used rather than 'bin' to avoid shadowing the builtin)
        bin_counts = []
        for values in distance_collection:
            bin_counts.append([sum(bin_f(values)) for bin_f in self.bins])
        # Compute "volume" of each bin to normalize GRDFs
        volumes = [bin_f.volume(self.cutoff) for bin_f in self.bins]
        # Normalize the bin counts by the bin volume to compute features
        features = []
        for values in bin_counts:
            features.extend(np.array(values) / np.array(volumes))
        return features
    def feature_labels(self):
        if self.mode == 'GRDF':
            return [bin_f.name() for bin_f in self.bins]
        else:
            if self.fit_labels:
                return self.fit_labels
            else:
                raise AttributeError('the fit method must be called first, to '
                                     'determine the correct feature labels.')
    @staticmethod
    def from_preset(preset, width=1.0, spacing=1.0, cutoff=10, mode='GRDF'):
        """
        Preset bin functions for this featurizer. Example use:
            >>> GRDF = GeneralizedRadialDistributionFunction.from_preset('gaussian')
            >>> GRDF.featurize(struct, idx)
        Args:
            preset (str): shape of bin (either 'gaussian' or 'histogram')
            width (float): bin width. std dev for gaussian, width for histogram
            spacing (float): the spacing between bin centers
            cutoff (float): maximum distance to look for neighbors
            mode (str): featurizing mode. either 'GRDF' or 'pairwise_GRDF'
        """
        # Generate evenly-spaced bin functions spanning [0, cutoff)
        if preset == "gaussian":
            bins = [Gaussian(width, center)
                    for center in np.arange(0., cutoff, spacing)]
        elif preset == "histogram":
            bins = [Histogram(start, width)
                    for start in np.arange(0, cutoff, spacing)]
        else:
            raise ValueError('Not a valid preset condition.')
        return GeneralizedRadialDistributionFunction(bins, cutoff=cutoff, mode=mode)
    def citations(self):
        return ['@article{PhysRevB.95.144110, title = {Representation of compo'
                'unds for machine-learning prediction of physical properties},'
                ' author = {<NAME> and <NAME> and Nakayama, '
                'Keita and Takahashi, Akira and Tanaka, Isao},'
                'journal = {Phys. Rev. B}, volume = {95}, issue = {14}, '
                'pages = {144110}, year = {2017}, publisher = {American Physic'
                'al Society}, doi = {10.1103/PhysRevB.95.144110}}']
    def implementors(self):
        return ["<NAME>", "<NAME>", "<NAME>"]
class AngularFourierSeries(BaseFeaturizer):
    """
    Compute the angular Fourier series (AFS), including both angular and radial info

    The AFS is the product of pairwise distance function (g_n, g_n') between two pairs
    of atoms (sharing the common central site) and the cosine of the angle
    between the two pairs. The AFS is a 2-dimensional feature (the axes are g_n,
    g_n').
    Examples of distance functionals are square functions, Gaussian, trig
    functions, and Bessel functions. An example for Gaussian:
        lambda d: exp( -(d - d_n)**2 ), where d_n is the coefficient for g_n
    See :func:`~matminer.featurizers.utils.grdf` for a full list of available binning functions.
    There are two preset conditions:
        gaussian: bin functions are gaussians
        histogram: bin functions are rectangular functions
    Features:
        AFS ([gn], [gn']) - Angular Fourier Series between binning functions (g1 and g2)
    Args:
        bins: ([AbstractPairwise]) a list of binning functions that
            implement the AbstractPairwise base class
        cutoff: (float) maximum distance to look for neighbors. The
            featurizer will run slowly for large distance cutoffs
            because of the number of neighbor pairs scales as
            the square of the number of neighbors
    """
    def __init__(self, bins, cutoff=10.0):
        self.bins = bins
        self.cutoff = cutoff
    def featurize(self, struct, idx):
        """
        Get AFS of the input structure.
        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure struct.
        Returns:
            Flattened list of AFS values. the list order is:
                g_n g_n'
        NOTE(review): the array indexing below assumes at least two neighbors
        exist within the cutoff (permutations of size 2) — confirm behavior
        for very sparse structures.
        """
        if not struct.is_ordered:
            raise ValueError("Disordered structure support not built yet")
        # Generate list of neighbor position vectors (relative to central
        # atom) and distances from each central site as tuples
        sites = struct._sites
        central_site = sites[idx]
        neighbors_lst = struct.get_neighbors(central_site, self.cutoff)
        neighbor_collection = [
            (neighbor[0].coords - central_site.coords, neighbor[1])
            for neighbor in neighbors_lst]
        # Generate exhaustive permutations of neighbor pairs around each
        # central site (order matters). Does not allow repeat elements (i.e.
        # there are two distinct sites in every permutation)
        neighbor_tuples = itertools.permutations(neighbor_collection, 2)
        # Generate cos(theta) between neighbor pairs for each central site.
        # Also, retain data on neighbor distances for each pair
        # process with matrix algebra, we really need the speed here
        # data has shape (n_pairs, 2, 2): [pair][member][coords, distance]
        data = np.array(list(neighbor_tuples))
        # v1/v2: stacked position vectors of each pair member
        v1, v2 = np.vstack(data[:, 0, 0]), np.vstack(data[:, 1, 0])
        distances = data[:, :, 1]
        # Columns of neighbor_pairs: [cos(theta), dist1, dist2]; the cosine is
        # clipped to [-1, 1] to guard against floating-point overshoot
        neighbor_pairs = np.concatenate([
            np.clip(np.einsum('ij,ij->i', v1, v2) /
                    np.linalg.norm(v1, axis=1) /
                    np.linalg.norm(v2, axis=1), -1.0, 1.0).reshape(-1, 1),
            distances], axis=1)
        # Generate distance functional matrix (g_n, g_n')
        bin_combos = list(itertools.product(self.bins, repeat=2))
        # Compute AFS values for each element of the bin matrix
        # need to cast arrays as floats to use np.exp
        cos_angles, dist1, dist2 = neighbor_pairs[:, 0].astype(float),\
            neighbor_pairs[:, 1].astype(float),\
            neighbor_pairs[:, 2].astype(float)
        # AFS(g, g') = sum over pairs of g(d1) * g'(d2) * cos(theta)
        features = [sum(combo[0](dist1) * combo[1](dist2) *
                        cos_angles) for combo in bin_combos]
        return features
    def feature_labels(self):
        # Same (g_n, g_n') ordering as the bin_combos product in featurize()
        bin_combos = list(itertools.product(self.bins, repeat=2))
        return ['AFS ({}, {})'.format(combo[0].name(), combo[1].name())
                for combo in bin_combos]
    @staticmethod
    def from_preset(preset, width=0.5, spacing=0.5, cutoff=10):
        """
        Preset bin functions for this featurizer. Example use:
            >>> AFS = AngularFourierSeries.from_preset('gaussian')
            >>> AFS.featurize(struct, idx)
        Args:
            preset (str): shape of bin (either 'gaussian' or 'histogram')
            width (float): bin width. std dev for gaussian, width for histogram
            spacing (float): the spacing between bin centers
            cutoff (float): maximum distance to look for neighbors
        """
        # Generate bin functions
        if preset == "gaussian":
            bins = []
            for center in np.arange(0., cutoff, spacing):
                bins.append(Gaussian(width, center))
        elif preset == "histogram":
            bins = []
            for start in np.arange(0, cutoff, spacing):
                bins.append(Histogram(start, width))
        else:
            raise ValueError('Not a valid preset condition.')
        return AngularFourierSeries(bins, cutoff=cutoff)
    def citations(self):
        return ['@article{PhysRevB.95.144110, title = {Representation of compo'
                'unds for machine-learning prediction of physical properties},'
                ' author = {<NAME> and <NAME> and Nakayama, '
                'Keita and <NAME> and Tanaka, Isao},'
                'journal = {Phys. Rev. B}, volume = {95}, issue = {14}, '
                'pages = {144110}, year = {2017}, publisher = {American Physic'
                'al Society}, doi = {10.1103/PhysRevB.95.144110}}']
    def implementors(self):
        return ["<NAME>", "<NAME>"]
# TODO: Figure out whether to take NN-counting method as an option (see VoronoiFingerprint)
class LocalPropertyDifference(BaseFeaturizer):
    """
    Differences in elemental properties between site and its neighboring sites.

    Uses the Voronoi tessellation of the structure to determine the
    neighbors of the site, and assigns each neighbor (:math:`n`) a
    weight (:math:`A_n`) that corresponds to the area of the facet
    on the tessellation corresponding to that neighbor.
    The local property difference is then computed by
    :math:`\\frac{\\sum_n {A_n |p_n - p_0|}}{\\sum_n {A_n}}`
    where :math:`p_n` is the property (e.g., atomic number) of a neighbor
    and :math:`p_0` is the property of a site. If signed parameter is assigned
    True, signed difference of the properties is returned instead of absolute
    difference.
    Features:
        - "local property difference in [property]" - Weighted average
            of differences between an elemental property of a site and
            that of each of its neighbors, weighted by size of face on
            Voronoi tessellation
    References:
        `Ward et al. _PRB_ 2017 <http://link.aps.org/doi/10.1103/PhysRevB.96.024104>`_
    """
    def __init__(self, data_source=None, weight='area',
                 properties=('Electronegativity',), signed=False):
        """ Initialize the featurizer
        Args:
            data_source (AbstractData) - Class from which to retrieve
                elemental properties (defaults to a new MagpieData instance)
            weight (str) - What aspect of each voronoi facet to use to
                weigh each neighbor (see VoronoiNN)
            properties ([str]) - List of properties to use (default=['Electronegativity'])
            signed (bool) - whether to return absolute difference or signed difference of
                    properties(default=False (absolute difference))
        """
        # Avoid `data_source=MagpieData()` in the signature: defaults are
        # evaluated once at class-definition time and shared by all instances
        # (also matches the `data_source or MagpieData()` idiom used by other
        # featurizers in this module).
        self.data_source = data_source or MagpieData()
        self.properties = properties
        self.weight = weight
        self.signed = signed
    @staticmethod
    def from_preset(preset):
        """
        Create a new LocalPropertyDifference class according to a preset
        Args:
            preset (str) - Name of preset
        """
        if preset == "ward-prb-2017":
            return LocalPropertyDifference(
                data_source=MagpieData(),
                properties=["Number", "MendeleevNumber", "AtomicWeight",
                            "MeltingT", "Column", "Row", "CovalentRadius",
                            "Electronegativity", "NsValence", "NpValence",
                            "NdValence", "NfValence", "NValence", "NsUnfilled",
                            "NpUnfilled", "NdUnfilled", "NfUnfilled",
                            "NUnfilled", "GSvolume_pa", "GSbandgap",
                            "GSmagmom", "SpaceGroupNumber"]
            )
        else:
            raise ValueError('Unrecognized preset: ' + preset)
    def featurize(self, strc, idx):
        """
        Compute the (absolute or signed) property differences for one site.
        Args:
            strc (Structure): Pymatgen Structure object.
            idx (int): index of target site in the structure.
        Returns:
            (np.ndarray) one weighted-average difference per property.
        """
        # Get the targeted site
        my_site = strc[idx]
        # Get the Voronoi tessellation around the site
        nn = get_nearest_neighbors(VoronoiNN(weight=self.weight), strc, idx)
        # Get the element and facet weight of each neighbor
        elems = [n['site'].specie for n in nn]
        weights = [n['weight'] for n in nn]
        # Compute the weighted-average difference for each property
        output = np.zeros((len(self.properties),))
        total_weight = np.sum(weights)
        for i, p in enumerate(self.properties):
            my_prop = self.data_source.get_elemental_property(my_site.specie, p)
            n_props = self.data_source.get_elemental_properties(elems, p)
            diffs = np.subtract(n_props, my_prop)
            if not self.signed:
                diffs = np.abs(diffs)
            output[i] = np.dot(weights, diffs) / total_weight
        return output
    def feature_labels(self):
        if not self.signed:
            return ['local difference in ' + p for p in self.properties]
        else:
            return ['local signed difference in ' + p for p in self.properties]
    def citations(self):
        return ["@article{Ward2017,"
                "author = {<NAME> and <NAME> "
                "and <NAME> and Hegde, <NAME>. "
                "and <NAME> and <NAME> "
                "and Wolverton, Chris},"
                "doi = {10.1103/PhysRevB.96.024104},"
                "journal = {Physical Review B},"
                "pages = {024104},"
                "title = {{Including crystal structure attributes "
                "in machine learning models of formation energies "
                "via Voronoi tessellations}},"
                "url = {http://link.aps.org/doi/10.1103/PhysRevB.96.014107},"
                "volume = {96},year = {2017}}",
                '@article{jong_chen_notestine_persson_ceder_jain_asta_gamst_2016,'
                'title={A Statistical Learning Framework for Materials Science: '
                'Application to Elastic Moduli of k-nary Inorganic Polycrystalline Compounds}, '
                'volume={6}, DOI={10.1038/srep34256}, number={1}, journal={Scientific Reports}, '
                'author={<NAME> and <NAME> and Notestine, Randy and Persson, '
                'Kristin and Ceder, Gerbrand and Jain, Anubhav and Asta, Mark and Gamst, Anthony}, '
                'year={2016}, month={Mar}}'
                ]
    def implementors(self):
        return ['<NAME>', '<NAME>']
class BondOrientationalParameter(BaseFeaturizer):
    """
    Averages of spherical harmonics of local neighbors

    Bond Orientational Parameters (BOPs) describe the local environment around an atom by
    considering the local symmetry of the bonds as computed using spherical harmonics.
    To create descriptors that are invariant to rotating the coordinate system, we use the
    average of all spherical harmonics of a certain degree - following the approach of
    `Steinhardt et al. <https://link.aps.org/doi/10.1103/PhysRevB.28.784>`_.
    We weigh the contributions of each neighbor with the solid angle of the Voronoi tessellation
    (see `Mickel et al. <https://aip.scitation.org/doi/abs/10.1063/1.4774084>_` for further
    discussion). The weighing scheme makes these descriptors vary smoothly with small distortions
    of a crystal structure.
    In addition to the average spherical harmonics, this class can also compute the :math:`W` and
    :math:`\hat{W}` parameters proposed by `Steinhardt et al. <https://link.aps.org/doi/10.1103/PhysRevB.28.784>`_.
    Attributes:
        BOOP Q l=<n> - Average spherical harmonic for a certain degree, n.
        BOOP W l=<n> - W parameter for a certain degree of spherical harmonic, n.
        BOOP What l=<n> - :math:`\hat{W}` parameter for a certain degree of spherical harmonic, n.
    References:
        `Steinhardt et al., _PRB_ (1983) <https://link.aps.org/doi/10.1103/PhysRevB.28.784>`_
        `Seko et al., _PRB_ (2017) <http://link.aps.org/doi/10.1103/PhysRevB.95.144110>`_
    """
    def __init__(self, max_l=10, compute_w=False, compute_w_hat=False):
        """
        Initialize the featurizer
        Args:
            max_l (int) - Maximum spherical harmonic to consider
            compute_w (bool) - Whether to compute Ws as well
            compute_w_hat (bool) - Whether to compute What
        """
        # Solid-angle weighting makes the descriptors vary smoothly with
        # small structural distortions (see class docstring)
        self._nn = VoronoiNN(weight='solid_angle')
        self.max_l = max_l
        self.compute_W = compute_w
        self.compute_What = compute_w_hat
    def featurize(self, strc, idx):
        """
        Compute the BOPs for the site with the given index.
        Args:
            strc (Structure): Pymatgen Structure object.
            idx (int): index of target site in the structure.
        Returns:
            ([float]) Q values for l=1..max_l, optionally followed by the
            W values and then the What values.
        """
        # Get the nearest neighbors of the atom
        nns = get_nearest_neighbors(self._nn, strc, idx)
        # Get the polar and azimuthal angles of each Voronoi facet normal:
        # phi (polar) from the z component, theta (azimuth) from x and y
        phi = np.arccos([x['poly_info']['normal'][-1] for x in nns])
        theta = np.arctan2([x['poly_info']['normal'][1] for x in nns],
                           [x['poly_info']['normal'][0] for x in nns])
        # Get the solid-angle weights for each neighbor, normalized to sum to 1
        weights = np.array([x['weight'] for x in nns])
        weights /= weights.sum()
        # Compute the spherical harmonics for the desired `l`s
        Qs = []
        Ws = []
        for l in range(1, self.max_l + 1):
            # Average each spherical harmonic Y_lm over the neighbors,
            # weighted by solid angle.
            # NOTE(review): scipy's sph_harm(m, n, theta, phi) takes the
            # azimuthal angle as `theta` and the polar angle as `phi`, which
            # matches how the angles are computed above — confirm if the
            # scipy version in use changes this convention.
            qlm = dict((m, np.dot(weights, sph_harm(m, l, theta, phi)))
                       for m in range(-l, l + 1))
            # Steinhardt Q_l: rotationally-invariant norm over all m's
            Qs.append(np.sqrt(np.pi * 4 / (2 * l + 1) *
                              np.sum(np.abs(list(qlm.values())) ** 2)))
            # Compute the W, if desired
            if self.compute_W or self.compute_What:
                w = 0
                # Loop over all non-zero Wigner 3j coefficients
                for (m1, m2, m3), wcoeff in get_wigner_coeffs(l):
                    w += qlm[m1] * qlm[m2] * qlm[m3] * wcoeff
                Ws.append(w.real)
        # Compute Whats, if desired: W normalized by Q^3 (guarding against
        # division by near-zero Q)
        if self.compute_What:
            Whats = [w / (q / np.sqrt(np.pi * 4 / (2 * l + 1))) ** 3 if abs(q) > 1.0e-6 else 0.0
                     for l, q, w in zip(range(1, self.max_l + 1), Qs, Ws)]
        # Compile the results. Always returns Qs, and optionally the W/What
        if self.compute_W:
            Qs += Ws
        if self.compute_What:
            Qs += Whats
        return Qs
    def feature_labels(self):
        # Ordering matches featurize(): Qs, then Ws, then Whats
        q_labels = ['BOOP Q l={}'.format(l) for l in range(1, self.max_l+1)]
        if self.compute_W:
            q_labels += ['BOOP W l={}'.format(l) for l in range(1, self.max_l+1)]
        if self.compute_What:
            q_labels += ['BOOP What l={}'.format(l) for l in range(1, self.max_l + 1)]
        return q_labels
    def citations(self):
        return ["@article{Seko2017,"
                "author = {<NAME> <NAME> and Nakayama, "
                "Keita and <NAME> and Tanaka, Isao},"
                "doi = {10.1103/PhysRevB.95.144110},"
                "journal = {Physical Review B}, number = {14}, pages = {144110},"
                "title = {{Representation of compounds for machine-learning prediction of physical properties}},"
                "url = {http://link.aps.org/doi/10.1103/PhysRevB.95.144110},"
                "volume = {95},year = {2017}}",
                "@article{Steinhardt1983,"
                "author = {<NAME>. and <NAME>. and <NAME>},"
                "doi = {10.1103/PhysRevB.28.784}, journal = {Physical Review B},"
                "month = {jul}, number = {2}, pages = {784--805},"
                "title = {{Bond-orientational order in liquids and glasses}},"
                "url = {https://link.aps.org/doi/10.1103/PhysRevB.28.784}, "
                "volume = {28}, year = {1983}}"]
    def implementors(self):
        return ['<NAME>', '<NAME>']
class SiteElementalProperty(BaseFeaturizer):
    """
    Elemental properties of atom on a certain site
    Features:
        site [property] - Elemental property for this site
    References:
        `Seko et al., _PRB_ (2017) <http://link.aps.org/doi/10.1103/PhysRevB.95.144110>`_
        `Schmidt et al., _Chem Mater_. (2017) <http://dx.doi.org/10.1021/acs.chemmater.7b00156>`_
    """
    def __init__(self, data_source=None, properties=('Number',)):
        """Initialize the featurizer
        Args:
            data_source (AbstractData): Tool used to look up elemental properties
            properties ([string]): List of properties to use for features
        """
        self.data_source = data_source or MagpieData()
        self.properties = properties
        # Populated by from_preset(); stays empty for hand-built instances
        self._preset_citations = []
    def featurize(self, strc, idx):
        """Look up the configured elemental properties for one site.
        Args:
            strc (Structure): structure containing the site
            idx (int): index of the site to featurize
        Returns:
            ([float]): one value per entry in self.properties
        """
        # Get the site
        site = strc[idx]
        # Get the properties. Species-like objects wrap an Element;
        # unwrap so the lookup key is always an Element.
        elem = site.specie if isinstance(site.specie, Element) else site.specie.element
        props = [self.data_source.get_elemental_property(elem, p) for p in self.properties]
        return props
    def feature_labels(self):
        # One label per configured property
        return ['site {}'.format(p) for p in self.properties]
    def citations(self):
        return self._preset_citations
    def implementors(self):
        return ['<NAME>']
    @staticmethod
    def from_preset(preset):
        """Create the class with pre-defined settings
        Args:
            preset (string): Desired preset
        Returns:
            SiteElementalProperty initialized with desired settings
        """
        if preset == "seko-prb-2017":
            # Property set used by Seko et al., PRB 95, 144110 (2017)
            output = SiteElementalProperty(data_source=MagpieData(),
                                           properties=["Number", "AtomicWeight", "Row", "Column",
                                                       "FirstIonizationEnergy",
                                                       "SecondIonizationEnergy",
                                                       "ElectronAffinity",
                                                       "Electronegativity",
                                                       "AllenElectronegativity",
                                                       "VdWRadius", "CovalentRadius",
                                                       "AtomicRadius",
                                                       "ZungerPP-r_s", "ZungerPP-r_p",
                                                       "MeltingT", "BoilingT", "Density",
                                                       "MolarVolume", "HeatFusion",
                                                       "HeatVaporization",
                                                       "LogThermalConductivity", "HeatCapacityMass"
                                                       ])
            output._preset_citations.append("@article{Seko2017,"
                                            "author = {<NAME> and <NAME> and "
                                            "<NAME> and <NAME> and <NAME>},"
                                            "doi = {10.1103/PhysRevB.95.144110},"
                                            "journal = {Physical Review B}, number = {14},"
                                            "pages = {144110},"
                                            "title = {{Representation of compounds for machine-learning prediction of physical properties}},"
                                            "url = {http://link.aps.org/doi/10.1103/PhysRevB.95.144110},"
                                            "volume = {95}, year = {2017}}")
            return output
        else:
            raise ValueError('Unrecognized preset: {}'.format(preset))
@lru_cache(maxsize=32)
def get_wigner_coeffs(l):
    """Get the list of non-zero Wigner 3j triplets.

    Results are memoized because the coefficients depend only on ``l``.

    Args:
        l (int): Desired l
    Returns:
        List of tuples that contain:
            - ((int)) m coordinates of the triplet
            - (float) Wigner coefficient
    """
    coeffs = []
    for triplet in _iterate_wigner_3j(l):
        m1, m2, m3 = triplet
        coeffs.append((triplet, float(wigner_3j(l, l, l, m1, m2, m3))))
    return coeffs
def _iterate_wigner_3j(l):
"""Iterator over all non-zero Wigner 3j triplets
Args:
l (int) - Desired l
Generates:
pairs of acceptable l's
"""
for m1 in range(-l, l+1):
for m2 in range(-l, l+1):
m3 = -1 * (m1 + m2)
if -l <= m3 <= l:
yield m1, m2, m3
class AverageBondLength(BaseFeaturizer):
    '''
    Weighted mean distance between a site and its nearest neighbors.

    Nearest neighbors are found with one of pymatgen's NearNeighbor
    classes, which also supply a weight expressing how strongly each
    neighbor is associated with the site. The feature is the weighted
    average of the site-to-neighbor distances using those weights.
    '''
    def __init__(self, method):
        '''
        Initialize featurizer
        Args:
            method (NearNeighbor) - subclass under NearNeighbor used to compute nearest neighbors
        '''
        self.method = method
    def featurize(self, strc, idx):
        '''
        Compute the weighted average bond length of a site.
        Args:
            strc (Structure): Pymatgen Structure object
            idx (int): index of target site in structure object
        Returns:
            average bond length (list)
        Raises:
            IndexError: if the nearest-neighbor method finds no bonds
        '''
        neighbor_info = self.method.get_nn_info(strc, idx)
        if not neighbor_info:
            raise IndexError("Input structure has no bonds.")
        origin = strc[idx].coords
        weights = []
        lengths = []
        for entry in neighbor_info:
            weights.append(entry['weight'])
            lengths.append(np.linalg.norm(entry['site'].coords - origin))
        return [PropertyStats.mean(lengths, weights)]
    def feature_labels(self):
        return ['Average bond length']
    def citations(self):
        citation = ('@article{jong_chen_notestine_persson_ceder_jain_asta_gamst_2016,'
                    'title={A Statistical Learning Framework for Materials Science: '
                    'Application to Elastic Moduli of k-nary Inorganic Polycrystalline Compounds}, '
                    'volume={6}, DOI={10.1038/srep34256}, number={1}, journal={Scientific Reports}, '
                    'author={<NAME> and <NAME> and <NAME> and Persson, '
                    'Kristin and Ceder, Gerbrand and <NAME> and Asta, Mark and <NAME>}, '
                    'year={2016}, month={Mar}}')
        return [citation]
    def implementors(self):
        return ['<NAME>', '<NAME>']
class AverageBondAngle(BaseFeaturizer):
    '''
    Determines the average bond angles of a specific site with
    its nearest neighbors using one of pymatgen's NearNeighbor
    classes. Neighbors that are adjacent to each other are stored
    and angle between them are computed. 'Average bond angle' of
    a site is the mean bond angle between all its nearest neighbors.
    '''
    def __init__(self, method):
        '''
        Initialize featurizer
        Args:
            method (NearNeighbor) - subclass under NearNeighbor used to compute nearest
                                    neighbors
        '''
        self.method = method
    def featurize(self, strc, idx):
        '''
        Get average bond angle of a site and all its nearest
        neighbors.
        Args:
            strc (Structure): Pymatgen Structure object
            idx (int): index of target site in structure object
        Returns:
            average bond angle (list)
        Raises:
            IndexError: if the nearest-neighbor method finds no bonds
        '''
        # Compute nearest neighbors of the indexed site
        nns = self.method.get_nn_info(strc, idx)
        if len(nns) == 0:
            raise IndexError("Input structure has no bonds.")
        center = strc[idx].coords
        sites = [i['site'].coords for i in nns]
        # Calculate bond angles for each pair of neighbors; the matrix is
        # symmetric and the (unused) diagonal stays NaN
        bond_angles = np.empty((len(sites), len(sites)))
        bond_angles.fill(np.nan)
        for a, a_site in enumerate(sites):
            for b, b_site in enumerate(sites):
                if (b == a):
                    continue
                # Cosine of the angle between the two center->neighbor vectors
                dot = np.dot(a_site - center, b_site - center) / (
                    np.linalg.norm(a_site - center) * np.linalg.norm(b_site - center))
                # Floating-point error can push |dot| slightly above 1, making
                # arccos return NaN; retry with a rounded value in that case
                if np.isnan(np.arccos(dot)):
                    bond_angles[a, b] = bond_angles[b, a] = np.arccos(round(dot, 5))
                else:
                    bond_angles[a, b] = bond_angles[b, a] = np.arccos(dot)
        # Take the minimum bond angle of each neighbor, then average
        minimum_bond_angles = np.nanmin(bond_angles, axis=1)
        return [PropertyStats.mean(minimum_bond_angles)]
    def feature_labels(self):
        return ['Average bond angle']
    def citations(self):
        return ['@article{jong_chen_notestine_persson_ceder_jain_asta_gamst_2016,'
                'title={A Statistical Learning Framework for Materials Science: '
                'Application to Elastic Moduli of k-nary Inorganic Polycrystalline Compounds}, '
                'volume={6}, DOI={10.1038/srep34256}, number={1}, journal={Scientific Reports}, '
                'author={<NAME> and <NAME> and Notestine, Randy and Persson, '
                'Kristin and Ceder, Gerbrand and <NAME> and Asta, Mark and Gamst, Anthony}, '
                'year={2016}, month={Mar}}'
                ]
    def implementors(self):
        return ['<NAME>', '<NAME>']
# ---- (stray "|" artifact from file concatenation replaced with a comment) ----
from __future__ import division
import copy
from functools import lru_cache
from matminer.featurizers.utils.grdf import Gaussian, Histogram
from matminer.utils.caching import get_nearest_neighbors
from matminer.utils.data import MagpieData
"""
Features that describe the local environment of a single atom. Note that
structural features can be constructed from a combination of site features from
every site in the structure.
The `featurize` function takes two arguments:
struct (Structure): Object representing the structure containing the site
of interest
idx (int): Index of the site to be featurized
We have to use two parameters because the Site object does not hold a pointer
back to its structure and often information on neighbors is required. To run
:code:`featurize_dataframe`, you must pass the column names for both the site
index and the structure. For example:
.. code:: python
f = AGNIFingerprints()
f.featurize_dataframe(data, ['structure', 'site_idx'])
"""
import os
import warnings
import ruamel.yaml as yaml
import itertools
import numpy as np
import scipy.integrate as integrate
from matminer.featurizers.base import BaseFeaturizer
from math import pi
from scipy.special import sph_harm
from sympy.physics.wigner import wigner_3j
from pymatgen import Structure
from pymatgen.core.periodic_table import Element
from pymatgen.analysis.local_env import LocalStructOrderParams, \
VoronoiNN, CrystalNN
import pymatgen.analysis
from pymatgen.analysis.ewald import EwaldSummation
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometry_finder \
import LocalGeometryFinder
from pymatgen.analysis.chemenv.coordination_environments.chemenv_strategies \
import SimplestChemenvStrategy, MultiWeightsChemenvStrategy
from matminer.featurizers.utils.stats import PropertyStats
from sklearn.utils.validation import check_is_fitted
# Load the library of order-parameter definitions shipped with pymatgen:
# maps coordination number -> motif name -> [OP type, optional parameter dict].
cn_motif_op_params = {}
with open(os.path.join(os.path.dirname(
        pymatgen.analysis.__file__), 'cn_opt_params.yaml'), 'r') as f:
    cn_motif_op_params = yaml.safe_load(f)
# Load this package's mapping of coordination number -> target motif OP names,
# stored alongside this module.
cn_target_motif_op = {}
with open(os.path.join(os.path.dirname(
        __file__), 'cn_target_motif_op.yaml'), 'r') as f:
    cn_target_motif_op = yaml.safe_load(f)
class AGNIFingerprints(BaseFeaturizer):
    """
    Product integral of RDF and Gaussian window function, from `Botu et al <http://pubs.acs.org/doi/abs/10.1021/acs.jpcc.6b10908>`_.
    Integral of the product of the radial distribution function and a
    Gaussian window function. Originally used by
    `Botu et al <http://pubs.acs.org/doi/abs/10.1021/acs.jpcc.6b10908>`_ to fit empiricial
    potentials. These features come in two forms: atomic fingerprints and
    direction-resolved fingerprints.
    Atomic fingerprints describe the local environment of an atom and are
    computed using the function:
    :math:`A_i(\eta) = \sum\limits_{i \\ne j} e^{-(\\frac{r_{ij}}{\eta})^2} f(r_{ij})`
    where :math:`i` is the index of the atom, :math:`j` is the index of a neighboring atom, :math:`\eta` is a scaling function,
    :math:`r_{ij}` is the distance between atoms :math:`i` and :math:`j`, and :math:`f(r)` is a cutoff function where
    :math:`f(r) = 0.5[\cos(\\frac{\pi r_{ij}}{R_c}) + 1]` if :math:`r < R_c` and :math:`0` otherwise.
    The direction-resolved fingerprints are computed using
    :math:`V_i^k(\eta) = \sum\limits_{i \\ne j} \\frac{r_{ij}^k}{r_{ij}} e^{-(\\frac{r_{ij}}{\eta})^2} f(r_{ij})`
    where :math:`r_{ij}^k` is the :math:`k^{th}` component of :math:`\\bold{r}_i - \\bold{r}_j`.
    Parameters:
    TODO: Differentiate between different atom types (maybe as another class)
    """
    def __init__(self, directions=(None, 'x', 'y', 'z'), etas=None,
                 cutoff=8):
        """
        Args:
            directions (iterable): List of directions for the fingerprints. Can
                be one or more of 'None`, 'x', 'y', or 'z'
            etas (iterable of floats): List of which window widths to compute
            cutoff (float): Cutoff distance (Angstroms)
        """
        self.directions = directions
        self.etas = etas
        if self.etas is None:
            # Default: 8 window widths log-spaced between 0.8 and 16 Angstrom
            self.etas = np.logspace(np.log10(0.8), np.log10(16), 8)
        self.cutoff = cutoff
    def featurize(self, struct, idx):
        """Compute the AGNI fingerprints of one site.
        Args:
            struct (Structure): structure containing the site
            idx (int): index of the site to featurize
        Returns:
            (np.ndarray): fingerprints ordered by direction, then by eta
        """
        # Get all neighbors of this site
        my_site = struct[idx]
        sites, dists = zip(*struct.get_neighbors(my_site, self.cutoff))
        # Convert dists to a ndarray
        dists = np.array(dists)
        # If one of the features is direction-dependent, compute the :math:`(r_i - r_j) / r_{ij}`
        if any([x in self.directions for x in ['x', 'y', 'z']]):
            disps = np.array(
                [my_site.coords - s.coords for s in sites]) / dists[:,
                                                              np.newaxis]
        # Compute the cutoff function
        cutoff_func = 0.5 * (np.cos(np.pi * dists / self.cutoff) + 1)
        # Compute "e^(r/eta) * cutoff_func" for each eta
        windowed = np.zeros((len(dists), len(self.etas)))
        for i, eta in enumerate(self.etas):
            windowed[:, i] = np.multiply(
                np.exp(-1 * np.power(np.true_divide(dists, eta), 2)),
                cutoff_func)
        # Compute the fingerprints
        output = []
        for d in self.directions:
            if d is None:
                # Direction-independent atomic fingerprint: plain sum
                output.append(np.sum(windowed, axis=0))
            else:
                # Direction-resolved fingerprint: weight each neighbor's
                # contribution by the projection of its unit displacement
                if d == 'x':
                    proj = [1., 0., 0.]
                elif d == 'y':
                    proj = [0., 1., 0.]
                elif d == 'z':
                    proj = [0., 0., 1.]
                else:
                    raise Exception('Unrecognized direction')
                output.append(
                    np.sum(windowed * np.dot(disps, proj)[:, np.newaxis],
                           axis=0))
        # Return the results
        return np.hstack(output)
    def feature_labels(self):
        # One label per (direction, eta) pair, matching featurize() ordering
        labels = []
        for d in self.directions:
            for e in self.etas:
                if d is None:
                    labels.append('AGNI eta=%.2e' % e)
                else:
                    labels.append('AGNI dir=%s eta=%.2e' % (d, e))
        return labels
    def citations(self):
        return ["@article{Botu2015, author = {<NAME> and <NAME>},doi = {10.1002/qua.24836}," \
                "journal = {International Journal of Quantum Chemistry},number = {16},pages = {1074--1083}," \
                "title = {{Adaptive machine learning framework to accelerate ab initio molecular dynamics}}," \
                "volume = {115},year = {2015}}"]
    def implementors(self):
        return ['<NAME>']
class OPSiteFingerprint(BaseFeaturizer):
    """
    Local structure order parameters computed from a site's neighbor env.
    For each order parameter, we determine
    the neighbor shell that complies with the expected
    coordination number. For example, we find the 4 nearest
    neighbors for the tetrahedral OP, the 6 nearest for the
    octahedral OP, and the 8 nearest neighbors for the bcc OP.
    If we don't find such a shell, the OP is either set to zero
    or evaluated with the shell of the next largest observed
    coordination number.
    Args:
        target_motifs (dict): target op or motif type where keys
            are corresponding coordination numbers
            (e.g., {4: "tetrahedral"}).
        dr (float): width for binning neighbors in unit of relative
            distances (= distance/nearest neighbor
            distance). The binning is necessary to make the
            neighbor-finding step robust against small numerical
            variations in neighbor distances (default: 0.1).
        ddr (float): variation of width for finding stable OP values.
        ndr (int): number of width variations for each variation direction
            (e.g., ndr = 0 only uses the input dr, whereas
            ndr=1 tests dr = dr - ddr, dr, and dr + ddr.
        dop (float): binning width to compute histogram for each OP
            if ndr > 0.
        dist_exp (boolean): exponent for distance factor to multiply
            order parameters with that penalizes (large)
            variations in distances in a given motif.
            0 will switch the option off
            (default: 2).
        zero_ops (boolean): set an OP to zero if there is no neighbor
            shell that complies with the expected
            coordination number of a given OP
            (e.g., CN=4 for tetrahedron;
            default: True).
    """
    def __init__(self, target_motifs=None, dr=0.1, ddr=0.01, ndr=1, dop=0.001,
                 dist_exp=2, zero_ops=True):
        # Deep-copy the module-level defaults (or the user's dict) so later
        # mutation cannot change the configuration
        self.cn_target_motif_op = copy.deepcopy(cn_target_motif_op) \
            if target_motifs is None else copy.deepcopy(target_motifs)
        self.dr = dr
        self.ddr = ddr
        self.ndr = ndr
        self.dop = dop
        self.dist_exp = dist_exp
        self.zero_ops = zero_ops
        # Build one LocalStructOrderParams object per (CN, motif),
        # pulling OP type and parameters from cn_motif_op_params when present
        self.ops = {}
        for cn, t_list in self.cn_target_motif_op.items():
            self.ops[cn] = []
            for t in t_list:
                ot = t
                p = None
                if cn in cn_motif_op_params.keys():
                    if t in cn_motif_op_params[cn].keys():
                        ot = cn_motif_op_params[cn][t][0]
                        if len(cn_motif_op_params[cn][t]) > 1:
                            p = cn_motif_op_params[cn][t][1]
                self.ops[cn].append(LocalStructOrderParams([ot], parameters=[p]))
    def featurize(self, struct, idx):
        """
        Get OP fingerprint of site with given index in input
        structure.
        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure.
        Returns:
            opvals (numpy array): order parameters of target site.
        """
        idop = 1.0 / self.dop
        opvals = {}
        s = struct.sites[idx]
        # Grow the cutoff radius until at least 12 neighbors are found
        neigh_dist = []
        r = 6
        while len(neigh_dist) < 12:
            r += 1.0
            neigh_dist = struct.get_neighbors(s, r)
        # Smoothen distance, but use relative distances.
        dmin = min([d for n, d in neigh_dist])
        neigh_dist = [[n, d / dmin] for n, d in neigh_dist]
        # Bin the relative distances once per trial bin width
        # (i ranges over the ndr variations around dr)
        neigh_dist_alldrs = {}
        d_sorted_alldrs = {}
        for i in range(-self.ndr, self.ndr + 1):
            opvals[i] = []
            this_dr = self.dr + float(i) * self.ddr
            this_idr = 1.0 / this_dr
            neigh_dist_alldrs[i] = []
            for j in range(len(neigh_dist)):
                neigh_dist_alldrs[i].append([neigh_dist[j][0],
                                             (float(
                                                 int(neigh_dist[j][1] * this_idr \
                                                     + 0.5)) + 0.5) * this_dr])
            d_sorted_alldrs[i] = []
            for n, d in neigh_dist_alldrs[i]:
                if d not in d_sorted_alldrs[i]:
                    d_sorted_alldrs[i].append(d)
            d_sorted_alldrs[i] = sorted(d_sorted_alldrs[i])
        # Do q_sgl_bd separately.
        #if self.optypes[1][0] == "sgl_bd":
        if self.cn_target_motif_op[1][0] == "sgl_bd":
            for i in range(-self.ndr, self.ndr + 1):
                site_list = [s]
                for n, dn in neigh_dist_alldrs[i]:
                    site_list.append(n)
                opval = self.ops[1][0].get_order_parameters(
                    site_list, 0,
                    indices_neighs=[j for j in range(1, len(site_list))])
                opvals[i].append(opval[0])
        # Evaluate OPs shell-by-shell: each distinct binned distance defines
        # a candidate neighbor shell with coordination number this_cn
        for i in range(-self.ndr, self.ndr + 1):
            prev_cn = 0
            for d in d_sorted_alldrs[i]:
                this_cn = 0
                site_list = [s]
                this_av_inv_drel = 0.0
                for j, [n, dn] in enumerate(neigh_dist_alldrs[i]):
                    if dn <= d:
                        this_cn += 1
                        site_list.append(n)
                        this_av_inv_drel += (1.0 / (neigh_dist[j][1]))
                this_av_inv_drel = this_av_inv_drel / float(this_cn)
                # Distance penalty factor for motifs with spread-out distances
                d_fac = this_av_inv_drel ** self.dist_exp
                for cn in range(max(2, prev_cn + 1), min(this_cn + 1, 13)):
                    # Set all OPs of non-CN-complying neighbor environments
                    # to zero if applicable.
                    if self.zero_ops and cn != this_cn:
                        for it in range(len(self.cn_target_motif_op[cn])):
                            opvals[i].append(0)
                        continue
                    # Set all (remaining) OPs.
                    for it in range(len(self.cn_target_motif_op[cn])):
                        opval = self.ops[cn][it].get_order_parameters(
                            site_list, 0,
                            indices_neighs=[j for j in
                                            range(1, len(site_list))])
                        if opval[0] is None:
                            opval[0] = 0
                        else:
                            opval[0] = d_fac * opval[0]
                        opvals[i].append(opval[0])
                prev_cn = this_cn
                if prev_cn >= 12:
                    break
        # Pick, for each OP, the most stable value across the bin-width
        # variations via a histogram peak search
        opvals_out = []
        for j in range(len(opvals[0])):
            # Compute histogram, determine peak, and location
            # of peak value.
            op_tmp = [opvals[i][j] for i in range(-self.ndr, self.ndr + 1)]
            minval = float(int(min(op_tmp) * idop - 1.5)) * self.dop
            # print(minval)
            if minval < 0.0:
                minval = 0.0
            if minval > 1.0:
                minval = 1.0
            # print(minval)
            maxval = float(int(max(op_tmp) * idop + 1.5)) * self.dop
            # print(maxval)
            if maxval < 0.0:
                maxval = 0.0
            if maxval > 1.0:
                maxval = 1.0
            # print(maxval)
            if minval == maxval:
                minval = minval - self.dop
                maxval = maxval + self.dop
            # print(minval)
            # print(maxval)
            nbins = int((maxval - minval) * idop)
            # print('{} {} {}'.format(minval, maxval, nbins))
            hist, bin_edges = np.histogram(
                op_tmp, bins=nbins, range=(minval, maxval),
                weights=None, density=False)
            max_hist = max(hist)
            op_peaks = []
            for i, h in enumerate(hist):
                if h == max_hist:
                    op_peaks.append(
                        [i, 0.5 * (bin_edges[i] + bin_edges[i + 1])])
            # Address problem that 2 OP values can be close to a bin edge.
            hist2 = []
            op_peaks2 = []
            i = 0
            while i < len(op_peaks):
                if i < len(op_peaks) - 1:
                    if op_peaks[i + 1][0] - op_peaks[i][0] == 1:
                        # Adjacent peak bins: merge them into one peak
                        op_peaks2.append(
                            0.5 * (op_peaks[i][1] + op_peaks[i + 1][1]))
                        hist2.append(
                            hist[op_peaks[i][0]] + hist[op_peaks[i + 1][0]])
                        i += 1
                    else:
                        op_peaks2.append(op_peaks[i][1])
                        hist2.append(hist[op_peaks[i][0]])
                else:
                    op_peaks2.append(op_peaks[i][1])
                    hist2.append(hist[op_peaks[i][0]])
                i += 1
            opvals_out.append(op_peaks2[list(hist2).index(max(hist2))])
        return np.array(opvals_out)
    def feature_labels(self):
        # One label per (motif, CN), matching featurize() ordering
        labels = []
        for cn, li in self.cn_target_motif_op.items():
            for e in li:
                labels.append('{} CN_{}'.format(e, cn))
        return labels
    def citations(self):
        return ['@article{zimmermann_jain_2017, title={Applications of order'
                ' parameter feature vectors}, journal={in progress}, author={'
                '<NAME>. and <NAME>.}, year={2017}}']
    def implementors(self):
        return ['<NAME>']
class CrystalNNFingerprint(BaseFeaturizer):
    """
    A local order parameter fingerprint for periodic crystals.
    The fingerprint represents the value of various order parameters for the
    site. The "wt" order parameter describes how consistent a site is with a
    certain coordination number. The remaining order parameters are computed
    by multiplying the "wt" for that coordination number with the OP value.
    The chem_info parameter can be used to also get chemical descriptors that
    describe differences in some chemical parameter (e.g., electronegativity)
    between the central site and the site neighbors.
    """
    @staticmethod
    def from_preset(preset, **kwargs):
        """
        Use preset parameters to get the fingerprint
        Args:
            preset (str): name of preset ("cn" or "ops")
            **kwargs: other settings to be passed into CrystalNN class
        """
        if preset == "cn":
            # "cn" preset: only coordination-number weights for CN 1-24
            op_types = dict([(k + 1, ["wt"]) for k in range(24)])
            return CrystalNNFingerprint(op_types, **kwargs)
        elif preset == "ops":
            # "ops" preset: the target motif OPs plus a leading "wt" per CN
            op_types = copy.deepcopy(cn_target_motif_op)
            for k in range(24):
                if k + 1 in op_types:
                    op_types[k + 1].insert(0, "wt")
                else:
                    op_types[k + 1] = ["wt"]
            return CrystalNNFingerprint(op_types, chem_info=None, **kwargs)
        else:
            raise RuntimeError('preset "{}" is not supported in '
                               'CrystalNNFingerprint'.format(preset))
    def __init__(self, op_types, chem_info=None, **kwargs):
        """
        Initialize the CrystalNNFingerprint. Use the from_preset() function to
        use default params.
        Args:
            op_types (dict): a dict of coordination number (int) to a list of str
                representing the order parameter types
            chem_info (dict): a dict of chemical properties (e.g., atomic mass)
                to dictionaries that map an element to a value
                (e.g., chem_info["Pauling scale"]["O"] = 3.44)
            **kwargs: other settings to be passed into CrystalNN class
        """
        self.op_types = copy.deepcopy(op_types)
        self.cnn = CrystalNN(**kwargs)
        if chem_info is not None:
            self.chem_info = copy.deepcopy(chem_info)
            self.chem_props = list(chem_info.keys())
        else:
            self.chem_info = None
        self.ops = {}  # load order parameter objects & parameters
        for cn, t_list in self.op_types.items():
            self.ops[cn] = []
            for t in t_list:
                if t == "wt":
                    # "wt" is handled specially in featurize(); keep the string
                    self.ops[cn].append(t)
                else:
                    ot = t
                    p = None
                    if cn in cn_motif_op_params.keys():
                        if t in cn_motif_op_params[cn].keys():
                            ot = cn_motif_op_params[cn][t][0]
                            if len(cn_motif_op_params[cn][t]) > 1:
                                p = cn_motif_op_params[cn][t][1]
                    self.ops[cn].append(LocalStructOrderParams([ot], parameters=[p]))
    def featurize(self, struct, idx):
        """
        Get crystal fingerprint of site with given index in input
        structure.
        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure.
        Returns:
            list of weighted order parameters of target site.
        """
        nndata = self.cnn.get_nn_data(struct, idx)
        max_cn = sorted(self.op_types)[-1]
        cn_fingerprint = []
        if self.chem_info is not None:
            prop_delta = {}  # dictionary of chemical property to final value
            for prop in self.chem_props:
                prop_delta[prop] = 0
            sum_wt = 0
            elem_central = struct.sites[idx].specie.symbol
            specie_central = str(struct.sites[idx].specie)
        for k in range(max_cn):
            cn = k + 1
            # Weight expressing how consistent the site is with this CN
            wt = nndata.cn_weights.get(cn, 0)
            if cn in self.ops:
                for op in self.ops[cn]:
                    if op == "wt":
                        cn_fingerprint.append(wt)
                        if self.chem_info is not None and wt != 0:
                            # Compute additional chemistry-related features
                            sum_wt += wt
                            neigh_sites = [d["site"] for d in
                                           nndata.cn_nninfo[cn]]
                            for prop in self.chem_props:
                                # get the value for specie, if not fall back to
                                # value defined for element
                                prop_central = self.chem_info[prop].get(
                                    specie_central, self.chem_info[prop].get(
                                        elem_central))
                                for neigh in neigh_sites:
                                    elem_neigh = neigh.specie.symbol
                                    specie_neigh = str(neigh.specie)
                                    prop_neigh = self.chem_info[prop].get(
                                        specie_neigh,
                                        self.chem_info[prop].get(
                                            elem_neigh))
                                    prop_delta[prop] += wt * \
                                                        (prop_neigh -
                                                         prop_central) / cn
                    elif wt == 0:
                        # Zero weight for this CN: OP contributes nothing,
                        # so skip the (expensive) OP evaluation
                        cn_fingerprint.append(wt)
                    else:
                        neigh_sites = [d["site"] for d in nndata.cn_nninfo[cn]]
                        opval = op.get_order_parameters(
                            [struct[idx]] + neigh_sites, 0,
                            indices_neighs=[i for i in
                                            range(1, len(neigh_sites) + 1)])[0]
                        opval = opval or 0  # handles None
                        cn_fingerprint.append(wt * opval)
        chem_fingerprint = []
        if self.chem_info is not None:
            for val in prop_delta.values():
                chem_fingerprint.append(val / sum_wt)
        return cn_fingerprint + chem_fingerprint
    def feature_labels(self):
        # One label per (op, CN) plus optional chemistry-difference labels
        labels = []
        max_cn = sorted(self.op_types)[-1]
        for k in range(max_cn):
            cn = k + 1
            if cn in list(self.ops.keys()):
                for op in self.op_types[cn]:
                    labels.append("{} CN_{}".format(op, cn))
        if self.chem_info is not None:
            for prop in self.chem_props:
                labels.append("{} local diff".format(prop))
        return labels
    def citations(self):
        return []
    def implementors(self):
        return ['<NAME>', '<NAME>']
class VoronoiFingerprint(BaseFeaturizer):
    """
    Voronoi tessellation-based features around target site.
    Calculate the following sets of features based on Voronoi tessellation
    analysis around the target site:
    Voronoi indices
        n_i denotes the number of i-edged facets, and i is in the range of 3-10.
        e.g.
        for bcc lattice, the Voronoi indices are [0,6,0,8,...];
        for fcc/hcp lattice, the Voronoi indices are [0,12,0,0,...];
        for icosahedra, the Voronoi indices are [0,0,12,0,...];
    i-fold symmetry indices
        computed as n_i/sum(n_i), and i is in the range of 3-10.
        reflect the strength of i-fold symmetry in local sites.
        e.g.
        for bcc lattice, the i-fold symmetry indices are [0,6/14,0,8/14,...]
            indicating both 4-fold and a stronger 6-fold symmetries are present;
        for fcc/hcp lattice, the i-fold symmetry factors are [0,1,0,0,...],
            indicating only 4-fold symmetry is present;
        for icosahedra, the Voronoi indices are [0,0,1,0,...],
            indicating only 5-fold symmetry is present;
    Weighted i-fold symmetry indices
        if use_weights = True
    Voronoi volume
        total volume of the Voronoi polyhedron around the target site
    Voronoi volume statistics of sub_polyhedra formed by each facet + center
        stats_vol = ['mean', 'std_dev', 'minimum', 'maximum']
    Voronoi area
        total area of the Voronoi polyhedron around the target site
    Voronoi area statistics of the facets
        stats_area = ['mean', 'std_dev', 'minimum', 'maximum']
    Voronoi nearest-neighboring distance statistics
        stats_dist = ['mean', 'std_dev', 'minimum', 'maximum']
    Args:
        cutoff (float): cutoff distance in determining the potential
                        neighbors for Voronoi tessellation analysis.
                        (default: 6.5)
        use_symm_weights(bool): whether to use weights to derive weighted
                                i-fold symmetry indices.
        symm_weights(str): weights to be used in weighted i-fold symmetry
                           indices.
                           Supported options: 'solid_angle', 'area', 'volume',
                           'face_dist'. (default: 'solid_angle')
        stats_vol (list of str): volume statistics types.
        stats_area (list of str): area statistics types.
        stats_dist (list of str): neighboring distance statistics types.
    """
    def __init__(self, cutoff=6.5,
                 use_symm_weights=False, symm_weights='solid_angle',
                 stats_vol=None, stats_area=None, stats_dist=None):
        self.cutoff = cutoff
        self.use_symm_weights = use_symm_weights
        self.symm_weights = symm_weights
        # Deep-copy caller-provided stat lists so later mutation by the
        # caller cannot change this featurizer's configuration
        self.stats_vol = ['mean', 'std_dev', 'minimum', 'maximum'] \
            if stats_vol is None else copy.deepcopy(stats_vol)
        self.stats_area = ['mean', 'std_dev', 'minimum', 'maximum'] \
            if stats_area is None else copy.deepcopy(stats_area)
        self.stats_dist = ['mean', 'std_dev', 'minimum', 'maximum'] \
            if stats_dist is None else copy.deepcopy(stats_dist)
    def featurize(self, struct, idx):
        """
        Get Voronoi fingerprints of site with given index in input structure.
        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure.
        Returns:
            (list of floats): Voronoi fingerprints.
                -Voronoi indices
                -i-fold symmetry indices
                -weighted i-fold symmetry indices (if use_symm_weights = True)
                -Voronoi volume
                -Voronoi volume statistics
                -Voronoi area
                -Voronoi area statistics
                -Voronoi dist statistics
        """
        # Get the nearest neighbors using a Voronoi tessellation
        n_w = get_nearest_neighbors(VoronoiNN(cutoff=self.cutoff), struct, idx)
        # Prepare storage for the Voronoi indices
        # (index 0 corresponds to 3-edged facets, index 7 to 10-edged ones)
        voro_idx_list = np.zeros(8, int)
        voro_idx_weights = np.zeros(8)
        vol_list = []
        area_list = []
        dist_list = []
        # Get statistics
        for nn in n_w:
            if nn['poly_info']['n_verts'] <= 10:
                # If a facet has more than 10 edges, it's skipped here.
                voro_idx_list[nn['poly_info']['n_verts'] - 3] += 1
                vol_list.append(nn['poly_info']['volume'])
                area_list.append(nn['poly_info']['area'])
                # face_dist is half the center-to-neighbor distance
                dist_list.append(nn['poly_info']['face_dist'] * 2)
                if self.use_symm_weights:
                    voro_idx_weights[nn['poly_info']['n_verts'] - 3] += \
                        nn['poly_info'][self.symm_weights]
        # Normalize counts (and weights) into i-fold symmetry indices
        symm_idx_list = voro_idx_list / sum(voro_idx_list)
        if self.use_symm_weights:
            symm_wt_list = voro_idx_weights / sum(voro_idx_weights)
            voro_fps = list(np.concatenate((voro_idx_list, symm_idx_list,
                                            symm_wt_list), axis=0))
        else:
            voro_fps = list(np.concatenate((voro_idx_list,
                                            symm_idx_list), axis=0))
        voro_fps.append(sum(vol_list))
        voro_fps.append(sum(area_list))
        voro_fps += [PropertyStats().calc_stat(vol_list, stat_vol)
                     for stat_vol in self.stats_vol]
        voro_fps += [PropertyStats().calc_stat(area_list, stat_area)
                     for stat_area in self.stats_area]
        voro_fps += [PropertyStats().calc_stat(dist_list, stat_dist)
                     for stat_dist in self.stats_dist]
        return voro_fps
    def feature_labels(self):
        # Label ordering mirrors the value ordering built in featurize()
        labels = ['Voro_index_%d' % i for i in range(3, 11)]
        labels += ['Symmetry_index_%d' % i for i in range(3, 11)]
        if self.use_symm_weights:
            labels += ['Symmetry_weighted_index_%d' % i for i in range(3, 11)]
        labels.append('Voro_vol_sum')
        labels.append('Voro_area_sum')
        labels += ['Voro_vol_%s' % stat_vol for stat_vol in self.stats_vol]
        labels += ['Voro_area_%s' % stat_area for stat_area in self.stats_area]
        labels += ['Voro_dist_%s' % stat_dist for stat_dist in self.stats_dist]
        return labels
    def citations(self):
        citation = ['@book{okabe1992spatial,  '
                    'title  = {Spatial tessellations}, '
                    'author = {<NAME>}, '
                    'year   = {1992}, '
                    'publisher = {Wiley Online Library}}']
        return citation
    def implementors(self):
        return ['<NAME>']
class ChemicalSRO(BaseFeaturizer):
"""
Chemical short range ordering, deviation of local site and nominal structure compositions
Chemical SRO features to evaluate the deviation
of local chemistry with the nominal composition of the structure.
A local bonding preference is computed using
f_el = N_el/(sum of N_el) - c_el,
where N_el is the number of each element type in the neighbors around
the target site, sum of N_el is the sum of all possible element types
(coordination number), and c_el is the composition of the specific
element in the entire structure.
A positive f_el indicates the "bonding" with the specific element
is favored, at least in the target site;
A negative f_el indicates the "bonding" is not favored, at least
in the target site.
Note that ChemicalSRO is only featurized for elements identified by
"fit" (see following), thus "fit" must be called before "featurize",
or else an error will be raised.
Features:
CSRO__[nn method]_[element] - The Chemical SRO of a site computed based
on neighbors determined with a certain NN-detection method for
a certain element.
"""
    def __init__(self, nn, includes=None, excludes=None, sort=True):
        """Initialize the featurizer
        Args:
            nn (NearestNeighbor): instance of one of pymatgen's NearestNeighbor
                                  classes.
            includes (array-like or str): elements included to calculate CSRO.
            excludes (array-like or str): elements excluded to calculate CSRO.
            sort (bool): whether to sort elements by mendeleev number."""
        self.nn = nn
        self.includes = includes
        if self.includes:
            # Normalize to canonical element symbols; atleast_1d lets a
            # single string or a list be passed interchangeably
            self.includes = [Element(el).symbol
                             for el in np.atleast_1d(self.includes)]
        self.excludes = excludes
        if self.excludes:
            self.excludes = [Element(el).symbol
                             for el in np.atleast_1d(self.excludes)]
        self.sort = sort
        # Both are populated by fit(); None means fit() has not run yet
        self.el_list_ = None
        self.el_amt_dict_ = None
    @staticmethod
    def from_preset(preset, **kwargs):
        """
        Use one of the standard instances of a given NearNeighbor class.
        Args:
            preset (str): preset type ("VoronoiNN", "JmolNN",
                          "MinimumDistanceNN", "MinimumOKeeffeNN",
                          or "MinimumVIRENN").
            **kwargs: allow to pass args to the NearNeighbor class.
        Returns:
            ChemicalSRO from a preset.
        """
        # Resolve the class by name from pymatgen's local_env module
        nn_ = getattr(pymatgen.analysis.local_env, preset)
        return ChemicalSRO(nn_(**kwargs))
def fit(self, X, y=None):
"""
Identify elements to be included in the following featurization,
by intersecting the elements present in the passed structures with
those explicitly included (or excluded) in __init__. Only elements
in the self.el_list_ will be featurized.
Besides, compositions of the passed structures will also be "stored"
in a dict of self.el_amt_dict_, avoiding repeated calculation of
composition when featurizing multiple sites in the same structure.
Args:
X (array-like): containing Pymatgen structures and sites, supports
multiple choices:
-2D array-like object:
e.g. [[struct, site], [struct, site], …]
np.array([[struct, site], [struct, site], …])
-Pandas dataframe:
e.g. df[['struct', 'site']]
y : unused (added for consistency with overridden method signature)
Returns:
self
"""
structs = np.atleast_2d(X)[:, 0]
if not all([isinstance(struct, Structure) for struct in structs]):
raise TypeError("This fit requires an array-like input of Pymatgen "
"Structures and sites!")
self.el_amt_dict_ = {}
el_set_ = set()
for s in structs:
if str(s) not in self.el_amt_dict_.keys():
el_amt_ = s.composition.fractional_composition.get_el_amt_dict()
els_ = set(el_amt_.keys()) if self.includes is None \
else set([el for el in el_amt_.keys()
if el in self.includes])
els_ = els_ if self.excludes is None \
else els_ - set(self.excludes)
if els_:
self.el_amt_dict_[str(s)] = el_amt_
el_set_ = el_set_ | els_
self.el_list_ = sorted(list(el_set_), key=lambda el:
Element(el).mendeleev_no) if self.sort else list(el_set_)
return self
def featurize(self, struct, idx):
"""
Get CSRO features of site with given index in input structure.
Args:
struct (Structure): Pymatgen Structure object.
idx (int): index of target site in structure.
Returns:
(list of floats): Chemical SRO features for each element.
"""
check_is_fitted(self, ['el_amt_dict_', 'el_list_'])
csro = [0.]*len(self.el_list_)
if str(struct) in self.el_amt_dict_.keys():
el_amt = self.el_amt_dict_[str(struct)]
nn_el_amt = dict.fromkeys(el_amt, 0)
nn_list = self.nn.get_nn(struct, idx)
for nn in nn_list:
if str(nn.specie.symbol) in self.el_list_:
nn_el_amt[str(nn.specie.symbol)] += 1/len(nn_list)
for el in el_amt.keys():
if el in self.el_list_:
csro[self.el_list_.index(el)] = nn_el_amt[el] - el_amt[el]
return csro
def feature_labels(self):
check_is_fitted(self, ['el_amt_dict_', 'el_list_'])
return ['CSRO_{}_{}'.format(el, self.nn.__class__.__name__)
for el in self.el_list_]
    def citations(self):
        # BibTeX references appropriate for the configured nearest-neighbor
        # method, selected by the class name of self.nn. Several minimum-
        # distance-style methods additionally share the Zimmermann et al.
        # (2017) reference appended at the end.
        citations = []
        if self.nn.__class__.__name__ == 'VoronoiNN':
            citations.append('@article{voronoi_jreineangewmath_1908, title={'
                'Nouvelles applications des param\\`{e}tres continus \\`{a} la '
                'th\'{e}orie des formes quadratiques. Sur quelques '
                'propri\'{e}t\'{e}s des formes quadratiques positives'
                ' parfaites}, journal={Journal f\"ur die reine und angewandte '
                'Mathematik}, number={133}, pages={97-178}, year={1908}}')
            citations.append('@article{dirichlet_jreineangewmath_1850, title={'
                '\"{U}ber die Reduction der positiven quadratischen Formen '
                'mit drei unbestimmten ganzen Zahlen}, journal={Journal '
                'f\"ur die reine und angewandte Mathematik}, number={40}, '
                'pages={209-227}, doi={10.1515/crll.1850.40.209}, year={1850}}')
        if self.nn.__class__.__name__ == 'JmolNN':
            citations.append('@misc{jmol, title = {Jmol: an open-source Java '
                'viewer for chemical structures in 3D}, howpublished = {'
                '\\url{http://www.jmol.org/}}}')
        if self.nn.__class__.__name__ == 'MinimumOKeeffeNN':
            citations.append('@article{okeeffe_jamchemsoc_1991, title={Atom '
                'sizes and bond lengths in molecules and crystals}, journal='
                '{Journal of the American Chemical Society}, author={'
                '<NAME>. and <NAME>.}, number={113}, pages={'
                '3226-3229}, doi={doi:10.1021/ja00009a002}, year={1991}}')
        if self.nn.__class__.__name__ == 'MinimumVIRENN':
            citations.append('@article{shannon_actacryst_1976, title={'
                'Revised effective ionic radii and systematic studies of '
                'interatomic distances in halides and chalcogenides}, '
                'journal={Acta Crystallographica}, author={<NAME>.}, '
                'number={A32}, pages={751-767}, doi={'
                '10.1107/S0567739476001551}, year={1976}')
        if self.nn.__class__.__name__ in [
                'MinimumDistanceNN', 'MinimumOKeeffeNN', 'MinimumVIRENN']:
            citations.append('@article{zimmermann_frontmater_2017, '
                'title={Assessing local structure motifs using order '
                'parameters for motif recognition, interstitial '
                'identification, and diffusion path characterization}, '
                'journal={Frontiers in Materials}, author={Zimmermann, '
                '<NAME>. and <NAME>. and <NAME>. and <NAME>.}, '
                'number={4:34}, doi={10.3389/fmats.2017.00034}, year={2017}}')
        return citations
def implementors(self):
return ['Qi Wang']
class GaussianSymmFunc(BaseFeaturizer):
    """
    Gaussian symmetry function features suggested by Behler et al.

    The functions are based on pair distances and angles, to approximate the
    functional dependence of local energies, originally used in the fitting
    of machine-learning potentials.
    The symmetry functions can be divided into a set of radial functions
    (g2 function), and a set of angular functions (g4 function).
    The number of symmetry functions returned is based on parameters
    of etas_g2, etas_g4, zetas_g4 and gammas_g4.
    See the original papers for more details:
    "Atom-centered symmetry functions for constructing high-dimensional
    neural network potentials", <NAME>, J Chem Phys 134, 074106 (2011).
    The cutoff function is taken as the polynomial form (cosine_cutoff)
    to give a smoothed truncation.
    A Fortran and a different Python version can be found in the code
    Amp: Atomistic Machine-learning Package
    (https://bitbucket.org/andrewpeterson/amp).

    Args:
        etas_g2 (list of floats): etas used in radial functions.
            (default: [0.05, 4., 20., 80.])
        etas_g4 (list of floats): etas used in angular functions.
            (default: [0.005])
        zetas_g4 (list of floats): zetas used in angular functions.
            (default: [1., 4.])
        gammas_g4 (list of floats): gammas used in angular functions.
            (default: [+1., -1.])
        cutoff (float): cutoff distance. (default: 6.5)
    """
    def __init__(self, etas_g2=None, etas_g4=None, zetas_g4=None,
                 gammas_g4=None, cutoff=6.5):
        # Any falsy argument (None, empty list) selects the default
        # parameter set documented in the class docstring.
        self.etas_g2 = etas_g2 if etas_g2 else [0.05, 4., 20., 80.]
        self.etas_g4 = etas_g4 if etas_g4 else [0.005]
        self.zetas_g4 = zetas_g4 if zetas_g4 else [1., 4.]
        self.gammas_g4 = gammas_g4 if gammas_g4 else [+1., -1.]
        self.cutoff = cutoff
    @staticmethod
    def cosine_cutoff(rs, cutoff):
        """
        Polynomial cutoff function to give a smoothed truncation of the
        Gaussian symmetry functions.

        Args:
            rs (ndarray): distances to elements.
            cutoff (float): cutoff distance.

        Returns:
            (ndarray) cutoff function values, exactly zero beyond the cutoff.
        """
        cutoff_fun = 0.5 * (np.cos(np.pi * rs / cutoff) + 1.)
        # The cosine form is only meaningful inside the cutoff sphere;
        # force exact zeros outside it.
        cutoff_fun[rs > cutoff] = 0
        return cutoff_fun
    @staticmethod
    def g2(eta, rs, cutoff):
        """
        Gaussian radial symmetry function of the center atom,
        given an eta parameter.

        Args:
            eta (float): radial function parameter.
            rs (ndarray): distances from the central atom to each neighbor.
            cutoff (float): cutoff distance.

        Returns:
            (float) Gaussian radial symmetry function (summed over neighbors).
        """
        ridge = (np.exp(-eta * (rs ** 2.) / (cutoff ** 2.)) *
                 GaussianSymmFunc.cosine_cutoff(rs, cutoff))
        return ridge.sum()
    @staticmethod
    def g4(etas, zetas, gammas, neigh_dist, neigh_coords, cutoff):
        """
        Gaussian angular symmetry function of the center atom,
        given sets of eta, zeta and gamma parameters.

        Args:
            etas ([float]): angular function eta parameters.
            zetas ([float]): angular function zeta parameters.
            gammas ([float]): angular function gamma parameters.
            neigh_dist (ndarray): distances from the central atom to each
                neighbor.
            neigh_coords (ndarray): coordinates of neighboring atoms, with
                respect to the central atom.
            cutoff (float): cutoff parameter.

        Returns:
            (ndarray) Gaussian angular symmetry function values for all
            combinations of eta, zeta, gamma (one entry per combination).
        """
        output = np.zeros((len(etas)*len(zetas)*len(gammas),))
        # Loop over each neighbor j; the sliced arrays cover only k > j so
        # that each unordered neighbor pair (j, k) is visited exactly once.
        for j, neigh_j in enumerate(neigh_coords):
            # Compute the distance of each neighbor (k) to r
            r_ij = neigh_dist[j]
            d_jk = neigh_coords[(j+1):] - neigh_coords[j]
            r_jk = np.linalg.norm(d_jk, 2, axis=1)
            r_ik = neigh_dist[(j+1):]
            # Compute the cosine term
            cos_theta = np.dot(neigh_coords[(j + 1):], neigh_coords[j]) / r_ij / r_ik
            # Compute the cutoff function (independent of eta/zeta/gamma)
            cutoff_fun = GaussianSymmFunc.cosine_cutoff(np.array([r_ij]), cutoff) * \
                         GaussianSymmFunc.cosine_cutoff(r_ik, cutoff) * \
                         GaussianSymmFunc.cosine_cutoff(r_jk, cutoff)
            # Compute the g4 for each combination of eta/gamma/zeta
            ind = 0
            for eta in etas:
                # Compute the eta term
                eta_term = np.exp(-eta * (r_ij ** 2. + r_ik ** 2. + r_jk ** 2.) /
                                  (cutoff ** 2.)) * cutoff_fun
                for zeta in zetas:
                    for gamma in gammas:
                        term = (1. + gamma * cos_theta) ** zeta * eta_term
                        # 2^(1-zeta) prefactor follows Behler's definition.
                        output[ind] += term.sum() * 2. ** (1. - zeta)
                        ind += 1
        return output
    def featurize(self, struct, idx):
        """
        Get Gaussian symmetry function features of site with given index
        in input structure.

        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure.

        Returns:
            (list of floats): Gaussian symmetry function features,
            all G2 values followed by all G4 values.
        """
        gaussian_funcs = []
        # Get the neighbors within the cutoff
        neighbors = struct.get_neighbors(struct[idx], self.cutoff)
        # Get coordinates of the neighbors, relative to the central atom
        neigh_coords = np.subtract([neigh[0].coords for neigh in neighbors], struct[idx].coords)
        # Get the distances for later use
        neigh_dists = np.array([neigh[1] for neigh in neighbors])
        # Compute all G2
        for eta_g2 in self.etas_g2:
            gaussian_funcs.append(self.g2(eta_g2, neigh_dists, self.cutoff))
        # Compute all G4s
        gaussian_funcs.extend(GaussianSymmFunc.g4(self.etas_g4, self.zetas_g4, self.gammas_g4,
                                                  neigh_dists, neigh_coords, self.cutoff))
        return gaussian_funcs
    def feature_labels(self):
        # One label per G2 eta, then one per (eta, zeta, gamma) G4 combo;
        # the nesting order matches the loop order used in g4().
        return ['G2_{}'.format(eta_g2) for eta_g2 in self.etas_g2] + \
               ['G4_{}_{}_{}'.format(eta_g4, zeta_g4, gamma_g4)
                for eta_g4 in self.etas_g4
                for zeta_g4 in self.zetas_g4
                for gamma_g4 in self.gammas_g4]
    def citations(self):
        gsf_citation = (
            '@Article{Behler2011, author = {<NAME>}, '
            'title = {Atom-centered symmetry functions for constructing '
            'high-dimensional neural network potentials}, '
            'journal = {The Journal of Chemical Physics}, year = {2011}, '
            'volume = {134}, number = {7}, pages = {074106}, '
            'doi = {10.1063/1.3553717}}')
        amp_citation = (
            '@Article{Khorshidi2016, '
            'author = {<NAME> and <NAME>}, '
            'title = {Amp : A modular approach to machine learning in '
            'atomistic simulations}, '
            'journal = {Computer Physics Communications}, year = {2016}, '
            'volume = {207}, pages = {310--324}, '
            'doi = {10.1016/j.cpc.2016.05.010}}')
        return [gsf_citation, amp_citation]
    def implementors(self):
        return ['<NAME>']
class EwaldSiteEnergy(BaseFeaturizer):
    """Compute site energy from Coulombic interactions.

    User notes:
        - This class uses the charges that are already defined for the
          structure.
        - Ewald summations can be expensive. If you are evaluating every
          site in many large structures, run all of the sites for each
          structure at the same time: the Ewald result for the most
          recently seen structure is cached, so looping over sites within
          a structure is much faster than alternating between structures.

    Features:
        ewald_site_energy - Energy for the site computed from Coulombic
            interactions
    """

    def __init__(self, accuracy=None):
        """
        Args:
            accuracy (int): Accuracy of Ewald summation, number of decimal places
        """
        self.accuracy = accuracy
        # Cache of the most recent (structure, EwaldSummation) pair.
        self.__last_structure = None
        self.__last_ewald = None

    def featurize(self, strc, idx):
        """
        Args:
            strc (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure.

        Returns:
            ([float]) - Electrostatic energy of the site
        """
        # Identity (not equality) comparison keeps the cache check cheap.
        if strc is not self.__last_structure:
            self.__last_structure = strc
            self.__last_ewald = EwaldSummation(strc, acc_factor=self.accuracy)
        return [self.__last_ewald.get_site_energy(idx)]

    def feature_labels(self):
        return ["ewald_site_energy"]

    def implementors(self):
        return ["<NAME>"]

    def citations(self):
        return ["@Article{Ewald1921,"
                "author = {<NAME>.},"
                "doi = {10.1002/andp.19213690304},"
                "issn = {00033804},"
                "journal = {Annalen der Physik},"
                "number = {3},"
                "pages = {253--287},"
                "title = {{Die Berechnung optischer und elektrostatischer Gitterpotentiale}},"
                "url = {http://doi.wiley.com/10.1002/andp.19213690304},"
                "volume = {369},"
                "year = {1921}"
                "}"]
class ChemEnvSiteFingerprint(BaseFeaturizer):
    """
    Resemblance of given sites to ideal environments.

    Site fingerprint computed from pymatgen's ChemEnv package
    that provides resemblance percentages of a given site
    to ideal environments.

    Args:
        cetypes ([str]): chemical environments (CEs) to be considered.
        strategy (ChemenvStrategy): ChemEnv neighbor-finding strategy.
        geom_finder (LocalGeometryFinder): ChemEnv local geometry finder.
        max_csm (float): maximum continuous symmetry measure (CSM;
            default of 8 taken from chemenv). Note that any CSM
            larger than max_csm will be set to max_csm in order
            to avoid negative values (i.e., all features are
            constrained to be between 0 and 1).
        max_dist_fac (float): maximum distance factor (default: 1.41).
    """

    @staticmethod
    def from_preset(preset):
        """
        Use a standard collection of CE types and
        choose your ChemEnv neighbor-finding strategy.

        Args:
            preset (str): preset types ("simple" or "multi_weights").

        Returns:
            ChemEnvSiteFingerprint object from a preset.
        """
        cetypes = [
            'S:1', 'L:2', 'A:2', 'TL:3', 'TY:3', 'TS:3', 'T:4',
            'S:4', 'SY:4', 'SS:4', 'PP:5', 'S:5', 'T:5', 'O:6',
            'T:6', 'PP:6', 'PB:7', 'ST:7', 'ET:7', 'FO:7', 'C:8',
            'SA:8', 'SBT:8', 'TBT:8', 'DD:8', 'DDPN:8', 'HB:8',
            'BO_1:8', 'BO_2:8', 'BO_3:8', 'TC:9', 'TT_1:9',
            'TT_2:9', 'TT_3:9', 'HD:9', 'TI:9', 'SMA:9', 'SS:9',
            'TO_1:9', 'TO_2:9', 'TO_3:9', 'PP:10', 'PA:10',
            'SBSA:10', 'MI:10', 'S:10', 'H:10', 'BS_1:10',
            'BS_2:10', 'TBSA:10', 'PCPA:11', 'H:11', 'SH:11',
            'CO:11', 'DI:11', 'I:12', 'PBP:12', 'TT:12', 'C:12',
            'AC:12', 'SC:12', 'S:12', 'HP:12', 'HA:12', 'SH:13',
            'DD:20']
        lgf = LocalGeometryFinder()
        lgf.setup_parameters(
            centering_type='centroid',
            include_central_site_in_centroid=True,
            structure_refinement=lgf.STRUCTURE_REFINEMENT_NONE)
        # Resolve the neighbor-finding strategy first, then build the
        # featurizer from the shared CE-type list and geometry finder.
        if preset == "simple":
            strategy = SimplestChemenvStrategy(distance_cutoff=1.4,
                                               angle_cutoff=0.3)
        elif preset == "multi_weights":
            strategy = \
                MultiWeightsChemenvStrategy.stats_article_weights_parameters()
        else:
            raise RuntimeError('unknown neighbor-finding strategy preset.')
        return ChemEnvSiteFingerprint(cetypes, strategy, lgf)

    def __init__(self, cetypes, strategy, geom_finder, max_csm=8,
                 max_dist_fac=1.41):
        self.cetypes = tuple(cetypes)
        self.strat = strategy
        self.lgf = geom_finder
        self.max_csm = max_csm
        self.max_dist_fac = max_dist_fac

    def featurize(self, struct, idx):
        """
        Get ChemEnv fingerprint of site with given index in input
        structure.

        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure struct.

        Returns:
            (numpy array): resemblance fraction of target site to ideal
                local environments.
        """
        self.lgf.setup_structure(structure=struct)
        env = self.lgf.compute_structure_environments(
            only_indices=[idx],
            maximum_distance_factor=self.max_dist_fac)
        resemblance = []
        for cetype in self.cetypes:
            try:
                csms = env.get_csms(idx, cetype)
                # Missing CSM counts as the worst possible match.
                csm = csms[0]['symmetry_measure'] if len(csms) != 0 \
                    else self.max_csm
                # Clip to max_csm so the feature stays within [0, 1].
                csm = csm if csm < self.max_csm else self.max_csm
                resemblance.append(1 - csm / self.max_csm)
            except IndexError:
                resemblance.append(0)
        return np.array(resemblance)

    def feature_labels(self):
        return list(self.cetypes)

    def citations(self):
        return ['@article{waroquiers_chemmater_2017, '
                'title={Statistical analysis of coordination environments '
                'in oxides}, journal={Chemistry of Materials},'
                'author={<NAME>. and <NAME>.'
                'and <NAME>. and <NAME>. and <NAME>. '
                'and <NAME>. and <NAME>. and <NAME>. '
                'and <NAME>. and <NAME>.}, year={2017}}']

    def implementors(self):
        return ['<NAME>. <NAME>']
class CoordinationNumber(BaseFeaturizer):
    """
    Number of first nearest neighbors of a site.

    Determines the number of nearest neighbors of a site using one of
    pymatgen's NearNeighbor classes. These nearest neighbor calculators
    can return weights related to the proximity of each neighbor to this
    site. It is possible to take these weights into account to prevent
    the coordination number from changing discontinuously with small
    perturbations of a structure, either by summing the total weights
    or using the normalization method presented by
    [Ward et al.](http://link.aps.org/doi/10.1103/PhysRevB.96.014107)

    Features:
        CN_[method] - Coordination number computed using a certain method
            for calculating nearest neighbors.
    """

    @staticmethod
    def from_preset(preset, **kwargs):
        """
        Use one of the standard instances of a given NearNeighbor class.

        Args:
            preset (str): preset type ("VoronoiNN", "JmolNN",
                "MiniumDistanceNN", "MinimumOKeeffeNN", or "MinimumVIRENN").
            **kwargs: allow to pass args to the NearNeighbor class.

        Returns:
            CoordinationNumber from a preset.
        """
        # Resolve the NN class by name from pymatgen's local_env module.
        nn_cls = getattr(pymatgen.analysis.local_env, preset)
        return CoordinationNumber(nn_cls(**kwargs))

    def __init__(self, nn=None, use_weights='none'):
        """Initialize the featurizer.

        Args:
            nn (NearestNeighbor): method used to determine coordination
                number; defaults to VoronoiNN when not provided.
            use_weights (str): method used to account for the weights of
                neighbors:
                'none' - do not use weights when computing coordination number
                'sum' - use the sum of weights as the coordination number
                'effective' - compute the 'effective coordination number',
                    which is computed as
                    :math:`\\frac{(\\sum_n w_n)^2}{\\sum_n w_n^2}`
        """
        self.nn = nn or VoronoiNN()
        self.use_weights = use_weights

    def featurize(self, struct, idx):
        """
        Get the coordination number of the site with the given index in
        the input structure.

        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure struct.

        Returns:
            [float] - Coordination number
        """
        if self.use_weights is None or self.use_weights == 'none':
            return [self.nn.get_cn(struct, idx, use_weights=False)]
        if self.use_weights == 'sum':
            return [self.nn.get_cn(struct, idx, use_weights=True)]
        if self.use_weights == 'effective':
            # TODO: Should this weighting code go in pymatgen? I'm not sure
            # if it is even necessary to distinguish it from the 'sum'
            # method -lw
            nn_info = get_nearest_neighbors(self.nn, struct, idx)
            weights = [entry['weight'] for entry in nn_info]
            return [np.sum(weights) ** 2 / np.sum(np.power(weights, 2))]
        raise ValueError('Weighting method not recognized: ' + str(self.use_weights))

    def feature_labels(self):
        # TODO: Should names contain weighting scheme? -lw
        return ['CN_{}'.format(self.nn.__class__.__name__)]

    def citations(self):
        # Reference list keyed off the configured NN method's class name.
        citations = []
        if self.nn.__class__.__name__ == 'VoronoiNN':
            citations.append('@article{voronoi_jreineangewmath_1908, title={'
                'Nouvelles applications des param\\`{e}tres continus \\`{a} la '
                'th\'{e}orie des formes quadratiques. Sur quelques '
                'propri\'{e}t\'{e}s des formes quadratiques positives'
                ' parfaites}, journal={Journal f\"ur die reine und angewandte '
                'Mathematik}, number={133}, pages={97-178}, year={1908}}')
            citations.append('@article{dirichlet_jreineangewmath_1850, title={'
                '\"{U}ber die Reduction der positiven quadratischen Formen '
                'mit drei unbestimmten ganzen Zahlen}, journal={Journal '
                'f\"ur die reine und angewandte Mathematik}, number={40}, '
                'pages={209-227}, doi={10.1515/crll.1850.40.209}, year={1850}}')
        if self.nn.__class__.__name__ == 'JmolNN':
            citations.append('@misc{jmol, title = {Jmol: an open-source Java '
                'viewer for chemical structures in 3D}, howpublished = {'
                '\\url{http://www.jmol.org/}}}')
        if self.nn.__class__.__name__ == 'MinimumOKeeffeNN':
            citations.append('@article{okeeffe_jamchemsoc_1991, title={Atom '
                'sizes and bond lengths in molecules and crystals}, journal='
                '{Journal of the American Chemical Society}, author={'
                '<NAME>. and <NAME>.}, number={113}, pages={'
                '3226-3229}, doi={doi:10.1021/ja00009a002}, year={1991}}')
        if self.nn.__class__.__name__ == 'MinimumVIRENN':
            citations.append('@article{shannon_actacryst_1976, title={'
                'Revised effective ionic radii and systematic studies of '
                'interatomic distances in halides and chalcogenides}, '
                'journal={Acta Crystallographica}, author={<NAME>.}, '
                'number={A32}, pages={751-767}, doi={'
                '10.1107/S0567739476001551}, year={1976}')
        if self.nn.__class__.__name__ in [
                'MinimumDistanceNN', 'MinimumOKeeffeNN', 'MinimumVIRENN']:
            citations.append('@article{zimmermann_frontmater_2017, '
                'title={Assessing local structure motifs using order '
                'parameters for motif recognition, interstitial '
                'identification, and diffusion path characterization}, '
                'journal={Frontiers in Materials}, author={Zimmermann, '
                '<NAME>. and <NAME>. and <NAME>. and <NAME>.}, '
                'number={4:34}, doi={10.3389/fmats.2017.00034}, year={2017}}')
        return citations

    def implementors(self):
        return ['<NAME>', '<NAME>']
class GeneralizedRadialDistributionFunction(BaseFeaturizer):
    """
    Compute the general radial distribution function (GRDF) for a site.

    The GRDF is a radial measure of crystal order around a site. There are
    two featurizing modes:

    1. GRDF: (recommended) - n_bins length vector
        In GRDF mode, the GRDF is computed by considering all sites around
        a central site (i.e., no sites are omitted when computing the
        GRDF). The features output from this mode will be vectors with
        length n_bins.

    2. pairwise GRDF: (advanced users) - n_bins x n_sites matrix
        In this mode, GRDFs are still computed around a central site, but
        only one other site (and its translational equivalents) is used to
        compute each GRDF (e.g. site 1 with site 2 and the translational
        equivalents of site 2). This results in an n_sites x n_bins matrix
        of features. Requires `fit` for determining the max number of
        sites (and thus the feature labels).

    The GRDF is a generalization of the partial radial distribution
    function (PRDF). In contrast with the PRDF, the bins of the GRDF are
    not mutually exclusive and need not carry a constant weight of 1. The
    PRDF is a case of the GRDF when the bins are rectangular functions.
    Examples of other functions to use with the GRDF are Gaussian, trig,
    and Bessel functions.
    See :func:`~matminer.featurizers.utils.grdf` for a full list of
    available binning functions.

    There are two preset conditions:
        gaussian: bin functions are gaussians
        histogram: bin functions are rectangular functions

    Args:
        bins ([AbstractPairwise]): List of pairwise binning functions. Each
            of these functions must implement the AbstractPairwise class.
        cutoff (float): maximum distance to look for neighbors
        mode (str): the featurizing mode. supported options are:
            'GRDF' and 'pairwise_GRDF'
    """

    def __init__(self, bins, cutoff=20.0, mode='GRDF'):
        self.bins = bins
        self.cutoff = cutoff
        if mode not in ['GRDF', 'pairwise_GRDF']:
            raise AttributeError('{} is not a valid GRDF mode. try '
                                 '"GRDF" or "pairwise_GRDF"'.format(mode))
        self.mode = mode
        # Populated by fit(); required for labels in pairwise_GRDF mode.
        self.fit_labels = None

    def fit(self, X, y=None, **fit_kwargs):
        """
        Determine the maximum number of sites in X to assign correct
        feature labels.

        Args:
            X ([tuple]): training data; tuple values should be (struc, idx)

        Returns:
            self
        """
        max_sites = max(len(X[i][0]._sites) for i in range(len(X)))
        # BUG FIX: labels are generated site-major (site index in the outer
        # loop, bin in the inner loop) so they line up with the order in
        # which featurize() emits pairwise-GRDF values (it iterates sites
        # in its outer loop and bins in its inner loop). The previous
        # bin-major ordering mislabeled every pairwise feature.
        self.fit_labels = ['site2 {} {}'.format(i, bin_fn.name())
                           for i in range(max_sites)
                           for bin_fn in self.bins]
        return self

    def featurize(self, struct, idx):
        """
        Get GRDF of the input structure.

        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure struct.

        Returns:
            Flattened list of GRDF values. For each run mode the list
            order is:
                GRDF:          bin#
                pairwise GRDF: site2# bin#
            The site2# corresponds to a pymatgen site index and bin#
            corresponds to one of the bin functions.
        """
        if not struct.is_ordered:
            raise ValueError("Disordered structure support not built yet")

        # Neighbors of the target site within the cutoff sphere;
        # include_index=True yields (site, distance, site_index) entries.
        sites = struct._sites
        central_site = sites[idx]
        neighbors_lst = struct.get_neighbors(central_site, self.cutoff,
                                             include_index=True)
        site_indices = range(len(sites))

        # Collect pairwise distances according to the run mode.
        if self.mode == 'GRDF':
            # A single collection holding every neighbor distance.
            distance_collection = [[neighbor[1] for neighbor in neighbors_lst]]
        else:
            # One collection per other site (and its translational images).
            distance_collection = [
                [neighbor[1] for neighbor in neighbors_lst
                 if neighbor[2] == site_idx] for site_idx in site_indices]

        # Bin counts for each list of pairwise distances. Outer loop over
        # sites, inner over bins -> site-major order (matches fit labels).
        bin_counts = []
        for values in distance_collection:
            bin_counts.append([sum(bin_fn(values)) for bin_fn in self.bins])

        # Normalize the bin counts by each bin's "volume" to get features.
        volumes = [bin_fn.volume(self.cutoff) for bin_fn in self.bins]
        features = []
        for values in bin_counts:
            features.extend(np.array(values) / np.array(volumes))
        return features

    def feature_labels(self):
        if self.mode == 'GRDF':
            return [bin_fn.name() for bin_fn in self.bins]
        if self.fit_labels:
            return self.fit_labels
        raise AttributeError('the fit method must be called first, to '
                             'determine the correct feature labels.')

    @staticmethod
    def from_preset(preset, width=1.0, spacing=1.0, cutoff=10, mode='GRDF'):
        """
        Preset bin functions for this featurizer. Example use:
            >>> GRDF = GeneralizedRadialDistributionFunction.from_preset('gaussian')
            >>> GRDF.featurize(struct, idx)

        Args:
            preset (str): shape of bin (either 'gaussian' or 'histogram')
            width (float): bin width. std dev for gaussian, width for histogram
            spacing (float): the spacing between bin centers
            cutoff (float): maximum distance to look for neighbors
            mode (str): featurizing mode. either 'GRDF' or 'pairwise_GRDF'
        """
        # Generate bin functions spanning [0, cutoff) at the given spacing.
        if preset == "gaussian":
            bins = [Gaussian(width, center)
                    for center in np.arange(0., cutoff, spacing)]
        elif preset == "histogram":
            bins = [Histogram(start, width)
                    for start in np.arange(0, cutoff, spacing)]
        else:
            raise ValueError('Not a valid preset condition.')
        return GeneralizedRadialDistributionFunction(bins, cutoff=cutoff,
                                                     mode=mode)

    def citations(self):
        return ['@article{PhysRevB.95.144110, title = {Representation of compo'
                'unds for machine-learning prediction of physical properties},'
                ' author = {<NAME> and <NAME> and Nakayama, '
                'Keita and Takahashi, Akira and Tanaka, Isao},'
                'journal = {Phys. Rev. B}, volume = {95}, issue = {14}, '
                'pages = {144110}, year = {2017}, publisher = {American Physic'
                'al Society}, doi = {10.1103/PhysRevB.95.144110}}']

    def implementors(self):
        return ["<NAME>", "<NAME>", "<NAME>"]
class AngularFourierSeries(BaseFeaturizer):
    """
    Compute the angular Fourier series (AFS), including both angular and
    radial info.

    The AFS is the product of a pairwise distance function (g_n, g_n')
    between two pairs of atoms (sharing the common central site) and the
    cosine of the angle between the two pairs. The AFS is a 2-dimensional
    feature (the axes are g_n, g_n').

    Examples of distance functionals are square functions, Gaussian, trig
    functions, and Bessel functions. An example for Gaussian:
        lambda d: exp( -(d - d_n)**2 ), where d_n is the coefficient for g_n
    See :func:`~matminer.featurizers.utils.grdf` for a full list of
    available binning functions.

    There are two preset conditions:
        gaussian: bin functions are gaussians
        histogram: bin functions are rectangular functions

    Features:
        AFS ([gn], [gn']) - Angular Fourier Series between binning
            functions (g1 and g2)

    Args:
        bins: ([AbstractPairwise]) a list of binning functions that
            implement the AbstractPairwise base class
        cutoff: (float) maximum distance to look for neighbors. The
            featurizer will run slowly for large distance cutoffs
            because the number of neighbor pairs scales as
            the square of the number of neighbors
    """
    def __init__(self, bins, cutoff=10.0):
        self.bins = bins      # pairwise binning functions (the g_n set)
        self.cutoff = cutoff  # neighbor-search radius
    def featurize(self, struct, idx):
        """
        Get AFS of the input structure.

        Args:
            struct (Structure): Pymatgen Structure object.
            idx (int): index of target site in structure struct.

        Returns:
            Flattened list of AFS values. the list order is:
                g_n g_n'
        """
        if not struct.is_ordered:
            raise ValueError("Disordered structure support not built yet")
        # Generate list of neighbor position vectors (relative to central
        # atom) and distances from each central site as tuples
        sites = struct._sites
        central_site = sites[idx]
        neighbors_lst = struct.get_neighbors(central_site, self.cutoff)
        neighbor_collection = [
            (neighbor[0].coords - central_site.coords, neighbor[1])
            for neighbor in neighbors_lst]
        # Generate exhaustive permutations of neighbor pairs around each
        # central site (order matters). Does not allow repeat elements (i.e.
        # there are two distinct sites in every permutation)
        neighbor_tuples = itertools.permutations(neighbor_collection, 2)
        # Generate cos(theta) between neighbor pairs for each central site.
        # Also, retain data on neighbor distances for each pair
        # process with matrix algebra, we really need the speed here
        # NOTE: `data` is an object array of (coord vector, distance)
        # tuples, shape (n_pairs, 2, 2); vstack rebuilds float matrices
        # from the object entries.
        data = np.array(list(neighbor_tuples))
        v1, v2 = np.vstack(data[:, 0, 0]), np.vstack(data[:, 1, 0])
        distances = data[:, :, 1]
        # Column 0 holds cos(angle), clipped to [-1, 1] to guard against
        # floating-point overshoot; columns 1-2 hold the two pair distances.
        neighbor_pairs = np.concatenate([
            np.clip(np.einsum('ij,ij->i', v1, v2) /
                    np.linalg.norm(v1, axis=1) /
                    np.linalg.norm(v2, axis=1), -1.0, 1.0).reshape(-1, 1),
            distances], axis=1)
        # Generate distance functional matrix (g_n, g_n')
        bin_combos = list(itertools.product(self.bins, repeat=2))
        # Compute AFS values for each element of the bin matrix
        # need to cast arrays as floats to use np.exp
        cos_angles, dist1, dist2 = neighbor_pairs[:, 0].astype(float),\
            neighbor_pairs[:, 1].astype(float),\
            neighbor_pairs[:, 2].astype(float)
        features = [sum(combo[0](dist1) * combo[1](dist2) *
                        cos_angles) for combo in bin_combos]
        return features
    def feature_labels(self):
        # Labels follow the same (g_n, g_n') product order as featurize().
        bin_combos = list(itertools.product(self.bins, repeat=2))
        return ['AFS ({}, {})'.format(combo[0].name(), combo[1].name())
                for combo in bin_combos]
    @staticmethod
    def from_preset(preset, width=0.5, spacing=0.5, cutoff=10):
        """
        Preset bin functions for this featurizer. Example use:
            >>> AFS = AngularFourierSeries.from_preset('gaussian')
            >>> AFS.featurize(struct, idx)

        Args:
            preset (str): shape of bin (either 'gaussian' or 'histogram')
            width (float): bin width. std dev for gaussian, width for histogram
            spacing (float): the spacing between bin centers
            cutoff (float): maximum distance to look for neighbors
        """
        # Generate bin functions
        if preset == "gaussian":
            bins = []
            for center in np.arange(0., cutoff, spacing):
                bins.append(Gaussian(width, center))
        elif preset == "histogram":
            bins = []
            for start in np.arange(0, cutoff, spacing):
                bins.append(Histogram(start, width))
        else:
            raise ValueError('Not a valid preset condition.')
        return AngularFourierSeries(bins, cutoff=cutoff)
    def citations(self):
        return ['@article{PhysRevB.95.144110, title = {Representation of compo'
                'unds for machine-learning prediction of physical properties},'
                ' author = {<NAME> and <NAME> and Nakayama, '
                'Keita and <NAME> and Tanaka, Isao},'
                'journal = {Phys. Rev. B}, volume = {95}, issue = {14}, '
                'pages = {144110}, year = {2017}, publisher = {American Physic'
                'al Society}, doi = {10.1103/PhysRevB.95.144110}}']
    def implementors(self):
        return ["<NAME>", "<NAME>"]
# TODO: Figure out whether to take NN-counting method as an option (see VoronoiFingerprint)
class LocalPropertyDifference(BaseFeaturizer):
    """
    Differences in elemental properties between site and its neighboring sites.

    Uses the Voronoi tessellation of the structure to determine the
    neighbors of the site, and assigns each neighbor (:math:`n`) a
    weight (:math:`A_n`) that corresponds to the area of the facet
    on the tessellation corresponding to that neighbor.
    The local property difference is then computed by
    :math:`\\frac{\sum_n {A_n |p_n - p_0|}}{\sum_n {A_n}}`
    where :math:`p_n` is the property (e.g., atomic number) of a neighbor
    and :math:`p_0` is the property of a site. If signed parameter is assigned
    True, signed difference of the properties is returned instead of absolute
    difference.

    Features:
        - "local difference in [property]" - Weighted average
            of differences between an elemental property of a site and
            that of each of its neighbors, weighted by size of face on
            Voronoi tessellation

    References:
        `Ward et al. _PRB_ 2017 <http://link.aps.org/doi/10.1103/PhysRevB.96.024104>`_
    """

    def __init__(self, data_source=None, weight='area',
                 properties=('Electronegativity',), signed=False):
        """ Initialize the featurizer

        Args:
            data_source (AbstractData) - Class from which to retrieve
                elemental properties. None (the default) lazily creates a
                MagpieData instance, matching the convention used by
                SiteElementalProperty.
            weight (str) - What aspect of each voronoi facet to use to
                weigh each neighbor (see VoronoiNN)
            properties ([str]) - List of properties to use (default=['Electronegativity'])
            signed (bool) - whether to return absolute difference or signed difference of
                    properties(default=False (absolute difference))
        """
        # Lazy default avoids instantiating MagpieData at module import time,
        # which the old `data_source=MagpieData()` default did.
        self.data_source = data_source or MagpieData()
        self.properties = properties
        self.weight = weight
        self.signed = signed

    @staticmethod
    def from_preset(preset):
        """
        Create a new LocalPropertyDifference class according to a preset

        Args:
            preset (str) - Name of preset
        Raises:
            ValueError: if the preset name is not recognized
        """
        if preset == "ward-prb-2017":
            return LocalPropertyDifference(
                data_source=MagpieData(),
                properties=["Number", "MendeleevNumber", "AtomicWeight",
                            "MeltingT", "Column", "Row", "CovalentRadius",
                            "Electronegativity", "NsValence", "NpValence",
                            "NdValence", "NfValence", "NValence", "NsUnfilled",
                            "NpUnfilled", "NdUnfilled", "NfUnfilled",
                            "NUnfilled", "GSvolume_pa", "GSbandgap",
                            "GSmagmom", "SpaceGroupNumber"]
            )
        else:
            raise ValueError('Unrecognized preset: ' + preset)

    def featurize(self, strc, idx):
        # Get the targeted site
        my_site = strc[idx]

        # Get the Voronoi tessellation of the site; each entry carries the
        # neighboring site and the facet weight used for averaging
        nn = get_nearest_neighbors(VoronoiNN(weight=self.weight), strc, idx)

        # Get the element and weight of each neighbor
        elems = [n['site'].specie for n in nn]
        weights = [n['weight'] for n in nn]

        # Compute the difference for each property
        output = np.zeros((len(self.properties),))
        total_weight = np.sum(weights)
        for i, p in enumerate(self.properties):
            my_prop = self.data_source.get_elemental_property(my_site.specie, p)
            n_props = self.data_source.get_elemental_properties(elems, p)
            if not self.signed:
                output[i] = np.dot(weights, np.abs(np.subtract(n_props, my_prop))) / total_weight
            else:
                output[i] = np.dot(weights, np.subtract(n_props, my_prop)) / total_weight
        return output

    def feature_labels(self):
        # Label order matches the order of self.properties (and featurize output)
        if not self.signed:
            return ['local difference in ' + p for p in self.properties]
        else:
            return ['local signed difference in ' + p for p in self.properties]

    def citations(self):
        return ["@article{Ward2017,"
                "author = {<NAME> and <NAME> "
                "and <NAME> and Hegde, <NAME>. "
                "and <NAME> and <NAME> "
                "and Wolverton, Chris},"
                "doi = {10.1103/PhysRevB.96.024104},"
                "journal = {Physical Review B},"
                "pages = {024104},"
                "title = {{Including crystal structure attributes "
                "in machine learning models of formation energies "
                "via Voronoi tessellations}},"
                "url = {http://link.aps.org/doi/10.1103/PhysRevB.96.014107},"
                "volume = {96},year = {2017}}",
                '@article{jong_chen_notestine_persson_ceder_jain_asta_gamst_2016,'
                'title={A Statistical Learning Framework for Materials Science: '
                'Application to Elastic Moduli of k-nary Inorganic Polycrystalline Compounds}, '
                'volume={6}, DOI={10.1038/srep34256}, number={1}, journal={Scientific Reports}, '
                'author={<NAME> and <NAME> and Notestine, Randy and Persson, '
                'Kristin and Ceder, Gerbrand and Jain, Anubhav and Asta, Mark and Gamst, Anthony}, '
                'year={2016}, month={Mar}}'
                ]

    def implementors(self):
        return ['<NAME>', '<NAME>']
class BondOrientationalParameter(BaseFeaturizer):
    r"""
    Averages of spherical harmonics of local neighbors

    Bond Orientational Parameters (BOPs) describe the local environment around an atom by
    considering the local symmetry of the bonds as computed using spherical harmonics.
    To create descriptors that are invariant to rotating the coordinate system, we use the
    average of all spherical harmonics of a certain degree - following the approach of
    `Steinhardt et al. <https://link.aps.org/doi/10.1103/PhysRevB.28.784>`_.
    We weigh the contributions of each neighbor with the solid angle of the Voronoi tessellation
    (see `Mickel et al. <https://aip.scitation.org/doi/abs/10.1063/1.4774084>`_ for further
    discussion). The weighing scheme makes these descriptors vary smoothly with small distortions
    of a crystal structure.

    In addition to the average spherical harmonics, this class can also compute the :math:`W` and
    :math:`\hat{W}` parameters proposed by `Steinhardt et al. <https://link.aps.org/doi/10.1103/PhysRevB.28.784>`_.

    Attributes:
        BOOP Q l=<n> - Average spherical harmonic for a certain degree, n.
        BOOP W l=<n> - W parameter for a certain degree of spherical harmonic, n.
        BOOP What l=<n> - :math:`\hat{W}` parameter for a certain degree of spherical harmonic, n.

    References:
        `Steinhardt et al., _PRB_ (1983) <https://link.aps.org/doi/10.1103/PhysRevB.28.784>`_
        `Seko et al., _PRB_ (2017) <http://link.aps.org/doi/10.1103/PhysRevB.95.144110>`_
    """

    def __init__(self, max_l=10, compute_w=False, compute_w_hat=False):
        """
        Initialize the featurizer

        Args:
            max_l (int) - Maximum spherical harmonic to consider
            compute_w (bool) - Whether to compute Ws as well
            compute_w_hat (bool) - Whether to compute What
        """
        # Weigh neighbors by the solid angle of their Voronoi facet so the
        # descriptors vary smoothly under small structural distortions
        self._nn = VoronoiNN(weight='solid_angle')
        self.max_l = max_l
        self.compute_W = compute_w
        self.compute_What = compute_w_hat

    def featurize(self, strc, idx):
        # Get the nearest neighbors of the atom
        nns = get_nearest_neighbors(self._nn, strc, idx)
        # Get the polar and azimuthal angles of each face.
        # NOTE(review): assumes each facet normal in poly_info is a unit
        # vector, so its z component equals cos(phi) — confirm against VoronoiNN
        phi = np.arccos([x['poly_info']['normal'][-1] for x in nns])
        theta = np.arctan2([x['poly_info']['normal'][1] for x in nns],
                           [x['poly_info']['normal'][0] for x in nns])
        # Get the weights for each neighbor, normalized to sum to 1
        weights = np.array([x['weight'] for x in nns])
        weights /= weights.sum()
        # Compute the spherical harmonics for the desired `l`s
        Qs = []
        Ws = []
        for l in range(1, self.max_l + 1):
            # Average the spherical harmonic over each neighbor, weighted by solid angle
            qlm = dict((m, np.dot(weights, sph_harm(m, l, theta, phi)))
                       for m in range(-l, l + 1))
            # Compute the rotation-invariant average over all m's
            # (the Q_l of Steinhardt et al.)
            Qs.append(np.sqrt(np.pi * 4 / (2 * l + 1) *
                              np.sum(np.abs(list(qlm.values())) ** 2)))
            # Compute the W, if desired
            if self.compute_W or self.compute_What:
                w = 0
                # Loop over all non-zero Wigner 3j coefficients
                for (m1, m2, m3), wcoeff in get_wigner_coeffs(l):
                    w += qlm[m1] * qlm[m2] * qlm[m3] * wcoeff
                Ws.append(w.real)
        # Compute Whats, if desired. Degrees with negligible Q are set to 0
        # to avoid dividing by (near) zero when normalizing W.
        if self.compute_What:
            Whats = [w / (q / np.sqrt(np.pi * 4 / (2 * l + 1))) ** 3 if abs(q) > 1.0e-6 else 0.0
                     for l, q, w in zip(range(1, self.max_l + 1), Qs, Ws)]
        # Compile the results. Always returns Qs, and optionally the W/What
        # (appended in that order, matching feature_labels)
        if self.compute_W:
            Qs += Ws
        if self.compute_What:
            Qs += Whats
        return Qs

    def feature_labels(self):
        # Label order must match featurize: Qs first, then Ws, then Whats
        q_labels = ['BOOP Q l={}'.format(l) for l in range(1, self.max_l+1)]
        if self.compute_W:
            q_labels += ['BOOP W l={}'.format(l) for l in range(1, self.max_l+1)]
        if self.compute_What:
            q_labels += ['BOOP What l={}'.format(l) for l in range(1, self.max_l + 1)]
        return q_labels

    def citations(self):
        return ["@article{Seko2017,"
                "author = {<NAME> <NAME> and Nakayama, "
                "Keita and <NAME> and Tanaka, Isao},"
                "doi = {10.1103/PhysRevB.95.144110},"
                "journal = {Physical Review B}, number = {14}, pages = {144110},"
                "title = {{Representation of compounds for machine-learning prediction of physical properties}},"
                "url = {http://link.aps.org/doi/10.1103/PhysRevB.95.144110},"
                "volume = {95},year = {2017}}",
                "@article{Steinhardt1983,"
                "author = {<NAME>. and <NAME>. and <NAME>},"
                "doi = {10.1103/PhysRevB.28.784}, journal = {Physical Review B},"
                "month = {jul}, number = {2}, pages = {784--805},"
                "title = {{Bond-orientational order in liquids and glasses}},"
                "url = {https://link.aps.org/doi/10.1103/PhysRevB.28.784}, "
                "volume = {28}, year = {1983}}"]

    def implementors(self):
        return ['<NAME>', '<NAME>']
class SiteElementalProperty(BaseFeaturizer):
    """
    Elemental properties of atom on a certain site

    Features:
        site [property] - Elemental property for this site

    References:
        `Seko et al., _PRB_ (2017) <http://link.aps.org/doi/10.1103/PhysRevB.95.144110>`_
        `Schmidt et al., _Chem Mater_. (2017) <http://dx.doi.org/10.1021/acs.chemmater.7b00156>`_
    """

    def __init__(self, data_source=None, properties=('Number',)):
        """Initialize the featurizer

        Args:
            data_source (AbstractData): Tool used to look up elemental properties
            properties ([string]): List of properties to use for features
        """
        # Fall back to Magpie's tabulated elemental data when no source is given
        self.data_source = MagpieData() if not data_source else data_source
        self.properties = properties
        # Filled in by from_preset so citations() reflects the preset's paper
        self._preset_citations = []

    def featurize(self, strc, idx):
        # Unwrap a Specie down to its bare Element for the property lookup
        target = strc[idx].specie
        if not isinstance(target, Element):
            target = target.element
        lookup = self.data_source.get_elemental_property
        return [lookup(target, prop) for prop in self.properties]

    def feature_labels(self):
        return ['site {}'.format(prop) for prop in self.properties]

    def citations(self):
        return self._preset_citations

    def implementors(self):
        return ['<NAME>']

    @staticmethod
    def from_preset(preset):
        """Create the class with pre-defined settings

        Args:
            preset (string): Desired preset
        Returns:
            SiteElementalProperty initialized with desired settings
        """
        if preset != "seko-prb-2017":
            raise ValueError('Unrecognized preset: {}'.format(preset))
        # Property set used by Seko et al., PRB 95, 144110 (2017)
        seko_properties = ["Number", "AtomicWeight", "Row", "Column",
                           "FirstIonizationEnergy", "SecondIonizationEnergy",
                           "ElectronAffinity", "Electronegativity",
                           "AllenElectronegativity", "VdWRadius",
                           "CovalentRadius", "AtomicRadius",
                           "ZungerPP-r_s", "ZungerPP-r_p",
                           "MeltingT", "BoilingT", "Density",
                           "MolarVolume", "HeatFusion", "HeatVaporization",
                           "LogThermalConductivity", "HeatCapacityMass"]
        featurizer = SiteElementalProperty(data_source=MagpieData(),
                                           properties=seko_properties)
        featurizer._preset_citations.append(
            "@article{Seko2017,"
            "author = {<NAME> and <NAME> and "
            "<NAME> and <NAME> and <NAME>},"
            "doi = {10.1103/PhysRevB.95.144110},"
            "journal = {Physical Review B}, number = {14},"
            "pages = {144110},"
            "title = {{Representation of compounds for machine-learning prediction of physical properties}},"
            "url = {http://link.aps.org/doi/10.1103/PhysRevB.95.144110},"
            "volume = {95}, year = {2017}}")
        return featurizer
@lru_cache(maxsize=32)
def get_wigner_coeffs(l):
    """Get the list of non-zero Wigner 3j triplets

    Args:
        l (int): Desired l
    Returns:
        List of tuples that contain:
            - ((int)) m coordinates of the triplet
            - (float) Wigner coefficient
    """
    # Evaluate the (symbolic) Wigner 3j symbol for each admissible m-triplet;
    # results are memoized per l by lru_cache
    coeffs = []
    for triplet in _iterate_wigner_3j(l):
        coeffs.append((triplet, float(wigner_3j(l, l, l, *triplet))))
    return coeffs
def _iterate_wigner_3j(l):
"""Iterator over all non-zero Wigner 3j triplets
Args:
l (int) - Desired l
Generates:
pairs of acceptable l's
"""
for m1 in range(-l, l+1):
for m2 in range(-l, l+1):
m3 = -1 * (m1 + m2)
if -l <= m3 <= l:
yield m1, m2, m3
class AverageBondLength(BaseFeaturizer):
    '''
    Determines the average bond length between one specific site
    and all its nearest neighbors using one of pymatgen's NearNeighbor
    classes. These nearest neighbor calculators return weights related
    to the proximity of each neighbor to this site. 'Average bond
    length' of a site is the weighted average of the distance between
    site and all its nearest neighbors.
    '''

    def __init__(self, method):
        '''
        Initialize featurizer

        Args:
            method (NearNeighbor) - subclass under NearNeighbor used to compute nearest neighbors
        '''
        self.method = method

    def featurize(self, strc, idx):
        '''
        Get weighted average bond length of a site and all its nearest
        neighbors.

        Args:
            strc (Structure): Pymatgen Structure object
            idx (int): index of target site in structure object
        Returns:
            average bond length (list)
        '''
        # Nearest neighbors of the indexed site, each with a proximity weight
        neighbor_info = self.method.get_nn_info(strc, idx)
        if not neighbor_info:
            raise IndexError("Input structure has no bonds.")

        origin = strc[idx].coords
        proximity_weights = [entry['weight'] for entry in neighbor_info]
        neighbor_coords = [entry['site'].coords for entry in neighbor_info]

        # Euclidean distance from the central site to every neighbor
        distances = np.linalg.norm(np.subtract(neighbor_coords, origin), axis=1)
        return [PropertyStats.mean(distances, proximity_weights)]

    def feature_labels(self):
        return ['Average bond length']

    def citations(self):
        return ['@article{jong_chen_notestine_persson_ceder_jain_asta_gamst_2016,'
                'title={A Statistical Learning Framework for Materials Science: '
                'Application to Elastic Moduli of k-nary Inorganic Polycrystalline Compounds}, '
                'volume={6}, DOI={10.1038/srep34256}, number={1}, journal={Scientific Reports}, '
                'author={<NAME> and <NAME> and <NAME> and Persson, '
                'Kristin and Ceder, Gerbrand and <NAME> and Asta, Mark and <NAME>}, '
                'year={2016}, month={Mar}}'
                ]

    def implementors(self):
        return ['<NAME>', '<NAME>']
class AverageBondAngle(BaseFeaturizer):
    '''
    Determines the average bond angles of a specific site with
    its nearest neighbors using one of pymatgen's NearNeighbor
    classes. Neighbors that are adjacent to each other are stored
    and angle between them are computed. 'Average bond angle' of
    a site is the mean bond angle between all its nearest neighbors.
    '''

    def __init__(self, method):
        '''
        Initialize featurizer

        Args:
            method (NearNeighbor) - subclass under NearNeighbor used to compute nearest
                                    neighbors
        '''
        self.method = method

    def featurize(self, strc, idx):
        '''
        Get the average bond angle of a site and all its nearest
        neighbors.

        Args:
            strc (Structure): Pymatgen Structure object
            idx (int): index of target site in structure object
        Returns:
            average bond angle (list)
        Raises:
            IndexError: if the site has no nearest neighbors
        '''
        # Compute nearest neighbors of the indexed site
        nns = self.method.get_nn_info(strc, idx)
        if len(nns) == 0:
            raise IndexError("Input structure has no bonds.")
        center = strc[idx].coords

        # Precompute displacement vectors and their norms once; they are
        # reused for every neighbor pair below
        vecs = [nn['site'].coords - center for nn in nns]
        norms = [np.linalg.norm(v) for v in vecs]

        # Calculate the angle between every pair of neighbors. The matrix is
        # symmetric, so only the upper triangle is computed; the diagonal
        # stays NaN so a neighbor is never paired with itself.
        n = len(vecs)
        bond_angles = np.full((n, n), np.nan)
        for a in range(n):
            for b in range(a + 1, n):
                cos_angle = np.dot(vecs[a], vecs[b]) / (norms[a] * norms[b])
                # Floating-point round-off can push |cos| marginally past 1,
                # which would make arccos return NaN; clamp into the valid
                # domain instead of recomputing arccos on a rounded value.
                bond_angles[a, b] = bond_angles[b, a] = np.arccos(
                    np.clip(cos_angle, -1.0, 1.0))

        # Take the minimum bond angle of each neighbor
        minimum_bond_angles = np.nanmin(bond_angles, axis=1)
        return [PropertyStats.mean(minimum_bond_angles)]

    def feature_labels(self):
        return ['Average bond angle']

    def citations(self):
        return ['@article{jong_chen_notestine_persson_ceder_jain_asta_gamst_2016,'
                'title={A Statistical Learning Framework for Materials Science: '
                'Application to Elastic Moduli of k-nary Inorganic Polycrystalline Compounds}, '
                'volume={6}, DOI={10.1038/srep34256}, number={1}, journal={Scientific Reports}, '
                'author={<NAME> and <NAME> and Notestine, Randy and Persson, '
                'Kristin and Ceder, Gerbrand and <NAME> and Asta, Mark and Gamst, Anthony}, '
                'year={2016}, month={Mar}}'
                ]

    def implementors(self):
        return ['<NAME>', '<NAME>']
|
en
| 0.769248
|
Features that describe the local environment of a single atom. Note that structural features can be constructed from a combination of site features from every site in the structure. The `featurize` function takes two arguments: struct (Structure): Object representing the structure containing the site of interest idx (int): Index of the site to be featurized We have to use two parameters because the Site object does not hold a pointer back to its structure and often information on neighbors is required. To run :code:`featurize_dataframe`, you must pass the column names for both the site index and the structure. For example: .. code:: python f = AGNIFingerprints() f.featurize_dataframe(data, ['structure', 'site_idx']) Product integral of RDF and Gaussian window function, from `Botu et al <http://pubs.acs.org/doi/abs/10.1021/acs.jpcc.6b10908>`_. Integral of the product of the radial distribution function and a Gaussian window function. Originally used by `Botu et al <http://pubs.acs.org/doi/abs/10.1021/acs.jpcc.6b10908>`_ to fit empiricial potentials. These features come in two forms: atomic fingerprints and direction-resolved fingerprints. Atomic fingerprints describe the local environment of an atom and are computed using the function: :math:`A_i(\eta) = \sum\limits_{i \\ne j} e^{-(\\frac{r_{ij}}{\eta})^2} f(r_{ij})` where :math:`i` is the index of the atom, :math:`j` is the index of a neighboring atom, :math:`\eta` is a scaling function, :math:`r_{ij}` is the distance between atoms :math:`i` and :math:`j`, and :math:`f(r)` is a cutoff function where :math:`f(r) = 0.5[\cos(\\frac{\pi r_{ij}}{R_c}) + 1]` if :math:`r < R_c` and :math:`0` otherwise. The direction-resolved fingerprints are computed using :math:`V_i^k(\eta) = \sum\limits_{i \\ne j} \\frac{r_{ij}^k}{r_{ij}} e^{-(\\frac{r_{ij}}{\eta})^2} f(r_{ij})` where :math:`r_{ij}^k` is the :math:`k^{th}` component of :math:`\\bold{r}_i - \\bold{r}_j`. 
Parameters: TODO: Differentiate between different atom types (maybe as another class) Args: directions (iterable): List of directions for the fingerprints. Can be one or more of 'None`, 'x', 'y', or 'z' etas (iterable of floats): List of which window widths to compute cutoff (float): Cutoff distance (Angstroms) # Get all neighbors of this site # Convert dists to a ndarray # If one of the features is direction-dependent, compute the :math:`(r_i - r_j) / r_{ij}` # Compute the cutoff function # Compute "e^(r/eta) * cutoff_func" for each eta # Compute the fingerprints # Return the results Local structure order parameters computed from a site's neighbor env. For each order parameter, we determine the neighbor shell that complies with the expected coordination number. For example, we find the 4 nearest neighbors for the tetrahedral OP, the 6 nearest for the octahedral OP, and the 8 nearest neighbors for the bcc OP. If we don't find such a shell, the OP is either set to zero or evaluated with the shell of the next largest observed coordination number. Args: target_motifs (dict): target op or motif type where keys are corresponding coordination numbers (e.g., {4: "tetrahedral"}). dr (float): width for binning neighbors in unit of relative distances (= distance/nearest neighbor distance). The binning is necessary to make the neighbor-finding step robust against small numerical variations in neighbor distances (default: 0.1). ddr (float): variation of width for finding stable OP values. ndr (int): number of width variations for each variation direction (e.g., ndr = 0 only uses the input dr, whereas ndr=1 tests dr = dr - ddr, dr, and dr + ddr. dop (float): binning width to compute histogram for each OP if ndr > 0. dist_exp (boolean): exponent for distance factor to multiply order parameters with that penalizes (large) variations in distances in a given motif. 0 will switch the option off (default: 2). 
zero_ops (boolean): set an OP to zero if there is no neighbor shell that complies with the expected coordination number of a given OP (e.g., CN=4 for tetrahedron; default: True). Get OP fingerprint of site with given index in input structure. Args: struct (Structure): Pymatgen Structure object. idx (int): index of target site in structure. Returns: opvals (numpy array): order parameters of target site. # Smoothen distance, but use relative distances. # Do q_sgl_bd separately. #if self.optypes[1][0] == "sgl_bd": # Set all OPs of non-CN-complying neighbor environments # to zero if applicable. # Set all (remaining) OPs. # Compute histogram, determine peak, and location # of peak value. # print(minval) # print(minval) # print(maxval) # print(maxval) # print(minval) # print(maxval) # print('{} {} {}'.format(minval, maxval, nbins)) # Address problem that 2 OP values can be close to a bin edge. A local order parameter fingerprint for periodic crystals. The fingerprint represents the value of various order parameters for the site. The "wt" order parameter describes how consistent a site is with a certain coordination number. The remaining order parameters are computed by multiplying the "wt" for that coordination number with the OP value. The chem_info parameter can be used to also get chemical descriptors that describe differences in some chemical parameter (e.g., electronegativity) between the central site and the site neighbors. Use preset parameters to get the fingerprint Args: preset (str): name of preset ("cn" or "ops") **kwargs: other settings to be passed into CrystalNN class Initialize the CrystalNNFingerprint. Use the from_preset() function to use default params. 
Args: op_types (dict): a dict of coordination number (int) to a list of str representing the order parameter types chem_info (dict): a dict of chemical properties (e.g., atomic mass) to dictionaries that map an element to a value (e.g., chem_info["Pauling scale"]["O"] = 3.44) **kwargs: other settings to be passed into CrystalNN class # load order parameter objects & paramaters Get crystal fingerprint of site with given index in input structure. Args: struct (Structure): Pymatgen Structure object. idx (int): index of target site in structure. Returns: list of weighted order parameters of target site. # dictionary of chemical property to final value # Compute additional chemistry-related features # get the value for specie, if not fall back to # value defined for element # handles None Voronoi tessellation-based features around target site. Calculate the following sets of features based on Voronoi tessellation analysis around the target site: Voronoi indices n_i denotes the number of i-edged facets, and i is in the range of 3-10. e.g. for bcc lattice, the Voronoi indices are [0,6,0,8,...]; for fcc/hcp lattice, the Voronoi indices are [0,12,0,0,...]; for icosahedra, the Voronoi indices are [0,0,12,0,...]; i-fold symmetry indices computed as n_i/sum(n_i), and i is in the range of 3-10. reflect the strength of i-fold symmetry in local sites. e.g. for bcc lattice, the i-fold symmetry indices are [0,6/14,0,8/14,...] 
indicating both 4-fold and a stronger 6-fold symmetries are present; for fcc/hcp lattice, the i-fold symmetry factors are [0,1,0,0,...], indicating only 4-fold symmetry is present; for icosahedra, the Voronoi indices are [0,0,1,0,...], indicating only 5-fold symmetry is present; Weighted i-fold symmetry indices if use_weights = True Voronoi volume total volume of the Voronoi polyhedron around the target site Voronoi volume statistics of sub_polyhedra formed by each facet + center stats_vol = ['mean', 'std_dev', 'minimum', 'maximum'] Voronoi area total area of the Voronoi polyhedron around the target site Voronoi area statistics of the facets stats_area = ['mean', 'std_dev', 'minimum', 'maximum'] Voronoi nearest-neighboring distance statistics stats_dist = ['mean', 'std_dev', 'minimum', 'maximum'] Args: cutoff (float): cutoff distance in determining the potential neighbors for Voronoi tessellation analysis. (default: 6.5) use_symm_weights(bool): whether to use weights to derive weighted i-fold symmetry indices. symm_weights(str): weights to be used in weighted i-fold symmetry indices. Supported options: 'solid_angle', 'area', 'volume', 'face_dist'. (default: 'solid_angle') stats_vol (list of str): volume statistics types. stats_area (list of str): area statistics types. stats_dist (list of str): neighboring distance statistics types. Get Voronoi fingerprints of site with given index in input structure. Args: struct (Structure): Pymatgen Structure object. idx (int): index of target site in structure. Returns: (list of floats): Voronoi fingerprints. -Voronoi indices -i-fold symmetry indices -weighted i-fold symmetry indices (if use_symm_weights = True) -Voronoi volume -Voronoi volume statistics -Voronoi area -Voronoi area statistics -Voronoi dist statistics # Get the nearest neighbors using a Voronoi tessellation # Prepare storage for the Voronoi indices # Get statistics # If a facet has more than 10 edges, it's skipped here. 
Chemical short range ordering, deviation of local site and nominal structure compositions Chemical SRO features to evaluate the deviation of local chemistry with the nominal composition of the structure. A local bonding preference is computed using f_el = N_el/(sum of N_el) - c_el, where N_el is the number of each element type in the neighbors around the target site, sum of N_el is the sum of all possible element types (coordination number), and c_el is the composition of the specific element in the entire structure. A positive f_el indicates the "bonding" with the specific element is favored, at least in the target site; A negative f_el indicates the "bonding" is not favored, at least in the target site. Note that ChemicalSRO is only featurized for elements identified by "fit" (see following), thus "fit" must be called before "featurize", or else an error will be raised. Features: CSRO__[nn method]_[element] - The Chemical SRO of a site computed based on neighbors determined with a certain NN-detection method for a certain element. Initialize the featurizer Args: nn (NearestNeighbor): instance of one of pymatgen's NearestNeighbor classes. includes (array-like or str): elements included to calculate CSRO. excludes (array-like or str): elements excluded to calculate CSRO. sort (bool): whether to sort elements by mendeleev number. Use one of the standard instances of a given NearNeighbor class. Args: preset (str): preset type ("VoronoiNN", "JmolNN", "MiniumDistanceNN", "MinimumOKeeffeNN", or "MinimumVIRENN"). **kwargs: allow to pass args to the NearNeighbor class. Returns: ChemicalSRO from a preset. Identify elements to be included in the following featurization, by intersecting the elements present in the passed structures with those explicitly included (or excluded) in __init__. Only elements in the self.el_list_ will be featurized. 
Besides, compositions of the passed structures will also be "stored" in a dict of self.el_amt_dict_, avoiding repeated calculation of composition when featurizing multiple sites in the same structure. Args: X (array-like): containing Pymatgen structures and sites, supports multiple choices: -2D array-like object: e.g. [[struct, site], [struct, site], …] np.array([[struct, site], [struct, site], …]) -Pandas dataframe: e.g. df[['struct', 'site']] y : unused (added for consistency with overridden method signature) Returns: self Get CSRO features of site with given index in input structure. Args: struct (Structure): Pymatgen Structure object. idx (int): index of target site in structure. Returns: (list of floats): Chemical SRO features for each element. Gaussian symmetry function features suggested by Behler et al. The function is based on pair distances and angles, to approximate the functional dependence of local energies, originally used in the fitting of machine-learning potentials. The symmetry functions can be divided to a set of radial functions (g2 function), and a set of angular functions (g4 function). The number of symmetry functions returned are based on parameters of etas_g2, etas_g4, zetas_g4 and gammas_g4. See the original papers for more details: “Atom-centered symmetry functions for constructing high-dimensional neural network potentials”, <NAME>, J Chem Phys 134, 074106 (2011). The cutoff function is taken as the polynomial form (cosine_cutoff) to give a smoothed truncation. A Fortran and a different Python version can be found in the code Amp: Atomistic Machine-learning Package (https://bitbucket.org/andrewpeterson/amp). Args: etas_g2 (list of floats): etas used in radial functions. (default: [0.05, 4., 20., 80.]) etas_g4 (list of floats): etas used in angular functions. (default: [0.005]) zetas_g4 (list of floats): zetas used in angular functions. (default: [1., 4.]) gammas_g4 (list of floats): gammas used in angular functions. 
(default: [+1., -1.]) cutoff (float): cutoff distance. (default: 6.5) Polynomial cutoff function to give a smoothed truncation of the Gaussian symmetry functions. Args: rs (ndarray): distances to elements cutoff (float): cutoff distance. Returns: (ndarray) cutoff function. Gaussian radial symmetry function of the center atom, given an eta parameter. Args: eta: radial function parameter. rs: distances from the central atom to each neighbor cutoff (float): cutoff distance. Returns: (float) Gaussian radial symmetry function. Gaussian angular symmetry function of the center atom, given a set of eta, zeta and gamma parameters. Args: eta ([float]): angular function parameters. zeta ([float]): angular function parameters. gamma ([float]): angular function parameters. neigh_coords (list of [floats]): coordinates of neighboring atoms, with respect to the central atom cutoff (float): cutoff parameter. Returns: (float) Gaussian angular symmetry function for all combinations of eta, zeta, gamma # Loop over each neighbor j # Compute the distance of each neighbor (k) to r # Compute the cosine term # Compute the cutoff function (independent of eta/zeta/gamma) # Compute the g4 for each combination of eta/gamma/zeta # Compute the eta term Get Gaussian symmetry function features of site with given index in input structure. Args: struct (Structure): Pymatgen Structure object. idx (int): index of target site in structure. Returns: (list of floats): Gaussian symmetry function features. # Get the neighbors within the cutoff # Get coordinates of the neighbors, relative to the central atom # Get the distances for later use # Compute all G2 # Compute all G4s Compute site energy from Coulombic interactions User notes: - This class uses that `charges that are already-defined for the structure`. - Ewald summations can be expensive. If you evaluating every site in many large structures, run all of the sites for each structure at the same time. 
We cache the Ewald result for the structure that was run last, so looping over sites and then structures is faster than structures than sites. Features: ewald_site_energy - Energy for the site computed from Coulombic interactions Args: accuracy (int): Accuracy of Ewald summation, number of decimal places # Variables used then caching the Ewald result Args: struct (Structure): Pymatgen Structure object. idx (int): index of target site in structure. Returns: ([float]) - Electrostatic energy of the site # Check if the new input is the last # Note: We use 'is' rather than structure comparisons for speed Resemblance of given sites to ideal environments Site fingerprint computed from pymatgen's ChemEnv package that provides resemblance percentages of a given site to ideal environments. Args: cetypes ([str]): chemical environments (CEs) to be considered. strategy (ChemenvStrategy): ChemEnv neighbor-finding strategy. geom_finder (LocalGeometryFinder): ChemEnv local geometry finder. max_csm (float): maximum continuous symmetry measure (CSM; default of 8 taken from chemenv). Note that any CSM larger than max_csm will be set to max_csm in order to avoid negative values (i.e., all features are constrained to be between 0 and 1). max_dist_fac (float): maximum distance factor (default: 1.41). Use a standard collection of CE types and choose your ChemEnv neighbor-finding strategy. Args: preset (str): preset types ("simple" or "multi_weights"). Returns: ChemEnvSiteFingerprint object from a preset. Get ChemEnv fingerprint of site with given index in input structure. Args: struct (Structure): Pymatgen Structure object. idx (int): index of target site in structure struct. Returns: (numpy array): resemblance fraction of target site to ideal local environments. Number of first nearest neighbors of a site. Determines the number of nearest neighbors of a site using one of pymatgen's NearNeighbor classes. 
These nearest neighbor calculators can return weights related to the proximity of each neighbor to this site. It is possible to take these weights into account to prevent the coordination number from changing discontinuously with small perturbations of a structure, either by summing the total weights or using the normalization method presented by [Ward et al.](http://link.aps.org/doi/10.1103/PhysRevB.96.014107) Features: CN_[method] - Coordination number computed using a certain method for calculating nearest neighbors. Use one of the standard instances of a given NearNeighbor class. Args: preset (str): preset type ("VoronoiNN", "JmolNN", "MiniumDistanceNN", "MinimumOKeeffeNN", or "MinimumVIRENN"). **kwargs: allow to pass args to the NearNeighbor class. Returns: CoordinationNumber from a preset. Initialize the featurizer Args: nn (NearestNeighbor) - Method used to determine coordination number use_weights (string) - Method used to account for weights of neighbors: 'none' - Do not use weights when computing coordination number 'sum' - Use sum of weights as the coordination number 'effective' - Compute the 'effective coordination number', which is computed as :math:`\\frac{(\sum_n w_n)^2)}{\sum_n w_n^2}` Get coordintion number of site with given index in input structure. Args: struct (Structure): Pymatgen Structure object. idx (int): index of target site in structure struct. Returns: [float] - Coordination number # TODO: Should this weighting code go in pymatgen? I'm not sure if it even necessary to distinguish it from the 'sum' method -lw # TODO: Should names contain weighting scheme? -lw Compute the general radial distribution function (GRDF) for a site. The GRDF is a radial measure of crystal order around a site. There are two featurizing modes: 1. GRDF: (recommended) - n_bins length vector In GRDF mode, The GRDF is computed by considering all sites around a central site (i.e., no sites are omitted when computing the GRDF). 
The features output from this mode will be vectors with length n_bins. 2. pairwise GRDF: (advanced users) - n_bins x n_sites matrix In this mode, GRDFs are are still computed around a central site, but only one other site (and their translational equivalents) are used to compute a GRDF (e.g. site 1 with site 2 and the translational equivalents of site 2). This results in a a n_sites x n_bins matrix of features. Requires `fit` for determining the max number of sites for The GRDF is a generalization of the partial radial distribution function (PRDF). In contrast with the PRDF, the bins of the GRDF are not mutually- exclusive and need not carry a constant weight of 1. The PRDF is a case of the GRDF when the bins are rectangular functions. Examples of other functions to use with the GRDF are Gaussian, trig, and Bessel functions. See :func:`~matminer.featurizers.utils.grdf` for a full list of available binning functions. There are two preset conditions: gaussian: bin functions are gaussians histogram: bin functions are rectangular functions Args: bins: ([AbstractPairwise]) List of pairwise binning functions. Each of these functions must implement the AbstractPairwise class. cutoff: (float) maximum distance to look for neighbors mode: (str) the featurizing mode. supported options are: 'GRDF' and 'pairwise_GRDF' Determine the maximum number of sites in X to assign correct feature labels Args: X - [list of tuples], training data tuple values should be (struc, idx) Returns: self Get GRDF of the input structure. Args: struct (Structure): Pymatgen Structure object. idx (int): index of target site in structure struct. Returns: Flattened list of GRDF values. 
For each run mode the list order is: GRDF: bin# pairwise GRDF: site2# bin# The site2# corresponds to a pymatgen site index and bin# corresponds to one of the bin functions # Get list of neighbors by site # Indexing is [site#][neighbor#][pymatgen Site, distance, site index] # Generate lists of pairwise distances according to run mode # Make a single distance collection # Make pairwise distance collections for pairwise GRDF # compute bin counts for each list of pairwise distances # Compute "volume" of each bin to normalize GRDFs # normalize the bin counts by the bin volume to compute features Preset bin functions for this featurizer. Example use: >>> GRDF = GeneralizedRadialDistributionFunction.from_preset('gaussian') >>> GRDF.featurize(struct, idx) Args: preset (str): shape of bin (either 'gaussian' or 'histogram') width (float): bin width. std dev for gaussian, width for histogram spacing (float): the spacing between bin centers cutoff (float): maximum distance to look for neighbors mode (str): featurizing mode. either 'GRDF' or 'pairwise_GRDF' # Generate bin functions Compute the angular Fourier series (AFS), including both angular and radial info The AFS is the product of pairwise distance function (g_n, g_n') between two pairs of atoms (sharing the common central site) and the cosine of the angle between the two pairs. The AFS is a 2-dimensional feature (the axes are g_n, g_n'). Examples of distance functionals are square functions, Gaussian, trig functions, and Bessel functions. An example for Gaussian: lambda d: exp( -(d - d_n)**2 ), where d_n is the coefficient for g_n See :func:`~matminer.featurizers.utils.grdf` for a full list of available binning functions. 
There are two preset conditions: gaussian: bin functions are gaussians histogram: bin functions are rectangular functions Features: AFS ([gn], [gn']) - Angular Fourier Series between binning functions (g1 and g2) Args: bins: ([AbstractPairwise]) a list of binning functions that implement the AbstractPairwise base class cutoff: (float) maximum distance to look for neighbors. The featurizer will run slowly for large distance cutoffs because of the number of neighbor pairs scales as the square of the number of neighbors Get AFS of the input structure. Args: struct (Structure): Pymatgen Structure object. idx (int): index of target site in structure struct. Returns: Flattened list of AFS values. the list order is: g_n g_n' # Generate list of neighbor position vectors (relative to central # atom) and distances from each central site as tuples # Generate exhaustive permutations of neighbor pairs around each # central site (order matters). Does not allow repeat elements (i.e. # there are two distinct sites in every permutation) # Generate cos(theta) between neighbor pairs for each central site. # Also, retain data on neighbor distances for each pair # process with matrix algebra, we really need the speed here # Generate distance functional matrix (g_n, g_n') # Compute AFS values for each element of the bin matrix # need to cast arrays as floats to use np.exp Preset bin functions for this featurizer. Example use: >>> AFS = AngularFourierSeries.from_preset('gaussian') >>> AFS.featurize(struct, idx) Args: preset (str): shape of bin (either 'gaussian' or 'histogram') width (float): bin width. std dev for gaussian, width for histogram spacing (float): the spacing between bin centers cutoff (float): maximum distance to look for neighbors # Generate bin functions # TODO: Figure out whether to take NN-counting method as an option (see VoronoiFingerprint) Differences in elemental properties between site and its neighboring sites. 
Uses the Voronoi tessellation of the structure to determine the neighbors of the site, and assigns each neighbor (:math:`n`) a weight (:math:`A_n`) that corresponds to the area of the facet on the tessellation corresponding to that neighbor. The local property difference is then computed by :math:`\\frac{\sum_n {A_n |p_n - p_0|}}{\sum_n {A_n}}` where :math:`p_n` is the property (e.g., atomic number) of a neighbor and :math:`p_0` is the property of a site. If signed parameter is assigned True, signed difference of the properties is returned instead of absolute difference. Features: - "local property difference in [property]" - Weighted average of differences between an elemental property of a site and that of each of its neighbors, weighted by size of face on Voronoi tessellation References: `Ward et al. _PRB_ 2017 <http://link.aps.org/doi/10.1103/PhysRevB.96.024104>`_ Initialize the featurizer Args: data_source (AbstractData) - Class from which to retrieve elemental properties weight (str) - What aspect of each voronoi facet to use to weigh each neighbor (see VoronoiNN) properties ([str]) - List of properties to use (default=['Electronegativity']) signed (bool) - whether to return absolute difference or signed difference of properties(default=False (absolute difference)) Create a new LocalPropertyDifference class according to a preset Args: preset (str) - Name of preset # Get the targeted site # Get the tessellation of a site # Get the element and weight of each site # Compute the difference for each property Averages of spherical harmonics of local neighbors Bond Orientational Parameters (BOPs) describe the local environment around an atom by considering the local symmetry of the bonds as computed using spherical harmonics. To create descriptors that are invariant to rotating the coordinate system, we use the average of all spherical harmonics of a certain degree - following the approach of `Steinhardt et al. <https://link.aps.org/doi/10.1103/PhysRevB.28.784>`_. 
We weigh the contributions of each neighbor with the solid angle of the Voronoi tessellation (see `Mickel et al. <https://aip.scitation.org/doi/abs/10.1063/1.4774084>_` for further discussion). The weighing scheme makes these descriptors vary smoothly with small distortions of a crystal structure. In addition to the average spherical harmonics, this class can also compute the :math:`W` and :math:`\hat{W}` parameters proposed by `Steinhardt et al. <https://link.aps.org/doi/10.1103/PhysRevB.28.784>`_. Attributes: BOOP Q l=<n> - Average spherical harmonic for a certain degree, n. BOOP W l=<n> - W parameter for a certain degree of spherical harmonic, n. BOOP What l=<n> - :math:`\hat{W}` parameter for a certain degree of spherical harmonic, n. References: `Steinhardt et al., _PRB_ (1983) <https://link.aps.org/doi/10.1103/PhysRevB.28.784>`_ `Seko et al., _PRB_ (2017) <http://link.aps.org/doi/10.1103/PhysRevB.95.144110>`_ Initialize the featurizer Args: max_l (int) - Maximum spherical harmonic to consider compute_w (bool) - Whether to compute Ws as well compute_w_hat (bool) - Whether to compute What # Get the nearest neighbors of the atom # Get the polar and azimuthal angles of each face # Get the weights for each neighbor # Compute the spherical harmonics for the desired `l`s # Average the spherical harmonic over each neighbor, weighted by solid angle # Compute the average over all m's # Compute the W, if desired # Loop over all non-zero Wigner 3j coefficients # Compute Whats, if desired # Compile the results. Always returns Qs, and optionally the W/What Elemental properties of atom on a certain site Features: site [property] - Elemental property for this site References: `Seko et al., _PRB_ (2017) <http://link.aps.org/doi/10.1103/PhysRevB.95.144110>`_ `Schmidt et al., _Chem Mater_. 
(2017) <http://dx.doi.org/10.1021/acs.chemmater.7b00156>`_ Initialize the featurizer Args: data_source (AbstractData): Tool used to look up elemental properties properties ([string]): List of properties to use for features # Get the site # Get the properties Create the class with pre-defined settings Args: preset (string): Desired preset Returns: SiteElementalProperty initialized with desired settings Get the list of non-zero Wigner 3j triplets Args: l (int): Desired l Returns: List of tuples that contain: - ((int)) m coordinates of the triplet - (float) Wigner coefficient Iterator over all non-zero Wigner 3j triplets Args: l (int) - Desired l Generates: pairs of acceptable l's Determines the average bond length between one specific site and all its nearest neighbors using one of pymatgen's NearNeighbor classes. These nearest neighbor calculators return weights related to the proximity of each neighbor to this site. 'Average bond length' of a site is the weighted average of the distance between site and all its nearest neighbors. Initialize featurizer Args: method (NearNeighbor) - subclass under NearNeighbor used to compute nearest neighbors Get weighted average bond length of a site and all its nearest neighbors. Args: strc (Structure): Pymatgen Structure object idx (int): index of target site in structure object Returns: average bond length (list) # Compute nearest neighbors of the indexed site Determines the average bond angles of a specific site with its nearest neighbors using one of pymatgen's NearNeighbor classes. Neighbors that are adjacent to each other are stored and angle between them are computed. 'Average bond angle' of a site is the mean bond angle between all its nearest neighbors. Initialize featurizer Args: method (NearNeighbor) - subclass under NearNeighbor used to compute nearest neighbors Get average bond length of a site and all its nearest neighbors. 
Args: strc (Structure): Pymatgen Structure object idx (int): index of target site in structure object Returns: average bond length (list) # Compute nearest neighbors of the indexed site # Calculate bond angles for each neighbor # Take the minimum bond angle of each neighbor
| 2.250291
| 2
|
transform_binary_payload/src-payload-decoders/python/sentrius_rs1xx.py
|
iot-systems-gmbh/aws-iot-core-lorawan
| 0
|
6626773
|
# Copyright IoT Systems GmbH (www.iot-systems.at). All Rights Reserved.
# Affiliate of KaWa commerce GmbH, AWS Consulting Partner (www.kawa-commerce.com)
# SPDX-License-Identifier: MIT-0
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Payload definition can be found here
# https://www.lairdconnect.com/documentation/application-note-rs1xx-lora-protocol
import base64
import helpers
# DEBUG MODE
DEBUG_OUTPUT = False
# Send Temp RH Data Notification
# | byte | bit7 | bit6 | bit5 | bit4 | bit3 | bit2 | bit1 | bit0 |
# |------|--------|--------|------|------|------|------|------|------|
# | 0 | MsgType |
# |------|-----------------------------------------------------------|
# | 1 | Options |
# |------|-----------------------------------------------------------|
# | 2 | Humidity Fractional |
# |------|-----------------------------------------------------------|
# | 3 | Humidity Integer |
# |------|-----------------------------------------------------------|
# | 4 | Temp Fractional |
# |------|-----------------------------------------------------------|
# | 5 | Temp Integer |
# |------|-----------------------------------------------------------|
# | 6 | Battery Capacity |
# |------|-----------------------------------------------------------|
# | 7 | AlarmMsg Count |
# | 8 | |
# |------|-----------------------------------------------------------|
# | 9 | BacklogMsg Count |
# | 10 | |
# Send Battery Voltage
# | byte | bit7 | bit6 | bit5 | bit4 | bit3 | bit2 | bit1 | bit0 |
# |------|--------|--------|------|------|------|------|------|------|
# | 0 | MsgType |
# |------|-----------------------------------------------------------|
# | 1 | Options |
# |------|-----------------------------------------------------------|
# | 2 | Voltage Fractional |
# |------|-----------------------------------------------------------|
# | 3 | Voltage Integer |
def dict_from_payload(base64_input: str, fport: int = None):
    """ Decodes a base64-encoded binary payload into JSON.
    Parameters
    ----------
    base64_input : str
        Base64-encoded binary payload
    fport: int
        FPort as provided in the metadata. Please note the fport is optional and can have value "None", if not
        provided by the LNS or invoking function.
        If fport is None and the binary decoder can not proceed because of that, it should raise an exception.
    Returns
    -------
    JSON object with key/value pairs of decoded attributes
    (implicitly None for an empty payload)
    """
    decoded = base64.b64decode(base64_input)
    # Printing the debug output
    if DEBUG_OUTPUT:
        print(f"Input: {decoded.hex().upper()}")
    if len(decoded):
        # dict for result
        result = {}
        # type of message (byte 0)
        msg_type = decoded[0]
        # sensor to server message options (byte 1, bit flags).
        # Fix: the original assigned result["options"] once per flag, so when
        # more than one option bit was set only the last checked flag
        # survived. Collect every set flag and join them; a single set flag
        # renders exactly as before.
        options = decoded[1]
        option_flags = []
        if options & 0b00000001:
            option_flags.append("Sensor request for server time")
        if options & 0b00000010:
            option_flags.append("Sensor configuration error")
        if options & 0b00000100:
            option_flags.append("Sensor alarm flag")
        if options & 0b00001000:
            option_flags.append("Sensor reset flag")
        if options & 0b00010000:
            option_flags.append("Sensor fault flag")
        if option_flags:
            result["options"] = ", ".join(option_flags)
        # message type SendTempRHData = 0x01
        if msg_type == 0x01:
            result["msg_type"] = "SendTempRHData"
            # Fractional portion of humidity measurement in %
            humidity_fract = decoded[2]
            # Integer portion of humidity measurement in %
            humidity_int = decoded[3]
            # Each byte needs to be decoded separately then the fractional data divided by 100.
            # The sum of the two gives the resultant value.
            humidity = humidity_int + (humidity_fract / 100)
            result["humidity"] = humidity
            # Fractional portion of temperature measurement in C (signed byte)
            temp_fract = helpers.bin8dec(decoded[4])
            # Integer portion of temperature measurement in C (signed byte)
            temp_int = helpers.bin8dec(decoded[5])
            # Each byte needs to be decoded separately then the fractional data divided by 100.
            # The sum of the two gives the resultant value.
            temperature = temp_int + (temp_fract / 100)
            result["temperature"] = temperature
            # battery capacity: byte 6 is an index into capacity ranges
            batt_cap = decoded[6]
            capacity_ranges = {
                0: "0-5%",
                1: "5-20%",
                2: "20-40%",
                3: "40-60%",
                4: "60-80%",
                5: "80-100%",
            }
            result["battery_capacity"] = capacity_ranges.get(
                batt_cap, "unsupported value")
            # Number of backlog alarm messages in sensor FLASH
            # (bytes 7-8 combined high byte first)
            alarm_msg_cnt = decoded[7] << 8 | decoded[8]
            result["alarm_msg_count"] = alarm_msg_cnt
            # Number of backlog non-alarm messages in sensor FLASH
            # (bytes 9-10 combined high byte first)
            backlog_msg_cnt = decoded[9] << 8 | decoded[10]
            result["backlog_msg_count"] = backlog_msg_cnt
            return result
        # message type SendBatteryVoltage = 0x0A
        elif msg_type == 0x0A:
            # set message type
            result["msg_type"] = "SendBatteryVoltage"
            # Fractional part of the last measured battery voltage
            volt_fract = helpers.bin8dec(decoded[2])
            # Integer part of the last measured battery voltage
            volt_int = helpers.bin8dec(decoded[3])
            # Each byte needs to be decoded separately then the fractional data divided by 100.
            # The sum of the two gives the resultant value.
            volt = volt_int + (volt_fract / 100)
            result["voltage"] = volt
            return result
        else:
            raise Exception(f"message type {msg_type} not implemented")
# Tests
if __name__ == "__main__":
    test_definition = [
        {
            "input_encoding": "hex",
            "input_value": "01001E0141190200000000",
            "output": {
                "msg_type": "SendTempRHData",
                "humidity": 1.3,
                "temperature": 25.65,
                "battery_capacity": "20-40%",
                "alarm_msg_count": 0,
                "backlog_msg_count": 0
            }
        },
        {
            "input_encoding": "hex",
            "input_value": "0A000A03",
            "output": {
                "msg_type": "SendBatteryVoltage",
                "voltage": 3.1,
            }
        },
        {
            "input_encoding": "hex",
            "input_value": "0A010A03",
            "output": {
                "options": "Sensor request for server time",
                "msg_type": "SendBatteryVoltage",
                "voltage": 3.1,
            }
        }
    ]
    # Feed each case through the decoder and compare expected vs. actual
    # output key by key; any mismatch aborts the run.
    for case in test_definition:
        raw_value = case.get("input_value")
        encoding = case.get("input_encoding")
        payload = None
        if encoding == "base64":
            payload = raw_value
        elif encoding == "hex":
            payload = base64.b64encode(
                bytearray.fromhex(raw_value)).decode("utf-8")
        actual = dict_from_payload(payload)
        for key, expected in case.get("output").items():
            if expected != actual.get(key):
                raise Exception(
                    f'Assertion failed for input {raw_value}, key {key}, expected {expected}, got {actual.get(key)}')
            print(
                f'"{raw_value}" : Successful test for key "{key}", value "{expected}"')
|
# Copyright IoT Systems GmbH (www.iot-systems.at). All Rights Reserved.
# Affiliate of KaWa commerce GmbH, AWS Consulting Partner (www.kawa-commerce.com)
# SPDX-License-Identifier: MIT-0
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Payload definition can be found here
# https://www.lairdconnect.com/documentation/application-note-rs1xx-lora-protocol
import base64
import helpers
# DEBUG MODE
DEBUG_OUTPUT = False
# Send Temp RH Data Notification
# | byte | bit7 | bit6 | bit5 | bit4 | bit3 | bit2 | bit1 | bit0 |
# |------|--------|--------|------|------|------|------|------|------|
# | 0 | MsgType |
# |------|-----------------------------------------------------------|
# | 1 | Options |
# |------|-----------------------------------------------------------|
# | 2 | Humidity Fractional |
# |------|-----------------------------------------------------------|
# | 3 | Humidity Integer |
# |------|-----------------------------------------------------------|
# | 4 | Temp Fractional |
# |------|-----------------------------------------------------------|
# | 5 | Temp Integer |
# |------|-----------------------------------------------------------|
# | 6 | Battery Capacity |
# |------|-----------------------------------------------------------|
# | 7 | AlarmMsg Count |
# | 8 | |
# |------|-----------------------------------------------------------|
# | 9 | BacklogMsg Count |
# | 10 | |
# Send Battery Voltage
# | byte | bit7 | bit6 | bit5 | bit4 | bit3 | bit2 | bit1 | bit0 |
# |------|--------|--------|------|------|------|------|------|------|
# | 0 | MsgType |
# |------|-----------------------------------------------------------|
# | 1 | Options |
# |------|-----------------------------------------------------------|
# | 2 | Voltage Fractional |
# |------|-----------------------------------------------------------|
# | 3 | Voltage Integer |
def dict_from_payload(base64_input: str, fport: int = None):
    """ Decodes a base64-encoded binary payload into JSON.
    Parameters
    ----------
    base64_input : str
        Base64-encoded binary payload
    fport: int
        FPort as provided in the metadata. Please note the fport is optional and can have value "None", if not
        provided by the LNS or invoking function.
        If fport is None and the binary decoder can not proceed because of that, it should raise an exception.
    Returns
    -------
    JSON object with key/value pairs of decoded attributes
    (implicitly None for an empty payload)
    """
    decoded = base64.b64decode(base64_input)
    # Printing the debug output
    if DEBUG_OUTPUT:
        print(f"Input: {decoded.hex().upper()}")
    if len(decoded):
        # dict for result
        result = {}
        # type of message (byte 0)
        msg_type = decoded[0]
        # sensor to server message options (byte 1, bit flags).
        # Fix: the original assigned result["options"] once per flag, so when
        # more than one option bit was set only the last checked flag
        # survived. Collect every set flag and join them; a single set flag
        # renders exactly as before.
        options = decoded[1]
        option_flags = []
        if options & 0b00000001:
            option_flags.append("Sensor request for server time")
        if options & 0b00000010:
            option_flags.append("Sensor configuration error")
        if options & 0b00000100:
            option_flags.append("Sensor alarm flag")
        if options & 0b00001000:
            option_flags.append("Sensor reset flag")
        if options & 0b00010000:
            option_flags.append("Sensor fault flag")
        if option_flags:
            result["options"] = ", ".join(option_flags)
        # message type SendTempRHData = 0x01
        if msg_type == 0x01:
            result["msg_type"] = "SendTempRHData"
            # Fractional portion of humidity measurement in %
            humidity_fract = decoded[2]
            # Integer portion of humidity measurement in %
            humidity_int = decoded[3]
            # Each byte needs to be decoded separately then the fractional data divided by 100.
            # The sum of the two gives the resultant value.
            humidity = humidity_int + (humidity_fract / 100)
            result["humidity"] = humidity
            # Fractional portion of temperature measurement in C (signed byte)
            temp_fract = helpers.bin8dec(decoded[4])
            # Integer portion of temperature measurement in C (signed byte)
            temp_int = helpers.bin8dec(decoded[5])
            # Each byte needs to be decoded separately then the fractional data divided by 100.
            # The sum of the two gives the resultant value.
            temperature = temp_int + (temp_fract / 100)
            result["temperature"] = temperature
            # battery capacity: byte 6 is an index into capacity ranges
            batt_cap = decoded[6]
            capacity_ranges = {
                0: "0-5%",
                1: "5-20%",
                2: "20-40%",
                3: "40-60%",
                4: "60-80%",
                5: "80-100%",
            }
            result["battery_capacity"] = capacity_ranges.get(
                batt_cap, "unsupported value")
            # Number of backlog alarm messages in sensor FLASH
            # (bytes 7-8 combined high byte first)
            alarm_msg_cnt = decoded[7] << 8 | decoded[8]
            result["alarm_msg_count"] = alarm_msg_cnt
            # Number of backlog non-alarm messages in sensor FLASH
            # (bytes 9-10 combined high byte first)
            backlog_msg_cnt = decoded[9] << 8 | decoded[10]
            result["backlog_msg_count"] = backlog_msg_cnt
            return result
        # message type SendBatteryVoltage = 0x0A
        elif msg_type == 0x0A:
            # set message type
            result["msg_type"] = "SendBatteryVoltage"
            # Fractional part of the last measured battery voltage
            volt_fract = helpers.bin8dec(decoded[2])
            # Integer part of the last measured battery voltage
            volt_int = helpers.bin8dec(decoded[3])
            # Each byte needs to be decoded separately then the fractional data divided by 100.
            # The sum of the two gives the resultant value.
            volt = volt_int + (volt_fract / 100)
            result["voltage"] = volt
            return result
        else:
            raise Exception(f"message type {msg_type} not implemented")
# Tests
if __name__ == "__main__":
    test_definition = [
        {
            "input_encoding": "hex",
            "input_value": "01001E0141190200000000",
            "output": {
                "msg_type": "SendTempRHData",
                "humidity": 1.3,
                "temperature": 25.65,
                "battery_capacity": "20-40%",
                "alarm_msg_count": 0,
                "backlog_msg_count": 0
            }
        },
        {
            "input_encoding": "hex",
            "input_value": "0A000A03",
            "output": {
                "msg_type": "SendBatteryVoltage",
                "voltage": 3.1,
            }
        },
        {
            "input_encoding": "hex",
            "input_value": "0A010A03",
            "output": {
                "options": "Sensor request for server time",
                "msg_type": "SendBatteryVoltage",
                "voltage": 3.1,
            }
        }
    ]
    # Feed each case through the decoder and compare expected vs. actual
    # output key by key; any mismatch aborts the run.
    for case in test_definition:
        raw_value = case.get("input_value")
        encoding = case.get("input_encoding")
        payload = None
        if encoding == "base64":
            payload = raw_value
        elif encoding == "hex":
            payload = base64.b64encode(
                bytearray.fromhex(raw_value)).decode("utf-8")
        actual = dict_from_payload(payload)
        for key, expected in case.get("output").items():
            if expected != actual.get(key):
                raise Exception(
                    f'Assertion failed for input {raw_value}, key {key}, expected {expected}, got {actual.get(key)}')
            print(
                f'"{raw_value}" : Successful test for key "{key}", value "{expected}"')
|
en
| 0.568936
|
# Copyright IoT Systems GmbH (www.iot-systems.at). All Rights Reserved. # Affiliate of KaWa commerce GmbH, AWS Consulting Partner (www.kawa-commerce.com) # SPDX-License-Identifier: MIT-0 # # Permission is hereby granted, free of charge, to any person obtaining a copy of this # software and associated documentation files (the "Software"), to deal in the Software # without restriction, including without limitation the rights to use, copy, modify, # merge, publish, distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A # PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
# Payload definition can be found here # https://www.lairdconnect.com/documentation/application-note-rs1xx-lora-protocol # DEBUG MODE # Send Temp RH Data Notification # | byte | bit7 | bit6 | bit5 | bit4 | bit3 | bit2 | bit1 | bit0 | # |------|--------|--------|------|------|------|------|------|------| # | 0 | MsgType | # |------|-----------------------------------------------------------| # | 1 | Options | # |------|-----------------------------------------------------------| # | 2 | Humidity Fractional | # |------|-----------------------------------------------------------| # | 3 | Humidity Integer | # |------|-----------------------------------------------------------| # | 4 | Temp Fractional | # |------|-----------------------------------------------------------| # | 5 | Temp Integer | # |------|-----------------------------------------------------------| # | 6 | Battery Capacity | # |------|-----------------------------------------------------------| # | 7 | AlarmMsg Count | # | 8 | | # |------|-----------------------------------------------------------| # | 9 | BacklogMsg Count | # | 10 | | # Send Battery Voltage # | byte | bit7 | bit6 | bit5 | bit4 | bit3 | bit2 | bit1 | bit0 | # |------|--------|--------|------|------|------|------|------|------| # | 0 | MsgType | # |------|-----------------------------------------------------------| # | 1 | Options | # |------|-----------------------------------------------------------| # | 2 | Voltage Fractional | # |------|-----------------------------------------------------------| # | 3 | Voltage Integer | Decodes a base64-encoded binary payload into JSON. Parameters ---------- base64_input : str Base64-encoded binary payload fport: int FPort as provided in the metadata. Please note the fport is optional and can have value "None", if not provided by the LNS or invoking function. If fport is None and binary decoder can not proceed because of that, it should should raise an exception. 
Returns ------- JSON object with key/value pairs of decoded attributes # Printing the debug output # dict for result # type of message # sensor to server message options # message type # Fractional portion of humidity measurement in % # Integer portion of humidity measurement in % # Each byte needs to be decoded separately then the fractional data divided by 100. # The sum of the two gives the resultant value. # Fractional portion of temperature measurement in C # Integer portion of temperature measurement in C # Each byte needs to be decoded separately then the fractional data divided by 100. # The sum of the two gives the resultant value. # battery capacity # Index for percentage of battery capacity remaining # Number of backlog alarm messages in sensor FLASH # Number of backlog non-alarm messages in sensor FLASH # message type SendBatteryVoltage = 0x0A # set message type # Fractional part of the last measured battery voltage # Integer part of the last measured battery voltage # Each byte needs to be decoded separately then the fractional data divided by 100. # The sum of the two gives the resultant value. # Tests
| 1.473445
| 1
|
attendees/users/models/user.py
|
xjlin0/attendees32
| 0
|
6626774
|
<filename>attendees/users/models/user.py
from django.conf import settings
from django.contrib.auth.models import AbstractUser
from django.contrib.postgres.indexes import GinIndex
from django.db import models
from django.db.models import CharField
from django.urls import reverse
from django.utils import timezone
from django.contrib.auth import validators
from django.utils.translation import ugettext_lazy as _
import pghistory
from attendees.persons.models import Utility
from attendees.whereabouts.models import Organization
class User(AbstractUser):
    """Attendees custom user model.

    Replaces first/last name with a single ``name`` field, links the user to
    an optional primary :class:`Organization`, and keeps per-user settings in
    a JSON ``infos`` blob (GIN-indexed for containment queries).
    """

    # First Name and Last Name do not cover name patterns around the globe.
    name = CharField(_("Name of User"), blank=True, max_length=255)
    first_name = None  # type: ignore
    last_name = None  # type: ignore
    # Primary organization; SET_NULL keeps the user when the org is deleted.
    organization = models.ForeignKey(
        Organization,
        null=True,
        blank=True,
        default=None,
        on_delete=models.SET_NULL,
        help_text="Primary organization of the user",
    )
    infos = models.JSONField(
        default=Utility.user_infos,
        null=True,
        blank=True,
        help_text="please keep {} here even there's no data",
    )

    class Meta:
        indexes = [
            GinIndex(
                fields=["infos"],
                name="user_infos_gin",
            ),
        ]

    def organization_pk(self):
        """Return the primary key of the user's organization, or None."""
        return self.organization.pk if self.organization else None

    def get_absolute_url(self):
        """Get url for user's detail view.
        Returns:
            str: URL for user detail.
        """
        return reverse("users:detail", kwargs={"username": self.username})

    def belongs_to_groups_of(
        self, auth_group_names
    ):  # .in_bulk() might take more memory
        """True if the user is in any auth group named in auth_group_names."""
        return self.groups.filter(name__in=auth_group_names).exists()

    def belongs_to_organization_of(self, organization_slug):
        """True for superusers, or when the user's org slug matches."""
        if self.is_superuser:
            return True
        else:
            return self.organization and self.organization.slug == organization_slug

    def can_see_all_organizational_meets_attendees(self):
        """True if user's groups are configured to see all meets' attendees."""
        if self.organization:
            return self.belongs_to_groups_of(
                self.organization.infos["groups_see_all_meets_attendees"]
            )
        else:
            return False

    def _privileged_group_names(self):
        """Auth group names allowed to see others' data without relationships.

        Shared by privileged() and privileged_to_edit(); caller must ensure
        self.organization is set.
        """
        return self.organization.infos.get(
            "data_admins", []
        ) + self.organization.infos.get("counselor", [])

    def privileged(self):
        """
        check if user's in correct groups to see other's data without relationships, currently are data_admins or counselor group
        Does NOT check if current user and targeting user are in the same organization!!
        :return: boolean
        """
        if self.organization:
            return self.belongs_to_groups_of(self._privileged_group_names())
        return False

    def privileged_to_edit(self, other_attendee_id):
        """
        check if user's in correct groups to see other attendee data (under same organization) without relationships, currently are data_admins or counselor group
        Returns False (instead of raising RelatedObjectDoesNotExist) when the
        user has no attendee record.
        :return: boolean
        """
        # hasattr guard keeps this consistent with attendee_uuid_str() for
        # users without an attendee.
        if other_attendee_id and self.organization and hasattr(self, "attendee"):
            return self.belongs_to_groups_of(
                self._privileged_group_names()
            ) and self.attendee.under_same_org_with(other_attendee_id)
        return False

    def is_data_admin(self):
        """True if the user is in the organization's data_admins groups."""
        organization_data_admin_group = (
            self.organization.infos.get("data_admins", []) if self.organization else []
        )
        return self.belongs_to_groups_of(organization_data_admin_group)

    def is_counselor(self):
        """True if the user is in the organization's counselor groups."""
        organization_counselor_groups = (
            self.organization.infos.get("counselor", []) if self.organization else []
        )
        return self.belongs_to_groups_of(organization_counselor_groups)

    def attendee_uuid_str(self):
        """UUID of the user's attendee as a string, or "" if no attendee."""
        return str(self.attendee.id) if hasattr(self, "attendee") else ""

    def attend_divisions_of(self, division_slugs):
        """True if the user's attendee attends any division in division_slugs.

        Fixed: the original accessed self.attendee unguarded and raised
        RelatedObjectDoesNotExist for users without an attendee; now it
        returns False, consistent with attendee_uuid_str().
        """
        return (
            hasattr(self, "attendee")
            and self.attendee.attending_set.filter(
                divisions__slug__in=division_slugs
            ).exists()
        )

    def belongs_to_divisions_of(self, division_slugs):
        """True if the user's organization has any division in division_slugs."""
        # if self.is_superuser:
        #     return True
        # else:
        return (
            self.organization
            and self.organization.division_set.filter(slug__in=division_slugs).exists()
        )

    def belongs_to_organization_and_division(self, organization_slug, division_slug):
        """True for superusers, or when both org slug and division slug match."""
        if self.is_superuser:
            return True
        else:
            return (
                self.organization
                and self.organization.slug == organization_slug
                and self.organization.division_set.filter(slug=division_slug).exists()
            )

    def attended_divisions_slugs(self):
        """Slugs of divisions the user's attendee attends ([] if no attendee).

        Fixed: guarded with hasattr so users without an attendee get []
        instead of RelatedObjectDoesNotExist.
        """
        if hasattr(self, "attendee"):
            return self.attendee.attending_set.values_list("division__slug", flat=True)
        return []

    def allowed_url_names(self, menu_category="API"):
        """Distinct menu url_names the user's groups may access for category."""
        return (
            self.groups.filter(
                menuauthgroup__menu__organization=self.organization,
                menuauthgroup__menu__category=menu_category,
            )
            .values_list("menuauthgroup__menu__url_name", flat=True)
            .distinct()
        )
class UserHistory(pghistory.get_event_model(
    User,
    pghistory.Snapshot('user.snapshot'),
    name='UserHistory',
    related_name='history',
    exclude=['password'],
)):
    # django-pghistory event model: a snapshot row is written on every User
    # change under the label 'user.snapshot'. The field declarations below
    # mirror User's columns (password is excluded via `exclude` above) plus
    # the pgh_* bookkeeping columns pghistory adds. Keep them in sync with
    # User and the generated migrations.
    pgh_id = models.BigAutoField(primary_key=True, serialize=False)
    # Timestamp of when this event row was written.
    pgh_created_at = models.DateTimeField(auto_now_add=True)
    # Original User primary key; indexed but NOT unique across snapshots.
    id = models.IntegerField(db_index=True)
    is_superuser = models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')
    is_staff = models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')
    is_active = models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')
    # db_constraint=False / DO_NOTHING: history rows must survive deletion of
    # the referenced organization.
    organization = models.ForeignKey(blank=True, db_constraint=False, default=None, help_text='Primary organization of the user', null=True, on_delete=models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='whereabouts.organization')
    date_joined = models.DateTimeField(default=timezone.now, verbose_name='date joined')
    # Link back to the tracked User row; 'history' matches related_name above.
    pgh_obj = models.ForeignKey(db_constraint=False, on_delete=models.deletion.DO_NOTHING, related_name='history', to=settings.AUTH_USER_MODEL)
    # password = models.CharField(max_length=128, verbose_name='password')
    pgh_label = models.TextField(help_text='The event label.')
    username = models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, validators=[validators.UnicodeUsernameValidator()], verbose_name='username')
    last_login = models.DateTimeField(blank=True, null=True, verbose_name='last login')
    # Optional pghistory context (who/why) captured at event time.
    pgh_context = models.ForeignKey(db_constraint=False, null=True, on_delete=models.deletion.DO_NOTHING, related_name='+', to='pghistory.context')
    email = models.EmailField(blank=True, max_length=254, verbose_name='email address')
    infos = models.JSONField(blank=True, default=Utility.user_infos, help_text="please keep {} here even there's no data", null=True)
    name = models.CharField(blank=True, max_length=255, verbose_name='Name of User')

    class Meta:
        db_table = "users_userhistory"
|
<filename>attendees/users/models/user.py
from django.conf import settings
from django.contrib.auth.models import AbstractUser
from django.contrib.postgres.indexes import GinIndex
from django.db import models
from django.db.models import CharField
from django.urls import reverse
from django.utils import timezone
from django.contrib.auth import validators
from django.utils.translation import ugettext_lazy as _
import pghistory
from attendees.persons.models import Utility
from attendees.whereabouts.models import Organization
class User(AbstractUser):
    """Attendees custom user model.

    Replaces first/last name with a single ``name`` field, links the user to
    an optional primary :class:`Organization`, and keeps per-user settings in
    a JSON ``infos`` blob (GIN-indexed for containment queries).
    """

    # First Name and Last Name do not cover name patterns around the globe.
    name = CharField(_("Name of User"), blank=True, max_length=255)
    first_name = None  # type: ignore
    last_name = None  # type: ignore
    # Primary organization; SET_NULL keeps the user when the org is deleted.
    organization = models.ForeignKey(
        Organization,
        null=True,
        blank=True,
        default=None,
        on_delete=models.SET_NULL,
        help_text="Primary organization of the user",
    )
    infos = models.JSONField(
        default=Utility.user_infos,
        null=True,
        blank=True,
        help_text="please keep {} here even there's no data",
    )

    class Meta:
        indexes = [
            GinIndex(
                fields=["infos"],
                name="user_infos_gin",
            ),
        ]

    def organization_pk(self):
        """Return the primary key of the user's organization, or None."""
        return self.organization.pk if self.organization else None

    def get_absolute_url(self):
        """Get url for user's detail view.
        Returns:
            str: URL for user detail.
        """
        return reverse("users:detail", kwargs={"username": self.username})

    def belongs_to_groups_of(
        self, auth_group_names
    ):  # .in_bulk() might take more memory
        """True if the user is in any auth group named in auth_group_names."""
        return self.groups.filter(name__in=auth_group_names).exists()

    def belongs_to_organization_of(self, organization_slug):
        """True for superusers, or when the user's org slug matches."""
        if self.is_superuser:
            return True
        else:
            return self.organization and self.organization.slug == organization_slug

    def can_see_all_organizational_meets_attendees(self):
        """True if user's groups are configured to see all meets' attendees."""
        if self.organization:
            return self.belongs_to_groups_of(
                self.organization.infos["groups_see_all_meets_attendees"]
            )
        else:
            return False

    def _privileged_group_names(self):
        """Auth group names allowed to see others' data without relationships.

        Shared by privileged() and privileged_to_edit(); caller must ensure
        self.organization is set.
        """
        return self.organization.infos.get(
            "data_admins", []
        ) + self.organization.infos.get("counselor", [])

    def privileged(self):
        """
        check if user's in correct groups to see other's data without relationships, currently are data_admins or counselor group
        Does NOT check if current user and targeting user are in the same organization!!
        :return: boolean
        """
        if self.organization:
            return self.belongs_to_groups_of(self._privileged_group_names())
        return False

    def privileged_to_edit(self, other_attendee_id):
        """
        check if user's in correct groups to see other attendee data (under same organization) without relationships, currently are data_admins or counselor group
        Returns False (instead of raising RelatedObjectDoesNotExist) when the
        user has no attendee record.
        :return: boolean
        """
        # hasattr guard keeps this consistent with attendee_uuid_str() for
        # users without an attendee.
        if other_attendee_id and self.organization and hasattr(self, "attendee"):
            return self.belongs_to_groups_of(
                self._privileged_group_names()
            ) and self.attendee.under_same_org_with(other_attendee_id)
        return False

    def is_data_admin(self):
        """True if the user is in the organization's data_admins groups."""
        organization_data_admin_group = (
            self.organization.infos.get("data_admins", []) if self.organization else []
        )
        return self.belongs_to_groups_of(organization_data_admin_group)

    def is_counselor(self):
        """True if the user is in the organization's counselor groups."""
        organization_counselor_groups = (
            self.organization.infos.get("counselor", []) if self.organization else []
        )
        return self.belongs_to_groups_of(organization_counselor_groups)

    def attendee_uuid_str(self):
        """UUID of the user's attendee as a string, or "" if no attendee."""
        return str(self.attendee.id) if hasattr(self, "attendee") else ""

    def attend_divisions_of(self, division_slugs):
        """True if the user's attendee attends any division in division_slugs.

        Fixed: the original accessed self.attendee unguarded and raised
        RelatedObjectDoesNotExist for users without an attendee; now it
        returns False, consistent with attendee_uuid_str().
        """
        return (
            hasattr(self, "attendee")
            and self.attendee.attending_set.filter(
                divisions__slug__in=division_slugs
            ).exists()
        )

    def belongs_to_divisions_of(self, division_slugs):
        """True if the user's organization has any division in division_slugs."""
        # if self.is_superuser:
        #     return True
        # else:
        return (
            self.organization
            and self.organization.division_set.filter(slug__in=division_slugs).exists()
        )

    def belongs_to_organization_and_division(self, organization_slug, division_slug):
        """True for superusers, or when both org slug and division slug match."""
        if self.is_superuser:
            return True
        else:
            return (
                self.organization
                and self.organization.slug == organization_slug
                and self.organization.division_set.filter(slug=division_slug).exists()
            )

    def attended_divisions_slugs(self):
        """Slugs of divisions the user's attendee attends ([] if no attendee).

        Fixed: guarded with hasattr so users without an attendee get []
        instead of RelatedObjectDoesNotExist.
        """
        if hasattr(self, "attendee"):
            return self.attendee.attending_set.values_list("division__slug", flat=True)
        return []

    def allowed_url_names(self, menu_category="API"):
        """Distinct menu url_names the user's groups may access for category."""
        return (
            self.groups.filter(
                menuauthgroup__menu__organization=self.organization,
                menuauthgroup__menu__category=menu_category,
            )
            .values_list("menuauthgroup__menu__url_name", flat=True)
            .distinct()
        )
class UserHistory(pghistory.get_event_model(
    User,
    pghistory.Snapshot('user.snapshot'),
    name='UserHistory',
    related_name='history',
    exclude=['password'],
)):
    # django-pghistory event model: a snapshot row is written on every User
    # change under the label 'user.snapshot'. The field declarations below
    # mirror User's columns (password is excluded via `exclude` above) plus
    # the pgh_* bookkeeping columns pghistory adds. Keep them in sync with
    # User and the generated migrations.
    pgh_id = models.BigAutoField(primary_key=True, serialize=False)
    # Timestamp of when this event row was written.
    pgh_created_at = models.DateTimeField(auto_now_add=True)
    # Original User primary key; indexed but NOT unique across snapshots.
    id = models.IntegerField(db_index=True)
    is_superuser = models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')
    is_staff = models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')
    is_active = models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')
    # db_constraint=False / DO_NOTHING: history rows must survive deletion of
    # the referenced organization.
    organization = models.ForeignKey(blank=True, db_constraint=False, default=None, help_text='Primary organization of the user', null=True, on_delete=models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='whereabouts.organization')
    date_joined = models.DateTimeField(default=timezone.now, verbose_name='date joined')
    # Link back to the tracked User row; 'history' matches related_name above.
    pgh_obj = models.ForeignKey(db_constraint=False, on_delete=models.deletion.DO_NOTHING, related_name='history', to=settings.AUTH_USER_MODEL)
    # password = models.CharField(max_length=128, verbose_name='password')
    pgh_label = models.TextField(help_text='The event label.')
    username = models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, validators=[validators.UnicodeUsernameValidator()], verbose_name='username')
    last_login = models.DateTimeField(blank=True, null=True, verbose_name='last login')
    # Optional pghistory context (who/why) captured at event time.
    pgh_context = models.ForeignKey(db_constraint=False, null=True, on_delete=models.deletion.DO_NOTHING, related_name='+', to='pghistory.context')
    email = models.EmailField(blank=True, max_length=254, verbose_name='email address')
    infos = models.JSONField(blank=True, default=Utility.user_infos, help_text="please keep {} here even there's no data", null=True)
    name = models.CharField(blank=True, max_length=255, verbose_name='Name of User')

    class Meta:
        db_table = "users_userhistory"
|
en
| 0.768561
|
# First Name and Last Name do not cover name patterns around the globe. # type: ignore # type: ignore Get url for user's detail view. Returns: str: URL for user detail. # .in_bulk() might take more memory check if user's in correct groups to see other's data without relationships, currently are data_admins or counselor group Does NOT check if current user and targeting user are in the same organization!! :return: boolean check if user's in correct groups to see other attendee data (under same organization) without relationships, currently are data_admins or counselor group :return: boolean # if self.is_superuser: # return True # else: # password = models.CharField(max_length=128, verbose_name='password')
| 2.242209
| 2
|
sqli_platform/challenges/sesqli6/views.py
|
Xistens/SQLi-Sandbox
| 1
|
6626775
|
#!/usr/bin/python3
"""Views for the 'sesqli6' SQL-injection challenge.

Login uses parameterized queries; the profile-update flow deliberately
builds its UPDATE statement by string interpolation -- that injection
point IS the challenge and must not be "fixed".
"""
from flask import (
    render_template,
    Blueprint,
    current_app,
    redirect,
    url_for,
    session,
    request,
    flash
)
from sqli_platform import (app, clog, db)
from sqli_platform.utils.challenge import (get_flag, get_config, format_query, hash_pwd, login_required, clear_session)

_bp = "sesqli6"
sesqli6 = Blueprint(_bp, __name__, template_folder='templates', url_prefix=f"/{_bp}")
_templ = "challenges/sesqli"
# Queries executed during the current request; drained by context() so the
# challenge templates can display them to the player.
_query = []


def get_profile():
    """Return the DB row of the currently logged-in user, or None."""
    query = f"SELECT uid, name, profileID, salary, passportNr, email, nickName, password FROM usertable WHERE UID = ?"
    params = [session.get(f"{_bp}_user_id", None)]
    return db.sql_query(_bp, query, params, one=True)


@sesqli6.context_processor
def context():
    """Expose the collected queries to every template, then reset them."""
    global _query
    d = dict(
        query=format_query(_query)
    )
    _query = []
    return d


@sesqli6.route("/")
@sesqli6.route("/login", methods=["GET", "POST"])
def login():
    """Log a user in; credentials are checked with a parameterized query."""
    global _query
    if request.method == "POST":
        username = request.form["profileID"]
        password = request.form["password"]
        # Validate the raw input BEFORE hashing: hash_pwd("") would be a
        # non-empty string, so checking afterwards could never trigger.
        if not (username and password):
            flash("Username or Password cannot be empty.", "warning")
            return redirect(url_for(f"{_bp}.login"))
        # Hash password before comparing against the stored hash.
        password = hash_pwd(password)
        query = f"SELECT uid, name, profileID, salary, passportNr, email, nickName, password FROM usertable WHERE profileID=? AND password=?"
        params = [username, password]
        _query.append((query, params))
        user = db.sql_query(_bp, query, params, one=True)
        if user:
            session[f"{_bp}_user_id"] = user["uid"]
            session[f"{_bp}_data"] = dict(user)
            return redirect(url_for(f"{_bp}.home"))
        else:
            flash("The account information you provided does not exist!", "danger")
    return render_template(f"{_bp}/login.html", slide_num=0)


@sesqli6.route("/profile", methods=["GET", "POST"])
@login_required(_bp)
def profile():
    """Update nickname/email (and optionally password) of the current user.

    NOTE(review): the UPDATE is built by f-string interpolation on purpose --
    this is the SQL-injection vulnerability players must exploit. Do not
    parameterize it.
    """
    global _query
    if request.method == "POST":
        email = request.form["email"]
        nick = request.form["nickName"]
        password = request.form["password"]
        query = ""
        if password:
            pwd_hash = hash_pwd(password)
            query = f"UPDATE usertable SET nickName='{nick}',email='{email}',password='{pwd_hash}' WHERE UID='{session[f'{_bp}_user_id']}'"
        else:
            query = f"UPDATE usertable SET nickName='{nick}',email='{email}' WHERE UID='{session[f'{_bp}_user_id']}'"
        _query.append(query)
        db.sql_insert(_bp, query)
        return redirect(url_for(f"{_bp}.home"))
    return render_template(f"{_templ}/profile.html", csess_obj=get_profile())


@sesqli6.route("/home")
@login_required(_bp)
def home():
    """Render the challenge home page for the logged-in user."""
    return render_template(f"{_templ}/index.html", csess_obj=get_profile())


@sesqli6.route("/logout")
def logout():
    """Clear the challenge session and return to the login page."""
    clear_session(_bp)
    return redirect(url_for(f"{_bp}.login"))
|
#!/usr/bin/python3
"""Views for the 'sesqli6' SQL-injection challenge.

Login uses parameterized queries; the profile-update flow deliberately
builds its UPDATE statement by string interpolation -- that injection
point IS the challenge and must not be "fixed".
"""
from flask import (
    render_template,
    Blueprint,
    current_app,
    redirect,
    url_for,
    session,
    request,
    flash
)
from sqli_platform import (app, clog, db)
from sqli_platform.utils.challenge import (get_flag, get_config, format_query, hash_pwd, login_required, clear_session)

_bp = "sesqli6"
sesqli6 = Blueprint(_bp, __name__, template_folder='templates', url_prefix=f"/{_bp}")
_templ = "challenges/sesqli"
# Queries executed during the current request; drained by context() so the
# challenge templates can display them to the player.
_query = []


def get_profile():
    """Return the DB row of the currently logged-in user, or None."""
    query = f"SELECT uid, name, profileID, salary, passportNr, email, nickName, password FROM usertable WHERE UID = ?"
    params = [session.get(f"{_bp}_user_id", None)]
    return db.sql_query(_bp, query, params, one=True)


@sesqli6.context_processor
def context():
    """Expose the collected queries to every template, then reset them."""
    global _query
    d = dict(
        query=format_query(_query)
    )
    _query = []
    return d


@sesqli6.route("/")
@sesqli6.route("/login", methods=["GET", "POST"])
def login():
    """Log a user in; credentials are checked with a parameterized query."""
    global _query
    if request.method == "POST":
        username = request.form["profileID"]
        password = request.form["password"]
        # Validate the raw input BEFORE hashing: hash_pwd("") would be a
        # non-empty string, so checking afterwards could never trigger.
        if not (username and password):
            flash("Username or Password cannot be empty.", "warning")
            return redirect(url_for(f"{_bp}.login"))
        # Hash password before comparing against the stored hash.
        password = hash_pwd(password)
        query = f"SELECT uid, name, profileID, salary, passportNr, email, nickName, password FROM usertable WHERE profileID=? AND password=?"
        params = [username, password]
        _query.append((query, params))
        user = db.sql_query(_bp, query, params, one=True)
        if user:
            session[f"{_bp}_user_id"] = user["uid"]
            session[f"{_bp}_data"] = dict(user)
            return redirect(url_for(f"{_bp}.home"))
        else:
            flash("The account information you provided does not exist!", "danger")
    return render_template(f"{_bp}/login.html", slide_num=0)


@sesqli6.route("/profile", methods=["GET", "POST"])
@login_required(_bp)
def profile():
    """Update nickname/email (and optionally password) of the current user.

    NOTE(review): the UPDATE is built by f-string interpolation on purpose --
    this is the SQL-injection vulnerability players must exploit. Do not
    parameterize it.
    """
    global _query
    if request.method == "POST":
        email = request.form["email"]
        nick = request.form["nickName"]
        password = request.form["password"]
        query = ""
        if password:
            pwd_hash = hash_pwd(password)
            query = f"UPDATE usertable SET nickName='{nick}',email='{email}',password='{pwd_hash}' WHERE UID='{session[f'{_bp}_user_id']}'"
        else:
            query = f"UPDATE usertable SET nickName='{nick}',email='{email}' WHERE UID='{session[f'{_bp}_user_id']}'"
        _query.append(query)
        db.sql_insert(_bp, query)
        return redirect(url_for(f"{_bp}.home"))
    return render_template(f"{_templ}/profile.html", csess_obj=get_profile())


@sesqli6.route("/home")
@login_required(_bp)
def home():
    """Render the challenge home page for the logged-in user."""
    return render_template(f"{_templ}/index.html", csess_obj=get_profile())


@sesqli6.route("/logout")
def logout():
    """Clear the challenge session and return to the login page."""
    clear_session(_bp)
    return redirect(url_for(f"{_bp}.login"))
|
en
| 0.338871
|
#!/usr/bin/python3 # Hash password
| 2.313514
| 2
|
tools/livestreamer/librtmp/utils.py
|
Sparker0i/hotstarsportslivestreamer
| 2
|
6626776
|
from librtmp_ffi.binding import librtmp
from librtmp_ffi.ffi import ffi
from binascii import hexlify
from collections import namedtuple
from .aval import AVal
from .compat import bytes, str
from .exceptions import RTMPError
__all__ = ["add_signal_handler", "hash_swf"]
def add_signal_handler():
    """Install a SIGINT handler that interrupts librtmp, then raises KeyboardInterrupt."""
    import signal

    def _on_signal(signum, stack_frame):
        # Ask librtmp to abort any blocking network call before bailing out.
        if signum == signal.SIGINT:
            librtmp.RTMP_UserInterrupt()
        raise KeyboardInterrupt

    signal.signal(signal.SIGINT, _on_signal)
def hash_swf(url, age=30):
    """Hash the SWF at *url* via librtmp's RTMP_HashSWF.

    Args:
        url: SWF URL (str); encoded to UTF-8 before the C call.
        age: passed straight through to RTMP_HashSWF -- presumably a cache
            age; confirm units against the librtmp documentation.

    Returns:
        (hex_digest_str, size) on success. The digest buffer is 32 bytes
        (SHA-256-sized -- confirm the exact algorithm with librtmp docs).

    Raises:
        RTMPError: if RTMP_HashSWF reports failure (non-zero result).
    """
    hash = ffi.new("unsigned char[]", 32)  # 32-byte output buffer for the digest
    size = ffi.new("unsigned int*")        # out-param: size reported by librtmp
    url = bytes(url, "utf8")
    res = librtmp.RTMP_HashSWF(url, size, hash, age)
    if res == 0:
        # Copy the C buffer out and hex-encode it for the caller.
        hash = hexlify(ffi.buffer(hash, 32)[:])
        size = size[0]
        return str(hash, "utf8"), size
    else:
        raise RTMPError("Failed to hash SWF")
# Result record returned by parse_url().
RTMPURL = namedtuple("RTMPURL", ["protocol", "hostname",
                                 "port", "playpath", "app"])
def parse_url(url):
    """Parse an RTMP URL into an RTMPURL tuple using librtmp's RTMP_ParseURL.

    Returns an "empty" RTMPURL (zeros / empty strings) instead of raising
    when RTMP_ParseURL reports failure (result < 1).
    """
    protocol = ffi.new("int*")
    hostname = AVal("")
    port = ffi.new("unsigned int*")
    playpath = AVal("")
    app = AVal("")
    res = librtmp.RTMP_ParseURL(bytes(url, "utf8"), protocol, hostname.aval, port,
                                playpath.aval, app.aval)
    if res < 1:
        result = RTMPURL(0, "", 0, "", "")
    else:
        # Decode the C-side AVal byte buffers back into Python strings.
        result = RTMPURL(protocol[0], str(hostname.value, "utf8"), port[0],
                         str(playpath.value, "utf8"), str(app.value, "utf8"))
    return result
|
from librtmp_ffi.binding import librtmp
from librtmp_ffi.ffi import ffi
from binascii import hexlify
from collections import namedtuple
from .aval import AVal
from .compat import bytes, str
from .exceptions import RTMPError
__all__ = ["add_signal_handler", "hash_swf"]
def add_signal_handler():
    """Install a SIGINT handler that interrupts librtmp, then raises KeyboardInterrupt."""
    import signal

    def _on_signal(signum, stack_frame):
        # Ask librtmp to abort any blocking network call before bailing out.
        if signum == signal.SIGINT:
            librtmp.RTMP_UserInterrupt()
        raise KeyboardInterrupt

    signal.signal(signal.SIGINT, _on_signal)
def hash_swf(url, age=30):
    """Hash the SWF at *url* via librtmp's RTMP_HashSWF.

    Args:
        url: SWF URL (str); encoded to UTF-8 before the C call.
        age: passed straight through to RTMP_HashSWF -- presumably a cache
            age; confirm units against the librtmp documentation.

    Returns:
        (hex_digest_str, size) on success. The digest buffer is 32 bytes
        (SHA-256-sized -- confirm the exact algorithm with librtmp docs).

    Raises:
        RTMPError: if RTMP_HashSWF reports failure (non-zero result).
    """
    hash = ffi.new("unsigned char[]", 32)  # 32-byte output buffer for the digest
    size = ffi.new("unsigned int*")        # out-param: size reported by librtmp
    url = bytes(url, "utf8")
    res = librtmp.RTMP_HashSWF(url, size, hash, age)
    if res == 0:
        # Copy the C buffer out and hex-encode it for the caller.
        hash = hexlify(ffi.buffer(hash, 32)[:])
        size = size[0]
        return str(hash, "utf8"), size
    else:
        raise RTMPError("Failed to hash SWF")
# Result record returned by parse_url().
RTMPURL = namedtuple("RTMPURL", ["protocol", "hostname",
                                 "port", "playpath", "app"])
def parse_url(url):
    """Parse an RTMP URL into an RTMPURL tuple using librtmp's RTMP_ParseURL.

    Returns an "empty" RTMPURL (zeros / empty strings) instead of raising
    when RTMP_ParseURL reports failure (result < 1).
    """
    protocol = ffi.new("int*")
    hostname = AVal("")
    port = ffi.new("unsigned int*")
    playpath = AVal("")
    app = AVal("")
    res = librtmp.RTMP_ParseURL(bytes(url, "utf8"), protocol, hostname.aval, port,
                                playpath.aval, app.aval)
    if res < 1:
        result = RTMPURL(0, "", 0, "", "")
    else:
        # Decode the C-side AVal byte buffers back into Python strings.
        result = RTMPURL(protocol[0], str(hostname.value, "utf8"), port[0],
                         str(playpath.value, "utf8"), str(app.value, "utf8"))
    return result
|
en
| 0.68131
|
Adds a signal handler to handle KeyboardInterrupt.
| 2.217026
| 2
|
stock/old/gdr/spreadsheet.py
|
y-kitamu/stock
| 0
|
6626777
|
<filename>stock/old/gdr/spreadsheet.py
"""spreadsheet.py
Author : <NAME>
Create Date : 2021-12-05 11:17:42
Copyright (c) 2019- <NAME> <<EMAIL>>
"""
from typing import Any, List
from stock.gdr import get_service
UPLOAD_LOG_SS_GID = "1SRi3cDXctknMSMB1TrEctq4vVeqC4C3ZLEkmY-A1CII"
SCOPES = ['https://www.googleapis.com/auth/spreadsheets']
def update_row_to_ss(value: List[Any], sid: str = UPLOAD_LOG_SS_GID):
    """Append *value* as a new row at the end of the spreadsheet given by *sid*."""
    sheets = get_service(scopes=SCOPES, api_name="sheets", api_ver="v4")
    append_request = sheets.spreadsheets().values().append(
        spreadsheetId=sid,
        range="A1:A2",
        body={'values': [value]},
        valueInputOption="USER_ENTERED",
    )
    append_request.execute()
|
<filename>stock/old/gdr/spreadsheet.py
"""spreadsheet.py
Author : <NAME>
Create Date : 2021-12-05 11:17:42
Copyright (c) 2019- <NAME> <<EMAIL>>
"""
from typing import Any, List
from stock.gdr import get_service
UPLOAD_LOG_SS_GID = "1SRi3cDXctknMSMB1TrEctq4vVeqC4C3ZLEkmY-A1CII"
SCOPES = ['https://www.googleapis.com/auth/spreadsheets']
def update_row_to_ss(value: List[Any], sid: str = UPLOAD_LOG_SS_GID):
    """Append *value* as a new row at the end of the spreadsheet given by *sid*."""
    sheets = get_service(scopes=SCOPES, api_name="sheets", api_ver="v4")
    append_request = sheets.spreadsheets().values().append(
        spreadsheetId=sid,
        range="A1:A2",
        body={'values': [value]},
        valueInputOption="USER_ENTERED",
    )
    append_request.execute()
|
ja
| 0.428591
|
spreadsheet.py Author : <NAME> Create Date : 2021-12-05 11:17:42 Copyright (c) 2019- <NAME> <<EMAIL>> `sid`で指定したspreadsheetの末尾に`row`を追記する
| 2.337913
| 2
|
227-basic-calculator-ii/basic-calculator-ii.py
|
notech/leetcode
| 0
|
6626778
|
# -*- coding:utf-8 -*-
# Implement a basic calculator to evaluate a simple expression string.
#
# The expression string contains only non-negative integers, +, -, *, / operators and empty spaces . The integer division should truncate toward zero.
#
# You may assume that the given expression is always valid.
#
# Some examples:
#
# "3+2*2" = 7
# " 3/2 " = 1
# " 3+5 / 2 " = 5
#
#
#
#
# Note: Do not use the eval built-in library function.
#
#
# Credits:Special thanks to @ts for adding this problem and creating all test cases.
class Solution(object):
    def calculate(self, s):
        """Evaluate an expression with non-negative ints, +, -, *, / and spaces.

        Division truncates toward zero, per the problem statement.

        Fixes vs. the original: the original was Python-2-only -- `xrange`
        raises NameError on Python 3, and `/` became float division in
        Python 3 (PEP 238), breaking the floor-division + "+1" trick used
        to emulate truncation. Truncation is now done explicitly.

        :type s: str
        :rtype: int
        """
        if not s:
            return 0
        stack = []   # signed terms; the final answer is their sum
        num = 0      # number currently being accumulated digit by digit
        sign = '+'   # operator that precedes `num`
        for i, ch in enumerate(s):
            if ch.isdigit():
                num = num * 10 + (ord(ch) - ord('0'))
            # Apply the pending operator at each operator char or at end of input.
            if (not ch.isdigit() and not ch.isspace()) or i == len(s) - 1:
                if sign == '+':
                    stack.append(num)
                elif sign == '-':
                    stack.append(-num)
                elif sign == '*':
                    stack.append(stack.pop() * num)
                else:
                    # Truncate toward zero (num is always positive here,
                    # since operands are non-negative integers).
                    prev = stack.pop()
                    quot = abs(prev) // num
                    stack.append(-quot if prev < 0 else quot)
                sign = ch
                num = 0
        return sum(stack)
|
# -*- coding:utf-8 -*-
# Implement a basic calculator to evaluate a simple expression string.
#
# The expression string contains only non-negative integers, +, -, *, / operators and empty spaces . The integer division should truncate toward zero.
#
# You may assume that the given expression is always valid.
#
# Some examples:
#
# "3+2*2" = 7
# " 3/2 " = 1
# " 3+5 / 2 " = 5
#
#
#
#
# Note: Do not use the eval built-in library function.
#
#
# Credits:Special thanks to @ts for adding this problem and creating all test cases.
class Solution(object):
    def calculate(self, s):
        """Evaluate an expression with non-negative ints, +, -, *, / and spaces.

        Division truncates toward zero, per the problem statement.

        Fixes vs. the original: the original was Python-2-only -- `xrange`
        raises NameError on Python 3, and `/` became float division in
        Python 3 (PEP 238), breaking the floor-division + "+1" trick used
        to emulate truncation. Truncation is now done explicitly.

        :type s: str
        :rtype: int
        """
        if not s:
            return 0
        stack = []   # signed terms; the final answer is their sum
        num = 0      # number currently being accumulated digit by digit
        sign = '+'   # operator that precedes `num`
        for i, ch in enumerate(s):
            if ch.isdigit():
                num = num * 10 + (ord(ch) - ord('0'))
            # Apply the pending operator at each operator char or at end of input.
            if (not ch.isdigit() and not ch.isspace()) or i == len(s) - 1:
                if sign == '+':
                    stack.append(num)
                elif sign == '-':
                    stack.append(-num)
                elif sign == '*':
                    stack.append(stack.pop() * num)
                else:
                    # Truncate toward zero (num is always positive here,
                    # since operands are non-negative integers).
                    prev = stack.pop()
                    quot = abs(prev) // num
                    stack.append(-quot if prev < 0 else quot)
                sign = ch
                num = 0
        return sum(stack)
|
en
| 0.76103
|
# -*- coding:utf-8 -*- # Implement a basic calculator to evaluate a simple expression string. # # The expression string contains only non-negative integers, +, -, *, / operators and empty spaces . The integer division should truncate toward zero. # # You may assume that the given expression is always valid. # # Some examples: # # "3+2*2" = 7 # " 3/2 " = 1 # " 3+5 / 2 " = 5 # # # # # Note: Do not use the eval built-in library function. # # # Credits:Special thanks to @ts for adding this problem and creating all test cases. :type s: str :rtype: int
| 4.038778
| 4
|
tests/resources/selenium/generated_from_requests.py
|
abstracta/taurus
| 1
|
6626779
|
import unittest
import re
from time import sleep
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support.ui import Select
from selenium.webdriver.support import expected_conditions as econd
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.keys import Keys
import apiritif
import selenium_taurus_extras
# Interpolation variables shared by the generated scenario below.
_vars = {}
_tpl = selenium_taurus_extras.Template(_vars)
_vars['name'] = 'Name'
_vars['red_pill'] = 'take_it'
class TestRequests(unittest.TestCase):
    """Taurus-generated Selenium scenario replaying the recorded requests.

    NOTE(review): this file is machine-generated (a fixture for the Taurus
    test suite); statement order is significant, so do not hand-edit the
    action sequence.
    """
    def setUp(self):
        # Firefox with webdriver file logging; 3.5s implicit wait everywhere.
        options = webdriver.FirefoxOptions()
        profile = webdriver.FirefoxProfile()
        profile.set_preference('webdriver.log.file', '<somewhere>/webdriver.log')
        self.driver = webdriver.Firefox(profile, firefox_options=options)
        self.driver.implicitly_wait(3.5)
        self.wnd_mng = selenium_taurus_extras.WindowManager(self.driver)
    def tearDown(self):
        self.driver.quit()
    def test_requests(self):
        self.driver.implicitly_wait(3.5)
        with apiritif.transaction('/'):
            self.driver.get('http://blazedemo.com/')
        WebDriverWait(self.driver, 3.5).until(econd.presence_of_element_located((By.XPATH, _tpl.apply("//input[@type='submit']"))), 'Element "//input[@type=\'submit\']" failed to appear within 3.5s')
        self.assertEqual(self.driver.title, _tpl.apply('BlazeDemo'))
        # Mouse actions: hover, double-click, drag (click-and-hold + release).
        ActionChains(self.driver).move_to_element(self.driver.find_element(By.XPATH, _tpl.apply('/html/body/div[2]/div/p[2]/a'))).perform()
        ActionChains(self.driver).double_click(self.driver.find_element(By.XPATH, _tpl.apply('/html/body/div[3]/h2'))).perform()
        ActionChains(self.driver).click_and_hold(self.driver.find_element(By.XPATH, _tpl.apply('/html/body/div[3]/form/select[1]'))).perform()
        ActionChains(self.driver).release(self.driver.find_element(By.XPATH, _tpl.apply('/html/body/div[3]/form/select[1]/option[6]'))).perform()
        Select(self.driver.find_element(By.NAME, _tpl.apply('toPort'))).select_by_visible_text(_tpl.apply('London'))
        self.driver.find_element(By.CSS_SELECTOR, _tpl.apply('body input.btn.btn-primary')).send_keys(Keys.ENTER)
        self.assertEqual(self.driver.find_element(By.ID, _tpl.apply('address')).get_attribute('value'), _tpl.apply('123 Beautiful st.'))
        self.assertEqual(self.driver.find_element(By.XPATH, _tpl.apply('/html/body/div[2]/form/div[1]/label')).get_attribute('innerText'), _tpl.apply('${name}'))
        WebDriverWait(self.driver, 3.5).until(econd.visibility_of_element_located((By.NAME, _tpl.apply('toPort'))), "Element 'toPort' failed to appear within 3.5s")
        self.driver.find_element(By.NAME, _tpl.apply('toPort')).send_keys(_tpl.apply('B'))
        self.driver.find_element(By.XPATH, _tpl.apply('//div[3]/form/select[1]//option[3]')).click()
        self.driver.find_element(By.XPATH, _tpl.apply('//div[3]/form/select[2]//option[6]')).click()
        # Window management: switch to / close windows by index and by name.
        self.wnd_mng.switch(_tpl.apply('0'))
        self.wnd_mng.switch(_tpl.apply('win_ser_local'))
        self.wnd_mng.switch(_tpl.apply('win_ser_1'))
        self.wnd_mng.switch(_tpl.apply('that_window'))
        self.wnd_mng.close(_tpl.apply('1'))
        self.wnd_mng.close(_tpl.apply('win_ser_local'))
        self.wnd_mng.close(_tpl.apply('win_ser_1'))
        self.wnd_mng.close(_tpl.apply('that_window'))
        self.driver.find_element(By.NAME, _tpl.apply('toPort')).submit()
        self.driver.execute_script(_tpl.apply("alert('This is Sparta');"))
        # Frame switching by element and by index.
        self.driver.switch_to.frame(self.driver.find_element(By.NAME, _tpl.apply('my_frame')))
        self.driver.switch_to.frame(1)
        if self.driver.find_element(By.ID, _tpl.apply('editor')).get_attribute('contenteditable'): self.driver.find_element(By.ID, _tpl.apply('editor')).clear(); self.driver.find_element(By.ID, _tpl.apply('editor')).send_keys(_tpl.apply('lo-la-lu'))
        sleep(3)
        self.driver.delete_all_cookies()
        self.driver.find_element(By.LINK_TEXT, _tpl.apply('destination of the week! The Beach!')).click()
        # Negative body assertion: the pattern must NOT appear in the page.
        body = self.driver.page_source
        re_pattern = re.compile(r'contained_text')
        self.assertEqual(0, len(re.findall(re_pattern, body)), "Assertion: 'contained_text' found in BODY")
        with apiritif.transaction('empty'):
            pass
|
import unittest
import re
from time import sleep
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support.ui import Select
from selenium.webdriver.support import expected_conditions as econd
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.keys import Keys
import apiritif
import selenium_taurus_extras
# Interpolation variables shared by the generated scenario below.
_vars = {}
_tpl = selenium_taurus_extras.Template(_vars)
_vars['name'] = 'Name'
_vars['red_pill'] = 'take_it'
class TestRequests(unittest.TestCase):
    """Taurus-generated Selenium scenario replaying the recorded requests.

    NOTE(review): this file is machine-generated (a fixture for the Taurus
    test suite); statement order is significant, so do not hand-edit the
    action sequence.
    """
    def setUp(self):
        # Firefox with webdriver file logging; 3.5s implicit wait everywhere.
        options = webdriver.FirefoxOptions()
        profile = webdriver.FirefoxProfile()
        profile.set_preference('webdriver.log.file', '<somewhere>/webdriver.log')
        self.driver = webdriver.Firefox(profile, firefox_options=options)
        self.driver.implicitly_wait(3.5)
        self.wnd_mng = selenium_taurus_extras.WindowManager(self.driver)
    def tearDown(self):
        self.driver.quit()
    def test_requests(self):
        self.driver.implicitly_wait(3.5)
        with apiritif.transaction('/'):
            self.driver.get('http://blazedemo.com/')
        WebDriverWait(self.driver, 3.5).until(econd.presence_of_element_located((By.XPATH, _tpl.apply("//input[@type='submit']"))), 'Element "//input[@type=\'submit\']" failed to appear within 3.5s')
        self.assertEqual(self.driver.title, _tpl.apply('BlazeDemo'))
        # Mouse actions: hover, double-click, drag (click-and-hold + release).
        ActionChains(self.driver).move_to_element(self.driver.find_element(By.XPATH, _tpl.apply('/html/body/div[2]/div/p[2]/a'))).perform()
        ActionChains(self.driver).double_click(self.driver.find_element(By.XPATH, _tpl.apply('/html/body/div[3]/h2'))).perform()
        ActionChains(self.driver).click_and_hold(self.driver.find_element(By.XPATH, _tpl.apply('/html/body/div[3]/form/select[1]'))).perform()
        ActionChains(self.driver).release(self.driver.find_element(By.XPATH, _tpl.apply('/html/body/div[3]/form/select[1]/option[6]'))).perform()
        Select(self.driver.find_element(By.NAME, _tpl.apply('toPort'))).select_by_visible_text(_tpl.apply('London'))
        self.driver.find_element(By.CSS_SELECTOR, _tpl.apply('body input.btn.btn-primary')).send_keys(Keys.ENTER)
        self.assertEqual(self.driver.find_element(By.ID, _tpl.apply('address')).get_attribute('value'), _tpl.apply('123 Beautiful st.'))
        self.assertEqual(self.driver.find_element(By.XPATH, _tpl.apply('/html/body/div[2]/form/div[1]/label')).get_attribute('innerText'), _tpl.apply('${name}'))
        WebDriverWait(self.driver, 3.5).until(econd.visibility_of_element_located((By.NAME, _tpl.apply('toPort'))), "Element 'toPort' failed to appear within 3.5s")
        self.driver.find_element(By.NAME, _tpl.apply('toPort')).send_keys(_tpl.apply('B'))
        self.driver.find_element(By.XPATH, _tpl.apply('//div[3]/form/select[1]//option[3]')).click()
        self.driver.find_element(By.XPATH, _tpl.apply('//div[3]/form/select[2]//option[6]')).click()
        # Window management: switch to / close windows by index and by name.
        self.wnd_mng.switch(_tpl.apply('0'))
        self.wnd_mng.switch(_tpl.apply('win_ser_local'))
        self.wnd_mng.switch(_tpl.apply('win_ser_1'))
        self.wnd_mng.switch(_tpl.apply('that_window'))
        self.wnd_mng.close(_tpl.apply('1'))
        self.wnd_mng.close(_tpl.apply('win_ser_local'))
        self.wnd_mng.close(_tpl.apply('win_ser_1'))
        self.wnd_mng.close(_tpl.apply('that_window'))
        self.driver.find_element(By.NAME, _tpl.apply('toPort')).submit()
        self.driver.execute_script(_tpl.apply("alert('This is Sparta');"))
        # Frame switching by element and by index.
        self.driver.switch_to.frame(self.driver.find_element(By.NAME, _tpl.apply('my_frame')))
        self.driver.switch_to.frame(1)
        if self.driver.find_element(By.ID, _tpl.apply('editor')).get_attribute('contenteditable'): self.driver.find_element(By.ID, _tpl.apply('editor')).clear(); self.driver.find_element(By.ID, _tpl.apply('editor')).send_keys(_tpl.apply('lo-la-lu'))
        sleep(3)
        self.driver.delete_all_cookies()
        self.driver.find_element(By.LINK_TEXT, _tpl.apply('destination of the week! The Beach!')).click()
        # Negative body assertion: the pattern must NOT appear in the page.
        body = self.driver.page_source
        re_pattern = re.compile(r'contained_text')
        self.assertEqual(0, len(re.findall(re_pattern, body)), "Assertion: 'contained_text' found in BODY")
        with apiritif.transaction('empty'):
            pass
|
none
| 1
| 2.355628
| 2
|
|
supplychain/migrations/0001_initial.py
|
basp0/Mtech-refresher-DRF-assignment
| 0
|
6626780
|
# Generated by Django 3.2.6 on 2021-08-06 13:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema: creates the Customer and MangoFarm tables."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Customer',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('customer_name', models.CharField(max_length=50)),
                ('mango_requirement', models.IntegerField()),
                ('x_coord', models.FloatField()),
                ('y_coord', models.FloatField()),
                ('city', models.CharField(max_length=50)),
            ],
        ),
        migrations.CreateModel(
            name='MangoFarm',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('farm_name', models.CharField(max_length=50)),
                ('max_mangos', models.IntegerField()),
                ('x_coord', models.FloatField()),
                ('y_coord', models.FloatField()),
            ],
        ),
    ]
|
# Generated by Django 3.2.6 on 2021-08-06 13:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema: creates the Customer and MangoFarm tables."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Customer',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('customer_name', models.CharField(max_length=50)),
                ('mango_requirement', models.IntegerField()),
                ('x_coord', models.FloatField()),
                ('y_coord', models.FloatField()),
                ('city', models.CharField(max_length=50)),
            ],
        ),
        migrations.CreateModel(
            name='MangoFarm',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('farm_name', models.CharField(max_length=50)),
                ('max_mangos', models.IntegerField()),
                ('x_coord', models.FloatField()),
                ('y_coord', models.FloatField()),
            ],
        ),
    ]
|
en
| 0.910747
|
# Generated by Django 3.2.6 on 2021-08-06 13:41
| 1.951627
| 2
|
extract_reduced.py
|
sliedes/clang-triage
| 4
|
6626781
|
<reponame>sliedes/clang-triage
#!/usr/bin/env python3
from triage_db import TriageDb
from sha_file_tree import make_sha_tree
def extract_reduced(path):
    """Extract every distinct reduced test case from the triage DB to *path*.

    Files are laid out as s/h/sha (see make_sha_tree) with a .cpp suffix;
    pre-existing files are kept (rm_old=False).
    """
    db = TriageDb()
    make_sha_tree(path, db.iterateDistinctReduced(),
                  suffix='.cpp', rm_old=False)
def main():
    """Entry point: extract reduced cases into the 'cr' directory."""
    extract_reduced('cr')
if __name__ == '__main__':
    main()
|
#!/usr/bin/env python3
from triage_db import TriageDb
from sha_file_tree import make_sha_tree
def extract_reduced(path):
    """Extract every distinct reduced test case from the triage DB to *path*.

    Files are laid out as s/h/sha (see make_sha_tree) with a .cpp suffix;
    pre-existing files are kept (rm_old=False).
    """
    db = TriageDb()
    make_sha_tree(path, db.iterateDistinctReduced(),
                  suffix='.cpp', rm_old=False)
def main():
    """Entry point: extract reduced cases into the 'cr' directory."""
    extract_reduced('cr')
if __name__ == '__main__':
    main()
|
fr
| 0.221828
|
#!/usr/bin/env python3
| 2.402353
| 2
|
tests/testapp/models.py
|
uniacco-tech/django-admin-searchable-dropdown
| 5
|
6626782
|
<reponame>uniacco-tech/django-admin-searchable-dropdown
"""Defines the models for the test app."""
from django.db import models
class Food(models.Model):
    """A simple named item; target of Person's favorite/least-favorite FKs."""
    name = models.CharField(max_length=100)
    def __repr__(self):
        return 'Food#' + str(self.id)
    def __str__(self):
        return self.name
    def alternate_name(self):
        # Upper-cased variant of the name.
        return str(self.name).upper()
class Collection(models.Model):
    """A named collection with a plain M2M to its curating Persons."""
    name = models.CharField(max_length=100)
    curators = models.ManyToManyField('Person', blank=True)
    def __repr__(self):
        return 'Collection#' + str(self.id)
    def __str__(self):
        return self.name
class Person(models.Model):
    """Exercises many relation shapes: self-FK, self-O2O, self-M2M,
    plain FK, FK with custom related names, and a shared-table M2M."""
    name = models.CharField(max_length=100)
    best_friend = models.ForeignKey('self', on_delete=models.CASCADE, blank=True, null=True) # may not be reciprocated
    twin = models.OneToOneField('self', on_delete=models.CASCADE, blank=True, null=True, related_name='rev_twin')
    siblings = models.ManyToManyField('self', blank=True)
    favorite_food = models.ForeignKey(Food, on_delete=models.CASCADE, blank=True, null=True)
    least_favorite_food = models.ForeignKey(Food, on_delete=models.CASCADE, blank=True, null=True,
        related_name='food_is_least_fav', related_query_name='people_with_this_least_fav_food')
    # Shares Collection.curators' join table to model the reverse M2M.
    curated_collections = models.ManyToManyField(Collection, blank=True, db_table=Collection.curators.field.db_table)
    favorite_book = models.ForeignKey('Book', on_delete=models.CASCADE, blank=True, null=True, related_name='people_with_this_fav_book')
    def __repr__(self):
        return 'Person#' + str(self.id)
    def __str__(self):
        return self.name
# Use this and curated_collections.db_table to set up reverse M2M
# See https://code.djangoproject.com/ticket/897
Person.curated_collections.through._meta.managed = False
class Book(models.Model):
    """Model with a non-AutoField primary key (isbn)."""
    isbn = models.IntegerField(primary_key=True)
    title = models.CharField(max_length=100)
    author = models.ForeignKey(Person, on_delete=models.CASCADE, blank=True, null=True)
    coll = models.ForeignKey(Collection, on_delete=models.CASCADE, blank=True, null=True) # just for test purposes
    def __repr__(self):
        return 'Book#' + str(self.isbn)
    def __str__(self):
        return self.title
|
"""Defines the models for the test app."""
from django.db import models
class Food(models.Model):
    """A simple named item; target of Person's favorite/least-favorite FKs."""
    name = models.CharField(max_length=100)
    def __repr__(self):
        return 'Food#' + str(self.id)
    def __str__(self):
        return self.name
    def alternate_name(self):
        # Upper-cased variant of the name.
        return str(self.name).upper()
class Collection(models.Model):
    """A named collection with a plain M2M to its curating Persons."""
    name = models.CharField(max_length=100)
    curators = models.ManyToManyField('Person', blank=True)
    def __repr__(self):
        return 'Collection#' + str(self.id)
    def __str__(self):
        return self.name
class Person(models.Model):
    """Exercises many relation shapes: self-FK, self-O2O, self-M2M,
    plain FK, FK with custom related names, and a shared-table M2M."""
    name = models.CharField(max_length=100)
    best_friend = models.ForeignKey('self', on_delete=models.CASCADE, blank=True, null=True) # may not be reciprocated
    twin = models.OneToOneField('self', on_delete=models.CASCADE, blank=True, null=True, related_name='rev_twin')
    siblings = models.ManyToManyField('self', blank=True)
    favorite_food = models.ForeignKey(Food, on_delete=models.CASCADE, blank=True, null=True)
    least_favorite_food = models.ForeignKey(Food, on_delete=models.CASCADE, blank=True, null=True,
        related_name='food_is_least_fav', related_query_name='people_with_this_least_fav_food')
    # Shares Collection.curators' join table to model the reverse M2M.
    curated_collections = models.ManyToManyField(Collection, blank=True, db_table=Collection.curators.field.db_table)
    favorite_book = models.ForeignKey('Book', on_delete=models.CASCADE, blank=True, null=True, related_name='people_with_this_fav_book')
    def __repr__(self):
        return 'Person#' + str(self.id)
    def __str__(self):
        return self.name
# Use this and curated_collections.db_table to set up reverse M2M
# See https://code.djangoproject.com/ticket/897
Person.curated_collections.through._meta.managed = False
class Book(models.Model):
    """Model with a non-AutoField primary key (isbn)."""
    isbn = models.IntegerField(primary_key=True)
    title = models.CharField(max_length=100)
    author = models.ForeignKey(Person, on_delete=models.CASCADE, blank=True, null=True)
    coll = models.ForeignKey(Collection, on_delete=models.CASCADE, blank=True, null=True) # just for test purposes
    def __repr__(self):
        return 'Book#' + str(self.isbn)
    def __str__(self):
        return self.title
|
en
| 0.418418
|
Defines the models for the test app. #' + str(self.id) #' + str(self.id) # may not be reciprocated #' + str(self.id) # Use this and curated_collections.db_table to set up reverse M2M # See https://code.djangoproject.com/ticket/897 # just for test purposes #' + str(self.isbn)
| 2.634712
| 3
|
makemigrations.py
|
msukmanowsky/django-naics-scian
| 1
|
6626783
|
#!/usr/bin/env python3
"""Run Django makemigrations for the naics_scian and naics_tables apps."""
import os
import sys
from django.core.management import execute_from_command_line
if __name__ == "__main__":
    # Fall back to the local 'settings' module unless one is already configured.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
    args = sys.argv + ["makemigrations", "naics_scian", "naics_tables"]
    execute_from_command_line(args)
|
#!/usr/bin/env python3
"""Run Django makemigrations for the naics_scian and naics_tables apps."""
import os
import sys
from django.core.management import execute_from_command_line
if __name__ == "__main__":
    # Fall back to the local 'settings' module unless one is already configured.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
    args = sys.argv + ["makemigrations", "naics_scian", "naics_tables"]
    execute_from_command_line(args)
|
fr
| 0.221828
|
#!/usr/bin/env python3
| 1.616405
| 2
|
test/test_itemcollection.py
|
jmorton-usgs/sat-stac
| 61
|
6626784
|
<reponame>jmorton-usgs/sat-stac
import os
import unittest
from satstac import ItemCollection, Item
from shutil import rmtree
testpath = os.path.dirname(__file__)
class Test(unittest.TestCase):
    """Tests for satstac.ItemCollection: loading, saving, filtering, downloads.

    NOTE(review): download tests hit real asset URLs from the fixture items --
    they require network access.
    """
    # Scratch directory for downloaded assets; removed in tearDownClass.
    path = os.path.join(testpath, 'test-item')
    @classmethod
    def tearDownClass(cls):
        """ Remove test files """
        if os.path.exists(cls.path):
            rmtree(cls.path)
    def load_items(self):
        # Helper: load the fixture item collection shipped next to this test.
        return ItemCollection.load(os.path.join(testpath, 'items.json'))
    def test_load(self):
        """ Initialize Scenes with list of Scene objects """
        items = self.load_items()
        assert(len(items._collections) == 1)
        assert(len(items) == 2)
        assert(isinstance(items[0], Item))
    def test_save(self):
        """ Save items list """
        items = self.load_items()
        fname = os.path.join(testpath, 'save-test.json')
        items.save(fname)
        assert(os.path.exists(fname))
        os.remove(fname)
        assert(not os.path.exists(fname))
    def test_collection(self):
        """ Get a collection """
        items = self.load_items()
        col = items.collection('landsat-8-l1')
        assert(col.id == 'landsat-8-l1')
    def test_no_collection(self):
        """ Attempt to get non-existent collection """
        items = self.load_items()
        col = items.collection('nosuchcollection')
        assert(col is None)
    def test_get_properties(self):
        """ Get set of properties """
        items = self.load_items()
        p = items.properties('eo:platform')
        assert(len(p) == 1)
        assert(p[0] == 'landsat-8')
    def test_print_items(self):
        """ Print summary of items """
        items = self.load_items()
        print(items.summary())
    def test_dates(self):
        """ Get dates of all items """
        items = self.load_items()
        dates = items.dates()
        assert(len(dates) == 1)
    def test_text_calendar(self):
        """ Get calendar """
        items = self.load_items()
        cal = items.calendar()
        assert(len(cal) > 250)
    def test_download_thumbnails(self):
        """ Download all thumbnails """
        items = self.load_items()
        fnames = items.download(key='thumbnail')
        for f in fnames:
            assert(os.path.exists(f))
            os.remove(f)
            assert(not os.path.exists(f))
    def test_filter(self):
        # Filtering by property value keeps only matching items.
        items = self.load_items()
        items.filter('eo:cloud_cover', [100])
        assert(len(items) == 1)
    def test_download_assets(self):
        """ Download multiple assets from all items """
        items = self.load_items()
        filenames = items.download_assets(keys=['MTL', 'ANG'], filename_template=self.path)
        assert(len(filenames) == 2)
        for fnames in filenames:
            assert(len(fnames) == 2)
            for f in fnames:
                assert(os.path.exists(f))
    def test_download(self):
        """ Download a data file from all items """
        items = self.load_items()
        fnames = items.download(key='MTL', filename_template=self.path)
        assert(len(fnames) == 2)
        for f in fnames:
            assert(os.path.exists(f))
|
import os
import unittest
from satstac import ItemCollection, Item
from shutil import rmtree
testpath = os.path.dirname(__file__)
class Test(unittest.TestCase):
    """Tests for satstac.ItemCollection, driven by the items.json fixture next to this file."""

    # Download target directory used by the download tests; removed in tearDownClass.
    path = os.path.join(testpath, 'test-item')

    @classmethod
    def tearDownClass(cls):
        """ Remove test files """
        if os.path.exists(cls.path):
            rmtree(cls.path)

    def load_items(self):
        # Helper: each test loads a fresh copy of the shared fixture.
        return ItemCollection.load(os.path.join(testpath, 'items.json'))

    def test_load(self):
        """ Initialize Scenes with list of Scene objects """
        items = self.load_items()
        # Fixture holds 2 items that share a single collection.
        assert(len(items._collections) == 1)
        assert(len(items) == 2)
        assert(isinstance(items[0], Item))

    def test_save(self):
        """ Save items list """
        items = self.load_items()
        fname = os.path.join(testpath, 'save-test.json')
        items.save(fname)
        assert(os.path.exists(fname))
        # Clean up the file written above.
        os.remove(fname)
        assert(not os.path.exists(fname))

    def test_collection(self):
        """ Get a collection """
        items = self.load_items()
        col = items.collection('landsat-8-l1')
        assert(col.id == 'landsat-8-l1')

    def test_no_collection(self):
        """ Attempt to get non-existent collection """
        items = self.load_items()
        col = items.collection('nosuchcollection')
        # Unknown collection ids yield None rather than raising.
        assert(col is None)

    def test_get_properties(self):
        """ Get set of properties """
        items = self.load_items()
        p = items.properties('eo:platform')
        assert(len(p) == 1)
        assert(p[0] == 'landsat-8')

    def test_print_items(self):
        """ Print summary of items """
        items = self.load_items()
        # Smoke test only: verifies summary() does not raise.
        print(items.summary())

    def test_dates(self):
        """ Get dates of all items """
        items = self.load_items()
        dates = items.dates()
        assert(len(dates) == 1)

    def test_text_calendar(self):
        """ Get calendar """
        items = self.load_items()
        cal = items.calendar()
        # Loose size check on the rendered text calendar.
        assert(len(cal) > 250)

    def test_download_thumbnails(self):
        """ Download all thumbnails """
        # NOTE(review): performs real downloads - requires network access.
        items = self.load_items()
        fnames = items.download(key='thumbnail')
        for f in fnames:
            assert(os.path.exists(f))
            os.remove(f)
            assert(not os.path.exists(f))
        #shutil.rmtree(os.path.join(testpath, 'landsat-8-l1'))

    def test_filter(self):
        # Filtering on eo:cloud_cover keeps only matching items (2 -> 1).
        items = self.load_items()
        items.filter('eo:cloud_cover', [100])
        assert(len(items) == 1)

    def test_download_assets(self):
        """ Download multiple assets from all items """
        # NOTE(review): performs real downloads - requires network access.
        items = self.load_items()
        filenames = items.download_assets(keys=['MTL', 'ANG'], filename_template=self.path)
        assert(len(filenames) == 2)
        for fnames in filenames:
            assert(len(fnames) == 2)
            for f in fnames:
                assert(os.path.exists(f))

    def test_download(self):
        """ Download a data file from all items """
        # NOTE(review): performs real downloads - requires network access.
        items = self.load_items()
        fnames = items.download(key='MTL', filename_template=self.path)
        assert(len(fnames) == 2)
        for f in fnames:
            assert(os.path.exists(f))
|
en
| 0.596018
|
Remove test files Initialize Scenes with list of Scene objects Save items list Get a collection Attempt to get non-existent collection Get set of properties Print summary of items Get dates of all items Get calendar Download all thumbnails #shutil.rmtree(os.path.join(testpath, 'landsat-8-l1')) Download multiple assets from all items Download a data file from all items
| 2.727763
| 3
|
pyot/utils/decorators.py
|
bangingheads/Pyot
| 1
|
6626785
|
from functools import wraps
class cached_property:
    """
    Descriptor that turns a single-argument method into a lazily computed,
    per-instance cached attribute.

    An existing method can be wrapped directly:
    (e.g. ``url = cached_property(get_absolute_url)``).
    """

    # Attribute name this descriptor is bound to; filled in by __set_name__.
    name = None

    @staticmethod
    def func(instance):  # pylint: disable=method-hidden
        # Placeholder shadowed by the real function once __set_name__ runs;
        # reaching it means the descriptor was never assigned on a class.
        raise TypeError(
            'Cannot use cached_property instance without calling '
            '__set_name__() on it.'
        )

    def __init__(self, func, name=None):
        # ``name`` is accepted for backwards compatibility but the bound
        # attribute name is taken from __set_name__.
        self.real_func = func
        self.__doc__ = getattr(func, '__doc__')

    def __set_name__(self, owner, name):
        if self.name is not None and name != self.name:
            # The same descriptor object was assigned under two attribute names.
            raise TypeError(
                "Cannot assign the same cached_property to two different names "
                "(%r and %r)." % (self.name, name)
            )
        if self.name is None:
            self.name = name
            self.func = self.real_func

    def __get__(self, instance, cls=None):
        """
        Compute the wrapped function once and store the result under
        ``self.name`` in ``instance.__dict__``; subsequent attribute access
        finds the cached value there and never reaches this descriptor again.
        """
        if instance is None:
            return self
        value = self.func(instance)
        instance.__dict__[self.name] = value
        return value
|
from functools import wraps
class cached_property:
"""
Decorator that converts a method with a single self argument into a
property cached on the instance.
A cached property can be made out of an existing method:
(e.g. ``url = cached_property(get_absolute_url)``).
"""
name = None
@staticmethod
def func(instance): # pylint: disable=method-hidden
raise TypeError(
'Cannot use cached_property instance without calling '
'__set_name__() on it.'
)
def __init__(self, func, name=None):
self.real_func = func
self.__doc__ = getattr(func, '__doc__')
def __set_name__(self, owner, name):
if self.name is None:
self.name = name
self.func = self.real_func
elif name != self.name:
raise TypeError(
"Cannot assign the same cached_property to two different names "
"(%r and %r)." % (self.name, name)
)
def __get__(self, instance, cls=None):
"""
Call the function and put the return value in instance.__dict__ so that
subsequent attribute access on the instance returns the cached value
instead of calling cached_property.__get__().
"""
if instance is None:
return self
res = instance.__dict__[self.name] = self.func(instance)
return res
|
en
| 0.63286
|
Decorator that converts a method with a single self argument into a property cached on the instance. A cached property can be made out of an existing method: (e.g. ``url = cached_property(get_absolute_url)``). # pylint: disable=method-hidden Call the function and put the return value in instance.__dict__ so that subsequent attribute access on the instance returns the cached value instead of calling cached_property.__get__().
| 3.607502
| 4
|
aws-classic-py-aws-load-balancer-controller-helm-release/iam.py
|
tusharshahrs/pulumi-home
| 4
|
6626786
|
<gh_stars>1-10
import json
import pulumi_aws as aws
managed_policy_arns = [
"arn:aws:iam::aws:policy/AmazonEKSWorkerNodePolicy",
"arn:aws:iam::aws:policy/AmazonEKS_CNI_Policy",
"arn:aws:iam::aws:policy/AmazonEC2ContainerRegistryReadOnly",
]
# Creates a role and attaches the EKS worker node IAM managed policies
def create_role(name: str) -> aws.iam.Role:
    """
    Create an IAM role assumable by EC2 and attach each policy listed in
    ``managed_policy_arns`` to it.

    :param name: logical Pulumi resource name; also used to derive the
        names of the per-policy attachment resources.
    :return: the created ``aws.iam.Role`` resource.
    """
    # Trust policy: only the EC2 service may assume this role
    # (EKS worker nodes run on EC2 instances).
    role = aws.iam.Role(name, assume_role_policy=json.dumps({
        "Version": "2012-10-17",
        "Statement": [
            {
                "Sid": "AllowAssumeRole",
                "Effect": "Allow",
                "Principal": {
                    "Service": "ec2.amazonaws.com",
                },
                "Action": "sts:AssumeRole"
            }
        ]
    }))
    for i, policy in enumerate(managed_policy_arns):
        # Create RolePolicyAttachment without returning it.
        # (Registering the resource with Pulumi is the side effect we need.)
        rpa = aws.iam.RolePolicyAttachment(f"{name}-policy-{i}",
                                           policy_arn=policy,
                                           role=role.id)
    return role
|
import json
import pulumi_aws as aws
managed_policy_arns = [
"arn:aws:iam::aws:policy/AmazonEKSWorkerNodePolicy",
"arn:aws:iam::aws:policy/AmazonEKS_CNI_Policy",
"arn:aws:iam::aws:policy/AmazonEC2ContainerRegistryReadOnly",
]
# Creates a role and attaches the EKS worker node IAM managed policies
def create_role(name: str) -> aws.iam.Role:
role = aws.iam.Role(name, assume_role_policy=json.dumps({
"Version": "2012-10-17",
"Statement": [
{
"Sid": "AllowAssumeRole",
"Effect": "Allow",
"Principal": {
"Service": "ec2.amazonaws.com",
},
"Action": "sts:AssumeRole"
}
]
}))
for i, policy in enumerate(managed_policy_arns):
# Create RolePolicyAttachment without returning it.
rpa = aws.iam.RolePolicyAttachment(f"{name}-policy-{i}",
policy_arn=policy,
role=role.id)
return role
|
en
| 0.857003
|
# Creates a role and attaches the EKS worker node IAM managed policies # Create RolePolicyAttachment without returning it.
| 2.227902
| 2
|
data/transcoder_evaluation_gfg/python/COUNT_DERANGEMENTS_PERMUTATION_SUCH_THAT_NO_ELEMENT_APPEARS_IN_ITS_ORIGINAL_POSITION_1.py
|
mxl1n/CodeGen
| 241
|
6626787
|
<reponame>mxl1n/CodeGen
# Copyright (c) 2019-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
def f_gold(n):
    """
    Count derangements of n elements (permutations with no fixed point).

    Uses the recurrence D(i) = (i - 1) * (D(i - 1) + D(i - 2)) with
    D(0) = 1, D(1) = 0.

    :param n: non-negative number of elements.
    :return: number of derangements of n items.
    """
    # Base cases handled explicitly: the original table-filling code wrote
    # der[1] and der[2] unconditionally and raised IndexError for n < 2.
    if n == 0:
        return 1
    if n == 1:
        return 0
    der = [0] * (n + 1)
    der[0], der[1], der[2] = 1, 0, 1
    for i in range(3, n + 1):
        der[i] = (i - 1) * (der[i - 1] + der[i - 2])
    return der[n]
#TOFILL
# Driver: compare a candidate implementation f_filled against the reference
# f_gold on fixed parameter sets and report how many agree.
if __name__ == '__main__':
    # Each tuple is one positional-argument set for both functions.
    param = [
        (22,),
        (91,),
        (33,),
        (93,),
        (90,),
        (59,),
        (88,),
        (41,),
        (70,),
        (63,)
    ]
    n_success = 0
    for i, parameters_set in enumerate(param):
        # NOTE(review): f_filled is not defined in this file; it is expected
        # to be spliced in at the #TOFILL marker above - confirm with the
        # generation tooling before running standalone.
        if f_filled(*parameters_set) == f_gold(*parameters_set):
            n_success+=1
    print("#Results: %i, %i" % (n_success, len(param)))
|
# Copyright (c) 2019-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
def f_gold(n):
    """
    Count derangements of n elements (permutations with no fixed point).

    Uses the recurrence D(i) = (i - 1) * (D(i - 1) + D(i - 2)) with
    D(0) = 1, D(1) = 0.

    :param n: non-negative number of elements.
    :return: number of derangements of n items.
    """
    # Base cases handled explicitly: the original table-filling code wrote
    # der[1] and der[2] unconditionally and raised IndexError for n < 2.
    if n == 0:
        return 1
    if n == 1:
        return 0
    der = [0] * (n + 1)
    der[0], der[1], der[2] = 1, 0, 1
    for i in range(3, n + 1):
        der[i] = (i - 1) * (der[i - 1] + der[i - 2])
    return der[n]
#TOFILL
if __name__ == '__main__':
param = [
(22,),
(91,),
(33,),
(93,),
(90,),
(59,),
(88,),
(41,),
(70,),
(63,)
]
n_success = 0
for i, parameters_set in enumerate(param):
if f_filled(*parameters_set) == f_gold(*parameters_set):
n_success+=1
print("#Results: %i, %i" % (n_success, len(param)))
|
en
| 0.894477
|
# Copyright (c) 2019-present, Facebook, Inc. # All rights reserved. # # This source code is licensed under the license found in the # LICENSE file in the root directory of this source tree. # #TOFILL
| 3.253258
| 3
|
1-list.py
|
omostovyijr/TasksForLecture8
| 0
|
6626788
|
<reponame>omostovyijr/TasksForLecture8<gh_stars>0
# Demonstrates basic list operations: endpoint access, insert/append,
# and removal from both ends (which restores the original list).
ages = [10, 12, 15, 15, 17, 18, 18, 19, 20]
first, last = ages[0], ages[-1]  # endpoints of the list
ages.insert(0, 11)  # Add at the top of array
ages.append(22)  # Add at the end of array
del ages[0]  # Remove at the top of array
del ages[-1]  # Remove at the end of array
print(ages)
|
ages = [10, 12, 15, 15, 17, 18, 18, 19, 20]
first = ages[0]
last = ages[-1]
ages.insert(0, 11) # Add at the top of array
ages.append(22) # Add at the end of array
ages.pop(0) # Remove at the top of array
ages.pop(-1) # Remove at the end of array
print(ages)
|
en
| 0.750128
|
# Add at the top of array # Add at the end of array # Remove at the top of array # Remove at the end of array
| 3.86519
| 4
|
preprocess/body_model.py
|
xuchen-ethz/snarf
| 150
|
6626789
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG),
# acting on behalf of its Max Planck Institute for Intelligent Systems and the
# Max Planck Institute for Biological Cybernetics. All rights reserved.
#
# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is holder of all proprietary rights
# on this computer program. You can only use this computer program if you have closed a license agreement
# with MPG or you get the right to use the computer program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and liable to prosecution.
# Contact: <EMAIL>
#
#
# If you use this code in a research publication please consider citing the following:
#
# Expressive Body Capture: 3D Hands, Face, and Body from a Single Image <https://arxiv.org/abs/1904.05866>
#
#
# Code Developed by:
# <NAME> <https://nghorbani.github.io/>
#
# 2018.12.13
import numpy as np
import torch
import torch.nn as nn
# from smplx.lbs import lbs
from lbs import lbs
import pickle
class Struct(object):
    """Lightweight record: every keyword argument becomes an instance attribute."""

    def __init__(self, **kwargs):
        # Copy all keywords straight into the instance dictionary.
        self.__dict__.update(kwargs)
def to_tensor(array, dtype=torch.float32):
    """
    Convert *array* (numpy array, list, ...) to a torch tensor of *dtype*.

    Fix: the original guard tested ``'torch.tensor' in str(type(array))``,
    which can never match (``str(type(t))`` contains 'torch.Tensor', capital
    T), so tensors were re-copied via torch.tensor() with a UserWarning and
    the guarded branch - which implicitly returned None - was dead code.
    """
    if torch.is_tensor(array):
        # Already a tensor: cast in place of re-wrapping, avoiding the copy warning.
        return array.to(dtype)
    return torch.tensor(array, dtype=dtype)
def to_np(array, dtype=np.float32):
    """Return *array* as a numpy array of *dtype*, densifying scipy sparse input first."""
    # Sparse matrices are detected by type-name so scipy need not be imported here.
    dense = array.todense() if 'scipy.sparse' in str(type(array)) else array
    return np.array(dense, dtype=dtype)
class BodyModel(nn.Module):
    """
    SMPL-family body model (SMPL / SMPL+H / SMPL-X / MANO) as an nn.Module.

    Model data is read from a .pkl/.npz file; pose, shape and translation
    are registered as trainable nn.Parameters, and forward() applies linear
    blend skinning (lbs) to produce posed vertices, joints and per-bone
    transforms.
    """

    def __init__(self,
                 bm_path,
                 params=None,
                 num_betas=10,
                 batch_size=1, v_template = None,
                 num_dmpls=None, path_dmpl=None,
                 num_expressions=10,
                 use_posedirs=True,
                 dtype=torch.float32):
        super(BodyModel, self).__init__()
        '''
        :param bm_path: path to a SMPL model as pkl file
        :param num_betas: number of shape parameters to include.
            if betas are provided in params, num_betas would be overloaded with number of thoes betas
        :param batch_size: number of smpl vertices to get
        :param device: default on gpu
        :param dtype: float precision of the compuations
        :return: verts, trans, pose, betas
        '''
        # Todo: if params the batchsize should be read from one of the params
        self.dtype = dtype
        if params is None: params = {}
        # -- Load SMPL params --
        if '.npz' in bm_path:
            smpl_dict = np.load(bm_path, encoding='latin1')
        elif '.pkl' in bm_path:
            with open(bm_path, 'rb') as smpl_file:
                smpl_dict = Struct(**pickle.load(smpl_file,encoding='latin1'))
        else:
            raise ValueError('bm_path should be either a .pkl nor .npz file')
        # Infer the model variant from the number of pose-corrective joints
        # encoded in posedirs (3 rotation dimensions per joint).
        njoints = smpl_dict.posedirs.shape[2] // 3
        self.model_type = {69: 'smpl', 153: 'smplh', 162: 'smplx', 45: 'mano'}[njoints]
        # NOTE(review): 'mano' appears twice in this membership list (harmless).
        assert self.model_type in ['smpl', 'smplh', 'smplx', 'mano', 'mano'], ValueError(
            'model_type should be in smpl/smplh/smplx/mano.')
        self.use_dmpl = False
        if num_dmpls is not None:
            if path_dmpl is not None:
                self.use_dmpl = True
            else:
                raise (ValueError('path_dmpl should be provided when using dmpls!'))
        if self.use_dmpl and self.model_type in ['smplx', 'mano']: raise (
            NotImplementedError('DMPLs only work with SMPL/SMPLH models for now.'))
        # Mean template vertices, tiled to the requested batch size.
        if v_template is None:
            v_template = np.repeat(smpl_dict.v_template[np.newaxis], batch_size, axis=0)
        else:
            v_template = np.repeat(v_template[np.newaxis], batch_size, axis=0)
        self.register_buffer('v_template', torch.tensor(v_template, dtype=dtype))
        self.register_buffer('f', torch.tensor(smpl_dict.f.astype(np.int32), dtype=torch.int32))
        # Explicit values in `params` override the requested numbers of betas/dmpls.
        if len(params):
            if 'betas' in params.keys():
                num_betas = params['betas'].shape[1]
            if 'dmpls' in params.keys():
                num_dmpls = params['dmpls'].shape[1]
        num_total_betas = smpl_dict.shapedirs.shape[-1]
        if num_betas < 1:
            num_betas = num_total_betas
        shapedirs = smpl_dict.shapedirs[:, :, :num_betas]
        self.register_buffer('shapedirs', torch.tensor(to_np(shapedirs), dtype=dtype))
        if self.model_type == 'smplx':
            # SMPL-X packs expression components after the shape components
            # inside the same shapedirs tensor.
            begin_shape_id = 300 if smpl_dict.shapedirs.shape[-1] > 300 else 10
            exprdirs = smpl_dict.shapedirs[:, :, begin_shape_id:(begin_shape_id + num_expressions)]
            self.register_buffer('exprdirs', torch.tensor(exprdirs, dtype=dtype))
            expression = torch.tensor(np.zeros((batch_size, num_expressions)), dtype=dtype, requires_grad=True)
            self.register_parameter('expression', nn.Parameter(expression, requires_grad=True))
        if self.use_dmpl:
            dmpldirs = np.load(path_dmpl)['eigvec']
            dmpldirs = dmpldirs[:, :, :num_dmpls]
            self.register_buffer('dmpldirs', torch.tensor(dmpldirs, dtype=dtype))
        # Regressor for joint locations given shape - 6890 x 24
        self.register_buffer('J_regressor', to_tensor(to_np(
            smpl_dict.J_regressor), dtype=dtype))
        # Pose blend shape basis: 6890 x 3 x 207, reshaped to 6890*30 x 207
        if use_posedirs:
            posedirs = smpl_dict.posedirs
            posedirs = posedirs.reshape([posedirs.shape[0] * 3, -1]).T
            self.register_buffer('posedirs', torch.tensor(posedirs, dtype=dtype))
        else:
            self.posedirs = None
        # indices of parents for each joints
        kintree_table = smpl_dict.kintree_table.astype(np.int32)
        self.register_buffer('kintree_table', torch.tensor(kintree_table, dtype=torch.int32))
        # LBS weights
        # weights = np.repeat(smpl_dict.weights[np.newaxis], batch_size, axis=0)
        weights = smpl_dict.weights
        self.register_buffer('weights', torch.tensor(weights, dtype=dtype))
        # Global translation: taken from params if given, else zero-initialized.
        if 'trans' in params.keys():
            trans = params['trans']
        else:
            trans = torch.tensor(np.zeros((batch_size, 3)), dtype=dtype, requires_grad=True)
        self.register_parameter('trans', nn.Parameter(trans, requires_grad=True))
        # root_orient
        # if self.model_type in ['smpl', 'smplh']:
        root_orient = torch.tensor(np.zeros((batch_size, 3)), dtype=dtype, requires_grad=True)
        self.register_parameter('root_orient', nn.Parameter(root_orient, requires_grad=True))
        # pose_body: 21 body joints x 3 axis-angle dims = 63 (not used by MANO)
        if self.model_type in ['smpl', 'smplh', 'smplx']:
            pose_body = torch.tensor(np.zeros((batch_size, 63)), dtype=dtype, requires_grad=True)
            self.register_parameter('pose_body', nn.Parameter(pose_body, requires_grad=True))
        # pose_hand: size depends on the model variant (see branches below)
        if 'pose_hand' in params.keys():
            pose_hand = params['pose_hand']
        else:
            if self.model_type in ['smpl']:
                pose_hand = torch.tensor(np.zeros((batch_size, 1 * 3 * 2)), dtype=dtype, requires_grad=True)
            elif self.model_type in ['smplh', 'smplx']:
                pose_hand = torch.tensor(np.zeros((batch_size, 15 * 3 * 2)), dtype=dtype, requires_grad=True)
            elif self.model_type in ['mano']:
                pose_hand = torch.tensor(np.zeros((batch_size, 15 * 3)), dtype=dtype, requires_grad=True)
        self.register_parameter('pose_hand', nn.Parameter(pose_hand, requires_grad=True))
        # face poses (SMPL-X only): jaw plus left/right eyes
        if self.model_type == 'smplx':
            pose_jaw = torch.tensor(np.zeros((batch_size, 1 * 3)), dtype=dtype, requires_grad=True)
            self.register_parameter('pose_jaw', nn.Parameter(pose_jaw, requires_grad=True))
            pose_eye = torch.tensor(np.zeros((batch_size, 2 * 3)), dtype=dtype, requires_grad=True)
            self.register_parameter('pose_eye', nn.Parameter(pose_eye, requires_grad=True))
        if 'betas' in params.keys():
            betas = params['betas']
        else:
            betas = torch.tensor(np.zeros((batch_size, num_betas)), dtype=dtype, requires_grad=True)
        self.register_parameter('betas', nn.Parameter(betas, requires_grad=True))
        if self.use_dmpl:
            if 'dmpls' in params.keys():
                dmpls = params['dmpls']
            else:
                dmpls = torch.tensor(np.zeros((batch_size, num_dmpls)), dtype=dtype, requires_grad=True)
            self.register_parameter('dmpls', nn.Parameter(dmpls, requires_grad=True))
        self.batch_size = batch_size

    def r(self):
        # Convenience: vertices for the current parameters as a numpy array on CPU.
        from human_body_prior.tools.omni_tools import copy2cpu as c2c
        return c2c(self.forward().v)

    def forward(self, root_orient=None, pose_body=None, pose_hand=None, pose_jaw=None, pose_eye=None, betas=None,
                trans=None, dmpls=None, expression=None, return_dict=False, v_template =None, **kwargs):
        '''
        :param root_orient: Nx3
        :param pose_body:
        :param pose_hand:
        :param pose_jaw:
        :param pose_eye:
        :param kwargs:
        :return:
        '''
        assert not (v_template is not None and betas is not None), ValueError('vtemplate and betas could not be used jointly.')
        assert self.model_type in ['smpl', 'smplh', 'smplx', 'mano', 'mano'], ValueError(
            'model_type should be in smpl/smplh/smplx/mano')
        # Fall back to the registered parameters for any input not supplied.
        if root_orient is None: root_orient = self.root_orient
        if self.model_type in ['smplh', 'smpl']:
            if pose_body is None: pose_body = self.pose_body
            if pose_hand is None: pose_hand = self.pose_hand
        elif self.model_type == 'smplx':
            if pose_body is None: pose_body = self.pose_body
            if pose_hand is None: pose_hand = self.pose_hand
            if pose_jaw is None: pose_jaw = self.pose_jaw
            if pose_eye is None: pose_eye = self.pose_eye
        elif self.model_type in ['mano', 'mano']:
            if pose_hand is None: pose_hand = self.pose_hand
            # NOTE(review): duplicated fallback line kept from the original (no-op).
            if pose_hand is None: pose_hand = self.pose_hand
        if trans is None: trans = self.trans
        if v_template is None: v_template = self.v_template
        if betas is None: betas = self.betas
        if v_template.size(0) != pose_body.size(0):
            v_template = v_template[:pose_body.size(0)]  # this is fine since actual batch size will
                                                         # only be equal to or less than specified batch
                                                         # size
        # Concatenate the full axis-angle pose vector for the current variant.
        if self.model_type in ['smplh', 'smpl']:
            full_pose = torch.cat([root_orient, pose_body, pose_hand], dim=1)
        elif self.model_type == 'smplx':
            full_pose = torch.cat([root_orient, pose_body, pose_jaw, pose_eye, pose_hand],
                                  dim=1)  # orient:3, body:63, jaw:3, eyel:3, eyer:3, handl, handr
        elif self.model_type in ['mano', 'mano']:
            full_pose = torch.cat([root_orient, pose_hand], dim=1)
        # Assemble the shape space: betas plus optional dmpls or expressions.
        if self.use_dmpl:
            if dmpls is None: dmpls = self.dmpls
            shape_components = torch.cat([betas, dmpls], dim=-1)
            shapedirs = torch.cat([self.shapedirs, self.dmpldirs], dim=-1)
        elif self.model_type == 'smplx':
            if expression is None: expression = self.expression
            shape_components = torch.cat([betas, expression], dim=-1)
            shapedirs = torch.cat([self.shapedirs, self.exprdirs], dim=-1)
        else:
            shape_components = betas
            shapedirs = self.shapedirs
        # Linear blend skinning: posed vertices, joints and per-bone transforms.
        verts, joints, bone_transforms = lbs(betas=shape_components, pose=full_pose, v_template=v_template,
                                             shapedirs=shapedirs, posedirs=self.posedirs,
                                             J_regressor=self.J_regressor, parents=self.kintree_table[0].long(),
                                             lbs_weights=self.weights,
                                             dtype=self.dtype)
        # Apply the global translation after skinning.
        Jtr = joints + trans.unsqueeze(dim=1)
        verts = verts + trans.unsqueeze(dim=1)
        res = {}
        res['v'] = verts
        res['f'] = self.f
        res['bone_transforms'] = bone_transforms
        res['betas'] = self.betas
        res['Jtr'] = Jtr  # Todo: ik can be made with vposer
        if self.model_type == 'smpl':
            res['pose_body'] = pose_body
        elif self.model_type == 'smplh':
            res['pose_body'] = pose_body
            res['pose_hand'] = pose_hand
        elif self.model_type == 'smplx':
            res['pose_body'] = pose_body
            res['pose_hand'] = pose_hand
            res['pose_jaw'] = pose_jaw
            res['pose_eye'] = pose_eye
        elif self.model_type in ['mano', 'mano']:
            res['pose_hand'] = pose_hand
        res['full_pose'] = full_pose
        if not return_dict:
            # Wrap the dict in an ad-hoc object so callers can use attribute access.
            class result_meta(object):
                pass
            res_class = result_meta()
            for k, v in res.items():
                res_class.__setattr__(k, v)
            res = res_class
        return res
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG),
# acting on behalf of its Max Planck Institute for Intelligent Systems and the
# Max Planck Institute for Biological Cybernetics. All rights reserved.
#
# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is holder of all proprietary rights
# on this computer program. You can only use this computer program if you have closed a license agreement
# with MPG or you get the right to use the computer program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and liable to prosecution.
# Contact: <EMAIL>
#
#
# If you use this code in a research publication please consider citing the following:
#
# Expressive Body Capture: 3D Hands, Face, and Body from a Single Image <https://arxiv.org/abs/1904.05866>
#
#
# Code Developed by:
# <NAME> <https://nghorbani.github.io/>
#
# 2018.12.13
import numpy as np
import torch
import torch.nn as nn
# from smplx.lbs import lbs
from lbs import lbs
import pickle
class Struct(object):
def __init__(self, **kwargs):
for key, val in kwargs.items():
setattr(self, key, val)
def to_tensor(array, dtype=torch.float32):
if 'torch.tensor' not in str(type(array)):
return torch.tensor(array, dtype=dtype)
def to_np(array, dtype=np.float32):
if 'scipy.sparse' in str(type(array)):
array = array.todense()
return np.array(array, dtype=dtype)
class BodyModel(nn.Module):
def __init__(self,
bm_path,
params=None,
num_betas=10,
batch_size=1, v_template = None,
num_dmpls=None, path_dmpl=None,
num_expressions=10,
use_posedirs=True,
dtype=torch.float32):
super(BodyModel, self).__init__()
'''
:param bm_path: path to a SMPL model as pkl file
:param num_betas: number of shape parameters to include.
if betas are provided in params, num_betas would be overloaded with number of thoes betas
:param batch_size: number of smpl vertices to get
:param device: default on gpu
:param dtype: float precision of the compuations
:return: verts, trans, pose, betas
'''
# Todo: if params the batchsize should be read from one of the params
self.dtype = dtype
if params is None: params = {}
# -- Load SMPL params --
if '.npz' in bm_path:
smpl_dict = np.load(bm_path, encoding='latin1')
elif '.pkl' in bm_path:
with open(bm_path, 'rb') as smpl_file:
smpl_dict = Struct(**pickle.load(smpl_file,encoding='latin1'))
else:
raise ValueError('bm_path should be either a .pkl nor .npz file')
njoints = smpl_dict.posedirs.shape[2] // 3
self.model_type = {69: 'smpl', 153: 'smplh', 162: 'smplx', 45: 'mano'}[njoints]
assert self.model_type in ['smpl', 'smplh', 'smplx', 'mano', 'mano'], ValueError(
'model_type should be in smpl/smplh/smplx/mano.')
self.use_dmpl = False
if num_dmpls is not None:
if path_dmpl is not None:
self.use_dmpl = True
else:
raise (ValueError('path_dmpl should be provided when using dmpls!'))
if self.use_dmpl and self.model_type in ['smplx', 'mano']: raise (
NotImplementedError('DMPLs only work with SMPL/SMPLH models for now.'))
# Mean template vertices
if v_template is None:
v_template = np.repeat(smpl_dict.v_template[np.newaxis], batch_size, axis=0)
else:
v_template = np.repeat(v_template[np.newaxis], batch_size, axis=0)
self.register_buffer('v_template', torch.tensor(v_template, dtype=dtype))
self.register_buffer('f', torch.tensor(smpl_dict.f.astype(np.int32), dtype=torch.int32))
if len(params):
if 'betas' in params.keys():
num_betas = params['betas'].shape[1]
if 'dmpls' in params.keys():
num_dmpls = params['dmpls'].shape[1]
num_total_betas = smpl_dict.shapedirs.shape[-1]
if num_betas < 1:
num_betas = num_total_betas
shapedirs = smpl_dict.shapedirs[:, :, :num_betas]
self.register_buffer('shapedirs', torch.tensor(to_np(shapedirs), dtype=dtype))
if self.model_type == 'smplx':
begin_shape_id = 300 if smpl_dict.shapedirs.shape[-1] > 300 else 10
exprdirs = smpl_dict.shapedirs[:, :, begin_shape_id:(begin_shape_id + num_expressions)]
self.register_buffer('exprdirs', torch.tensor(exprdirs, dtype=dtype))
expression = torch.tensor(np.zeros((batch_size, num_expressions)), dtype=dtype, requires_grad=True)
self.register_parameter('expression', nn.Parameter(expression, requires_grad=True))
if self.use_dmpl:
dmpldirs = np.load(path_dmpl)['eigvec']
dmpldirs = dmpldirs[:, :, :num_dmpls]
self.register_buffer('dmpldirs', torch.tensor(dmpldirs, dtype=dtype))
# Regressor for joint locations given shape - 6890 x 24
self.register_buffer('J_regressor', to_tensor(to_np(
smpl_dict.J_regressor), dtype=dtype))
# Pose blend shape basis: 6890 x 3 x 207, reshaped to 6890*30 x 207
if use_posedirs:
posedirs = smpl_dict.posedirs
posedirs = posedirs.reshape([posedirs.shape[0] * 3, -1]).T
self.register_buffer('posedirs', torch.tensor(posedirs, dtype=dtype))
else:
self.posedirs = None
# indices of parents for each joints
kintree_table = smpl_dict.kintree_table.astype(np.int32)
self.register_buffer('kintree_table', torch.tensor(kintree_table, dtype=torch.int32))
# LBS weights
# weights = np.repeat(smpl_dict.weights[np.newaxis], batch_size, axis=0)
weights = smpl_dict.weights
self.register_buffer('weights', torch.tensor(weights, dtype=dtype))
if 'trans' in params.keys():
trans = params['trans']
else:
trans = torch.tensor(np.zeros((batch_size, 3)), dtype=dtype, requires_grad=True)
self.register_parameter('trans', nn.Parameter(trans, requires_grad=True))
# root_orient
# if self.model_type in ['smpl', 'smplh']:
root_orient = torch.tensor(np.zeros((batch_size, 3)), dtype=dtype, requires_grad=True)
self.register_parameter('root_orient', nn.Parameter(root_orient, requires_grad=True))
# pose_body
if self.model_type in ['smpl', 'smplh', 'smplx']:
pose_body = torch.tensor(np.zeros((batch_size, 63)), dtype=dtype, requires_grad=True)
self.register_parameter('pose_body', nn.Parameter(pose_body, requires_grad=True))
# pose_hand
if 'pose_hand' in params.keys():
pose_hand = params['pose_hand']
else:
if self.model_type in ['smpl']:
pose_hand = torch.tensor(np.zeros((batch_size, 1 * 3 * 2)), dtype=dtype, requires_grad=True)
elif self.model_type in ['smplh', 'smplx']:
pose_hand = torch.tensor(np.zeros((batch_size, 15 * 3 * 2)), dtype=dtype, requires_grad=True)
elif self.model_type in ['mano']:
pose_hand = torch.tensor(np.zeros((batch_size, 15 * 3)), dtype=dtype, requires_grad=True)
self.register_parameter('pose_hand', nn.Parameter(pose_hand, requires_grad=True))
# face poses
if self.model_type == 'smplx':
pose_jaw = torch.tensor(np.zeros((batch_size, 1 * 3)), dtype=dtype, requires_grad=True)
self.register_parameter('pose_jaw', nn.Parameter(pose_jaw, requires_grad=True))
pose_eye = torch.tensor(np.zeros((batch_size, 2 * 3)), dtype=dtype, requires_grad=True)
self.register_parameter('pose_eye', nn.Parameter(pose_eye, requires_grad=True))
if 'betas' in params.keys():
betas = params['betas']
else:
betas = torch.tensor(np.zeros((batch_size, num_betas)), dtype=dtype, requires_grad=True)
self.register_parameter('betas', nn.Parameter(betas, requires_grad=True))
if self.use_dmpl:
if 'dmpls' in params.keys():
dmpls = params['dmpls']
else:
dmpls = torch.tensor(np.zeros((batch_size, num_dmpls)), dtype=dtype, requires_grad=True)
self.register_parameter('dmpls', nn.Parameter(dmpls, requires_grad=True))
self.batch_size = batch_size
def r(self):
from human_body_prior.tools.omni_tools import copy2cpu as c2c
return c2c(self.forward().v)
def forward(self, root_orient=None, pose_body=None, pose_hand=None, pose_jaw=None, pose_eye=None, betas=None,
trans=None, dmpls=None, expression=None, return_dict=False, v_template =None, **kwargs):
'''
:param root_orient: Nx3
:param pose_body:
:param pose_hand:
:param pose_jaw:
:param pose_eye:
:param kwargs:
:return:
'''
assert not (v_template is not None and betas is not None), ValueError('vtemplate and betas could not be used jointly.')
assert self.model_type in ['smpl', 'smplh', 'smplx', 'mano', 'mano'], ValueError(
'model_type should be in smpl/smplh/smplx/mano')
if root_orient is None: root_orient = self.root_orient
if self.model_type in ['smplh', 'smpl']:
if pose_body is None: pose_body = self.pose_body
if pose_hand is None: pose_hand = self.pose_hand
elif self.model_type == 'smplx':
if pose_body is None: pose_body = self.pose_body
if pose_hand is None: pose_hand = self.pose_hand
if pose_jaw is None: pose_jaw = self.pose_jaw
if pose_eye is None: pose_eye = self.pose_eye
elif self.model_type in ['mano', 'mano']:
if pose_hand is None: pose_hand = self.pose_hand
if pose_hand is None: pose_hand = self.pose_hand
if trans is None: trans = self.trans
if v_template is None: v_template = self.v_template
if betas is None: betas = self.betas
if v_template.size(0) != pose_body.size(0):
v_template = v_template[:pose_body.size(0)] # this is fine since actual batch size will
# only be equal to or less than specified batch
# size
if self.model_type in ['smplh', 'smpl']:
full_pose = torch.cat([root_orient, pose_body, pose_hand], dim=1)
elif self.model_type == 'smplx':
full_pose = torch.cat([root_orient, pose_body, pose_jaw, pose_eye, pose_hand],
dim=1) # orient:3, body:63, jaw:3, eyel:3, eyer:3, handl, handr
elif self.model_type in ['mano', 'mano']:
full_pose = torch.cat([root_orient, pose_hand], dim=1)
if self.use_dmpl:
if dmpls is None: dmpls = self.dmpls
shape_components = torch.cat([betas, dmpls], dim=-1)
shapedirs = torch.cat([self.shapedirs, self.dmpldirs], dim=-1)
elif self.model_type == 'smplx':
if expression is None: expression = self.expression
shape_components = torch.cat([betas, expression], dim=-1)
shapedirs = torch.cat([self.shapedirs, self.exprdirs], dim=-1)
else:
shape_components = betas
shapedirs = self.shapedirs
verts, joints, bone_transforms = lbs(betas=shape_components, pose=full_pose, v_template=v_template,
shapedirs=shapedirs, posedirs=self.posedirs,
J_regressor=self.J_regressor, parents=self.kintree_table[0].long(),
lbs_weights=self.weights,
dtype=self.dtype)
Jtr = joints + trans.unsqueeze(dim=1)
verts = verts + trans.unsqueeze(dim=1)
res = {}
res['v'] = verts
res['f'] = self.f
res['bone_transforms'] = bone_transforms
res['betas'] = self.betas
res['Jtr'] = Jtr # Todo: ik can be made with vposer
if self.model_type == 'smpl':
res['pose_body'] = pose_body
elif self.model_type == 'smplh':
res['pose_body'] = pose_body
res['pose_hand'] = pose_hand
elif self.model_type == 'smplx':
res['pose_body'] = pose_body
res['pose_hand'] = pose_hand
res['pose_jaw'] = pose_jaw
res['pose_eye'] = pose_eye
elif self.model_type in ['mano', 'mano']:
res['pose_hand'] = pose_hand
res['full_pose'] = full_pose
if not return_dict:
class result_meta(object):
pass
res_class = result_meta()
for k, v in res.items():
res_class.__setattr__(k, v)
res = res_class
return res
|
en
| 0.717916
|
# -*- coding: utf-8 -*- # # Copyright (C) 2019 Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG), # acting on behalf of its Max Planck Institute for Intelligent Systems and the # Max Planck Institute for Biological Cybernetics. All rights reserved. # # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is holder of all proprietary rights # on this computer program. You can only use this computer program if you have closed a license agreement # with MPG or you get the right to use the computer program from someone who is authorized to grant you that right. # Any use of the computer program without a valid license is prohibited and liable to prosecution. # Contact: <EMAIL> # # # If you use this code in a research publication please consider citing the following: # # Expressive Body Capture: 3D Hands, Face, and Body from a Single Image <https://arxiv.org/abs/1904.05866> # # # Code Developed by: # <NAME> <https://nghorbani.github.io/> # # 2018.12.13 # from smplx.lbs import lbs :param bm_path: path to a SMPL model as pkl file :param num_betas: number of shape parameters to include. 
if betas are provided in params, num_betas would be overloaded with number of thoes betas :param batch_size: number of smpl vertices to get :param device: default on gpu :param dtype: float precision of the compuations :return: verts, trans, pose, betas # Todo: if params the batchsize should be read from one of the params # -- Load SMPL params -- # Mean template vertices # Regressor for joint locations given shape - 6890 x 24 # Pose blend shape basis: 6890 x 3 x 207, reshaped to 6890*30 x 207 # indices of parents for each joints # LBS weights # weights = np.repeat(smpl_dict.weights[np.newaxis], batch_size, axis=0) # root_orient # if self.model_type in ['smpl', 'smplh']: # pose_body # pose_hand # face poses :param root_orient: Nx3 :param pose_body: :param pose_hand: :param pose_jaw: :param pose_eye: :param kwargs: :return: # this is fine since actual batch size will # only be equal to or less than specified batch # size # orient:3, body:63, jaw:3, eyel:3, eyer:3, handl, handr # Todo: ik can be made with vposer
| 1.844074
| 2
|
tests/test_hessian.py
|
Juancarlos1983/Pymop
| 60
|
6626790
|
<reponame>Juancarlos1983/Pymop
import unittest
import autograd.numpy as anp
import numpy as np
from pymop import Problem
class HessianTest(unittest.TestCase):
    """Verify autograd-derived derivatives against hand-derived analytic ones."""

    def test_hessian(self):
        auto_diff = MyProblem()
        correct = MyProblemWithHessian()

        np.random.seed(1)
        X = np.random.random((100, correct.n_var))
        # Prepend one fixed point so the comparison is not purely random.
        # np.vstack replaces the deprecated np.row_stack alias.
        X = np.vstack([np.array([0.5, 0.5]), X])

        F, dF, hF = correct.evaluate(X, return_values_of=["F", "dF", "hF"])
        _F, _dF, _hF = auto_diff.evaluate(X, return_values_of=["F", "dF", "hF"])

        self.assertTrue(np.all(np.abs(_F - F) < 0.00001))
        self.assertTrue(np.all(np.abs(_dF - dF) < 0.00001))
        # BUG FIX: the Hessians were computed above but never compared,
        # so the test silently skipped the one quantity it is named after.
        self.assertTrue(np.all(np.abs(_hF - hF) < 0.00001))
class MyProblem(Problem):
    """Single-objective problem f(x1, x2) = 3*x1^3 + 10*x2^4 + 4*x1^2*x2^2.

    Derivatives are left to the framework (autograd); only F is supplied.
    """

    def __init__(self, **kwargs):
        super().__init__(n_var=2, n_obj=1, **kwargs)

    def _evaluate(self, x, out, *args, **kwargs):
        x1, x2 = x[:, 0], x[:, 1]
        out["F"] = 3 * x1 ** 3 + 10 * x2 ** 4 + 4 * x1 ** 2 * x2 ** 2
class MyProblemWithHessian(Problem):
    """Same objective as MyProblem, but with analytic gradient and Hessian."""

    def __init__(self, **kwargs):
        super().__init__(n_var=2, n_obj=1, evaluation_of=["F", "dF", "hF"], **kwargs)

    def _evaluate(self, x, out, *args, **kwargs):
        x1, x2 = x[:, 0], x[:, 1]
        out["F"] = 3 * x1 ** 3 + 10 * x2 ** 4 + 4 * x1 ** 2 * x2 ** 2

        if "dF" in out:
            grad = np.column_stack([
                9 * x1 ** 2 + 8 * x1 * x2 ** 2,   # df/dx1
                40 * x2 ** 3 + 8 * x1 ** 2 * x2,  # df/dx2
            ])
            # shape (n, 1, n_var): one gradient row per objective
            out["dF"] = grad[:, None, :]

        if "hF" in out:
            h11 = 18 * x1 + 8 * x2 ** 2
            h12 = 16 * x1 * x2  # symmetric: d2f/dx1dx2 == d2f/dx2dx1
            h22 = 120 * x2 ** 2 + 8 * x1 ** 2
            # swapaxes moves the batch axis first -> (n, 1, n_var, n_var)
            out["hF"] = np.array([[h11, h12], [h12, h22]]).swapaxes(0, 2)[:, None, ...]
# Allow running this test module directly: `python test_hessian.py`.
if __name__ == '__main__':
    unittest.main()
|
import unittest
import autograd.numpy as anp
import numpy as np
from pymop import Problem
class HessianTest(unittest.TestCase):
def test_hessian(self):
auto_diff = MyProblem()
correct = MyProblemWithHessian()
np.random.seed(1)
X = np.random.random((100, correct.n_var))
X = np.row_stack([np.array([0.5, 0.5]), X])
F, dF, hF = correct.evaluate(X, return_values_of=["F", "dF", "hF"])
_F, _dF, _hF = auto_diff.evaluate(X, return_values_of=["F", "dF", "hF"])
self.assertTrue(np.all(np.abs(_F - F) < 0.00001))
self.assertTrue(np.all(np.abs(_dF - dF) < 0.00001))
class MyProblem(Problem):
def __init__(self, **kwargs):
super().__init__(n_var=2, n_obj=1, **kwargs)
def _evaluate(self, x, out, *args, **kwargs):
out["F"] = 3 * x[:, 0] ** 3 + 10 * x[:, 1] ** 4 + 4 * x[:, 0] ** 2 * x[:, 1] ** 2
class MyProblemWithHessian(Problem):
def __init__(self, **kwargs):
super().__init__(n_var=2, n_obj=1, evaluation_of=["F", "dF", "hF"], **kwargs)
def _evaluate(self, x, out, *args, **kwargs):
out["F"] = 3 * x[:, 0] ** 3 + 10 * x[:, 1] ** 4 + 4 * x[:, 0] ** 2 * x[:, 1] ** 2
if "dF" in out:
f_x1 = 9 * x[:, 0] ** 2 + 8 * x[:, 0] * x[:, 1] ** 2
f_x2 = 40 * x[:, 1] ** 3 + 8 * x[:, 0] ** 2 * x[:, 1]
dF = np.column_stack([f_x1, f_x2])
out["dF"] = dF[:, None, :]
if "hF" in out:
f_x1_x1 = 18 * x[:, 0] + 8 * x[:, 1] ** 2
f_x1_x2 = 16 * x[:, 0] * x[:, 1]
f_x2_x1 = 16 * x[:, 0] * x[:, 1]
f_x2_x2 = 120 * x[:, 1] ** 2 + 8 * x[:, 0] ** 2
out["hF"] = np.array([[f_x1_x1, f_x1_x2], [f_x2_x1, f_x2_x2]]).swapaxes(0, 2)[:, None, ...]
if __name__ == '__main__':
unittest.main()
|
none
| 1
| 3.014468
| 3
|
|
vaccine_feed_ingest/utils/normalize.py
|
jeremyschlatter/vaccine-feed-ingest
| 0
|
6626791
|
"""
Various tricks for matching source locations to product locations from VIAL
"""
import re
from typing import Optional, Tuple
import url_normalize
from vaccine_feed_ingest_schema.location import VaccineProvider
from .log import getLogger
logger = getLogger(__file__)
# Add to this list in alphabetical order
# Maps each provider to the regex patterns that extract its store number
# (group 1) from a location name.  Patterns are tried in dict order by
# provider_id_from_name().
VACCINE_PROVIDER_REGEXES = {
    VaccineProvider.ACME: [
        re.compile(r"ACME PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.ALBERTSONS: [
        re.compile(r"ALBERTSONS(?: MARKET)? PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.BIG_Y: [
        re.compile(r"BIG Y PHARMACY(?: #\d+ Rx)? #(\d+)", re.I),
    ],
    VaccineProvider.BROOKSHIRE: [
        re.compile(r"BROOKSHIRE PHARMACY #\d+ #(\d+)", re.I),
    ],
    VaccineProvider.COSTCO: [
        re.compile(r"COSTCO(?: MARKET)? PHARMACY #\s*(\d+)", re.I),
        re.compile(r"COSTCO WHOLESALE CORPORATION #(\d+)", re.I),
    ],
    VaccineProvider.CUB: [
        re.compile(r"CUB PHARMACY #\d+ #(\d+)", re.I),
    ],
    VaccineProvider.CVS: [
        # BUG FIX: "INC.?" had an unescaped dot (matched any character);
        # "\.?" matches an optional literal period, as in the PUBLIX pattern.
        re.compile(r"CVS\s(?:STORE)?(?:PHARMACY)?(?:, INC\.?)?\s?#?(\d+)", re.I),
    ],
    VaccineProvider.DILLONS: [
        re.compile(r"DILLON\'S PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.DRUGCO: [
        re.compile(r"DRUGCO DISCOUNT PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.FAMILY_FARE: [
        re.compile(r"FAMILY\s+FARE\s+PHARMACY\s+#?\d+\s+#(\d+)", re.I),
    ],
    VaccineProvider.FOOD_CITY: [
        re.compile(r"FOOD CITY PHARMACY #\d+ #(\d+)", re.I),
    ],
    VaccineProvider.FOOD_LION: [
        re.compile(r"FOOD LION #(\d+)", re.I),
    ],
    VaccineProvider.FRED_MEYER: [
        re.compile(r"FRED MEYER(?: PHARMACY)? #(\d+)", re.I),
    ],
    VaccineProvider.FRYS: [
        re.compile(r"FRY\'S FOOD AND DRUG #(\d+)", re.I),
    ],
    VaccineProvider.GENOA: [
        re.compile(r"GENOA HEALTHCARE (\d+) \(", re.I),
        re.compile(r"GENOA HEALTHCARE LLC #(\d+)", re.I),
    ],
    VaccineProvider.GIANT: [
        re.compile(r"GIANT #(\d+)", re.I),
    ],
    VaccineProvider.GIANT_EAGLE: [
        re.compile(r"GIANT EAGLE PHARMACY #\d+ #G(\d+)", re.I),
    ],
    VaccineProvider.GIANT_FOOD: [
        re.compile(r"GIANT FOOD #(\d+)", re.I),
    ],
    VaccineProvider.HAGGEN: [
        re.compile(r"HAGGEN PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.HANNAFORD: [
        re.compile(r"HANNAFORD #(\d+)", re.I),
    ],
    VaccineProvider.HARMONS: [
        re.compile(r"HARMONS PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.HARPS: [
        re.compile(r"HARPS PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.HARRIS_TEETER: [
        re.compile(r"HARRIS TEETER PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.HARTIG: [
        re.compile(r"HARTIG DRUG CO #?\d+ #(\d+)", re.I),
    ],
    VaccineProvider.HEB: [
        re.compile(r"H-E-B #(\d+)", re.I),
    ],
    VaccineProvider.HOMELAND: [
        re.compile(r"HOMELAND PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.HY_VEE: [
        # BUG FIX: the dot after "INC" was unescaped and matched any character.
        re.compile(r"HY-VEE INC\. #(\d+)", re.I),
    ],
    VaccineProvider.INGLES: [
        re.compile(r"INGLES PHARMACY #\d+ #(\d+)", re.I),
    ],
    VaccineProvider.KAISER_HEALTH_PLAN: [
        re.compile(r"KAISER HEALTH PLAN \w+(?: \w+)? PHY (\d+)", re.I),
    ],
    VaccineProvider.KAISER_PERMANENTE: [
        re.compile(r"KAISER PERMANENTE PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.KING_SOOPERS: [
        re.compile(r"KING SOOPERS PHARMACY #?(\d+)", re.I),
    ],
    VaccineProvider.KROGER: [
        re.compile(r"KROGER PHARMACY #?(\d+)", re.I),
    ],
    VaccineProvider.LITTLE_CLINIC: [
        re.compile(r"THE LITTLE CLINIC #(\d+)", re.I),
    ],
    VaccineProvider.MARIANOS: [
        re.compile(r"MARIANO\'S PHARMACY #(\d+)", re.I),
    ],
    # NOTE(review): OSCO is out of the alphabetical order the list header
    # requests; left in place because dict order determines match precedence.
    VaccineProvider.OSCO: [
        re.compile(r"OSCO (?:DRUG|PHARMACY) #(\d+)", re.I),
    ],
    VaccineProvider.MARKET_STREET: [
        re.compile(r"MARKET STREET PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.MEDICAP: [
        re.compile(r"MEDICAP PHARMACY #\d+ #(\d+)", re.I),
    ],
    VaccineProvider.MEIJER: [
        re.compile(r"MEIJER #(\d+)", re.I),
    ],
    VaccineProvider.PAVILIONS: [
        re.compile(r"PAVILIONS PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.PICK_N_SAVE: [
        re.compile(r"PICK N SAVE PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.PRICE_CHOPPER: [
        re.compile(r"PRICE CHOPPER PHARMACY #?\d+ #(?:MS)?(\d+)", re.I),
    ],
    VaccineProvider.PUBLIX: [
        re.compile(r"PUBLIX SUPER MARKETS INC\. #(\d+)", re.I),
    ],
    VaccineProvider.QFC: [
        re.compile(r"QFC PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.RALEYS: [
        re.compile(r"RALEY\'S PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.RITE_AID: [
        re.compile(r"RITE AID (?:PHARMACY |#RA)(\d+)", re.I),
    ],
    VaccineProvider.SAMS: [
        re.compile(r"SAM'?S PHARMACY (?:10-|#\s*)(\d+)", re.I),
        re.compile(r"SAMS CLUB (?:#\d+\-)?(\d+)", re.I),
    ],
    VaccineProvider.SAFEWAY: [
        re.compile(r"Safeway (?:PHARMACY )?\s?#?(\d+)", re.I),
    ],
    VaccineProvider.SAV_ON: [
        re.compile(r"SAV-?ON PHARMACY #\s?(\d+)", re.I),
    ],
    VaccineProvider.SHOP_RITE: [
        re.compile(r"SHOPRITE PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.SMITHS: [
        re.compile(r"SMITH\'S PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.STOP_AND_SHOP: [
        re.compile(r"STOP \& SHOP #(\d+)", re.I),
    ],
    VaccineProvider.TOM_THUMB: [
        re.compile(r"TOM THUMB PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.THRIFTY: [
        re.compile(r"THRIFTY DRUG STORES INC #(\d+)", re.I),
    ],
    VaccineProvider.VONS: [
        re.compile(r"VONS PHARMACY #(\d+)", re.I),
    ],
    VaccineProvider.WALGREENS: [
        re.compile(r"Walgreens (?:Specialty )?(?:Pharmacy )?#(\d+)", re.I),
        re.compile(r"Walgreens Co\. #(\d+)", re.I),
    ],
    VaccineProvider.WALMART: [
        re.compile(r"WALMART INC #10-(\d+)", re.I),
        re.compile(r"WALMART PHARMACY 10-(\d+)", re.I),
        re.compile(r"WALMART (?:INC,|PHARMACY) #(\d+)", re.I),
    ],
    VaccineProvider.WEIS: [
        re.compile(r"WEIS PHARMACY #\d+ #(\d+)", re.I),
    ],
    VaccineProvider.WINN_DIXIE: [
        re.compile(r"WINN-DIXIE #(\d+)", re.I),
    ],
}
def provider_id_from_name(
    name: str,
) -> Optional[Tuple[VaccineProvider, str]]:
    """Generate provider ids for retail pharmacies (riteaid:123)"""
    # Try every provider's patterns in declaration order; first hit wins.
    for provider, patterns in VACCINE_PROVIDER_REGEXES.items():
        for pattern in patterns:
            match = pattern.search(name)
            if match is None:
                continue
            # int() round-trip strips leading zeros so ids are canonical.
            return provider, str(int(match.group(1)))
    return None
# Nine consecutive digits: a ZIP+4 code missing its dash.
ZIP_RE = re.compile(r"([0-9]{5})([0-9]{4})")


def normalize_zip(zipc: Optional[str]) -> Optional[str]:
    """Insert the dash into 9-digit ZIP+4 codes; reject malformed values.

    Returns the (possibly rewritten) ZIP when it is 5 or 10 characters
    long, otherwise None.  None passes through unchanged.
    """
    if zipc is None:
        return None
    if ZIP_RE.match(zipc):
        zipc = ZIP_RE.sub(r"\1-\2", zipc)
    return zipc if len(zipc) in (5, 10) else None
def normalize_url(url: Optional[str]) -> Optional[str]:
    """Return the canonical form of *url*; None passes through unchanged."""
    return None if url is None else url_normalize.url_normalize(url)
|
"""
Various tricks for matching source locations to product locations from VIAL
"""
import re
from typing import Optional, Tuple
import url_normalize
from vaccine_feed_ingest_schema.location import VaccineProvider
from .log import getLogger
logger = getLogger(__file__)
# Add to this list in alphabetical order
VACCINE_PROVIDER_REGEXES = {
VaccineProvider.ACME: [
re.compile(r"ACME PHARMACY #(\d+)", re.I),
],
VaccineProvider.ALBERTSONS: [
re.compile(r"ALBERTSONS(?: MARKET)? PHARMACY #(\d+)", re.I),
],
VaccineProvider.BIG_Y: [
re.compile(r"BIG Y PHARMACY(?: #\d+ Rx)? #(\d+)", re.I),
],
VaccineProvider.BROOKSHIRE: [
re.compile(r"BROOKSHIRE PHARMACY #\d+ #(\d+)", re.I),
],
VaccineProvider.COSTCO: [
re.compile(r"COSTCO(?: MARKET)? PHARMACY #\s*(\d+)", re.I),
re.compile(r"COSTCO WHOLESALE CORPORATION #(\d+)", re.I),
],
VaccineProvider.CUB: [
re.compile(r"CUB PHARMACY #\d+ #(\d+)", re.I),
],
VaccineProvider.CVS: [
re.compile(r"CVS\s(?:STORE)?(?:PHARMACY)?(?:, INC.?)?\s?#?(\d+)", re.I),
],
VaccineProvider.DILLONS: [
re.compile(r"DILLON\'S PHARMACY #(\d+)", re.I),
],
VaccineProvider.DRUGCO: [
re.compile(r"DRUGCO DISCOUNT PHARMACY #(\d+)", re.I),
],
VaccineProvider.FAMILY_FARE: [
re.compile(r"FAMILY\s+FARE\s+PHARMACY\s+#?\d+\s+#(\d+)", re.I),
],
VaccineProvider.FOOD_CITY: [
re.compile(r"FOOD CITY PHARMACY #\d+ #(\d+)", re.I),
],
VaccineProvider.FOOD_LION: [
re.compile(r"FOOD LION #(\d+)", re.I),
],
VaccineProvider.FRED_MEYER: [
re.compile(r"FRED MEYER(?: PHARMACY)? #(\d+)", re.I),
],
VaccineProvider.FRYS: [
re.compile(r"FRY\'S FOOD AND DRUG #(\d+)", re.I),
],
VaccineProvider.GENOA: [
re.compile(r"GENOA HEALTHCARE (\d+) \(", re.I),
re.compile(r"GENOA HEALTHCARE LLC #(\d+)", re.I),
],
VaccineProvider.GIANT: [
re.compile(r"GIANT #(\d+)", re.I),
],
VaccineProvider.GIANT_EAGLE: [
re.compile(r"GIANT EAGLE PHARMACY #\d+ #G(\d+)", re.I),
],
VaccineProvider.GIANT_FOOD: [
re.compile(r"GIANT FOOD #(\d+)", re.I),
],
VaccineProvider.HAGGEN: [
re.compile(r"HAGGEN PHARMACY #(\d+)", re.I),
],
VaccineProvider.HANNAFORD: [
re.compile(r"HANNAFORD #(\d+)", re.I),
],
VaccineProvider.HARMONS: [
re.compile(r"HARMONS PHARMACY #(\d+)", re.I),
],
VaccineProvider.HARPS: [
re.compile(r"HARPS PHARMACY #(\d+)", re.I),
],
VaccineProvider.HARRIS_TEETER: [
re.compile(r"HARRIS TEETER PHARMACY #(\d+)", re.I),
],
VaccineProvider.HARTIG: [
re.compile(r"HARTIG DRUG CO #?\d+ #(\d+)", re.I),
],
VaccineProvider.HEB: [
re.compile(r"H-E-B #(\d+)", re.I),
],
VaccineProvider.HOMELAND: [
re.compile(r"HOMELAND PHARMACY #(\d+)", re.I),
],
VaccineProvider.HY_VEE: [
re.compile(r"HY-VEE INC. #(\d+)", re.I),
],
VaccineProvider.INGLES: [
re.compile(r"INGLES PHARMACY #\d+ #(\d+)", re.I),
],
VaccineProvider.KAISER_HEALTH_PLAN: [
re.compile(r"KAISER HEALTH PLAN \w+(?: \w+)? PHY (\d+)", re.I),
],
VaccineProvider.KAISER_PERMANENTE: [
re.compile(r"KAISER PERMANENTE PHARMACY #(\d+)", re.I),
],
VaccineProvider.KING_SOOPERS: [
re.compile(r"KING SOOPERS PHARMACY #?(\d+)", re.I),
],
VaccineProvider.KROGER: [
re.compile(r"KROGER PHARMACY #?(\d+)", re.I),
],
VaccineProvider.LITTLE_CLINIC: [
re.compile(r"THE LITTLE CLINIC #(\d+)", re.I),
],
VaccineProvider.MARIANOS: [
re.compile(r"MARIANO\'S PHARMACY #(\d+)", re.I),
],
VaccineProvider.OSCO: [
re.compile(r"OSCO (?:DRUG|PHARMACY) #(\d+)", re.I),
],
VaccineProvider.MARKET_STREET: [
re.compile(r"MARKET STREET PHARMACY #(\d+)", re.I),
],
VaccineProvider.MEDICAP: [
re.compile(r"MEDICAP PHARMACY #\d+ #(\d+)", re.I),
],
VaccineProvider.MEIJER: [
re.compile(r"MEIJER #(\d+)", re.I),
],
VaccineProvider.PAVILIONS: [
re.compile(r"PAVILIONS PHARMACY #(\d+)", re.I),
],
VaccineProvider.PICK_N_SAVE: [
re.compile(r"PICK N SAVE PHARMACY #(\d+)", re.I),
],
VaccineProvider.PRICE_CHOPPER: [
re.compile(r"PRICE CHOPPER PHARMACY #?\d+ #(?:MS)?(\d+)", re.I),
],
VaccineProvider.PUBLIX: [
re.compile(r"PUBLIX SUPER MARKETS INC\. #(\d+)", re.I),
],
VaccineProvider.QFC: [
re.compile(r"QFC PHARMACY #(\d+)", re.I),
],
VaccineProvider.RALEYS: [
re.compile(r"RALEY\'S PHARMACY #(\d+)", re.I),
],
VaccineProvider.RITE_AID: [
re.compile(r"RITE AID (?:PHARMACY |#RA)(\d+)", re.I),
],
VaccineProvider.SAMS: [
re.compile(r"SAM'?S PHARMACY (?:10-|#\s*)(\d+)", re.I),
re.compile(r"SAMS CLUB (?:#\d+\-)?(\d+)", re.I),
],
VaccineProvider.SAFEWAY: [
re.compile(r"Safeway (?:PHARMACY )?\s?#?(\d+)", re.I),
],
VaccineProvider.SAV_ON: [
re.compile(r"SAV-?ON PHARMACY #\s?(\d+)", re.I),
],
VaccineProvider.SHOP_RITE: [
re.compile(r"SHOPRITE PHARMACY #(\d+)", re.I),
],
VaccineProvider.SMITHS: [
re.compile(r"SMITH\'S PHARMACY #(\d+)", re.I),
],
VaccineProvider.STOP_AND_SHOP: [
re.compile(r"STOP \& SHOP #(\d+)", re.I),
],
VaccineProvider.TOM_THUMB: [
re.compile(r"TOM THUMB PHARMACY #(\d+)", re.I),
],
VaccineProvider.THRIFTY: [
re.compile(r"THRIFTY DRUG STORES INC #(\d+)", re.I),
],
VaccineProvider.VONS: [
re.compile(r"VONS PHARMACY #(\d+)", re.I),
],
VaccineProvider.WALGREENS: [
re.compile(r"Walgreens (?:Specialty )?(?:Pharmacy )?#(\d+)", re.I),
re.compile(r"Walgreens Co\. #(\d+)", re.I),
],
VaccineProvider.WALMART: [
re.compile(r"WALMART INC #10-(\d+)", re.I),
re.compile(r"WALMART PHARMACY 10-(\d+)", re.I),
re.compile(r"WALMART (?:INC,|PHARMACY) #(\d+)", re.I),
],
VaccineProvider.WEIS: [
re.compile(r"WEIS PHARMACY #\d+ #(\d+)", re.I),
],
VaccineProvider.WINN_DIXIE: [
re.compile(r"WINN-DIXIE #(\d+)", re.I),
],
}
def provider_id_from_name(
name: str,
) -> Optional[Tuple[VaccineProvider, str]]:
"""Generate provider ids for retail pharmacies (riteaid:123)"""
for vaccine_provider, regexes in VACCINE_PROVIDER_REGEXES.items():
for regex in regexes:
m = regex.search(name)
if m:
return vaccine_provider, str(int(m.group(1)))
return None
ZIP_RE = re.compile(r"([0-9]{5})([0-9]{4})")
def normalize_zip(zipc: Optional[str]) -> Optional[str]:
if zipc is not None:
if ZIP_RE.match(zipc):
zipc = ZIP_RE.sub(r"\1-\2", zipc)
length = len(zipc)
if length != 5 and length != 10:
zipc = None
return zipc
def normalize_url(url: Optional[str]) -> Optional[str]:
if url is None:
return url
return url_normalize.url_normalize(url)
|
en
| 0.412495
|
Various tricks for matching source locations to product locations from VIAL # Add to this list in alphabetical order #(\d+)", re.I), #(\d+)", re.I), #\d+ Rx)? #(\d+)", re.I), #\d+ #(\d+)", re.I), #\s*(\d+)", re.I), #(\d+)", re.I), #\d+ #(\d+)", re.I), #?(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #?\d+\s+#(\d+)", re.I), #\d+ #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #\d+ #G(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #?\d+ #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #\d+ #(\d+)", re.I), #(\d+)", re.I), #?(\d+)", re.I), #?(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #\d+ #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #?\d+ #(?:MS)?(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #RA)(\d+)", re.I), #\s*)(\d+)", re.I), #\d+\-)?(\d+)", re.I), #?(\d+)", re.I), #\s?(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #(\d+)", re.I), #10-(\d+)", re.I), #(\d+)", re.I), #\d+ #(\d+)", re.I), #(\d+)", re.I), Generate provider ids for retail pharmacies (riteaid:123)
| 2.491071
| 2
|
gs_manager/command/config.py
|
AngellusMortis/game_server_manager
| 6
|
6626792
|
from __future__ import annotations
from copy import deepcopy
import inspect
import os
from collections.abc import Iterable
from typing import (
Any,
Callable,
Dict,
List,
Optional,
Tuple,
Union,
get_type_hints,
)
import click
import yaml
from gs_manager.command.types import Server
from gs_manager.command.validators import (
DirectoryConfigType,
GenericConfigType,
ServerType,
)
from gs_manager.logger import get_logger
from gs_manager.null import NullServer
__all__ = [
"DEFAULT_CONFIG",
"DEFAULT_SERVER_TYPE",
"BaseConfig",
"Config",
]
DEFAULT_CONFIG = ".gs_config.yml"
DEFAULT_SERVER_TYPE = Server("null", NullServer)
class BaseConfig:
    """Introspection-driven configuration container.

    Every public class attribute (not starting with "_" and not listed in
    ``_excluded_properties``) is treated as a config option.  Values can be
    validated per option, serialized for persistence, and bulk-updated from
    either a plain dict or a ``click.Context``.
    """

    # option name -> ordered list of validators applied in validate()
    _validators: Dict[str, List[GenericConfigType]] = {}
    # option name -> callable converting the live value for persistence
    _serializers: Dict[str, Callable] = {}
    # attributes never treated as config options
    _excluded_properties: List[str] = ["global_options", "parent"]
    # options that exist at runtime but are never written back to disk
    _excluded_from_save: List[str] = []
    # lazy caches backing _config_options / _config_types
    _options: Optional[List[str]] = None
    _types: Optional[List[type]] = None
    # set on generated instance configs to point at the owning Config
    parent: Optional[BaseConfig] = None

    @property
    def _config_options(self) -> List[str]:
        """Discover (and cache) option names via class introspection."""
        if self._options is None:
            attributes = inspect.getmembers(
                self.__class__, lambda a: not (inspect.isroutine(a))
            )
            options = []
            for attribute in attributes:
                if not (
                    attribute[0].startswith("_")
                    or attribute[0] in self._excluded_properties
                ):
                    options.append(attribute[0])
            self._options = options
        return self._options

    @property
    def __dict__(self) -> dict:
        """Serialize options to a plain dict.

        NOTE: this deliberately shadows the normal instance ``__dict__`` so
        ``vars(config)`` / ``self.__dict__`` yields the saveable mapping.
        """
        config_dict = {}
        for key in self._config_options:
            if key in self._excluded_from_save:
                continue
            value = getattr(self, key)
            if key in self._serializers:
                value = self._serializers[key](value)
            config_dict[key] = value
        return config_dict

    @property
    def _config_types(self) -> List[type]:
        """Resolved type hints for this class, computed once and cached."""
        if self._types is None:
            self._types = get_type_hints(self.__class__)
        return self._types

    @property
    def global_options(self):
        # CLI option grouping; subclasses are expected to extend this.
        return {"all": [], "instance_enabled": []}

    def get_type_for_param(self, param) -> Optional[type]:
        """Return the annotated type for *param*, or None if unannotated."""
        if param in self._config_types:
            return self._config_types[param]
        return None

    def validate(self, param: str, value: Any) -> Tuple[Any, bool]:
        """Run the validators for *param* and classify the value.

        Returns:
            (value, has_content) where ``has_content`` is False for None,
            empty iterables and False booleans — values that should not
            overwrite an existing setting.
        """
        if param in self._validators:
            for validator in self._validators[param]:
                value = validator.validate(value)
        has_content = True
        if value is None:
            has_content = False
        elif isinstance(value, Iterable) and len(value) == 0:
            has_content = False
        elif isinstance(value, bool) and value is False:
            has_content = False
        return value, has_content

    def _update_config_from_dict(
        self, config_dict: dict, ignore_unknown=False, ignore_bool=False
    ) -> None:
        """Apply *config_dict* onto this config.

        Unknown keys raise ValueError unless *ignore_unknown* is set or the
        key uses the "x-" extension prefix.  dict-valued options are merged
        into the existing value rather than replaced wholesale.
        """
        for key, value in config_dict.items():
            if not (
                ignore_unknown
                or key in self._config_options
                or key.startswith("x-")
            ):
                raise ValueError(f"Unknown config option: {key}")
            elif key not in self._config_options or value is None:
                continue
            value, has_content = self.validate(key, value)
            expected_type = self.get_type_for_param(key)
            # Bools are applied even when False, unless the caller asked to
            # ignore them (e.g. CLI defaults); other values need content.
            if (expected_type == bool and not ignore_bool) or has_content:
                if isinstance(value, dict):
                    new_value = value.copy()
                    value = getattr(self, key)
                    value.update(new_value)
                setattr(self, key, value)

    def _update_config_from_context(self, context: click.Context) -> None:
        # CLI params can include options for other commands, so unknown keys
        # and default-False bools are ignored here.
        self._update_config_from_dict(
            context.params, ignore_unknown=True, ignore_bool=True
        )

    def update_config(self, data: Union[dict, click.Context]) -> None:
        """Update from either a click context or a plain dict."""
        if isinstance(data, click.Context):
            self._update_config_from_context(data)
        else:
            self._update_config_from_dict(data)

    def make_server_config(self, context: click.Context) -> Config:
        """Build (or reuse) the config object for the selected server type.

        If the server declares a dedicated ``config_class`` a new config is
        loaded from the same file; otherwise this config is reused.  The
        result is attached to the server as ``_config``.
        """
        server = context.params["server_type"].server
        server_config = self
        if hasattr(server, "config_class") and server.config_class is not None:
            server_config = server.config_class(self._file_path)
        server_config.update_config(context)
        server._config = server_config
        return server_config
class Config(BaseConfig):
    """Top-level configuration backed by a YAML file, with instance overrides.

    The file is discovered by walking up from the current directory.  An
    optional ``instance_overrides`` mapping in the YAML yields child
    instance configs that inherit the main values and override a subset.
    """

    # name of the currently selected instance (None = main config)
    instance_name: Optional[str] = None
    server_path: str = "."
    server_type: Server = DEFAULT_SERVER_TYPE

    _validators = {
        "server_path": [DirectoryConfigType],
        "server_type": [ServerType],
    }
    # Server objects are persisted by name only
    _serializers = {"server_type": lambda server_type: server_type.name}
    _file_path: Optional[str]
    # instance name -> override config, rebuilt in load_config()
    _instances: Dict[str, BaseConfig] = {}
    _excluded_properties: List[str] = BaseConfig._excluded_properties + [
        "instances",
        "all_instance_names",
        "instance_name",
        "current_instance",
        "config_path",
    ]
    # property names mirrored onto generated instance-config classes
    _instance_properties: List[str] = []
    # extra plain attributes added to generated instance-config classes
    _extra_attr: List[str] = []

    def __init__(
        self,
        config_file: Optional[str] = None,
        ignore_unknown: bool = False,
        load_config: bool = True,
    ):
        """Discover and load the config file unless *load_config* is False."""
        if load_config:
            self._file_path = self._discover_config(config_file)
            if self._file_path is not None:
                self.load_config(ignore_unknown)

    @property
    def __dict__(self) -> dict:
        """Serialize options plus an ``instance_overrides`` section.

        Only values that actually differ from the main config are written
        into each instance's override mapping.
        """
        config_dict = super().__dict__
        if len(self._instances.keys()) > 0:
            config_dict["instance_overrides"] = {}
            for name, instance_config in self._instances.items():
                config_dict["instance_overrides"][name] = {}
                instance_dict = instance_config.__dict__
                for key, value in instance_dict.items():
                    if config_dict.get(key) != value:
                        config_dict["instance_overrides"][name][key] = value
        return config_dict

    @property
    def config_path(self):
        # absolute path of the loaded YAML file (None if never discovered)
        return self._file_path

    @property
    def all_instance_names(self) -> Iterable[str]:
        # annotation fixed: this returns the dict keys view, not a str
        return self._instances.keys()

    @property
    def instances(self) -> Dict[str, BaseConfig]:
        return self._instances

    @property
    def current_instance(self) -> BaseConfig:
        """The selected instance config, or self when none is selected."""
        if self.instance_name is None:
            return self
        return self.get_instance(self.instance_name)

    def get_instance(self, name="default") -> BaseConfig:
        """Return the named instance config, refreshed from the CLI context.

        Raises:
            click.ClickException: if *name* is not a known instance.
        """
        config = self
        context = click.get_current_context()
        if name != "default":
            if name not in self._instances:
                raise click.ClickException(f"instance {name} does not exist")
            config = self._instances[name]
        config.update_config(context)
        return config

    def copy(self) -> Config:
        """Clone via re-serialization, skipping file discovery/loading."""
        copy = self.__class__(load_config=False)
        copy._file_path = self._file_path
        copy._update_config_from_dict(self.__dict__, ignore_unknown=True)
        return copy

    def _update_config_from_dict(
        self, config_dict: dict, ignore_unknown=False, ignore_bool=False
    ) -> None:
        # propagate every update to the instance configs as well
        super()._update_config_from_dict(
            config_dict, ignore_unknown, ignore_bool
        )
        for instance in self._instances.values():
            instance._update_config_from_dict(
                config_dict, ignore_unknown, ignore_bool
            )

    def _discover_config(self, file_path: Optional[str]) -> Optional[str]:
        """Locate the config file, searching up to 5 parent directories.

        Absolute or explicitly relative ("./", "../") paths are used as
        given; a bare filename is searched for upward from the cwd.
        Returns the absolute path, or None when nothing is found.
        """
        if file_path is None:
            file_path = DEFAULT_CONFIG
        abs_file_path = os.path.abspath(file_path)
        if not (
            abs_file_path == file_path
            or file_path.startswith("./")
            or file_path.startswith("../")
        ):
            abs_file_path = None
            path = os.getcwd()
            search_path = path
            for x in range(5):
                if search_path == "/":
                    file_path = None
                    break
                check_file_path = os.path.join(search_path, file_path)
                if os.path.isfile(check_file_path):
                    abs_file_path = os.path.abspath(check_file_path)
                    break
                search_path = os.path.abspath(
                    os.path.join(search_path, os.pardir)
                )
        return abs_file_path

    def _make_instance_config_factory(self) -> BaseConfig:
        """Dynamically build an instance-config object mirroring this class.

        The generated class copies this config's validators, serializers,
        options, selected properties and extra attributes.
        """
        class InstanceConfig(BaseConfig):
            _validators: Dict[str, List[GenericConfigType]] = self._validators
            _serializers: Dict[str, Callable] = self._serializers
            _excluded_properties: List[str] = self._excluded_properties

        for option in self._config_options:
            setattr(InstanceConfig, option, None)
        for name in self._instance_properties:
            # re-wrap the parent property's getter on the generated class
            prop_method = getattr(self.__class__, name).fget
            setattr(InstanceConfig, name, property(prop_method))
        for attr in self._extra_attr:
            setattr(InstanceConfig, attr, None)
        return InstanceConfig()

    def _make_instance_config(self, instance_dict: dict):
        """Build an instance config: copy parent values, then apply overrides."""
        instance_config = self._make_instance_config_factory()
        instance_config.parent = self
        instance_config._validators = self._validators
        instance_config._serializers = self._serializers
        instance_config._excluded_properties = self._excluded_properties
        for option in self._config_options:
            # deepcopy so instance mutations never leak back to the parent
            setattr(instance_config, option, deepcopy(getattr(self, option)))
        instance_config._update_config_from_dict(
            instance_dict, ignore_unknown=True
        )
        return instance_config

    def load_config(self, ignore_unknown: bool = False) -> None:
        """Load YAML from ``_file_path`` and rebuild all instance configs.

        Raises:
            ValueError: if ``_file_path`` does not point at a file.
        """
        if not os.path.isfile(self._file_path):
            raise ValueError("Invalid config path")
        config_dict = {}
        with open(self._file_path, "r") as f:
            config_dict = yaml.safe_load(f)
        if config_dict is not None:
            instance_configs = {}
            # reset all of the instance configs
            self._instances = {}
            if "instance_overrides" in config_dict:
                instance_configs = config_dict.pop("instance_overrides")
            self._update_config_from_dict(
                config_dict, ignore_unknown=ignore_unknown
            )
            for instance_name, instance_dict in instance_configs.items():
                self._instances[instance_name] = self._make_instance_config(
                    instance_dict
                )

    def save_config(self) -> None:
        """Write the serialized config (including overrides) back to YAML."""
        if self._file_path is None:
            self._file_path = os.path.abspath(DEFAULT_CONFIG)
        config_dict = self.__dict__
        logger = get_logger()
        logger.debug(f"Saving config to {self._file_path}:")
        logger.debug(config_dict)
        with open(self._file_path, "w") as f:
            yaml.dump(config_dict, f)
|
from __future__ import annotations
from copy import deepcopy
import inspect
import os
from collections.abc import Iterable
from typing import (
Any,
Callable,
Dict,
List,
Optional,
Tuple,
Union,
get_type_hints,
)
import click
import yaml
from gs_manager.command.types import Server
from gs_manager.command.validators import (
DirectoryConfigType,
GenericConfigType,
ServerType,
)
from gs_manager.logger import get_logger
from gs_manager.null import NullServer
__all__ = [
"DEFAULT_CONFIG",
"DEFAULT_SERVER_TYPE",
"BaseConfig",
"Config",
]
DEFAULT_CONFIG = ".gs_config.yml"
DEFAULT_SERVER_TYPE = Server("null", NullServer)
class BaseConfig:
_validators: Dict[str, List[GenericConfigType]] = {}
_serializers: Dict[str, Callable] = {}
_excluded_properties: List[str] = ["global_options", "parent"]
_excluded_from_save: List[str] = []
_options: Optional[List[str]] = None
_types: Optional[List[type]] = None
parent: Optional[BaseConfig] = None
@property
def _config_options(self) -> List[str]:
if self._options is None:
attributes = inspect.getmembers(
self.__class__, lambda a: not (inspect.isroutine(a))
)
options = []
for attribute in attributes:
if not (
attribute[0].startswith("_")
or attribute[0] in self._excluded_properties
):
options.append(attribute[0])
self._options = options
return self._options
@property
def __dict__(self) -> dict:
config_dict = {}
for key in self._config_options:
if key in self._excluded_from_save:
continue
value = getattr(self, key)
if key in self._serializers:
value = self._serializers[key](value)
config_dict[key] = value
return config_dict
@property
def _config_types(self) -> List[type]:
if self._types is None:
self._types = get_type_hints(self.__class__)
return self._types
@property
def global_options(self):
return {"all": [], "instance_enabled": []}
def get_type_for_param(self, param) -> Optional[type]:
if param in self._config_types:
return self._config_types[param]
return None
def validate(self, param: str, value: Any) -> Tuple[Any, bool]:
if param in self._validators:
for validator in self._validators[param]:
value = validator.validate(value)
has_content = True
if value is None:
has_content = False
elif isinstance(value, Iterable) and len(value) == 0:
has_content = False
elif isinstance(value, bool) and value is False:
has_content = False
return value, has_content
def _update_config_from_dict(
self, config_dict: dict, ignore_unknown=False, ignore_bool=False
) -> None:
for key, value in config_dict.items():
if not (
ignore_unknown
or key in self._config_options
or key.startswith("x-")
):
raise ValueError(f"Unknown config option: {key}")
elif key not in self._config_options or value is None:
continue
value, has_content = self.validate(key, value)
expected_type = self.get_type_for_param(key)
if (expected_type == bool and not ignore_bool) or has_content:
if isinstance(value, dict):
new_value = value.copy()
value = getattr(self, key)
value.update(new_value)
setattr(self, key, value)
def _update_config_from_context(self, context: click.Context) -> None:
self._update_config_from_dict(
context.params, ignore_unknown=True, ignore_bool=True
)
def update_config(self, data: Union[dict, click.Context]) -> None:
if isinstance(data, click.Context):
self._update_config_from_context(data)
else:
self._update_config_from_dict(data)
def make_server_config(self, context: click.Context) -> Config:
server = context.params["server_type"].server
server_config = self
if hasattr(server, "config_class") and server.config_class is not None:
server_config = server.config_class(self._file_path)
server_config.update_config(context)
server._config = server_config
return server_config
class Config(BaseConfig):
    """Top-level configuration backed by an optional YAML file.

    Extends BaseConfig with named per-instance override sections
    (``instance_overrides`` in the YAML file), config-file discovery in
    ancestor directories, and load/save helpers.
    """

    # Public options (settable from the config file or CLI).
    instance_name: Optional[str] = None
    server_path: str = "."
    server_type: Server = DEFAULT_SERVER_TYPE
    _validators = {
        "server_path": [DirectoryConfigType],
        "server_type": [ServerType],
    }
    # Serialize the server type by its name when dumping to YAML.
    _serializers = {"server_type": lambda server_type: server_type.name}
    _file_path: Optional[str]
    # NOTE(review): class-level mutable default shared by all Config objects
    # until reassigned in load_config -- confirm this sharing is intentional.
    _instances: Dict[str, BaseConfig] = {}
    _excluded_properties: List[str] = BaseConfig._excluded_properties + [
        "instances",
        "all_instance_names",
        "instance_name",
        "current_instance",
        "config_path",
    ]
    _instance_properties: List[str] = []
    _extra_attr: List[str] = []

    def __init__(
        self,
        config_file: Optional[str] = None,
        ignore_unknown: bool = False,
        load_config: bool = True,
    ):
        # Locate and read the config file unless the caller opts out
        # (copy() builds an empty object and fills it manually).
        if load_config:
            self._file_path = self._discover_config(config_file)
            if self._file_path is not None:
                self.load_config(ignore_unknown)

    @property
    def __dict__(self) -> dict:
        """Serializable view; records only instance values that differ from base."""
        config_dict = super().__dict__
        if len(self._instances.keys()) > 0:
            config_dict["instance_overrides"] = {}
            for name, instance_config in self._instances.items():
                config_dict["instance_overrides"][name] = {}
                instance_dict = instance_config.__dict__
                for key, value in instance_dict.items():
                    # Only keep values that actually override the base config.
                    if config_dict.get(key) != value:
                        config_dict["instance_overrides"][name][key] = value
        return config_dict

    @property
    def config_path(self):
        # Absolute path of the discovered config file (None when not found).
        return self._file_path

    @property
    def all_instance_names(self) -> str:
        # NOTE(review): returns a dict keys view, not a str -- the return
        # annotation looks wrong; confirm before changing it.
        return self._instances.keys()

    @property
    def instances(self) -> Dict[str, BaseConfig]:
        return self._instances

    @property
    def current_instance(self) -> BaseConfig:
        # Fall back to the base config when no instance name is selected.
        if self.instance_name is None:
            return self
        return self.get_instance(self.instance_name)

    def get_instance(self, name="default") -> BaseConfig:
        """Return the named instance config, refreshed from the click context.

        The literal name ``"default"`` selects this (base) config itself.
        Raises click.ClickException for an unknown instance name.
        """
        config = self
        context = click.get_current_context()
        if name != "default":
            if name not in self._instances:
                raise click.ClickException(f"instance {name} does not exist")
            config = self._instances[name]
        config.update_config(context)
        return config

    def copy(self) -> Config:
        """Return a new Config carrying the same file path and option values."""
        copy = self.__class__(load_config=False)
        copy._file_path = self._file_path
        copy._update_config_from_dict(self.__dict__, ignore_unknown=True)
        return copy

    def _update_config_from_dict(
        self, config_dict: dict, ignore_unknown=False, ignore_bool=False
    ) -> None:
        # Propagate updates to the base config and to every instance override.
        super()._update_config_from_dict(
            config_dict, ignore_unknown, ignore_bool
        )
        for instance in self._instances.values():
            instance._update_config_from_dict(
                config_dict, ignore_unknown, ignore_bool
            )

    def _discover_config(self, file_path: Optional[str]) -> Optional[str]:
        """Resolve the config file path, searching parent directories if needed.

        Absolute or explicitly relative (``./``/``../``) paths are used
        as-is; otherwise up to five ancestor directories of the CWD are
        searched. Returns None when no config file is found.
        """
        if file_path is None:
            file_path = DEFAULT_CONFIG
        abs_file_path = os.path.abspath(file_path)
        if not (
            abs_file_path == file_path
            or file_path.startswith("./")
            or file_path.startswith("../")
        ):
            abs_file_path = None
            path = os.getcwd()
            search_path = path
            for x in range(5):
                # Stop at the filesystem root.
                if search_path == "/":
                    file_path = None
                    break
                check_file_path = os.path.join(search_path, file_path)
                if os.path.isfile(check_file_path):
                    abs_file_path = os.path.abspath(check_file_path)
                    break
                search_path = os.path.abspath(
                    os.path.join(search_path, os.pardir)
                )
        return abs_file_path

    def _make_instance_config_factory(self) -> BaseConfig:
        # Build a fresh BaseConfig subclass mirroring this config's options,
        # selected properties, and extra attributes, and return an instance.
        class InstanceConfig(BaseConfig):
            _validators: Dict[str, List[GenericConfigType]] = self._validators
            _serializers: Dict[str, Callable] = self._serializers
            _excluded_properties: List[str] = self._excluded_properties
        for option in self._config_options:
            setattr(InstanceConfig, option, None)
        for name in self._instance_properties:
            # Re-expose selected read-only properties of the parent class.
            prop_method = getattr(self.__class__, name).fget
            setattr(InstanceConfig, name, property(prop_method))
        for attr in self._extra_attr:
            setattr(InstanceConfig, attr, None)
        return InstanceConfig()

    def _make_instance_config(self, instance_dict: dict):
        """Create an instance config seeded from this config plus overrides."""
        instance_config = self._make_instance_config_factory()
        instance_config.parent = self
        instance_config._validators = self._validators
        instance_config._serializers = self._serializers
        instance_config._excluded_properties = self._excluded_properties
        for option in self._config_options:
            # Deep-copy so instance mutations do not leak into the base config.
            setattr(instance_config, option, deepcopy(getattr(self, option)))
        instance_config._update_config_from_dict(
            instance_dict, ignore_unknown=True
        )
        return instance_config

    def load_config(self, ignore_unknown: bool = False) -> None:
        """Load options and instance overrides from the YAML file at _file_path."""
        if not os.path.isfile(self._file_path):
            raise ValueError("Invalid config path")
        config_dict = {}
        with open(self._file_path, "r") as f:
            config_dict = yaml.safe_load(f)
        if config_dict is not None:
            instance_configs = {}
            # reset all of the instance configs
            self._instances = {}
            if "instance_overrides" in config_dict:
                instance_configs = config_dict.pop("instance_overrides")
            self._update_config_from_dict(
                config_dict, ignore_unknown=ignore_unknown
            )
            for instance_name, instance_dict in instance_configs.items():
                self._instances[instance_name] = self._make_instance_config(
                    instance_dict
                )

    def save_config(self) -> None:
        """Write the current configuration (with overrides) back to YAML."""
        if self._file_path is None:
            self._file_path = os.path.abspath(DEFAULT_CONFIG)
        config_dict = self.__dict__
        logger = get_logger()
        logger.debug(f"Saving config to {self._file_path}:")
        logger.debug(config_dict)
        with open(self._file_path, "w") as f:
            yaml.dump(config_dict, f)
|
en
| 0.558302
|
# reset all of the instance configs
| 2.001998
| 2
|
webtry.py
|
gustavo-zsilva/webtry
| 0
|
6626793
|
<filename>webtry.py
# WebTry: interactive console tool that checks whether sites are online and
# fetches page source using the `requests` library.
import requests
from time import sleep

# Default request headers -- currently unused; presumably meant to be passed
# as `headers=cabecalho` to requests.get -- TODO confirm before wiring in.
cabecalho = {
    'User-agent' : 'Mozilla/5.0 (X11; Linux x86_64)',
    'Referer' : 'https://google.com'
}
print('Bem-Vindo ao WebTry!')
options = '''Que ação deseja realizar?
[ 1 ] Acessar 1 URL
[ 2 ] Acessar mais de uma URL
[ 3 ] Pegar código-fonte de uma página
[ 4 ] Sair do programa'''
print(options)
option = int(input('Opção: '))
while option != 4:
    if option == 1:
        # Check availability of a single URL.
        print('Digite a URL do site que deseja checar; ')
        url = str(input('URL: '))
        try:
            req = requests.get(url)
            print(f'\033[32mOnline; status_code = {req.status_code}\033[m')
        except Exception as e:
            print(f'\033[1;31mRequisição deu erro:\033[m {e}')
    elif option == 2:
        # Check availability of several user-supplied URLs.
        urls = []
        num_url = int(input('Quantos sites deseja checar? '))
        for c in range(0, num_url):
            url = str(input(f'URL {c}: '))
            urls.append(url)
        for url in urls:  # iterate values directly instead of indexing
            try:
                req = requests.get(url)
                print(f'\033[32mOnline; status_code = {req.status_code}\033[m')
            except Exception as e:
                print(f'\033[1;31mRequisição deu erro:\033[m {e}')
        sleep(3)
    elif option == 3:
        # Fetch and display a page's source code.
        print('Digite a URL do site que deseja obter o código-fonte; ')
        url = str(input('URL: '))
        try:
            req = requests.get(url)
            print('\033[32mCódigo-Fonte acessado com sucesso. Carregando...\033[m')
            sleep(3)
            print(req.text)
        except Exception as e:
            print(f'\033[1;31mRequisição deu erro:\033[m {e}')
    else:
        # BUG FIX: this `else` previously hung off `if option == 3`, so
        # choosing option 1 or 2 also printed this message; the if/elif
        # chain is now mutually exclusive.
        print('Ainda estamos trabalhando nesta opção.')
    print(options)
    option = int(input('Opção: '))
print('\033[31mPrograma Encerrado.')
|
<filename>webtry.py
# WebTry: interactive console tool that checks whether sites are online and
# fetches page source using the `requests` library.
import requests
from time import sleep

# Default request headers -- currently unused; presumably meant to be passed
# as `headers=cabecalho` to requests.get -- TODO confirm before wiring in.
cabecalho = {
    'User-agent' : 'Mozilla/5.0 (X11; Linux x86_64)',
    'Referer' : 'https://google.com'
}
print('Bem-Vindo ao WebTry!')
options = '''Que ação deseja realizar?
[ 1 ] Acessar 1 URL
[ 2 ] Acessar mais de uma URL
[ 3 ] Pegar código-fonte de uma página
[ 4 ] Sair do programa'''
print(options)
option = int(input('Opção: '))
while option != 4:
    if option == 1:
        # Check availability of a single URL.
        print('Digite a URL do site que deseja checar; ')
        url = str(input('URL: '))
        try:
            req = requests.get(url)
            print(f'\033[32mOnline; status_code = {req.status_code}\033[m')
        except Exception as e:
            print(f'\033[1;31mRequisição deu erro:\033[m {e}')
    elif option == 2:
        # Check availability of several user-supplied URLs.
        urls = []
        num_url = int(input('Quantos sites deseja checar? '))
        for c in range(0, num_url):
            url = str(input(f'URL {c}: '))
            urls.append(url)
        for url in urls:  # iterate values directly instead of indexing
            try:
                req = requests.get(url)
                print(f'\033[32mOnline; status_code = {req.status_code}\033[m')
            except Exception as e:
                print(f'\033[1;31mRequisição deu erro:\033[m {e}')
        sleep(3)
    elif option == 3:
        # Fetch and display a page's source code.
        print('Digite a URL do site que deseja obter o código-fonte; ')
        url = str(input('URL: '))
        try:
            req = requests.get(url)
            print('\033[32mCódigo-Fonte acessado com sucesso. Carregando...\033[m')
            sleep(3)
            print(req.text)
        except Exception as e:
            print(f'\033[1;31mRequisição deu erro:\033[m {e}')
    else:
        # BUG FIX: this `else` previously hung off `if option == 3`, so
        # choosing option 1 or 2 also printed this message; the if/elif
        # chain is now mutually exclusive.
        print('Ainda estamos trabalhando nesta opção.')
    print(options)
    option = int(input('Opção: '))
print('\033[31mPrograma Encerrado.')
|
pt
| 0.998447
|
Que ação deseja realizar? [ 1 ] Acessar 1 URL [ 2 ] Acessar mais de uma URL [ 3 ] Pegar código-fonte de uma página [ 4 ] Sair do programa
| 3.18535
| 3
|
pexdoc/pinspect.py
|
pmacosta/pexdoc
| 0
|
6626794
|
# pinspect.py
# Copyright (c) 2013-2019 <NAME>
# See LICENSE for details
# pylint: disable=C0103,C0111,C0411,E0012,E0611
# pylint: disable=F0401,R0205,R0912,R0914,R0916,R1710,R1717,W0212,W0631,W1504
# Standard library imports
from __future__ import print_function
import ast
import collections
import copy
import json
import os
import platform
import re
import sys
import types
# PyPI imports
try: # pragma: no cover
from inspect import Parameter, signature
except ImportError: # pragma: no cover
from funcsigs import Parameter, signature
# Intra-package imports
if sys.hexversion < 0x03000000: # pragma: no cover
from pexdoc.compat2 import _unicode_to_ascii, _readlines, _unicode_char
else: # pragma: no cover
from pexdoc.compat3 import _readlines, _unicode_char
###
# Global constants
###
# Matches identifiers with a single leading underscore ("private" names,
# but not dunder/special names such as ``__init__``).
_PRIVATE_PROP_REGEXP = re.compile("_[^_]+")
###
# Functions
###
def _get_module_name_from_fname(fname):
    """Return the name of the already-loaded module defined in file *fname*.

    Raises RuntimeError when no loaded module matches the file name.
    """
    target = fname.replace(".pyc", ".py")
    for module in list(sys.modules.values()):
        source = getattr(module, "__file__", None)
        if source and source.replace(".pyc", ".py") == target:
            return module.__name__
    raise RuntimeError("Module could not be found")
def _validate_fname(fname):
    """Raise RuntimeError unless *fname* looks like a usable file name."""
    error = RuntimeError("Argument `callables_fname` is not valid")
    if not isinstance(fname, str):
        raise error
    try:
        # Probe the path; malformed names raise TypeError/ValueError here.
        if not os.path.exists(fname):
            os.access(fname, os.W_OK)
    except (TypeError, ValueError):  # pragma: no cover
        raise error
def get_function_args(func, no_self=False, no_varargs=False):
    """
    Return a tuple with the argument names of *func* in signature order.

    Variable-positional arguments are prefixed with ``*`` and
    variable-keyword arguments with ``**``, e.g. ``('self', 'value',
    '**kwargs')``.

    :param func: Function
    :type func: function object
    :param no_self: Flag that indicates whether the function argument *self*,
                    if present, is included in the output (False) or not (True)
    :type no_self: boolean
    :param no_varargs: Flag that indicates whether keyword arguments are
                       included in the output (True) or not (False)
    :type no_varargs: boolean
    :rtype: tuple
    """
    params = signature(func).parameters
    # Prefix variadic parameters the way they appear in source code.
    prefix_by_kind = {Parameter.VAR_POSITIONAL: "*", Parameter.VAR_KEYWORD: "**"}
    args = [
        prefix_by_kind.get(param.kind, "") + name
        for name, param in params.items()
    ]
    # Optionally drop a leading 'self'.
    if no_self and args and args[0] == "self":
        args = args[1:]
    # Optionally drop *args / **kwargs style parameters.
    if no_varargs:
        args = [arg for arg in args if not arg.startswith("*")]
    return tuple(args)
def get_module_name(module_obj):
    r"""
    Retrieve the module name from a module object.

    :param module_obj: Module object
    :type module_obj: object
    :rtype: string
    :raises:
     * RuntimeError (Argument \`module_obj\` is not valid)
     * RuntimeError (Module object \`*[module_name]*\` could not be found in
       loaded modules)
    """
    if not is_object_module(module_obj):
        raise RuntimeError("Argument `module_obj` is not valid")
    name = module_obj.__name__
    if name not in sys.modules:
        emsg = "Module object `{name}` could not be found in loaded modules"
        raise RuntimeError(emsg.format(name=name))
    return name
def is_object_module(obj):
    """
    Return True when the argument is a module object, False otherwise.

    :param obj: Object
    :type obj: any
    :rtype: boolean
    """
    return isinstance(obj, types.ModuleType)
def is_special_method(name):
    """
    Return True when *name* is a dunder-style (special) method name.

    :param name: Callable name
    :type name: string
    :rtype: boolean
    """
    return name[:2] == "__"
def private_props(obj):
    """
    Yield names of non-callable attributes of *obj* with a single leading
    underscore (private attributes, but not magic/dunder names).

    :param obj: Object
    :type obj: object
    :returns: iterator
    """
    # Single leading underscore followed by a non-underscore character;
    # this deliberately excludes dunder names like __init__.
    private_pattern = re.compile("_[^_]+")
    for attr_name in dir(obj):
        if private_pattern.match(attr_name) and not callable(getattr(obj, attr_name)):
            yield attr_name
###
# Classes
###
class Callables(object):
r"""
Generate a list of module callables and get their attributes.
Callables are functions, classes, methods and class
properties; attributes are callable type, file name, and lines
span.
Information from multiple modules can be stored in the callables
database of the object by repeatedly calling
:py:meth:`pexdoc.pinspect.Callables.trace` with different module file
names. A :py:class:`pexdoc.pinspect.Callables` object retains knowledge of
which modules have been traced so repeated calls to
:py:meth:`pexdoc.pinspect.Callables.trace` with the *same* module object
will *not* result in module re-traces (and the consequent performance hit)
:param fnames: File names of the modules to trace. If None no immediate
tracing is done
:type fnames: list of strings or None
:raises:
* OSError (File *[fname]* could not be found)
* RuntimeError (Argument \`fnames\` is not valid)
"""
# pylint: disable=R0903
def __init__(self, fnames=None): # noqa
self._callables_db = {}
self._reverse_callables_db = {}
self._modules_dict = {}
self._fnames = {}
self._module_names = []
self._class_names = []
if fnames:
self.trace(fnames)
def __add__(self, other):
"""
Merge two objects.
:raises: RuntimeError (Conflicting information between objects)
For example:
>>> import pexdoc.pcontracts, pexdoc.exh, pexdoc.pinspect, sys
>>> obj1 = pexdoc.pinspect.Callables(
... [sys.modules['pexdoc.exh'].__file__]
... )
>>> obj2 = pexdoc.pinspect.Callables(
... [sys.modules['pexdoc.pcontracts'].__file__]
... )
>>> obj3 = pexdoc.pinspect.Callables([
... sys.modules['pexdoc.exh'].__file__,
... sys.modules['pexdoc.pcontracts'].__file__,
... ])
>>> obj1 == obj3
False
>>> obj1 == obj2
False
>>> obj1+obj2 == obj3
True
"""
self._check_intersection(other)
robj = Callables()
robj._callables_db = copy.deepcopy(self._callables_db)
robj._callables_db.update(copy.deepcopy(other._callables_db))
robj._reverse_callables_db = copy.deepcopy(self._reverse_callables_db)
robj._reverse_callables_db.update(copy.deepcopy(other._reverse_callables_db))
robj._modules_dict = copy.deepcopy(self._modules_dict)
robj._modules_dict.update(copy.deepcopy(other._modules_dict))
robj._module_names = list(set(self._module_names[:] + other._module_names[:]))
robj._class_names = list(set(self._class_names[:] + other._class_names[:]))
robj._fnames = copy.deepcopy(self._fnames)
robj._fnames.update(copy.deepcopy(other._fnames))
return robj
def __bool__(self): # pragma: no cover
"""
Return :code:`False` if no modules have been traced, :code:`True` otherwise.
For example:
>>> from __future__ import print_function
>>> import pexdoc.pcontracts, pexdoc.pinspect, sys
>>> obj = pexdoc.pinspect.Callables()
>>> if obj:
... print('Boolean test returned: True')
... else:
... print('Boolean test returned: False')
Boolean test returned: False
>>> obj.trace([sys.modules['pexdoc.pcontracts'].__file__])
>>> if obj:
... print('Boolean test returned: True')
... else:
... print('Boolean test returned: False')
Boolean test returned: True
"""
return bool(self._module_names)
def __copy__(self):
"""
Copy object.
For example:
>>> import copy, pexdoc.exh, pexdoc.pinspect, sys
>>> obj1 = pexdoc.pinspect.Callables(
... [sys.modules['pexdoc.exh'].__file__]
... )
>>> obj2 = copy.copy(obj1)
>>> obj1 == obj2
True
"""
cobj = Callables()
for prop_name in private_props(self):
setattr(cobj, prop_name, copy.deepcopy(getattr(self, prop_name)))
return cobj
def __eq__(self, other):
"""
Test object equality.
For example:
>>> import pexdoc.pcontracts, pexdoc.exh, pexdoc.pinspect, sys
>>> obj1 = pexdoc.pinspect.Callables(
... [sys.modules['pexdoc.exh'].__file__]
... )
>>> obj2 = pexdoc.pinspect.Callables(
... [sys.modules['pexdoc.exh'].__file__]
... )
>>> obj3 = pexdoc.pinspect.Callables(
... [sys.modules['pexdoc.pcontracts'].__file__]
... )
>>> obj1 == obj2
True
>>> obj1 == obj3
False
>>> 5 == obj3
False
"""
return isinstance(other, Callables) and all(
[
sorted(getattr(self, attr)) == sorted(getattr(other, attr))
for attr in private_props(self)
]
)
def __iadd__(self, other):
"""
Merge an object into an existing object.
:raises: RuntimeError (Conflicting information between objects)
For example:
>>> import pexdoc.pcontracts, pexdoc.exh, pexdoc.pinspect, sys
>>> obj1 = pexdoc.pinspect.Callables(
... [sys.modules['pexdoc.exh'].__file__]
... )
>>> obj2 = pexdoc.pinspect.Callables(
... [sys.modules['pexdoc.pcontracts'].__file__]
... )
>>> obj3 = pexdoc.pinspect.Callables([
... sys.modules['pexdoc.exh'].__file__,
... sys.modules['pexdoc.pcontracts'].__file__,
... ])
>>> obj1 == obj3
False
>>> obj1 == obj2
False
>>> obj1 += obj2
>>> obj1 == obj3
True
"""
self._check_intersection(other)
self._callables_db.update(copy.deepcopy(other._callables_db))
self._reverse_callables_db.update(copy.deepcopy(other._reverse_callables_db))
self._modules_dict.update(copy.deepcopy(other._modules_dict))
self._module_names = list(set(self._module_names + other._module_names[:]))
self._class_names = list(set(self._class_names + other._class_names[:]))
self._fnames.update(copy.deepcopy(other._fnames))
return self
def __nonzero__(self): # pragma: no cover
"""
Return :code:`False` if no modules have been traced, :code:`True` otherwise.
For example:
>>> from __future__ import print_function
>>> import pexdoc.pcontracts, pexdoc.pinspect, sys
>>> obj = pexdoc.pinspect.Callables()
>>> if obj:
... print('Boolean test returned: True')
... else:
... print('Boolean test returned: False')
Boolean test returned: False
>>> obj.trace([sys.modules['pexdoc.pcontracts'].__file__])
>>> if obj:
... print('Boolean test returned: True')
... else:
... print('Boolean test returned: False')
Boolean test returned: True
"""
return bool(self._module_names)
def __repr__(self):
"""
Return a string with the expression needed to re-create the object.
For example:
>>> import pexdoc.exh, pexdoc.pinspect, sys
>>> obj1 = pexdoc.pinspect.Callables(
... [sys.modules['pexdoc.exh'].__file__]
... )
>>> repr(obj1) #doctest: +ELLIPSIS
"pexdoc.pinspect.Callables(['...exh.py'])"
>>> exec("obj2="+repr(obj1))
>>> obj1 == obj2
True
"""
return "pexdoc.pinspect.Callables({0})".format(sorted(self._fnames.keys()))
    def __str__(self):
        """
        Return a string with a detailed description of the object's contents.
        For example:
        >>> from __future__ import print_function
        >>> import pexdoc.pinspect, os, sys
        >>> import docs.support.pinspect_example_1
        >>> cobj = pexdoc.pinspect.Callables([
        ...     sys.modules['docs.support.pinspect_example_1'].__file__
        ... ])
        >>> print(cobj) #doctest: +ELLIPSIS
        Modules:
        ...pinspect_example_1
        Classes:
        ...pinspect_example_1.my_func.MyClass
        ...pinspect_example_1.my_func: func (10-29)
        ...pinspect_example_1.my_func.MyClass: class (13-29)
        ...pinspect_example_1.my_func.MyClass.__init__: meth (21-23)
        ...pinspect_example_1.my_func.MyClass._get_value: meth (24-26)
        ...pinspect_example_1.my_func.MyClass.value: prop (27-29)
        ...pinspect_example_1.print_name: func (30-31)
        The numbers in parenthesis indicate the line number in which the
        callable starts and ends within the file it is defined in
        """
        if self._module_names:
            ret = []
            # List traced modules
            ret.append("Modules:")
            for module_name in sorted(self._module_names):
                ret.append(" {0}".format(module_name))
            # List traced classes
            if self._class_names:
                ret.append("Classes:")
                for class_name in sorted(self._class_names):
                    ret.append(" {0}".format(class_name))
            # List traced callables (methods, functions, properties)
            for entry in sorted(self._modules_dict):
                dict_value = self._modules_dict[entry]
                # Sort callables by their starting line number within the file.
                for value in sorted(dict_value, key=lambda x: x["code_id"][1]):
                    start, stop = value["code_id"][1], value["last_lineno"]
                    cname, ctype = value["name"], value["type"]
                    # Single-line callables are shown as "(N)", multi-line
                    # ones as "(start-stop)".
                    range_lines = (start, ["", "-" + str(stop)][start != stop])
                    crange = " ({0}{1})".format(*range_lines)
                    ret.append("{0}: {1}{2}".format(cname, ctype, crange))
            return "\n".join(ret)
        # Nothing traced yet -> empty description.
        return ""
def _check_intersection(self, other):
"""Check that intersection of two objects has the same information."""
# pylint: disable=C0123
props = ["_callables_db", "_reverse_callables_db", "_modules_dict"]
for prop in props:
self_dict = getattr(self, prop)
other_dict = getattr(other, prop)
keys_self = set(self_dict.keys())
keys_other = set(other_dict.keys())
for key in keys_self & keys_other:
svalue = self_dict[key]
ovalue = other_dict[key]
same_type = type(svalue) == type(ovalue)
if same_type:
list_comp = isinstance(svalue, list) and any(
[item not in svalue for item in ovalue]
)
str_comp = isinstance(svalue, str) and svalue != ovalue
dict_comp = isinstance(svalue, dict) and svalue != ovalue
comp = any([list_comp, str_comp, dict_comp])
if (not same_type) or (same_type and comp):
emsg = "Conflicting information between objects"
raise RuntimeError(emsg)
def _get_callables_db(self):
"""Get callables_db property."""
return self._callables_db
def get_callable_from_line(self, module_file, lineno):
"""Get the callable that the line number belongs to."""
module_name = _get_module_name_from_fname(module_file)
if module_name not in self._modules_dict:
self.trace([module_file])
ret = None
# Sort callables by starting line number
iobj = sorted(self._modules_dict[module_name], key=lambda x: x["code_id"][1])
for value in iobj:
if value["code_id"][1] <= lineno <= value["last_lineno"]:
ret = value["name"]
elif value["code_id"][1] > lineno:
break
return ret if ret else module_name
def _get_reverse_callables_db(self):
"""Getter reverse_callables_db property."""
return self._reverse_callables_db
    def load(self, callables_fname):
        r"""
        Load traced modules information from a `JSON <http://www.json.org/>`_ file.
        The loaded module information is merged with any existing module information

        :param callables_fname: File name
        :type  callables_fname: :ref:`FileNameExists`
        :raises:
         * OSError (File *[fname]* could not be found)
         * RuntimeError (Argument \\`callables_fname\\` is not valid)
        """
        # Validate file name
        _validate_fname(callables_fname)
        if not os.path.exists(callables_fname):
            raise OSError("File {0} could not be found".format(callables_fname))
        with open(callables_fname, "r") as fobj:
            fdict = json.load(fobj)
        if sys.hexversion < 0x03000000:  # pragma: no cover
            # Python 2: JSON produces unicode strings; normalize to ASCII.
            fdict = _unicode_to_ascii(fdict)
        self._callables_db.update(fdict["_callables_db"])
        # Reverse the tuple-to-string conversion that the save method
        # does due to the fact that JSON keys need to be strings and the
        # keys of the reverse callable dictionary are tuples where the first
        # item is a file name and the second item is the starting line of the
        # callable within that file (dictionary value)
        rdict = {}
        for key, value in fdict["_reverse_callables_db"].items():
            # key looks like "('path', 123)"; strip parens and split.
            tokens = key[1:-1].split(",")
            key = tokens[0].strip()[1:-1]
            if platform.system().lower() == "windows":  # pragma: no cover
                # Collapse repeated backslash escaping until stable.
                while True:
                    tmp = key
                    key = key.replace("\\\\", "\\")
                    if tmp == key:
                        break
            rdict[(key, int(tokens[1]))] = value
        self._reverse_callables_db.update(rdict)
        self._modules_dict.update(fdict["_modules_dict"])
        self._fnames.update(fdict["_fnames"])
        self._module_names.extend(fdict["_module_names"])
        self._class_names.extend(fdict["_class_names"])
        # De-duplicate names accumulated across multiple load() calls.
        self._module_names = sorted(list(set(self._module_names)))
        self._class_names = sorted(list(set(self._class_names)))
def refresh(self):
"""Re-traces modules modified since the time they were traced."""
self.trace(list(self._fnames.keys()), _refresh=True)
    def save(self, callables_fname):
        r"""
        Save traced modules information to a `JSON`_ file.
        If the file exists it is overwritten

        :param callables_fname: File name
        :type  callables_fname: :ref:`FileName`
        :raises: RuntimeError (Argument \\`fname\\` is not valid)
        """
        # Validate file name
        _validate_fname(callables_fname)
        # JSON keys have to be strings but the reverse callables dictionary
        # keys are tuples, where the first item is a file name and the
        # second item is the starting line of the callable within that file
        # (dictionary value), thus need to convert the key to a string
        items = self._reverse_callables_db.items()
        fdict = {
            "_callables_db": self._callables_db,
            "_reverse_callables_db": dict([(str(k), v) for k, v in items]),
            "_modules_dict": self._modules_dict,
            "_fnames": self._fnames,
            "_module_names": self._module_names,
            "_class_names": self._class_names,
        }
        with open(callables_fname, "w") as fobj:
            json.dump(fdict, fobj)
    def trace(self, fnames, _refresh=False):
        r"""
        Generate a list of module callables and gets their attributes.
        Callables are functions, classes, methods and class properties; their
        attributes are callable type, file name, and lines span

        :param fnames: File names of the modules to trace
        :type  fnames: list
        :raises:
         * OSError (File *[fname]* could not be found)
         * RuntimeError (Argument \`fnames\` is not valid)
        """
        # pylint: disable=R0101
        if fnames and (not isinstance(fnames, list)):
            raise RuntimeError("Argument `fnames` is not valid")
        if fnames and any([not isinstance(item, str) for item in fnames]):
            raise RuntimeError("Argument `fnames` is not valid")
        for fname in fnames:
            if not os.path.exists(fname):
                raise OSError("File {0} could not be found".format(fname))
        # Normalize compiled-file names to their source-file counterparts.
        fnames = [item.replace(".pyc", ".py") for item in fnames]
        # Fake-node factory used below to close the last callable in a file.
        bobj = collections.namedtuple("Bundle", ["lineno", "col_offset"])
        for fname in fnames:
            # Trace only files not seen before, or (when refreshing) files
            # modified on disk after their recorded trace date.
            if (fname not in self._fnames) or (
                _refresh
                and (fname in self._fnames)
                and (self._fnames[fname]["date"] < os.path.getmtime(fname))
            ):
                module_name = (
                    _get_module_name_from_fname(fname)
                    if not _refresh
                    else self._fnames[fname]["name"]
                )
                # Remove old module information if it is going to be refreshed
                if _refresh:
                    self._module_names.pop(self._module_names.index(module_name))
                    for cls in self._fnames[fname]["classes"]:
                        self._class_names.pop(self._class_names.index(cls))
                    dlist = []
                    for key, value in self._reverse_callables_db.items():
                        if key[0] == fname:
                            dlist.append(key)
                            try:
                                del self._callables_db[value]
                            except KeyError:
                                pass
                    for item in set(dlist):
                        del self._reverse_callables_db[item]
                lines = _readlines(fname)
                # Eliminate all Unicode characters till the first ASCII
                # character is found in first line of file, to deal with
                # Unicode-encoded source files
                for num, char in enumerate(lines[0]):  # pragma: no cover
                    if not _unicode_char(char):
                        break
                lines[0] = lines[0][num:]
                tree = ast.parse("".join(lines))
                aobj = _AstTreeScanner(module_name, fname, lines)
                aobj.visit(tree)
                # Create a fake callable at the end of the file to properly
                # 'close', i.e. assign a last line number to the last
                # callable in file
                fake_node = bobj(len(lines) + 1, -1)
                aobj._close_callable(fake_node, force=True)
                self._class_names += aobj._class_names[:]
                self._module_names.append(module_name)
                self._callables_db.update(aobj._callables_db)
                self._reverse_callables_db.update(aobj._reverse_callables_db)
                # Split into modules
                self._modules_dict[module_name] = []
                iobj = [
                    item
                    for item in self._callables_db.values()
                    if item["name"].startswith(module_name + ".")
                ]
                for entry in iobj:
                    self._modules_dict[module_name].append(entry)
                # Record trace metadata so refresh() can detect staleness.
                self._fnames[fname] = {
                    "name": module_name,
                    "date": os.path.getmtime(fname),
                    "classes": aobj._class_names[:],
                }
# Managed attributes
callables_db = property(_get_callables_db, doc="Module(s) callables database")
"""
Return the callables database.
:rtype: dictionary
The callable database is a dictionary that has the following structure:
* **full callable name** *(string)* -- Dictionary key. Elements in the
callable path are separated by periods (:code:`'.'`). For example, method
:code:`my_method()` from class
:code:`MyClass` from module :code:`my_module` appears as
:code:`'my_module.MyClass.my_method'`
* **callable properties** *(dictionary)* -- Dictionary value. The elements
of this dictionary are:
* **type** *(string)* -- :code:`'class'` for classes, :code:`'meth'` for
methods, :code:`'func'` for functions or :code:`'prop'` for properties
or class attributes
* **code_id** *(tuple or None)* -- A tuple with the following items:
* **file name** *(string)* -- the first item contains the file name
where the callable can be found
* **line number** *(integer)* -- the second item contains the line
number in which the callable code starts (including decorators)
* **last_lineno** *(integer)* -- line number in which the callable code
ends (including blank lines and comments regardless of their
indentation level)
"""
reverse_callables_db = property(
_get_reverse_callables_db, doc="Reverse module(s) callables database"
)
"""
Returns the reverse callables database
:rtype: dictionary
The reverse callable database is a dictionary that has the following
structure:
* **callable id** *(tuple)* -- Dictionary key. Two-element tuple in which
the first tuple item is the file name where the callable is defined
and the second tuple item is the line number where the callable
definition starts
* **full callable name** *(string)* -- Dictionary value. Elements in the
callable path are separated by periods (:code:`'.'`). For example,
method :code:`my_method()` from class :code:`MyClass` from module
:code:`my_module` appears as :code:`'my_module.MyClass.my_method'`
"""
# [[[cog
# code = """
# def pcolor(text, color, indent=0):
# esc_dict = {
# 'black':30, 'red':31, 'green':32, 'yellow':33, 'blue':34,
# 'magenta':35, 'cyan':36, 'white':37, 'none':-1
# }
# color = color.lower()
# if esc_dict[color] != -1:
# return (
# '\033[{color_code}m{indent}{text}\033[0m'.format(
# color_code=esc_dict[color], indent=' '*indent, text=text
# )
# )
# return '{indent}{text}'.format(indent=' '*indent, text=text)
# """
# cog.out(code)
# ]]]
# [[[end]]]
class _AstTreeScanner(ast.NodeVisitor):
"""Get all callables from a given module by traversing abstract syntax tree."""
# pylint: disable=R0902,W0702
    def __init__(self, mname, fname, lines):
        """Set up scanner state for module *mname* defined in file *fname*."""
        super(_AstTreeScanner, self).__init__()
        # Raw source lines of the module being scanned.
        self._lines = lines
        # Captures a line's leading whitespace (used to measure indentation).
        self._wsregexp = re.compile(r"^(\s*).+")
        self._fname = fname.replace(".pyc", ".py")
        self._module = mname
        # Stack of open scopes; starts with the module-level pseudo-scope.
        self._indent_stack = [
            {"level": 0, "type": "module", "prefix": "", "full_name": None, "lineno": 0}
        ]
        self._callables_db = {}
        self._reverse_callables_db = {}
        self._class_names = []
        # Highest line number already processed; guards against re-closing
        # callables for earlier lines (see _close_callable).
        self._processed_line = 0
def _close_callable(self, node, force=False):
"""Record last line number of callable."""
# Only nodes that have a line number can be considered for closing
# callables. Similarly, only nodes with lines greater than the one
# already processed can be considered for closing callables
try:
lineno = node.lineno
except AttributeError:
return
if lineno <= self._processed_line:
return
# [[[cog
# code = """
# print(pcolor('Close callable @ line = {0}'.format(lineno), 'green'))
# """
# cog.out(code)
# ]]]
# [[[end]]]
# Extract node name for property closing. Once a property is found,
# it can only be closed out by a node type that has a name
name = ""
try:
name = (
node.name
if hasattr(node, "name")
else (
node.targets[0].id
if hasattr(node.targets[0], "id")
else node.targets[0].value.id
)
)
except AttributeError:
pass
# Traverse backwards through call stack and close callables as needed
indent = self._get_indent(node)
count = -1
# [[[cog
# code = """
# print(
# pcolor(
# ' Name {0} @ {1}, indent = {2}'.format(
# name if name else 'None', lineno, indent
# ),
# 'yellow'
# )
# )
# """
# cog.out(code)
# ]]]
# [[[end]]]
dlist = []
while count >= -len(self._indent_stack):
element_full_name = self._indent_stack[count]["full_name"]
edict = self._callables_db.get(element_full_name, None)
stack_indent = self._indent_stack[count]["level"]
open_callable = element_full_name and (not edict["last_lineno"])
# [[[cog
# code = """
# print(
# pcolor(
# ' Name {0}, indent, {1}, stack_indent {2}'.format(
# element_full_name, indent, stack_indent
# ),
# 'yellow'
# )
# )
# """
# cog.out(code)
# ]]]
# [[[end]]]
if open_callable and (
force
or (indent < stack_indent)
or (
(indent == stack_indent)
and (
(edict["type"] != "prop")
or (
(edict["type"] == "prop")
and (name and (name != element_full_name))
)
)
)
):
# [[[cog
# code = """
# print(
# pcolor(
# ' Closing {0} @ {1}'.format(
# element_full_name, lineno-1
# ),
# 'yellow'
# )
# )
# """
# cog.out(code)
# ]]]
# [[[end]]]
edict["last_lineno"] = lineno - 1
dlist.append(count)
if indent > stack_indent:
break
count -= 1
# Callables have to be removed from stack when they are closed,
# otherwise if a callable is subsequently followed after a few
# lines by another callable at a further indentation level (like a for
# loop) the second callable would incorrectly appear within the scope
# of the first callable
stack = self._indent_stack
stack_length = len(self._indent_stack)
dlist = [item for item in dlist if stack[item]["type"] != "module"]
for item in dlist:
del self._indent_stack[stack_length + item]
def _get_indent(self, node):
"""Get node indentation level."""
lineno = node.lineno
if lineno > len(self._lines):
return -1
wsindent = self._wsregexp.match(self._lines[lineno - 1])
return len(wsindent.group(1))
def _in_class(self, node):
"""Find if callable is function or method."""
# Move left one indentation level and check if that callable is a class
indent = self._get_indent(node)
for indent_dict in reversed(self._indent_stack): # pragma: no branch
if (indent_dict["level"] < indent) or (indent_dict["type"] == "module"):
return indent_dict["type"] == "class"
def _pop_indent_stack(self, node, node_type=None, action=None):
"""Get callable full name."""
indent = self._get_indent(node)
indent_stack = copy.deepcopy(self._indent_stack)
# Find enclosing scope
while (len(indent_stack) > 1) and (
(
(indent <= indent_stack[-1]["level"])
and (indent_stack[-1]["type"] != "module")
)
or (indent_stack[-1]["type"] == "prop")
):
self._close_callable(node)
indent_stack.pop()
# Construct new callable name
name = (
(
node.targets[0].id
if hasattr(node.targets[0], "id")
else node.targets[0].value.id
)
if node_type == "prop"
else node.name
)
element_full_name = ".".join(
[self._module]
+ [
indent_dict["prefix"]
for indent_dict in indent_stack
if indent_dict["type"] != "module"
]
+ [name]
) + ("({0})".format(action) if action else "")
# Add new callable entry to indentation stack
self._indent_stack = indent_stack
self._indent_stack.append(
{
"level": indent,
"prefix": name,
"type": node_type,
"full_name": element_full_name,
"lineno": node.lineno,
}
)
return element_full_name
def generic_visit(self, node):
"""Implement generic node."""
# [[[cog
# cog.out("print(pcolor('Enter generic visitor', 'magenta'))")
# ]]]
# [[[end]]]
# A generic visitor that potentially closes callables is needed to
# close enclosed callables that are not at the end of the enclosing
# callable, otherwise the ending line of the enclosed callable would
# be the ending line of the enclosing callable, which would be
# incorrect
self._close_callable(node)
super(_AstTreeScanner, self).generic_visit(node)
def visit_arguments(self, node):
# Decorated callables go to visit_FunctionDef in the first line of the
# decorator, but the actual function definition would go the generic
# visitor if it is not caught when processing the function arguments.
# This would close the callable prematurely, so the argument walk needs
# to be intercepted and suppressed
pass
def visit_Assign(self, node):
"""
Implement assignment walker.
Parse class properties defined via the property() function
"""
# [[[cog
# cog.out("print(pcolor('Enter assign visitor', 'magenta'))")
# ]]]
# [[[end]]]
# ###
# Class-level assignment may also be a class attribute that is not
# a managed attribute, record it anyway, no harm in doing so as it
# is not attached to a callable
if self._in_class(node):
element_full_name = self._pop_indent_stack(node, "prop")
code_id = (self._fname, node.lineno)
self._processed_line = node.lineno
self._callables_db[element_full_name] = {
"name": element_full_name,
"type": "prop",
"code_id": code_id,
"last_lineno": None,
}
self._reverse_callables_db[code_id] = element_full_name
# [[[cog
# code = """
# print(
# pcolor(
# 'Visiting property {0} @ {1}'.format(
# element_full_name, code_id[1]
# ),
# 'green'
# )
# )
# """
# cog.out(code)
# ]]]
# [[[end]]]
# Get property actions
self.generic_visit(node)
def visit_ClassDef(self, node):
"""Implement class walker."""
# [[[cog
# cog.out("print(pcolor('Enter class visitor', 'magenta'))")
# ]]]
# [[[end]]]
# Get class information (name, line number, etc.)
element_full_name = self._pop_indent_stack(node, "class")
code_id = (self._fname, node.lineno)
self._processed_line = node.lineno
# Add class entry to dictionaries
self._class_names.append(element_full_name)
self._callables_db[element_full_name] = {
"name": element_full_name,
"type": "class",
"code_id": code_id,
"last_lineno": None,
}
self._reverse_callables_db[code_id] = element_full_name
# [[[cog
# code = """
# print(
# pcolor(
# 'Visiting class {0} @ {1}, indent = {2}'.format(
# element_full_name, code_id[1], self._get_indent(node)
# ),
# 'green'
# )
# )
# """
# cog.out(code)
# ]]]
# [[[end]]]
self.generic_visit(node)
def visit_FunctionDef(self, node):
"""Implement function/method walker."""
# [[[cog
# cog.out("print(pcolor('Enter function visitor', 'magenta'))")
# ]]]
# [[[end]]]
in_class = self._in_class(node)
decorator_list = [
dobj.id if hasattr(dobj, "id") else dobj.attr
for dobj in node.decorator_list
if hasattr(dobj, "id") or hasattr(dobj, "attr")
]
node.decorator_list = []
# Callable can be:
# a) A class property defined via decorated methods
# b) A class method
# c) A function
# Get callable information (name, line number, etc.)
action = (
"getter"
if "property" in decorator_list
else (
"setter"
if "setter" in decorator_list
else ("deleter" if "deleter" in decorator_list else None)
)
)
element_type = "meth" if in_class else "func"
element_full_name = self._pop_indent_stack(node, element_type, action=action)
code_id = (self._fname, node.lineno)
self._processed_line = node.lineno
self._callables_db[element_full_name] = {
"name": element_full_name,
"type": element_type,
"code_id": code_id,
"last_lineno": None,
}
self._reverse_callables_db[code_id] = element_full_name
# [[[cog
# code = """
# print(
# pcolor(
# 'Visiting callable {0} @ {1}'.format(
# element_full_name, code_id[1]
# ),
# 'green'
# )
# )
# print(pcolor(' in_class = {}'.format(in_class), 'yellow'))
# """
# cog.out(code)
# ]]]
# [[[end]]]
self.generic_visit(node)
# ---- (stray character removed; a second copy of this module follows) ----
# pinspect.py
# Copyright (c) 2013-2019 <NAME>
# See LICENSE for details
# pylint: disable=C0103,C0111,C0411,E0012,E0611
# pylint: disable=F0401,R0205,R0912,R0914,R0916,R1710,R1717,W0212,W0631,W1504
# Standard library imports
from __future__ import print_function
import ast
import collections
import copy
import json
import os
import platform
import re
import sys
import types
# PyPI imports
try: # pragma: no cover
from inspect import Parameter, signature
except ImportError: # pragma: no cover
from funcsigs import Parameter, signature
# Intra-package imports
if sys.hexversion < 0x03000000: # pragma: no cover
from pexdoc.compat2 import _unicode_to_ascii, _readlines, _unicode_char
else: # pragma: no cover
from pexdoc.compat3 import _readlines, _unicode_char
###
# Global constants
###
_PRIVATE_PROP_REGEXP = re.compile("_[^_]+")
###
# Functions
###
def _get_module_name_from_fname(fname):
"""Get module name from module file name."""
fname = fname.replace(".pyc", ".py")
for mobj in sys.modules.values():
if (
hasattr(mobj, "__file__")
and mobj.__file__
and (mobj.__file__.replace(".pyc", ".py") == fname)
):
module_name = mobj.__name__
return module_name
raise RuntimeError("Module could not be found")
def _validate_fname(fname):
"""Validate that a string is a valid file name."""
msg = "Argument `callables_fname` is not valid"
if not isinstance(fname, str):
raise RuntimeError(msg)
try:
if not os.path.exists(fname):
os.access(fname, os.W_OK)
except (TypeError, ValueError): # pragma: no cover
raise RuntimeError(msg)
def get_function_args(func, no_self=False, no_varargs=False):
    """
    Return tuple of the function argument names in the order of the function signature.

    Variable positional arguments are prefixed with ``*`` and variable
    keyword arguments with ``**`` (e.g. ``('self', 'value', '**kwargs')``).

    :param func: Function
    :type func: function object

    :param no_self: Flag that indicates whether the function argument *self*,
                    if present, is included in the output (False) or not (True)
    :type no_self: boolean

    :param no_varargs: Flag that indicates whether keyword arguments are
                       included in the output (True) or not (False)
    :type no_varargs: boolean

    :rtype: tuple
    """
    params = signature(func).parameters
    # Decorate each name with the prefix its parameter kind dictates
    names = []
    for pname, pobj in params.items():
        if pobj.kind == Parameter.VAR_POSITIONAL:
            names.append("*" + pname)
        elif pobj.kind == Parameter.VAR_KEYWORD:
            names.append("**" + pname)
        else:
            names.append(pname)
    # Optionally drop a leading 'self'
    if no_self and names and (names[0] == "self"):
        names = names[1:]
    # Optionally drop *args / **kwargs entries
    if no_varargs:
        names = [item for item in names if not item.startswith("*")]
    return tuple(names)
def get_module_name(module_obj):
    r"""
    Retrieve the module name from a module object.

    :param module_obj: Module object
    :type module_obj: object

    :rtype: string

    :raises:
     * RuntimeError (Argument \`module_obj\` is not valid)

     * RuntimeError (Module object \`*[module_name]*\` could not be found in
       loaded modules)
    """
    # Guard clause: reject anything that is not a module object
    if not is_object_module(module_obj):
        raise RuntimeError("Argument `module_obj` is not valid")
    mod_name = module_obj.__name__
    if mod_name not in sys.modules:
        raise RuntimeError(
            "Module object `{name}` could not be found in loaded modules".format(
                name=mod_name
            )
        )
    return mod_name
def is_object_module(obj):
    """
    Test if the argument is a module object.

    :param obj: Object
    :type obj: any
    :rtype: boolean
    """
    # isinstance (not a type() comparison) so module subclasses also qualify
    module_type = types.ModuleType
    return isinstance(obj, module_type)
def is_special_method(name):
    """
    Test if callable name is a special Python method.

    Only the leading double underscore is checked (e.g. ``'__init'`` also
    tests True), matching the original behavior.

    :param name: Callable name
    :type name: string
    :rtype: boolean
    """
    return name[:2] == "__"
def private_props(obj):
    """
    Yield private, non-callable properties of an object.

    A private property is defined as one that has a single underscore
    (:code:`_`) before its name

    :param obj: Object
    :type obj: object

    :returns: iterator
    """
    # Filter by name first and test callability lazily: the original built
    # three parallel lists and called getattr()/callable() on *every*
    # attribute (including magic methods), which is wasteful and can trigger
    # unrelated descriptors. The name test is equivalent to the previous
    # regexp match against "_[^_]+" (single leading underscore, at least one
    # more character, second character not an underscore).
    for name in dir(obj):
        if (len(name) < 2) or (name[0] != "_") or (name[1] == "_"):
            continue
        if not callable(getattr(obj, name)):
            yield name
###
# Classes
###
class Callables(object):
    r"""
    Generate a list of module callables and get their attributes.

    Callables are functions, classes, methods and class
    properties; attributes are callable type, file name, and lines
    span.

    Information from multiple modules can be stored in the callables
    database of the object by repeatedly calling
    :py:meth:`pexdoc.pinspect.Callables.trace` with different module file
    names. A :py:class:`pexdoc.pinspect.Callables` object retains knowledge of
    which modules have been traced so repeated calls to
    :py:meth:`pexdoc.pinspect.Callables.trace` with the *same* module object
    will *not* result in module re-traces (and the consequent performance hit)

    :param fnames: File names of the modules to trace. If None no immediate
                   tracing is done
    :type fnames: list of strings or None

    :raises:
     * OSError (File *[fname]* could not be found)

     * RuntimeError (Argument \`fnames\` is not valid)
    """

    # pylint: disable=R0903
    def __init__(self, fnames=None):  # noqa
        # Full callable name -> attributes dictionary (type, code_id, ...)
        self._callables_db = {}
        # (file name, start line) -> full callable name
        self._reverse_callables_db = {}
        # Module name -> list of callable attribute dictionaries
        self._modules_dict = {}
        # Traced file name -> {'name', 'date', 'classes'} bookkeeping
        self._fnames = {}
        self._module_names = []
        self._class_names = []
        if fnames:
            self.trace(fnames)

    def __add__(self, other):
        """
        Merge two objects.

        :raises: RuntimeError (Conflicting information between objects)

        For example:

            >>> import pexdoc.pcontracts, pexdoc.exh, pexdoc.pinspect, sys
            >>> obj1 = pexdoc.pinspect.Callables(
            ...     [sys.modules['pexdoc.exh'].__file__]
            ... )
            >>> obj2 = pexdoc.pinspect.Callables(
            ...     [sys.modules['pexdoc.pcontracts'].__file__]
            ... )
            >>> obj3 = pexdoc.pinspect.Callables([
            ...     sys.modules['pexdoc.exh'].__file__,
            ...     sys.modules['pexdoc.pcontracts'].__file__,
            ... ])
            >>> obj1 == obj3
            False
            >>> obj1 == obj2
            False
            >>> obj1+obj2 == obj3
            True
        """
        self._check_intersection(other)
        robj = Callables()
        robj._callables_db = copy.deepcopy(self._callables_db)
        robj._callables_db.update(copy.deepcopy(other._callables_db))
        robj._reverse_callables_db = copy.deepcopy(self._reverse_callables_db)
        robj._reverse_callables_db.update(copy.deepcopy(other._reverse_callables_db))
        robj._modules_dict = copy.deepcopy(self._modules_dict)
        robj._modules_dict.update(copy.deepcopy(other._modules_dict))
        robj._module_names = list(set(self._module_names[:] + other._module_names[:]))
        robj._class_names = list(set(self._class_names[:] + other._class_names[:]))
        robj._fnames = copy.deepcopy(self._fnames)
        robj._fnames.update(copy.deepcopy(other._fnames))
        return robj

    def __bool__(self):  # pragma: no cover
        """
        Return :code:`False` if no modules have been traced, :code:`True` otherwise.

        For example:

            >>> from __future__ import print_function
            >>> import pexdoc.pcontracts, pexdoc.pinspect, sys
            >>> obj = pexdoc.pinspect.Callables()
            >>> if obj:
            ...     print('Boolean test returned: True')
            ... else:
            ...     print('Boolean test returned: False')
            Boolean test returned: False
            >>> obj.trace([sys.modules['pexdoc.pcontracts'].__file__])
            >>> if obj:
            ...     print('Boolean test returned: True')
            ... else:
            ...     print('Boolean test returned: False')
            Boolean test returned: True
        """
        return bool(self._module_names)

    def __copy__(self):
        """
        Copy object.

        For example:

            >>> import copy, pexdoc.exh, pexdoc.pinspect, sys
            >>> obj1 = pexdoc.pinspect.Callables(
            ...     [sys.modules['pexdoc.exh'].__file__]
            ... )
            >>> obj2 = copy.copy(obj1)
            >>> obj1 == obj2
            True
        """
        cobj = Callables()
        # Deep-copy every single-underscore, non-callable attribute
        for prop_name in private_props(self):
            setattr(cobj, prop_name, copy.deepcopy(getattr(self, prop_name)))
        return cobj

    # NOTE(review): __eq__ without a matching __hash__ makes instances
    # unhashable on Python 3 -- presumably intentional; confirm
    def __eq__(self, other):
        """
        Test object equality.

        For example:

            >>> import pexdoc.pcontracts, pexdoc.exh, pexdoc.pinspect, sys
            >>> obj1 = pexdoc.pinspect.Callables(
            ...     [sys.modules['pexdoc.exh'].__file__]
            ... )
            >>> obj2 = pexdoc.pinspect.Callables(
            ...     [sys.modules['pexdoc.exh'].__file__]
            ... )
            >>> obj3 = pexdoc.pinspect.Callables(
            ...     [sys.modules['pexdoc.pcontracts'].__file__]
            ... )
            >>> obj1 == obj2
            True
            >>> obj1 == obj3
            False
            >>> 5 == obj3
            False
        """
        return isinstance(other, Callables) and all(
            [
                sorted(getattr(self, attr)) == sorted(getattr(other, attr))
                for attr in private_props(self)
            ]
        )

    def __iadd__(self, other):
        """
        Merge an object into an existing object.

        :raises: RuntimeError (Conflicting information between objects)

        For example:

            >>> import pexdoc.pcontracts, pexdoc.exh, pexdoc.pinspect, sys
            >>> obj1 = pexdoc.pinspect.Callables(
            ...     [sys.modules['pexdoc.exh'].__file__]
            ... )
            >>> obj2 = pexdoc.pinspect.Callables(
            ...     [sys.modules['pexdoc.pcontracts'].__file__]
            ... )
            >>> obj3 = pexdoc.pinspect.Callables([
            ...     sys.modules['pexdoc.exh'].__file__,
            ...     sys.modules['pexdoc.pcontracts'].__file__,
            ... ])
            >>> obj1 == obj3
            False
            >>> obj1 == obj2
            False
            >>> obj1 += obj2
            >>> obj1 == obj3
            True
        """
        self._check_intersection(other)
        self._callables_db.update(copy.deepcopy(other._callables_db))
        self._reverse_callables_db.update(copy.deepcopy(other._reverse_callables_db))
        self._modules_dict.update(copy.deepcopy(other._modules_dict))
        self._module_names = list(set(self._module_names + other._module_names[:]))
        self._class_names = list(set(self._class_names + other._class_names[:]))
        self._fnames.update(copy.deepcopy(other._fnames))
        return self

    def __nonzero__(self):  # pragma: no cover
        """
        Return :code:`False` if no modules have been traced, :code:`True` otherwise.

        For example:

            >>> from __future__ import print_function
            >>> import pexdoc.pcontracts, pexdoc.pinspect, sys
            >>> obj = pexdoc.pinspect.Callables()
            >>> if obj:
            ...     print('Boolean test returned: True')
            ... else:
            ...     print('Boolean test returned: False')
            Boolean test returned: False
            >>> obj.trace([sys.modules['pexdoc.pcontracts'].__file__])
            >>> if obj:
            ...     print('Boolean test returned: True')
            ... else:
            ...     print('Boolean test returned: False')
            Boolean test returned: True
        """
        # Python 2 counterpart of __bool__
        return bool(self._module_names)

    def __repr__(self):
        """
        Return a string with the expression needed to re-create the object.

        For example:

            >>> import pexdoc.exh, pexdoc.pinspect, sys
            >>> obj1 = pexdoc.pinspect.Callables(
            ...     [sys.modules['pexdoc.exh'].__file__]
            ... )
            >>> repr(obj1)  #doctest: +ELLIPSIS
            "pexdoc.pinspect.Callables(['...exh.py'])"
            >>> exec("obj2="+repr(obj1))
            >>> obj1 == obj2
            True
        """
        return "pexdoc.pinspect.Callables({0})".format(sorted(self._fnames.keys()))

    def __str__(self):
        """
        Return a string with a detailed description of the object's contents.

        For example:

            >>> from __future__ import print_function
            >>> import pexdoc.pinspect, os, sys
            >>> import docs.support.pinspect_example_1
            >>> cobj = pexdoc.pinspect.Callables([
            ...     sys.modules['docs.support.pinspect_example_1'].__file__
            ... ])
            >>> print(cobj)  #doctest: +ELLIPSIS
            Modules:
               ...pinspect_example_1
            Classes:
               ...pinspect_example_1.my_func.MyClass
            ...pinspect_example_1.my_func: func (10-29)
            ...pinspect_example_1.my_func.MyClass: class (13-29)
            ...pinspect_example_1.my_func.MyClass.__init__: meth (21-23)
            ...pinspect_example_1.my_func.MyClass._get_value: meth (24-26)
            ...pinspect_example_1.my_func.MyClass.value: prop (27-29)
            ...pinspect_example_1.print_name: func (30-31)

        The numbers in parenthesis indicate the line number in which the
        callable starts and ends within the file it is defined in
        """
        if self._module_names:
            ret = []
            # List traced modules
            ret.append("Modules:")
            for module_name in sorted(self._module_names):
                ret.append("   {0}".format(module_name))
            # List traced classes
            if self._class_names:
                ret.append("Classes:")
                for class_name in sorted(self._class_names):
                    ret.append("   {0}".format(class_name))
            # List traced callables (methods, functions, properties)
            for entry in sorted(self._modules_dict):
                dict_value = self._modules_dict[entry]
                for value in sorted(dict_value, key=lambda x: x["code_id"][1]):
                    start, stop = value["code_id"][1], value["last_lineno"]
                    cname, ctype = value["name"], value["type"]
                    # Single-line callables show only the start line number
                    range_lines = (start, ["", "-" + str(stop)][start != stop])
                    crange = " ({0}{1})".format(*range_lines)
                    ret.append("{0}: {1}{2}".format(cname, ctype, crange))
            return "\n".join(ret)
        return ""

    def _check_intersection(self, other):
        """Check that intersection of two objects has the same information."""
        # pylint: disable=C0123
        props = ["_callables_db", "_reverse_callables_db", "_modules_dict"]
        for prop in props:
            self_dict = getattr(self, prop)
            other_dict = getattr(other, prop)
            keys_self = set(self_dict.keys())
            keys_other = set(other_dict.keys())
            # Only keys present in both objects can conflict
            for key in keys_self & keys_other:
                svalue = self_dict[key]
                ovalue = other_dict[key]
                same_type = type(svalue) == type(ovalue)
                if same_type:
                    list_comp = isinstance(svalue, list) and any(
                        [item not in svalue for item in ovalue]
                    )
                    str_comp = isinstance(svalue, str) and svalue != ovalue
                    dict_comp = isinstance(svalue, dict) and svalue != ovalue
                    comp = any([list_comp, str_comp, dict_comp])
                if (not same_type) or (same_type and comp):
                    emsg = "Conflicting information between objects"
                    raise RuntimeError(emsg)

    def _get_callables_db(self):
        """Get callables_db property."""
        return self._callables_db

    def get_callable_from_line(self, module_file, lineno):
        """Get the callable that the line number belongs to."""
        module_name = _get_module_name_from_fname(module_file)
        # Trace the module on demand if it has not been traced yet
        if module_name not in self._modules_dict:
            self.trace([module_file])
        ret = None
        # Sort callables by starting line number
        iobj = sorted(self._modules_dict[module_name], key=lambda x: x["code_id"][1])
        for value in iobj:
            if value["code_id"][1] <= lineno <= value["last_lineno"]:
                ret = value["name"]
            elif value["code_id"][1] > lineno:
                break
        # Fall back to the module itself when no callable spans the line
        return ret if ret else module_name

    def _get_reverse_callables_db(self):
        """Getter reverse_callables_db property."""
        return self._reverse_callables_db

    def load(self, callables_fname):
        r"""
        Load traced modules information from a `JSON <http://www.json.org/>`_ file.

        The loaded module information is merged with any existing module information

        :param callables_fname: File name
        :type  callables_fname: :ref:`FileNameExists`

        :raises:
         * OSError (File *[fname]* could not be found)

         * RuntimeError (Argument \\`callables_fname\\` is not valid)
        """
        # Validate file name
        _validate_fname(callables_fname)
        if not os.path.exists(callables_fname):
            raise OSError("File {0} could not be found".format(callables_fname))
        with open(callables_fname, "r") as fobj:
            fdict = json.load(fobj)
        if sys.hexversion < 0x03000000:  # pragma: no cover
            fdict = _unicode_to_ascii(fdict)
        self._callables_db.update(fdict["_callables_db"])
        # Reverse the tuple-to-string conversion that the save method
        # does due to the fact that JSON keys need to be strings and the
        # keys of the reverse callable dictionary are tuples where the first
        # item is a file name and the second item is the starting line of the
        # callable within that file (dictionary value)
        rdict = {}
        for key, value in fdict["_reverse_callables_db"].items():
            # key looks like "('fname', lineno)"; strip parentheses/quotes
            tokens = key[1:-1].split(",")
            key = tokens[0].strip()[1:-1]
            if platform.system().lower() == "windows":  # pragma: no cover
                # Collapse doubled backslashes until the path is stable
                while True:
                    tmp = key
                    key = key.replace("\\\\", "\\")
                    if tmp == key:
                        break
            rdict[(key, int(tokens[1]))] = value
        self._reverse_callables_db.update(rdict)
        self._modules_dict.update(fdict["_modules_dict"])
        self._fnames.update(fdict["_fnames"])
        self._module_names.extend(fdict["_module_names"])
        self._class_names.extend(fdict["_class_names"])
        # De-duplicate after the merge
        self._module_names = sorted(list(set(self._module_names)))
        self._class_names = sorted(list(set(self._class_names)))

    def refresh(self):
        """Re-traces modules modified since the time they were traced."""
        self.trace(list(self._fnames.keys()), _refresh=True)

    def save(self, callables_fname):
        r"""
        Save traced modules information to a `JSON`_ file.

        If the file exists it is overwritten

        :param callables_fname: File name
        :type  callables_fname: :ref:`FileName`

        :raises: RuntimeError (Argument \\`fname\\` is not valid)
        """
        # Validate file name
        _validate_fname(callables_fname)
        # JSON keys have to be strings but the reverse callables dictionary
        # keys are tuples, where the first item is a file name and the
        # second item is the starting line of the callable within that file
        # (dictionary value), thus need to convert the key to a string
        items = self._reverse_callables_db.items()
        fdict = {
            "_callables_db": self._callables_db,
            "_reverse_callables_db": dict([(str(k), v) for k, v in items]),
            "_modules_dict": self._modules_dict,
            "_fnames": self._fnames,
            "_module_names": self._module_names,
            "_class_names": self._class_names,
        }
        with open(callables_fname, "w") as fobj:
            json.dump(fdict, fobj)

    def trace(self, fnames, _refresh=False):
        r"""
        Generate a list of module callables and gets their attributes.

        Callables are functions, classes, methods and class properties; their
        attributes are callable type, file name, and lines span

        :param fnames: File names of the modules to trace
        :type  fnames: list

        :raises:
         * OSError (File *[fname]* could not be found)

         * RuntimeError (Argument \`fnames\` is not valid)
        """
        # pylint: disable=R0101
        if fnames and (not isinstance(fnames, list)):
            raise RuntimeError("Argument `fnames` is not valid")
        if fnames and any([not isinstance(item, str) for item in fnames]):
            raise RuntimeError("Argument `fnames` is not valid")
        for fname in fnames:
            if not os.path.exists(fname):
                raise OSError("File {0} could not be found".format(fname))
        fnames = [item.replace(".pyc", ".py") for item in fnames]
        # Minimal stand-in for an AST node, used to force-close the last
        # callable of each file
        bobj = collections.namedtuple("Bundle", ["lineno", "col_offset"])
        for fname in fnames:
            # Re-trace when the file is new, or when refreshing and the file
            # on disk is newer than the recorded trace date
            if (fname not in self._fnames) or (
                _refresh
                and (fname in self._fnames)
                and (self._fnames[fname]["date"] < os.path.getmtime(fname))
            ):
                module_name = (
                    _get_module_name_from_fname(fname)
                    if not _refresh
                    else self._fnames[fname]["name"]
                )
                # Remove old module information if it is going to be refreshed
                if _refresh:
                    self._module_names.pop(self._module_names.index(module_name))
                    for cls in self._fnames[fname]["classes"]:
                        self._class_names.pop(self._class_names.index(cls))
                    dlist = []
                    for key, value in self._reverse_callables_db.items():
                        if key[0] == fname:
                            dlist.append(key)
                            try:
                                del self._callables_db[value]
                            except KeyError:
                                pass
                    for item in set(dlist):
                        del self._reverse_callables_db[item]
                lines = _readlines(fname)
                # Eliminate all Unicode characters till the first ASCII
                # character is found in first line of file, to deal with
                # Unicode-encoded source files
                for num, char in enumerate(lines[0]):  # pragma: no cover
                    if not _unicode_char(char):
                        break
                lines[0] = lines[0][num:]
                tree = ast.parse("".join(lines))
                aobj = _AstTreeScanner(module_name, fname, lines)
                aobj.visit(tree)
                # Create a fake callable at the end of the file to properly
                # 'close', i.e. assign a last line number to the last
                # callable in file
                fake_node = bobj(len(lines) + 1, -1)
                aobj._close_callable(fake_node, force=True)
                self._class_names += aobj._class_names[:]
                self._module_names.append(module_name)
                self._callables_db.update(aobj._callables_db)
                self._reverse_callables_db.update(aobj._reverse_callables_db)
                # Split into modules
                self._modules_dict[module_name] = []
                iobj = [
                    item
                    for item in self._callables_db.values()
                    if item["name"].startswith(module_name + ".")
                ]
                for entry in iobj:
                    self._modules_dict[module_name].append(entry)
                self._fnames[fname] = {
                    "name": module_name,
                    "date": os.path.getmtime(fname),
                    "classes": aobj._class_names[:],
                }

    # Managed attributes
    callables_db = property(_get_callables_db, doc="Module(s) callables database")
    """
    Return the callables database.

    :rtype: dictionary

    The callable database is a dictionary that has the following structure:

     * **full callable name** *(string)* -- Dictionary key. Elements in the
       callable path are separated by periods (:code:`'.'`). For example, method
       :code:`my_method()` from class
       :code:`MyClass` from module :code:`my_module` appears as
       :code:`'my_module.MyClass.my_method'`

     * **callable properties** *(dictionary)* -- Dictionary value. The elements
       of this dictionary are:

      * **type** *(string)* -- :code:`'class'` for classes, :code:`'meth'` for
        methods, :code:`'func'` for functions or :code:`'prop'` for properties
        or class attributes

      * **code_id** *(tuple or None)* -- A tuple with the following items:

        * **file name** *(string)* -- the first item contains the file name
          where the callable can be found

        * **line number** *(integer)* -- the second item contains the line
          number in which the callable code starts (including decorators)

      * **last_lineno** *(integer)* -- line number in which the callable code
        ends (including blank lines and comments regardless of their
        indentation level)
    """

    reverse_callables_db = property(
        _get_reverse_callables_db, doc="Reverse module(s) callables database"
    )
    """
    Returns the reverse callables database

    :rtype: dictionary

    The reverse callable database is a dictionary that has the following
    structure:

     * **callable id** *(tuple)* -- Dictionary key. Two-element tuple in which
       the first tuple item is the file name where the callable is defined
       and the second tuple item is the line number where the callable
       definition starts

     * **full callable name** *(string)* -- Dictionary value. Elements in the
       callable path are separated by periods (:code:`'.'`). For example,
       method :code:`my_method()` from class :code:`MyClass` from module
       :code:`my_module` appears as :code:`'my_module.MyClass.my_method'`
    """
# [[[cog
# code = """
# def pcolor(text, color, indent=0):
# esc_dict = {
# 'black':30, 'red':31, 'green':32, 'yellow':33, 'blue':34,
# 'magenta':35, 'cyan':36, 'white':37, 'none':-1
# }
# color = color.lower()
# if esc_dict[color] != -1:
# return (
# '\033[{color_code}m{indent}{text}\033[0m'.format(
# color_code=esc_dict[color], indent=' '*indent, text=text
# )
# )
# return '{indent}{text}'.format(indent=' '*indent, text=text)
# """
# cog.out(code)
# ]]]
# [[[end]]]
class _AstTreeScanner(ast.NodeVisitor):
"""Get all callables from a given module by traversing abstract syntax tree."""
# pylint: disable=R0902,W0702
    def __init__(self, mname, fname, lines):
        """Initialize scanner state for one module.

        :param mname: Module name, used as the prefix of full callable names
        :param fname: Module file name (``.pyc`` normalized to ``.py``)
        :param lines: List with the raw source lines of the module
        """
        super(_AstTreeScanner, self).__init__()
        self._lines = lines
        # Captures the leading whitespace of a source line (indent detection)
        self._wsregexp = re.compile(r"^(\s*).+")
        self._fname = fname.replace(".pyc", ".py")
        self._module = mname
        # Stack of currently-open scopes; the module scope is always at the
        # bottom and is never popped
        self._indent_stack = [
            {"level": 0, "type": "module", "prefix": "", "full_name": None, "lineno": 0}
        ]
        self._callables_db = {}
        self._reverse_callables_db = {}
        self._class_names = []
        # Highest line number already attributed to a callable definition
        self._processed_line = 0
def _close_callable(self, node, force=False):
"""Record last line number of callable."""
# Only nodes that have a line number can be considered for closing
# callables. Similarly, only nodes with lines greater than the one
# already processed can be considered for closing callables
try:
lineno = node.lineno
except AttributeError:
return
if lineno <= self._processed_line:
return
# [[[cog
# code = """
# print(pcolor('Close callable @ line = {0}'.format(lineno), 'green'))
# """
# cog.out(code)
# ]]]
# [[[end]]]
# Extract node name for property closing. Once a property is found,
# it can only be closed out by a node type that has a name
name = ""
try:
name = (
node.name
if hasattr(node, "name")
else (
node.targets[0].id
if hasattr(node.targets[0], "id")
else node.targets[0].value.id
)
)
except AttributeError:
pass
# Traverse backwards through call stack and close callables as needed
indent = self._get_indent(node)
count = -1
# [[[cog
# code = """
# print(
# pcolor(
# ' Name {0} @ {1}, indent = {2}'.format(
# name if name else 'None', lineno, indent
# ),
# 'yellow'
# )
# )
# """
# cog.out(code)
# ]]]
# [[[end]]]
dlist = []
while count >= -len(self._indent_stack):
element_full_name = self._indent_stack[count]["full_name"]
edict = self._callables_db.get(element_full_name, None)
stack_indent = self._indent_stack[count]["level"]
open_callable = element_full_name and (not edict["last_lineno"])
# [[[cog
# code = """
# print(
# pcolor(
# ' Name {0}, indent, {1}, stack_indent {2}'.format(
# element_full_name, indent, stack_indent
# ),
# 'yellow'
# )
# )
# """
# cog.out(code)
# ]]]
# [[[end]]]
if open_callable and (
force
or (indent < stack_indent)
or (
(indent == stack_indent)
and (
(edict["type"] != "prop")
or (
(edict["type"] == "prop")
and (name and (name != element_full_name))
)
)
)
):
# [[[cog
# code = """
# print(
# pcolor(
# ' Closing {0} @ {1}'.format(
# element_full_name, lineno-1
# ),
# 'yellow'
# )
# )
# """
# cog.out(code)
# ]]]
# [[[end]]]
edict["last_lineno"] = lineno - 1
dlist.append(count)
if indent > stack_indent:
break
count -= 1
# Callables have to be removed from stack when they are closed,
# otherwise if a callable is subsequently followed after a few
# lines by another callable at a further indentation level (like a for
# loop) the second callable would incorrectly appear within the scope
# of the first callable
stack = self._indent_stack
stack_length = len(self._indent_stack)
dlist = [item for item in dlist if stack[item]["type"] != "module"]
for item in dlist:
del self._indent_stack[stack_length + item]
def _get_indent(self, node):
"""Get node indentation level."""
lineno = node.lineno
if lineno > len(self._lines):
return -1
wsindent = self._wsregexp.match(self._lines[lineno - 1])
return len(wsindent.group(1))
def _in_class(self, node):
"""Find if callable is function or method."""
# Move left one indentation level and check if that callable is a class
indent = self._get_indent(node)
for indent_dict in reversed(self._indent_stack): # pragma: no branch
if (indent_dict["level"] < indent) or (indent_dict["type"] == "module"):
return indent_dict["type"] == "class"
def _pop_indent_stack(self, node, node_type=None, action=None):
"""Get callable full name."""
indent = self._get_indent(node)
indent_stack = copy.deepcopy(self._indent_stack)
# Find enclosing scope
while (len(indent_stack) > 1) and (
(
(indent <= indent_stack[-1]["level"])
and (indent_stack[-1]["type"] != "module")
)
or (indent_stack[-1]["type"] == "prop")
):
self._close_callable(node)
indent_stack.pop()
# Construct new callable name
name = (
(
node.targets[0].id
if hasattr(node.targets[0], "id")
else node.targets[0].value.id
)
if node_type == "prop"
else node.name
)
element_full_name = ".".join(
[self._module]
+ [
indent_dict["prefix"]
for indent_dict in indent_stack
if indent_dict["type"] != "module"
]
+ [name]
) + ("({0})".format(action) if action else "")
# Add new callable entry to indentation stack
self._indent_stack = indent_stack
self._indent_stack.append(
{
"level": indent,
"prefix": name,
"type": node_type,
"full_name": element_full_name,
"lineno": node.lineno,
}
)
return element_full_name
def generic_visit(self, node):
"""Implement generic node."""
# [[[cog
# cog.out("print(pcolor('Enter generic visitor', 'magenta'))")
# ]]]
# [[[end]]]
# A generic visitor that potentially closes callables is needed to
# close enclosed callables that are not at the end of the enclosing
# callable, otherwise the ending line of the enclosed callable would
# be the ending line of the enclosing callable, which would be
# incorrect
self._close_callable(node)
super(_AstTreeScanner, self).generic_visit(node)
def visit_arguments(self, node):
# Decorated callables go to visit_FunctionDef in the first line of the
# decorator, but the actual function definition would go the generic
# visitor if it is not caught when processing the function arguments.
# This would close the callable prematurely, so the argument walk needs
# to be intercepted and suppressed
pass
def visit_Assign(self, node):
"""
Implement assignment walker.
Parse class properties defined via the property() function
"""
# [[[cog
# cog.out("print(pcolor('Enter assign visitor', 'magenta'))")
# ]]]
# [[[end]]]
# ###
# Class-level assignment may also be a class attribute that is not
# a managed attribute, record it anyway, no harm in doing so as it
# is not attached to a callable
if self._in_class(node):
element_full_name = self._pop_indent_stack(node, "prop")
code_id = (self._fname, node.lineno)
self._processed_line = node.lineno
self._callables_db[element_full_name] = {
"name": element_full_name,
"type": "prop",
"code_id": code_id,
"last_lineno": None,
}
self._reverse_callables_db[code_id] = element_full_name
# [[[cog
# code = """
# print(
# pcolor(
# 'Visiting property {0} @ {1}'.format(
# element_full_name, code_id[1]
# ),
# 'green'
# )
# )
# """
# cog.out(code)
# ]]]
# [[[end]]]
# Get property actions
self.generic_visit(node)
def visit_ClassDef(self, node):
"""Implement class walker."""
# [[[cog
# cog.out("print(pcolor('Enter class visitor', 'magenta'))")
# ]]]
# [[[end]]]
# Get class information (name, line number, etc.)
element_full_name = self._pop_indent_stack(node, "class")
code_id = (self._fname, node.lineno)
self._processed_line = node.lineno
# Add class entry to dictionaries
self._class_names.append(element_full_name)
self._callables_db[element_full_name] = {
"name": element_full_name,
"type": "class",
"code_id": code_id,
"last_lineno": None,
}
self._reverse_callables_db[code_id] = element_full_name
# [[[cog
# code = """
# print(
# pcolor(
# 'Visiting class {0} @ {1}, indent = {2}'.format(
# element_full_name, code_id[1], self._get_indent(node)
# ),
# 'green'
# )
# )
# """
# cog.out(code)
# ]]]
# [[[end]]]
self.generic_visit(node)
def visit_FunctionDef(self, node):
"""Implement function/method walker."""
# [[[cog
# cog.out("print(pcolor('Enter function visitor', 'magenta'))")
# ]]]
# [[[end]]]
in_class = self._in_class(node)
decorator_list = [
dobj.id if hasattr(dobj, "id") else dobj.attr
for dobj in node.decorator_list
if hasattr(dobj, "id") or hasattr(dobj, "attr")
]
node.decorator_list = []
# Callable can be:
# a) A class property defined via decorated methods
# b) A class method
# c) A function
# Get callable information (name, line number, etc.)
action = (
"getter"
if "property" in decorator_list
else (
"setter"
if "setter" in decorator_list
else ("deleter" if "deleter" in decorator_list else None)
)
)
element_type = "meth" if in_class else "func"
element_full_name = self._pop_indent_stack(node, element_type, action=action)
code_id = (self._fname, node.lineno)
self._processed_line = node.lineno
self._callables_db[element_full_name] = {
"name": element_full_name,
"type": element_type,
"code_id": code_id,
"last_lineno": None,
}
self._reverse_callables_db[code_id] = element_full_name
# [[[cog
# code = """
# print(
# pcolor(
# 'Visiting callable {0} @ {1}'.format(
# element_full_name, code_id[1]
# ),
# 'green'
# )
# )
# print(pcolor(' in_class = {}'.format(in_class), 'yellow'))
# """
# cog.out(code)
# ]]]
# [[[end]]]
self.generic_visit(node)
|
en
| 0.621668
|
# pinspect.py # Copyright (c) 2013-2019 <NAME> # See LICENSE for details # pylint: disable=C0103,C0111,C0411,E0012,E0611 # pylint: disable=F0401,R0205,R0912,R0914,R0916,R1710,R1717,W0212,W0631,W1504 # Standard library imports # PyPI imports # pragma: no cover # pragma: no cover # Intra-package imports # pragma: no cover # pragma: no cover ### # Global constants ### ### # Functions ### Get module name from module file name. Validate that a string is a valid file name. # pragma: no cover Return tuple of the function argument names in the order of the function signature. :param func: Function :type func: function object :param no_self: Flag that indicates whether the function argument *self*, if present, is included in the output (False) or not (True) :type no_self: boolean :param no_varargs: Flag that indicates whether keyword arguments are included in the output (True) or not (False) :type no_varargs: boolean :rtype: tuple For example: >>> import pexdoc.pinspect >>> class MyClass(object): ... def __init__(self, value, **kwargs): ... pass ... >>> pexdoc.pinspect.get_function_args(MyClass.__init__) ('self', 'value', '**kwargs') >>> pexdoc.pinspect.get_function_args( ... MyClass.__init__, no_self=True ... ) ('value', '**kwargs') >>> pexdoc.pinspect.get_function_args( ... MyClass.__init__, no_self=True, no_varargs=True ... ) ('value',) >>> pexdoc.pinspect.get_function_args( ... MyClass.__init__, no_varargs=True ... ) ('self', 'value') # Mark positional and/or keyword arguments (if any) # Filter out 'self' from parameter list (optional) # Filter out positional or keyword arguments (optional) Retrieve the module name from a module object. 
:param module_obj: Module object :type module_obj: object :rtype: string :raises: * RuntimeError (Argument \`module_obj\` is not valid) * RuntimeError (Module object \`*[module_name]*\` could not be found in loaded modules) For example: >>> import pexdoc.pinspect >>> pexdoc.pinspect.get_module_name(sys.modules['pexdoc.pinspect']) 'pexdoc.pinspect' Test if the argument is a module object. :param obj: Object :type obj: any :rtype: boolean Test if callable name is a special Python method. :param name: Callable name :type name: string :rtype: boolean Yield private properties of an object. A private property is defined as one that has a single underscore (:code:`_`) before its name :param obj: Object :type obj: object :returns: iterator # Get private properties but NOT magic methods ### # Classes ### Generate a list of module callables and get their attributes. Callables are functions, classes, methods and class properties; attributes are callable type, file name, and lines span. Information from multiple modules can be stored in the callables database of the object by repeatedly calling :py:meth:`pexdoc.pinspect.Callables.trace` with different module file names. A :py:class:`pexdoc.pinspect.Callables` object retains knowledge of which modules have been traced so repeated calls to :py:meth:`pexdoc.pinspect.Callables.trace` with the *same* module object will *not* result in module re-traces (and the consequent performance hit) :param fnames: File names of the modules to trace. If None no immediate tracing is done :type fnames: list of strings or None :raises: * OSError (File *[fname]* could not be found) * RuntimeError (Argument \`fnames\` is not valid) # pylint: disable=R0903 # noqa Merge two objects. :raises: RuntimeError (Conflicting information between objects) For example: >>> import pexdoc.pcontracts, pexdoc.exh, pexdoc.pinspect, sys >>> obj1 = pexdoc.pinspect.Callables( ... [sys.modules['pexdoc.exh'].__file__] ... ) >>> obj2 = pexdoc.pinspect.Callables( ... 
[sys.modules['pexdoc.pcontracts'].__file__] ... ) >>> obj3 = pexdoc.pinspect.Callables([ ... sys.modules['pexdoc.exh'].__file__, ... sys.modules['pexdoc.pcontracts'].__file__, ... ]) >>> obj1 == obj3 False >>> obj1 == obj2 False >>> obj1+obj2 == obj3 True # pragma: no cover Return :code:`False` if no modules have been traced, :code:`True` otherwise. For example: >>> from __future__ import print_function >>> import pexdoc.pcontracts, pexdoc.pinspect, sys >>> obj = pexdoc.pinspect.Callables() >>> if obj: ... print('Boolean test returned: True') ... else: ... print('Boolean test returned: False') Boolean test returned: False >>> obj.trace([sys.modules['pexdoc.pcontracts'].__file__]) >>> if obj: ... print('Boolean test returned: True') ... else: ... print('Boolean test returned: False') Boolean test returned: True Copy object. For example: >>> import copy, pexdoc.exh, pexdoc.pinspect, sys >>> obj1 = pexdoc.pinspect.Callables( ... [sys.modules['pexdoc.exh'].__file__] ... ) >>> obj2 = copy.copy(obj1) >>> obj1 == obj2 True Test object equality. For example: >>> import pexdoc.pcontracts, pexdoc.exh, pexdoc.pinspect, sys >>> obj1 = pexdoc.pinspect.Callables( ... [sys.modules['pexdoc.exh'].__file__] ... ) >>> obj2 = pexdoc.pinspect.Callables( ... [sys.modules['pexdoc.exh'].__file__] ... ) >>> obj3 = pexdoc.pinspect.Callables( ... [sys.modules['pexdoc.pcontracts'].__file__] ... ) >>> obj1 == obj2 True >>> obj1 == obj3 False >>> 5 == obj3 False Merge an object into an existing object. :raises: RuntimeError (Conflicting information between objects) For example: >>> import pexdoc.pcontracts, pexdoc.exh, pexdoc.pinspect, sys >>> obj1 = pexdoc.pinspect.Callables( ... [sys.modules['pexdoc.exh'].__file__] ... ) >>> obj2 = pexdoc.pinspect.Callables( ... [sys.modules['pexdoc.pcontracts'].__file__] ... ) >>> obj3 = pexdoc.pinspect.Callables([ ... sys.modules['pexdoc.exh'].__file__, ... sys.modules['pexdoc.pcontracts'].__file__, ... 
]) >>> obj1 == obj3 False >>> obj1 == obj2 False >>> obj1 += obj2 >>> obj1 == obj3 True # pragma: no cover Return :code:`False` if no modules have been traced, :code:`True` otherwise. For example: >>> from __future__ import print_function >>> import pexdoc.pcontracts, pexdoc.pinspect, sys >>> obj = pexdoc.pinspect.Callables() >>> if obj: ... print('Boolean test returned: True') ... else: ... print('Boolean test returned: False') Boolean test returned: False >>> obj.trace([sys.modules['pexdoc.pcontracts'].__file__]) >>> if obj: ... print('Boolean test returned: True') ... else: ... print('Boolean test returned: False') Boolean test returned: True Return a string with the expression needed to re-create the object. For example: >>> import pexdoc.exh, pexdoc.pinspect, sys >>> obj1 = pexdoc.pinspect.Callables( ... [sys.modules['pexdoc.exh'].__file__] ... ) >>> repr(obj1) #doctest: +ELLIPSIS "pexdoc.pinspect.Callables(['...exh.py'])" >>> exec("obj2="+repr(obj1)) >>> obj1 == obj2 True Return a string with a detailed description of the object's contents. For example: >>> from __future__ import print_function >>> import pexdoc.pinspect, os, sys >>> import docs.support.pinspect_example_1 >>> cobj = pexdoc.pinspect.Callables([ ... sys.modules['docs.support.pinspect_example_1'].__file__ ... 
]) >>> print(cobj) #doctest: +ELLIPSIS Modules: ...pinspect_example_1 Classes: ...pinspect_example_1.my_func.MyClass ...pinspect_example_1.my_func: func (10-29) ...pinspect_example_1.my_func.MyClass: class (13-29) ...pinspect_example_1.my_func.MyClass.__init__: meth (21-23) ...pinspect_example_1.my_func.MyClass._get_value: meth (24-26) ...pinspect_example_1.my_func.MyClass.value: prop (27-29) ...pinspect_example_1.print_name: func (30-31) The numbers in parenthesis indicate the line number in which the callable starts and ends within the file it is defined in # List traced modules # List traced classes # List traced callables (methods, functions, properties) Check that intersection of two objects has the same information. # pylint: disable=C0123 Get callables_db property. Get the callable that the line number belongs to. # Sort callables by starting line number Getter reverse_callables_db property. Load traced modules information from a `JSON <http://www.json.org/>`_ file. The loaded module information is merged with any existing module information :param callables_fname: File name :type callables_fname: :ref:`FileNameExists` :raises: * OSError (File *[fname]* could not be found) * RuntimeError (Argument \\`callables_fname\\` is not valid) # Validate file name # pragma: no cover # Reverse the tuple-to-string conversion that the save method # does due to the fact that JSON keys need to be strings and the # keys of the reverse callable dictionary are tuples where the first # item is a file name and the second item is the starting line of the # callable within that file (dictionary value) # pragma: no cover Re-traces modules modified since the time they were traced. Save traced modules information to a `JSON`_ file. 
If the file exists it is overwritten :param callables_fname: File name :type callables_fname: :ref:`FileName` :raises: RuntimeError (Argument \\`fname\\` is not valid) # Validate file name # JSON keys have to be strings but the reverse callables dictionary # keys are tuples, where the first item is a file name and the # second item is the starting line of the callable within that file # (dictionary value), thus need to convert the key to a string Generate a list of module callables and gets their attributes. Callables are functions, classes, methods and class properties; their attributes are callable type, file name, and lines span :param fnames: File names of the modules to trace :type fnames: list :raises: * OSError (File *[fname]* could not be found) * RuntimeError (Argument \`fnames\` is not valid) # pylint: disable=R0101 # Remove old module information if it is going to be refreshed # Eliminate all Unicode characters till the first ASCII # character is found in first line of file, to deal with # Unicode-encoded source files # pragma: no cover # Create a fake callable at the end of the file to properly # 'close', i.e. assign a last line number to the last # callable in file # Split into modules # Managed attributes Return the callables database. :rtype: dictionary The callable database is a dictionary that has the following structure: * **full callable name** *(string)* -- Dictionary key. Elements in the callable path are separated by periods (:code:`'.'`). For example, method :code:`my_method()` from class :code:`MyClass` from module :code:`my_module` appears as :code:`'my_module.MyClass.my_method'` * **callable properties** *(dictionary)* -- Dictionary value. 
The elements of this dictionary are: * **type** *(string)* -- :code:`'class'` for classes, :code:`'meth'` for methods, :code:`'func'` for functions or :code:`'prop'` for properties or class attributes * **code_id** *(tuple or None)* -- A tuple with the following items: * **file name** *(string)* -- the first item contains the file name where the callable can be found * **line number** *(integer)* -- the second item contains the line number in which the callable code starts (including decorators) * **last_lineno** *(integer)* -- line number in which the callable code ends (including blank lines and comments regardless of their indentation level) Returns the reverse callables database :rtype: dictionary The reverse callable database is a dictionary that has the following structure: * **callable id** *(tuple)* -- Dictionary key. Two-element tuple in which the first tuple item is the file name where the callable is defined and the second tuple item is the line number where the callable definition starts * **full callable name** *(string)* -- Dictionary value. Elements in the callable path are separated by periods (:code:`'.'`). For example, method :code:`my_method()` from class :code:`MyClass` from module :code:`my_module` appears as :code:`'my_module.MyClass.my_method'` # [[[cog # code = """ # def pcolor(text, color, indent=0): # esc_dict = { # 'black':30, 'red':31, 'green':32, 'yellow':33, 'blue':34, # 'magenta':35, 'cyan':36, 'white':37, 'none':-1 # } # color = color.lower() # if esc_dict[color] != -1: # return ( # '\033[{color_code}m{indent}{text}\033[0m'.format( # color_code=esc_dict[color], indent=' '*indent, text=text # ) # ) # return '{indent}{text}'.format(indent=' '*indent, text=text) # """ # cog.out(code) # ]]] # [[[end]]] Get all callables from a given module by traversing abstract syntax tree. # pylint: disable=R0902,W0702 Record last line number of callable. # Only nodes that have a line number can be considered for closing # callables. 
Similarly, only nodes with lines greater than the one # already processed can be considered for closing callables # [[[cog # code = """ # print(pcolor('Close callable @ line = {0}'.format(lineno), 'green')) # """ # cog.out(code) # ]]] # [[[end]]] # Extract node name for property closing. Once a property is found, # it can only be closed out by a node type that has a name # Traverse backwards through call stack and close callables as needed # [[[cog # code = """ # print( # pcolor( # ' Name {0} @ {1}, indent = {2}'.format( # name if name else 'None', lineno, indent # ), # 'yellow' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] # [[[cog # code = """ # print( # pcolor( # ' Name {0}, indent, {1}, stack_indent {2}'.format( # element_full_name, indent, stack_indent # ), # 'yellow' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] # [[[cog # code = """ # print( # pcolor( # ' Closing {0} @ {1}'.format( # element_full_name, lineno-1 # ), # 'yellow' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] # Callables have to be removed from stack when they are closed, # otherwise if a callable is subsequently followed after a few # lines by another callable at a further indentation level (like a for # loop) the second callable would incorrectly appear within the scope # of the first callable Get node indentation level. Find if callable is function or method. # Move left one indentation level and check if that callable is a class # pragma: no branch Get callable full name. # Find enclosing scope # Construct new callable name # Add new callable entry to indentation stack Implement generic node. 
# [[[cog # cog.out("print(pcolor('Enter generic visitor', 'magenta'))") # ]]] # [[[end]]] # A generic visitor that potentially closes callables is needed to # close enclosed callables that are not at the end of the enclosing # callable, otherwise the ending line of the enclosed callable would # be the ending line of the enclosing callable, which would be # incorrect # Decorated callables go to visit_FunctionDef in the first line of the # decorator, but the actual function definition would go the generic # visitor if it is not caught when processing the function arguments. # This would close the callable prematurely, so the argument walk needs # to be intercepted and suppressed Implement assignment walker. Parse class properties defined via the property() function # [[[cog # cog.out("print(pcolor('Enter assign visitor', 'magenta'))") # ]]] # [[[end]]] # ### # Class-level assignment may also be a class attribute that is not # a managed attribute, record it anyway, no harm in doing so as it # is not attached to a callable # [[[cog # code = """ # print( # pcolor( # 'Visiting property {0} @ {1}'.format( # element_full_name, code_id[1] # ), # 'green' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] # Get property actions Implement class walker. # [[[cog # cog.out("print(pcolor('Enter class visitor', 'magenta'))") # ]]] # [[[end]]] # Get class information (name, line number, etc.) # Add class entry to dictionaries # [[[cog # code = """ # print( # pcolor( # 'Visiting class {0} @ {1}, indent = {2}'.format( # element_full_name, code_id[1], self._get_indent(node) # ), # 'green' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] Implement function/method walker. # [[[cog # cog.out("print(pcolor('Enter function visitor', 'magenta'))") # ]]] # [[[end]]] # Callable can be: # a) A class property defined via decorated methods # b) A class method # c) A function # Get callable information (name, line number, etc.) 
# [[[cog # code = """ # print( # pcolor( # 'Visiting callable {0} @ {1}'.format( # element_full_name, code_id[1] # ), # 'green' # ) # ) # print(pcolor(' in_class = {}'.format(in_class), 'yellow')) # """ # cog.out(code) # ]]] # [[[end]]]
| 2.049369
| 2
|
corehq/motech/repeaters/tests/test_repeater.py
|
shyamkumarlchauhan/commcare-hq
| 0
|
6626795
|
import json
import uuid
from collections import namedtuple
from datetime import datetime, timedelta
from django.test import SimpleTestCase, TestCase, override_settings
import attr
from mock import Mock, patch
from casexml.apps.case.mock import CaseBlock, CaseFactory
from casexml.apps.case.xform import get_case_ids_from_form
from couchforms.const import DEVICE_LOG_XMLNS
from dimagi.utils.parsing import json_format_datetime
from corehq.apps.accounting.models import SoftwarePlanEdition
from corehq.apps.accounting.tests.utils import DomainSubscriptionMixin
from corehq.apps.accounting.utils import clear_plan_version_cache
from corehq.apps.app_manager.tests.util import TestXmlMixin
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.locations.models import LocationType, SQLLocation
from corehq.apps.receiverwrapper.exceptions import (
DuplicateFormatException,
IgnoreDocument,
)
from corehq.apps.receiverwrapper.util import submit_form_locally
from corehq.apps.users.models import CommCareUser
from corehq.form_processor.interfaces.dbaccessors import (
CaseAccessors,
FormAccessors,
)
from corehq.form_processor.tests.utils import (
FormProcessorTestUtils,
run_with_all_backends,
)
from corehq.motech.repeaters.const import (
MAX_RETRY_WAIT,
MIN_RETRY_WAIT,
RECORD_SUCCESS_STATE,
)
from corehq.motech.repeaters.dbaccessors import (
delete_all_repeat_records,
delete_all_repeaters,
)
from corehq.motech.repeaters.models import (
CaseRepeater,
FormRepeater,
LocationRepeater,
Repeater,
RepeatRecord,
ShortFormRepeater,
UserRepeater,
_get_retry_interval,
)
from corehq.motech.repeaters.repeater_generators import (
BasePayloadGenerator,
FormRepeaterXMLPayloadGenerator,
RegisterGenerator,
)
from corehq.motech.repeaters.tasks import (
check_repeaters,
process_repeat_record,
)
MockResponse = namedtuple('MockResponse', 'status_code reason')
CASE_ID = "ABC123CASEID"
USER_ID = 'mojo-jojo'
XFORM_XML_TEMPLATE = """<?xml version='1.0' ?>
<data xmlns:jrm="http://dev.commcarehq.org/jr/xforms" xmlns="{}">
<woman_name>Alpha</woman_name>
<husband_name>Beta</husband_name>
<meta>
<deviceID>O2XLT0WZW97W1A91E2W1Y0NJG</deviceID>
<timeStart>2011-10-01T15:25:18.404-04</timeStart>
<timeEnd>2011-10-01T15:26:29.551-04</timeEnd>
<username>admin</username>
<userID>{}</userID>
<instanceID>{}</instanceID>
</meta>
{}
</data>
"""
class BaseRepeaterTest(TestCase, DomainSubscriptionMixin):
    """Shared fixtures for repeater tests.

    Creates a domain with a PRO subscription and two canned XForm payloads:
    one that creates a test case and one that updates it.
    """

    domain = 'base-domain'

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        creation_block = CaseBlock(
            case_id=CASE_ID,
            create=True,
            case_type="repeater_case",
            case_name="ABC 123",
        ).as_text()
        rename_block = CaseBlock(
            case_id=CASE_ID,
            create=False,
            case_name="ABC 234",
        ).as_text()
        xmlns = "https://www.commcarehq.org/test/repeater/"
        cls.instance_id = uuid.uuid4().hex
        # Form that creates the case
        cls.xform_xml = XFORM_XML_TEMPLATE.format(
            xmlns, USER_ID, cls.instance_id, creation_block,
        )
        # Form that updates the same case under a fresh instance id
        cls.update_xform_xml = XFORM_XML_TEMPLATE.format(
            xmlns, USER_ID, uuid.uuid4().hex, rename_block,
        )
        cls.domain_obj = create_domain(cls.domain)
        # DATA_FORWARDING is on PRO and above
        cls.setup_subscription(cls.domain, SoftwarePlanEdition.PRO)

    @classmethod
    def tearDownClass(cls):
        """Tear down the subscription and domain created in setUpClass."""
        cls.teardown_subscriptions()
        cls.domain_obj.delete()
        clear_plan_version_cache()
        super().tearDownClass()

    @classmethod
    def post_xml(cls, xml, domain_name):
        """Submit ``xml`` as a form to ``domain_name`` and return the result."""
        return submit_form_locally(xml, domain_name)

    @classmethod
    def repeat_records(cls, domain_name):
        """Return the domain's repeat records, including enqueued ones.

        Enqueued repeat records have next_check set 48 hours in the future,
        so look one hour past that.
        """
        cutoff = datetime.utcnow() + timedelta(hours=48 + 1)
        return RepeatRecord.all(domain=domain_name, due_before=cutoff)
class RepeaterTest(BaseRepeaterTest):
domain = "repeater-test"
def setUp(self):
super(RepeaterTest, self).setUp()
self.case_repeater = CaseRepeater(
domain=self.domain,
url='case-repeater-url',
)
self.case_repeater.save()
self.form_repeater = FormRepeater(
domain=self.domain,
url='form-repeater-url',
)
self.form_repeater.save()
self.log = []
with patch('corehq.motech.repeaters.models.simple_post',
return_value=MockResponse(status_code=500, reason="Borked")) as mock_fire:
self.post_xml(self.xform_xml, self.domain)
self.initial_fire_call_count = mock_fire.call_count
    def tearDown(self):
        """Remove the repeaters and all cases, forms and repeat records
        created during the test."""
        self.case_repeater.delete()
        self.form_repeater.delete()
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        delete_all_repeat_records()
        super(RepeaterTest, self).tearDown()
def repeat_records(self):
return super(RepeaterTest, self).repeat_records(self.domain)
@run_with_all_backends
def test_skip_device_logs(self):
devicelog_xml = XFORM_XML_TEMPLATE.format(DEVICE_LOG_XMLNS, USER_ID, '1234', '')
self.post_xml(devicelog_xml, self.domain)
for repeat_record in self.repeat_records():
self.assertNotEqual(repeat_record.payload_id, '1234')
@run_with_all_backends
def test_skip_duplicates(self):
"""
Ensure that submitting a duplicate form does not create extra RepeatRecords
"""
self.assertEqual(len(self.repeat_records()), 2)
# this form is already submitted during setUp so a second submission should be a duplicate
form = self.post_xml(self.xform_xml, self.domain).xform
self.assertTrue(form.is_duplicate)
self.assertEqual(len(self.repeat_records()), 2)
@run_with_all_backends
def test_repeater_failed_sends(self):
"""
This tests records that fail are requeued later
"""
def now():
return datetime.utcnow()
repeat_records = self.repeat_records()
self.assertEqual(len(repeat_records), 2)
for repeat_record in repeat_records:
with patch(
'corehq.motech.repeaters.models.simple_post',
return_value=MockResponse(status_code=404, reason='Not Found')) as mock_post:
repeat_record.fire()
self.assertEqual(mock_post.call_count, 1)
# Enqueued repeat records have next_check incremented by 48 hours
next_check_time = now() + timedelta(minutes=60) + timedelta(hours=48)
repeat_records = RepeatRecord.all(
domain=self.domain,
due_before=now() + timedelta(minutes=15),
)
self.assertEqual(len(repeat_records), 0)
repeat_records = RepeatRecord.all(
domain=self.domain,
due_before=next_check_time,
)
self.assertEqual(len(repeat_records), 2)
@run_with_all_backends
def test_update_failure_next_check(self):
now = datetime.utcnow()
record = RepeatRecord(domain=self.domain, next_check=now)
self.assertIsNone(record.last_checked)
attempt = record.make_set_next_try_attempt(None)
record.add_attempt(attempt)
self.assertTrue(record.last_checked > now)
self.assertEqual(record.next_check, record.last_checked + MIN_RETRY_WAIT)
    @run_with_all_backends
    def test_repeater_successful_send(self):
        """A 200 response marks both records as succeeded and clears
        next_check; a subsequent case update creates fresh records."""
        repeat_records = self.repeat_records()
        for repeat_record in repeat_records:
            with patch(
                    'corehq.motech.repeaters.models.simple_post',
                    return_value=MockResponse(status_code=200, reason='No Reason')) as mock_post:
                repeat_record.fire()
                self.assertEqual(mock_post.call_count, 1)
                # Pin the exact arguments the repeater passes to simple_post
                mock_post.assert_called_with(
                    self.domain,
                    repeat_record.repeater.get_url(repeat_record),
                    repeat_record.get_payload(),
                    headers=repeat_record.repeater.get_headers(repeat_record),
                    auth=repeat_record.repeater.get_auth(),
                    verify=repeat_record.repeater.verify,
                    notify_addresses=[],
                    payload_id=repeat_record.payload_id,
                )
        # The following is pretty fickle and depends on which of
        # - corehq.motech.repeaters.signals
        # - casexml.apps.case.signals
        # gets loaded first.
        # This is deterministic but easily affected by minor code changes
        repeat_records = self.repeat_records()
        for repeat_record in repeat_records:
            self.assertEqual(repeat_record.succeeded, True)
            self.assertEqual(repeat_record.next_check, None)
        self.assertEqual(len(self.repeat_records()), 0)
        # Updating the case should create new repeat records for both repeaters
        self.post_xml(self.update_xform_xml, self.domain)
        self.assertEqual(len(self.repeat_records()), 2)
    @run_with_all_backends
    def test_check_repeat_records(self):
        """check_repeaters() must not re-fire records already queued for a
        future next_check."""
        self.assertEqual(len(self.repeat_records()), 2)
        # setUp already fired each record once (and they failed with a 500).
        self.assertEqual(self.initial_fire_call_count, 2)
        with patch('corehq.motech.repeaters.models.simple_post') as mock_fire:
            check_repeaters()
            self.assertEqual(mock_fire.call_count, 0)
    @run_with_all_backends
    def test_repeat_record_status_check(self):
        """Cancelled records are skipped by check_repeaters(), but can still be
        force-sent; success after a force-send is preserved."""
        self.assertEqual(len(self.repeat_records()), 2)
        # Do not trigger cancelled records
        for repeat_record in self.repeat_records():
            repeat_record.cancelled = True
            repeat_record.save()
        with patch('corehq.motech.repeaters.models.simple_post') as mock_fire:
            check_repeaters()
            self.assertEqual(mock_fire.call_count, 0)
        # trigger force send records if not cancelled and tries not exhausted
        for repeat_record in self.repeat_records():
            with patch('corehq.motech.repeaters.models.simple_post',
                       return_value=MockResponse(status_code=200, reason='')
                       ) as mock_fire:
                repeat_record.fire(force_send=True)
                self.assertEqual(mock_fire.call_count, 1)
        # all records should be in SUCCESS state after force try
        for repeat_record in self.repeat_records():
            self.assertEqual(repeat_record.state, RECORD_SUCCESS_STATE)
            self.assertEqual(repeat_record.overall_tries, 1)
        # not trigger records succeeded triggered after cancellation
        with patch('corehq.motech.repeaters.models.simple_post') as mock_fire:
            check_repeaters()
            self.assertEqual(mock_fire.call_count, 0)
        for repeat_record in self.repeat_records():
            self.assertEqual(repeat_record.state, RECORD_SUCCESS_STATE)
    @run_with_all_backends
    def test_process_repeat_record_locking(self):
        """Records whose next_check is in the future are not enqueued; resetting
        next_check to now makes them eligible again."""
        self.assertEqual(len(self.repeat_records()), 2)
        with patch('corehq.motech.repeaters.tasks.process_repeat_record') as mock_process:
            check_repeaters()
            self.assertEqual(mock_process.delay.call_count, 0)
        for record in self.repeat_records():
            # Resetting next_check should allow them to be requeued
            record.next_check = datetime.utcnow()
            record.save()
        with patch('corehq.motech.repeaters.tasks.process_repeat_record') as mock_process:
            check_repeaters()
            self.assertEqual(mock_process.delay.call_count, 2)
    @run_with_all_backends
    def test_automatic_cancel_repeat_record(self):
        """Exhausting max_possible_tries cancels the record; requeue() resets
        the try counter and restores a next_check time."""
        repeat_record = self.case_repeater.register(CaseAccessors(self.domain).get_case(CASE_ID))
        # register() counts as the first try.
        self.assertEqual(1, repeat_record.overall_tries)
        with patch('corehq.motech.repeaters.models.simple_post', side_effect=Exception('Boom!')):
            for __ in range(repeat_record.max_possible_tries - repeat_record.overall_tries):
                repeat_record.fire()
        self.assertEqual(True, repeat_record.cancelled)
        repeat_record.requeue()
        self.assertEqual(0, repeat_record.overall_tries)
        self.assertNotEqual(None, repeat_record.next_check)
class FormPayloadGeneratorTest(BaseRepeaterTest, TestXmlMixin):
    """Checks that the XML payload generator reproduces the submitted form."""

    domain = "form-payload"

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.repeater = FormRepeater(domain=cls.domain, url="form-repeater-url")
        cls.repeatergenerator = FormRepeaterXMLPayloadGenerator(repeater=cls.repeater)
        cls.repeater.save()

    @classmethod
    def tearDownClass(cls):
        cls.repeater.delete()
        super().tearDownClass()

    def tearDown(self):
        # Drop per-test state: submitted forms/cases and queued repeat records.
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_get_payload(self):
        """The generated payload is the original XForm XML, unchanged."""
        self.post_xml(self.xform_xml, self.domain)
        form = FormAccessors(self.domain).get_form(self.instance_id)
        self.assertXmlEqual(self.xform_xml, self.repeatergenerator.get_payload(None, form))
class FormRepeaterTest(BaseRepeaterTest, TestXmlMixin):
    """A FormRepeater's repeat record payload is the submitted form XML."""

    domain = "form-repeater"

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.repeater = FormRepeater(domain=cls.domain, url="form-repeater-url")
        cls.repeater.save()

    @classmethod
    def tearDownClass(cls):
        cls.repeater.delete()
        super().tearDownClass()

    def tearDown(self):
        FormProcessorTestUtils.delete_all_cases(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_payload(self):
        """Submitting a form queues a record whose payload is that form's XML."""
        self.post_xml(self.xform_xml, self.domain)
        record = self.repeat_records(self.domain).all()[0]
        self.assertXMLEqual(self.xform_xml, record.get_payload().decode('utf-8'))
class ShortFormRepeaterTest(BaseRepeaterTest, TestXmlMixin):
    """ShortFormRepeater sends a small JSON summary instead of the full form."""

    domain = "sh-form-rep"

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.repeater = ShortFormRepeater(domain=cls.domain, url="short-form-repeater-url")
        cls.repeater.save()

    @classmethod
    def tearDownClass(cls):
        cls.repeater.delete()
        super().tearDownClass()

    def tearDown(self):
        FormProcessorTestUtils.delete_all_cases(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_payload(self):
        """The JSON payload carries received_on, form_id and touched case ids."""
        form = self.post_xml(self.xform_xml, self.domain).xform
        payload = self.repeat_records(self.domain).all()[0].get_payload()
        expected = {
            'received_on': json_format_datetime(form.received_on),
            'form_id': form.form_id,
            'case_ids': list(get_case_ids_from_form(form)),
        }
        self.assertEqual(json.loads(payload), expected)
class CaseRepeaterTest(BaseRepeaterTest, TestXmlMixin):
    """Covers CaseRepeater payload shape and white/black-list filtering."""

    domain = "case-rep"

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.repeater = CaseRepeater(
            domain=cls.domain,
            url="case-repeater-url",
        )
        cls.repeater.save()

    @classmethod
    def tearDownClass(cls):
        cls.repeater.delete()
        super().tearDownClass()

    def tearDown(self):
        FormProcessorTestUtils.delete_all_cases(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_case_close_format(self):
        """Case create and case close each queue a payload with the matching
        caseblock element."""
        # create a case
        self.post_xml(self.xform_xml, self.domain)
        repeat_records = self.repeat_records(self.domain).all()
        payload = repeat_records[0].get_payload()
        self.assertXmlHasXpath(payload, '//*[local-name()="case"]')
        self.assertXmlHasXpath(payload, '//*[local-name()="create"]')
        # close the case
        CaseFactory(self.domain).close_case(CASE_ID)
        close_payload = self.repeat_records(self.domain).all()[1].get_payload()
        self.assertXmlHasXpath(close_payload, '//*[local-name()="case"]')
        self.assertXmlHasXpath(close_payload, '//*[local-name()="close"]')

    @run_with_all_backends
    def test_excluded_case_types_are_not_forwarded(self):
        """Only cases whose type is on the white list generate repeat records."""
        self.repeater.white_listed_case_types = ['planet']
        self.repeater.save()
        white_listed_case = CaseBlock(
            case_id="a_case_id",
            create=True,
            case_type="planet",
        ).as_xml()
        CaseFactory(self.domain).post_case_blocks([white_listed_case])
        self.assertEqual(1, len(self.repeat_records(self.domain).all()))
        non_white_listed_case = CaseBlock(
            case_id="b_case_id",
            create=True,
            case_type="cat",
        ).as_xml()
        CaseFactory(self.domain).post_case_blocks([non_white_listed_case])
        # Count unchanged: the "cat" case was filtered out.
        self.assertEqual(1, len(self.repeat_records(self.domain).all()))

    @run_with_all_backends
    def test_black_listed_user_cases_do_not_forward(self):
        """Case changes made by black-listed users are never forwarded."""
        self.repeater.black_listed_users = ['black_listed_user']
        self.repeater.save()
        black_list_user_id = 'black_listed_user'
        # case-creations by black-listed users shouldn't be forwarded
        black_listed_user_case = CaseBlock(
            case_id="b_case_id",
            create=True,
            case_type="planet",
            owner_id="owner",
            user_id=black_list_user_id
        ).as_text()
        xform_xml = XFORM_XML_TEMPLATE.format(
            "https://www.commcarehq.org/test/repeater/",
            black_list_user_id,
            '1234',
            black_listed_user_case,
        )
        self.post_xml(xform_xml, self.domain)
        self.assertEqual(0, len(self.repeat_records(self.domain).all()))
        # case-creations by normal users should be forwarded
        normal_user_case = CaseBlock(
            case_id="a_case_id",
            create=True,
            case_type="planet",
            owner_id="owner",
            user_id="normal_user"
        ).as_text()
        xform_xml = XFORM_XML_TEMPLATE.format(
            "https://www.commcarehq.org/test/repeater/",
            USER_ID,
            '6789',
            normal_user_case,
        )
        self.post_xml(xform_xml, self.domain)
        self.assertEqual(1, len(self.repeat_records(self.domain).all()))
        # case-updates by black-listed users shouldn't be forwarded
        black_listed_user_case = CaseBlock(
            case_id="b_case_id",
            case_type="planet",
            owner_id="owner",
            user_id=black_list_user_id,
        ).as_text()
        xform_xml = XFORM_XML_TEMPLATE.format(
            "https://www.commcarehq.org/test/repeater/",
            black_list_user_id,
            '2345',
            black_listed_user_case,
        )
        self.post_xml(xform_xml, self.domain)
        self.assertEqual(1, len(self.repeat_records(self.domain).all()))
        # case-updates by normal users should be forwarded
        normal_user_case = CaseBlock(
            case_id="a_case_id",
            case_type="planet",
            owner_id="owner",
            user_id="normal_user",
        ).as_text()
        xform_xml = XFORM_XML_TEMPLATE.format(
            "https://www.commcarehq.org/test/repeater/",
            USER_ID,
            '3456',
            normal_user_case,
        )
        self.post_xml(xform_xml, self.domain)
        self.assertEqual(2, len(self.repeat_records(self.domain).all()))
class RepeaterFailureTest(BaseRepeaterTest):
    """Failure handling: payload exceptions and send exceptions record a
    failure reason; a later successful send clears the failure."""

    domain = 'repeat-fail'

    def setUp(self):
        super().setUp()
        self.repeater = CaseRepeater(
            domain=self.domain,
            url='case-repeater-url',
        )
        self.repeater.save()
        self.post_xml(self.xform_xml, self.domain)

    def tearDown(self):
        self.repeater.delete()
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_get_payload_exception(self):
        """An exception raised while building the payload is recorded on the
        record and re-raised to the caller."""
        repeat_record = self.repeater.register(CaseAccessors(self.domain).get_case(CASE_ID))
        with self.assertRaises(Exception):
            with patch.object(CaseRepeater, 'get_payload', side_effect=Exception('Boom!')):
                repeat_record.fire()
        self.assertEqual(repeat_record.failure_reason, 'Boom!')
        self.assertFalse(repeat_record.succeeded)

    @run_with_all_backends
    def test_failure(self):
        """A send exception marks the record failed; a 200 response later
        marks it succeeded."""
        repeat_record = self.repeater.register(CaseAccessors(self.domain).get_case(CASE_ID))
        with patch('corehq.motech.repeaters.models.simple_post', side_effect=Exception('Boom!')):
            repeat_record.fire()
        self.assertEqual(repeat_record.failure_reason, 'Boom!')
        self.assertFalse(repeat_record.succeeded)
        # Should be marked as successful after a successful run
        with patch('corehq.motech.repeaters.models.simple_post') as mock_simple_post:
            mock_simple_post.return_value.status_code = 200
            repeat_record.fire()
        self.assertTrue(repeat_record.succeeded)
class IgnoreDocumentTest(BaseRepeaterTest):
    """A generator raising IgnoreDocument makes fire() treat the record as
    successfully handled (nothing is sent)."""

    domain = 'ignore-doc'

    @classmethod
    def setUpClass(cls):
        super(IgnoreDocumentTest, cls).setUpClass()

        # Register a throwaway payload format whose generator always asks for
        # the document to be ignored.
        class NewFormGenerator(BasePayloadGenerator):
            format_name = 'new_format'
            format_label = 'XML'

            def get_payload(self, repeat_record, payload_doc):
                raise IgnoreDocument

        RegisterGenerator.get_collection(FormRepeater).add_new_format(NewFormGenerator)

    def setUp(self):
        super().setUp()
        self.repeater = FormRepeater(
            domain=self.domain,
            url='form-repeater-url',
            format='new_format'
        )
        self.repeater.save()

    def tearDown(self):
        self.repeater.delete()
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_ignore_document(self):
        """
        When get_payload raises IgnoreDocument, fire should call update_success
        """
        repeat_records = RepeatRecord.all(
            domain=self.domain,
        )
        for repeat_record_ in repeat_records:
            repeat_record_.fire()
            # Success semantics: no further checks scheduled, marked succeeded.
            self.assertIsNone(repeat_record_.next_check)
            self.assertTrue(repeat_record_.succeeded)
class TestRepeaterFormat(BaseRepeaterTest):
    """Registration and lookup of custom payload-generator formats."""

    domain = 'test-fmt'

    @classmethod
    def setUpClass(cls):
        super(TestRepeaterFormat, cls).setUpClass()
        cls.payload = 'some random case'

        # A custom format whose generator returns a fixed payload, plus a
        # deprecated alias name that should still resolve.
        class NewCaseGenerator(BasePayloadGenerator):
            format_name = 'new_format'
            format_label = 'XML'
            deprecated_format_names = ('new_format_alias',)

            def get_payload(self, repeat_record, payload_doc):
                return cls.payload

        RegisterGenerator.get_collection(CaseRepeater).add_new_format(NewCaseGenerator)
        cls.new_generator = NewCaseGenerator

    def setUp(self):
        super(TestRepeaterFormat, self).setUp()
        self.post_xml(self.xform_xml, self.domain)
        self.repeater = CaseRepeater(
            domain=self.domain,
            url='case-repeater-url',
            format='new_format',
        )
        self.repeater.save()

    def tearDown(self):
        self.repeater.delete()
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    def test_new_format_same_name(self):
        """Registering a second generator under an existing name must fail."""
        class NewCaseGenerator(BasePayloadGenerator):
            format_name = 'case_xml'
            format_label = 'XML'

            def get_payload(self, repeat_record, payload_doc):
                return self.payload

        with self.assertRaises(DuplicateFormatException):
            RegisterGenerator.get_collection(CaseRepeater).add_new_format(NewCaseGenerator)

    def test_new_format_second_default(self):
        """Only one format may be the default for a repeater type."""
        class NewCaseGenerator(BasePayloadGenerator):
            format_name = 'rubbish'
            format_label = 'XML'

            def get_payload(self, repeat_record, payload_doc):
                return self.payload

        with self.assertRaises(DuplicateFormatException):
            RegisterGenerator.get_collection(CaseRepeater).add_new_format(NewCaseGenerator, is_default=True)

    @run_with_all_backends
    def test_new_format_payload(self):
        """Firing a record posts the custom generator's payload."""
        repeat_record = self.repeater.register(CaseAccessors(self.domain).get_case(CASE_ID))
        with patch('corehq.motech.repeaters.models.simple_post') as mock_post:
            mock_post.return_value.status_code = 200
            repeat_record.fire()
            headers = self.repeater.get_headers(repeat_record)
            mock_post.assert_called_with(
                self.domain,
                self.repeater.url,
                self.payload,
                auth=self.repeater.get_auth(),
                headers=headers,
                notify_addresses=[],
                payload_id='ABC123CASEID',
                verify=self.repeater.verify,
            )

    def test_get_format_by_deprecated_name(self):
        """A deprecated alias still resolves to the registered generator."""
        self.assertIsInstance(CaseRepeater(
            domain=self.domain,
            url='case-repeater-url',
            format='new_format_alias',
        ).generator, self.new_generator)
@override_settings(TESTS_SHOULD_USE_SQL_BACKEND=True)
class UserRepeaterTest(TestCase, DomainSubscriptionMixin):
    """Creating a mobile worker queues a repeat record with the user's JSON."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.domain = 'user-repeater'
        cls.domain_obj = create_domain(name=cls.domain)
        # DATA_FORWARDING is on PRO and above
        cls.setup_subscription(cls.domain, SoftwarePlanEdition.PRO)

    def setUp(self):
        super(UserRepeaterTest, self).setUp()
        self.repeater = UserRepeater(
            domain=self.domain,
            url='super-cool-url',
        )
        self.repeater.save()

    @classmethod
    def tearDownClass(cls):
        cls.teardown_subscriptions()
        cls.domain_obj.delete()
        clear_plan_version_cache()
        super().tearDownClass()

    def tearDown(self):
        super().tearDown()
        delete_all_repeat_records()
        delete_all_repeaters()

    def repeat_records(self):
        # Enqueued repeat records have next_check set 48 hours in the future.
        later = datetime.utcnow() + timedelta(hours=48 + 1)
        return RepeatRecord.all(domain=self.domain, due_before=later)

    def make_user(self, username):
        user = CommCareUser.create(
            self.domain,
            # NOTE(review): the expression below looks garbled by redaction;
            # presumably it builds the full username from `username` and
            # `self.domain` — confirm against the upstream source.
            <EMAIL>".<EMAIL>(username, self.domain),
            "123",
        )
        self.addCleanup(user.delete)
        return user

    def test_trigger(self):
        """One record is queued per created user, carrying the API user JSON."""
        self.assertEqual(0, len(self.repeat_records().all()))
        user = self.make_user("bselmy")
        records = self.repeat_records().all()
        self.assertEqual(1, len(records))
        record = records[0]
        self.assertEqual(
            json.loads(record.get_payload()),
            {
                'id': user._id,
                'username': user.username,
                'first_name': '',
                'last_name': '',
                'default_phone_number': None,
                'user_data': {'commcare_project': self.domain},
                'groups': [],
                'phone_numbers': [],
                'email': '',
                'resource_uri': '/a/user-repeater/api/v0.5/user/{}/'.format(user._id),
            }
        )
@override_settings(TESTS_SHOULD_USE_SQL_BACKEND=True)
class LocationRepeaterTest(TestCase, DomainSubscriptionMixin):
    """Creating a location queues a repeat record with the serialized location."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.domain = 'loc-repeat'
        cls.domain_obj = create_domain(name=cls.domain)
        # DATA_FORWARDING is on PRO and above
        cls.setup_subscription(cls.domain, SoftwarePlanEdition.PRO)

    def setUp(self):
        super().setUp()
        self.repeater = LocationRepeater(
            domain=self.domain,
            url='super-cool-url',
        )
        self.repeater.save()
        self.location_type = LocationType.objects.create(
            domain=self.domain,
            name="city",
        )

    @classmethod
    def tearDownClass(cls):
        cls.teardown_subscriptions()
        cls.domain_obj.delete()
        clear_plan_version_cache()
        super().tearDownClass()

    def tearDown(self):
        super().tearDown()
        delete_all_repeat_records()
        delete_all_repeaters()

    def repeat_records(self):
        # Enqueued repeat records have next_check set 48 hours in the future.
        later = datetime.utcnow() + timedelta(hours=48 + 1)
        return RepeatRecord.all(domain=self.domain, due_before=later)

    def make_location(self, name):
        location = SQLLocation.objects.create(
            domain=self.domain,
            name=name,
            site_code=name,
            location_type=self.location_type,
        )
        self.addCleanup(location.delete)
        return location

    def test_trigger(self):
        """One record is queued per created location, with the location JSON."""
        self.assertEqual(0, len(self.repeat_records().all()))
        location = self.make_location('kings_landing')
        records = self.repeat_records().all()
        self.assertEqual(1, len(records))
        record = records[0]
        self.assertEqual(
            json.loads(record.get_payload()),
            {
                '_id': location.location_id,
                'doc_type': 'Location',
                'domain': self.domain,
                'external_id': None,
                'is_archived': False,
                'archived_on': None,
                'last_modified': location.last_modified.isoformat(),
                'latitude': None,
                'lineage': [],
                'location_id': location.location_id,
                'location_type': 'city',
                'location_type_code': 'city',
                'longitude': None,
                'metadata': {},
                'name': location.name,
                'parent_location_id': None,
                'site_code': location.site_code,
            }
        )
class TestRepeaterPause(BaseRepeaterTest):
    """Paused repeaters postpone their records instead of firing them."""

    domain = 'rep-pause'

    def setUp(self):
        super().setUp()
        self.repeater = CaseRepeater(
            domain=self.domain,
            url='case-repeater-url',
        )
        self.repeater.save()
        self.post_xml(self.xform_xml, self.domain)

    @run_with_all_backends
    def test_trigger_when_paused(self):
        """fire() is used while active; postpone_by() is used while paused."""
        # not paused
        with patch.object(RepeatRecord, 'fire') as mock_fire:
            with patch.object(RepeatRecord, 'postpone_by') as mock_postpone_fire:
                # calls process_repeat_record():
                # NOTE(review): register() is given self.domain_obj (a Domain
                # object), not the domain name — confirm CaseAccessors accepts it.
                self.repeat_record = self.repeater.register(CaseAccessors(self.domain_obj).get_case(CASE_ID))
                self.assertEqual(mock_fire.call_count, 1)
                self.assertEqual(mock_postpone_fire.call_count, 0)

                # paused
                self.repeater.pause()
                # re fetch repeat record
                repeat_record_id = self.repeat_record.get_id
                self.repeat_record = RepeatRecord.get(repeat_record_id)
                process_repeat_record(self.repeat_record)
                self.assertEqual(mock_fire.call_count, 1)
                self.assertEqual(mock_postpone_fire.call_count, 1)

                # resumed
                self.repeater.resume()
                # re fetch repeat record
                repeat_record_id = self.repeat_record.get_id
                self.repeat_record = RepeatRecord.get(repeat_record_id)
                process_repeat_record(self.repeat_record)
                self.assertEqual(mock_fire.call_count, 2)
                self.assertEqual(mock_postpone_fire.call_count, 1)

    def tearDown(self):
        self.repeater.delete()
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        delete_all_repeat_records()
        super(TestRepeaterPause, self).tearDown()
class TestRepeaterDeleted(BaseRepeaterTest):
    """Retired repeaters neither fire nor postpone; their records are marked
    deleted."""

    domain = 'rep-deleted'

    def setUp(self):
        super().setUp()
        self.repeater = CaseRepeater(
            domain=self.domain,
            url='case-repeater-url',
        )
        self.repeater.save()
        self.post_xml(self.xform_xml, self.domain)

    def tearDown(self):
        self.repeater.delete()
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_trigger_when_deleted(self):
        """After retire(), processing a record does not fire it."""
        self.repeater.retire()
        with patch.object(RepeatRecord, 'fire') as mock_fire:
            self.repeat_record = self.repeater.register(CaseAccessors(self.domain_obj).get_case(CASE_ID))
            process_repeat_record(self.repeat_record)
            self.assertEqual(mock_fire.call_count, 0)
            self.assertEqual(self.repeat_record.doc_type, "RepeatRecord-Deleted")

    @run_with_all_backends
    def test_paused_then_deleted(self):
        """Deletion wins over pause: record is marked deleted, not postponed."""
        self.repeater.pause()
        self.repeater.retire()
        with patch.object(RepeatRecord, 'fire') as mock_fire:
            self.repeat_record = self.repeater.register(CaseAccessors(self.domain_obj).get_case(CASE_ID))
            process_repeat_record(self.repeat_record)
            self.assertEqual(mock_fire.call_count, 0)
            self.assertEqual(self.repeat_record.doc_type, "RepeatRecord-Deleted")
@attr.s
class Response(object):
    """Minimal stand-in for a requests.Response: only the attributes the
    repeater response-handling code reads."""
    status_code = attr.ib()
    reason = attr.ib()
    content = attr.ib(default=None)  # raw body bytes, or None for no body
    encoding = attr.ib(default='ascii')

    @property
    def text(self):
        """Body decoded with the declared encoding; '' when there is no body."""
        if self.content is None:
            return ''
        return self.content.decode(self.encoding, errors='replace')
class DummyRepeater(Repeater):
    """Concrete Repeater subclass for unit tests."""

    @property
    def generator(self):
        return FormRepeaterXMLPayloadGenerator(self)

    def payload_doc(self, repeat_record):
        # The tests never inspect the payload document, so an empty dict will do.
        return {}
class HandleResponseTests(SimpleTestCase):
    """Which RepeatRecord handler Repeater.handle_response invokes for each
    kind of response value."""

    domain = 'handle-resp'

    def setUp(self):
        self.repeater = DummyRepeater(
            domain=self.domain,
            url="https://example.com/api/",
        )
        self.repeat_record = Mock()

    def _handle_and_check(self, response, *, exception=False, success=False, failure=False):
        """Pass ``response`` to the repeater, then assert exactly which of the
        record's handlers were called."""
        self.repeater.handle_response(response, self.repeat_record)
        expectations = [
            (self.repeat_record.handle_exception, exception),
            (self.repeat_record.handle_success, success),
            (self.repeat_record.handle_failure, failure),
        ]
        for handler, should_be_called in expectations:
            if should_be_called:
                handler.assert_called()
            else:
                handler.assert_not_called()

    def test_handle_ok_response(self):
        resp = Response(status_code=200, reason='OK', content=b'OK')
        self._handle_and_check(resp, success=True)

    def test_handle_true_response(self):
        self._handle_and_check(True, success=True)

    def test_handle_none_response(self):
        self._handle_and_check(None, failure=True)

    def test_handle_500_response(self):
        resp = Response(status_code=500, reason='The core is exposed')
        self._handle_and_check(resp, failure=True)

    def test_handle_exception(self):
        self._handle_and_check(Exception('The core is exposed'), exception=True)
class FormatResponseTests(SimpleTestCase):
    """RepeatRecord._format_response renders '<status>: <reason>.' followed by
    a newline and the decoded body (empty when there is no body)."""

    def test_content_is_ascii(self):
        response = Response(
            status_code=200,
            reason='OK',
            content=b'3.6 roentgen. Not great. Not terrible.'
        )
        formatted = RepeatRecord._format_response(response)
        self.assertEqual(formatted, '200: OK.\n3.6 roentgen. Not great. Not terrible.')

    def test_encoding_is_not_ascii(self):
        # Body bytes are cp855-encoded text; _format_response must honour the
        # response's declared encoding when decoding.
        response = Response(
            status_code=200,
            reason='OK',
            content=b'3,6 \xe1\xa8\xd4\xe5\xac\xa8\xd4\xa0 \xd5\xa8 \xb5\xd6\xe1\xd6\xf5\xd6. '
                    b'\xd5\xa8 \xe3\xe5\xe1\xa0\xf5\xd4\xd6',
            encoding='cp855'
        )
        formatted = RepeatRecord._format_response(response)
        self.assertEqual(formatted, '200: OK.\n3,6 рентгена Не хорошо. Не страшно')

    def test_content_is_None(self):
        response = Response(500, 'The core is exposed')
        formatted = RepeatRecord._format_response(response)
        self.assertEqual(formatted, '500: The core is exposed.\n')
class NotifyAddressesTests(SimpleTestCase):
    """Parsing of the `notify_addresses_str` document field: commas and/or
    whitespace separate addresses; empty segments are dropped."""

    @staticmethod
    def _addresses(doc):
        """Wrap ``doc`` as a DummyRepeater and return its parsed addresses."""
        return DummyRepeater.wrap(doc).notify_addresses

    def test_default(self):
        self.assertEqual(self._addresses({}), [])

    def test_empty(self):
        self.assertEqual(self._addresses({"notify_addresses_str": ""}), [])

    def test_one(self):
        self.assertEqual(self._addresses({"notify_addresses_str": "<EMAIL>"}),
                         ["<EMAIL>"])

    def test_comma(self):
        self.assertEqual(self._addresses({"notify_addresses_str": "<EMAIL>,<EMAIL>"}),
                         ["<EMAIL>", "<EMAIL>"])

    def test_space(self):
        self.assertEqual(self._addresses({"notify_addresses_str": "<EMAIL> <EMAIL>"}),
                         ["<EMAIL>", "<EMAIL>"])

    def test_commaspace(self):
        self.assertEqual(self._addresses({"notify_addresses_str": "<EMAIL>, <EMAIL>"}),
                         ["<EMAIL>", "<EMAIL>"])

    def test_mess(self):
        self.assertEqual(self._addresses({"notify_addresses_str": "<EMAIL>,,, , <EMAIL>"}),
                         ["<EMAIL>", "<EMAIL>"])
class TestGetRetryInterval(SimpleTestCase):
    """_get_retry_interval grows the wait between retries, clamped to the
    [MIN_RETRY_WAIT, MAX_RETRY_WAIT] range."""

    def test_min_interval(self):
        start = fromisoformat("2020-01-01 00:00:00")
        checked = fromisoformat("2020-01-01 00:05:00")
        self.assertEqual(_get_retry_interval(start, checked), MIN_RETRY_WAIT)

    def test_max_interval(self):
        start = fromisoformat("2020-01-01 00:00:00")
        checked = fromisoformat("2020-02-01 00:00:00")
        self.assertEqual(_get_retry_interval(start, checked), MAX_RETRY_WAIT)

    def test_three_times_interval(self):
        start = fromisoformat("2020-01-01 00:00:00")
        checked = fromisoformat("2020-01-01 01:00:00")
        self.assertEqual(_get_retry_interval(start, checked), timedelta(hours=3))

    def test_five_retries(self):
        # (Five retries because RepeatRecord.max_possible_tries is 6)
        schedule = [
            (None, "2020-01-01 00:00:00", 1),
            ("2020-01-01 00:00:00", "2020-01-01 01:00:00", 3),
            ("2020-01-01 01:00:00", "2020-01-01 04:00:00", 9),
            ("2020-01-01 04:00:00", "2020-01-01 13:00:00", 27),
            ("2020-01-01 13:00:00", "2020-01-02 16:00:00", 81),
        ]
        for last_checked, now, hours in schedule:
            last_dt = fromisoformat(last_checked) if last_checked else None
            self.assertEqual(
                _get_retry_interval(last_dt, fromisoformat(now)),
                timedelta(hours=hours),
            )
def fromisoformat(isoformat):
    """
    Return a datetime from a string in ISO 8601 date time format

    >>> fromisoformat("2019-12-31 23:59:59")
    datetime.datetime(2019, 12, 31, 23, 59, 59)
    """
    parser = getattr(datetime, "fromisoformat", None)  # available on Python >= 3.7
    if parser is not None:
        return parser(isoformat)
    # Pre-3.7 fallback: parse the fixed "YYYY-MM-DD HH:MM:SS" layout directly.
    return datetime.strptime(isoformat, "%Y-%m-%d %H:%M:%S")
# --- (garbled chunk-separator character removed; module header follows) ---
import json
import uuid
from collections import namedtuple
from datetime import datetime, timedelta
from django.test import SimpleTestCase, TestCase, override_settings
import attr
from mock import Mock, patch
from casexml.apps.case.mock import CaseBlock, CaseFactory
from casexml.apps.case.xform import get_case_ids_from_form
from couchforms.const import DEVICE_LOG_XMLNS
from dimagi.utils.parsing import json_format_datetime
from corehq.apps.accounting.models import SoftwarePlanEdition
from corehq.apps.accounting.tests.utils import DomainSubscriptionMixin
from corehq.apps.accounting.utils import clear_plan_version_cache
from corehq.apps.app_manager.tests.util import TestXmlMixin
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.locations.models import LocationType, SQLLocation
from corehq.apps.receiverwrapper.exceptions import (
DuplicateFormatException,
IgnoreDocument,
)
from corehq.apps.receiverwrapper.util import submit_form_locally
from corehq.apps.users.models import CommCareUser
from corehq.form_processor.interfaces.dbaccessors import (
CaseAccessors,
FormAccessors,
)
from corehq.form_processor.tests.utils import (
FormProcessorTestUtils,
run_with_all_backends,
)
from corehq.motech.repeaters.const import (
MAX_RETRY_WAIT,
MIN_RETRY_WAIT,
RECORD_SUCCESS_STATE,
)
from corehq.motech.repeaters.dbaccessors import (
delete_all_repeat_records,
delete_all_repeaters,
)
from corehq.motech.repeaters.models import (
CaseRepeater,
FormRepeater,
LocationRepeater,
Repeater,
RepeatRecord,
ShortFormRepeater,
UserRepeater,
_get_retry_interval,
)
from corehq.motech.repeaters.repeater_generators import (
BasePayloadGenerator,
FormRepeaterXMLPayloadGenerator,
RegisterGenerator,
)
from corehq.motech.repeaters.tasks import (
check_repeaters,
process_repeat_record,
)
MockResponse = namedtuple('MockResponse', 'status_code reason')
CASE_ID = "ABC123CASEID"
USER_ID = 'mojo-jojo'
XFORM_XML_TEMPLATE = """<?xml version='1.0' ?>
<data xmlns:jrm="http://dev.commcarehq.org/jr/xforms" xmlns="{}">
<woman_name>Alpha</woman_name>
<husband_name>Beta</husband_name>
<meta>
<deviceID>O2XLT0WZW97W1A91E2W1Y0NJG</deviceID>
<timeStart>2011-10-01T15:25:18.404-04</timeStart>
<timeEnd>2011-10-01T15:26:29.551-04</timeEnd>
<username>admin</username>
<userID>{}</userID>
<instanceID>{}</instanceID>
</meta>
{}
</data>
"""
class BaseRepeaterTest(TestCase, DomainSubscriptionMixin):
    """Shared fixtures for repeater tests: a domain with a PRO subscription
    (data forwarding requires PRO+) and two canned form submissions — one that
    creates CASE_ID and one that updates it."""

    domain = 'base-domain'

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        case_block = CaseBlock(
            case_id=CASE_ID,
            create=True,
            case_type="repeater_case",
            case_name="ABC 123",
        ).as_text()
        update_case_block = CaseBlock(
            case_id=CASE_ID,
            create=False,
            case_name="ABC 234",
        ).as_text()
        cls.instance_id = uuid.uuid4().hex
        cls.xform_xml = XFORM_XML_TEMPLATE.format(
            "https://www.commcarehq.org/test/repeater/",
            USER_ID,
            cls.instance_id,
            case_block
        )
        cls.update_xform_xml = XFORM_XML_TEMPLATE.format(
            "https://www.commcarehq.org/test/repeater/",
            USER_ID,
            uuid.uuid4().hex,
            update_case_block,
        )
        cls.domain_obj = create_domain(cls.domain)
        # DATA_FORWARDING is on PRO and above
        cls.setup_subscription(cls.domain, SoftwarePlanEdition.PRO)

    @classmethod
    def tearDownClass(cls):
        cls.teardown_subscriptions()
        cls.domain_obj.delete()
        clear_plan_version_cache()
        super().tearDownClass()

    @classmethod
    def post_xml(cls, xml, domain_name):
        # Submit a form locally, triggering any registered repeaters.
        return submit_form_locally(xml, domain_name)

    @classmethod
    def repeat_records(cls, domain_name):
        # Enqueued repeat records have next_check set 48 hours in the future.
        later = datetime.utcnow() + timedelta(hours=48 + 1)
        return RepeatRecord.all(domain=domain_name, due_before=later)
class RepeaterTest(BaseRepeaterTest):
domain = "repeater-test"
def setUp(self):
super(RepeaterTest, self).setUp()
self.case_repeater = CaseRepeater(
domain=self.domain,
url='case-repeater-url',
)
self.case_repeater.save()
self.form_repeater = FormRepeater(
domain=self.domain,
url='form-repeater-url',
)
self.form_repeater.save()
self.log = []
with patch('corehq.motech.repeaters.models.simple_post',
return_value=MockResponse(status_code=500, reason="Borked")) as mock_fire:
self.post_xml(self.xform_xml, self.domain)
self.initial_fire_call_count = mock_fire.call_count
def tearDown(self):
self.case_repeater.delete()
self.form_repeater.delete()
FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
delete_all_repeat_records()
super(RepeaterTest, self).tearDown()
def repeat_records(self):
return super(RepeaterTest, self).repeat_records(self.domain)
@run_with_all_backends
def test_skip_device_logs(self):
devicelog_xml = XFORM_XML_TEMPLATE.format(DEVICE_LOG_XMLNS, USER_ID, '1234', '')
self.post_xml(devicelog_xml, self.domain)
for repeat_record in self.repeat_records():
self.assertNotEqual(repeat_record.payload_id, '1234')
@run_with_all_backends
def test_skip_duplicates(self):
"""
Ensure that submitting a duplicate form does not create extra RepeatRecords
"""
self.assertEqual(len(self.repeat_records()), 2)
# this form is already submitted during setUp so a second submission should be a duplicate
form = self.post_xml(self.xform_xml, self.domain).xform
self.assertTrue(form.is_duplicate)
self.assertEqual(len(self.repeat_records()), 2)
@run_with_all_backends
def test_repeater_failed_sends(self):
"""
This tests records that fail are requeued later
"""
def now():
return datetime.utcnow()
repeat_records = self.repeat_records()
self.assertEqual(len(repeat_records), 2)
for repeat_record in repeat_records:
with patch(
'corehq.motech.repeaters.models.simple_post',
return_value=MockResponse(status_code=404, reason='Not Found')) as mock_post:
repeat_record.fire()
self.assertEqual(mock_post.call_count, 1)
# Enqueued repeat records have next_check incremented by 48 hours
next_check_time = now() + timedelta(minutes=60) + timedelta(hours=48)
repeat_records = RepeatRecord.all(
domain=self.domain,
due_before=now() + timedelta(minutes=15),
)
self.assertEqual(len(repeat_records), 0)
repeat_records = RepeatRecord.all(
domain=self.domain,
due_before=next_check_time,
)
self.assertEqual(len(repeat_records), 2)
@run_with_all_backends
def test_update_failure_next_check(self):
now = datetime.utcnow()
record = RepeatRecord(domain=self.domain, next_check=now)
self.assertIsNone(record.last_checked)
attempt = record.make_set_next_try_attempt(None)
record.add_attempt(attempt)
self.assertTrue(record.last_checked > now)
self.assertEqual(record.next_check, record.last_checked + MIN_RETRY_WAIT)
@run_with_all_backends
def test_repeater_successful_send(self):
    """A 200 response marks records succeeded and clears their schedule."""
    repeat_records = self.repeat_records()
    for repeat_record in repeat_records:
        with patch(
                'corehq.motech.repeaters.models.simple_post',
                return_value=MockResponse(status_code=200, reason='No Reason')) as mock_post:
            repeat_record.fire()
            self.assertEqual(mock_post.call_count, 1)
            # Verify the exact forwarding call: URL, payload, headers and
            # auth come from the repeater; payload_id from the record.
            mock_post.assert_called_with(
                self.domain,
                repeat_record.repeater.get_url(repeat_record),
                repeat_record.get_payload(),
                headers=repeat_record.repeater.get_headers(repeat_record),
                auth=repeat_record.repeater.get_auth(),
                verify=repeat_record.repeater.verify,
                notify_addresses=[],
                payload_id=repeat_record.payload_id,
            )

    # The following is pretty fickle and depends on which of
    #   - corehq.motech.repeaters.signals
    #   - casexml.apps.case.signals
    # gets loaded first.
    # This is deterministic but easily affected by minor code changes
    repeat_records = self.repeat_records()
    for repeat_record in repeat_records:
        self.assertEqual(repeat_record.succeeded, True)
        self.assertEqual(repeat_record.next_check, None)

    # Successful records are no longer due, so the queue is empty...
    self.assertEqual(len(self.repeat_records()), 0)

    # ...until an update form registers two fresh records.
    self.post_xml(self.update_xform_xml, self.domain)
    self.assertEqual(len(self.repeat_records()), 2)
@run_with_all_backends
def test_check_repeat_records(self):
    """The periodic task does not re-fire records that are not yet due."""
    self.assertEqual(len(self.repeat_records()), 2)
    self.assertEqual(self.initial_fire_call_count, 2)

    # Both records were already attempted at registration time, so
    # check_repeaters() finds nothing due and must not POST again.
    with patch('corehq.motech.repeaters.models.simple_post') as mock_fire:
        check_repeaters()
        self.assertEqual(mock_fire.call_count, 0)
@run_with_all_backends
def test_repeat_record_status_check(self):
    """Cancelled records are skipped; force_send fires them and the
    resulting SUCCESS state is final."""
    self.assertEqual(len(self.repeat_records()), 2)

    # Do not trigger cancelled records
    for repeat_record in self.repeat_records():
        repeat_record.cancelled = True
        repeat_record.save()
    with patch('corehq.motech.repeaters.models.simple_post') as mock_fire:
        check_repeaters()
        self.assertEqual(mock_fire.call_count, 0)

    # trigger force send records if not cancelled and tries not exhausted
    for repeat_record in self.repeat_records():
        with patch('corehq.motech.repeaters.models.simple_post',
                   return_value=MockResponse(status_code=200, reason='')
                   ) as mock_fire:
            repeat_record.fire(force_send=True)
            self.assertEqual(mock_fire.call_count, 1)

    # all records should be in SUCCESS state after force try
    for repeat_record in self.repeat_records():
        self.assertEqual(repeat_record.state, RECORD_SUCCESS_STATE)
        self.assertEqual(repeat_record.overall_tries, 1)

    # not trigger records succeeded triggered after cancellation
    with patch('corehq.motech.repeaters.models.simple_post') as mock_fire:
        check_repeaters()
        self.assertEqual(mock_fire.call_count, 0)
    for repeat_record in self.repeat_records():
        self.assertEqual(repeat_record.state, RECORD_SUCCESS_STATE)
@run_with_all_backends
def test_process_repeat_record_locking(self):
    """Only records whose next_check is due get queued to the task."""
    self.assertEqual(len(self.repeat_records()), 2)

    # Already-attempted records are postponed, so nothing is enqueued.
    with patch('corehq.motech.repeaters.tasks.process_repeat_record') as mock_process:
        check_repeaters()
    self.assertEqual(mock_process.delay.call_count, 0)

    for record in self.repeat_records():
        # Resetting next_check should allow them to be requeued
        record.next_check = datetime.utcnow()
        record.save()

    with patch('corehq.motech.repeaters.tasks.process_repeat_record') as mock_process:
        check_repeaters()
    self.assertEqual(mock_process.delay.call_count, 2)
@run_with_all_backends
def test_automatic_cancel_repeat_record(self):
    """Exhausting max_possible_tries cancels a record; requeue resets it."""
    repeat_record = self.case_repeater.register(CaseAccessors(self.domain).get_case(CASE_ID))
    self.assertEqual(1, repeat_record.overall_tries)
    # Fail every remaining attempt until the limit is hit.
    with patch('corehq.motech.repeaters.models.simple_post', side_effect=Exception('Boom!')):
        for __ in range(repeat_record.max_possible_tries - repeat_record.overall_tries):
            repeat_record.fire()
    self.assertEqual(True, repeat_record.cancelled)
    repeat_record.requeue()
    # Requeueing clears the try counter and reschedules the next check.
    self.assertEqual(0, repeat_record.overall_tries)
    self.assertNotEqual(None, repeat_record.next_check)
class FormPayloadGeneratorTest(BaseRepeaterTest, TestXmlMixin):
    """FormRepeaterXMLPayloadGenerator's payload is the raw form XML."""

    domain = "form-payload"

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.repeater = FormRepeater(
            domain=cls.domain,
            url="form-repeater-url",
        )
        cls.repeatergenerator = FormRepeaterXMLPayloadGenerator(
            repeater=cls.repeater
        )
        cls.repeater.save()

    @classmethod
    def tearDownClass(cls):
        cls.repeater.delete()
        super().tearDownClass()

    def tearDown(self):
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_get_payload(self):
        self.post_xml(self.xform_xml, self.domain)
        payload_doc = FormAccessors(self.domain).get_form(self.instance_id)
        # The generated payload should round-trip the submitted XML.
        payload = self.repeatergenerator.get_payload(None, payload_doc)
        self.assertXmlEqual(self.xform_xml, payload)
class FormRepeaterTest(BaseRepeaterTest, TestXmlMixin):
    """FormRepeater forwards the submitted form XML verbatim."""

    domain = "form-repeater"

    @classmethod
    def setUpClass(cls):
        # Zero-argument super(), consistent with the sibling test classes
        # (this module otherwise uses the Python 3 form throughout).
        super().setUpClass()
        cls.repeater = FormRepeater(
            domain=cls.domain,
            url="form-repeater-url",
        )
        cls.repeater.save()

    @classmethod
    def tearDownClass(cls):
        cls.repeater.delete()
        super().tearDownClass()

    def tearDown(self):
        # NOTE(review): siblings delete cases, forms AND ledgers; this class
        # only deletes cases — presumably sufficient here, but confirm.
        FormProcessorTestUtils.delete_all_cases(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_payload(self):
        """The repeat record payload is the submitted form XML itself."""
        self.post_xml(self.xform_xml, self.domain)
        repeat_records = self.repeat_records(self.domain).all()
        payload = repeat_records[0].get_payload().decode('utf-8')
        self.assertXMLEqual(self.xform_xml, payload)
class ShortFormRepeaterTest(BaseRepeaterTest, TestXmlMixin):
    """ShortFormRepeater posts a compact JSON summary of each form."""

    domain = "sh-form-rep"

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.repeater = ShortFormRepeater(
            domain=cls.domain,
            url="short-form-repeater-url",
        )
        cls.repeater.save()

    @classmethod
    def tearDownClass(cls):
        cls.repeater.delete()
        super().tearDownClass()

    def tearDown(self):
        FormProcessorTestUtils.delete_all_cases(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_payload(self):
        form = self.post_xml(self.xform_xml, self.domain).xform
        repeat_records = self.repeat_records(self.domain).all()
        payload = repeat_records[0].get_payload()
        # Payload is JSON: received timestamp, form id, and affected cases.
        self.assertEqual(json.loads(payload), {
            'received_on': json_format_datetime(form.received_on),
            'form_id': form.form_id,
            'case_ids': list(get_case_ids_from_form(form))
        })
class CaseRepeaterTest(BaseRepeaterTest, TestXmlMixin):
    """CaseRepeater forwarding: payload shape plus white/black-listing."""

    domain = "case-rep"

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.repeater = CaseRepeater(
            domain=cls.domain,
            url="case-repeater-url",
        )
        cls.repeater.save()

    @classmethod
    def tearDownClass(cls):
        cls.repeater.delete()
        super().tearDownClass()

    def tearDown(self):
        FormProcessorTestUtils.delete_all_cases(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_case_close_format(self):
        # create a case
        self.post_xml(self.xform_xml, self.domain)
        repeat_records = self.repeat_records(self.domain).all()
        payload = repeat_records[0].get_payload()
        # Creation payload carries a <case> element with a <create> block.
        self.assertXmlHasXpath(payload, '//*[local-name()="case"]')
        self.assertXmlHasXpath(payload, '//*[local-name()="create"]')

        # close the case
        CaseFactory(self.domain).close_case(CASE_ID)
        close_payload = self.repeat_records(self.domain).all()[1].get_payload()
        # Closure payload carries a <case> element with a <close> block.
        self.assertXmlHasXpath(close_payload, '//*[local-name()="case"]')
        self.assertXmlHasXpath(close_payload, '//*[local-name()="close"]')

    @run_with_all_backends
    def test_excluded_case_types_are_not_forwarded(self):
        """Only white-listed case types generate repeat records."""
        self.repeater.white_listed_case_types = ['planet']
        self.repeater.save()

        white_listed_case = CaseBlock(
            case_id="a_case_id",
            create=True,
            case_type="planet",
        ).as_xml()
        CaseFactory(self.domain).post_case_blocks([white_listed_case])
        self.assertEqual(1, len(self.repeat_records(self.domain).all()))

        non_white_listed_case = CaseBlock(
            case_id="b_case_id",
            create=True,
            case_type="cat",
        ).as_xml()
        CaseFactory(self.domain).post_case_blocks([non_white_listed_case])
        # Count unchanged: the non-white-listed 'cat' case was not forwarded.
        self.assertEqual(1, len(self.repeat_records(self.domain).all()))

    @run_with_all_backends
    def test_black_listed_user_cases_do_not_forward(self):
        """Case creates/updates made by black-listed users are skipped."""
        self.repeater.black_listed_users = ['black_listed_user']
        self.repeater.save()
        black_list_user_id = 'black_listed_user'

        # case-creations by black-listed users shouldn't be forwarded
        black_listed_user_case = CaseBlock(
            case_id="b_case_id",
            create=True,
            case_type="planet",
            owner_id="owner",
            user_id=black_list_user_id
        ).as_text()
        xform_xml = XFORM_XML_TEMPLATE.format(
            "https://www.commcarehq.org/test/repeater/",
            black_list_user_id,
            '1234',
            black_listed_user_case,
        )
        self.post_xml(xform_xml, self.domain)
        self.assertEqual(0, len(self.repeat_records(self.domain).all()))

        # case-creations by normal users should be forwarded
        normal_user_case = CaseBlock(
            case_id="a_case_id",
            create=True,
            case_type="planet",
            owner_id="owner",
            user_id="normal_user"
        ).as_text()
        xform_xml = XFORM_XML_TEMPLATE.format(
            "https://www.commcarehq.org/test/repeater/",
            USER_ID,
            '6789',
            normal_user_case,
        )
        self.post_xml(xform_xml, self.domain)
        self.assertEqual(1, len(self.repeat_records(self.domain).all()))

        # case-updates by black-listed users shouldn't be forwarded
        black_listed_user_case = CaseBlock(
            case_id="b_case_id",
            case_type="planet",
            owner_id="owner",
            user_id=black_list_user_id,
        ).as_text()
        xform_xml = XFORM_XML_TEMPLATE.format(
            "https://www.commcarehq.org/test/repeater/",
            black_list_user_id,
            '2345',
            black_listed_user_case,
        )
        self.post_xml(xform_xml, self.domain)
        self.assertEqual(1, len(self.repeat_records(self.domain).all()))

        # case-updates by normal users should be forwarded
        normal_user_case = CaseBlock(
            case_id="a_case_id",
            case_type="planet",
            owner_id="owner",
            user_id="normal_user",
        ).as_text()
        xform_xml = XFORM_XML_TEMPLATE.format(
            "https://www.commcarehq.org/test/repeater/",
            USER_ID,
            '3456',
            normal_user_case,
        )
        self.post_xml(xform_xml, self.domain)
        self.assertEqual(2, len(self.repeat_records(self.domain).all()))
class RepeaterFailureTest(BaseRepeaterTest):
    """Failure handling when the payload generator or the POST raises."""

    domain = 'repeat-fail'

    def setUp(self):
        super().setUp()
        self.repeater = CaseRepeater(
            domain=self.domain,
            url='case-repeater-url',
        )
        self.repeater.save()
        self.post_xml(self.xform_xml, self.domain)

    def tearDown(self):
        self.repeater.delete()
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_get_payload_exception(self):
        repeat_record = self.repeater.register(CaseAccessors(self.domain).get_case(CASE_ID))
        # A payload-generation error propagates out of fire()...
        with self.assertRaises(Exception):
            with patch.object(CaseRepeater, 'get_payload', side_effect=Exception('Boom!')):
                repeat_record.fire()
        # ...and is recorded as the failure reason.
        self.assertEqual(repeat_record.failure_reason, 'Boom!')
        self.assertFalse(repeat_record.succeeded)

    @run_with_all_backends
    def test_failure(self):
        repeat_record = self.repeater.register(CaseAccessors(self.domain).get_case(CASE_ID))
        # A POST error is captured (not raised) and recorded as the reason.
        with patch('corehq.motech.repeaters.models.simple_post', side_effect=Exception('Boom!')):
            repeat_record.fire()
        self.assertEqual(repeat_record.failure_reason, 'Boom!')
        self.assertFalse(repeat_record.succeeded)

        # Should be marked as successful after a successful run
        with patch('corehq.motech.repeaters.models.simple_post') as mock_simple_post:
            mock_simple_post.return_value.status_code = 200
            repeat_record.fire()
        self.assertTrue(repeat_record.succeeded)
class IgnoreDocumentTest(BaseRepeaterTest):
    """A generator raising IgnoreDocument marks the record successful."""

    domain = 'ignore-doc'

    @classmethod
    def setUpClass(cls):
        super(IgnoreDocumentTest, cls).setUpClass()

        class NewFormGenerator(BasePayloadGenerator):
            format_name = 'new_format'
            format_label = 'XML'

            def get_payload(self, repeat_record, payload_doc):
                # Signals the repeater to skip this document entirely.
                raise IgnoreDocument

        # NOTE(review): this mutates the global generator registry and is
        # never undone in a tearDownClass — it may leak into other tests.
        RegisterGenerator.get_collection(FormRepeater).add_new_format(NewFormGenerator)

    def setUp(self):
        super().setUp()
        self.repeater = FormRepeater(
            domain=self.domain,
            url='form-repeater-url',
            format='new_format'
        )
        self.repeater.save()

    def tearDown(self):
        self.repeater.delete()
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_ignore_document(self):
        """
        When get_payload raises IgnoreDocument, fire should call update_success
        """
        repeat_records = RepeatRecord.all(
            domain=self.domain,
        )
        for repeat_record_ in repeat_records:
            repeat_record_.fire()

            # Ignored documents are treated as succeeded and descheduled.
            self.assertIsNone(repeat_record_.next_check)
            self.assertTrue(repeat_record_.succeeded)
class TestRepeaterFormat(BaseRepeaterTest):
    """Payload-format registration: duplicates, defaults, aliases, dispatch."""

    domain = 'test-fmt'

    @classmethod
    def setUpClass(cls):
        super(TestRepeaterFormat, cls).setUpClass()
        cls.payload = 'some random case'

        class NewCaseGenerator(BasePayloadGenerator):
            format_name = 'new_format'
            format_label = 'XML'
            deprecated_format_names = ('new_format_alias',)

            def get_payload(self, repeat_record, payload_doc):
                return cls.payload

        # NOTE(review): mutates the global generator registry with no
        # corresponding cleanup — may leak into other tests.
        RegisterGenerator.get_collection(CaseRepeater).add_new_format(NewCaseGenerator)
        cls.new_generator = NewCaseGenerator

    def setUp(self):
        super(TestRepeaterFormat, self).setUp()
        self.post_xml(self.xform_xml, self.domain)
        self.repeater = CaseRepeater(
            domain=self.domain,
            url='case-repeater-url',
            format='new_format',
        )
        self.repeater.save()

    def tearDown(self):
        self.repeater.delete()
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    def test_new_format_same_name(self):
        # Re-registering an already-used format name must be rejected.
        class NewCaseGenerator(BasePayloadGenerator):
            format_name = 'case_xml'
            format_label = 'XML'

            def get_payload(self, repeat_record, payload_doc):
                return self.payload

        with self.assertRaises(DuplicateFormatException):
            RegisterGenerator.get_collection(CaseRepeater).add_new_format(NewCaseGenerator)

    def test_new_format_second_default(self):
        # Only one default generator is allowed per repeater type.
        class NewCaseGenerator(BasePayloadGenerator):
            format_name = 'rubbish'
            format_label = 'XML'

            def get_payload(self, repeat_record, payload_doc):
                return self.payload

        with self.assertRaises(DuplicateFormatException):
            RegisterGenerator.get_collection(CaseRepeater).add_new_format(NewCaseGenerator, is_default=True)

    @run_with_all_backends
    def test_new_format_payload(self):
        repeat_record = self.repeater.register(CaseAccessors(self.domain).get_case(CASE_ID))
        with patch('corehq.motech.repeaters.models.simple_post') as mock_post:
            mock_post.return_value.status_code = 200
            repeat_record.fire()
            headers = self.repeater.get_headers(repeat_record)
            # The custom generator's payload must be what gets POSTed.
            mock_post.assert_called_with(
                self.domain,
                self.repeater.url,
                self.payload,
                auth=self.repeater.get_auth(),
                headers=headers,
                notify_addresses=[],
                payload_id='ABC123CASEID',
                verify=self.repeater.verify,
            )

    def test_get_format_by_deprecated_name(self):
        # A deprecated alias must resolve to the same generator class.
        self.assertIsInstance(CaseRepeater(
            domain=self.domain,
            url='case-repeater-url',
            format='new_format_alias',
        ).generator, self.new_generator)
@override_settings(TESTS_SHOULD_USE_SQL_BACKEND=True)
class UserRepeaterTest(TestCase, DomainSubscriptionMixin):
    """Creating a mobile worker triggers a UserRepeater record."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.domain = 'user-repeater'
        cls.domain_obj = create_domain(name=cls.domain)
        # DATA_FORWARDING is on PRO and above
        cls.setup_subscription(cls.domain, SoftwarePlanEdition.PRO)

    def setUp(self):
        super(UserRepeaterTest, self).setUp()
        self.repeater = UserRepeater(
            domain=self.domain,
            url='super-cool-url',
        )
        self.repeater.save()

    @classmethod
    def tearDownClass(cls):
        cls.teardown_subscriptions()
        cls.domain_obj.delete()
        clear_plan_version_cache()
        super().tearDownClass()

    def tearDown(self):
        super().tearDown()
        delete_all_repeat_records()
        delete_all_repeaters()

    def repeat_records(self):
        # Enqueued repeat records have next_check set 48 hours in the future.
        later = datetime.utcnow() + timedelta(hours=48 + 1)
        return RepeatRecord.all(domain=self.domain, due_before=later)

    def make_user(self, username):
        user = CommCareUser.create(
            self.domain,
            # NOTE(review): the next line was garbled by data redaction; it
            # originally built the user's email-style login from `username`
            # and `self.domain`. Restore before running.
            <EMAIL>".<EMAIL>(username, self.domain),
            "123",
        )
        self.addCleanup(user.delete)
        return user

    def test_trigger(self):
        self.assertEqual(0, len(self.repeat_records().all()))
        user = self.make_user("bselmy")
        records = self.repeat_records().all()
        self.assertEqual(1, len(records))
        record = records[0]
        # Payload mirrors the user API resource representation.
        self.assertEqual(
            json.loads(record.get_payload()),
            {
                'id': user._id,
                'username': user.username,
                'first_name': '',
                'last_name': '',
                'default_phone_number': None,
                'user_data': {'commcare_project': self.domain},
                'groups': [],
                'phone_numbers': [],
                'email': '',
                'resource_uri': '/a/user-repeater/api/v0.5/user/{}/'.format(user._id),
            }
        )
@override_settings(TESTS_SHOULD_USE_SQL_BACKEND=True)
class LocationRepeaterTest(TestCase, DomainSubscriptionMixin):
    """Creating a location triggers a LocationRepeater record."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.domain = 'loc-repeat'
        cls.domain_obj = create_domain(name=cls.domain)
        # DATA_FORWARDING is on PRO and above
        cls.setup_subscription(cls.domain, SoftwarePlanEdition.PRO)

    def setUp(self):
        super().setUp()
        self.repeater = LocationRepeater(
            domain=self.domain,
            url='super-cool-url',
        )
        self.repeater.save()
        self.location_type = LocationType.objects.create(
            domain=self.domain,
            name="city",
        )

    @classmethod
    def tearDownClass(cls):
        cls.teardown_subscriptions()
        cls.domain_obj.delete()
        clear_plan_version_cache()
        super().tearDownClass()

    def tearDown(self):
        super().tearDown()
        delete_all_repeat_records()
        delete_all_repeaters()

    def repeat_records(self):
        # Enqueued repeat records have next_check set 48 hours in the future.
        later = datetime.utcnow() + timedelta(hours=48 + 1)
        return RepeatRecord.all(domain=self.domain, due_before=later)

    def make_location(self, name):
        location = SQLLocation.objects.create(
            domain=self.domain,
            name=name,
            site_code=name,
            location_type=self.location_type,
        )
        self.addCleanup(location.delete)
        return location

    def test_trigger(self):
        self.assertEqual(0, len(self.repeat_records().all()))
        location = self.make_location('kings_landing')
        records = self.repeat_records().all()
        self.assertEqual(1, len(records))
        record = records[0]
        # Payload mirrors the couch Location document schema.
        self.assertEqual(
            json.loads(record.get_payload()),
            {
                '_id': location.location_id,
                'doc_type': 'Location',
                'domain': self.domain,
                'external_id': None,
                'is_archived': False,
                'archived_on': None,
                'last_modified': location.last_modified.isoformat(),
                'latitude': None,
                'lineage': [],
                'location_id': location.location_id,
                'location_type': 'city',
                'location_type_code': 'city',
                'longitude': None,
                'metadata': {},
                'name': location.name,
                'parent_location_id': None,
                'site_code': location.site_code,
            }
        )
class TestRepeaterPause(BaseRepeaterTest):
    """A paused repeater postpones records; resuming fires them again."""

    domain = 'rep-pause'

    def setUp(self):
        super().setUp()
        self.repeater = CaseRepeater(
            domain=self.domain,
            url='case-repeater-url',
        )
        self.repeater.save()
        self.post_xml(self.xform_xml, self.domain)

    @run_with_all_backends
    def test_trigger_when_paused(self):
        # not paused
        with patch.object(RepeatRecord, 'fire') as mock_fire:
            with patch.object(RepeatRecord, 'postpone_by') as mock_postpone_fire:
                # calls process_repeat_record():
                self.repeat_record = self.repeater.register(CaseAccessors(self.domain_obj).get_case(CASE_ID))
                self.assertEqual(mock_fire.call_count, 1)
                self.assertEqual(mock_postpone_fire.call_count, 0)

                # paused
                self.repeater.pause()
                # re fetch repeat record
                repeat_record_id = self.repeat_record.get_id
                self.repeat_record = RepeatRecord.get(repeat_record_id)
                process_repeat_record(self.repeat_record)
                # While paused the record is postponed, not fired.
                self.assertEqual(mock_fire.call_count, 1)
                self.assertEqual(mock_postpone_fire.call_count, 1)

                # resumed
                self.repeater.resume()
                # re fetch repeat record
                repeat_record_id = self.repeat_record.get_id
                self.repeat_record = RepeatRecord.get(repeat_record_id)
                process_repeat_record(self.repeat_record)
                # After resuming the record fires again.
                self.assertEqual(mock_fire.call_count, 2)
                self.assertEqual(mock_postpone_fire.call_count, 1)

    def tearDown(self):
        self.repeater.delete()
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        delete_all_repeat_records()
        super(TestRepeaterPause, self).tearDown()
class TestRepeaterDeleted(BaseRepeaterTest):
    """Retired repeaters mark their records deleted instead of firing."""

    domain = 'rep-deleted'

    def setUp(self):
        super().setUp()
        self.repeater = CaseRepeater(
            domain=self.domain,
            url='case-repeater-url',
        )
        self.repeater.save()
        self.post_xml(self.xform_xml, self.domain)

    def tearDown(self):
        self.repeater.delete()
        FormProcessorTestUtils.delete_all_cases_forms_ledgers(self.domain)
        delete_all_repeat_records()
        super().tearDown()

    @run_with_all_backends
    def test_trigger_when_deleted(self):
        self.repeater.retire()
        with patch.object(RepeatRecord, 'fire') as mock_fire:
            self.repeat_record = self.repeater.register(CaseAccessors(self.domain_obj).get_case(CASE_ID))
            process_repeat_record(self.repeat_record)
            # Nothing is sent; the record is soft-deleted instead.
            self.assertEqual(mock_fire.call_count, 0)
            self.assertEqual(self.repeat_record.doc_type, "RepeatRecord-Deleted")

    @run_with_all_backends
    def test_paused_then_deleted(self):
        # Deletion takes precedence over the paused state.
        self.repeater.pause()
        self.repeater.retire()
        with patch.object(RepeatRecord, 'fire') as mock_fire:
            self.repeat_record = self.repeater.register(CaseAccessors(self.domain_obj).get_case(CASE_ID))
            process_repeat_record(self.repeat_record)
            self.assertEqual(mock_fire.call_count, 0)
            self.assertEqual(self.repeat_record.doc_type, "RepeatRecord-Deleted")
@attr.s
class Response(object):
    """Minimal stand-in for requests.Response used by the handler tests."""
    status_code = attr.ib()
    reason = attr.ib()
    content = attr.ib(default=None)
    encoding = attr.ib(default='ascii')

    @property
    def text(self):
        """Decoded body, or the empty string when there is no content."""
        if self.content is None:
            return ''
        return self.content.decode(self.encoding, errors='replace')
class DummyRepeater(Repeater):
    """Minimal concrete Repeater for unit tests that avoid the database."""

    @property
    def generator(self):
        # Always uses the plain form-XML payload generator.
        return FormRepeaterXMLPayloadGenerator(self)

    def payload_doc(self, repeat_record):
        # No backing document; the tests never dereference the payload doc.
        return {}
class HandleResponseTests(SimpleTestCase):
    """handle_response must route to exactly one RepeatRecord handler."""

    domain = 'handle-resp'

    def setUp(self):
        self.repeater = DummyRepeater(
            domain=self.domain,
            url="https://example.com/api/",
        )
        self.repeat_record = Mock()

    def _handle(self, response):
        # Dispatch the response and return the mocked record for assertions.
        self.repeater.handle_response(response, self.repeat_record)
        return self.repeat_record

    def test_handle_ok_response(self):
        record = self._handle(Response(status_code=200, reason='OK', content=b'OK'))
        record.handle_exception.assert_not_called()
        record.handle_success.assert_called()
        record.handle_failure.assert_not_called()

    def test_handle_true_response(self):
        record = self._handle(True)
        record.handle_exception.assert_not_called()
        record.handle_success.assert_called()
        record.handle_failure.assert_not_called()

    def test_handle_none_response(self):
        record = self._handle(None)
        record.handle_exception.assert_not_called()
        record.handle_success.assert_not_called()
        record.handle_failure.assert_called()

    def test_handle_500_response(self):
        record = self._handle(Response(status_code=500, reason='The core is exposed'))
        record.handle_exception.assert_not_called()
        record.handle_success.assert_not_called()
        record.handle_failure.assert_called()

    def test_handle_exception(self):
        record = self._handle(Exception('The core is exposed'))
        record.handle_exception.assert_called()
        record.handle_success.assert_not_called()
        record.handle_failure.assert_not_called()
class FormatResponseTests(SimpleTestCase):
    """RepeatRecord._format_response renders 'status: reason.' + body."""

    def test_content_is_ascii(self):
        resp = Response(
            status_code=200,
            reason='OK',
            content=b'3.6 roentgen. Not great. Not terrible.'
        )
        rendered = RepeatRecord._format_response(resp)
        self.assertEqual(rendered, '200: OK.\n3.6 roentgen. Not great. Not terrible.')

    def test_encoding_is_not_ascii(self):
        # Body bytes are cp855-encoded Russian text.
        resp = Response(
            status_code=200,
            reason='OK',
            content=(b'3,6 \xe1\xa8\xd4\xe5\xac\xa8\xd4\xa0 \xd5\xa8 \xb5\xd6\xe1\xd6\xf5\xd6. '
                     b'\xd5\xa8 \xe3\xe5\xe1\xa0\xf5\xd4\xd6'),
            encoding='cp855'
        )
        rendered = RepeatRecord._format_response(resp)
        self.assertEqual(rendered, '200: OK.\n3,6 рентгена Не хорошо. Не страшно')

    def test_content_is_None(self):
        # content defaults to None, so the body part of the message is empty.
        rendered = RepeatRecord._format_response(Response(500, 'The core is exposed'))
        self.assertEqual(rendered, '500: The core is exposed.\n')
class NotifyAddressesTests(SimpleTestCase):
    """notify_addresses splits notify_addresses_str on commas and spaces."""

    @staticmethod
    def _addresses(doc):
        # Wrap a raw couch doc and read back the parsed address list.
        return DummyRepeater.wrap(doc).notify_addresses

    def test_default(self):
        self.assertEqual(self._addresses({}), [])

    def test_empty(self):
        self.assertEqual(self._addresses({"notify_addresses_str": ""}), [])

    def test_one(self):
        self.assertEqual(self._addresses({"notify_addresses_str": "<EMAIL>"}),
                         ["<EMAIL>"])

    def test_comma(self):
        self.assertEqual(self._addresses({"notify_addresses_str": "<EMAIL>,<EMAIL>"}),
                         ["<EMAIL>", "<EMAIL>"])

    def test_space(self):
        self.assertEqual(self._addresses({"notify_addresses_str": "<EMAIL> <EMAIL>"}),
                         ["<EMAIL>", "<EMAIL>"])

    def test_commaspace(self):
        self.assertEqual(self._addresses({"notify_addresses_str": "<EMAIL>, <EMAIL>"}),
                         ["<EMAIL>", "<EMAIL>"])

    def test_mess(self):
        # Consecutive separators collapse; no empty entries are produced.
        self.assertEqual(self._addresses({"notify_addresses_str": "<EMAIL>,,, , <EMAIL>"}),
                         ["<EMAIL>", "<EMAIL>"])
class TestGetRetryInterval(SimpleTestCase):
    """_get_retry_interval triples the elapsed time, clamped to [MIN, MAX]."""

    def test_min_interval(self):
        start = fromisoformat("2020-01-01 00:00:00")
        end = fromisoformat("2020-01-01 00:05:00")
        self.assertEqual(_get_retry_interval(start, end), MIN_RETRY_WAIT)

    def test_max_interval(self):
        start = fromisoformat("2020-01-01 00:00:00")
        end = fromisoformat("2020-02-01 00:00:00")
        self.assertEqual(_get_retry_interval(start, end), MAX_RETRY_WAIT)

    def test_three_times_interval(self):
        start = fromisoformat("2020-01-01 00:00:00")
        end = fromisoformat("2020-01-01 01:00:00")
        self.assertEqual(_get_retry_interval(start, end), timedelta(hours=3))

    def test_five_retries(self):
        # (Five retries because RepeatRecord.max_possible_tries is 6)
        schedule = [
            (None, "2020-01-01 00:00:00", 1),
            ("2020-01-01 00:00:00", "2020-01-01 01:00:00", 3),
            ("2020-01-01 01:00:00", "2020-01-01 04:00:00", 9),
            ("2020-01-01 04:00:00", "2020-01-01 13:00:00", 27),
            ("2020-01-01 13:00:00", "2020-01-02 16:00:00", 81),
        ]
        for checked_str, now_str, hours in schedule:
            last_checked = fromisoformat(checked_str) if checked_str else None
            interval = _get_retry_interval(last_checked, fromisoformat(now_str))
            self.assertEqual(interval, timedelta(hours=hours))
def fromisoformat(isoformat):
    """
    Return a datetime from a string in ISO 8601 date time format

    >>> fromisoformat("2019-12-31 23:59:59")
    datetime.datetime(2019, 12, 31, 23, 59, 59)
    """
    if hasattr(datetime, "fromisoformat"):  # Python >= 3.7
        return datetime.fromisoformat(isoformat)
    # Older interpreters: parse the fixed "YYYY-MM-DD HH:MM:SS" layout.
    return datetime.strptime(isoformat, "%Y-%m-%d %H:%M:%S")
|
en
| 0.786952
|
<?xml version='1.0' ?> <data xmlns:jrm="http://dev.commcarehq.org/jr/xforms" xmlns="{}"> <woman_name>Alpha</woman_name> <husband_name>Beta</husband_name> <meta> <deviceID>O2XLT0WZW97W1A91E2W1Y0NJG</deviceID> <timeStart>2011-10-01T15:25:18.404-04</timeStart> <timeEnd>2011-10-01T15:26:29.551-04</timeEnd> <username>admin</username> <userID>{}</userID> <instanceID>{}</instanceID> </meta> {} </data> # DATA_FORWARDING is on PRO and above # Enqueued repeat records have next_check set 48 hours in the future. Ensure that submitting a duplicate form does not create extra RepeatRecords # this form is already submitted during setUp so a second submission should be a duplicate This tests records that fail are requeued later # Enqueued repeat records have next_check incremented by 48 hours # The following is pretty fickle and depends on which of # - corehq.motech.repeaters.signals # - casexml.apps.case.signals # gets loaded first. # This is deterministic but easily affected by minor code changes # Do not trigger cancelled records # trigger force send records if not cancelled and tries not exhausted # all records should be in SUCCESS state after force try # not trigger records succeeded triggered after cancellation # Resetting next_check should allow them to be requeued # create a case # close the case # case-creations by black-listed users shouldn't be forwarded # case-creations by normal users should be forwarded # case-updates by black-listed users shouldn't be forwarded # case-updates by normal users should be forwarded # Should be marked as successful after a successful run When get_payload raises IgnoreDocument, fire should call update_success # DATA_FORWARDING is on PRO and above # Enqueued repeat records have next_check set 48 hours in the future. # DATA_FORWARDING is on PRO and above # Enqueued repeat records have next_check set 48 hours in the future. 
# not paused # calls process_repeat_record(): # paused # re fetch repeat record # resumed # re fetch repeat record # (Five retries because RepeatRecord.max_possible_tries is 6) Return a datetime from a string in ISO 8601 date time format >>> fromisoformat("2019-12-31 23:59:59") datetime.datetime(2019, 12, 31, 23, 59, 59) # Python >= 3.7
| 1.414882
| 1
|
server.py
|
brandiny/ncdaily-opensource
| 4
|
6626796
|
"""
server.py - Serves the Flask web application for the website
"""
import json
import admintools
import credentials
from flask import Flask, render_template, request, redirect, url_for, make_response, session
import hashlib
try:
import MySQLdb
except Exception as e:
import os
os.system("pip install mysqlclient")
import MySQLdb
import newsletter
import random
from validate_email import validate_email
app = Flask(__name__)
app.secret_key = 'any random string'
email_groups = credentials.blacklist
"""ADMIN login page"""
@app.route("/admin", methods=['GET', 'POST'])
def admin():
"""If SUBMITTING username and password """
if request.method == 'POST':
# Get database
db = credentials.dbconnect()
cursor = db.cursor()
sql = """SELECT * FROM admin"""
cursor.execute(sql)
results = [i[1:] for i in cursor.fetchall()]
# Get submitted details
submittedCredentials = (request.form['username'], hashlib.sha256(request.form['password'].encode()).hexdigest())
# Password handling
if submittedCredentials in results:
session['username'] = request.form['username']
return redirect(url_for('adminpanel'))
else:
return render_template('adminpanel--login.html', credentialsWrong=True)
"""IF first loading the page"""
if request.method == 'GET':
if 'username' in session:
return redirect(url_for('adminpanel'))
else:
return render_template('adminpanel--login.html')
"""ADMIN PANEL page
- Change holiday intervals
- Add days of relief
- Remove days of relief
- Create new admin account
- Change password
- Bulk add emails
- View email list
"""
@app.route("/adminpanel", methods=['GET', 'POST'])
def adminpanel():
# IF LOGGED IN
try:
if session['username']:
db = credentials.dbconnect()
cursor = db.cursor()
sql = """SELECT * FROM emails"""
cursor.execute(sql)
results = [i[0] for i in cursor.fetchall()]
total_subs = len(results)
db.commit()
sql = """SELECT * FROM statistics"""
cursor.execute(sql)
db.commit()
results = cursor.fetchall()
# Define statistical variables
loop_time = str(round(results[0][0], 2)) + 's'
uptime_days = str(results[0][1]) + ' days'
emails_sent = str(results[0][2]) + ' emails'
import time
try:
os.environ["TZ"] = "Pacific/Auckland"
time.tzset()
except Exception as e:
pass
print(admintools.is_schooltime() and (not admintools.is_weekend()) and admintools.is_ON_declaredbyuser())
if admintools.is_schooltime() and (not admintools.is_weekend()) and admintools.is_ON_declaredbyuser():
return render_template('adminpanel--home.html', appON='True', username=session['username'], totalSubscribers=total_subs, emails_sent=emails_sent, loop_time=loop_time, uptime_days=uptime_days, holiday_startdate=admintools.holiday_startdate(),holiday_enddate=admintools.holiday_enddate(),**request.args)
else:
return render_template('adminpanel--home.html', appON='False', username=session['username'], totalSubscribers=total_subs, emails_sent=emails_sent, loop_time=loop_time, uptime_days=uptime_days, holiday_startdate=admintools.holiday_startdate(),holiday_enddate=admintools.holiday_enddate(), **request.args)
# ELSE TAKE BACK TO ADMIN
except KeyError:
return redirect(url_for('admin'))
"""LOGOUT method"""
@app.route('/logout')
def logout():
# remove the username from the session if it is there
session.pop('username', None)
return redirect(url_for('home'))
@app.route('/add_emails', methods=['POST'])
def add_emails():
    """Bulk-subscribe a comma-separated list of emails (admin only).

    Each address is validated server-side (non-empty, not already
    subscribed) and inserted with a fresh 5-digit unsubscribe code.
    Redirects to the subscriber view with a status flag.
    """
    # session.get: a missing session key takes the 'unauthorised' branch
    # instead of raising an unhandled KeyError (HTTP 500).
    if session.get('username'):
        details = request.form
        emails = details['emails'].split(',')
        print(emails)
        # Connect to database and fetch all currently subscribed addresses
        db = credentials.dbconnect()
        cursor = db.cursor()
        cursor.execute("""SELECT * FROM emails""")
        results = [i[0] for i in cursor.fetchall()]
        for email in emails:
            # BLANK ENTRY serverside validate
            if len(email) == 0:
                return redirect(url_for('view_subscribed', empty=True))
            # SUBSCRIBING TWICE serverside validate
            if email in results:
                db.close()
                return redirect(url_for('view_subscribed', email=email, duplicate=True))
            # All serverside validation PASSED successfully
            else:
                # Parameterized query — the previous string-formatted INSERT
                # was vulnerable to SQL injection via the submitted address.
                sql = """INSERT INTO emails(emails, subscription_status, unsubscribe_code) VALUES (%s, 1, %s)"""
                cursor.execute(sql, (email.strip(), '{:05}'.format(random.randint(0, 10000))))
                db.commit()
                # Send them welcome message
                # newsletter.send_newsletter_to(email)
        # Renders the welcome page -- DOES NOT REDIRECT
        db.close()
        return redirect(url_for('view_subscribed', successfullyAdded=True))
    else:
        return 'unauthorised'
"""DELETE email method"""
@app.route('/delete_emails', methods=['POST'])
def delete_emails():
    """Delete a comma-separated list of email addresses (admin panel).

    Requires an authenticated session; otherwise returns 'unauthorised'.
    Redirects back to the subscriber view with numberDeleted on success,
    or errorDelete=True if anything goes wrong.
    """
    if session['username']:
        try:
            emailScrap = request.form['emailScrap'].split(',')
            db = credentials.dbconnect()
            cursor = db.cursor()
            for email in emailScrap:
                # Parameterized DELETE -- the str.format() original allowed
                # SQL injection through the submitted email value.
                cursor.execute("""DELETE FROM emails WHERE emails=%s""", (email,))
            db.commit()
            db.close()
            return redirect(url_for('view_subscribed', numberDeleted=len(emailScrap), errorDelete=False))
        except Exception as e:
            return redirect(url_for('view_subscribed', errorDelete=True))
    else:
        return 'unauthorised'
"""CHANGE PASSWORD method"""
@app.route('/adminpanel/changepassword', methods=['GET', 'POST'])
def changepassword():
    """Let the logged-in admin change their password.

    POST: verify the old password, require both new-password fields to match,
    then replace the stored SHA-256 hash (delete row + insert row, as before).
    GET: render the change-password form.
    Returns 'unauthorised' when no session is active.
    """
    try:
        if session['username']:
            # HANDLING POST DATA
            if request.method == 'POST':
                db = credentials.dbconnect()
                cursor = db.cursor()
                # Parameterized lookup -- the str.format() original allowed
                # SQL injection through the session username.
                cursor.execute("""SELECT password FROM admin WHERE username=%s""", (session['username'],))
                oldPassword = cursor.fetchall()[0][0]
                # most important, check for the old passwords to match
                if hashlib.sha256(request.form['oldPassword'].encode()).hexdigest() == oldPassword:
                    # check for new passwords to match, otherwise return back
                    if request.form['newPassword'] != request.form['newPassword2']:
                        return redirect(url_for('changepassword', passwordsNotMatch=True))
                    else:
                        # delete the row that preexists, then insert the new hash
                        cursor.execute("""DELETE FROM admin WHERE username=%s;""", (session['username'],))
                        cursor.execute(
                            """INSERT INTO admin (username, password) VALUES (%s, %s);""",
                            (session['username'], hashlib.sha256(request.form['newPassword'].encode()).hexdigest()),
                        )
                        # The debug print of the SQL statements (which leaked the
                        # new password hash to stdout) has been removed.
                        db.commit()
                        db.close()
                        return redirect(url_for('changepassword', changedPassword=True))
                # if old password match fails, return back
                else:
                    return redirect(url_for('changepassword', incorrectPassword=True))
            # IF RESPONDING TO PAGE LOAD
            if request.method == "GET":
                return render_template('adminpanel--changepassword.html', username=session['username'])
    except Exception as e:
        return 'unauthorised'
"""VIEW SUBSCRIPTIONS"""
@app.route('/adminpanel/view_subscribed', methods=['GET', 'POST'])
def view_subscribed():
try:
if session['username']:
db = credentials.dbconnect()
cursor = db.cursor()
sql = """SELECT * FROM emails"""
cursor.execute(sql)
emailList = [i[0] for i in cursor.fetchall()]
emailList.sort()
return render_template('adminpanel--view_subscribed.html', emailList=emailList)
except Exception as e:
return 'unauthorised'
"""HOLIDAYS"""
@app.route('/adminpanel/holidays', methods=['GET'])
def holidays():
try:
if session['username']:
with open('static/json/term_dates.json') as jsonfile:
data = json.load(jsonfile)
if request.method == 'GET':
return render_template('adminpanel--holidays.html', data=data)
except Exception as e:
return 'unauthorised'
@app.route('/change_holidays', methods=['POST'])
def change_holidays():
if request.method == 'POST':
try:
# check number
term = int(request.form['term'])
# check between 1/4
if not (term > 0 and term <= 4):
raise Exception
start_date = request.form['start_date']
end_date = request.form['end_date']
if not (len(start_date.split('/')) == 2 and len(end_date.split('/')) == 2):
raise Exception
admintools.change_termdates(int(term), dateStart=start_date, dateEnd=end_date)
with open('static/json/term_dates.json') as jsonfile:
data = json.load(jsonfile)
return redirect(url_for('holidays', error=False, data=data))
except:
with open('static/json/term_dates.json') as jsonfile:
data = json.load(jsonfile)
return redirect(url_for('holidays', error=True, data=data))
"""DISABLE"""
@app.route('/adminpanel/disable', methods=['GET', 'POST'])
def disable():
if request.method == 'POST':
with open('static/json/app_status.json', 'w+') as f:
print(request.form['result'])
if request.form['result'] == 'Turn off':
f.write('{"appisON": false}')
return render_template('adminpanel--disable.html', appON=False)
elif request.form['result'] == 'Turn on':
f.write('{"appisON": true}')
return render_template('adminpanel--disable.html', appON=True)
else:
if admintools.is_ON_declaredbyuser():
return render_template('adminpanel--disable.html', appON=True)
else:
return render_template('adminpanel--disable.html', appON=False)
"""
Index/Home page
- Subscription field
- Vector from vecteezy
- Introductory information to the app
"""
@app.route("/", methods=['GET', 'POST'])
def home():
    """Landing page.

    GET: render the home page with a rounded-up subscriber count.
    POST: validate and store a new subscription, then send a welcome email
    and render the landing page (no redirect).
    """
    db = credentials.dbconnect()
    cursor = db.cursor()
    cursor.execute("""SELECT `subscription_status` FROM emails""")
    results = [i[0] for i in cursor.fetchall()]
    # Round the subscriber count up to the next multiple of ten for display.
    num_students = ((len(results) // 10) * 10) + 10
    # IF RESPONDING TO REGULAR GET REQUEST
    if request.method == 'GET':
        return render_template('home.html', num_students=num_students)
    # IF RESPONDING TO SUBSCRIBE POST REQUEST
    if request.method == "POST":
        email = request.form['email']
        #uncomment for disable
        #return render_template('home.html', num_students=num_students, disabled=True)
        # BLANK ENTRY serverside validate
        if len(email) == 0:
            return render_template('home.html', num_students=num_students, empty=True)
        # Connect to database and fetch all existing addresses
        db = credentials.dbconnect()
        cursor = db.cursor()
        cursor.execute("""SELECT * FROM emails""")
        results = [i[0] for i in cursor.fetchall()]
        # SUBSCRIBING TWICE serverside validate
        if email in results:
            db.commit()
            db.close()
            return render_template('home.html', num_students=num_students, email=email, duplicate=True)
        # BLACKLIST check (the else below pairs with this if, as before)
        if email in email_groups:
            return render_template('home.html', num_students=num_students, email=email, blocked=True)
        # All serverside validation PASSED successfully
        else:
            # Parameterized INSERT -- the str.format() original allowed SQL
            # injection through the submitted email address.
            cursor.execute(
                """INSERT INTO emails(emails, subscription_status, unsubscribe_code) VALUES (%s, 1, %s)""",
                (email, '{:05}'.format(random.randint(0, 10000))),
            )
            db.commit()
            db.close()
            # Send them welcome message
            newsletter.send_newsletter_to(email)
            # Renders the welcome page -- DOES NOT REDIRECT
            return render_template('landing.html', email=email)
"""
About page
- How to use it
- How it works
- Who made it
"""
@app.route('/about')
def about():
return render_template('about.html')
"""
FAQ
- All the questions people ask
- Contact details of the administrator
"""
@app.route('/faq')
def faq():
return render_template('faq.html')
"""
Notices
- ***SECRET PAGE ***
- Testing environment with the email newsletter
"""
@app.route('/notices')
def notices():
return render_template('emailformatter.html')
"""
Unsubscribe
- Unsubscribe option
"""
@app.route('/unsubscribe', methods=['GET', 'POST'])
def unsubscribe():
""" IF JUST LOADING THE PAGE """
if request.method == "GET":
return render_template('unsub.html')
""" OTHERWISE RESPONDING TO UNSUBSCRIBE POST REQUEST """
if request.method == "POST":
# return render_template('unsub.html', disabled=True) # uncomment for disable
details = request.form
email = details['email']
# LOGIN TO DB
db = credentials.dbconnect()
cursor = db.cursor()
sql2 = """SELECT * FROM emails"""
cursor.execute(sql2)
results = [i[0] for i in cursor.fetchall()]
# BLANK ENTRY serverside validate
if len(email) == 0:
return render_template('unsub.html', subscribed='False')
# FALSE UNSUBSCRIBE serverside check
elif email not in results:
db.commit()
db.close()
return render_template('unsub.html', subscribed='False')
# If all serverside checking has PASSED
else:
# REDIRECT to the confirm unsubscribe page, with given email
return redirect(url_for('confirm_unsubscribe', email=details['email']))
"""
Confirm Unsubscribe
- Enter correct code
- Prevents unwanted unsubscription
"""
@app.route('/confirm_unsubscribe', methods=['GET', 'POST'])
def confirm_unsubscribe():
    """Second unsubscribe step: email a code on GET, verify it on POST.

    Expects the target address in the `email` query parameter; deletes the
    subscriber row when the submitted code matches.
    """
    email = request.args['email']
    db = credentials.dbconnect()
    cursor = db.cursor()
    # Parameterized lookup -- the str.format() original allowed SQL injection
    # through the email query parameter.
    cursor.execute("""SELECT * FROM emails WHERE emails=%s """, (email,))
    # Column 2 holds the per-subscriber unsubscribe code.
    unsubscribe_code = cursor.fetchall()[0][2]
    # Initial page load: mail the confirmation code to the subscriber.
    if request.method == 'GET':
        newsletter.send_code(email, unsubscribe_code)
        return render_template('confirm_unsubscribe.html', email=email)
    # CODE CHECK - POST REQUEST
    if request.method == 'POST':
        details = request.form
        # If it matches the code -- DELETE QUERY (parameterized, see above)
        if details['unsubscribe_code'] == unsubscribe_code:
            cursor.execute("""DELETE FROM emails WHERE emails=%s """, (email,))
            db.commit()
            db.close()
            # FAREWELL page render
            return render_template('farewell.html')
        # Otherwise, re-request
        else:
            return render_template('confirm_unsubscribe.html', wrong='False')
@app.route('/cronjob', methods=['GET'])
def cronjob():
return 'OK'
@app.route('/testing', methods=['GET'])
def testing():
return render_template("confirm_unsubscribe.html", email='anyone')
@app.route('/tryitout', methods=['GET'])
def tryitout():
return render_template("emailformatter.html")
if __name__ == "__main__":
app.run(debug=True)
|
"""
server.py - Serves the Flask web application for the website
"""
import json
import admintools
import credentials
from flask import Flask, render_template, request, redirect, url_for, make_response, session
import hashlib
try:
    import MySQLdb
except Exception as e:
    # Fallback: install the driver on the fly, then retry the import.
    # NOTE(review): shelling out to pip at import time is fragile (needs
    # network + write access, races under multiple workers); prefer declaring
    # mysqlclient in requirements instead.
    import os
    os.system("pip install mysqlclient")
    import MySQLdb
import newsletter
import random
from validate_email import validate_email
app = Flask(__name__)
app.secret_key = 'any random string'
email_groups = credentials.blacklist
"""ADMIN login page"""
@app.route("/admin", methods=['GET', 'POST'])
def admin():
    """Admin login page.

    POST: check the submitted (username, SHA-256(password)) pair against the
    rows of the `admin` table and start a session on success.
    GET: redirect straight to the panel when a session already exists,
    otherwise render the login form.
    """
    """If SUBMITTING username and password """
    if request.method == 'POST':
        # Get database
        db = credentials.dbconnect()
        cursor = db.cursor()
        # Fetch every admin row; the credential check happens in Python below.
        sql = """SELECT * FROM admin"""
        cursor.execute(sql)
        # Drop column 0 so each tuple is (username, password_hash).
        # NOTE(review): assumes column order (id?, username, password) -- confirm schema.
        results = [i[1:] for i in cursor.fetchall()]
        # Get submitted details
        # NOTE(review): unsalted SHA-256 of the password; a salted KDF (bcrypt/
        # scrypt) would be safer, but changing it would invalidate stored hashes.
        submittedCredentials = (request.form['username'], hashlib.sha256(request.form['password'].encode()).hexdigest())
        # Password handling
        if submittedCredentials in results:
            session['username'] = request.form['username']
            return redirect(url_for('adminpanel'))
        else:
            return render_template('adminpanel--login.html', credentialsWrong=True)
    """IF first loading the page"""
    if request.method == 'GET':
        if 'username' in session:
            return redirect(url_for('adminpanel'))
        else:
            return render_template('adminpanel--login.html')
"""ADMIN PANEL page
- Change holiday intervals
- Add days of relief
- Remove days of relief
- Create new admin account
- Change password
- Bulk add emails
- View email list
"""
@app.route("/adminpanel", methods=['GET', 'POST'])
def adminpanel():
    """Admin dashboard: subscriber count, mailer statistics, and app status.

    Redirects to the login page when no session exists (the KeyError raised
    by session['username'] is caught below).
    """
    # IF LOGGED IN
    try:
        if session['username']:
            db = credentials.dbconnect()
            cursor = db.cursor()
            sql = """SELECT * FROM emails"""
            cursor.execute(sql)
            results = [i[0] for i in cursor.fetchall()]
            total_subs = len(results)
            db.commit()
            sql = """SELECT * FROM statistics"""
            cursor.execute(sql)
            db.commit()
            results = cursor.fetchall()
            # Define statistical variables from the first statistics row.
            # NOTE(review): assumes columns are (loop_seconds, uptime_days,
            # emails_sent) -- confirm against the statistics table schema.
            loop_time = str(round(results[0][0], 2)) + 's'
            uptime_days = str(results[0][1]) + ' days'
            emails_sent = str(results[0][2]) + ' emails'
            import time
            try:
                # Force NZ local time so the schooltime/weekend checks below
                # evaluate in the intended timezone.
                # NOTE(review): `os` is only imported at module level inside the
                # MySQLdb fallback branch; if that branch never ran this raises
                # NameError, silently swallowed by the except below -- confirm.
                os.environ["TZ"] = "Pacific/Auckland"
                time.tzset()
            except Exception as e:
                pass
            print(admintools.is_schooltime() and (not admintools.is_weekend()) and admintools.is_ON_declaredbyuser())
            # The app counts as ON only during school time, on a weekday, and
            # when the admin has not manually disabled it.
            if admintools.is_schooltime() and (not admintools.is_weekend()) and admintools.is_ON_declaredbyuser():
                return render_template('adminpanel--home.html', appON='True', username=session['username'], totalSubscribers=total_subs, emails_sent=emails_sent, loop_time=loop_time, uptime_days=uptime_days, holiday_startdate=admintools.holiday_startdate(),holiday_enddate=admintools.holiday_enddate(),**request.args)
            else:
                return render_template('adminpanel--home.html', appON='False', username=session['username'], totalSubscribers=total_subs, emails_sent=emails_sent, loop_time=loop_time, uptime_days=uptime_days, holiday_startdate=admintools.holiday_startdate(),holiday_enddate=admintools.holiday_enddate(), **request.args)
    # ELSE TAKE BACK TO ADMIN
    except KeyError:
        return redirect(url_for('admin'))
"""LOGOUT method"""
@app.route('/logout')
def logout():
    """Clear the admin session (if any) and return to the landing page."""
    # pop with a default is a no-op when nobody is logged in
    session.pop('username', None)
    return redirect(url_for('home'))
@app.route('/add_emails', methods=['POST'])
def add_emails():
    """Bulk-add comma-separated email addresses from the admin panel form.

    Requires an authenticated session; otherwise returns 'unauthorised'.
    Redirects back to the subscriber list view with a status flag
    (empty / duplicate / successfullyAdded) in the query string.
    """
    if session['username']:
        details = request.form
        emails = details['emails'].split(',')
        print(emails)
        # Connect to database and fetch all existing addresses
        db = credentials.dbconnect()
        cursor = db.cursor()
        cursor.execute("""SELECT * FROM emails""")
        results = [i[0] for i in cursor.fetchall()]
        for email in emails:
            # BLANK ENTRY serverside validate
            if len(email) == 0:
                return redirect(url_for('view_subscribed', empty=True))
            # SUBSCRIBING TWICE serverside validate
            if email in results:
                db.close()
                return redirect(url_for('view_subscribed', email=email, duplicate=True))
            # All serverside validation PASSED successfully
            else:
                # Parameterized INSERT -- the previous str.format() version was
                # vulnerable to SQL injection through the email field.
                sql = """INSERT INTO emails(emails, subscription_status, unsubscribe_code) VALUES (%s, 1, %s)"""
                cursor.execute(sql, (email.strip(), '{:05}'.format(random.randint(0, 10000))))
                db.commit()
                # Send them welcome message
                # newsletter.send_newsletter_to(email)
        db.close()
        return redirect(url_for('view_subscribed', successfullyAdded=True))
    else:
        return 'unauthorised'
"""DELETE email method"""
@app.route('/delete_emails', methods=['POST'])
def delete_emails():
    """Delete a comma-separated list of email addresses (admin panel).

    Requires an authenticated session; otherwise returns 'unauthorised'.
    Redirects back to the subscriber view with numberDeleted on success,
    or errorDelete=True if anything goes wrong.
    """
    if session['username']:
        try:
            emailScrap = request.form['emailScrap'].split(',')
            db = credentials.dbconnect()
            cursor = db.cursor()
            for email in emailScrap:
                # Parameterized DELETE -- the str.format() original allowed
                # SQL injection through the submitted email value.
                cursor.execute("""DELETE FROM emails WHERE emails=%s""", (email,))
            db.commit()
            db.close()
            return redirect(url_for('view_subscribed', numberDeleted=len(emailScrap), errorDelete=False))
        except Exception as e:
            return redirect(url_for('view_subscribed', errorDelete=True))
    else:
        return 'unauthorised'
"""CHANGE PASSWORD method"""
@app.route('/adminpanel/changepassword', methods=['GET', 'POST'])
def changepassword():
    """Let the logged-in admin change their password.

    POST: verify the old password, require both new-password fields to match,
    then replace the stored SHA-256 hash (delete row + insert row, as before).
    GET: render the change-password form.
    Returns 'unauthorised' when no session is active.
    """
    try:
        if session['username']:
            # HANDLING POST DATA
            if request.method == 'POST':
                db = credentials.dbconnect()
                cursor = db.cursor()
                # Parameterized lookup -- the str.format() original allowed
                # SQL injection through the session username.
                cursor.execute("""SELECT password FROM admin WHERE username=%s""", (session['username'],))
                oldPassword = cursor.fetchall()[0][0]
                # most important, check for the old passwords to match
                if hashlib.sha256(request.form['oldPassword'].encode()).hexdigest() == oldPassword:
                    # check for new passwords to match, otherwise return back
                    if request.form['newPassword'] != request.form['newPassword2']:
                        return redirect(url_for('changepassword', passwordsNotMatch=True))
                    else:
                        # delete the row that preexists, then insert the new hash
                        cursor.execute("""DELETE FROM admin WHERE username=%s;""", (session['username'],))
                        cursor.execute(
                            """INSERT INTO admin (username, password) VALUES (%s, %s);""",
                            (session['username'], hashlib.sha256(request.form['newPassword'].encode()).hexdigest()),
                        )
                        # The debug print of the SQL statements (which leaked the
                        # new password hash to stdout) has been removed.
                        db.commit()
                        db.close()
                        return redirect(url_for('changepassword', changedPassword=True))
                # if old password match fails, return back
                else:
                    return redirect(url_for('changepassword', incorrectPassword=True))
            # IF RESPONDING TO PAGE LOAD
            if request.method == "GET":
                return render_template('adminpanel--changepassword.html', username=session['username'])
    except Exception as e:
        return 'unauthorised'
"""VIEW SUBSCRIPTIONS"""
@app.route('/adminpanel/view_subscribed', methods=['GET', 'POST'])
def view_subscribed():
    """Render the sorted list of subscribed email addresses (admin only)."""
    try:
        if session['username']:
            connection = credentials.dbconnect()
            cur = connection.cursor()
            cur.execute("""SELECT * FROM emails""")
            # First column of each row is the address itself.
            addresses = sorted(row[0] for row in cur.fetchall())
            return render_template('adminpanel--view_subscribed.html', emailList=addresses)
    except Exception:
        return 'unauthorised'
"""HOLIDAYS"""
@app.route('/adminpanel/holidays', methods=['GET'])
def holidays():
    """Show the term-date editor, backed by static/json/term_dates.json."""
    try:
        if session['username']:
            with open('static/json/term_dates.json') as handle:
                term_dates = json.load(handle)
            if request.method == 'GET':
                return render_template('adminpanel--holidays.html', data=term_dates)
    except Exception:
        return 'unauthorised'
@app.route('/change_holidays', methods=['POST'])
def change_holidays():
    """Update one term's start/end dates from the admin holidays form.

    Any validation failure (non-integer term, term outside 1-4, dates not in
    'day/month' form) raises and falls through to the except branch, which
    redirects back with error=True.
    """
    if request.method == 'POST':
        try:
            # check number (int() raises ValueError on bad input)
            term = int(request.form['term'])
            # check between 1/4
            if not (term > 0 and term <= 4):
                raise Exception
            start_date = request.form['start_date']
            end_date = request.form['end_date']
            # Dates must contain exactly one '/' (day/month format).
            if not (len(start_date.split('/')) == 2 and len(end_date.split('/')) == 2):
                raise Exception
            admintools.change_termdates(int(term), dateStart=start_date, dateEnd=end_date)
            with open('static/json/term_dates.json') as jsonfile:
                data = json.load(jsonfile)
            return redirect(url_for('holidays', error=False, data=data))
        except:
            # Reload current dates so the holidays view still renders.
            with open('static/json/term_dates.json') as jsonfile:
                data = json.load(jsonfile)
            return redirect(url_for('holidays', error=True, data=data))
"""DISABLE"""
@app.route('/adminpanel/disable', methods=['GET', 'POST'])
def disable():
    """Manual on/off switch for the app, persisted in app_status.json."""
    if request.method == 'POST':
        # 'w+' truncates the status file before the new flag is written.
        # NOTE(review): an unrecognized form value leaves the file empty -- confirm
        # the form only ever submits 'Turn off' / 'Turn on'.
        with open('static/json/app_status.json', 'w+') as f:
            print(request.form['result'])
            if request.form['result'] == 'Turn off':
                f.write('{"appisON": false}')
                return render_template('adminpanel--disable.html', appON=False)
            elif request.form['result'] == 'Turn on':
                f.write('{"appisON": true}')
                return render_template('adminpanel--disable.html', appON=True)
    else:
        # GET: just reflect the currently persisted status.
        if admintools.is_ON_declaredbyuser():
            return render_template('adminpanel--disable.html', appON=True)
        else:
            return render_template('adminpanel--disable.html', appON=False)
"""
Index/Home page
- Subscription field
- Vector from vecteezy
- Introductory information to the app
"""
@app.route("/", methods=['GET', 'POST'])
def home():
    """Landing page.

    GET: render the home page with a rounded-up subscriber count.
    POST: validate and store a new subscription, then send a welcome email
    and render the landing page (no redirect).
    """
    db = credentials.dbconnect()
    cursor = db.cursor()
    cursor.execute("""SELECT `subscription_status` FROM emails""")
    results = [i[0] for i in cursor.fetchall()]
    # Round the subscriber count up to the next multiple of ten for display.
    num_students = ((len(results) // 10) * 10) + 10
    # IF RESPONDING TO REGULAR GET REQUEST
    if request.method == 'GET':
        return render_template('home.html', num_students=num_students)
    # IF RESPONDING TO SUBSCRIBE POST REQUEST
    if request.method == "POST":
        email = request.form['email']
        #uncomment for disable
        #return render_template('home.html', num_students=num_students, disabled=True)
        # BLANK ENTRY serverside validate
        if len(email) == 0:
            return render_template('home.html', num_students=num_students, empty=True)
        # Connect to database and fetch all existing addresses
        db = credentials.dbconnect()
        cursor = db.cursor()
        cursor.execute("""SELECT * FROM emails""")
        results = [i[0] for i in cursor.fetchall()]
        # SUBSCRIBING TWICE serverside validate
        if email in results:
            db.commit()
            db.close()
            return render_template('home.html', num_students=num_students, email=email, duplicate=True)
        # BLACKLIST check (the else below pairs with this if, as before)
        if email in email_groups:
            return render_template('home.html', num_students=num_students, email=email, blocked=True)
        # All serverside validation PASSED successfully
        else:
            # Parameterized INSERT -- the str.format() original allowed SQL
            # injection through the submitted email address.
            cursor.execute(
                """INSERT INTO emails(emails, subscription_status, unsubscribe_code) VALUES (%s, 1, %s)""",
                (email, '{:05}'.format(random.randint(0, 10000))),
            )
            db.commit()
            db.close()
            # Send them welcome message
            newsletter.send_newsletter_to(email)
            # Renders the welcome page -- DOES NOT REDIRECT
            return render_template('landing.html', email=email)
"""
About page
- How to use it
- How it works
- Who made it
"""
@app.route('/about')
def about():
    """Static 'about' page."""
    return render_template('about.html')
"""
FAQ
- All the questions people ask
- Contact details of the administrator
"""
@app.route('/faq')
def faq():
    """Static FAQ page."""
    return render_template('faq.html')
"""
Notices
- ***SECRET PAGE ***
- Testing environment with the email newsletter
"""
@app.route('/notices')
def notices():
    """Unlinked page used to preview the newsletter email template."""
    return render_template('emailformatter.html')
"""
Unsubscribe
- Unsubscribe option
"""
@app.route('/unsubscribe', methods=['GET', 'POST'])
def unsubscribe():
    """Unsubscribe entry page.

    GET: show the form. POST: validate the submitted address and, when it is
    a known subscriber, hand off to the code-confirmation step.
    """
    if request.method == "GET":
        return render_template('unsub.html')
    if request.method == "POST":
        # return render_template('unsub.html', disabled=True) # uncomment for disable
        address = request.form['email']
        # Pull the current subscriber list for validation.
        connection = credentials.dbconnect()
        cur = connection.cursor()
        cur.execute("""SELECT * FROM emails""")
        subscribers = [row[0] for row in cur.fetchall()]
        if not address:
            # Blank submission: treated the same as an unknown address.
            return render_template('unsub.html', subscribed='False')
        elif address not in subscribers:
            connection.commit()
            connection.close()
            return render_template('unsub.html', subscribed='False')
        else:
            # Known subscriber: redirect to the confirmation page with the email.
            return redirect(url_for('confirm_unsubscribe', email=address))
"""
Confirm Unsubscribe
- Enter correct code
- Prevents unwanted unsubscription
"""
@app.route('/confirm_unsubscribe', methods=['GET', 'POST'])
def confirm_unsubscribe():
    """Second unsubscribe step: email a code on GET, verify it on POST.

    Expects the target address in the `email` query parameter; deletes the
    subscriber row when the submitted code matches.
    """
    email = request.args['email']
    db = credentials.dbconnect()
    cursor = db.cursor()
    # Parameterized lookup -- the str.format() original allowed SQL injection
    # through the email query parameter.
    cursor.execute("""SELECT * FROM emails WHERE emails=%s """, (email,))
    # Column 2 holds the per-subscriber unsubscribe code.
    unsubscribe_code = cursor.fetchall()[0][2]
    # Initial page load: mail the confirmation code to the subscriber.
    if request.method == 'GET':
        newsletter.send_code(email, unsubscribe_code)
        return render_template('confirm_unsubscribe.html', email=email)
    # CODE CHECK - POST REQUEST
    if request.method == 'POST':
        details = request.form
        # If it matches the code -- DELETE QUERY (parameterized, see above)
        if details['unsubscribe_code'] == unsubscribe_code:
            cursor.execute("""DELETE FROM emails WHERE emails=%s """, (email,))
            db.commit()
            db.close()
            # FAREWELL page render
            return render_template('farewell.html')
        # Otherwise, re-request
        else:
            return render_template('confirm_unsubscribe.html', wrong='False')
@app.route('/cronjob', methods=['GET'])
def cronjob():
    """Health-check endpoint for an external cron/uptime pinger."""
    return 'OK'
@app.route('/testing', methods=['GET'])
def testing():
    """Dev helper: render the unsubscribe-confirmation template with dummy data."""
    return render_template("confirm_unsubscribe.html", email='anyone')
@app.route('/tryitout', methods=['GET'])
def tryitout():
    """Dev helper: render the newsletter email template."""
    return render_template("emailformatter.html")
if __name__ == "__main__":
    # Debug server for local development only; use a WSGI server in production.
    app.run(debug=True)
|
en
| 0.525818
|
server.py - Serves the Flask web application for the website ADMIN login page If SUBMITTING username and password # Get database SELECT * FROM admin # Get submitted details # Password handling IF first loading the page ADMIN PANEL page - Change holiday intervals - Add days of relief - Remove days of relief - Create new admin account - Change password - Bulk add emails - View email list # IF LOGGED IN SELECT * FROM emails SELECT * FROM statistics # Define statistical variables # ELSE TAKE BACK TO ADMIN LOGOUT method # remove the username from the session if it is there # Connect to database and fetch all SELECT * FROM emails # Boolean for Valid Email # BLANK ENTRY serverside validate # SUBSCRIBING TWICE serverside validate # All serverside validation PASSED successfully # Add email to database INSERT INTO emails(emails, subscription_status, unsubscribe_code) VALUES ('{}', 1, '{:05}' ) # Send them welcome message # newsletter.send_newsletter_to(email) # Renders the welcome page -- DOES NOT REDIRECT DELETE email method DELETE FROM emails WHERE emails='{}' CHANGE PASSWORD method # HANDLING POST DATA SELECT password FROM admin WHERE username='{}' # most important, check for the oldpasswords to match # check for newpasswords to match, otherwise return back # delete the password that preexists DELETE FROM admin WHERE username='{username}'; # add in the new password INSERT INTO admin (username, password) VALUES ('{username}', '{passwordHash}'); # commits the changes # if old password match fails, return back # IF RESPONDING TO PAGE LOAD VIEW SUBSCRIPTIONS SELECT * FROM emails HOLIDAYS # check number # check between 1/4 DISABLE Index/Home page - Subscription field - Vector from vecteezy - Introductory information to the app SELECT `subscription_status` FROM emails IF RESPONDING TO REGULAR GET REQUEST IF RESPONDING TO SUBSCRIBE POST REQUEST # Define INITIAL VARIABLES #uncomment for disable #return render_template('home.html', num_students=num_students, disabled=True) # 
Boolean for Valid Email # is_valid = validate_email(email_address=email, \ # check_regex=True, check_mx=True, \ # smtp_timeout=2, dns_timeout=2, use_blacklist=True) # BLANK ENTRY serverside validate # # INVALID EMAIL serverside validate # elif not is_valid: # return render_template('home.html', num_students=num_students,invalid_email=True) # Connect to database and fetch all SELECT * FROM emails # SUBSCRIBING TWICE serverside validate # All serverside validation PASSED successfully # Add email to database INSERT INTO emails(emails, subscription_status, unsubscribe_code) VALUES ('{}', 1, '{:05}' ) # Send them welcome message # Renders the welcome page -- DOES NOT REDIRECT About page - How to use it - How it works - Who made it FAQ - All the questions people ask - Contact details of the administrator Notices - ***SECRET PAGE *** - Testing environment with the email newsletter Unsubscribe - Unsubscribe option IF JUST LOADING THE PAGE OTHERWISE RESPONDING TO UNSUBSCRIBE POST REQUEST # return render_template('unsub.html', disabled=True) # uncomment for disable # LOGIN TO DB SELECT * FROM emails # BLANK ENTRY serverside validate # FALSE UNSUBSCRIBE serverside check # If all serverside checking has PASSED # REDIRECT to the confirm unsubscribe page, with given email Confirm Unsubscribe - Enter correct code - Prevents unwanted unsubscription SELECT * FROM emails WHERE emails='{}' IF THE PAGE IS LOADED WITH INITITALLY, SEND AN EMAIL TO CODE CHECK - POST REQUEST # If it matches the code -- DELETE QUERY DELETE FROM emails WHERE emails='{}' # FAREWELL page render # Otherwise, re-request
| 2.768677
| 3
|
appengine/swarming/swarming_bot/proto_bot/bots_pb2_grpc.py
|
stefb965/luci-py
| 0
|
6626797
|
<gh_stars>0
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
import bots_pb2 as bots__pb2
import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2
class BotsStub(object):
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.CreateBotSession = channel.unary_unary(
'/google.devtools.remoteworkers.v1test2.Bots/CreateBotSession',
request_serializer=bots__pb2.CreateBotSessionRequest.SerializeToString,
response_deserializer=bots__pb2.BotSession.FromString,
)
self.UpdateBotSession = channel.unary_unary(
'/google.devtools.remoteworkers.v1test2.Bots/UpdateBotSession',
request_serializer=bots__pb2.UpdateBotSessionRequest.SerializeToString,
response_deserializer=bots__pb2.BotSession.FromString,
)
self.PostBotEventTemp = channel.unary_unary(
'/google.devtools.remoteworkers.v1test2.Bots/PostBotEventTemp',
request_serializer=bots__pb2.PostBotEventTempRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
class BotsServicer(object):
def CreateBotSession(self, request, context):
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateBotSession(self, request, context):
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def PostBotEventTemp(self, request, context):
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_BotsServicer_to_server(servicer, server):
rpc_method_handlers = {
'CreateBotSession': grpc.unary_unary_rpc_method_handler(
servicer.CreateBotSession,
request_deserializer=bots__pb2.CreateBotSessionRequest.FromString,
response_serializer=bots__pb2.BotSession.SerializeToString,
),
'UpdateBotSession': grpc.unary_unary_rpc_method_handler(
servicer.UpdateBotSession,
request_deserializer=bots__pb2.UpdateBotSessionRequest.FromString,
response_serializer=bots__pb2.BotSession.SerializeToString,
),
'PostBotEventTemp': grpc.unary_unary_rpc_method_handler(
servicer.PostBotEventTemp,
request_deserializer=bots__pb2.PostBotEventTempRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'google.devtools.remoteworkers.v1test2.Bots', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
import bots_pb2 as bots__pb2
import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2
class BotsStub(object):
    # Generated client stub for the
    # google.devtools.remoteworkers.v1test2.Bots service (do not edit by hand).

    def __init__(self, channel):
        """Constructor.
        Args:
          channel: A grpc.Channel.
        """
        # Each attribute below is a unary-unary callable bound to one RPC.
        self.CreateBotSession = channel.unary_unary(
            '/google.devtools.remoteworkers.v1test2.Bots/CreateBotSession',
            request_serializer=bots__pb2.CreateBotSessionRequest.SerializeToString,
            response_deserializer=bots__pb2.BotSession.FromString,
        )
        self.UpdateBotSession = channel.unary_unary(
            '/google.devtools.remoteworkers.v1test2.Bots/UpdateBotSession',
            request_serializer=bots__pb2.UpdateBotSessionRequest.SerializeToString,
            response_deserializer=bots__pb2.BotSession.FromString,
        )
        # Temporary event-reporting RPC; returns google.protobuf.Empty.
        self.PostBotEventTemp = channel.unary_unary(
            '/google.devtools.remoteworkers.v1test2.Bots/PostBotEventTemp',
            request_serializer=bots__pb2.PostBotEventTempRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
class BotsServicer(object):
    # Generated server-side base class for the Bots service; subclass and
    # override the methods below to implement the service.

    def CreateBotSession(self, request, context):
        # Default stub: reports UNIMPLEMENTED to the client until overridden.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def UpdateBotSession(self, request, context):
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def PostBotEventTemp(self, request, context):
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_BotsServicer_to_server(servicer, server):
  """Register `servicer`'s three RPC handlers on a grpc `server` instance."""
  # Map RPC method name -> handler wrapping the servicer method plus the
  # request deserializer / response serializer for that method.
  rpc_method_handlers = {
      'CreateBotSession': grpc.unary_unary_rpc_method_handler(
          servicer.CreateBotSession,
          request_deserializer=bots__pb2.CreateBotSessionRequest.FromString,
          response_serializer=bots__pb2.BotSession.SerializeToString,
      ),
      'UpdateBotSession': grpc.unary_unary_rpc_method_handler(
          servicer.UpdateBotSession,
          request_deserializer=bots__pb2.UpdateBotSessionRequest.FromString,
          response_serializer=bots__pb2.BotSession.SerializeToString,
      ),
      'PostBotEventTemp': grpc.unary_unary_rpc_method_handler(
          servicer.PostBotEventTemp,
          request_deserializer=bots__pb2.PostBotEventTempRequest.FromString,
          response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
      ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'google.devtools.remoteworkers.v1test2.Bots', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
|
en
| 0.605688
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! Constructor. Args: channel: A grpc.Channel.
| 1.805808
| 2
|
web/src/additional_settings/jwt_settings.py
|
bandirom/django-blog
| 1
|
6626798
|
from datetime import timedelta
from os import environ
# Cookie names dj-rest-auth uses for the refresh and access JWT tokens.
JWT_AUTH_REFRESH_COOKIE = 'refresh'
JWT_AUTH_COOKIE = 'jwt-auth'
# Authenticate the REST API with JWTs; session login on the API is disabled.
REST_USE_JWT = True
REST_SESSION_LOGIN = False
# Allow credentials (cookies) on cross-origin requests.
CORS_ALLOW_CREDENTIALS = True
# djangorestframework-simplejwt configuration.
SIMPLE_JWT = {
    'ACCESS_TOKEN_LIFETIME': timedelta(days=1),
    'REFRESH_TOKEN_LIFETIME': timedelta(days=2),
    # Refreshing does not rotate the refresh token; rotated tokens would be
    # blacklisted if rotation were turned on.
    'ROTATE_REFRESH_TOKENS': False,
    'BLACKLIST_AFTER_ROTATION': True,
    'UPDATE_LAST_LOGIN': False,
    # Symmetric HMAC signing keyed on the SECRET_KEY environment variable.
    'ALGORITHM': 'HS256',
    'SIGNING_KEY': environ.get('SECRET_KEY'),
    'VERIFYING_KEY': None,
    'AUDIENCE': None,
    'ISSUER': None,
    # Tokens arrive as "Authorization: Bearer <token>".
    'AUTH_HEADER_TYPES': ('Bearer',),
    'AUTH_HEADER_NAME': 'HTTP_AUTHORIZATION',
    # Claims identifying the user and describing the token.
    'USER_ID_FIELD': 'id',
    'USER_ID_CLAIM': 'user_id',
    'AUTH_TOKEN_CLASSES': ('rest_framework_simplejwt.tokens.AccessToken',),
    'TOKEN_TYPE_CLAIM': 'token_type',
    'JTI_CLAIM': 'jti',
    # Sliding-token settings (only used with sliding token classes).
    'SLIDING_TOKEN_REFRESH_EXP_CLAIM': 'refresh_exp',
    'SLIDING_TOKEN_LIFETIME': timedelta(days=1),
    'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=2),
}
|
from datetime import timedelta
from os import environ
# Cookie names dj-rest-auth uses for the refresh and access JWT tokens.
JWT_AUTH_REFRESH_COOKIE = 'refresh'
JWT_AUTH_COOKIE = 'jwt-auth'
# Authenticate the REST API with JWTs; session login on the API is disabled.
REST_USE_JWT = True
REST_SESSION_LOGIN = False
# Allow credentials (cookies) on cross-origin requests.
CORS_ALLOW_CREDENTIALS = True
# djangorestframework-simplejwt configuration.
SIMPLE_JWT = {
    'ACCESS_TOKEN_LIFETIME': timedelta(days=1),
    'REFRESH_TOKEN_LIFETIME': timedelta(days=2),
    # Refreshing does not rotate the refresh token; rotated tokens would be
    # blacklisted if rotation were turned on.
    'ROTATE_REFRESH_TOKENS': False,
    'BLACKLIST_AFTER_ROTATION': True,
    'UPDATE_LAST_LOGIN': False,
    # Symmetric HMAC signing keyed on the SECRET_KEY environment variable.
    'ALGORITHM': 'HS256',
    'SIGNING_KEY': environ.get('SECRET_KEY'),
    'VERIFYING_KEY': None,
    'AUDIENCE': None,
    'ISSUER': None,
    # Tokens arrive as "Authorization: Bearer <token>".
    'AUTH_HEADER_TYPES': ('Bearer',),
    'AUTH_HEADER_NAME': 'HTTP_AUTHORIZATION',
    # Claims identifying the user and describing the token.
    'USER_ID_FIELD': 'id',
    'USER_ID_CLAIM': 'user_id',
    'AUTH_TOKEN_CLASSES': ('rest_framework_simplejwt.tokens.AccessToken',),
    'TOKEN_TYPE_CLAIM': 'token_type',
    'JTI_CLAIM': 'jti',
    # Sliding-token settings (only used with sliding token classes).
    'SLIDING_TOKEN_REFRESH_EXP_CLAIM': 'refresh_exp',
    'SLIDING_TOKEN_LIFETIME': timedelta(days=1),
    'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=2),
}
|
none
| 1
| 1.977694
| 2
|
|
valueIteration.py
|
RajatBhageria/Reinforcement-Learning
| 0
|
6626799
|
<reponame>RajatBhageria/Reinforcement-Learning
from maze import *
import numpy as np
from value_plot import value_plot
numStates = 112
numActions = 4
discount = 0.9
def valueIteration():
    """Estimate the Maze MDP by sampling, solve it with value iteration,
    then compute, plot and save the greedy Q-values.

    Side effects: renders a value plot and writes 'QValues.npy'.
    Fixes over the original: Python-2-only ``print`` statement replaced
    with the cross-version ``print(...)`` call; the value array is created
    as float (the original int array silently truncated value estimates);
    the hard-coded 112 now uses ``numStates``; shadowed loop index removed.
    """
    env = Maze()
    env.reset()  # reset the environment; the initial state is not used here
    # Empirical MDP model, estimated by repeatedly sampling env.step().
    transitions = np.zeros((numStates, numActions, numStates), dtype='float')
    rewards = np.zeros((numStates, numActions))
    numIters = 500
    for _ in range(numIters):
        for s in range(numStates):
            for a in range(numActions):
                # Sample one (s, a) transition from the environment.
                reward, next_state, done = env.step(s, a)
                rewards[s, a] = reward
                transitions[s, a, next_state] += 1.0
    # Normalise the counts into per-(s, a) probability distributions.
    for s in range(numStates):
        for a in range(numActions):
            transitions[s, a, :] = transitions[s, a, :] / np.sum(transitions[s, a, :])
    transitions = np.round(transitions, decimals=1)
    # Zero-initialised float value function and a random initial policy.
    values = np.zeros(numStates)
    policies = np.random.choice(a=(0, 1, 2, 3), size=(numStates,))
    # Run value-iteration passes (valueIter itself sweeps to convergence).
    for _ in range(5000):
        values, policies = valueIter(transitions, rewards, values, policies)
    # With the converged values, compute one-step look-ahead Q(s, a).
    Qvals = np.zeros((numStates, numActions))
    for s in range(numStates):
        for a in range(numActions):
            reward = rewards[s, a]
            possibleNewStates = transitions[s, a, :]
            # Only successors with non-zero probability contribute.
            indiciesSPrime = np.where(possibleNewStates > 0)[0]
            probSPrime = possibleNewStates[indiciesSPrime]
            newValue = 0.0
            for j in range(len(indiciesSPrime)):
                sPrime = indiciesSPrime[j]
                prob = probSPrime[j]
                newValue += prob * (reward + discount * values[sPrime])
            Qvals[s, a] = newValue
    print(Qvals)
    # Save and plot the Q-values.
    value_plot(Qvals, env, True, True)
    np.save('QValues', Qvals)
def valueIter(transitions, rewards, values, policies, epsilon=.03, gamma=0.9):
    """Run in-place Bellman sweeps until the value function converges.

    Args:
        transitions: (S, A, S) array; transitions[s, a, s'] = P(s' | s, a).
        rewards: (S, A) array of immediate rewards r(s, a).
        values: length-S float array of state values; updated in place.
        policies: length-S array of greedy actions; updated in place.
        epsilon: convergence threshold on the largest per-sweep change.
        gamma: discount factor (default matches the module's 0.9).

    Returns:
        The (values, policies) pair after convergence.
    """
    # Derive state/action counts from the model rather than module globals,
    # which also generalizes the function to MDPs of any size.
    S, A = rewards.shape
    # Start above epsilon so the loop body runs at least once.  The original
    # initialised delta to a list, which only compared "greater" under
    # Python 2's arbitrary cross-type ordering and raises TypeError on 3.
    delta = float('inf')
    while delta > epsilon:
        delta = 0.0
        for s in range(S):
            v = values[s]
            # Best action value so far; -inf (not 0) so states whose actions
            # are all negative-valued still get the true argmax.
            maxAVal = float('-inf')
            maxA = 0
            for action in range(A):
                reward = rewards[s, action]
                # One-step look-ahead: sum over successor states s' of
                # P(s'|s,a) * (r + gamma * V(s')).
                newValue = 0.0
                for sPrime in range(S):
                    newValue += transitions[s, action, sPrime] * (reward + gamma * values[sPrime])
                if newValue > maxAVal:
                    maxAVal = newValue
                    maxA = action
            values[s] = maxAVal
            policies[s] = maxA
            # Largest single-state change this sweep drives convergence.
            delta = max(delta, np.abs(v - values[s]))
    return values, policies
# Script entry point: build the model from the Maze env and solve it.
if __name__ == "__main__":
    valueIteration()
|
from maze import *
import numpy as np
from value_plot import value_plot
numStates = 112
numActions = 4
discount = 0.9
def valueIteration():
    """Estimate the Maze MDP by sampling, solve it with value iteration,
    then compute, plot and save the greedy Q-values.

    Side effects: renders a value plot and writes 'QValues.npy'.
    Fixes over the original: Python-2-only ``print`` statement replaced
    with the cross-version ``print(...)`` call; the value array is created
    as float (the original int array silently truncated value estimates);
    the hard-coded 112 now uses ``numStates``; shadowed loop index removed.
    """
    env = Maze()
    env.reset()  # reset the environment; the initial state is not used here
    # Empirical MDP model, estimated by repeatedly sampling env.step().
    transitions = np.zeros((numStates, numActions, numStates), dtype='float')
    rewards = np.zeros((numStates, numActions))
    numIters = 500
    for _ in range(numIters):
        for s in range(numStates):
            for a in range(numActions):
                # Sample one (s, a) transition from the environment.
                reward, next_state, done = env.step(s, a)
                rewards[s, a] = reward
                transitions[s, a, next_state] += 1.0
    # Normalise the counts into per-(s, a) probability distributions.
    for s in range(numStates):
        for a in range(numActions):
            transitions[s, a, :] = transitions[s, a, :] / np.sum(transitions[s, a, :])
    transitions = np.round(transitions, decimals=1)
    # Zero-initialised float value function and a random initial policy.
    values = np.zeros(numStates)
    policies = np.random.choice(a=(0, 1, 2, 3), size=(numStates,))
    # Run value-iteration passes (valueIter itself sweeps to convergence).
    for _ in range(5000):
        values, policies = valueIter(transitions, rewards, values, policies)
    # With the converged values, compute one-step look-ahead Q(s, a).
    Qvals = np.zeros((numStates, numActions))
    for s in range(numStates):
        for a in range(numActions):
            reward = rewards[s, a]
            possibleNewStates = transitions[s, a, :]
            # Only successors with non-zero probability contribute.
            indiciesSPrime = np.where(possibleNewStates > 0)[0]
            probSPrime = possibleNewStates[indiciesSPrime]
            newValue = 0.0
            for j in range(len(indiciesSPrime)):
                sPrime = indiciesSPrime[j]
                prob = probSPrime[j]
                newValue += prob * (reward + discount * values[sPrime])
            Qvals[s, a] = newValue
    print(Qvals)
    # Save and plot the Q-values.
    value_plot(Qvals, env, True, True)
    np.save('QValues', Qvals)
def valueIter(transitions, rewards, values, policies, epsilon=.03, gamma=0.9):
    """Run in-place Bellman sweeps until the value function converges.

    Args:
        transitions: (S, A, S) array; transitions[s, a, s'] = P(s' | s, a).
        rewards: (S, A) array of immediate rewards r(s, a).
        values: length-S float array of state values; updated in place.
        policies: length-S array of greedy actions; updated in place.
        epsilon: convergence threshold on the largest per-sweep change.
        gamma: discount factor (default matches the module's 0.9).

    Returns:
        The (values, policies) pair after convergence.
    """
    # Derive state/action counts from the model rather than module globals,
    # which also generalizes the function to MDPs of any size.
    S, A = rewards.shape
    # Start above epsilon so the loop body runs at least once.  The original
    # initialised delta to a list, which only compared "greater" under
    # Python 2's arbitrary cross-type ordering and raises TypeError on 3.
    delta = float('inf')
    while delta > epsilon:
        delta = 0.0
        for s in range(S):
            v = values[s]
            # Best action value so far; -inf (not 0) so states whose actions
            # are all negative-valued still get the true argmax.
            maxAVal = float('-inf')
            maxA = 0
            for action in range(A):
                reward = rewards[s, action]
                # One-step look-ahead: sum over successor states s' of
                # P(s'|s,a) * (r + gamma * V(s')).
                newValue = 0.0
                for sPrime in range(S):
                    newValue += transitions[s, action, sPrime] * (reward + gamma * values[sPrime])
                if newValue > maxAVal:
                    maxAVal = newValue
                    maxA = action
            values[s] = maxAVal
            policies[s] = maxA
            # Largest single-state change this sweep drives convergence.
            delta = max(delta, np.abs(v - values[s]))
    return values, policies
# Script entry point: build the model from the Maze env and solve it.
if __name__ == "__main__":
    valueIteration()
|
en
| 0.703134
|
#get the transition proability matrix #get the reward matrix #create the transition and reward matrices #use step function to get the next state #put the reward in the next state #add one to the state transition matrix #standardize the transitions matrix #initialize the value function states as zeros #initialize the policies randomly #do value iteration #best policy found so now save all the Q values #get all the possible new states based on s and action (from policy pi) #get the s+1 state where probability != 0 #find the new value estimation for state s based on all the possible s+1 states sPrime #get the state sPrime #get the probability of that state sPrime #get the value of sPrime #add the current value to the values for the other sPrimes #save the q values # iterate over all the states # get the current value v # get all the possible new states based on s and action (from policy pi) # get the reward for state s # find the new value estimation for state s based on all the possible s+1 states sPrime # get the state sPrime # get the probability of that state sPrime # get the value of sPrime # add the current value to the values for the other sPrimes # update delta
| 3.383985
| 3
|
tests/model_options/apps.py
|
ni-ning/django
| 61,676
|
6626800
|
<reponame>ni-ning/django<gh_stars>1000+
from django.apps import AppConfig
class ModelDefaultPKConfig(AppConfig):
    # Plain app config with no default_auto_field override.
    name = 'model_options'
class ModelPKConfig(AppConfig):
    # Overrides the app's default primary-key type with SmallAutoField.
    name = 'model_options'
    default_auto_field = 'django.db.models.SmallAutoField'
class ModelPKNonAutoConfig(AppConfig):
    # Uses a non-auto field type as default_auto_field — presumably a
    # fixture to exercise Django's validation of this setting (file lives
    # under tests/); TODO confirm against the test suite.
    name = 'model_options'
    default_auto_field = 'django.db.models.TextField'
class ModelPKNoneConfig(AppConfig):
    # default_auto_field explicitly set to None.
    name = 'model_options'
    default_auto_field = None
class ModelPKNonexistentConfig(AppConfig):
    # References an auto-field class that does not exist.
    name = 'model_options'
    default_auto_field = 'django.db.models.NonexistentAutoField'
|
from django.apps import AppConfig
class ModelDefaultPKConfig(AppConfig):
    # Plain app config with no default_auto_field override.
    name = 'model_options'
class ModelPKConfig(AppConfig):
    # Overrides the app's default primary-key type with SmallAutoField.
    name = 'model_options'
    default_auto_field = 'django.db.models.SmallAutoField'
class ModelPKNonAutoConfig(AppConfig):
    # Uses a non-auto field type as default_auto_field — presumably a
    # fixture to exercise Django's validation of this setting (file lives
    # under tests/); TODO confirm against the test suite.
    name = 'model_options'
    default_auto_field = 'django.db.models.TextField'
class ModelPKNoneConfig(AppConfig):
    # default_auto_field explicitly set to None.
    name = 'model_options'
    default_auto_field = None
class ModelPKNonexistentConfig(AppConfig):
    # References an auto-field class that does not exist.
    name = 'model_options'
    default_auto_field = 'django.db.models.NonexistentAutoField'
|
none
| 1
| 1.956114
| 2
|
|
tqdm/contrib/bells.py
|
insilications/tqdm-clr
| 22,617
|
6626801
|
"""
Even more features than `tqdm.auto` (all the bells & whistles):
- `tqdm.auto`
- `tqdm.tqdm.pandas`
- `tqdm.contrib.telegram`
+ uses `${TQDM_TELEGRAM_TOKEN}` and `${TQDM_TELEGRAM_CHAT_ID}`
- `tqdm.contrib.discord`
+ uses `${TQDM_DISCORD_TOKEN}` and `${TQDM_DISCORD_CHANNEL_ID}`
"""
__all__ = ['tqdm', 'trange']
import warnings
from os import getenv
if getenv("TQDM_TELEGRAM_TOKEN") and getenv("TQDM_TELEGRAM_CHAT_ID"):
from .telegram import tqdm, trange
elif getenv("TQDM_DISCORD_TOKEN") and getenv("TQDM_DISCORD_CHANNEL_ID"):
from .discord import tqdm, trange
else:
from ..auto import tqdm, trange
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=FutureWarning)
tqdm.pandas()
|
"""
Even more features than `tqdm.auto` (all the bells & whistles):
- `tqdm.auto`
- `tqdm.tqdm.pandas`
- `tqdm.contrib.telegram`
+ uses `${TQDM_TELEGRAM_TOKEN}` and `${TQDM_TELEGRAM_CHAT_ID}`
- `tqdm.contrib.discord`
+ uses `${TQDM_DISCORD_TOKEN}` and `${TQDM_DISCORD_CHANNEL_ID}`
"""
__all__ = ['tqdm', 'trange']
import warnings
from os import getenv
if getenv("TQDM_TELEGRAM_TOKEN") and getenv("TQDM_TELEGRAM_CHAT_ID"):
from .telegram import tqdm, trange
elif getenv("TQDM_DISCORD_TOKEN") and getenv("TQDM_DISCORD_CHANNEL_ID"):
from .discord import tqdm, trange
else:
from ..auto import tqdm, trange
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=FutureWarning)
tqdm.pandas()
|
en
| 0.317032
|
Even more features than `tqdm.auto` (all the bells & whistles): - `tqdm.auto` - `tqdm.tqdm.pandas` - `tqdm.contrib.telegram` + uses `${TQDM_TELEGRAM_TOKEN}` and `${TQDM_TELEGRAM_CHAT_ID}` - `tqdm.contrib.discord` + uses `${TQDM_DISCORD_TOKEN}` and `${TQDM_DISCORD_CHANNEL_ID}`
| 1.841117
| 2
|
test/test_mongo_cache.py
|
ktaragorn/mongo_cache
| 0
|
6626802
|
import unittest
from mongo_cache.mongo_cache import MongoCache
import time
class TestMongoCache(unittest.TestCase):
    """Integration tests for MongoCache (requires a reachable MongoDB)."""
    def setUp(self):
        # Fresh cache per test: clear the collection and seed two entries.
        self.cache = MongoCache()
        self.cache.clear()
        self.cache.set("test", 1)
        self.cache.set("test1", 2)
    def test_set_get_basic(self):
        self.assertEqual(self.cache.get("test"), 1)
        self.assertEqual(self.cache.get("test1"), 2)
    def test_unset(self):
        # A removed key reads back as None.
        self.cache.unset("test1")
        self.assertEqual(self.cache.get("test1"), None)
    def test_replace(self):
        # Setting an existing key overwrites its value.
        self.assertEqual(self.cache.get("test"), 1)
        self.cache.set("test", 42)
        self.assertEqual(self.cache.get("test"), 42)
    def test_get_lambda(self):
        # get() with a callable computes and stores the value on a miss.
        self.assertEqual(self.cache.get("lambda"), None)
        self.assertEqual(self.cache.get("lambda", lambda : "lambda"), "lambda")
        self.assertEqual(self.cache.get("lambda"), "lambda")
    def test_expires_in(self):
        # expires_in=-1 means already expired: the document stays in the
        # collection until the next read, which deletes it and misses.
        self.cache.set("set", "set", expires_in= -1)
        self.cache.get("get", lambda: "get", expires_in= -1)
        self.assertTrue(self.cache.cache_collection.find_one({"_id": "set"})) # value still present
        self.assertEqual(self.cache.get("set"), None) # value deleted due to expiry
        self.assertFalse(self.cache.cache_collection.find_one({"_id": "set"}))
        self.assertEqual(self.cache.get("get"), None)
    def test_expires_at(self):
        # Same lazy-expiry behaviour using an absolute (past) timestamp.
        self.cache.set("set", "set", expires_at= time.time() -1)
        self.cache.get("get", lambda: "get", expires_at= time.time() -1)
        self.assertTrue(self.cache.cache_collection.find_one({"_id": "set"})) # value still present
        self.assertEqual(self.cache.get("set"), None) # value deleted due to expiry
        self.assertFalse(self.cache.cache_collection.find_one({"_id": "set"}))
        self.assertEqual(self.cache.get("get"), None)
    def test_complex_values(self):
        # Structured (dict) values round-trip intact.
        complex_obj = {"this": "is", "a": "complex object"}
        self.cache.set("complex", complex_obj)
        self.assertEqual(self.cache.get("complex"), complex_obj)
    def test_setitem_getitem(self):
        # Mapping-style access mirrors set()/get(); missing keys give None.
        self.cache["test2"] = 1
        self.assertEqual(self.cache["test2"], 1)
        self.assertEqual(self.cache["test3"], None)
|
import unittest
from mongo_cache.mongo_cache import MongoCache
import time
class TestMongoCache(unittest.TestCase):
    """Integration tests for MongoCache (requires a reachable MongoDB)."""
    def setUp(self):
        # Fresh cache per test: clear the collection and seed two entries.
        self.cache = MongoCache()
        self.cache.clear()
        self.cache.set("test", 1)
        self.cache.set("test1", 2)
    def test_set_get_basic(self):
        self.assertEqual(self.cache.get("test"), 1)
        self.assertEqual(self.cache.get("test1"), 2)
    def test_unset(self):
        # A removed key reads back as None.
        self.cache.unset("test1")
        self.assertEqual(self.cache.get("test1"), None)
    def test_replace(self):
        # Setting an existing key overwrites its value.
        self.assertEqual(self.cache.get("test"), 1)
        self.cache.set("test", 42)
        self.assertEqual(self.cache.get("test"), 42)
    def test_get_lambda(self):
        # get() with a callable computes and stores the value on a miss.
        self.assertEqual(self.cache.get("lambda"), None)
        self.assertEqual(self.cache.get("lambda", lambda : "lambda"), "lambda")
        self.assertEqual(self.cache.get("lambda"), "lambda")
    def test_expires_in(self):
        # expires_in=-1 means already expired: the document stays in the
        # collection until the next read, which deletes it and misses.
        self.cache.set("set", "set", expires_in= -1)
        self.cache.get("get", lambda: "get", expires_in= -1)
        self.assertTrue(self.cache.cache_collection.find_one({"_id": "set"})) # value still present
        self.assertEqual(self.cache.get("set"), None) # value deleted due to expiry
        self.assertFalse(self.cache.cache_collection.find_one({"_id": "set"}))
        self.assertEqual(self.cache.get("get"), None)
    def test_expires_at(self):
        # Same lazy-expiry behaviour using an absolute (past) timestamp.
        self.cache.set("set", "set", expires_at= time.time() -1)
        self.cache.get("get", lambda: "get", expires_at= time.time() -1)
        self.assertTrue(self.cache.cache_collection.find_one({"_id": "set"})) # value still present
        self.assertEqual(self.cache.get("set"), None) # value deleted due to expiry
        self.assertFalse(self.cache.cache_collection.find_one({"_id": "set"}))
        self.assertEqual(self.cache.get("get"), None)
    def test_complex_values(self):
        # Structured (dict) values round-trip intact.
        complex_obj = {"this": "is", "a": "complex object"}
        self.cache.set("complex", complex_obj)
        self.assertEqual(self.cache.get("complex"), complex_obj)
    def test_setitem_getitem(self):
        # Mapping-style access mirrors set()/get(); missing keys give None.
        self.cache["test2"] = 1
        self.assertEqual(self.cache["test2"], 1)
        self.assertEqual(self.cache["test3"], None)
|
en
| 0.93973
|
# value still present # value deleted due to expiry # value still present # value deleted due to expiry
| 2.84459
| 3
|
models.py
|
bogdanovdya/Clients_Lifetime
| 2
|
6626803
|
<reponame>bogdanovdya/Clients_Lifetime
from app import db
class PortalAuth(db.Model):
    """Per-portal OAuth credentials plus an event counter.

    The removed ``__init__`` override only delegated to ``super().__init__``
    with unchanged arguments — a no-op; the inherited constructor behaves
    identically.
    """
    # Portal address is the primary key: one credential row per portal.
    portal = db.Column(db.CHAR(255), primary_key=True)
    access_token = db.Column(db.CHAR(255))
    refresh_token = db.Column(db.CHAR(255))
    # Count of events handled for this portal; starts at zero.
    event_counter = db.Column(db.INTEGER, default=0)
|
from app import db
class PortalAuth(db.Model):
    """Per-portal OAuth credentials plus an event counter.

    The removed ``__init__`` override only delegated to ``super().__init__``
    with unchanged arguments — a no-op; the inherited constructor behaves
    identically.
    """
    # Portal address is the primary key: one credential row per portal.
    portal = db.Column(db.CHAR(255), primary_key=True)
    access_token = db.Column(db.CHAR(255))
    refresh_token = db.Column(db.CHAR(255))
    # Count of events handled for this portal; starts at zero.
    event_counter = db.Column(db.INTEGER, default=0)
|
none
| 1
| 2.024983
| 2
|
|
exercises/storage.py
|
rattletat/homework-server
| 1
|
6626804
|
<reponame>rattletat/homework-server
from django.core.files.storage import FileSystemStorage
from django.conf import settings
import os
class OverwriteStorage(FileSystemStorage):
    """File-system storage whose uploads replace same-named files."""

    def get_available_name(self, name, max_length=None):
        # Behave like a real file system: instead of letting Django pick a
        # suffixed alternative name, delete the existing file and reuse
        # the requested name verbatim.
        existing = os.path.join(settings.MEDIA_ROOT, name)
        if self.exists(name):
            os.remove(existing)
        return name
|
from django.core.files.storage import FileSystemStorage
from django.conf import settings
import os
class OverwriteStorage(FileSystemStorage):
    """File-system storage whose uploads replace same-named files."""

    def get_available_name(self, name, max_length=None):
        # Behave like a real file system: instead of letting Django pick a
        # suffixed alternative name, delete the existing file and reuse
        # the requested name verbatim.
        existing = os.path.join(settings.MEDIA_ROOT, name)
        if self.exists(name):
            os.remove(existing)
        return name
|
en
| 0.963175
|
# If the filename already exists, remove it as if it was a true file system
| 2.274709
| 2
|
source/sphinx_extensions/checkpoint.py
|
antgonza/docs
| 1
|
6626805
|
<reponame>antgonza/docs<gh_stars>1-10
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2019, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from docutils import nodes
from sphinx.util.docutils import SphinxDirective
class CheckpointAdmonition(nodes.Admonition, nodes.Element):
    # Custom doctree node; rendered via the admonition visitors (see setup).
    pass
class CheckpointDirective(SphinxDirective):
    # Implements a ``checkpoint`` directive whose body becomes an admonition.
    has_content = True
    def run(self):
        # Unique anchor id per checkpoint, numbered via the build environment.
        target_id = 'checkpoint-%d' % self.env.new_serialno('checkpoint')
        target_node = nodes.target('', '', ids=[target_id])
        checkpoint_node = CheckpointAdmonition(self.content)
        checkpoint_node += nodes.title(text='Checkpoint')
        checkpoint_node['classes'] += ['checkpoint']
        # Parse the directive body as nested reST into the admonition node.
        self.state.nested_parse(self.content, self.content_offset,
                                checkpoint_node)
        return [target_node, checkpoint_node]
def setup(app):
    """Sphinx extension entry point: register the node, its HTML visitors
    and the ``checkpoint`` directive."""
    app.add_node(CheckpointAdmonition,
                 html=(lambda s, n: s.visit_admonition(n),
                       lambda s, n: s.depart_admonition(n)))
    app.add_directive('checkpoint', CheckpointDirective)
    return {'version': '0.0.2'}
|
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2019, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from docutils import nodes
from sphinx.util.docutils import SphinxDirective
class CheckpointAdmonition(nodes.Admonition, nodes.Element):
    # Custom doctree node; rendered via the admonition visitors (see setup).
    pass
class CheckpointDirective(SphinxDirective):
    # Implements a ``checkpoint`` directive whose body becomes an admonition.
    has_content = True
    def run(self):
        # Unique anchor id per checkpoint, numbered via the build environment.
        target_id = 'checkpoint-%d' % self.env.new_serialno('checkpoint')
        target_node = nodes.target('', '', ids=[target_id])
        checkpoint_node = CheckpointAdmonition(self.content)
        checkpoint_node += nodes.title(text='Checkpoint')
        checkpoint_node['classes'] += ['checkpoint']
        # Parse the directive body as nested reST into the admonition node.
        self.state.nested_parse(self.content, self.content_offset,
                                checkpoint_node)
        return [target_node, checkpoint_node]
def setup(app):
    """Sphinx extension entry point: register the node, its HTML visitors
    and the ``checkpoint`` directive."""
    app.add_node(CheckpointAdmonition,
                 html=(lambda s, n: s.visit_admonition(n),
                       lambda s, n: s.depart_admonition(n)))
    app.add_directive('checkpoint', CheckpointDirective)
    return {'version': '0.0.2'}
|
en
| 0.579473
|
# ---------------------------------------------------------------------------- # Copyright (c) 2016-2019, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ----------------------------------------------------------------------------
| 1.999337
| 2
|
MFLH/mflh/train.py
|
dickie0/mflh-master
| 0
|
6626806
|
import os
import argparse
import warnings
import utils.data_load.image as dataset
import utils.model.mflh as model
# Silence deprecation chatter and TensorFlow C++ logging.
warnings.filterwarnings("ignore", category=DeprecationWarning)
warnings.filterwarnings("ignore", category=FutureWarning)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
# Command-line configuration for MFLH training.
param = argparse.ArgumentParser(description='MFLH')
param.add_argument('--gpus', default='0', type=str)
param.add_argument('--dataset', default='cifar10', type=str)
param.add_argument('-b', '--batch-size', default=64, type=int)
param.add_argument('-vb', '--test-batch-size', default=16, type=int)
param.add_argument('--lr', default=1e-5, type=float)
param.add_argument('--epochs', default=100, type=int)
param.add_argument('--output-dim', default=12, type=int)
param.add_argument('--scale-factor', default=8, type=int)
param.add_argument('--triplet-margin', default=30, type=float)
param.add_argument('--strategy', default='all', choices=['hard', 'all'])
# Fix: these four options had type=int with fractional defaults, so any
# value passed on the command line was truncated to an integer.
param.add_argument('--alpha', default=0.0, type=float)
param.add_argument('--eta', default=0.8, type=float)
param.add_argument('--gamma', default=0.1, type=float)
param.add_argument('--size', default=227, type=int)
param.add_argument('--decay-step', default=20, type=int)
param.add_argument('--decay-rate', default=0.96, type=float)
param.add_argument('--model-weights', type=str, default='../')
param.add_argument('--data-dir', default="../", type=str)
param.add_argument('--output-dir', default='../', type=str)
param.add_argument('--pre-model-path', default='../', type=str)
param.add_argument('--dist', default='euclidean2')
args = param.parse_args()
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpus
# Dataset-specific label counts and database sizes.
class_dims = {'cifar10': 10, 'nuswide_81': 21}
Nums = {'cifar10': 59000, 'nuswide_81': 5000}
args.number = Nums[args.dataset]
args.label_dim = class_dims[args.dataset]
# File lists for the train / test / retrieval-database splits.
args.img_train = os.path.join(args.data_dir, args.dataset, "train.txt")
args.img_test = os.path.join(args.data_dir, args.dataset, "test.txt")
args.img_database = os.path.join(args.data_dir, args.dataset, "database.txt")
data_root = os.path.join(args.data_dir, args.dataset)
query_img, database_img = dataset.import_test(data_root, args.img_test, args.img_database, args)
train_img = dataset.import_train(data_root, args.img_train, args)
# Train the model and record where the resulting weights were written.
model_weights = model.train(train_img, database_img, query_img, args)
args.model_weights = model_weights
|
import os
import argparse
import warnings
import utils.data_load.image as dataset
import utils.model.mflh as model
# Silence deprecation chatter and TensorFlow C++ logging.
warnings.filterwarnings("ignore", category=DeprecationWarning)
warnings.filterwarnings("ignore", category=FutureWarning)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
# Command-line configuration for MFLH training.
param = argparse.ArgumentParser(description='MFLH')
param.add_argument('--gpus', default='0', type=str)
param.add_argument('--dataset', default='cifar10', type=str)
param.add_argument('-b', '--batch-size', default=64, type=int)
param.add_argument('-vb', '--test-batch-size', default=16, type=int)
param.add_argument('--lr', default=1e-5, type=float)
param.add_argument('--epochs', default=100, type=int)
param.add_argument('--output-dim', default=12, type=int)
param.add_argument('--scale-factor', default=8, type=int)
param.add_argument('--triplet-margin', default=30, type=float)
param.add_argument('--strategy', default='all', choices=['hard', 'all'])
# Fix: these four options had type=int with fractional defaults, so any
# value passed on the command line was truncated to an integer.
param.add_argument('--alpha', default=0.0, type=float)
param.add_argument('--eta', default=0.8, type=float)
param.add_argument('--gamma', default=0.1, type=float)
param.add_argument('--size', default=227, type=int)
param.add_argument('--decay-step', default=20, type=int)
param.add_argument('--decay-rate', default=0.96, type=float)
param.add_argument('--model-weights', type=str, default='../')
param.add_argument('--data-dir', default="../", type=str)
param.add_argument('--output-dir', default='../', type=str)
param.add_argument('--pre-model-path', default='../', type=str)
param.add_argument('--dist', default='euclidean2')
args = param.parse_args()
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpus
# Dataset-specific label counts and database sizes.
class_dims = {'cifar10': 10, 'nuswide_81': 21}
Nums = {'cifar10': 59000, 'nuswide_81': 5000}
args.number = Nums[args.dataset]
args.label_dim = class_dims[args.dataset]
# File lists for the train / test / retrieval-database splits.
args.img_train = os.path.join(args.data_dir, args.dataset, "train.txt")
args.img_test = os.path.join(args.data_dir, args.dataset, "test.txt")
args.img_database = os.path.join(args.data_dir, args.dataset, "database.txt")
data_root = os.path.join(args.data_dir, args.dataset)
query_img, database_img = dataset.import_test(data_root, args.img_test, args.img_database, args)
train_img = dataset.import_train(data_root, args.img_train, args)
# Train the model and record where the resulting weights were written.
model_weights = model.train(train_img, database_img, query_img, args)
args.model_weights = model_weights
|
none
| 1
| 1.896192
| 2
|
|
tests/test_selenium_product_check.py
|
hoelsner/product-database
| 38
|
6626807
|
"""
Test suite for the selenium test cases
"""
import os
import pytest
import time
from django.urls import reverse
from tests import BaseSeleniumTest
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
@pytest.mark.online
@pytest.mark.selenium
class TestBulkEolCheckFunction(BaseSeleniumTest):
    def test_optional_product_migration_entry(self, browser, liveserver):
        """The "Migration source" field appears on the Product Check page
        only after the user enables it in their profile settings."""
        self.api_helper.drop_all_data(liveserver)
        self.api_helper.load_base_test_data(liveserver)
        # open the new Product Check page
        browser.get(liveserver + reverse("productdb:create-product_check"))
        browser.find_element_by_id("navbar_login").click()
        homepage_message = "New Product Check"
        self.login_user(browser, self.API_USERNAME, self.API_PASSWORD, homepage_message)
        # the page contains a text field, where the product IDs must be entered
        expected_text = "On this page, you can execute a bulk Product check of multiple Products against the local " \
                        "database. Please enter a list of Product IDs in the following text field separated by line " \
                        "breaks, e.g."
        self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_text)
        # before the profile flag is set, the field must be absent
        assert "Migration source" not in browser.find_element_by_tag_name('body').text
        # enable optional product migration source selection
        browser.find_element_by_id("navbar_loggedin").click()
        browser.find_element_by_id("navbar_loggedin_user_profile").click()
        self.wait_for_text_to_be_displayed_in_body_tag(browser, "Edit User Profile")
        browser.find_element_by_id("id_choose_migration_source").click()
        browser.find_element_by_id("submit").click()
        time.sleep(3)
        # open the bulk eol check page
        browser.get(liveserver + reverse("productdb:create-product_check"))
        # the page contains a text field, where the product IDs must be entered
        expected_text = "On this page, you can execute a bulk Product check of multiple Products against the local " \
                        "database. Please enter a list of Product IDs in the following text field separated by line " \
                        "breaks, e.g."
        self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_text)
        # with the flag set, the field must now be rendered
        assert "Migration source" in browser.find_element_by_tag_name('body').text
        # end session
        browser.get(liveserver + reverse("logout"))
def test_with_valid_query(self, browser, liveserver, test_download_dir):
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
# open the new Product Check page
browser.get(liveserver + reverse("productdb:create-product_check"))
# the page contains a text field, where the product IDs must be entered
expected_text = "On this page, you can execute a bulk Product check of multiple Products against the local " \
"database. Please enter a list of Product IDs in the following text field separated by line " \
"breaks, e.g."
self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_text)
# enter the query and submit (whitespace is stripped)
sample_eol_query = """WS-C2960-24LC-S
WS-C2960-24LC-S
WS-C2960-24LC-S
WS-C2960-24LC-S
WS-C2960-24LT-L
WS-C2960-24PC-S
WS-C2960X-24PD-L
WS-C2960X-24PD-L
WS-C2960X-24PD-L
MOH
WS-C2960-48PST-S
WS-C2960-24TC-L
MOH
WS-C2960-24TC-S
WS-C2960-24TT-L"""
browser.find_element_by_id("id_name").send_keys("Test")
browser.find_element_by_id("id_input_product_ids").send_keys(sample_eol_query)
browser.find_element_by_id("submit").click()
# verify result within the product summary table
expected_product_summary_row = "Cisco Systems WS-C2960-24LC-S 4 End of Support No"
expected_not_found_query = "MOH 2 Not found in Database --- --- ---"
# test that the Vendor Bulletin is not visible by default
assert "Vendor Bulletin" not in browser.find_element_by_tag_name("body").text
table = browser.find_element_by_class_name("table")
rows = table.find_elements_by_tag_name('tr')
assert expected_product_summary_row in [row.text for row in rows]
assert expected_not_found_query in [row.text for row in rows]
# scroll down
text_element = browser.find_element_by_class_name("alert-warning")
browser.execute_script("return arguments[0].scrollIntoView();", text_element)
# view the Vendor Bulletin
browser.find_element_by_xpath("//button[span='show additional columns ']").click()
browser.find_element_by_link_text("Vendor Bulletin").click()
browser.find_element_by_xpath("//button[span='show additional columns ']").send_keys(Keys.SPACE)
time.sleep(3)
WebDriverWait(browser, 10).until(EC.invisibility_of_element_located((
By.XPATH,
"//div[@class='dt-button-background']")
))
WebDriverWait(browser, 10).until(EC.element_to_be_clickable((
By.XPATH,
"//button[span='CSV']")
))
# test CSV download of the result table
browser.find_element_by_xpath("//button[span='CSV']").click()
time.sleep(5)
# The file should download automatically (firefox is configured this way)
# verify that the file is a CSV formatted field (with ";" as delimiter)
# verify that the second line contains a link (not the Bulletin number)
file = os.path.join(test_download_dir, "product check - Test.csv")
header_line = "Vendor;Product ID;Amount;Lifecycle State;Replacement Product ID;Replacement suggested by;" \
"Vendor Bulletin;LC auto-sync"
with open(file, "r", encoding="utf-8") as f:
content = f.read().splitlines()
assert header_line == content[0]
for line in content:
if "http://www.cisco.com/en/" in line:
break
else:
# no line matches, test failed
pytest.fail("expected content not found in file")
# test that the table view is stored
browser.execute_script("window.scrollTo(0, 0)")
time.sleep(1)
browser.find_element_by_id("_back").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "All Product Checks")
time.sleep(2)
# go back to the product check view
browser.find_element_by_partial_link_text("Test").click()
time.sleep(5)
# test that the Vendor Bulletin is still visible (table state should persist)
assert "Vendor Bulletin" in browser.find_element_by_tag_name("body").text
# create new product check
browser.get(liveserver + reverse("productdb:create-product_check"))
browser.find_element_by_id("id_name").send_keys("Test")
browser.find_element_by_id("id_input_product_ids").send_keys(sample_eol_query)
browser.find_element_by_id("submit").click()
time.sleep(5)
# the new product check table should be displayed with the default options (without e.g. the Vendor Bulletin)
assert "Vendor Bulletin" not in browser.find_element_by_tag_name("body").text
def test_visible_of_product_checks(self, browser, liveserver):
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
anonymous_product_check_name = "Public created Product Check"
private_product_check = "Private API User Product Check"
public_product_check = "Public API User Product Check"
sample_eol_query = """WS-C2960-24LC-S
WS-C2960-24LC-S
WS-C2960-24LC-S
WS-C2960-24LC-S
WS-C2960-24LT-L
WS-C2960-24PC-S
WS-C2960X-24PD-L
WS-C2960X-24PD-L
WS-C2960X-24PD-L
MOH
WS-C2960-48PST-S
WS-C2960-24TC-L
MOH
WS-C2960-24TC-S
WS-C2960-24TT-L"""
# open the new Product Check page
browser.get(liveserver + reverse("productdb:create-product_check"))
browser.find_element_by_id("id_name").send_keys(anonymous_product_check_name)
browser.find_element_by_id("id_input_product_ids").send_keys(sample_eol_query)
browser.find_element_by_id("submit").click()
time.sleep(2)
self.wait_for_text_to_be_displayed_in_body_tag(browser, "All Product Checks are")
# verify result
assert "All Product Checks are deleted every week on Sunday." in browser.find_element_by_tag_name("body").text
# verify list entries
browser.get(liveserver + reverse("productdb:list-product_checks"))
assert anonymous_product_check_name in browser.find_element_by_id("product_check_table").text
# login as API user
browser.get(liveserver + reverse("productdb:create-product_check"))
time.sleep(2)
browser.find_element_by_id("navbar_login").click()
time.sleep(2)
homepage_message = "New Product Check"
self.login_user(browser, self.API_USERNAME, self.API_PASSWORD, homepage_message)
# the page contains a text field, where the product IDs must be entered
expected_text = "On this page, you can execute a bulk Product check of multiple Products against the local " \
"database. Please enter a list of Product IDs in the following text field separated by line " \
"breaks, e.g."
self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_text)
browser.find_element_by_id("id_name").send_keys(private_product_check)
browser.find_element_by_id("id_input_product_ids").send_keys(sample_eol_query)
browser.find_element_by_id("submit").click()
time.sleep(2)
self.wait_for_text_to_be_displayed_in_body_tag(browser, "All Product Checks are")
# verify result
assert "All Product Checks are deleted every week on Sunday." in browser.find_element_by_tag_name("body").text
# verify list entries
browser.get(liveserver + reverse("productdb:list-product_checks"))
assert private_product_check in browser.find_element_by_id("product_check_table").text
browser.get(liveserver + reverse("productdb:create-product_check"))
browser.find_element_by_id("id_name").send_keys(public_product_check)
browser.find_element_by_id("id_input_product_ids").send_keys(sample_eol_query)
browser.find_element_by_id("id_public_product_check").click()
browser.find_element_by_id("submit").click()
time.sleep(2)
self.wait_for_text_to_be_displayed_in_body_tag(browser, "All Product Checks are")
# verify result
assert "All Product Checks are deleted every week on Sunday." in browser.find_element_by_tag_name("body").text
browser.get(liveserver + reverse("productdb:list-product_checks"))
assert anonymous_product_check_name in browser.find_element_by_id("product_check_table").text
assert private_product_check in browser.find_element_by_id("product_check_table").text
assert public_product_check in browser.find_element_by_id("product_check_table").text
# logout
browser.find_element_by_id("navbar_loggedin").click()
browser.find_element_by_id("navbar_loggedin_logout").click()
time.sleep(3)
browser.get(liveserver + reverse("productdb:list-product_checks"))
# verify table entries
assert private_product_check not in browser.find_element_by_id("product_check_table").text
assert anonymous_product_check_name in browser.find_element_by_id("product_check_table").text
assert public_product_check in browser.find_element_by_id("product_check_table").text
# login as root (should also see only the public product checks)
browser.get(liveserver + reverse("productdb:list-product_checks"))
browser.find_element_by_id("navbar_login").click()
time.sleep(3)
homepage_message = "All Product Checks"
self.login_user(browser, self.ADMIN_USERNAME, self.ADMIN_PASSWORD, homepage_message)
# verify table entries
assert private_product_check not in browser.find_element_by_id("product_check_table").text
assert anonymous_product_check_name in browser.find_element_by_id("product_check_table").text
assert public_product_check in browser.find_element_by_id("product_check_table").text
|
"""
Test suite for the selenium test cases
"""
import os
import pytest
import time
from django.urls import reverse
from tests import BaseSeleniumTest
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
@pytest.mark.online
@pytest.mark.selenium
class TestBulkEolCheckFunction(BaseSeleniumTest):
    """Selenium end-to-end tests for the bulk "Product Check" (End-of-Life
    check) pages of the product database web application.

    Uses harness fixtures: ``browser`` (Selenium WebDriver), ``liveserver``
    (base URL of a running instance) and, where declared, ``test_download_dir``
    (the directory the browser downloads files into).
    """

    def test_optional_product_migration_entry(self, browser, liveserver):
        """The "Migration source" selector is hidden by default and appears on
        the Product Check form only after "id_choose_migration_source" is
        enabled in the user profile."""
        self.api_helper.drop_all_data(liveserver)
        self.api_helper.load_base_test_data(liveserver)
        # open the new Product Check page
        browser.get(liveserver + reverse("productdb:create-product_check"))
        browser.find_element_by_id("navbar_login").click()
        homepage_message = "New Product Check"
        self.login_user(browser, self.API_USERNAME, self.API_PASSWORD, homepage_message)
        # the page contains a text field, where the product IDs must be entered
        expected_text = "On this page, you can execute a bulk Product check of multiple Products against the local " \
                        "database. Please enter a list of Product IDs in the following text field separated by line " \
                        "breaks, e.g."
        self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_text)
        assert "Migration source" not in browser.find_element_by_tag_name('body').text
        # enable optional product migration source selection
        browser.find_element_by_id("navbar_loggedin").click()
        browser.find_element_by_id("navbar_loggedin_user_profile").click()
        self.wait_for_text_to_be_displayed_in_body_tag(browser, "Edit User Profile")
        browser.find_element_by_id("id_choose_migration_source").click()
        browser.find_element_by_id("submit").click()
        time.sleep(3)  # allow the profile update to be persisted server-side
        # open the bulk eol check page
        browser.get(liveserver + reverse("productdb:create-product_check"))
        # the page contains a text field, where the product IDs must be entered
        expected_text = "On this page, you can execute a bulk Product check of multiple Products against the local " \
                        "database. Please enter a list of Product IDs in the following text field separated by line " \
                        "breaks, e.g."
        self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_text)
        assert "Migration source" in browser.find_element_by_tag_name('body').text
        # end session
        browser.get(liveserver + reverse("logout"))

    def test_with_valid_query(self, browser, liveserver, test_download_dir):
        """Run a Product Check with a valid query; verify the summary table,
        the optional "Vendor Bulletin" column toggle, the CSV export, and
        that the column state persists per saved check but not for new ones."""
        self.api_helper.drop_all_data(liveserver)
        self.api_helper.load_base_test_data(liveserver)
        # open the new Product Check page
        browser.get(liveserver + reverse("productdb:create-product_check"))
        # the page contains a text field, where the product IDs must be entered
        expected_text = "On this page, you can execute a bulk Product check of multiple Products against the local " \
                        "database. Please enter a list of Product IDs in the following text field separated by line " \
                        "breaks, e.g."
        self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_text)
        # enter the query and submit (whitespace is stripped)
        # NOTE: duplicate IDs and the unknown "MOH" entries are intentional —
        # they must be aggregated and reported as "Not found in Database"
        sample_eol_query = """WS-C2960-24LC-S
WS-C2960-24LC-S
WS-C2960-24LC-S
WS-C2960-24LC-S
WS-C2960-24LT-L
WS-C2960-24PC-S
WS-C2960X-24PD-L
WS-C2960X-24PD-L
WS-C2960X-24PD-L
MOH
WS-C2960-48PST-S
WS-C2960-24TC-L
MOH
WS-C2960-24TC-S
WS-C2960-24TT-L"""
        browser.find_element_by_id("id_name").send_keys("Test")
        browser.find_element_by_id("id_input_product_ids").send_keys(sample_eol_query)
        browser.find_element_by_id("submit").click()
        # verify result within the product summary table
        expected_product_summary_row = "Cisco Systems WS-C2960-24LC-S 4 End of Support No"
        expected_not_found_query = "MOH 2 Not found in Database --- --- ---"
        # test that the Vendor Bulletin is not visible by default
        assert "Vendor Bulletin" not in browser.find_element_by_tag_name("body").text
        table = browser.find_element_by_class_name("table")
        rows = table.find_elements_by_tag_name('tr')
        assert expected_product_summary_row in [row.text for row in rows]
        assert expected_not_found_query in [row.text for row in rows]
        # scroll down
        text_element = browser.find_element_by_class_name("alert-warning")
        browser.execute_script("return arguments[0].scrollIntoView();", text_element)
        # view the Vendor Bulletin
        browser.find_element_by_xpath("//button[span='show additional columns ']").click()
        browser.find_element_by_link_text("Vendor Bulletin").click()
        # SPACE closes the still-open dropdown before waiting for the overlay
        browser.find_element_by_xpath("//button[span='show additional columns ']").send_keys(Keys.SPACE)
        time.sleep(3)
        WebDriverWait(browser, 10).until(EC.invisibility_of_element_located((
            By.XPATH,
            "//div[@class='dt-button-background']")
        ))
        WebDriverWait(browser, 10).until(EC.element_to_be_clickable((
            By.XPATH,
            "//button[span='CSV']")
        ))
        # test CSV download of the result table
        browser.find_element_by_xpath("//button[span='CSV']").click()
        time.sleep(5)
        # The file should download automatically (firefox is configured this way)
        # verify that the file is a CSV formatted field (with ";" as delimiter)
        # verify that the second line contains a link (not the Bulletin number)
        file = os.path.join(test_download_dir, "product check - Test.csv")
        header_line = "Vendor;Product ID;Amount;Lifecycle State;Replacement Product ID;Replacement suggested by;" \
                      "Vendor Bulletin;LC auto-sync"
        with open(file, "r", encoding="utf-8") as f:
            content = f.read().splitlines()
        assert header_line == content[0]
        for line in content:
            if "http://www.cisco.com/en/" in line:
                break
        else:
            # no line matches, test failed
            pytest.fail("expected content not found in file")
        # test that the table view is stored
        browser.execute_script("window.scrollTo(0, 0)")
        time.sleep(1)
        browser.find_element_by_id("_back").click()
        self.wait_for_text_to_be_displayed_in_body_tag(browser, "All Product Checks")
        time.sleep(2)
        # go back to the product check view
        browser.find_element_by_partial_link_text("Test").click()
        time.sleep(5)
        # test that the Vendor Bulletin is still visible (table state should persist)
        assert "Vendor Bulletin" in browser.find_element_by_tag_name("body").text
        # create new product check
        browser.get(liveserver + reverse("productdb:create-product_check"))
        browser.find_element_by_id("id_name").send_keys("Test")
        browser.find_element_by_id("id_input_product_ids").send_keys(sample_eol_query)
        browser.find_element_by_id("submit").click()
        time.sleep(5)
        # the new product check table should be displayed with the default options (without e.g. the Vendor Bulletin)
        assert "Vendor Bulletin" not in browser.find_element_by_tag_name("body").text

    def test_visible_of_product_checks(self, browser, liveserver):
        """Visibility rules for Product Checks: anonymous checks are public,
        an authenticated user's check is private unless marked public, and
        other users (including the admin) only see public checks."""
        self.api_helper.drop_all_data(liveserver)
        self.api_helper.load_base_test_data(liveserver)
        anonymous_product_check_name = "Public created Product Check"
        private_product_check = "Private API User Product Check"
        public_product_check = "Public API User Product Check"
        sample_eol_query = """WS-C2960-24LC-S
WS-C2960-24LC-S
WS-C2960-24LC-S
WS-C2960-24LC-S
WS-C2960-24LT-L
WS-C2960-24PC-S
WS-C2960X-24PD-L
WS-C2960X-24PD-L
WS-C2960X-24PD-L
MOH
WS-C2960-48PST-S
WS-C2960-24TC-L
MOH
WS-C2960-24TC-S
WS-C2960-24TT-L"""
        # open the new Product Check page
        browser.get(liveserver + reverse("productdb:create-product_check"))
        browser.find_element_by_id("id_name").send_keys(anonymous_product_check_name)
        browser.find_element_by_id("id_input_product_ids").send_keys(sample_eol_query)
        browser.find_element_by_id("submit").click()
        time.sleep(2)
        self.wait_for_text_to_be_displayed_in_body_tag(browser, "All Product Checks are")
        # verify result
        assert "All Product Checks are deleted every week on Sunday." in browser.find_element_by_tag_name("body").text
        # verify list entries
        browser.get(liveserver + reverse("productdb:list-product_checks"))
        assert anonymous_product_check_name in browser.find_element_by_id("product_check_table").text
        # login as API user
        browser.get(liveserver + reverse("productdb:create-product_check"))
        time.sleep(2)
        browser.find_element_by_id("navbar_login").click()
        time.sleep(2)
        homepage_message = "New Product Check"
        self.login_user(browser, self.API_USERNAME, self.API_PASSWORD, homepage_message)
        # the page contains a text field, where the product IDs must be entered
        expected_text = "On this page, you can execute a bulk Product check of multiple Products against the local " \
                        "database. Please enter a list of Product IDs in the following text field separated by line " \
                        "breaks, e.g."
        self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_text)
        browser.find_element_by_id("id_name").send_keys(private_product_check)
        browser.find_element_by_id("id_input_product_ids").send_keys(sample_eol_query)
        browser.find_element_by_id("submit").click()
        time.sleep(2)
        self.wait_for_text_to_be_displayed_in_body_tag(browser, "All Product Checks are")
        # verify result
        assert "All Product Checks are deleted every week on Sunday." in browser.find_element_by_tag_name("body").text
        # verify list entries
        browser.get(liveserver + reverse("productdb:list-product_checks"))
        assert private_product_check in browser.find_element_by_id("product_check_table").text
        # create a second check for the same user, explicitly marked public
        browser.get(liveserver + reverse("productdb:create-product_check"))
        browser.find_element_by_id("id_name").send_keys(public_product_check)
        browser.find_element_by_id("id_input_product_ids").send_keys(sample_eol_query)
        browser.find_element_by_id("id_public_product_check").click()
        browser.find_element_by_id("submit").click()
        time.sleep(2)
        self.wait_for_text_to_be_displayed_in_body_tag(browser, "All Product Checks are")
        # verify result
        assert "All Product Checks are deleted every week on Sunday." in browser.find_element_by_tag_name("body").text
        browser.get(liveserver + reverse("productdb:list-product_checks"))
        assert anonymous_product_check_name in browser.find_element_by_id("product_check_table").text
        assert private_product_check in browser.find_element_by_id("product_check_table").text
        assert public_product_check in browser.find_element_by_id("product_check_table").text
        # logout
        browser.find_element_by_id("navbar_loggedin").click()
        browser.find_element_by_id("navbar_loggedin_logout").click()
        time.sleep(3)
        browser.get(liveserver + reverse("productdb:list-product_checks"))
        # verify table entries
        assert private_product_check not in browser.find_element_by_id("product_check_table").text
        assert anonymous_product_check_name in browser.find_element_by_id("product_check_table").text
        assert public_product_check in browser.find_element_by_id("product_check_table").text
        # login as root (should also see only the public product checks)
        browser.get(liveserver + reverse("productdb:list-product_checks"))
        browser.find_element_by_id("navbar_login").click()
        time.sleep(3)
        homepage_message = "All Product Checks"
        self.login_user(browser, self.ADMIN_USERNAME, self.ADMIN_PASSWORD, homepage_message)
        # verify table entries
        assert private_product_check not in browser.find_element_by_id("product_check_table").text
        assert anonymous_product_check_name in browser.find_element_by_id("product_check_table").text
        assert public_product_check in browser.find_element_by_id("product_check_table").text
|
en
| 0.743427
|
Test suite for the selenium test cases # open the new Product Check page # the page contains a text field, where the product IDs must be entered # enable optional product migration source selection # open the bulk eol check page # the page contains a text field, where the product IDs must be entered # end session # open the new Product Check page # the page contains a text field, where the product IDs must be entered # enter the query and submit (whitespace is stripped) WS-C2960-24LC-S WS-C2960-24LC-S WS-C2960-24LC-S WS-C2960-24LC-S WS-C2960-24LT-L WS-C2960-24PC-S WS-C2960X-24PD-L WS-C2960X-24PD-L WS-C2960X-24PD-L MOH WS-C2960-48PST-S WS-C2960-24TC-L MOH WS-C2960-24TC-S WS-C2960-24TT-L # verify result within the product summary table # test that the Vendor Bulletin is not visible by default # scroll down # view the Vendor Bulletin # test CSV download of the result table # The file should download automatically (firefox is configured this way) # verify that the file is a CSV formatted field (with ";" as delimiter) # verify that the second line contains a link (not the Bulletin number) # no line matches, test failed # test that the table view is stored # go back to the product check view # test that the Vendor Bulletin is still visible (table state should persist) # create new product check # the new product check table should be displayed with the default options (without e.g. the Vendor Bulletin) WS-C2960-24LC-S WS-C2960-24LC-S WS-C2960-24LC-S WS-C2960-24LC-S WS-C2960-24LT-L WS-C2960-24PC-S WS-C2960X-24PD-L WS-C2960X-24PD-L WS-C2960X-24PD-L MOH WS-C2960-48PST-S WS-C2960-24TC-L MOH WS-C2960-24TC-S WS-C2960-24TT-L # open the new Product Check page # verify result # verify list entries # login as API user # the page contains a text field, where the product IDs must be entered # verify result # verify list entries # verify result # logout # verify table entries # login as root (should also see only the public product checks) # verify table entries
| 2.560027
| 3
|
cloudformationLimits.py
|
leadingzero/LimitMonkey
| 0
|
6626808
|
<filename>cloudformationLimits.py
#!/usr/bin/python
import boto3
import json
from datetime import datetime
def json_serial(obj):
    """json.dumps ``default`` hook: render datetime objects as ISO-8601 strings."""
    if not isinstance(obj, datetime):
        raise TypeError("Type not serializable")
    return obj.isoformat()
def get_limit(region):
    """Return the CloudFormation stack limit for *region* as a string.

    Prints the raw DescribeAccountLimits response, then scans the
    'AccountLimits' entries for 'StackLimit' (the original assumed it was
    always the first entry).  Returns None when no such entry exists,
    matching the original implicit behaviour.
    """
    cfn_client = boto3.client('cloudformation', region_name=region)
    stack_limit = cfn_client.describe_account_limits()
    # parenthesized print works under both Python 2 and 3 for a single value
    print(json.dumps(stack_limit, indent=4, separators=(',', ': '), default=json_serial))
    for limit in stack_limit['AccountLimits']:
        if limit['Name'] == 'StackLimit':
            num_of_stacks = str(limit['Value'])
            print("Stack Limit: ")
            print(num_of_stacks)
            return num_of_stacks
def get_actual(region):
    """Return the number of existing CloudFormation stacks in *region*,
    as a string, printing it along the way.

    NOTE(review): describe_stacks() paginates; counts above one page are
    under-reported — confirm whether pagination matters for this account.
    """
    cfn_client = boto3.client('cloudformation', region_name=region)
    stacks = cfn_client.describe_stacks()
    stack_list = stacks['Stacks']
    # parenthesized print works under both Python 2 and 3 for a single value
    print("Actual Stacks: ")
    num_of_stacks = str(len(stack_list))
    print(num_of_stacks)
    return num_of_stacks
# print "Region: " + region
# get_limit(region)
# get_actual(region)
|
<filename>cloudformationLimits.py
#!/usr/bin/python
import boto3
import json
from datetime import datetime
def json_serial(obj):
    """json.dumps ``default`` hook: render datetime objects as ISO-8601 strings."""
    if not isinstance(obj, datetime):
        raise TypeError("Type not serializable")
    return obj.isoformat()
def get_limit(region):
    """Return the CloudFormation stack limit for *region* as a string.

    Prints the raw DescribeAccountLimits response, then scans the
    'AccountLimits' entries for 'StackLimit' (the original assumed it was
    always the first entry).  Returns None when no such entry exists,
    matching the original implicit behaviour.
    """
    cfn_client = boto3.client('cloudformation', region_name=region)
    stack_limit = cfn_client.describe_account_limits()
    # parenthesized print works under both Python 2 and 3 for a single value
    print(json.dumps(stack_limit, indent=4, separators=(',', ': '), default=json_serial))
    for limit in stack_limit['AccountLimits']:
        if limit['Name'] == 'StackLimit':
            num_of_stacks = str(limit['Value'])
            print("Stack Limit: ")
            print(num_of_stacks)
            return num_of_stacks
def get_actual(region):
    """Return the number of existing CloudFormation stacks in *region*,
    as a string, printing it along the way.

    NOTE(review): describe_stacks() paginates; counts above one page are
    under-reported — confirm whether pagination matters for this account.
    """
    cfn_client = boto3.client('cloudformation', region_name=region)
    stacks = cfn_client.describe_stacks()
    stack_list = stacks['Stacks']
    # parenthesized print works under both Python 2 and 3 for a single value
    print("Actual Stacks: ")
    num_of_stacks = str(len(stack_list))
    print(num_of_stacks)
    return num_of_stacks
# print "Region: " + region
# get_limit(region)
# get_actual(region)
|
en
| 0.23268
|
#!/usr/bin/python # print "Region: " + region # get_limit(region) # get_actual(region)
| 2.40586
| 2
|
sch_eq/function.py
|
hudalao/sch_eq
| 0
|
6626809
|
import sys
import os
#make sure the program can be executable from test file
dir_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '.'))
sys.path.append(dir_root)
import numpy as np
import matplotlib.pyplot as plt
import math as mt
import numpy.polynomial.legendre as legen
import cmath
#domain is the range of x and V(x)
#c the constant
#N the size of the basis set
#V the potential energy V(x) ps: the size of V(x) should be same as the size of the basis set
#V_const the constant potential energy
#the choice of basis set function: 1 ---> the fourier basis 2 ---> the legendre polynomial basis
#ps: the fourier basis can take function V of x, but the legendre polynomial basis can only take the constant V. Be careful when you use different basis method
#with input wave function, calculate its coefficient under the fourier basis
def wave_fourier_basis(wave_func, domain, N):
    """Project *wave_func*, sampled on an N-point grid spanning
    [-domain/2, domain/2], onto the discrete Fourier (plane-wave) basis
    and return the complex coefficient array of length N.

    The coefficient for index 0 is left at zero, exactly as in the
    original implementation (its loop starts at 1).
    """
    grid = np.linspace(-domain / 2, domain / 2, N)
    modes = np.linspace(-N / 2 + 1, N / 2, N)
    phase = 1j * 2 * np.pi * modes / domain
    coeffs = np.zeros(N, dtype=complex)
    for ii in range(1, N):
        acc = 0j
        for kk in range(N):
            acc = acc + wave_func[kk] * cmath.exp(-1 * phase[ii] * grid[kk])
        coeffs[ii] = acc
    return coeffs / N
#reconstruct the original function for testing purpose
def reconstruct_wave(wave_fourier_coeff, domain, N):
    """Inverse transform: rebuild the N-point spatial wave function from
    its Fourier coefficients (testing helper for wave_fourier_basis)."""
    grid = np.linspace(-domain / 2, domain / 2, N)
    modes = np.linspace(-N / 2 + 1, N / 2, N)
    phase = 1j * 2 * np.pi * modes / domain
    momentum_step = 2 * np.pi / domain
    rebuilt = np.zeros(N, dtype=complex)
    for kk in range(N):
        acc = 0j
        for ii in range(N):
            acc = acc + wave_fourier_coeff[ii] * cmath.exp(phase[ii] * grid[kk])
        rebuilt[kk] = acc
    return rebuilt * momentum_step
#here we use the momentum basis, which is a Fourier basis set: we represent the whole operator (-c Lap + V) as a matrix in the momentum basis
#potential here refers to V in the equation shown above
#the reason for using this method is that we can obtain the eigenvalues and eigenvectors directly by diagonalizing this matrix
def Hamiltonian_momentum_basis(c, potential, domain, N):
    """Build the N x N Hamiltonian matrix H = -c*Laplacian + V in the
    discrete momentum (plane-wave) basis.

    c         -- constant multiplying the Laplacian
    potential -- V(x) sampled on the same N-point grid as the basis
    domain    -- length of the spatial domain (x spans [-domain/2, domain/2])
    N         -- basis-set size

    Returns a complex (N, N) ndarray; diagonalizing it yields the
    eigenvalues/eigenvectors directly.
    """
    x = np.linspace(-domain / 2, domain / 2, N)
    # mode numbers n and the corresponding plane-wave exponents i*2*pi*n/L
    n = np.linspace(-N / 2 + 1, N / 2, N)
    exp_coeff = 1j * 2 * np.pi * n / domain
    delta_x = domain / (N - 1)  # grid spacing, used as the quadrature weight
    #potential term: V[ii][jj] = <p_ii| V |p_jj> via grid summation
    V = np.zeros((N, N), dtype = complex)
    for ii in range(N):
        for jj in range(N):
            for kk in range(N):
                brax_ketp = cmath.exp( exp_coeff[jj] * x[kk] )
                brap_ketx = cmath.exp( -1 * exp_coeff[ii] * x[kk] )
                add = brap_ketx * potential[kk] * brax_ketp * delta_x
                V[ii][jj] = V[ii][jj] + add
    #kinetic term: diagonal in the momentum basis, proportional to n^2
    K = np.zeros((N, N), dtype = complex)
    K_coeff = c * 4 * np.pi ** 2 / domain ** 2
    for ii in range(N):
        K[ii][ii] = n[ii] ** 2 * N * delta_x
    K = K_coeff * K
    # the overlap matrix S is the identity scaled by the period, so instead of
    # solving a generalized eigenproblem we simply divide H by the period value
    H = (K + V) / domain
    return H
def Legendre_polynomial_basis(c, potential, domain, N, wave_func):
x = np.linspace(-domain / 2, domain /2, N)
#represent out wave function in the legendre polynomial basis
wave_legen = legen.legfit(x, wave_func, N)
#calculate H |bj>, where H = -c Lap + V
#calculate -c Lap |bj>
Hbj_first = -1 * c * legen.legder(wave_legen, 2)
#calculate V|bj>, here, V is a constant
Hbj_secod = potential * wave_legen
Hbj = Hbj_first + Hbj_secod[0: N - 1]
return Hbj
def Hamiltonian_Legendre_polynomial(c, potential, domain, N):
    """Build the N x N Hamiltonian H = -c*Laplacian + V in a normalized
    Legendre-polynomial basis.  Only a CONSTANT potential is supported.

    c         -- constant multiplying the Laplacian
    potential -- constant potential value V
    domain    -- length of the spatial domain
    N         -- basis-set size

    Returns a real (N, N) ndarray.
    """
    #potential is a constant in this case
    x = np.linspace(-domain / 2, domain /2, N)
    delta_x = domain / (N - 1)  # grid spacing; not used below
    # the normalized Legendre polynomials are used here:
    # for the nth polynomial, the normalization constant is sqrt((2n + 1)/2)
    #kinetic term: K[ii][jj] = integral of P_ii * d^2/dx^2 P_jj over the domain
    K = np.zeros((N, N))
    for ii in range(N):
        legen_left = np.zeros(N)
        legen_left[ii] = mt.sqrt((2 * ii + 1) / 2)
        for jj in range(N):
            # second derivative of the normalized jj-th polynomial
            deriva_array = np.zeros(N + 2)
            deriva_array[jj] = mt.sqrt((2 * jj + 1) / 2)
            legen_right_deriva = legen.legder(deriva_array, 2)
            #multiply them
            legen_multiply = legen.legmul(legen_left, legen_right_deriva)
            #integral (antiderivative, evaluated at the domain boundaries below)
            legen_integral = legen.legint(legen_multiply)
            #calculate the matrix elements
            K[ii][jj] = legen.legval(domain / 2, legen_integral) - \
                        legen.legval(-domain / 2, legen_integral)
    # the overlap matrix S: inside the [-1, 1] domain the Legendre polynomials
    # form an orthogonal basis satisfying <xi|xj> = delta_ij, so S is diagonal
    S = np.zeros((N, N))
    for ii in range(N):
        legen_left_S = np.zeros(N)
        legen_left_S[ii] = mt.sqrt((2 * ii + 1) / 2)
        legen_multiply_S = legen.legmul(legen_left_S, legen_left_S)
        legen_integral_S = legen.legint(legen_multiply_S)
        S[ii][ii] = legen.legval(domain / 2, legen_integral_S) - \
                    legen.legval(-domain / 2, legen_integral_S)
    K = K * -1 * c
    # the potential is constant, so the V matrix is just the overlap matrix
    # scaled by the potential value
    V = potential * S
    ##divide the obtained Hamiltonian by the S matrix
    H = K + V
    return H
|
import sys
import os
#make sure the program can be executable from test file
dir_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '.'))
sys.path.append(dir_root)
import numpy as np
import matplotlib.pyplot as plt
import math as mt
import numpy.polynomial.legendre as legen
import cmath
#domain is the range of x and V(x)
#c the constant
#N the size of the basis set
#V the potential energy V(x) ps: the size of V(x) should be same as the size of the basis set
#V_const the constant potential energy
#the choice of basis set function: 1 ---> the fourier basis 2 ---> the legendre polynomial basis
#ps: the fourier basis can take function V of x, but the legendre polynomial basis can only take the constant V. Be careful when you use different basis method
#with input wave function, calculate its coefficient under the fourier basis
def wave_fourier_basis(wave_func, domain, N):
    """Project *wave_func*, sampled on an N-point grid spanning
    [-domain/2, domain/2], onto the discrete Fourier (plane-wave) basis
    and return the complex coefficient array of length N.

    The coefficient for index 0 is left at zero, exactly as in the
    original implementation (its loop starts at 1).
    """
    grid = np.linspace(-domain / 2, domain / 2, N)
    modes = np.linspace(-N / 2 + 1, N / 2, N)
    phase = 1j * 2 * np.pi * modes / domain
    coeffs = np.zeros(N, dtype=complex)
    for ii in range(1, N):
        acc = 0j
        for kk in range(N):
            acc = acc + wave_func[kk] * cmath.exp(-1 * phase[ii] * grid[kk])
        coeffs[ii] = acc
    return coeffs / N
#reconstruct the original function for testing purpose
def reconstruct_wave(wave_fourier_coeff, domain, N):
    """Inverse transform: rebuild the N-point spatial wave function from
    its Fourier coefficients (testing helper for wave_fourier_basis)."""
    grid = np.linspace(-domain / 2, domain / 2, N)
    modes = np.linspace(-N / 2 + 1, N / 2, N)
    phase = 1j * 2 * np.pi * modes / domain
    momentum_step = 2 * np.pi / domain
    rebuilt = np.zeros(N, dtype=complex)
    for kk in range(N):
        acc = 0j
        for ii in range(N):
            acc = acc + wave_fourier_coeff[ii] * cmath.exp(phase[ii] * grid[kk])
        rebuilt[kk] = acc
    return rebuilt * momentum_step
#here we use the momentum basis, which is a Fourier basis set: we represent the whole operator (-c Lap + V) as a matrix in the momentum basis
#potential here refers to V in the equation shown above
#the reason for using this method is that we can obtain the eigenvalues and eigenvectors directly by diagonalizing this matrix
def Hamiltonian_momentum_basis(c, potential, domain, N):
    """Build the N x N Hamiltonian matrix H = -c*Laplacian + V in the
    discrete momentum (plane-wave) basis.

    c         -- constant multiplying the Laplacian
    potential -- V(x) sampled on the same N-point grid as the basis
    domain    -- length of the spatial domain (x spans [-domain/2, domain/2])
    N         -- basis-set size

    Returns a complex (N, N) ndarray; diagonalizing it yields the
    eigenvalues/eigenvectors directly.
    """
    x = np.linspace(-domain / 2, domain / 2, N)
    # mode numbers n and the corresponding plane-wave exponents i*2*pi*n/L
    n = np.linspace(-N / 2 + 1, N / 2, N)
    exp_coeff = 1j * 2 * np.pi * n / domain
    delta_x = domain / (N - 1)  # grid spacing, used as the quadrature weight
    #potential term: V[ii][jj] = <p_ii| V |p_jj> via grid summation
    V = np.zeros((N, N), dtype = complex)
    for ii in range(N):
        for jj in range(N):
            for kk in range(N):
                brax_ketp = cmath.exp( exp_coeff[jj] * x[kk] )
                brap_ketx = cmath.exp( -1 * exp_coeff[ii] * x[kk] )
                add = brap_ketx * potential[kk] * brax_ketp * delta_x
                V[ii][jj] = V[ii][jj] + add
    #kinetic term: diagonal in the momentum basis, proportional to n^2
    K = np.zeros((N, N), dtype = complex)
    K_coeff = c * 4 * np.pi ** 2 / domain ** 2
    for ii in range(N):
        K[ii][ii] = n[ii] ** 2 * N * delta_x
    K = K_coeff * K
    # the overlap matrix S is the identity scaled by the period, so instead of
    # solving a generalized eigenproblem we simply divide H by the period value
    H = (K + V) / domain
    return H
def Legendre_polynomial_basis(c, potential, domain, N, wave_func):
x = np.linspace(-domain / 2, domain /2, N)
#represent out wave function in the legendre polynomial basis
wave_legen = legen.legfit(x, wave_func, N)
#calculate H |bj>, where H = -c Lap + V
#calculate -c Lap |bj>
Hbj_first = -1 * c * legen.legder(wave_legen, 2)
#calculate V|bj>, here, V is a constant
Hbj_secod = potential * wave_legen
Hbj = Hbj_first + Hbj_secod[0: N - 1]
return Hbj
def Hamiltonian_Legendre_polynomial(c, potential, domain, N):
    """Build the N x N Hamiltonian H = -c*Laplacian + V in a normalized
    Legendre-polynomial basis.  Only a CONSTANT potential is supported.

    c         -- constant multiplying the Laplacian
    potential -- constant potential value V
    domain    -- length of the spatial domain
    N         -- basis-set size

    Returns a real (N, N) ndarray.
    """
    #potential is a constant in this case
    x = np.linspace(-domain / 2, domain /2, N)
    delta_x = domain / (N - 1)  # grid spacing; not used below
    # the normalized Legendre polynomials are used here:
    # for the nth polynomial, the normalization constant is sqrt((2n + 1)/2)
    #kinetic term: K[ii][jj] = integral of P_ii * d^2/dx^2 P_jj over the domain
    K = np.zeros((N, N))
    for ii in range(N):
        legen_left = np.zeros(N)
        legen_left[ii] = mt.sqrt((2 * ii + 1) / 2)
        for jj in range(N):
            # second derivative of the normalized jj-th polynomial
            deriva_array = np.zeros(N + 2)
            deriva_array[jj] = mt.sqrt((2 * jj + 1) / 2)
            legen_right_deriva = legen.legder(deriva_array, 2)
            #multiply them
            legen_multiply = legen.legmul(legen_left, legen_right_deriva)
            #integral (antiderivative, evaluated at the domain boundaries below)
            legen_integral = legen.legint(legen_multiply)
            #calculate the matrix elements
            K[ii][jj] = legen.legval(domain / 2, legen_integral) - \
                        legen.legval(-domain / 2, legen_integral)
    # the overlap matrix S: inside the [-1, 1] domain the Legendre polynomials
    # form an orthogonal basis satisfying <xi|xj> = delta_ij, so S is diagonal
    S = np.zeros((N, N))
    for ii in range(N):
        legen_left_S = np.zeros(N)
        legen_left_S[ii] = mt.sqrt((2 * ii + 1) / 2)
        legen_multiply_S = legen.legmul(legen_left_S, legen_left_S)
        legen_integral_S = legen.legint(legen_multiply_S)
        S[ii][ii] = legen.legval(domain / 2, legen_integral_S) - \
                    legen.legval(-domain / 2, legen_integral_S)
    K = K * -1 * c
    # the potential is constant, so the V matrix is just the overlap matrix
    # scaled by the potential value
    V = potential * S
    ##divide the obtained Hamiltonian by the S matrix
    H = K + V
    return H
|
en
| 0.877091
|
#make sure the program can be executable from test file #domain is the range of x and V(x) #c the constant #N the size of the basis set #V the potential energy V(x) ps: the size of V(x) should be same as the size of the basis set #V_const the constant potential energy #the choice of basis set function: 1 ---> the fourier basis 2 ---> the legendre polynomial basis #ps: the fourier basis can take function V of x, but the legendre polynomial basis can only take the constant V. Be careful when you use different basis method #with input wave function, calculate its coefficient under the fourier basis #reconstruct the original function for testing purpose #here, we use the momentum basis which is a fourier basis set, which means we reprsent the whole (-c Lap + V) as matrix with the momentum basis #potential here refers to V in the equation shown above #the reson using this method is that we can obtain the eigenvalues and eigenvectors directly by diaglize this matrix #potential term #kinetic term #it is known that HC = HSC, because S here is a identity matrix with elements # equals to period, we can just divide the H by period value #represent out wave function in the legendre polynomial basis #calculate H |bj>, where H = -c Lap + V #calculate -c Lap |bj> #calculate V|bj>, here, V is a constant #potential is a constant in this case #here, the normalized legendre polynomical has been used # for the nth polynomials, normalization constant is sqrt(2/(2n + 1)) #kinetic term #multiply them #integral #calculate the matrix elements #the S matrix, inside the [-1, 1] domain, the legendre ploynomial can be treatedas basis and satisfying <xi|xj> = delta ij, thus S matrix is a identity matrix #because the potential is just a constant here, we can calculate the V matrix simply by multiply the matrix S a constant potential value ##divide the obtained Hamiltonian by the S matrix
| 2.557897
| 3
|
tests/test_consul.py
|
bspiritxp/jcutils
| 0
|
6626810
|
<filename>tests/test_consul.py
from jcutil.consul import ConfigFormat, KvProperty
class TestA:
name = KvProperty('name')
bar = KvProperty('foo', format=ConfigFormat.Yaml, cached=True)
def desc(self):
print('my name is:', self.name)
def test_kvp():
ta = TestA()
ta.desc()
assert ta.name == 'FooBar'
print(ta.bar, ta.foo)
|
<filename>tests/test_consul.py
from jcutil.consul import ConfigFormat, KvProperty
class TestA:
name = KvProperty('name')
bar = KvProperty('foo', format=ConfigFormat.Yaml, cached=True)
def desc(self):
print('my name is:', self.name)
def test_kvp():
ta = TestA()
ta.desc()
assert ta.name == 'FooBar'
print(ta.bar, ta.foo)
|
none
| 1
| 2.367403
| 2
|
|
exceptions.py
|
Ythosa/hokiyomi
| 9
|
6626811
|
class InvalidAttachments(Exception):
"""Invalid attachments in the message"""
|
class InvalidAttachments(Exception):
"""Invalid attachments in the message"""
|
en
| 0.54251
|
Invalid attachments in the message
| 1.644393
| 2
|
secistsploit/core/telnet/telnet_client.py
|
reneaicisneros/SecistSploit
| 15
|
6626812
|
import telnetlib
#Copyright 2018, The RouterSploit Framework (RSF) by Threat9 All rights reserved.
from secistsploit.core.exploit.exploit import Exploit
from secistsploit.core.exploit.exploit import Protocol
from secistsploit.core.exploit.option import OptBool
from secistsploit.core.exploit.printer import print_success
from secistsploit.core.exploit.printer import print_error
TELNET_TIMEOUT = 30.0
class TelnetCli(object):
""" Telnet Client provides methods to handle communication with Telnet server """
def __init__(self, telnet_target: str, telnet_port: int, verbosity=False) -> None:
""" Telnet client constructor
:param str telnet_target: target Telnet server ip address
:param int telnet_port: target Telnet server port
:param bool verbosity: display verbose output
:return None:
"""
self.telnet_target = telnet_target
self.telnet_port = telnet_port
self.verbosity = verbosity
self.peer = "{}:{}".format(self.telnet_target, self.telnet_port)
self.telnet_client = None
def connect(self) -> bool:
""" Connect to Telnet server
:return bool: True if connection was successful, False otherwise
"""
try:
self.telnet_client = telnetlib.Telnet(self.telnet_target, self.telnet_port, timeout=TELNET_TIMEOUT)
return True
except Exception as err:
print_error(self.peer, "Telnet Error while connecting to the server", err, verbose=self.verbosity)
return False
def login(self, username: str, password: str, retries: int=1) -> bool:
""" Login to Telnet server
:param str username: Telnet account username
:param str password: <PASSWORD>
:param int retries: number of authentication retries
:return bool: True if login was successful, False otherwise
"""
for _ in range(retries):
try:
if not self.connect():
continue
self.telnet_client.expect([b"Login: ", b"login: ", b"Username: ", b"username: "], 5)
self.telnet_client.write(bytes(username, "utf-8") + b"\r\n")
self.telnet_client.expect([b"Password: ", b"password: "], 5)
self.telnet_client.write(bytes(password, "utf-8") + b"\r\n")
self.telnet_client.write(b"\r\n")
(i, obj, res) = self.telnet_client.expect([b"Incorrect", b"incorrect"], 5)
if i == -1 and any([x in res for x in [b"#", b"$", b">"]]) or len(res) > 500: # big banner e.g. mikrotik
print_success(self.peer, "Telnet Authentication Successful - Username: '{}' Password: '{}'".format(username, password), verbose=self.verbosity)
return True
else:
print_error(self.peer, "Telnet Authentication Failed - Username: '{}' Password: '{}'".format(username, password), verbose=self.verbosity)
break
except Exception as err:
print_error(self.peer, "Telnet Error while authenticating to the server", err, verbose=self.verbosity)
return False
def test_connect(self) -> bool:
""" Test connection to Telnet server
:return bool: True if test connection was successful, False otherwise
"""
try:
self.telnet_client = telnetlib.Telnet(self.telnet_target, self.telnet_port, timeout=TELNET_TIMEOUT)
self.telnet_client.expect([b"Login: ", b"login: ", b"Username: ", b"username: "], 5)
self.telnet_client.close()
return True
except Exception as err:
print_error(self.peer, "Telnet Error while testing connection to the server", err, verbose=self.verbosity)
return False
def interactive(self) -> None:
""" Start interactive mode with Telnet server
:return None:
"""
self.telnet_client.interact()
def read_until(self, data: bytes) -> bytes:
""" Read until specified data found in response
:param bytes data: bytes until which data should be read
:return bytes: bytes read until data
"""
try:
response = self.telnet_client.read_until(data, 5)
return response
except Exception as err:
print_error(self.peer, "Telnet Error while reading data from the server", err, verbose=self.verbosity)
return None
def write(self, data: bytes) -> bool:
""" Write data to Telnet server
:param bytes data: data that should be written to Telnet server
:return bool: True if data was written successfuly, False otherwise
"""
try:
self.telnet_client.write(data, 5)
return True
except Exception as err:
print_error(self.peer, "Telnet Error while writing to the server", err, verbose=self.verbosity)
return False
def close(self) -> bool:
""" Close connection to Telnet server
:return bool: True if closing connection was successful, False otherwise
"""
try:
self.telnet_client.close()
return True
except Exception as err:
print_error(self.peer, "Telnet Error while closing connection", err, verbose=self.verbosity)
return False
class TelnetClient(Exploit):
""" Telnet Client exploit """
target_protocol = Protocol.TELNET
verbosity = OptBool(True, "Enable verbose output: true/false")
def telnet_create(self, target: str=None, port: int=None) -> TelnetCli:
""" Create Telnet client
:param str target: target Telnet ip address
:param int port: target Telnet port
:return TelnetCli: Telnet client object
"""
telnet_target = target if target else self.target
telnet_port = port if port else self.port
telnet_client = TelnetCli(telnet_target, telnet_port, verbosity=self.verbosity)
return telnet_client
|
import telnetlib
#Copyright 2018, The RouterSploit Framework (RSF) by Threat9 All rights reserved.
from secistsploit.core.exploit.exploit import Exploit
from secistsploit.core.exploit.exploit import Protocol
from secistsploit.core.exploit.option import OptBool
from secistsploit.core.exploit.printer import print_success
from secistsploit.core.exploit.printer import print_error
TELNET_TIMEOUT = 30.0
class TelnetCli(object):
""" Telnet Client provides methods to handle communication with Telnet server """
def __init__(self, telnet_target: str, telnet_port: int, verbosity=False) -> None:
""" Telnet client constructor
:param str telnet_target: target Telnet server ip address
:param int telnet_port: target Telnet server port
:param bool verbosity: display verbose output
:return None:
"""
self.telnet_target = telnet_target
self.telnet_port = telnet_port
self.verbosity = verbosity
self.peer = "{}:{}".format(self.telnet_target, self.telnet_port)
self.telnet_client = None
def connect(self) -> bool:
""" Connect to Telnet server
:return bool: True if connection was successful, False otherwise
"""
try:
self.telnet_client = telnetlib.Telnet(self.telnet_target, self.telnet_port, timeout=TELNET_TIMEOUT)
return True
except Exception as err:
print_error(self.peer, "Telnet Error while connecting to the server", err, verbose=self.verbosity)
return False
def login(self, username: str, password: str, retries: int=1) -> bool:
""" Login to Telnet server
:param str username: Telnet account username
:param str password: <PASSWORD>
:param int retries: number of authentication retries
:return bool: True if login was successful, False otherwise
"""
for _ in range(retries):
try:
if not self.connect():
continue
self.telnet_client.expect([b"Login: ", b"login: ", b"Username: ", b"username: "], 5)
self.telnet_client.write(bytes(username, "utf-8") + b"\r\n")
self.telnet_client.expect([b"Password: ", b"password: "], 5)
self.telnet_client.write(bytes(password, "utf-8") + b"\r\n")
self.telnet_client.write(b"\r\n")
(i, obj, res) = self.telnet_client.expect([b"Incorrect", b"incorrect"], 5)
if i == -1 and any([x in res for x in [b"#", b"$", b">"]]) or len(res) > 500: # big banner e.g. mikrotik
print_success(self.peer, "Telnet Authentication Successful - Username: '{}' Password: '{}'".format(username, password), verbose=self.verbosity)
return True
else:
print_error(self.peer, "Telnet Authentication Failed - Username: '{}' Password: '{}'".format(username, password), verbose=self.verbosity)
break
except Exception as err:
print_error(self.peer, "Telnet Error while authenticating to the server", err, verbose=self.verbosity)
return False
def test_connect(self) -> bool:
""" Test connection to Telnet server
:return bool: True if test connection was successful, False otherwise
"""
try:
self.telnet_client = telnetlib.Telnet(self.telnet_target, self.telnet_port, timeout=TELNET_TIMEOUT)
self.telnet_client.expect([b"Login: ", b"login: ", b"Username: ", b"username: "], 5)
self.telnet_client.close()
return True
except Exception as err:
print_error(self.peer, "Telnet Error while testing connection to the server", err, verbose=self.verbosity)
return False
def interactive(self) -> None:
""" Start interactive mode with Telnet server
:return None:
"""
self.telnet_client.interact()
def read_until(self, data: bytes) -> bytes:
""" Read until specified data found in response
:param bytes data: bytes until which data should be read
:return bytes: bytes read until data
"""
try:
response = self.telnet_client.read_until(data, 5)
return response
except Exception as err:
print_error(self.peer, "Telnet Error while reading data from the server", err, verbose=self.verbosity)
return None
def write(self, data: bytes) -> bool:
""" Write data to Telnet server
:param bytes data: data that should be written to Telnet server
:return bool: True if data was written successfuly, False otherwise
"""
try:
self.telnet_client.write(data, 5)
return True
except Exception as err:
print_error(self.peer, "Telnet Error while writing to the server", err, verbose=self.verbosity)
return False
def close(self) -> bool:
""" Close connection to Telnet server
:return bool: True if closing connection was successful, False otherwise
"""
try:
self.telnet_client.close()
return True
except Exception as err:
print_error(self.peer, "Telnet Error while closing connection", err, verbose=self.verbosity)
return False
class TelnetClient(Exploit):
""" Telnet Client exploit """
target_protocol = Protocol.TELNET
verbosity = OptBool(True, "Enable verbose output: true/false")
def telnet_create(self, target: str=None, port: int=None) -> TelnetCli:
""" Create Telnet client
:param str target: target Telnet ip address
:param int port: target Telnet port
:return TelnetCli: Telnet client object
"""
telnet_target = target if target else self.target
telnet_port = port if port else self.port
telnet_client = TelnetCli(telnet_target, telnet_port, verbosity=self.verbosity)
return telnet_client
|
en
| 0.749843
|
#Copyright 2018, The RouterSploit Framework (RSF) by Threat9 All rights reserved. Telnet Client provides methods to handle communication with Telnet server Telnet client constructor :param str telnet_target: target Telnet server ip address :param int telnet_port: target Telnet server port :param bool verbosity: display verbose output :return None: Connect to Telnet server :return bool: True if connection was successful, False otherwise Login to Telnet server :param str username: Telnet account username :param str password: <PASSWORD> :param int retries: number of authentication retries :return bool: True if login was successful, False otherwise # big banner e.g. mikrotik Test connection to Telnet server :return bool: True if test connection was successful, False otherwise Start interactive mode with Telnet server :return None: Read until specified data found in response :param bytes data: bytes until which data should be read :return bytes: bytes read until data Write data to Telnet server :param bytes data: data that should be written to Telnet server :return bool: True if data was written successfuly, False otherwise Close connection to Telnet server :return bool: True if closing connection was successful, False otherwise Telnet Client exploit Create Telnet client :param str target: target Telnet ip address :param int port: target Telnet port :return TelnetCli: Telnet client object
| 2.648813
| 3
|
cloud_ui/components/model_view/model.py
|
flexiblecloud/cloud_ui
| 2
|
6626813
|
<filename>cloud_ui/components/model_view/model.py
import logging
from abc import abstractmethod
from typing import List, Dict
from remi import gui as G
class Model(object):
PK = 'id'
ITEMS_PER_PAGE = 10
TABLE_NAME = "undefined"
def __init__(self):
self._ids = set()
self._dirty = set()
self.logger = logging.getLogger(f"{self}")
def __str__(self):
return f"<Model:{self.TABLE_NAME}>"
@property
def ids(self) -> list:
return list(self._ids)
@ids.setter
def ids(self, values):
self.logger.debug(f"setting new ids ... {values}")
self._ids = set(values)
async def fetch(self) -> bool:
try:
ids = await self.get_ids()
self.logger.debug(f"IDS = {ids}")
self.ids = ids
return True
except Exception as e:
return False
@abstractmethod
async def fetch_schema(self) -> List[Dict]:
"""
:return: [dict(name=field1_name, type=field1_type), ...]
"""
async def sync(self):
self._dirty = set()
@abstractmethod
async def get_count(self) -> int:
"""
returns data amount size
:return: int
"""
@abstractmethod
async def get_ids(self) -> List[int]:
"""
returns all elements ids
:return:
"""
@abstractmethod
async def get_by_id(self, id) -> Dict:
"""
returns element by id
:param id:
:return:
"""
|
<filename>cloud_ui/components/model_view/model.py
import logging
from abc import abstractmethod
from typing import List, Dict
from remi import gui as G
class Model(object):
PK = 'id'
ITEMS_PER_PAGE = 10
TABLE_NAME = "undefined"
def __init__(self):
self._ids = set()
self._dirty = set()
self.logger = logging.getLogger(f"{self}")
def __str__(self):
return f"<Model:{self.TABLE_NAME}>"
@property
def ids(self) -> list:
return list(self._ids)
@ids.setter
def ids(self, values):
self.logger.debug(f"setting new ids ... {values}")
self._ids = set(values)
async def fetch(self) -> bool:
try:
ids = await self.get_ids()
self.logger.debug(f"IDS = {ids}")
self.ids = ids
return True
except Exception as e:
return False
@abstractmethod
async def fetch_schema(self) -> List[Dict]:
"""
:return: [dict(name=field1_name, type=field1_type), ...]
"""
async def sync(self):
self._dirty = set()
@abstractmethod
async def get_count(self) -> int:
"""
returns data amount size
:return: int
"""
@abstractmethod
async def get_ids(self) -> List[int]:
"""
returns all elements ids
:return:
"""
@abstractmethod
async def get_by_id(self, id) -> Dict:
"""
returns element by id
:param id:
:return:
"""
|
en
| 0.321413
|
:return: [dict(name=field1_name, type=field1_type), ...] returns data amount size :return: int returns all elements ids :return: returns element by id :param id: :return:
| 2.49612
| 2
|
tests/forms_tests/field_tests/test_jsonfield.py
|
jpmallarino/django
| 16
|
6626814
|
<reponame>jpmallarino/django
import json
import uuid
from django.core.serializers.json import DjangoJSONEncoder
from django.forms import (
CharField,
Form,
JSONField,
Textarea,
TextInput,
ValidationError,
)
from django.test import SimpleTestCase
class JSONFieldTest(SimpleTestCase):
def test_valid(self):
field = JSONField()
value = field.clean('{"a": "b"}')
self.assertEqual(value, {"a": "b"})
def test_valid_empty(self):
field = JSONField(required=False)
self.assertIsNone(field.clean(""))
self.assertIsNone(field.clean(None))
def test_invalid(self):
field = JSONField()
with self.assertRaisesMessage(ValidationError, "Enter a valid JSON."):
field.clean("{some badly formed: json}")
def test_prepare_value(self):
field = JSONField()
self.assertEqual(field.prepare_value({"a": "b"}), '{"a": "b"}')
self.assertEqual(field.prepare_value(None), "null")
self.assertEqual(field.prepare_value("foo"), '"foo"')
self.assertEqual(field.prepare_value("你好,世界"), '"你好,世界"')
self.assertEqual(field.prepare_value({"a": "😀🐱"}), '{"a": "😀🐱"}')
self.assertEqual(
field.prepare_value(["你好,世界", "jaźń"]),
'["你好,世界", "jaźń"]',
)
def test_widget(self):
field = JSONField()
self.assertIsInstance(field.widget, Textarea)
def test_custom_widget_kwarg(self):
field = JSONField(widget=TextInput)
self.assertIsInstance(field.widget, TextInput)
def test_custom_widget_attribute(self):
"""The widget can be overridden with an attribute."""
class CustomJSONField(JSONField):
widget = TextInput
field = CustomJSONField()
self.assertIsInstance(field.widget, TextInput)
def test_converted_value(self):
field = JSONField(required=False)
tests = [
'["a", "b", "c"]',
'{"a": 1, "b": 2}',
"1",
"1.5",
'"foo"',
"true",
"false",
"null",
]
for json_string in tests:
with self.subTest(json_string=json_string):
val = field.clean(json_string)
self.assertEqual(field.clean(val), val)
def test_has_changed(self):
field = JSONField()
self.assertIs(field.has_changed({"a": True}, '{"a": 1}'), True)
self.assertIs(field.has_changed({"a": 1, "b": 2}, '{"b": 2, "a": 1}'), False)
def test_custom_encoder_decoder(self):
class CustomDecoder(json.JSONDecoder):
def __init__(self, object_hook=None, *args, **kwargs):
return super().__init__(object_hook=self.as_uuid, *args, **kwargs)
def as_uuid(self, dct):
if "uuid" in dct:
dct["uuid"] = uuid.UUID(dct["uuid"])
return dct
value = {"uuid": uuid.UUID("{c141e152-6550-4172-a784-05448d98204b}")}
encoded_value = '{"uuid": "c141e152-6550-4172-a784-05448d98204b"}'
field = JSONField(encoder=DjangoJSONEncoder, decoder=CustomDecoder)
self.assertEqual(field.prepare_value(value), encoded_value)
self.assertEqual(field.clean(encoded_value), value)
def test_formfield_disabled(self):
class JSONForm(Form):
json_field = JSONField(disabled=True)
form = JSONForm({"json_field": '["bar"]'}, initial={"json_field": ["foo"]})
self.assertIn("["foo"]</textarea>", form.as_p())
def test_redisplay_none_input(self):
class JSONForm(Form):
json_field = JSONField(required=True)
tests = [
{},
{"json_field": None},
]
for data in tests:
with self.subTest(data=data):
form = JSONForm(data)
self.assertEqual(form["json_field"].value(), "null")
self.assertIn("null</textarea>", form.as_p())
self.assertEqual(form.errors["json_field"], ["This field is required."])
def test_redisplay_wrong_input(self):
"""
Displaying a bound form (typically due to invalid input). The form
should not overquote JSONField inputs.
"""
class JSONForm(Form):
name = CharField(max_length=2)
json_field = JSONField()
# JSONField input is valid, name is too long.
form = JSONForm({"name": "xyz", "json_field": '["foo"]'})
self.assertNotIn("json_field", form.errors)
self.assertIn("["foo"]</textarea>", form.as_p())
# Invalid JSONField.
form = JSONForm({"name": "xy", "json_field": '{"foo"}'})
self.assertEqual(form.errors["json_field"], ["Enter a valid JSON."])
self.assertIn("{"foo"}</textarea>", form.as_p())
|
import json
import uuid
from django.core.serializers.json import DjangoJSONEncoder
from django.forms import (
CharField,
Form,
JSONField,
Textarea,
TextInput,
ValidationError,
)
from django.test import SimpleTestCase
class JSONFieldTest(SimpleTestCase):
def test_valid(self):
field = JSONField()
value = field.clean('{"a": "b"}')
self.assertEqual(value, {"a": "b"})
def test_valid_empty(self):
field = JSONField(required=False)
self.assertIsNone(field.clean(""))
self.assertIsNone(field.clean(None))
def test_invalid(self):
field = JSONField()
with self.assertRaisesMessage(ValidationError, "Enter a valid JSON."):
field.clean("{some badly formed: json}")
def test_prepare_value(self):
field = JSONField()
self.assertEqual(field.prepare_value({"a": "b"}), '{"a": "b"}')
self.assertEqual(field.prepare_value(None), "null")
self.assertEqual(field.prepare_value("foo"), '"foo"')
self.assertEqual(field.prepare_value("你好,世界"), '"你好,世界"')
self.assertEqual(field.prepare_value({"a": "😀🐱"}), '{"a": "😀🐱"}')
self.assertEqual(
field.prepare_value(["你好,世界", "jaźń"]),
'["你好,世界", "jaźń"]',
)
def test_widget(self):
field = JSONField()
self.assertIsInstance(field.widget, Textarea)
def test_custom_widget_kwarg(self):
field = JSONField(widget=TextInput)
self.assertIsInstance(field.widget, TextInput)
def test_custom_widget_attribute(self):
"""The widget can be overridden with an attribute."""
class CustomJSONField(JSONField):
widget = TextInput
field = CustomJSONField()
self.assertIsInstance(field.widget, TextInput)
def test_converted_value(self):
field = JSONField(required=False)
tests = [
'["a", "b", "c"]',
'{"a": 1, "b": 2}',
"1",
"1.5",
'"foo"',
"true",
"false",
"null",
]
for json_string in tests:
with self.subTest(json_string=json_string):
val = field.clean(json_string)
self.assertEqual(field.clean(val), val)
def test_has_changed(self):
field = JSONField()
self.assertIs(field.has_changed({"a": True}, '{"a": 1}'), True)
self.assertIs(field.has_changed({"a": 1, "b": 2}, '{"b": 2, "a": 1}'), False)
def test_custom_encoder_decoder(self):
class CustomDecoder(json.JSONDecoder):
def __init__(self, object_hook=None, *args, **kwargs):
return super().__init__(object_hook=self.as_uuid, *args, **kwargs)
def as_uuid(self, dct):
if "uuid" in dct:
dct["uuid"] = uuid.UUID(dct["uuid"])
return dct
value = {"uuid": uuid.UUID("{c141e152-6550-4172-a784-05448d98204b}")}
encoded_value = '{"uuid": "c141e152-6550-4172-a784-05448d98204b"}'
field = JSONField(encoder=DjangoJSONEncoder, decoder=CustomDecoder)
self.assertEqual(field.prepare_value(value), encoded_value)
self.assertEqual(field.clean(encoded_value), value)
def test_formfield_disabled(self):
class JSONForm(Form):
json_field = JSONField(disabled=True)
form = JSONForm({"json_field": '["bar"]'}, initial={"json_field": ["foo"]})
self.assertIn("["foo"]</textarea>", form.as_p())
def test_redisplay_none_input(self):
class JSONForm(Form):
json_field = JSONField(required=True)
tests = [
{},
{"json_field": None},
]
for data in tests:
with self.subTest(data=data):
form = JSONForm(data)
self.assertEqual(form["json_field"].value(), "null")
self.assertIn("null</textarea>", form.as_p())
self.assertEqual(form.errors["json_field"], ["This field is required."])
def test_redisplay_wrong_input(self):
"""
Displaying a bound form (typically due to invalid input). The form
should not overquote JSONField inputs.
"""
class JSONForm(Form):
name = CharField(max_length=2)
json_field = JSONField()
# JSONField input is valid, name is too long.
form = JSONForm({"name": "xyz", "json_field": '["foo"]'})
self.assertNotIn("json_field", form.errors)
self.assertIn("["foo"]</textarea>", form.as_p())
# Invalid JSONField.
form = JSONForm({"name": "xy", "json_field": '{"foo"}'})
self.assertEqual(form.errors["json_field"], ["Enter a valid JSON."])
self.assertIn("{"foo"}</textarea>", form.as_p())
|
en
| 0.746433
|
The widget can be overridden with an attribute. Displaying a bound form (typically due to invalid input). The form should not overquote JSONField inputs. # JSONField input is valid, name is too long. # Invalid JSONField.
| 2.504735
| 3
|
cmd/add_audio.py
|
SubhashPavan/Train-Custom-Speech-Model
| 54
|
6626815
|
<reponame>SubhashPavan/Train-Custom-Speech-Model<filename>cmd/add_audio.py
# -*- coding: utf-8 -*-
import requests
import json
import codecs
import sys, time
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import env
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
##########################################################################
# Add an archive of audio files (wav files)
# You can add multiple audio sources to an acousic model
##########################################################################
audio_filename = env.get_arg("audio filename")
print("\nAdding audio source ...")
headers = {'Content-Type' : "application/zip"}
uri = env.get_endpoint() + "/v1/acoustic_customizations/"+env.get_acoustic_id()+"/audio/"+audio_filename
with open(audio_filename, 'rb') as f:
r = requests.post(uri, auth=(env.get_username(),env.get_password()), verify=False, headers=headers, data=f)
print("Adding audio source returns: ", r.status_code)
if r.status_code != 201:
print("Failed to add audio source")
print(r.text)
sys.exit(-1)
else:
sys.exit(0)
|
# -*- coding: utf-8 -*-
import requests
import json
import codecs
import sys, time
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import env
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
##########################################################################
# Add an archive of audio files (wav files)
# You can add multiple audio sources to an acousic model
##########################################################################
audio_filename = env.get_arg("audio filename")
print("\nAdding audio source ...")
headers = {'Content-Type' : "application/zip"}
uri = env.get_endpoint() + "/v1/acoustic_customizations/"+env.get_acoustic_id()+"/audio/"+audio_filename
with open(audio_filename, 'rb') as f:
r = requests.post(uri, auth=(env.get_username(),env.get_password()), verify=False, headers=headers, data=f)
print("Adding audio source returns: ", r.status_code)
if r.status_code != 201:
print("Failed to add audio source")
print(r.text)
sys.exit(-1)
else:
sys.exit(0)
|
de
| 0.602385
|
# -*- coding: utf-8 -*- ########################################################################## # Add an archive of audio files (wav files) # You can add multiple audio sources to an acousic model ##########################################################################
| 2.36962
| 2
|
blog/migrations/0004_blog_course.py
|
vollov/django-blog
| 0
|
6626816
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-11-10 15:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('blog', '0003_course'),
]
operations = [
migrations.AddField(
model_name='blog',
name='course',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='blog.Course'),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-11-10 15:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('blog', '0003_course'),
]
operations = [
migrations.AddField(
model_name='blog',
name='course',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='blog.Course'),
),
]
|
en
| 0.778212
|
# -*- coding: utf-8 -*- # Generated by Django 1.10 on 2016-11-10 15:16
| 1.57575
| 2
|
lib/pymedphys/_experimental/pinnacle/rtstruct.py
|
guanfada/pymedphys
| 1
|
6626817
|
<filename>lib/pymedphys/_experimental/pinnacle/rtstruct.py
# Copyright (C) 2019 South Western Sydney Local Health District,
# University of New South Wales
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This work is derived from:
# https://github.com/AndrewWAlexander/Pinnacle-tar-DICOM
# which is released under the following license:
# Copyright (c) [2017] [<NAME>, <NAME>]
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import random
import re
import time
from pymedphys._imports import pydicom
from .constants import (
GImplementationClassUID,
GTransferSyntaxUID,
Manufacturer,
RTSTRUCTModality,
RTStructSOPClassUID,
colors,
)
# Determine which point to use for the iso center and set this value in
# the plan object
def find_iso_center(plan):
    """Locate the isocenter among the plan's points and store it on *plan*.

    Sets ``plan.iso_center``, ``plan.ct_center`` and ``plan.dose_ref_pt``.
    Selection order: a point whose name (or Pinnacle ``PoiInterpretedType``)
    marks it as the isocenter, then the CT center point, then any point whose
    name mentions "center" or "iso", and finally the plan's first point.
    """
    iso_center = []
    ct_center = []
    dose_ref_pt = []
    for point in plan.points:
        converted = plan.convert_point(point)
        name = point["Name"]
        if any(tag in name for tag in ("Iso", "isocenter", "isocentre", "ISO")):
            iso_center = converted
        if any(tag in name for tag in ("CT Center", "ct center", "ct centre")):
            ct_center = converted
        if "drp" in name or "DRP" in name:
            dose_ref_pt = converted
        if "PoiInterpretedType" in point.keys():
            # Pinnacle explicitly tags this point as Iso CenterAtZero
            if "ISO" in point["PoiInterpretedType"]:
                iso_center = converted
                plan.logger.debug("ISO Center located: %s", iso_center)
    if len(iso_center) < 2:
        # No isocenter point found; fall back to the CT center point
        iso_center = ct_center
        plan.logger.debug("Isocenter not located, setting to ct center: %s", iso_center)
    if len(iso_center) < 2:
        plan.logger.debug(
            "Isocenter still not located, setting to point with center in name, if not, with iso in name"
        )
        center_candidate = []
        iso_candidate = []
        for p in plan.points:
            # NOTE: these fallbacks read the raw "refpoint" entry rather
            # than calling plan.convert_point, mirroring the original flow.
            if "center" in p["Name"]:
                center_candidate = p["refpoint"]
            elif "iso" in p["Name"]:
                iso_candidate = p["refpoint"]
        if len(center_candidate) > 1:
            iso_center = center_candidate
        elif len(iso_candidate) > 1:
            iso_center = iso_candidate
        elif len(plan.points) > 0:
            # setting to first point if isocenter or ct center not found
            iso_center = plan.points[0]["refpoint"]
    plan.iso_center = iso_center
    plan.ct_center = ct_center
    plan.dose_ref_pt = dose_ref_pt
    plan.logger.debug("Isocenter: %s", iso_center)
# Read points and insert them into the dicom dataset
def read_points(ds, plan):
    """Append one POINT contour per Pinnacle point to the RTSTRUCT dataset.

    For each point in ``plan.points`` a ROI Contour, Structure Set ROI and
    RT ROI Observations entry (type MARKER) is appended to ``ds``.
    ``plan.roi_count`` is reset to 0 and incremented per point so that the
    subsequent ROI parsing (``read_roi``) continues the numbering.

    Returns the modified ``ds``.
    """
    plan.roi_count = 0
    for point in plan.points:
        plan.roi_count = plan.roi_count + 1
        # Point coordinates converted to DICOM patient space.
        refpoint = plan.convert_point(point)
        roi_contour = pydicom.dataset.Dataset()
        roi_contour.ReferencedROINumber = str(plan.roi_count)
        roi_contour.ROIDisplayColor = colors[point["Color"]]
        roi_contour.ContourSequence = pydicom.sequence.Sequence()
        contour = pydicom.dataset.Dataset()
        contour.ContourData = refpoint
        contour.ContourGeometricType = "POINT"
        contour.NumberOfContourPoints = 1
        contour.ContourImageSequence = pydicom.sequence.Sequence()
        contour_image = pydicom.dataset.Dataset()
        # Find the CT slice whose table position is closest to the point's
        # last coordinate and reference that slice from the contour.
        # NOTE(review): the initial distance uses float(refpoint[-1]) while
        # the loop uses -(refpoint[-1] / 10) (sign flip and cm/mm scaling) —
        # this looks inconsistent; confirm against the Pinnacle
        # TablePosition convention. If the loop condition never fires,
        # contour_image is appended without any Referenced* attributes.
        closestvalue = abs(
            float(plan.primary_image.image_info[0]["TablePosition"])
            - float(refpoint[-1])
        )
        for s in plan.primary_image.image_info:
            if (
                abs(float(s["TablePosition"]) - (-float(refpoint[-1] / 10)))
                <= closestvalue
            ):
                closestvalue = abs(
                    float(s["TablePosition"]) - (-float(refpoint[-1] / 10))
                )
                # CT Image Storage SOP Class UID
                contour_image.ReferencedSOPClassUID = "1.2.840.10008.5.1.4.1.1.2"
                contour_image.ReferencedSOPInstanceUID = s["InstanceUID"]
        contour.ContourImageSequence.append(contour_image)
        roi_contour.ContourSequence.append(contour)
        ds.ROIContourSequence.append(roi_contour)
        structure_set_roi = pydicom.dataset.Dataset()
        structure_set_roi.ROINumber = plan.roi_count
        structure_set_roi.ROIName = point["Name"]
        plan.logger.info("Exporting point: %s", point["Name"])
        # Not sure what this is for, just basing off template, should look into further
        structure_set_roi.ROIGenerationAlgorithm = "SEMIAUTOMATIC"
        structure_set_roi.ReferencedFrameOfReferenceUID = plan.primary_image.image_info[
            0
        ]["FrameUID"]
        ds.StructureSetROISequence.append(structure_set_roi)
        rt_roi_observations = pydicom.dataset.Dataset()
        rt_roi_observations.ObservationNumber = plan.roi_count
        rt_roi_observations.ReferencedROINumber = plan.roi_count
        rt_roi_observations.RTROIInterpretedType = "MARKER"
        rt_roi_observations.ROIInterpreter = ""
        ds.RTROIObservationsSequence.append(rt_roi_observations)
    # Not applying any shifts of points at the moment. Needed for Pinnacle pre v9.0
    # for enteredpoints in ds.ROIContourSequence:
    #     #logger.debug("In loop applying shifts: isocenter:" + str(data["isocenter"]) )
    #     enteredpoints.ContourSequence[0].ContourData[0] = str(float(enteredpoints.ContourSequence[0].ContourData[0]) - data["xshift"])
    #     enteredpoints.ContourSequence[0].ContourData[1] = str(float(enteredpoints.ContourSequence[0].ContourData[1]) - data["yshift"])
    #     #enteredpoints.ContourSequence[0].ContourData[2] = str(float(enteredpoints.ContourSequence[0].ContourData[2]) - float(data["isocenter"][2]))
    #     #logger.debug("bottom of loop applying shifts isocenter:" + str(data["isocenter"]))
    return ds
# This function reads the plan.roi file line by line. This file is somehow not structured like the others,
# and isn't tab indented properly, so won't parse onto YAML.
def read_roi(ds, plan):
    """Parse the Pinnacle ``plan.roi`` file and append its ROIs to ``ds``.

    The file is scanned line by line (it is not valid YAML, so it cannot be
    parsed like the other Pinnacle files). Marker strings in each line drive
    a small state machine: ROI begin/end, curve begin/end, per-line point
    coordinates, color, volume and interpreted type.

    ``plan.roi_count`` continues from the value left by ``read_points`` so
    ROI numbering stays unique across points and contours.

    Returns the modified ``ds``.
    """
    image_header = plan.primary_image.image_header
    path_roi = os.path.join(plan.path, "plan.roi")
    points = []
    flag_points = (
        False  # bool value to tell me if I want to read the line in as point values
    )
    plan.logger.debug("Reading ROI from: %s", path_roi)
    first_points = []
    with open(path_roi, "rt") as f:
        for _, line in enumerate(f, 1):
            if (
                "}; // End of points for curve" in line
            ):  # this will tell me not to read in point values
                # all points for current curve saved until now. Here is where I
                # need to add them to dicom file
                numfind = int(line.find("curve") + 5)
                line = line[numfind : len(line)]
                line = line.strip()
                curvenum = int(line)
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence[
                    int(curvenum) - 1
                ].NumberOfContourPoints = int(len(points) / 3)
                # NOTE(review): `points` is cleared in place below
                # (del points[:]); this relies on pydicom copying/converting
                # the list on assignment — confirm ContourData retains the
                # values after the clear.
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence[
                    curvenum - 1
                ].ContourData = points
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence[
                    int(curvenum) - 1
                ].ContourImageSequence = pydicom.sequence.Sequence()
                contour_image = pydicom.dataset.Dataset()
                # Find the CT slice closest to the contour's last z value.
                # NOTE(review): same sign/scale mismatch as read_points
                # (float(points[-1]) vs -(points[-1] / 10)) — confirm.
                closestvalue = abs(
                    float(plan.primary_image.image_info[0]["TablePosition"])
                    - float(points[-1])
                )
                for s in plan.primary_image.image_info:
                    if (
                        abs(float(s["TablePosition"]) - (-float(points[-1] / 10)))
                        <= closestvalue
                    ):
                        closestvalue = abs(
                            float(s["TablePosition"]) - (-float(points[-1] / 10))
                        )
                        # CT Image Storage SOP Class UID
                        contour_image.ReferencedSOPClassUID = (
                            "1.2.840.10008.5.1.4.1.1.2"
                        )
                        contour_image.ReferencedSOPInstanceUID = s["InstanceUID"]
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence[
                    int(curvenum) - 1
                ].ContourImageSequence.append(contour_image)
                del points[:]
                flag_points = False
            if flag_points:
                curr_points = line.split(" ")
                if curr_points == first_points:
                    # These points are the exact same as the first point, skip it!
                    continue
                if len(first_points) == 0:
                    first_points = curr_points
                # Convert Pinnacle cm coordinates to DICOM mm, flipping axes
                # according to the patient position of the primary image.
                if image_header["patient_position"] == "HFS":
                    curr_points = [
                        float(curr_points[0]) * 10,
                        -float(curr_points[1]) * 10,
                        -float(curr_points[2]) * 10,
                    ]
                elif image_header["patient_position"] == "HFP":
                    curr_points = [
                        -float(curr_points[0]) * 10,
                        float(curr_points[1]) * 10,
                        -float(curr_points[2]) * 10,
                    ]
                elif image_header["patient_position"] == "FFP":
                    curr_points = [
                        float(curr_points[0]) * 10,
                        float(curr_points[1]) * 10,
                        float(curr_points[2]) * 10,
                    ]
                elif image_header["patient_position"] == "FFS":
                    curr_points = [
                        -float(curr_points[0]) * 10,
                        -float(curr_points[1]) * 10,
                        float(curr_points[2]) * 10,
                    ]
                # NOTE(review): only the first triplet (len == 3) is ever
                # rounded — presumably intentional for the curve's first
                # point, but worth confirming.
                if len(points) == 3:
                    points[0] = round(points[0], 5)
                    points[1] = round(points[1], 5)
                    points[2] = round(points[2], 5)
                points = points + curr_points
            if "Beginning of ROI" in line:  # Start of ROI
                plan.roi_count = (
                    plan.roi_count + 1
                )  # increment ROI_num because I've found a new ROI
                roi_contour = pydicom.dataset.Dataset()
                roi_contour.ReferencedROINumber = str(plan.roi_count)
                # NOTE(review): the same Dataset object is appended to both
                # sequences here; its ROI attributes are filled in below.
                ds.ROIContourSequence.append(roi_contour)
                ds.StructureSetROISequence.append(roi_contour)
                rt_roi_observations = pydicom.dataset.Dataset()
                ds.RTROIObservationsSequence.append(rt_roi_observations)
                ds.StructureSetROISequence[
                    plan.roi_count - 1
                ].ROINumber = plan.roi_count
                ROIName = line[22:]  # gets a string of ROI name
                ROIName = ROIName.replace("\n", "")
                ds.StructureSetROISequence[plan.roi_count - 1].ROIName = ROIName
                ds.StructureSetROISequence[
                    plan.roi_count - 1
                ].ROIGenerationAlgorithm = "SEMIAUTOMATIC"
                ds.StructureSetROISequence[
                    plan.roi_count - 1
                ].ReferencedFrameOfReferenceUID = plan.primary_image.image_info[0][
                    "FrameUID"
                ]
                ds.ROIContourSequence[
                    plan.roi_count - 1
                ].ContourSequence = pydicom.sequence.Sequence()
                # Default interpreted type; overridden by a
                # "roiinterpretedtype:" line if present.
                roiinterpretedtype = "ORGAN"
                plan.logger.info("Exporting ROI: %s", ROIName)
            if "roiinterpretedtype:" in line:
                roiinterpretedtype = line.split(" ")[-1].replace("\n", "")
            if "color:" in line:
                roi_color = line.split(" ")[-1].replace("\n", "")
                try:
                    ds.ROIContourSequence[plan.roi_count - 1].ROIDisplayColor = colors[
                        roi_color
                    ]
                except KeyError:
                    # Unknown Pinnacle color name: pick a random known color
                    # so the ROI still displays.
                    plan.logger.info("ROI Color not known: %s", roi_color)
                    new_color = random.choice(list(colors))
                    plan.logger.info("Instead, assigning color: %s", new_color)
                    ds.ROIContourSequence[plan.roi_count - 1].ROIDisplayColor = colors[
                        new_color
                    ]
            if "}; // End of ROI" in line:  # end of ROI found
                ds.RTROIObservationsSequence[
                    plan.roi_count - 1
                ].ObservationNumber = plan.roi_count
                ds.RTROIObservationsSequence[
                    plan.roi_count - 1
                ].ReferencedROINumber = plan.roi_count
                ds.RTROIObservationsSequence[
                    plan.roi_count - 1
                ].RTROIInterpretedType = roiinterpretedtype
                ds.RTROIObservationsSequence[plan.roi_count - 1].ROIInterpreter = ""
                # add to ROI observation sequence
            if "volume =" in line:
                vol = re.findall(r"[-+]?\d*\.\d+|\d+", line)[0]
                ds.StructureSetROISequence[plan.roi_count - 1].ROIVolume = vol
            if "// Curve " in line:  # found a curve
                first_points = []
                curvenum = re.findall(r"[-+]?\d*\.\d+|\d+", line)[0]
                contour = pydicom.dataset.Dataset()
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence.append(
                    contour
                )
            if "num_points =" in line:
                npts = re.findall(r"[-+]?\d*\.\d+|\d+", line)[0]
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence[
                    int(curvenum) - 1
                ].ContourGeometricType = "CLOSED_PLANAR"
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence[
                    int(curvenum) - 1
                ].NumberOfContourPoints = npts
            if "points=" in line:
                flag_points = True
    plan.logger.debug("patient pos: %s", image_header["patient_position"])
    return ds
def convert_struct(plan, export_path):
    """Build an RTSTRUCT DICOM file from a Pinnacle plan and write it out.

    Creates the file meta information, top-level RTSTRUCT attributes and
    the referenced frame-of-reference/series hierarchy, then delegates to
    ``find_iso_center``, ``read_points`` and ``read_roi`` to populate the
    ROI sequences. The result is saved as ``RS.<SOPInstanceUID>.dcm`` in
    *export_path*. Returns ``None``; logs and bails out early when the plan
    has no primary image.
    """
    # Check that the plan has a primary image, as we can't create a meaningful RTSTRUCT without it:
    if not plan.primary_image:
        plan.logger.error(
            "No primary image found for plan. Unable to generate RTSTRUCT."
        )
        return
    patient_info = plan.pinnacle.patient_info
    struct_sop_instuid = plan.struct_inst_uid
    # Populate required values for file meta information
    file_meta = pydicom.dataset.Dataset()
    file_meta.MediaStorageSOPClassUID = RTStructSOPClassUID
    file_meta.TransferSyntaxUID = GTransferSyntaxUID
    file_meta.MediaStorageSOPInstanceUID = struct_sop_instuid
    file_meta.ImplementationClassUID = GImplementationClassUID
    struct_filename = f"RS.{struct_sop_instuid}.dcm"
    # FIX: the FileDataset was previously constructed twice in a row, with
    # the first instance immediately discarded; construct it once.
    ds = pydicom.dataset.FileDataset(
        struct_filename, {}, file_meta=file_meta, preamble=b"\x00" * 128
    )
    struct_series_instuid = pydicom.uid.generate_uid()
    ds.ReferencedStudySequence = pydicom.sequence.Sequence()
    # not sure what I want here, going off of template dicom file
    ds.SpecificCharacterSet = "ISO_IR 100"
    ds.InstanceCreationDate = time.strftime("%Y%m%d")
    ds.InstanceCreationTime = time.strftime("%H%M%S")
    ds.SOPClassUID = RTStructSOPClassUID
    ds.SOPInstanceUID = struct_sop_instuid
    ds.Modality = RTSTRUCTModality
    ds.AccessionNumber = ""
    ds.Manufacturer = Manufacturer  # from sample dicom file, maybe should change?
    # not sure where to get information for this element can find this and
    # read in from
    ds.StationName = "adacp3u7"
    # ds.ManufacturersModelName = 'Pinnacle3'
    ReferencedStudy1 = pydicom.dataset.Dataset()
    ds.ReferencedStudySequence.append(ReferencedStudy1)
    # Study Component Management SOP Class (chosen from template)
    ds.ReferencedStudySequence[0].ReferencedSOPClassUID = "1.2.840.10008.3.1.2.3.2"
    ds.ReferencedStudySequence[
        0
    ].ReferencedSOPInstanceUID = plan.primary_image.image_info[0]["StudyInstanceUID"]
    ds.StudyInstanceUID = plan.primary_image.image_info[0]["StudyInstanceUID"]
    ds.SeriesInstanceUID = struct_series_instuid
    ds.PatientID = patient_info["MedicalRecordNumber"]
    # NOTE(review): "ReferringPhysiciansName" and "ManufacturersModelName"
    # (below) are not the standard pydicom keywords (ReferringPhysicianName /
    # ManufacturerModelName) — confirm these elements actually appear in the
    # written file before relying on them.
    ds.ReferringPhysiciansName = patient_info["ReferringPhysician"]
    ds.PhysiciansOfRecord = patient_info["RadiationOncologist"]
    ds.StudyDescription = patient_info["Comment"]
    ds.PatientSex = patient_info["Gender"][0]
    ds.PatientBirthDate = patient_info["DOB"]
    ds.StructureSetLabel = plan.plan_info["PlanName"]
    ds.StudyID = plan.primary_image.image["StudyID"]
    datetimesplit = plan.plan_info["ObjectVersion"]["WriteTimeStamp"].split()
    # Read more accurate date from trial file if it is available
    trial_info = plan.trial_info
    if trial_info:
        datetimesplit = trial_info["ObjectVersion"]["WriteTimeStamp"].split()
    study_date = datetimesplit[0].replace("-", "")
    study_time = datetimesplit[1].replace(":", "")
    ds.StructureSetDate = study_date
    ds.StructureSetTime = study_time
    ds.StudyDate = study_date
    ds.StudyTime = study_time
    ds.ManufacturersModelName = plan.plan_info["ToolType"]
    ds.SoftwareVersions = plan.plan_info["PinnacleVersionDescription"]
    ds.StructureSetName = "POIandROI"
    ds.SeriesNumber = "1"
    ds.PatientName = patient_info["FullName"]
    # Reference the primary image's frame of reference, study and series so
    # the RTSTRUCT is linked to the CT it was contoured on.
    ds.ReferencedFrameOfReferenceSequence = pydicom.sequence.Sequence()
    ReferencedFrameofReference = pydicom.dataset.Dataset()
    ds.ReferencedFrameOfReferenceSequence.append(ReferencedFrameofReference)
    ds.ReferencedFrameOfReferenceSequence[
        0
    ].FrameOfReferenceUID = plan.primary_image.image_info[0]["FrameUID"]
    ds.ReferencedFrameOfReferenceSequence[
        0
    ].RTReferencedStudySequence = pydicom.sequence.Sequence()
    RTReferencedStudy = pydicom.dataset.Dataset()
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence.append(
        RTReferencedStudy
    )
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0
    ].ReferencedSOPClassUID = "1.2.840.10008.3.1.2.3.2"
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0
    ].ReferencedSOPInstanceUID = plan.primary_image.image_info[0]["StudyInstanceUID"]
    # (A redundant second assignment of ds.StudyInstanceUID with the same
    # value was removed here.)
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0
    ].RTReferencedSeriesSequence = pydicom.sequence.Sequence()
    RTReferencedSeries = pydicom.dataset.Dataset()
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0
    ].RTReferencedSeriesSequence.append(RTReferencedSeries)
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0
    ].RTReferencedSeriesSequence[0].SeriesInstanceUID = plan.primary_image.image_info[
        0
    ][
        "SeriesUID"
    ]
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0
    ].RTReferencedSeriesSequence[0].ContourImageSequence = pydicom.sequence.Sequence()
    # Reference every slice of the primary image series.
    for info in plan.primary_image.image_info:
        contour_image = pydicom.dataset.Dataset()
        contour_image.ReferencedSOPClassUID = "1.2.840.10008.5.1.4.1.1.2"
        contour_image.ReferencedSOPInstanceUID = info["InstanceUID"]
        ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
            0
        ].RTReferencedSeriesSequence[0].ContourImageSequence.append(contour_image)
    ds.ROIContourSequence = pydicom.sequence.Sequence()
    ds.StructureSetROISequence = pydicom.sequence.Sequence()
    ds.RTROIObservationsSequence = pydicom.sequence.Sequence()
    # Determine ISO Center
    find_iso_center(plan)
    ds = read_points(ds, plan)
    ds = read_roi(ds, plan)
    # find out where to get if its been approved or not
    # find out how to insert proper 'CodeString' here
    ds.ApprovalStatus = "UNAPPROVED"
    # Set the transfer syntax
    # TODO: Use `pymedphys._dicom.create.set_default_transfer_syntax` here
    ds.is_little_endian = True
    ds.is_implicit_VR = True
    # Save the RTSTRUCT DICOM file
    output_file = os.path.join(export_path, struct_filename)
    plan.logger.info("Creating Struct file: %s", output_file)
    ds.save_as(output_file, write_like_original=False)
|
<filename>lib/pymedphys/_experimental/pinnacle/rtstruct.py
# Copyright (C) 2019 South Western Sydney Local Health District,
# University of New South Wales
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This work is derived from:
# https://github.com/AndrewWAlexander/Pinnacle-tar-DICOM
# which is released under the following license:
# Copyright (c) [2017] [<NAME>, <NAME>]
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import random
import re
import time
from pymedphys._imports import pydicom
from .constants import (
GImplementationClassUID,
GTransferSyntaxUID,
Manufacturer,
RTSTRUCTModality,
RTStructSOPClassUID,
colors,
)
# Determine which point to use for the iso center and set this value in
# the plan object
def find_iso_center(plan):
    """Locate the isocenter among the plan's points and store it on *plan*.

    Sets ``plan.iso_center``, ``plan.ct_center`` and ``plan.dose_ref_pt``.
    Selection order: a point whose name (or Pinnacle ``PoiInterpretedType``)
    marks it as the isocenter, then the CT center point, then any point whose
    name mentions "center" or "iso", and finally the plan's first point.
    """
    iso_center = []
    ct_center = []
    dose_ref_pt = []
    for point in plan.points:
        converted = plan.convert_point(point)
        name = point["Name"]
        if any(tag in name for tag in ("Iso", "isocenter", "isocentre", "ISO")):
            iso_center = converted
        if any(tag in name for tag in ("CT Center", "ct center", "ct centre")):
            ct_center = converted
        if "drp" in name or "DRP" in name:
            dose_ref_pt = converted
        if "PoiInterpretedType" in point.keys():
            # Pinnacle explicitly tags this point as Iso CenterAtZero
            if "ISO" in point["PoiInterpretedType"]:
                iso_center = converted
                plan.logger.debug("ISO Center located: %s", iso_center)
    if len(iso_center) < 2:
        # No isocenter point found; fall back to the CT center point
        iso_center = ct_center
        plan.logger.debug("Isocenter not located, setting to ct center: %s", iso_center)
    if len(iso_center) < 2:
        plan.logger.debug(
            "Isocenter still not located, setting to point with center in name, if not, with iso in name"
        )
        center_candidate = []
        iso_candidate = []
        for p in plan.points:
            # NOTE: these fallbacks read the raw "refpoint" entry rather
            # than calling plan.convert_point, mirroring the original flow.
            if "center" in p["Name"]:
                center_candidate = p["refpoint"]
            elif "iso" in p["Name"]:
                iso_candidate = p["refpoint"]
        if len(center_candidate) > 1:
            iso_center = center_candidate
        elif len(iso_candidate) > 1:
            iso_center = iso_candidate
        elif len(plan.points) > 0:
            # setting to first point if isocenter or ct center not found
            iso_center = plan.points[0]["refpoint"]
    plan.iso_center = iso_center
    plan.ct_center = ct_center
    plan.dose_ref_pt = dose_ref_pt
    plan.logger.debug("Isocenter: %s", iso_center)
# Read points and insert them into the dicom dataset
def read_points(ds, plan):
    """Append one POINT contour per Pinnacle point to the RTSTRUCT dataset.

    For each point in ``plan.points`` a ROI Contour, Structure Set ROI and
    RT ROI Observations entry (type MARKER) is appended to ``ds``.
    ``plan.roi_count`` is reset to 0 and incremented per point so that the
    subsequent ROI parsing (``read_roi``) continues the numbering.

    Returns the modified ``ds``.
    """
    plan.roi_count = 0
    for point in plan.points:
        plan.roi_count = plan.roi_count + 1
        # Point coordinates converted to DICOM patient space.
        refpoint = plan.convert_point(point)
        roi_contour = pydicom.dataset.Dataset()
        roi_contour.ReferencedROINumber = str(plan.roi_count)
        roi_contour.ROIDisplayColor = colors[point["Color"]]
        roi_contour.ContourSequence = pydicom.sequence.Sequence()
        contour = pydicom.dataset.Dataset()
        contour.ContourData = refpoint
        contour.ContourGeometricType = "POINT"
        contour.NumberOfContourPoints = 1
        contour.ContourImageSequence = pydicom.sequence.Sequence()
        contour_image = pydicom.dataset.Dataset()
        # Find the CT slice whose table position is closest to the point's
        # last coordinate and reference that slice from the contour.
        # NOTE(review): the initial distance uses float(refpoint[-1]) while
        # the loop uses -(refpoint[-1] / 10) (sign flip and cm/mm scaling) —
        # this looks inconsistent; confirm against the Pinnacle
        # TablePosition convention. If the loop condition never fires,
        # contour_image is appended without any Referenced* attributes.
        closestvalue = abs(
            float(plan.primary_image.image_info[0]["TablePosition"])
            - float(refpoint[-1])
        )
        for s in plan.primary_image.image_info:
            if (
                abs(float(s["TablePosition"]) - (-float(refpoint[-1] / 10)))
                <= closestvalue
            ):
                closestvalue = abs(
                    float(s["TablePosition"]) - (-float(refpoint[-1] / 10))
                )
                # CT Image Storage SOP Class UID
                contour_image.ReferencedSOPClassUID = "1.2.840.10008.5.1.4.1.1.2"
                contour_image.ReferencedSOPInstanceUID = s["InstanceUID"]
        contour.ContourImageSequence.append(contour_image)
        roi_contour.ContourSequence.append(contour)
        ds.ROIContourSequence.append(roi_contour)
        structure_set_roi = pydicom.dataset.Dataset()
        structure_set_roi.ROINumber = plan.roi_count
        structure_set_roi.ROIName = point["Name"]
        plan.logger.info("Exporting point: %s", point["Name"])
        # Not sure what this is for, just basing off template, should look into further
        structure_set_roi.ROIGenerationAlgorithm = "SEMIAUTOMATIC"
        structure_set_roi.ReferencedFrameOfReferenceUID = plan.primary_image.image_info[
            0
        ]["FrameUID"]
        ds.StructureSetROISequence.append(structure_set_roi)
        rt_roi_observations = pydicom.dataset.Dataset()
        rt_roi_observations.ObservationNumber = plan.roi_count
        rt_roi_observations.ReferencedROINumber = plan.roi_count
        rt_roi_observations.RTROIInterpretedType = "MARKER"
        rt_roi_observations.ROIInterpreter = ""
        ds.RTROIObservationsSequence.append(rt_roi_observations)
    # Not applying any shifts of points at the moment. Needed for Pinnacle pre v9.0
    # for enteredpoints in ds.ROIContourSequence:
    #     #logger.debug("In loop applying shifts: isocenter:" + str(data["isocenter"]) )
    #     enteredpoints.ContourSequence[0].ContourData[0] = str(float(enteredpoints.ContourSequence[0].ContourData[0]) - data["xshift"])
    #     enteredpoints.ContourSequence[0].ContourData[1] = str(float(enteredpoints.ContourSequence[0].ContourData[1]) - data["yshift"])
    #     #enteredpoints.ContourSequence[0].ContourData[2] = str(float(enteredpoints.ContourSequence[0].ContourData[2]) - float(data["isocenter"][2]))
    #     #logger.debug("bottom of loop applying shifts isocenter:" + str(data["isocenter"]))
    return ds
# This function reads the plan.roi file line by line. This file is somehow not structured like the others,
# and isn't tab indented properly, so won't parse onto YAML.
def read_roi(ds, plan):
    """Parse the Pinnacle ``plan.roi`` file and append its ROIs to ``ds``.

    The file is scanned line by line (it is not valid YAML, so it cannot be
    parsed like the other Pinnacle files). Marker strings in each line drive
    a small state machine: ROI begin/end, curve begin/end, per-line point
    coordinates, color, volume and interpreted type.

    ``plan.roi_count`` continues from the value left by ``read_points`` so
    ROI numbering stays unique across points and contours.

    Returns the modified ``ds``.
    """
    image_header = plan.primary_image.image_header
    path_roi = os.path.join(plan.path, "plan.roi")
    points = []
    flag_points = (
        False  # bool value to tell me if I want to read the line in as point values
    )
    plan.logger.debug("Reading ROI from: %s", path_roi)
    first_points = []
    with open(path_roi, "rt") as f:
        for _, line in enumerate(f, 1):
            if (
                "}; // End of points for curve" in line
            ):  # this will tell me not to read in point values
                # all points for current curve saved until now. Here is where I
                # need to add them to dicom file
                numfind = int(line.find("curve") + 5)
                line = line[numfind : len(line)]
                line = line.strip()
                curvenum = int(line)
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence[
                    int(curvenum) - 1
                ].NumberOfContourPoints = int(len(points) / 3)
                # NOTE(review): `points` is cleared in place below
                # (del points[:]); this relies on pydicom copying/converting
                # the list on assignment — confirm ContourData retains the
                # values after the clear.
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence[
                    curvenum - 1
                ].ContourData = points
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence[
                    int(curvenum) - 1
                ].ContourImageSequence = pydicom.sequence.Sequence()
                contour_image = pydicom.dataset.Dataset()
                # Find the CT slice closest to the contour's last z value.
                # NOTE(review): same sign/scale mismatch as read_points
                # (float(points[-1]) vs -(points[-1] / 10)) — confirm.
                closestvalue = abs(
                    float(plan.primary_image.image_info[0]["TablePosition"])
                    - float(points[-1])
                )
                for s in plan.primary_image.image_info:
                    if (
                        abs(float(s["TablePosition"]) - (-float(points[-1] / 10)))
                        <= closestvalue
                    ):
                        closestvalue = abs(
                            float(s["TablePosition"]) - (-float(points[-1] / 10))
                        )
                        # CT Image Storage SOP Class UID
                        contour_image.ReferencedSOPClassUID = (
                            "1.2.840.10008.5.1.4.1.1.2"
                        )
                        contour_image.ReferencedSOPInstanceUID = s["InstanceUID"]
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence[
                    int(curvenum) - 1
                ].ContourImageSequence.append(contour_image)
                del points[:]
                flag_points = False
            if flag_points:
                curr_points = line.split(" ")
                if curr_points == first_points:
                    # These points are the exact same as the first point, skip it!
                    continue
                if len(first_points) == 0:
                    first_points = curr_points
                # Convert Pinnacle cm coordinates to DICOM mm, flipping axes
                # according to the patient position of the primary image.
                if image_header["patient_position"] == "HFS":
                    curr_points = [
                        float(curr_points[0]) * 10,
                        -float(curr_points[1]) * 10,
                        -float(curr_points[2]) * 10,
                    ]
                elif image_header["patient_position"] == "HFP":
                    curr_points = [
                        -float(curr_points[0]) * 10,
                        float(curr_points[1]) * 10,
                        -float(curr_points[2]) * 10,
                    ]
                elif image_header["patient_position"] == "FFP":
                    curr_points = [
                        float(curr_points[0]) * 10,
                        float(curr_points[1]) * 10,
                        float(curr_points[2]) * 10,
                    ]
                elif image_header["patient_position"] == "FFS":
                    curr_points = [
                        -float(curr_points[0]) * 10,
                        -float(curr_points[1]) * 10,
                        float(curr_points[2]) * 10,
                    ]
                # NOTE(review): only the first triplet (len == 3) is ever
                # rounded — presumably intentional for the curve's first
                # point, but worth confirming.
                if len(points) == 3:
                    points[0] = round(points[0], 5)
                    points[1] = round(points[1], 5)
                    points[2] = round(points[2], 5)
                points = points + curr_points
            if "Beginning of ROI" in line:  # Start of ROI
                plan.roi_count = (
                    plan.roi_count + 1
                )  # increment ROI_num because I've found a new ROI
                roi_contour = pydicom.dataset.Dataset()
                roi_contour.ReferencedROINumber = str(plan.roi_count)
                # NOTE(review): the same Dataset object is appended to both
                # sequences here; its ROI attributes are filled in below.
                ds.ROIContourSequence.append(roi_contour)
                ds.StructureSetROISequence.append(roi_contour)
                rt_roi_observations = pydicom.dataset.Dataset()
                ds.RTROIObservationsSequence.append(rt_roi_observations)
                ds.StructureSetROISequence[
                    plan.roi_count - 1
                ].ROINumber = plan.roi_count
                ROIName = line[22:]  # gets a string of ROI name
                ROIName = ROIName.replace("\n", "")
                ds.StructureSetROISequence[plan.roi_count - 1].ROIName = ROIName
                ds.StructureSetROISequence[
                    plan.roi_count - 1
                ].ROIGenerationAlgorithm = "SEMIAUTOMATIC"
                ds.StructureSetROISequence[
                    plan.roi_count - 1
                ].ReferencedFrameOfReferenceUID = plan.primary_image.image_info[0][
                    "FrameUID"
                ]
                ds.ROIContourSequence[
                    plan.roi_count - 1
                ].ContourSequence = pydicom.sequence.Sequence()
                # Default interpreted type; overridden by a
                # "roiinterpretedtype:" line if present.
                roiinterpretedtype = "ORGAN"
                plan.logger.info("Exporting ROI: %s", ROIName)
            if "roiinterpretedtype:" in line:
                roiinterpretedtype = line.split(" ")[-1].replace("\n", "")
            if "color:" in line:
                roi_color = line.split(" ")[-1].replace("\n", "")
                try:
                    ds.ROIContourSequence[plan.roi_count - 1].ROIDisplayColor = colors[
                        roi_color
                    ]
                except KeyError:
                    # Unknown Pinnacle color name: pick a random known color
                    # so the ROI still displays.
                    plan.logger.info("ROI Color not known: %s", roi_color)
                    new_color = random.choice(list(colors))
                    plan.logger.info("Instead, assigning color: %s", new_color)
                    ds.ROIContourSequence[plan.roi_count - 1].ROIDisplayColor = colors[
                        new_color
                    ]
            if "}; // End of ROI" in line:  # end of ROI found
                ds.RTROIObservationsSequence[
                    plan.roi_count - 1
                ].ObservationNumber = plan.roi_count
                ds.RTROIObservationsSequence[
                    plan.roi_count - 1
                ].ReferencedROINumber = plan.roi_count
                ds.RTROIObservationsSequence[
                    plan.roi_count - 1
                ].RTROIInterpretedType = roiinterpretedtype
                ds.RTROIObservationsSequence[plan.roi_count - 1].ROIInterpreter = ""
                # add to ROI observation sequence
            if "volume =" in line:
                vol = re.findall(r"[-+]?\d*\.\d+|\d+", line)[0]
                ds.StructureSetROISequence[plan.roi_count - 1].ROIVolume = vol
            if "// Curve " in line:  # found a curve
                first_points = []
                curvenum = re.findall(r"[-+]?\d*\.\d+|\d+", line)[0]
                contour = pydicom.dataset.Dataset()
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence.append(
                    contour
                )
            if "num_points =" in line:
                npts = re.findall(r"[-+]?\d*\.\d+|\d+", line)[0]
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence[
                    int(curvenum) - 1
                ].ContourGeometricType = "CLOSED_PLANAR"
                ds.ROIContourSequence[plan.roi_count - 1].ContourSequence[
                    int(curvenum) - 1
                ].NumberOfContourPoints = npts
            if "points=" in line:
                flag_points = True
    plan.logger.debug("patient pos: %s", image_header["patient_position"])
    return ds
def convert_struct(plan, export_path):
    """Build an RTSTRUCT DICOM file from a Pinnacle plan and write it out.

    Creates the file meta information, top-level RTSTRUCT attributes and
    the referenced frame-of-reference/series hierarchy, then delegates to
    ``find_iso_center``, ``read_points`` and ``read_roi`` to populate the
    ROI sequences. The result is saved as ``RS.<SOPInstanceUID>.dcm`` in
    *export_path*. Returns ``None``; logs and bails out early when the plan
    has no primary image.
    """
    # Check that the plan has a primary image, as we can't create a meaningful RTSTRUCT without it:
    if not plan.primary_image:
        plan.logger.error(
            "No primary image found for plan. Unable to generate RTSTRUCT."
        )
        return
    patient_info = plan.pinnacle.patient_info
    struct_sop_instuid = plan.struct_inst_uid
    # Populate required values for file meta information
    file_meta = pydicom.dataset.Dataset()
    file_meta.MediaStorageSOPClassUID = RTStructSOPClassUID
    file_meta.TransferSyntaxUID = GTransferSyntaxUID
    file_meta.MediaStorageSOPInstanceUID = struct_sop_instuid
    file_meta.ImplementationClassUID = GImplementationClassUID
    struct_filename = f"RS.{struct_sop_instuid}.dcm"
    # FIX: the FileDataset was previously constructed twice in a row, with
    # the first instance immediately discarded; construct it once.
    ds = pydicom.dataset.FileDataset(
        struct_filename, {}, file_meta=file_meta, preamble=b"\x00" * 128
    )
    struct_series_instuid = pydicom.uid.generate_uid()
    ds.ReferencedStudySequence = pydicom.sequence.Sequence()
    # not sure what I want here, going off of template dicom file
    ds.SpecificCharacterSet = "ISO_IR 100"
    ds.InstanceCreationDate = time.strftime("%Y%m%d")
    ds.InstanceCreationTime = time.strftime("%H%M%S")
    ds.SOPClassUID = RTStructSOPClassUID
    ds.SOPInstanceUID = struct_sop_instuid
    ds.Modality = RTSTRUCTModality
    ds.AccessionNumber = ""
    ds.Manufacturer = Manufacturer  # from sample dicom file, maybe should change?
    # not sure where to get information for this element can find this and
    # read in from
    ds.StationName = "adacp3u7"
    # ds.ManufacturersModelName = 'Pinnacle3'
    ReferencedStudy1 = pydicom.dataset.Dataset()
    ds.ReferencedStudySequence.append(ReferencedStudy1)
    # Study Component Management SOP Class (chosen from template)
    ds.ReferencedStudySequence[0].ReferencedSOPClassUID = "1.2.840.10008.3.1.2.3.2"
    ds.ReferencedStudySequence[
        0
    ].ReferencedSOPInstanceUID = plan.primary_image.image_info[0]["StudyInstanceUID"]
    ds.StudyInstanceUID = plan.primary_image.image_info[0]["StudyInstanceUID"]
    ds.SeriesInstanceUID = struct_series_instuid
    ds.PatientID = patient_info["MedicalRecordNumber"]
    # NOTE(review): "ReferringPhysiciansName" and "ManufacturersModelName"
    # (below) are not the standard pydicom keywords (ReferringPhysicianName /
    # ManufacturerModelName) — confirm these elements actually appear in the
    # written file before relying on them.
    ds.ReferringPhysiciansName = patient_info["ReferringPhysician"]
    ds.PhysiciansOfRecord = patient_info["RadiationOncologist"]
    ds.StudyDescription = patient_info["Comment"]
    ds.PatientSex = patient_info["Gender"][0]
    ds.PatientBirthDate = patient_info["DOB"]
    ds.StructureSetLabel = plan.plan_info["PlanName"]
    ds.StudyID = plan.primary_image.image["StudyID"]
    datetimesplit = plan.plan_info["ObjectVersion"]["WriteTimeStamp"].split()
    # Read more accurate date from trial file if it is available
    trial_info = plan.trial_info
    if trial_info:
        datetimesplit = trial_info["ObjectVersion"]["WriteTimeStamp"].split()
    study_date = datetimesplit[0].replace("-", "")
    study_time = datetimesplit[1].replace(":", "")
    ds.StructureSetDate = study_date
    ds.StructureSetTime = study_time
    ds.StudyDate = study_date
    ds.StudyTime = study_time
    ds.ManufacturersModelName = plan.plan_info["ToolType"]
    ds.SoftwareVersions = plan.plan_info["PinnacleVersionDescription"]
    ds.StructureSetName = "POIandROI"
    ds.SeriesNumber = "1"
    ds.PatientName = patient_info["FullName"]
    # Reference the primary image's frame of reference, study and series so
    # the RTSTRUCT is linked to the CT it was contoured on.
    ds.ReferencedFrameOfReferenceSequence = pydicom.sequence.Sequence()
    ReferencedFrameofReference = pydicom.dataset.Dataset()
    ds.ReferencedFrameOfReferenceSequence.append(ReferencedFrameofReference)
    ds.ReferencedFrameOfReferenceSequence[
        0
    ].FrameOfReferenceUID = plan.primary_image.image_info[0]["FrameUID"]
    ds.ReferencedFrameOfReferenceSequence[
        0
    ].RTReferencedStudySequence = pydicom.sequence.Sequence()
    RTReferencedStudy = pydicom.dataset.Dataset()
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence.append(
        RTReferencedStudy
    )
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0
    ].ReferencedSOPClassUID = "1.2.840.10008.3.1.2.3.2"
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0
    ].ReferencedSOPInstanceUID = plan.primary_image.image_info[0]["StudyInstanceUID"]
    # (A redundant second assignment of ds.StudyInstanceUID with the same
    # value was removed here.)
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0
    ].RTReferencedSeriesSequence = pydicom.sequence.Sequence()
    RTReferencedSeries = pydicom.dataset.Dataset()
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0
    ].RTReferencedSeriesSequence.append(RTReferencedSeries)
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0
    ].RTReferencedSeriesSequence[0].SeriesInstanceUID = plan.primary_image.image_info[
        0
    ][
        "SeriesUID"
    ]
    ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
        0
    ].RTReferencedSeriesSequence[0].ContourImageSequence = pydicom.sequence.Sequence()
    # Reference every slice of the primary image series.
    for info in plan.primary_image.image_info:
        contour_image = pydicom.dataset.Dataset()
        contour_image.ReferencedSOPClassUID = "1.2.840.10008.5.1.4.1.1.2"
        contour_image.ReferencedSOPInstanceUID = info["InstanceUID"]
        ds.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[
            0
        ].RTReferencedSeriesSequence[0].ContourImageSequence.append(contour_image)
    ds.ROIContourSequence = pydicom.sequence.Sequence()
    ds.StructureSetROISequence = pydicom.sequence.Sequence()
    ds.RTROIObservationsSequence = pydicom.sequence.Sequence()
    # Determine ISO Center
    find_iso_center(plan)
    ds = read_points(ds, plan)
    ds = read_roi(ds, plan)
    # find out where to get if its been approved or not
    # find out how to insert proper 'CodeString' here
    ds.ApprovalStatus = "UNAPPROVED"
    # Set the transfer syntax
    # TODO: Use `pymedphys._dicom.create.set_default_transfer_syntax` here
    ds.is_little_endian = True
    ds.is_implicit_VR = True
    # Save the RTSTRUCT DICOM file
    output_file = os.path.join(export_path, struct_filename)
    plan.logger.info("Creating Struct file: %s", output_file)
    ds.save_as(output_file, write_like_original=False)
|
en
| 0.796294
|
# Copyright (C) 2019 South Western Sydney Local Health District, # University of New South Wales # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # This work is derived from: # https://github.com/AndrewWAlexander/Pinnacle-tar-DICOM # which is released under the following license: # Copyright (c) [2017] [<NAME>, <NAME>] # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
# Determine which point to use for the iso center and set this value in # the plan object # This point is Iso CenterAtZero # setting to first point if isocenter or ct center not found # Read points and insert them into the dicom dataset # Not sure what this is for, just basing off template, should look into further # Not applying any shifts of points at the moment. Needed for Pinnacle pre v9.0 # for enteredpoints in ds.ROIContourSequence: # #logger.debug("In loop applying shifts: isocenter:" + str(data["isocenter"]) ) # enteredpoints.ContourSequence[0].ContourData[0] = str(float(enteredpoints.ContourSequence[0].ContourData[0]) - data["xshift"]) # enteredpoints.ContourSequence[0].ContourData[1] = str(float(enteredpoints.ContourSequence[0].ContourData[1]) - data["yshift"]) # #enteredpoints.ContourSequence[0].ContourData[2] = str(float(enteredpoints.ContourSequence[0].ContourData[2]) - float(data["isocenter"][2])) # #logger.debug("bottom of loop applying shifts isocenter:" + str(data["isocenter"])) # This function reads the plan.roi file line by line. This file is somehow not structured like the others, # and isn't tab indented properly, so won't parse onto YAML. # bool value to tell me if I want to read the line in as point values # this will tell me not to read in point values # all points for current curve saved until now. Here is where I # need to add them to dicom file # These points are the exact same as the first point, skip it! # Start of ROI # increment ROI_num because I've found a new ROI # gets a string of ROI name # end of ROI found # add to ROI observation sequence # found a curve # Check that the plan has a primary image, as we can't create a meaningful RTSTRUCT without it: # Populate required values for file meta information # not sure what I want here, going off of template dicom file # from sample dicom file, maybe should change? 
# not sure where to get information for this element can find this and # read in from # ds.ManufacturersModelName = 'Pinnacle3' # Study Component Management SOP Class (chosen from template) # Read more accurate date from trial file if it is available # Determine ISO Center # find out where to get if its been approved or not # find out how to insert proper 'CodeString' here # Set the transfer syntax # TODO: Use `pymedphys._dicom.create.set_default_transfer_syntax` here # Save the RTDose Dicom File
| 1.634717
| 2
|
datasets/offline_controller_with_small_rotation.py
|
cspatil8/savn
| 0
|
6626818
|
""" Exhaustive BFS and Offline Controller. """
import importlib
from collections import deque
import json
import copy
import time
import random
import os
import platform
try:
from queue import Queue
except ImportError:
from Queue import Queue
from ai2thor.controller import Controller, distance
from .base_controller import BaseController
class ThorAgentState:
""" Representation of a simple state of a Thor Agent which includes
the position, horizon and rotation. """
def __init__(self, x, y, z, rotation, horizon):
self.x = round(x, 2)
self.y = y
self.z = round(z, 2)
self.rotation = round(rotation)
self.horizon = round(horizon)
@classmethod
def get_state_from_evenet(cls, event, forced_y=None):
""" Extracts a state from an event. """
state = cls(
x=event.metadata["agent"]["position"]["x"],
y=event.metadata["agent"]["position"]["y"],
z=event.metadata["agent"]["position"]["z"],
rotation=event.metadata["agent"]["rotation"]["y"],
horizon=event.metadata["agent"]["cameraHorizon"],
)
if forced_y != None:
state.y = forced_y
return state
def __eq__(self, other):
""" If we check for exact equality then we get issues.
For now we consider this 'close enough'. """
if isinstance(other, ThorAgentState):
return (
self.x == other.x
and
# self.y == other.y and
self.z == other.z
and self.rotation == other.rotation
and self.horizon == other.horizon
)
return NotImplemented
def __str__(self):
""" Get the string representation of a state. """
"""
return '{:0.2f}|{:0.2f}|{:0.2f}|{:d}|{:d}'.format(
self.x,
self.y,
self.z,
round(self.rotation),
round(self.horizon)
)
"""
return "{:0.2f}|{:0.2f}|{:d}|{:d}".format(
self.x, self.z, round(self.rotation), round(self.horizon)
)
def position(self):
""" Returns just the position. """
return dict(x=self.x, y=self.y, z=self.z)
class ExhaustiveBFSController(Controller):
    """ A much slower and more exhaustive version of the BFSController.
    This may be helpful if you wish to find the shortest path to an object.
    The usual BFSController does not consider things like rotate or look down
    when you are navigating towards an object. Additionally, there is some
    rare occurances of positions which you can only get to in a certain way.
    This ExhaustiveBFSController introduces the safe_teleport method which
    ensures that all states will be covered.
    Strongly recomend having a seperate directory for each scene. See
    OfflineControllerWithSmallRotation for more information on how the generated data may be used. """
    def __init__(
        self,
        grid_size=0.25,
        fov=90.0,
        grid_file=None,
        graph_file=None,
        metadata_file=None,
        images_file=None,
        seg_file=None,
        class_file=None,
        depth_file=None,
        debug_mode=True,
        grid_assumption=False,
        local_executable_path=None,
        actions=["MoveAhead", "RotateLeft", "RotateRight", "LookUp", "LookDown"],
    ):
        # NOTE(review): `actions` uses a mutable default list; safe only as
        # long as no caller mutates it.
        super(ExhaustiveBFSController, self).__init__()
        # Allowed rotations.
        self.rotations = [0, 45, 90, 135, 180, 225, 270, 315]
        # Allowed horizons.
        self.horizons = [0, 30]
        self.allow_enqueue = True
        self.queue = deque()
        self.seen_points = []
        self.grid_points = []
        self.seen_states = []
        self.bad_seen_states = []
        self.visited_seen_states = []
        self.grid_states = []
        self.grid_size = grid_size
        self._check_visited = False
        self.scene_name = None
        self.fov = fov
        # y is pinned after Initialize so every generated state shares it.
        self.y = None
        self.local_executable_path = local_executable_path
        # distance_threshold to be consistent with BFSController in generating grid.
        self.distance_threshold = self.grid_size / 5.0
        self.debug_mode = debug_mode
        self.actions = actions
        self.grid_assumption = grid_assumption
        self.grid_file = grid_file
        self.metadata_file = metadata_file
        self.graph_file = graph_file
        self.images_file = images_file
        self.seg_file = seg_file
        self.class_file = class_file
        self.depth_file = depth_file
        # Optionally make a gird (including x,y,z points that are reachable)
        self.make_grid = grid_file is not None
        # Optionally store the metadata of each state.
        self.make_metadata = metadata_file is not None
        # Optionally make a directed of (s,t) where exists a in self.actions
        # such that t is reachable via s via a.
        self.make_graph = graph_file is not None
        # Optionally store an hdf5 file which contains the frame for each state.
        self.make_images = images_file is not None
        self.make_seg = seg_file is not None
        self.make_class = class_file is not None
        self.make_depth = self.depth_file is not None
        self.metadata = {}
        self.classdata = {}
        self.graph = None
        # Heavy optional dependencies (networkx, h5py) are imported lazily,
        # only when the corresponding output file was requested.
        if self.make_graph:
            import networkx as nx
            self.graph = nx.DiGraph()
        if self.make_images:
            import h5py
            self.images = h5py.File(self.images_file, "w")
        if self.make_seg:
            import h5py
            self.seg = h5py.File(self.seg_file, "w")
        if self.make_depth:
            import h5py
            self.depth = h5py.File(self.depth_file, "w")
    def safe_teleport(self, state):
        """ Approach a state from all possible directions if the usual teleport fails. """
        self.step(dict(action="Rotate", rotation=0))
        event = self.step(dict(action="Teleport", x=state.x, y=state.y, z=state.z))
        if event.metadata["lastActionSuccess"]:
            return event
        # Approach from the left.
        event = self.step(
            dict(action="Teleport", x=(state.x - self.grid_size), y=state.y, z=state.z)
        )
        if event.metadata["lastActionSuccess"]:
            self.step(dict(action="Rotate", rotation=90))
            event = self.step(dict(action="MoveAhead"))
            if event.metadata["lastActionSuccess"]:
                return event
        # Approach from the right.
        event = self.step(
            dict(action="Teleport", x=(state.x + self.grid_size), y=state.y, z=state.z)
        )
        if event.metadata["lastActionSuccess"]:
            self.step(dict(action="Rotate", rotation=270))
            event = self.step(dict(action="MoveAhead"))
            if event.metadata["lastActionSuccess"]:
                return event
        # Approach from the back.
        event = self.step(
            dict(action="Teleport", x=state.x, y=state.y, z=state.z - self.grid_size)
        )
        if event.metadata["lastActionSuccess"]:
            self.step(dict(action="Rotate", rotation=0))
            event = self.step(dict(action="MoveAhead"))
            if event.metadata["lastActionSuccess"]:
                return event
        # Approach from the front.
        event = self.step(
            dict(action="Teleport", x=state.x, y=state.y, z=state.z + self.grid_size)
        )
        if event.metadata["lastActionSuccess"]:
            self.step(dict(action="Rotate", rotation=180))
            event = self.step(dict(action="MoveAhead"))
            if event.metadata["lastActionSuccess"]:
                return event
        # Every approach failed: the state really is unreachable.
        print(self.scene_name)
        print(str(state))
        raise Exception("Safe Teleport Failed")
    def teleport_to_state(self, state):
        """ Only use this method when we know the state is valid.
        Teleports, then applies the state's rotation and horizon,
        asserting success at each step. """
        event = self.safe_teleport(state)
        assert event.metadata["lastActionSuccess"]
        event = self.step(dict(action="Rotate", rotation=state.rotation))
        assert event.metadata["lastActionSuccess"]
        event = self.step(dict(action="Look", horizon=state.horizon))
        assert event.metadata["lastActionSuccess"]
        if self.debug_mode:
            # Sanity check that we have teleported to the correct state.
            new_state = self.get_state_from_event(event)
            if state != new_state:
                print(state)
                print(new_state)
            assert state == new_state
        return event
    def get_state_from_event(self, event):
        # Builds a ThorAgentState from the event, pinning y to self.y so
        # height jitter does not create spuriously distinct states.
        return ThorAgentState.get_state_from_evenet(event, forced_y=self.y)
    def get_point_from_event(self, event):
        # Raw {x, y, z} position dict straight from the event metadata.
        return event.metadata["agent"]["position"]
    def get_next_state(self, state, action, copy_state=False):
        """ Guess the next state when action is taken. Note that
        this will not predict the correct y value.
        Returns None for LookUp/LookDown moves past the horizon limits.
        When copy_state is False the input state is mutated in place. """
        if copy_state:
            next_state = copy.deepcopy(state)
        else:
            next_state = state
        if action == "MoveAhead":
            # NOTE(review): diagonal rotations (45/135/225/315) step a full
            # grid_size in BOTH x and z, i.e. a grid_size*sqrt(2) move.
            if next_state.rotation == 0:
                next_state.z += self.grid_size
            elif next_state.rotation == 90:
                next_state.x += self.grid_size
            elif next_state.rotation == 180:
                next_state.z -= self.grid_size
            elif next_state.rotation == 270:
                next_state.x -= self.grid_size
            elif next_state.rotation == 45:
                next_state.z += self.grid_size
                next_state.x += self.grid_size
            elif next_state.rotation == 135:
                next_state.z -= self.grid_size
                next_state.x += self.grid_size
            elif next_state.rotation == 225:
                next_state.z -= self.grid_size
                next_state.x -= self.grid_size
            elif next_state.rotation == 315:
                next_state.z += self.grid_size
                next_state.x -= self.grid_size
            else:
                raise Exception("Unknown Rotation")
        elif action == "RotateRight":
            next_state.rotation = (next_state.rotation + 45) % 360
        elif action == "RotateLeft":
            next_state.rotation = (next_state.rotation - 45) % 360
        elif action == "LookUp":
            # Already at the top horizon (~0): no-op, signalled by None.
            if abs(next_state.horizon) <= 1:
                return None
            next_state.horizon = next_state.horizon - 30
        elif action == "LookDown":
            # Block moves past ~30/~60 so horizons stay within {0, 30}.
            if abs(next_state.horizon - 60) <= 1 or abs(next_state.horizon - 30) <= 1:
                return None
            next_state.horizon = next_state.horizon + 30
        return next_state
    def add_edge(self, curr_state, next_state):
        # Edge means: next_state is reachable from curr_state in one action.
        # Nodes are the states' string representations.
        self.graph.add_edge(str(curr_state), str(next_state))
    def enqueue_state(self, state):
        """ Returns true if state is valid. """
        # ensure there are no dup states.
        if state in self.seen_states:
            return True
        if state in self.bad_seen_states:
            return False
        # ensure state is a legal rotation and horizon.
        if (
            round(state.horizon) not in self.horizons
            or round(state.rotation) not in self.rotations
        ):
            self.bad_seen_states.append(state)
            return False
        self.seen_states.append(state)
        self.queue.append(state)
        return True
    def enqueue_states(self, agent_state):
        # From agent_state, try every action in the real simulator and
        # enqueue the successors that actually succeed.
        if not self.allow_enqueue:
            return
        # Take all action in self.action and enqueue if they are valid.
        for action in self.actions:
            next_state_guess = self.get_next_state(agent_state, action, True)
            if next_state_guess is None:
                continue
            # # Bug.
            # if (
            # self.scene_name == "FloorPlan208_physics"
            # and next_state_guess.x == 0
            # and next_state_guess.z == 1.75
            # ):
            # self.teleport_to_state(agent_state)
            # continue
            # Grid assumption is meant to make things faster and should not
            # be used in practice. In general it does not work when the y
            # values fluctuate in a scene. It circumvents using the actual controller.
            if self.grid_assumption:
                if next_state_guess in self.seen_states:
                    if self.make_graph:
                        self.add_edge(agent_state, next_state_guess)
                    continue
            # Validate the guessed successor with a real teleport + rotate +
            # look; on any failure, return to agent_state and try next action.
            event = self.step(
                dict(
                    action="Teleport",
                    x=next_state_guess.x,
                    y=next_state_guess.y,
                    z=next_state_guess.z,
                )
            )
            if not event.metadata["lastActionSuccess"]:
                self.teleport_to_state(agent_state)
                continue
            event = self.step(dict(action="Rotate", rotation=next_state_guess.rotation))
            if not event.metadata["lastActionSuccess"]:
                self.teleport_to_state(agent_state)
                continue
            event = self.step(dict(action="Look", horizon=next_state_guess.horizon))
            if not event.metadata["lastActionSuccess"]:
                self.teleport_to_state(agent_state)
                continue
            next_state = self.get_state_from_event(event)
            if next_state != next_state_guess:
                print(next_state)
                print(next_state_guess)
            assert next_state == next_state_guess
            if self.enqueue_state(next_state) and self.make_graph:
                self.add_edge(agent_state, next_state)
        # Return back to agents initial location.
        self.teleport_to_state(agent_state)
    def search_all_closed(self, scene_name):
        """ Runs the ExhaustiveBFSController on scene_name.
        Performs the full BFS and then writes every requested artifact
        (grid/graph/metadata/class JSON, images/seg/depth hdf5) to disk. """
        self.allow_enqueue = True
        self.queue = deque()
        self.seen_points = []
        self.visited_seen_points = []
        self.grid_points = []
        self.seen_states = []
        self.visited_seen_states = []
        self.scene_name = scene_name
        event = self.reset(scene_name)
        # Segmentation/class outputs need the extra render flags at init time.
        if self.make_seg or self.make_class:
            event = self.step(
                dict(
                    action="Initialize",
                    gridSize=self.grid_size,
                    fieldOfView=self.fov,
                    renderClassImage=True,
                    renderObjectImage=True,
                    renderDepthImage=True,
                )
            )
        else:
            event = self.step(
                dict(
                    action="Initialize",
                    renderDepthImage=True,
                    gridSize=self.grid_size,
                    fieldOfView=self.fov,
                )
            )
        # Pin y for the whole search (see get_state_from_event).
        self.y = event.metadata["agent"]["position"]["y"]
        self.enqueue_state(self.get_state_from_event(event))
        while self.queue:
            self.queue_step()
        # Flush each requested artifact.
        if self.make_grid:
            with open(self.grid_file, "w") as outfile:
                json.dump(self.grid_points, outfile)
        if self.make_graph:
            from networkx.readwrite import json_graph
            with open(self.graph_file, "w") as outfile:
                data = json_graph.node_link_data(self.graph)
                json.dump(data, outfile)
        if self.make_metadata:
            with open(self.metadata_file, "w") as outfile:
                json.dump(self.metadata, outfile)
        if self.make_images:
            self.images.close()
        if self.make_seg:
            self.seg.close()
        if self.make_depth:
            self.depth.close()
        if self.make_class:
            with open(self.class_file, "w") as outfile:
                json.dump(self.classdata, outfile)
        print("Finished :", self.scene_name)
    def queue_step(self):
        # Visit one queued state: teleport there, record its artifacts,
        # and enqueue its valid successors.
        search_state = self.queue.popleft()
        event = self.teleport_to_state(search_state)
        # if search_state.y > 1.3:
        # raise Exception("**** got big point ")
        self.enqueue_states(search_state)
        self.visited_seen_states.append(search_state)
        # Add the position to the grid unless an existing grid point is
        # already within distance_threshold of it.
        if self.make_grid and not any(
            map(
                lambda p: distance(p, search_state.position())
                < self.distance_threshold,
                self.grid_points,
            )
        ):
            self.grid_points.append(search_state.position())
        if self.make_metadata:
            self.metadata[str(search_state)] = event.metadata
        if self.make_class:
            # Stringify detection values so the dict is JSON-serializable.
            class_detections = event.class_detections2D
            for k, v in class_detections.items():
                class_detections[k] = str(v)
            self.classdata[str(search_state)] = class_detections
        if self.make_images and str(search_state) not in self.images:
            self.images.create_dataset(str(search_state), data=event.frame)
        if self.make_seg and str(search_state) not in self.seg:
            self.seg.create_dataset(
                str(search_state), data=event.class_segmentation_frame
            )
        # NOTE(review): this elif chains off the make_depth check above, so
        # the duplicate-state warning only fires when the depth branch is
        # skipped — confirm this is intended.
        if self.make_depth and str(search_state) not in self.depth:
            self.depth.create_dataset(str(search_state), data=event.depth_frame)
        elif str(search_state) in self.images:
            print(self.scene_name, str(search_state))
class OfflineControllerWithSmallRotationEvent:
    """Minimal stand-in for an AI2-THOR event.

    Carries only ``lastActionSuccess`` and ``sceneName`` in ``metadata``
    (plus, when a state is supplied, the agent's position/rotation/horizon)
    and an optional ``frame``. None of the remaining event metadata is kept.
    """

    def __init__(self, last_action_success, scene_name, state=None, frame=None):
        metadata = {
            "lastActionSuccess": last_action_success,
            "sceneName": scene_name,
        }
        if state is not None:
            # Roll/pitch of the body are always zero; only yaw is tracked.
            metadata["agent"] = {
                "position": state.position(),
                "rotation": {"x": 0.0, "y": state.rotation, "z": 0.0},
                "cameraHorizon": state.horizon,
            }
        self.metadata = metadata
        self.frame = frame
class OfflineControllerWithSmallRotation(BaseController):
    """ A stripped down version of the controller for non-interactive settings.
    Only allows for a few given actions. Note that you must use the
    ExhaustiveBFSController to first generate the data used by OfflineControllerWithSmallRotation.
    Data is stored in offline_data_dir/<scene_name>/.
    Can swap the metadata.json for a visible_object_map.json. A script for generating
    this is coming soon. If the swap is made then the OfflineControllerWithSmallRotation is faster and
    self.using_raw_metadata will be set to false.
    Additionally, images.hdf5 may be swapped out with ResNet features or anything
    that you want to be returned for event.frame. """
    def __init__(
        self,
        grid_size=0.25,
        fov=100,
        offline_data_dir="/mnt/6tb/mitchellw/data/living_room_offline_data",
        grid_file_name="grid.json",
        graph_file_name="graph.json",
        metadata_file_name="visible_object_map.json",
        # metadata_file_name='metadata.json',
        images_file_name="images.hdf5",
        debug_mode=True,
        actions=["MoveAhead", "RotateLeft", "RotateRight", "LookUp", "LookDown"],
        visualize=True,
        local_executable_path=None,
    ):
        # NOTE(review): `actions` uses a mutable default list; safe only as
        # long as no caller mutates it.
        super(OfflineControllerWithSmallRotation, self).__init__()
        self.grid_size = grid_size
        self.offline_data_dir = offline_data_dir
        self.grid_file_name = grid_file_name
        self.graph_file_name = graph_file_name
        self.metadata_file_name = metadata_file_name
        self.images_file_name = images_file_name
        # Per-scene artifacts; loaded lazily in reset().
        self.grid = None
        self.graph = None
        self.metadata = None
        self.images = None
        self.controller = None
        self.using_raw_metadata = True
        self.actions = actions
        # Allowed rotations.
        self.rotations = [0, 45, 90, 135, 180, 225, 270, 315]
        # Allowed horizons.
        self.horizons = [0, 30]
        self.debug_mode = debug_mode
        self.fov = fov
        self.local_executable_path = local_executable_path
        self.y = None
        self.last_event = None
        # Real simulator controller, used only when visualize is True.
        self.controller = ExhaustiveBFSController()
        if self.local_executable_path is not None:
            self.controller.local_executable_path = self.local_executable_path
        self.visualize = visualize
        self.scene_name = None
        self.state = None
        self.last_action_success = True
        # Lazy module handles so h5py/networkx are only imported at runtime.
        self.h5py = importlib.import_module("h5py")
        self.nx = importlib.import_module("networkx")
        self.json_graph_loader = importlib.import_module("networkx.readwrite")
    def start(self):
        # Boot the real simulator only if mirroring actions for visualization.
        if self.visualize:
            self.controller.start()
            self.controller.step(
                dict(action="Initialize", gridSize=self.grid_size, fieldOfView=self.fov)
            )
    def get_full_state(self, x, y, z, rotation=0.0, horizon=0.0):
        # State with an explicit y coordinate.
        return ThorAgentState(x, y, z, rotation, horizon)
    def get_state_from_str(self, x, z, rotation=0.0, horizon=0.0):
        # State parsed from the "x|z|rotation|horizon" string form; y comes
        # from the scene-wide self.y captured in reset().
        return ThorAgentState(x, self.y, z, rotation, horizon)
    def reset(self, scene_name=None):
        """ Load the offline artifacts (grid, graph, metadata, images) for
        scene_name — only re-reading from disk when the scene changes —
        then place the agent at the first grid point with a random rotation. """
        if scene_name is None:
            scene_name = "FloorPlan28"
        if scene_name != self.scene_name:
            self.scene_name = scene_name
            with open(
                os.path.join(
                    self.offline_data_dir, self.scene_name, self.grid_file_name
                ),
                "r",
            ) as f:
                self.grid = json.load(f)
            with open(
                os.path.join(
                    self.offline_data_dir, self.scene_name, self.graph_file_name
                ),
                "r",
            ) as f:
                graph_json = json.load(f)
            self.graph = self.json_graph_loader.node_link_graph(
                graph_json
            ).to_directed()
            with open(
                os.path.join(
                    self.offline_data_dir, self.scene_name, self.metadata_file_name
                ),
                "r",
            ) as f:
                self.metadata = json.load(f)
            # Determine if using the raw metadata, which is structured as a dictionary of
            # state -> metatdata. The alternative is a map of obj -> states where object is visible.
            key = next(iter(self.metadata.keys()))
            try:
                float(key.split("|")[0])
                self.using_raw_metadata = True
            except ValueError:
                self.using_raw_metadata = False
            if self.images is not None:
                self.images.close()
            self.images = self.h5py.File(
                os.path.join(
                    self.offline_data_dir, self.scene_name, self.images_file_name
                ),
                "r",
            )
        self.state = self.get_full_state(
            **self.grid[0], rotation=random.choice(self.rotations)
        )
        self.y = self.state.y
        self.last_action_success = True
        self.last_event = self._successful_event()
        if self.visualize:
            self.controller.reset(scene_name)
            self.controller.teleport_to_state(self.state)
    def randomize_state(self):
        # Jump to a uniformly random recorded state (horizon forced to 0).
        self.state = self.get_state_from_str(
            *[float(x) for x in random.choice(list(self.images.keys())).split("|")]
        )
        self.state.horizon = 0
        self.last_action_success = True
        self.last_event = self._successful_event()
        if self.visualize:
            self.controller.teleport_to_state(self.state)
    def back_to_start(self, start):
        # Reset pose to a previously captured start state.
        self.state = start
        if self.visualize:
            self.controller.teleport_to_state(self.state)
    def step(self, action, raise_for_failure=False):
        """ Apply one action dict against the offline graph. An action is
        legal only if its predicted successor is a graph neighbor of the
        current state; otherwise last_action_success is set to False. """
        # NOTE(review): if "action" is missing from the dict entirely, the
        # inner lookup below raises KeyError before the Exception is reached.
        if "action" not in action or action["action"] not in self.actions:
            if action["action"] == "Initialize":
                if self.visualize:
                    self.controller.step(action, raise_for_failure)
                return
            raise Exception("Unsupported action.")
        action = action["action"]
        # next_state is None when LookUp/LookDown would exceed horizon limits.
        next_state = self.controller.get_next_state(self.state, action, True)
        if self.visualize and next_state is not None:
            # Mirror the move in the live simulator for display.
            viz_event = self.controller.step(
                dict(action="Teleport", x=next_state.x, y=next_state.y, z=next_state.z)
            )
            viz_event = self.controller.step(
                dict(action="Rotate", rotation=next_state.rotation)
            )
            viz_event = self.controller.step(
                dict(action="Look", horizon=next_state.horizon)
            )
            viz_next_state = self.controller.get_state_from_event(viz_event)
            if (
                round(viz_next_state.horizon) not in self.horizons
                or round(viz_next_state.rotation) not in self.rotations
            ):
                # return back to original state.
                self.controller.teleport_to_state(self.state)
        if next_state is not None:
            next_state_key = str(next_state)
            # neighbors() yields node keys; membership test consumes it.
            neighbors = self.graph.neighbors(str(self.state))
            if next_state_key in neighbors:
                self.state = self.get_state_from_str(
                    *[float(x) for x in next_state_key.split("|")]
                )
                self.last_action_success = True
                event = self._successful_event()
                if self.debug_mode and self.visualize:
                    # Cross-check offline successor against the live simulator.
                    if self.controller.get_state_from_event(
                        viz_event
                    ) != self.controller.get_state_from_event(event):
                        print(action)
                        print(str(self.controller.get_state_from_event(viz_event)))
                        print(str(self.controller.get_state_from_event(event)))
                    assert self.controller.get_state_from_event(
                        viz_event
                    ) == self.controller.get_state_from_event(event)
                    assert viz_event.metadata["lastActionSuccess"]
                    # Uncomment if you want to view the frames side by side to
                    # ensure that they are duplicated.
                    # from matplotlib import pyplot as plt
                    # fig = plt.figure()
                    # fig.add_subplot(2,1,1)
                    # plt.imshow(self.get_image())
                    # fig.add_subplot(2,1,2)
                    # plt.imshow(viz_event.frame)
                    # plt.show()
                self.last_event = event
                return event
        # Illegal move: keep the old event but flag it as a failure.
        self.last_action_success = False
        self.last_event.metadata["lastActionSuccess"] = False
        return self.last_event
    def shortest_path(self, source_state, target_state):
        # Node keys are the states' string forms.
        return self.nx.shortest_path(self.graph, str(source_state), str(target_state))
    def optimal_plan(self, source_state, path):
        """ This is for debugging. It modifies the state.
        Recovers the action sequence that walks `path` from source_state. """
        self.state = source_state
        actions = []
        i = 1
        while i < len(path):
            for a in self.actions:
                next_state = self.controller.get_next_state(self.state, a, True)
                if str(next_state) == path[i]:
                    actions.append(a)
                    i += 1
                    self.state = next_state
                    break
        return actions
    def shortest_path_to_target(self, source_state, objId, get_plan=False):
        """ Many ways to reach objId, which one is best?
        Returns (best_path, best_path_len, best_plan); best_plan is empty
        unless get_plan is True. """
        states_where_visible = []
        if self.using_raw_metadata:
            # Raw form: scan every state's object list for visibility.
            for s in self.metadata:
                objects = self.metadata[s]["objects"]
                visible_objects = [o["objectId"] for o in objects if o["visible"]]
                if objId in visible_objects:
                    states_where_visible.append(s)
        else:
            # Map form: direct lookup obj -> list of state strings.
            states_where_visible = self.metadata[objId]
        # transform from strings into states
        states_where_visible = [
            self.get_state_from_str(*[float(x) for x in str_.split("|")])
            for str_ in states_where_visible
        ]
        best_path = None
        best_path_len = 0
        for t in states_where_visible:
            path = self.shortest_path(source_state, t)
            # The `or best_path is None` clause seeds the first candidate.
            if len(path) < best_path_len or best_path is None:
                best_path = path
                best_path_len = len(path)
        best_plan = []
        if get_plan:
            best_plan = self.optimal_plan(source_state, best_path)
        return best_path, best_path_len, best_plan
    def visualize_plan(self, source, plan):
        """ Visualize the best path from source to plan. """
        assert self.visualize
        self.controller.teleport_to_state(source)
        time.sleep(0.5)
        for a in plan:
            print(a)
            self.controller.step(dict(action=a))
            time.sleep(0.5)
    def object_is_visible(self, objId):
        # Visibility lookup in whichever metadata format is loaded.
        if self.using_raw_metadata:
            objects = self.metadata[str(self.state)]["objects"]
            visible_objects = [o["objectId"] for o in objects if o["visible"]]
            return objId in visible_objects
        else:
            return str(self.state) in self.metadata[objId]
    def _successful_event(self):
        # Wrap the current pose in a lightweight event object.
        return OfflineControllerWithSmallRotationEvent(
            self.last_action_success, self.scene_name, self.state, self.get_image()
        )
    def get_image(self):
        # Frame (or precomputed features) stored for the current state.
        return self.images[str(self.state)][:]
    def all_objects(self):
        # All object ids known for the current state / scene.
        if self.using_raw_metadata:
            return [o["objectId"] for o in self.metadata[str(self.state)]["objects"]]
        else:
            return self.metadata.keys()
|
""" Exhaustive BFS and Offline Controller. """
import importlib
from collections import deque
import json
import copy
import time
import random
import os
import platform
try:
from queue import Queue
except ImportError:
from Queue import Queue
from ai2thor.controller import Controller, distance
from .base_controller import BaseController
class ThorAgentState:
    """ Representation of a simple state of a Thor Agent which includes
    the position, horizon and rotation. """
    def __init__(self, x, y, z, rotation, horizon):
        # x/z rounded to 2 decimals and rotation/horizon to integers so
        # that states built from noisy simulator events compare equal.
        self.x = round(x, 2)
        self.y = y
        self.z = round(z, 2)
        self.rotation = round(rotation)
        self.horizon = round(horizon)
    @classmethod
    def get_state_from_evenet(cls, event, forced_y=None):
        """ Extracts a state from an event.
        forced_y, when given, overrides the event's y coordinate.
        NOTE(review): name carries a historical typo ("evenet") that
        callers depend on. """
        state = cls(
            x=event.metadata["agent"]["position"]["x"],
            y=event.metadata["agent"]["position"]["y"],
            z=event.metadata["agent"]["position"]["z"],
            rotation=event.metadata["agent"]["rotation"]["y"],
            horizon=event.metadata["agent"]["cameraHorizon"],
        )
        if forced_y != None:
            state.y = forced_y
        return state
    def __eq__(self, other):
        """ If we check for exact equality then we get issues.
        For now we consider this 'close enough'. """
        if isinstance(other, ThorAgentState):
            return (
                self.x == other.x
                and
                # self.y == other.y and
                self.z == other.z
                and self.rotation == other.rotation
                and self.horizon == other.horizon
            )
        return NotImplemented
    def __str__(self):
        """ Get the string representation of a state (y excluded). """
        # The string literal below is a dead alternative representation;
        # it has no runtime effect.
        """
        return '{:0.2f}|{:0.2f}|{:0.2f}|{:d}|{:d}'.format(
            self.x,
            self.y,
            self.z,
            round(self.rotation),
            round(self.horizon)
        )
        """
        return "{:0.2f}|{:0.2f}|{:d}|{:d}".format(
            self.x, self.z, round(self.rotation), round(self.horizon)
        )
    def position(self):
        """ Returns just the position. """
        return dict(x=self.x, y=self.y, z=self.z)
class ExhaustiveBFSController(Controller):
""" A much slower and more exhaustive version of the BFSController.
This may be helpful if you wish to find the shortest path to an object.
The usual BFSController does not consider things like rotate or look down
when you are navigating towards an object. Additionally, there is some
rare occurances of positions which you can only get to in a certain way.
This ExhaustiveBFSController introduces the safe_teleport method which
ensures that all states will be covered.
Strongly recomend having a seperate directory for each scene. See
OfflineControllerWithSmallRotation for more information on how the generated data may be used. """
def __init__(
    self,
    grid_size=0.25,
    fov=90.0,
    grid_file=None,
    graph_file=None,
    metadata_file=None,
    images_file=None,
    seg_file=None,
    class_file=None,
    depth_file=None,
    debug_mode=True,
    grid_assumption=False,
    local_executable_path=None,
    actions=None,
):
    """Exhaustive BFS controller over an AI2-THOR scene.

    Each *_file argument is optional; passing a path enables dumping the
    corresponding artifact (reachable grid / transition graph / metadata /
    frames / segmentation / class detections / depth) during traversal.

    Args:
        grid_size: step size (meters) between reachable grid positions.
        fov: camera field of view in degrees.
        debug_mode: if True, sanity-check every teleport against the state
            it was expected to produce.
        grid_assumption: speed hack that trusts predicted states instead of
            the simulator; unreliable when y fluctuates (see enqueue_states).
        actions: allowed agent actions; defaults to the move/rotate/look set.
    """
    super(ExhaustiveBFSController, self).__init__()
    # Fix: the original used a mutable list literal as the default value,
    # which is shared across all instances; build a fresh list per call.
    if actions is None:
        actions = ["MoveAhead", "RotateLeft", "RotateRight", "LookUp", "LookDown"]
    # Allowed rotations (45-degree increments).
    self.rotations = [0, 45, 90, 135, 180, 225, 270, 315]
    # Allowed camera horizons.
    self.horizons = [0, 30]
    self.allow_enqueue = True
    self.queue = deque()
    self.seen_points = []
    self.grid_points = []
    self.seen_states = []
    self.bad_seen_states = []
    self.visited_seen_states = []
    self.grid_states = []
    self.grid_size = grid_size
    self._check_visited = False
    self.scene_name = None
    self.fov = fov
    self.y = None
    self.local_executable_path = local_executable_path
    # distance_threshold to be consistent with BFSController in generating grid.
    self.distance_threshold = self.grid_size / 5.0
    self.debug_mode = debug_mode
    self.actions = actions
    self.grid_assumption = grid_assumption
    self.grid_file = grid_file
    self.metadata_file = metadata_file
    self.graph_file = graph_file
    self.images_file = images_file
    self.seg_file = seg_file
    self.class_file = class_file
    self.depth_file = depth_file
    # Each make_* flag is simply "was the corresponding output path given?".
    self.make_grid = grid_file is not None
    self.make_metadata = metadata_file is not None
    # Directed graph of (s, t) where some action in self.actions takes s to t.
    self.make_graph = graph_file is not None
    # HDF5 files keyed by state string for frames / segmentation / depth.
    self.make_images = images_file is not None
    self.make_seg = seg_file is not None
    self.make_class = class_file is not None
    self.make_depth = depth_file is not None
    self.metadata = {}
    self.classdata = {}
    self.graph = None
    # Lazy imports: only pull in networkx / h5py when their output is wanted.
    if self.make_graph:
        import networkx as nx

        self.graph = nx.DiGraph()
    if self.make_images:
        import h5py

        self.images = h5py.File(self.images_file, "w")
    if self.make_seg:
        import h5py

        self.seg = h5py.File(self.seg_file, "w")
    if self.make_depth:
        import h5py

        self.depth = h5py.File(self.depth_file, "w")
def safe_teleport(self, state):
    """ Approach a state from all possible directions if the usual teleport fails. """
    self.step(dict(action="Rotate", rotation=0))
    event = self.step(dict(action="Teleport", x=state.x, y=state.y, z=state.z))
    if event.metadata["lastActionSuccess"]:
        return event

    # Fallbacks: teleport one grid cell away, face the target, then walk onto
    # it.  (dx, dz, facing) triples, tried in order: left, right, back, front.
    approaches = (
        (-self.grid_size, 0, 90),
        (self.grid_size, 0, 270),
        (0, -self.grid_size, 0),
        (0, self.grid_size, 180),
    )
    for dx, dz, facing in approaches:
        event = self.step(
            dict(action="Teleport", x=state.x + dx, y=state.y, z=state.z + dz)
        )
        if not event.metadata["lastActionSuccess"]:
            continue
        self.step(dict(action="Rotate", rotation=facing))
        event = self.step(dict(action="MoveAhead"))
        if event.metadata["lastActionSuccess"]:
            return event

    print(self.scene_name)
    print(str(state))
    raise Exception("Safe Teleport Failed")
def teleport_to_state(self, state):
    """ Only use this method when we know the state is valid. """
    # Position first (with fallback approaches), then orientation, then horizon.
    event = self.safe_teleport(state)
    assert event.metadata["lastActionSuccess"]
    event = self.step(dict(action="Rotate", rotation=state.rotation))
    assert event.metadata["lastActionSuccess"]
    event = self.step(dict(action="Look", horizon=state.horizon))
    assert event.metadata["lastActionSuccess"]
    if self.debug_mode:
        # Sanity check that we have teleported to the correct state.
        new_state = self.get_state_from_event(event)
        if state != new_state:
            print(state)
            print(new_state)
        assert state == new_state
    return event
def get_state_from_event(self, event):
    # Build a ThorAgentState from the event, forcing the scene-wide y value.
    # NOTE: "evenet" is the (misspelled) name of the upstream classmethod.
    return ThorAgentState.get_state_from_evenet(event, forced_y=self.y)

def get_point_from_event(self, event):
    # Raw agent position dict {"x": ..., "y": ..., "z": ...} from the event.
    return event.metadata["agent"]["position"]
def get_next_state(self, state, action, copy_state=False):
    """ Guess the next state when action is taken. Note that
    this will not predict the correct y value. """
    next_state = copy.deepcopy(state) if copy_state else state

    if action == "MoveAhead":
        # Unit (dx, dz) movement direction for each legal 45-degree heading.
        direction_by_rotation = {
            0: (0, 1),
            45: (1, 1),
            90: (1, 0),
            135: (1, -1),
            180: (0, -1),
            225: (-1, -1),
            270: (-1, 0),
            315: (-1, 1),
        }
        if next_state.rotation not in direction_by_rotation:
            raise Exception("Unknown Rotation")
        dx, dz = direction_by_rotation[next_state.rotation]
        next_state.x += dx * self.grid_size
        next_state.z += dz * self.grid_size
    elif action == "RotateRight":
        next_state.rotation = (next_state.rotation + 45) % 360
    elif action == "RotateLeft":
        next_state.rotation = (next_state.rotation - 45) % 360
    elif action == "LookUp":
        # Already looking straight ahead (horizon ~ 0): cannot look up.
        if abs(next_state.horizon) <= 1:
            return None
        next_state.horizon = next_state.horizon - 30
    elif action == "LookDown":
        # Already at the lowest allowed horizon (~30, or ~60): cannot look down.
        if abs(next_state.horizon - 60) <= 1 or abs(next_state.horizon - 30) <= 1:
            return None
        next_state.horizon = next_state.horizon + 30
    return next_state
def add_edge(self, curr_state, next_state):
    # Graph nodes are the string representations of states.
    self.graph.add_edge(str(curr_state), str(next_state))
def enqueue_state(self, state):
    """ Returns true if state is valid. """
    # ensure there are no dup states.
    # NOTE(review): seen/bad lists give O(n) membership tests; fine for
    # scene-sized state counts, but a set would scale better.
    if state in self.seen_states:
        return True
    if state in self.bad_seen_states:
        return False
    # ensure state is a legal rotation and horizon.
    if (
        round(state.horizon) not in self.horizons
        or round(state.rotation) not in self.rotations
    ):
        self.bad_seen_states.append(state)
        return False
    self.seen_states.append(state)
    self.queue.append(state)
    return True
def enqueue_states(self, agent_state):
    """Try every action from agent_state and enqueue each successor that the
    live simulator confirms is reachable; finally teleport back."""
    if not self.allow_enqueue:
        return
    # Take all action in self.action and enqueue if they are valid.
    for action in self.actions:
        next_state_guess = self.get_next_state(agent_state, action, True)
        if next_state_guess is None:
            continue
        # # Bug.
        # if (
        #     self.scene_name == "FloorPlan208_physics"
        #     and next_state_guess.x == 0
        #     and next_state_guess.z == 1.75
        # ):
        #     self.teleport_to_state(agent_state)
        #     continue
        # Grid assumption is meant to make things faster and should not
        # be used in practice. In general it does not work when the y
        # values fluctuate in a scene. It circumvents using the actual controller.
        if self.grid_assumption:
            if next_state_guess in self.seen_states:
                if self.make_graph:
                    self.add_edge(agent_state, next_state_guess)
                continue
        # Verify the guessed successor in the simulator: teleport, rotate, look.
        # On any failure, return to agent_state and try the next action.
        event = self.step(
            dict(
                action="Teleport",
                x=next_state_guess.x,
                y=next_state_guess.y,
                z=next_state_guess.z,
            )
        )
        if not event.metadata["lastActionSuccess"]:
            self.teleport_to_state(agent_state)
            continue
        event = self.step(dict(action="Rotate", rotation=next_state_guess.rotation))
        if not event.metadata["lastActionSuccess"]:
            self.teleport_to_state(agent_state)
            continue
        event = self.step(dict(action="Look", horizon=next_state_guess.horizon))
        if not event.metadata["lastActionSuccess"]:
            self.teleport_to_state(agent_state)
            continue
        next_state = self.get_state_from_event(event)
        if next_state != next_state_guess:
            print(next_state)
            print(next_state_guess)
        assert next_state == next_state_guess
        if self.enqueue_state(next_state) and self.make_graph:
            self.add_edge(agent_state, next_state)
    # Return back to agents initial location.
    self.teleport_to_state(agent_state)
def search_all_closed(self, scene_name):
    """ Runs the ExhaustiveBFSController on scene_name. """
    # Reset per-scene bookkeeping.
    # NOTE(review): bad_seen_states is not cleared here; its entries are
    # rotation/horizon-illegal states (scene-independent), so this should
    # only cost memory across scenes — confirm that was intentional.
    self.allow_enqueue = True
    self.queue = deque()
    self.seen_points = []
    self.visited_seen_points = []
    self.grid_points = []
    self.seen_states = []
    self.visited_seen_states = []
    self.scene_name = scene_name
    event = self.reset(scene_name)
    # Segmentation / class outputs need the extra render flags at init time.
    if self.make_seg or self.make_class:
        event = self.step(
            dict(
                action="Initialize",
                gridSize=self.grid_size,
                fieldOfView=self.fov,
                renderClassImage=True,
                renderObjectImage=True,
                renderDepthImage=True,
            )
        )
    else:
        event = self.step(
            dict(
                action="Initialize",
                renderDepthImage=True,
                gridSize=self.grid_size,
                fieldOfView=self.fov,
            )
        )
    # Fix y for the whole scene, then BFS from the agent's initial state.
    self.y = event.metadata["agent"]["position"]["y"]
    self.enqueue_state(self.get_state_from_event(event))
    while self.queue:
        self.queue_step()
    # Dump whichever artifacts were requested via the *_file constructor args.
    if self.make_grid:
        with open(self.grid_file, "w") as outfile:
            json.dump(self.grid_points, outfile)
    if self.make_graph:
        from networkx.readwrite import json_graph

        with open(self.graph_file, "w") as outfile:
            data = json_graph.node_link_data(self.graph)
            json.dump(data, outfile)
    if self.make_metadata:
        with open(self.metadata_file, "w") as outfile:
            json.dump(self.metadata, outfile)
    if self.make_images:
        self.images.close()
    if self.make_seg:
        self.seg.close()
    if self.make_depth:
        self.depth.close()
    if self.make_class:
        with open(self.class_file, "w") as outfile:
            json.dump(self.classdata, outfile)
    print("Finished :", self.scene_name)
def queue_step(self):
    """Pop one state from the BFS queue, teleport to it, expand its
    neighbors, and record the requested per-state outputs."""
    search_state = self.queue.popleft()
    event = self.teleport_to_state(search_state)
    # if search_state.y > 1.3:
    #     raise Exception("**** got big point ")
    self.enqueue_states(search_state)
    self.visited_seen_states.append(search_state)
    # Record the (x, y, z) position once per grid cell (deduplicated by
    # distance_threshold).
    if self.make_grid and not any(
        map(
            lambda p: distance(p, search_state.position())
            < self.distance_threshold,
            self.grid_points,
        )
    ):
        self.grid_points.append(search_state.position())
    if self.make_metadata:
        self.metadata[str(search_state)] = event.metadata
    if self.make_class:
        # Stringify detection values so the dict is JSON-serializable.
        class_detections = event.class_detections2D
        for k, v in class_detections.items():
            class_detections[k] = str(v)
        self.classdata[str(search_state)] = class_detections
    if self.make_images and str(search_state) not in self.images:
        self.images.create_dataset(str(search_state), data=event.frame)
    if self.make_seg and str(search_state) not in self.seg:
        self.seg.create_dataset(
            str(search_state), data=event.class_segmentation_frame
        )
    if self.make_depth and str(search_state) not in self.depth:
        self.depth.create_dataset(str(search_state), data=event.depth_frame)
    elif str(search_state) in self.images:
        # NOTE(review): this elif is chained to the make_depth branch, so it
        # fires whenever the depth dataset was not written; it looks like it
        # was meant to warn about duplicate image states — confirm intent.
        # It also assumes self.images exists (i.e. make_images was enabled).
        print(self.scene_name, str(search_state))
class OfflineControllerWithSmallRotationEvent:
    """ A stripped down version of an event. Only contains lastActionSuccess, sceneName,
    and optionally state and frame. Does not contain the rest of the metadata. """

    def __init__(self, last_action_success, scene_name, state=None, frame=None):
        # Mimic the subset of AI2-THOR event metadata that callers read.
        meta = {
            "lastActionSuccess": last_action_success,
            "sceneName": scene_name,
        }
        if state is not None:
            # Only yaw (rotation about y) is tracked; pitch/roll stay zero.
            meta["agent"] = {
                "position": state.position(),
                "rotation": {"x": 0.0, "y": state.rotation, "z": 0.0},
                "cameraHorizon": state.horizon,
            }
        self.metadata = meta
        self.frame = frame
class OfflineControllerWithSmallRotation(BaseController):
    """ A stripped down version of the controller for non-interactive settings.
    Only allows for a few given actions. Note that you must use the
    ExhaustiveBFSController to first generate the data used by OfflineControllerWithSmallRotation.
    Data is stored in offline_data_dir/<scene_name>/.

    Can swap the metadata.json for a visible_object_map.json. A script for generating
    this is coming soon. If the swap is made then the OfflineControllerWithSmallRotation is faster and
    self.using_raw_metadata will be set to false.

    Additionally, images.hdf5 may be swapped out with ResNet features or anything
    that you want to be returned for event.frame. """

    def __init__(
        self,
        grid_size=0.25,
        fov=100,
        offline_data_dir="/mnt/6tb/mitchellw/data/living_room_offline_data",
        grid_file_name="grid.json",
        graph_file_name="graph.json",
        metadata_file_name="visible_object_map.json",
        # metadata_file_name='metadata.json',
        images_file_name="images.hdf5",
        debug_mode=True,
        actions=["MoveAhead", "RotateLeft", "RotateRight", "LookUp", "LookDown"],
        visualize=True,
        local_executable_path=None,
    ):
        # NOTE(review): `actions` uses a mutable list default shared across
        # instances; it is never mutated here, but a None default is safer.
        super(OfflineControllerWithSmallRotation, self).__init__()
        self.grid_size = grid_size
        self.offline_data_dir = offline_data_dir
        self.grid_file_name = grid_file_name
        self.graph_file_name = graph_file_name
        self.metadata_file_name = metadata_file_name
        self.images_file_name = images_file_name
        self.grid = None
        self.graph = None
        self.metadata = None
        self.images = None
        self.controller = None
        self.using_raw_metadata = True
        self.actions = actions
        # Allowed rotations.
        self.rotations = [0, 45, 90, 135, 180, 225, 270, 315]
        # Allowed horizons.
        self.horizons = [0, 30]
        self.debug_mode = debug_mode
        self.fov = fov
        self.local_executable_path = local_executable_path
        self.y = None
        self.last_event = None
        # A live controller is kept only for optional visualization/debugging.
        self.controller = ExhaustiveBFSController()
        if self.local_executable_path is not None:
            self.controller.local_executable_path = self.local_executable_path
        self.visualize = visualize
        self.scene_name = None
        self.state = None
        self.last_action_success = True
        # Lazy module handles so offline use defers these heavy imports.
        self.h5py = importlib.import_module("h5py")
        self.nx = importlib.import_module("networkx")
        self.json_graph_loader = importlib.import_module("networkx.readwrite")

    def start(self):
        # The simulator is only booted when visualization is requested.
        if self.visualize:
            self.controller.start()
            self.controller.step(
                dict(action="Initialize", gridSize=self.grid_size, fieldOfView=self.fov)
            )

    def get_full_state(self, x, y, z, rotation=0.0, horizon=0.0):
        return ThorAgentState(x, y, z, rotation, horizon)

    def get_state_from_str(self, x, z, rotation=0.0, horizon=0.0):
        # y is fixed per scene and filled in from self.y.
        return ThorAgentState(x, self.y, z, rotation, horizon)

    def reset(self, scene_name=None):
        """Load (or re-use, if the scene is unchanged) the offline data for
        scene_name and start at the first grid point with a random rotation."""
        if scene_name is None:
            scene_name = "FloorPlan28"
        if scene_name != self.scene_name:
            self.scene_name = scene_name
            with open(
                os.path.join(
                    self.offline_data_dir, self.scene_name, self.grid_file_name
                ),
                "r",
            ) as f:
                self.grid = json.load(f)
            with open(
                os.path.join(
                    self.offline_data_dir, self.scene_name, self.graph_file_name
                ),
                "r",
            ) as f:
                graph_json = json.load(f)
            self.graph = self.json_graph_loader.node_link_graph(
                graph_json
            ).to_directed()
            with open(
                os.path.join(
                    self.offline_data_dir, self.scene_name, self.metadata_file_name
                ),
                "r",
            ) as f:
                self.metadata = json.load(f)
            # Determine if using the raw metadata, which is structured as a dictionary of
            # state -> metatdata. The alternative is a map of obj -> states where object is visible.
            key = next(iter(self.metadata.keys()))
            try:
                float(key.split("|")[0])
                self.using_raw_metadata = True
            except ValueError:
                self.using_raw_metadata = False
            if self.images is not None:
                self.images.close()
            self.images = self.h5py.File(
                os.path.join(
                    self.offline_data_dir, self.scene_name, self.images_file_name
                ),
                "r",
            )
        self.state = self.get_full_state(
            **self.grid[0], rotation=random.choice(self.rotations)
        )
        self.y = self.state.y
        self.last_action_success = True
        self.last_event = self._successful_event()
        if self.visualize:
            self.controller.reset(scene_name)
            self.controller.teleport_to_state(self.state)

    def randomize_state(self):
        # Jump to a random recorded state (keyed by image name), level horizon.
        self.state = self.get_state_from_str(
            *[float(x) for x in random.choice(list(self.images.keys())).split("|")]
        )
        self.state.horizon = 0
        self.last_action_success = True
        self.last_event = self._successful_event()
        if self.visualize:
            self.controller.teleport_to_state(self.state)

    def back_to_start(self, start):
        self.state = start
        if self.visualize:
            self.controller.teleport_to_state(self.state)

    def step(self, action, raise_for_failure=False):
        """Apply an offline action: predict the successor with the BFS
        controller's transition model and accept it only if it is an edge in
        the recorded graph.  Returns the last_event (possibly failed)."""
        if "action" not in action or action["action"] not in self.actions:
            if action["action"] == "Initialize":
                if self.visualize:
                    self.controller.step(action, raise_for_failure)
                return
            raise Exception("Unsupported action.")
        action = action["action"]
        next_state = self.controller.get_next_state(self.state, action, True)
        if self.visualize and next_state is not None:
            # Mirror the move in the live simulator for visualization.
            viz_event = self.controller.step(
                dict(action="Teleport", x=next_state.x, y=next_state.y, z=next_state.z)
            )
            viz_event = self.controller.step(
                dict(action="Rotate", rotation=next_state.rotation)
            )
            viz_event = self.controller.step(
                dict(action="Look", horizon=next_state.horizon)
            )
            viz_next_state = self.controller.get_state_from_event(viz_event)
            if (
                round(viz_next_state.horizon) not in self.horizons
                or round(viz_next_state.rotation) not in self.rotations
            ):
                # return back to original state.
                self.controller.teleport_to_state(self.state)
        if next_state is not None:
            next_state_key = str(next_state)
            neighbors = self.graph.neighbors(str(self.state))
            if next_state_key in neighbors:
                self.state = self.get_state_from_str(
                    *[float(x) for x in next_state_key.split("|")]
                )
                self.last_action_success = True
                event = self._successful_event()
                if self.debug_mode and self.visualize:
                    # Cross-check the offline transition against the simulator.
                    if self.controller.get_state_from_event(
                        viz_event
                    ) != self.controller.get_state_from_event(event):
                        print(action)
                        print(str(self.controller.get_state_from_event(viz_event)))
                        print(str(self.controller.get_state_from_event(event)))
                    assert self.controller.get_state_from_event(
                        viz_event
                    ) == self.controller.get_state_from_event(event)
                    assert viz_event.metadata["lastActionSuccess"]
                    # Uncomment if you want to view the frames side by side to
                    # ensure that they are duplicated.
                    # from matplotlib import pyplot as plt
                    # fig = plt.figure()
                    # fig.add_subplot(2,1,1)
                    # plt.imshow(self.get_image())
                    # fig.add_subplot(2,1,2)
                    # plt.imshow(viz_event.frame)
                    # plt.show()
                self.last_event = event
                return event
        # Transition not in the recorded graph: mark the last event failed.
        self.last_action_success = False
        self.last_event.metadata["lastActionSuccess"] = False
        return self.last_event

    def shortest_path(self, source_state, target_state):
        return self.nx.shortest_path(self.graph, str(source_state), str(target_state))

    def optimal_plan(self, source_state, path):
        """ This is for debugging. It modifies the state. """
        # NOTE(review): loops forever if no action reproduces path[i];
        # acceptable for a debugging helper, but worth confirming.
        self.state = source_state
        actions = []
        i = 1
        while i < len(path):
            for a in self.actions:
                next_state = self.controller.get_next_state(self.state, a, True)
                if str(next_state) == path[i]:
                    actions.append(a)
                    i += 1
                    self.state = next_state
                    break
        return actions

    def shortest_path_to_target(self, source_state, objId, get_plan=False):
        """ Many ways to reach objId, which one is best? """
        states_where_visible = []
        if self.using_raw_metadata:
            # Raw metadata: scan every state for objId among visible objects.
            for s in self.metadata:
                objects = self.metadata[s]["objects"]
                visible_objects = [o["objectId"] for o in objects if o["visible"]]
                if objId in visible_objects:
                    states_where_visible.append(s)
        else:
            # Precomputed map: obj -> states where it is visible.
            states_where_visible = self.metadata[objId]
        # transform from strings into states
        states_where_visible = [
            self.get_state_from_str(*[float(x) for x in str_.split("|")])
            for str_ in states_where_visible
        ]
        best_path = None
        best_path_len = 0
        for t in states_where_visible:
            path = self.shortest_path(source_state, t)
            # `or best_path is None` makes the first candidate always win,
            # despite best_path_len starting at 0.
            if len(path) < best_path_len or best_path is None:
                best_path = path
                best_path_len = len(path)
        best_plan = []
        if get_plan:
            best_plan = self.optimal_plan(source_state, best_path)
        return best_path, best_path_len, best_plan

    def visualize_plan(self, source, plan):
        """ Visualize the best path from source to plan. """
        assert self.visualize
        self.controller.teleport_to_state(source)
        time.sleep(0.5)
        for a in plan:
            print(a)
            self.controller.step(dict(action=a))
            time.sleep(0.5)

    def object_is_visible(self, objId):
        # Visibility of objId from the current state, under either metadata layout.
        if self.using_raw_metadata:
            objects = self.metadata[str(self.state)]["objects"]
            visible_objects = [o["objectId"] for o in objects if o["visible"]]
            return objId in visible_objects
        else:
            return str(self.state) in self.metadata[objId]

    def _successful_event(self):
        return OfflineControllerWithSmallRotationEvent(
            self.last_action_success, self.scene_name, self.state, self.get_image()
        )

    def get_image(self):
        # The stored frame (or swapped-in features) for the current state.
        return self.images[str(self.state)][:]

    def all_objects(self):
        if self.using_raw_metadata:
            return [o["objectId"] for o in self.metadata[str(self.state)]["objects"]]
        else:
            return self.metadata.keys()
|
en
| 0.83211
|
Exhaustive BFS and Offline Controller. Representation of a simple state of a Thor Agent which includes the position, horizon and rotation. Extracts a state from an event. If we check for exact equality then we get issues. For now we consider this 'close enough'. # self.y == other.y and Get the string representation of a state. return '{:0.2f}|{:0.2f}|{:0.2f}|{:d}|{:d}'.format( self.x, self.y, self.z, round(self.rotation), round(self.horizon) ) Returns just the position. A much slower and more exhaustive version of the BFSController. This may be helpful if you wish to find the shortest path to an object. The usual BFSController does not consider things like rotate or look down when you are navigating towards an object. Additionally, there is some rare occurances of positions which you can only get to in a certain way. This ExhaustiveBFSController introduces the safe_teleport method which ensures that all states will be covered. Strongly recomend having a seperate directory for each scene. See OfflineControllerWithSmallRotation for more information on how the generated data may be used. # Allowed rotations. # Allowed horizons. # distance_threshold to be consistent with BFSController in generating grid. # Optionally make a gird (including x,y,z points that are reachable) # Optionally store the metadata of each state. # Optionally make a directed of (s,t) where exists a in self.actions # such that t is reachable via s via a. # Optionally store an hdf5 file which contains the frame for each state. Approach a state from all possible directions if the usual teleport fails. # Approach from the left. # Approach from the right. # Approach from the back. # Approach from the front. Only use this method when we know the state is valid. # Sanity check that we have teleported to the correct state. Guess the next state when action is taken. Note that this will not predict the correct y value. Returns true if state is valid. # ensure there are no dup states. 
# ensure state is a legal rotation and horizon. # Take all action in self.action and enqueue if they are valid. # # Bug. # if ( # self.scene_name == "FloorPlan208_physics" # and next_state_guess.x == 0 # and next_state_guess.z == 1.75 # ): # self.teleport_to_state(agent_state) # continue # Grid assumption is meant to make things faster and should not # be used in practice. In general it does not work when the y # values fluctuate in a scene. It circumvents using the actual controller. # Return back to agents initial location. Runs the ExhaustiveBFSController on scene_name. # if search_state.y > 1.3: # raise Exception("**** got big point ") A stripped down version of an event. Only contains lastActionSuccess, sceneName, and optionally state and frame. Does not contain the rest of the metadata. A stripped down version of the controller for non-interactive settings. Only allows for a few given actions. Note that you must use the ExhaustiveBFSController to first generate the data used by OfflineControllerWithSmallRotation. Data is stored in offline_data_dir/<scene_name>/. Can swap the metadata.json for a visible_object_map.json. A script for generating this is coming soon. If the swap is made then the OfflineControllerWithSmallRotation is faster and self.using_raw_metadata will be set to false. Additionally, images.hdf5 may be swapped out with ResNet features or anything that you want to be returned for event.frame. # metadata_file_name='metadata.json', # Allowed rotations. # Allowed horizons. # print('yoyoyoyoyoyoyoyoyoyoyoyoyo') # Determine if using the raw metadata, which is structured as a dictionary of # state -> metatdata. The alternative is a map of obj -> states where object is visible. # print('qqqqqqqqqqqqqqqqqqqqqqqqqq') # print('ppppppppppppppp') # return back to original state. # Uncomment if you want to view the frames side by side to # ensure that they are duplicated. 
# from matplotlib import pyplot as plt # fig = plt.figure() # fig.add_subplot(2,1,1) # plt.imshow(self.get_image()) # fig.add_subplot(2,1,2) # plt.imshow(viz_event.frame) # plt.show() This is for debugging. It modifies the state. Many ways to reach objId, which one is best? # transform from strings into states Visualize the best path from source to plan.
| 2.912344
| 3
|
tests/test.py
|
xumajie/face_recognition
| 0
|
6626819
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
@Time : 2019/6/5 13:11
@Author : xumj
'''
# Smoke test: encode two face crops of the same subject (id 209) with the
# CNN face locator and check that face_recognition matches them.
import face_recognition
import os
import sys
import numpy as np
from click.testing import CliRunner
from face_recognition import api
from face_recognition import face_recognition_cli
from face_recognition import face_detection_cli
import dlib

# image = face_recognition.load_image_file("test_images/obama.jpg")
# face_locations = face_recognition.face_locations(image)
# face_landmarks_list = face_recognition.face_landmarks(image)
# known_image = face_recognition.load_image_file("test_images/400_0.jpg")
# NOTE(review): hard-coded Windows paths; this only runs on that machine.
known_image = face_recognition.load_image_file("F:/faces/all_train/209/209_0.bmp")
# unknown_image = face_recognition.load_image_file("test_images/401_0.jpg")
unknown_image = face_recognition.load_image_file("F:/faces/all_train/209/209_1.bmp")
#
# Locate faces with the slower "cnn" model before computing 128-d encodings.
biden_encoding = face_recognition.face_encodings(known_image,face_recognition.face_locations(known_image,1,"cnn"))[0]
# biden_encoding = face_recognition.face_encodings(known_image)[0]
unknown_encoding = face_recognition.face_encodings(unknown_image,face_recognition.face_locations(unknown_image,1,"cnn"))[0]
# unknown_encoding = face_recognition.face_encodings(unknown_image)[0]
#
results = face_recognition.compare_faces([biden_encoding], unknown_encoding)
print(results)
print(sys.getfilesystemencoding())
print("")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
@Time : 2019/6/5 13:11
@Author : xumj
'''
import face_recognition
import os
import sys
import numpy as np
from click.testing import CliRunner
from face_recognition import api
from face_recognition import face_recognition_cli
from face_recognition import face_detection_cli
import dlib
# image = face_recognition.load_image_file("test_images/obama.jpg")
# face_locations = face_recognition.face_locations(image)
# face_landmarks_list = face_recognition.face_landmarks(image)
# known_image = face_recognition.load_image_file("test_images/400_0.jpg")
known_image = face_recognition.load_image_file("F:/faces/all_train/209/209_0.bmp")
# unknown_image = face_recognition.load_image_file("test_images/401_0.jpg")
unknown_image = face_recognition.load_image_file("F:/faces/all_train/209/209_1.bmp")
#
biden_encoding = face_recognition.face_encodings(known_image,face_recognition.face_locations(known_image,1,"cnn"))[0]
# biden_encoding = face_recognition.face_encodings(known_image)[0]
unknown_encoding = face_recognition.face_encodings(unknown_image,face_recognition.face_locations(unknown_image,1,"cnn"))[0]
# unknown_encoding = face_recognition.face_encodings(unknown_image)[0]
#
results = face_recognition.compare_faces([biden_encoding], unknown_encoding)
print(results)
print(sys.getfilesystemencoding())
print("")
|
en
| 0.626251
|
#!/usr/bin/env python # -*- coding: utf-8 -*- @Time : 2019/6/5 13:11 @Author : xumj # image = face_recognition.load_image_file("test_images/obama.jpg") # face_locations = face_recognition.face_locations(image) # face_landmarks_list = face_recognition.face_landmarks(image) # known_image = face_recognition.load_image_file("test_images/400_0.jpg") # unknown_image = face_recognition.load_image_file("test_images/401_0.jpg") # # biden_encoding = face_recognition.face_encodings(known_image)[0] # unknown_encoding = face_recognition.face_encodings(unknown_image)[0] #
| 2.526424
| 3
|
android/image/urls.py
|
gwhong917/A_C_C
| 0
|
6626820
|
<reponame>gwhong917/A_C_C
from django.urls import path
from . import views

# URL namespace for reversing, e.g. "image:postcreate".
app_name = 'image'

urlpatterns = [
    # Endpoint that creates an image post via the DRF view.
    path('image/', views.PostCreateAPIView.as_view(), name='postcreate'),
]
|
from django.urls import path
from . import views
app_name = 'image'
urlpatterns = [
path('image/', views.PostCreateAPIView.as_view(), name='postcreate'),
]
|
none
| 1
| 1.663608
| 2
|
|
musor/forms.py
|
MerlinEmris/eBazar
| 0
|
6626821
|
class ProfileForm(forms.ModelForm):
    """Model form for editing a user Profile (biography, location, birth date)."""

    # Free-text biography rendered as a textarea.
    biography = forms.CharField(
        max_length=2000,
        widget=forms.Textarea(),
        help_text='Write here your message!'
    )
    # Short location string with inline styling and a placeholder.
    location = forms.CharField(
        max_length=30,
        widget=forms.TextInput(
            attrs={
                'style': 'border-color: blue;',
                'placeholder': 'Write your location here'
            }
        ),
        help_text='your location please!'
    )
    # Rendered/parsed as day/month/year.
    birth_date = forms.DateField(
        widget=forms.widgets.DateInput(format="%d/%m/%Y"),
        help_text='input date by day month year !'
    )
    # user = forms.ChoiceField(
    #     choices=[(o.id, str(o)) for o in User.objects.all()],
    #     # widget=forms.HiddenInput(),
    # )
    # def clean(self):
    #     super(ProfileForm, self).full_clean()
    #     cleaned_data = super(ProfileForm, self).clean()
    #     biography = cleaned_data.get('biography')
    #     location = cleaned_data.get('location')
    #     birth_date = cleaned_data.get('birth_date')
    #     if not biography and not location and not birth_date:
    #         raise forms.ValidationError('You have to write something!')

    class Meta:
        model = Profile
        # fields = ('biography', 'location', 'birth_date', )
        # NOTE(review): empty exclude exposes every model field on the form.
        exclude = ()
|
class ProfileForm(forms.ModelForm):
biography = forms.CharField(
max_length=2000,
widget=forms.Textarea(),
help_text='Write here your message!'
)
location = forms.CharField(
max_length=30,
widget=forms.TextInput(
attrs={
'style': 'border-color: blue;',
'placeholder': 'Write your location here'
}
),
help_text='your location please!'
)
birth_date = forms.DateField(
widget=forms.widgets.DateInput(format="%d/%m/%Y"),
help_text='input date by day month year !'
)
# user = forms.ChoiceField(
# choices=[(o.id, str(o)) for o in User.objects.all()],
# # widget=forms.HiddenInput(),
# )
# def clean(self):
# super(ProfileForm, self).full_clean()
# cleaned_data = super(ProfileForm, self).clean()
# biography = cleaned_data.get('biography')
# location = cleaned_data.get('location')
# birth_date = cleaned_data.get('birth_date')
# if not biography and not location and not birth_date:
# raise forms.ValidationError('You have to write something!')
class Meta:
model = Profile
# fields = ('biography', 'location', 'birth_date', )
exclude = ()
|
en
| 0.533145
|
# user = forms.ChoiceField( # choices=[(o.id, str(o)) for o in User.objects.all()], # # widget=forms.HiddenInput(), # ) # def clean(self): # super(ProfileForm, self).full_clean() # cleaned_data = super(ProfileForm, self).clean() # biography = cleaned_data.get('biography') # location = cleaned_data.get('location') # birth_date = cleaned_data.get('birth_date') # if not biography and not location and not birth_date: # raise forms.ValidationError('You have to write something!') # fields = ('biography', 'location', 'birth_date', )
| 2.547554
| 3
|
odo/backends/tests/test_bcolz.py
|
farukht/odo
| 844
|
6626822
|
from __future__ import absolute_import, division, print_function
import pytest
pytest.importorskip('bcolz')
from odo.backends.bcolz import (append, convert, ctable, carray, resource,
discover, drop)
from odo.chunks import chunks
from odo import append, convert, discover, into
import numpy as np
from odo.utils import tmpfile, ignoring, filetext
from contextlib import contextmanager
import shutil
import os
import uuid
@contextmanager
def tmpbcolz(*args, **kwargs):
    """Yield a bcolz resource backed by a uniquely named temp dir, and
    remove the directory (after a best-effort flush) on exit."""
    fn = '.%s.bcolz' % str(uuid.uuid1())
    r = resource(fn, *args, **kwargs)
    try:
        yield r
    finally:
        # Best-effort flush; the handle may already be closed or invalid.
        with ignoring(Exception):
            r.flush()
        if os.path.exists(fn):
            shutil.rmtree(fn)
def eq(a, b):
    """Equality helper: collapse an elementwise ndarray comparison to a
    single truth value; pass scalar comparisons through unchanged."""
    result = a == b
    if isinstance(result, np.ndarray):
        return result.all()
    return result
# Shared fixtures: a small bcolz carray and a NumPy array used across tests.
a = carray([1, 2, 3, 4])
x = np.array([1, 2])
def test_discover():
    # Discovery of a carray matches discovery of its in-memory numpy copy.
    assert discover(a) == discover(a[:])


def test_convert():
    assert isinstance(convert(carray, np.ones([1, 2, 3])), carray)
    b = carray([1, 2, 3])
    assert isinstance(convert(np.ndarray, b), np.ndarray)


def test_chunks():
    # Splitting into chunksize-2 pieces yields two chunks that round-trip.
    c = convert(chunks(np.ndarray), a, chunksize=2)
    assert isinstance(c, chunks(np.ndarray))
    assert len(list(c)) == 2
    assert eq(list(c)[1], [3, 4])
    assert eq(convert(np.ndarray, c), a[:])


def test_append_chunks():
    b = carray(x)
    append(b, chunks(np.ndarray)([x, x]))
    assert len(b) == len(x) * 3


def test_append_other():
    b = carray(x)
    append(b, convert(list, x))
    assert len(b) == 2 * len(x)


def test_resource_ctable():
    with tmpbcolz(dshape='var * {name: string[5, "ascii"], balance: int32}') as r:
        assert isinstance(r, ctable)
        assert r.dtype == [('name', 'S5'), ('balance', 'i4')]


def get_expectedlen(x):
    """Read bcolz's stored expectedlen via its (version-dependent) meta reader."""
    # bcolz renamed the private reader across versions; accept either name.
    reader = getattr(x, '_read_meta', getattr(x, 'read_meta', None))
    assert reader is not None
    shape, cparams, dtype, _, expectedlen, _, chunklen = reader()
    return expectedlen


def test_resource_ctable_overrides_expectedlen():
    with tmpbcolz(dshape='100 * {name: string[5, "ascii"], balance: int32}',
                  expectedlen=200) as r:
        assert isinstance(r, ctable)
        assert r.dtype == [('name', 'S5'), ('balance', 'i4')]
        assert all(get_expectedlen(r[c]) == 200 for c in r.names)


def test_resource_ctable_correctly_infers_length():
    with tmpbcolz(dshape='100 * {name: string[5, "ascii"], balance: int32}') as r:
        assert isinstance(r, ctable)
        assert r.dtype == [('name', 'S5'), ('balance', 'i4')]
        assert all(get_expectedlen(r[c]) == 100 for c in r.names)


def test_resource_carray():
    with tmpbcolz(dshape='var * int32') as r:
        assert isinstance(r, carray)
        assert r.dtype == 'i4'
        assert r.shape == (0,)


def test_resource_existing_carray():
    with tmpbcolz(dshape='var * int32') as r:
        append(r, [1, 2, 3])
        r.flush()
        newr = resource(r.rootdir)
        assert isinstance(newr, carray)


def test_resource_carray_overrides_expectedlen():
    with tmpbcolz(dshape='100 * int32', expectedlen=200) as r:
        assert isinstance(r, carray)
        assert r.dtype == 'i4'
        assert r.shape == (100,)
        assert get_expectedlen(r) == 200
def test_resource_carray_correctly_infers_length():
    # Renamed: this carray variant previously reused the name
    # test_resource_ctable_correctly_infers_length, which is already taken by
    # the ctable test above, so Python's redefinition rules left pytest
    # collecting only one of the two tests.
    with tmpbcolz(dshape='100 * int32') as r:
        assert isinstance(r, carray)
        assert r.dtype == 'i4'
        assert get_expectedlen(r) == 100
def test_into_respects_expected_len_during_append():
with tmpfile('.bcolz') as fn:
b = into(fn, [1, 2, 3])
assert get_expectedlen(b) == 3
assert len(b) == 3
shutil.rmtree(fn)
def test_resource_nd_carray():
with tmpbcolz(dshape='10 * 10 * 10 * int32') as r:
assert isinstance(r, carray)
assert r.dtype == 'i4'
assert r.shape == (10, 10, 10)
y = np.array([('Alice', 100), ('Bob', 200)],
dtype=[('name', 'S7'), ('amount', 'i4')])
def test_convert_numpy_to_ctable():
b = convert(ctable, y)
assert isinstance(b, ctable)
assert eq(b[:], y)
def test_resource_existing_ctable():
with tmpfile('.bcolz') as fn:
r = into(fn, y)
r.flush()
r2 = resource(fn)
assert eq(r2[:], y)
shutil.rmtree(fn)
def test_drop():
with tmpbcolz(dshape='var * {name: string[5, "ascii"], balance: int32}') as b:
assert os.path.exists(b.rootdir)
drop(b)
assert not os.path.exists(b.rootdir)
def test_resource_shape():
with tmpbcolz(dshape='10 * int') as b:
assert b.shape == (10,)
with tmpbcolz(dshape='10 * 10 * int') as b:
assert b.shape == (10, 10)
with tmpbcolz(dshape='var * 10 * int') as b:
assert b.shape == (0, 10)
def test_csv_to_bcolz():
with filetext('name,runway,takeoff,datetime_nearest_close\n'
'S28,28,TRUE,A\n'
'S16,16,TRUE,Q\n'
'L14,14,FALSE,I', extension='csv') as src:
with tmpfile('bcolz') as tgt:
bc = into(tgt, src)
assert len(bc) == 3
|
from __future__ import absolute_import, division, print_function
import pytest
pytest.importorskip('bcolz')
from odo.backends.bcolz import (append, convert, ctable, carray, resource,
discover, drop)
from odo.chunks import chunks
from odo import append, convert, discover, into
import numpy as np
from odo.utils import tmpfile, ignoring, filetext
from contextlib import contextmanager
import shutil
import os
import uuid
@contextmanager
def tmpbcolz(*args, **kwargs):
    """Yield a bcolz resource backed by a throwaway on-disk rootdir."""
    rootdir = '.%s.bcolz' % str(uuid.uuid1())
    res = resource(rootdir, *args, **kwargs)
    try:
        yield res
    finally:
        # Best-effort flush: the resource may already be closed or dropped.
        with ignoring(Exception):
            res.flush()
        if os.path.exists(rootdir):
            shutil.rmtree(rootdir)
def eq(a, b):
c = a == b
if isinstance(c, np.ndarray):
c = c.all()
return c
a = carray([1, 2, 3, 4])
x = np.array([1, 2])
def test_discover():
assert discover(a) == discover(a[:])
def test_convert():
assert isinstance(convert(carray, np.ones([1, 2, 3])), carray)
b = carray([1, 2, 3])
assert isinstance(convert(np.ndarray, b), np.ndarray)
def test_chunks():
c = convert(chunks(np.ndarray), a, chunksize=2)
assert isinstance(c, chunks(np.ndarray))
assert len(list(c)) == 2
assert eq(list(c)[1], [3, 4])
assert eq(convert(np.ndarray, c), a[:])
def test_append_chunks():
b = carray(x)
append(b, chunks(np.ndarray)([x, x]))
assert len(b) == len(x) * 3
def test_append_other():
b = carray(x)
append(b, convert(list, x))
assert len(b) == 2 * len(x)
def test_resource_ctable():
with tmpbcolz(dshape='var * {name: string[5, "ascii"], balance: int32}') as r:
assert isinstance(r, ctable)
assert r.dtype == [('name', 'S5'), ('balance', 'i4')]
def get_expectedlen(x):
reader = getattr(x, '_read_meta', getattr(x, 'read_meta', None))
assert reader is not None
shape, cparams, dtype, _, expectedlen, _, chunklen = reader()
return expectedlen
def test_resource_ctable_overrides_expectedlen():
with tmpbcolz(dshape='100 * {name: string[5, "ascii"], balance: int32}',
expectedlen=200) as r:
assert isinstance(r, ctable)
assert r.dtype == [('name', 'S5'), ('balance', 'i4')]
assert all(get_expectedlen(r[c]) == 200 for c in r.names)
def test_resource_ctable_correctly_infers_length():
with tmpbcolz(dshape='100 * {name: string[5, "ascii"], balance: int32}') as r:
assert isinstance(r, ctable)
assert r.dtype == [('name', 'S5'), ('balance', 'i4')]
assert all(get_expectedlen(r[c]) == 100 for c in r.names)
def test_resource_carray():
with tmpbcolz(dshape='var * int32') as r:
assert isinstance(r, carray)
assert r.dtype == 'i4'
assert r.shape == (0,)
def test_resource_existing_carray():
with tmpbcolz(dshape='var * int32') as r:
append(r, [1, 2, 3])
r.flush()
newr = resource(r.rootdir)
assert isinstance(newr, carray)
def test_resource_carray_overrides_expectedlen():
with tmpbcolz(dshape='100 * int32', expectedlen=200) as r:
assert isinstance(r, carray)
assert r.dtype == 'i4'
assert r.shape == (100,)
assert get_expectedlen(r) == 200
def test_resource_carray_correctly_infers_length():
    # Renamed: this carray variant previously reused the name
    # test_resource_ctable_correctly_infers_length, which is already taken by
    # the ctable test above, so Python's redefinition rules left pytest
    # collecting only one of the two tests.
    with tmpbcolz(dshape='100 * int32') as r:
        assert isinstance(r, carray)
        assert r.dtype == 'i4'
        assert get_expectedlen(r) == 100
def test_into_respects_expected_len_during_append():
with tmpfile('.bcolz') as fn:
b = into(fn, [1, 2, 3])
assert get_expectedlen(b) == 3
assert len(b) == 3
shutil.rmtree(fn)
def test_resource_nd_carray():
with tmpbcolz(dshape='10 * 10 * 10 * int32') as r:
assert isinstance(r, carray)
assert r.dtype == 'i4'
assert r.shape == (10, 10, 10)
y = np.array([('Alice', 100), ('Bob', 200)],
dtype=[('name', 'S7'), ('amount', 'i4')])
def test_convert_numpy_to_ctable():
b = convert(ctable, y)
assert isinstance(b, ctable)
assert eq(b[:], y)
def test_resource_existing_ctable():
with tmpfile('.bcolz') as fn:
r = into(fn, y)
r.flush()
r2 = resource(fn)
assert eq(r2[:], y)
shutil.rmtree(fn)
def test_drop():
with tmpbcolz(dshape='var * {name: string[5, "ascii"], balance: int32}') as b:
assert os.path.exists(b.rootdir)
drop(b)
assert not os.path.exists(b.rootdir)
def test_resource_shape():
with tmpbcolz(dshape='10 * int') as b:
assert b.shape == (10,)
with tmpbcolz(dshape='10 * 10 * int') as b:
assert b.shape == (10, 10)
with tmpbcolz(dshape='var * 10 * int') as b:
assert b.shape == (0, 10)
def test_csv_to_bcolz():
with filetext('name,runway,takeoff,datetime_nearest_close\n'
'S28,28,TRUE,A\n'
'S16,16,TRUE,Q\n'
'L14,14,FALSE,I', extension='csv') as src:
with tmpfile('bcolz') as tgt:
bc = into(tgt, src)
assert len(bc) == 3
|
none
| 1
| 2.05971
| 2
|
|
docs/core/examples/stdiodemo.py
|
neberej/twisted
| 1
|
6626823
|
#!/usr/bin/env python
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Example using stdio, Deferreds, LineReceiver and twisted.web.client.
Note that the WebCheckerCommandProtocol protocol could easily be used in e.g.
a telnet server instead; see the comments for details.
Based on an example by <NAME>.
"""
from twisted.internet import stdio, reactor
from twisted.protocols import basic
from twisted.web import client
class WebCheckerCommandProtocol(basic.LineReceiver):
    """Line-oriented command console that checks web pages on demand.

    Each received line is parsed as ``<command> [args...]`` and dispatched to
    a matching ``do_<command>`` method, so adding a command only requires
    adding another ``do_*`` method.

    NOTE(review): sendLine is called with str throughout; modern Twisted's
    LineReceiver expects bytes on Python 3 — confirm the targeted
    Twisted/Python versions before reuse.
    """
    delimiter = '\n' # unix terminal style newlines. remove this line
                     # for use with Telnet
    def connectionMade(self):
        # Greet the user as soon as stdio (or a telnet peer) connects.
        self.sendLine("Web checker console. Type 'help' for help.")
    def lineReceived(self, line):
        """Parse one input line and dispatch it to a do_* handler."""
        # Ignore blank lines
        if not line: return
        # Parse the command
        commandParts = line.split()
        command = commandParts[0].lower()
        args = commandParts[1:]
        # Dispatch the command to the appropriate method. Note that all you
        # need to do to implement a new command is add another do_* method.
        try:
            method = getattr(self, 'do_' + command)
        except AttributeError as e:
            self.sendLine('Error: no such command.')
        else:
            try:
                # Report handler failures to the user instead of crashing.
                method(*args)
            except Exception as e:
                self.sendLine('Error: ' + str(e))
    def do_help(self, command=None):
        """help [command]: List commands, or show help on the given command"""
        if command:
            self.sendLine(getattr(self, 'do_' + command).__doc__)
        else:
            commands = [cmd[3:] for cmd in dir(self) if cmd.startswith('do_')]
            self.sendLine("Valid commands: " +" ".join(commands))
    def do_quit(self):
        """quit: Quit this session"""
        self.sendLine('Goodbye.')
        self.transport.loseConnection()
    def do_check(self, url):
        """check <url>: Attempt to download the given web page"""
        # Fire an asynchronous GET; the private callbacks report the outcome.
        client.Agent(reactor).request('GET', url).addCallback(
            client.readBody).addCallback(
            self.__checkSuccess).addErrback(
            self.__checkFailure)
    def __checkSuccess(self, pageData):
        # Invoked with the downloaded body once the request completes.
        self.sendLine("Success: got %i bytes." % len(pageData))
    def __checkFailure(self, failure):
        # Invoked with a Failure if the request errs out anywhere in the chain.
        self.sendLine("Failure: " + failure.getErrorMessage())
    def connectionLost(self, reason):
        # stop the reactor, only because this is meant to be run in Stdio.
        reactor.stop()
if __name__ == "__main__":
    # Wire the protocol to this process's stdin/stdout and run the reactor
    # until connectionLost stops it.
    stdio.StandardIO(WebCheckerCommandProtocol())
    reactor.run()
|
#!/usr/bin/env python
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Example using stdio, Deferreds, LineReceiver and twisted.web.client.
Note that the WebCheckerCommandProtocol protocol could easily be used in e.g.
a telnet server instead; see the comments for details.
Based on an example by <NAME>.
"""
from twisted.internet import stdio, reactor
from twisted.protocols import basic
from twisted.web import client
class WebCheckerCommandProtocol(basic.LineReceiver):
delimiter = '\n' # unix terminal style newlines. remove this line
# for use with Telnet
def connectionMade(self):
self.sendLine("Web checker console. Type 'help' for help.")
def lineReceived(self, line):
# Ignore blank lines
if not line: return
# Parse the command
commandParts = line.split()
command = commandParts[0].lower()
args = commandParts[1:]
# Dispatch the command to the appropriate method. Note that all you
# need to do to implement a new command is add another do_* method.
try:
method = getattr(self, 'do_' + command)
except AttributeError as e:
self.sendLine('Error: no such command.')
else:
try:
method(*args)
except Exception as e:
self.sendLine('Error: ' + str(e))
def do_help(self, command=None):
"""help [command]: List commands, or show help on the given command"""
if command:
self.sendLine(getattr(self, 'do_' + command).__doc__)
else:
commands = [cmd[3:] for cmd in dir(self) if cmd.startswith('do_')]
self.sendLine("Valid commands: " +" ".join(commands))
def do_quit(self):
"""quit: Quit this session"""
self.sendLine('Goodbye.')
self.transport.loseConnection()
def do_check(self, url):
"""check <url>: Attempt to download the given web page"""
client.Agent(reactor).request('GET', url).addCallback(
client.readBody).addCallback(
self.__checkSuccess).addErrback(
self.__checkFailure)
def __checkSuccess(self, pageData):
self.sendLine("Success: got %i bytes." % len(pageData))
def __checkFailure(self, failure):
self.sendLine("Failure: " + failure.getErrorMessage())
def connectionLost(self, reason):
# stop the reactor, only because this is meant to be run in Stdio.
reactor.stop()
if __name__ == "__main__":
stdio.StandardIO(WebCheckerCommandProtocol())
reactor.run()
|
en
| 0.781507
|
#!/usr/bin/env python # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. Example using stdio, Deferreds, LineReceiver and twisted.web.client. Note that the WebCheckerCommandProtocol protocol could easily be used in e.g. a telnet server instead; see the comments for details. Based on an example by <NAME>. # unix terminal style newlines. remove this line # for use with Telnet # Ignore blank lines # Parse the command # Dispatch the command to the appropriate method. Note that all you # need to do to implement a new command is add another do_* method. help [command]: List commands, or show help on the given command quit: Quit this session check <url>: Attempt to download the given web page # stop the reactor, only because this is meant to be run in Stdio.
| 2.70718
| 3
|
crslab/model/recommendation/popularity/popularity.py
|
Xiaolong-Qi/CRSLab
| 1
|
6626824
|
<gh_stars>1-10
# @Time : 2020/12/16
# @Author : <NAME>
# @Email : <EMAIL>
# UPDATE
# @Time : 2020/12/29, 2021/1/4
# @Author : <NAME>, <NAME>
# @email : <EMAIL>, <EMAIL>
from collections import defaultdict
import torch
from loguru import logger
from crslab.model.base_model import BaseModel
class PopularityModel(BaseModel):
    """Recommend items purely by how often they occur in the training data.

    Attributes:
        item_size: A integer indicating the number of items
    """

    def __init__(self, opt, device, vocab, side_data):
        """
        Args:
            opt (dict): A dictionary record the hyper parameters
            device (torch.device): A variable indicating which device to place the data and model
            vocab (dict): A dictionary record the vocabulary information
            side_data (dict): A dictionary record the side data
        """
        self.item_size = vocab['n_entity']
        super(PopularityModel, self).__init__(opt, device)

    def build_model(self):
        # Frequency table: item id -> number of occurrences seen so far.
        self.item_frequency = defaultdict(int)
        logger.debug('[Finish build rec layer]')

    def recommend(self, batch, mode):
        context, mask, input_ids, target_pos, input_mask, sample_negs, y = batch
        if mode == 'train':
            # Accumulate occurrence counts over every id in the batch.
            for sequence in input_ids:
                for item in sequence:
                    self.item_frequency[item.item()] += 1
        batch_size = input_ids.shape[0]
        # Every row of the batch receives the same popularity-based scores.
        frequencies = [self.item_frequency.get(item_id, 0) for item_id in range(self.item_size)]
        rec_scores = torch.tensor([frequencies] * batch_size, dtype=torch.long)
        # The model is non-parametric, so return a dummy zero loss.
        loss = torch.zeros(1, requires_grad=True)
        return loss, rec_scores
|
# @Time : 2020/12/16
# @Author : <NAME>
# @Email : <EMAIL>
# UPDATE
# @Time : 2020/12/29, 2021/1/4
# @Author : <NAME>, <NAME>
# @email : <EMAIL>, <EMAIL>
from collections import defaultdict
import torch
from loguru import logger
from crslab.model.base_model import BaseModel
class PopularityModel(BaseModel):
"""
Attributes:
item_size: A integer indicating the number of items
"""
def __init__(self, opt, device, vocab, side_data):
"""
Args:
opt (dict): A dictionary record the hyper parameters
device (torch.device): A variable indicating which device to place the data and model
vocab (dict): A dictionary record the vocabulary information
side_data (dict): A dictionary record the side data
"""
self.item_size = vocab['n_entity']
super(PopularityModel, self).__init__(opt, device)
def build_model(self):
self.item_frequency = defaultdict(int)
logger.debug('[Finish build rec layer]')
def recommend(self, batch, mode):
context, mask, input_ids, target_pos, input_mask, sample_negs, y = batch
if mode == 'train':
for ids in input_ids:
for id in ids:
self.item_frequency[id.item()] += 1
bs = input_ids.shape[0]
rec_score = [self.item_frequency.get(item_id, 0) for item_id in range(self.item_size)]
rec_scores = torch.tensor([rec_score] * bs, dtype=torch.long)
loss = torch.zeros(1, requires_grad=True)
return loss, rec_scores
|
en
| 0.40266
|
# @Time : 2020/12/16 # @Author : <NAME> # @Email : <EMAIL> # UPDATE # @Time : 2020/12/29, 2021/1/4 # @Author : <NAME>, <NAME> # @email : <EMAIL>, <EMAIL> Attributes: item_size: A integer indicating the number of items Args: opt (dict): A dictionary record the hyper parameters device (torch.device): A variable indicating which device to place the data and model vocab (dict): A dictionary record the vocabulary information side_data (dict): A dictionary record the side data
| 2.329537
| 2
|
Mybase/solver.py
|
czyczyyzc/MyForElise
| 0
|
6626825
|
import os
import cv2
import pickle
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from collections import defaultdict
from .load_weights import *
from .optim_utils import *
from .yale_utils.make_image import *
def get_data(fid):
    """Read one pickled object from *fid*; return (1, obj), or (0, 0) at EOF."""
    try:
        item = pickle.load(fid)
    except EOFError:
        # Exhausted the stream: signal failure with a zero flag.
        return 0, 0
    return 1, item
def get_all_data(fid):
    """Collect every pickled object remaining in *fid* into a list."""
    collected = []
    while True:
        sig, item = get_data(fid)
        if not sig:
            # EOF reached; stop draining the stream.
            break
        collected.append(item)
    return collected
class Solver(object):
def __init__(self, mdl, **kwargs):
self.mdl = mdl
self.opm_cfg = kwargs.pop('opm_cfg', {})
self.gpu_lst = kwargs.pop('gpu_lst', '0')
self.gpu_num = len(self.gpu_lst.split(','))
self.mdl_dev = '/cpu:%d' if self.gpu_num == 0 else '/gpu:%d'
self.MDL_DEV = 'CPU_%d' if self.gpu_num == 0 else 'GPU_%d'
self.gpu_num = 1 if self.gpu_num == 0 else self.gpu_num
self.bat_siz = kwargs.pop('bat_siz', 2)
self.bat_siz_all = self.bat_siz * self.gpu_num
self.tra_num = kwargs.pop('tra_num', 8000)
self.val_num = kwargs.pop('val_num', 80)
self.epc_num = kwargs.pop('epc_num', 10)
self.min_que_tra = kwargs.pop('min_que_tra', 5000)
self.min_que_val = kwargs.pop('min_que_val', 1000)
self.prt_ena = kwargs.pop('prt_ena', True)
self.itr_per_prt = kwargs.pop('itr_per_prt', 20)
self.tst_num = kwargs.pop('tst_num', None)
self.tst_shw = kwargs.pop('tst_shw', True)
self.tst_sav = kwargs.pop('tst_sav', True)
self.mdl_nam = kwargs.pop('mdl_nam', 'model.ckpt' )
self.mdl_dir = kwargs.pop('mdl_dir', 'Mybase/Model' )
self.log_dir = kwargs.pop('log_dir', 'Mybase/logdata' )
self.dat_dir = kwargs.pop('dat_dir', 'Mybase/datasets')
self.mov_ave_dca = kwargs.pop('mov_ave_dca', 0.99)
self.dat_dir_tra = self.dat_dir + '/train'
self.dat_dir_val = self.dat_dir + '/val'
self.dat_dir_tst = self.dat_dir + '/test'
self.dat_dir_rst = self.dat_dir + '/result'
self.log_dir_tra = self.log_dir + '/train'
self.log_dir_val = self.log_dir + '/val'
self.log_dir_tst = self.log_dir + '/test'
os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'
os.environ['CUDA_VISIBLE_DEVICES'] = self.gpu_lst
if len(kwargs) > 0:
extra = ', '.join('%s' % k for k in kwargs.keys())
raise ValueError('Unrecognized arguments %s' % extra)
"""
###############################For CLASSIFY################################
def _train_step(self, mtra=None, mtst=None, glb_stp=None):
#将简单的运算放在CPU上,只有神经网络的训练过程放在GPU上
with tf.device('/cpu:0'):
self.mdl.mod_tra = True
GI_tra = GeneratorForImageNet(True, self.dat_dir_tra, self.bat_siz, self.epc_num, \
self.min_que_tra, self.gpu_lst, 32)
GI_val = GeneratorForImageNet(False, self.dat_dir_val, self.bat_siz, self.epc_num, \
self.min_que_val, self.gpu_lst, 1)
imgs_lst_tra, lbls_lst_tra = GI_tra.get_input()
imgs_lst_val, lbls_lst_val = GI_val.get_input()
#with tf.name_scope('input_image'):
# tf.summary.image('input', X, 10)
self.opm_cfg['decay_step'] = self.opm_cfg['decay_step'] * self.tra_num
tra_stp, lrn_rat = update_rule(self.opm_cfg, glb_stp)
grds_lst = []
loss_lst = []
accs_lst = []
for i in range(self.gpu_num):
with tf.device(self.mdl_dev % i):
with tf.name_scope(self.MDL_DEV % i) as scp:
imgs_tra = GI_tra.preprocessing1(imgs_lst_tra[i])
imgs_val = GI_val.preprocessing1(imgs_lst_val[i])
lbls_tra = lbls_lst_tra[i]
lbls_val = lbls_lst_val[i]
imgs = tf.cond(mtst, lambda: imgs_val, lambda: imgs_tra, strict=True)
lbls = tf.cond(mtst, lambda: lbls_val, lambda: lbls_tra, strict=True)
loss, accs = \
self.mdl.forward(imgs, lbls, mtra=mtra, scp=scp)
#在第一次声明变量之后,将控制变量重用的参数设置为True。这样可以让不同的GPU更新同一组参数
#注意tf.name_scope函数并不会影响tf.get_variable的命名空间
tf.get_variable_scope().reuse_variables()
#使用当前GPU计算所有变量的梯度
grds = tra_stp.compute_gradients(loss[0])
#print(grds)
grds_lst.append(grds)
loss_lst.append(loss)
accs_lst.append(accs)
'''
with tf.variable_scope('average', reuse = tf.AUTO_REUSE):
mov_ave = tf.train.ExponentialMovingAverage(self.mov_ave_dca, glb_stp)
mov_ave_op = mov_ave.apply(tf.trainable_variables())
tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, mov_ave_op)
'''
with tf.variable_scope('optimize', reuse = tf.AUTO_REUSE):
grds = average_gradients(grds_lst)
upd_opas = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(upd_opas):
tra_opa = tra_stp.apply_gradients(grds, global_step=glb_stp)
loss = tf.stack(loss_lst, axis=0)
accs = tf.stack(accs_lst, axis=0)
#tf.summary.scalar('loss', loss)
#tf.summary.scalar('acc', acc)
#for grad, var in grads:
# if grad is not None:
# tf.summary.histogram('gradients_on_average/%s' % var.op.name, grad)
#for var in tf.trainable_variables():
# tf.summary.histogram(var.op.name, var)
return tra_opa, lrn_rat, loss, accs
"""
###############################For Segmentation################################
def _train_step(self, mtra=None, mtst=None, glb_stp=None):
#将简单的运算放在CPU上,只有神经网络的训练过程放在GPU上
with tf.device('/cpu:0'):
self.mdl.mod_tra = True
GV_tra = GeneratorForVOC(True, self.dat_dir_tra, self.bat_siz, self.epc_num, self.min_que_tra, self.gpu_lst, 2)
GV_val = GeneratorForVOC(False, self.dat_dir_val, self.bat_siz, self.epc_num, self.min_que_val, self.gpu_lst, 1)
imgs_lst_tra, gbxs_lst_tra, gmk_inss_lst_tra, gmk_sems_lst_tra, \
gbx_nums_lst_tra, img_hgts_lst_tra, img_wdhs_lst_tra = GV_tra.get_input()
imgs_lst_val, gbxs_lst_val, gmk_inss_lst_val, gmk_sems_lst_val, \
gbx_nums_lst_val, img_hgts_lst_val, img_wdhs_lst_val = GV_val.get_input()
#with tf.name_scope('input_image'):
# tf.summary.image('input', X, 10)
self.opm_cfg['decay_step'] = self.opm_cfg['decay_step'] * self.tra_num
tra_stp, lrn_rat = update_rule(self.opm_cfg, glb_stp)
loss_lst = []
accs_lst = []
#msks_lst = []
grds_lst = []
for i in range(self.gpu_num):
with tf.device(self.mdl_dev % i):
with tf.name_scope(self.MDL_DEV % i) as scp:
imgs_tra, _, _, gmks_tra, _, _ = \
GV_tra.preprocessing1(imgs_lst_tra[i], gbxs_lst_tra[i], gmk_inss_lst_tra[i], gmk_sems_lst_tra[i], \
gbx_nums_lst_tra[i], img_hgts_lst_tra[i], img_wdhs_lst_tra[i])
imgs_val, _, _, gmks_val, _, _ = \
GV_val.preprocessing1(imgs_lst_val[i], gbxs_lst_val[i], gmk_inss_lst_val[i], gmk_sems_lst_val[i], \
gbx_nums_lst_val[i], img_hgts_lst_val[i], img_wdhs_lst_val[i])
imgs = tf.cond(mtst, lambda: imgs_val, lambda: imgs_tra, strict=True)
gmks = tf.cond(mtst, lambda: gmks_val, lambda: gmks_tra, strict=True)
loss, accs, msks = \
self.mdl.forward(imgs, gmks, mtra, scp)
#在第一次声明变量之后,将控制变量重用的参数设置为True。这样可以让不同的GPU更新同一组参数
#注意tf.name_scope函数并不会影响tf.get_variable的命名空间
tf.get_variable_scope().reuse_variables()
#使用当前GPU计算所有变量的梯度
vars_lst = tf.trainable_variables()
#vars_lst= [v for v in vars_lst if 'generator/' in v.name]
grds = tra_stp.compute_gradients(loss[0], var_list=vars_lst)
#print(grds)
grds_lst.append(grds)
loss_lst.append(loss)
accs_lst.append(accs)
#msks_lst.append(msks)
'''
with tf.variable_scope('average', reuse = tf.AUTO_REUSE):
mov_ave = tf.train.ExponentialMovingAverage(self.mov_ave_dca, glb_stp)
mov_ave_op = mov_ave.apply(tf.trainable_variables())
tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, mov_ave_op)
'''
upd_opas = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(upd_opas):
with tf.variable_scope('optimize', reuse = tf.AUTO_REUSE):
grds = average_gradients(grds_lst)
tra_opa = tra_stp.apply_gradients(grds, global_step=glb_stp)
loss = tf.stack(loss_lst, axis=0)
accs = tf.concat(accs_lst, axis=0)
msks = None
#msks= tf.concat(msks_lst, axis=0)
#tf.summary.scalar('loss', loss)
#tf.summary.scalar('acc', acc)
#for grad, var in grads:
# if grad is not None:
# tf.summary.histogram('gradients_on_average/%s' % var.op.name, grad)
#for var in tf.trainable_variables():
# tf.summary.histogram(var.op.name, var)
return tra_opa, lrn_rat, loss, accs, msks
###############################For Segmentation################################
def _test_step(self):
with tf.device("/cpu:0"):
self.mdl.mod_tra = False
mtra = tf.constant(False, dtype=tf.bool)
GV = GeneratorForVOC(False, self.dat_dir_tst, self.bat_siz, self.epc_num, self.min_que_tra, self.gpu_lst, None)
imgs_lst, gbxs_lst, gmk_inss_lst, gmk_sems_lst, gbx_nums_lst, img_hgts_lst, img_wdhs_lst, img_nams_lst = GV.get_input2()
msks_lst = []
img_wdws_lst = []
for i in range(self.gpu_num):
with tf.device(self.mdl_dev % i):
with tf.name_scope(self.MDL_DEV % i) as scp:
imgs, _, _, gmks, _, img_wdws = \
GV.preprocessing1(imgs_lst[i], gbxs_lst[i], gmk_inss_lst[i], gmk_sems_lst[i], \
gbx_nums_lst[i], img_hgts_lst[i], img_wdhs_lst[i])
msks = self.mdl.forward(imgs, gmks, mtra, scp)
#在第一次声明变量之后,将控制变量重用的参数设置为True。这样可以让不同的GPU更新同一组参数
#注意tf.name_scope函数并不会影响tf.get_variable的命名空间
tf.get_variable_scope().reuse_variables()
msks_lst .append(msks)
img_wdws_lst.append(img_wdws)
msks = tf.concat(msks_lst, axis=0) #(N, H, W)
img_wdws = tf.concat(img_wdws_lst, axis=0)
img_hgts = tf.concat(img_hgts_lst, axis=0)
img_wdhs = tf.concat(img_wdhs_lst, axis=0)
img_nams = tf.concat(img_nams_lst, axis=0)
return msks, img_wdws, img_hgts, img_wdhs, img_nams
def concat(self, sess=None, fetches=None, feed_dict=None, itr_num=None):
rsts_lst = [[] for _ in range(len(fetches))]
itr_cnt = 0
try:
while True:
rsts = sess.run(fetches, feed_dict=feed_dict)
for i, rst in enumerate(rsts):
rsts_lst[i].append(rst)
itr_cnt = itr_cnt + 1
if itr_num != None and itr_cnt >= itr_num:
break
except tf.errors.OutOfRangeError:
print('Have reached the end of the dataset!')
for i, rst in enumerate(rsts_lst):
rsts_lst[i] = np.concatenate(rst, axis=0)
return rsts_lst
def merge(self, rsts=None, rst_nums=None):
rst_imxs = []
rsts_lst = [[] for _ in range(len(rsts))]
for i, rst_num in enumerate(rst_nums): #batch
rst_imxs.extend([i]*rst_num)
for j, rst in enumerate(rsts): #tensors
rsts_lst[j].append(rst[i][:rst_num])
rst_imxs = np.asarray(rst_imxs, dtype=np.int32)
for i, rst in enumerate(rsts_lst):
rsts_lst[i] = np.concatenate(rst, axis=0)
return rsts_lst, rst_imxs
"""
#####################################For CLASSIFY#####################################
def train(self):
tf.reset_default_graph()
mtra = tf.placeholder(dtype=tf.bool, name='train')
mtst = tf.placeholder(dtype=tf.bool, name='test' )
glb_stp = tf.Variable(0, trainable=False, name='global_step', dtype=tf.int64)
tra_opa, lrn_rat, loss, accs = self._train_step(mtra, mtst, glb_stp)
#var = tf.trainable_variables() #tf.global_variables()
#var = [v for v in var if 'layers_module1_0/' in v.name or 'layers_module1_1/' in v.name]
#var = [v for v in var if 'average/' not in v.name and 'optimize/' not in v.name]
#var_ave = tf.train.ExponentialMovingAverage(self.mv_ave_decay, glb_stp)
#var = var_ave.variables_to_restore()
#saver = tf.train.Saver(var)
#tf.summary.scalar('loss', loss)
#summary_op = tf.summary.merge_all()
#summary_loss = tf.summary.merge(loss)
#writer = tf.summary.FileWriter(LOG_PATH, sess.graph, flush_secs=5) #tf.get_default_graph()
#gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction = 0.8)
#config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, gpu_options=gpu_options)
#config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, device_count={'CPU': 2}, \
# inter_op_parallelism_threads=16, intra_op_parallelism_threads=16)
config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True)
with tf.Session(config=config) as sess:
init_op = (tf.global_variables_initializer(), tf.local_variables_initializer())
sess.run(init_op)
#coord = tf.train.Coordinator()
#threads = tf.train.start_queue_runners(sess=sess, coord=coord)
ckpt = tf.train.get_checkpoint_state(self.mdl_dir)
saver = tf.train.Saver()
if ckpt and ckpt.model_checkpoint_path:
saver.restore(sess, ckpt.model_checkpoint_path)
with open(os.path.join(self.log_dir_tra, 'accs'), 'ab') as fid_tra_accs, \
open(os.path.join(self.log_dir_tra, 'loss'), 'ab') as fid_tra_loss, \
open(os.path.join(self.log_dir_val, 'accs'), 'ab') as fid_val_accs, \
open(os.path.join(self.log_dir_val, 'loss'), 'ab') as fid_val_loss:
try:
tra_cnt = 0
epc_cnt = 0
tra_loss_lst = []
while True:
#_, summary, loss1, = sess.run([train_op, summary_op, loss], feed_dict = {mtrain:True})
#writer.add_summary(summary, global_step=glb_stp.eval())
_, tra_loss = sess.run([tra_opa, loss], feed_dict={mtra:True, mtst:False})
tra_loss_lst.append(tra_loss)
tra_cnt = tra_cnt + 1
if self.prt_ena and tra_cnt % self.itr_per_prt == 0:
tra_loss_lst = np.concatenate(tra_loss_lst, axis=0)
tra_loss = np.mean(tra_loss_lst, axis=0)
tra_loss_lst = []
print('(Iteration %d) losses: %s' % (tra_cnt, str(tra_loss)))
if tra_cnt % self.tra_num == 0:
epc_cnt = epc_cnt + 1
saver.save(sess, os.path.join(self.mdl_dir, self.mdl_nam), global_step=glb_stp)
fetches = [accs, loss]
feed_dict = {mtra:False, mtst:False}
tra_accs, tra_loss = self.concat(sess, fetches, feed_dict, self.val_num)
fetches = [accs, loss]
feed_dict = {mtra:False, mtst:True }
val_accs, val_loss = self.concat(sess, fetches, feed_dict, self.val_num)
tra_accs = np.mean(tra_accs, axis=0)
val_accs = np.mean(val_accs, axis=0)
tra_loss = np.mean(tra_loss, axis=0)
val_loss = np.mean(val_loss, axis=0)
pickle.dump(tra_accs, fid_tra_accs, pickle.HIGHEST_PROTOCOL)
pickle.dump(val_accs, fid_val_accs, pickle.HIGHEST_PROTOCOL)
pickle.dump(tra_loss, fid_tra_loss, pickle.HIGHEST_PROTOCOL)
pickle.dump(val_loss, fid_val_loss, pickle.HIGHEST_PROTOCOL)
if self.prt_ena:
print('(Epoch %d) lrn_rate: %f \n tra_accs: %s \n val_accs: %s \n tra_loss: %s \n val_loss: %s' \
% (epc_cnt, lrn_rat.eval(), str(tra_accs), str(val_accs), str(tra_loss), str(val_loss)))
except tf.errors.OutOfRangeError:
print('Training is over!')
#coord.request_stop()
#coord.join(threads)
"""
#####################################For Segmentation##################################
def train(self):
tf.reset_default_graph()
mtra = tf.placeholder(dtype=tf.bool, name='train')
mtst = tf.placeholder(dtype=tf.bool, name='test' )
glb_stp = tf.Variable(0, trainable=False, name='global_step', dtype=tf.int64)
tra_opa, lrn_rat, loss, accs, msks = self._train_step(mtra, mtst, glb_stp)
#with tf.device(self.mdl_dev % 0):
#tf.summary.scalar('loss', loss)
#summary_op = tf.summary.merge_all()
#summary_loss = tf.summary.merge(loss)
#writer = tf.summary.FileWriter(LOG_PATH, sess.graph, flush_secs=5) #tf.get_default_graph()
#gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction = 0.8)
#config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, gpu_options=gpu_options)
#config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, device_count={'CPU': 2}, \
# inter_op_parallelism_threads=16, intra_op_parallelism_threads=16)
config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True)
with tf.Session(config=config) as sess:
init_op = (tf.global_variables_initializer(), tf.local_variables_initializer())
sess.run(init_op)
saver = tf.train.Saver()
ckpt = tf.train.get_checkpoint_state(self.mdl_dir)
if ckpt and ckpt.model_checkpoint_path:
var = tf.global_variables() #tf.global_variables()tf.trainable_variables()
#var = [v for v in var if 'average/' not in v.name and 'optimize/' not in v.name]
#var = [v for v in var if 'group_block1_0_0' in v.name or 'group_block1_0_1' in v.name \
# or 'group_block1_0_2' in v.name or 'group_block1_0_3' in v.name]
#var_ave= tf.train.ExponentialMovingAverage(self.mv_ave_decay, glb_stp)
#var = var_ave.variables_to_restore()
saver = tf.train.Saver(var)
'''
mydict = load_weights()
mykeys = mydict.keys()
for i, v in enumerate(var):
if v.name in mykeys:
sess.run(tf.assign(v, mydict[v.name], validate_shape=True, use_locking=True))
else:
print(v.name)
saver.save(sess, os.path.join(self.mdl_dir, self.mdl_nam), global_step=glb_stp)
return
'''
saver.restore(sess, ckpt.model_checkpoint_path)
saver = tf.train.Saver()
with open(os.path.join(self.log_dir_tra, 'accs'), 'ab') as fid_tra_accs, \
open(os.path.join(self.log_dir_tra, 'loss'), 'ab') as fid_tra_loss, \
open(os.path.join(self.log_dir_val, 'accs'), 'ab') as fid_val_accs, \
open(os.path.join(self.log_dir_val, 'loss'), 'ab') as fid_val_loss:
try:
tra_cnt = 0
epc_cnt = 0
tra_loss_lst = []
while True:
#_, summary, loss1, = sess.run([train_op, summary_op, loss], feed_dict = {mtrain:True})
#writer.add_summary(summary, global_step=glb_stp.eval())
_, tra_loss = sess.run([tra_opa, loss], feed_dict={mtra:True, mtst:False})
tra_loss_lst.append(tra_loss)
tra_cnt = tra_cnt + 1
if self.prt_ena and tra_cnt % self.itr_per_prt == 0:
tra_loss_lst = np.concatenate(tra_loss_lst, axis=0)
tra_loss = np.mean(tra_loss_lst, axis=0)
tra_loss_lst = []
print('(Iteration %d) losses: %s' % (tra_cnt, str(tra_loss)))
if tra_cnt % self.tra_num == 0:
epc_cnt = epc_cnt + 1
saver.save(sess, os.path.join(self.mdl_dir, self.mdl_nam), global_step=glb_stp)
fetches = [accs, loss]
feed_dict = {mtra:False, mtst:False}
tra_accs, tra_loss = self.concat(sess, fetches, feed_dict, self.val_num)
fetches = [accs, loss]
feed_dict = {mtra:False, mtst:True }
val_accs, val_loss = self.concat(sess, fetches, feed_dict, self.val_num)
tra_accs = self.mdl.accs_seg_py(tra_accs)
val_accs = self.mdl.accs_seg_py(val_accs)
tra_loss = np.mean(tra_loss, axis=0)
val_loss = np.mean(val_loss, axis=0)
pickle.dump(tra_accs, fid_tra_accs, pickle.HIGHEST_PROTOCOL)
pickle.dump(val_accs, fid_val_accs, pickle.HIGHEST_PROTOCOL)
pickle.dump(tra_loss, fid_tra_loss, pickle.HIGHEST_PROTOCOL)
pickle.dump(val_loss, fid_val_loss, pickle.HIGHEST_PROTOCOL)
if self.prt_ena:
print('(Epoch %d) lrn_rate: %f\n tra_accs: %s\n val_accs: %s\n tra_loss: %s\n val_loss: %s\n ' \
% (epc_cnt, lrn_rat.eval(), \
str(tra_accs), str(val_accs), str(tra_loss), str(val_loss)))
except tf.errors.OutOfRangeError:
print('Training is over!')
###############################For Segmentation################################
    def test(self):
        """Run segmentation inference over the test set and pickle the results.

        Restores the latest checkpoint from ``self.mdl_dir``, runs the graph
        built by ``_test_step`` until the input pipeline is exhausted, maps
        each predicted mask back to the original image geometry, and appends
        (image name, masks) pairs to the ``imgs`` / ``msks`` pickle streams
        under ``self.log_dir_tst``. Returns early if no checkpoint is found.
        """
        GV = GeneratorForVOC()
        tf.reset_default_graph()
        # Global step is created so the checkpoint's step variable can be restored.
        glb_stp = tf.Variable(0, trainable=False, name="global_step", dtype=tf.int64)
        msks, img_wdws, img_hgts, img_wdhs, img_nams = self._test_step()
        #gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction = 0.8)
        #config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, gpu_options=gpu_options)
        #config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, device_count={"CPU": 2}, \
        #                        inter_op_parallelism_threads=16, intra_op_parallelism_threads=16)
        config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True)
        with tf.Session(config=config) as sess:
            init_op = (tf.global_variables_initializer(), tf.local_variables_initializer())
            sess.run(init_op)
            saver = tf.train.Saver()
            ckpt = tf.train.get_checkpoint_state(self.mdl_dir)
            if ckpt and ckpt.model_checkpoint_path:
                #var = tf.trainable_variables() #tf.global_variables()
                #var_ave = tf.train.ExponentialMovingAverage(self.mv_ave_decay, glb_stp)
                #var = var_ave.variables_to_restore()
                #saver = tf.train.Saver(var)
                saver.restore(sess, ckpt.model_checkpoint_path)
                saver = tf.train.Saver()
            else:
                # Inference without trained weights is meaningless — bail out.
                print("No checkpoint file found!")
                return
            with open(os.path.join(self.log_dir_tst, "imgs"), 'wb') as fid_tst_imgs, \
                 open(os.path.join(self.log_dir_tst, "msks"), 'wb') as fid_tst_msks:
                fetches = [msks, img_wdws, img_hgts, img_wdhs, img_nams]
                feed_dict = {}
                # Drain the whole test pipeline (no itr_num limit).
                [msks_kep, img_wdws_kep, img_hgts_kep, img_wdhs_kep, img_nams_kep] = self.concat(sess, fetches, feed_dict)
                for i, img_nam_tmp in enumerate(img_nams_kep):
                    msks_tmp = msks_kep[i]
                    img_wdw_tmp = img_wdws_kep[i]
                    img_hgt_tmp = img_hgts_kep[i]
                    img_wdh_tmp = img_wdhs_kep[i]
                    # Image names come back from TF as bytes; decode to str.
                    img_nam_tmp = bytes.decode(img_nam_tmp)
                    # Undo the preprocessing crop/resize so masks line up with
                    # the original image size.
                    _, _, _, msks_tmp = \
                        GV.recover_instances(None, None, None, msks_tmp, img_wdw_tmp, img_hgt_tmp, img_wdh_tmp)
                    pickle.dump(img_nam_tmp, fid_tst_imgs, pickle.HIGHEST_PROTOCOL)
                    pickle.dump(msks_tmp, fid_tst_msks, pickle.HIGHEST_PROTOCOL)
###############################For Segmentation################################
    def display_detections(self):
        """Replay the pickled test results and show/save them per image.

        Iterates the ``imgs`` / ``msks`` pickle streams written by ``test()``,
        loads each source image from ``self.dat_dir_tst``, normalizes it to a
        3-channel RGB array and hands it to ``GV.display_instances``.
        Returns when either pickle stream is exhausted (EOFError).
        """
        GV = GeneratorForVOC(dat_dir=self.dat_dir_tst, tst_shw=self.tst_shw, tst_sav=self.tst_sav)
        with open(os.path.join(self.log_dir_tst, "imgs"), 'rb') as fid_tst_imgs, \
             open(os.path.join(self.log_dir_tst, "msks"), 'rb') as fid_tst_msks:
            while True:
                try:
                    img_nam = pickle.load(fid_tst_imgs)
                    msks = pickle.load(fid_tst_msks)
                    #print(msks.shape)
                    img_fil = os.path.join(self.dat_dir_tst, img_nam)
                    img = cv2.imread(img_fil)
                    #print(img.shape)
                    # cv2.imread returns None for missing/unreadable files.
                    if type(img) != np.ndarray:
                        print("Failed to find image %s" %(img_fil))
                        continue
                    img_hgt, img_wdh = img.shape[0], img.shape[1]
                    # Grayscale image: broadcast the single channel to 3 channels.
                    if img.size == img_hgt * img_wdh:
                        print ('Gray Image %s' %(img_fil))
                        img_zro = np.empty((img_hgt, img_wdh, 3), dtype=np.uint8)
                        img_zro[:, :, :] = img[:, :, np.newaxis]
                        img = img_zro
                    assert img.size == img_wdh * img_hgt * 3, '%s' % img_nam
                    # OpenCV loads BGR; flip the channel axis to get RGB.
                    img = img[:, :, ::-1]
                    GV.display_instances(img, None, None, None, None, msks, img_nam)
                except EOFError:
                    return
def show_loss_acc(self):
with open(os.path.join(LOG_PATH1, 'loss'), 'rb') as fid_train_loss, \
open(os.path.join(LOG_PATH1, 'mAP'), 'rb') as fid_train_mAP, \
open(os.path.join(LOG_PATH2, 'mAP'), 'rb') as fid_val_mAP:
loss_history = get_all_data(fid_train_loss)
train_acc_history = get_all_data(fid_train_mAP)
val_acc_history = get_all_data(fid_val_mAP)
plt.figure(1)
plt.subplot(2, 1, 1)
plt.title('Training loss')
plt.xlabel('Iteration')
plt.subplot(2, 1, 2)
plt.title('accuracy')
plt.xlabel('Epoch')
#plt.subplot(3, 1, 3)
#plt.title('Validation accuracy')
#plt.xlabel('Epoch')
plt.subplot(2, 1, 1)
plt.plot(loss_history, 'o')
plt.subplot(2, 1, 2)
plt.plot(train_acc_history, '-o', label='train_acc')
plt.plot(val_acc_history, '-o', label='val_acc')
for i in [1, 2]:
plt.subplot(2, 1, i)
plt.legend(loc='upper center', ncol=4)
plt.gcf().set_size_inches(15, 15)
plt.show()
|
import os
import cv2
import pickle
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from collections import defaultdict
from .load_weights import *
from .optim_utils import *
from .yale_utils.make_image import *
def get_data(fid):
    """Read one pickled object from *fid*.

    Returns (1, obj) on success, or (0, 0) once the stream is exhausted
    (EOFError).
    """
    try:
        obj = pickle.load(fid)
    except EOFError:
        return 0, 0
    return 1, obj
def get_all_data(fid):
    """Read every remaining pickled object in *fid* into a list."""
    items = []
    while True:
        sig, dat = get_data(fid)
        if sig == 0:
            return items
        items.append(dat)
class Solver(object):
    def __init__(self, mdl, **kwargs):
        """Configure the solver around a model object.

        Args:
            mdl: the model; must expose ``forward(...)`` and (for the
                segmentation path) ``accs_seg_py(...)``.
            **kwargs: optional settings, all with defaults:
                opm_cfg (dict): optimizer/learning-rate schedule config.
                gpu_lst (str): comma-separated GPU ids, e.g. '0,1'.
                bat_siz (int): per-device batch size.
                tra_num / val_num (int): iterations per training epoch /
                    per evaluation pass.
                epc_num (int): number of epochs for the input pipelines.
                min_que_tra / min_que_val (int): input-queue minimum sizes.
                prt_ena (bool), itr_per_prt (int): console logging controls.
                tst_num, tst_shw, tst_sav: test-time display/save options.
                mdl_nam, mdl_dir, log_dir, dat_dir (str): checkpoint name and
                    checkpoint/log/dataset directories.
                mov_ave_dca (float): moving-average decay.

        Raises:
            ValueError: if an unrecognized keyword argument remains.
        """
        self.mdl = mdl
        self.opm_cfg = kwargs.pop('opm_cfg', {})
        self.gpu_lst = kwargs.pop('gpu_lst', '0')
        self.gpu_num = len(self.gpu_lst.split(','))
        # With an empty GPU list fall back to CPU device/name templates;
        # gpu_num is then forced to 1 so the tower loop still runs once.
        self.mdl_dev = '/cpu:%d' if self.gpu_num == 0 else '/gpu:%d'
        self.MDL_DEV = 'CPU_%d' if self.gpu_num == 0 else 'GPU_%d'
        self.gpu_num = 1 if self.gpu_num == 0 else self.gpu_num
        self.bat_siz = kwargs.pop('bat_siz', 2)
        # Effective batch size across all devices.
        self.bat_siz_all = self.bat_siz * self.gpu_num
        self.tra_num = kwargs.pop('tra_num', 8000)
        self.val_num = kwargs.pop('val_num', 80)
        self.epc_num = kwargs.pop('epc_num', 10)
        self.min_que_tra = kwargs.pop('min_que_tra', 5000)
        self.min_que_val = kwargs.pop('min_que_val', 1000)
        self.prt_ena = kwargs.pop('prt_ena', True)
        self.itr_per_prt = kwargs.pop('itr_per_prt', 20)
        self.tst_num = kwargs.pop('tst_num', None)
        self.tst_shw = kwargs.pop('tst_shw', True)
        self.tst_sav = kwargs.pop('tst_sav', True)
        self.mdl_nam = kwargs.pop('mdl_nam', 'model.ckpt' )
        self.mdl_dir = kwargs.pop('mdl_dir', 'Mybase/Model' )
        self.log_dir = kwargs.pop('log_dir', 'Mybase/logdata' )
        self.dat_dir = kwargs.pop('dat_dir', 'Mybase/datasets')
        self.mov_ave_dca = kwargs.pop('mov_ave_dca', 0.99)
        # Derived dataset/log sub-directories (fixed layout).
        self.dat_dir_tra = self.dat_dir + '/train'
        self.dat_dir_val = self.dat_dir + '/val'
        self.dat_dir_tst = self.dat_dir + '/test'
        self.dat_dir_rst = self.dat_dir + '/result'
        self.log_dir_tra = self.log_dir + '/train'
        self.log_dir_val = self.log_dir + '/val'
        self.log_dir_tst = self.log_dir + '/test'
        # Restrict TF to the requested GPUs, in PCI bus order.
        os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'
        os.environ['CUDA_VISIBLE_DEVICES'] = self.gpu_lst
        if len(kwargs) > 0:
            extra = ', '.join('%s' % k for k in kwargs.keys())
            raise ValueError('Unrecognized arguments %s' % extra)
"""
###############################For CLASSIFY################################
def _train_step(self, mtra=None, mtst=None, glb_stp=None):
#将简单的运算放在CPU上,只有神经网络的训练过程放在GPU上
with tf.device('/cpu:0'):
self.mdl.mod_tra = True
GI_tra = GeneratorForImageNet(True, self.dat_dir_tra, self.bat_siz, self.epc_num, \
self.min_que_tra, self.gpu_lst, 32)
GI_val = GeneratorForImageNet(False, self.dat_dir_val, self.bat_siz, self.epc_num, \
self.min_que_val, self.gpu_lst, 1)
imgs_lst_tra, lbls_lst_tra = GI_tra.get_input()
imgs_lst_val, lbls_lst_val = GI_val.get_input()
#with tf.name_scope('input_image'):
# tf.summary.image('input', X, 10)
self.opm_cfg['decay_step'] = self.opm_cfg['decay_step'] * self.tra_num
tra_stp, lrn_rat = update_rule(self.opm_cfg, glb_stp)
grds_lst = []
loss_lst = []
accs_lst = []
for i in range(self.gpu_num):
with tf.device(self.mdl_dev % i):
with tf.name_scope(self.MDL_DEV % i) as scp:
imgs_tra = GI_tra.preprocessing1(imgs_lst_tra[i])
imgs_val = GI_val.preprocessing1(imgs_lst_val[i])
lbls_tra = lbls_lst_tra[i]
lbls_val = lbls_lst_val[i]
imgs = tf.cond(mtst, lambda: imgs_val, lambda: imgs_tra, strict=True)
lbls = tf.cond(mtst, lambda: lbls_val, lambda: lbls_tra, strict=True)
loss, accs = \
self.mdl.forward(imgs, lbls, mtra=mtra, scp=scp)
#在第一次声明变量之后,将控制变量重用的参数设置为True。这样可以让不同的GPU更新同一组参数
#注意tf.name_scope函数并不会影响tf.get_variable的命名空间
tf.get_variable_scope().reuse_variables()
#使用当前GPU计算所有变量的梯度
grds = tra_stp.compute_gradients(loss[0])
#print(grds)
grds_lst.append(grds)
loss_lst.append(loss)
accs_lst.append(accs)
'''
with tf.variable_scope('average', reuse = tf.AUTO_REUSE):
mov_ave = tf.train.ExponentialMovingAverage(self.mov_ave_dca, glb_stp)
mov_ave_op = mov_ave.apply(tf.trainable_variables())
tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, mov_ave_op)
'''
with tf.variable_scope('optimize', reuse = tf.AUTO_REUSE):
grds = average_gradients(grds_lst)
upd_opas = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(upd_opas):
tra_opa = tra_stp.apply_gradients(grds, global_step=glb_stp)
loss = tf.stack(loss_lst, axis=0)
accs = tf.stack(accs_lst, axis=0)
#tf.summary.scalar('loss', loss)
#tf.summary.scalar('acc', acc)
#for grad, var in grads:
# if grad is not None:
# tf.summary.histogram('gradients_on_average/%s' % var.op.name, grad)
#for var in tf.trainable_variables():
# tf.summary.histogram(var.op.name, var)
return tra_opa, lrn_rat, loss, accs
"""
###############################For Segmentation################################
    def _train_step(self, mtra=None, mtst=None, glb_stp=None):
        """Build the multi-GPU training graph for segmentation.

        Args:
            mtra: bool placeholder — True while training (e.g. BN/dropout mode).
            mtst: bool placeholder — True to feed the validation pipeline
                instead of the training one.
            glb_stp: int64 global-step variable used by the LR schedule and
                the optimizer's apply_gradients.

        Returns:
            (tra_opa, lrn_rat, loss, accs, msks): the training op, the
            learning-rate tensor, per-tower losses stacked along axis 0,
            accuracy statistics concatenated across towers, and msks
            (currently always None — mask output is disabled here).
        """
        # Keep the cheap ops (input pipelines, bookkeeping) on the CPU; only
        # the network itself runs on the GPU(s).
        with tf.device('/cpu:0'):
            self.mdl.mod_tra = True
            GV_tra = GeneratorForVOC(True, self.dat_dir_tra, self.bat_siz, self.epc_num, self.min_que_tra, self.gpu_lst, 2)
            GV_val = GeneratorForVOC(False, self.dat_dir_val, self.bat_siz, self.epc_num, self.min_que_val, self.gpu_lst, 1)
            imgs_lst_tra, gbxs_lst_tra, gmk_inss_lst_tra, gmk_sems_lst_tra, \
            gbx_nums_lst_tra, img_hgts_lst_tra, img_wdhs_lst_tra = GV_tra.get_input()
            imgs_lst_val, gbxs_lst_val, gmk_inss_lst_val, gmk_sems_lst_val, \
            gbx_nums_lst_val, img_hgts_lst_val, img_wdhs_lst_val = GV_val.get_input()
            #with tf.name_scope('input_image'):
            #    tf.summary.image('input', X, 10)
            # Convert the decay step from epochs to iterations.
            self.opm_cfg['decay_step'] = self.opm_cfg['decay_step'] * self.tra_num
            tra_stp, lrn_rat = update_rule(self.opm_cfg, glb_stp)
            loss_lst = []
            accs_lst = []
            #msks_lst = []
            grds_lst = []
            # One model tower per device; all towers share the same variables.
            for i in range(self.gpu_num):
                with tf.device(self.mdl_dev % i):
                    with tf.name_scope(self.MDL_DEV % i) as scp:
                        imgs_tra, _, _, gmks_tra, _, _ = \
                        GV_tra.preprocessing1(imgs_lst_tra[i], gbxs_lst_tra[i], gmk_inss_lst_tra[i], gmk_sems_lst_tra[i], \
                                              gbx_nums_lst_tra[i], img_hgts_lst_tra[i], img_wdhs_lst_tra[i])
                        imgs_val, _, _, gmks_val, _, _ = \
                        GV_val.preprocessing1(imgs_lst_val[i], gbxs_lst_val[i], gmk_inss_lst_val[i], gmk_sems_lst_val[i], \
                                              gbx_nums_lst_val[i], img_hgts_lst_val[i], img_wdhs_lst_val[i])
                        # Select train vs. val batch at run time via mtst.
                        imgs = tf.cond(mtst, lambda: imgs_val, lambda: imgs_tra, strict=True)
                        gmks = tf.cond(mtst, lambda: gmks_val, lambda: gmks_tra, strict=True)
                        loss, accs, msks = \
                            self.mdl.forward(imgs, gmks, mtra, scp)
                        # After the first tower declares the variables, enable reuse so
                        # every GPU updates the same parameter set.
                        # Note: tf.name_scope does not affect tf.get_variable's namespace.
                        tf.get_variable_scope().reuse_variables()
                        # Compute the gradients of all variables on the current GPU.
                        vars_lst = tf.trainable_variables()
                        #vars_lst= [v for v in vars_lst if 'generator/' in v.name]
                        grds = tra_stp.compute_gradients(loss[0], var_list=vars_lst)
                        #print(grds)
                        grds_lst.append(grds)
                        loss_lst.append(loss)
                        accs_lst.append(accs)
                        #msks_lst.append(msks)
            '''
            with tf.variable_scope('average', reuse = tf.AUTO_REUSE):
                mov_ave = tf.train.ExponentialMovingAverage(self.mov_ave_dca, glb_stp)
                mov_ave_op = mov_ave.apply(tf.trainable_variables())
                tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, mov_ave_op)
            '''
            # Run pending update ops (e.g. BN moving stats) before applying the
            # averaged cross-tower gradients.
            upd_opas = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
            with tf.control_dependencies(upd_opas):
                with tf.variable_scope('optimize', reuse = tf.AUTO_REUSE):
                    grds = average_gradients(grds_lst)
                    tra_opa = tra_stp.apply_gradients(grds, global_step=glb_stp)
            loss = tf.stack(loss_lst, axis=0)
            accs = tf.concat(accs_lst, axis=0)
            msks = None
            #msks= tf.concat(msks_lst, axis=0)
            #tf.summary.scalar('loss', loss)
            #tf.summary.scalar('acc', acc)
            #for grad, var in grads:
            #    if grad is not None:
            #        tf.summary.histogram('gradients_on_average/%s' % var.op.name, grad)
            #for var in tf.trainable_variables():
            #    tf.summary.histogram(var.op.name, var)
            return tra_opa, lrn_rat, loss, accs, msks
###############################For Segmentation################################
    def _test_step(self):
        """Build the multi-GPU inference graph for segmentation.

        Returns:
            (msks, img_wdws, img_hgts, img_wdhs, img_nams): predicted masks
            concatenated across towers, the preprocessing crop windows, the
            original image heights/widths, and the image-name tensors.
        """
        with tf.device("/cpu:0"):
            self.mdl.mod_tra = False
            # Inference mode is fixed: mtra is a constant False.
            mtra = tf.constant(False, dtype=tf.bool)
            GV = GeneratorForVOC(False, self.dat_dir_tst, self.bat_siz, self.epc_num, self.min_que_tra, self.gpu_lst, None)
            imgs_lst, gbxs_lst, gmk_inss_lst, gmk_sems_lst, gbx_nums_lst, img_hgts_lst, img_wdhs_lst, img_nams_lst = GV.get_input2()
            msks_lst = []
            img_wdws_lst = []
            # One tower per device, sharing variables with the first tower.
            for i in range(self.gpu_num):
                with tf.device(self.mdl_dev % i):
                    with tf.name_scope(self.MDL_DEV % i) as scp:
                        imgs, _, _, gmks, _, img_wdws = \
                        GV.preprocessing1(imgs_lst[i], gbxs_lst[i], gmk_inss_lst[i], gmk_sems_lst[i], \
                                          gbx_nums_lst[i], img_hgts_lst[i], img_wdhs_lst[i])
                        msks = self.mdl.forward(imgs, gmks, mtra, scp)
                        # After the first tower declares the variables, enable reuse so
                        # every GPU shares the same parameter set.
                        # Note: tf.name_scope does not affect tf.get_variable's namespace.
                        tf.get_variable_scope().reuse_variables()
                        msks_lst    .append(msks)
                        img_wdws_lst.append(img_wdws)
            msks = tf.concat(msks_lst, axis=0) #(N, H, W)
            img_wdws = tf.concat(img_wdws_lst, axis=0)
            img_hgts = tf.concat(img_hgts_lst, axis=0)
            img_wdhs = tf.concat(img_wdhs_lst, axis=0)
            img_nams = tf.concat(img_nams_lst, axis=0)
            return msks, img_wdws, img_hgts, img_wdhs, img_nams
def concat(self, sess=None, fetches=None, feed_dict=None, itr_num=None):
rsts_lst = [[] for _ in range(len(fetches))]
itr_cnt = 0
try:
while True:
rsts = sess.run(fetches, feed_dict=feed_dict)
for i, rst in enumerate(rsts):
rsts_lst[i].append(rst)
itr_cnt = itr_cnt + 1
if itr_num != None and itr_cnt >= itr_num:
break
except tf.errors.OutOfRangeError:
print('Have reached the end of the dataset!')
for i, rst in enumerate(rsts_lst):
rsts_lst[i] = np.concatenate(rst, axis=0)
return rsts_lst
def merge(self, rsts=None, rst_nums=None):
rst_imxs = []
rsts_lst = [[] for _ in range(len(rsts))]
for i, rst_num in enumerate(rst_nums): #batch
rst_imxs.extend([i]*rst_num)
for j, rst in enumerate(rsts): #tensors
rsts_lst[j].append(rst[i][:rst_num])
rst_imxs = np.asarray(rst_imxs, dtype=np.int32)
for i, rst in enumerate(rsts_lst):
rsts_lst[i] = np.concatenate(rst, axis=0)
return rsts_lst, rst_imxs
"""
#####################################For CLASSIFY#####################################
def train(self):
tf.reset_default_graph()
mtra = tf.placeholder(dtype=tf.bool, name='train')
mtst = tf.placeholder(dtype=tf.bool, name='test' )
glb_stp = tf.Variable(0, trainable=False, name='global_step', dtype=tf.int64)
tra_opa, lrn_rat, loss, accs = self._train_step(mtra, mtst, glb_stp)
#var = tf.trainable_variables() #tf.global_variables()
#var = [v for v in var if 'layers_module1_0/' in v.name or 'layers_module1_1/' in v.name]
#var = [v for v in var if 'average/' not in v.name and 'optimize/' not in v.name]
#var_ave = tf.train.ExponentialMovingAverage(self.mv_ave_decay, glb_stp)
#var = var_ave.variables_to_restore()
#saver = tf.train.Saver(var)
#tf.summary.scalar('loss', loss)
#summary_op = tf.summary.merge_all()
#summary_loss = tf.summary.merge(loss)
#writer = tf.summary.FileWriter(LOG_PATH, sess.graph, flush_secs=5) #tf.get_default_graph()
#gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction = 0.8)
#config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, gpu_options=gpu_options)
#config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, device_count={'CPU': 2}, \
# inter_op_parallelism_threads=16, intra_op_parallelism_threads=16)
config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True)
with tf.Session(config=config) as sess:
init_op = (tf.global_variables_initializer(), tf.local_variables_initializer())
sess.run(init_op)
#coord = tf.train.Coordinator()
#threads = tf.train.start_queue_runners(sess=sess, coord=coord)
ckpt = tf.train.get_checkpoint_state(self.mdl_dir)
saver = tf.train.Saver()
if ckpt and ckpt.model_checkpoint_path:
saver.restore(sess, ckpt.model_checkpoint_path)
with open(os.path.join(self.log_dir_tra, 'accs'), 'ab') as fid_tra_accs, \
open(os.path.join(self.log_dir_tra, 'loss'), 'ab') as fid_tra_loss, \
open(os.path.join(self.log_dir_val, 'accs'), 'ab') as fid_val_accs, \
open(os.path.join(self.log_dir_val, 'loss'), 'ab') as fid_val_loss:
try:
tra_cnt = 0
epc_cnt = 0
tra_loss_lst = []
while True:
#_, summary, loss1, = sess.run([train_op, summary_op, loss], feed_dict = {mtrain:True})
#writer.add_summary(summary, global_step=glb_stp.eval())
_, tra_loss = sess.run([tra_opa, loss], feed_dict={mtra:True, mtst:False})
tra_loss_lst.append(tra_loss)
tra_cnt = tra_cnt + 1
if self.prt_ena and tra_cnt % self.itr_per_prt == 0:
tra_loss_lst = np.concatenate(tra_loss_lst, axis=0)
tra_loss = np.mean(tra_loss_lst, axis=0)
tra_loss_lst = []
print('(Iteration %d) losses: %s' % (tra_cnt, str(tra_loss)))
if tra_cnt % self.tra_num == 0:
epc_cnt = epc_cnt + 1
saver.save(sess, os.path.join(self.mdl_dir, self.mdl_nam), global_step=glb_stp)
fetches = [accs, loss]
feed_dict = {mtra:False, mtst:False}
tra_accs, tra_loss = self.concat(sess, fetches, feed_dict, self.val_num)
fetches = [accs, loss]
feed_dict = {mtra:False, mtst:True }
val_accs, val_loss = self.concat(sess, fetches, feed_dict, self.val_num)
tra_accs = np.mean(tra_accs, axis=0)
val_accs = np.mean(val_accs, axis=0)
tra_loss = np.mean(tra_loss, axis=0)
val_loss = np.mean(val_loss, axis=0)
pickle.dump(tra_accs, fid_tra_accs, pickle.HIGHEST_PROTOCOL)
pickle.dump(val_accs, fid_val_accs, pickle.HIGHEST_PROTOCOL)
pickle.dump(tra_loss, fid_tra_loss, pickle.HIGHEST_PROTOCOL)
pickle.dump(val_loss, fid_val_loss, pickle.HIGHEST_PROTOCOL)
if self.prt_ena:
print('(Epoch %d) lrn_rate: %f \n tra_accs: %s \n val_accs: %s \n tra_loss: %s \n val_loss: %s' \
% (epc_cnt, lrn_rat.eval(), str(tra_accs), str(val_accs), str(tra_loss), str(val_loss)))
except tf.errors.OutOfRangeError:
print('Training is over!')
#coord.request_stop()
#coord.join(threads)
"""
#####################################For Segmentation##################################
    def train(self):
        """Run the segmentation training loop.

        Builds the multi-GPU graph via ``_train_step``, restores the latest
        checkpoint from ``self.mdl_dir`` when one exists, then trains until
        the input pipeline is exhausted. Every ``self.tra_num`` iterations it
        checkpoints the model, evaluates ``self.val_num`` batches on both the
        train and val pipelines, and appends the accuracy/loss results to the
        pickle logs under ``self.log_dir_tra`` / ``self.log_dir_val``.
        """
        tf.reset_default_graph()
        mtra = tf.placeholder(dtype=tf.bool, name='train')
        mtst = tf.placeholder(dtype=tf.bool, name='test' )
        glb_stp = tf.Variable(0, trainable=False, name='global_step', dtype=tf.int64)
        tra_opa, lrn_rat, loss, accs, msks = self._train_step(mtra, mtst, glb_stp)
        #with tf.device(self.mdl_dev % 0):
        #tf.summary.scalar('loss', loss)
        #summary_op = tf.summary.merge_all()
        #summary_loss = tf.summary.merge(loss)
        #writer = tf.summary.FileWriter(LOG_PATH, sess.graph, flush_secs=5) #tf.get_default_graph()
        #gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction = 0.8)
        #config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, gpu_options=gpu_options)
        #config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, device_count={'CPU': 2}, \
        #                        inter_op_parallelism_threads=16, intra_op_parallelism_threads=16)
        config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True)
        with tf.Session(config=config) as sess:
            init_op = (tf.global_variables_initializer(), tf.local_variables_initializer())
            sess.run(init_op)
            saver = tf.train.Saver()
            ckpt = tf.train.get_checkpoint_state(self.mdl_dir)
            # Resume from the latest checkpoint when one is available.
            if ckpt and ckpt.model_checkpoint_path:
                var = tf.global_variables() #tf.global_variables()tf.trainable_variables()
                #var = [v for v in var if 'average/' not in v.name and 'optimize/' not in v.name]
                #var = [v for v in var if 'group_block1_0_0' in v.name or 'group_block1_0_1' in v.name \
                #       or 'group_block1_0_2' in v.name or 'group_block1_0_3' in v.name]
                #var_ave= tf.train.ExponentialMovingAverage(self.mv_ave_decay, glb_stp)
                #var = var_ave.variables_to_restore()
                saver = tf.train.Saver(var)
                '''
                mydict = load_weights()
                mykeys = mydict.keys()
                for i, v in enumerate(var):
                    if v.name in mykeys:
                        sess.run(tf.assign(v, mydict[v.name], validate_shape=True, use_locking=True))
                    else:
                        print(v.name)
                saver.save(sess, os.path.join(self.mdl_dir, self.mdl_nam), global_step=glb_stp)
                return
                '''
                saver.restore(sess, ckpt.model_checkpoint_path)
                # Fresh Saver afterwards so future saves cover all variables.
                saver = tf.train.Saver()
            # Logs are opened in append mode so restarts extend the history.
            with open(os.path.join(self.log_dir_tra, 'accs'), 'ab') as fid_tra_accs, \
                 open(os.path.join(self.log_dir_tra, 'loss'), 'ab') as fid_tra_loss, \
                 open(os.path.join(self.log_dir_val, 'accs'), 'ab') as fid_val_accs, \
                 open(os.path.join(self.log_dir_val, 'loss'), 'ab') as fid_val_loss:
                try:
                    tra_cnt = 0
                    epc_cnt = 0
                    tra_loss_lst = []
                    # The input pipeline ends the loop by raising OutOfRangeError.
                    while True:
                        #_, summary, loss1, = sess.run([train_op, summary_op, loss], feed_dict = {mtrain:True})
                        #writer.add_summary(summary, global_step=glb_stp.eval())
                        _, tra_loss = sess.run([tra_opa, loss], feed_dict={mtra:True, mtst:False})
                        tra_loss_lst.append(tra_loss)
                        tra_cnt = tra_cnt + 1
                        # Periodic console report of the mean loss since last print.
                        if self.prt_ena and tra_cnt % self.itr_per_prt == 0:
                            tra_loss_lst = np.concatenate(tra_loss_lst, axis=0)
                            tra_loss = np.mean(tra_loss_lst, axis=0)
                            tra_loss_lst = []
                            print('(Iteration %d) losses: %s' % (tra_cnt, str(tra_loss)))
                        # End of an epoch: checkpoint, evaluate, and log.
                        if tra_cnt % self.tra_num == 0:
                            epc_cnt = epc_cnt + 1
                            saver.save(sess, os.path.join(self.mdl_dir, self.mdl_nam), global_step=glb_stp)
                            fetches = [accs, loss]
                            feed_dict = {mtra:False, mtst:False}
                            tra_accs, tra_loss = self.concat(sess, fetches, feed_dict, self.val_num)
                            fetches = [accs, loss]
                            feed_dict = {mtra:False, mtst:True }
                            val_accs, val_loss = self.concat(sess, fetches, feed_dict, self.val_num)
                            # Reduce raw per-batch stats to segmentation accuracies.
                            tra_accs = self.mdl.accs_seg_py(tra_accs)
                            val_accs = self.mdl.accs_seg_py(val_accs)
                            tra_loss = np.mean(tra_loss, axis=0)
                            val_loss = np.mean(val_loss, axis=0)
                            pickle.dump(tra_accs, fid_tra_accs, pickle.HIGHEST_PROTOCOL)
                            pickle.dump(val_accs, fid_val_accs, pickle.HIGHEST_PROTOCOL)
                            pickle.dump(tra_loss, fid_tra_loss, pickle.HIGHEST_PROTOCOL)
                            pickle.dump(val_loss, fid_val_loss, pickle.HIGHEST_PROTOCOL)
                            if self.prt_ena:
                                print('(Epoch %d) lrn_rate: %f\n tra_accs: %s\n val_accs: %s\n tra_loss: %s\n val_loss: %s\n ' \
                                      % (epc_cnt, lrn_rat.eval(), \
                                         str(tra_accs), str(val_accs), str(tra_loss), str(val_loss)))
                except tf.errors.OutOfRangeError:
                    print('Training is over!')
###############################For Segmentation################################
    def test(self):
        """Run segmentation inference over the test set and pickle the results.

        Restores the latest checkpoint from ``self.mdl_dir``, runs the graph
        built by ``_test_step`` until the input pipeline is exhausted, maps
        each predicted mask back to the original image geometry, and appends
        (image name, masks) pairs to the ``imgs`` / ``msks`` pickle streams
        under ``self.log_dir_tst``. Returns early if no checkpoint is found.
        """
        GV = GeneratorForVOC()
        tf.reset_default_graph()
        # Global step is created so the checkpoint's step variable can be restored.
        glb_stp = tf.Variable(0, trainable=False, name="global_step", dtype=tf.int64)
        msks, img_wdws, img_hgts, img_wdhs, img_nams = self._test_step()
        #gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction = 0.8)
        #config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, gpu_options=gpu_options)
        #config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, device_count={"CPU": 2}, \
        #                        inter_op_parallelism_threads=16, intra_op_parallelism_threads=16)
        config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True)
        with tf.Session(config=config) as sess:
            init_op = (tf.global_variables_initializer(), tf.local_variables_initializer())
            sess.run(init_op)
            saver = tf.train.Saver()
            ckpt = tf.train.get_checkpoint_state(self.mdl_dir)
            if ckpt and ckpt.model_checkpoint_path:
                #var = tf.trainable_variables() #tf.global_variables()
                #var_ave = tf.train.ExponentialMovingAverage(self.mv_ave_decay, glb_stp)
                #var = var_ave.variables_to_restore()
                #saver = tf.train.Saver(var)
                saver.restore(sess, ckpt.model_checkpoint_path)
                saver = tf.train.Saver()
            else:
                # Inference without trained weights is meaningless — bail out.
                print("No checkpoint file found!")
                return
            with open(os.path.join(self.log_dir_tst, "imgs"), 'wb') as fid_tst_imgs, \
                 open(os.path.join(self.log_dir_tst, "msks"), 'wb') as fid_tst_msks:
                fetches = [msks, img_wdws, img_hgts, img_wdhs, img_nams]
                feed_dict = {}
                # Drain the whole test pipeline (no itr_num limit).
                [msks_kep, img_wdws_kep, img_hgts_kep, img_wdhs_kep, img_nams_kep] = self.concat(sess, fetches, feed_dict)
                for i, img_nam_tmp in enumerate(img_nams_kep):
                    msks_tmp = msks_kep[i]
                    img_wdw_tmp = img_wdws_kep[i]
                    img_hgt_tmp = img_hgts_kep[i]
                    img_wdh_tmp = img_wdhs_kep[i]
                    # Image names come back from TF as bytes; decode to str.
                    img_nam_tmp = bytes.decode(img_nam_tmp)
                    # Undo the preprocessing crop/resize so masks line up with
                    # the original image size.
                    _, _, _, msks_tmp = \
                        GV.recover_instances(None, None, None, msks_tmp, img_wdw_tmp, img_hgt_tmp, img_wdh_tmp)
                    pickle.dump(img_nam_tmp, fid_tst_imgs, pickle.HIGHEST_PROTOCOL)
                    pickle.dump(msks_tmp, fid_tst_msks, pickle.HIGHEST_PROTOCOL)
###############################For Segmentation################################
    def display_detections(self):
        """Replay the pickled test results and show/save them per image.

        Iterates the ``imgs`` / ``msks`` pickle streams written by ``test()``,
        loads each source image from ``self.dat_dir_tst``, normalizes it to a
        3-channel RGB array and hands it to ``GV.display_instances``.
        Returns when either pickle stream is exhausted (EOFError).
        """
        GV = GeneratorForVOC(dat_dir=self.dat_dir_tst, tst_shw=self.tst_shw, tst_sav=self.tst_sav)
        with open(os.path.join(self.log_dir_tst, "imgs"), 'rb') as fid_tst_imgs, \
             open(os.path.join(self.log_dir_tst, "msks"), 'rb') as fid_tst_msks:
            while True:
                try:
                    img_nam = pickle.load(fid_tst_imgs)
                    msks = pickle.load(fid_tst_msks)
                    #print(msks.shape)
                    img_fil = os.path.join(self.dat_dir_tst, img_nam)
                    img = cv2.imread(img_fil)
                    #print(img.shape)
                    # cv2.imread returns None for missing/unreadable files.
                    if type(img) != np.ndarray:
                        print("Failed to find image %s" %(img_fil))
                        continue
                    img_hgt, img_wdh = img.shape[0], img.shape[1]
                    # Grayscale image: broadcast the single channel to 3 channels.
                    if img.size == img_hgt * img_wdh:
                        print ('Gray Image %s' %(img_fil))
                        img_zro = np.empty((img_hgt, img_wdh, 3), dtype=np.uint8)
                        img_zro[:, :, :] = img[:, :, np.newaxis]
                        img = img_zro
                    assert img.size == img_wdh * img_hgt * 3, '%s' % img_nam
                    # OpenCV loads BGR; flip the channel axis to get RGB.
                    img = img[:, :, ::-1]
                    GV.display_instances(img, None, None, None, None, msks, img_nam)
                except EOFError:
                    return
def show_loss_acc(self):
with open(os.path.join(LOG_PATH1, 'loss'), 'rb') as fid_train_loss, \
open(os.path.join(LOG_PATH1, 'mAP'), 'rb') as fid_train_mAP, \
open(os.path.join(LOG_PATH2, 'mAP'), 'rb') as fid_val_mAP:
loss_history = get_all_data(fid_train_loss)
train_acc_history = get_all_data(fid_train_mAP)
val_acc_history = get_all_data(fid_val_mAP)
plt.figure(1)
plt.subplot(2, 1, 1)
plt.title('Training loss')
plt.xlabel('Iteration')
plt.subplot(2, 1, 2)
plt.title('accuracy')
plt.xlabel('Epoch')
#plt.subplot(3, 1, 3)
#plt.title('Validation accuracy')
#plt.xlabel('Epoch')
plt.subplot(2, 1, 1)
plt.plot(loss_history, 'o')
plt.subplot(2, 1, 2)
plt.plot(train_acc_history, '-o', label='train_acc')
plt.plot(val_acc_history, '-o', label='val_acc')
for i in [1, 2]:
plt.subplot(2, 1, i)
plt.legend(loc='upper center', ncol=4)
plt.gcf().set_size_inches(15, 15)
plt.show()
|
en
| 0.264975
|
###############################For CLASSIFY################################ def _train_step(self, mtra=None, mtst=None, glb_stp=None): #将简单的运算放在CPU上,只有神经网络的训练过程放在GPU上 with tf.device('/cpu:0'): self.mdl.mod_tra = True GI_tra = GeneratorForImageNet(True, self.dat_dir_tra, self.bat_siz, self.epc_num, \ self.min_que_tra, self.gpu_lst, 32) GI_val = GeneratorForImageNet(False, self.dat_dir_val, self.bat_siz, self.epc_num, \ self.min_que_val, self.gpu_lst, 1) imgs_lst_tra, lbls_lst_tra = GI_tra.get_input() imgs_lst_val, lbls_lst_val = GI_val.get_input() #with tf.name_scope('input_image'): # tf.summary.image('input', X, 10) self.opm_cfg['decay_step'] = self.opm_cfg['decay_step'] * self.tra_num tra_stp, lrn_rat = update_rule(self.opm_cfg, glb_stp) grds_lst = [] loss_lst = [] accs_lst = [] for i in range(self.gpu_num): with tf.device(self.mdl_dev % i): with tf.name_scope(self.MDL_DEV % i) as scp: imgs_tra = GI_tra.preprocessing1(imgs_lst_tra[i]) imgs_val = GI_val.preprocessing1(imgs_lst_val[i]) lbls_tra = lbls_lst_tra[i] lbls_val = lbls_lst_val[i] imgs = tf.cond(mtst, lambda: imgs_val, lambda: imgs_tra, strict=True) lbls = tf.cond(mtst, lambda: lbls_val, lambda: lbls_tra, strict=True) loss, accs = \ self.mdl.forward(imgs, lbls, mtra=mtra, scp=scp) #在第一次声明变量之后,将控制变量重用的参数设置为True。这样可以让不同的GPU更新同一组参数 #注意tf.name_scope函数并不会影响tf.get_variable的命名空间 tf.get_variable_scope().reuse_variables() #使用当前GPU计算所有变量的梯度 grds = tra_stp.compute_gradients(loss[0]) #print(grds) grds_lst.append(grds) loss_lst.append(loss) accs_lst.append(accs) ''' with tf.variable_scope('average', reuse = tf.AUTO_REUSE): mov_ave = tf.train.ExponentialMovingAverage(self.mov_ave_dca, glb_stp) mov_ave_op = mov_ave.apply(tf.trainable_variables()) tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, mov_ave_op) ''' with tf.variable_scope('optimize', reuse = tf.AUTO_REUSE): grds = average_gradients(grds_lst) upd_opas = tf.get_collection(tf.GraphKeys.UPDATE_OPS) with tf.control_dependencies(upd_opas): tra_opa = 
tra_stp.apply_gradients(grds, global_step=glb_stp) loss = tf.stack(loss_lst, axis=0) accs = tf.stack(accs_lst, axis=0) #tf.summary.scalar('loss', loss) #tf.summary.scalar('acc', acc) #for grad, var in grads: # if grad is not None: # tf.summary.histogram('gradients_on_average/%s' % var.op.name, grad) #for var in tf.trainable_variables(): # tf.summary.histogram(var.op.name, var) return tra_opa, lrn_rat, loss, accs ###############################For Segmentation################################ #将简单的运算放在CPU上,只有神经网络的训练过程放在GPU上 #with tf.name_scope('input_image'): # tf.summary.image('input', X, 10) #msks_lst = [] #在第一次声明变量之后,将控制变量重用的参数设置为True。这样可以让不同的GPU更新同一组参数 #注意tf.name_scope函数并不会影响tf.get_variable的命名空间 #使用当前GPU计算所有变量的梯度 #vars_lst= [v for v in vars_lst if 'generator/' in v.name] #print(grds) #msks_lst.append(msks) with tf.variable_scope('average', reuse = tf.AUTO_REUSE): mov_ave = tf.train.ExponentialMovingAverage(self.mov_ave_dca, glb_stp) mov_ave_op = mov_ave.apply(tf.trainable_variables()) tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, mov_ave_op) #msks= tf.concat(msks_lst, axis=0) #tf.summary.scalar('loss', loss) #tf.summary.scalar('acc', acc) #for grad, var in grads: # if grad is not None: # tf.summary.histogram('gradients_on_average/%s' % var.op.name, grad) #for var in tf.trainable_variables(): # tf.summary.histogram(var.op.name, var) ###############################For Segmentation################################ #在第一次声明变量之后,将控制变量重用的参数设置为True。这样可以让不同的GPU更新同一组参数 #注意tf.name_scope函数并不会影响tf.get_variable的命名空间 #(N, H, W) #batch #tensors #####################################For CLASSIFY##################################### def train(self): tf.reset_default_graph() mtra = tf.placeholder(dtype=tf.bool, name='train') mtst = tf.placeholder(dtype=tf.bool, name='test' ) glb_stp = tf.Variable(0, trainable=False, name='global_step', dtype=tf.int64) tra_opa, lrn_rat, loss, accs = self._train_step(mtra, mtst, glb_stp) #var = tf.trainable_variables() #tf.global_variables() #var = [v 
for v in var if 'layers_module1_0/' in v.name or 'layers_module1_1/' in v.name] #var = [v for v in var if 'average/' not in v.name and 'optimize/' not in v.name] #var_ave = tf.train.ExponentialMovingAverage(self.mv_ave_decay, glb_stp) #var = var_ave.variables_to_restore() #saver = tf.train.Saver(var) #tf.summary.scalar('loss', loss) #summary_op = tf.summary.merge_all() #summary_loss = tf.summary.merge(loss) #writer = tf.summary.FileWriter(LOG_PATH, sess.graph, flush_secs=5) #tf.get_default_graph() #gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction = 0.8) #config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, gpu_options=gpu_options) #config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, device_count={'CPU': 2}, \ # inter_op_parallelism_threads=16, intra_op_parallelism_threads=16) config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True) with tf.Session(config=config) as sess: init_op = (tf.global_variables_initializer(), tf.local_variables_initializer()) sess.run(init_op) #coord = tf.train.Coordinator() #threads = tf.train.start_queue_runners(sess=sess, coord=coord) ckpt = tf.train.get_checkpoint_state(self.mdl_dir) saver = tf.train.Saver() if ckpt and ckpt.model_checkpoint_path: saver.restore(sess, ckpt.model_checkpoint_path) with open(os.path.join(self.log_dir_tra, 'accs'), 'ab') as fid_tra_accs, \ open(os.path.join(self.log_dir_tra, 'loss'), 'ab') as fid_tra_loss, \ open(os.path.join(self.log_dir_val, 'accs'), 'ab') as fid_val_accs, \ open(os.path.join(self.log_dir_val, 'loss'), 'ab') as fid_val_loss: try: tra_cnt = 0 epc_cnt = 0 tra_loss_lst = [] while True: #_, summary, loss1, = sess.run([train_op, summary_op, loss], feed_dict = {mtrain:True}) #writer.add_summary(summary, global_step=glb_stp.eval()) _, tra_loss = sess.run([tra_opa, loss], feed_dict={mtra:True, mtst:False}) tra_loss_lst.append(tra_loss) tra_cnt = tra_cnt + 1 if self.prt_ena and tra_cnt % self.itr_per_prt == 0: 
tra_loss_lst = np.concatenate(tra_loss_lst, axis=0) tra_loss = np.mean(tra_loss_lst, axis=0) tra_loss_lst = [] print('(Iteration %d) losses: %s' % (tra_cnt, str(tra_loss))) if tra_cnt % self.tra_num == 0: epc_cnt = epc_cnt + 1 saver.save(sess, os.path.join(self.mdl_dir, self.mdl_nam), global_step=glb_stp) fetches = [accs, loss] feed_dict = {mtra:False, mtst:False} tra_accs, tra_loss = self.concat(sess, fetches, feed_dict, self.val_num) fetches = [accs, loss] feed_dict = {mtra:False, mtst:True } val_accs, val_loss = self.concat(sess, fetches, feed_dict, self.val_num) tra_accs = np.mean(tra_accs, axis=0) val_accs = np.mean(val_accs, axis=0) tra_loss = np.mean(tra_loss, axis=0) val_loss = np.mean(val_loss, axis=0) pickle.dump(tra_accs, fid_tra_accs, pickle.HIGHEST_PROTOCOL) pickle.dump(val_accs, fid_val_accs, pickle.HIGHEST_PROTOCOL) pickle.dump(tra_loss, fid_tra_loss, pickle.HIGHEST_PROTOCOL) pickle.dump(val_loss, fid_val_loss, pickle.HIGHEST_PROTOCOL) if self.prt_ena: print('(Epoch %d) lrn_rate: %f \n tra_accs: %s \n val_accs: %s \n tra_loss: %s \n val_loss: %s' \ % (epc_cnt, lrn_rat.eval(), str(tra_accs), str(val_accs), str(tra_loss), str(val_loss))) except tf.errors.OutOfRangeError: print('Training is over!') #coord.request_stop() #coord.join(threads) #####################################For Segmentation################################## #with tf.device(self.mdl_dev % 0): #tf.summary.scalar('loss', loss) #summary_op = tf.summary.merge_all() #summary_loss = tf.summary.merge(loss) #writer = tf.summary.FileWriter(LOG_PATH, sess.graph, flush_secs=5) #tf.get_default_graph() #gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction = 0.8) #config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, gpu_options=gpu_options) #config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, device_count={'CPU': 2}, \ # inter_op_parallelism_threads=16, intra_op_parallelism_threads=16) #tf.global_variables()tf.trainable_variables() #var = 
[v for v in var if 'average/' not in v.name and 'optimize/' not in v.name] #var = [v for v in var if 'group_block1_0_0' in v.name or 'group_block1_0_1' in v.name \ # or 'group_block1_0_2' in v.name or 'group_block1_0_3' in v.name] #var_ave= tf.train.ExponentialMovingAverage(self.mv_ave_decay, glb_stp) #var = var_ave.variables_to_restore() mydict = load_weights() mykeys = mydict.keys() for i, v in enumerate(var): if v.name in mykeys: sess.run(tf.assign(v, mydict[v.name], validate_shape=True, use_locking=True)) else: print(v.name) saver.save(sess, os.path.join(self.mdl_dir, self.mdl_nam), global_step=glb_stp) return #_, summary, loss1, = sess.run([train_op, summary_op, loss], feed_dict = {mtrain:True}) #writer.add_summary(summary, global_step=glb_stp.eval()) ###############################For Segmentation################################ #gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction = 0.8) #config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, gpu_options=gpu_options) #config = tf.ConfigProto(log_device_placement=False, allow_soft_placement=True, device_count={"CPU": 2}, \ # inter_op_parallelism_threads=16, intra_op_parallelism_threads=16) #var = tf.trainable_variables() #tf.global_variables() #var_ave = tf.train.ExponentialMovingAverage(self.mv_ave_decay, glb_stp) #var = var_ave.variables_to_restore() #saver = tf.train.Saver(var) ###############################For Segmentation################################ #print(msks.shape) #print(img.shape) #plt.subplot(3, 1, 3) #plt.title('Validation accuracy') #plt.xlabel('Epoch')
| 2.377949
| 2
|
tests/test_layers_convolution.py
|
TomLouisKeller/tensorlayer
| 1
|
6626826
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import unittest
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
import tensorlayer as tl
from tests.utils import CustomTestCase
class Layer_Convolution_1D_Test(CustomTestCase):
    """Shape/parameter-count checks for TensorLayer 1-D convolution layers.

    Builds a small chain of 1-D conv layers once per class and asserts the
    cumulative layer count, parameter count, and output shape after each
    stage.  The literal numbers encode TensorLayer 1.x behavior — update
    them only if the library version changes.
    """

    @classmethod
    def setUpClass(cls):
        # (batch, steps, channels) placeholder; batch dimension left dynamic.
        x = tf.placeholder(tf.float32, (None, 100, 1))
        cls.input_layer = tl.layers.InputLayer(x, name='input_layer')
        print("input:", cls.input_layer.all_layers)
        # Raw Conv1dLayer: kernel 5, 1 input channel, 32 filters, stride 2.
        cls.n1 = tl.layers.Conv1dLayer(cls.input_layer, shape=(5, 1, 32), stride=2)
        # Simplified Conv1d wrapper stacked on n1.
        cls.n2 = tl.layers.Conv1d(cls.n1, n_filter=32, filter_size=5, stride=2)
        # Depthwise+pointwise separable conv; VALID padding shrinks the length.
        cls.n3 = tl.layers.SeparableConv1d(
            cls.n2, n_filter=32, filter_size=3, strides=1, padding='VALID', act=tf.nn.relu, name='separable_1d'
        )

    @classmethod
    def tearDownClass(cls):
        # Clear the default graph so later test classes start from scratch.
        tf.reset_default_graph()

    def test_layer_n1(self):
        # 5*1*32 weights + 32 biases = 192 params; stride 2 halves 100 -> 50.
        self.assertEqual(len(self.n1.all_layers), 2)
        self.assertEqual(len(self.n1.all_params), 2)
        self.assertEqual(self.n1.count_params(), 192)
        self.assertEqual(self.n1.outputs.get_shape().as_list()[1:], [50, 32])

    def test_layer_n2(self):
        self.assertEqual(len(self.n2.all_layers), 3)
        self.assertEqual(len(self.n2.all_params), 4)
        self.assertEqual(self.n2.count_params(), 5344)
        self.assertEqual(self.n2.outputs.get_shape().as_list()[1:], [25, 32])

    def test_layer_n3(self):
        self.assertEqual(len(self.n3.all_layers), 4)
        self.assertEqual(len(self.n3.all_params), 7)
        self.assertEqual(self.n3.count_params(), 6496)
        self.assertEqual(self.n3.outputs.get_shape().as_list()[1:], [23, 32])
class Layer_Convolution_2D_Test(CustomTestCase):
    """Shape/parameter-count checks for TensorLayer 2-D convolution layers.

    Builds one long chain (conv, deconv, depthwise, grouped, separable,
    ternary, atrous-transposed, quantized) and asserts cumulative layer
    counts, parameter counts, and output shapes after every stage.
    """

    @classmethod
    def setUpClass(cls):
        # (batch, height, width, channels) placeholder with dynamic batch.
        x = tf.placeholder(tf.float32, (None, 100, 100, 3))
        cls.input_layer = tl.layers.InputLayer(x, name='input_layer')
        print(cls.input_layer.all_layers)
        # Raw Conv2dLayer with explicit NHWC strides and initializers.
        cls.n1 = tl.layers.Conv2dLayer(
            cls.input_layer, act=tf.nn.relu, shape=(5, 5, 3, 32), strides=(1, 2, 2, 1), padding='SAME',
            W_init=tf.truncated_normal_initializer(stddev=5e-2), b_init=tf.constant_initializer(value=0.0),
            name='conv2dlayer'
        )
        print("#################################")
        print("input:", cls.input_layer.all_layers)
        print("input:", cls.n1.all_layers)
        cls.n2 = tl.layers.Conv2d(cls.n1, n_filter=32, filter_size=(3, 3), strides=(2, 2), act=None, name='conv2d')
        # b_init=None drops the bias, so only one new parameter tensor is added.
        cls.n3 = tl.layers.Conv2d(
            cls.n2, n_filter=32, filter_size=(3, 3), strides=(2, 2), act=tf.nn.relu, b_init=None, name='conv2d_no_bias'
        )
        # Transposed conv; output_shape pins the batch dimension to 100 here.
        cls.n4 = tl.layers.DeConv2dLayer(
            cls.n3, shape=(5, 5, 32, 32), output_shape=(100, 200, 200, 32), strides=(1, 2, 2, 1), name='deconv2dlayer'
        )
        cls.n5 = tl.layers.DeConv2d(cls.n4, n_filter=32, filter_size=(3, 3), strides=(2, 2), name='DeConv2d')
        # depth_multiplier=2 doubles the channel count (32 -> 64).
        cls.n6 = tl.layers.DepthwiseConv2d(
            cls.n5, shape=(3, 3), strides=(2, 2), act=tf.nn.relu, depth_multiplier=2, name='depthwise'
        )
        cls.n7 = tl.layers.Conv2d(
            cls.n6, n_filter=32, filter_size=(3, 3), strides=(2, 2), act=tf.nn.relu, name='conv2d2'
        )
        cls.n8 = tl.layers.GroupConv2d(cls.n7, n_filter=32, filter_size=(3, 3), strides=(2, 2), name='group')
        cls.n9 = tl.layers.SeparableConv2d(
            cls.n8, n_filter=32, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, name='seperable2d1'
        )
        cls.n10 = tl.layers.TernaryConv2d(cls.n9, 64, (5, 5), (1, 1), act=tf.nn.relu, padding='SAME', name='cnn2')
        cls.n11 = tl.layers.AtrousDeConv2dLayer(
            cls.n10, shape=(3, 3, 32, 64), output_shape=(100, 96, 96, 32), rate=2, act=tf.nn.relu, name='atroustrans1'
        )
        cls.n12 = tl.layers.QuanConv2d(cls.n11, 64, (5, 5), (1, 1), act=tf.nn.relu, padding='SAME', name='quancnn')

    @classmethod
    def tearDownClass(cls):
        # Clear the default graph so later test classes start from scratch.
        tf.reset_default_graph()

    def test_layer_n1(self):
        # 5*5*3*32 weights + 32 biases = 2432; stride 2 halves 100 -> 50.
        self.assertEqual(len(self.n1.all_layers), 2)
        self.assertEqual(len(self.n1.all_params), 2)
        self.assertEqual(self.n1.count_params(), 2432)
        self.assertEqual(self.n1.outputs.get_shape().as_list()[1:], [50, 50, 32])

    def test_layer_n2(self):
        self.assertEqual(len(self.n2.all_layers), 3)
        self.assertEqual(len(self.n2.all_params), 4)
        self.assertEqual(self.n2.count_params(), 11680)
        self.assertEqual(self.n2.outputs.get_shape().as_list()[1:], [25, 25, 32])

    def test_layer_n3(self):
        # Only 5 params total: the bias-free conv adds a single weight tensor.
        self.assertEqual(len(self.n3.all_layers), 4)
        self.assertEqual(len(self.n3.all_params), 5)
        self.assertEqual(self.n3.count_params(), 20896)
        self.assertEqual(self.n3.outputs.get_shape().as_list()[1:], [13, 13, 32])

    def test_layer_n4(self):
        self.assertEqual(len(self.n4.all_layers), 5)
        self.assertEqual(len(self.n4.all_params), 7)
        self.assertEqual(self.n4.count_params(), 46528)
        self.assertEqual(self.n4.outputs.get_shape().as_list()[1:], [200, 200, 32])

    def test_layer_n5(self):
        self.assertEqual(len(self.n5.all_layers), 6)
        self.assertEqual(len(self.n5.all_params), 9)
        self.assertEqual(self.n5.count_params(), 55776)
        self.assertEqual(self.n5.outputs.get_shape().as_list()[1:], [400, 400, 32])

    def test_layer_n6(self):
        # Depthwise with multiplier 2: channels double to 64.
        self.assertEqual(len(self.n6.all_layers), 7)
        self.assertEqual(len(self.n6.all_params), 11)
        self.assertEqual(self.n6.count_params(), 56416)
        self.assertEqual(self.n6.outputs.get_shape().as_list()[1:], [200, 200, 64])

    def test_layer_n7(self):
        self.assertEqual(len(self.n7.all_layers), 8)
        self.assertEqual(len(self.n7.all_params), 13)
        self.assertEqual(self.n7.count_params(), 74880)
        self.assertEqual(self.n7.outputs.get_shape().as_list()[1:], [100, 100, 32])

    def test_layer_n8(self):
        self.assertEqual(len(self.n8.all_layers), 9)
        self.assertEqual(len(self.n8.all_params), 15)
        self.assertEqual(self.n8.count_params(), 79520)
        self.assertEqual(self.n8.outputs.get_shape().as_list()[1:], [50, 50, 32])

    def test_layer_n9(self):
        self.assertEqual(len(self.n9.all_layers), 10)
        self.assertEqual(len(self.n9.all_params), 18)
        self.assertEqual(self.n9.count_params(), 80864)
        self.assertEqual(self.n9.outputs.get_shape().as_list()[1:], [48, 48, 32])

    def test_layer_n10(self):
        self.assertEqual(len(self.n10.all_layers), 11)
        self.assertEqual(len(self.n10.all_params), 20)
        self.assertEqual(self.n10.count_params(), 132128)
        self.assertEqual(self.n10.outputs.get_shape().as_list()[1:], [48, 48, 64])

    def test_layer_n11(self):
        self.assertEqual(len(self.n11.all_layers), 12)
        self.assertEqual(len(self.n11.all_params), 22)
        self.assertEqual(self.n11.count_params(), 150592)
        self.assertEqual(self.n11.outputs.get_shape().as_list()[1:], [96, 96, 32])

    def test_layer_n12(self):
        self.assertEqual(len(self.n12.all_layers), 13)
        self.assertEqual(len(self.n12.all_params), 24)
        self.assertEqual(self.n12.count_params(), 201856)
        self.assertEqual(self.n12.outputs.get_shape().as_list()[1:], [96, 96, 64])
class Layer_Convolution_3D_Test(CustomTestCase):
    """Shape/parameter-count checks for TensorLayer 3-D conv/deconv layers."""

    @classmethod
    def setUpClass(cls):
        # (batch, depth, height, width, channels) placeholder, dynamic batch.
        x = tf.placeholder(tf.float32, (None, 100, 100, 100, 3))
        cls.input_layer = tl.layers.InputLayer(x, name='input_layer')
        cls.n1 = tl.layers.Conv3dLayer(cls.input_layer, shape=(2, 2, 2, 3, 32), strides=(1, 2, 2, 2, 1))
        # Transposed 3-D conv; output_shape pins batch=100 and a non-cubic
        # (12, 32, 32) volume with 128 channels.
        cls.n2 = tl.layers.DeConv3dLayer(
            cls.n1, shape=(2, 2, 2, 128, 32), output_shape=(100, 12, 32, 32, 128), strides=(1, 2, 2, 2, 1)
        )
        cls.n3 = tl.layers.DeConv3d(cls.n2, n_filter=32, filter_size=(3, 3, 3), strides=(2, 2, 2))

    @classmethod
    def tearDownClass(cls):
        # Clear the default graph so later test classes start from scratch.
        tf.reset_default_graph()

    def test_layer_n1(self):
        # 2*2*2*3*32 weights + 32 biases = 800; stride 2 halves each spatial dim.
        self.assertEqual(len(self.n1.all_layers), 2)
        self.assertEqual(len(self.n1.all_params), 2)
        self.assertEqual(self.n1.count_params(), 800)
        self.assertEqual(self.n1.outputs.get_shape().as_list()[1:], [50, 50, 50, 32])

    def test_layer_n2(self):
        self.assertEqual(len(self.n2.all_layers), 3)
        self.assertEqual(len(self.n2.all_params), 4)
        self.assertEqual(self.n2.count_params(), 33696)
        self.assertEqual(self.n2.outputs.get_shape().as_list()[1:], [12, 32, 32, 128])

    def test_layer_n3(self):
        self.assertEqual(len(self.n3.all_layers), 4)
        self.assertEqual(len(self.n3.all_params), 6)
        self.assertEqual(self.n3.count_params(), 144320)
        self.assertEqual(self.n3.outputs.get_shape().as_list()[1:], [24, 64, 64, 32])
class Layer_DeformableConvolution_Test(CustomTestCase):
    """Checks for DeformableConv2d layers driven by learned offset fields."""

    @classmethod
    def setUpClass(cls):
        x = tf.placeholder(tf.float32, [None, 299, 299, 3])
        net = tl.layers.InputLayer(x, name='input_layer')
        print("input:", net.all_layers)
        # Offset field for the deformable conv; 18 channels — presumably
        # 2 (x/y) * 3*3 kernel taps, as DeformableConv2d expects. TODO confirm
        # against the TensorLayer docs for the pinned version.
        offset1 = tl.layers.Conv2d(net, 18, (3, 3), (1, 1), act=tf.nn.relu, padding='SAME', name='offset1')
        cls.net1 = tl.layers.DeformableConv2d(net, offset1, 32, (3, 3), act=tf.nn.relu, name='deformable1')
        offset2 = tl.layers.Conv2d(cls.net1, 18, (3, 3), (1, 1), act=tf.nn.relu, padding='SAME', name='offset2')
        cls.net2 = tl.layers.DeformableConv2d(cls.net1, offset2, 64, (3, 3), act=tf.nn.relu, name='deformable2')

    @classmethod
    def tearDownClass(cls):
        # Clear the default graph so later test classes start from scratch.
        tf.reset_default_graph()

    def test_layer_n1(self):
        # Note: counts track the deformable chain only, not the offset convs.
        self.assertEqual(len(self.net1.all_layers), 2)
        self.assertEqual(len(self.net1.all_params), 2)
        self.assertEqual(self.net1.count_params(), 896)
        self.assertEqual(self.net1.outputs.get_shape().as_list()[1:], [299, 299, 32])

    def test_layer_n2(self):
        self.assertEqual(len(self.net2.all_layers), 3)
        self.assertEqual(len(self.net2.all_params), 4)
        self.assertEqual(self.net2.count_params(), 19392)
        self.assertEqual(self.net2.outputs.get_shape().as_list()[1:], [299, 299, 64])
if __name__ == '__main__':
    # Verbose TF/TL logging helps diagnose graph-construction failures in CI.
    tf.logging.set_verbosity(tf.logging.DEBUG)
    tl.logging.set_verbosity(tl.logging.DEBUG)
    unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import unittest
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
import tensorlayer as tl
from tests.utils import CustomTestCase
class Layer_Convolution_1D_Test(CustomTestCase):
@classmethod
def setUpClass(cls):
x = tf.placeholder(tf.float32, (None, 100, 1))
cls.input_layer = tl.layers.InputLayer(x, name='input_layer')
print("input:", cls.input_layer.all_layers)
cls.n1 = tl.layers.Conv1dLayer(cls.input_layer, shape=(5, 1, 32), stride=2)
cls.n2 = tl.layers.Conv1d(cls.n1, n_filter=32, filter_size=5, stride=2)
cls.n3 = tl.layers.SeparableConv1d(
cls.n2, n_filter=32, filter_size=3, strides=1, padding='VALID', act=tf.nn.relu, name='separable_1d'
)
@classmethod
def tearDownClass(cls):
tf.reset_default_graph()
def test_layer_n1(self):
self.assertEqual(len(self.n1.all_layers), 2)
self.assertEqual(len(self.n1.all_params), 2)
self.assertEqual(self.n1.count_params(), 192)
self.assertEqual(self.n1.outputs.get_shape().as_list()[1:], [50, 32])
def test_layer_n2(self):
self.assertEqual(len(self.n2.all_layers), 3)
self.assertEqual(len(self.n2.all_params), 4)
self.assertEqual(self.n2.count_params(), 5344)
self.assertEqual(self.n2.outputs.get_shape().as_list()[1:], [25, 32])
def test_layer_n3(self):
self.assertEqual(len(self.n3.all_layers), 4)
self.assertEqual(len(self.n3.all_params), 7)
self.assertEqual(self.n3.count_params(), 6496)
self.assertEqual(self.n3.outputs.get_shape().as_list()[1:], [23, 32])
class Layer_Convolution_2D_Test(CustomTestCase):
@classmethod
def setUpClass(cls):
x = tf.placeholder(tf.float32, (None, 100, 100, 3))
cls.input_layer = tl.layers.InputLayer(x, name='input_layer')
print(cls.input_layer.all_layers)
cls.n1 = tl.layers.Conv2dLayer(
cls.input_layer, act=tf.nn.relu, shape=(5, 5, 3, 32), strides=(1, 2, 2, 1), padding='SAME',
W_init=tf.truncated_normal_initializer(stddev=5e-2), b_init=tf.constant_initializer(value=0.0),
name='conv2dlayer'
)
print("#################################")
print("input:", cls.input_layer.all_layers)
print("input:", cls.n1.all_layers)
cls.n2 = tl.layers.Conv2d(cls.n1, n_filter=32, filter_size=(3, 3), strides=(2, 2), act=None, name='conv2d')
cls.n3 = tl.layers.Conv2d(
cls.n2, n_filter=32, filter_size=(3, 3), strides=(2, 2), act=tf.nn.relu, b_init=None, name='conv2d_no_bias'
)
cls.n4 = tl.layers.DeConv2dLayer(
cls.n3, shape=(5, 5, 32, 32), output_shape=(100, 200, 200, 32), strides=(1, 2, 2, 1), name='deconv2dlayer'
)
cls.n5 = tl.layers.DeConv2d(cls.n4, n_filter=32, filter_size=(3, 3), strides=(2, 2), name='DeConv2d')
cls.n6 = tl.layers.DepthwiseConv2d(
cls.n5, shape=(3, 3), strides=(2, 2), act=tf.nn.relu, depth_multiplier=2, name='depthwise'
)
cls.n7 = tl.layers.Conv2d(
cls.n6, n_filter=32, filter_size=(3, 3), strides=(2, 2), act=tf.nn.relu, name='conv2d2'
)
cls.n8 = tl.layers.GroupConv2d(cls.n7, n_filter=32, filter_size=(3, 3), strides=(2, 2), name='group')
cls.n9 = tl.layers.SeparableConv2d(
cls.n8, n_filter=32, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, name='seperable2d1'
)
cls.n10 = tl.layers.TernaryConv2d(cls.n9, 64, (5, 5), (1, 1), act=tf.nn.relu, padding='SAME', name='cnn2')
cls.n11 = tl.layers.AtrousDeConv2dLayer(
cls.n10, shape=(3, 3, 32, 64), output_shape=(100, 96, 96, 32), rate=2, act=tf.nn.relu, name='atroustrans1'
)
cls.n12 = tl.layers.QuanConv2d(cls.n11, 64, (5, 5), (1, 1), act=tf.nn.relu, padding='SAME', name='quancnn')
@classmethod
def tearDownClass(cls):
tf.reset_default_graph()
def test_layer_n1(self):
self.assertEqual(len(self.n1.all_layers), 2)
self.assertEqual(len(self.n1.all_params), 2)
self.assertEqual(self.n1.count_params(), 2432)
self.assertEqual(self.n1.outputs.get_shape().as_list()[1:], [50, 50, 32])
def test_layer_n2(self):
self.assertEqual(len(self.n2.all_layers), 3)
self.assertEqual(len(self.n2.all_params), 4)
self.assertEqual(self.n2.count_params(), 11680)
self.assertEqual(self.n2.outputs.get_shape().as_list()[1:], [25, 25, 32])
def test_layer_n3(self):
self.assertEqual(len(self.n3.all_layers), 4)
self.assertEqual(len(self.n3.all_params), 5)
self.assertEqual(self.n3.count_params(), 20896)
self.assertEqual(self.n3.outputs.get_shape().as_list()[1:], [13, 13, 32])
def test_layer_n4(self):
self.assertEqual(len(self.n4.all_layers), 5)
self.assertEqual(len(self.n4.all_params), 7)
self.assertEqual(self.n4.count_params(), 46528)
self.assertEqual(self.n4.outputs.get_shape().as_list()[1:], [200, 200, 32])
def test_layer_n5(self):
self.assertEqual(len(self.n5.all_layers), 6)
self.assertEqual(len(self.n5.all_params), 9)
self.assertEqual(self.n5.count_params(), 55776)
self.assertEqual(self.n5.outputs.get_shape().as_list()[1:], [400, 400, 32])
def test_layer_n6(self):
self.assertEqual(len(self.n6.all_layers), 7)
self.assertEqual(len(self.n6.all_params), 11)
self.assertEqual(self.n6.count_params(), 56416)
self.assertEqual(self.n6.outputs.get_shape().as_list()[1:], [200, 200, 64])
def test_layer_n7(self):
self.assertEqual(len(self.n7.all_layers), 8)
self.assertEqual(len(self.n7.all_params), 13)
self.assertEqual(self.n7.count_params(), 74880)
self.assertEqual(self.n7.outputs.get_shape().as_list()[1:], [100, 100, 32])
def test_layer_n8(self):
self.assertEqual(len(self.n8.all_layers), 9)
self.assertEqual(len(self.n8.all_params), 15)
self.assertEqual(self.n8.count_params(), 79520)
self.assertEqual(self.n8.outputs.get_shape().as_list()[1:], [50, 50, 32])
def test_layer_n9(self):
self.assertEqual(len(self.n9.all_layers), 10)
self.assertEqual(len(self.n9.all_params), 18)
self.assertEqual(self.n9.count_params(), 80864)
self.assertEqual(self.n9.outputs.get_shape().as_list()[1:], [48, 48, 32])
def test_layer_n10(self):
self.assertEqual(len(self.n10.all_layers), 11)
self.assertEqual(len(self.n10.all_params), 20)
self.assertEqual(self.n10.count_params(), 132128)
self.assertEqual(self.n10.outputs.get_shape().as_list()[1:], [48, 48, 64])
def test_layer_n11(self):
self.assertEqual(len(self.n11.all_layers), 12)
self.assertEqual(len(self.n11.all_params), 22)
self.assertEqual(self.n11.count_params(), 150592)
self.assertEqual(self.n11.outputs.get_shape().as_list()[1:], [96, 96, 32])
def test_layer_n12(self):
self.assertEqual(len(self.n12.all_layers), 13)
self.assertEqual(len(self.n12.all_params), 24)
self.assertEqual(self.n12.count_params(), 201856)
self.assertEqual(self.n12.outputs.get_shape().as_list()[1:], [96, 96, 64])
class Layer_Convolution_3D_Test(CustomTestCase):
@classmethod
def setUpClass(cls):
x = tf.placeholder(tf.float32, (None, 100, 100, 100, 3))
cls.input_layer = tl.layers.InputLayer(x, name='input_layer')
cls.n1 = tl.layers.Conv3dLayer(cls.input_layer, shape=(2, 2, 2, 3, 32), strides=(1, 2, 2, 2, 1))
cls.n2 = tl.layers.DeConv3dLayer(
cls.n1, shape=(2, 2, 2, 128, 32), output_shape=(100, 12, 32, 32, 128), strides=(1, 2, 2, 2, 1)
)
cls.n3 = tl.layers.DeConv3d(cls.n2, n_filter=32, filter_size=(3, 3, 3), strides=(2, 2, 2))
@classmethod
def tearDownClass(cls):
tf.reset_default_graph()
def test_layer_n1(self):
self.assertEqual(len(self.n1.all_layers), 2)
self.assertEqual(len(self.n1.all_params), 2)
self.assertEqual(self.n1.count_params(), 800)
self.assertEqual(self.n1.outputs.get_shape().as_list()[1:], [50, 50, 50, 32])
def test_layer_n2(self):
self.assertEqual(len(self.n2.all_layers), 3)
self.assertEqual(len(self.n2.all_params), 4)
self.assertEqual(self.n2.count_params(), 33696)
self.assertEqual(self.n2.outputs.get_shape().as_list()[1:], [12, 32, 32, 128])
def test_layer_n3(self):
self.assertEqual(len(self.n3.all_layers), 4)
self.assertEqual(len(self.n3.all_params), 6)
self.assertEqual(self.n3.count_params(), 144320)
self.assertEqual(self.n3.outputs.get_shape().as_list()[1:], [24, 64, 64, 32])
class Layer_DeformableConvolution_Test(CustomTestCase):
@classmethod
def setUpClass(cls):
x = tf.placeholder(tf.float32, [None, 299, 299, 3])
net = tl.layers.InputLayer(x, name='input_layer')
print("input:", net.all_layers)
offset1 = tl.layers.Conv2d(net, 18, (3, 3), (1, 1), act=tf.nn.relu, padding='SAME', name='offset1')
cls.net1 = tl.layers.DeformableConv2d(net, offset1, 32, (3, 3), act=tf.nn.relu, name='deformable1')
offset2 = tl.layers.Conv2d(cls.net1, 18, (3, 3), (1, 1), act=tf.nn.relu, padding='SAME', name='offset2')
cls.net2 = tl.layers.DeformableConv2d(cls.net1, offset2, 64, (3, 3), act=tf.nn.relu, name='deformable2')
@classmethod
def tearDownClass(cls):
tf.reset_default_graph()
def test_layer_n1(self):
self.assertEqual(len(self.net1.all_layers), 2)
self.assertEqual(len(self.net1.all_params), 2)
self.assertEqual(self.net1.count_params(), 896)
self.assertEqual(self.net1.outputs.get_shape().as_list()[1:], [299, 299, 32])
def test_layer_n2(self):
self.assertEqual(len(self.net2.all_layers), 3)
self.assertEqual(len(self.net2.all_params), 4)
self.assertEqual(self.net2.count_params(), 19392)
self.assertEqual(self.net2.outputs.get_shape().as_list()[1:], [299, 299, 64])
if __name__ == '__main__':
tf.logging.set_verbosity(tf.logging.DEBUG)
tl.logging.set_verbosity(tl.logging.DEBUG)
unittest.main()
|
de
| 0.299006
|
#!/usr/bin/env python # -*- coding: utf-8 -*- ################################")
| 2.429809
| 2
|
mysite/core/views.py
|
root121976/ticket
| 0
|
6626827
|
<reponame>root121976/ticket
from django.shortcuts import render, redirect, render_to_response
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import TemplateView, ListView, CreateView
from django.core.files.storage import FileSystemStorage
from django.urls import reverse_lazy
from .forms import BookForm
from .forms import OrderForm
from .forms import TripInOrderForm
from .models import Book
from .models import Order
from .models import TripInOrder
from mysite.choices import *
# class Home(TemplateView):
# count = User.objects.count()
# template_name = 'home.html'
# return render(request, 'home.html', {
# 'count': count
# })
def upload(request):
    """Save an uploaded file to default storage and echo its URL.

    Expects a multipart POST with a ``document`` field; on GET it just
    renders the empty upload form.
    """
    context = {}
    if request.method == 'POST':
        uploaded_file = request.FILES['document']
        fs = FileSystemStorage()
        # ``save`` may rename the file to avoid collisions, so use the
        # name it returns when building the URL.
        name = fs.save(uploaded_file.name, uploaded_file)
        context['url'] = fs.url(name)
    return render(request, 'upload.html', context)
@login_required
def book_list(request):
    """List all orders (view/template names still say 'book' for legacy reasons)."""
    orders = Order.objects.all()
    return render(request, 'book_list.html', {
        'orders': orders
    })
def upload_book(request):
    """Create an Order pre-bound to the current user.

    On an invalid POST the bound form (with validation errors) falls
    through to the final render, which is the standard Django pattern.
    """
    if request.method == 'POST':
        form = OrderForm(request.POST, request.FILES, initial={'user': request.user})
        if form.is_valid():
            form.save()
            return redirect('book_list')
    else:
        form = OrderForm(initial={'user': request.user})
    return render(request, 'upload_book.html', {
        'form': form
    })
def order_detailview(request, pk):
    """Show and edit a single Order; saves and redirects on valid POST.

    NOTE(review): ``Order.objects.get`` raises DoesNotExist (a 500) for a
    bad pk — ``get_object_or_404`` would be the conventional choice; verify
    desired behavior before changing.
    """
    order = Order.objects.get(pk=pk)
    if request.method == 'POST':
        form = OrderForm(request.POST, instance=order)
        if form.is_valid():
            form.save()
            return redirect('book_list')
    else:
        form = OrderForm(instance=order)
    return render(request, 'order_detail.html', {
        'form': form
    })
def ticket_detailview(request, pk):
    """Show and edit a single TripInOrder; saves and redirects on valid POST."""
    ticket = TripInOrder.objects.get(pk=pk)
    if request.method == 'POST':
        form = TripInOrderForm(request.POST, request.FILES, instance=ticket)
        if form.is_valid():
            form.save()
            return redirect('book_list')
    else:
        form = TripInOrderForm(instance=ticket)
    return render(request, 'ticket_detail.html', {
        'form': form
    })
def delete_book(request, pk):
    """Delete the Order with the given pk and return to the order list.

    Deletion only happens on POST.  Previously a non-POST request fell
    through without a return value, so Django raised a 500 ("view didn't
    return an HttpResponse"); every request now ends with a redirect.
    """
    if request.method == 'POST':
        book = Order.objects.get(pk=pk)
        book.delete()
    return redirect('book_list')
def delete_trip(request, pk):
    """Delete the TripInOrder with the given pk and return to the order list.

    Deletion only happens on POST.  Previously a non-POST request fell
    through without a return value, so Django raised a 500 ("view didn't
    return an HttpResponse"); every request now ends with a redirect.
    """
    if request.method == 'POST':
        book = TripInOrder.objects.get(pk=pk)
        book.delete()
    return redirect('book_list')
def create_trips(request, pk):
    """Add a TripInOrder to the Order identified by ``pk``."""
    order = Order.objects.get(pk=pk)
    if request.method == 'POST':
        # NOTE(review): ``request.POST or None`` is redundant inside the
        # POST branch — the form is always bound here.
        form = TripInOrderForm(request.POST or None, initial={'order': order})
        if form.is_valid():
            form.save()
            return redirect('book_list')
    else:
        form = TripInOrderForm(initial={'order': order})
    return render(request, 'create_trips.html', {
        'form': form
    })
class BookListView(ListView):
    """Class-based list of orders (name kept as 'Book' for legacy templates)."""
    model = Order
    template_name = 'class_book_list.html'
    context_object_name = 'books'
class UploadBookView(CreateView):
    """Class-based Book creation form; redirects to the class-based list."""
    model = Book
    form_class = BookForm
    success_url = reverse_lazy('class_book_list')
    template_name = 'upload_book.html'
def home(request):
    """Landing page showing the total number of orders."""
    count = Order.objects.count()
    return render(request, 'home.html', {
        'count': count
    })
def signup(request):
    """Register a new user with Django's stock UserCreationForm.

    On an invalid POST the bound form with errors is re-rendered.
    """
    if request.method == 'POST':
        form = UserCreationForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect('home')
    else:
        form = UserCreationForm()
    return render(request, 'registration/signup.html', {
        'form': form
    })
@login_required
def secret_page(request):
    """Login-protected demo page (function-based variant)."""
    return render(request, 'secret_page.html')
class SecretPage(LoginRequiredMixin, TemplateView):
    """Login-protected demo page (class-based variant of ``secret_page``)."""
    template_name = 'secret_page.html'
|
from django.shortcuts import render, redirect, render_to_response
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import TemplateView, ListView, CreateView
from django.core.files.storage import FileSystemStorage
from django.urls import reverse_lazy
from .forms import BookForm
from .forms import OrderForm
from .forms import TripInOrderForm
from .models import Book
from .models import Order
from .models import TripInOrder
from mysite.choices import *
# class Home(TemplateView):
# count = User.objects.count()
# template_name = 'home.html'
# return render(request, 'home.html', {
# 'count': count
# })
def upload(request):
context = {}
if request.method == 'POST':
uploaded_file = request.FILES['document']
fs = FileSystemStorage()
name = fs.save(uploaded_file.name, uploaded_file)
context['url'] = fs.url(name)
return render(request, 'upload.html', context)
@login_required
def book_list(request):
orders = Order.objects.all()
return render(request, 'book_list.html', {
'orders': orders
})
def upload_book(request):
if request.method == 'POST':
form = OrderForm(request.POST, request.FILES,initial={'user': request.user})
if form.is_valid():
form.save()
return redirect('book_list')
else:
form = OrderForm(initial={'user': request.user})
return render(request, 'upload_book.html', {
'form': form
})
def order_detailview(request,pk):
order = Order.objects.get(pk=pk)
if request.method == 'POST':
form = OrderForm(request.POST, instance=order)
if form.is_valid():
form.save()
return redirect('book_list')
else:
form = OrderForm(instance=order)
return render(request, 'order_detail.html', {
'form': form
})
def ticket_detailview(request,pk):
ticket = TripInOrder.objects.get(pk=pk)
if request.method == 'POST':
form = TripInOrderForm(request.POST, request.FILES, instance=ticket)
if form.is_valid():
form.save()
return redirect('book_list')
else:
form = TripInOrderForm(instance=ticket)
return render(request, 'ticket_detail.html', {
'form': form
})
def delete_book(request, pk):
if request.method == 'POST':
book = Order.objects.get(pk=pk)
book.delete()
return redirect('book_list')
def delete_trip(request, pk):
if request.method == 'POST':
book = TripInOrder.objects.get(pk=pk)
book.delete()
return redirect('book_list')
def create_trips(request, pk):
order = Order.objects.get(pk=pk)
if request.method == 'POST':
form = TripInOrderForm(request.POST or None, initial={'order': order})
if form.is_valid():
form.save()
return redirect('book_list')
else:
form = TripInOrderForm(initial={'order': order})
return render(request, 'create_trips.html', {
'form': form
})
class BookListView(ListView):
model = Order
template_name = 'class_book_list.html'
context_object_name = 'books'
class UploadBookView(CreateView):
model = Book
form_class = BookForm
success_url = reverse_lazy('class_book_list')
template_name = 'upload_book.html'
def home(request):
count = Order.objects.count()
return render(request, 'home.html', {
'count': count
})
def signup(request):
if request.method == 'POST':
form = UserCreationForm(request.POST)
if form.is_valid():
form.save()
return redirect('home')
else:
form = UserCreationForm()
return render(request, 'registration/signup.html', {
'form': form
})
@login_required
def secret_page(request):
return render(request, 'secret_page.html')
class SecretPage(LoginRequiredMixin, TemplateView):
template_name = 'secret_page.html'
|
en
| 0.217984
|
# class Home(TemplateView): # count = User.objects.count() # template_name = 'home.html' # return render(request, 'home.html', { # 'count': count # })
| 2.076879
| 2
|
eigen_update_model.py
|
vhr121/emotion_AI
| 0
|
6626828
|
<filename>eigen_update_model.py
import cv2
import glob
import time
import random
import numpy as np
import os
#create eigen face recogniser object
eigen = cv2.createEigenFaceRecognizer()
data = {}
#ensure a models/<subfolder>-style directory exists before saving into it
def check_file_exists(folder, file_check):
    """Create the directory ``folder/file_check`` if it does not exist.

    Args:
        folder: parent directory (e.g. ``'models'``).
        file_check: subdirectory name to ensure (e.g. ``'eigen'``).
    """
    # Simplified from an inverted exists/else-makedirs pattern; os.path.join
    # replaces manual "%s/%s" path building.
    path = os.path.join(folder, file_check)
    if not os.path.exists(path):
        os.makedirs(path)
#create the training and test data
def make_sets(emotions):
    """Load every image for each emotion and return shuffled (data, labels).

    Args:
        emotions: ordered list of emotion names; an image's label is the
            index of its emotion in this list.

    Returns:
        Parallel tuples ``(data, labels)`` where each ``data`` entry is a
        grayscale image array.
    """
    # NOTE(review): this local ``data`` list shadows the module-level
    # ``data`` dict defined at the top of the file.
    data = []
    labels = []
    for emotion in emotions:
        training = glob.glob("datasets/%s/*" % emotion)
        for item in training:
            image = cv2.imread(item)
            gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
            data.append(gray)
            labels.append(emotions.index(emotion))
    #combine data and labels, then shuffle so order no longer correlates with class
    bind_lst = list(zip(data,labels))
    random.shuffle(bind_lst)
    data,labels=zip(*bind_lst)
    return data,labels
#train the eigenface model and report hold-out accuracy on a 20% split
def run_recognizer(emotions):
    """Train the eigenface classifier on 80% of the data, test on the rest.

    Args:
        emotions: list of emotion names passed through to ``make_sets``.
    """
    data, labels = make_sets(emotions)
    #segregate the data and labels as train/test: 80% train, 20% test
    split = int(len(data) * 0.8)
    training_data = data[:split]
    training_labels = labels[:split]
    testing_data = data[split:]
    testing_labels = labels[split:]
    # print() with a single argument works identically on Python 2 and 3;
    # the file previously mixed statement and function forms.
    print("total data is %s" % str(len(data)))
    print("training eigen face classifier")
    print("size of training set is: " + str(len(training_labels)) + " images")
    start_time = time.time()
    eigen.train(training_data, np.asarray(training_labels))
    # Fixed message: the elapsed value is in seconds, not minutes.
    print("it took %s seconds to train your model" % str(time.time() - start_time))
    print("testing your model ")
    count = 0
    for index, sample in enumerate(testing_data):
        exp, conf = eigen.predict(sample)
        if exp == testing_labels[index]:
            count = count + 1
    # Fixed: ``count/len(testing_data)`` was integer division under
    # Python 2, truncating the ratio to 0 (or 1) before the *100 scaling.
    accuracy = 100.0 * count / len(testing_data)
    print("accuracy of the model is %s" % str(accuracy))
def update(emotions):
    """Retrain the eigenface model and persist it under models/eigen/.

    The saved filename is suffixed with the number of existing model files,
    so earlier snapshots are never overwritten.
    """
    run_recognizer(emotions)
    print("saving model")
    check_file_exists('models','eigen')
    count_files = len(glob.glob('models/eigen/*'))
    eigen.save("models/eigen/trained_eigen_emoclassifier_%s.xml" %count_files)
    print("model saved!")
expression = ['happy', 'sad', 'surprise', 'neutral', 'angry']
|
<filename>eigen_update_model.py
import cv2
import glob
import time
import random
import numpy as np
import os
#create eigen face recogniser object
eigen = cv2.createEigenFaceRecognizer()
data = {}
#check if a file already exist if not create the file
def check_file_exists(folder, file_check):
if os.path.exists("%s/%s" % (folder, file_check)):
pass
else:
os.makedirs("%s/%s" % (folder, file_check))
#create the training and test data
def make_sets(emotions):
data = []
labels = []
for emotion in emotions:
training = glob.glob("datasets/%s/*" % emotion)
for item in training:
image = cv2.imread(item)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
data.append(gray)
labels.append(emotions.index(emotion))
#combine data and lable and shuffel used for testing purpose
bind_lst = list(zip(data,labels))
random.shuffle(bind_lst)
data,labels=zip(*bind_lst)
return data,labels
#train the model usin
def run_recognizer(emotions):
data,labels = make_sets(emotions)
#segregate the data and lables as train and test data 80% train and 20% test
training_labels = labels[:int(len(data)*0.8)]
training_data= data[:int(len(data)*0.8)]
testing_data=data[int(len(data)*0.8):]
testing_labels=labels[int(len(data)*0.8):]
print "total data is %s"%str(len(data))
print("training eigen face classifier")
print("size of training set is: " + str(len(training_labels)) + " images")
start_time = time.time()
eigen.train(training_data, np.asarray(training_labels))
print "it to %s minutes to train your model"%str((time.time()-start_time))
print "testing your model "
count=0
index=0
for i in testing_data:
exp,conf=eigen.predict(i)
if(exp==testing_labels[index]):
count=count+1
index=index+1
accuracy=count/len(testing_data)*100.0
print "accuracy of the model is %s"%str(accuracy)
def update(emotions):
run_recognizer(emotions)
print("saving model")
check_file_exists('models','eigen')
count_files = len(glob.glob('models/eigen/*'))
eigen.save("models/eigen/trained_eigen_emoclassifier_%s.xml" %count_files)
print("model saved!")
expression = ['happy', 'sad', 'surprise', 'neutral', 'angry']
|
en
| 0.798397
|
#create eigen face recogniser object #check if a file already exist if not create the file #create the training and test data #combine data and lable and shuffel used for testing purpose #train the model usin #segregate the data and lables as train and test data 80% train and 20% test
| 2.991663
| 3
|
aries_cloudagent/core/goal_code_registry.py
|
kuraakhilesh8230/aries-cloudagent-python
| 4
|
6626829
|
"""Handle registration and publication of supported goal codes."""
from typing import Sequence
from ..utils.classloader import ClassLoader
class GoalCodeRegistry:
    """Registry of goal codes supported by registered protocol controllers."""

    def __init__(self):
        """Initialize a `GoalCodeRegistry` instance."""
        self.goal_codes = []

    def register_controllers(self, *controller_sets):
        """
        Add new controllers.

        Args:
            controller_sets: Mappings of controller to coroutines
        """
        for mapping in controller_sets:
            for protocol, controller_path in mapping.items():
                controller = ClassLoader.load_class(controller_path)(protocol=protocol)
                # De-duplicate while preserving insertion order.
                for code in controller.determine_goal_codes():
                    if code not in self.goal_codes:
                        self.goal_codes.append(code)

    def goal_codes_matching_query(self, query: str) -> Sequence[str]:
        """Return a list of goal codes matching a query string."""
        known = self.goal_codes
        matches = None
        if query == "*" or query is None:
            matches = known
        elif query:
            if query.endswith("*"):
                prefix = query[:-1]
                matches = tuple(code for code in known if code.startswith(prefix))
            elif query in known:
                matches = (query,)
        return matches or ()
|
"""Handle registration and publication of supported goal codes."""
from typing import Sequence
from ..utils.classloader import ClassLoader
class GoalCodeRegistry:
    """Registry of the goal codes supported by loaded protocol controllers."""

    def __init__(self):
        """Set up an empty `GoalCodeRegistry`."""
        self.goal_codes = []

    def register_controllers(self, *controller_sets):
        """
        Add new controllers.

        Args:
            controller_sets: Mappings of controller to coroutines

        """
        for mapping in controller_sets:
            for protocol, controller_path in mapping.items():
                controller_cls = ClassLoader.load_class(controller_path)
                controller = controller_cls(protocol=protocol)
                # extend() consumes the generator one item at a time, so the
                # membership test also de-duplicates within a single batch.
                self.goal_codes.extend(
                    code
                    for code in controller.determine_goal_codes()
                    if code not in self.goal_codes
                )

    def goal_codes_matching_query(self, query: str) -> Sequence[str]:
        """Return a list of goal codes matching a query string."""
        known = self.goal_codes
        if query == "*" or query is None:
            return known or ()
        if not query:
            return ()
        if query.endswith("*"):
            prefix = query[:-1]
            return tuple(code for code in known if code.startswith(prefix))
        return (query,) if query in known else ()
|
en
| 0.771005
|
Handle registration and publication of supported goal codes. Goal code registry. Initialize a `GoalCodeRegistry` instance. Add new controllers. Args: controller_sets: Mappings of controller to coroutines Return a list of goal codes matching a query string.
| 2.613116
| 3
|
networkx-d3-v2/lib/appengine_sessions/urls.py
|
suraj-testing2/Clock_Websites
| 0
|
6626830
|
<filename>networkx-d3-v2/lib/appengine_sessions/urls.py
# NOTE(review): `django.conf.urls.defaults` and `patterns()` are legacy APIs
# removed in modern Django (1.6/1.10); this module targets an old release.
from django.conf.urls.defaults import *
from appengine_sessions import views
# Single route: the cron endpoint that triggers session clean-up.
urlpatterns = patterns(
    '',
    url(r'^clean-up/$', views.SessionCleanUpCron.as_view(), {}, name='session-clean-up'),
)
|
<filename>networkx-d3-v2/lib/appengine_sessions/urls.py
# NOTE(review): uses the legacy `patterns()` / `urls.defaults` API, removed in
# modern Django; kept as-is for this legacy App Engine codebase.
from django.conf.urls.defaults import *
from appengine_sessions import views
# URL route for the session clean-up cron handler.
urlpatterns = patterns(
    '',
    url(r'^clean-up/$', views.SessionCleanUpCron.as_view(), {}, name='session-clean-up'),
)
|
none
| 1
| 1.78249
| 2
|
|
tests/distributions/test_empirical.py
|
gavincangan/pyro
| 10
|
6626831
|
<filename>tests/distributions/test_empirical.py
import pytest
import torch
from pyro.distributions.empirical import Empirical
from tests.common import assert_equal, assert_close
@pytest.mark.parametrize("size", [[], [1], [2, 3]])
@pytest.mark.parametrize("dtype", [torch.float32, torch.float64])
def test_unweighted_mean_and_var(size, dtype):
    """Uniformly weighted samples 0..4 have mean 2 and variance 2 elementwise."""
    samples = []
    for i in range(5):
        samples.append(torch.ones(size, dtype=dtype) * i)
    samples = torch.stack(samples)
    empirical_dist = Empirical(samples, torch.ones(5, dtype=dtype))
    true_mean = torch.ones(size) * 2
    true_var = torch.ones(size) * 2
    assert_equal(empirical_dist.mean, true_mean)
    assert_equal(empirical_dist.variance, true_var)
@pytest.mark.parametrize("batch_shape, event_shape", [
    ([], []),
    ([2], []),
    ([2], [5]),
    ([2], [5, 3]),
    ([2, 5], [3]),
])
@pytest.mark.parametrize("sample_shape", [[], [20], [20, 3, 4]])
@pytest.mark.parametrize("dtype", [torch.long, torch.float32, torch.float64])
def test_unweighted_samples(batch_shape, event_shape, sample_shape, dtype):
    """Drawn samples have shape ``sample_shape + batch_shape + event_shape``."""
    agg_dim_size = 5
    # empirical samples with desired shape
    dim_ordering = list(range(len(batch_shape + event_shape) + 1))  # +1 for agg dim
    dim_ordering.insert(len(batch_shape), dim_ordering.pop())
    emp_samples = torch.arange(agg_dim_size, dtype=dtype)\
        .expand(batch_shape + event_shape + [agg_dim_size])\
        .permute(dim_ordering)
    # initial weight assignment
    weights = torch.ones(batch_shape + [agg_dim_size])
    empirical_dist = Empirical(emp_samples, weights)
    samples = empirical_dist.sample(sample_shape=torch.Size(sample_shape))
    assert_equal(samples.size(), torch.Size(sample_shape + batch_shape + event_shape))
@pytest.mark.parametrize("sample, weights, expected_mean, expected_var", [(
    torch.tensor([[0., 0., 0.], [1., 1., 1.]]),
    torch.ones(2),
    torch.tensor([0.5, 0.5, 0.5]),
    torch.tensor([0.25, 0.25, 0.25]),
), (
    torch.tensor([[0., 0., 0.], [1., 1., 1.]]),
    torch.ones(2, 3),
    torch.tensor([0., 1.]),
    torch.tensor([0., 0.]),
),
])
def test_sample_examples(sample, weights, expected_mean, expected_var):
    """Hand-computed mean/variance match, and Monte Carlo estimates are close."""
    emp_dist = Empirical(sample, weights)
    num_samples = 10000
    assert_equal(emp_dist.mean, expected_mean)
    assert_equal(emp_dist.variance, expected_var)
    emp_samples = emp_dist.sample((num_samples,))
    # 10k draws: sample mean/var should agree within ~1% relative tolerance.
    assert_close(emp_samples.mean(0), emp_dist.mean, rtol=1e-2)
    assert_close(emp_samples.var(0), emp_dist.variance, rtol=1e-2)
@pytest.mark.parametrize("batch_shape, event_shape", [
    ([], []),
    ([1], []),
    ([10], []),
    ([10, 8], [3]),
    ([10, 8], [3, 4]),
])
@pytest.mark.parametrize("dtype", [torch.long, torch.float32, torch.float64])
def test_log_prob(batch_shape, event_shape, dtype):
    """log_prob is log(1/5) for a held sample, -inf outside support,
    and rejects vectorized (extra leading dim) queries with ValueError."""
    samples = []
    for i in range(5):
        samples.append(torch.ones(event_shape, dtype=dtype) * i)
    samples = torch.stack(samples).expand(batch_shape + [5] + event_shape)
    weights = torch.tensor(1.).expand(batch_shape + [5])
    empirical_dist = Empirical(samples, weights)
    sample_to_score = torch.tensor(1, dtype=dtype).expand(batch_shape + event_shape)
    log_prob = empirical_dist.log_prob(sample_to_score)
    assert_equal(log_prob, (weights.new_ones(batch_shape + [1]) * 0.2).sum(-1).log())
    # Value outside support returns -Inf
    sample_to_score = torch.tensor(1, dtype=dtype).expand(batch_shape + event_shape) * 6
    log_prob = empirical_dist.log_prob(sample_to_score)
    assert log_prob.shape == torch.Size(batch_shape)
    assert torch.isinf(log_prob).all()
    # Vectorized ``log_prob`` raises ValueError
    with pytest.raises(ValueError):
        sample_to_score = torch.ones([3] + batch_shape + event_shape, dtype=dtype)
        empirical_dist.log_prob(sample_to_score)
@pytest.mark.parametrize("event_shape", [[], [1], [2, 3]])
@pytest.mark.parametrize("dtype", [torch.long, torch.float32, torch.float64])
def test_weighted_sample_coherence(event_shape, dtype):
    """With log-weights giving P(1)=0.25, P(0)=0.75, both log_prob and
    empirical draw frequencies reflect those probabilities."""
    data = [(1.0, 0.5), (0.0, 1.5), (1.0, 0.5), (0.0, 1.5)]
    samples, weights = [], []
    for sample, weight in data:
        samples.append(sample * torch.ones(event_shape, dtype=dtype))
        weights.append(torch.tensor(weight).log())
    samples, weights = torch.stack(samples), torch.stack(weights)
    empirical_dist = Empirical(samples, weights)
    assert_equal(empirical_dist.event_shape, torch.Size(event_shape))
    assert_equal(empirical_dist.sample_size, 4)
    sample_to_score = torch.ones(event_shape, dtype=dtype) * 1.0
    assert_equal(empirical_dist.log_prob(sample_to_score), torch.tensor(0.25).log())
    samples = empirical_dist.sample(sample_shape=torch.Size((1000,)))
    zeros = torch.zeros(event_shape, dtype=dtype)
    ones = torch.ones(event_shape, dtype=dtype)
    # Count draws that are entirely zero (resp. entirely one) per event.
    num_zeros = samples.eq(zeros).contiguous().view(1000, -1).min(dim=-1)[0].float().sum()
    num_ones = samples.eq(ones).contiguous().view(1000, -1).min(dim=-1)[0].float().sum()
    assert_equal(num_zeros.item() / 1000, 0.75, prec=0.02)
    assert_equal(num_ones.item() / 1000, 0.25, prec=0.02)
@pytest.mark.parametrize("batch_shape", [[], [1], [2], [2, 3]])
@pytest.mark.parametrize("event_shape", [[], [1], [2, 3]])
@pytest.mark.parametrize("dtype", [torch.long, torch.float32, torch.float64])
def test_weighted_mean_var(event_shape, dtype, batch_shape):
    """Weighted mean/variance are exact for float dtypes; both properties
    raise ValueError for integral (torch.long) samples."""
    data = [(1.0, 0.5), (0.0, 1.5), (1.0, 0.5), (0.0, 1.5)]
    samples, weights = [], []
    for sample, weight in data:
        samples.append(sample * torch.ones(event_shape, dtype=dtype))
        # torch.long cannot hold log-weights; let torch infer a float dtype.
        weight_dtype = dtype if dtype is not torch.long else None
        weights.append(torch.tensor(weight, dtype=weight_dtype).log())
    samples = torch.stack(samples).expand(batch_shape + [4] + event_shape)
    weights = torch.stack(weights).expand(batch_shape + [4])
    empirical_dist = Empirical(samples, weights)
    if dtype in (torch.float32, torch.float64):
        true_mean = torch.ones(batch_shape + event_shape, dtype=dtype) * 0.25
        true_var = torch.ones(batch_shape + event_shape, dtype=dtype) * 0.1875
        assert_equal(empirical_dist.mean, true_mean)
        assert_equal(empirical_dist.variance, true_var)
    else:
        # BUG FIX: previously both statements shared one `pytest.raises` block,
        # so `.variance` never executed once `.mean` raised — it was unchecked.
        with pytest.raises(ValueError):
            empirical_dist.mean
        with pytest.raises(ValueError):
            empirical_dist.variance
def test_mean_var_non_nan():
    """Extremely negative (log-space) weights must not produce NaN statistics;
    identical samples give exact mean and zero variance."""
    true_mean = torch.randn([1, 2, 3])
    samples, weights = [], []
    for i in range(10):
        samples.append(true_mean)
        # -1000 in log space would underflow to 0 if exponentiated naively.
        weights.append(torch.tensor(-1000.))
    samples, weights = torch.stack(samples), torch.stack(weights)
    empirical_dist = Empirical(samples, weights)
    assert_equal(empirical_dist.mean, true_mean)
    assert_equal(empirical_dist.variance, torch.zeros_like(true_mean))
|
<filename>tests/distributions/test_empirical.py
import pytest
import torch
from pyro.distributions.empirical import Empirical
from tests.common import assert_equal, assert_close
@pytest.mark.parametrize("size", [[], [1], [2, 3]])
@pytest.mark.parametrize("dtype", [torch.float32, torch.float64])
def test_unweighted_mean_and_var(size, dtype):
    """Uniformly weighted samples 0..4 have mean 2 and variance 2 elementwise."""
    samples = []
    for i in range(5):
        samples.append(torch.ones(size, dtype=dtype) * i)
    samples = torch.stack(samples)
    empirical_dist = Empirical(samples, torch.ones(5, dtype=dtype))
    true_mean = torch.ones(size) * 2
    true_var = torch.ones(size) * 2
    assert_equal(empirical_dist.mean, true_mean)
    assert_equal(empirical_dist.variance, true_var)
@pytest.mark.parametrize("batch_shape, event_shape", [
    ([], []),
    ([2], []),
    ([2], [5]),
    ([2], [5, 3]),
    ([2, 5], [3]),
])
@pytest.mark.parametrize("sample_shape", [[], [20], [20, 3, 4]])
@pytest.mark.parametrize("dtype", [torch.long, torch.float32, torch.float64])
def test_unweighted_samples(batch_shape, event_shape, sample_shape, dtype):
    """Drawn samples have shape ``sample_shape + batch_shape + event_shape``."""
    agg_dim_size = 5
    # empirical samples with desired shape
    dim_ordering = list(range(len(batch_shape + event_shape) + 1))  # +1 for agg dim
    dim_ordering.insert(len(batch_shape), dim_ordering.pop())
    emp_samples = torch.arange(agg_dim_size, dtype=dtype)\
        .expand(batch_shape + event_shape + [agg_dim_size])\
        .permute(dim_ordering)
    # initial weight assignment
    weights = torch.ones(batch_shape + [agg_dim_size])
    empirical_dist = Empirical(emp_samples, weights)
    samples = empirical_dist.sample(sample_shape=torch.Size(sample_shape))
    assert_equal(samples.size(), torch.Size(sample_shape + batch_shape + event_shape))
@pytest.mark.parametrize("sample, weights, expected_mean, expected_var", [(
    torch.tensor([[0., 0., 0.], [1., 1., 1.]]),
    torch.ones(2),
    torch.tensor([0.5, 0.5, 0.5]),
    torch.tensor([0.25, 0.25, 0.25]),
), (
    torch.tensor([[0., 0., 0.], [1., 1., 1.]]),
    torch.ones(2, 3),
    torch.tensor([0., 1.]),
    torch.tensor([0., 0.]),
),
])
def test_sample_examples(sample, weights, expected_mean, expected_var):
    """Hand-computed mean/variance match, and Monte Carlo estimates are close."""
    emp_dist = Empirical(sample, weights)
    num_samples = 10000
    assert_equal(emp_dist.mean, expected_mean)
    assert_equal(emp_dist.variance, expected_var)
    emp_samples = emp_dist.sample((num_samples,))
    # 10k draws: sample mean/var should agree within ~1% relative tolerance.
    assert_close(emp_samples.mean(0), emp_dist.mean, rtol=1e-2)
    assert_close(emp_samples.var(0), emp_dist.variance, rtol=1e-2)
@pytest.mark.parametrize("batch_shape, event_shape", [
    ([], []),
    ([1], []),
    ([10], []),
    ([10, 8], [3]),
    ([10, 8], [3, 4]),
])
@pytest.mark.parametrize("dtype", [torch.long, torch.float32, torch.float64])
def test_log_prob(batch_shape, event_shape, dtype):
    """log_prob is log(1/5) for a held sample, -inf outside support,
    and rejects vectorized (extra leading dim) queries with ValueError."""
    samples = []
    for i in range(5):
        samples.append(torch.ones(event_shape, dtype=dtype) * i)
    samples = torch.stack(samples).expand(batch_shape + [5] + event_shape)
    weights = torch.tensor(1.).expand(batch_shape + [5])
    empirical_dist = Empirical(samples, weights)
    sample_to_score = torch.tensor(1, dtype=dtype).expand(batch_shape + event_shape)
    log_prob = empirical_dist.log_prob(sample_to_score)
    assert_equal(log_prob, (weights.new_ones(batch_shape + [1]) * 0.2).sum(-1).log())
    # Value outside support returns -Inf
    sample_to_score = torch.tensor(1, dtype=dtype).expand(batch_shape + event_shape) * 6
    log_prob = empirical_dist.log_prob(sample_to_score)
    assert log_prob.shape == torch.Size(batch_shape)
    assert torch.isinf(log_prob).all()
    # Vectorized ``log_prob`` raises ValueError
    with pytest.raises(ValueError):
        sample_to_score = torch.ones([3] + batch_shape + event_shape, dtype=dtype)
        empirical_dist.log_prob(sample_to_score)
@pytest.mark.parametrize("event_shape", [[], [1], [2, 3]])
@pytest.mark.parametrize("dtype", [torch.long, torch.float32, torch.float64])
def test_weighted_sample_coherence(event_shape, dtype):
    """With log-weights giving P(1)=0.25, P(0)=0.75, both log_prob and
    empirical draw frequencies reflect those probabilities."""
    data = [(1.0, 0.5), (0.0, 1.5), (1.0, 0.5), (0.0, 1.5)]
    samples, weights = [], []
    for sample, weight in data:
        samples.append(sample * torch.ones(event_shape, dtype=dtype))
        weights.append(torch.tensor(weight).log())
    samples, weights = torch.stack(samples), torch.stack(weights)
    empirical_dist = Empirical(samples, weights)
    assert_equal(empirical_dist.event_shape, torch.Size(event_shape))
    assert_equal(empirical_dist.sample_size, 4)
    sample_to_score = torch.ones(event_shape, dtype=dtype) * 1.0
    assert_equal(empirical_dist.log_prob(sample_to_score), torch.tensor(0.25).log())
    samples = empirical_dist.sample(sample_shape=torch.Size((1000,)))
    zeros = torch.zeros(event_shape, dtype=dtype)
    ones = torch.ones(event_shape, dtype=dtype)
    # Count draws that are entirely zero (resp. entirely one) per event.
    num_zeros = samples.eq(zeros).contiguous().view(1000, -1).min(dim=-1)[0].float().sum()
    num_ones = samples.eq(ones).contiguous().view(1000, -1).min(dim=-1)[0].float().sum()
    assert_equal(num_zeros.item() / 1000, 0.75, prec=0.02)
    assert_equal(num_ones.item() / 1000, 0.25, prec=0.02)
@pytest.mark.parametrize("batch_shape", [[], [1], [2], [2, 3]])
@pytest.mark.parametrize("event_shape", [[], [1], [2, 3]])
@pytest.mark.parametrize("dtype", [torch.long, torch.float32, torch.float64])
def test_weighted_mean_var(event_shape, dtype, batch_shape):
    """Weighted mean/variance are exact for float dtypes; both properties
    raise ValueError for integral (torch.long) samples."""
    data = [(1.0, 0.5), (0.0, 1.5), (1.0, 0.5), (0.0, 1.5)]
    samples, weights = [], []
    for sample, weight in data:
        samples.append(sample * torch.ones(event_shape, dtype=dtype))
        # torch.long cannot hold log-weights; let torch infer a float dtype.
        weight_dtype = dtype if dtype is not torch.long else None
        weights.append(torch.tensor(weight, dtype=weight_dtype).log())
    samples = torch.stack(samples).expand(batch_shape + [4] + event_shape)
    weights = torch.stack(weights).expand(batch_shape + [4])
    empirical_dist = Empirical(samples, weights)
    if dtype in (torch.float32, torch.float64):
        true_mean = torch.ones(batch_shape + event_shape, dtype=dtype) * 0.25
        true_var = torch.ones(batch_shape + event_shape, dtype=dtype) * 0.1875
        assert_equal(empirical_dist.mean, true_mean)
        assert_equal(empirical_dist.variance, true_var)
    else:
        # BUG FIX: previously both statements shared one `pytest.raises` block,
        # so `.variance` never executed once `.mean` raised — it was unchecked.
        with pytest.raises(ValueError):
            empirical_dist.mean
        with pytest.raises(ValueError):
            empirical_dist.variance
def test_mean_var_non_nan():
    """Extremely negative (log-space) weights must not produce NaN statistics;
    identical samples give exact mean and zero variance."""
    true_mean = torch.randn([1, 2, 3])
    samples, weights = [], []
    for i in range(10):
        samples.append(true_mean)
        # -1000 in log space would underflow to 0 if exponentiated naively.
        weights.append(torch.tensor(-1000.))
    samples, weights = torch.stack(samples), torch.stack(weights)
    empirical_dist = Empirical(samples, weights)
    assert_equal(empirical_dist.mean, true_mean)
    assert_equal(empirical_dist.variance, torch.zeros_like(true_mean))
|
en
| 0.797527
|
# empirical samples with desired shape # +1 for agg dim # initial weight assignment # Value outside support returns -Inf # Vectorized ``log_prob`` raises ValueError
| 1.901624
| 2
|
vistrails/db/versions/v1_0_3/domain/auto_gen.py
|
celiafish/VisTrails
| 1
|
6626832
|
###############################################################################
##
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: <EMAIL>
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the University of Utah nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""generated automatically by auto_dao.py"""
import copy
class DBOpmWasGeneratedBy(object):
    """OPM 'wasGeneratedBy' record: an effect (artifact id) produced by a
    cause (process id), with a role, account ids and timing entries.

    NOTE(review): generated automatically by auto_dao.py — fix the generator,
    not this code. The db_deleted_* lists retain removed children until flushed.
    """
    vtType = 'opm_was_generated_by'
    def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None):
        self.db_deleted_effect = []
        self._db_effect = effect
        self.db_deleted_role = []
        self._db_role = role
        self.db_deleted_cause = []
        self._db_cause = cause
        self.db_deleted_accounts = []
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        self.db_deleted_opm_times = []
        if opm_times is None:
            self._db_opm_times = []
        else:
            self._db_opm_times = opm_times
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmWasGeneratedBy.do_copy(self)
    # Deep-copy this object and its children; new_ids=True allocates fresh ids
    # from id_scope and records the old->new mapping in id_remap.
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        cp = DBOpmWasGeneratedBy()
        if self._db_effect is not None:
            cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
        if self._db_role is not None:
            cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
        if self._db_cause is not None:
            cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        if self._db_opm_times is None:
            cp._db_opm_times = []
        else:
            cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    # Translate old_obj to the current schema version; trans_dict may supply
    # per-field conversion hooks keyed by class and field name.
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        if new_obj is None:
            new_obj = DBOpmWasGeneratedBy()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'effect' in class_dict:
            res = class_dict['effect'](old_obj, trans_dict)
            new_obj.db_effect = res
        elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
            obj = old_obj.db_effect
            new_obj.db_add_effect(DBOpmArtifactIdEffect.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
            for obj in old_obj.db_deleted_effect:
                n_obj = DBOpmArtifactIdEffect.update_version(obj, trans_dict)
                new_obj.db_deleted_effect.append(n_obj)
        if 'role' in class_dict:
            res = class_dict['role'](old_obj, trans_dict)
            new_obj.db_role = res
        elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
            obj = old_obj.db_role
            new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
            for obj in old_obj.db_deleted_role:
                n_obj = DBOpmRole.update_version(obj, trans_dict)
                new_obj.db_deleted_role.append(n_obj)
        if 'cause' in class_dict:
            res = class_dict['cause'](old_obj, trans_dict)
            new_obj.db_cause = res
        elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
            obj = old_obj.db_cause
            new_obj.db_add_cause(DBOpmProcessIdCause.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
            for obj in old_obj.db_deleted_cause:
                n_obj = DBOpmProcessIdCause.update_version(obj, trans_dict)
                new_obj.db_deleted_cause.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'opm_times' in class_dict:
            res = class_dict['opm_times'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_opm_time(obj)
        elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None:
            for obj in old_obj.db_opm_times:
                new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'):
            for obj in old_obj.db_deleted_opm_times:
                n_obj = DBOpmTime.update_version(obj, trans_dict)
                new_obj.db_deleted_opm_times.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    # Collect (object, parent_type, parent_id) tuples for this subtree;
    # orphan=True detaches children from this node as they are collected.
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        children = []
        if self._db_effect is not None:
            children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_effect = None
        if self._db_role is not None:
            children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_role = None
        if self._db_cause is not None:
            children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_cause = None
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        to_del = []
        for child in self.db_opm_times:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_opm_time(child)
        children.append((self, parent[0], parent[1]))
        return children
    # Return accumulated deleted children; remove=True also clears the lists.
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_effect)
        children.extend(self.db_deleted_role)
        children.extend(self.db_deleted_cause)
        children.extend(self.db_deleted_accounts)
        children.extend(self.db_deleted_opm_times)
        if remove:
            self.db_deleted_effect = []
            self.db_deleted_role = []
            self.db_deleted_cause = []
            self.db_deleted_accounts = []
            self.db_deleted_opm_times = []
        return children
    # True if this object or any child is dirty.
    def has_changes(self):
        if self.is_dirty:
            return True
        if self._db_effect is not None and self._db_effect.has_changes():
            return True
        if self._db_role is not None and self._db_role.has_changes():
            return True
        if self._db_cause is not None and self._db_cause.has_changes():
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        for child in self._db_opm_times:
            if child.has_changes():
                return True
        return False
    # Generated accessor boilerplate: get/set property plus add/change/delete
    # for each field; collection fields are unkeyed, so delete raises.
    def __get_db_effect(self):
        return self._db_effect
    def __set_db_effect(self, effect):
        self._db_effect = effect
        self.is_dirty = True
    db_effect = property(__get_db_effect, __set_db_effect)
    def db_add_effect(self, effect):
        self._db_effect = effect
    def db_change_effect(self, effect):
        self._db_effect = effect
    def db_delete_effect(self, effect):
        if not self.is_new:
            self.db_deleted_effect.append(self._db_effect)
        self._db_effect = None
    def __get_db_role(self):
        return self._db_role
    def __set_db_role(self, role):
        self._db_role = role
        self.is_dirty = True
    db_role = property(__get_db_role, __set_db_role)
    def db_add_role(self, role):
        self._db_role = role
    def db_change_role(self, role):
        self._db_role = role
    def db_delete_role(self, role):
        if not self.is_new:
            self.db_deleted_role.append(self._db_role)
        self._db_role = None
    def __get_db_cause(self):
        return self._db_cause
    def __set_db_cause(self, cause):
        self._db_cause = cause
        self.is_dirty = True
    db_cause = property(__get_db_cause, __set_db_cause)
    def db_add_cause(self, cause):
        self._db_cause = cause
    def db_change_cause(self, cause):
        self._db_cause = cause
    def db_delete_cause(self, cause):
        if not self.is_new:
            self.db_deleted_cause.append(self._db_cause)
        self._db_cause = None
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        return None
    def __get_db_opm_times(self):
        return self._db_opm_times
    def __set_db_opm_times(self, opm_times):
        self._db_opm_times = opm_times
        self.is_dirty = True
    db_opm_times = property(__get_db_opm_times, __set_db_opm_times)
    def db_get_opm_times(self):
        return self._db_opm_times
    def db_add_opm_time(self, opm_time):
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_change_opm_time(self, opm_time):
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_delete_opm_time(self, opm_time):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_opm_time(self, key):
        return None
class DBConfigKey(object):
    """Named configuration entry whose value is one of the config_* value
    types (str/int/float/bool) or a nested configuration.

    NOTE(review): generated automatically by auto_dao.py — fix the generator,
    not this code.
    """
    vtType = 'config_key'
    def __init__(self, value=None, name=None):
        self.db_deleted_value = []
        self._db_value = value
        self._db_name = name
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBConfigKey.do_copy(self)
    # Deep-copy; new_ids=True allocates fresh ids and records them in id_remap.
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        cp = DBConfigKey(name=self._db_name)
        if self._db_value is not None:
            cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    # Translate old_obj to the current schema; the value child is dispatched
    # on its vtType to the matching DBConfig* class.
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        if new_obj is None:
            new_obj = DBConfigKey()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'value' in class_dict:
            res = class_dict['value'](old_obj, trans_dict)
            new_obj.db_value = res
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            obj = old_obj.db_value
            if obj.vtType == 'config_str':
                new_obj.db_add_value(DBConfigStr.update_version(obj, trans_dict))
            elif obj.vtType == 'config_int':
                new_obj.db_add_value(DBConfigInt.update_version(obj, trans_dict))
            elif obj.vtType == 'config_float':
                new_obj.db_add_value(DBConfigFloat.update_version(obj, trans_dict))
            elif obj.vtType == 'config_bool':
                new_obj.db_add_value(DBConfigBool.update_version(obj, trans_dict))
            elif obj.vtType == 'configuration':
                new_obj.db_add_value(DBConfiguration.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
            for obj in old_obj.db_deleted_value:
                if obj.vtType == 'config_str':
                    n_obj = DBConfigStr.update_version(obj, trans_dict)
                    new_obj.db_deleted_value.append(n_obj)
                elif obj.vtType == 'config_int':
                    n_obj = DBConfigInt.update_version(obj, trans_dict)
                    new_obj.db_deleted_value.append(n_obj)
                elif obj.vtType == 'config_float':
                    n_obj = DBConfigFloat.update_version(obj, trans_dict)
                    new_obj.db_deleted_value.append(n_obj)
                elif obj.vtType == 'config_bool':
                    n_obj = DBConfigBool.update_version(obj, trans_dict)
                    new_obj.db_deleted_value.append(n_obj)
                elif obj.vtType == 'configuration':
                    n_obj = DBConfiguration.update_version(obj, trans_dict)
                    new_obj.db_deleted_value.append(n_obj)
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    # Collect (object, parent_type, parent_id) tuples; orphan=True detaches.
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        children = []
        if self._db_value is not None:
            children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_value = None
        children.append((self, parent[0], parent[1]))
        return children
    # Return accumulated deleted children; remove=True also clears the list.
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_value)
        if remove:
            self.db_deleted_value = []
        return children
    # True if this object or its value child is dirty.
    def has_changes(self):
        if self.is_dirty:
            return True
        if self._db_value is not None and self._db_value.has_changes():
            return True
        return False
    # Generated accessor boilerplate (get/set property + add/change/delete).
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        if not self.is_new:
            self.db_deleted_value.append(self._db_value)
        self._db_value = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
class DBMashupAlias(object):
vtType = 'mashup_alias'
def __init__(self, id=None, name=None, component=None):
self._db_id = id
self._db_name = name
self.db_deleted_component = []
self._db_component = component
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBMashupAlias.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBMashupAlias(id=self._db_id,
name=self._db_name)
if self._db_component is not None:
cp._db_component = self._db_component.do_copy(new_ids, id_scope, id_remap)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBMashupAlias()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'component' in class_dict:
res = class_dict['component'](old_obj, trans_dict)
new_obj.db_component = res
elif hasattr(old_obj, 'db_component') and old_obj.db_component is not None:
obj = old_obj.db_component
new_obj.db_add_component(DBMashupComponent.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_component') and hasattr(new_obj, 'db_deleted_component'):
for obj in old_obj.db_deleted_component:
n_obj = DBMashupComponent.update_version(obj, trans_dict)
new_obj.db_deleted_component.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
children = []
if self._db_component is not None:
children.extend(self._db_component.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
self._db_component = None
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_component)
if remove:
self.db_deleted_component = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_component is not None and self._db_component.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
    # --- 'name' accessor family (generated; setter marks the object dirty) ---
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        # scalar field: deleting just clears the value, nothing is queued
        self._db_name = None
    # --- 'component' accessor family (generated child-object slot) ---
    def __get_db_component(self):
        return self._db_component
    def __set_db_component(self, component):
        self._db_component = component
        self.is_dirty = True
    db_component = property(__get_db_component, __set_db_component)
    def db_add_component(self, component):
        self._db_component = component
    def db_change_component(self, component):
        self._db_component = component
    def db_delete_component(self, component):
        # child object: remember the old child for DB deletion unless this
        # object was never persisted
        if not self.is_new:
            self.db_deleted_component.append(self._db_component)
        self._db_component = None
    def getPrimaryKey(self):
        # the database primary key for this object is its 'id' field
        return self._db_id
class DBGroup(object):
    """Auto-generated persistence object for a VisTrails 'group' element.

    Holds a nested workflow together with its location, functions and
    annotations.  Like its sibling DB* classes it tracks an is_dirty /
    is_new state pair and keeps per-field db_deleted_* lists so the
    database layer can issue the corresponding DELETEs on save.
    """
    vtType = 'group'
    def __init__(self, id=None, workflow=None, cache=None, name=None, namespace=None, package=None, version=None, location=None, functions=None, annotations=None):
        self._db_id = id
        self.db_deleted_workflow = []
        self._db_workflow = workflow
        self._db_cache = cache
        self._db_name = name
        self._db_namespace = namespace
        self._db_package = package
        self._db_version = version
        self.db_deleted_location = []
        self._db_location = location
        self.db_deleted_functions = []
        self.db_functions_id_index = {}
        if functions is None:
            self._db_functions = []
        else:
            self._db_functions = functions
        # index children by id for O(1) lookup
        for v in self._db_functions:
            self.db_functions_id_index[v.db_id] = v
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        self.db_annotations_key_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
        # annotations are indexed both by id and by key
        for v in self._db_annotations:
            self.db_annotations_id_index[v.db_id] = v
            self.db_annotations_key_index[v.db_key] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBGroup.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this group; optionally allocate new ids via id_scope,
        recording old->new id pairs in id_remap."""
        cp = DBGroup(id=self._db_id,
                     cache=self._db_cache,
                     name=self._db_name,
                     namespace=self._db_namespace,
                     package=self._db_package,
                     version=self._db_version)
        if self._db_workflow is not None:
            # NOTE: the nested workflow is copied WITHOUT id remapping
            cp._db_workflow = self._db_workflow.do_copy()
        if self._db_location is not None:
            cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap)
        if self._db_functions is None:
            cp._db_functions = []
        else:
            cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into a (possibly newer-schema) DBGroup, applying
        any per-field translation callables found in trans_dict."""
        if new_obj is None:
            new_obj = DBGroup()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'workflow' in class_dict:
            res = class_dict['workflow'](old_obj, trans_dict)
            new_obj.db_workflow = res
        elif hasattr(old_obj, 'db_workflow') and old_obj.db_workflow is not None:
            obj = old_obj.db_workflow
            new_obj.db_add_workflow(DBWorkflow.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_workflow') and hasattr(new_obj, 'db_deleted_workflow'):
            for obj in old_obj.db_deleted_workflow:
                n_obj = DBWorkflow.update_version(obj, trans_dict)
                new_obj.db_deleted_workflow.append(n_obj)
        if 'cache' in class_dict:
            res = class_dict['cache'](old_obj, trans_dict)
            new_obj.db_cache = res
        elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None:
            new_obj.db_cache = old_obj.db_cache
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'namespace' in class_dict:
            res = class_dict['namespace'](old_obj, trans_dict)
            new_obj.db_namespace = res
        elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
            new_obj.db_namespace = old_obj.db_namespace
        if 'package' in class_dict:
            res = class_dict['package'](old_obj, trans_dict)
            new_obj.db_package = res
        elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
            new_obj.db_package = old_obj.db_package
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'location' in class_dict:
            res = class_dict['location'](old_obj, trans_dict)
            new_obj.db_location = res
        elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None:
            obj = old_obj.db_location
            new_obj.db_add_location(DBLocation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'):
            for obj in old_obj.db_deleted_location:
                n_obj = DBLocation.update_version(obj, trans_dict)
                new_obj.db_deleted_location.append(n_obj)
        if 'functions' in class_dict:
            res = class_dict['functions'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_function(obj)
        elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None:
            for obj in old_obj.db_functions:
                new_obj.db_add_function(DBFunction.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
            for obj in old_obj.db_deleted_functions:
                n_obj = DBFunction.update_version(obj, trans_dict)
                new_obj.db_deleted_functions.append(n_obj)
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples for this subtree;
        when orphan is True, child links are severed after collection."""
        children = []
        if self._db_location is not None:
            children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_location = None
        to_del = []
        for child in self.db_functions:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_function(child)
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return objects queued for deletion; optionally clear the queues."""
        children = []
        children.extend(self.db_deleted_workflow)
        children.extend(self.db_deleted_location)
        children.extend(self.db_deleted_functions)
        children.extend(self.db_deleted_annotations)
        if remove:
            self.db_deleted_workflow = []
            self.db_deleted_location = []
            self.db_deleted_functions = []
            self.db_deleted_annotations = []
        return children
    def has_changes(self):
        """True when this object or any (non-deleted) child is dirty."""
        if self.is_dirty:
            return True
        if self._db_workflow is not None and self._db_workflow.has_changes():
            return True
        if self._db_location is not None and self._db_location.has_changes():
            return True
        for child in self._db_functions:
            if child.has_changes():
                return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        return False
    # --- scalar accessor families (generated; property setter marks dirty,
    # --- db_add/db_change/db_delete bypass the dirty flag) ---
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_workflow(self):
        return self._db_workflow
    def __set_db_workflow(self, workflow):
        self._db_workflow = workflow
        self.is_dirty = True
    db_workflow = property(__get_db_workflow, __set_db_workflow)
    def db_add_workflow(self, workflow):
        self._db_workflow = workflow
    def db_change_workflow(self, workflow):
        self._db_workflow = workflow
    def db_delete_workflow(self, workflow):
        # child object: queue the old child for DB deletion unless this
        # object was never persisted
        if not self.is_new:
            self.db_deleted_workflow.append(self._db_workflow)
        self._db_workflow = None
    def __get_db_cache(self):
        return self._db_cache
    def __set_db_cache(self, cache):
        self._db_cache = cache
        self.is_dirty = True
    db_cache = property(__get_db_cache, __set_db_cache)
    def db_add_cache(self, cache):
        self._db_cache = cache
    def db_change_cache(self, cache):
        self._db_cache = cache
    def db_delete_cache(self, cache):
        self._db_cache = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_namespace(self):
        return self._db_namespace
    def __set_db_namespace(self, namespace):
        self._db_namespace = namespace
        self.is_dirty = True
    db_namespace = property(__get_db_namespace, __set_db_namespace)
    def db_add_namespace(self, namespace):
        self._db_namespace = namespace
    def db_change_namespace(self, namespace):
        self._db_namespace = namespace
    def db_delete_namespace(self, namespace):
        self._db_namespace = None
    def __get_db_package(self):
        return self._db_package
    def __set_db_package(self, package):
        self._db_package = package
        self.is_dirty = True
    db_package = property(__get_db_package, __set_db_package)
    def db_add_package(self, package):
        self._db_package = package
    def db_change_package(self, package):
        self._db_package = package
    def db_delete_package(self, package):
        self._db_package = None
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    def __get_db_location(self):
        return self._db_location
    def __set_db_location(self, location):
        self._db_location = location
        self.is_dirty = True
    db_location = property(__get_db_location, __set_db_location)
    def db_add_location(self, location):
        self._db_location = location
    def db_change_location(self, location):
        self._db_location = location
    def db_delete_location(self, location):
        if not self.is_new:
            self.db_deleted_location.append(self._db_location)
        self._db_location = None
    # --- keyed collection: functions (list + id index kept in sync) ---
    def __get_db_functions(self):
        return self._db_functions
    def __set_db_functions(self, functions):
        self._db_functions = functions
        self.is_dirty = True
    db_functions = property(__get_db_functions, __set_db_functions)
    def db_get_functions(self):
        return self._db_functions
    def db_add_function(self, function):
        self.is_dirty = True
        self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_change_function(self, function):
        # replace in place by id, or append if absent
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == function.db_id:
                self._db_functions[i] = function
                found = True
                break
        if not found:
            self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_delete_function(self, function):
        # NOTE: raises KeyError (index cleanup) if the id is not present
        self.is_dirty = True
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == function.db_id:
                if not self._db_functions[i].is_new:
                    self.db_deleted_functions.append(self._db_functions[i])
                del self._db_functions[i]
                break
        del self.db_functions_id_index[function.db_id]
    def db_get_function(self, key):
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == key:
                return self._db_functions[i]
        return None
    def db_get_function_by_id(self, key):
        return self.db_functions_id_index[key]
    def db_has_function_with_id(self, key):
        return key in self.db_functions_id_index
    # --- keyed collection: annotations (list + id and key indices) ---
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_change_annotation(self, annotation):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_delete_annotation(self, annotation):
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
        del self.db_annotations_key_index[annotation.db_key]
    def db_get_annotation(self, key):
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def db_get_annotation_by_key(self, key):
        return self.db_annotations_key_index[key]
    def db_has_annotation_with_key(self, key):
        return key in self.db_annotations_key_index
    def getPrimaryKey(self):
        # the database primary key for this object is its 'id' field
        return self._db_id
class DBOpmWasControlledBy(object):
    """Auto-generated persistence object for an OPM 'wasControlledBy' edge.

    Links an effect (process id) to a cause (agent id) with a role, plus
    account/start/end collections.  This class has no db_id of its own;
    the non-keyed collections only support append, never keyed delete.
    """
    vtType = 'opm_was_controlled_by'
    def __init__(self, effect=None, role=None, cause=None, accounts=None, starts=None, ends=None):
        self.db_deleted_effect = []
        self._db_effect = effect
        self.db_deleted_role = []
        self._db_role = role
        self.db_deleted_cause = []
        self._db_cause = cause
        self.db_deleted_accounts = []
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        self.db_deleted_starts = []
        if starts is None:
            self._db_starts = []
        else:
            self._db_starts = starts
        self.db_deleted_ends = []
        if ends is None:
            self._db_ends = []
        else:
            self._db_ends = ends
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmWasControlledBy.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this edge; optionally allocate new ids via id_scope.

        NOTE(review): the new-ids branch reads self.db_id, which this class
        never defines, so new_ids=True would raise AttributeError here --
        presumably never exercised for this type.
        """
        cp = DBOpmWasControlledBy()
        if self._db_effect is not None:
            cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
        if self._db_role is not None:
            cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
        if self._db_cause is not None:
            cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        if self._db_starts is None:
            cp._db_starts = []
        else:
            cp._db_starts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_starts]
        if self._db_ends is None:
            cp._db_ends = []
        else:
            cp._db_ends = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_ends]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into a DBOpmWasControlledBy, applying any
        per-field translation callables found in trans_dict."""
        if new_obj is None:
            new_obj = DBOpmWasControlledBy()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'effect' in class_dict:
            res = class_dict['effect'](old_obj, trans_dict)
            new_obj.db_effect = res
        elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
            obj = old_obj.db_effect
            new_obj.db_add_effect(DBOpmProcessIdEffect.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
            for obj in old_obj.db_deleted_effect:
                n_obj = DBOpmProcessIdEffect.update_version(obj, trans_dict)
                new_obj.db_deleted_effect.append(n_obj)
        if 'role' in class_dict:
            res = class_dict['role'](old_obj, trans_dict)
            new_obj.db_role = res
        elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
            obj = old_obj.db_role
            new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
            for obj in old_obj.db_deleted_role:
                n_obj = DBOpmRole.update_version(obj, trans_dict)
                new_obj.db_deleted_role.append(n_obj)
        if 'cause' in class_dict:
            res = class_dict['cause'](old_obj, trans_dict)
            new_obj.db_cause = res
        elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
            obj = old_obj.db_cause
            new_obj.db_add_cause(DBOpmAgentId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
            for obj in old_obj.db_deleted_cause:
                n_obj = DBOpmAgentId.update_version(obj, trans_dict)
                new_obj.db_deleted_cause.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'starts' in class_dict:
            res = class_dict['starts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_start(obj)
        elif hasattr(old_obj, 'db_starts') and old_obj.db_starts is not None:
            for obj in old_obj.db_starts:
                new_obj.db_add_start(DBOpmTime.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_starts') and hasattr(new_obj, 'db_deleted_starts'):
            for obj in old_obj.db_deleted_starts:
                n_obj = DBOpmTime.update_version(obj, trans_dict)
                new_obj.db_deleted_starts.append(n_obj)
        if 'ends' in class_dict:
            res = class_dict['ends'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_end(obj)
        elif hasattr(old_obj, 'db_ends') and old_obj.db_ends is not None:
            for obj in old_obj.db_ends:
                new_obj.db_add_end(DBOpmTime.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_ends') and hasattr(new_obj, 'db_deleted_ends'):
            for obj in old_obj.db_deleted_ends:
                n_obj = DBOpmTime.update_version(obj, trans_dict)
                new_obj.db_deleted_ends.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples for this subtree;
        when orphan is True, child links are severed after collection.

        NOTE(review): reads self.db_id (undefined on this class) and the
        orphan path calls db_delete_* which always raises for the non-keyed
        collections -- presumably only ever called with empty collections.
        """
        children = []
        if self._db_effect is not None:
            children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_effect = None
        if self._db_role is not None:
            children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_role = None
        if self._db_cause is not None:
            children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_cause = None
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        to_del = []
        for child in self.db_starts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_start(child)
        to_del = []
        for child in self.db_ends:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_end(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return objects queued for deletion; optionally clear the queues."""
        children = []
        children.extend(self.db_deleted_effect)
        children.extend(self.db_deleted_role)
        children.extend(self.db_deleted_cause)
        children.extend(self.db_deleted_accounts)
        children.extend(self.db_deleted_starts)
        children.extend(self.db_deleted_ends)
        if remove:
            self.db_deleted_effect = []
            self.db_deleted_role = []
            self.db_deleted_cause = []
            self.db_deleted_accounts = []
            self.db_deleted_starts = []
            self.db_deleted_ends = []
        return children
    def has_changes(self):
        """True when this object or any (non-deleted) child is dirty."""
        if self.is_dirty:
            return True
        if self._db_effect is not None and self._db_effect.has_changes():
            return True
        if self._db_role is not None and self._db_role.has_changes():
            return True
        if self._db_cause is not None and self._db_cause.has_changes():
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        for child in self._db_starts:
            if child.has_changes():
                return True
        for child in self._db_ends:
            if child.has_changes():
                return True
        return False
    # --- child-object accessor families (generated) ---
    def __get_db_effect(self):
        return self._db_effect
    def __set_db_effect(self, effect):
        self._db_effect = effect
        self.is_dirty = True
    db_effect = property(__get_db_effect, __set_db_effect)
    def db_add_effect(self, effect):
        self._db_effect = effect
    def db_change_effect(self, effect):
        self._db_effect = effect
    def db_delete_effect(self, effect):
        if not self.is_new:
            self.db_deleted_effect.append(self._db_effect)
        self._db_effect = None
    def __get_db_role(self):
        return self._db_role
    def __set_db_role(self, role):
        self._db_role = role
        self.is_dirty = True
    db_role = property(__get_db_role, __set_db_role)
    def db_add_role(self, role):
        self._db_role = role
    def db_change_role(self, role):
        self._db_role = role
    def db_delete_role(self, role):
        if not self.is_new:
            self.db_deleted_role.append(self._db_role)
        self._db_role = None
    def __get_db_cause(self):
        return self._db_cause
    def __set_db_cause(self, cause):
        self._db_cause = cause
        self.is_dirty = True
    db_cause = property(__get_db_cause, __set_db_cause)
    def db_add_cause(self, cause):
        self._db_cause = cause
    def db_change_cause(self, cause):
        self._db_cause = cause
    def db_delete_cause(self, cause):
        if not self.is_new:
            self.db_deleted_cause.append(self._db_cause)
        self._db_cause = None
    # --- non-keyed collections: change appends (no id to match on) and
    # --- delete is unsupported by design ---
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        return None
    def __get_db_starts(self):
        return self._db_starts
    def __set_db_starts(self, starts):
        self._db_starts = starts
        self.is_dirty = True
    db_starts = property(__get_db_starts, __set_db_starts)
    def db_get_starts(self):
        return self._db_starts
    def db_add_start(self, start):
        self.is_dirty = True
        self._db_starts.append(start)
    def db_change_start(self, start):
        self.is_dirty = True
        self._db_starts.append(start)
    def db_delete_start(self, start):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_start(self, key):
        return None
    def __get_db_ends(self):
        return self._db_ends
    def __set_db_ends(self, ends):
        self._db_ends = ends
        self.is_dirty = True
    db_ends = property(__get_db_ends, __set_db_ends)
    def db_get_ends(self):
        return self._db_ends
    def db_add_end(self, end):
        self.is_dirty = True
        self._db_ends.append(end)
    def db_change_end(self, end):
        self.is_dirty = True
        self._db_ends.append(end)
    def db_delete_end(self, end):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_end(self, key):
        return None
class DBAdd(object):
    """Auto-generated persistence object for an 'add' action operation.

    Records the addition of a single child object ('data', whose concrete
    type is dispatched on vtType) under a parent identified by
    (parentObjType, parentObjId).
    """
    vtType = 'add'
    def __init__(self, data=None, id=None, what=None, objectId=None, parentObjId=None, parentObjType=None):
        self.db_deleted_data = []
        self._db_data = data
        self._db_id = id
        self._db_what = what
        self._db_objectId = objectId
        self._db_parentObjId = parentObjId
        self._db_parentObjType = parentObjType
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBAdd.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this op; optionally allocate new ids via id_scope and
        remap objectId/parentObjId through id_remap."""
        cp = DBAdd(id=self._db_id,
                   what=self._db_what,
                   objectId=self._db_objectId,
                   parentObjId=self._db_parentObjId,
                   parentObjType=self._db_parentObjType)
        if self._db_data is not None:
            cp._db_data = self._db_data.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            # foreign keys are remapped via (type, old_id) pairs
            if hasattr(self, 'db_objectId') and (self._db_what, self._db_objectId) in id_remap:
                cp._db_objectId = id_remap[(self._db_what, self._db_objectId)]
            if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap:
                cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into a DBAdd, applying any per-field translation
        callables in trans_dict; 'data' dispatches on the child's vtType."""
        if new_obj is None:
            new_obj = DBAdd()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'data' in class_dict:
            res = class_dict['data'](old_obj, trans_dict)
            new_obj.db_data = res
        elif hasattr(old_obj, 'db_data') and old_obj.db_data is not None:
            obj = old_obj.db_data
            if obj.vtType == 'module':
                new_obj.db_add_data(DBModule.update_version(obj, trans_dict))
            elif obj.vtType == 'location':
                new_obj.db_add_data(DBLocation.update_version(obj, trans_dict))
            elif obj.vtType == 'annotation':
                new_obj.db_add_data(DBAnnotation.update_version(obj, trans_dict))
            elif obj.vtType == 'function':
                new_obj.db_add_data(DBFunction.update_version(obj, trans_dict))
            elif obj.vtType == 'connection':
                new_obj.db_add_data(DBConnection.update_version(obj, trans_dict))
            elif obj.vtType == 'port':
                new_obj.db_add_data(DBPort.update_version(obj, trans_dict))
            elif obj.vtType == 'parameter':
                new_obj.db_add_data(DBParameter.update_version(obj, trans_dict))
            elif obj.vtType == 'portSpec':
                new_obj.db_add_data(DBPortSpec.update_version(obj, trans_dict))
            elif obj.vtType == 'abstraction':
                new_obj.db_add_data(DBAbstraction.update_version(obj, trans_dict))
            elif obj.vtType == 'group':
                new_obj.db_add_data(DBGroup.update_version(obj, trans_dict))
            elif obj.vtType == 'other':
                new_obj.db_add_data(DBOther.update_version(obj, trans_dict))
            elif obj.vtType == 'plugin_data':
                new_obj.db_add_data(DBPluginData.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_data') and hasattr(new_obj, 'db_deleted_data'):
            for obj in old_obj.db_deleted_data:
                if obj.vtType == 'module':
                    n_obj = DBModule.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'location':
                    n_obj = DBLocation.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'annotation':
                    n_obj = DBAnnotation.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'function':
                    n_obj = DBFunction.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'connection':
                    n_obj = DBConnection.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'port':
                    n_obj = DBPort.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'parameter':
                    n_obj = DBParameter.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'portSpec':
                    n_obj = DBPortSpec.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'abstraction':
                    n_obj = DBAbstraction.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'group':
                    n_obj = DBGroup.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'other':
                    n_obj = DBOther.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'plugin_data':
                    n_obj = DBPluginData.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'what' in class_dict:
            res = class_dict['what'](old_obj, trans_dict)
            new_obj.db_what = res
        elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None:
            new_obj.db_what = old_obj.db_what
        if 'objectId' in class_dict:
            res = class_dict['objectId'](old_obj, trans_dict)
            new_obj.db_objectId = res
        elif hasattr(old_obj, 'db_objectId') and old_obj.db_objectId is not None:
            new_obj.db_objectId = old_obj.db_objectId
        if 'parentObjId' in class_dict:
            res = class_dict['parentObjId'](old_obj, trans_dict)
            new_obj.db_parentObjId = res
        elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None:
            new_obj.db_parentObjId = old_obj.db_parentObjId
        if 'parentObjType' in class_dict:
            res = class_dict['parentObjType'](old_obj, trans_dict)
            new_obj.db_parentObjType = res
        elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None:
            new_obj.db_parentObjType = old_obj.db_parentObjType
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples for this subtree;
        when orphan is True the data link is severed after collection."""
        children = []
        if self._db_data is not None:
            children.extend(self._db_data.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_data = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return objects queued for deletion; optionally clear the queue."""
        children = []
        children.extend(self.db_deleted_data)
        if remove:
            self.db_deleted_data = []
        return children
    def has_changes(self):
        """True when this object or its data child is dirty."""
        if self.is_dirty:
            return True
        if self._db_data is not None and self._db_data.has_changes():
            return True
        return False
    # --- accessor families (generated; property setter marks dirty,
    # --- db_add/db_change/db_delete bypass the dirty flag) ---
    def __get_db_data(self):
        return self._db_data
    def __set_db_data(self, data):
        self._db_data = data
        self.is_dirty = True
    db_data = property(__get_db_data, __set_db_data)
    def db_add_data(self, data):
        self._db_data = data
    def db_change_data(self, data):
        self._db_data = data
    def db_delete_data(self, data):
        # child object: queue the old child for DB deletion unless this
        # object was never persisted
        if not self.is_new:
            self.db_deleted_data.append(self._db_data)
        self._db_data = None
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_what(self):
        return self._db_what
    def __set_db_what(self, what):
        self._db_what = what
        self.is_dirty = True
    db_what = property(__get_db_what, __set_db_what)
    def db_add_what(self, what):
        self._db_what = what
    def db_change_what(self, what):
        self._db_what = what
    def db_delete_what(self, what):
        self._db_what = None
    def __get_db_objectId(self):
        return self._db_objectId
    def __set_db_objectId(self, objectId):
        self._db_objectId = objectId
        self.is_dirty = True
    db_objectId = property(__get_db_objectId, __set_db_objectId)
    def db_add_objectId(self, objectId):
        self._db_objectId = objectId
    def db_change_objectId(self, objectId):
        self._db_objectId = objectId
    def db_delete_objectId(self, objectId):
        self._db_objectId = None
    def __get_db_parentObjId(self):
        return self._db_parentObjId
    def __set_db_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
        self.is_dirty = True
    db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId)
    def db_add_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
    def db_change_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
    def db_delete_parentObjId(self, parentObjId):
        self._db_parentObjId = None
    def __get_db_parentObjType(self):
        return self._db_parentObjType
    def __set_db_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
        self.is_dirty = True
    db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType)
    def db_add_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
    def db_change_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
    def db_delete_parentObjType(self, parentObjType):
        self._db_parentObjType = None
    def getPrimaryKey(self):
        # the database primary key for this object is its 'id' field
        return self._db_id
class DBProvGeneration(object):
    """PROV generation record: links a prov_entity to the prov_activity that
    produced it, with the entity's prov_role in that activity."""
    vtType = 'prov_generation'
    def __init__(self, prov_entity=None, prov_activity=None, prov_role=None):
        self.db_deleted_prov_entity = []
        self._db_prov_entity = prov_entity
        self.db_deleted_prov_activity = []
        self._db_prov_activity = prov_activity
        self._db_prov_role = prov_role
        # New objects start flagged dirty so they get written on the next save.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBProvGeneration.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy; with new_ids, allocate a fresh id from id_scope and
        record the old->new mapping in id_remap.

        NOTE(review): this class defines no db_id property, yet the new_ids
        branch reads self.db_id — confirm it is set elsewhere before copying
        with new_ids=True.
        """
        cp = DBProvGeneration(prov_role=self._db_prov_role)
        if self._db_prov_entity is not None:
            cp._db_prov_entity = self._db_prov_entity.do_copy(new_ids, id_scope, id_remap)
        if self._db_prov_activity is not None:
            cp._db_prov_activity = self._db_prov_activity.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to this schema version, applying per-field hook
        functions from trans_dict when present, else copying fields over."""
        if new_obj is None:
            new_obj = DBProvGeneration()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'prov_entity' in class_dict:
            res = class_dict['prov_entity'](old_obj, trans_dict)
            new_obj.db_prov_entity = res
        elif hasattr(old_obj, 'db_prov_entity') and old_obj.db_prov_entity is not None:
            obj = old_obj.db_prov_entity
            new_obj.db_add_prov_entity(DBRefProvEntity.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_entity') and hasattr(new_obj, 'db_deleted_prov_entity'):
            for obj in old_obj.db_deleted_prov_entity:
                n_obj = DBRefProvEntity.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_entity.append(n_obj)
        if 'prov_activity' in class_dict:
            res = class_dict['prov_activity'](old_obj, trans_dict)
            new_obj.db_prov_activity = res
        elif hasattr(old_obj, 'db_prov_activity') and old_obj.db_prov_activity is not None:
            obj = old_obj.db_prov_activity
            new_obj.db_add_prov_activity(DBRefProvActivity.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_activity') and hasattr(new_obj, 'db_deleted_prov_activity'):
            for obj in old_obj.db_deleted_prov_activity:
                n_obj = DBRefProvActivity.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_activity.append(n_obj)
        if 'prov_role' in class_dict:
            res = class_dict['prov_role'](old_obj, trans_dict)
            new_obj.db_prov_role = res
        elif hasattr(old_obj, 'db_prov_role') and old_obj.db_prov_role is not None:
            new_obj.db_prov_role = old_obj.db_prov_role
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object and
        its references; orphan=True detaches the references."""
        children = []
        if self._db_prov_entity is not None:
            children.extend(self._db_prov_entity.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_prov_entity = None
        if self._db_prov_activity is not None:
            children.extend(self._db_prov_activity.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_prov_activity = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return deleted child objects; remove=True also clears the lists."""
        children = []
        children.extend(self.db_deleted_prov_entity)
        children.extend(self.db_deleted_prov_activity)
        if remove:
            self.db_deleted_prov_entity = []
            self.db_deleted_prov_activity = []
        return children
    def has_changes(self):
        """True when this object or either child reference is dirty."""
        if self.is_dirty:
            return True
        if self._db_prov_entity is not None and self._db_prov_entity.has_changes():
            return True
        if self._db_prov_activity is not None and self._db_prov_activity.has_changes():
            return True
        return False
    # --- db_prov_entity accessors --------------------------------------------
    def __get_db_prov_entity(self):
        return self._db_prov_entity
    def __set_db_prov_entity(self, prov_entity):
        # Property setter marks the object dirty; db_add/db_change do not.
        self._db_prov_entity = prov_entity
        self.is_dirty = True
    db_prov_entity = property(__get_db_prov_entity, __set_db_prov_entity)
    def db_add_prov_entity(self, prov_entity):
        self._db_prov_entity = prov_entity
    def db_change_prov_entity(self, prov_entity):
        self._db_prov_entity = prov_entity
    def db_delete_prov_entity(self, prov_entity):
        # Keep the old value for DB deletion unless the object is new.
        if not self.is_new:
            self.db_deleted_prov_entity.append(self._db_prov_entity)
        self._db_prov_entity = None
    # --- db_prov_activity accessors --------------------------------------------
    def __get_db_prov_activity(self):
        return self._db_prov_activity
    def __set_db_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
        self.is_dirty = True
    db_prov_activity = property(__get_db_prov_activity, __set_db_prov_activity)
    def db_add_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
    def db_change_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
    def db_delete_prov_activity(self, prov_activity):
        if not self.is_new:
            self.db_deleted_prov_activity.append(self._db_prov_activity)
        self._db_prov_activity = None
    # --- db_prov_role accessors --------------------------------------------
    def __get_db_prov_role(self):
        return self._db_prov_role
    def __set_db_prov_role(self, prov_role):
        self._db_prov_role = prov_role
        self.is_dirty = True
    db_prov_role = property(__get_db_prov_role, __set_db_prov_role)
    def db_add_prov_role(self, prov_role):
        self._db_prov_role = prov_role
    def db_change_prov_role(self, prov_role):
        self._db_prov_role = prov_role
    def db_delete_prov_role(self, prov_role):
        self._db_prov_role = None
class DBOpmUsed(object):
    """OPM 'used' edge: a process (effect) consumed an artifact (cause) in a
    given role, within zero or more accounts, with optional timestamps."""
    vtType = 'opm_used'
    def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None):
        self.db_deleted_effect = []
        self._db_effect = effect
        self.db_deleted_role = []
        self._db_role = role
        self.db_deleted_cause = []
        self._db_cause = cause
        self.db_deleted_accounts = []
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        self.db_deleted_opm_times = []
        if opm_times is None:
            self._db_opm_times = []
        else:
            self._db_opm_times = opm_times
        # New objects start flagged dirty so they get written on the next save.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmUsed.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy; with new_ids, allocate a fresh id from id_scope and
        record the old->new mapping in id_remap.

        NOTE(review): this class defines no db_id property, yet the new_ids
        branch reads self.db_id — confirm it is set elsewhere.
        """
        cp = DBOpmUsed()
        if self._db_effect is not None:
            cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
        if self._db_role is not None:
            cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
        if self._db_cause is not None:
            cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        if self._db_opm_times is None:
            cp._db_opm_times = []
        else:
            cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to this schema version, applying per-field hook
        functions from trans_dict when present, else copying fields over."""
        if new_obj is None:
            new_obj = DBOpmUsed()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'effect' in class_dict:
            res = class_dict['effect'](old_obj, trans_dict)
            new_obj.db_effect = res
        elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
            obj = old_obj.db_effect
            new_obj.db_add_effect(DBOpmProcessIdEffect.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
            for obj in old_obj.db_deleted_effect:
                n_obj = DBOpmProcessIdEffect.update_version(obj, trans_dict)
                new_obj.db_deleted_effect.append(n_obj)
        if 'role' in class_dict:
            res = class_dict['role'](old_obj, trans_dict)
            new_obj.db_role = res
        elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
            obj = old_obj.db_role
            new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
            for obj in old_obj.db_deleted_role:
                n_obj = DBOpmRole.update_version(obj, trans_dict)
                new_obj.db_deleted_role.append(n_obj)
        if 'cause' in class_dict:
            res = class_dict['cause'](old_obj, trans_dict)
            new_obj.db_cause = res
        elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
            obj = old_obj.db_cause
            new_obj.db_add_cause(DBOpmArtifactIdCause.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
            for obj in old_obj.db_deleted_cause:
                n_obj = DBOpmArtifactIdCause.update_version(obj, trans_dict)
                new_obj.db_deleted_cause.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'opm_times' in class_dict:
            res = class_dict['opm_times'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_opm_time(obj)
        elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None:
            for obj in old_obj.db_opm_times:
                new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'):
            for obj in old_obj.db_deleted_opm_times:
                n_obj = DBOpmTime.update_version(obj, trans_dict)
                new_obj.db_deleted_opm_times.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object and
        its children; orphan=True detaches the children.

        NOTE(review): db_delete_account / db_delete_opm_time below raise
        unconditionally, so the orphan path cannot complete for non-empty
        accounts/opm_times — confirm orphan=True is never used here.
        """
        children = []
        if self._db_effect is not None:
            children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_effect = None
        if self._db_role is not None:
            children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_role = None
        if self._db_cause is not None:
            children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_cause = None
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        to_del = []
        for child in self.db_opm_times:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_opm_time(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return deleted child objects; remove=True also clears the lists."""
        children = []
        children.extend(self.db_deleted_effect)
        children.extend(self.db_deleted_role)
        children.extend(self.db_deleted_cause)
        children.extend(self.db_deleted_accounts)
        children.extend(self.db_deleted_opm_times)
        if remove:
            self.db_deleted_effect = []
            self.db_deleted_role = []
            self.db_deleted_cause = []
            self.db_deleted_accounts = []
            self.db_deleted_opm_times = []
        return children
    def has_changes(self):
        """True when this object or any child is dirty."""
        if self.is_dirty:
            return True
        if self._db_effect is not None and self._db_effect.has_changes():
            return True
        if self._db_role is not None and self._db_role.has_changes():
            return True
        if self._db_cause is not None and self._db_cause.has_changes():
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        for child in self._db_opm_times:
            if child.has_changes():
                return True
        return False
    # --- db_effect accessors --------------------------------------------
    def __get_db_effect(self):
        return self._db_effect
    def __set_db_effect(self, effect):
        # Property setter marks the object dirty; db_add/db_change do not.
        self._db_effect = effect
        self.is_dirty = True
    db_effect = property(__get_db_effect, __set_db_effect)
    def db_add_effect(self, effect):
        self._db_effect = effect
    def db_change_effect(self, effect):
        self._db_effect = effect
    def db_delete_effect(self, effect):
        # Keep the old value for DB deletion unless the object is new.
        if not self.is_new:
            self.db_deleted_effect.append(self._db_effect)
        self._db_effect = None
    # --- db_role accessors --------------------------------------------
    def __get_db_role(self):
        return self._db_role
    def __set_db_role(self, role):
        self._db_role = role
        self.is_dirty = True
    db_role = property(__get_db_role, __set_db_role)
    def db_add_role(self, role):
        self._db_role = role
    def db_change_role(self, role):
        self._db_role = role
    def db_delete_role(self, role):
        if not self.is_new:
            self.db_deleted_role.append(self._db_role)
        self._db_role = None
    # --- db_cause accessors --------------------------------------------
    def __get_db_cause(self):
        return self._db_cause
    def __set_db_cause(self, cause):
        self._db_cause = cause
        self.is_dirty = True
    db_cause = property(__get_db_cause, __set_db_cause)
    def db_add_cause(self, cause):
        self._db_cause = cause
    def db_change_cause(self, cause):
        self._db_cause = cause
    def db_delete_cause(self, cause):
        if not self.is_new:
            self.db_deleted_cause.append(self._db_cause)
        self._db_cause = None
    # --- db_accounts (non-keyed list) accessors -----------------------------
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        # Non-keyed list: 'change' appends, same as add.
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        # Always raises: non-keyed children cannot be deleted individually.
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        # Non-keyed children cannot be looked up by key.
        return None
    # --- db_opm_times (non-keyed list) accessors ----------------------------
    def __get_db_opm_times(self):
        return self._db_opm_times
    def __set_db_opm_times(self, opm_times):
        self._db_opm_times = opm_times
        self.is_dirty = True
    db_opm_times = property(__get_db_opm_times, __set_db_opm_times)
    def db_get_opm_times(self):
        return self._db_opm_times
    def db_add_opm_time(self, opm_time):
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_change_opm_time(self, opm_time):
        # Non-keyed list: 'change' appends, same as add.
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_delete_opm_time(self, opm_time):
        # Always raises: non-keyed children cannot be deleted individually.
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_opm_time(self, key):
        # Non-keyed children cannot be looked up by key.
        return None
class DBOpmArtifactIdCause(object):
    """Reference to the OPM artifact playing the 'cause' role of an edge;
    stores only the referenced artifact's id."""
    vtType = 'opm_artifact_id_cause'

    def __init__(self, id=None):
        self._db_id = id
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmArtifactIdCause.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this reference; with new_ids, allocate a fresh id from
        id_scope, record the mapping in id_remap, and rewrite the stored
        artifact id if the target artifact was itself remapped."""
        clone = DBOpmArtifactIdCause(id=self._db_id)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            remap_type = (id_scope.remap[self.vtType]
                          if self.vtType in id_scope.remap else self.vtType)
            id_remap[(remap_type, self.db_id)] = fresh
            clone.db_id = fresh
            if hasattr(self, 'db_id') and ('opm_artifact', self._db_id) in id_remap:
                clone._db_id = id_remap[('opm_artifact', self._db_id)]
        else:
            clone.is_dirty = self.is_dirty
            clone.is_new = self.is_new
        return clone

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Migrate old_obj across a schema version via per-field hooks."""
        target = DBOpmArtifactIdCause() if new_obj is None else new_obj
        hooks = trans_dict.get(target.__class__.__name__, {})
        if 'id' in hooks:
            target.db_id = hooks['id'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_id', None) is not None:
            target.db_id = old_obj.db_id
        target.is_new = old_obj.is_new
        target.is_dirty = old_obj.is_dirty
        return target

    def db_children(self, parent=(None, None), orphan=False, for_action=False):
        """Leaf object: only itself, tagged with the parent's type and id."""
        return [(self,) + tuple(parent)]

    def db_deleted_children(self, remove=False):
        """Leaf object: nothing is ever tracked for deletion."""
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    @property
    def db_id(self):
        return self._db_id

    @db_id.setter
    def db_id(self, id):
        # Assigning through the property marks the object dirty.
        self._db_id = id
        self.is_dirty = True

    def db_add_id(self, id):
        self._db_id = id

    def db_change_id(self, id):
        self._db_id = id

    def db_delete_id(self, id):
        self._db_id = None
class DBRefProvEntity(object):
    """Pointer to a PROV entity, stored as the referenced entity's id."""
    vtType = 'ref_prov_entity'

    def __init__(self, prov_ref=None):
        self._db_prov_ref = prov_ref
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBRefProvEntity.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this reference; with new_ids, allocate a fresh id and rewrite
        the stored entity id if the target entity was itself remapped.

        NOTE(review): this class defines no db_id property; the new_ids branch
        reads self.db_id, which must have been set elsewhere as a plain
        attribute — confirm before copying with new_ids=True.
        """
        clone = DBRefProvEntity(prov_ref=self._db_prov_ref)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            remap_type = (id_scope.remap[self.vtType]
                          if self.vtType in id_scope.remap else self.vtType)
            id_remap[(remap_type, self.db_id)] = fresh
            clone.db_id = fresh
            if hasattr(self, 'db_prov_ref') and ('prov_entity', self._db_prov_ref) in id_remap:
                clone._db_prov_ref = id_remap[('prov_entity', self._db_prov_ref)]
        else:
            clone.is_dirty = self.is_dirty
            clone.is_new = self.is_new
        return clone

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Migrate old_obj across a schema version via per-field hooks."""
        target = DBRefProvEntity() if new_obj is None else new_obj
        hooks = trans_dict.get(target.__class__.__name__, {})
        if 'prov_ref' in hooks:
            target.db_prov_ref = hooks['prov_ref'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_prov_ref', None) is not None:
            target.db_prov_ref = old_obj.db_prov_ref
        target.is_new = old_obj.is_new
        target.is_dirty = old_obj.is_dirty
        return target

    def db_children(self, parent=(None, None), orphan=False, for_action=False):
        """Leaf object: only itself, tagged with the parent's type and id."""
        return [(self,) + tuple(parent)]

    def db_deleted_children(self, remove=False):
        """Leaf object: nothing is ever tracked for deletion."""
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    @property
    def db_prov_ref(self):
        return self._db_prov_ref

    @db_prov_ref.setter
    def db_prov_ref(self, prov_ref):
        # Assigning through the property marks the object dirty.
        self._db_prov_ref = prov_ref
        self.is_dirty = True

    def db_add_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref

    def db_change_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref

    def db_delete_prov_ref(self, prov_ref):
        self._db_prov_ref = None
class DBVtConnection(object):
    """Record of a VisTrails connection: source/destination modules, their
    port names, and the port signatures, keyed by id."""
    vtType = 'vt_connection'
    def __init__(self, id=None, vt_source=None, vt_dest=None, vt_source_port=None, vt_dest_port=None, vt_source_signature=None, vt_dest_signature=None):
        self._db_id = id
        self._db_vt_source = vt_source
        self._db_vt_dest = vt_dest
        self._db_vt_source_port = vt_source_port
        self._db_vt_dest_port = vt_dest_port
        self._db_vt_source_signature = vt_source_signature
        self._db_vt_dest_signature = vt_dest_signature
        # New objects start flagged dirty so they get written on the next save.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBVtConnection.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy all scalar fields; with new_ids, allocate a fresh id from
        id_scope and record the old->new mapping in id_remap."""
        cp = DBVtConnection(id=self._db_id,
                            vt_source=self._db_vt_source,
                            vt_dest=self._db_vt_dest,
                            vt_source_port=self._db_vt_source_port,
                            vt_dest_port=self._db_vt_dest_port,
                            vt_source_signature=self._db_vt_source_signature,
                            vt_dest_signature=self._db_vt_dest_signature)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to this schema version, applying per-field hook
        functions from trans_dict when present, else copying fields over."""
        if new_obj is None:
            new_obj = DBVtConnection()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'vt_source' in class_dict:
            res = class_dict['vt_source'](old_obj, trans_dict)
            new_obj.db_vt_source = res
        elif hasattr(old_obj, 'db_vt_source') and old_obj.db_vt_source is not None:
            new_obj.db_vt_source = old_obj.db_vt_source
        if 'vt_dest' in class_dict:
            res = class_dict['vt_dest'](old_obj, trans_dict)
            new_obj.db_vt_dest = res
        elif hasattr(old_obj, 'db_vt_dest') and old_obj.db_vt_dest is not None:
            new_obj.db_vt_dest = old_obj.db_vt_dest
        if 'vt_source_port' in class_dict:
            res = class_dict['vt_source_port'](old_obj, trans_dict)
            new_obj.db_vt_source_port = res
        elif hasattr(old_obj, 'db_vt_source_port') and old_obj.db_vt_source_port is not None:
            new_obj.db_vt_source_port = old_obj.db_vt_source_port
        if 'vt_dest_port' in class_dict:
            res = class_dict['vt_dest_port'](old_obj, trans_dict)
            new_obj.db_vt_dest_port = res
        elif hasattr(old_obj, 'db_vt_dest_port') and old_obj.db_vt_dest_port is not None:
            new_obj.db_vt_dest_port = old_obj.db_vt_dest_port
        if 'vt_source_signature' in class_dict:
            res = class_dict['vt_source_signature'](old_obj, trans_dict)
            new_obj.db_vt_source_signature = res
        elif hasattr(old_obj, 'db_vt_source_signature') and old_obj.db_vt_source_signature is not None:
            new_obj.db_vt_source_signature = old_obj.db_vt_source_signature
        if 'vt_dest_signature' in class_dict:
            res = class_dict['vt_dest_signature'](old_obj, trans_dict)
            new_obj.db_vt_dest_signature = res
        elif hasattr(old_obj, 'db_vt_dest_signature') and old_obj.db_vt_dest_signature is not None:
            new_obj.db_vt_dest_signature = old_obj.db_vt_dest_signature
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: only itself, tagged with the parent's type and id.
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        # Leaf object: nothing is ever tracked for deletion.
        children = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        return False
    # --- scalar field accessors: the property setter marks the object dirty;
    # --- db_add_*/db_change_* assign without touching is_dirty.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_vt_source(self):
        return self._db_vt_source
    def __set_db_vt_source(self, vt_source):
        self._db_vt_source = vt_source
        self.is_dirty = True
    db_vt_source = property(__get_db_vt_source, __set_db_vt_source)
    def db_add_vt_source(self, vt_source):
        self._db_vt_source = vt_source
    def db_change_vt_source(self, vt_source):
        self._db_vt_source = vt_source
    def db_delete_vt_source(self, vt_source):
        self._db_vt_source = None
    def __get_db_vt_dest(self):
        return self._db_vt_dest
    def __set_db_vt_dest(self, vt_dest):
        self._db_vt_dest = vt_dest
        self.is_dirty = True
    db_vt_dest = property(__get_db_vt_dest, __set_db_vt_dest)
    def db_add_vt_dest(self, vt_dest):
        self._db_vt_dest = vt_dest
    def db_change_vt_dest(self, vt_dest):
        self._db_vt_dest = vt_dest
    def db_delete_vt_dest(self, vt_dest):
        self._db_vt_dest = None
    def __get_db_vt_source_port(self):
        return self._db_vt_source_port
    def __set_db_vt_source_port(self, vt_source_port):
        self._db_vt_source_port = vt_source_port
        self.is_dirty = True
    db_vt_source_port = property(__get_db_vt_source_port, __set_db_vt_source_port)
    def db_add_vt_source_port(self, vt_source_port):
        self._db_vt_source_port = vt_source_port
    def db_change_vt_source_port(self, vt_source_port):
        self._db_vt_source_port = vt_source_port
    def db_delete_vt_source_port(self, vt_source_port):
        self._db_vt_source_port = None
    def __get_db_vt_dest_port(self):
        return self._db_vt_dest_port
    def __set_db_vt_dest_port(self, vt_dest_port):
        self._db_vt_dest_port = vt_dest_port
        self.is_dirty = True
    db_vt_dest_port = property(__get_db_vt_dest_port, __set_db_vt_dest_port)
    def db_add_vt_dest_port(self, vt_dest_port):
        self._db_vt_dest_port = vt_dest_port
    def db_change_vt_dest_port(self, vt_dest_port):
        self._db_vt_dest_port = vt_dest_port
    def db_delete_vt_dest_port(self, vt_dest_port):
        self._db_vt_dest_port = None
    def __get_db_vt_source_signature(self):
        return self._db_vt_source_signature
    def __set_db_vt_source_signature(self, vt_source_signature):
        self._db_vt_source_signature = vt_source_signature
        self.is_dirty = True
    db_vt_source_signature = property(__get_db_vt_source_signature, __set_db_vt_source_signature)
    def db_add_vt_source_signature(self, vt_source_signature):
        self._db_vt_source_signature = vt_source_signature
    def db_change_vt_source_signature(self, vt_source_signature):
        self._db_vt_source_signature = vt_source_signature
    def db_delete_vt_source_signature(self, vt_source_signature):
        self._db_vt_source_signature = None
    def __get_db_vt_dest_signature(self):
        return self._db_vt_dest_signature
    def __set_db_vt_dest_signature(self, vt_dest_signature):
        self._db_vt_dest_signature = vt_dest_signature
        self.is_dirty = True
    db_vt_dest_signature = property(__get_db_vt_dest_signature, __set_db_vt_dest_signature)
    def db_add_vt_dest_signature(self, vt_dest_signature):
        self._db_vt_dest_signature = vt_dest_signature
    def db_change_vt_dest_signature(self, vt_dest_signature):
        self._db_vt_dest_signature = vt_dest_signature
    def db_delete_vt_dest_signature(self, vt_dest_signature):
        self._db_vt_dest_signature = None
    def getPrimaryKey(self):
        # Primary key is the id field.
        return self._db_id
class DBOpmAccount(object):
    """An OPM account: an (id, value) pair, keyed by id."""
    vtType = 'opm_account'

    def __init__(self, id=None, value=None):
        self._db_id = id
        self._db_value = value
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmAccount.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy both fields; with new_ids, allocate a fresh id from id_scope
        and record the old->new mapping in id_remap."""
        clone = DBOpmAccount(id=self._db_id, value=self._db_value)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            remap_type = (id_scope.remap[self.vtType]
                          if self.vtType in id_scope.remap else self.vtType)
            id_remap[(remap_type, self.db_id)] = fresh
            clone.db_id = fresh
        else:
            clone.is_dirty = self.is_dirty
            clone.is_new = self.is_new
        return clone

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Migrate old_obj across a schema version via per-field hooks."""
        target = DBOpmAccount() if new_obj is None else new_obj
        hooks = trans_dict.get(target.__class__.__name__, {})
        for field in ('id', 'value'):
            attr = 'db_' + field
            if field in hooks:
                setattr(target, attr, hooks[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(target, attr, getattr(old_obj, attr))
        target.is_new = old_obj.is_new
        target.is_dirty = old_obj.is_dirty
        return target

    def db_children(self, parent=(None, None), orphan=False, for_action=False):
        """Leaf object: only itself, tagged with the parent's type and id."""
        return [(self,) + tuple(parent)]

    def db_deleted_children(self, remove=False):
        """Leaf object: nothing is ever tracked for deletion."""
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    @property
    def db_id(self):
        return self._db_id

    @db_id.setter
    def db_id(self, id):
        # Assigning through the property marks the object dirty.
        self._db_id = id
        self.is_dirty = True

    def db_add_id(self, id):
        self._db_id = id

    def db_change_id(self, id):
        self._db_id = id

    def db_delete_id(self, id):
        self._db_id = None

    @property
    def db_value(self):
        return self._db_value

    @db_value.setter
    def db_value(self, value):
        self._db_value = value
        self.is_dirty = True

    def db_add_value(self, value):
        self._db_value = value

    def db_change_value(self, value):
        self._db_value = value

    def db_delete_value(self, value):
        self._db_value = None

    def getPrimaryKey(self):
        # Primary key is the id field.
        return self._db_id
class DBGroupExec(object):
    """Execution-log record for one run of a group: timing, status and the
    nested item executions and annotations.

    NOTE(review): the class continues past this block; only the header and
    constructor are documented here.
    """
    vtType = 'group_exec'
    def __init__(self, item_execs=None, id=None, ts_start=None, ts_end=None, cached=None, module_id=None, group_name=None, group_type=None, completed=None, error=None, machine_id=None, annotations=None):
        self.db_deleted_item_execs = []
        # Secondary index: child db_id -> child object (built when a list is
        # supplied; kept in sync by do_copy's index rebuild).
        self.db_item_execs_id_index = {}
        if item_execs is None:
            self._db_item_execs = []
        else:
            self._db_item_execs = item_execs
            for v in self._db_item_execs:
                self.db_item_execs_id_index[v.db_id] = v
        self._db_id = id
        self._db_ts_start = ts_start
        self._db_ts_end = ts_end
        self._db_cached = cached
        self._db_module_id = module_id
        self._db_group_name = group_name
        self._db_group_type = group_type
        self._db_completed = completed
        self._db_error = error
        self._db_machine_id = machine_id
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
            for v in self._db_annotations:
                self.db_annotations_id_index[v.db_id] = v
        # New objects start flagged dirty so they get written on the next save.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        """Support copy.copy(); delegates to do_copy without id rewriting."""
        return DBGroupExec.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this record (children included) and rebuild the id
        indices; with new_ids, allocate a fresh id from id_scope, record the
        mapping in id_remap, and rewrite the module/machine foreign keys if
        those objects were remapped."""
        cp = DBGroupExec(id=self._db_id,
                         ts_start=self._db_ts_start,
                         ts_end=self._db_ts_end,
                         cached=self._db_cached,
                         module_id=self._db_module_id,
                         group_name=self._db_group_name,
                         group_type=self._db_group_type,
                         completed=self._db_completed,
                         error=self._db_error,
                         machine_id=self._db_machine_id)
        if self._db_item_execs is None:
            cp._db_item_execs = []
        else:
            cp._db_item_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_item_execs]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_module_id') and ('module', self._db_module_id) in id_remap:
                cp._db_module_id = id_remap[('module', self._db_module_id)]
            if hasattr(self, 'db_machine_id') and ('machine', self._db_machine_id) in id_remap:
                cp._db_machine_id = id_remap[('machine', self._db_machine_id)]
        # recreate indices and set flags
        cp.db_item_execs_id_index = dict((v.db_id, v) for v in cp._db_item_execs)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into a (possibly newer-schema) DBGroupExec.

        trans_dict maps class names to per-field translation callables;
        for each field, a registered translator wins, otherwise the value
        is copied verbatim when present and not None.  Child item_execs
        and annotations (including the deleted lists) are translated
        recursively by dispatching on their vtType.
        """
        if new_obj is None:
            new_obj = DBGroupExec()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        # item_execs is heterogeneous: module_exec / group_exec / loop_exec
        if 'item_execs' in class_dict:
            res = class_dict['item_execs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_item_exec(obj)
        elif hasattr(old_obj, 'db_item_execs') and old_obj.db_item_execs is not None:
            for obj in old_obj.db_item_execs:
                if obj.vtType == 'module_exec':
                    new_obj.db_add_item_exec(DBModuleExec.update_version(obj, trans_dict))
                elif obj.vtType == 'group_exec':
                    new_obj.db_add_item_exec(DBGroupExec.update_version(obj, trans_dict))
                elif obj.vtType == 'loop_exec':
                    new_obj.db_add_item_exec(DBLoopExec.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_item_execs') and hasattr(new_obj, 'db_deleted_item_execs'):
            for obj in old_obj.db_deleted_item_execs:
                if obj.vtType == 'module_exec':
                    n_obj = DBModuleExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
                elif obj.vtType == 'group_exec':
                    n_obj = DBGroupExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
                elif obj.vtType == 'loop_exec':
                    n_obj = DBLoopExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
        # scalar fields: translator if registered, otherwise copy when set
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'ts_start' in class_dict:
            res = class_dict['ts_start'](old_obj, trans_dict)
            new_obj.db_ts_start = res
        elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
            new_obj.db_ts_start = old_obj.db_ts_start
        if 'ts_end' in class_dict:
            res = class_dict['ts_end'](old_obj, trans_dict)
            new_obj.db_ts_end = res
        elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
            new_obj.db_ts_end = old_obj.db_ts_end
        if 'cached' in class_dict:
            res = class_dict['cached'](old_obj, trans_dict)
            new_obj.db_cached = res
        elif hasattr(old_obj, 'db_cached') and old_obj.db_cached is not None:
            new_obj.db_cached = old_obj.db_cached
        if 'module_id' in class_dict:
            res = class_dict['module_id'](old_obj, trans_dict)
            new_obj.db_module_id = res
        elif hasattr(old_obj, 'db_module_id') and old_obj.db_module_id is not None:
            new_obj.db_module_id = old_obj.db_module_id
        if 'group_name' in class_dict:
            res = class_dict['group_name'](old_obj, trans_dict)
            new_obj.db_group_name = res
        elif hasattr(old_obj, 'db_group_name') and old_obj.db_group_name is not None:
            new_obj.db_group_name = old_obj.db_group_name
        if 'group_type' in class_dict:
            res = class_dict['group_type'](old_obj, trans_dict)
            new_obj.db_group_type = res
        elif hasattr(old_obj, 'db_group_type') and old_obj.db_group_type is not None:
            new_obj.db_group_type = old_obj.db_group_type
        if 'completed' in class_dict:
            res = class_dict['completed'](old_obj, trans_dict)
            new_obj.db_completed = res
        elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
            new_obj.db_completed = old_obj.db_completed
        if 'error' in class_dict:
            res = class_dict['error'](old_obj, trans_dict)
            new_obj.db_error = res
        elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None:
            new_obj.db_error = old_obj.db_error
        if 'machine_id' in class_dict:
            res = class_dict['machine_id'](old_obj, trans_dict)
            new_obj.db_machine_id = res
        elif hasattr(old_obj, 'db_machine_id') and old_obj.db_machine_id is not None:
            new_obj.db_machine_id = old_obj.db_machine_id
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) tuples for all
        descendants followed by self.  With orphan=True, each collected
        child is also removed from this object (via db_delete_*, which
        keeps the id indices in sync)."""
        children = []
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        # deletion is deferred so the list is not mutated while iterating
        for child in to_del:
            self.db_delete_annotation(child)
        to_del = []
        for child in self.db_item_execs:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_item_exec(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return the objects deleted from this group exec; with
        remove=True, also clear the deleted-object lists."""
        children = []
        children.extend(self.db_deleted_annotations)
        children.extend(self.db_deleted_item_execs)
        if remove:
            self.db_deleted_annotations = []
            self.db_deleted_item_execs = []
        return children
    def has_changes(self):
        """Return True if self or any child annotation/item_exec is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        for child in self._db_item_execs:
            if child.has_changes():
                return True
        return False
    # --- item_execs: child executions, kept as a list plus an id->object
    # --- index (db_item_execs_id_index) that must stay in sync.
    def __get_db_item_execs(self):
        return self._db_item_execs
    def __set_db_item_execs(self, item_execs):
        # NOTE: replaces the list without rebuilding the id index
        self._db_item_execs = item_execs
        self.is_dirty = True
    db_item_execs = property(__get_db_item_execs, __set_db_item_execs)
    def db_get_item_execs(self):
        return self._db_item_execs
    def db_add_item_exec(self, item_exec):
        self.is_dirty = True
        self._db_item_execs.append(item_exec)
        self.db_item_execs_id_index[item_exec.db_id] = item_exec
    def db_change_item_exec(self, item_exec):
        # upsert: replace in place when the id exists, else append
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == item_exec.db_id:
                self._db_item_execs[i] = item_exec
                found = True
                break
        if not found:
            self._db_item_execs.append(item_exec)
        self.db_item_execs_id_index[item_exec.db_id] = item_exec
    def db_delete_item_exec(self, item_exec):
        # already-persisted (not is_new) objects are remembered in
        # db_deleted_item_execs; raises KeyError if the id is not indexed
        self.is_dirty = True
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == item_exec.db_id:
                if not self._db_item_execs[i].is_new:
                    self.db_deleted_item_execs.append(self._db_item_execs[i])
                del self._db_item_execs[i]
                break
        del self.db_item_execs_id_index[item_exec.db_id]
    def db_get_item_exec(self, key):
        # linear scan; returns None when absent
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == key:
                return self._db_item_execs[i]
        return None
    def db_get_item_exec_by_id(self, key):
        return self.db_item_execs_id_index[key]
    def db_has_item_exec_with_id(self, key):
        return key in self.db_item_execs_id_index
    # --- Scalar field accessors.  Generated pattern, identical per field X:
    # --- property db_X (setter marks the object dirty), db_add_X /
    # --- db_change_X (plain assignment, no dirty flag), and db_delete_X
    # --- (ignores its argument and resets the field to None).
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_ts_start(self):
        return self._db_ts_start
    def __set_db_ts_start(self, ts_start):
        self._db_ts_start = ts_start
        self.is_dirty = True
    db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
    def db_add_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_change_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_delete_ts_start(self, ts_start):
        self._db_ts_start = None
    def __get_db_ts_end(self):
        return self._db_ts_end
    def __set_db_ts_end(self, ts_end):
        self._db_ts_end = ts_end
        self.is_dirty = True
    db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
    def db_add_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_change_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_delete_ts_end(self, ts_end):
        self._db_ts_end = None
    def __get_db_cached(self):
        return self._db_cached
    def __set_db_cached(self, cached):
        self._db_cached = cached
        self.is_dirty = True
    db_cached = property(__get_db_cached, __set_db_cached)
    def db_add_cached(self, cached):
        self._db_cached = cached
    def db_change_cached(self, cached):
        self._db_cached = cached
    def db_delete_cached(self, cached):
        self._db_cached = None
    def __get_db_module_id(self):
        return self._db_module_id
    def __set_db_module_id(self, module_id):
        self._db_module_id = module_id
        self.is_dirty = True
    db_module_id = property(__get_db_module_id, __set_db_module_id)
    def db_add_module_id(self, module_id):
        self._db_module_id = module_id
    def db_change_module_id(self, module_id):
        self._db_module_id = module_id
    def db_delete_module_id(self, module_id):
        self._db_module_id = None
    def __get_db_group_name(self):
        return self._db_group_name
    def __set_db_group_name(self, group_name):
        self._db_group_name = group_name
        self.is_dirty = True
    db_group_name = property(__get_db_group_name, __set_db_group_name)
    def db_add_group_name(self, group_name):
        self._db_group_name = group_name
    def db_change_group_name(self, group_name):
        self._db_group_name = group_name
    def db_delete_group_name(self, group_name):
        self._db_group_name = None
    def __get_db_group_type(self):
        return self._db_group_type
    def __set_db_group_type(self, group_type):
        self._db_group_type = group_type
        self.is_dirty = True
    db_group_type = property(__get_db_group_type, __set_db_group_type)
    def db_add_group_type(self, group_type):
        self._db_group_type = group_type
    def db_change_group_type(self, group_type):
        self._db_group_type = group_type
    def db_delete_group_type(self, group_type):
        self._db_group_type = None
    def __get_db_completed(self):
        return self._db_completed
    def __set_db_completed(self, completed):
        self._db_completed = completed
        self.is_dirty = True
    db_completed = property(__get_db_completed, __set_db_completed)
    def db_add_completed(self, completed):
        self._db_completed = completed
    def db_change_completed(self, completed):
        self._db_completed = completed
    def db_delete_completed(self, completed):
        self._db_completed = None
    def __get_db_error(self):
        return self._db_error
    def __set_db_error(self, error):
        self._db_error = error
        self.is_dirty = True
    db_error = property(__get_db_error, __set_db_error)
    def db_add_error(self, error):
        self._db_error = error
    def db_change_error(self, error):
        self._db_error = error
    def db_delete_error(self, error):
        self._db_error = None
    def __get_db_machine_id(self):
        return self._db_machine_id
    def __set_db_machine_id(self, machine_id):
        self._db_machine_id = machine_id
        self.is_dirty = True
    db_machine_id = property(__get_db_machine_id, __set_db_machine_id)
    def db_add_machine_id(self, machine_id):
        self._db_machine_id = machine_id
    def db_change_machine_id(self, machine_id):
        self._db_machine_id = machine_id
    def db_delete_machine_id(self, machine_id):
        self._db_machine_id = None
    # --- annotations: child annotations, list plus id->object index
    # --- (db_annotations_id_index); same generated pattern as item_execs.
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        # NOTE: replaces the list without rebuilding the id index
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_change_annotation(self, annotation):
        # upsert: replace in place when the id exists, else append
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_delete_annotation(self, annotation):
        # already-persisted (not is_new) objects go to
        # db_deleted_annotations; raises KeyError if the id is not indexed
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
    def db_get_annotation(self, key):
        # linear scan; returns None when absent
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def getPrimaryKey(self):
        """Return this object's primary key (db_id)."""
        return self._db_id
class DBOpmAgentId(object):
    """Reference to an OPM agent by id, following the generated DB-object
    pattern (do_copy / update_version / db_children / accessors)."""
    vtType = 'opm_agent_id'
    def __init__(self, id=None):
        self._db_id = id
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        """Support copy.copy() by delegating to do_copy() with default args."""
        return DBOpmAgentId.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this reference.

        When new_ids is True, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id mapping is recorded in id_remap; the
        referenced opm_agent id is translated through id_remap when a
        mapping exists.
        """
        cp = DBOpmAgentId(id=self._db_id)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # BUGFIX: guard against id_remap=None (the copy.copy() path via
        # __copy__); previously the membership test below raised
        # "TypeError: argument of type 'NoneType' is not iterable".
        if id_remap is not None and hasattr(self, 'db_id') and ('opm_agent', self._db_id) in id_remap:
            cp._db_id = id_remap[('opm_agent', self._db_id)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj: a registered 'id' translator in trans_dict
        wins, otherwise the id is copied verbatim when set."""
        if new_obj is None:
            new_obj = DBOpmAgentId()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Leaf object: report only (self, parent_type, parent_id)."""
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        """Leaf object: no child collections, always an empty list."""
        children = []
        return children
    def has_changes(self):
        """Return True when this object has unsaved changes."""
        if self.is_dirty:
            return True
        return False
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
class DBParameter(object):
    """A single function parameter: position, name, type, value and an
    optional alias, following the generated DB-object pattern."""
    vtType = 'parameter'

    def __init__(self, id=None, pos=None, name=None, type=None, val=None, alias=None):
        self._db_id = id
        self._db_pos = pos
        self._db_name = name
        self._db_type = type
        self._db_val = val
        self._db_alias = alias
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        """Support copy.copy() by delegating to do_copy() with defaults."""
        return DBParameter.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this parameter.

        When new_ids is True, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id pair is recorded in id_remap; otherwise
        the dirty/new flags are carried over unchanged.
        """
        clone = DBParameter(id=self._db_id, pos=self._db_pos,
                            name=self._db_name, type=self._db_type,
                            val=self._db_val, alias=self._db_alias)
        if new_ids:
            fresh_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                remap_key = id_scope.remap[self.vtType]
            else:
                remap_key = self.vtType
            id_remap[(remap_key, self.db_id)] = fresh_id
            clone.db_id = fresh_id
        else:
            clone.is_dirty = self.is_dirty
            clone.is_new = self.is_new
        return clone

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build a translated copy of old_obj: fields with a registered
        translator in trans_dict are converted, the rest copied when set."""
        if new_obj is None:
            new_obj = DBParameter()
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        else:
            class_dict = {}
        for field in ('id', 'pos', 'name', 'type', 'val', 'alias'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Leaf object: report only (self, parent_type, parent_id)."""
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        """Leaf object: nothing is ever deleted from it."""
        return []

    def has_changes(self):
        """Return True when this parameter is dirty (no children to check)."""
        return bool(self.is_dirty)

    # --- generated-style accessors: property (setter marks dirty) plus
    # --- db_add_* / db_change_* (plain assignment) / db_delete_* (reset).

    def _get_id(self):
        return self._db_id

    def _set_id(self, id):
        self._db_id = id
        self.is_dirty = True

    db_id = property(_get_id, _set_id)

    def db_add_id(self, id):
        self._db_id = id

    def db_change_id(self, id):
        self._db_id = id

    def db_delete_id(self, id):
        self._db_id = None

    def _get_pos(self):
        return self._db_pos

    def _set_pos(self, pos):
        self._db_pos = pos
        self.is_dirty = True

    db_pos = property(_get_pos, _set_pos)

    def db_add_pos(self, pos):
        self._db_pos = pos

    def db_change_pos(self, pos):
        self._db_pos = pos

    def db_delete_pos(self, pos):
        self._db_pos = None

    def _get_name(self):
        return self._db_name

    def _set_name(self, name):
        self._db_name = name
        self.is_dirty = True

    db_name = property(_get_name, _set_name)

    def db_add_name(self, name):
        self._db_name = name

    def db_change_name(self, name):
        self._db_name = name

    def db_delete_name(self, name):
        self._db_name = None

    def _get_type(self):
        return self._db_type

    def _set_type(self, type):
        self._db_type = type
        self.is_dirty = True

    db_type = property(_get_type, _set_type)

    def db_add_type(self, type):
        self._db_type = type

    def db_change_type(self, type):
        self._db_type = type

    def db_delete_type(self, type):
        self._db_type = None

    def _get_val(self):
        return self._db_val

    def _set_val(self, val):
        self._db_val = val
        self.is_dirty = True

    db_val = property(_get_val, _set_val)

    def db_add_val(self, val):
        self._db_val = val

    def db_change_val(self, val):
        self._db_val = val

    def db_delete_val(self, val):
        self._db_val = None

    def _get_alias(self):
        return self._db_alias

    def _set_alias(self, alias):
        self._db_alias = alias
        self.is_dirty = True

    db_alias = property(_get_alias, _set_alias)

    def db_add_alias(self, alias):
        self._db_alias = alias

    def db_change_alias(self, alias):
        self._db_alias = alias

    def db_delete_alias(self, alias):
        self._db_alias = None

    def getPrimaryKey(self):
        """Return this object's primary key (db_id)."""
        return self._db_id
class DBVistrail(object):
    """Root persistence object for a vistrail: holds the actions (version
    tree), tags, annotations, vistrail variables, parameter explorations
    and per-action annotations, each backed by a list plus lookup
    indices and a matching db_deleted_* list."""
    vtType = 'vistrail'
    def __init__(self, id=None, entity_type=None, version=None, name=None, last_modified=None, actions=None, tags=None, annotations=None, vistrailVariables=None, parameter_explorations=None, actionAnnotations=None):
        self._db_id = id
        self._db_entity_type = entity_type
        self._db_version = version
        self._db_name = name
        self._db_last_modified = last_modified
        # actions: indexed by id
        self.db_deleted_actions = []
        self.db_actions_id_index = {}
        if actions is None:
            self._db_actions = []
        else:
            self._db_actions = actions
            for v in self._db_actions:
                self.db_actions_id_index[v.db_id] = v
        # tags: indexed by id and by name
        self.db_deleted_tags = []
        self.db_tags_id_index = {}
        self.db_tags_name_index = {}
        if tags is None:
            self._db_tags = []
        else:
            self._db_tags = tags
            for v in self._db_tags:
                self.db_tags_id_index[v.db_id] = v
                self.db_tags_name_index[v.db_name] = v
        # annotations: indexed by id and by key
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        self.db_annotations_key_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
            for v in self._db_annotations:
                self.db_annotations_id_index[v.db_id] = v
                self.db_annotations_key_index[v.db_key] = v
        # vistrail variables: indexed by name and by uuid
        self.db_deleted_vistrailVariables = []
        self.db_vistrailVariables_name_index = {}
        self.db_vistrailVariables_uuid_index = {}
        if vistrailVariables is None:
            self._db_vistrailVariables = []
        else:
            self._db_vistrailVariables = vistrailVariables
            for v in self._db_vistrailVariables:
                self.db_vistrailVariables_name_index[v.db_name] = v
                self.db_vistrailVariables_uuid_index[v.db_uuid] = v
        # parameter explorations: indexed by id
        self.db_deleted_parameter_explorations = []
        self.db_parameter_explorations_id_index = {}
        if parameter_explorations is None:
            self._db_parameter_explorations = []
        else:
            self._db_parameter_explorations = parameter_explorations
            for v in self._db_parameter_explorations:
                self.db_parameter_explorations_id_index[v.db_id] = v
        # action annotations: indexed by id, (action_id, key) and (key, value)
        self.db_deleted_actionAnnotations = []
        self.db_actionAnnotations_id_index = {}
        self.db_actionAnnotations_action_id_index = {}
        self.db_actionAnnotations_key_index = {}
        if actionAnnotations is None:
            self._db_actionAnnotations = []
        else:
            self._db_actionAnnotations = actionAnnotations
            for v in self._db_actionAnnotations:
                self.db_actionAnnotations_id_index[v.db_id] = v
                self.db_actionAnnotations_action_id_index[(v.db_action_id,v.db_key)] = v
                self.db_actionAnnotations_key_index[(v.db_key,v.db_value)] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        """Support copy.copy() by delegating to do_copy() with default args."""
        return DBVistrail.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a deep copy of this vistrail and all child collections.

        When new_ids is True, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id mapping is recorded in id_remap; children
        are copied recursively with the same arguments.  All lookup
        indices are rebuilt on the copy.
        """
        cp = DBVistrail(id=self._db_id,
                        entity_type=self._db_entity_type,
                        version=self._db_version,
                        name=self._db_name,
                        last_modified=self._db_last_modified)
        if self._db_actions is None:
            cp._db_actions = []
        else:
            cp._db_actions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actions]
        if self._db_tags is None:
            cp._db_tags = []
        else:
            cp._db_tags = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_tags]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        if self._db_vistrailVariables is None:
            cp._db_vistrailVariables = []
        else:
            cp._db_vistrailVariables = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_vistrailVariables]
        if self._db_parameter_explorations is None:
            cp._db_parameter_explorations = []
        else:
            cp._db_parameter_explorations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_parameter_explorations]
        if self._db_actionAnnotations is None:
            cp._db_actionAnnotations = []
        else:
            cp._db_actionAnnotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actionAnnotations]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_actions_id_index = dict((v.db_id, v) for v in cp._db_actions)
        cp.db_tags_id_index = dict((v.db_id, v) for v in cp._db_tags)
        cp.db_tags_name_index = dict((v.db_name, v) for v in cp._db_tags)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
        cp.db_vistrailVariables_name_index = dict((v.db_name, v) for v in cp._db_vistrailVariables)
        cp.db_vistrailVariables_uuid_index = dict((v.db_uuid, v) for v in cp._db_vistrailVariables)
        cp.db_parameter_explorations_id_index = dict((v.db_id, v) for v in cp._db_parameter_explorations)
        cp.db_actionAnnotations_id_index = dict((v.db_id, v) for v in cp._db_actionAnnotations)
        cp.db_actionAnnotations_action_id_index = dict(((v.db_action_id,v.db_key), v) for v in cp._db_actionAnnotations)
        cp.db_actionAnnotations_key_index = dict(((v.db_key,v.db_value), v) for v in cp._db_actionAnnotations)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into a (possibly newer-schema) DBVistrail.

        trans_dict maps class names to per-field translation callables;
        for each field, a registered translator wins, otherwise the value
        is copied verbatim when present and not None.  Child collections
        (and their db_deleted_* lists) are translated recursively via the
        corresponding classes' update_version.
        """
        if new_obj is None:
            new_obj = DBVistrail()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'entity_type' in class_dict:
            res = class_dict['entity_type'](old_obj, trans_dict)
            new_obj.db_entity_type = res
        elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
            new_obj.db_entity_type = old_obj.db_entity_type
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'last_modified' in class_dict:
            res = class_dict['last_modified'](old_obj, trans_dict)
            new_obj.db_last_modified = res
        elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
            new_obj.db_last_modified = old_obj.db_last_modified
        if 'actions' in class_dict:
            res = class_dict['actions'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_action(obj)
        elif hasattr(old_obj, 'db_actions') and old_obj.db_actions is not None:
            for obj in old_obj.db_actions:
                new_obj.db_add_action(DBAction.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_actions') and hasattr(new_obj, 'db_deleted_actions'):
            for obj in old_obj.db_deleted_actions:
                n_obj = DBAction.update_version(obj, trans_dict)
                new_obj.db_deleted_actions.append(n_obj)
        if 'tags' in class_dict:
            res = class_dict['tags'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_tag(obj)
        elif hasattr(old_obj, 'db_tags') and old_obj.db_tags is not None:
            for obj in old_obj.db_tags:
                new_obj.db_add_tag(DBTag.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_tags') and hasattr(new_obj, 'db_deleted_tags'):
            for obj in old_obj.db_deleted_tags:
                n_obj = DBTag.update_version(obj, trans_dict)
                new_obj.db_deleted_tags.append(n_obj)
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        if 'vistrailVariables' in class_dict:
            res = class_dict['vistrailVariables'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_vistrailVariable(obj)
        elif hasattr(old_obj, 'db_vistrailVariables') and old_obj.db_vistrailVariables is not None:
            for obj in old_obj.db_vistrailVariables:
                new_obj.db_add_vistrailVariable(DBVistrailVariable.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_vistrailVariables') and hasattr(new_obj, 'db_deleted_vistrailVariables'):
            for obj in old_obj.db_deleted_vistrailVariables:
                n_obj = DBVistrailVariable.update_version(obj, trans_dict)
                new_obj.db_deleted_vistrailVariables.append(n_obj)
        if 'parameter_explorations' in class_dict:
            res = class_dict['parameter_explorations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_parameter_exploration(obj)
        elif hasattr(old_obj, 'db_parameter_explorations') and old_obj.db_parameter_explorations is not None:
            for obj in old_obj.db_parameter_explorations:
                new_obj.db_add_parameter_exploration(DBParameterExploration.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_parameter_explorations') and hasattr(new_obj, 'db_deleted_parameter_explorations'):
            for obj in old_obj.db_deleted_parameter_explorations:
                n_obj = DBParameterExploration.update_version(obj, trans_dict)
                new_obj.db_deleted_parameter_explorations.append(n_obj)
        if 'actionAnnotations' in class_dict:
            res = class_dict['actionAnnotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_actionAnnotation(obj)
        elif hasattr(old_obj, 'db_actionAnnotations') and old_obj.db_actionAnnotations is not None:
            for obj in old_obj.db_actionAnnotations:
                new_obj.db_add_actionAnnotation(DBActionAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_actionAnnotations') and hasattr(new_obj, 'db_deleted_actionAnnotations'):
            for obj in old_obj.db_deleted_actionAnnotations:
                n_obj = DBActionAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_actionAnnotations.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) tuples for all
        descendants followed by self.  With orphan=True, each collected
        child is also removed from this object (via db_delete_*, which
        keeps all lookup indices in sync)."""
        children = []
        to_del = []
        for child in self.db_actions:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        # deletion is deferred so each list is not mutated while iterating
        for child in to_del:
            self.db_delete_action(child)
        to_del = []
        for child in self.db_tags:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_tag(child)
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        to_del = []
        for child in self.db_vistrailVariables:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_vistrailVariable(child)
        to_del = []
        for child in self.db_parameter_explorations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_parameter_exploration(child)
        to_del = []
        for child in self.db_actionAnnotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_actionAnnotation(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return every object deleted from this vistrail; with
        remove=True, also clear all the deleted-object lists."""
        children = []
        children.extend(self.db_deleted_actions)
        children.extend(self.db_deleted_tags)
        children.extend(self.db_deleted_annotations)
        children.extend(self.db_deleted_vistrailVariables)
        children.extend(self.db_deleted_parameter_explorations)
        children.extend(self.db_deleted_actionAnnotations)
        if remove:
            self.db_deleted_actions = []
            self.db_deleted_tags = []
            self.db_deleted_annotations = []
            self.db_deleted_vistrailVariables = []
            self.db_deleted_parameter_explorations = []
            self.db_deleted_actionAnnotations = []
        return children
    def has_changes(self):
        """Return True if self or any child in any collection is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_actions:
            if child.has_changes():
                return True
        for child in self._db_tags:
            if child.has_changes():
                return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        for child in self._db_vistrailVariables:
            if child.has_changes():
                return True
        for child in self._db_parameter_explorations:
            if child.has_changes():
                return True
        for child in self._db_actionAnnotations:
            if child.has_changes():
                return True
        return False
    # --- Generated scalar-field accessors ---------------------------------
    # For each field X there is a property db_X (setter marks the object
    # dirty), plus db_add_X / db_change_X (direct assignment, no dirty flag)
    # and db_delete_X (argument ignored; clears the field to None).
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        # Property setter: also flags the object as modified.
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        # The argument is unused; delete just resets the slot.
        self._db_id = None
    def __get_db_entity_type(self):
        return self._db_entity_type
    def __set_db_entity_type(self, entity_type):
        self._db_entity_type = entity_type
        self.is_dirty = True
    db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
    def db_add_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_change_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_delete_entity_type(self, entity_type):
        self._db_entity_type = None
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_last_modified(self):
        return self._db_last_modified
    def __set_db_last_modified(self, last_modified):
        self._db_last_modified = last_modified
        self.is_dirty = True
    db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
    def db_add_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_change_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_delete_last_modified(self, last_modified):
        self._db_last_modified = None
    # --- 'actions' collection: a list plus an id -> action lookup index ---
    def __get_db_actions(self):
        return self._db_actions
    def __set_db_actions(self, actions):
        # Replaces the whole list; db_actions_id_index is NOT rebuilt here.
        self._db_actions = actions
        self.is_dirty = True
    db_actions = property(__get_db_actions, __set_db_actions)
    def db_get_actions(self):
        return self._db_actions
    def db_add_action(self, action):
        self.is_dirty = True
        self._db_actions.append(action)
        self.db_actions_id_index[action.db_id] = action
    def db_change_action(self, action):
        """Replace the action with the same db_id in place, or append if
        no match exists; the id index is updated either way."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_actions)):
            if self._db_actions[i].db_id == action.db_id:
                self._db_actions[i] = action
                found = True
                break
        if not found:
            self._db_actions.append(action)
        self.db_actions_id_index[action.db_id] = action
    def db_delete_action(self, action):
        """Remove the action with the matching db_id; persisted (not is_new)
        actions are remembered in db_deleted_actions for write-back."""
        self.is_dirty = True
        for i in xrange(len(self._db_actions)):
            if self._db_actions[i].db_id == action.db_id:
                if not self._db_actions[i].is_new:
                    self.db_deleted_actions.append(self._db_actions[i])
                del self._db_actions[i]
                break
        # NOTE(review): this del runs even if the list scan found nothing,
        # so an unknown id raises KeyError here.
        del self.db_actions_id_index[action.db_id]
    def db_get_action(self, key):
        # Linear scan by db_id; returns None when not found.
        for i in xrange(len(self._db_actions)):
            if self._db_actions[i].db_id == key:
                return self._db_actions[i]
        return None
    def db_get_action_by_id(self, key):
        return self.db_actions_id_index[key]
    def db_has_action_with_id(self, key):
        return key in self.db_actions_id_index
    # --- 'tags' collection: list plus id -> tag and name -> tag indexes ---
    def __get_db_tags(self):
        return self._db_tags
    def __set_db_tags(self, tags):
        # Replaces the whole list; neither index is rebuilt here.
        self._db_tags = tags
        self.is_dirty = True
    db_tags = property(__get_db_tags, __set_db_tags)
    def db_get_tags(self):
        return self._db_tags
    def db_add_tag(self, tag):
        self.is_dirty = True
        self._db_tags.append(tag)
        self.db_tags_id_index[tag.db_id] = tag
        self.db_tags_name_index[tag.db_name] = tag
    def db_change_tag(self, tag):
        """Replace the tag with the same db_id in place, or append if no
        match exists; both indexes are updated either way."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_tags)):
            if self._db_tags[i].db_id == tag.db_id:
                self._db_tags[i] = tag
                found = True
                break
        if not found:
            self._db_tags.append(tag)
        self.db_tags_id_index[tag.db_id] = tag
        self.db_tags_name_index[tag.db_name] = tag
    def db_delete_tag(self, tag):
        """Remove the tag matching by db_id; persisted (not is_new) tags
        are kept in db_deleted_tags. Removes from both indexes."""
        self.is_dirty = True
        for i in xrange(len(self._db_tags)):
            if self._db_tags[i].db_id == tag.db_id:
                if not self._db_tags[i].is_new:
                    self.db_deleted_tags.append(self._db_tags[i])
                del self._db_tags[i]
                break
        # NOTE(review): unguarded dels — unknown id/name raises KeyError.
        del self.db_tags_id_index[tag.db_id]
        del self.db_tags_name_index[tag.db_name]
    def db_get_tag(self, key):
        # Linear scan by db_id; returns None when not found.
        for i in xrange(len(self._db_tags)):
            if self._db_tags[i].db_id == key:
                return self._db_tags[i]
        return None
    def db_get_tag_by_id(self, key):
        return self.db_tags_id_index[key]
    def db_has_tag_with_id(self, key):
        return key in self.db_tags_id_index
    def db_get_tag_by_name(self, key):
        return self.db_tags_name_index[key]
    def db_has_tag_with_name(self, key):
        return key in self.db_tags_name_index
    # --- 'annotations' collection: list plus id -> and key -> indexes -----
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        # Replaces the whole list; neither index is rebuilt here.
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_change_annotation(self, annotation):
        """Replace the annotation with the same db_id in place, or append
        if no match exists; both indexes are updated either way."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_delete_annotation(self, annotation):
        """Remove the annotation matching by db_id; persisted (not is_new)
        annotations are kept in db_deleted_annotations."""
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        # NOTE(review): unguarded dels — unknown id/key raises KeyError.
        del self.db_annotations_id_index[annotation.db_id]
        del self.db_annotations_key_index[annotation.db_key]
    def db_get_annotation(self, key):
        # Linear scan by db_id; returns None when not found.
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def db_get_annotation_by_key(self, key):
        return self.db_annotations_key_index[key]
    def db_has_annotation_with_key(self, key):
        return key in self.db_annotations_key_index
    # --- 'vistrailVariables' collection: list plus name -> and uuid ->
    # indexes. Unlike the other collections, matching in change/delete/get
    # is by db_name rather than db_id.
    def __get_db_vistrailVariables(self):
        return self._db_vistrailVariables
    def __set_db_vistrailVariables(self, vistrailVariables):
        # Replaces the whole list; neither index is rebuilt here.
        self._db_vistrailVariables = vistrailVariables
        self.is_dirty = True
    db_vistrailVariables = property(__get_db_vistrailVariables, __set_db_vistrailVariables)
    def db_get_vistrailVariables(self):
        return self._db_vistrailVariables
    def db_add_vistrailVariable(self, vistrailVariable):
        self.is_dirty = True
        self._db_vistrailVariables.append(vistrailVariable)
        self.db_vistrailVariables_name_index[vistrailVariable.db_name] = vistrailVariable
        self.db_vistrailVariables_uuid_index[vistrailVariable.db_uuid] = vistrailVariable
    def db_change_vistrailVariable(self, vistrailVariable):
        """Replace the variable with the same db_name in place, or append
        if no match exists; both indexes are updated either way."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_vistrailVariables)):
            if self._db_vistrailVariables[i].db_name == vistrailVariable.db_name:
                self._db_vistrailVariables[i] = vistrailVariable
                found = True
                break
        if not found:
            self._db_vistrailVariables.append(vistrailVariable)
        self.db_vistrailVariables_name_index[vistrailVariable.db_name] = vistrailVariable
        self.db_vistrailVariables_uuid_index[vistrailVariable.db_uuid] = vistrailVariable
    def db_delete_vistrailVariable(self, vistrailVariable):
        """Remove the variable matching by db_name; persisted (not is_new)
        variables are kept in db_deleted_vistrailVariables."""
        self.is_dirty = True
        for i in xrange(len(self._db_vistrailVariables)):
            if self._db_vistrailVariables[i].db_name == vistrailVariable.db_name:
                if not self._db_vistrailVariables[i].is_new:
                    self.db_deleted_vistrailVariables.append(self._db_vistrailVariables[i])
                del self._db_vistrailVariables[i]
                break
        # NOTE(review): unguarded dels — unknown name/uuid raises KeyError.
        del self.db_vistrailVariables_name_index[vistrailVariable.db_name]
        del self.db_vistrailVariables_uuid_index[vistrailVariable.db_uuid]
    def db_get_vistrailVariable(self, key):
        # Linear scan by db_name; returns None when not found.
        for i in xrange(len(self._db_vistrailVariables)):
            if self._db_vistrailVariables[i].db_name == key:
                return self._db_vistrailVariables[i]
        return None
    def db_get_vistrailVariable_by_name(self, key):
        return self.db_vistrailVariables_name_index[key]
    def db_has_vistrailVariable_with_name(self, key):
        return key in self.db_vistrailVariables_name_index
    def db_get_vistrailVariable_by_uuid(self, key):
        return self.db_vistrailVariables_uuid_index[key]
    def db_has_vistrailVariable_with_uuid(self, key):
        return key in self.db_vistrailVariables_uuid_index
    # --- 'parameter_explorations' collection: list plus id -> index -------
    def __get_db_parameter_explorations(self):
        return self._db_parameter_explorations
    def __set_db_parameter_explorations(self, parameter_explorations):
        # Replaces the whole list; the id index is NOT rebuilt here.
        self._db_parameter_explorations = parameter_explorations
        self.is_dirty = True
    db_parameter_explorations = property(__get_db_parameter_explorations, __set_db_parameter_explorations)
    def db_get_parameter_explorations(self):
        return self._db_parameter_explorations
    def db_add_parameter_exploration(self, parameter_exploration):
        self.is_dirty = True
        self._db_parameter_explorations.append(parameter_exploration)
        self.db_parameter_explorations_id_index[parameter_exploration.db_id] = parameter_exploration
    def db_change_parameter_exploration(self, parameter_exploration):
        """Replace the entry with the same db_id in place, or append if no
        match exists; the id index is updated either way."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_parameter_explorations)):
            if self._db_parameter_explorations[i].db_id == parameter_exploration.db_id:
                self._db_parameter_explorations[i] = parameter_exploration
                found = True
                break
        if not found:
            self._db_parameter_explorations.append(parameter_exploration)
        self.db_parameter_explorations_id_index[parameter_exploration.db_id] = parameter_exploration
    def db_delete_parameter_exploration(self, parameter_exploration):
        """Remove the entry matching by db_id; persisted (not is_new)
        entries are kept in db_deleted_parameter_explorations."""
        self.is_dirty = True
        for i in xrange(len(self._db_parameter_explorations)):
            if self._db_parameter_explorations[i].db_id == parameter_exploration.db_id:
                if not self._db_parameter_explorations[i].is_new:
                    self.db_deleted_parameter_explorations.append(self._db_parameter_explorations[i])
                del self._db_parameter_explorations[i]
                break
        # NOTE(review): unguarded del — unknown id raises KeyError.
        del self.db_parameter_explorations_id_index[parameter_exploration.db_id]
    def db_get_parameter_exploration(self, key):
        # Linear scan by db_id; returns None when not found.
        for i in xrange(len(self._db_parameter_explorations)):
            if self._db_parameter_explorations[i].db_id == key:
                return self._db_parameter_explorations[i]
        return None
    def db_get_parameter_exploration_by_id(self, key):
        return self.db_parameter_explorations_id_index[key]
    def db_has_parameter_exploration_with_id(self, key):
        return key in self.db_parameter_explorations_id_index
    # --- 'actionAnnotations' collection: list plus three indexes:
    # id -> obj, (action_id, key) -> obj, and (key, value) -> obj.
    def __get_db_actionAnnotations(self):
        return self._db_actionAnnotations
    def __set_db_actionAnnotations(self, actionAnnotations):
        # Replaces the whole list; no index is rebuilt here.
        self._db_actionAnnotations = actionAnnotations
        self.is_dirty = True
    db_actionAnnotations = property(__get_db_actionAnnotations, __set_db_actionAnnotations)
    def db_get_actionAnnotations(self):
        return self._db_actionAnnotations
    def db_add_actionAnnotation(self, actionAnnotation):
        self.is_dirty = True
        self._db_actionAnnotations.append(actionAnnotation)
        self.db_actionAnnotations_id_index[actionAnnotation.db_id] = actionAnnotation
        self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] = actionAnnotation
        self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] = actionAnnotation
    def db_change_actionAnnotation(self, actionAnnotation):
        """Replace the entry with the same db_id in place, or append if no
        match exists; all three indexes are updated either way."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_actionAnnotations)):
            if self._db_actionAnnotations[i].db_id == actionAnnotation.db_id:
                self._db_actionAnnotations[i] = actionAnnotation
                found = True
                break
        if not found:
            self._db_actionAnnotations.append(actionAnnotation)
        self.db_actionAnnotations_id_index[actionAnnotation.db_id] = actionAnnotation
        self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] = actionAnnotation
        self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] = actionAnnotation
    def db_delete_actionAnnotation(self, actionAnnotation):
        """Remove the entry matching by db_id; persisted (not is_new)
        entries are kept in db_deleted_actionAnnotations."""
        self.is_dirty = True
        for i in xrange(len(self._db_actionAnnotations)):
            if self._db_actionAnnotations[i].db_id == actionAnnotation.db_id:
                if not self._db_actionAnnotations[i].is_new:
                    self.db_deleted_actionAnnotations.append(self._db_actionAnnotations[i])
                del self._db_actionAnnotations[i]
                break
        del self.db_actionAnnotations_id_index[actionAnnotation.db_id]
        del self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)]
        # Unlike the other two indexes, a missing (key, value) entry is
        # tolerated here — presumably because the pair is not unique across
        # entries; NOTE(review): confirm against the generator's intent.
        try:
            del self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)]
        except KeyError:
            pass
    def db_get_actionAnnotation(self, key):
        # Linear scan by db_id; returns None when not found.
        for i in xrange(len(self._db_actionAnnotations)):
            if self._db_actionAnnotations[i].db_id == key:
                return self._db_actionAnnotations[i]
        return None
    def db_get_actionAnnotation_by_id(self, key):
        return self.db_actionAnnotations_id_index[key]
    def db_has_actionAnnotation_with_id(self, key):
        return key in self.db_actionAnnotations_id_index
    def db_get_actionAnnotation_by_action_id(self, key):
        # key is a (action_id, db_key) tuple.
        return self.db_actionAnnotations_action_id_index[key]
    def db_has_actionAnnotation_with_action_id(self, key):
        return key in self.db_actionAnnotations_action_id_index
    def db_get_actionAnnotation_by_key(self, key):
        # key is a (db_key, db_value) tuple.
        return self.db_actionAnnotations_key_index[key]
    def db_has_actionAnnotation_with_key(self, key):
        return key in self.db_actionAnnotations_key_index
    def getPrimaryKey(self):
        """Return the primary-key value for this object (its db id)."""
        return self._db_id
class DBOpmArtifactValue(object):
    """Generated domain object wrapping a single child 'value' object
    (dispatched by its vtType tag: 'portSpec' or 'function').

    ``is_dirty`` tracks unsaved edits; ``is_new`` marks objects never
    persisted; ``db_deleted_value`` remembers removed children for
    write-back.
    """
    vtType = 'opm_artifact_value'

    def __init__(self, value=None):
        self.db_deleted_value = []
        self._db_value = value
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmArtifactValue.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object; with new_ids=True, allocate a fresh id
        from id_scope and record the old->new pair in id_remap."""
        dup = DBOpmArtifactValue()
        if self._db_value is not None:
            dup._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
        if new_ids:
            fresh_id = id_scope.getNewId(self.vtType)
            key_type = id_scope.remap[self.vtType] if self.vtType in id_scope.remap else self.vtType
            id_remap[(key_type, self.db_id)] = fresh_id
            dup.db_id = fresh_id
        else:
            # Flags are carried over only for plain copies.
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema; trans_dict may register
        a 'value' override callable under this class's name."""
        if new_obj is None:
            new_obj = DBOpmArtifactValue()
        overrides = {}
        cls_name = new_obj.__class__.__name__
        if cls_name in trans_dict:
            overrides = trans_dict[cls_name]
        if 'value' in overrides:
            new_obj.db_value = overrides['value'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_value', None) is not None:
            child = old_obj.db_value
            # Dispatch on the child's vtType tag.
            if child.vtType == 'portSpec':
                new_obj.db_add_value(DBPortSpec.update_version(child, trans_dict))
            elif child.vtType == 'function':
                new_obj.db_add_value(DBFunction.update_version(child, trans_dict))
        if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
            for child in old_obj.db_deleted_value:
                if child.vtType == 'portSpec':
                    new_obj.db_deleted_value.append(DBPortSpec.update_version(child, trans_dict))
                elif child.vtType == 'function':
                    new_obj.db_deleted_value.append(DBFunction.update_version(child, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """List (object, parent_type, parent_id) triples for the child value
        (if any) and self; orphan=True severs the child link."""
        found = []
        if self._db_value is not None:
            found.extend(self._db_value.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_value = None
        found.append((self, parent[0], parent[1]))
        return found

    def db_deleted_children(self, remove=False):
        """Return deleted children; remove=True also clears the tracking list."""
        dropped = list(self.db_deleted_value)
        if remove:
            self.db_deleted_value = []
        return dropped

    def has_changes(self):
        if self.is_dirty:
            return True
        return bool(self._db_value is not None and self._db_value.has_changes())

    def _read_value(self):
        return self._db_value

    def _write_value(self, value):
        # Property setter also flags the object as modified.
        self._db_value = value
        self.is_dirty = True

    db_value = property(_read_value, _write_value)

    def db_add_value(self, value):
        self._db_value = value

    def db_change_value(self, value):
        self._db_value = value

    def db_delete_value(self, value):
        # Persisted (not is_new) values are remembered for write-back;
        # the argument itself is ignored.
        if not self.is_new:
            self.db_deleted_value.append(self._db_value)
        self._db_value = None
class DBConfigStr(object):
    """Generated domain object holding a single plain value (no children).

    ``is_dirty`` tracks unsaved edits; ``is_new`` marks objects that have
    never been persisted.
    """
    vtType = 'config_str'

    def __init__(self, value=None):
        self._db_value = value
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBConfigStr.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy; with new_ids=True, allocate a fresh id from
        id_scope and record the old->new pair in id_remap."""
        dup = DBConfigStr(value=self._db_value)
        if new_ids:
            # NOTE(review): DBConfigStr defines no db_id attribute, so this
            # branch would raise AttributeError if ever taken — confirm it
            # is never exercised for this type.
            fresh_id = id_scope.getNewId(self.vtType)
            key_type = id_scope.remap[self.vtType] if self.vtType in id_scope.remap else self.vtType
            id_remap[(key_type, self.db_id)] = fresh_id
            dup.db_id = fresh_id
        else:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema; trans_dict may register
        a 'value' override callable under this class's name."""
        if new_obj is None:
            new_obj = DBConfigStr()
        overrides = {}
        cls_name = new_obj.__class__.__name__
        if cls_name in trans_dict:
            overrides = trans_dict[cls_name]
        if 'value' in overrides:
            new_obj.db_value = overrides['value'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_value', None) is not None:
            new_obj.db_value = old_obj.db_value
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only child entry is itself.
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # Nothing is ever tracked as deleted for this leaf type.
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    def _read_value(self):
        return self._db_value

    def _write_value(self, value):
        # Property setter also flags the object as modified.
        self._db_value = value
        self.is_dirty = True

    db_value = property(_read_value, _write_value)

    def db_add_value(self, value):
        self._db_value = value

    def db_change_value(self, value):
        self._db_value = value

    def db_delete_value(self, value):
        # Argument ignored by design; delete just clears the slot.
        self._db_value = None
class DBStartup(object):
    """Generated domain object for a 'startup' record: a version string plus
    three optional child objects (configuration, enabled_packages,
    disabled_packages), each with a companion list of deleted children
    kept for write-back.
    """
    vtType = 'startup'
    def __init__(self, version=None, configuration=None, enabled_packages=None, disabled_packages=None):
        self._db_version = version
        self.db_deleted_configuration = []
        self._db_configuration = configuration
        self.db_deleted_enabled_packages = []
        self._db_enabled_packages = enabled_packages
        self.db_deleted_disabled_packages = []
        self._db_disabled_packages = disabled_packages
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBStartup.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object and its children; with new_ids=True,
        allocate a fresh id from id_scope and record the old->new pair
        in id_remap."""
        cp = DBStartup(version=self._db_version)
        if self._db_configuration is not None:
            cp._db_configuration = self._db_configuration.do_copy(new_ids, id_scope, id_remap)
        if self._db_enabled_packages is not None:
            cp._db_enabled_packages = self._db_enabled_packages.do_copy(new_ids, id_scope, id_remap)
        if self._db_disabled_packages is not None:
            cp._db_disabled_packages = self._db_disabled_packages.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        # NOTE(review): DBStartup defines no db_id attribute, so this branch
        # would raise AttributeError if ever taken — confirm it is never
        # exercised for this type.
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema; trans_dict may map
        this class's name to per-field override callables."""
        if new_obj is None:
            new_obj = DBStartup()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'configuration' in class_dict:
            res = class_dict['configuration'](old_obj, trans_dict)
            new_obj.db_configuration = res
        elif hasattr(old_obj, 'db_configuration') and old_obj.db_configuration is not None:
            obj = old_obj.db_configuration
            new_obj.db_add_configuration(DBConfiguration.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_configuration') and hasattr(new_obj, 'db_deleted_configuration'):
            for obj in old_obj.db_deleted_configuration:
                n_obj = DBConfiguration.update_version(obj, trans_dict)
                new_obj.db_deleted_configuration.append(n_obj)
        if 'enabled_packages' in class_dict:
            res = class_dict['enabled_packages'](old_obj, trans_dict)
            new_obj.db_enabled_packages = res
        elif hasattr(old_obj, 'db_enabled_packages') and old_obj.db_enabled_packages is not None:
            obj = old_obj.db_enabled_packages
            new_obj.db_add_enabled_packages(DBEnabledPackages.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_enabled_packages') and hasattr(new_obj, 'db_deleted_enabled_packages'):
            for obj in old_obj.db_deleted_enabled_packages:
                n_obj = DBEnabledPackages.update_version(obj, trans_dict)
                new_obj.db_deleted_enabled_packages.append(n_obj)
        if 'disabled_packages' in class_dict:
            res = class_dict['disabled_packages'](old_obj, trans_dict)
            new_obj.db_disabled_packages = res
        elif hasattr(old_obj, 'db_disabled_packages') and old_obj.db_disabled_packages is not None:
            obj = old_obj.db_disabled_packages
            new_obj.db_add_disabled_packages(DBDisabledPackages.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_disabled_packages') and hasattr(new_obj, 'db_deleted_disabled_packages'):
            for obj in old_obj.db_deleted_disabled_packages:
                n_obj = DBDisabledPackages.update_version(obj, trans_dict)
                new_obj.db_deleted_disabled_packages.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """List (object, parent_type, parent_id) triples for the children
        and self; orphan=True severs the child links."""
        children = []
        if self._db_configuration is not None:
            children.extend(self._db_configuration.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_configuration = None
        if self._db_enabled_packages is not None:
            children.extend(self._db_enabled_packages.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_enabled_packages = None
        if self._db_disabled_packages is not None:
            children.extend(self._db_disabled_packages.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_disabled_packages = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return all deleted children; remove=True also clears the
        tracking lists."""
        children = []
        children.extend(self.db_deleted_configuration)
        children.extend(self.db_deleted_enabled_packages)
        children.extend(self.db_deleted_disabled_packages)
        if remove:
            self.db_deleted_configuration = []
            self.db_deleted_enabled_packages = []
            self.db_deleted_disabled_packages = []
        return children
    def has_changes(self):
        """Return True if this object or any child has been modified."""
        if self.is_dirty:
            return True
        if self._db_configuration is not None and self._db_configuration.has_changes():
            return True
        if self._db_enabled_packages is not None and self._db_enabled_packages.has_changes():
            return True
        if self._db_disabled_packages is not None and self._db_disabled_packages.has_changes():
            return True
        return False
    # --- Field accessors: property setters mark the object dirty;
    # add/change assign directly; delete on a child slot remembers the old
    # object (when not is_new) in the matching db_deleted_* list.
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    def __get_db_configuration(self):
        return self._db_configuration
    def __set_db_configuration(self, configuration):
        self._db_configuration = configuration
        self.is_dirty = True
    db_configuration = property(__get_db_configuration, __set_db_configuration)
    def db_add_configuration(self, configuration):
        self._db_configuration = configuration
    def db_change_configuration(self, configuration):
        self._db_configuration = configuration
    def db_delete_configuration(self, configuration):
        if not self.is_new:
            self.db_deleted_configuration.append(self._db_configuration)
        self._db_configuration = None
    def __get_db_enabled_packages(self):
        return self._db_enabled_packages
    def __set_db_enabled_packages(self, enabled_packages):
        self._db_enabled_packages = enabled_packages
        self.is_dirty = True
    db_enabled_packages = property(__get_db_enabled_packages, __set_db_enabled_packages)
    def db_add_enabled_packages(self, enabled_packages):
        self._db_enabled_packages = enabled_packages
    def db_change_enabled_packages(self, enabled_packages):
        self._db_enabled_packages = enabled_packages
    def db_delete_enabled_packages(self, enabled_packages):
        if not self.is_new:
            self.db_deleted_enabled_packages.append(self._db_enabled_packages)
        self._db_enabled_packages = None
    def __get_db_disabled_packages(self):
        return self._db_disabled_packages
    def __set_db_disabled_packages(self, disabled_packages):
        self._db_disabled_packages = disabled_packages
        self.is_dirty = True
    db_disabled_packages = property(__get_db_disabled_packages, __set_db_disabled_packages)
    def db_add_disabled_packages(self, disabled_packages):
        self._db_disabled_packages = disabled_packages
    def db_change_disabled_packages(self, disabled_packages):
        self._db_disabled_packages = disabled_packages
    def db_delete_disabled_packages(self, disabled_packages):
        if not self.is_new:
            self.db_deleted_disabled_packages.append(self._db_disabled_packages)
        self._db_disabled_packages = None
class DBModule(object):
vtType = 'module'
    def __init__(self, id=None, cache=None, name=None, namespace=None, package=None, version=None, location=None, functions=None, annotations=None, portSpecs=None):
        """Initialize scalar fields, the location child, and the three
        collections (functions/annotations/portSpecs), building the lookup
        indexes from any collections passed in."""
        self._db_id = id
        self._db_cache = cache
        self._db_name = name
        self._db_namespace = namespace
        self._db_package = package
        self._db_version = version
        self.db_deleted_location = []
        self._db_location = location
        self.db_deleted_functions = []
        self.db_functions_id_index = {}
        if functions is None:
            self._db_functions = []
        else:
            self._db_functions = functions
            for v in self._db_functions:
                self.db_functions_id_index[v.db_id] = v
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        self.db_annotations_key_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
            for v in self._db_annotations:
                self.db_annotations_id_index[v.db_id] = v
                self.db_annotations_key_index[v.db_key] = v
        self.db_deleted_portSpecs = []
        self.db_portSpecs_id_index = {}
        self.db_portSpecs_name_index = {}
        if portSpecs is None:
            self._db_portSpecs = []
        else:
            self._db_portSpecs = portSpecs
            for v in self._db_portSpecs:
                self.db_portSpecs_id_index[v.db_id] = v
                self.db_portSpecs_name_index[(v.db_name,v.db_type)] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBModule.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this module and its children, rebuilding all lookup
        indexes; with new_ids=True, allocate a fresh id from id_scope and
        record the old->new pair in id_remap."""
        cp = DBModule(id=self._db_id,
                      cache=self._db_cache,
                      name=self._db_name,
                      namespace=self._db_namespace,
                      package=self._db_package,
                      version=self._db_version)
        if self._db_location is not None:
            cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap)
        if self._db_functions is None:
            cp._db_functions = []
        else:
            cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        if self._db_portSpecs is None:
            cp._db_portSpecs = []
        else:
            cp._db_portSpecs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_portSpecs]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
        cp.db_portSpecs_id_index = dict((v.db_id, v) for v in cp._db_portSpecs)
        cp.db_portSpecs_name_index = dict(((v.db_name,v.db_type), v) for v in cp._db_portSpecs)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema; trans_dict may map
        this class's name to per-field override callables. Deleted child
        lists are translated as well."""
        if new_obj is None:
            new_obj = DBModule()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'cache' in class_dict:
            res = class_dict['cache'](old_obj, trans_dict)
            new_obj.db_cache = res
        elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None:
            new_obj.db_cache = old_obj.db_cache
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'namespace' in class_dict:
            res = class_dict['namespace'](old_obj, trans_dict)
            new_obj.db_namespace = res
        elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
            new_obj.db_namespace = old_obj.db_namespace
        if 'package' in class_dict:
            res = class_dict['package'](old_obj, trans_dict)
            new_obj.db_package = res
        elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
            new_obj.db_package = old_obj.db_package
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'location' in class_dict:
            res = class_dict['location'](old_obj, trans_dict)
            new_obj.db_location = res
        elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None:
            obj = old_obj.db_location
            new_obj.db_add_location(DBLocation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'):
            for obj in old_obj.db_deleted_location:
                n_obj = DBLocation.update_version(obj, trans_dict)
                new_obj.db_deleted_location.append(n_obj)
        if 'functions' in class_dict:
            res = class_dict['functions'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_function(obj)
        elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None:
            for obj in old_obj.db_functions:
                new_obj.db_add_function(DBFunction.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
            for obj in old_obj.db_deleted_functions:
                n_obj = DBFunction.update_version(obj, trans_dict)
                new_obj.db_deleted_functions.append(n_obj)
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        if 'portSpecs' in class_dict:
            res = class_dict['portSpecs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_portSpec(obj)
        elif hasattr(old_obj, 'db_portSpecs') and old_obj.db_portSpecs is not None:
            for obj in old_obj.db_portSpecs:
                new_obj.db_add_portSpec(DBPortSpec.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_portSpecs') and hasattr(new_obj, 'db_deleted_portSpecs'):
            for obj in old_obj.db_deleted_portSpecs:
                n_obj = DBPortSpec.update_version(obj, trans_dict)
                new_obj.db_deleted_portSpecs.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """List (object, parent_type, parent_id) triples for all children
        and self; orphan=True detaches each child after it is listed."""
        children = []
        if self._db_location is not None:
            children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_location = None
        # Collection children are removed via db_delete_* AFTER iteration,
        # so the list is not mutated while being traversed.
        to_del = []
        for child in self.db_functions:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_function(child)
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        to_del = []
        for child in self.db_portSpecs:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_portSpec(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return all deleted children; remove=True also clears the
        tracking lists."""
        children = []
        children.extend(self.db_deleted_location)
        children.extend(self.db_deleted_functions)
        children.extend(self.db_deleted_annotations)
        children.extend(self.db_deleted_portSpecs)
        if remove:
            self.db_deleted_location = []
            self.db_deleted_functions = []
            self.db_deleted_annotations = []
            self.db_deleted_portSpecs = []
        return children
    def has_changes(self):
        """Return True if this module or any child has been modified."""
        if self.is_dirty:
            return True
        if self._db_location is not None and self._db_location.has_changes():
            return True
        for child in self._db_functions:
            if child.has_changes():
                return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        for child in self._db_portSpecs:
            if child.has_changes():
                return True
        return False
    # ------------------------------------------------------------------
    # Generated scalar-field accessors.  Each field gets:
    #   * a property whose setter marks the object dirty, and
    #   * db_add_/db_change_/db_delete_ helpers that set the value
    #     WITHOUT touching the dirty flag.
    # ------------------------------------------------------------------
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # 'cache' accessors
    def __get_db_cache(self):
        return self._db_cache
    def __set_db_cache(self, cache):
        self._db_cache = cache
        self.is_dirty = True
    db_cache = property(__get_db_cache, __set_db_cache)
    def db_add_cache(self, cache):
        self._db_cache = cache
    def db_change_cache(self, cache):
        self._db_cache = cache
    def db_delete_cache(self, cache):
        self._db_cache = None
    # 'name' accessors
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    # 'namespace' accessors
    def __get_db_namespace(self):
        return self._db_namespace
    def __set_db_namespace(self, namespace):
        self._db_namespace = namespace
        self.is_dirty = True
    db_namespace = property(__get_db_namespace, __set_db_namespace)
    def db_add_namespace(self, namespace):
        self._db_namespace = namespace
    def db_change_namespace(self, namespace):
        self._db_namespace = namespace
    def db_delete_namespace(self, namespace):
        self._db_namespace = None
    # 'package' accessors
    def __get_db_package(self):
        return self._db_package
    def __set_db_package(self, package):
        self._db_package = package
        self.is_dirty = True
    db_package = property(__get_db_package, __set_db_package)
    def db_add_package(self, package):
        self._db_package = package
    def db_change_package(self, package):
        self._db_package = package
    def db_delete_package(self, package):
        self._db_package = None
    # 'version' accessors
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    # 'location' accessors -- a child object, so deletion is tracked.
    def __get_db_location(self):
        return self._db_location
    def __set_db_location(self, location):
        self._db_location = location
        self.is_dirty = True
    db_location = property(__get_db_location, __set_db_location)
    def db_add_location(self, location):
        self._db_location = location
    def db_change_location(self, location):
        self._db_location = location
    def db_delete_location(self, location):
        # Only previously-persisted objects are remembered in the deleted
        # list (returned later by db_deleted_children); brand-new objects
        # can simply be dropped.
        if not self.is_new:
            self.db_deleted_location.append(self._db_location)
        self._db_location = None
    # ------------------------------------------------------------------
    # Generated accessors for the keyed 'functions' collection.  The list
    # self._db_functions is mirrored by the id index
    # self.db_functions_id_index for O(1) lookup by db_id.
    # ------------------------------------------------------------------
    def __get_db_functions(self):
        return self._db_functions
    def __set_db_functions(self, functions):
        self._db_functions = functions
        self.is_dirty = True
    db_functions = property(__get_db_functions, __set_db_functions)
    def db_get_functions(self):
        return self._db_functions
    def db_add_function(self, function):
        self.is_dirty = True
        self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_change_function(self, function):
        """Replace the function with the same db_id, or append if absent."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == function.db_id:
                self._db_functions[i] = function
                found = True
                break
        if not found:
            self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_delete_function(self, function):
        self.is_dirty = True
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == function.db_id:
                # Persisted objects are remembered so the deletion can be
                # reported via db_deleted_children; new objects just vanish.
                if not self._db_functions[i].is_new:
                    self.db_deleted_functions.append(self._db_functions[i])
                del self._db_functions[i]
                break
        del self.db_functions_id_index[function.db_id]
    def db_get_function(self, key):
        # Linear scan variant; db_get_function_by_id uses the index.
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == key:
                return self._db_functions[i]
        return None
    def db_get_function_by_id(self, key):
        return self.db_functions_id_index[key]
    def db_has_function_with_id(self, key):
        return key in self.db_functions_id_index
    # ------------------------------------------------------------------
    # Generated accessors for the keyed 'annotations' collection.
    # Annotations are indexed twice: by db_id and by db_key.
    # ------------------------------------------------------------------
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_change_annotation(self, annotation):
        """Replace the annotation with the same db_id, or append if absent."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_delete_annotation(self, annotation):
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                # Remember persisted objects for db_deleted_children.
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
        del self.db_annotations_key_index[annotation.db_key]
    def db_get_annotation(self, key):
        # Linear scan by db_id; the *_by_id/*_by_key variants use indices.
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def db_get_annotation_by_key(self, key):
        return self.db_annotations_key_index[key]
    def db_has_annotation_with_key(self, key):
        return key in self.db_annotations_key_index
    # ------------------------------------------------------------------
    # Generated accessors for the keyed 'portSpecs' collection.  Port
    # specs are indexed by db_id and by the (db_name, db_type) pair.
    # ------------------------------------------------------------------
    def __get_db_portSpecs(self):
        return self._db_portSpecs
    def __set_db_portSpecs(self, portSpecs):
        self._db_portSpecs = portSpecs
        self.is_dirty = True
    db_portSpecs = property(__get_db_portSpecs, __set_db_portSpecs)
    def db_get_portSpecs(self):
        return self._db_portSpecs
    def db_add_portSpec(self, portSpec):
        self.is_dirty = True
        self._db_portSpecs.append(portSpec)
        self.db_portSpecs_id_index[portSpec.db_id] = portSpec
        self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
    def db_change_portSpec(self, portSpec):
        """Replace the portSpec with the same db_id, or append if absent."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_portSpecs)):
            if self._db_portSpecs[i].db_id == portSpec.db_id:
                self._db_portSpecs[i] = portSpec
                found = True
                break
        if not found:
            self._db_portSpecs.append(portSpec)
        self.db_portSpecs_id_index[portSpec.db_id] = portSpec
        self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
    def db_delete_portSpec(self, portSpec):
        self.is_dirty = True
        for i in xrange(len(self._db_portSpecs)):
            if self._db_portSpecs[i].db_id == portSpec.db_id:
                # Remember persisted objects for db_deleted_children.
                if not self._db_portSpecs[i].is_new:
                    self.db_deleted_portSpecs.append(self._db_portSpecs[i])
                del self._db_portSpecs[i]
                break
        del self.db_portSpecs_id_index[portSpec.db_id]
        del self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)]
    def db_get_portSpec(self, key):
        # Linear scan by db_id; the *_by_id/*_by_name variants use indices.
        for i in xrange(len(self._db_portSpecs)):
            if self._db_portSpecs[i].db_id == key:
                return self._db_portSpecs[i]
        return None
    def db_get_portSpec_by_id(self, key):
        return self.db_portSpecs_id_index[key]
    def db_has_portSpec_with_id(self, key):
        return key in self.db_portSpecs_id_index
    def db_get_portSpec_by_name(self, key):
        return self.db_portSpecs_name_index[key]
    def db_has_portSpec_with_name(self, key):
        return key in self.db_portSpecs_name_index
    def getPrimaryKey(self):
        return self._db_id
class DBPort(object):
    """Generated domain object for one endpoint of a connection.

    Holds six scalar fields (id, type, moduleId, moduleName, name,
    signature), each exposed as a dirtying property plus non-dirtying
    db_add_/db_change_/db_delete_ helpers.  A port has no child objects.
    """
    vtType = 'port'
    def __init__(self, id=None, type=None, moduleId=None, moduleName=None, name=None, signature=None):
        self._db_id = id
        self._db_type = type
        self._db_moduleId = moduleId
        self._db_moduleName = moduleName
        self._db_name = name
        self._db_signature = signature
        # Freshly constructed objects count as both new and modified.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBPort.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this port; with new_ids, allocate a fresh id from id_scope
        and record the old->new mapping in id_remap."""
        cp = DBPort(id=self._db_id, type=self._db_type,
                    moduleId=self._db_moduleId, moduleName=self._db_moduleName,
                    name=self._db_name, signature=self._db_signature)
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                remap_key = (id_scope.remap[self.vtType], self.db_id)
            else:
                remap_key = (self.vtType, self.db_id)
            id_remap[remap_key] = new_id
            cp.db_id = new_id
            # Follow an already-remapped foreign key to the owning module.
            if hasattr(self, 'db_moduleId') and ('module', self._db_moduleId) in id_remap:
                cp._db_moduleId = id_remap[('module', self._db_moduleId)]
        else:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build a current-schema DBPort from old_obj, applying any
        per-field translation callables registered in trans_dict."""
        if new_obj is None:
            new_obj = DBPort()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        for field in ('id', 'type', 'moduleId', 'moduleName', 'name', 'signature'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # A port is a leaf: the only "child" is the port itself.
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        # Ports track no deletable children.
        return []
    def has_changes(self):
        return self.is_dirty
    # --- 'id' accessors ---------------------------------------------------
    def _get_db_id(self):
        return self._db_id
    def _set_db_id(self, id):
        self.is_dirty = True
        self._db_id = id
    db_id = property(_get_db_id, _set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # --- 'type' accessors -------------------------------------------------
    def _get_db_type(self):
        return self._db_type
    def _set_db_type(self, type):
        self.is_dirty = True
        self._db_type = type
    db_type = property(_get_db_type, _set_db_type)
    def db_add_type(self, type):
        self._db_type = type
    def db_change_type(self, type):
        self._db_type = type
    def db_delete_type(self, type):
        self._db_type = None
    # --- 'moduleId' accessors ---------------------------------------------
    def _get_db_moduleId(self):
        return self._db_moduleId
    def _set_db_moduleId(self, moduleId):
        self.is_dirty = True
        self._db_moduleId = moduleId
    db_moduleId = property(_get_db_moduleId, _set_db_moduleId)
    def db_add_moduleId(self, moduleId):
        self._db_moduleId = moduleId
    def db_change_moduleId(self, moduleId):
        self._db_moduleId = moduleId
    def db_delete_moduleId(self, moduleId):
        self._db_moduleId = None
    # --- 'moduleName' accessors -------------------------------------------
    def _get_db_moduleName(self):
        return self._db_moduleName
    def _set_db_moduleName(self, moduleName):
        self.is_dirty = True
        self._db_moduleName = moduleName
    db_moduleName = property(_get_db_moduleName, _set_db_moduleName)
    def db_add_moduleName(self, moduleName):
        self._db_moduleName = moduleName
    def db_change_moduleName(self, moduleName):
        self._db_moduleName = moduleName
    def db_delete_moduleName(self, moduleName):
        self._db_moduleName = None
    # --- 'name' accessors -------------------------------------------------
    def _get_db_name(self):
        return self._db_name
    def _set_db_name(self, name):
        self.is_dirty = True
        self._db_name = name
    db_name = property(_get_db_name, _set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    # --- 'signature' accessors --------------------------------------------
    def _get_db_signature(self):
        return self._db_signature
    def _set_db_signature(self, signature):
        self.is_dirty = True
        self._db_signature = signature
    db_signature = property(_get_db_signature, _set_db_signature)
    def db_add_signature(self, signature):
        self._db_signature = signature
    def db_change_signature(self, signature):
        self._db_signature = signature
    def db_delete_signature(self, signature):
        self._db_signature = None
    def getPrimaryKey(self):
        return self._db_id
class DBOpmAgents(object):
    """Generated container object holding a keyed list of OPM agents."""
    vtType = 'opm_agents'
    def __init__(self, agents=None):
        self.db_deleted_agents = []
        self.db_agents_id_index = {}
        if agents is None:
            self._db_agents = []
        else:
            self._db_agents = agents
            for v in self._db_agents:
                self.db_agents_id_index[v.db_id] = v
        # Freshly constructed objects count as both new and modified.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmAgents.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this container and (recursively) all of its agents."""
        cp = DBOpmAgents()
        if self._db_agents is None:
            cp._db_agents = []
        else:
            cp._db_agents = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_agents]
        # set new ids
        # NOTE(review): this class defines no db_id attribute or property,
        # so the new_ids branch below appears unusable as-is -- confirm
        # callers never pass new_ids=True for this type.
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_agents_id_index = dict((v.db_id, v) for v in cp._db_agents)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build a current-schema DBOpmAgents from old_obj, applying any
        translation callables registered in trans_dict."""
        if new_obj is None:
            new_obj = DBOpmAgents()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'agents' in class_dict:
            res = class_dict['agents'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_agent(obj)
        elif hasattr(old_obj, 'db_agents') and old_obj.db_agents is not None:
            for obj in old_obj.db_agents:
                new_obj.db_add_agent(DBOpmAgent.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_agents') and hasattr(new_obj, 'db_deleted_agents'):
            for obj in old_obj.db_deleted_agents:
                n_obj = DBOpmAgent.update_version(obj, trans_dict)
                new_obj.db_deleted_agents.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect this object and its agents as (obj, parent_type,
        parent_id) tuples; with orphan=True, agents are detached."""
        children = []
        to_del = []
        for child in self.db_agents:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_agent(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_agents)
        if remove:
            self.db_deleted_agents = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        for child in self._db_agents:
            if child.has_changes():
                return True
        return False
    # --- keyed 'agents' collection accessors ------------------------------
    def __get_db_agents(self):
        return self._db_agents
    def __set_db_agents(self, agents):
        self._db_agents = agents
        self.is_dirty = True
    db_agents = property(__get_db_agents, __set_db_agents)
    def db_get_agents(self):
        return self._db_agents
    def db_add_agent(self, agent):
        self.is_dirty = True
        self._db_agents.append(agent)
        self.db_agents_id_index[agent.db_id] = agent
    def db_change_agent(self, agent):
        """Replace the agent with the same db_id, or append if absent."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_agents)):
            if self._db_agents[i].db_id == agent.db_id:
                self._db_agents[i] = agent
                found = True
                break
        if not found:
            self._db_agents.append(agent)
        self.db_agents_id_index[agent.db_id] = agent
    def db_delete_agent(self, agent):
        self.is_dirty = True
        for i in xrange(len(self._db_agents)):
            if self._db_agents[i].db_id == agent.db_id:
                # Remember persisted objects for db_deleted_children.
                if not self._db_agents[i].is_new:
                    self.db_deleted_agents.append(self._db_agents[i])
                del self._db_agents[i]
                break
        del self.db_agents_id_index[agent.db_id]
    def db_get_agent(self, key):
        for i in xrange(len(self._db_agents)):
            if self._db_agents[i].db_id == key:
                return self._db_agents[i]
        return None
    def db_get_agent_by_id(self, key):
        return self.db_agents_id_index[key]
    def db_has_agent_with_id(self, key):
        return key in self.db_agents_id_index
class DBOpmDependencies(object):
    """Generated container for OPM dependency edges.

    Dependencies are NON-keyed: they carry several concrete vtTypes
    (opm_used, opm_was_generated_by, ...), so there is no id index and
    no per-item replace/delete support.
    """
    vtType = 'opm_dependencies'
    def __init__(self, dependencys=None):
        self.db_deleted_dependencys = []
        if dependencys is None:
            self._db_dependencys = []
        else:
            self._db_dependencys = dependencys
        # Freshly constructed objects count as both new and modified.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmDependencies.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this container and (recursively) all of its dependencies."""
        cp = DBOpmDependencies()
        if self._db_dependencys is None:
            cp._db_dependencys = []
        else:
            cp._db_dependencys = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_dependencys]
        # set new ids
        # NOTE(review): this class defines no db_id attribute or property,
        # so the new_ids branch below appears unusable as-is -- confirm
        # callers never pass new_ids=True for this type.
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build a current-schema DBOpmDependencies from old_obj,
        dispatching each dependency to its concrete class by vtType."""
        if new_obj is None:
            new_obj = DBOpmDependencies()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'dependencys' in class_dict:
            res = class_dict['dependencys'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_dependency(obj)
        elif hasattr(old_obj, 'db_dependencys') and old_obj.db_dependencys is not None:
            for obj in old_obj.db_dependencys:
                if obj.vtType == 'opm_used':
                    new_obj.db_add_dependency(DBOpmUsed.update_version(obj, trans_dict))
                elif obj.vtType == 'opm_was_generated_by':
                    new_obj.db_add_dependency(DBOpmWasGeneratedBy.update_version(obj, trans_dict))
                elif obj.vtType == 'opm_was_triggered_by':
                    new_obj.db_add_dependency(DBOpmWasTriggeredBy.update_version(obj, trans_dict))
                elif obj.vtType == 'opm_was_derived_from':
                    new_obj.db_add_dependency(DBOpmWasDerivedFrom.update_version(obj, trans_dict))
                elif obj.vtType == 'opm_was_controlled_by':
                    new_obj.db_add_dependency(DBOpmWasControlledBy.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_dependencys') and hasattr(new_obj, 'db_deleted_dependencys'):
            for obj in old_obj.db_deleted_dependencys:
                if obj.vtType == 'opm_used':
                    n_obj = DBOpmUsed.update_version(obj, trans_dict)
                    new_obj.db_deleted_dependencys.append(n_obj)
                elif obj.vtType == 'opm_was_generated_by':
                    n_obj = DBOpmWasGeneratedBy.update_version(obj, trans_dict)
                    new_obj.db_deleted_dependencys.append(n_obj)
                elif obj.vtType == 'opm_was_triggered_by':
                    n_obj = DBOpmWasTriggeredBy.update_version(obj, trans_dict)
                    new_obj.db_deleted_dependencys.append(n_obj)
                elif obj.vtType == 'opm_was_derived_from':
                    n_obj = DBOpmWasDerivedFrom.update_version(obj, trans_dict)
                    new_obj.db_deleted_dependencys.append(n_obj)
                elif obj.vtType == 'opm_was_controlled_by':
                    n_obj = DBOpmWasControlledBy.update_version(obj, trans_dict)
                    new_obj.db_deleted_dependencys.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect this object and its dependencies as (obj, parent_type,
        parent_id) tuples; with orphan=True, dependencies are detached."""
        children = []
        to_del = []
        for child in self.db_dependencys:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_dependency(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_dependencys)
        if remove:
            self.db_deleted_dependencys = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        for child in self._db_dependencys:
            if child.has_changes():
                return True
        return False
    # --- non-keyed 'dependencys' collection accessors ---------------------
    def __get_db_dependencys(self):
        return self._db_dependencys
    def __set_db_dependencys(self, dependencys):
        self._db_dependencys = dependencys
        self.is_dirty = True
    db_dependencys = property(__get_db_dependencys, __set_db_dependencys)
    def db_get_dependencys(self):
        return self._db_dependencys
    def db_add_dependency(self, dependency):
        self.is_dirty = True
        self._db_dependencys.append(dependency)
    def db_change_dependency(self, dependency):
        # Non-keyed collection: "change" cannot locate an existing entry,
        # so the generated code simply appends.
        self.is_dirty = True
        self._db_dependencys.append(dependency)
    def db_delete_dependency(self, dependency):
        # NOTE(review): marks the object dirty and then raises -- deletion
        # is unsupported for non-keyed collections in the generated API.
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_dependency(self, key):
        # Non-keyed collection: lookup by key is not supported.
        return None
class DBPEFunction(object):
    """Generated domain object for a parameter-exploration function.

    Scalar fields: id, module_id (foreign key to a module), port_name,
    is_alias.  Child collection: 'parameters', keyed by db_id.
    """
    vtType = 'pe_function'
    def __init__(self, id=None, module_id=None, port_name=None, is_alias=None, parameters=None):
        self._db_id = id
        self._db_module_id = module_id
        self._db_port_name = port_name
        self._db_is_alias = is_alias
        self.db_deleted_parameters = []
        self.db_parameters_id_index = {}
        if parameters is None:
            self._db_parameters = []
        else:
            self._db_parameters = parameters
            for v in self._db_parameters:
                self.db_parameters_id_index[v.db_id] = v
        # Freshly constructed objects count as both new and modified.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBPEFunction.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this function and its parameters; with new_ids, allocate a
        fresh id and remap the module_id foreign key if already mapped."""
        cp = DBPEFunction(id=self._db_id,
                          module_id=self._db_module_id,
                          port_name=self._db_port_name,
                          is_alias=self._db_is_alias)
        if self._db_parameters is None:
            cp._db_parameters = []
        else:
            cp._db_parameters = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_parameters]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_module_id') and ('module', self._db_module_id) in id_remap:
                cp._db_module_id = id_remap[('module', self._db_module_id)]
        # recreate indices and set flags
        cp.db_parameters_id_index = dict((v.db_id, v) for v in cp._db_parameters)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build a current-schema DBPEFunction from old_obj, applying any
        per-field translation callables registered in trans_dict."""
        if new_obj is None:
            new_obj = DBPEFunction()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'module_id' in class_dict:
            res = class_dict['module_id'](old_obj, trans_dict)
            new_obj.db_module_id = res
        elif hasattr(old_obj, 'db_module_id') and old_obj.db_module_id is not None:
            new_obj.db_module_id = old_obj.db_module_id
        if 'port_name' in class_dict:
            res = class_dict['port_name'](old_obj, trans_dict)
            new_obj.db_port_name = res
        elif hasattr(old_obj, 'db_port_name') and old_obj.db_port_name is not None:
            new_obj.db_port_name = old_obj.db_port_name
        if 'is_alias' in class_dict:
            res = class_dict['is_alias'](old_obj, trans_dict)
            new_obj.db_is_alias = res
        elif hasattr(old_obj, 'db_is_alias') and old_obj.db_is_alias is not None:
            new_obj.db_is_alias = old_obj.db_is_alias
        if 'parameters' in class_dict:
            res = class_dict['parameters'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_parameter(obj)
        elif hasattr(old_obj, 'db_parameters') and old_obj.db_parameters is not None:
            for obj in old_obj.db_parameters:
                new_obj.db_add_parameter(DBPEParameter.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_parameters') and hasattr(new_obj, 'db_deleted_parameters'):
            for obj in old_obj.db_deleted_parameters:
                n_obj = DBPEParameter.update_version(obj, trans_dict)
                new_obj.db_deleted_parameters.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect this object and its parameters as (obj, parent_type,
        parent_id) tuples; with orphan=True, parameters are detached."""
        children = []
        to_del = []
        for child in self.db_parameters:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_parameter(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_parameters)
        if remove:
            self.db_deleted_parameters = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        for child in self._db_parameters:
            if child.has_changes():
                return True
        return False
    # --- scalar field accessors -------------------------------------------
    # Property setters mark the object dirty; db_add_/db_change_/db_delete_
    # helpers set the value without touching the dirty flag.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_module_id(self):
        return self._db_module_id
    def __set_db_module_id(self, module_id):
        self._db_module_id = module_id
        self.is_dirty = True
    db_module_id = property(__get_db_module_id, __set_db_module_id)
    def db_add_module_id(self, module_id):
        self._db_module_id = module_id
    def db_change_module_id(self, module_id):
        self._db_module_id = module_id
    def db_delete_module_id(self, module_id):
        self._db_module_id = None
    def __get_db_port_name(self):
        return self._db_port_name
    def __set_db_port_name(self, port_name):
        self._db_port_name = port_name
        self.is_dirty = True
    db_port_name = property(__get_db_port_name, __set_db_port_name)
    def db_add_port_name(self, port_name):
        self._db_port_name = port_name
    def db_change_port_name(self, port_name):
        self._db_port_name = port_name
    def db_delete_port_name(self, port_name):
        self._db_port_name = None
    def __get_db_is_alias(self):
        return self._db_is_alias
    def __set_db_is_alias(self, is_alias):
        self._db_is_alias = is_alias
        self.is_dirty = True
    db_is_alias = property(__get_db_is_alias, __set_db_is_alias)
    def db_add_is_alias(self, is_alias):
        self._db_is_alias = is_alias
    def db_change_is_alias(self, is_alias):
        self._db_is_alias = is_alias
    def db_delete_is_alias(self, is_alias):
        self._db_is_alias = None
    # --- keyed 'parameters' collection accessors --------------------------
    def __get_db_parameters(self):
        return self._db_parameters
    def __set_db_parameters(self, parameters):
        self._db_parameters = parameters
        self.is_dirty = True
    db_parameters = property(__get_db_parameters, __set_db_parameters)
    def db_get_parameters(self):
        return self._db_parameters
    def db_add_parameter(self, parameter):
        self.is_dirty = True
        self._db_parameters.append(parameter)
        self.db_parameters_id_index[parameter.db_id] = parameter
    def db_change_parameter(self, parameter):
        """Replace the parameter with the same db_id, or append if absent."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_parameters)):
            if self._db_parameters[i].db_id == parameter.db_id:
                self._db_parameters[i] = parameter
                found = True
                break
        if not found:
            self._db_parameters.append(parameter)
        self.db_parameters_id_index[parameter.db_id] = parameter
    def db_delete_parameter(self, parameter):
        self.is_dirty = True
        for i in xrange(len(self._db_parameters)):
            if self._db_parameters[i].db_id == parameter.db_id:
                # Remember persisted objects for db_deleted_children.
                if not self._db_parameters[i].is_new:
                    self.db_deleted_parameters.append(self._db_parameters[i])
                del self._db_parameters[i]
                break
        del self.db_parameters_id_index[parameter.db_id]
    def db_get_parameter(self, key):
        for i in xrange(len(self._db_parameters)):
            if self._db_parameters[i].db_id == key:
                return self._db_parameters[i]
        return None
    def db_get_parameter_by_id(self, key):
        return self.db_parameters_id_index[key]
    def db_has_parameter_with_id(self, key):
        return key in self.db_parameters_id_index
    def getPrimaryKey(self):
        return self._db_id
class DBWorkflow(object):
vtType = 'workflow'
    def __init__(self, modules=None, id=None, entity_type=None, name=None, version=None, last_modified=None, connections=None, annotations=None, plugin_datas=None, others=None, vistrail_id=None):
        """Workflow record: scalar fields plus five keyed collections
        (modules, connections, annotations, plugin_datas, others), each
        mirrored by a db_id index dict for O(1) lookup."""
        self.db_deleted_modules = []
        self.db_modules_id_index = {}
        if modules is None:
            self._db_modules = []
        else:
            self._db_modules = modules
            for v in self._db_modules:
                self.db_modules_id_index[v.db_id] = v
        self._db_id = id
        self._db_entity_type = entity_type
        self._db_name = name
        self._db_version = version
        self._db_last_modified = last_modified
        self.db_deleted_connections = []
        self.db_connections_id_index = {}
        if connections is None:
            self._db_connections = []
        else:
            self._db_connections = connections
            for v in self._db_connections:
                self.db_connections_id_index[v.db_id] = v
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
            for v in self._db_annotations:
                self.db_annotations_id_index[v.db_id] = v
        self.db_deleted_plugin_datas = []
        self.db_plugin_datas_id_index = {}
        if plugin_datas is None:
            self._db_plugin_datas = []
        else:
            self._db_plugin_datas = plugin_datas
            for v in self._db_plugin_datas:
                self.db_plugin_datas_id_index[v.db_id] = v
        self.db_deleted_others = []
        self.db_others_id_index = {}
        if others is None:
            self._db_others = []
        else:
            self._db_others = others
            for v in self._db_others:
                self.db_others_id_index[v.db_id] = v
        self._db_vistrail_id = vistrail_id
        # Freshly constructed objects count as both new and modified.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBWorkflow.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this workflow and (recursively) every child collection;
        with new_ids, allocate a fresh id, record the old->new mapping in
        id_remap, and remap the vistrail_id foreign key if present."""
        cp = DBWorkflow(id=self._db_id,
                        entity_type=self._db_entity_type,
                        name=self._db_name,
                        version=self._db_version,
                        last_modified=self._db_last_modified,
                        vistrail_id=self._db_vistrail_id)
        if self._db_modules is None:
            cp._db_modules = []
        else:
            cp._db_modules = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_modules]
        if self._db_connections is None:
            cp._db_connections = []
        else:
            cp._db_connections = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_connections]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        if self._db_plugin_datas is None:
            cp._db_plugin_datas = []
        else:
            cp._db_plugin_datas = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_plugin_datas]
        if self._db_others is None:
            cp._db_others = []
        else:
            cp._db_others = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_others]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_vistrail_id') and ('vistrail', self._db_vistrail_id) in id_remap:
                cp._db_vistrail_id = id_remap[('vistrail', self._db_vistrail_id)]
        # recreate indices and set flags
        cp.db_modules_id_index = dict((v.db_id, v) for v in cp._db_modules)
        cp.db_connections_id_index = dict((v.db_id, v) for v in cp._db_connections)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        cp.db_plugin_datas_id_index = dict((v.db_id, v) for v in cp._db_plugin_datas)
        cp.db_others_id_index = dict((v.db_id, v) for v in cp._db_others)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, returning new_obj.

        trans_dict may supply, per class name, per-field translator callables
        that override the default field-by-field copy below.  Child objects
        are recursed through their own update_version methods.
        """
        if new_obj is None:
            new_obj = DBWorkflow()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        # Modules are heterogeneous: dispatch on vtType to the right class.
        if 'modules' in class_dict:
            res = class_dict['modules'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_module(obj)
        elif hasattr(old_obj, 'db_modules') and old_obj.db_modules is not None:
            for obj in old_obj.db_modules:
                if obj.vtType == 'module':
                    new_obj.db_add_module(DBModule.update_version(obj, trans_dict))
                elif obj.vtType == 'abstraction':
                    new_obj.db_add_module(DBAbstraction.update_version(obj, trans_dict))
                elif obj.vtType == 'group':
                    new_obj.db_add_module(DBGroup.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_modules') and hasattr(new_obj, 'db_deleted_modules'):
            for obj in old_obj.db_deleted_modules:
                if obj.vtType == 'module':
                    n_obj = DBModule.update_version(obj, trans_dict)
                    new_obj.db_deleted_modules.append(n_obj)
                elif obj.vtType == 'abstraction':
                    n_obj = DBAbstraction.update_version(obj, trans_dict)
                    new_obj.db_deleted_modules.append(n_obj)
                elif obj.vtType == 'group':
                    n_obj = DBGroup.update_version(obj, trans_dict)
                    new_obj.db_deleted_modules.append(n_obj)
        # Scalar fields: translator override first, else copy when present.
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'entity_type' in class_dict:
            res = class_dict['entity_type'](old_obj, trans_dict)
            new_obj.db_entity_type = res
        elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
            new_obj.db_entity_type = old_obj.db_entity_type
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'last_modified' in class_dict:
            res = class_dict['last_modified'](old_obj, trans_dict)
            new_obj.db_last_modified = res
        elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
            new_obj.db_last_modified = old_obj.db_last_modified
        # Homogeneous child collections: connections, annotations,
        # plugin_datas, others -- live lists first, then deletion queues.
        if 'connections' in class_dict:
            res = class_dict['connections'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_connection(obj)
        elif hasattr(old_obj, 'db_connections') and old_obj.db_connections is not None:
            for obj in old_obj.db_connections:
                new_obj.db_add_connection(DBConnection.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_connections') and hasattr(new_obj, 'db_deleted_connections'):
            for obj in old_obj.db_deleted_connections:
                n_obj = DBConnection.update_version(obj, trans_dict)
                new_obj.db_deleted_connections.append(n_obj)
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        if 'plugin_datas' in class_dict:
            res = class_dict['plugin_datas'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_plugin_data(obj)
        elif hasattr(old_obj, 'db_plugin_datas') and old_obj.db_plugin_datas is not None:
            for obj in old_obj.db_plugin_datas:
                new_obj.db_add_plugin_data(DBPluginData.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_plugin_datas') and hasattr(new_obj, 'db_deleted_plugin_datas'):
            for obj in old_obj.db_deleted_plugin_datas:
                n_obj = DBPluginData.update_version(obj, trans_dict)
                new_obj.db_deleted_plugin_datas.append(n_obj)
        if 'others' in class_dict:
            res = class_dict['others'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_other(obj)
        elif hasattr(old_obj, 'db_others') and old_obj.db_others is not None:
            for obj in old_obj.db_others:
                new_obj.db_add_other(DBOther.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_others') and hasattr(new_obj, 'db_deleted_others'):
            for obj in old_obj.db_deleted_others:
                n_obj = DBOther.update_version(obj, trans_dict)
                new_obj.db_deleted_others.append(n_obj)
        if 'vistrail_id' in class_dict:
            res = class_dict['vistrail_id'](old_obj, trans_dict)
            new_obj.db_vistrail_id = res
        elif hasattr(old_obj, 'db_vistrail_id') and old_obj.db_vistrail_id is not None:
            new_obj.db_vistrail_id = old_obj.db_vistrail_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
children = []
to_del = []
for child in self.db_connections:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_connection(child)
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_plugin_datas:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_plugin_data(child)
to_del = []
for child in self.db_others:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_other(child)
to_del = []
for child in self.db_modules:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_module(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_connections)
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_plugin_datas)
children.extend(self.db_deleted_others)
children.extend(self.db_deleted_modules)
if remove:
self.db_deleted_connections = []
self.db_deleted_annotations = []
self.db_deleted_plugin_datas = []
self.db_deleted_others = []
self.db_deleted_modules = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_connections:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_plugin_datas:
if child.has_changes():
return True
for child in self._db_others:
if child.has_changes():
return True
for child in self._db_modules:
if child.has_changes():
return True
return False
def __get_db_modules(self):
return self._db_modules
def __set_db_modules(self, modules):
self._db_modules = modules
self.is_dirty = True
db_modules = property(__get_db_modules, __set_db_modules)
def db_get_modules(self):
return self._db_modules
def db_add_module(self, module):
self.is_dirty = True
self._db_modules.append(module)
self.db_modules_id_index[module.db_id] = module
def db_change_module(self, module):
self.is_dirty = True
found = False
for i in xrange(len(self._db_modules)):
if self._db_modules[i].db_id == module.db_id:
self._db_modules[i] = module
found = True
break
if not found:
self._db_modules.append(module)
self.db_modules_id_index[module.db_id] = module
def db_delete_module(self, module):
self.is_dirty = True
for i in xrange(len(self._db_modules)):
if self._db_modules[i].db_id == module.db_id:
if not self._db_modules[i].is_new:
self.db_deleted_modules.append(self._db_modules[i])
del self._db_modules[i]
break
del self.db_modules_id_index[module.db_id]
def db_get_module(self, key):
for i in xrange(len(self._db_modules)):
if self._db_modules[i].db_id == key:
return self._db_modules[i]
return None
def db_get_module_by_id(self, key):
return self.db_modules_id_index[key]
def db_has_module_with_id(self, key):
return key in self.db_modules_id_index
    # -- 'id' scalar accessors (note: parameter name 'id' shadows the builtin;
    # kept as-is since this is generated code and callers may use it) -------
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        # Assignment through the db_id property marks the object dirty.
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        # add/change/delete variants bypass the dirty flag (db-layer use).
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        # The argument is ignored; the field is simply cleared.
        self._db_id = None
    # -- 'entity_type' scalar accessors ------------------------------------
    def __get_db_entity_type(self):
        return self._db_entity_type
    def __set_db_entity_type(self, entity_type):
        # Assignment through the property marks the object dirty.
        self._db_entity_type = entity_type
        self.is_dirty = True
    db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
    def db_add_entity_type(self, entity_type):
        # add/change/delete variants bypass the dirty flag (db-layer use).
        self._db_entity_type = entity_type
    def db_change_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_delete_entity_type(self, entity_type):
        # The argument is ignored; the field is simply cleared.
        self._db_entity_type = None
    # -- 'name' scalar accessors -------------------------------------------
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        # Assignment through the property marks the object dirty.
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        # add/change/delete variants bypass the dirty flag (db-layer use).
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        # The argument is ignored; the field is simply cleared.
        self._db_name = None
    # -- 'version' scalar accessors ----------------------------------------
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        # Assignment through the property marks the object dirty.
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        # add/change/delete variants bypass the dirty flag (db-layer use).
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        # The argument is ignored; the field is simply cleared.
        self._db_version = None
    # -- 'last_modified' scalar accessors ----------------------------------
    def __get_db_last_modified(self):
        return self._db_last_modified
    def __set_db_last_modified(self, last_modified):
        # Assignment through the property marks the object dirty.
        self._db_last_modified = last_modified
        self.is_dirty = True
    db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
    def db_add_last_modified(self, last_modified):
        # add/change/delete variants bypass the dirty flag (db-layer use).
        self._db_last_modified = last_modified
    def db_change_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_delete_last_modified(self, last_modified):
        # The argument is ignored; the field is simply cleared.
        self._db_last_modified = None
def __get_db_connections(self):
return self._db_connections
def __set_db_connections(self, connections):
self._db_connections = connections
self.is_dirty = True
db_connections = property(__get_db_connections, __set_db_connections)
def db_get_connections(self):
return self._db_connections
def db_add_connection(self, connection):
self.is_dirty = True
self._db_connections.append(connection)
self.db_connections_id_index[connection.db_id] = connection
def db_change_connection(self, connection):
self.is_dirty = True
found = False
for i in xrange(len(self._db_connections)):
if self._db_connections[i].db_id == connection.db_id:
self._db_connections[i] = connection
found = True
break
if not found:
self._db_connections.append(connection)
self.db_connections_id_index[connection.db_id] = connection
def db_delete_connection(self, connection):
self.is_dirty = True
for i in xrange(len(self._db_connections)):
if self._db_connections[i].db_id == connection.db_id:
if not self._db_connections[i].is_new:
self.db_deleted_connections.append(self._db_connections[i])
del self._db_connections[i]
break
del self.db_connections_id_index[connection.db_id]
def db_get_connection(self, key):
for i in xrange(len(self._db_connections)):
if self._db_connections[i].db_id == key:
return self._db_connections[i]
return None
def db_get_connection_by_id(self, key):
return self.db_connections_id_index[key]
def db_has_connection_with_id(self, key):
return key in self.db_connections_id_index
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def __get_db_plugin_datas(self):
return self._db_plugin_datas
def __set_db_plugin_datas(self, plugin_datas):
self._db_plugin_datas = plugin_datas
self.is_dirty = True
db_plugin_datas = property(__get_db_plugin_datas, __set_db_plugin_datas)
def db_get_plugin_datas(self):
return self._db_plugin_datas
def db_add_plugin_data(self, plugin_data):
self.is_dirty = True
self._db_plugin_datas.append(plugin_data)
self.db_plugin_datas_id_index[plugin_data.db_id] = plugin_data
def db_change_plugin_data(self, plugin_data):
self.is_dirty = True
found = False
for i in xrange(len(self._db_plugin_datas)):
if self._db_plugin_datas[i].db_id == plugin_data.db_id:
self._db_plugin_datas[i] = plugin_data
found = True
break
if not found:
self._db_plugin_datas.append(plugin_data)
self.db_plugin_datas_id_index[plugin_data.db_id] = plugin_data
def db_delete_plugin_data(self, plugin_data):
self.is_dirty = True
for i in xrange(len(self._db_plugin_datas)):
if self._db_plugin_datas[i].db_id == plugin_data.db_id:
if not self._db_plugin_datas[i].is_new:
self.db_deleted_plugin_datas.append(self._db_plugin_datas[i])
del self._db_plugin_datas[i]
break
del self.db_plugin_datas_id_index[plugin_data.db_id]
def db_get_plugin_data(self, key):
for i in xrange(len(self._db_plugin_datas)):
if self._db_plugin_datas[i].db_id == key:
return self._db_plugin_datas[i]
return None
def db_get_plugin_data_by_id(self, key):
return self.db_plugin_datas_id_index[key]
def db_has_plugin_data_with_id(self, key):
return key in self.db_plugin_datas_id_index
def __get_db_others(self):
return self._db_others
def __set_db_others(self, others):
self._db_others = others
self.is_dirty = True
db_others = property(__get_db_others, __set_db_others)
def db_get_others(self):
return self._db_others
def db_add_other(self, other):
self.is_dirty = True
self._db_others.append(other)
self.db_others_id_index[other.db_id] = other
def db_change_other(self, other):
self.is_dirty = True
found = False
for i in xrange(len(self._db_others)):
if self._db_others[i].db_id == other.db_id:
self._db_others[i] = other
found = True
break
if not found:
self._db_others.append(other)
self.db_others_id_index[other.db_id] = other
def db_delete_other(self, other):
self.is_dirty = True
for i in xrange(len(self._db_others)):
if self._db_others[i].db_id == other.db_id:
if not self._db_others[i].is_new:
self.db_deleted_others.append(self._db_others[i])
del self._db_others[i]
break
del self.db_others_id_index[other.db_id]
def db_get_other(self, key):
for i in xrange(len(self._db_others)):
if self._db_others[i].db_id == key:
return self._db_others[i]
return None
def db_get_other_by_id(self, key):
return self.db_others_id_index[key]
def db_has_other_with_id(self, key):
return key in self.db_others_id_index
    # -- 'vistrail_id' scalar accessors (reference to the owning vistrail) --
    def __get_db_vistrail_id(self):
        return self._db_vistrail_id
    def __set_db_vistrail_id(self, vistrail_id):
        # Assignment through the property marks the object dirty.
        self._db_vistrail_id = vistrail_id
        self.is_dirty = True
    db_vistrail_id = property(__get_db_vistrail_id, __set_db_vistrail_id)
    def db_add_vistrail_id(self, vistrail_id):
        # add/change/delete variants bypass the dirty flag (db-layer use).
        self._db_vistrail_id = vistrail_id
    def db_change_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = vistrail_id
    def db_delete_vistrail_id(self, vistrail_id):
        # The argument is ignored; the field is simply cleared.
        self._db_vistrail_id = None
    def getPrimaryKey(self):
        """Return this object's primary-key value (its 'id' field)."""
        return self._db_id
class DBMashupAction(object):
    """One versioned action in a mashup trail.

    Carries identity/provenance fields (id, prevId, date, user) and at most
    one child DBMashup payload.
    """
    vtType = 'mashup_action'
    def __init__(self, id=None, prevId=None, date=None, user=None, mashup=None):
        self._db_id = id
        self._db_prevId = prevId
        self._db_date = date
        self._db_user = user
        # Previously-persisted mashup payloads queued for db deletion.
        self.db_deleted_mashup = []
        self._db_mashup = mashup
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        """Support copy.copy(); delegates to do_copy() with no id remapping."""
        return DBMashupAction.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a deep copy of this action (and its mashup, if any).

        When new_ids is True, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id mapping is recorded in id_remap; the prevId
        pointer is rewritten through that mapping when possible.
        """
        cp = DBMashupAction(id=self._db_id,
                            prevId=self._db_prevId,
                            date=self._db_date,
                            user=self._db_user)
        if self._db_mashup is not None:
            cp._db_mashup = self._db_mashup.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # Rewrite the parent-action pointer if that action was remapped.
        if hasattr(self, 'db_prevId') and ('mashup_action', self._db_prevId) in id_remap:
            cp._db_prevId = id_remap[('mashup_action', self._db_prevId)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, returning new_obj.

        trans_dict may supply, per class name, per-field translator callables
        that override the default field-by-field copy below.
        """
        if new_obj is None:
            new_obj = DBMashupAction()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'prevId' in class_dict:
            res = class_dict['prevId'](old_obj, trans_dict)
            new_obj.db_prevId = res
        elif hasattr(old_obj, 'db_prevId') and old_obj.db_prevId is not None:
            new_obj.db_prevId = old_obj.db_prevId
        if 'date' in class_dict:
            res = class_dict['date'](old_obj, trans_dict)
            new_obj.db_date = res
        elif hasattr(old_obj, 'db_date') and old_obj.db_date is not None:
            new_obj.db_date = old_obj.db_date
        if 'user' in class_dict:
            res = class_dict['user'](old_obj, trans_dict)
            new_obj.db_user = res
        elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None:
            new_obj.db_user = old_obj.db_user
        # Single child object: recurse through DBMashup.update_version.
        if 'mashup' in class_dict:
            res = class_dict['mashup'](old_obj, trans_dict)
            new_obj.db_mashup = res
        elif hasattr(old_obj, 'db_mashup') and old_obj.db_mashup is not None:
            obj = old_obj.db_mashup
            new_obj.db_add_mashup(DBMashup.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_mashup') and hasattr(new_obj, 'db_deleted_mashup'):
            for obj in old_obj.db_deleted_mashup:
                n_obj = DBMashup.update_version(obj, trans_dict)
                new_obj.db_deleted_mashup.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples, the mashup
        child first and this action last.  orphan=True detaches the mashup."""
        children = []
        if self._db_mashup is not None:
            children.extend(self._db_mashup.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_mashup = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return mashups queued for db deletion; optionally clear the queue."""
        children = []
        children.extend(self.db_deleted_mashup)
        if remove:
            self.db_deleted_mashup = []
        return children
    def has_changes(self):
        """Return True if this action or its mashup child is unsaved/dirty."""
        if self.is_dirty:
            return True
        if self._db_mashup is not None and self._db_mashup.has_changes():
            return True
        return False
    # -- 'id' scalar accessors ---------------------------------------------
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        # Assignment through the property marks the object dirty.
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        # add/change/delete variants bypass the dirty flag (db-layer use).
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        # The argument is ignored; the field is simply cleared.
        self._db_id = None
    # -- 'prevId' scalar accessors (id of the parent mashup_action) ---------
    def __get_db_prevId(self):
        return self._db_prevId
    def __set_db_prevId(self, prevId):
        self._db_prevId = prevId
        self.is_dirty = True
    db_prevId = property(__get_db_prevId, __set_db_prevId)
    def db_add_prevId(self, prevId):
        self._db_prevId = prevId
    def db_change_prevId(self, prevId):
        self._db_prevId = prevId
    def db_delete_prevId(self, prevId):
        self._db_prevId = None
    # -- 'date' scalar accessors -------------------------------------------
    def __get_db_date(self):
        return self._db_date
    def __set_db_date(self, date):
        self._db_date = date
        self.is_dirty = True
    db_date = property(__get_db_date, __set_db_date)
    def db_add_date(self, date):
        self._db_date = date
    def db_change_date(self, date):
        self._db_date = date
    def db_delete_date(self, date):
        self._db_date = None
    # -- 'user' scalar accessors -------------------------------------------
    def __get_db_user(self):
        return self._db_user
    def __set_db_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(__get_db_user, __set_db_user)
    def db_add_user(self, user):
        self._db_user = user
    def db_change_user(self, user):
        self._db_user = user
    def db_delete_user(self, user):
        self._db_user = None
    # -- 'mashup' single-child accessors -----------------------------------
    def __get_db_mashup(self):
        return self._db_mashup
    def __set_db_mashup(self, mashup):
        self._db_mashup = mashup
        self.is_dirty = True
    db_mashup = property(__get_db_mashup, __set_db_mashup)
    def db_add_mashup(self, mashup):
        self._db_mashup = mashup
    def db_change_mashup(self, mashup):
        self._db_mashup = mashup
    def db_delete_mashup(self, mashup):
        # A persisted mashup is queued for db deletion before being detached.
        if not self.is_new:
            self.db_deleted_mashup.append(self._db_mashup)
        self._db_mashup = None
    def getPrimaryKey(self):
        """Return this object's primary-key value (its 'id' field)."""
        return self._db_id
class DBConfiguration(object):
    """A set of DBConfigKey children indexed by key name.

    Unlike the id-keyed collections elsewhere in this module, config keys
    carry no id; they are tracked only through the name index.
    """
    vtType = 'configuration'
    def __init__(self, config_keys=None):
        # Keys removed from a persisted configuration, pending db deletion.
        self.db_deleted_config_keys = []
        # name -> DBConfigKey lookup for the current children.
        self.db_config_keys_name_index = {}
        if config_keys is None:
            self._db_config_keys = []
        else:
            self._db_config_keys = config_keys
            for v in self._db_config_keys:
                self.db_config_keys_name_index[v.db_name] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        """Support copy.copy(); delegates to do_copy() with no id remapping."""
        return DBConfiguration.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a deep copy of this configuration and its keys."""
        cp = DBConfiguration()
        if self._db_config_keys is None:
            cp._db_config_keys = []
        else:
            cp._db_config_keys = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_config_keys]
        # set new ids
        if new_ids:
            # NOTE(review): DBConfiguration defines no db_id attribute, so
            # reading self.db_id below would raise AttributeError -- presumably
            # callers never pass new_ids=True for configurations; confirm
            # before relying on this branch.
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_config_keys_name_index = dict((v.db_name, v) for v in cp._db_config_keys)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, returning new_obj."""
        if new_obj is None:
            new_obj = DBConfiguration()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'config_keys' in class_dict:
            res = class_dict['config_keys'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_config_key(obj)
        elif hasattr(old_obj, 'db_config_keys') and old_obj.db_config_keys is not None:
            for obj in old_obj.db_config_keys:
                new_obj.db_add_config_key(DBConfigKey.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_config_keys') and hasattr(new_obj, 'db_deleted_config_keys'):
            for obj in old_obj.db_deleted_config_keys:
                n_obj = DBConfigKey.update_version(obj, trans_dict)
                new_obj.db_deleted_config_keys.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples, children first.

        NOTE(review): orphan=True would call db_delete_config_key, which
        always raises for this non-keyed collection.
        """
        children = []
        to_del = []
        for child in self.db_config_keys:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_config_key(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return keys queued for db deletion; optionally clear the queue."""
        children = []
        children.extend(self.db_deleted_config_keys)
        if remove:
            self.db_deleted_config_keys = []
        return children
    def has_changes(self):
        """Return True if this configuration or any current key is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_config_keys:
            if child.has_changes():
                return True
        return False
    def __get_db_config_keys(self):
        return self._db_config_keys
    def __set_db_config_keys(self, config_keys):
        self._db_config_keys = config_keys
        self.is_dirty = True
    db_config_keys = property(__get_db_config_keys, __set_db_config_keys)
    def db_get_config_keys(self):
        return self._db_config_keys
    def db_add_config_key(self, config_key):
        """Append config_key and index it by name."""
        self.is_dirty = True
        self._db_config_keys.append(config_key)
        self.db_config_keys_name_index[config_key.db_name] = config_key
    def db_change_config_key(self, config_key):
        """Replace the key with the same name, or append it if absent.

        Bug fix: the generated version unconditionally appended, accumulating
        stale duplicates in _db_config_keys while the name index pointed only
        at the newest entry (so do_copy / serialization saw the duplicates).
        """
        self.is_dirty = True
        for i, existing in enumerate(self._db_config_keys):
            if existing.db_name == config_key.db_name:
                self._db_config_keys[i] = config_key
                break
        else:
            self._db_config_keys.append(config_key)
        self.db_config_keys_name_index[config_key.db_name] = config_key
    def db_delete_config_key(self, config_key):
        # Config keys carry no id, so targeted deletion is unsupported.
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_config_key(self, key):
        # Non-keyed collection: lookup by id is meaningless; always None.
        return None
    def db_get_config_key_by_name(self, key):
        return self.db_config_keys_name_index[key]
    def db_has_config_key_with_name(self, key):
        return key in self.db_config_keys_name_index
class DBChange(object):
vtType = 'change'
def __init__(self, data=None, id=None, what=None, oldObjId=None, newObjId=None, parentObjId=None, parentObjType=None):
self.db_deleted_data = []
self._db_data = data
self._db_id = id
self._db_what = what
self._db_oldObjId = oldObjId
self._db_newObjId = newObjId
self._db_parentObjId = parentObjId
self._db_parentObjType = parentObjType
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBChange.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBChange(id=self._db_id,
what=self._db_what,
oldObjId=self._db_oldObjId,
newObjId=self._db_newObjId,
parentObjId=self._db_parentObjId,
parentObjType=self._db_parentObjType)
if self._db_data is not None:
cp._db_data = self._db_data.do_copy(new_ids, id_scope, id_remap)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_oldObjId') and (self._db_what, self._db_oldObjId) in id_remap:
cp._db_oldObjId = id_remap[(self._db_what, self._db_oldObjId)]
if hasattr(self, 'db_newObjId') and (self._db_what, self._db_newObjId) in id_remap:
cp._db_newObjId = id_remap[(self._db_what, self._db_newObjId)]
if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap:
cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBChange()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'data' in class_dict:
res = class_dict['data'](old_obj, trans_dict)
new_obj.db_data = res
elif hasattr(old_obj, 'db_data') and old_obj.db_data is not None:
obj = old_obj.db_data
if obj.vtType == 'module':
new_obj.db_add_data(DBModule.update_version(obj, trans_dict))
elif obj.vtType == 'location':
new_obj.db_add_data(DBLocation.update_version(obj, trans_dict))
elif obj.vtType == 'annotation':
new_obj.db_add_data(DBAnnotation.update_version(obj, trans_dict))
elif obj.vtType == 'function':
new_obj.db_add_data(DBFunction.update_version(obj, trans_dict))
elif obj.vtType == 'connection':
new_obj.db_add_data(DBConnection.update_version(obj, trans_dict))
elif obj.vtType == 'port':
new_obj.db_add_data(DBPort.update_version(obj, trans_dict))
elif obj.vtType == 'parameter':
new_obj.db_add_data(DBParameter.update_version(obj, trans_dict))
elif obj.vtType == 'portSpec':
new_obj.db_add_data(DBPortSpec.update_version(obj, trans_dict))
elif obj.vtType == 'abstraction':
new_obj.db_add_data(DBAbstraction.update_version(obj, trans_dict))
elif obj.vtType == 'group':
new_obj.db_add_data(DBGroup.update_version(obj, trans_dict))
elif obj.vtType == 'other':
new_obj.db_add_data(DBOther.update_version(obj, trans_dict))
elif obj.vtType == 'plugin_data':
new_obj.db_add_data(DBPluginData.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_data') and hasattr(new_obj, 'db_deleted_data'):
for obj in old_obj.db_deleted_data:
if obj.vtType == 'module':
n_obj = DBModule.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'location':
n_obj = DBLocation.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'annotation':
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'function':
n_obj = DBFunction.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'connection':
n_obj = DBConnection.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'port':
n_obj = DBPort.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'parameter':
n_obj = DBParameter.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'portSpec':
n_obj = DBPortSpec.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'abstraction':
n_obj = DBAbstraction.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'group':
n_obj = DBGroup.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'other':
n_obj = DBOther.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'plugin_data':
n_obj = DBPluginData.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'what' in class_dict:
res = class_dict['what'](old_obj, trans_dict)
new_obj.db_what = res
elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None:
new_obj.db_what = old_obj.db_what
if 'oldObjId' in class_dict:
res = class_dict['oldObjId'](old_obj, trans_dict)
new_obj.db_oldObjId = res
elif hasattr(old_obj, 'db_oldObjId') and old_obj.db_oldObjId is not None:
new_obj.db_oldObjId = old_obj.db_oldObjId
if 'newObjId' in class_dict:
res = class_dict['newObjId'](old_obj, trans_dict)
new_obj.db_newObjId = res
elif hasattr(old_obj, 'db_newObjId') and old_obj.db_newObjId is not None:
new_obj.db_newObjId = old_obj.db_newObjId
if 'parentObjId' in class_dict:
res = class_dict['parentObjId'](old_obj, trans_dict)
new_obj.db_parentObjId = res
elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None:
new_obj.db_parentObjId = old_obj.db_parentObjId
if 'parentObjType' in class_dict:
res = class_dict['parentObjType'](old_obj, trans_dict)
new_obj.db_parentObjType = res
elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None:
new_obj.db_parentObjType = old_obj.db_parentObjType
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
    # NOTE(review): these methods belong to a generated change-operation
    # class whose header is above this region (fields: what, oldObjId,
    # newObjId, parentObjId, parentObjType, single child `data`) -- confirm
    # the class name against the full file.
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object
        and its single `data` child; with orphan=True the child link is
        detached after being collected."""
        children = []
        if self._db_data is not None:
            children.extend(self._db_data.db_children((self.vtType, self.db_id), orphan, for_action))
        if orphan:
            # caller takes ownership of the child; drop our reference
            self._db_data = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return the accumulated deleted children; with remove=True the
        internal list is reset afterwards."""
        children = []
        children.extend(self.db_deleted_data)
        if remove:
            self.db_deleted_data = []
        return children
    def has_changes(self):
        """True if this object or its child payload is dirty."""
        if self.is_dirty:
            return True
        if self._db_data is not None and self._db_data.has_changes():
            return True
        return False
    # --- generated accessors ------------------------------------------
    # Each db_<field> property flips is_dirty on assignment; the matching
    # db_add_/db_change_/db_delete_<field> methods mutate the backing slot
    # WITHOUT touching the dirty flag (used by the db layer itself).
    def __get_db_data(self):
        return self._db_data
    def __set_db_data(self, data):
        self._db_data = data
        self.is_dirty = True
    db_data = property(__get_db_data, __set_db_data)
    def db_add_data(self, data):
        self._db_data = data
    def db_change_data(self, data):
        self._db_data = data
    def db_delete_data(self, data):
        # only already-persisted children are queued for deletion
        if not self.is_new:
            self.db_deleted_data.append(self._db_data)
        self._db_data = None
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_what(self):
        return self._db_what
    def __set_db_what(self, what):
        self._db_what = what
        self.is_dirty = True
    db_what = property(__get_db_what, __set_db_what)
    def db_add_what(self, what):
        self._db_what = what
    def db_change_what(self, what):
        self._db_what = what
    def db_delete_what(self, what):
        self._db_what = None
    def __get_db_oldObjId(self):
        return self._db_oldObjId
    def __set_db_oldObjId(self, oldObjId):
        self._db_oldObjId = oldObjId
        self.is_dirty = True
    db_oldObjId = property(__get_db_oldObjId, __set_db_oldObjId)
    def db_add_oldObjId(self, oldObjId):
        self._db_oldObjId = oldObjId
    def db_change_oldObjId(self, oldObjId):
        self._db_oldObjId = oldObjId
    def db_delete_oldObjId(self, oldObjId):
        self._db_oldObjId = None
    def __get_db_newObjId(self):
        return self._db_newObjId
    def __set_db_newObjId(self, newObjId):
        self._db_newObjId = newObjId
        self.is_dirty = True
    db_newObjId = property(__get_db_newObjId, __set_db_newObjId)
    def db_add_newObjId(self, newObjId):
        self._db_newObjId = newObjId
    def db_change_newObjId(self, newObjId):
        self._db_newObjId = newObjId
    def db_delete_newObjId(self, newObjId):
        self._db_newObjId = None
    def __get_db_parentObjId(self):
        return self._db_parentObjId
    def __set_db_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
        self.is_dirty = True
    db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId)
    def db_add_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
    def db_change_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
    def db_delete_parentObjId(self, parentObjId):
        self._db_parentObjId = None
    def __get_db_parentObjType(self):
        return self._db_parentObjType
    def __set_db_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
        self.is_dirty = True
    db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType)
    def db_add_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
    def db_change_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
    def db_delete_parentObjType(self, parentObjType):
        self._db_parentObjType = None
    def getPrimaryKey(self):
        return self._db_id
class DBPackage(object):
    """Generated in-memory mirror of a 'package' database object.

    Scalar columns: id, name, identifier, codepath, load_configuration,
    version, description.  Child collection: module descriptors, kept in
    two lookup indices -- ``db_module_descriptors_id_index`` (by id) and
    ``db_module_descriptors_name_index`` (by (name, namespace, version)).
    ``is_dirty`` / ``is_new`` track persistence state; removed children
    accumulate in ``db_deleted_module_descriptors`` (presumably so the
    database layer can issue the matching deletes -- confirm).
    """
    vtType = 'package'
    def __init__(self, id=None, name=None, identifier=None, codepath=None, load_configuration=None, version=None, description=None, module_descriptors=None):
        self._db_id = id
        self._db_name = name
        self._db_identifier = identifier
        self._db_codepath = codepath
        self._db_load_configuration = load_configuration
        self._db_version = version
        self._db_description = description
        self.db_deleted_module_descriptors = []
        self.db_module_descriptors_id_index = {}
        self.db_module_descriptors_name_index = {}
        if module_descriptors is None:
            self._db_module_descriptors = []
        else:
            self._db_module_descriptors = module_descriptors
            # seed both lookup indices from the supplied children
            for v in self._db_module_descriptors:
                self.db_module_descriptors_id_index[v.db_id] = v
                self.db_module_descriptors_name_index[(v.db_name,v.db_namespace,v.db_version)] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy() without id rewriting
        return DBPackage.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this package; with new_ids, allocate fresh ids from
        id_scope and record (type, old_id) -> new_id pairs in id_remap."""
        cp = DBPackage(id=self._db_id,
                       name=self._db_name,
                       identifier=self._db_identifier,
                       codepath=self._db_codepath,
                       load_configuration=self._db_load_configuration,
                       version=self._db_version,
                       description=self._db_description)
        if self._db_module_descriptors is None:
            cp._db_module_descriptors = []
        else:
            cp._db_module_descriptors = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_module_descriptors]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_module_descriptors_id_index = dict((v.db_id, v) for v in cp._db_module_descriptors)
        cp.db_module_descriptors_name_index = dict(((v.db_name,v.db_namespace,v.db_version), v) for v in cp._db_module_descriptors)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj across a schema version; trans_dict may hold
        per-field hook callables keyed by class name, then field name.
        Fields without a hook are copied verbatim when present."""
        if new_obj is None:
            new_obj = DBPackage()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'identifier' in class_dict:
            res = class_dict['identifier'](old_obj, trans_dict)
            new_obj.db_identifier = res
        elif hasattr(old_obj, 'db_identifier') and old_obj.db_identifier is not None:
            new_obj.db_identifier = old_obj.db_identifier
        if 'codepath' in class_dict:
            res = class_dict['codepath'](old_obj, trans_dict)
            new_obj.db_codepath = res
        elif hasattr(old_obj, 'db_codepath') and old_obj.db_codepath is not None:
            new_obj.db_codepath = old_obj.db_codepath
        if 'load_configuration' in class_dict:
            res = class_dict['load_configuration'](old_obj, trans_dict)
            new_obj.db_load_configuration = res
        elif hasattr(old_obj, 'db_load_configuration') and old_obj.db_load_configuration is not None:
            new_obj.db_load_configuration = old_obj.db_load_configuration
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'description' in class_dict:
            res = class_dict['description'](old_obj, trans_dict)
            new_obj.db_description = res
        elif hasattr(old_obj, 'db_description') and old_obj.db_description is not None:
            new_obj.db_description = old_obj.db_description
        if 'module_descriptors' in class_dict:
            res = class_dict['module_descriptors'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_module_descriptor(obj)
        elif hasattr(old_obj, 'db_module_descriptors') and old_obj.db_module_descriptors is not None:
            # children are recursively version-translated
            for obj in old_obj.db_module_descriptors:
                new_obj.db_add_module_descriptor(DBModuleDescriptor.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_module_descriptors') and hasattr(new_obj, 'db_deleted_module_descriptors'):
            for obj in old_obj.db_deleted_module_descriptors:
                n_obj = DBModuleDescriptor.update_version(obj, trans_dict)
                new_obj.db_deleted_module_descriptors.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object
        and all descendants; with orphan=True children are detached."""
        children = []
        to_del = []
        for child in self.db_module_descriptors:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        # deletion deferred so the list is not mutated while iterating
        for child in to_del:
            self.db_delete_module_descriptor(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return accumulated deleted children; remove=True resets the list."""
        children = []
        children.extend(self.db_deleted_module_descriptors)
        if remove:
            self.db_deleted_module_descriptors = []
        return children
    def has_changes(self):
        """True if this object or any child module descriptor is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_module_descriptors:
            if child.has_changes():
                return True
        return False
    # --- generated accessors ------------------------------------------
    # db_<field> properties flip is_dirty on assignment; the db_add_/
    # db_change_/db_delete_<field> methods mutate the slot without
    # touching the dirty flag.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_identifier(self):
        return self._db_identifier
    def __set_db_identifier(self, identifier):
        self._db_identifier = identifier
        self.is_dirty = True
    db_identifier = property(__get_db_identifier, __set_db_identifier)
    def db_add_identifier(self, identifier):
        self._db_identifier = identifier
    def db_change_identifier(self, identifier):
        self._db_identifier = identifier
    def db_delete_identifier(self, identifier):
        self._db_identifier = None
    def __get_db_codepath(self):
        return self._db_codepath
    def __set_db_codepath(self, codepath):
        self._db_codepath = codepath
        self.is_dirty = True
    db_codepath = property(__get_db_codepath, __set_db_codepath)
    def db_add_codepath(self, codepath):
        self._db_codepath = codepath
    def db_change_codepath(self, codepath):
        self._db_codepath = codepath
    def db_delete_codepath(self, codepath):
        self._db_codepath = None
    def __get_db_load_configuration(self):
        return self._db_load_configuration
    def __set_db_load_configuration(self, load_configuration):
        self._db_load_configuration = load_configuration
        self.is_dirty = True
    db_load_configuration = property(__get_db_load_configuration, __set_db_load_configuration)
    def db_add_load_configuration(self, load_configuration):
        self._db_load_configuration = load_configuration
    def db_change_load_configuration(self, load_configuration):
        self._db_load_configuration = load_configuration
    def db_delete_load_configuration(self, load_configuration):
        self._db_load_configuration = None
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    def __get_db_description(self):
        return self._db_description
    def __set_db_description(self, description):
        self._db_description = description
        self.is_dirty = True
    db_description = property(__get_db_description, __set_db_description)
    def db_add_description(self, description):
        self._db_description = description
    def db_change_description(self, description):
        self._db_description = description
    def db_delete_description(self, description):
        self._db_description = None
    def __get_db_module_descriptors(self):
        return self._db_module_descriptors
    def __set_db_module_descriptors(self, module_descriptors):
        # NOTE(review): bulk assignment does NOT rebuild the lookup
        # indices -- callers appear expected to use do_copy/add/change/
        # delete instead; confirm against the generator's conventions.
        self._db_module_descriptors = module_descriptors
        self.is_dirty = True
    db_module_descriptors = property(__get_db_module_descriptors, __set_db_module_descriptors)
    def db_get_module_descriptors(self):
        return self._db_module_descriptors
    def db_add_module_descriptor(self, module_descriptor):
        self.is_dirty = True
        self._db_module_descriptors.append(module_descriptor)
        self.db_module_descriptors_id_index[module_descriptor.db_id] = module_descriptor
        self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)] = module_descriptor
    def db_change_module_descriptor(self, module_descriptor):
        """Replace the descriptor with matching db_id, or append if absent."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_module_descriptors)):
            if self._db_module_descriptors[i].db_id == module_descriptor.db_id:
                self._db_module_descriptors[i] = module_descriptor
                found = True
                break
        if not found:
            self._db_module_descriptors.append(module_descriptor)
        self.db_module_descriptors_id_index[module_descriptor.db_id] = module_descriptor
        self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)] = module_descriptor
    def db_delete_module_descriptor(self, module_descriptor):
        self.is_dirty = True
        for i in xrange(len(self._db_module_descriptors)):
            if self._db_module_descriptors[i].db_id == module_descriptor.db_id:
                # only already-persisted children are queued for deletion
                if not self._db_module_descriptors[i].is_new:
                    self.db_deleted_module_descriptors.append(self._db_module_descriptors[i])
                del self._db_module_descriptors[i]
                break
        # NOTE(review): if no matching descriptor exists, the index deletes
        # below still run and raise KeyError -- appears intentional in this
        # generated code (callers pass known children); confirm.
        del self.db_module_descriptors_id_index[module_descriptor.db_id]
        del self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)]
    def db_get_module_descriptor(self, key):
        # linear scan variant; the *_by_id methods use the index instead
        for i in xrange(len(self._db_module_descriptors)):
            if self._db_module_descriptors[i].db_id == key:
                return self._db_module_descriptors[i]
        return None
    def db_get_module_descriptor_by_id(self, key):
        return self.db_module_descriptors_id_index[key]
    def db_has_module_descriptor_with_id(self, key):
        return key in self.db_module_descriptors_id_index
    def db_get_module_descriptor_by_name(self, key):
        # key is a (name, namespace, version) tuple
        return self.db_module_descriptors_name_index[key]
    def db_has_module_descriptor_with_name(self, key):
        return key in self.db_module_descriptors_name_index
    def getPrimaryKey(self):
        return self._db_id
class DBLoopExec(object):
    """Generated in-memory mirror of a 'loop_exec' database object.

    Records one loop execution: scalar columns (id, ts_start, ts_end,
    iteration, completed, error) plus heterogeneous child item executions
    (module_exec / group_exec / loop_exec, dispatched on ``vtType``),
    indexed by id in ``db_item_execs_id_index``.  ``is_dirty``/``is_new``
    track persistence state.
    """
    vtType = 'loop_exec'
    def __init__(self, item_execs=None, id=None, ts_start=None, ts_end=None, iteration=None, completed=None, error=None):
        self.db_deleted_item_execs = []
        self.db_item_execs_id_index = {}
        if item_execs is None:
            self._db_item_execs = []
        else:
            self._db_item_execs = item_execs
            # seed the id index from the supplied children
            for v in self._db_item_execs:
                self.db_item_execs_id_index[v.db_id] = v
        self._db_id = id
        self._db_ts_start = ts_start
        self._db_ts_end = ts_end
        self._db_iteration = iteration
        self._db_completed = completed
        self._db_error = error
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy() without id rewriting
        return DBLoopExec.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy; with new_ids, allocate fresh ids from id_scope and
        record (type, old_id) -> new_id pairs in id_remap."""
        cp = DBLoopExec(id=self._db_id,
                        ts_start=self._db_ts_start,
                        ts_end=self._db_ts_end,
                        iteration=self._db_iteration,
                        completed=self._db_completed,
                        error=self._db_error)
        if self._db_item_execs is None:
            cp._db_item_execs = []
        else:
            cp._db_item_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_item_execs]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_item_execs_id_index = dict((v.db_id, v) for v in cp._db_item_execs)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj across a schema version; trans_dict may hold
        per-field hook callables keyed by class name, then field name.
        Child item_execs are dispatched on their vtType."""
        if new_obj is None:
            new_obj = DBLoopExec()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'item_execs' in class_dict:
            res = class_dict['item_execs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_item_exec(obj)
        elif hasattr(old_obj, 'db_item_execs') and old_obj.db_item_execs is not None:
            for obj in old_obj.db_item_execs:
                if obj.vtType == 'module_exec':
                    new_obj.db_add_item_exec(DBModuleExec.update_version(obj, trans_dict))
                elif obj.vtType == 'group_exec':
                    new_obj.db_add_item_exec(DBGroupExec.update_version(obj, trans_dict))
                elif obj.vtType == 'loop_exec':
                    new_obj.db_add_item_exec(DBLoopExec.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_item_execs') and hasattr(new_obj, 'db_deleted_item_execs'):
            for obj in old_obj.db_deleted_item_execs:
                if obj.vtType == 'module_exec':
                    n_obj = DBModuleExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
                elif obj.vtType == 'group_exec':
                    n_obj = DBGroupExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
                elif obj.vtType == 'loop_exec':
                    n_obj = DBLoopExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'ts_start' in class_dict:
            res = class_dict['ts_start'](old_obj, trans_dict)
            new_obj.db_ts_start = res
        elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
            new_obj.db_ts_start = old_obj.db_ts_start
        if 'ts_end' in class_dict:
            res = class_dict['ts_end'](old_obj, trans_dict)
            new_obj.db_ts_end = res
        elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
            new_obj.db_ts_end = old_obj.db_ts_end
        if 'iteration' in class_dict:
            res = class_dict['iteration'](old_obj, trans_dict)
            new_obj.db_iteration = res
        elif hasattr(old_obj, 'db_iteration') and old_obj.db_iteration is not None:
            new_obj.db_iteration = old_obj.db_iteration
        if 'completed' in class_dict:
            res = class_dict['completed'](old_obj, trans_dict)
            new_obj.db_completed = res
        elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
            new_obj.db_completed = old_obj.db_completed
        if 'error' in class_dict:
            res = class_dict['error'](old_obj, trans_dict)
            new_obj.db_error = res
        elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None:
            new_obj.db_error = old_obj.db_error
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object
        and all descendants; with orphan=True children are detached."""
        children = []
        to_del = []
        for child in self.db_item_execs:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        # deletion deferred so the list is not mutated while iterating
        for child in to_del:
            self.db_delete_item_exec(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return accumulated deleted children; remove=True resets the list."""
        children = []
        children.extend(self.db_deleted_item_execs)
        if remove:
            self.db_deleted_item_execs = []
        return children
    def has_changes(self):
        """True if this object or any child item exec is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_item_execs:
            if child.has_changes():
                return True
        return False
    # --- generated accessors ------------------------------------------
    # db_<field> properties flip is_dirty on assignment; the db_add_/
    # db_change_/db_delete_<field> methods mutate the slot without
    # touching the dirty flag.
    def __get_db_item_execs(self):
        return self._db_item_execs
    def __set_db_item_execs(self, item_execs):
        # NOTE(review): bulk assignment does NOT rebuild the id index --
        # confirm callers use add/change/delete or do_copy instead.
        self._db_item_execs = item_execs
        self.is_dirty = True
    db_item_execs = property(__get_db_item_execs, __set_db_item_execs)
    def db_get_item_execs(self):
        return self._db_item_execs
    def db_add_item_exec(self, item_exec):
        self.is_dirty = True
        self._db_item_execs.append(item_exec)
        self.db_item_execs_id_index[item_exec.db_id] = item_exec
    def db_change_item_exec(self, item_exec):
        """Replace the item exec with matching db_id, or append if absent."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == item_exec.db_id:
                self._db_item_execs[i] = item_exec
                found = True
                break
        if not found:
            self._db_item_execs.append(item_exec)
        self.db_item_execs_id_index[item_exec.db_id] = item_exec
    def db_delete_item_exec(self, item_exec):
        self.is_dirty = True
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == item_exec.db_id:
                # only already-persisted children are queued for deletion
                if not self._db_item_execs[i].is_new:
                    self.db_deleted_item_execs.append(self._db_item_execs[i])
                del self._db_item_execs[i]
                break
        # NOTE(review): raises KeyError when the item is absent from the
        # index -- appears intentional in this generated code; confirm.
        del self.db_item_execs_id_index[item_exec.db_id]
    def db_get_item_exec(self, key):
        # linear scan variant; db_get_item_exec_by_id uses the index
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == key:
                return self._db_item_execs[i]
        return None
    def db_get_item_exec_by_id(self, key):
        return self.db_item_execs_id_index[key]
    def db_has_item_exec_with_id(self, key):
        return key in self.db_item_execs_id_index
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_ts_start(self):
        return self._db_ts_start
    def __set_db_ts_start(self, ts_start):
        self._db_ts_start = ts_start
        self.is_dirty = True
    db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
    def db_add_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_change_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_delete_ts_start(self, ts_start):
        self._db_ts_start = None
    def __get_db_ts_end(self):
        return self._db_ts_end
    def __set_db_ts_end(self, ts_end):
        self._db_ts_end = ts_end
        self.is_dirty = True
    db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
    def db_add_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_change_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_delete_ts_end(self, ts_end):
        self._db_ts_end = None
    def __get_db_iteration(self):
        return self._db_iteration
    def __set_db_iteration(self, iteration):
        self._db_iteration = iteration
        self.is_dirty = True
    db_iteration = property(__get_db_iteration, __set_db_iteration)
    def db_add_iteration(self, iteration):
        self._db_iteration = iteration
    def db_change_iteration(self, iteration):
        self._db_iteration = iteration
    def db_delete_iteration(self, iteration):
        self._db_iteration = None
    def __get_db_completed(self):
        return self._db_completed
    def __set_db_completed(self, completed):
        self._db_completed = completed
        self.is_dirty = True
    db_completed = property(__get_db_completed, __set_db_completed)
    def db_add_completed(self, completed):
        self._db_completed = completed
    def db_change_completed(self, completed):
        self._db_completed = completed
    def db_delete_completed(self, completed):
        self._db_completed = None
    def __get_db_error(self):
        return self._db_error
    def __set_db_error(self, error):
        self._db_error = error
        self.is_dirty = True
    db_error = property(__get_db_error, __set_db_error)
    def db_add_error(self, error):
        self._db_error = error
    def db_change_error(self, error):
        self._db_error = error
    def db_delete_error(self, error):
        self._db_error = None
    def getPrimaryKey(self):
        return self._db_id
class DBConnection(object):
    """Generated in-memory mirror of a 'connection' database object.

    Holds an id plus a collection of child ports, indexed both by id
    (``db_ports_id_index``) and by port type (``db_ports_type_index``);
    the type index keeps at most one port per db_type value.
    ``is_dirty`` / ``is_new`` track persistence state.
    """
    vtType = 'connection'
    def __init__(self, id=None, ports=None):
        self._db_id = id
        self.db_deleted_ports = []
        self.db_ports_id_index = {}
        self.db_ports_type_index = {}
        if ports is None:
            self._db_ports = []
        else:
            self._db_ports = ports
            # seed both lookup indices from the supplied children
            for v in self._db_ports:
                self.db_ports_id_index[v.db_id] = v
                self.db_ports_type_index[v.db_type] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy() without id rewriting
        return DBConnection.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy; with new_ids, allocate fresh ids from id_scope and
        record (type, old_id) -> new_id pairs in id_remap."""
        cp = DBConnection(id=self._db_id)
        if self._db_ports is None:
            cp._db_ports = []
        else:
            cp._db_ports = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_ports]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_ports_id_index = dict((v.db_id, v) for v in cp._db_ports)
        cp.db_ports_type_index = dict((v.db_type, v) for v in cp._db_ports)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj across a schema version; trans_dict may hold
        per-field hook callables keyed by class name, then field name."""
        if new_obj is None:
            new_obj = DBConnection()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'ports' in class_dict:
            res = class_dict['ports'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_port(obj)
        elif hasattr(old_obj, 'db_ports') and old_obj.db_ports is not None:
            # child ports are recursively version-translated
            for obj in old_obj.db_ports:
                new_obj.db_add_port(DBPort.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_ports') and hasattr(new_obj, 'db_deleted_ports'):
            for obj in old_obj.db_deleted_ports:
                n_obj = DBPort.update_version(obj, trans_dict)
                new_obj.db_deleted_ports.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object
        and all descendants; with orphan=True children are detached."""
        children = []
        to_del = []
        for child in self.db_ports:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        # deletion deferred so the list is not mutated while iterating
        for child in to_del:
            self.db_delete_port(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return accumulated deleted children; remove=True resets the list."""
        children = []
        children.extend(self.db_deleted_ports)
        if remove:
            self.db_deleted_ports = []
        return children
    def has_changes(self):
        """True if this object or any child port is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_ports:
            if child.has_changes():
                return True
        return False
    # --- generated accessors ------------------------------------------
    # db_<field> properties flip is_dirty on assignment; the db_add_/
    # db_change_/db_delete_<field> methods mutate the slot without
    # touching the dirty flag.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_ports(self):
        return self._db_ports
    def __set_db_ports(self, ports):
        # NOTE(review): bulk assignment does NOT rebuild the lookup
        # indices -- confirm callers use add/change/delete or do_copy.
        self._db_ports = ports
        self.is_dirty = True
    db_ports = property(__get_db_ports, __set_db_ports)
    def db_get_ports(self):
        return self._db_ports
    def db_add_port(self, port):
        self.is_dirty = True
        self._db_ports.append(port)
        self.db_ports_id_index[port.db_id] = port
        self.db_ports_type_index[port.db_type] = port
    def db_change_port(self, port):
        """Replace the port with matching db_id, or append if absent."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_ports)):
            if self._db_ports[i].db_id == port.db_id:
                self._db_ports[i] = port
                found = True
                break
        if not found:
            self._db_ports.append(port)
        self.db_ports_id_index[port.db_id] = port
        self.db_ports_type_index[port.db_type] = port
    def db_delete_port(self, port):
        self.is_dirty = True
        for i in xrange(len(self._db_ports)):
            if self._db_ports[i].db_id == port.db_id:
                # only already-persisted children are queued for deletion
                if not self._db_ports[i].is_new:
                    self.db_deleted_ports.append(self._db_ports[i])
                del self._db_ports[i]
                break
        # NOTE(review): raises KeyError when the port is absent from the
        # indices -- appears intentional in this generated code; confirm.
        del self.db_ports_id_index[port.db_id]
        del self.db_ports_type_index[port.db_type]
    def db_get_port(self, key):
        # linear scan variant; db_get_port_by_id uses the index
        for i in xrange(len(self._db_ports)):
            if self._db_ports[i].db_id == key:
                return self._db_ports[i]
        return None
    def db_get_port_by_id(self, key):
        return self.db_ports_id_index[key]
    def db_has_port_with_id(self, key):
        return key in self.db_ports_id_index
    def db_get_port_by_type(self, key):
        return self.db_ports_type_index[key]
    def db_has_port_with_type(self, key):
        return key in self.db_ports_type_index
    def getPrimaryKey(self):
        return self._db_id
class DBConfigBool(object):
    """Generated in-memory mirror of a 'config_bool' database object.

    Carries a single ``value`` slot plus the ``is_dirty`` / ``is_new``
    persistence flags shared by all generated DB classes.  The
    ``db_value`` property marks the object dirty on assignment, while
    ``db_add_value`` / ``db_change_value`` / ``db_delete_value`` mutate
    the slot without touching the dirty flag.
    """
    vtType = 'config_bool'

    def __init__(self, value=None):
        self._db_value = value
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        # copy.copy() delegates to do_copy() without id rewriting
        return DBConfigBool.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object; with new_ids, allocate a fresh id from
        id_scope and record the (type, old_id) -> new_id pair in id_remap."""
        duplicate = DBConfigBool(value=self._db_value)
        if new_ids:
            fresh_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                remap_key = (id_scope.remap[self.vtType], self.db_id)
            else:
                remap_key = (self.vtType, self.db_id)
            id_remap[remap_key] = fresh_id
            duplicate.db_id = fresh_id
        else:
            # plain copies keep the original's persistence flags
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj across a schema version; trans_dict may hold
        a per-field hook callable keyed by class name, then field name."""
        if new_obj is None:
            new_obj = DBConfigBool()
        hooks = {}
        if new_obj.__class__.__name__ in trans_dict:
            hooks = trans_dict[new_obj.__class__.__name__]
        if 'value' in hooks:
            new_obj.db_value = hooks['value'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            new_obj.db_value = old_obj.db_value
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Leaf object: the only child triple is this object itself."""
        parent_type, parent_id = parent
        return [(self, parent_type, parent_id)]

    def db_deleted_children(self, remove=False):
        # leaf object: nothing is ever queued for deletion
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    def _read_value(self):
        return self._db_value

    def _write_value(self, value):
        self._db_value = value
        self.is_dirty = True

    db_value = property(_read_value, _write_value)

    def db_add_value(self, value):
        self._db_value = value

    def db_change_value(self, value):
        self._db_value = value

    def db_delete_value(self, value):
        self._db_value = None
class DBAction(object):
vtType = 'action'
def __init__(self, operations=None, id=None, prevId=None, date=None, session=None, user=None, annotations=None):
self.db_deleted_operations = []
self.db_operations_id_index = {}
if operations is None:
self._db_operations = []
else:
self._db_operations = operations
for v in self._db_operations:
self.db_operations_id_index[v.db_id] = v
self._db_id = id
self._db_prevId = prevId
self._db_date = date
self._db_session = session
self._db_user = user
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
self.db_annotations_key_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_annotations_key_index[v.db_key] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBAction.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBAction(id=self._db_id,
prevId=self._db_prevId,
date=self._db_date,
session=self._db_session,
user=self._db_user)
if self._db_operations is None:
cp._db_operations = []
else:
cp._db_operations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_operations]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_prevId') and ('action', self._db_prevId) in id_remap:
cp._db_prevId = id_remap[('action', self._db_prevId)]
# recreate indices and set flags
cp.db_operations_id_index = dict((v.db_id, v) for v in cp._db_operations)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBAction()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'operations' in class_dict:
res = class_dict['operations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_operation(obj)
elif hasattr(old_obj, 'db_operations') and old_obj.db_operations is not None:
for obj in old_obj.db_operations:
if obj.vtType == 'add':
new_obj.db_add_operation(DBAdd.update_version(obj, trans_dict))
elif obj.vtType == 'delete':
new_obj.db_add_operation(DBDelete.update_version(obj, trans_dict))
elif obj.vtType == 'change':
new_obj.db_add_operation(DBChange.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_operations') and hasattr(new_obj, 'db_deleted_operations'):
for obj in old_obj.db_deleted_operations:
if obj.vtType == 'add':
n_obj = DBAdd.update_version(obj, trans_dict)
new_obj.db_deleted_operations.append(n_obj)
elif obj.vtType == 'delete':
n_obj = DBDelete.update_version(obj, trans_dict)
new_obj.db_deleted_operations.append(n_obj)
elif obj.vtType == 'change':
n_obj = DBChange.update_version(obj, trans_dict)
new_obj.db_deleted_operations.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'prevId' in class_dict:
res = class_dict['prevId'](old_obj, trans_dict)
new_obj.db_prevId = res
elif hasattr(old_obj, 'db_prevId') and old_obj.db_prevId is not None:
new_obj.db_prevId = old_obj.db_prevId
if 'date' in class_dict:
res = class_dict['date'](old_obj, trans_dict)
new_obj.db_date = res
elif hasattr(old_obj, 'db_date') and old_obj.db_date is not None:
new_obj.db_date = old_obj.db_date
if 'session' in class_dict:
res = class_dict['session'](old_obj, trans_dict)
new_obj.db_session = res
elif hasattr(old_obj, 'db_session') and old_obj.db_session is not None:
new_obj.db_session = old_obj.db_session
if 'user' in class_dict:
res = class_dict['user'](old_obj, trans_dict)
new_obj.db_user = res
elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None:
new_obj.db_user = old_obj.db_user
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
children = []
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_operations:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_operation(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_operations)
if remove:
self.db_deleted_annotations = []
self.db_deleted_operations = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_operations:
if child.has_changes():
return True
return False
    def __get_db_operations(self):
        # Raw getter backing the db_operations property.
        return self._db_operations
    def __set_db_operations(self, operations):
        # Wholesale replacement of the operation list; marks the object dirty.
        # NOTE(review): db_operations_id_index is NOT rebuilt here -- callers
        # appear to be expected to keep the index in sync themselves.
        self._db_operations = operations
        self.is_dirty = True
    db_operations = property(__get_db_operations, __set_db_operations)
    def db_get_operations(self):
        # Alias of the property getter (generated-API convention).
        return self._db_operations
    def db_add_operation(self, operation):
        # Append and register in the id index; flags the object dirty.
        self.is_dirty = True
        self._db_operations.append(operation)
        self.db_operations_id_index[operation.db_id] = operation
    def db_change_operation(self, operation):
        # Replace the operation with a matching db_id in place, or append
        # when no match exists; the id index is updated either way.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_operations)):
            if self._db_operations[i].db_id == operation.db_id:
                self._db_operations[i] = operation
                found = True
                break
        if not found:
            self._db_operations.append(operation)
        self.db_operations_id_index[operation.db_id] = operation
    def db_delete_operation(self, operation):
        # Remove by db_id; operations not marked is_new are moved to the
        # db_deleted_operations tombstone list so the persistence layer can
        # issue storage deletes later.  Raises KeyError when the id is not
        # in the index.
        self.is_dirty = True
        for i in xrange(len(self._db_operations)):
            if self._db_operations[i].db_id == operation.db_id:
                if not self._db_operations[i].is_new:
                    self.db_deleted_operations.append(self._db_operations[i])
                del self._db_operations[i]
                break
        del self.db_operations_id_index[operation.db_id]
    def db_get_operation(self, key):
        # Linear scan by db_id; returns None when absent.
        for i in xrange(len(self._db_operations)):
            if self._db_operations[i].db_id == key:
                return self._db_operations[i]
        return None
    def db_get_operation_by_id(self, key):
        # O(1) index lookup; raises KeyError when absent.
        return self.db_operations_id_index[key]
    def db_has_operation_with_id(self, key):
        return key in self.db_operations_id_index
    def __get_db_id(self):
        # Getter backing the db_id property.
        return self._db_id
    def __set_db_id(self, id):
        # Property setter: assigns the field and marks the object dirty.
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        # Generated add/change/delete triple for a scalar field: these assign
        # directly and, unlike the property setter, do NOT touch is_dirty.
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        # The argument is ignored; the field is simply cleared.
        self._db_id = None
    def __get_db_prevId(self):
        # Same generated scalar-accessor pattern for prevId.
        return self._db_prevId
    def __set_db_prevId(self, prevId):
        self._db_prevId = prevId
        self.is_dirty = True
    db_prevId = property(__get_db_prevId, __set_db_prevId)
    def db_add_prevId(self, prevId):
        self._db_prevId = prevId
    def db_change_prevId(self, prevId):
        self._db_prevId = prevId
    def db_delete_prevId(self, prevId):
        self._db_prevId = None
    def __get_db_date(self):
        # Same generated scalar-accessor pattern for date.
        return self._db_date
    def __set_db_date(self, date):
        self._db_date = date
        self.is_dirty = True
    db_date = property(__get_db_date, __set_db_date)
    def db_add_date(self, date):
        self._db_date = date
    def db_change_date(self, date):
        self._db_date = date
    def db_delete_date(self, date):
        self._db_date = None
    def __get_db_session(self):
        # Same generated scalar-accessor pattern for session.
        return self._db_session
    def __set_db_session(self, session):
        self._db_session = session
        self.is_dirty = True
    db_session = property(__get_db_session, __set_db_session)
    def db_add_session(self, session):
        self._db_session = session
    def db_change_session(self, session):
        self._db_session = session
    def db_delete_session(self, session):
        self._db_session = None
    def __get_db_user(self):
        # Same generated scalar-accessor pattern for user.
        return self._db_user
    def __set_db_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(__get_db_user, __set_db_user)
    def db_add_user(self, user):
        self._db_user = user
    def db_change_user(self, user):
        self._db_user = user
    def db_delete_user(self, user):
        self._db_user = None
    def __get_db_annotations(self):
        # Raw getter backing the db_annotations property.
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        # Wholesale replacement; marks dirty.  NOTE(review): neither the id
        # index nor the key index is rebuilt here.
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        # Annotations are doubly indexed: by db_id and by db_key.
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_change_annotation(self, annotation):
        # Replace in place when a matching db_id exists, otherwise append;
        # both indices are updated either way.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_delete_annotation(self, annotation):
        # Remove by db_id; non-new annotations are tombstoned into
        # db_deleted_annotations.  Raises KeyError when absent from an index.
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
        del self.db_annotations_key_index[annotation.db_key]
    def db_get_annotation(self, key):
        # Linear scan by db_id; returns None when absent.
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def db_get_annotation_by_key(self, key):
        return self.db_annotations_key_index[key]
    def db_has_annotation_with_key(self, key):
        return key in self.db_annotations_key_index
    def getPrimaryKey(self):
        # Generated primary-key hook used by the persistence layer.
        return self._db_id
class DBStartupPackage(object):
    """Auto-generated persistence object for one package entry in the
    startup file: a package name plus an optional nested configuration
    object.  Follows the generated-domain conventions (is_dirty / is_new
    flags, db_deleted_* tombstone list, do_copy / update_version)."""
    vtType = 'startup_package'
    def __init__(self, name=None, configuration=None):
        self._db_name = name
        # configurations dropped via db_delete_configuration are kept here
        # so the persistence layer can delete them from storage later
        self.db_deleted_configuration = []
        self._db_configuration = configuration
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBStartupPackage.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Copies the nested configuration; with new_ids, allocates a fresh
        # id from id_scope and records the old->new pair in id_remap.
        # NOTE(review): no db_id attribute/property is defined on this class,
        # so the new_ids branch would raise AttributeError -- presumably this
        # path is never exercised for startup packages; confirm.
        cp = DBStartupPackage(name=self._db_name)
        if self._db_configuration is not None:
            cp._db_configuration = self._db_configuration.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Schema-translation copy: per-field hooks registered in trans_dict
        # override the default copy-through of each field.
        if new_obj is None:
            new_obj = DBStartupPackage()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'configuration' in class_dict:
            res = class_dict['configuration'](old_obj, trans_dict)
            new_obj.db_configuration = res
        elif hasattr(old_obj, 'db_configuration') and old_obj.db_configuration is not None:
            obj = old_obj.db_configuration
            new_obj.db_add_configuration(DBConfiguration.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_configuration') and hasattr(new_obj, 'db_deleted_configuration'):
            for obj in old_obj.db_deleted_configuration:
                n_obj = DBConfiguration.update_version(obj, trans_dict)
                new_obj.db_deleted_configuration.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # (object, parent_type, parent_id) triples, configuration first;
        # orphan=True additionally detaches the configuration.
        # NOTE(review): references self.db_id, which this class never
        # defines -- would raise AttributeError when a configuration exists.
        children = []
        if self._db_configuration is not None:
            children.extend(self._db_configuration.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_configuration = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Tombstoned configurations; remove=True clears the list afterwards.
        children = []
        children.extend(self.db_deleted_configuration)
        if remove:
            self.db_deleted_configuration = []
        return children
    def has_changes(self):
        # Dirty when this object or its live configuration changed.
        if self.is_dirty:
            return True
        if self._db_configuration is not None and self._db_configuration.has_changes():
            return True
        return False
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        # add/change/delete triple: plain assignment, is_dirty untouched.
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_configuration(self):
        return self._db_configuration
    def __set_db_configuration(self, configuration):
        self._db_configuration = configuration
        self.is_dirty = True
    db_configuration = property(__get_db_configuration, __set_db_configuration)
    def db_add_configuration(self, configuration):
        self._db_configuration = configuration
    def db_change_configuration(self, configuration):
        self._db_configuration = configuration
    def db_delete_configuration(self, configuration):
        # Tombstone the current configuration (unless this object is new)
        # before dropping the reference; the argument itself is unused.
        if not self.is_new:
            self.db_deleted_configuration.append(self._db_configuration)
        self._db_configuration = None
class DBConfigInt(object):
    """Auto-generated wrapper around a single integer configuration value,
    with the standard generated-domain machinery (dirty/new flags, copying
    and schema-version translation helpers)."""
    vtType = 'config_int'
    def __init__(self, value=None):
        self._db_value = value
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBConfigInt.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Duplicate this object; with new_ids, draw a fresh id from
        id_scope and record the old->new pair in id_remap."""
        dup = DBConfigInt(value=self._db_value)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                remap_key = (id_scope.remap[self.vtType], self.db_id)
            else:
                remap_key = (self.vtType, self.db_id)
            id_remap[remap_key] = fresh
            dup.db_id = fresh
        else:
            # a plain copy carries the flags over unchanged
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj across schema versions, applying any 'value'
        hook registered for this class in trans_dict."""
        if new_obj is None:
            new_obj = DBConfigInt()
        hooks = {}
        if new_obj.__class__.__name__ in trans_dict:
            hooks = trans_dict[new_obj.__class__.__name__]
        if 'value' in hooks:
            new_obj.db_value = hooks['value'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            new_obj.db_value = old_obj.db_value
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Leaf object: the only reported triple is itself."""
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        """No owned children, so nothing is ever reported."""
        return []
    def has_changes(self):
        """True exactly when this object has been modified."""
        return bool(self.is_dirty)
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self.is_dirty = True
        self._db_value = value
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        # the argument is ignored; the field is simply cleared
        self._db_value = None
class DBOpmProcessIdEffect(object):
    """Auto-generated reference object: holds the id of an opm_process that
    appears as the effect end of an OPM causal dependency."""
    vtType = 'opm_process_id_effect'
    def __init__(self, id=None):
        # id of the referenced opm_process
        self._db_id = id
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmProcessIdEffect.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this reference.

        With new_ids=True a fresh id is drawn from id_scope and the old->new
        pair is recorded in id_remap; the referenced opm_process id is then
        translated through id_remap when a mapping exists.

        Fix: the original evaluated ``... in id_remap`` unconditionally, so
        calling do_copy()/copy.copy() with the default id_remap=None raised
        TypeError.  The lookup is now guarded by ``id_remap is not None``;
        behavior with a provided id_remap is unchanged.
        """
        cp = DBOpmProcessIdEffect(id=self._db_id)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        if id_remap is not None and hasattr(self, 'db_id') and ('opm_process', self._db_id) in id_remap:
            cp._db_id = id_remap[('opm_process', self._db_id)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Copy old_obj into the current schema version, applying any 'id'
        translation hook registered in trans_dict."""
        if new_obj is None:
            new_obj = DBOpmProcessIdEffect()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: reports only itself.
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        # No owned children, hence nothing to report.
        children = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        return False
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        # add/change/delete triple: plain assignment, is_dirty untouched.
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
class DBRefProvPlan(object):
    """Auto-generated reference object: holds the id of the prov_entity that
    serves as the plan of a PROV association."""
    vtType = 'ref_prov_plan'
    def __init__(self, prov_ref=None):
        # id of the referenced prov_entity
        self._db_prov_ref = prov_ref
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBRefProvPlan.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this reference.

        With new_ids=True a fresh id is drawn from id_scope and the old->new
        pair is recorded in id_remap; the referenced prov_entity id is then
        translated through id_remap when a mapping exists.

        Fix: the original evaluated ``... in id_remap`` unconditionally, so
        calling do_copy()/copy.copy() with the default id_remap=None raised
        TypeError.  The lookup is now guarded by ``id_remap is not None``;
        behavior with a provided id_remap is unchanged.
        """
        cp = DBRefProvPlan(prov_ref=self._db_prov_ref)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        if id_remap is not None and hasattr(self, 'db_prov_ref') and ('prov_entity', self._db_prov_ref) in id_remap:
            cp._db_prov_ref = id_remap[('prov_entity', self._db_prov_ref)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Copy old_obj into the current schema version, applying any
        'prov_ref' translation hook registered in trans_dict."""
        if new_obj is None:
            new_obj = DBRefProvPlan()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'prov_ref' in class_dict:
            res = class_dict['prov_ref'](old_obj, trans_dict)
            new_obj.db_prov_ref = res
        elif hasattr(old_obj, 'db_prov_ref') and old_obj.db_prov_ref is not None:
            new_obj.db_prov_ref = old_obj.db_prov_ref
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: reports only itself.
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        # No owned children, hence nothing to report.
        children = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        return False
    def __get_db_prov_ref(self):
        return self._db_prov_ref
    def __set_db_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
        self.is_dirty = True
    db_prov_ref = property(__get_db_prov_ref, __set_db_prov_ref)
    def db_add_prov_ref(self, prov_ref):
        # add/change/delete triple: plain assignment, is_dirty untouched.
        self._db_prov_ref = prov_ref
    def db_change_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
    def db_delete_prov_ref(self, prov_ref):
        self._db_prov_ref = None
class DBOpmAccounts(object):
    """Auto-generated container for OPM account declarations plus the
    overlap declarations between them.  Accounts are a keyed collection
    (indexed by db_id); overlaps are an unkeyed list -- see the NOTE
    comments on their mutators for the resulting quirks."""
    vtType = 'opm_accounts'
    def __init__(self, accounts=None, opm_overlapss=None):
        self.db_deleted_accounts = []
        self.db_accounts_id_index = {}
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
            for v in self._db_accounts:
                self.db_accounts_id_index[v.db_id] = v
        self.db_deleted_opm_overlapss = []
        if opm_overlapss is None:
            self._db_opm_overlapss = []
        else:
            self._db_opm_overlapss = opm_overlapss
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmAccounts.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Copies children first, optionally rewrites ids via id_scope and
        # id_remap, then rebuilds the account id index.
        cp = DBOpmAccounts()
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        if self._db_opm_overlapss is None:
            cp._db_opm_overlapss = []
        else:
            cp._db_opm_overlapss = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_overlapss]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_accounts_id_index = dict((v.db_id, v) for v in cp._db_accounts)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Schema-translation copy; per-field hooks in trans_dict override
        # the default child-by-child copy.
        if new_obj is None:
            new_obj = DBOpmAccounts()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccount.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccount.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'opm_overlapss' in class_dict:
            res = class_dict['opm_overlapss'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_opm_overlaps(obj)
        elif hasattr(old_obj, 'db_opm_overlapss') and old_obj.db_opm_overlapss is not None:
            for obj in old_obj.db_opm_overlapss:
                new_obj.db_add_opm_overlaps(DBOpmOverlaps.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_opm_overlapss') and hasattr(new_obj, 'db_deleted_opm_overlapss'):
            for obj in old_obj.db_deleted_opm_overlapss:
                n_obj = DBOpmOverlaps.update_version(obj, trans_dict)
                new_obj.db_deleted_opm_overlapss.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # (object, parent_type, parent_id) triples, children before self.
        # NOTE(review): db_delete_opm_overlaps always raises, so calling
        # db_children(orphan=True) fails whenever overlaps exist.
        children = []
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        to_del = []
        for child in self.db_opm_overlapss:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_opm_overlaps(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Tombstoned children; remove=True clears the tombstone lists.
        children = []
        children.extend(self.db_deleted_accounts)
        children.extend(self.db_deleted_opm_overlapss)
        if remove:
            self.db_deleted_accounts = []
            self.db_deleted_opm_overlapss = []
        return children
    def has_changes(self):
        # Dirty when this object or any live child changed.
        if self.is_dirty:
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        for child in self._db_opm_overlapss:
            if child.has_changes():
                return True
        return False
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        # NOTE(review): wholesale replacement does not rebuild the id index.
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
        self.db_accounts_id_index[account.db_id] = account
    def db_change_account(self, account):
        # Replace in place on matching db_id, else append; index updated.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_accounts)):
            if self._db_accounts[i].db_id == account.db_id:
                self._db_accounts[i] = account
                found = True
                break
        if not found:
            self._db_accounts.append(account)
        self.db_accounts_id_index[account.db_id] = account
    def db_delete_account(self, account):
        # Remove by db_id; non-new accounts are tombstoned.  Raises KeyError
        # when the id is absent from the index.
        self.is_dirty = True
        for i in xrange(len(self._db_accounts)):
            if self._db_accounts[i].db_id == account.db_id:
                if not self._db_accounts[i].is_new:
                    self.db_deleted_accounts.append(self._db_accounts[i])
                del self._db_accounts[i]
                break
        del self.db_accounts_id_index[account.db_id]
    def db_get_account(self, key):
        # Linear scan by db_id; returns None when absent.
        for i in xrange(len(self._db_accounts)):
            if self._db_accounts[i].db_id == key:
                return self._db_accounts[i]
        return None
    def db_get_account_by_id(self, key):
        return self.db_accounts_id_index[key]
    def db_has_account_with_id(self, key):
        return key in self.db_accounts_id_index
    def __get_db_opm_overlapss(self):
        return self._db_opm_overlapss
    def __set_db_opm_overlapss(self, opm_overlapss):
        self._db_opm_overlapss = opm_overlapss
        self.is_dirty = True
    db_opm_overlapss = property(__get_db_opm_overlapss, __set_db_opm_overlapss)
    def db_get_opm_overlapss(self):
        return self._db_opm_overlapss
    def db_add_opm_overlaps(self, opm_overlaps):
        self.is_dirty = True
        self._db_opm_overlapss.append(opm_overlaps)
    def db_change_opm_overlaps(self, opm_overlaps):
        # NOTE(review): for this unkeyed list "change" just appends, exactly
        # like add -- there is no identity to match on.
        self.is_dirty = True
        self._db_opm_overlapss.append(opm_overlaps)
    def db_delete_opm_overlaps(self, opm_overlaps):
        # Unkeyed list: deletion by identity is unsupported by design.
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_opm_overlaps(self, key):
        # Unkeyed list: lookup by key always yields None.
        return None
class DBRefProvAgent(object):
    """Auto-generated reference object: holds the id of the prov_agent
    referenced by a PROV relation."""
    vtType = 'ref_prov_agent'
    def __init__(self, prov_ref=None):
        # id of the referenced prov_agent
        self._db_prov_ref = prov_ref
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBRefProvAgent.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this reference.

        With new_ids=True a fresh id is drawn from id_scope and the old->new
        pair is recorded in id_remap; the referenced prov_agent id is then
        translated through id_remap when a mapping exists.

        Fix: the original evaluated ``... in id_remap`` unconditionally, so
        calling do_copy()/copy.copy() with the default id_remap=None raised
        TypeError.  The lookup is now guarded by ``id_remap is not None``;
        behavior with a provided id_remap is unchanged.
        """
        cp = DBRefProvAgent(prov_ref=self._db_prov_ref)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        if id_remap is not None and hasattr(self, 'db_prov_ref') and ('prov_agent', self._db_prov_ref) in id_remap:
            cp._db_prov_ref = id_remap[('prov_agent', self._db_prov_ref)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Copy old_obj into the current schema version, applying any
        'prov_ref' translation hook registered in trans_dict."""
        if new_obj is None:
            new_obj = DBRefProvAgent()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'prov_ref' in class_dict:
            res = class_dict['prov_ref'](old_obj, trans_dict)
            new_obj.db_prov_ref = res
        elif hasattr(old_obj, 'db_prov_ref') and old_obj.db_prov_ref is not None:
            new_obj.db_prov_ref = old_obj.db_prov_ref
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: reports only itself.
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        # No owned children, hence nothing to report.
        children = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        return False
    def __get_db_prov_ref(self):
        return self._db_prov_ref
    def __set_db_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
        self.is_dirty = True
    db_prov_ref = property(__get_db_prov_ref, __set_db_prov_ref)
    def db_add_prov_ref(self, prov_ref):
        # add/change/delete triple: plain assignment, is_dirty untouched.
        self._db_prov_ref = prov_ref
    def db_change_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
    def db_delete_prov_ref(self, prov_ref):
        self._db_prov_ref = None
class DBPortSpec(object):
    """Auto-generated persistence object for a module port specification:
    scalar fields (id, name, type, optional, sort_key, min_conns,
    max_conns) plus a keyed list of portSpecItems indexed by db_id."""
    vtType = 'portSpec'
    def __init__(self, id=None, name=None, type=None, optional=None, sort_key=None, portSpecItems=None, min_conns=None, max_conns=None):
        self._db_id = id
        self._db_name = name
        self._db_type = type
        self._db_optional = optional
        self._db_sort_key = sort_key
        # items removed via db_delete_portSpecItem are tombstoned here
        self.db_deleted_portSpecItems = []
        self.db_portSpecItems_id_index = {}
        if portSpecItems is None:
            self._db_portSpecItems = []
        else:
            self._db_portSpecItems = portSpecItems
            for v in self._db_portSpecItems:
                self.db_portSpecItems_id_index[v.db_id] = v
        self._db_min_conns = min_conns
        self._db_max_conns = max_conns
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBPortSpec.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Copies scalars and children; with new_ids, allocates a fresh id
        # via id_scope, records the old->new pair in id_remap, and finally
        # rebuilds the portSpecItems id index.
        cp = DBPortSpec(id=self._db_id,
                        name=self._db_name,
                        type=self._db_type,
                        optional=self._db_optional,
                        sort_key=self._db_sort_key,
                        min_conns=self._db_min_conns,
                        max_conns=self._db_max_conns)
        if self._db_portSpecItems is None:
            cp._db_portSpecItems = []
        else:
            cp._db_portSpecItems = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_portSpecItems]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_portSpecItems_id_index = dict((v.db_id, v) for v in cp._db_portSpecItems)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Schema-translation copy: per-field hooks in trans_dict override
        # the default copy-through of each field.
        if new_obj is None:
            new_obj = DBPortSpec()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'type' in class_dict:
            res = class_dict['type'](old_obj, trans_dict)
            new_obj.db_type = res
        elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None:
            new_obj.db_type = old_obj.db_type
        if 'optional' in class_dict:
            res = class_dict['optional'](old_obj, trans_dict)
            new_obj.db_optional = res
        elif hasattr(old_obj, 'db_optional') and old_obj.db_optional is not None:
            new_obj.db_optional = old_obj.db_optional
        if 'sort_key' in class_dict:
            res = class_dict['sort_key'](old_obj, trans_dict)
            new_obj.db_sort_key = res
        elif hasattr(old_obj, 'db_sort_key') and old_obj.db_sort_key is not None:
            new_obj.db_sort_key = old_obj.db_sort_key
        if 'portSpecItems' in class_dict:
            res = class_dict['portSpecItems'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_portSpecItem(obj)
        elif hasattr(old_obj, 'db_portSpecItems') and old_obj.db_portSpecItems is not None:
            for obj in old_obj.db_portSpecItems:
                new_obj.db_add_portSpecItem(DBPortSpecItem.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_portSpecItems') and hasattr(new_obj, 'db_deleted_portSpecItems'):
            for obj in old_obj.db_deleted_portSpecItems:
                n_obj = DBPortSpecItem.update_version(obj, trans_dict)
                new_obj.db_deleted_portSpecItems.append(n_obj)
        if 'min_conns' in class_dict:
            res = class_dict['min_conns'](old_obj, trans_dict)
            new_obj.db_min_conns = res
        elif hasattr(old_obj, 'db_min_conns') and old_obj.db_min_conns is not None:
            new_obj.db_min_conns = old_obj.db_min_conns
        if 'max_conns' in class_dict:
            res = class_dict['max_conns'](old_obj, trans_dict)
            new_obj.db_max_conns = res
        elif hasattr(old_obj, 'db_max_conns') and old_obj.db_max_conns is not None:
            new_obj.db_max_conns = old_obj.db_max_conns
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Items are reported only when not building an action (for_action
        # False); unlike other classes, orphan does not detach items here.
        children = []
        if not for_action:
            for child in self.db_portSpecItems:
                children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Tombstoned items; remove=True clears the tombstone list.
        children = []
        children.extend(self.db_deleted_portSpecItems)
        if remove:
            self.db_deleted_portSpecItems = []
        return children
    def has_changes(self):
        # Dirty when this object or any live item changed.
        if self.is_dirty:
            return True
        for child in self._db_portSpecItems:
            if child.has_changes():
                return True
        return False
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        # add/change/delete triples assign directly without touching is_dirty.
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_type(self):
        return self._db_type
    def __set_db_type(self, type):
        self._db_type = type
        self.is_dirty = True
    db_type = property(__get_db_type, __set_db_type)
    def db_add_type(self, type):
        self._db_type = type
    def db_change_type(self, type):
        self._db_type = type
    def db_delete_type(self, type):
        self._db_type = None
    def __get_db_optional(self):
        return self._db_optional
    def __set_db_optional(self, optional):
        self._db_optional = optional
        self.is_dirty = True
    db_optional = property(__get_db_optional, __set_db_optional)
    def db_add_optional(self, optional):
        self._db_optional = optional
    def db_change_optional(self, optional):
        self._db_optional = optional
    def db_delete_optional(self, optional):
        self._db_optional = None
    def __get_db_sort_key(self):
        return self._db_sort_key
    def __set_db_sort_key(self, sort_key):
        self._db_sort_key = sort_key
        self.is_dirty = True
    db_sort_key = property(__get_db_sort_key, __set_db_sort_key)
    def db_add_sort_key(self, sort_key):
        self._db_sort_key = sort_key
    def db_change_sort_key(self, sort_key):
        self._db_sort_key = sort_key
    def db_delete_sort_key(self, sort_key):
        self._db_sort_key = None
    def __get_db_portSpecItems(self):
        return self._db_portSpecItems
    def __set_db_portSpecItems(self, portSpecItems):
        # NOTE(review): wholesale replacement does not rebuild the id index.
        self._db_portSpecItems = portSpecItems
        self.is_dirty = True
    db_portSpecItems = property(__get_db_portSpecItems, __set_db_portSpecItems)
    def db_get_portSpecItems(self):
        return self._db_portSpecItems
    def db_add_portSpecItem(self, portSpecItem):
        self.is_dirty = True
        self._db_portSpecItems.append(portSpecItem)
        self.db_portSpecItems_id_index[portSpecItem.db_id] = portSpecItem
    def db_change_portSpecItem(self, portSpecItem):
        # Replace in place on matching db_id, else append; index updated.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_portSpecItems)):
            if self._db_portSpecItems[i].db_id == portSpecItem.db_id:
                self._db_portSpecItems[i] = portSpecItem
                found = True
                break
        if not found:
            self._db_portSpecItems.append(portSpecItem)
        self.db_portSpecItems_id_index[portSpecItem.db_id] = portSpecItem
    def db_delete_portSpecItem(self, portSpecItem):
        # Remove by db_id; non-new items are tombstoned.  Raises KeyError
        # when the id is absent from the index.
        self.is_dirty = True
        for i in xrange(len(self._db_portSpecItems)):
            if self._db_portSpecItems[i].db_id == portSpecItem.db_id:
                if not self._db_portSpecItems[i].is_new:
                    self.db_deleted_portSpecItems.append(self._db_portSpecItems[i])
                del self._db_portSpecItems[i]
                break
        del self.db_portSpecItems_id_index[portSpecItem.db_id]
    def db_get_portSpecItem(self, key):
        # Linear scan by db_id; returns None when absent.
        for i in xrange(len(self._db_portSpecItems)):
            if self._db_portSpecItems[i].db_id == key:
                return self._db_portSpecItems[i]
        return None
    def db_get_portSpecItem_by_id(self, key):
        return self.db_portSpecItems_id_index[key]
    def db_has_portSpecItem_with_id(self, key):
        return key in self.db_portSpecItems_id_index
    def __get_db_min_conns(self):
        return self._db_min_conns
    def __set_db_min_conns(self, min_conns):
        self._db_min_conns = min_conns
        self.is_dirty = True
    db_min_conns = property(__get_db_min_conns, __set_db_min_conns)
    def db_add_min_conns(self, min_conns):
        self._db_min_conns = min_conns
    def db_change_min_conns(self, min_conns):
        self._db_min_conns = min_conns
    def db_delete_min_conns(self, min_conns):
        self._db_min_conns = None
    def __get_db_max_conns(self):
        return self._db_max_conns
    def __set_db_max_conns(self, max_conns):
        self._db_max_conns = max_conns
        self.is_dirty = True
    db_max_conns = property(__get_db_max_conns, __set_db_max_conns)
    def db_add_max_conns(self, max_conns):
        self._db_max_conns = max_conns
    def db_change_max_conns(self, max_conns):
        self._db_max_conns = max_conns
    def db_delete_max_conns(self, max_conns):
        self._db_max_conns = None
    def getPrimaryKey(self):
        # Generated primary-key hook used by the persistence layer.
        return self._db_id
class DBEnabledPackages(object):
    """Generated persistence wrapper for the list of enabled startup
    packages, indexed by package name.

    Follows the generated accessor pattern used throughout this module:
    ``_db_packages`` is the backing list, ``db_packages_name_index`` maps
    ``db_name`` -> package, and ``is_dirty``/``is_new`` track persistence
    state.
    """
    vtType = 'enabled_packages'
    def __init__(self, packages=None):
        # tombstones for persisted children removed before the next save
        self.db_deleted_packages = []
        self.db_packages_name_index = {}
        if packages is None:
            self._db_packages = []
        else:
            self._db_packages = packages
            for v in self._db_packages:
                self.db_packages_name_index[v.db_name] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBEnabledPackages.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object; with new_ids, allocate a fresh id via
        id_scope and record the old->new pair in id_remap."""
        cp = DBEnabledPackages()
        if self._db_packages is None:
            cp._db_packages = []
        else:
            cp._db_packages = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_packages]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            # NOTE(review): this class defines no db_id property, so the
            # new_ids branch would raise AttributeError if ever taken --
            # kept as generated; confirm new_ids is never used here.
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_packages_name_index = dict((v.db_name, v) for v in cp._db_packages)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema version, applying any
        per-field hooks registered under this class name in trans_dict."""
        if new_obj is None:
            new_obj = DBEnabledPackages()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'packages' in class_dict:
            res = class_dict['packages'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_package(obj)
        elif hasattr(old_obj, 'db_packages') and old_obj.db_packages is not None:
            for obj in old_obj.db_packages:
                new_obj.db_add_package(DBStartupPackage.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_packages') and hasattr(new_obj, 'db_deleted_packages'):
            for obj in old_obj.db_deleted_packages:
                n_obj = DBStartupPackage.update_version(obj, trans_dict)
                new_obj.db_deleted_packages.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object
        and all children; with orphan, children are detached first."""
        children = []
        to_del = []
        for child in self.db_packages:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_package(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_packages)
        if remove:
            self.db_deleted_packages = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        for child in self._db_packages:
            if child.has_changes():
                return True
        return False
    def __get_db_packages(self):
        return self._db_packages
    def __set_db_packages(self, packages):
        self._db_packages = packages
        self.is_dirty = True
    db_packages = property(__get_db_packages, __set_db_packages)
    def db_get_packages(self):
        return self._db_packages
    def db_add_package(self, package):
        self.is_dirty = True
        self._db_packages.append(package)
        self.db_packages_name_index[package.db_name] = package
    def db_change_package(self, package):
        """Replace the package with the same db_name in place, appending
        only when no such package exists.

        Fix: the generated version unconditionally appended, leaving a
        stale duplicate in the list while the name index pointed at the
        new object; this now mirrors the keyed db_change_* pattern used
        elsewhere in this module.
        """
        self.is_dirty = True
        for i, existing in enumerate(self._db_packages):
            if existing.db_name == package.db_name:
                self._db_packages[i] = package
                break
        else:
            self._db_packages.append(package)
        self.db_packages_name_index[package.db_name] = package
    def db_delete_package(self, package):
        # packages are not keyed by id; generated code cannot delete them
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_package(self, key):
        # non-keyed collection: lookup by key is unsupported
        return None
    def db_get_package_by_name(self, key):
        return self.db_packages_name_index[key]
    def db_has_package_with_name(self, key):
        return key in self.db_packages_name_index
class DBOpmArtifact(object):
    """Generated persistence class for an OPM (Open Provenance Model)
    artifact: a keyed id, a single value child and a list of account ids.
    """
    vtType = 'opm_artifact'
    def __init__(self, id=None, value=None, accounts=None):
        self._db_id = id
        # tombstones for persisted children removed before the next save
        self.db_deleted_value = []
        self._db_value = value
        self.db_deleted_accounts = []
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmArtifact.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep copy; with new_ids, allocate a fresh id from id_scope and
        # record the (type, old_id) -> new_id pair in id_remap.
        cp = DBOpmArtifact(id=self._db_id)
        if self._db_value is not None:
            cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Translate old_obj to the current schema, applying per-field
        # hooks registered under this class name in trans_dict.
        if new_obj is None:
            new_obj = DBOpmArtifact()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'value' in class_dict:
            res = class_dict['value'](old_obj, trans_dict)
            new_obj.db_value = res
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            obj = old_obj.db_value
            new_obj.db_add_value(DBOpmArtifactValue.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
            for obj in old_obj.db_deleted_value:
                n_obj = DBOpmArtifactValue.update_version(obj, trans_dict)
                new_obj.db_deleted_value.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Depth-first list of (object, parent_type, parent_id) triples;
        # with orphan, children are detached from this object.
        children = []
        if self._db_value is not None:
            children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_value = None
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_value)
        children.extend(self.db_deleted_accounts)
        if remove:
            self.db_deleted_value = []
            self.db_deleted_accounts = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        if self._db_value is not None and self._db_value.has_changes():
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        return False
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        # property setters mark the object dirty; db_add_/db_change_ do not
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        # keep a delete record for already-persisted values
        if not self.is_new:
            self.db_deleted_value.append(self._db_value)
        self._db_value = None
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        # accounts are non-keyed, so "change" can only append
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        # accounts are non-keyed; deletion is unsupported by generated code
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        # non-keyed collection: lookup by key is unsupported
        return None
    def getPrimaryKey(self):
        return self._db_id
class DBLog(object):
    """Generated persistence class for an execution log: scalar metadata
    (entity type, version, name, last_modified, owning vistrail id) plus
    two id-indexed child collections, workflow_execs and machines.
    """
    vtType = 'log'
    def __init__(self, id=None, entity_type=None, version=None, name=None, last_modified=None, workflow_execs=None, machines=None, vistrail_id=None):
        self._db_id = id
        self._db_entity_type = entity_type
        self._db_version = version
        self._db_name = name
        self._db_last_modified = last_modified
        # tombstones for persisted children removed before the next save
        self.db_deleted_workflow_execs = []
        self.db_workflow_execs_id_index = {}
        if workflow_execs is None:
            self._db_workflow_execs = []
        else:
            self._db_workflow_execs = workflow_execs
            for v in self._db_workflow_execs:
                self.db_workflow_execs_id_index[v.db_id] = v
        self.db_deleted_machines = []
        self.db_machines_id_index = {}
        if machines is None:
            self._db_machines = []
        else:
            self._db_machines = machines
            for v in self._db_machines:
                self.db_machines_id_index[v.db_id] = v
        self._db_vistrail_id = vistrail_id
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBLog.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep copy; with new_ids, allocate a fresh id, record the
        # remapping (including the vistrail_id foreign key), and rebuild
        # both child id indexes.
        cp = DBLog(id=self._db_id,
                   entity_type=self._db_entity_type,
                   version=self._db_version,
                   name=self._db_name,
                   last_modified=self._db_last_modified,
                   vistrail_id=self._db_vistrail_id)
        if self._db_workflow_execs is None:
            cp._db_workflow_execs = []
        else:
            cp._db_workflow_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_workflow_execs]
        if self._db_machines is None:
            cp._db_machines = []
        else:
            cp._db_machines = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_machines]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_vistrail_id') and ('vistrail', self._db_vistrail_id) in id_remap:
                cp._db_vistrail_id = id_remap[('vistrail', self._db_vistrail_id)]
        # recreate indices and set flags
        cp.db_workflow_execs_id_index = dict((v.db_id, v) for v in cp._db_workflow_execs)
        cp.db_machines_id_index = dict((v.db_id, v) for v in cp._db_machines)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Translate old_obj to the current schema, applying per-field
        # hooks registered under this class name in trans_dict.
        if new_obj is None:
            new_obj = DBLog()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'entity_type' in class_dict:
            res = class_dict['entity_type'](old_obj, trans_dict)
            new_obj.db_entity_type = res
        elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
            new_obj.db_entity_type = old_obj.db_entity_type
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'last_modified' in class_dict:
            res = class_dict['last_modified'](old_obj, trans_dict)
            new_obj.db_last_modified = res
        elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
            new_obj.db_last_modified = old_obj.db_last_modified
        if 'workflow_execs' in class_dict:
            res = class_dict['workflow_execs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_workflow_exec(obj)
        elif hasattr(old_obj, 'db_workflow_execs') and old_obj.db_workflow_execs is not None:
            for obj in old_obj.db_workflow_execs:
                new_obj.db_add_workflow_exec(DBWorkflowExec.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_workflow_execs') and hasattr(new_obj, 'db_deleted_workflow_execs'):
            for obj in old_obj.db_deleted_workflow_execs:
                n_obj = DBWorkflowExec.update_version(obj, trans_dict)
                new_obj.db_deleted_workflow_execs.append(n_obj)
        if 'machines' in class_dict:
            res = class_dict['machines'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_machine(obj)
        elif hasattr(old_obj, 'db_machines') and old_obj.db_machines is not None:
            for obj in old_obj.db_machines:
                new_obj.db_add_machine(DBMachine.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_machines') and hasattr(new_obj, 'db_deleted_machines'):
            for obj in old_obj.db_deleted_machines:
                n_obj = DBMachine.update_version(obj, trans_dict)
                new_obj.db_deleted_machines.append(n_obj)
        if 'vistrail_id' in class_dict:
            res = class_dict['vistrail_id'](old_obj, trans_dict)
            new_obj.db_vistrail_id = res
        elif hasattr(old_obj, 'db_vistrail_id') and old_obj.db_vistrail_id is not None:
            new_obj.db_vistrail_id = old_obj.db_vistrail_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Depth-first list of (object, parent_type, parent_id) triples;
        # with orphan, children are detached from this object.
        children = []
        to_del = []
        for child in self.db_workflow_execs:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_workflow_exec(child)
        to_del = []
        for child in self.db_machines:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_machine(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_workflow_execs)
        children.extend(self.db_deleted_machines)
        if remove:
            self.db_deleted_workflow_execs = []
            self.db_deleted_machines = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        for child in self._db_workflow_execs:
            if child.has_changes():
                return True
        for child in self._db_machines:
            if child.has_changes():
                return True
        return False
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        # property setters mark the object dirty; db_add_/db_change_ do not
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_entity_type(self):
        return self._db_entity_type
    def __set_db_entity_type(self, entity_type):
        self._db_entity_type = entity_type
        self.is_dirty = True
    db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
    def db_add_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_change_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_delete_entity_type(self, entity_type):
        self._db_entity_type = None
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_last_modified(self):
        return self._db_last_modified
    def __set_db_last_modified(self, last_modified):
        self._db_last_modified = last_modified
        self.is_dirty = True
    db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
    def db_add_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_change_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_delete_last_modified(self, last_modified):
        self._db_last_modified = None
    def __get_db_workflow_execs(self):
        return self._db_workflow_execs
    def __set_db_workflow_execs(self, workflow_execs):
        self._db_workflow_execs = workflow_execs
        self.is_dirty = True
    db_workflow_execs = property(__get_db_workflow_execs, __set_db_workflow_execs)
    def db_get_workflow_execs(self):
        return self._db_workflow_execs
    def db_add_workflow_exec(self, workflow_exec):
        self.is_dirty = True
        self._db_workflow_execs.append(workflow_exec)
        self.db_workflow_execs_id_index[workflow_exec.db_id] = workflow_exec
    def db_change_workflow_exec(self, workflow_exec):
        # Replace the child with the same db_id in place; append otherwise.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_workflow_execs)):
            if self._db_workflow_execs[i].db_id == workflow_exec.db_id:
                self._db_workflow_execs[i] = workflow_exec
                found = True
                break
        if not found:
            self._db_workflow_execs.append(workflow_exec)
        self.db_workflow_execs_id_index[workflow_exec.db_id] = workflow_exec
    def db_delete_workflow_exec(self, workflow_exec):
        # Remove the child with a matching db_id; persisted (non-new)
        # children are kept in db_deleted_workflow_execs as delete records.
        self.is_dirty = True
        for i in xrange(len(self._db_workflow_execs)):
            if self._db_workflow_execs[i].db_id == workflow_exec.db_id:
                if not self._db_workflow_execs[i].is_new:
                    self.db_deleted_workflow_execs.append(self._db_workflow_execs[i])
                del self._db_workflow_execs[i]
                break
        del self.db_workflow_execs_id_index[workflow_exec.db_id]
    def db_get_workflow_exec(self, key):
        for i in xrange(len(self._db_workflow_execs)):
            if self._db_workflow_execs[i].db_id == key:
                return self._db_workflow_execs[i]
        return None
    def db_get_workflow_exec_by_id(self, key):
        return self.db_workflow_execs_id_index[key]
    def db_has_workflow_exec_with_id(self, key):
        return key in self.db_workflow_execs_id_index
    def __get_db_machines(self):
        return self._db_machines
    def __set_db_machines(self, machines):
        self._db_machines = machines
        self.is_dirty = True
    db_machines = property(__get_db_machines, __set_db_machines)
    def db_get_machines(self):
        return self._db_machines
    def db_add_machine(self, machine):
        self.is_dirty = True
        self._db_machines.append(machine)
        self.db_machines_id_index[machine.db_id] = machine
    def db_change_machine(self, machine):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_machines)):
            if self._db_machines[i].db_id == machine.db_id:
                self._db_machines[i] = machine
                found = True
                break
        if not found:
            self._db_machines.append(machine)
        self.db_machines_id_index[machine.db_id] = machine
    def db_delete_machine(self, machine):
        self.is_dirty = True
        for i in xrange(len(self._db_machines)):
            if self._db_machines[i].db_id == machine.db_id:
                if not self._db_machines[i].is_new:
                    self.db_deleted_machines.append(self._db_machines[i])
                del self._db_machines[i]
                break
        del self.db_machines_id_index[machine.db_id]
    def db_get_machine(self, key):
        for i in xrange(len(self._db_machines)):
            if self._db_machines[i].db_id == key:
                return self._db_machines[i]
        return None
    def db_get_machine_by_id(self, key):
        return self.db_machines_id_index[key]
    def db_has_machine_with_id(self, key):
        return key in self.db_machines_id_index
    def __get_db_vistrail_id(self):
        return self._db_vistrail_id
    def __set_db_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = vistrail_id
        self.is_dirty = True
    db_vistrail_id = property(__get_db_vistrail_id, __set_db_vistrail_id)
    def db_add_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = vistrail_id
    def db_change_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = vistrail_id
    def db_delete_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = None
    def getPrimaryKey(self):
        return self._db_id
class DBOpmProcessIdCause(object):
    """Lightweight reference to an OPM process used as the cause end of a
    dependency edge; it carries only the referenced process id."""
    vtType = 'opm_process_id_cause'

    def __init__(self, id=None):
        self._db_id = id
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmProcessIdCause.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Duplicate this reference; with new_ids, allocate a fresh id via
        id_scope, record the remapping, and follow any already-remapped
        opm_process id."""
        duplicate = DBOpmProcessIdCause(id=self._db_id)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                key_type = id_scope.remap[self.vtType]
            else:
                key_type = self.vtType
            id_remap[(key_type, self.db_id)] = fresh
            duplicate.db_id = fresh
            if hasattr(self, 'db_id') and ('opm_process', self._db_id) in id_remap:
                duplicate._db_id = id_remap[('opm_process', self._db_id)]
        else:
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema, applying any per-field
        hook registered under this class name in trans_dict."""
        if new_obj is None:
            new_obj = DBOpmProcessIdCause()
        try:
            class_dict = trans_dict[new_obj.__class__.__name__]
        except KeyError:
            class_dict = {}
        if 'id' in class_dict:
            new_obj.db_id = class_dict['id'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_id', None) is not None:
            new_obj.db_id = old_obj.db_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # leaf object: the only child entry is itself
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # no owned children, so nothing is ever tombstoned
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    def _get_id(self):
        return self._db_id

    def _set_id(self, id):
        # the property setter dirties the object; the helpers below do not
        self._db_id = id
        self.is_dirty = True
    db_id = property(_get_id, _set_id)

    def db_add_id(self, id):
        self._db_id = id

    def db_change_id(self, id):
        self._db_id = id

    def db_delete_id(self, id):
        # argument is ignored; the field is simply cleared
        self._db_id = None
class DBOpmArtifacts(object):
    """Generated container for OPM artifacts, keyed by artifact id."""
    vtType = 'opm_artifacts'
    def __init__(self, artifacts=None):
        # tombstones for persisted children removed before the next save
        self.db_deleted_artifacts = []
        self.db_artifacts_id_index = {}
        if artifacts is None:
            self._db_artifacts = []
        else:
            self._db_artifacts = artifacts
            for v in self._db_artifacts:
                self.db_artifacts_id_index[v.db_id] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmArtifacts.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep copy; with new_ids, allocate a fresh id and rebuild the
        # artifact id index over the copied children.
        cp = DBOpmArtifacts()
        if self._db_artifacts is None:
            cp._db_artifacts = []
        else:
            cp._db_artifacts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_artifacts]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            # NOTE(review): this class defines no db_id property, so the
            # new_ids branch would raise AttributeError if ever taken --
            # kept as generated; confirm new_ids is never used here.
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_artifacts_id_index = dict((v.db_id, v) for v in cp._db_artifacts)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Translate old_obj to the current schema, applying per-field
        # hooks registered under this class name in trans_dict.
        if new_obj is None:
            new_obj = DBOpmArtifacts()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'artifacts' in class_dict:
            res = class_dict['artifacts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_artifact(obj)
        elif hasattr(old_obj, 'db_artifacts') and old_obj.db_artifacts is not None:
            for obj in old_obj.db_artifacts:
                new_obj.db_add_artifact(DBOpmArtifact.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_artifacts') and hasattr(new_obj, 'db_deleted_artifacts'):
            for obj in old_obj.db_deleted_artifacts:
                n_obj = DBOpmArtifact.update_version(obj, trans_dict)
                new_obj.db_deleted_artifacts.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Depth-first list of (object, parent_type, parent_id) triples;
        # with orphan, children are detached from this object.
        children = []
        to_del = []
        for child in self.db_artifacts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_artifact(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_artifacts)
        if remove:
            self.db_deleted_artifacts = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        for child in self._db_artifacts:
            if child.has_changes():
                return True
        return False
    def __get_db_artifacts(self):
        return self._db_artifacts
    def __set_db_artifacts(self, artifacts):
        self._db_artifacts = artifacts
        self.is_dirty = True
    db_artifacts = property(__get_db_artifacts, __set_db_artifacts)
    def db_get_artifacts(self):
        return self._db_artifacts
    def db_add_artifact(self, artifact):
        self.is_dirty = True
        self._db_artifacts.append(artifact)
        self.db_artifacts_id_index[artifact.db_id] = artifact
    def db_change_artifact(self, artifact):
        # Replace the child with the same db_id in place; append otherwise.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_artifacts)):
            if self._db_artifacts[i].db_id == artifact.db_id:
                self._db_artifacts[i] = artifact
                found = True
                break
        if not found:
            self._db_artifacts.append(artifact)
        self.db_artifacts_id_index[artifact.db_id] = artifact
    def db_delete_artifact(self, artifact):
        # Remove the child with a matching db_id; persisted (non-new)
        # children are kept in db_deleted_artifacts as delete records.
        self.is_dirty = True
        for i in xrange(len(self._db_artifacts)):
            if self._db_artifacts[i].db_id == artifact.db_id:
                if not self._db_artifacts[i].is_new:
                    self.db_deleted_artifacts.append(self._db_artifacts[i])
                del self._db_artifacts[i]
                break
        del self.db_artifacts_id_index[artifact.db_id]
    def db_get_artifact(self, key):
        # Linear scan by db_id; returns None when no child matches.
        for i in xrange(len(self._db_artifacts)):
            if self._db_artifacts[i].db_id == key:
                return self._db_artifacts[i]
        return None
    def db_get_artifact_by_id(self, key):
        return self.db_artifacts_id_index[key]
    def db_has_artifact_with_id(self, key):
        return key in self.db_artifacts_id_index
class DBPEParameter(object):
    """One parameter setting of a parameter exploration: its position,
    the interpolator used, the value, and the exploration dimension."""
    vtType = 'pe_parameter'
    # scalar fields, in the order update_version must process them
    _fields = ('id', 'pos', 'interpolator', 'value', 'dimension')

    def __init__(self, id=None, pos=None, interpolator=None, value=None, dimension=None):
        self._db_id = id
        self._db_pos = pos
        self._db_interpolator = interpolator
        self._db_value = value
        self._db_dimension = dimension
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBPEParameter.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Duplicate this parameter; with new_ids, allocate a fresh id via
        id_scope and record the old->new pair in id_remap."""
        dup = DBPEParameter(id=self._db_id, pos=self._db_pos,
                            interpolator=self._db_interpolator,
                            value=self._db_value,
                            dimension=self._db_dimension)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = fresh
            else:
                id_remap[(self.vtType, self.db_id)] = fresh
            dup.db_id = fresh
        else:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema; per-field hooks in
        trans_dict take precedence over plain attribute copies."""
        if new_obj is None:
            new_obj = DBPEParameter()
        try:
            class_dict = trans_dict[new_obj.__class__.__name__]
        except KeyError:
            class_dict = {}
        for field in DBPEParameter._fields:
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # leaf object: the only child entry is itself
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # no owned children, so nothing is ever tombstoned
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    # --- id -------------------------------------------------------------
    def _get_id(self):
        return self._db_id
    def _set_id(self, id):
        # property setters dirty the object; db_add_/db_change_ do not
        self._db_id = id
        self.is_dirty = True
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    # --- pos ------------------------------------------------------------
    def _get_pos(self):
        return self._db_pos
    def _set_pos(self, pos):
        self._db_pos = pos
        self.is_dirty = True
    db_pos = property(_get_pos, _set_pos)
    def db_add_pos(self, pos):
        self._db_pos = pos
    def db_change_pos(self, pos):
        self._db_pos = pos
    def db_delete_pos(self, pos):
        self._db_pos = None

    # --- interpolator ---------------------------------------------------
    def _get_interpolator(self):
        return self._db_interpolator
    def _set_interpolator(self, interpolator):
        self._db_interpolator = interpolator
        self.is_dirty = True
    db_interpolator = property(_get_interpolator, _set_interpolator)
    def db_add_interpolator(self, interpolator):
        self._db_interpolator = interpolator
    def db_change_interpolator(self, interpolator):
        self._db_interpolator = interpolator
    def db_delete_interpolator(self, interpolator):
        self._db_interpolator = None

    # --- value ----------------------------------------------------------
    def _get_value(self):
        return self._db_value
    def _set_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(_get_value, _set_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        self._db_value = None

    # --- dimension ------------------------------------------------------
    def _get_dimension(self):
        return self._db_dimension
    def _set_dimension(self, dimension):
        self._db_dimension = dimension
        self.is_dirty = True
    db_dimension = property(_get_dimension, _set_dimension)
    def db_add_dimension(self, dimension):
        self._db_dimension = dimension
    def db_change_dimension(self, dimension):
        self._db_dimension = dimension
    def db_delete_dimension(self, dimension):
        self._db_dimension = None

    def getPrimaryKey(self):
        return self._db_id
class DBWorkflowExec(object):
vtType = 'workflow_exec'
    def __init__(self, item_execs=None, id=None, user=None, ip=None, session=None, vt_version=None, ts_start=None, ts_end=None, parent_id=None, parent_type=None, parent_version=None, completed=None, name=None, annotations=None):
        # Generated record of one workflow execution: who ran it (user/ip/
        # session), when (ts_start/ts_end), against which vistrail version,
        # plus id-indexed item executions and annotations.
        self.db_deleted_item_execs = []
        self.db_item_execs_id_index = {}
        if item_execs is None:
            self._db_item_execs = []
        else:
            self._db_item_execs = item_execs
            for v in self._db_item_execs:
                self.db_item_execs_id_index[v.db_id] = v
        self._db_id = id
        self._db_user = user
        self._db_ip = ip
        self._db_session = session
        self._db_vt_version = vt_version
        self._db_ts_start = ts_start
        self._db_ts_end = ts_end
        self._db_parent_id = parent_id
        self._db_parent_type = parent_type
        self._db_parent_version = parent_version
        self._db_completed = completed
        self._db_name = name
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
            for v in self._db_annotations:
                self.db_annotations_id_index[v.db_id] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBWorkflowExec.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep copy; with new_ids, allocate a fresh id, record the
        # remapping, and rebuild both child id indexes over the copies.
        cp = DBWorkflowExec(id=self._db_id,
                            user=self._db_user,
                            ip=self._db_ip,
                            session=self._db_session,
                            vt_version=self._db_vt_version,
                            ts_start=self._db_ts_start,
                            ts_end=self._db_ts_end,
                            parent_id=self._db_parent_id,
                            parent_type=self._db_parent_type,
                            parent_version=self._db_parent_version,
                            completed=self._db_completed,
                            name=self._db_name)
        if self._db_item_execs is None:
            cp._db_item_execs = []
        else:
            cp._db_item_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_item_execs]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_item_execs_id_index = dict((v.db_id, v) for v in cp._db_item_execs)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into a current-schema DBWorkflowExec.

        trans_dict may register, under this class's name, per-field callables
        that compute the translated value; any field without a registered
        callable is copied from old_obj when present and not None.  Child
        objects are upgraded via their own update_version.
        """
        if new_obj is None:
            new_obj = DBWorkflowExec()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        # item_execs is heterogeneous: dispatch on each child's vtType
        if 'item_execs' in class_dict:
            res = class_dict['item_execs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_item_exec(obj)
        elif hasattr(old_obj, 'db_item_execs') and old_obj.db_item_execs is not None:
            for obj in old_obj.db_item_execs:
                if obj.vtType == 'module_exec':
                    new_obj.db_add_item_exec(DBModuleExec.update_version(obj, trans_dict))
                elif obj.vtType == 'group_exec':
                    new_obj.db_add_item_exec(DBGroupExec.update_version(obj, trans_dict))
                elif obj.vtType == 'loop_exec':
                    new_obj.db_add_item_exec(DBLoopExec.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_item_execs') and hasattr(new_obj, 'db_deleted_item_execs'):
            for obj in old_obj.db_deleted_item_execs:
                if obj.vtType == 'module_exec':
                    n_obj = DBModuleExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
                elif obj.vtType == 'group_exec':
                    n_obj = DBGroupExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
                elif obj.vtType == 'loop_exec':
                    n_obj = DBLoopExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
        # scalar fields: hook value wins, otherwise copy non-None old value
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'user' in class_dict:
            res = class_dict['user'](old_obj, trans_dict)
            new_obj.db_user = res
        elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None:
            new_obj.db_user = old_obj.db_user
        if 'ip' in class_dict:
            res = class_dict['ip'](old_obj, trans_dict)
            new_obj.db_ip = res
        elif hasattr(old_obj, 'db_ip') and old_obj.db_ip is not None:
            new_obj.db_ip = old_obj.db_ip
        if 'session' in class_dict:
            res = class_dict['session'](old_obj, trans_dict)
            new_obj.db_session = res
        elif hasattr(old_obj, 'db_session') and old_obj.db_session is not None:
            new_obj.db_session = old_obj.db_session
        if 'vt_version' in class_dict:
            res = class_dict['vt_version'](old_obj, trans_dict)
            new_obj.db_vt_version = res
        elif hasattr(old_obj, 'db_vt_version') and old_obj.db_vt_version is not None:
            new_obj.db_vt_version = old_obj.db_vt_version
        if 'ts_start' in class_dict:
            res = class_dict['ts_start'](old_obj, trans_dict)
            new_obj.db_ts_start = res
        elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
            new_obj.db_ts_start = old_obj.db_ts_start
        if 'ts_end' in class_dict:
            res = class_dict['ts_end'](old_obj, trans_dict)
            new_obj.db_ts_end = res
        elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
            new_obj.db_ts_end = old_obj.db_ts_end
        if 'parent_id' in class_dict:
            res = class_dict['parent_id'](old_obj, trans_dict)
            new_obj.db_parent_id = res
        elif hasattr(old_obj, 'db_parent_id') and old_obj.db_parent_id is not None:
            new_obj.db_parent_id = old_obj.db_parent_id
        if 'parent_type' in class_dict:
            res = class_dict['parent_type'](old_obj, trans_dict)
            new_obj.db_parent_type = res
        elif hasattr(old_obj, 'db_parent_type') and old_obj.db_parent_type is not None:
            new_obj.db_parent_type = old_obj.db_parent_type
        if 'parent_version' in class_dict:
            res = class_dict['parent_version'](old_obj, trans_dict)
            new_obj.db_parent_version = res
        elif hasattr(old_obj, 'db_parent_version') and old_obj.db_parent_version is not None:
            new_obj.db_parent_version = old_obj.db_parent_version
        if 'completed' in class_dict:
            res = class_dict['completed'](old_obj, trans_dict)
            new_obj.db_completed = res
        elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
            new_obj.db_completed = old_obj.db_completed
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
children = []
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_item_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_item_exec(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_item_execs)
if remove:
self.db_deleted_annotations = []
self.db_deleted_item_execs = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_item_execs:
if child.has_changes():
return True
return False
    # --- item_execs collection accessors --------------------------------
    # The db_item_execs property keeps a parallel id index
    # (db_item_execs_id_index) in sync on add/change/delete; the property
    # setter only replaces the list and marks the object dirty (it does NOT
    # rebuild the index -- callers that swap the list must handle that).
    def __get_db_item_execs(self):
        return self._db_item_execs
    def __set_db_item_execs(self, item_execs):
        self._db_item_execs = item_execs
        self.is_dirty = True
    db_item_execs = property(__get_db_item_execs, __set_db_item_execs)
    def db_get_item_execs(self):
        return self._db_item_execs
    def db_add_item_exec(self, item_exec):
        self.is_dirty = True
        self._db_item_execs.append(item_exec)
        self.db_item_execs_id_index[item_exec.db_id] = item_exec
    def db_change_item_exec(self, item_exec):
        # Replace the entry with a matching db_id, or append if absent.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == item_exec.db_id:
                self._db_item_execs[i] = item_exec
                found = True
                break
        if not found:
            self._db_item_execs.append(item_exec)
        self.db_item_execs_id_index[item_exec.db_id] = item_exec
    def db_delete_item_exec(self, item_exec):
        # Items that were already persisted (not is_new) are parked in
        # db_deleted_item_execs so the storage layer can remove them later.
        # Raises KeyError if the id is not in the index.
        self.is_dirty = True
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == item_exec.db_id:
                if not self._db_item_execs[i].is_new:
                    self.db_deleted_item_execs.append(self._db_item_execs[i])
                del self._db_item_execs[i]
                break
        del self.db_item_execs_id_index[item_exec.db_id]
    def db_get_item_exec(self, key):
        # Linear scan by db_id; returns None when not found.
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == key:
                return self._db_item_execs[i]
        return None
    def db_get_item_exec_by_id(self, key):
        return self.db_item_execs_id_index[key]
    def db_has_item_exec_with_id(self, key):
        return key in self.db_item_execs_id_index
    # --- Generated scalar accessors -------------------------------------
    # Each scalar field X follows the same generated pattern:
    #   * db_X property -- the setter marks the object dirty;
    #   * db_add_X / db_change_X -- plain assignment, dirty flag untouched;
    #   * db_delete_X -- resets the field to None, ignoring its argument.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_user(self):
        return self._db_user
    def __set_db_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(__get_db_user, __set_db_user)
    def db_add_user(self, user):
        self._db_user = user
    def db_change_user(self, user):
        self._db_user = user
    def db_delete_user(self, user):
        self._db_user = None
    def __get_db_ip(self):
        return self._db_ip
    def __set_db_ip(self, ip):
        self._db_ip = ip
        self.is_dirty = True
    db_ip = property(__get_db_ip, __set_db_ip)
    def db_add_ip(self, ip):
        self._db_ip = ip
    def db_change_ip(self, ip):
        self._db_ip = ip
    def db_delete_ip(self, ip):
        self._db_ip = None
    def __get_db_session(self):
        return self._db_session
    def __set_db_session(self, session):
        self._db_session = session
        self.is_dirty = True
    db_session = property(__get_db_session, __set_db_session)
    def db_add_session(self, session):
        self._db_session = session
    def db_change_session(self, session):
        self._db_session = session
    def db_delete_session(self, session):
        self._db_session = None
    def __get_db_vt_version(self):
        return self._db_vt_version
    def __set_db_vt_version(self, vt_version):
        self._db_vt_version = vt_version
        self.is_dirty = True
    db_vt_version = property(__get_db_vt_version, __set_db_vt_version)
    def db_add_vt_version(self, vt_version):
        self._db_vt_version = vt_version
    def db_change_vt_version(self, vt_version):
        self._db_vt_version = vt_version
    def db_delete_vt_version(self, vt_version):
        self._db_vt_version = None
    def __get_db_ts_start(self):
        return self._db_ts_start
    def __set_db_ts_start(self, ts_start):
        self._db_ts_start = ts_start
        self.is_dirty = True
    db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
    def db_add_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_change_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_delete_ts_start(self, ts_start):
        self._db_ts_start = None
    def __get_db_ts_end(self):
        return self._db_ts_end
    def __set_db_ts_end(self, ts_end):
        self._db_ts_end = ts_end
        self.is_dirty = True
    db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
    def db_add_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_change_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_delete_ts_end(self, ts_end):
        self._db_ts_end = None
    def __get_db_parent_id(self):
        return self._db_parent_id
    def __set_db_parent_id(self, parent_id):
        self._db_parent_id = parent_id
        self.is_dirty = True
    db_parent_id = property(__get_db_parent_id, __set_db_parent_id)
    def db_add_parent_id(self, parent_id):
        self._db_parent_id = parent_id
    def db_change_parent_id(self, parent_id):
        self._db_parent_id = parent_id
    def db_delete_parent_id(self, parent_id):
        self._db_parent_id = None
    def __get_db_parent_type(self):
        return self._db_parent_type
    def __set_db_parent_type(self, parent_type):
        self._db_parent_type = parent_type
        self.is_dirty = True
    db_parent_type = property(__get_db_parent_type, __set_db_parent_type)
    def db_add_parent_type(self, parent_type):
        self._db_parent_type = parent_type
    def db_change_parent_type(self, parent_type):
        self._db_parent_type = parent_type
    def db_delete_parent_type(self, parent_type):
        self._db_parent_type = None
    def __get_db_parent_version(self):
        return self._db_parent_version
    def __set_db_parent_version(self, parent_version):
        self._db_parent_version = parent_version
        self.is_dirty = True
    db_parent_version = property(__get_db_parent_version, __set_db_parent_version)
    def db_add_parent_version(self, parent_version):
        self._db_parent_version = parent_version
    def db_change_parent_version(self, parent_version):
        self._db_parent_version = parent_version
    def db_delete_parent_version(self, parent_version):
        self._db_parent_version = None
    def __get_db_completed(self):
        return self._db_completed
    def __set_db_completed(self, completed):
        self._db_completed = completed
        self.is_dirty = True
    db_completed = property(__get_db_completed, __set_db_completed)
    def db_add_completed(self, completed):
        self._db_completed = completed
    def db_change_completed(self, completed):
        self._db_completed = completed
    def db_delete_completed(self, completed):
        self._db_completed = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    # --- annotations collection accessors -------------------------------
    # Mirrors the item_execs accessors: add/change/delete keep the
    # db_annotations_id_index in sync; the property setter only replaces
    # the list and marks the object dirty (index is NOT rebuilt).
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_change_annotation(self, annotation):
        # Replace the entry with a matching db_id, or append if absent.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_delete_annotation(self, annotation):
        # Already-persisted annotations (not is_new) are parked in
        # db_deleted_annotations for later removal from storage.
        # Raises KeyError if the id is not in the index.
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
    def db_get_annotation(self, key):
        # Linear scan by db_id; returns None when not found.
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def getPrimaryKey(self):
        """Return this object's primary key (db_id)."""
        return self._db_id
class DBLocation(object):
    """Domain object holding an (x, y) position record with a database id."""
    vtType = 'location'
    def __init__(self, id=None, x=None, y=None):
        self._db_id = id
        self._db_x = x
        self._db_y = y
        # Freshly constructed objects are unsaved and modified by definition.
        self.is_new = True
        self.is_dirty = True
    def __copy__(self):
        """Support copy.copy() by delegating to do_copy()."""
        return DBLocation.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this location.

        When new_ids is True, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id pair is recorded in id_remap.
        """
        dup = DBLocation(id=self._db_id, x=self._db_x, y=self._db_y)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = fresh
            else:
                id_remap[(self.vtType, self.db_id)] = fresh
            dup.db_id = fresh
        else:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, using any per-field
        translation callables registered in trans_dict; untranslated fields
        are copied from old_obj when present and not None."""
        if new_obj is None:
            new_obj = DBLocation()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        for field in ('id', 'x', 'y'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """A location has no children; report only itself."""
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        """A location tracks no deleted children."""
        return []
    def has_changes(self):
        """True when this object has been modified since load/creation."""
        return bool(self.is_dirty)
    # db_id / db_x / db_y properties: setters mark the object dirty;
    # db_add_*/db_change_* assign without touching the dirty flag, and
    # db_delete_* resets the field to None regardless of its argument.
    def _get_id(self):
        return self._db_id
    def _set_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def _get_x(self):
        return self._db_x
    def _set_x(self, x):
        self._db_x = x
        self.is_dirty = True
    db_x = property(_get_x, _set_x)
    def db_add_x(self, x):
        self._db_x = x
    def db_change_x(self, x):
        self._db_x = x
    def db_delete_x(self, x):
        self._db_x = None
    def _get_y(self):
        return self._db_y
    def _set_y(self, y):
        self._db_y = y
        self.is_dirty = True
    db_y = property(_get_y, _set_y)
    def db_add_y(self, y):
        self._db_y = y
    def db_change_y(self, y):
        self._db_y = y
    def db_delete_y(self, y):
        self._db_y = None
    def getPrimaryKey(self):
        """Return this object's primary key (db_id)."""
        return self._db_id
class DBFunction(object):
    """Domain object for a 'function' record: an id, a position, a name,
    and an ordered list of child parameters kept in sync with an id index."""
    vtType = 'function'
    def __init__(self, id=None, pos=None, name=None, parameters=None):
        self._db_id = id
        self._db_pos = pos
        self._db_name = name
        # Parameters removed via db_delete_parameter are parked here until
        # db_deleted_children(remove=True) collects them.
        self.db_deleted_parameters = []
        self._db_parameters = [] if parameters is None else parameters
        self.db_parameters_id_index = dict((v.db_id, v)
                                           for v in self._db_parameters)
        self.is_new = True
        self.is_dirty = True
    def __copy__(self):
        """Support copy.copy() by delegating to do_copy()."""
        return DBFunction.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this function, recursively copying parameters.

        When new_ids is True, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id pair is recorded in id_remap.
        """
        dup = DBFunction(id=self._db_id, pos=self._db_pos, name=self._db_name)
        if self._db_parameters is None:
            dup._db_parameters = []
        else:
            dup._db_parameters = [p.do_copy(new_ids, id_scope, id_remap)
                                  for p in self._db_parameters]
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = fresh
            else:
                id_remap[(self.vtType, self.db_id)] = fresh
            dup.db_id = fresh
        # rebuild the id index for the copied parameter list
        dup.db_parameters_id_index = dict((p.db_id, p)
                                          for p in dup._db_parameters)
        if not new_ids:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, using any per-field
        translation callables registered in trans_dict; untranslated fields
        are copied from old_obj when present and not None."""
        if new_obj is None:
            new_obj = DBFunction()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        for field in ('id', 'pos', 'name'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        if 'parameters' in class_dict:
            for obj in class_dict['parameters'](old_obj, trans_dict):
                new_obj.db_add_parameter(obj)
        elif getattr(old_obj, 'db_parameters', None) is not None:
            for obj in old_obj.db_parameters:
                new_obj.db_add_parameter(
                    DBParameter.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_parameters') and \
                hasattr(new_obj, 'db_deleted_parameters'):
            for obj in old_obj.db_deleted_parameters:
                new_obj.db_deleted_parameters.append(
                    DBParameter.update_version(obj, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples for this function
        and its parameters; orphan detaches each parameter after visiting."""
        result = []
        for parameter in list(self.db_parameters):
            result.extend(parameter.db_children((self.vtType, self.db_id),
                                                orphan, for_action))
            if orphan:
                self.db_delete_parameter(parameter)
        result.append((self, parent[0], parent[1]))
        return result
    def db_deleted_children(self, remove=False):
        """Return (and optionally clear) the pending deleted parameters."""
        pending = list(self.db_deleted_parameters)
        if remove:
            self.db_deleted_parameters = []
        return pending
    def has_changes(self):
        """True when this function or any of its parameters is dirty."""
        if self.is_dirty:
            return True
        return any(p.has_changes() for p in self._db_parameters)
    # id / pos / name properties: setters mark the object dirty;
    # db_add_*/db_change_* assign without touching the dirty flag, and
    # db_delete_* resets the field to None regardless of its argument.
    def _get_id(self):
        return self._db_id
    def _set_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def _get_pos(self):
        return self._db_pos
    def _set_pos(self, pos):
        self._db_pos = pos
        self.is_dirty = True
    db_pos = property(_get_pos, _set_pos)
    def db_add_pos(self, pos):
        self._db_pos = pos
    def db_change_pos(self, pos):
        self._db_pos = pos
    def db_delete_pos(self, pos):
        self._db_pos = None
    def _get_name(self):
        return self._db_name
    def _set_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(_get_name, _set_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    # parameters collection: add/change/delete keep db_parameters_id_index
    # in sync; the property setter only replaces the list.
    def _get_parameters(self):
        return self._db_parameters
    def _set_parameters(self, parameters):
        self._db_parameters = parameters
        self.is_dirty = True
    db_parameters = property(_get_parameters, _set_parameters)
    def db_get_parameters(self):
        return self._db_parameters
    def db_add_parameter(self, parameter):
        self.is_dirty = True
        self._db_parameters.append(parameter)
        self.db_parameters_id_index[parameter.db_id] = parameter
    def db_change_parameter(self, parameter):
        # Replace the entry with a matching db_id, or append if absent.
        self.is_dirty = True
        for pos, existing in enumerate(self._db_parameters):
            if existing.db_id == parameter.db_id:
                self._db_parameters[pos] = parameter
                break
        else:
            self._db_parameters.append(parameter)
        self.db_parameters_id_index[parameter.db_id] = parameter
    def db_delete_parameter(self, parameter):
        # Already-persisted parameters (not is_new) are parked in
        # db_deleted_parameters; raises KeyError if the id is not indexed.
        self.is_dirty = True
        for pos, existing in enumerate(self._db_parameters):
            if existing.db_id == parameter.db_id:
                if not existing.is_new:
                    self.db_deleted_parameters.append(existing)
                del self._db_parameters[pos]
                break
        del self.db_parameters_id_index[parameter.db_id]
    def db_get_parameter(self, key):
        # Linear scan by db_id; returns None when not found.
        for existing in self._db_parameters:
            if existing.db_id == key:
                return existing
        return None
    def db_get_parameter_by_id(self, key):
        return self.db_parameters_id_index[key]
    def db_has_parameter_with_id(self, key):
        return key in self.db_parameters_id_index
    def getPrimaryKey(self):
        """Return this object's primary key (db_id)."""
        return self._db_id
class DBActionAnnotation(object):
    """Domain object for a key/value annotation attached to an action,
    with date and user provenance."""
    vtType = 'actionAnnotation'
    def __init__(self, id=None, key=None, value=None, action_id=None, date=None, user=None):
        self._db_id = id
        self._db_key = key
        self._db_value = value
        self._db_action_id = action_id
        self._db_date = date
        self._db_user = user
        # Freshly constructed objects are unsaved and modified by definition.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        """Support copy.copy() by delegating to do_copy() with defaults."""
        return DBActionAnnotation.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this annotation.

        When new_ids is True, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id pair is recorded in id_remap.  If id_remap
        already maps ('action', action_id), the copy's action_id is remapped
        to the new action id.
        """
        cp = DBActionAnnotation(id=self._db_id,
                                key=self._db_key,
                                value=self._db_value,
                                action_id=self._db_action_id,
                                date=self._db_date,
                                user=self._db_user)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # Bug fix: id_remap defaults to None, so guard before the membership
        # test -- previously do_copy() (and therefore copy.copy()) raised
        # TypeError when called without an id_remap dict.
        if id_remap is not None and hasattr(self, 'db_action_id') and \
                ('action', self._db_action_id) in id_remap:
            cp._db_action_id = id_remap[('action', self._db_action_id)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, using any per-field
        translation callables registered in trans_dict; untranslated fields
        are copied from old_obj when present and not None."""
        if new_obj is None:
            new_obj = DBActionAnnotation()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'key' in class_dict:
            res = class_dict['key'](old_obj, trans_dict)
            new_obj.db_key = res
        elif hasattr(old_obj, 'db_key') and old_obj.db_key is not None:
            new_obj.db_key = old_obj.db_key
        if 'value' in class_dict:
            res = class_dict['value'](old_obj, trans_dict)
            new_obj.db_value = res
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            new_obj.db_value = old_obj.db_value
        if 'action_id' in class_dict:
            res = class_dict['action_id'](old_obj, trans_dict)
            new_obj.db_action_id = res
        elif hasattr(old_obj, 'db_action_id') and old_obj.db_action_id is not None:
            new_obj.db_action_id = old_obj.db_action_id
        if 'date' in class_dict:
            res = class_dict['date'](old_obj, trans_dict)
            new_obj.db_date = res
        elif hasattr(old_obj, 'db_date') and old_obj.db_date is not None:
            new_obj.db_date = old_obj.db_date
        if 'user' in class_dict:
            res = class_dict['user'](old_obj, trans_dict)
            new_obj.db_user = res
        elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None:
            new_obj.db_user = old_obj.db_user
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """An annotation has no children; report only itself."""
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        """An annotation tracks no deleted children."""
        children = []
        return children
    def has_changes(self):
        """True when this object has been modified since load/creation."""
        if self.is_dirty:
            return True
        return False
    # Scalar accessors below follow the generated pattern: the db_X property
    # setter marks the object dirty; db_add_X / db_change_X assign without
    # touching the dirty flag; db_delete_X resets the field to None,
    # ignoring its argument.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_key(self):
        return self._db_key
    def __set_db_key(self, key):
        self._db_key = key
        self.is_dirty = True
    db_key = property(__get_db_key, __set_db_key)
    def db_add_key(self, key):
        self._db_key = key
    def db_change_key(self, key):
        self._db_key = key
    def db_delete_key(self, key):
        self._db_key = None
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        self._db_value = None
    def __get_db_action_id(self):
        return self._db_action_id
    def __set_db_action_id(self, action_id):
        self._db_action_id = action_id
        self.is_dirty = True
    db_action_id = property(__get_db_action_id, __set_db_action_id)
    def db_add_action_id(self, action_id):
        self._db_action_id = action_id
    def db_change_action_id(self, action_id):
        self._db_action_id = action_id
    def db_delete_action_id(self, action_id):
        self._db_action_id = None
    def __get_db_date(self):
        return self._db_date
    def __set_db_date(self, date):
        self._db_date = date
        self.is_dirty = True
    db_date = property(__get_db_date, __set_db_date)
    def db_add_date(self, date):
        self._db_date = date
    def db_change_date(self, date):
        self._db_date = date
    def db_delete_date(self, date):
        self._db_date = None
    def __get_db_user(self):
        return self._db_user
    def __set_db_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(__get_db_user, __set_db_user)
    def db_add_user(self, user):
        self._db_user = user
    def db_change_user(self, user):
        self._db_user = user
    def db_delete_user(self, user):
        self._db_user = None
    def getPrimaryKey(self):
        """Return this object's primary key (db_id)."""
        return self._db_id
class DBProvActivity(object):
    """Provenance 'activity' record with start/end times, VisTrails-specific
    execution metadata (vt_* fields), and an optional is_part_of link to a
    containing activity."""
    vtType = 'prov_activity'
    def __init__(self, id=None, startTime=None, endTime=None, vt_id=None, vt_type=None, vt_cached=None, vt_completed=None, vt_machine_id=None, vt_error=None, is_part_of=None):
        self._db_id = id
        self._db_startTime = startTime
        self._db_endTime = endTime
        self._db_vt_id = vt_id
        self._db_vt_type = vt_type
        self._db_vt_cached = vt_cached
        self._db_vt_completed = vt_completed
        self._db_vt_machine_id = vt_machine_id
        self._db_vt_error = vt_error
        # Holds formerly-attached is_part_of objects pending deletion.
        self.db_deleted_is_part_of = []
        self._db_is_part_of = is_part_of
        # Freshly constructed objects are unsaved and modified by definition.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        """Support copy.copy() by delegating to do_copy() with default args."""
        return DBProvActivity.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this activity, recursively copying the optional
        is_part_of child.

        When new_ids is True, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id pair is recorded in id_remap; both
        arguments must then be provided by the caller.
        """
        cp = DBProvActivity(id=self._db_id,
                            startTime=self._db_startTime,
                            endTime=self._db_endTime,
                            vt_id=self._db_vt_id,
                            vt_type=self._db_vt_type,
                            vt_cached=self._db_vt_cached,
                            vt_completed=self._db_vt_completed,
                            vt_machine_id=self._db_vt_machine_id,
                            vt_error=self._db_vt_error)
        if self._db_is_part_of is not None:
            cp._db_is_part_of = self._db_is_part_of.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into a current-schema DBProvActivity.

        trans_dict may register, under this class's name, per-field callables
        that compute the translated value; any field without a registered
        callable is copied from old_obj when present and not None.  The
        is_part_of child is upgraded via DBIsPartOf.update_version.
        """
        if new_obj is None:
            new_obj = DBProvActivity()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'startTime' in class_dict:
            res = class_dict['startTime'](old_obj, trans_dict)
            new_obj.db_startTime = res
        elif hasattr(old_obj, 'db_startTime') and old_obj.db_startTime is not None:
            new_obj.db_startTime = old_obj.db_startTime
        if 'endTime' in class_dict:
            res = class_dict['endTime'](old_obj, trans_dict)
            new_obj.db_endTime = res
        elif hasattr(old_obj, 'db_endTime') and old_obj.db_endTime is not None:
            new_obj.db_endTime = old_obj.db_endTime
        if 'vt_id' in class_dict:
            res = class_dict['vt_id'](old_obj, trans_dict)
            new_obj.db_vt_id = res
        elif hasattr(old_obj, 'db_vt_id') and old_obj.db_vt_id is not None:
            new_obj.db_vt_id = old_obj.db_vt_id
        if 'vt_type' in class_dict:
            res = class_dict['vt_type'](old_obj, trans_dict)
            new_obj.db_vt_type = res
        elif hasattr(old_obj, 'db_vt_type') and old_obj.db_vt_type is not None:
            new_obj.db_vt_type = old_obj.db_vt_type
        if 'vt_cached' in class_dict:
            res = class_dict['vt_cached'](old_obj, trans_dict)
            new_obj.db_vt_cached = res
        elif hasattr(old_obj, 'db_vt_cached') and old_obj.db_vt_cached is not None:
            new_obj.db_vt_cached = old_obj.db_vt_cached
        if 'vt_completed' in class_dict:
            res = class_dict['vt_completed'](old_obj, trans_dict)
            new_obj.db_vt_completed = res
        elif hasattr(old_obj, 'db_vt_completed') and old_obj.db_vt_completed is not None:
            new_obj.db_vt_completed = old_obj.db_vt_completed
        if 'vt_machine_id' in class_dict:
            res = class_dict['vt_machine_id'](old_obj, trans_dict)
            new_obj.db_vt_machine_id = res
        elif hasattr(old_obj, 'db_vt_machine_id') and old_obj.db_vt_machine_id is not None:
            new_obj.db_vt_machine_id = old_obj.db_vt_machine_id
        if 'vt_error' in class_dict:
            res = class_dict['vt_error'](old_obj, trans_dict)
            new_obj.db_vt_error = res
        elif hasattr(old_obj, 'db_vt_error') and old_obj.db_vt_error is not None:
            new_obj.db_vt_error = old_obj.db_vt_error
        if 'is_part_of' in class_dict:
            res = class_dict['is_part_of'](old_obj, trans_dict)
            new_obj.db_is_part_of = res
        elif hasattr(old_obj, 'db_is_part_of') and old_obj.db_is_part_of is not None:
            obj = old_obj.db_is_part_of
            new_obj.db_add_is_part_of(DBIsPartOf.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_is_part_of') and hasattr(new_obj, 'db_deleted_is_part_of'):
            for obj in old_obj.db_deleted_is_part_of:
                n_obj = DBIsPartOf.update_version(obj, trans_dict)
                new_obj.db_deleted_is_part_of.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
children = []
if self._db_is_part_of is not None:
children.extend(self._db_is_part_of.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
self._db_is_part_of = None
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_is_part_of)
if remove:
self.db_deleted_is_part_of = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_is_part_of is not None and self._db_is_part_of.has_changes():
return True
return False
    # --- Generated scalar-field accessors --------------------------------
    # Each scalar field gets: a private getter/setter pair exposed as a
    # `db_<name>` property (the setter marks the object dirty), db_add_ and
    # db_change_ helpers that assign WITHOUT touching the dirty flag, and a
    # db_delete_ helper that simply clears the value (scalar fields track no
    # deletions).
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # 'startTime' field accessors.
    def __get_db_startTime(self):
        return self._db_startTime
    def __set_db_startTime(self, startTime):
        self._db_startTime = startTime
        self.is_dirty = True
    db_startTime = property(__get_db_startTime, __set_db_startTime)
    def db_add_startTime(self, startTime):
        self._db_startTime = startTime
    def db_change_startTime(self, startTime):
        self._db_startTime = startTime
    def db_delete_startTime(self, startTime):
        self._db_startTime = None
    # 'endTime' field accessors.
    def __get_db_endTime(self):
        return self._db_endTime
    def __set_db_endTime(self, endTime):
        self._db_endTime = endTime
        self.is_dirty = True
    db_endTime = property(__get_db_endTime, __set_db_endTime)
    def db_add_endTime(self, endTime):
        self._db_endTime = endTime
    def db_change_endTime(self, endTime):
        self._db_endTime = endTime
    def db_delete_endTime(self, endTime):
        self._db_endTime = None
    # 'vt_id' field accessors.
    def __get_db_vt_id(self):
        return self._db_vt_id
    def __set_db_vt_id(self, vt_id):
        self._db_vt_id = vt_id
        self.is_dirty = True
    db_vt_id = property(__get_db_vt_id, __set_db_vt_id)
    def db_add_vt_id(self, vt_id):
        self._db_vt_id = vt_id
    def db_change_vt_id(self, vt_id):
        self._db_vt_id = vt_id
    def db_delete_vt_id(self, vt_id):
        self._db_vt_id = None
    # 'vt_type' field accessors.
    def __get_db_vt_type(self):
        return self._db_vt_type
    def __set_db_vt_type(self, vt_type):
        self._db_vt_type = vt_type
        self.is_dirty = True
    db_vt_type = property(__get_db_vt_type, __set_db_vt_type)
    def db_add_vt_type(self, vt_type):
        self._db_vt_type = vt_type
    def db_change_vt_type(self, vt_type):
        self._db_vt_type = vt_type
    def db_delete_vt_type(self, vt_type):
        self._db_vt_type = None
    # 'vt_cached' field accessors.
    def __get_db_vt_cached(self):
        return self._db_vt_cached
    def __set_db_vt_cached(self, vt_cached):
        self._db_vt_cached = vt_cached
        self.is_dirty = True
    db_vt_cached = property(__get_db_vt_cached, __set_db_vt_cached)
    def db_add_vt_cached(self, vt_cached):
        self._db_vt_cached = vt_cached
    def db_change_vt_cached(self, vt_cached):
        self._db_vt_cached = vt_cached
    def db_delete_vt_cached(self, vt_cached):
        self._db_vt_cached = None
    # 'vt_completed' field accessors.
    def __get_db_vt_completed(self):
        return self._db_vt_completed
    def __set_db_vt_completed(self, vt_completed):
        self._db_vt_completed = vt_completed
        self.is_dirty = True
    db_vt_completed = property(__get_db_vt_completed, __set_db_vt_completed)
    def db_add_vt_completed(self, vt_completed):
        self._db_vt_completed = vt_completed
    def db_change_vt_completed(self, vt_completed):
        self._db_vt_completed = vt_completed
    def db_delete_vt_completed(self, vt_completed):
        self._db_vt_completed = None
    # 'vt_machine_id' field accessors.
    def __get_db_vt_machine_id(self):
        return self._db_vt_machine_id
    def __set_db_vt_machine_id(self, vt_machine_id):
        self._db_vt_machine_id = vt_machine_id
        self.is_dirty = True
    db_vt_machine_id = property(__get_db_vt_machine_id, __set_db_vt_machine_id)
    def db_add_vt_machine_id(self, vt_machine_id):
        self._db_vt_machine_id = vt_machine_id
    def db_change_vt_machine_id(self, vt_machine_id):
        self._db_vt_machine_id = vt_machine_id
    def db_delete_vt_machine_id(self, vt_machine_id):
        self._db_vt_machine_id = None
    # 'vt_error' field accessors.
    def __get_db_vt_error(self):
        return self._db_vt_error
    def __set_db_vt_error(self, vt_error):
        self._db_vt_error = vt_error
        self.is_dirty = True
    db_vt_error = property(__get_db_vt_error, __set_db_vt_error)
    def db_add_vt_error(self, vt_error):
        self._db_vt_error = vt_error
    def db_change_vt_error(self, vt_error):
        self._db_vt_error = vt_error
    def db_delete_vt_error(self, vt_error):
        self._db_vt_error = None
    # Accessors for the 'is_part_of' child object.  Unlike scalar fields,
    # db_delete_is_part_of remembers the removed child in
    # db_deleted_is_part_of (unless this object was never persisted) so the
    # deletion can later be propagated to storage.
    def __get_db_is_part_of(self):
        return self._db_is_part_of
    def __set_db_is_part_of(self, is_part_of):
        self._db_is_part_of = is_part_of
        self.is_dirty = True
    db_is_part_of = property(__get_db_is_part_of, __set_db_is_part_of)
    def db_add_is_part_of(self, is_part_of):
        self._db_is_part_of = is_part_of
    def db_change_is_part_of(self, is_part_of):
        self._db_is_part_of = is_part_of
    def db_delete_is_part_of(self, is_part_of):
        if not self.is_new:
            self.db_deleted_is_part_of.append(self._db_is_part_of)
        self._db_is_part_of = None
    def getPrimaryKey(self):
        # The object's primary key is its `id` field.
        return self._db_id
class DBProvUsage(object):
    """PROV 'usage' relation: an activity consuming an entity in some role.

    Auto-generated-style domain object.  ``prov_activity`` and
    ``prov_entity`` are single child objects; ``prov_role`` is a scalar.
    Children removed via ``db_delete_*`` are remembered in ``db_deleted_*``
    lists until persisted.
    """

    vtType = 'prov_usage'

    def __init__(self, prov_activity=None, prov_entity=None, prov_role=None):
        self.db_deleted_prov_activity = []
        self.db_deleted_prov_entity = []
        self._db_prov_activity = prov_activity
        self._db_prov_entity = prov_entity
        self._db_prov_role = prov_role
        # A freshly built object is both unsaved and unsynchronized.
        self.is_new = True
        self.is_dirty = True

    def __copy__(self):
        return DBProvUsage.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object.  With new_ids, allocate a fresh id from
        *id_scope* and record the (type, old_id) -> new_id translation in
        *id_remap*."""
        duplicate = DBProvUsage(prov_role=self._db_prov_role)
        if self._db_prov_activity is not None:
            duplicate._db_prov_activity = \
                self._db_prov_activity.do_copy(new_ids, id_scope, id_remap)
        if self._db_prov_entity is not None:
            duplicate._db_prov_entity = \
                self._db_prov_entity.do_copy(new_ids, id_scope, id_remap)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            mapped = id_scope.remap[self.vtType] \
                if self.vtType in id_scope.remap else self.vtType
            id_remap[(mapped, self.db_id)] = fresh
            duplicate.db_id = fresh
        else:
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* (possibly of an older schema) into *new_obj*,
        applying any per-field converter callables from *trans_dict*."""
        if new_obj is None:
            new_obj = DBProvUsage()
        converters = trans_dict.get(new_obj.__class__.__name__, {})
        if 'prov_activity' in converters:
            new_obj.db_prov_activity = converters['prov_activity'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_prov_activity', None) is not None:
            new_obj.db_add_prov_activity(
                DBRefProvActivity.update_version(old_obj.db_prov_activity, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_activity') and hasattr(new_obj, 'db_deleted_prov_activity'):
            for stale in old_obj.db_deleted_prov_activity:
                new_obj.db_deleted_prov_activity.append(
                    DBRefProvActivity.update_version(stale, trans_dict))
        if 'prov_entity' in converters:
            new_obj.db_prov_entity = converters['prov_entity'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_prov_entity', None) is not None:
            new_obj.db_add_prov_entity(
                DBRefProvEntity.update_version(old_obj.db_prov_entity, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_entity') and hasattr(new_obj, 'db_deleted_prov_entity'):
            for stale in old_obj.db_deleted_prov_entity:
                new_obj.db_deleted_prov_entity.append(
                    DBRefProvEntity.update_version(stale, trans_dict))
        if 'prov_role' in converters:
            new_obj.db_prov_role = converters['prov_role'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_prov_role', None) is not None:
            new_obj.db_prov_role = old_obj.db_prov_role
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None, None), orphan=False, for_action=False):
        """Flatten self and children into (obj, parent_type, parent_id)
        triples, children first; orphan=True detaches the children."""
        triples = []
        for attr in ('_db_prov_activity', '_db_prov_entity'):
            child = getattr(self, attr)
            if child is not None:
                triples.extend(child.db_children((self.vtType, self.db_id),
                                                 orphan, for_action))
                if orphan:
                    setattr(self, attr, None)
        triples.append((self, parent[0], parent[1]))
        return triples

    def db_deleted_children(self, remove=False):
        """Return children deleted since the last save; optionally forget."""
        removed = self.db_deleted_prov_activity + self.db_deleted_prov_entity
        if remove:
            self.db_deleted_prov_activity = []
            self.db_deleted_prov_entity = []
        return removed

    def has_changes(self):
        """True when this object or any child has unsaved modifications."""
        if self.is_dirty:
            return True
        return any(child is not None and child.has_changes()
                   for child in (self._db_prov_activity, self._db_prov_entity))

    # -- 'prov_activity' child -------------------------------------------
    def _get_prov_activity(self):
        return self._db_prov_activity
    def _set_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
        self.is_dirty = True
    db_prov_activity = property(_get_prov_activity, _set_prov_activity)
    def db_add_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
    def db_change_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
    def db_delete_prov_activity(self, prov_activity):
        # Track the removed child for persistence unless never saved.
        if not self.is_new:
            self.db_deleted_prov_activity.append(self._db_prov_activity)
        self._db_prov_activity = None

    # -- 'prov_entity' child ---------------------------------------------
    def _get_prov_entity(self):
        return self._db_prov_entity
    def _set_prov_entity(self, prov_entity):
        self._db_prov_entity = prov_entity
        self.is_dirty = True
    db_prov_entity = property(_get_prov_entity, _set_prov_entity)
    def db_add_prov_entity(self, prov_entity):
        self._db_prov_entity = prov_entity
    def db_change_prov_entity(self, prov_entity):
        self._db_prov_entity = prov_entity
    def db_delete_prov_entity(self, prov_entity):
        if not self.is_new:
            self.db_deleted_prov_entity.append(self._db_prov_entity)
        self._db_prov_entity = None

    # -- 'prov_role' scalar ----------------------------------------------
    def _get_prov_role(self):
        return self._db_prov_role
    def _set_prov_role(self, prov_role):
        self._db_prov_role = prov_role
        self.is_dirty = True
    db_prov_role = property(_get_prov_role, _set_prov_role)
    def db_add_prov_role(self, prov_role):
        self._db_prov_role = prov_role
    def db_change_prov_role(self, prov_role):
        self._db_prov_role = prov_role
    def db_delete_prov_role(self, prov_role):
        self._db_prov_role = None
class DBOpmArtifactIdEffect(object):
    """Reference (by id) to the OPM artifact on the effect side of an edge."""

    vtType = 'opm_artifact_id_effect'

    def __init__(self, id=None):
        self._db_id = id
        self.is_new = True
        self.is_dirty = True

    def __copy__(self):
        return DBOpmArtifactIdEffect.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this reference, translating the referenced artifact id
        through *id_remap* when a mapping entry for it exists."""
        duplicate = DBOpmArtifactIdEffect(id=self._db_id)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            mapped = id_scope.remap[self.vtType] \
                if self.vtType in id_scope.remap else self.vtType
            id_remap[(mapped, self.db_id)] = fresh
            duplicate.db_id = fresh
        # NOTE(review): id_remap must be a mapping here -- calling do_copy()
        # with the default id_remap=None raises TypeError, exactly as the
        # generated original did.
        if hasattr(self, 'db_id') and ('opm_artifact', self._db_id) in id_remap:
            duplicate._db_id = id_remap[('opm_artifact', self._db_id)]
        if not new_ids:
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* into *new_obj*, honoring any 'id' converter
        registered in *trans_dict* for this class."""
        if new_obj is None:
            new_obj = DBOpmArtifactIdEffect()
        converters = trans_dict.get(new_obj.__class__.__name__, {})
        if 'id' in converters:
            new_obj.db_id = converters['id'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_id', None) is not None:
            new_obj.db_id = old_obj.db_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None, None), orphan=False, for_action=False):
        # Leaf object: only itself, tagged with the parent's (type, id).
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # Leaf object: nothing is ever tracked for deletion.
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    # -- 'id' scalar ------------------------------------------------------
    def _get_id(self):
        return self._db_id
    def _set_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
class DBOpmGraph(object):
    """Auto-generated domain object for the OPM 'opm_graph' element.

    Aggregates the five sections of an Open Provenance Model document
    (accounts, processes, artifacts, agents, dependencies), each stored as
    a single child object.  A matching ``db_deleted_<field>`` list records
    children removed since the last save so deletions can be persisted.
    """
    vtType = 'opm_graph'
    def __init__(self, accounts=None, processes=None, artifacts=None, agents=None, dependencies=None):
        self.db_deleted_accounts = []
        self._db_accounts = accounts
        self.db_deleted_processes = []
        self._db_processes = processes
        self.db_deleted_artifacts = []
        self._db_artifacts = artifacts
        self.db_deleted_agents = []
        self._db_agents = agents
        self.db_deleted_dependencies = []
        self._db_dependencies = dependencies
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy with default arguments.
        return DBOpmGraph.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep-copies each child; with new_ids, a fresh id is drawn from
        # id_scope and the (type, old_id) -> new_id translation is recorded
        # in id_remap for use by id-referencing objects.
        cp = DBOpmGraph()
        if self._db_accounts is not None:
            cp._db_accounts = self._db_accounts.do_copy(new_ids, id_scope, id_remap)
        if self._db_processes is not None:
            cp._db_processes = self._db_processes.do_copy(new_ids, id_scope, id_remap)
        if self._db_artifacts is not None:
            cp._db_artifacts = self._db_artifacts.do_copy(new_ids, id_scope, id_remap)
        if self._db_agents is not None:
            cp._db_agents = self._db_agents.do_copy(new_ids, id_scope, id_remap)
        if self._db_dependencies is not None:
            cp._db_dependencies = self._db_dependencies.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Converts old_obj (possibly of an earlier schema version) into
        # new_obj.  trans_dict may carry a per-class dict of field-converter
        # callables; fields without a converter are recursively converted via
        # their own class's update_version.
        if new_obj is None:
            new_obj = DBOpmGraph()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            new_obj.db_accounts = res
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            obj = old_obj.db_accounts
            new_obj.db_add_accounts(DBOpmAccounts.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccounts.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'processes' in class_dict:
            res = class_dict['processes'](old_obj, trans_dict)
            new_obj.db_processes = res
        elif hasattr(old_obj, 'db_processes') and old_obj.db_processes is not None:
            obj = old_obj.db_processes
            new_obj.db_add_processes(DBOpmProcesses.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_processes') and hasattr(new_obj, 'db_deleted_processes'):
            for obj in old_obj.db_deleted_processes:
                n_obj = DBOpmProcesses.update_version(obj, trans_dict)
                new_obj.db_deleted_processes.append(n_obj)
        if 'artifacts' in class_dict:
            res = class_dict['artifacts'](old_obj, trans_dict)
            new_obj.db_artifacts = res
        elif hasattr(old_obj, 'db_artifacts') and old_obj.db_artifacts is not None:
            obj = old_obj.db_artifacts
            new_obj.db_add_artifacts(DBOpmArtifacts.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_artifacts') and hasattr(new_obj, 'db_deleted_artifacts'):
            for obj in old_obj.db_deleted_artifacts:
                n_obj = DBOpmArtifacts.update_version(obj, trans_dict)
                new_obj.db_deleted_artifacts.append(n_obj)
        if 'agents' in class_dict:
            res = class_dict['agents'](old_obj, trans_dict)
            new_obj.db_agents = res
        elif hasattr(old_obj, 'db_agents') and old_obj.db_agents is not None:
            obj = old_obj.db_agents
            new_obj.db_add_agents(DBOpmAgents.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_agents') and hasattr(new_obj, 'db_deleted_agents'):
            for obj in old_obj.db_deleted_agents:
                n_obj = DBOpmAgents.update_version(obj, trans_dict)
                new_obj.db_deleted_agents.append(n_obj)
        if 'dependencies' in class_dict:
            res = class_dict['dependencies'](old_obj, trans_dict)
            new_obj.db_dependencies = res
        elif hasattr(old_obj, 'db_dependencies') and old_obj.db_dependencies is not None:
            obj = old_obj.db_dependencies
            new_obj.db_add_dependencies(DBOpmDependencies.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_dependencies') and hasattr(new_obj, 'db_deleted_dependencies'):
            for obj in old_obj.db_deleted_dependencies:
                n_obj = DBOpmDependencies.update_version(obj, trans_dict)
                new_obj.db_deleted_dependencies.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Flattens the subtree into (object, parent_type, parent_id) triples,
        # children first; orphan=True detaches the children from this node.
        children = []
        if self._db_accounts is not None:
            children.extend(self._db_accounts.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_accounts = None
        if self._db_processes is not None:
            children.extend(self._db_processes.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_processes = None
        if self._db_artifacts is not None:
            children.extend(self._db_artifacts.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_artifacts = None
        if self._db_agents is not None:
            children.extend(self._db_agents.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_agents = None
        if self._db_dependencies is not None:
            children.extend(self._db_dependencies.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_dependencies = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Returns (and with remove=True, forgets) children deleted since the
        # last save.
        children = []
        children.extend(self.db_deleted_accounts)
        children.extend(self.db_deleted_processes)
        children.extend(self.db_deleted_artifacts)
        children.extend(self.db_deleted_agents)
        children.extend(self.db_deleted_dependencies)
        if remove:
            self.db_deleted_accounts = []
            self.db_deleted_processes = []
            self.db_deleted_artifacts = []
            self.db_deleted_agents = []
            self.db_deleted_dependencies = []
        return children
    def has_changes(self):
        # Dirty when this object itself or any child reports changes.
        if self.is_dirty:
            return True
        if self._db_accounts is not None and self._db_accounts.has_changes():
            return True
        if self._db_processes is not None and self._db_processes.has_changes():
            return True
        if self._db_artifacts is not None and self._db_artifacts.has_changes():
            return True
        if self._db_agents is not None and self._db_agents.has_changes():
            return True
        if self._db_dependencies is not None and self._db_dependencies.has_changes():
            return True
        return False
    # --- Generated child-object accessors: property setters mark the object
    # dirty; db_add_/db_change_ assign without touching the flag; db_delete_
    # remembers the removed child in the deleted list unless is_new.
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_add_accounts(self, accounts):
        self._db_accounts = accounts
    def db_change_accounts(self, accounts):
        self._db_accounts = accounts
    def db_delete_accounts(self, accounts):
        if not self.is_new:
            self.db_deleted_accounts.append(self._db_accounts)
        self._db_accounts = None
    def __get_db_processes(self):
        return self._db_processes
    def __set_db_processes(self, processes):
        self._db_processes = processes
        self.is_dirty = True
    db_processes = property(__get_db_processes, __set_db_processes)
    def db_add_processes(self, processes):
        self._db_processes = processes
    def db_change_processes(self, processes):
        self._db_processes = processes
    def db_delete_processes(self, processes):
        if not self.is_new:
            self.db_deleted_processes.append(self._db_processes)
        self._db_processes = None
    def __get_db_artifacts(self):
        return self._db_artifacts
    def __set_db_artifacts(self, artifacts):
        self._db_artifacts = artifacts
        self.is_dirty = True
    db_artifacts = property(__get_db_artifacts, __set_db_artifacts)
    def db_add_artifacts(self, artifacts):
        self._db_artifacts = artifacts
    def db_change_artifacts(self, artifacts):
        self._db_artifacts = artifacts
    def db_delete_artifacts(self, artifacts):
        if not self.is_new:
            self.db_deleted_artifacts.append(self._db_artifacts)
        self._db_artifacts = None
    def __get_db_agents(self):
        return self._db_agents
    def __set_db_agents(self, agents):
        self._db_agents = agents
        self.is_dirty = True
    db_agents = property(__get_db_agents, __set_db_agents)
    def db_add_agents(self, agents):
        self._db_agents = agents
    def db_change_agents(self, agents):
        self._db_agents = agents
    def db_delete_agents(self, agents):
        if not self.is_new:
            self.db_deleted_agents.append(self._db_agents)
        self._db_agents = None
    def __get_db_dependencies(self):
        return self._db_dependencies
    def __set_db_dependencies(self, dependencies):
        self._db_dependencies = dependencies
        self.is_dirty = True
    db_dependencies = property(__get_db_dependencies, __set_db_dependencies)
    def db_add_dependencies(self, dependencies):
        self._db_dependencies = dependencies
    def db_change_dependencies(self, dependencies):
        self._db_dependencies = dependencies
    def db_delete_dependencies(self, dependencies):
        if not self.is_new:
            self.db_deleted_dependencies.append(self._db_dependencies)
        self._db_dependencies = None
class DBIsPartOf(object):
    """Provenance link recording that one element is part of another.

    Auto-generated-style domain object wrapping a single scalar field,
    ``prov_ref``, with the standard property / add / change / delete suite.
    """

    vtType = 'is_part_of'

    def __init__(self, prov_ref=None):
        self._db_prov_ref = prov_ref
        # A freshly built object is both unsaved and unsynchronized.
        self.is_new = True
        self.is_dirty = True

    def __copy__(self):
        return DBIsPartOf.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object; with new_ids, allocate a fresh id from
        *id_scope* and record the translation in *id_remap*."""
        duplicate = DBIsPartOf(prov_ref=self._db_prov_ref)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            mapped = id_scope.remap[self.vtType] \
                if self.vtType in id_scope.remap else self.vtType
            id_remap[(mapped, self.db_id)] = fresh
            duplicate.db_id = fresh
        else:
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* into *new_obj*, honoring any 'prov_ref'
        converter registered in *trans_dict* for this class."""
        if new_obj is None:
            new_obj = DBIsPartOf()
        converters = trans_dict.get(new_obj.__class__.__name__, {})
        if 'prov_ref' in converters:
            new_obj.db_prov_ref = converters['prov_ref'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_prov_ref', None) is not None:
            new_obj.db_prov_ref = old_obj.db_prov_ref
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None, None), orphan=False, for_action=False):
        # Leaf object: only itself, tagged with the parent's (type, id).
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # Leaf object: nothing is ever tracked for deletion.
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    # -- 'prov_ref' scalar -------------------------------------------------
    def _get_prov_ref(self):
        return self._db_prov_ref
    def _set_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
        self.is_dirty = True
    db_prov_ref = property(_get_prov_ref, _set_prov_ref)
    def db_add_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
    def db_change_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
    def db_delete_prov_ref(self, prov_ref):
        self._db_prov_ref = None
class DBOpmWasDerivedFrom(object):
    """OPM 'wasDerivedFrom' edge: an effect artifact derived from a cause.

    Auto-generated domain object.  ``effect``, ``role`` and ``cause`` are
    single child objects; ``accounts`` and ``opm_times`` are unkeyed lists.
    ``db_deleted_*`` lists remember children removed since the last save.
    """
    vtType = 'opm_was_derived_from'
    def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None):
        self.db_deleted_effect = []
        self._db_effect = effect
        self.db_deleted_role = []
        self._db_role = role
        self.db_deleted_cause = []
        self._db_cause = cause
        self.db_deleted_accounts = []
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        self.db_deleted_opm_times = []
        if opm_times is None:
            self._db_opm_times = []
        else:
            self._db_opm_times = opm_times
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy with default arguments.
        return DBOpmWasDerivedFrom.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep-copies single children and both lists; with new_ids, a fresh
        # id is drawn from id_scope and recorded in id_remap.
        cp = DBOpmWasDerivedFrom()
        if self._db_effect is not None:
            cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
        if self._db_role is not None:
            cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
        if self._db_cause is not None:
            cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        if self._db_opm_times is None:
            cp._db_opm_times = []
        else:
            cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Converts old_obj (possibly of an earlier schema version) into
        # new_obj; trans_dict may carry per-field converter callables, and
        # unconverted children are updated via their own class's
        # update_version.
        if new_obj is None:
            new_obj = DBOpmWasDerivedFrom()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'effect' in class_dict:
            res = class_dict['effect'](old_obj, trans_dict)
            new_obj.db_effect = res
        elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
            obj = old_obj.db_effect
            new_obj.db_add_effect(DBOpmArtifactIdEffect.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
            for obj in old_obj.db_deleted_effect:
                n_obj = DBOpmArtifactIdEffect.update_version(obj, trans_dict)
                new_obj.db_deleted_effect.append(n_obj)
        if 'role' in class_dict:
            res = class_dict['role'](old_obj, trans_dict)
            new_obj.db_role = res
        elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
            obj = old_obj.db_role
            new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
            for obj in old_obj.db_deleted_role:
                n_obj = DBOpmRole.update_version(obj, trans_dict)
                new_obj.db_deleted_role.append(n_obj)
        if 'cause' in class_dict:
            res = class_dict['cause'](old_obj, trans_dict)
            new_obj.db_cause = res
        elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
            obj = old_obj.db_cause
            new_obj.db_add_cause(DBOpmArtifactIdCause.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
            for obj in old_obj.db_deleted_cause:
                n_obj = DBOpmArtifactIdCause.update_version(obj, trans_dict)
                new_obj.db_deleted_cause.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'opm_times' in class_dict:
            res = class_dict['opm_times'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_opm_time(obj)
        elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None:
            for obj in old_obj.db_opm_times:
                new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'):
            for obj in old_obj.db_deleted_opm_times:
                n_obj = DBOpmTime.update_version(obj, trans_dict)
                new_obj.db_deleted_opm_times.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Flattens the subtree into (object, parent_type, parent_id) triples.
        # NOTE(review): with orphan=True, list children are removed via
        # db_delete_account / db_delete_opm_time, which always raise for
        # these unkeyed lists -- confirm orphan is never used on this class.
        children = []
        if self._db_effect is not None:
            children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_effect = None
        if self._db_role is not None:
            children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_role = None
        if self._db_cause is not None:
            children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_cause = None
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        to_del = []
        for child in self.db_opm_times:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_opm_time(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Returns (and with remove=True, forgets) children deleted since the
        # last save.
        children = []
        children.extend(self.db_deleted_effect)
        children.extend(self.db_deleted_role)
        children.extend(self.db_deleted_cause)
        children.extend(self.db_deleted_accounts)
        children.extend(self.db_deleted_opm_times)
        if remove:
            self.db_deleted_effect = []
            self.db_deleted_role = []
            self.db_deleted_cause = []
            self.db_deleted_accounts = []
            self.db_deleted_opm_times = []
        return children
    def has_changes(self):
        # Dirty when this object itself or any child reports changes.
        if self.is_dirty:
            return True
        if self._db_effect is not None and self._db_effect.has_changes():
            return True
        if self._db_role is not None and self._db_role.has_changes():
            return True
        if self._db_cause is not None and self._db_cause.has_changes():
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        for child in self._db_opm_times:
            if child.has_changes():
                return True
        return False
    # --- Single-child accessors: property setters mark the object dirty;
    # db_add_/db_change_ assign without touching the flag; db_delete_
    # remembers the removed child in the deleted list unless is_new.
    def __get_db_effect(self):
        return self._db_effect
    def __set_db_effect(self, effect):
        self._db_effect = effect
        self.is_dirty = True
    db_effect = property(__get_db_effect, __set_db_effect)
    def db_add_effect(self, effect):
        self._db_effect = effect
    def db_change_effect(self, effect):
        self._db_effect = effect
    def db_delete_effect(self, effect):
        if not self.is_new:
            self.db_deleted_effect.append(self._db_effect)
        self._db_effect = None
    def __get_db_role(self):
        return self._db_role
    def __set_db_role(self, role):
        self._db_role = role
        self.is_dirty = True
    db_role = property(__get_db_role, __set_db_role)
    def db_add_role(self, role):
        self._db_role = role
    def db_change_role(self, role):
        self._db_role = role
    def db_delete_role(self, role):
        if not self.is_new:
            self.db_deleted_role.append(self._db_role)
        self._db_role = None
    def __get_db_cause(self):
        return self._db_cause
    def __set_db_cause(self, cause):
        self._db_cause = cause
        self.is_dirty = True
    db_cause = property(__get_db_cause, __set_db_cause)
    def db_add_cause(self, cause):
        self._db_cause = cause
    def db_change_cause(self, cause):
        self._db_cause = cause
    def db_delete_cause(self, cause):
        if not self.is_new:
            self.db_deleted_cause.append(self._db_cause)
        self._db_cause = None
    # --- 'accounts' is an unkeyed list: 'change' appends just like 'add',
    # deletion is unsupported (raises), and lookup by key returns None.
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        return None
    # --- 'opm_times' is an unkeyed list with the same semantics as above.
    def __get_db_opm_times(self):
        return self._db_opm_times
    def __set_db_opm_times(self, opm_times):
        self._db_opm_times = opm_times
        self.is_dirty = True
    db_opm_times = property(__get_db_opm_times, __set_db_opm_times)
    def db_get_opm_times(self):
        return self._db_opm_times
    def db_add_opm_time(self, opm_time):
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_change_opm_time(self, opm_time):
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_delete_opm_time(self, opm_time):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_opm_time(self, key):
        return None
class DBPluginData(object):
    """Opaque per-plugin payload ('plugin_data') with an id and a data blob."""

    vtType = 'plugin_data'

    def __init__(self, id=None, data=None):
        self._db_id = id
        self._db_data = data
        self.is_new = True
        self.is_dirty = True

    def __copy__(self):
        return DBPluginData.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object; with new_ids, allocate a fresh id from
        *id_scope* and record the translation in *id_remap*."""
        duplicate = DBPluginData(id=self._db_id, data=self._db_data)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            mapped = id_scope.remap[self.vtType] \
                if self.vtType in id_scope.remap else self.vtType
            id_remap[(mapped, self.db_id)] = fresh
            duplicate.db_id = fresh
        else:
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* into *new_obj*, applying any 'id'/'data'
        converters registered in *trans_dict* for this class."""
        if new_obj is None:
            new_obj = DBPluginData()
        converters = trans_dict.get(new_obj.__class__.__name__, {})
        for field in ('id', 'data'):
            if field in converters:
                setattr(new_obj, 'db_' + field, converters[field](old_obj, trans_dict))
            elif getattr(old_obj, 'db_' + field, None) is not None:
                setattr(new_obj, 'db_' + field, getattr(old_obj, 'db_' + field))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None, None), orphan=False, for_action=False):
        # Leaf object: only itself, tagged with the parent's (type, id).
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # Leaf object: nothing is ever tracked for deletion.
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    # -- 'id' scalar -------------------------------------------------------
    def _get_id(self):
        return self._db_id
    def _set_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    # -- 'data' scalar -----------------------------------------------------
    def _get_data(self):
        return self._db_data
    def _set_data(self, data):
        self._db_data = data
        self.is_dirty = True
    db_data = property(_get_data, _set_data)
    def db_add_data(self, data):
        self._db_data = data
    def db_change_data(self, data):
        self._db_data = data
    def db_delete_data(self, data):
        self._db_data = None

    def getPrimaryKey(self):
        """The object's primary key is its `id` field."""
        return self._db_id
class DBDelete(object):
    """Generated change-log record: marks the removal of one child object
    (``what``/``objectId``) from a parent (``parentObjType``/``parentObjId``).

    Follows the generated accessor convention used throughout this module:
    assigning through a ``db_<field>`` property raises the ``is_dirty``
    flag, while the ``db_add_/db_change_/db_delete_`` helpers write the
    backing slot directly without touching the flag.
    """
    vtType = 'delete'
    def __init__(self, id=None, what=None, objectId=None, parentObjId=None, parentObjType=None):
        self._db_id = id
        self._db_what = what
        self._db_objectId = objectId
        self._db_parentObjId = parentObjId
        self._db_parentObjType = parentObjType
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBDelete.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this record.  With ``new_ids`` set, draw a fresh id from
        ``id_scope``, log the mapping in ``id_remap``, and rewrite the
        object/parent references if they were already remapped."""
        cp = DBDelete(id=self._db_id,
                      what=self._db_what,
                      objectId=self._db_objectId,
                      parentObjId=self._db_parentObjId,
                      parentObjType=self._db_parentObjType)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            # some vtTypes share an id space; id_scope.remap names the key
            shared = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(shared, self.db_id)] = fresh
            cp.db_id = fresh
            obj_key = (self._db_what, self._db_objectId)
            if obj_key in id_remap:
                cp._db_objectId = id_remap[obj_key]
            par_key = (self._db_parentObjType, self._db_parentObjId)
            if par_key in id_remap:
                cp._db_parentObjId = id_remap[par_key]
        else:
            # a plain copy keeps the original bookkeeping flags
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Fill ``new_obj`` (created when None) from ``old_obj``, routing
        each field through any translator registered under this class's
        name in ``trans_dict``; untranslated non-None fields copy over."""
        if new_obj is None:
            new_obj = DBDelete()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        for field in ('id', 'what', 'objectId', 'parentObjId', 'parentObjType'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Leaf object: the only child entry is the record itself."""
        parent_type, parent_id = parent
        return [(self, parent_type, parent_id)]
    def db_deleted_children(self, remove=False):
        """Leaf object: nothing is ever queued for deletion."""
        return []
    def has_changes(self):
        """True when the record's own dirty flag is raised."""
        return bool(self.is_dirty)
    # --- id ---
    def _read_id(self):
        return self._db_id
    def _write_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(_read_id, _write_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # --- what ---
    def _read_what(self):
        return self._db_what
    def _write_what(self, what):
        self._db_what = what
        self.is_dirty = True
    db_what = property(_read_what, _write_what)
    def db_add_what(self, what):
        self._db_what = what
    def db_change_what(self, what):
        self._db_what = what
    def db_delete_what(self, what):
        self._db_what = None
    # --- objectId ---
    def _read_objectId(self):
        return self._db_objectId
    def _write_objectId(self, objectId):
        self._db_objectId = objectId
        self.is_dirty = True
    db_objectId = property(_read_objectId, _write_objectId)
    def db_add_objectId(self, objectId):
        self._db_objectId = objectId
    def db_change_objectId(self, objectId):
        self._db_objectId = objectId
    def db_delete_objectId(self, objectId):
        self._db_objectId = None
    # --- parentObjId ---
    def _read_parentObjId(self):
        return self._db_parentObjId
    def _write_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
        self.is_dirty = True
    db_parentObjId = property(_read_parentObjId, _write_parentObjId)
    def db_add_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
    def db_change_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
    def db_delete_parentObjId(self, parentObjId):
        self._db_parentObjId = None
    # --- parentObjType ---
    def _read_parentObjType(self):
        return self._db_parentObjType
    def _write_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
        self.is_dirty = True
    db_parentObjType = property(_read_parentObjType, _write_parentObjType)
    def db_add_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
    def db_change_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
    def db_delete_parentObjType(self, parentObjType):
        self._db_parentObjType = None
    def getPrimaryKey(self):
        """The record's primary key is its id."""
        return self._db_id
class DBVistrailVariable(object):
    """Generated persistence class for a named vistrail-level variable.

    Stores the variable's value together with a uuid and a
    (package, module, namespace) triple -- presumably identifying the
    module type the value is bound to (confirm against callers).  Keyed
    by its name (see getPrimaryKey).
    """
    vtType = 'vistrailVariable'
    def __init__(self, name=None, uuid=None, package=None, module=None, namespace=None, value=None):
        self._db_name = name
        self._db_uuid = uuid
        self._db_package = package
        self._db_module = module
        self._db_namespace = namespace
        self._db_value = value
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBVistrailVariable.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Field-by-field copy; with new_ids, allocate a fresh id in
        # id_scope and record the (type, old_id) -> new_id mapping.
        cp = DBVistrailVariable(name=self._db_name,
                                uuid=self._db_uuid,
                                package=self._db_package,
                                module=self._db_module,
                                namespace=self._db_namespace,
                                value=self._db_value)
        # set new ids
        if new_ids:
            # NOTE(review): this class defines no db_id property, so this
            # branch would raise AttributeError if new_ids were ever used
            # here -- matches the generator's output; confirm it is unused.
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Translate old_obj into a current-version object, applying any
        # per-field translators registered in trans_dict for this class;
        # untranslated non-None fields are copied verbatim.
        if new_obj is None:
            new_obj = DBVistrailVariable()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'uuid' in class_dict:
            res = class_dict['uuid'](old_obj, trans_dict)
            new_obj.db_uuid = res
        elif hasattr(old_obj, 'db_uuid') and old_obj.db_uuid is not None:
            new_obj.db_uuid = old_obj.db_uuid
        if 'package' in class_dict:
            res = class_dict['package'](old_obj, trans_dict)
            new_obj.db_package = res
        elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
            new_obj.db_package = old_obj.db_package
        if 'module' in class_dict:
            res = class_dict['module'](old_obj, trans_dict)
            new_obj.db_module = res
        elif hasattr(old_obj, 'db_module') and old_obj.db_module is not None:
            new_obj.db_module = old_obj.db_module
        if 'namespace' in class_dict:
            res = class_dict['namespace'](old_obj, trans_dict)
            new_obj.db_namespace = res
        elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
            new_obj.db_namespace = old_obj.db_namespace
        if 'value' in class_dict:
            res = class_dict['value'](old_obj, trans_dict)
            new_obj.db_value = res
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            new_obj.db_value = old_obj.db_value
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only child entry is this object itself.
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        # Leaf object: nothing is ever queued for deletion.
        children = []
        return children
    def has_changes(self):
        # Only the object's own dirty flag matters; no child objects.
        if self.is_dirty:
            return True
        return False
    # --- generated accessors: property assignment raises is_dirty; the
    # add/change/delete helpers write the slot without touching it ---
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_uuid(self):
        return self._db_uuid
    def __set_db_uuid(self, uuid):
        self._db_uuid = uuid
        self.is_dirty = True
    db_uuid = property(__get_db_uuid, __set_db_uuid)
    def db_add_uuid(self, uuid):
        self._db_uuid = uuid
    def db_change_uuid(self, uuid):
        self._db_uuid = uuid
    def db_delete_uuid(self, uuid):
        self._db_uuid = None
    def __get_db_package(self):
        return self._db_package
    def __set_db_package(self, package):
        self._db_package = package
        self.is_dirty = True
    db_package = property(__get_db_package, __set_db_package)
    def db_add_package(self, package):
        self._db_package = package
    def db_change_package(self, package):
        self._db_package = package
    def db_delete_package(self, package):
        self._db_package = None
    def __get_db_module(self):
        return self._db_module
    def __set_db_module(self, module):
        self._db_module = module
        self.is_dirty = True
    db_module = property(__get_db_module, __set_db_module)
    def db_add_module(self, module):
        self._db_module = module
    def db_change_module(self, module):
        self._db_module = module
    def db_delete_module(self, module):
        self._db_module = None
    def __get_db_namespace(self):
        return self._db_namespace
    def __set_db_namespace(self, namespace):
        self._db_namespace = namespace
        self.is_dirty = True
    db_namespace = property(__get_db_namespace, __set_db_namespace)
    def db_add_namespace(self, namespace):
        self._db_namespace = namespace
    def db_change_namespace(self, namespace):
        self._db_namespace = namespace
    def db_delete_namespace(self, namespace):
        self._db_namespace = None
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        self._db_value = None
    def getPrimaryKey(self):
        # Vistrail variables are keyed by name, not by a numeric id.
        return self._db_name
class DBOpmOverlaps(object):
    """Generated OPM (Open Provenance Model) 'overlaps' record: holds a
    plain, unkeyed list of account-id child objects.

    Because the children are non-keyed, db_delete_opm_account_id always
    raises and db_change_opm_account_id simply appends (generator
    convention for unkeyed collections).
    """
    vtType = 'opm_overlaps'
    def __init__(self, opm_account_ids=None):
        self.db_deleted_opm_account_ids = []
        if opm_account_ids is None:
            self._db_opm_account_ids = []
        else:
            self._db_opm_account_ids = opm_account_ids
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmOverlaps.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep-copies the child account ids; with new_ids, allocates a
        # fresh id for this object and records the remapping.
        cp = DBOpmOverlaps()
        if self._db_opm_account_ids is None:
            cp._db_opm_account_ids = []
        else:
            cp._db_opm_account_ids = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_account_ids]
        # set new ids
        if new_ids:
            # NOTE(review): this class defines no db_id property, so this
            # branch would raise AttributeError if new_ids were ever used
            # here -- matches the generator's output; confirm it is unused.
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Translate old_obj into a current-version object; each child
        # account id (live and deleted) is run through its own
        # update_version unless a translator is registered in trans_dict.
        if new_obj is None:
            new_obj = DBOpmOverlaps()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'opm_account_ids' in class_dict:
            res = class_dict['opm_account_ids'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_opm_account_id(obj)
        elif hasattr(old_obj, 'db_opm_account_ids') and old_obj.db_opm_account_ids is not None:
            for obj in old_obj.db_opm_account_ids:
                new_obj.db_add_opm_account_id(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_opm_account_ids') and hasattr(new_obj, 'db_deleted_opm_account_ids'):
            for obj in old_obj.db_deleted_opm_account_ids:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_opm_account_ids.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Collect children depth-first; with orphan, detach them from this
        # object after collection.
        children = []
        to_del = []
        for child in self.db_opm_account_ids:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_opm_account_id(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_opm_account_ids)
        if remove:
            self.db_deleted_opm_account_ids = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        for child in self._db_opm_account_ids:
            if child.has_changes():
                return True
        return False
    # --- generated accessors for the unkeyed child collection ---
    def __get_db_opm_account_ids(self):
        return self._db_opm_account_ids
    def __set_db_opm_account_ids(self, opm_account_ids):
        self._db_opm_account_ids = opm_account_ids
        self.is_dirty = True
    db_opm_account_ids = property(__get_db_opm_account_ids, __set_db_opm_account_ids)
    def db_get_opm_account_ids(self):
        return self._db_opm_account_ids
    def db_add_opm_account_id(self, opm_account_id):
        self.is_dirty = True
        self._db_opm_account_ids.append(opm_account_id)
    def db_change_opm_account_id(self, opm_account_id):
        # NOTE(review): for non-keyed collections 'change' appends just
        # like 'add' -- generator convention, not a bug.
        self.is_dirty = True
        self._db_opm_account_ids.append(opm_account_id)
    def db_delete_opm_account_id(self, opm_account_id):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_opm_account_id(self, key):
        # Non-keyed collection: lookup by key is unsupported.
        return None
class DBOpmWasTriggeredBy(object):
    """Generated OPM 'wasTriggeredBy' dependency edge.

    Links a cause process id to an effect process id with a role, plus
    unkeyed lists of account memberships and timestamps (see the child
    types used in update_version: DBOpmProcessIdEffect/Cause, DBOpmRole,
    DBOpmAccountId, DBOpmTime).  Each single-valued child keeps a
    db_deleted_* list so removals can be persisted later.
    """
    vtType = 'opm_was_triggered_by'
    def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None):
        self.db_deleted_effect = []
        self._db_effect = effect
        self.db_deleted_role = []
        self._db_role = role
        self.db_deleted_cause = []
        self._db_cause = cause
        self.db_deleted_accounts = []
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        self.db_deleted_opm_times = []
        if opm_times is None:
            self._db_opm_times = []
        else:
            self._db_opm_times = opm_times
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmWasTriggeredBy.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep-copies every child object; with new_ids, allocates a fresh
        # id for this edge and records the remapping.
        cp = DBOpmWasTriggeredBy()
        if self._db_effect is not None:
            cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
        if self._db_role is not None:
            cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
        if self._db_cause is not None:
            cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        if self._db_opm_times is None:
            cp._db_opm_times = []
        else:
            cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times]
        # set new ids
        if new_ids:
            # NOTE(review): this class defines no db_id property, so this
            # branch would raise AttributeError if new_ids were ever used
            # here -- matches the generator's output; confirm it is unused.
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Translate old_obj field by field, recursing into every child
        # object (and each db_deleted_* list) via its own update_version.
        if new_obj is None:
            new_obj = DBOpmWasTriggeredBy()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'effect' in class_dict:
            res = class_dict['effect'](old_obj, trans_dict)
            new_obj.db_effect = res
        elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
            obj = old_obj.db_effect
            new_obj.db_add_effect(DBOpmProcessIdEffect.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
            for obj in old_obj.db_deleted_effect:
                n_obj = DBOpmProcessIdEffect.update_version(obj, trans_dict)
                new_obj.db_deleted_effect.append(n_obj)
        if 'role' in class_dict:
            res = class_dict['role'](old_obj, trans_dict)
            new_obj.db_role = res
        elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
            obj = old_obj.db_role
            new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
            for obj in old_obj.db_deleted_role:
                n_obj = DBOpmRole.update_version(obj, trans_dict)
                new_obj.db_deleted_role.append(n_obj)
        if 'cause' in class_dict:
            res = class_dict['cause'](old_obj, trans_dict)
            new_obj.db_cause = res
        elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
            obj = old_obj.db_cause
            new_obj.db_add_cause(DBOpmProcessIdCause.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
            for obj in old_obj.db_deleted_cause:
                n_obj = DBOpmProcessIdCause.update_version(obj, trans_dict)
                new_obj.db_deleted_cause.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'opm_times' in class_dict:
            res = class_dict['opm_times'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_opm_time(obj)
        elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None:
            for obj in old_obj.db_opm_times:
                new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'):
            for obj in old_obj.db_deleted_opm_times:
                n_obj = DBOpmTime.update_version(obj, trans_dict)
                new_obj.db_deleted_opm_times.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Collect children depth-first; with orphan, detach them from this
        # object after collection.
        children = []
        if self._db_effect is not None:
            children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_effect = None
        if self._db_role is not None:
            children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_role = None
        if self._db_cause is not None:
            children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_cause = None
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        to_del = []
        for child in self.db_opm_times:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_opm_time(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Gather every queued deletion; with remove, clear the queues.
        children = []
        children.extend(self.db_deleted_effect)
        children.extend(self.db_deleted_role)
        children.extend(self.db_deleted_cause)
        children.extend(self.db_deleted_accounts)
        children.extend(self.db_deleted_opm_times)
        if remove:
            self.db_deleted_effect = []
            self.db_deleted_role = []
            self.db_deleted_cause = []
            self.db_deleted_accounts = []
            self.db_deleted_opm_times = []
        return children
    def has_changes(self):
        # Dirty if this object or any reachable child is dirty.
        if self.is_dirty:
            return True
        if self._db_effect is not None and self._db_effect.has_changes():
            return True
        if self._db_role is not None and self._db_role.has_changes():
            return True
        if self._db_cause is not None and self._db_cause.has_changes():
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        for child in self._db_opm_times:
            if child.has_changes():
                return True
        return False
    # --- generated accessors: property assignment raises is_dirty; the
    # add/change/delete helpers manage the slot and deletion queues ---
    def __get_db_effect(self):
        return self._db_effect
    def __set_db_effect(self, effect):
        self._db_effect = effect
        self.is_dirty = True
    db_effect = property(__get_db_effect, __set_db_effect)
    def db_add_effect(self, effect):
        self._db_effect = effect
    def db_change_effect(self, effect):
        self._db_effect = effect
    def db_delete_effect(self, effect):
        # Only persisted (non-new) children are queued for DB deletion.
        if not self.is_new:
            self.db_deleted_effect.append(self._db_effect)
        self._db_effect = None
    def __get_db_role(self):
        return self._db_role
    def __set_db_role(self, role):
        self._db_role = role
        self.is_dirty = True
    db_role = property(__get_db_role, __set_db_role)
    def db_add_role(self, role):
        self._db_role = role
    def db_change_role(self, role):
        self._db_role = role
    def db_delete_role(self, role):
        if not self.is_new:
            self.db_deleted_role.append(self._db_role)
        self._db_role = None
    def __get_db_cause(self):
        return self._db_cause
    def __set_db_cause(self, cause):
        self._db_cause = cause
        self.is_dirty = True
    db_cause = property(__get_db_cause, __set_db_cause)
    def db_add_cause(self, cause):
        self._db_cause = cause
    def db_change_cause(self, cause):
        self._db_cause = cause
    def db_delete_cause(self, cause):
        if not self.is_new:
            self.db_deleted_cause.append(self._db_cause)
        self._db_cause = None
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        # NOTE(review): for non-keyed collections 'change' appends just
        # like 'add' -- generator convention, not a bug.
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        # Non-keyed collection: lookup by key is unsupported.
        return None
    def __get_db_opm_times(self):
        return self._db_opm_times
    def __set_db_opm_times(self, opm_times):
        self._db_opm_times = opm_times
        self.is_dirty = True
    db_opm_times = property(__get_db_opm_times, __set_db_opm_times)
    def db_get_opm_times(self):
        return self._db_opm_times
    def db_add_opm_time(self, opm_time):
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_change_opm_time(self, opm_time):
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_delete_opm_time(self, opm_time):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_opm_time(self, key):
        return None
class DBModuleDescriptor(object):
    """Generated persistence class describing a module type.

    Identified by (name, package, namespace) plus version strings and an
    optional base_descriptor_id pointing at another module_descriptor.
    Child portSpecs are kept in a list plus two lookup indexes -- by id
    and by (name, type) -- which every mutator keeps in lock-step with
    the list.
    """
    vtType = 'module_descriptor'
    def __init__(self, id=None, name=None, package=None, namespace=None, package_version=None, version=None, base_descriptor_id=None, portSpecs=None):
        self._db_id = id
        self._db_name = name
        self._db_package = package
        self._db_namespace = namespace
        self._db_package_version = package_version
        self._db_version = version
        self._db_base_descriptor_id = base_descriptor_id
        self.db_deleted_portSpecs = []
        self.db_portSpecs_id_index = {}
        self.db_portSpecs_name_index = {}
        if portSpecs is None:
            self._db_portSpecs = []
        else:
            self._db_portSpecs = portSpecs
            # seed both lookup indexes from the supplied list
            for v in self._db_portSpecs:
                self.db_portSpecs_id_index[v.db_id] = v
                self.db_portSpecs_name_index[(v.db_name,v.db_type)] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBModuleDescriptor.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep-copies portSpecs and rebuilds both indexes; with new_ids,
        # allocates a fresh id and follows any remapping already recorded
        # for the referenced base descriptor.
        cp = DBModuleDescriptor(id=self._db_id,
                                name=self._db_name,
                                package=self._db_package,
                                namespace=self._db_namespace,
                                package_version=self._db_package_version,
                                version=self._db_version,
                                base_descriptor_id=self._db_base_descriptor_id)
        if self._db_portSpecs is None:
            cp._db_portSpecs = []
        else:
            cp._db_portSpecs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_portSpecs]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_base_descriptor_id') and ('module_descriptor', self._db_base_descriptor_id) in id_remap:
                cp._db_base_descriptor_id = id_remap[('module_descriptor', self._db_base_descriptor_id)]
        # recreate indices and set flags
        cp.db_portSpecs_id_index = dict((v.db_id, v) for v in cp._db_portSpecs)
        cp.db_portSpecs_name_index = dict(((v.db_name,v.db_type), v) for v in cp._db_portSpecs)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Translate old_obj field by field; portSpecs (live and deleted)
        # recurse through DBPortSpec.update_version.
        if new_obj is None:
            new_obj = DBModuleDescriptor()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'package' in class_dict:
            res = class_dict['package'](old_obj, trans_dict)
            new_obj.db_package = res
        elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
            new_obj.db_package = old_obj.db_package
        if 'namespace' in class_dict:
            res = class_dict['namespace'](old_obj, trans_dict)
            new_obj.db_namespace = res
        elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
            new_obj.db_namespace = old_obj.db_namespace
        if 'package_version' in class_dict:
            res = class_dict['package_version'](old_obj, trans_dict)
            new_obj.db_package_version = res
        elif hasattr(old_obj, 'db_package_version') and old_obj.db_package_version is not None:
            new_obj.db_package_version = old_obj.db_package_version
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'base_descriptor_id' in class_dict:
            res = class_dict['base_descriptor_id'](old_obj, trans_dict)
            new_obj.db_base_descriptor_id = res
        elif hasattr(old_obj, 'db_base_descriptor_id') and old_obj.db_base_descriptor_id is not None:
            new_obj.db_base_descriptor_id = old_obj.db_base_descriptor_id
        if 'portSpecs' in class_dict:
            res = class_dict['portSpecs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_portSpec(obj)
        elif hasattr(old_obj, 'db_portSpecs') and old_obj.db_portSpecs is not None:
            for obj in old_obj.db_portSpecs:
                new_obj.db_add_portSpec(DBPortSpec.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_portSpecs') and hasattr(new_obj, 'db_deleted_portSpecs'):
            for obj in old_obj.db_deleted_portSpecs:
                n_obj = DBPortSpec.update_version(obj, trans_dict)
                new_obj.db_deleted_portSpecs.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Collect children depth-first; with orphan, detach them from this
        # object after collection.
        children = []
        to_del = []
        for child in self.db_portSpecs:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_portSpec(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_portSpecs)
        if remove:
            self.db_deleted_portSpecs = []
        return children
    def has_changes(self):
        # Dirty if this object or any portSpec child is dirty.
        if self.is_dirty:
            return True
        for child in self._db_portSpecs:
            if child.has_changes():
                return True
        return False
    # --- generated accessors: property assignment raises is_dirty; the
    # add/change/delete helpers write the slot without touching it ---
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_package(self):
        return self._db_package
    def __set_db_package(self, package):
        self._db_package = package
        self.is_dirty = True
    db_package = property(__get_db_package, __set_db_package)
    def db_add_package(self, package):
        self._db_package = package
    def db_change_package(self, package):
        self._db_package = package
    def db_delete_package(self, package):
        self._db_package = None
    def __get_db_namespace(self):
        return self._db_namespace
    def __set_db_namespace(self, namespace):
        self._db_namespace = namespace
        self.is_dirty = True
    db_namespace = property(__get_db_namespace, __set_db_namespace)
    def db_add_namespace(self, namespace):
        self._db_namespace = namespace
    def db_change_namespace(self, namespace):
        self._db_namespace = namespace
    def db_delete_namespace(self, namespace):
        self._db_namespace = None
    def __get_db_package_version(self):
        return self._db_package_version
    def __set_db_package_version(self, package_version):
        self._db_package_version = package_version
        self.is_dirty = True
    db_package_version = property(__get_db_package_version, __set_db_package_version)
    def db_add_package_version(self, package_version):
        self._db_package_version = package_version
    def db_change_package_version(self, package_version):
        self._db_package_version = package_version
    def db_delete_package_version(self, package_version):
        self._db_package_version = None
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    def __get_db_base_descriptor_id(self):
        return self._db_base_descriptor_id
    def __set_db_base_descriptor_id(self, base_descriptor_id):
        self._db_base_descriptor_id = base_descriptor_id
        self.is_dirty = True
    db_base_descriptor_id = property(__get_db_base_descriptor_id, __set_db_base_descriptor_id)
    def db_add_base_descriptor_id(self, base_descriptor_id):
        self._db_base_descriptor_id = base_descriptor_id
    def db_change_base_descriptor_id(self, base_descriptor_id):
        self._db_base_descriptor_id = base_descriptor_id
    def db_delete_base_descriptor_id(self, base_descriptor_id):
        self._db_base_descriptor_id = None
    def __get_db_portSpecs(self):
        return self._db_portSpecs
    def __set_db_portSpecs(self, portSpecs):
        self._db_portSpecs = portSpecs
        self.is_dirty = True
    db_portSpecs = property(__get_db_portSpecs, __set_db_portSpecs)
    def db_get_portSpecs(self):
        return self._db_portSpecs
    def db_add_portSpec(self, portSpec):
        self.is_dirty = True
        self._db_portSpecs.append(portSpec)
        self.db_portSpecs_id_index[portSpec.db_id] = portSpec
        self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
    def db_change_portSpec(self, portSpec):
        # Replace the entry with a matching db_id, or append if absent,
        # then refresh both indexes.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_portSpecs)):
            if self._db_portSpecs[i].db_id == portSpec.db_id:
                self._db_portSpecs[i] = portSpec
                found = True
                break
        if not found:
            self._db_portSpecs.append(portSpec)
        self.db_portSpecs_id_index[portSpec.db_id] = portSpec
        self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
    def db_delete_portSpec(self, portSpec):
        # NOTE(review): the index deletions below run even if no matching
        # list entry was found, so a portSpec absent from the indexes
        # raises KeyError here -- matches the generator's output.
        self.is_dirty = True
        for i in xrange(len(self._db_portSpecs)):
            if self._db_portSpecs[i].db_id == portSpec.db_id:
                if not self._db_portSpecs[i].is_new:
                    self.db_deleted_portSpecs.append(self._db_portSpecs[i])
                del self._db_portSpecs[i]
                break
        del self.db_portSpecs_id_index[portSpec.db_id]
        del self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)]
    def db_get_portSpec(self, key):
        # Linear scan by id (the id index offers the O(1) variant below).
        for i in xrange(len(self._db_portSpecs)):
            if self._db_portSpecs[i].db_id == key:
                return self._db_portSpecs[i]
        return None
    def db_get_portSpec_by_id(self, key):
        return self.db_portSpecs_id_index[key]
    def db_has_portSpec_with_id(self, key):
        return key in self.db_portSpecs_id_index
    def db_get_portSpec_by_name(self, key):
        # key is a (name, type) tuple.
        return self.db_portSpecs_name_index[key]
    def db_has_portSpec_with_name(self, key):
        return key in self.db_portSpecs_name_index
    def getPrimaryKey(self):
        return self._db_id
class DBTag(object):
    """Generated persistence class: a named label whose id is drawn from
    the 'action' id space (see the remap lookup in do_copy) -- presumably
    tagging a version/action; confirm against callers."""
    vtType = 'tag'
    def __init__(self, id=None, name=None):
        self._db_id = id
        self._db_name = name
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBTag.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBTag(id=self._db_id,
name=self._db_name)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_id') and ('action', self._db_id) in id_remap:
cp._db_id = id_remap[('action', self._db_id)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBTag()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def getPrimaryKey(self):
return self._db_id
class DBOpmRole(object):
    """Auto-generated database object holding a single OPM role value."""
    vtType = 'opm_role'

    def __init__(self, value=None):
        self._db_value = value
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmRole.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this role.

        With new_ids, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id pair is recorded in id_remap; otherwise
        the persistence flags are carried over unchanged.
        """
        duplicate = DBOpmRole(value=self._db_value)
        if new_ids:
            fresh_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                remap_key = (id_scope.remap[self.vtType], self.db_id)
            else:
                remap_key = (self.vtType, self.db_id)
            id_remap[remap_key] = fresh_id
            duplicate.db_id = fresh_id
        else:
            # Plain copies keep the original's persistence flags.
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build (or fill) a DBOpmRole from old_obj, applying any field
        translators registered under this class's name in trans_dict."""
        target = new_obj if new_obj is not None else DBOpmRole()
        translators = trans_dict.get(target.__class__.__name__, {})
        if 'value' in translators:
            target.db_value = translators['value'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_value', None) is not None:
            target.db_value = old_obj.db_value
        target.is_new = old_obj.is_new
        target.is_dirty = old_obj.is_dirty
        return target

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: reports only itself under the given parent.
        parent_type, parent_id = parent
        return [(self, parent_type, parent_id)]

    def db_deleted_children(self, remove=False):
        # Leaf object: nothing is ever queued for deletion.
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    def __get_db_value(self):
        return self._db_value

    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True

    db_value = property(__get_db_value, __set_db_value)

    def db_add_value(self, value):
        self._db_value = value

    def db_change_value(self, value):
        self._db_value = value

    def db_delete_value(self, value):
        self._db_value = None
class DBProvDocument(object):
    """Auto-generated database object for a PROV provenance document.

    Aggregates prov_entitys, prov_activitys, prov_agents and vt_connections
    (keyed collections, each with a parallel db_id -> object index) plus
    prov_usages, prov_generations and prov_associations (plain, non-keyed
    lists).  The is_dirty / is_new flags drive persistence write-back.
    """
    vtType = 'prov_document'
    def __init__(self, prov_entitys=None, prov_activitys=None, prov_agents=None, vt_connections=None, prov_usages=None, prov_generations=None, prov_associations=None):
        # Keyed collections also record removed, already-persisted children
        # in a db_deleted_* list so deletions can be written back.
        self.db_deleted_prov_entitys = []
        self.db_prov_entitys_id_index = {}
        if prov_entitys is None:
            self._db_prov_entitys = []
        else:
            self._db_prov_entitys = prov_entitys
            for v in self._db_prov_entitys:
                self.db_prov_entitys_id_index[v.db_id] = v
        self.db_deleted_prov_activitys = []
        self.db_prov_activitys_id_index = {}
        if prov_activitys is None:
            self._db_prov_activitys = []
        else:
            self._db_prov_activitys = prov_activitys
            for v in self._db_prov_activitys:
                self.db_prov_activitys_id_index[v.db_id] = v
        self.db_deleted_prov_agents = []
        self.db_prov_agents_id_index = {}
        if prov_agents is None:
            self._db_prov_agents = []
        else:
            self._db_prov_agents = prov_agents
            for v in self._db_prov_agents:
                self.db_prov_agents_id_index[v.db_id] = v
        self.db_deleted_vt_connections = []
        self.db_vt_connections_id_index = {}
        if vt_connections is None:
            self._db_vt_connections = []
        else:
            self._db_vt_connections = vt_connections
            for v in self._db_vt_connections:
                self.db_vt_connections_id_index[v.db_id] = v
        # Non-keyed collections: list only, no id index.
        self.db_deleted_prov_usages = []
        if prov_usages is None:
            self._db_prov_usages = []
        else:
            self._db_prov_usages = prov_usages
        self.db_deleted_prov_generations = []
        if prov_generations is None:
            self._db_prov_generations = []
        else:
            self._db_prov_generations = prov_generations
        self.db_deleted_prov_associations = []
        if prov_associations is None:
            self._db_prov_associations = []
        else:
            self._db_prov_associations = prov_associations
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBProvDocument.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this document (children are copied via their own
        do_copy).  With new_ids, a fresh id is drawn from id_scope and the
        remapping is recorded in id_remap.  Child id indexes are rebuilt
        from the copied children."""
        cp = DBProvDocument()
        if self._db_prov_entitys is None:
            cp._db_prov_entitys = []
        else:
            cp._db_prov_entitys = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_prov_entitys]
        if self._db_prov_activitys is None:
            cp._db_prov_activitys = []
        else:
            cp._db_prov_activitys = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_prov_activitys]
        if self._db_prov_agents is None:
            cp._db_prov_agents = []
        else:
            cp._db_prov_agents = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_prov_agents]
        if self._db_vt_connections is None:
            cp._db_vt_connections = []
        else:
            cp._db_vt_connections = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_vt_connections]
        if self._db_prov_usages is None:
            cp._db_prov_usages = []
        else:
            cp._db_prov_usages = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_prov_usages]
        if self._db_prov_generations is None:
            cp._db_prov_generations = []
        else:
            cp._db_prov_generations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_prov_generations]
        if self._db_prov_associations is None:
            cp._db_prov_associations = []
        else:
            cp._db_prov_associations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_prov_associations]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_prov_entitys_id_index = dict((v.db_id, v) for v in cp._db_prov_entitys)
        cp.db_prov_activitys_id_index = dict((v.db_id, v) for v in cp._db_prov_activitys)
        cp.db_prov_agents_id_index = dict((v.db_id, v) for v in cp._db_prov_agents)
        cp.db_vt_connections_id_index = dict((v.db_id, v) for v in cp._db_vt_connections)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Copy old_obj into new_obj (created if None), applying any
        per-field translator functions registered in trans_dict; children
        are converted recursively via their classes' update_version."""
        if new_obj is None:
            new_obj = DBProvDocument()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'prov_entitys' in class_dict:
            res = class_dict['prov_entitys'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_prov_entity(obj)
        elif hasattr(old_obj, 'db_prov_entitys') and old_obj.db_prov_entitys is not None:
            for obj in old_obj.db_prov_entitys:
                new_obj.db_add_prov_entity(DBProvEntity.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_entitys') and hasattr(new_obj, 'db_deleted_prov_entitys'):
            for obj in old_obj.db_deleted_prov_entitys:
                n_obj = DBProvEntity.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_entitys.append(n_obj)
        if 'prov_activitys' in class_dict:
            res = class_dict['prov_activitys'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_prov_activity(obj)
        elif hasattr(old_obj, 'db_prov_activitys') and old_obj.db_prov_activitys is not None:
            for obj in old_obj.db_prov_activitys:
                new_obj.db_add_prov_activity(DBProvActivity.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_activitys') and hasattr(new_obj, 'db_deleted_prov_activitys'):
            for obj in old_obj.db_deleted_prov_activitys:
                n_obj = DBProvActivity.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_activitys.append(n_obj)
        if 'prov_agents' in class_dict:
            res = class_dict['prov_agents'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_prov_agent(obj)
        elif hasattr(old_obj, 'db_prov_agents') and old_obj.db_prov_agents is not None:
            for obj in old_obj.db_prov_agents:
                new_obj.db_add_prov_agent(DBProvAgent.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_agents') and hasattr(new_obj, 'db_deleted_prov_agents'):
            for obj in old_obj.db_deleted_prov_agents:
                n_obj = DBProvAgent.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_agents.append(n_obj)
        if 'vt_connections' in class_dict:
            res = class_dict['vt_connections'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_vt_connection(obj)
        elif hasattr(old_obj, 'db_vt_connections') and old_obj.db_vt_connections is not None:
            for obj in old_obj.db_vt_connections:
                new_obj.db_add_vt_connection(DBVtConnection.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_vt_connections') and hasattr(new_obj, 'db_deleted_vt_connections'):
            for obj in old_obj.db_deleted_vt_connections:
                n_obj = DBVtConnection.update_version(obj, trans_dict)
                new_obj.db_deleted_vt_connections.append(n_obj)
        if 'prov_usages' in class_dict:
            res = class_dict['prov_usages'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_prov_usage(obj)
        elif hasattr(old_obj, 'db_prov_usages') and old_obj.db_prov_usages is not None:
            for obj in old_obj.db_prov_usages:
                new_obj.db_add_prov_usage(DBProvUsage.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_usages') and hasattr(new_obj, 'db_deleted_prov_usages'):
            for obj in old_obj.db_deleted_prov_usages:
                n_obj = DBProvUsage.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_usages.append(n_obj)
        if 'prov_generations' in class_dict:
            res = class_dict['prov_generations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_prov_generation(obj)
        elif hasattr(old_obj, 'db_prov_generations') and old_obj.db_prov_generations is not None:
            for obj in old_obj.db_prov_generations:
                new_obj.db_add_prov_generation(DBProvGeneration.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_generations') and hasattr(new_obj, 'db_deleted_prov_generations'):
            for obj in old_obj.db_deleted_prov_generations:
                n_obj = DBProvGeneration.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_generations.append(n_obj)
        if 'prov_associations' in class_dict:
            res = class_dict['prov_associations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_prov_association(obj)
        elif hasattr(old_obj, 'db_prov_associations') and old_obj.db_prov_associations is not None:
            for obj in old_obj.db_prov_associations:
                new_obj.db_add_prov_association(DBProvAssociation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_associations') and hasattr(new_obj, 'db_deleted_prov_associations'):
            for obj in old_obj.db_deleted_prov_associations:
                n_obj = DBProvAssociation.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_associations.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return a flat list of (object, parent_type, parent_id) tuples for
        all children plus this document itself.  With orphan=True, each
        child is removed from this document as it is collected."""
        children = []
        to_del = []
        for child in self.db_prov_entitys:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_prov_entity(child)
        to_del = []
        for child in self.db_prov_activitys:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_prov_activity(child)
        to_del = []
        for child in self.db_prov_agents:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_prov_agent(child)
        to_del = []
        for child in self.db_vt_connections:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_vt_connection(child)
        to_del = []
        for child in self.db_prov_usages:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_prov_usage(child)
        to_del = []
        for child in self.db_prov_generations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_prov_generation(child)
        to_del = []
        for child in self.db_prov_associations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_prov_association(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return all deleted children accumulated so far; with remove=True,
        also clear the deletion queues."""
        children = []
        children.extend(self.db_deleted_prov_entitys)
        children.extend(self.db_deleted_prov_activitys)
        children.extend(self.db_deleted_prov_agents)
        children.extend(self.db_deleted_vt_connections)
        children.extend(self.db_deleted_prov_usages)
        children.extend(self.db_deleted_prov_generations)
        children.extend(self.db_deleted_prov_associations)
        if remove:
            self.db_deleted_prov_entitys = []
            self.db_deleted_prov_activitys = []
            self.db_deleted_prov_agents = []
            self.db_deleted_vt_connections = []
            self.db_deleted_prov_usages = []
            self.db_deleted_prov_generations = []
            self.db_deleted_prov_associations = []
        return children
    def has_changes(self):
        # Dirty if this object or any child is dirty.
        if self.is_dirty:
            return True
        for child in self._db_prov_entitys:
            if child.has_changes():
                return True
        for child in self._db_prov_activitys:
            if child.has_changes():
                return True
        for child in self._db_prov_agents:
            if child.has_changes():
                return True
        for child in self._db_vt_connections:
            if child.has_changes():
                return True
        for child in self._db_prov_usages:
            if child.has_changes():
                return True
        for child in self._db_prov_generations:
            if child.has_changes():
                return True
        for child in self._db_prov_associations:
            if child.has_changes():
                return True
        return False
    # --- prov_entitys: keyed accessors (indexed by db_id) ---
    def __get_db_prov_entitys(self):
        return self._db_prov_entitys
    def __set_db_prov_entitys(self, prov_entitys):
        self._db_prov_entitys = prov_entitys
        self.is_dirty = True
    db_prov_entitys = property(__get_db_prov_entitys, __set_db_prov_entitys)
    def db_get_prov_entitys(self):
        return self._db_prov_entitys
    def db_add_prov_entity(self, prov_entity):
        self.is_dirty = True
        self._db_prov_entitys.append(prov_entity)
        self.db_prov_entitys_id_index[prov_entity.db_id] = prov_entity
    def db_change_prov_entity(self, prov_entity):
        # Replace by db_id; append if absent.  Index is refreshed.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_prov_entitys)):
            if self._db_prov_entitys[i].db_id == prov_entity.db_id:
                self._db_prov_entitys[i] = prov_entity
                found = True
                break
        if not found:
            self._db_prov_entitys.append(prov_entity)
        self.db_prov_entitys_id_index[prov_entity.db_id] = prov_entity
    def db_delete_prov_entity(self, prov_entity):
        # Already-persisted children are queued for write-back deletion.
        self.is_dirty = True
        for i in xrange(len(self._db_prov_entitys)):
            if self._db_prov_entitys[i].db_id == prov_entity.db_id:
                if not self._db_prov_entitys[i].is_new:
                    self.db_deleted_prov_entitys.append(self._db_prov_entitys[i])
                del self._db_prov_entitys[i]
                break
        del self.db_prov_entitys_id_index[prov_entity.db_id]
    def db_get_prov_entity(self, key):
        # Linear scan by db_id; returns None when absent.
        for i in xrange(len(self._db_prov_entitys)):
            if self._db_prov_entitys[i].db_id == key:
                return self._db_prov_entitys[i]
        return None
    def db_get_prov_entity_by_id(self, key):
        return self.db_prov_entitys_id_index[key]
    def db_has_prov_entity_with_id(self, key):
        return key in self.db_prov_entitys_id_index
    # --- prov_activitys: keyed accessors (indexed by db_id) ---
    def __get_db_prov_activitys(self):
        return self._db_prov_activitys
    def __set_db_prov_activitys(self, prov_activitys):
        self._db_prov_activitys = prov_activitys
        self.is_dirty = True
    db_prov_activitys = property(__get_db_prov_activitys, __set_db_prov_activitys)
    def db_get_prov_activitys(self):
        return self._db_prov_activitys
    def db_add_prov_activity(self, prov_activity):
        self.is_dirty = True
        self._db_prov_activitys.append(prov_activity)
        self.db_prov_activitys_id_index[prov_activity.db_id] = prov_activity
    def db_change_prov_activity(self, prov_activity):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_prov_activitys)):
            if self._db_prov_activitys[i].db_id == prov_activity.db_id:
                self._db_prov_activitys[i] = prov_activity
                found = True
                break
        if not found:
            self._db_prov_activitys.append(prov_activity)
        self.db_prov_activitys_id_index[prov_activity.db_id] = prov_activity
    def db_delete_prov_activity(self, prov_activity):
        self.is_dirty = True
        for i in xrange(len(self._db_prov_activitys)):
            if self._db_prov_activitys[i].db_id == prov_activity.db_id:
                if not self._db_prov_activitys[i].is_new:
                    self.db_deleted_prov_activitys.append(self._db_prov_activitys[i])
                del self._db_prov_activitys[i]
                break
        del self.db_prov_activitys_id_index[prov_activity.db_id]
    def db_get_prov_activity(self, key):
        for i in xrange(len(self._db_prov_activitys)):
            if self._db_prov_activitys[i].db_id == key:
                return self._db_prov_activitys[i]
        return None
    def db_get_prov_activity_by_id(self, key):
        return self.db_prov_activitys_id_index[key]
    def db_has_prov_activity_with_id(self, key):
        return key in self.db_prov_activitys_id_index
    # --- prov_agents: keyed accessors (indexed by db_id) ---
    def __get_db_prov_agents(self):
        return self._db_prov_agents
    def __set_db_prov_agents(self, prov_agents):
        self._db_prov_agents = prov_agents
        self.is_dirty = True
    db_prov_agents = property(__get_db_prov_agents, __set_db_prov_agents)
    def db_get_prov_agents(self):
        return self._db_prov_agents
    def db_add_prov_agent(self, prov_agent):
        self.is_dirty = True
        self._db_prov_agents.append(prov_agent)
        self.db_prov_agents_id_index[prov_agent.db_id] = prov_agent
    def db_change_prov_agent(self, prov_agent):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_prov_agents)):
            if self._db_prov_agents[i].db_id == prov_agent.db_id:
                self._db_prov_agents[i] = prov_agent
                found = True
                break
        if not found:
            self._db_prov_agents.append(prov_agent)
        self.db_prov_agents_id_index[prov_agent.db_id] = prov_agent
    def db_delete_prov_agent(self, prov_agent):
        self.is_dirty = True
        for i in xrange(len(self._db_prov_agents)):
            if self._db_prov_agents[i].db_id == prov_agent.db_id:
                if not self._db_prov_agents[i].is_new:
                    self.db_deleted_prov_agents.append(self._db_prov_agents[i])
                del self._db_prov_agents[i]
                break
        del self.db_prov_agents_id_index[prov_agent.db_id]
    def db_get_prov_agent(self, key):
        for i in xrange(len(self._db_prov_agents)):
            if self._db_prov_agents[i].db_id == key:
                return self._db_prov_agents[i]
        return None
    def db_get_prov_agent_by_id(self, key):
        return self.db_prov_agents_id_index[key]
    def db_has_prov_agent_with_id(self, key):
        return key in self.db_prov_agents_id_index
    # --- vt_connections: keyed accessors (indexed by db_id) ---
    def __get_db_vt_connections(self):
        return self._db_vt_connections
    def __set_db_vt_connections(self, vt_connections):
        self._db_vt_connections = vt_connections
        self.is_dirty = True
    db_vt_connections = property(__get_db_vt_connections, __set_db_vt_connections)
    def db_get_vt_connections(self):
        return self._db_vt_connections
    def db_add_vt_connection(self, vt_connection):
        self.is_dirty = True
        self._db_vt_connections.append(vt_connection)
        self.db_vt_connections_id_index[vt_connection.db_id] = vt_connection
    def db_change_vt_connection(self, vt_connection):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_vt_connections)):
            if self._db_vt_connections[i].db_id == vt_connection.db_id:
                self._db_vt_connections[i] = vt_connection
                found = True
                break
        if not found:
            self._db_vt_connections.append(vt_connection)
        self.db_vt_connections_id_index[vt_connection.db_id] = vt_connection
    def db_delete_vt_connection(self, vt_connection):
        self.is_dirty = True
        for i in xrange(len(self._db_vt_connections)):
            if self._db_vt_connections[i].db_id == vt_connection.db_id:
                if not self._db_vt_connections[i].is_new:
                    self.db_deleted_vt_connections.append(self._db_vt_connections[i])
                del self._db_vt_connections[i]
                break
        del self.db_vt_connections_id_index[vt_connection.db_id]
    def db_get_vt_connection(self, key):
        for i in xrange(len(self._db_vt_connections)):
            if self._db_vt_connections[i].db_id == key:
                return self._db_vt_connections[i]
        return None
    def db_get_vt_connection_by_id(self, key):
        return self.db_vt_connections_id_index[key]
    def db_has_vt_connection_with_id(self, key):
        return key in self.db_vt_connections_id_index
    # --- prov_usages: non-keyed list accessors ---
    def __get_db_prov_usages(self):
        return self._db_prov_usages
    def __set_db_prov_usages(self, prov_usages):
        self._db_prov_usages = prov_usages
        self.is_dirty = True
    db_prov_usages = property(__get_db_prov_usages, __set_db_prov_usages)
    def db_get_prov_usages(self):
        return self._db_prov_usages
    def db_add_prov_usage(self, prov_usage):
        self.is_dirty = True
        self._db_prov_usages.append(prov_usage)
    def db_change_prov_usage(self, prov_usage):
        # Non-keyed: "change" can only append.
        self.is_dirty = True
        self._db_prov_usages.append(prov_usage)
    def db_delete_prov_usage(self, prov_usage):
        # Non-keyed objects cannot be deleted individually.
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_prov_usage(self, key):
        # Non-keyed: lookup by key is unsupported.
        return None
    # --- prov_generations: non-keyed list accessors ---
    def __get_db_prov_generations(self):
        return self._db_prov_generations
    def __set_db_prov_generations(self, prov_generations):
        self._db_prov_generations = prov_generations
        self.is_dirty = True
    db_prov_generations = property(__get_db_prov_generations, __set_db_prov_generations)
    def db_get_prov_generations(self):
        return self._db_prov_generations
    def db_add_prov_generation(self, prov_generation):
        self.is_dirty = True
        self._db_prov_generations.append(prov_generation)
    def db_change_prov_generation(self, prov_generation):
        self.is_dirty = True
        self._db_prov_generations.append(prov_generation)
    def db_delete_prov_generation(self, prov_generation):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_prov_generation(self, key):
        return None
    # --- prov_associations: non-keyed list accessors ---
    def __get_db_prov_associations(self):
        return self._db_prov_associations
    def __set_db_prov_associations(self, prov_associations):
        self._db_prov_associations = prov_associations
        self.is_dirty = True
    db_prov_associations = property(__get_db_prov_associations, __set_db_prov_associations)
    def db_get_prov_associations(self):
        return self._db_prov_associations
    def db_add_prov_association(self, prov_association):
        self.is_dirty = True
        self._db_prov_associations.append(prov_association)
    def db_change_prov_association(self, prov_association):
        self.is_dirty = True
        self._db_prov_associations.append(prov_association)
    def db_delete_prov_association(self, prov_association):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_prov_association(self, key):
        return None
class DBOpmProcesses(object):
    """Auto-generated database object holding a keyed collection of OPM
    processes, with a db_id -> object index and a deletion queue for
    write-back."""
    vtType = 'opm_processes'
    def __init__(self, processs=None):
        self.db_deleted_processs = []
        self.db_processs_id_index = {}
        if processs is None:
            self._db_processs = []
        else:
            self._db_processs = processs
            for v in self._db_processs:
                self.db_processs_id_index[v.db_id] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmProcesses.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this collection; with new_ids, draw a fresh id from
        id_scope and record the remapping in id_remap.  The id index is
        rebuilt from the copied children."""
        cp = DBOpmProcesses()
        if self._db_processs is None:
            cp._db_processs = []
        else:
            cp._db_processs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_processs]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_processs_id_index = dict((v.db_id, v) for v in cp._db_processs)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Copy old_obj into new_obj (created if None), converting children
        via DBOpmProcess.update_version and applying any translators
        registered in trans_dict."""
        if new_obj is None:
            new_obj = DBOpmProcesses()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'processs' in class_dict:
            res = class_dict['processs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_process(obj)
        elif hasattr(old_obj, 'db_processs') and old_obj.db_processs is not None:
            for obj in old_obj.db_processs:
                new_obj.db_add_process(DBOpmProcess.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_processs') and hasattr(new_obj, 'db_deleted_processs'):
            for obj in old_obj.db_deleted_processs:
                n_obj = DBOpmProcess.update_version(obj, trans_dict)
                new_obj.db_deleted_processs.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return a flat list of (object, parent_type, parent_id) tuples for
        all children plus this object itself; with orphan=True, children
        are removed as they are collected."""
        children = []
        to_del = []
        for child in self.db_processs:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_process(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # With remove=True, also clears the deletion queue.
        children = []
        children.extend(self.db_deleted_processs)
        if remove:
            self.db_deleted_processs = []
        return children
    def has_changes(self):
        # Dirty if this object or any child process is dirty.
        if self.is_dirty:
            return True
        for child in self._db_processs:
            if child.has_changes():
                return True
        return False
    def __get_db_processs(self):
        return self._db_processs
    def __set_db_processs(self, processs):
        self._db_processs = processs
        self.is_dirty = True
    db_processs = property(__get_db_processs, __set_db_processs)
    def db_get_processs(self):
        return self._db_processs
    def db_add_process(self, process):
        self.is_dirty = True
        self._db_processs.append(process)
        self.db_processs_id_index[process.db_id] = process
    def db_change_process(self, process):
        # Replace by db_id; append if absent.  Index is refreshed.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_processs)):
            if self._db_processs[i].db_id == process.db_id:
                self._db_processs[i] = process
                found = True
                break
        if not found:
            self._db_processs.append(process)
        self.db_processs_id_index[process.db_id] = process
    def db_delete_process(self, process):
        # Already-persisted children are queued for write-back deletion.
        self.is_dirty = True
        for i in xrange(len(self._db_processs)):
            if self._db_processs[i].db_id == process.db_id:
                if not self._db_processs[i].is_new:
                    self.db_deleted_processs.append(self._db_processs[i])
                del self._db_processs[i]
                break
        del self.db_processs_id_index[process.db_id]
    def db_get_process(self, key):
        # Linear scan by db_id; returns None when absent.
        for i in xrange(len(self._db_processs)):
            if self._db_processs[i].db_id == key:
                return self._db_processs[i]
        return None
    def db_get_process_by_id(self, key):
        return self.db_processs_id_index[key]
    def db_has_process_with_id(self, key):
        return key in self.db_processs_id_index
class DBOpmAccountId(object):
    """Auto-generated database object referencing an OPM account by id."""
    vtType = 'opm_account_id'
    def __init__(self, id=None):
        self._db_id = id
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmAccountId.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this reference.

        With new_ids, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id pair is recorded in id_remap.  The
        referenced opm_account id follows any remap already present in
        id_remap.
        """
        cp = DBOpmAccountId(id=self._db_id)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # Fix: only consult id_remap when one was supplied.  The previous
        # unconditional membership test raised TypeError for plain copies
        # (copy.copy / __copy__), where id_remap defaults to None.
        if id_remap is not None and hasattr(self, 'db_id') and ('opm_account', self._db_id) in id_remap:
            cp._db_id = id_remap[('opm_account', self._db_id)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Copy old_obj into new_obj (created if None), applying any field
        translator functions registered in trans_dict."""
        if new_obj is None:
            new_obj = DBOpmAccountId()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: reports only itself.
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        children = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        return False
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
class DBPortSpecItem(object):
    """Auto-generated database object describing one item of a port
    specification (module/package/namespace reference plus label,
    default, values and entry_type fields)."""
    vtType = 'portSpecItem'
    def __init__(self, id=None, pos=None, module=None, package=None, namespace=None, label=None, default=None, values=None, entry_type=None):
        self._db_id = id
        self._db_pos = pos
        self._db_module = module
        self._db_package = package
        self._db_namespace = namespace
        self._db_label = label
        self._db_default = default
        self._db_values = values
        self._db_entry_type = entry_type
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBPortSpecItem.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a field-by-field copy.  With new_ids, a fresh id is drawn
        from id_scope and the (type, old_id) -> new_id pair is recorded in
        id_remap; otherwise the persistence flags are carried over."""
        cp = DBPortSpecItem(id=self._db_id,
                            pos=self._db_pos,
                            module=self._db_module,
                            package=self._db_package,
                            namespace=self._db_namespace,
                            label=self._db_label,
                            default=self._db_default,
                            values=self._db_values,
                            entry_type=self._db_entry_type)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Copy old_obj into new_obj (created if None), applying any
        per-field translator functions registered in trans_dict under
        this class's name; untranslated non-None fields copy straight
        across."""
        if new_obj is None:
            new_obj = DBPortSpecItem()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'pos' in class_dict:
            res = class_dict['pos'](old_obj, trans_dict)
            new_obj.db_pos = res
        elif hasattr(old_obj, 'db_pos') and old_obj.db_pos is not None:
            new_obj.db_pos = old_obj.db_pos
        if 'module' in class_dict:
            res = class_dict['module'](old_obj, trans_dict)
            new_obj.db_module = res
        elif hasattr(old_obj, 'db_module') and old_obj.db_module is not None:
            new_obj.db_module = old_obj.db_module
        if 'package' in class_dict:
            res = class_dict['package'](old_obj, trans_dict)
            new_obj.db_package = res
        elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
            new_obj.db_package = old_obj.db_package
        if 'namespace' in class_dict:
            res = class_dict['namespace'](old_obj, trans_dict)
            new_obj.db_namespace = res
        elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
            new_obj.db_namespace = old_obj.db_namespace
        if 'label' in class_dict:
            res = class_dict['label'](old_obj, trans_dict)
            new_obj.db_label = res
        elif hasattr(old_obj, 'db_label') and old_obj.db_label is not None:
            new_obj.db_label = old_obj.db_label
        if 'default' in class_dict:
            res = class_dict['default'](old_obj, trans_dict)
            new_obj.db_default = res
        elif hasattr(old_obj, 'db_default') and old_obj.db_default is not None:
            new_obj.db_default = old_obj.db_default
        if 'values' in class_dict:
            res = class_dict['values'](old_obj, trans_dict)
            new_obj.db_values = res
        elif hasattr(old_obj, 'db_values') and old_obj.db_values is not None:
            new_obj.db_values = old_obj.db_values
        if 'entry_type' in class_dict:
            res = class_dict['entry_type'](old_obj, trans_dict)
            new_obj.db_entry_type = res
        elif hasattr(old_obj, 'db_entry_type') and old_obj.db_entry_type is not None:
            new_obj.db_entry_type = old_obj.db_entry_type
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    # NOTE(review): the lines below are the tail of a generated DB
    # persistence class whose "class ..." header (and the start of its
    # update_version method) sits above this region; its persisted fields
    # are id, pos, module, package, namespace, label, default, values and
    # entry_type.
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: reports only itself, attributed to the given parent.
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        # No child collections, so nothing is ever deleted below here.
        children = []
        return children
    def has_changes(self):
        # Leaf object: the dirty flag alone decides.
        if self.is_dirty:
            return True
        return False
    # -- 'id' accessors: writing the property marks the object dirty; the
    #    add/change/delete helpers assign without touching the flag.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # -- 'pos' accessors (same generated pattern).
    def __get_db_pos(self):
        return self._db_pos
    def __set_db_pos(self, pos):
        self._db_pos = pos
        self.is_dirty = True
    db_pos = property(__get_db_pos, __set_db_pos)
    def db_add_pos(self, pos):
        self._db_pos = pos
    def db_change_pos(self, pos):
        self._db_pos = pos
    def db_delete_pos(self, pos):
        self._db_pos = None
    # -- 'module' accessors (same generated pattern).
    def __get_db_module(self):
        return self._db_module
    def __set_db_module(self, module):
        self._db_module = module
        self.is_dirty = True
    db_module = property(__get_db_module, __set_db_module)
    def db_add_module(self, module):
        self._db_module = module
    def db_change_module(self, module):
        self._db_module = module
    def db_delete_module(self, module):
        self._db_module = None
    # -- 'package' accessors (same generated pattern).
    def __get_db_package(self):
        return self._db_package
    def __set_db_package(self, package):
        self._db_package = package
        self.is_dirty = True
    db_package = property(__get_db_package, __set_db_package)
    def db_add_package(self, package):
        self._db_package = package
    def db_change_package(self, package):
        self._db_package = package
    def db_delete_package(self, package):
        self._db_package = None
    # -- 'namespace' accessors (same generated pattern).
    def __get_db_namespace(self):
        return self._db_namespace
    def __set_db_namespace(self, namespace):
        self._db_namespace = namespace
        self.is_dirty = True
    db_namespace = property(__get_db_namespace, __set_db_namespace)
    def db_add_namespace(self, namespace):
        self._db_namespace = namespace
    def db_change_namespace(self, namespace):
        self._db_namespace = namespace
    def db_delete_namespace(self, namespace):
        self._db_namespace = None
    # -- 'label' accessors (same generated pattern).
    def __get_db_label(self):
        return self._db_label
    def __set_db_label(self, label):
        self._db_label = label
        self.is_dirty = True
    db_label = property(__get_db_label, __set_db_label)
    def db_add_label(self, label):
        self._db_label = label
    def db_change_label(self, label):
        self._db_label = label
    def db_delete_label(self, label):
        self._db_label = None
    # -- 'default' accessors (same generated pattern).
    def __get_db_default(self):
        return self._db_default
    def __set_db_default(self, default):
        self._db_default = default
        self.is_dirty = True
    db_default = property(__get_db_default, __set_db_default)
    def db_add_default(self, default):
        self._db_default = default
    def db_change_default(self, default):
        self._db_default = default
    def db_delete_default(self, default):
        self._db_default = None
    # -- 'values' accessors (same generated pattern).
    def __get_db_values(self):
        return self._db_values
    def __set_db_values(self, values):
        self._db_values = values
        self.is_dirty = True
    db_values = property(__get_db_values, __set_db_values)
    def db_add_values(self, values):
        self._db_values = values
    def db_change_values(self, values):
        self._db_values = values
    def db_delete_values(self, values):
        self._db_values = None
    # -- 'entry_type' accessors (same generated pattern).
    def __get_db_entry_type(self):
        return self._db_entry_type
    def __set_db_entry_type(self, entry_type):
        self._db_entry_type = entry_type
        self.is_dirty = True
    db_entry_type = property(__get_db_entry_type, __set_db_entry_type)
    def db_add_entry_type(self, entry_type):
        self._db_entry_type = entry_type
    def db_change_entry_type(self, entry_type):
        self._db_entry_type = entry_type
    def db_delete_entry_type(self, entry_type):
        self._db_entry_type = None
    def getPrimaryKey(self):
        # Generated convention: the primary key is the id column.
        return self._db_id
def _install_db_accessors(cls):
    """Class decorator: attach the generated accessor surface for every
    name in ``cls._DB_FIELDS`` -- a ``db_<field>`` property whose setter
    marks the instance dirty, plus ``db_add_<field>`` /
    ``db_change_<field>`` (plain assignment) and ``db_delete_<field>``
    (reset to None) methods."""
    for _field in cls._DB_FIELDS:
        _attr = '_db_' + _field
        # Bind the attribute name as a default argument so each closure
        # keeps its own field instead of the loop's last one.
        def _get(self, _attr=_attr):
            return getattr(self, _attr)
        def _set(self, value, _attr=_attr):
            setattr(self, _attr, value)
            self.is_dirty = True
        def _assign(self, value, _attr=_attr):
            setattr(self, _attr, value)
        def _clear(self, value, _attr=_attr):
            setattr(self, _attr, None)
        setattr(cls, 'db_' + _field, property(_get, _set))
        setattr(cls, 'db_add_' + _field, _assign)
        setattr(cls, 'db_change_' + _field, _assign)
        setattr(cls, 'db_delete_' + _field, _clear)
    return cls

@_install_db_accessors
class DBMashupComponent(object):
    """Generated persistence object for one mashup component.

    Every persisted column lives in a ``_db_<field>`` slot; the matching
    ``db_<field>`` properties and add/change/delete helpers are attached
    by the ``_install_db_accessors`` decorator above instead of being
    spelled out field by field.
    """
    vtType = 'mashup_component'
    _DB_FIELDS = ('id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id',
                  'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal',
                  'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq',
                  'parent')

    def __init__(self, id=None, vtid=None, vttype=None, vtparent_type=None,
                 vtparent_id=None, vtpos=None, vtmid=None, pos=None,
                 type=None, val=None, minVal=None, maxVal=None,
                 stepSize=None, strvaluelist=None, widget=None, seq=None,
                 parent=None):
        # Store every constructor argument in its _db_ slot, in the same
        # order as _DB_FIELDS.
        for name, value in zip(self._DB_FIELDS,
                               (id, vtid, vttype, vtparent_type,
                                vtparent_id, vtpos, vtmid, pos, type, val,
                                minVal, maxVal, stepSize, strvaluelist,
                                widget, seq, parent)):
            setattr(self, '_db_' + name, value)
        # Fresh objects count as new and unsaved.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        # copy.copy() delegates to the generated do_copy.
        return DBMashupComponent.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a field-by-field copy; with ``new_ids`` allocate a
        fresh id from ``id_scope`` and record old->new in ``id_remap``."""
        cp = DBMashupComponent(**dict((name, getattr(self, '_db_' + name))
                                      for name in self._DB_FIELDS))
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        else:
            # Plain copies keep the original persistence flags.
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate ``old_obj`` into the current schema.

        ``trans_dict`` maps class names to per-field hooks; a hook wins
        over plain attribute copying, which only happens for fields that
        are present on ``old_obj`` and not None.
        """
        if new_obj is None:
            new_obj = DBMashupComponent()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        for name in DBMashupComponent._DB_FIELDS:
            db_attr = 'db_' + name
            if name in class_dict:
                setattr(new_obj, db_attr,
                        class_dict[name](old_obj, trans_dict))
            elif (hasattr(old_obj, db_attr)
                    and getattr(old_obj, db_attr) is not None):
                setattr(new_obj, db_attr, getattr(old_obj, db_attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: reports only itself under the given parent.
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # No child collections, so nothing is ever deleted below here.
        return []

    def has_changes(self):
        # Leaf object: the dirty flag alone decides.
        return bool(self.is_dirty)

    def getPrimaryKey(self):
        # Generated convention: the primary key is the id column.
        return self._db_id
class DBMashup(object):
    """Generated persistence object for a mashup: scalar columns plus a
    child collection of aliases kept in sync with an id -> alias index.

    Fixes over the generated original:
    * ``do_copy`` no longer assumes ``id_remap`` is a dict -- the
      ``('vistrail', vtid)`` remap lookup ran unconditionally and raised
      TypeError whenever ``id_remap`` was None, which is exactly what
      ``copy.copy(mashup)`` / ``do_copy()`` with defaults passed.  The
      lookup is now skipped when no remap mapping is given.
    * Py2-only ``xrange`` index loops replaced with ``enumerate`` (same
      semantics, also runs on Python 3).
    """
    vtType = 'mashup'
    # Scalar persisted columns; 'aliases' is handled separately below.
    _DB_FIELDS = ('id', 'name', 'version', 'type', 'vtid', 'layout',
                  'geometry', 'has_seq')

    def __init__(self, id=None, name=None, version=None, aliases=None,
                 type=None, vtid=None, layout=None, geometry=None,
                 has_seq=None):
        self._db_id = id
        self._db_name = name
        self._db_version = version
        # Aliases deleted since the last save, and the id lookup index.
        self.db_deleted_aliases = []
        self.db_aliases_id_index = {}
        if aliases is None:
            self._db_aliases = []
        else:
            self._db_aliases = aliases
        for alias in self._db_aliases:
            self.db_aliases_id_index[alias.db_id] = alias
        self._db_type = type
        self._db_vtid = vtid
        self._db_layout = layout
        self._db_geometry = geometry
        self._db_has_seq = has_seq
        # Fresh objects count as new and unsaved.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        # copy.copy() delegates to the generated do_copy.
        return DBMashup.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this mashup and (recursively) its aliases.

        With ``new_ids``, allocate a fresh id from ``id_scope`` and
        record the old->new mapping in ``id_remap``; an already-remapped
        vistrail id is also rewritten when present in ``id_remap``.
        """
        cp = DBMashup(id=self._db_id,
                      name=self._db_name,
                      version=self._db_version,
                      type=self._db_type,
                      vtid=self._db_vtid,
                      layout=self._db_layout,
                      geometry=self._db_geometry,
                      has_seq=self._db_has_seq)
        if self._db_aliases is None:
            cp._db_aliases = []
        else:
            cp._db_aliases = [a.do_copy(new_ids, id_scope, id_remap)
                              for a in self._db_aliases]
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # BUGFIX: guard against id_remap=None (the default) -- previously
        # this membership test raised TypeError for plain copies.
        if id_remap is not None and ('vistrail', self._db_vtid) in id_remap:
            cp._db_vtid = id_remap[('vistrail', self._db_vtid)]
        # Rebuild the id index over the copied aliases.
        cp.db_aliases_id_index = dict((a.db_id, a) for a in cp._db_aliases)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate ``old_obj`` into the current schema.

        Scalar fields use the per-field hooks in ``trans_dict`` when
        present, otherwise non-None values are copied across; aliases
        (and previously deleted aliases) are translated recursively via
        DBMashupAlias.update_version.
        """
        if new_obj is None:
            new_obj = DBMashup()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        for name in DBMashup._DB_FIELDS:
            db_attr = 'db_' + name
            if name in class_dict:
                setattr(new_obj, db_attr,
                        class_dict[name](old_obj, trans_dict))
            elif (hasattr(old_obj, db_attr)
                    and getattr(old_obj, db_attr) is not None):
                setattr(new_obj, db_attr, getattr(old_obj, db_attr))
        if 'aliases' in class_dict:
            for obj in class_dict['aliases'](old_obj, trans_dict):
                new_obj.db_add_alias(obj)
        elif hasattr(old_obj, 'db_aliases') and old_obj.db_aliases is not None:
            for obj in old_obj.db_aliases:
                new_obj.db_add_alias(DBMashupAlias.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_aliases') and hasattr(new_obj, 'db_deleted_aliases'):
            for obj in old_obj.db_deleted_aliases:
                new_obj.db_deleted_aliases.append(
                    DBMashupAlias.update_version(obj, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Flatten self plus all aliases into (obj, parent_type,
        parent_id) tuples; with ``orphan`` the aliases are detached."""
        children = []
        to_del = []
        for child in self.db_aliases:
            children.extend(child.db_children((self.vtType, self.db_id),
                                              orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_alias(child)
        children.append((self, parent[0], parent[1]))
        return children

    def db_deleted_children(self, remove=False):
        # Returns a snapshot of the deleted aliases, optionally clearing
        # the stored list.
        children = list(self.db_deleted_aliases)
        if remove:
            self.db_deleted_aliases = []
        return children

    def has_changes(self):
        if self.is_dirty:
            return True
        # Dirty if any child alias has pending changes.
        return any(child.has_changes() for child in self._db_aliases)

    # -- scalar accessors: writing a property marks the object dirty; the
    #    add/change helpers assign silently, delete resets to None.
    @property
    def db_id(self):
        return self._db_id

    @db_id.setter
    def db_id(self, id):
        self._db_id = id
        self.is_dirty = True

    def db_add_id(self, id):
        self._db_id = id

    def db_change_id(self, id):
        self._db_id = id

    def db_delete_id(self, id):
        self._db_id = None

    @property
    def db_name(self):
        return self._db_name

    @db_name.setter
    def db_name(self, name):
        self._db_name = name
        self.is_dirty = True

    def db_add_name(self, name):
        self._db_name = name

    def db_change_name(self, name):
        self._db_name = name

    def db_delete_name(self, name):
        self._db_name = None

    @property
    def db_version(self):
        return self._db_version

    @db_version.setter
    def db_version(self, version):
        self._db_version = version
        self.is_dirty = True

    def db_add_version(self, version):
        self._db_version = version

    def db_change_version(self, version):
        self._db_version = version

    def db_delete_version(self, version):
        self._db_version = None

    # -- alias collection accessors.
    @property
    def db_aliases(self):
        return self._db_aliases

    @db_aliases.setter
    def db_aliases(self, aliases):
        self._db_aliases = aliases
        self.is_dirty = True

    def db_get_aliases(self):
        return self._db_aliases

    def db_add_alias(self, alias):
        self.is_dirty = True
        self._db_aliases.append(alias)
        self.db_aliases_id_index[alias.db_id] = alias

    def db_change_alias(self, alias):
        """Replace the alias with the same id, or append it if absent."""
        self.is_dirty = True
        for i, existing in enumerate(self._db_aliases):
            if existing.db_id == alias.db_id:
                self._db_aliases[i] = alias
                break
        else:
            self._db_aliases.append(alias)
        self.db_aliases_id_index[alias.db_id] = alias

    def db_delete_alias(self, alias):
        """Remove the alias with the same id; aliases that were already
        persisted (not is_new) are kept in db_deleted_aliases.  Raises
        KeyError if the alias is not in the index (unchanged from the
        generated original)."""
        self.is_dirty = True
        for i, existing in enumerate(self._db_aliases):
            if existing.db_id == alias.db_id:
                if not existing.is_new:
                    self.db_deleted_aliases.append(existing)
                del self._db_aliases[i]
                break
        del self.db_aliases_id_index[alias.db_id]

    def db_get_alias(self, key):
        # Linear scan by id; returns None when no alias matches.
        for existing in self._db_aliases:
            if existing.db_id == key:
                return existing
        return None

    def db_get_alias_by_id(self, key):
        return self.db_aliases_id_index[key]

    def db_has_alias_with_id(self, key):
        return key in self.db_aliases_id_index

    @property
    def db_type(self):
        return self._db_type

    @db_type.setter
    def db_type(self, type):
        self._db_type = type
        self.is_dirty = True

    def db_add_type(self, type):
        self._db_type = type

    def db_change_type(self, type):
        self._db_type = type

    def db_delete_type(self, type):
        self._db_type = None

    @property
    def db_vtid(self):
        return self._db_vtid

    @db_vtid.setter
    def db_vtid(self, vtid):
        self._db_vtid = vtid
        self.is_dirty = True

    def db_add_vtid(self, vtid):
        self._db_vtid = vtid

    def db_change_vtid(self, vtid):
        self._db_vtid = vtid

    def db_delete_vtid(self, vtid):
        self._db_vtid = None

    @property
    def db_layout(self):
        return self._db_layout

    @db_layout.setter
    def db_layout(self, layout):
        self._db_layout = layout
        self.is_dirty = True

    def db_add_layout(self, layout):
        self._db_layout = layout

    def db_change_layout(self, layout):
        self._db_layout = layout

    def db_delete_layout(self, layout):
        self._db_layout = None

    @property
    def db_geometry(self):
        return self._db_geometry

    @db_geometry.setter
    def db_geometry(self, geometry):
        self._db_geometry = geometry
        self.is_dirty = True

    def db_add_geometry(self, geometry):
        self._db_geometry = geometry

    def db_change_geometry(self, geometry):
        self._db_geometry = geometry

    def db_delete_geometry(self, geometry):
        self._db_geometry = None

    @property
    def db_has_seq(self):
        return self._db_has_seq

    @db_has_seq.setter
    def db_has_seq(self, has_seq):
        self._db_has_seq = has_seq
        self.is_dirty = True

    def db_add_has_seq(self, has_seq):
        self._db_has_seq = has_seq

    def db_change_has_seq(self, has_seq):
        self._db_has_seq = has_seq

    def db_delete_has_seq(self, has_seq):
        self._db_has_seq = None

    def getPrimaryKey(self):
        # Generated convention: the primary key is the id column.
        return self._db_id
def _install_db_accessors(cls):
    """Class decorator: attach the generated accessor surface for every
    name in ``cls._DB_FIELDS`` -- a ``db_<field>`` property whose setter
    marks the instance dirty, plus ``db_add_<field>`` /
    ``db_change_<field>`` (plain assignment) and ``db_delete_<field>``
    (reset to None) methods."""
    for _field in cls._DB_FIELDS:
        _attr = '_db_' + _field
        # Bind the attribute name as a default argument so each closure
        # keeps its own field instead of the loop's last one.
        def _get(self, _attr=_attr):
            return getattr(self, _attr)
        def _set(self, value, _attr=_attr):
            setattr(self, _attr, value)
            self.is_dirty = True
        def _assign(self, value, _attr=_attr):
            setattr(self, _attr, value)
        def _clear(self, value, _attr=_attr):
            setattr(self, _attr, None)
        setattr(cls, 'db_' + _field, property(_get, _set))
        setattr(cls, 'db_add_' + _field, _assign)
        setattr(cls, 'db_change_' + _field, _assign)
        setattr(cls, 'db_delete_' + _field, _clear)
    return cls

@_install_db_accessors
class DBMachine(object):
    """Generated persistence record describing the machine an execution
    ran on (name, OS, architecture, processor, RAM).  Per-field
    accessors are attached by the decorator above.
    """
    vtType = 'machine'
    _DB_FIELDS = ('id', 'name', 'os', 'architecture', 'processor', 'ram')

    def __init__(self, id=None, name=None, os=None, architecture=None,
                 processor=None, ram=None):
        self._db_id = id
        self._db_name = name
        self._db_os = os
        self._db_architecture = architecture
        self._db_processor = processor
        self._db_ram = ram
        # Fresh objects count as new and unsaved.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        # copy.copy() delegates to the generated do_copy.
        return DBMachine.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a field-by-field copy; with ``new_ids`` allocate a
        fresh id from ``id_scope`` and record old->new in ``id_remap``."""
        cp = DBMachine(**dict((name, getattr(self, '_db_' + name))
                              for name in self._DB_FIELDS))
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # NOTE(review): kept from the generator output -- DBMachine never
        # defines db_vistrailId, so this branch is currently inert.
        if hasattr(self, 'db_vistrailId') and ('vistrail', self._db_vistrailId) in id_remap:
            cp._db_vistrailId = id_remap[('vistrail', self._db_vistrailId)]
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate ``old_obj`` into the current schema: a per-field
        hook in ``trans_dict`` wins; otherwise fields present on
        ``old_obj`` and not None are copied across."""
        if new_obj is None:
            new_obj = DBMachine()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        for name in DBMachine._DB_FIELDS:
            db_attr = 'db_' + name
            if name in class_dict:
                setattr(new_obj, db_attr,
                        class_dict[name](old_obj, trans_dict))
            elif (hasattr(old_obj, db_attr)
                    and getattr(old_obj, db_attr) is not None):
                setattr(new_obj, db_attr, getattr(old_obj, db_attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: reports only itself under the given parent.
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # No child collections, so nothing is ever deleted below here.
        return []

    def has_changes(self):
        # Leaf object: the dirty flag alone decides.
        return bool(self.is_dirty)

    def getPrimaryKey(self):
        # Generated convention: the primary key is the id column.
        return self._db_id
class DBConfigFloat(object):
    """Generated persistence object wrapping a single float configuration
    value."""
    vtType = 'config_float'

    def __init__(self, value=None):
        self._db_value = value
        # Fresh objects count as new and unsaved.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        # copy.copy() delegates to the generated do_copy.
        return DBConfigFloat.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy; with ``new_ids`` allocate a fresh id from
        ``id_scope`` and record old->new in ``id_remap``."""
        duplicate = DBConfigFloat(value=self._db_value)
        if new_ids:
            # NOTE(review): kept from the generator output -- this class
            # exposes no db_id accessor, so reading self.db_id here would
            # fail if the new-ids path were ever taken.
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            duplicate.db_id = new_id
        else:
            # Plain copies keep the original persistence flags.
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate ``old_obj``: a 'value' hook in ``trans_dict`` wins;
        otherwise a non-None value is copied across."""
        if new_obj is None:
            new_obj = DBConfigFloat()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        if 'value' in class_dict:
            new_obj.db_value = class_dict['value'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            new_obj.db_value = old_obj.db_value
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: reports only itself under the given parent.
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # No child collections, so nothing is ever deleted below here.
        return []

    def has_changes(self):
        # Leaf object: the dirty flag alone decides.
        return bool(self.is_dirty)

    # -- 'value' accessors: the property setter marks the object dirty;
    #    add/change assign silently, delete resets to None.
    @property
    def db_value(self):
        return self._db_value

    @db_value.setter
    def db_value(self, value):
        self._db_value = value
        self.is_dirty = True

    def db_add_value(self, value):
        self._db_value = value

    def db_change_value(self, value):
        self._db_value = value

    def db_delete_value(self, value):
        self._db_value = None
class DBOther(object):
vtType = 'other'
    def __init__(self, id=None, key=None, value=None):
        # Plain key/value annotation record; new instances start dirty.
        self._db_id = id
        self._db_key = key
        self._db_value = value
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to the generated do_copy.
        return DBOther.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Field-by-field copy; with new_ids, allocate a fresh id from
        # id_scope and record the old->new mapping in id_remap.
        cp = DBOther(id=self._db_id,
                     key=self._db_key,
                     value=self._db_value)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Translate old_obj into the current schema: a per-field hook in
        # trans_dict wins; otherwise non-None fields are copied across.
        if new_obj is None:
            new_obj = DBOther()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'key' in class_dict:
            res = class_dict['key'](old_obj, trans_dict)
            new_obj.db_key = res
        elif hasattr(old_obj, 'db_key') and old_obj.db_key is not None:
            new_obj.db_key = old_obj.db_key
        if 'value' in class_dict:
            res = class_dict['value'](old_obj, trans_dict)
            new_obj.db_value = res
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            new_obj.db_value = old_obj.db_value
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: reports only itself under the given parent.
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        # No child collections, so nothing is ever deleted below here.
        children = []
        return children
    def has_changes(self):
        # Leaf object: the dirty flag alone decides.
        if self.is_dirty:
            return True
        return False
    # -- 'id' accessors: property write marks the object dirty; the
    #    add/change/delete helpers assign without touching the flag.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # -- 'key' accessors (same generated pattern).
    def __get_db_key(self):
        return self._db_key
    def __set_db_key(self, key):
        self._db_key = key
        self.is_dirty = True
    db_key = property(__get_db_key, __set_db_key)
    def db_add_key(self, key):
        self._db_key = key
    def db_change_key(self, key):
        self._db_key = key
    def db_delete_key(self, key):
        self._db_key = None
    # -- 'value' accessors (same generated pattern; db_delete_value
    #    continues below this region).
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
def db_delete_value(self, value):
self._db_value = None
def getPrimaryKey(self):
return self._db_id
class DBRefProvActivity(object):
    """A by-id reference to a prov_activity object."""
    vtType = 'ref_prov_activity'
    def __init__(self, prov_ref=None):
        self._db_prov_ref = prov_ref
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBRefProvActivity.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this reference, following any remapped target id.

        Bug fix: the remap lookup below used to run unconditionally, so a
        plain copy (id_remap=None, the default used by __copy__) raised
        "TypeError: argument of type 'NoneType' is not iterable".  The
        lookup is now guarded by `id_remap is not None`.
        """
        cp = DBRefProvActivity(prov_ref=self._db_prov_ref)
        # set new ids
        if new_ids:
            # NOTE(review): this class defines no db_id property, so this
            # generated new_ids path looks broken/unreachable as written —
            # confirm before relying on it.
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # follow the remapped prov_activity id, but only when a remap table
        # was actually supplied
        if id_remap is not None and hasattr(self, 'db_prov_ref') and ('prov_activity', self._db_prov_ref) in id_remap:
            cp._db_prov_ref = id_remap[('prov_activity', self._db_prov_ref)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema version, applying any
        per-field translators registered in trans_dict."""
        if new_obj is None:
            new_obj = DBRefProvActivity()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'prov_ref' in class_dict:
            res = class_dict['prov_ref'](old_obj, trans_dict)
            new_obj.db_prov_ref = res
        elif hasattr(old_obj, 'db_prov_ref') and old_obj.db_prov_ref is not None:
            new_obj.db_prov_ref = old_obj.db_prov_ref
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return this leaf object as its own single child entry."""
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        """Leaf objects track no deleted children."""
        children = []
        return children
    def has_changes(self):
        """Return True when the object was modified after load/save."""
        if self.is_dirty:
            return True
        return False
    def __get_db_prov_ref(self):
        return self._db_prov_ref
    def __set_db_prov_ref(self, prov_ref):
        # property assignment marks the object dirty
        self._db_prov_ref = prov_ref
        self.is_dirty = True
    db_prov_ref = property(__get_db_prov_ref, __set_db_prov_ref)
    def db_add_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
    def db_change_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
    def db_delete_prov_ref(self, prov_ref):
        # argument is ignored; deletion just clears the slot
        self._db_prov_ref = None
class DBAbstraction(object):
    """A subworkflow (abstraction) module: identity/package metadata plus a
    location, a list of functions, and a list of annotations.

    Child collections are mirrored in lookup dicts (db_functions_id_index,
    db_annotations_id_index, db_annotations_key_index) that must be kept in
    sync with the underlying lists by the db_add_*/db_change_*/db_delete_*
    methods below.
    """
    vtType = 'abstraction'
    def __init__(self, id=None, cache=None, name=None, namespace=None, package=None, version=None, internal_version=None, location=None, functions=None, annotations=None):
        self._db_id = id
        self._db_cache = cache
        self._db_name = name
        self._db_namespace = namespace
        self._db_package = package
        self._db_version = version
        self._db_internal_version = internal_version
        self.db_deleted_location = []
        self._db_location = location
        self.db_deleted_functions = []
        self.db_functions_id_index = {}
        if functions is None:
            self._db_functions = []
        else:
            self._db_functions = functions
            # build the id -> function lookup for the supplied list
            for v in self._db_functions:
                self.db_functions_id_index[v.db_id] = v
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        self.db_annotations_key_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
            # annotations are indexed both by id and by key
            for v in self._db_annotations:
                self.db_annotations_id_index[v.db_id] = v
                self.db_annotations_key_index[v.db_key] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBAbstraction.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this abstraction and its children.

        When new_ids is true, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id mapping recorded in id_remap; otherwise
        the dirty/new flags are carried over.  The index dicts are rebuilt
        from the copied child lists.
        """
        cp = DBAbstraction(id=self._db_id,
                           cache=self._db_cache,
                           name=self._db_name,
                           namespace=self._db_namespace,
                           package=self._db_package,
                           version=self._db_version,
                           internal_version=self._db_internal_version)
        if self._db_location is not None:
            cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap)
        if self._db_functions is None:
            cp._db_functions = []
        else:
            cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema version.

        For each field, a translator registered in trans_dict wins;
        otherwise the old value (or child objects, recursively translated)
        is copied across.  Deleted-children lists are translated as well.
        """
        if new_obj is None:
            new_obj = DBAbstraction()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'cache' in class_dict:
            res = class_dict['cache'](old_obj, trans_dict)
            new_obj.db_cache = res
        elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None:
            new_obj.db_cache = old_obj.db_cache
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'namespace' in class_dict:
            res = class_dict['namespace'](old_obj, trans_dict)
            new_obj.db_namespace = res
        elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
            new_obj.db_namespace = old_obj.db_namespace
        if 'package' in class_dict:
            res = class_dict['package'](old_obj, trans_dict)
            new_obj.db_package = res
        elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
            new_obj.db_package = old_obj.db_package
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'internal_version' in class_dict:
            res = class_dict['internal_version'](old_obj, trans_dict)
            new_obj.db_internal_version = res
        elif hasattr(old_obj, 'db_internal_version') and old_obj.db_internal_version is not None:
            new_obj.db_internal_version = old_obj.db_internal_version
        if 'location' in class_dict:
            res = class_dict['location'](old_obj, trans_dict)
            new_obj.db_location = res
        elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None:
            obj = old_obj.db_location
            new_obj.db_add_location(DBLocation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'):
            for obj in old_obj.db_deleted_location:
                n_obj = DBLocation.update_version(obj, trans_dict)
                new_obj.db_deleted_location.append(n_obj)
        if 'functions' in class_dict:
            res = class_dict['functions'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_function(obj)
        elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None:
            for obj in old_obj.db_functions:
                new_obj.db_add_function(DBFunction.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
            for obj in old_obj.db_deleted_functions:
                n_obj = DBFunction.update_version(obj, trans_dict)
                new_obj.db_deleted_functions.append(n_obj)
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (obj, parent_type, parent_id) tuples for this object and
        all descendants; when orphan is true, children are detached from
        this object as they are collected."""
        children = []
        if self._db_location is not None:
            children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_location = None
        to_del = []
        for child in self.db_functions:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_function(child)
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return all tracked deleted children; when remove is true, the
        deleted-children lists are cleared afterwards."""
        children = []
        children.extend(self.db_deleted_location)
        children.extend(self.db_deleted_functions)
        children.extend(self.db_deleted_annotations)
        if remove:
            self.db_deleted_location = []
            self.db_deleted_functions = []
            self.db_deleted_annotations = []
        return children
    def has_changes(self):
        """Return True when this object or any child was modified."""
        if self.is_dirty:
            return True
        if self._db_location is not None and self._db_location.has_changes():
            return True
        for child in self._db_functions:
            if child.has_changes():
                return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        return False
    # --- scalar field accessors (property assignment marks dirty;
    # --- db_add_*/db_change_* do not, db_delete_* ignores its argument) ---
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_cache(self):
        return self._db_cache
    def __set_db_cache(self, cache):
        self._db_cache = cache
        self.is_dirty = True
    db_cache = property(__get_db_cache, __set_db_cache)
    def db_add_cache(self, cache):
        self._db_cache = cache
    def db_change_cache(self, cache):
        self._db_cache = cache
    def db_delete_cache(self, cache):
        self._db_cache = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_namespace(self):
        return self._db_namespace
    def __set_db_namespace(self, namespace):
        self._db_namespace = namespace
        self.is_dirty = True
    db_namespace = property(__get_db_namespace, __set_db_namespace)
    def db_add_namespace(self, namespace):
        self._db_namespace = namespace
    def db_change_namespace(self, namespace):
        self._db_namespace = namespace
    def db_delete_namespace(self, namespace):
        self._db_namespace = None
    def __get_db_package(self):
        return self._db_package
    def __set_db_package(self, package):
        self._db_package = package
        self.is_dirty = True
    db_package = property(__get_db_package, __set_db_package)
    def db_add_package(self, package):
        self._db_package = package
    def db_change_package(self, package):
        self._db_package = package
    def db_delete_package(self, package):
        self._db_package = None
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    def __get_db_internal_version(self):
        return self._db_internal_version
    def __set_db_internal_version(self, internal_version):
        self._db_internal_version = internal_version
        self.is_dirty = True
    db_internal_version = property(__get_db_internal_version, __set_db_internal_version)
    def db_add_internal_version(self, internal_version):
        self._db_internal_version = internal_version
    def db_change_internal_version(self, internal_version):
        self._db_internal_version = internal_version
    def db_delete_internal_version(self, internal_version):
        self._db_internal_version = None
    def __get_db_location(self):
        return self._db_location
    def __set_db_location(self, location):
        self._db_location = location
        self.is_dirty = True
    db_location = property(__get_db_location, __set_db_location)
    def db_add_location(self, location):
        self._db_location = location
    def db_change_location(self, location):
        self._db_location = location
    def db_delete_location(self, location):
        # a persisted location is remembered for later DB deletion
        if not self.is_new:
            self.db_deleted_location.append(self._db_location)
        self._db_location = None
    def __get_db_functions(self):
        return self._db_functions
    def __set_db_functions(self, functions):
        self._db_functions = functions
        self.is_dirty = True
    db_functions = property(__get_db_functions, __set_db_functions)
    def db_get_functions(self):
        return self._db_functions
    def db_add_function(self, function):
        self.is_dirty = True
        self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_change_function(self, function):
        """Replace the function with the same id, or append if absent."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == function.db_id:
                self._db_functions[i] = function
                found = True
                break
        if not found:
            self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_delete_function(self, function):
        """Remove the function with the matching id (raises KeyError when
        the id is not in the index)."""
        self.is_dirty = True
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == function.db_id:
                if not self._db_functions[i].is_new:
                    self.db_deleted_functions.append(self._db_functions[i])
                del self._db_functions[i]
                break
        del self.db_functions_id_index[function.db_id]
    def db_get_function(self, key):
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == key:
                return self._db_functions[i]
        return None
    def db_get_function_by_id(self, key):
        return self.db_functions_id_index[key]
    def db_has_function_with_id(self, key):
        return key in self.db_functions_id_index
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_change_annotation(self, annotation):
        """Replace the annotation with the same id, or append if absent."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_delete_annotation(self, annotation):
        """Remove the annotation with the matching id and drop both index
        entries (raises KeyError when absent)."""
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
        del self.db_annotations_key_index[annotation.db_key]
    def db_get_annotation(self, key):
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def db_get_annotation_by_key(self, key):
        return self.db_annotations_key_index[key]
    def db_has_annotation_with_key(self, key):
        return key in self.db_annotations_key_index
    def getPrimaryKey(self):
        """Return the primary-key value (the id field)."""
        return self._db_id
class DBProvAgent(object):
    """Provenance agent record: machine/agent metadata as flat scalar
    fields with the generated db_* accessor interface."""
    vtType = 'prov_agent'
    def __init__(self, id=None, vt_id=None, prov_type=None, prov_label=None, vt_machine_os=None, vt_machine_architecture=None, vt_machine_processor=None, vt_machine_ram=None):
        self._db_id = id
        self._db_vt_id = vt_id
        self._db_prov_type = prov_type
        self._db_prov_label = prov_label
        self._db_vt_machine_os = vt_machine_os
        self._db_vt_machine_architecture = vt_machine_architecture
        self._db_vt_machine_processor = vt_machine_processor
        self._db_vt_machine_ram = vt_machine_ram
        # a freshly constructed object is both unsaved and modified
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBProvAgent.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this agent.

        When new_ids is true, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id mapping recorded in id_remap; otherwise
        the dirty/new flags are carried over unchanged.
        """
        dup = DBProvAgent(id=self._db_id,
                          vt_id=self._db_vt_id,
                          prov_type=self._db_prov_type,
                          prov_label=self._db_prov_label,
                          vt_machine_os=self._db_vt_machine_os,
                          vt_machine_architecture=self._db_vt_machine_architecture,
                          vt_machine_processor=self._db_vt_machine_processor,
                          vt_machine_ram=self._db_vt_machine_ram)
        if new_ids:
            fresh_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                source_type = id_scope.remap[self.vtType]
            else:
                source_type = self.vtType
            id_remap[(source_type, self.db_id)] = fresh_id
            dup.db_id = fresh_id
        else:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema version; a translator
        registered in trans_dict wins over plain field copying."""
        if new_obj is None:
            new_obj = DBProvAgent()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        for field in ('id', 'vt_id', 'prov_type', 'prov_label',
                      'vt_machine_os', 'vt_machine_architecture',
                      'vt_machine_processor', 'vt_machine_ram'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return this object as a single (obj, parent_type, parent_id)
        entry; leaf objects have no nested children."""
        parent_type, parent_id = parent
        return [(self, parent_type, parent_id)]
    def db_deleted_children(self, remove=False):
        """Leaf objects track no deleted children."""
        return []
    def has_changes(self):
        """Return True when the object was modified after load/save."""
        return True if self.is_dirty else False
    # --- scalar field accessors (property assignment marks dirty;
    # --- db_add_*/db_change_* do not, db_delete_* ignores its argument) ---
    def _get_id(self):
        return self._db_id
    def _set_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def _get_vt_id(self):
        return self._db_vt_id
    def _set_vt_id(self, vt_id):
        self._db_vt_id = vt_id
        self.is_dirty = True
    db_vt_id = property(_get_vt_id, _set_vt_id)
    def db_add_vt_id(self, vt_id):
        self._db_vt_id = vt_id
    def db_change_vt_id(self, vt_id):
        self._db_vt_id = vt_id
    def db_delete_vt_id(self, vt_id):
        self._db_vt_id = None
    def _get_prov_type(self):
        return self._db_prov_type
    def _set_prov_type(self, prov_type):
        self._db_prov_type = prov_type
        self.is_dirty = True
    db_prov_type = property(_get_prov_type, _set_prov_type)
    def db_add_prov_type(self, prov_type):
        self._db_prov_type = prov_type
    def db_change_prov_type(self, prov_type):
        self._db_prov_type = prov_type
    def db_delete_prov_type(self, prov_type):
        self._db_prov_type = None
    def _get_prov_label(self):
        return self._db_prov_label
    def _set_prov_label(self, prov_label):
        self._db_prov_label = prov_label
        self.is_dirty = True
    db_prov_label = property(_get_prov_label, _set_prov_label)
    def db_add_prov_label(self, prov_label):
        self._db_prov_label = prov_label
    def db_change_prov_label(self, prov_label):
        self._db_prov_label = prov_label
    def db_delete_prov_label(self, prov_label):
        self._db_prov_label = None
    def _get_vt_machine_os(self):
        return self._db_vt_machine_os
    def _set_vt_machine_os(self, vt_machine_os):
        self._db_vt_machine_os = vt_machine_os
        self.is_dirty = True
    db_vt_machine_os = property(_get_vt_machine_os, _set_vt_machine_os)
    def db_add_vt_machine_os(self, vt_machine_os):
        self._db_vt_machine_os = vt_machine_os
    def db_change_vt_machine_os(self, vt_machine_os):
        self._db_vt_machine_os = vt_machine_os
    def db_delete_vt_machine_os(self, vt_machine_os):
        self._db_vt_machine_os = None
    def _get_vt_machine_architecture(self):
        return self._db_vt_machine_architecture
    def _set_vt_machine_architecture(self, vt_machine_architecture):
        self._db_vt_machine_architecture = vt_machine_architecture
        self.is_dirty = True
    db_vt_machine_architecture = property(_get_vt_machine_architecture, _set_vt_machine_architecture)
    def db_add_vt_machine_architecture(self, vt_machine_architecture):
        self._db_vt_machine_architecture = vt_machine_architecture
    def db_change_vt_machine_architecture(self, vt_machine_architecture):
        self._db_vt_machine_architecture = vt_machine_architecture
    def db_delete_vt_machine_architecture(self, vt_machine_architecture):
        self._db_vt_machine_architecture = None
    def _get_vt_machine_processor(self):
        return self._db_vt_machine_processor
    def _set_vt_machine_processor(self, vt_machine_processor):
        self._db_vt_machine_processor = vt_machine_processor
        self.is_dirty = True
    db_vt_machine_processor = property(_get_vt_machine_processor, _set_vt_machine_processor)
    def db_add_vt_machine_processor(self, vt_machine_processor):
        self._db_vt_machine_processor = vt_machine_processor
    def db_change_vt_machine_processor(self, vt_machine_processor):
        self._db_vt_machine_processor = vt_machine_processor
    def db_delete_vt_machine_processor(self, vt_machine_processor):
        self._db_vt_machine_processor = None
    def _get_vt_machine_ram(self):
        return self._db_vt_machine_ram
    def _set_vt_machine_ram(self, vt_machine_ram):
        self._db_vt_machine_ram = vt_machine_ram
        self.is_dirty = True
    db_vt_machine_ram = property(_get_vt_machine_ram, _set_vt_machine_ram)
    def db_add_vt_machine_ram(self, vt_machine_ram):
        self._db_vt_machine_ram = vt_machine_ram
    def db_change_vt_machine_ram(self, vt_machine_ram):
        self._db_vt_machine_ram = vt_machine_ram
    def db_delete_vt_machine_ram(self, vt_machine_ram):
        self._db_vt_machine_ram = None
    def getPrimaryKey(self):
        """Return the primary-key value (the id field)."""
        return self._db_id
class DBMashuptrail(object):
vtType = 'mashuptrail'
def __init__(self, id=None, name=None, version=None, vtVersion=None, last_modified=None, actions=None, annotations=None, actionAnnotations=None):
self._db_id = id
self._db_name = name
self._db_version = version
self._db_vtVersion = vtVersion
self._db_last_modified = last_modified
self.db_deleted_actions = []
self.db_actions_id_index = {}
if actions is None:
self._db_actions = []
else:
self._db_actions = actions
for v in self._db_actions:
self.db_actions_id_index[v.db_id] = v
self.db_deleted_annotations = []
self.db_annotations_id_index = {}
self.db_annotations_key_index = {}
if annotations is None:
self._db_annotations = []
else:
self._db_annotations = annotations
for v in self._db_annotations:
self.db_annotations_id_index[v.db_id] = v
self.db_annotations_key_index[v.db_key] = v
self.db_deleted_actionAnnotations = []
self.db_actionAnnotations_id_index = {}
self.db_actionAnnotations_action_id_index = {}
self.db_actionAnnotations_key_index = {}
if actionAnnotations is None:
self._db_actionAnnotations = []
else:
self._db_actionAnnotations = actionAnnotations
for v in self._db_actionAnnotations:
self.db_actionAnnotations_id_index[v.db_id] = v
self.db_actionAnnotations_action_id_index[(v.db_action_id,v.db_key)] = v
self.db_actionAnnotations_key_index[(v.db_key,v.db_value)] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBMashuptrail.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBMashuptrail(id=self._db_id,
name=self._db_name,
version=self._db_version,
vtVersion=self._db_vtVersion,
last_modified=self._db_last_modified)
if self._db_actions is None:
cp._db_actions = []
else:
cp._db_actions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actions]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
if self._db_actionAnnotations is None:
cp._db_actionAnnotations = []
else:
cp._db_actionAnnotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actionAnnotations]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_actions_id_index = dict((v.db_id, v) for v in cp._db_actions)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
cp.db_actionAnnotations_id_index = dict((v.db_id, v) for v in cp._db_actionAnnotations)
cp.db_actionAnnotations_action_id_index = dict(((v.db_action_id,v.db_key), v) for v in cp._db_actionAnnotations)
cp.db_actionAnnotations_key_index = dict(((v.db_key,v.db_value), v) for v in cp._db_actionAnnotations)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBMashuptrail()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'vtVersion' in class_dict:
res = class_dict['vtVersion'](old_obj, trans_dict)
new_obj.db_vtVersion = res
elif hasattr(old_obj, 'db_vtVersion') and old_obj.db_vtVersion is not None:
new_obj.db_vtVersion = old_obj.db_vtVersion
if 'last_modified' in class_dict:
res = class_dict['last_modified'](old_obj, trans_dict)
new_obj.db_last_modified = res
elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
new_obj.db_last_modified = old_obj.db_last_modified
if 'actions' in class_dict:
res = class_dict['actions'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_action(obj)
elif hasattr(old_obj, 'db_actions') and old_obj.db_actions is not None:
for obj in old_obj.db_actions:
new_obj.db_add_action(DBMashupAction.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_actions') and hasattr(new_obj, 'db_deleted_actions'):
for obj in old_obj.db_deleted_actions:
n_obj = DBMashupAction.update_version(obj, trans_dict)
new_obj.db_deleted_actions.append(n_obj)
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
if 'actionAnnotations' in class_dict:
res = class_dict['actionAnnotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_actionAnnotation(obj)
elif hasattr(old_obj, 'db_actionAnnotations') and old_obj.db_actionAnnotations is not None:
for obj in old_obj.db_actionAnnotations:
new_obj.db_add_actionAnnotation(DBMashupActionAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_actionAnnotations') and hasattr(new_obj, 'db_deleted_actionAnnotations'):
for obj in old_obj.db_deleted_actionAnnotations:
n_obj = DBMashupActionAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_actionAnnotations.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
children = []
to_del = []
for child in self.db_actions:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_action(child)
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_actionAnnotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_actionAnnotation(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_actions)
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_actionAnnotations)
if remove:
self.db_deleted_actions = []
self.db_deleted_annotations = []
self.db_deleted_actionAnnotations = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_actions:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_actionAnnotations:
if child.has_changes():
return True
return False
    # ---- generated scalar-field accessors -------------------------------
    # Pattern for every scalar field: a private getter/setter pair exposed
    # through `property` (the setter flips is_dirty), plus db_add_* /
    # db_change_* / db_delete_* helpers that write the backing slot WITHOUT
    # touching is_dirty (presumably the persistence/load path -- TODO
    # confirm against the generator).  db_delete_* ignores its argument and
    # simply resets the slot to None.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # 'name' scalar: same generated pattern as db_id.
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    # 'version' scalar: same generated pattern.
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    # 'vtVersion' scalar: same generated pattern.
    def __get_db_vtVersion(self):
        return self._db_vtVersion
    def __set_db_vtVersion(self, vtVersion):
        self._db_vtVersion = vtVersion
        self.is_dirty = True
    db_vtVersion = property(__get_db_vtVersion, __set_db_vtVersion)
    def db_add_vtVersion(self, vtVersion):
        self._db_vtVersion = vtVersion
    def db_change_vtVersion(self, vtVersion):
        self._db_vtVersion = vtVersion
    def db_delete_vtVersion(self, vtVersion):
        self._db_vtVersion = None
    # 'last_modified' scalar: same generated pattern.
    def __get_db_last_modified(self):
        return self._db_last_modified
    def __set_db_last_modified(self, last_modified):
        self._db_last_modified = last_modified
        self.is_dirty = True
    db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
    def db_add_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_change_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_delete_last_modified(self, last_modified):
        self._db_last_modified = None
    # ---- 'actions' child collection ------------------------------------
    # The list self._db_actions is mirrored by the db_actions_id_index dict
    # for O(1) lookup by db_id; both structures must be updated together.
    def __get_db_actions(self):
        return self._db_actions
    def __set_db_actions(self, actions):
        # NOTE(review): replacing the list wholesale does NOT rebuild
        # db_actions_id_index -- callers appear responsible for that.
        self._db_actions = actions
        self.is_dirty = True
    db_actions = property(__get_db_actions, __set_db_actions)
    def db_get_actions(self):
        return self._db_actions
    def db_add_action(self, action):
        self.is_dirty = True
        self._db_actions.append(action)
        self.db_actions_id_index[action.db_id] = action
    def db_change_action(self, action):
        """Replace the action with the same db_id in place; append when no
        match exists.  The id index is refreshed either way."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_actions)):
            if self._db_actions[i].db_id == action.db_id:
                self._db_actions[i] = action
                found = True
                break
        if not found:
            self._db_actions.append(action)
        self.db_actions_id_index[action.db_id] = action
    def db_delete_action(self, action):
        """Remove the action with the same db_id, remembering it in
        db_deleted_actions unless it was never persisted (is_new).
        Raises KeyError (from the index del) if the id is unknown."""
        self.is_dirty = True
        for i in xrange(len(self._db_actions)):
            if self._db_actions[i].db_id == action.db_id:
                if not self._db_actions[i].is_new:
                    self.db_deleted_actions.append(self._db_actions[i])
                del self._db_actions[i]
                break
        del self.db_actions_id_index[action.db_id]
    def db_get_action(self, key):
        # Linear scan by db_id; db_get_action_by_id is the O(1) variant.
        for i in xrange(len(self._db_actions)):
            if self._db_actions[i].db_id == key:
                return self._db_actions[i]
        return None
    def db_get_action_by_id(self, key):
        return self.db_actions_id_index[key]
    def db_has_action_with_id(self, key):
        return key in self.db_actions_id_index
    # ---- 'annotations' child collection --------------------------------
    # Mirrored by two indices: db_annotations_id_index (by db_id) and
    # db_annotations_key_index (by db_key); all three must stay in sync.
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_change_annotation(self, annotation):
        """Replace the annotation with the same db_id (append when absent)
        and refresh both indices with the new object."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_delete_annotation(self, annotation):
        """Remove by db_id, recording the old object in
        db_deleted_annotations unless is_new.  Raises KeyError (from the
        index dels) for an unknown annotation."""
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
        del self.db_annotations_key_index[annotation.db_key]
    def db_get_annotation(self, key):
        # Linear scan by db_id; db_get_annotation_by_id is the O(1) variant.
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def db_get_annotation_by_key(self, key):
        return self.db_annotations_key_index[key]
    def db_has_annotation_with_key(self, key):
        return key in self.db_annotations_key_index
    # ---- 'actionAnnotations' child collection --------------------------
    # Mirrored by three indices: by db_id, by (db_action_id, db_key), and
    # by (db_key, db_value); all four structures must stay in sync.
    def __get_db_actionAnnotations(self):
        return self._db_actionAnnotations
    def __set_db_actionAnnotations(self, actionAnnotations):
        self._db_actionAnnotations = actionAnnotations
        self.is_dirty = True
    db_actionAnnotations = property(__get_db_actionAnnotations, __set_db_actionAnnotations)
    def db_get_actionAnnotations(self):
        return self._db_actionAnnotations
    def db_add_actionAnnotation(self, actionAnnotation):
        self.is_dirty = True
        self._db_actionAnnotations.append(actionAnnotation)
        self.db_actionAnnotations_id_index[actionAnnotation.db_id] = actionAnnotation
        self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] = actionAnnotation
        self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] = actionAnnotation
    def db_change_actionAnnotation(self, actionAnnotation):
        """Replace the entry with the same db_id (append when absent) and
        refresh all three indices with the new object."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_actionAnnotations)):
            if self._db_actionAnnotations[i].db_id == actionAnnotation.db_id:
                self._db_actionAnnotations[i] = actionAnnotation
                found = True
                break
        if not found:
            self._db_actionAnnotations.append(actionAnnotation)
        self.db_actionAnnotations_id_index[actionAnnotation.db_id] = actionAnnotation
        self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] = actionAnnotation
        self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] = actionAnnotation
    def db_delete_actionAnnotation(self, actionAnnotation):
        """Remove by db_id, recording the old object in
        db_deleted_actionAnnotations unless is_new."""
        self.is_dirty = True
        for i in xrange(len(self._db_actionAnnotations)):
            if self._db_actionAnnotations[i].db_id == actionAnnotation.db_id:
                if not self._db_actionAnnotations[i].is_new:
                    self.db_deleted_actionAnnotations.append(self._db_actionAnnotations[i])
                del self._db_actionAnnotations[i]
                break
        del self.db_actionAnnotations_id_index[actionAnnotation.db_id]
        del self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)]
        # The (key, value) index entry may have been overwritten by a later
        # annotation sharing the same pair (non-unique key), hence the
        # tolerated KeyError here -- unlike the two deletes above.
        try:
            del self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)]
        except KeyError:
            pass
    def db_get_actionAnnotation(self, key):
        # Linear scan by db_id; db_get_actionAnnotation_by_id is O(1).
        for i in xrange(len(self._db_actionAnnotations)):
            if self._db_actionAnnotations[i].db_id == key:
                return self._db_actionAnnotations[i]
        return None
    def db_get_actionAnnotation_by_id(self, key):
        return self.db_actionAnnotations_id_index[key]
    def db_has_actionAnnotation_with_id(self, key):
        return key in self.db_actionAnnotations_id_index
    def db_get_actionAnnotation_by_action_id(self, key):
        # key is the tuple (db_action_id, db_key).
        return self.db_actionAnnotations_action_id_index[key]
    def db_has_actionAnnotation_with_action_id(self, key):
        return key in self.db_actionAnnotations_action_id_index
    def db_get_actionAnnotation_by_key(self, key):
        # key is the tuple (db_key, db_value).
        return self.db_actionAnnotations_key_index[key]
    def db_has_actionAnnotation_with_key(self, key):
        return key in self.db_actionAnnotations_key_index
    def getPrimaryKey(self):
        return self._db_id
class DBRegistry(object):
    """Generated domain object for a module registry: identity scalars plus
    an id- and identifier-indexed collection of child packages.

    Accessor conventions (shared by the generated DB* classes in this
    file): each ``db_X`` property setter flips ``is_dirty``; the
    ``db_add_X`` / ``db_change_X`` / ``db_delete_X`` helpers for scalar
    fields write the backing slot without touching the dirty flag.

    Fix vs. the generated original: the package-lookup loops used
    ``xrange`` index iteration, which raises NameError on Python 3; they
    now use ``enumerate``/direct iteration (identical behavior on
    Python 2).
    """
    vtType = 'registry'
    def __init__(self, id=None, entity_type=None, version=None, root_descriptor_id=None, name=None, last_modified=None, packages=None):
        self._db_id = id
        self._db_entity_type = entity_type
        self._db_version = version
        self._db_root_descriptor_id = root_descriptor_id
        self._db_name = name
        self._db_last_modified = last_modified
        self.db_deleted_packages = []
        # Secondary lookup tables, kept in sync with self._db_packages.
        self.db_packages_id_index = {}
        self.db_packages_identifier_index = {}
        if packages is None:
            self._db_packages = []
        else:
            self._db_packages = packages
        for v in self._db_packages:
            self.db_packages_id_index[v.db_id] = v
            self.db_packages_identifier_index[(v.db_identifier,v.db_version)] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBRegistry.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this registry and its packages.

        When *new_ids* is true, allocate a fresh id from *id_scope*,
        record the (type, old_id) -> new_id mapping in *id_remap*, and
        rewrite root_descriptor_id if its module_descriptor was already
        remapped.  Dirty/new flags are carried over only when ids are
        preserved.
        """
        cp = DBRegistry(id=self._db_id,
                        entity_type=self._db_entity_type,
                        version=self._db_version,
                        root_descriptor_id=self._db_root_descriptor_id,
                        name=self._db_name,
                        last_modified=self._db_last_modified)
        if self._db_packages is None:
            cp._db_packages = []
        else:
            cp._db_packages = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_packages]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_root_descriptor_id') and ('module_descriptor', self._db_root_descriptor_id) in id_remap:
                cp._db_root_descriptor_id = id_remap[('module_descriptor', self._db_root_descriptor_id)]
        # recreate indices and set flags
        cp.db_packages_id_index = dict((v.db_id, v) for v in cp._db_packages)
        cp.db_packages_identifier_index = dict(((v.db_identifier,v.db_version), v) for v in cp._db_packages)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* to the current schema: apply any per-field
        translator registered under 'DBRegistry' in *trans_dict*, falling
        back to a straight copy of each non-None field."""
        if new_obj is None:
            new_obj = DBRegistry()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            new_obj.db_id = class_dict['id'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'entity_type' in class_dict:
            new_obj.db_entity_type = class_dict['entity_type'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
            new_obj.db_entity_type = old_obj.db_entity_type
        if 'version' in class_dict:
            new_obj.db_version = class_dict['version'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'root_descriptor_id' in class_dict:
            new_obj.db_root_descriptor_id = class_dict['root_descriptor_id'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_root_descriptor_id') and old_obj.db_root_descriptor_id is not None:
            new_obj.db_root_descriptor_id = old_obj.db_root_descriptor_id
        if 'name' in class_dict:
            new_obj.db_name = class_dict['name'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'last_modified' in class_dict:
            new_obj.db_last_modified = class_dict['last_modified'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
            new_obj.db_last_modified = old_obj.db_last_modified
        if 'packages' in class_dict:
            for obj in class_dict['packages'](old_obj, trans_dict):
                new_obj.db_add_package(obj)
        elif hasattr(old_obj, 'db_packages') and old_obj.db_packages is not None:
            for obj in old_obj.db_packages:
                new_obj.db_add_package(DBPackage.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_packages') and hasattr(new_obj, 'db_deleted_packages'):
            for obj in old_obj.db_deleted_packages:
                new_obj.db_deleted_packages.append(DBPackage.update_version(obj, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples for this
        registry and all descendants; with *orphan*, detach each visited
        package afterwards."""
        children = []
        to_del = []
        for child in self.db_packages:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        # Deletion is deferred so the list is not mutated mid-iteration.
        for child in to_del:
            self.db_delete_package(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return a fresh list of deleted packages; with *remove*, clear
        the bookkeeping list afterwards."""
        children = []
        children.extend(self.db_deleted_packages)
        if remove:
            self.db_deleted_packages = []
        return children
    def has_changes(self):
        """True when the registry itself or any child package is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_packages:
            if child.has_changes():
                return True
        return False
    # ---- generated scalar-field accessors: property setters flip
    # is_dirty; db_add_/db_change_/db_delete_ helpers do not.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_entity_type(self):
        return self._db_entity_type
    def __set_db_entity_type(self, entity_type):
        self._db_entity_type = entity_type
        self.is_dirty = True
    db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
    def db_add_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_change_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_delete_entity_type(self, entity_type):
        self._db_entity_type = None
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    def __get_db_root_descriptor_id(self):
        return self._db_root_descriptor_id
    def __set_db_root_descriptor_id(self, root_descriptor_id):
        self._db_root_descriptor_id = root_descriptor_id
        self.is_dirty = True
    db_root_descriptor_id = property(__get_db_root_descriptor_id, __set_db_root_descriptor_id)
    def db_add_root_descriptor_id(self, root_descriptor_id):
        self._db_root_descriptor_id = root_descriptor_id
    def db_change_root_descriptor_id(self, root_descriptor_id):
        self._db_root_descriptor_id = root_descriptor_id
    def db_delete_root_descriptor_id(self, root_descriptor_id):
        self._db_root_descriptor_id = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_last_modified(self):
        return self._db_last_modified
    def __set_db_last_modified(self, last_modified):
        self._db_last_modified = last_modified
        self.is_dirty = True
    db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
    def db_add_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_change_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_delete_last_modified(self, last_modified):
        self._db_last_modified = None
    # ---- 'packages' child collection: the list is mirrored by an id
    # index and an (identifier, version) index.
    def __get_db_packages(self):
        return self._db_packages
    def __set_db_packages(self, packages):
        self._db_packages = packages
        self.is_dirty = True
    db_packages = property(__get_db_packages, __set_db_packages)
    def db_get_packages(self):
        return self._db_packages
    def db_add_package(self, package):
        self.is_dirty = True
        self._db_packages.append(package)
        self.db_packages_id_index[package.db_id] = package
        self.db_packages_identifier_index[(package.db_identifier,package.db_version)] = package
    def db_change_package(self, package):
        """Replace the package with the same db_id in place (append when
        absent) and refresh both indices with the new object."""
        self.is_dirty = True
        for i, existing in enumerate(self._db_packages):
            if existing.db_id == package.db_id:
                self._db_packages[i] = package
                break
        else:
            self._db_packages.append(package)
        self.db_packages_id_index[package.db_id] = package
        self.db_packages_identifier_index[(package.db_identifier,package.db_version)] = package
    def db_delete_package(self, package):
        """Remove the package with the same db_id, recording it in
        db_deleted_packages unless it was never persisted (is_new).
        Raises KeyError (from the index dels) for an unknown package,
        matching the generated original."""
        self.is_dirty = True
        for i, existing in enumerate(self._db_packages):
            if existing.db_id == package.db_id:
                if not existing.is_new:
                    self.db_deleted_packages.append(existing)
                del self._db_packages[i]
                break
        del self.db_packages_id_index[package.db_id]
        del self.db_packages_identifier_index[(package.db_identifier,package.db_version)]
    def db_get_package(self, key):
        # Linear scan by db_id; db_get_package_by_id is the O(1) variant.
        for package in self._db_packages:
            if package.db_id == key:
                return package
        return None
    def db_get_package_by_id(self, key):
        return self.db_packages_id_index[key]
    def db_has_package_with_id(self, key):
        return key in self.db_packages_id_index
    def db_get_package_by_identifier(self, key):
        # key is the tuple (db_identifier, db_version).
        return self.db_packages_identifier_index[key]
    def db_has_package_with_identifier(self, key):
        return key in self.db_packages_identifier_index
    def getPrimaryKey(self):
        return self._db_id
class DBOpmAgent(object):
    """Generated domain object for an OPM agent: an id, a value, and a
    non-keyed list of account references."""
    vtType = 'opm_agent'

    def __init__(self, id=None, value=None, accounts=None):
        self._db_id = id
        self._db_value = value
        self.db_deleted_accounts = []
        self._db_accounts = [] if accounts is None else accounts
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmAgent.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this agent; with *new_ids*, allocate a fresh id from
        *id_scope* and record the mapping in *id_remap*."""
        dup = DBOpmAgent(id=self._db_id, value=self._db_value)
        accounts = self._db_accounts
        dup._db_accounts = ([] if accounts is None else
                            [a.do_copy(new_ids, id_scope, id_remap) for a in accounts])
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = fresh
            else:
                id_remap[(self.vtType, self.db_id)] = fresh
            dup.db_id = fresh
        else:
            # Flags are carried over only when ids are preserved.
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* to the current schema, applying any field
        translators registered under this class name in *trans_dict*."""
        if new_obj is None:
            new_obj = DBOpmAgent()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        if 'id' in class_dict:
            new_obj.db_id = class_dict['id'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_id', None) is not None:
            new_obj.db_id = old_obj.db_id
        if 'value' in class_dict:
            new_obj.db_value = class_dict['value'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_value', None) is not None:
            new_obj.db_value = old_obj.db_value
        if 'accounts' in class_dict:
            for obj in class_dict['accounts'](old_obj, trans_dict):
                new_obj.db_add_account(obj)
        elif getattr(old_obj, 'db_accounts', None) is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                new_obj.db_deleted_accounts.append(DBOpmAccountId.update_version(obj, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples for this agent
        and all account children."""
        my_key = (self.vtType, self.db_id)
        children = []
        doomed = []
        for acct in self.db_accounts:
            children.extend(acct.db_children(my_key, orphan, for_action))
            if orphan:
                doomed.append(acct)
        for acct in doomed:
            # Accounts are non-keyed, so this always raises (as generated).
            self.db_delete_account(acct)
        children.append((self, parent[0], parent[1]))
        return children

    def db_deleted_children(self, remove=False):
        """Return a fresh list of deleted accounts; with *remove*, clear
        the bookkeeping list."""
        deleted = list(self.db_deleted_accounts)
        if remove:
            self.db_deleted_accounts = []
        return deleted

    def has_changes(self):
        """True when the agent itself or any account child is dirty."""
        if self.is_dirty:
            return True
        return any(acct.has_changes() for acct in self._db_accounts)

    @property
    def db_id(self):
        return self._db_id
    @db_id.setter
    def db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    @property
    def db_value(self):
        return self._db_value
    @db_value.setter
    def db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        self._db_value = None

    @property
    def db_accounts(self):
        return self._db_accounts
    @db_accounts.setter
    def db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        # Non-keyed children cannot be matched in place; the generated
        # behavior is to append.
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        # Non-keyed children have no lookup; always None, as generated.
        return None

    def getPrimaryKey(self):
        return self._db_id
class DBProvEntity(object):
    """Generated domain object describing a PROV entity together with the
    VisTrails-specific (vt_*) attributes it mirrors, plus an optional
    single 'is_part_of' child."""
    vtType = 'prov_entity'

    # Scalar fields handled uniformly by update_version, in the same
    # order the generator emits them.
    _SCALAR_FIELDS = ('id', 'prov_type', 'prov_label', 'prov_value',
                      'vt_id', 'vt_type', 'vt_desc', 'vt_package',
                      'vt_version', 'vt_cache', 'vt_location_x',
                      'vt_location_y')

    def __init__(self, id=None, prov_type=None, prov_label=None, prov_value=None, vt_id=None, vt_type=None, vt_desc=None, vt_package=None, vt_version=None, vt_cache=None, vt_location_x=None, vt_location_y=None, is_part_of=None):
        self._db_id = id
        self._db_prov_type = prov_type
        self._db_prov_label = prov_label
        self._db_prov_value = prov_value
        self._db_vt_id = vt_id
        self._db_vt_type = vt_type
        self._db_vt_desc = vt_desc
        self._db_vt_package = vt_package
        self._db_vt_version = vt_version
        self._db_vt_cache = vt_cache
        self._db_vt_location_x = vt_location_x
        self._db_vt_location_y = vt_location_y
        self.db_deleted_is_part_of = []
        self._db_is_part_of = is_part_of
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBProvEntity.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this entity; with *new_ids*, allocate a fresh id from
        *id_scope* and record the mapping in *id_remap*."""
        dup = DBProvEntity(id=self._db_id,
                           prov_type=self._db_prov_type,
                           prov_label=self._db_prov_label,
                           prov_value=self._db_prov_value,
                           vt_id=self._db_vt_id,
                           vt_type=self._db_vt_type,
                           vt_desc=self._db_vt_desc,
                           vt_package=self._db_vt_package,
                           vt_version=self._db_vt_version,
                           vt_cache=self._db_vt_cache,
                           vt_location_x=self._db_vt_location_x,
                           vt_location_y=self._db_vt_location_y)
        if self._db_is_part_of is not None:
            dup._db_is_part_of = self._db_is_part_of.do_copy(new_ids, id_scope, id_remap)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = fresh
            else:
                id_remap[(self.vtType, self.db_id)] = fresh
            dup.db_id = fresh
        else:
            # Flags are carried over only when ids are preserved.
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* to the current schema, applying any field
        translators registered under this class name in *trans_dict*."""
        if new_obj is None:
            new_obj = DBProvEntity()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        for field in DBProvEntity._SCALAR_FIELDS:
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        if 'is_part_of' in class_dict:
            new_obj.db_is_part_of = class_dict['is_part_of'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_is_part_of', None) is not None:
            new_obj.db_add_is_part_of(DBIsPartOf.update_version(old_obj.db_is_part_of, trans_dict))
        if hasattr(old_obj, 'db_deleted_is_part_of') and hasattr(new_obj, 'db_deleted_is_part_of'):
            for obj in old_obj.db_deleted_is_part_of:
                new_obj.db_deleted_is_part_of.append(DBIsPartOf.update_version(obj, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples for this entity
        and its optional is_part_of child; with *orphan*, drop the child
        reference after visiting it."""
        children = []
        part = self._db_is_part_of
        if part is not None:
            children.extend(part.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_is_part_of = None
        children.append((self, parent[0], parent[1]))
        return children

    def db_deleted_children(self, remove=False):
        """Return a fresh list of deleted is_part_of children; with
        *remove*, clear the bookkeeping list."""
        deleted = list(self.db_deleted_is_part_of)
        if remove:
            self.db_deleted_is_part_of = []
        return deleted

    def has_changes(self):
        """True when the entity itself or its is_part_of child is dirty."""
        if self.is_dirty:
            return True
        part = self._db_is_part_of
        return part is not None and part.has_changes()

    @property
    def db_id(self):
        return self._db_id
    @db_id.setter
    def db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    @property
    def db_prov_type(self):
        return self._db_prov_type
    @db_prov_type.setter
    def db_prov_type(self, prov_type):
        self._db_prov_type = prov_type
        self.is_dirty = True
    def db_add_prov_type(self, prov_type):
        self._db_prov_type = prov_type
    def db_change_prov_type(self, prov_type):
        self._db_prov_type = prov_type
    def db_delete_prov_type(self, prov_type):
        self._db_prov_type = None

    @property
    def db_prov_label(self):
        return self._db_prov_label
    @db_prov_label.setter
    def db_prov_label(self, prov_label):
        self._db_prov_label = prov_label
        self.is_dirty = True
    def db_add_prov_label(self, prov_label):
        self._db_prov_label = prov_label
    def db_change_prov_label(self, prov_label):
        self._db_prov_label = prov_label
    def db_delete_prov_label(self, prov_label):
        self._db_prov_label = None

    @property
    def db_prov_value(self):
        return self._db_prov_value
    @db_prov_value.setter
    def db_prov_value(self, prov_value):
        self._db_prov_value = prov_value
        self.is_dirty = True
    def db_add_prov_value(self, prov_value):
        self._db_prov_value = prov_value
    def db_change_prov_value(self, prov_value):
        self._db_prov_value = prov_value
    def db_delete_prov_value(self, prov_value):
        self._db_prov_value = None

    @property
    def db_vt_id(self):
        return self._db_vt_id
    @db_vt_id.setter
    def db_vt_id(self, vt_id):
        self._db_vt_id = vt_id
        self.is_dirty = True
    def db_add_vt_id(self, vt_id):
        self._db_vt_id = vt_id
    def db_change_vt_id(self, vt_id):
        self._db_vt_id = vt_id
    def db_delete_vt_id(self, vt_id):
        self._db_vt_id = None

    @property
    def db_vt_type(self):
        return self._db_vt_type
    @db_vt_type.setter
    def db_vt_type(self, vt_type):
        self._db_vt_type = vt_type
        self.is_dirty = True
    def db_add_vt_type(self, vt_type):
        self._db_vt_type = vt_type
    def db_change_vt_type(self, vt_type):
        self._db_vt_type = vt_type
    def db_delete_vt_type(self, vt_type):
        self._db_vt_type = None

    @property
    def db_vt_desc(self):
        return self._db_vt_desc
    @db_vt_desc.setter
    def db_vt_desc(self, vt_desc):
        self._db_vt_desc = vt_desc
        self.is_dirty = True
    def db_add_vt_desc(self, vt_desc):
        self._db_vt_desc = vt_desc
    def db_change_vt_desc(self, vt_desc):
        self._db_vt_desc = vt_desc
    def db_delete_vt_desc(self, vt_desc):
        self._db_vt_desc = None

    @property
    def db_vt_package(self):
        return self._db_vt_package
    @db_vt_package.setter
    def db_vt_package(self, vt_package):
        self._db_vt_package = vt_package
        self.is_dirty = True
    def db_add_vt_package(self, vt_package):
        self._db_vt_package = vt_package
    def db_change_vt_package(self, vt_package):
        self._db_vt_package = vt_package
    def db_delete_vt_package(self, vt_package):
        self._db_vt_package = None

    @property
    def db_vt_version(self):
        return self._db_vt_version
    @db_vt_version.setter
    def db_vt_version(self, vt_version):
        self._db_vt_version = vt_version
        self.is_dirty = True
    def db_add_vt_version(self, vt_version):
        self._db_vt_version = vt_version
    def db_change_vt_version(self, vt_version):
        self._db_vt_version = vt_version
    def db_delete_vt_version(self, vt_version):
        self._db_vt_version = None

    @property
    def db_vt_cache(self):
        return self._db_vt_cache
    @db_vt_cache.setter
    def db_vt_cache(self, vt_cache):
        self._db_vt_cache = vt_cache
        self.is_dirty = True
    def db_add_vt_cache(self, vt_cache):
        self._db_vt_cache = vt_cache
    def db_change_vt_cache(self, vt_cache):
        self._db_vt_cache = vt_cache
    def db_delete_vt_cache(self, vt_cache):
        self._db_vt_cache = None

    @property
    def db_vt_location_x(self):
        return self._db_vt_location_x
    @db_vt_location_x.setter
    def db_vt_location_x(self, vt_location_x):
        self._db_vt_location_x = vt_location_x
        self.is_dirty = True
    def db_add_vt_location_x(self, vt_location_x):
        self._db_vt_location_x = vt_location_x
    def db_change_vt_location_x(self, vt_location_x):
        self._db_vt_location_x = vt_location_x
    def db_delete_vt_location_x(self, vt_location_x):
        self._db_vt_location_x = None

    @property
    def db_vt_location_y(self):
        return self._db_vt_location_y
    @db_vt_location_y.setter
    def db_vt_location_y(self, vt_location_y):
        self._db_vt_location_y = vt_location_y
        self.is_dirty = True
    def db_add_vt_location_y(self, vt_location_y):
        self._db_vt_location_y = vt_location_y
    def db_change_vt_location_y(self, vt_location_y):
        self._db_vt_location_y = vt_location_y
    def db_delete_vt_location_y(self, vt_location_y):
        self._db_vt_location_y = None

    @property
    def db_is_part_of(self):
        return self._db_is_part_of
    @db_is_part_of.setter
    def db_is_part_of(self, is_part_of):
        self._db_is_part_of = is_part_of
        self.is_dirty = True
    def db_add_is_part_of(self, is_part_of):
        self._db_is_part_of = is_part_of
    def db_change_is_part_of(self, is_part_of):
        self._db_is_part_of = is_part_of
    def db_delete_is_part_of(self, is_part_of):
        # Keep the displaced child for persistence-layer deletion unless
        # this entity was never saved.
        if not self.is_new:
            self.db_deleted_is_part_of.append(self._db_is_part_of)
        self._db_is_part_of = None

    def getPrimaryKey(self):
        return self._db_id
class DBAnnotation(object):
    """Auto-generated persistence object for the 'annotation' entity.

    Holds an id plus a key/value pair.  ``is_dirty`` and ``is_new``
    track persistence state for the serialization layer.
    """

    vtType = 'annotation'

    def __init__(self, id=None, key=None, value=None):
        self._db_id = id
        self._db_key = key
        self._db_value = value
        # Freshly constructed objects are both unsaved and modified.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBAnnotation.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy; with new_ids, allocate a fresh id from id_scope
        and record the old->new mapping in id_remap."""
        dup = DBAnnotation(id=self._db_id,
                           key=self._db_key,
                           value=self._db_value)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            remap_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(remap_type, self.db_id)] = fresh
            dup.db_id = fresh
        else:
            # A plain copy keeps the original persistence flags.
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into new_obj, applying any per-field
        converter registered in trans_dict for this class."""
        if new_obj is None:
            new_obj = DBAnnotation()
        converters = trans_dict.get(new_obj.__class__.__name__, {})
        for field in ('id', 'key', 'value'):
            attr = 'db_' + field
            if field in converters:
                setattr(new_obj, attr, converters[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only "child" reported is the object itself.
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # Annotations own no deletable sub-objects.
        return []

    def has_changes(self):
        return True if self.is_dirty else False

    # id field -------------------------------------------------------
    def _get_id(self):
        return self._db_id
    def _set_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    # key field ------------------------------------------------------
    def _get_key(self):
        return self._db_key
    def _set_key(self, key):
        self._db_key = key
        self.is_dirty = True
    db_key = property(_get_key, _set_key)
    def db_add_key(self, key):
        self._db_key = key
    def db_change_key(self, key):
        self._db_key = key
    def db_delete_key(self, key):
        self._db_key = None

    # value field ----------------------------------------------------
    def _get_value(self):
        return self._db_value
    def _set_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(_get_value, _set_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        self._db_value = None

    def getPrimaryKey(self):
        return self._db_id
class DBOpmTime(object):
    """Auto-generated persistence object for an OPM time interval.

    Stores the interval bounds (no_earlier_than, no_later_than) and the
    id of the clock the bounds are measured against.
    """

    vtType = 'opm_time'

    def __init__(self, no_later_than=None, no_earlier_than=None, clock_id=None):
        self._db_no_later_than = no_later_than
        self._db_no_earlier_than = no_earlier_than
        self._db_clock_id = clock_id
        # Freshly constructed objects are both unsaved and modified.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmTime.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy; with new_ids, allocate a fresh id from id_scope
        and record the old->new mapping in id_remap (as generated)."""
        dup = DBOpmTime(no_later_than=self._db_no_later_than,
                        no_earlier_than=self._db_no_earlier_than,
                        clock_id=self._db_clock_id)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            remap_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(remap_type, self.db_id)] = fresh
            dup.db_id = fresh
        else:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into new_obj, applying any per-field
        converter registered in trans_dict for this class."""
        if new_obj is None:
            new_obj = DBOpmTime()
        converters = trans_dict.get(new_obj.__class__.__name__, {})
        for field in ('no_later_than', 'no_earlier_than', 'clock_id'):
            attr = 'db_' + field
            if field in converters:
                setattr(new_obj, attr, converters[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only "child" reported is the object itself.
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # No deletable sub-objects.
        return []

    def has_changes(self):
        return True if self.is_dirty else False

    # no_later_than field -------------------------------------------
    def _get_no_later_than(self):
        return self._db_no_later_than
    def _set_no_later_than(self, no_later_than):
        self._db_no_later_than = no_later_than
        self.is_dirty = True
    db_no_later_than = property(_get_no_later_than, _set_no_later_than)
    def db_add_no_later_than(self, no_later_than):
        self._db_no_later_than = no_later_than
    def db_change_no_later_than(self, no_later_than):
        self._db_no_later_than = no_later_than
    def db_delete_no_later_than(self, no_later_than):
        self._db_no_later_than = None

    # no_earlier_than field -----------------------------------------
    def _get_no_earlier_than(self):
        return self._db_no_earlier_than
    def _set_no_earlier_than(self, no_earlier_than):
        self._db_no_earlier_than = no_earlier_than
        self.is_dirty = True
    db_no_earlier_than = property(_get_no_earlier_than, _set_no_earlier_than)
    def db_add_no_earlier_than(self, no_earlier_than):
        self._db_no_earlier_than = no_earlier_than
    def db_change_no_earlier_than(self, no_earlier_than):
        self._db_no_earlier_than = no_earlier_than
    def db_delete_no_earlier_than(self, no_earlier_than):
        self._db_no_earlier_than = None

    # clock_id field -------------------------------------------------
    def _get_clock_id(self):
        return self._db_clock_id
    def _set_clock_id(self, clock_id):
        self._db_clock_id = clock_id
        self.is_dirty = True
    db_clock_id = property(_get_clock_id, _set_clock_id)
    def db_add_clock_id(self, clock_id):
        self._db_clock_id = clock_id
    def db_change_clock_id(self, clock_id):
        self._db_clock_id = clock_id
    def db_delete_clock_id(self, clock_id):
        self._db_clock_id = None
class DBParameterExploration(object):
    """Auto-generated persistence object for a parameter exploration.

    Scalar fields: id, action_id (foreign key to an action), name, date,
    user, dims, layout.  Child collection: functions (keyed by db_id and
    mirrored in db_functions_id_index).  Deleted children are captured
    in db_deleted_functions for DB-side removal.
    """
    vtType = 'parameter_exploration'
    def __init__(self, id=None, action_id=None, name=None, date=None, user=None, dims=None, layout=None, functions=None):
        self._db_id = id
        self._db_action_id = action_id
        self._db_name = name
        self._db_date = date
        self._db_user = user
        self._db_dims = dims
        self._db_layout = layout
        self.db_deleted_functions = []
        self.db_functions_id_index = {}
        # NOTE: the functions list is adopted by reference, not copied.
        if functions is None:
            self._db_functions = []
        else:
            self._db_functions = functions
        for v in self._db_functions:
            self.db_functions_id_index[v.db_id] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBParameterExploration.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep-copies children; with new_ids, allocates a fresh id and
        # re-points action_id through id_remap if the action was remapped.
        cp = DBParameterExploration(id=self._db_id,
                                    action_id=self._db_action_id,
                                    name=self._db_name,
                                    date=self._db_date,
                                    user=self._db_user,
                                    dims=self._db_dims,
                                    layout=self._db_layout)
        if self._db_functions is None:
            cp._db_functions = []
        else:
            cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_action_id') and ('action', self._db_action_id) in id_remap:
                cp._db_action_id = id_remap[('action', self._db_action_id)]
        # recreate indices and set flags
        cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Field-by-field schema translation; trans_dict may supply a
        # per-field converter keyed by the class name.
        if new_obj is None:
            new_obj = DBParameterExploration()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'action_id' in class_dict:
            res = class_dict['action_id'](old_obj, trans_dict)
            new_obj.db_action_id = res
        elif hasattr(old_obj, 'db_action_id') and old_obj.db_action_id is not None:
            new_obj.db_action_id = old_obj.db_action_id
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'date' in class_dict:
            res = class_dict['date'](old_obj, trans_dict)
            new_obj.db_date = res
        elif hasattr(old_obj, 'db_date') and old_obj.db_date is not None:
            new_obj.db_date = old_obj.db_date
        if 'user' in class_dict:
            res = class_dict['user'](old_obj, trans_dict)
            new_obj.db_user = res
        elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None:
            new_obj.db_user = old_obj.db_user
        if 'dims' in class_dict:
            res = class_dict['dims'](old_obj, trans_dict)
            new_obj.db_dims = res
        elif hasattr(old_obj, 'db_dims') and old_obj.db_dims is not None:
            new_obj.db_dims = old_obj.db_dims
        if 'layout' in class_dict:
            res = class_dict['layout'](old_obj, trans_dict)
            new_obj.db_layout = res
        elif hasattr(old_obj, 'db_layout') and old_obj.db_layout is not None:
            new_obj.db_layout = old_obj.db_layout
        # Child functions are recursively translated via DBPEFunction.
        if 'functions' in class_dict:
            res = class_dict['functions'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_function(obj)
        elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None:
            for obj in old_obj.db_functions:
                new_obj.db_add_function(DBPEFunction.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
            for obj in old_obj.db_deleted_functions:
                n_obj = DBPEFunction.update_version(obj, trans_dict)
                new_obj.db_deleted_functions.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Depth-first child enumeration; with orphan=True the children
        # are detached from this object after being collected.
        children = []
        to_del = []
        for child in self.db_functions:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_function(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Returns (and optionally clears) the captured deleted children.
        children = []
        children.extend(self.db_deleted_functions)
        if remove:
            self.db_deleted_functions = []
        return children
    def has_changes(self):
        # Dirty if this object or any child function is dirty.
        if self.is_dirty:
            return True
        for child in self._db_functions:
            if child.has_changes():
                return True
        return False
    # Scalar accessors: property setters mark the object dirty;
    # db_add_/db_change_ assign without marking; db_delete_ resets to None.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_action_id(self):
        return self._db_action_id
    def __set_db_action_id(self, action_id):
        self._db_action_id = action_id
        self.is_dirty = True
    db_action_id = property(__get_db_action_id, __set_db_action_id)
    def db_add_action_id(self, action_id):
        self._db_action_id = action_id
    def db_change_action_id(self, action_id):
        self._db_action_id = action_id
    def db_delete_action_id(self, action_id):
        self._db_action_id = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_date(self):
        return self._db_date
    def __set_db_date(self, date):
        self._db_date = date
        self.is_dirty = True
    db_date = property(__get_db_date, __set_db_date)
    def db_add_date(self, date):
        self._db_date = date
    def db_change_date(self, date):
        self._db_date = date
    def db_delete_date(self, date):
        self._db_date = None
    def __get_db_user(self):
        return self._db_user
    def __set_db_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(__get_db_user, __set_db_user)
    def db_add_user(self, user):
        self._db_user = user
    def db_change_user(self, user):
        self._db_user = user
    def db_delete_user(self, user):
        self._db_user = None
    def __get_db_dims(self):
        return self._db_dims
    def __set_db_dims(self, dims):
        self._db_dims = dims
        self.is_dirty = True
    db_dims = property(__get_db_dims, __set_db_dims)
    def db_add_dims(self, dims):
        self._db_dims = dims
    def db_change_dims(self, dims):
        self._db_dims = dims
    def db_delete_dims(self, dims):
        self._db_dims = None
    def __get_db_layout(self):
        return self._db_layout
    def __set_db_layout(self, layout):
        self._db_layout = layout
        self.is_dirty = True
    db_layout = property(__get_db_layout, __set_db_layout)
    def db_add_layout(self, layout):
        self._db_layout = layout
    def db_change_layout(self, layout):
        self._db_layout = layout
    def db_delete_layout(self, layout):
        self._db_layout = None
    # functions collection: keyed by each child's db_id.
    def __get_db_functions(self):
        return self._db_functions
    def __set_db_functions(self, functions):
        # NOTE: replacing the whole list does NOT rebuild the id index.
        self._db_functions = functions
        self.is_dirty = True
    db_functions = property(__get_db_functions, __set_db_functions)
    def db_get_functions(self):
        return self._db_functions
    def db_add_function(self, function):
        self.is_dirty = True
        self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_change_function(self, function):
        # Replace in place the child with a matching db_id (linear
        # scan); if none matches, the new child is appended.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == function.db_id:
                self._db_functions[i] = function
                found = True
                break
        if not found:
            self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_delete_function(self, function):
        # Removes by db_id; persisted children are remembered in
        # db_deleted_functions.  NOTE: the index del below runs even if
        # the list scan found nothing, raising KeyError for unknown ids.
        self.is_dirty = True
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == function.db_id:
                if not self._db_functions[i].is_new:
                    self.db_deleted_functions.append(self._db_functions[i])
                del self._db_functions[i]
                break
        del self.db_functions_id_index[function.db_id]
    def db_get_function(self, key):
        # Linear search by db_id; returns None when absent.
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == key:
                return self._db_functions[i]
        return None
    def db_get_function_by_id(self, key):
        return self.db_functions_id_index[key]
    def db_has_function_with_id(self, key):
        return key in self.db_functions_id_index
    def getPrimaryKey(self):
        return self._db_id
class DBMashupActionAnnotation(object):
    """Auto-generated persistence object annotating a mashup action.

    Carries a key/value pair plus provenance (date, user) and the id of
    the annotated mashup action.
    """

    vtType = 'mashup_actionAnnotation'

    def __init__(self, id=None, key=None, value=None, action_id=None, date=None, user=None):
        self._db_id = id
        self._db_key = key
        self._db_value = value
        self._db_action_id = action_id
        self._db_date = date
        self._db_user = user
        # Freshly constructed objects are both unsaved and modified.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBMashupActionAnnotation.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy; with new_ids, allocate a fresh id and re-point
        action_id through id_remap if the mashup action was remapped."""
        dup = DBMashupActionAnnotation(id=self._db_id,
                                       key=self._db_key,
                                       value=self._db_value,
                                       action_id=self._db_action_id,
                                       date=self._db_date,
                                       user=self._db_user)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            remap_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(remap_type, self.db_id)] = fresh
            dup.db_id = fresh
            # Follow the foreign key to the (possibly re-numbered) action.
            if ('mashup_action', self._db_action_id) in id_remap:
                dup._db_action_id = id_remap[('mashup_action', self._db_action_id)]
        else:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into new_obj, applying any per-field
        converter registered in trans_dict for this class."""
        if new_obj is None:
            new_obj = DBMashupActionAnnotation()
        converters = trans_dict.get(new_obj.__class__.__name__, {})
        for field in ('id', 'key', 'value', 'action_id', 'date', 'user'):
            attr = 'db_' + field
            if field in converters:
                setattr(new_obj, attr, converters[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only "child" reported is the object itself.
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # No deletable sub-objects.
        return []

    def has_changes(self):
        return True if self.is_dirty else False

    # id field -------------------------------------------------------
    def _get_id(self):
        return self._db_id
    def _set_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    # key field ------------------------------------------------------
    def _get_key(self):
        return self._db_key
    def _set_key(self, key):
        self._db_key = key
        self.is_dirty = True
    db_key = property(_get_key, _set_key)
    def db_add_key(self, key):
        self._db_key = key
    def db_change_key(self, key):
        self._db_key = key
    def db_delete_key(self, key):
        self._db_key = None

    # value field ----------------------------------------------------
    def _get_value(self):
        return self._db_value
    def _set_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(_get_value, _set_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        self._db_value = None

    # action_id field ------------------------------------------------
    def _get_action_id(self):
        return self._db_action_id
    def _set_action_id(self, action_id):
        self._db_action_id = action_id
        self.is_dirty = True
    db_action_id = property(_get_action_id, _set_action_id)
    def db_add_action_id(self, action_id):
        self._db_action_id = action_id
    def db_change_action_id(self, action_id):
        self._db_action_id = action_id
    def db_delete_action_id(self, action_id):
        self._db_action_id = None

    # date field -----------------------------------------------------
    def _get_date(self):
        return self._db_date
    def _set_date(self, date):
        self._db_date = date
        self.is_dirty = True
    db_date = property(_get_date, _set_date)
    def db_add_date(self, date):
        self._db_date = date
    def db_change_date(self, date):
        self._db_date = date
    def db_delete_date(self, date):
        self._db_date = None

    # user field -----------------------------------------------------
    def _get_user(self):
        return self._db_user
    def _set_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(_get_user, _set_user)
    def db_add_user(self, user):
        self._db_user = user
    def db_change_user(self, user):
        self._db_user = user
    def db_delete_user(self, user):
        self._db_user = None

    def getPrimaryKey(self):
        return self._db_id
class DBOpmProcess(object):
    """Auto-generated persistence object for an OPM process.

    Fields: id, a single child 'value' object, and a non-keyed list of
    account references.  Deleted children are captured in
    db_deleted_value / db_deleted_accounts for DB-side removal.
    """
    vtType = 'opm_process'
    def __init__(self, id=None, value=None, accounts=None):
        self._db_id = id
        self.db_deleted_value = []
        self._db_value = value
        self.db_deleted_accounts = []
        # NOTE: the accounts list is adopted by reference, not copied.
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmProcess.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep-copies the value child and account references; with
        # new_ids, allocates a fresh id and records it in id_remap.
        cp = DBOpmProcess(id=self._db_id)
        if self._db_value is not None:
            cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Field-by-field schema translation; trans_dict may supply a
        # per-field converter keyed by the class name.  Children are
        # recursively translated via their own classes.
        if new_obj is None:
            new_obj = DBOpmProcess()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'value' in class_dict:
            res = class_dict['value'](old_obj, trans_dict)
            new_obj.db_value = res
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            obj = old_obj.db_value
            new_obj.db_add_value(DBOpmProcessValue.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
            for obj in old_obj.db_deleted_value:
                n_obj = DBOpmProcessValue.update_version(obj, trans_dict)
                new_obj.db_deleted_value.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Depth-first child enumeration; with orphan=True the children
        # are detached from this object after being collected.  NOTE:
        # orphaning accounts hits db_delete_account, which raises.
        children = []
        if self._db_value is not None:
            children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_value = None
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Returns (and optionally clears) the captured deleted children.
        children = []
        children.extend(self.db_deleted_value)
        children.extend(self.db_deleted_accounts)
        if remove:
            self.db_deleted_value = []
            self.db_deleted_accounts = []
        return children
    def has_changes(self):
        # Dirty if this object, the value child, or any account is dirty.
        if self.is_dirty:
            return True
        if self._db_value is not None and self._db_value.has_changes():
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        return False
    # id: property setter marks dirty; add/change do not; delete resets.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # value: single child object; deletion remembers the old child for
    # DB-side removal unless this object is still unpersisted (is_new).
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        if not self.is_new:
            self.db_deleted_value.append(self._db_value)
        self._db_value = None
    # accounts: non-keyed list.  NOTE generated quirks: db_change_account
    # APPENDS (it cannot match by key), and db_delete_account always
    # raises because non-keyed children cannot be deleted by value.
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        # Non-keyed collection: lookup by key is unsupported.
        return None
    def getPrimaryKey(self):
        return self._db_id
class DBDisabledPackages(object):
    """Auto-generated container for the startup packages a user disabled.

    Holds a non-keyed list of package objects plus a by-name index.
    Non-keyed children cannot be deleted by value (db_delete_package
    raises), and db_change_package appends rather than replacing.
    """

    vtType = 'disabled_packages'

    def __init__(self, packages=None):
        self.db_deleted_packages = []
        self.db_packages_name_index = {}
        # The incoming list is adopted by reference, not copied.
        self._db_packages = [] if packages is None else packages
        for pkg in self._db_packages:
            self.db_packages_name_index[pkg.db_name] = pkg
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBDisabledPackages.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy with deep-copied children; with new_ids,
        allocate a fresh id via id_scope (as generated)."""
        dup = DBDisabledPackages()
        if self._db_packages is None:
            dup._db_packages = []
        else:
            dup._db_packages = [pkg.do_copy(new_ids, id_scope, id_remap)
                                for pkg in self._db_packages]
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            remap_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(remap_type, self.db_id)] = fresh
            dup.db_id = fresh
        # Rebuild the by-name index over the copied children.
        dup.db_packages_name_index = dict((pkg.db_name, pkg)
                                          for pkg in dup._db_packages)
        if not new_ids:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into new_obj; children are recursively
        translated via DBStartupPackage."""
        if new_obj is None:
            new_obj = DBDisabledPackages()
        converters = trans_dict.get(new_obj.__class__.__name__, {})
        if 'packages' in converters:
            for pkg in converters['packages'](old_obj, trans_dict):
                new_obj.db_add_package(pkg)
        elif getattr(old_obj, 'db_packages', None) is not None:
            for pkg in old_obj.db_packages:
                new_obj.db_add_package(
                    DBStartupPackage.update_version(pkg, trans_dict))
        if hasattr(old_obj, 'db_deleted_packages') and hasattr(new_obj, 'db_deleted_packages'):
            for pkg in old_obj.db_deleted_packages:
                new_obj.db_deleted_packages.append(
                    DBStartupPackage.update_version(pkg, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Depth-first child enumeration; orphan=True would route through
        # db_delete_package, which raises for this non-keyed collection.
        children = []
        doomed = []
        for pkg in self.db_packages:
            children.extend(pkg.db_children((self.vtType, self.db_id),
                                            orphan, for_action))
            if orphan:
                doomed.append(pkg)
        for pkg in doomed:
            self.db_delete_package(pkg)
        children.append((self, parent[0], parent[1]))
        return children

    def db_deleted_children(self, remove=False):
        survivors = []
        survivors.extend(self.db_deleted_packages)
        if remove:
            self.db_deleted_packages = []
        return survivors

    def has_changes(self):
        if self.is_dirty:
            return True
        return any(pkg.has_changes() for pkg in self._db_packages)

    # packages collection -------------------------------------------
    def _get_packages(self):
        return self._db_packages
    def _set_packages(self, packages):
        # NOTE: replacing the whole list does NOT rebuild the name index.
        self._db_packages = packages
        self.is_dirty = True
    db_packages = property(_get_packages, _set_packages)
    def db_get_packages(self):
        return self._db_packages
    def db_add_package(self, package):
        self.is_dirty = True
        self._db_packages.append(package)
        self.db_packages_name_index[package.db_name] = package
    def db_change_package(self, package):
        # Generated quirk preserved: "change" on a non-keyed collection
        # appends and re-points the name index at the newcomer.
        self.is_dirty = True
        self._db_packages.append(package)
        self.db_packages_name_index[package.db_name] = package
    def db_delete_package(self, package):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_package(self, key):
        # Non-keyed collection: lookup by key is unsupported.
        return None
    def db_get_package_by_name(self, key):
        return self.db_packages_name_index[key]
    def db_has_package_with_name(self, key):
        return key in self.db_packages_name_index
class DBModuleExec(object):
vtType = 'module_exec'
# Visible portion of the auto-generated DBModuleExec class (the rest of
# the class continues beyond this excerpt): constructor and copy logic
# for one module-execution record with annotation and loop-exec children.
def __init__(self, id=None, ts_start=None, ts_end=None, cached=None, module_id=None, module_name=None, completed=None, error=None, machine_id=None, annotations=None, loop_execs=None):
    # Scalar execution-record fields (timestamps, module/machine refs,
    # cache/completion/error status).
    self._db_id = id
    self._db_ts_start = ts_start
    self._db_ts_end = ts_end
    self._db_cached = cached
    self._db_module_id = module_id
    self._db_module_name = module_name
    self._db_completed = completed
    self._db_error = error
    self._db_machine_id = machine_id
    # Child annotations, indexed by db_id; list adopted by reference.
    self.db_deleted_annotations = []
    self.db_annotations_id_index = {}
    if annotations is None:
        self._db_annotations = []
    else:
        self._db_annotations = annotations
    for v in self._db_annotations:
        self.db_annotations_id_index[v.db_id] = v
    # Child loop executions, indexed by db_id; list adopted by reference.
    self.db_deleted_loop_execs = []
    self.db_loop_execs_id_index = {}
    if loop_execs is None:
        self._db_loop_execs = []
    else:
        self._db_loop_execs = loop_execs
    for v in self._db_loop_execs:
        self.db_loop_execs_id_index[v.db_id] = v
    self.is_dirty = True
    self.is_new = True
def __copy__(self):
    return DBModuleExec.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
    # Deep-copies children; with new_ids, allocates a fresh id and
    # re-points module_id/machine_id through id_remap when remapped.
    cp = DBModuleExec(id=self._db_id,
                      ts_start=self._db_ts_start,
                      ts_end=self._db_ts_end,
                      cached=self._db_cached,
                      module_id=self._db_module_id,
                      module_name=self._db_module_name,
                      completed=self._db_completed,
                      error=self._db_error,
                      machine_id=self._db_machine_id)
    if self._db_annotations is None:
        cp._db_annotations = []
    else:
        cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
    if self._db_loop_execs is None:
        cp._db_loop_execs = []
    else:
        cp._db_loop_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_loop_execs]
    # set new ids
    if new_ids:
        new_id = id_scope.getNewId(self.vtType)
        if self.vtType in id_scope.remap:
            id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
        else:
            id_remap[(self.vtType, self.db_id)] = new_id
        cp.db_id = new_id
        if hasattr(self, 'db_module_id') and ('module', self._db_module_id) in id_remap:
            cp._db_module_id = id_remap[('module', self._db_module_id)]
        if hasattr(self, 'db_machine_id') and ('machine', self._db_machine_id) in id_remap:
            cp._db_machine_id = id_remap[('machine', self._db_machine_id)]
    # recreate indices and set flags
    cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
    cp.db_loop_execs_id_index = dict((v.db_id, v) for v in cp._db_loop_execs)
    if not new_ids:
        cp.is_dirty = self.is_dirty
        cp.is_new = self.is_new
    return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema version.

        trans_dict maps class names to dicts of per-field hook functions.
        For each field: if a hook exists it computes the new value;
        otherwise the old value is carried over (child objects are
        upgraded recursively via their own update_version).  Deleted-child
        tombstone lists are upgraded and carried over as well.
        """
        if new_obj is None:
            new_obj = DBModuleExec()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'ts_start' in class_dict:
            res = class_dict['ts_start'](old_obj, trans_dict)
            new_obj.db_ts_start = res
        elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
            new_obj.db_ts_start = old_obj.db_ts_start
        if 'ts_end' in class_dict:
            res = class_dict['ts_end'](old_obj, trans_dict)
            new_obj.db_ts_end = res
        elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
            new_obj.db_ts_end = old_obj.db_ts_end
        if 'cached' in class_dict:
            res = class_dict['cached'](old_obj, trans_dict)
            new_obj.db_cached = res
        elif hasattr(old_obj, 'db_cached') and old_obj.db_cached is not None:
            new_obj.db_cached = old_obj.db_cached
        if 'module_id' in class_dict:
            res = class_dict['module_id'](old_obj, trans_dict)
            new_obj.db_module_id = res
        elif hasattr(old_obj, 'db_module_id') and old_obj.db_module_id is not None:
            new_obj.db_module_id = old_obj.db_module_id
        if 'module_name' in class_dict:
            res = class_dict['module_name'](old_obj, trans_dict)
            new_obj.db_module_name = res
        elif hasattr(old_obj, 'db_module_name') and old_obj.db_module_name is not None:
            new_obj.db_module_name = old_obj.db_module_name
        if 'completed' in class_dict:
            res = class_dict['completed'](old_obj, trans_dict)
            new_obj.db_completed = res
        elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
            new_obj.db_completed = old_obj.db_completed
        if 'error' in class_dict:
            res = class_dict['error'](old_obj, trans_dict)
            new_obj.db_error = res
        elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None:
            new_obj.db_error = old_obj.db_error
        if 'machine_id' in class_dict:
            res = class_dict['machine_id'](old_obj, trans_dict)
            new_obj.db_machine_id = res
        elif hasattr(old_obj, 'db_machine_id') and old_obj.db_machine_id is not None:
            new_obj.db_machine_id = old_obj.db_machine_id
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        if 'loop_execs' in class_dict:
            res = class_dict['loop_execs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_loop_exec(obj)
        elif hasattr(old_obj, 'db_loop_execs') and old_obj.db_loop_execs is not None:
            for obj in old_obj.db_loop_execs:
                new_obj.db_add_loop_exec(DBLoopExec.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_loop_execs') and hasattr(new_obj, 'db_deleted_loop_execs'):
            for obj in old_obj.db_deleted_loop_execs:
                n_obj = DBLoopExec.update_version(obj, trans_dict)
                new_obj.db_deleted_loop_execs.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
children = []
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_loop_execs:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_loop_exec(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_loop_execs)
if remove:
self.db_deleted_annotations = []
self.db_deleted_loop_execs = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_loop_execs:
if child.has_changes():
return True
return False
    # ------------------------------------------------------------------
    # Generated accessor boilerplate.  For every scalar field X there is a
    # private getter/setter pair wired to a db_X property (the setter marks
    # the object dirty), plus db_add_X / db_change_X (plain assignment) and
    # db_delete_X (sets None; the argument is ignored).
    # ------------------------------------------------------------------

    # --- 'id' (primary key) ---
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # --- 'ts_start' ---
    def __get_db_ts_start(self):
        return self._db_ts_start
    def __set_db_ts_start(self, ts_start):
        self._db_ts_start = ts_start
        self.is_dirty = True
    db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
    def db_add_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_change_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_delete_ts_start(self, ts_start):
        self._db_ts_start = None
    # --- 'ts_end' ---
    def __get_db_ts_end(self):
        return self._db_ts_end
    def __set_db_ts_end(self, ts_end):
        self._db_ts_end = ts_end
        self.is_dirty = True
    db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
    def db_add_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_change_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_delete_ts_end(self, ts_end):
        self._db_ts_end = None
    # --- 'cached' ---
    def __get_db_cached(self):
        return self._db_cached
    def __set_db_cached(self, cached):
        self._db_cached = cached
        self.is_dirty = True
    db_cached = property(__get_db_cached, __set_db_cached)
    def db_add_cached(self, cached):
        self._db_cached = cached
    def db_change_cached(self, cached):
        self._db_cached = cached
    def db_delete_cached(self, cached):
        self._db_cached = None
    # --- 'module_id' (foreign key to a module) ---
    def __get_db_module_id(self):
        return self._db_module_id
    def __set_db_module_id(self, module_id):
        self._db_module_id = module_id
        self.is_dirty = True
    db_module_id = property(__get_db_module_id, __set_db_module_id)
    def db_add_module_id(self, module_id):
        self._db_module_id = module_id
    def db_change_module_id(self, module_id):
        self._db_module_id = module_id
    def db_delete_module_id(self, module_id):
        self._db_module_id = None
    # --- 'module_name' ---
    def __get_db_module_name(self):
        return self._db_module_name
    def __set_db_module_name(self, module_name):
        self._db_module_name = module_name
        self.is_dirty = True
    db_module_name = property(__get_db_module_name, __set_db_module_name)
    def db_add_module_name(self, module_name):
        self._db_module_name = module_name
    def db_change_module_name(self, module_name):
        self._db_module_name = module_name
    def db_delete_module_name(self, module_name):
        self._db_module_name = None
    # --- 'completed' ---
    def __get_db_completed(self):
        return self._db_completed
    def __set_db_completed(self, completed):
        self._db_completed = completed
        self.is_dirty = True
    db_completed = property(__get_db_completed, __set_db_completed)
    def db_add_completed(self, completed):
        self._db_completed = completed
    def db_change_completed(self, completed):
        self._db_completed = completed
    def db_delete_completed(self, completed):
        self._db_completed = None
    # --- 'error' ---
    def __get_db_error(self):
        return self._db_error
    def __set_db_error(self, error):
        self._db_error = error
        self.is_dirty = True
    db_error = property(__get_db_error, __set_db_error)
    def db_add_error(self, error):
        self._db_error = error
    def db_change_error(self, error):
        self._db_error = error
    def db_delete_error(self, error):
        self._db_error = None
    # --- 'machine_id' (foreign key to a machine) ---
    def __get_db_machine_id(self):
        return self._db_machine_id
    def __set_db_machine_id(self, machine_id):
        self._db_machine_id = machine_id
        self.is_dirty = True
    db_machine_id = property(__get_db_machine_id, __set_db_machine_id)
    def db_add_machine_id(self, machine_id):
        self._db_machine_id = machine_id
    def db_change_machine_id(self, machine_id):
        self._db_machine_id = machine_id
    def db_delete_machine_id(self, machine_id):
        self._db_machine_id = None
    # ------------------------------------------------------------------
    # Keyed child collections: a list plus an id->object index kept in
    # sync by add/change/delete.  Lookups by db_get_X scan the list
    # linearly; *_by_id uses the index and raises KeyError when missing.
    # ------------------------------------------------------------------
    # --- 'annotations' collection ---
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_change_annotation(self, annotation):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_delete_annotation(self, annotation):
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                # only previously-persisted objects get a tombstone
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        # NOTE(review): raises KeyError if the annotation was never indexed
        del self.db_annotations_id_index[annotation.db_id]
    def db_get_annotation(self, key):
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    # --- 'loop_execs' collection ---
    def __get_db_loop_execs(self):
        return self._db_loop_execs
    def __set_db_loop_execs(self, loop_execs):
        self._db_loop_execs = loop_execs
        self.is_dirty = True
    db_loop_execs = property(__get_db_loop_execs, __set_db_loop_execs)
    def db_get_loop_execs(self):
        return self._db_loop_execs
    def db_add_loop_exec(self, loop_exec):
        self.is_dirty = True
        self._db_loop_execs.append(loop_exec)
        self.db_loop_execs_id_index[loop_exec.db_id] = loop_exec
    def db_change_loop_exec(self, loop_exec):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_loop_execs)):
            if self._db_loop_execs[i].db_id == loop_exec.db_id:
                self._db_loop_execs[i] = loop_exec
                found = True
                break
        if not found:
            self._db_loop_execs.append(loop_exec)
        self.db_loop_execs_id_index[loop_exec.db_id] = loop_exec
    def db_delete_loop_exec(self, loop_exec):
        self.is_dirty = True
        for i in xrange(len(self._db_loop_execs)):
            if self._db_loop_execs[i].db_id == loop_exec.db_id:
                # only previously-persisted objects get a tombstone
                if not self._db_loop_execs[i].is_new:
                    self.db_deleted_loop_execs.append(self._db_loop_execs[i])
                del self._db_loop_execs[i]
                break
        # NOTE(review): raises KeyError if the loop_exec was never indexed
        del self.db_loop_execs_id_index[loop_exec.db_id]
    def db_get_loop_exec(self, key):
        for i in xrange(len(self._db_loop_execs)):
            if self._db_loop_execs[i].db_id == key:
                return self._db_loop_execs[i]
        return None
    def db_get_loop_exec_by_id(self, key):
        return self.db_loop_execs_id_index[key]
    def db_has_loop_exec_with_id(self, key):
        return key in self.db_loop_execs_id_index
    def getPrimaryKey(self):
        """Return the primary-key value (the 'id' field) of this object."""
        return self._db_id
class DBProvAssociation(object):
    """PROV 'wasAssociatedWith' record generated by auto_dao.py.

    Holds three optional reference children (prov_activity, prov_agent,
    prov_plan) and a scalar prov_role, following the DAO conventions used
    throughout this file: is_dirty / is_new flags, per-child tombstone
    lists, and db_add/db_change/db_delete accessors around properties.
    """
    vtType = 'prov_association'
    def __init__(self, prov_activity=None, prov_agent=None, prov_plan=None, prov_role=None):
        self.db_deleted_prov_activity = []
        self._db_prov_activity = prov_activity
        self.db_deleted_prov_agent = []
        self._db_prov_agent = prov_agent
        self.db_deleted_prov_plan = []
        self._db_prov_plan = prov_plan
        self._db_prov_role = prov_role
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        """Hook for copy.copy(); delegates to do_copy() with default args."""
        return DBProvAssociation.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy; with new_ids, allocate a fresh id via id_scope
        and record the (type, old_id) -> new_id mapping in id_remap."""
        cp = DBProvAssociation(prov_role=self._db_prov_role)
        if self._db_prov_activity is not None:
            cp._db_prov_activity = self._db_prov_activity.do_copy(new_ids, id_scope, id_remap)
        if self._db_prov_agent is not None:
            cp._db_prov_agent = self._db_prov_agent.do_copy(new_ids, id_scope, id_remap)
        if self._db_prov_plan is not None:
            cp._db_prov_plan = self._db_prov_plan.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema version; trans_dict maps
        class names to per-field hook functions (hooks win over copying)."""
        if new_obj is None:
            new_obj = DBProvAssociation()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'prov_activity' in class_dict:
            res = class_dict['prov_activity'](old_obj, trans_dict)
            new_obj.db_prov_activity = res
        elif hasattr(old_obj, 'db_prov_activity') and old_obj.db_prov_activity is not None:
            obj = old_obj.db_prov_activity
            new_obj.db_add_prov_activity(DBRefProvActivity.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_activity') and hasattr(new_obj, 'db_deleted_prov_activity'):
            for obj in old_obj.db_deleted_prov_activity:
                n_obj = DBRefProvActivity.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_activity.append(n_obj)
        if 'prov_agent' in class_dict:
            res = class_dict['prov_agent'](old_obj, trans_dict)
            new_obj.db_prov_agent = res
        elif hasattr(old_obj, 'db_prov_agent') and old_obj.db_prov_agent is not None:
            obj = old_obj.db_prov_agent
            new_obj.db_add_prov_agent(DBRefProvAgent.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_agent') and hasattr(new_obj, 'db_deleted_prov_agent'):
            for obj in old_obj.db_deleted_prov_agent:
                n_obj = DBRefProvAgent.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_agent.append(n_obj)
        if 'prov_plan' in class_dict:
            res = class_dict['prov_plan'](old_obj, trans_dict)
            new_obj.db_prov_plan = res
        elif hasattr(old_obj, 'db_prov_plan') and old_obj.db_prov_plan is not None:
            obj = old_obj.db_prov_plan
            new_obj.db_add_prov_plan(DBRefProvPlan.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_plan') and hasattr(new_obj, 'db_deleted_prov_plan'):
            for obj in old_obj.db_deleted_prov_plan:
                n_obj = DBRefProvPlan.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_plan.append(n_obj)
        if 'prov_role' in class_dict:
            res = class_dict['prov_role'](old_obj, trans_dict)
            new_obj.db_prov_role = res
        elif hasattr(old_obj, 'db_prov_role') and old_obj.db_prov_role is not None:
            new_obj.db_prov_role = old_obj.db_prov_role
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples for self and the
        three reference children; with orphan, detach the children."""
        children = []
        if self._db_prov_activity is not None:
            children.extend(self._db_prov_activity.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_prov_activity = None
        if self._db_prov_agent is not None:
            children.extend(self._db_prov_agent.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_prov_agent = None
        if self._db_prov_plan is not None:
            children.extend(self._db_prov_plan.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_prov_plan = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return tombstoned children; with remove, also clear the lists."""
        children = []
        children.extend(self.db_deleted_prov_activity)
        children.extend(self.db_deleted_prov_agent)
        children.extend(self.db_deleted_prov_plan)
        if remove:
            self.db_deleted_prov_activity = []
            self.db_deleted_prov_agent = []
            self.db_deleted_prov_plan = []
        return children
    def has_changes(self):
        """True when this object or any reference child is dirty."""
        if self.is_dirty:
            return True
        if self._db_prov_activity is not None and self._db_prov_activity.has_changes():
            return True
        if self._db_prov_agent is not None and self._db_prov_agent.has_changes():
            return True
        if self._db_prov_plan is not None and self._db_prov_plan.has_changes():
            return True
        return False
    # --- 'prov_activity' child accessors ---
    def __get_db_prov_activity(self):
        return self._db_prov_activity
    def __set_db_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
        self.is_dirty = True
    db_prov_activity = property(__get_db_prov_activity, __set_db_prov_activity)
    def db_add_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
    def db_change_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
    def db_delete_prov_activity(self, prov_activity):
        if not self.is_new:
            self.db_deleted_prov_activity.append(self._db_prov_activity)
        self._db_prov_activity = None
    # --- 'prov_agent' child accessors ---
    def __get_db_prov_agent(self):
        return self._db_prov_agent
    def __set_db_prov_agent(self, prov_agent):
        self._db_prov_agent = prov_agent
        self.is_dirty = True
    db_prov_agent = property(__get_db_prov_agent, __set_db_prov_agent)
    def db_add_prov_agent(self, prov_agent):
        self._db_prov_agent = prov_agent
    def db_change_prov_agent(self, prov_agent):
        self._db_prov_agent = prov_agent
    def db_delete_prov_agent(self, prov_agent):
        if not self.is_new:
            self.db_deleted_prov_agent.append(self._db_prov_agent)
        self._db_prov_agent = None
    # --- 'prov_plan' child accessors ---
    def __get_db_prov_plan(self):
        return self._db_prov_plan
    def __set_db_prov_plan(self, prov_plan):
        self._db_prov_plan = prov_plan
        self.is_dirty = True
    db_prov_plan = property(__get_db_prov_plan, __set_db_prov_plan)
    def db_add_prov_plan(self, prov_plan):
        self._db_prov_plan = prov_plan
    def db_change_prov_plan(self, prov_plan):
        self._db_prov_plan = prov_plan
    def db_delete_prov_plan(self, prov_plan):
        if not self.is_new:
            self.db_deleted_prov_plan.append(self._db_prov_plan)
        self._db_prov_plan = None
    # --- 'prov_role' scalar accessors ---
    def __get_db_prov_role(self):
        return self._db_prov_role
    def __set_db_prov_role(self, prov_role):
        self._db_prov_role = prov_role
        self.is_dirty = True
    db_prov_role = property(__get_db_prov_role, __set_db_prov_role)
    def db_add_prov_role(self, prov_role):
        self._db_prov_role = prov_role
    def db_change_prov_role(self, prov_role):
        self._db_prov_role = prov_role
    def db_delete_prov_role(self, prov_role):
        self._db_prov_role = None
class DBOpmProcessValue(object):
    """OPM (Open Provenance Model) process value generated by auto_dao.py.

    Wraps a single execution record (module_exec / group_exec / loop_exec)
    as its 'value' child, following the DAO conventions of this file:
    is_dirty / is_new flags, a db_deleted_value tombstone list, and
    db_add/db_change/db_delete accessors around the db_value property.
    """
    vtType = 'opm_process_value'

    def __init__(self, value=None):
        self.db_deleted_value = []
        self._db_value = value
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        """Hook for copy.copy(); delegates to do_copy() with default args."""
        return DBOpmProcessValue.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object; with new_ids, allocate a fresh id and record
        the (type, old_id) -> new_id pair in id_remap."""
        dup = DBOpmProcessValue()
        if self._db_value is not None:
            dup._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
        if new_ids:
            fresh_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                remap_key = id_scope.remap[self.vtType]
            else:
                remap_key = self.vtType
            id_remap[(remap_key, self.db_id)] = fresh_id
            dup.db_id = fresh_id
        else:
            # only a plain copy carries the dirty/new flags over
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema version; trans_dict maps
        class names to per-field hook functions (hooks win over copying)."""
        if new_obj is None:
            new_obj = DBOpmProcessValue()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]

        def _upgrade(child):
            # dispatch on the child's vtType; unknown types are skipped
            table = {'module_exec': DBModuleExec,
                     'group_exec': DBGroupExec,
                     'loop_exec': DBLoopExec}
            klass = table.get(child.vtType)
            if klass is None:
                return None
            return klass.update_version(child, trans_dict)

        if 'value' in class_dict:
            new_obj.db_value = class_dict['value'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            upgraded = _upgrade(old_obj.db_value)
            if upgraded is not None:
                new_obj.db_add_value(upgraded)
        if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
            for child in old_obj.db_deleted_value:
                upgraded = _upgrade(child)
                if upgraded is not None:
                    new_obj.db_deleted_value.append(upgraded)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples for self and the
        value child; with orphan, detach the child afterwards."""
        kids = []
        if self._db_value is not None:
            kids.extend(self._db_value.db_children((self.vtType, self.db_id),
                                                   orphan, for_action))
            if orphan:
                self._db_value = None
        kids.append((self, parent[0], parent[1]))
        return kids

    def db_deleted_children(self, remove=False):
        """Return tombstoned children; with remove, also clear the list."""
        tombstones = list(self.db_deleted_value)
        if remove:
            self.db_deleted_value = []
        return tombstones

    def has_changes(self):
        """True when this object or its value child is dirty."""
        if self.is_dirty:
            return True
        return self._db_value is not None and self._db_value.has_changes()

    def _get_value(self):
        return self._db_value
    def _set_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(_get_value, _set_value)

    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        # keep a tombstone only for objects that were already persisted
        if not self.is_new:
            self.db_deleted_value.append(self._db_value)
        self._db_value = None
# NOTE(review): a stray '|' (file-concatenation artifact, invalid Python) was here
###############################################################################
##
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: <EMAIL>
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the University of Utah nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""generated automatically by auto_dao.py"""
import copy
class DBOpmWasGeneratedBy(object):
    """OPM 'wasGeneratedBy' dependency record generated by auto_dao.py.

    Links an effect (artifact id) to a cause (process id) with a role,
    plus non-keyed account and opm_time child lists.  Follows the DAO
    conventions of this file: is_dirty / is_new flags, per-child
    tombstone lists, and db_add/db_change/db_delete accessors.
    """
    vtType = 'opm_was_generated_by'
    def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None):
        self.db_deleted_effect = []
        self._db_effect = effect
        self.db_deleted_role = []
        self._db_role = role
        self.db_deleted_cause = []
        self._db_cause = cause
        self.db_deleted_accounts = []
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        self.db_deleted_opm_times = []
        if opm_times is None:
            self._db_opm_times = []
        else:
            self._db_opm_times = opm_times
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        """Hook for copy.copy(); delegates to do_copy() with default args."""
        return DBOpmWasGeneratedBy.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy (children copied recursively); with new_ids,
        allocate a fresh id and record the mapping in id_remap."""
        cp = DBOpmWasGeneratedBy()
        if self._db_effect is not None:
            cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
        if self._db_role is not None:
            cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
        if self._db_cause is not None:
            cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        if self._db_opm_times is None:
            cp._db_opm_times = []
        else:
            cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema version; trans_dict maps
        class names to per-field hook functions (hooks win over copying)."""
        if new_obj is None:
            new_obj = DBOpmWasGeneratedBy()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'effect' in class_dict:
            res = class_dict['effect'](old_obj, trans_dict)
            new_obj.db_effect = res
        elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
            obj = old_obj.db_effect
            new_obj.db_add_effect(DBOpmArtifactIdEffect.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
            for obj in old_obj.db_deleted_effect:
                n_obj = DBOpmArtifactIdEffect.update_version(obj, trans_dict)
                new_obj.db_deleted_effect.append(n_obj)
        if 'role' in class_dict:
            res = class_dict['role'](old_obj, trans_dict)
            new_obj.db_role = res
        elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
            obj = old_obj.db_role
            new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
            for obj in old_obj.db_deleted_role:
                n_obj = DBOpmRole.update_version(obj, trans_dict)
                new_obj.db_deleted_role.append(n_obj)
        if 'cause' in class_dict:
            res = class_dict['cause'](old_obj, trans_dict)
            new_obj.db_cause = res
        elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
            obj = old_obj.db_cause
            new_obj.db_add_cause(DBOpmProcessIdCause.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
            for obj in old_obj.db_deleted_cause:
                n_obj = DBOpmProcessIdCause.update_version(obj, trans_dict)
                new_obj.db_deleted_cause.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'opm_times' in class_dict:
            res = class_dict['opm_times'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_opm_time(obj)
        elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None:
            for obj in old_obj.db_opm_times:
                new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'):
            for obj in old_obj.db_deleted_opm_times:
                n_obj = DBOpmTime.update_version(obj, trans_dict)
                new_obj.db_deleted_opm_times.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples for self and all
        children; with orphan, detach the children afterwards."""
        children = []
        if self._db_effect is not None:
            children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_effect = None
        if self._db_role is not None:
            children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_role = None
        if self._db_cause is not None:
            children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_cause = None
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        to_del = []
        for child in self.db_opm_times:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_opm_time(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return tombstoned children; with remove, also clear the lists."""
        children = []
        children.extend(self.db_deleted_effect)
        children.extend(self.db_deleted_role)
        children.extend(self.db_deleted_cause)
        children.extend(self.db_deleted_accounts)
        children.extend(self.db_deleted_opm_times)
        if remove:
            self.db_deleted_effect = []
            self.db_deleted_role = []
            self.db_deleted_cause = []
            self.db_deleted_accounts = []
            self.db_deleted_opm_times = []
        return children
    def has_changes(self):
        """True when this object or any child is dirty."""
        if self.is_dirty:
            return True
        if self._db_effect is not None and self._db_effect.has_changes():
            return True
        if self._db_role is not None and self._db_role.has_changes():
            return True
        if self._db_cause is not None and self._db_cause.has_changes():
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        for child in self._db_opm_times:
            if child.has_changes():
                return True
        return False
    # --- 'effect' child accessors ---
    def __get_db_effect(self):
        return self._db_effect
    def __set_db_effect(self, effect):
        self._db_effect = effect
        self.is_dirty = True
    db_effect = property(__get_db_effect, __set_db_effect)
    def db_add_effect(self, effect):
        self._db_effect = effect
    def db_change_effect(self, effect):
        self._db_effect = effect
    def db_delete_effect(self, effect):
        if not self.is_new:
            self.db_deleted_effect.append(self._db_effect)
        self._db_effect = None
    # --- 'role' child accessors ---
    def __get_db_role(self):
        return self._db_role
    def __set_db_role(self, role):
        self._db_role = role
        self.is_dirty = True
    db_role = property(__get_db_role, __set_db_role)
    def db_add_role(self, role):
        self._db_role = role
    def db_change_role(self, role):
        self._db_role = role
    def db_delete_role(self, role):
        if not self.is_new:
            self.db_deleted_role.append(self._db_role)
        self._db_role = None
    # --- 'cause' child accessors ---
    def __get_db_cause(self):
        return self._db_cause
    def __set_db_cause(self, cause):
        self._db_cause = cause
        self.is_dirty = True
    db_cause = property(__get_db_cause, __set_db_cause)
    def db_add_cause(self, cause):
        self._db_cause = cause
    def db_change_cause(self, cause):
        self._db_cause = cause
    def db_delete_cause(self, cause):
        if not self.is_new:
            self.db_deleted_cause.append(self._db_cause)
        self._db_cause = None
    # --- 'accounts' (non-keyed collection: change appends, delete raises) ---
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        # non-keyed children cannot be matched for replacement; the
        # generator's convention here is append (same as db_add_account)
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        # non-keyed children are not searchable by key
        return None
    # --- 'opm_times' (non-keyed collection: change appends, delete raises) ---
    def __get_db_opm_times(self):
        return self._db_opm_times
    def __set_db_opm_times(self, opm_times):
        self._db_opm_times = opm_times
        self.is_dirty = True
    db_opm_times = property(__get_db_opm_times, __set_db_opm_times)
    def db_get_opm_times(self):
        return self._db_opm_times
    def db_add_opm_time(self, opm_time):
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_change_opm_time(self, opm_time):
        # non-keyed children cannot be matched for replacement; the
        # generator's convention here is append (same as db_add_opm_time)
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_delete_opm_time(self, opm_time):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_opm_time(self, key):
        # non-keyed children are not searchable by key
        return None
class DBConfigKey(object):
vtType = 'config_key'
    def __init__(self, value=None, name=None):
        """Build a config key with an optional child 'value' object and a
        scalar 'name'; new objects start flagged dirty and new."""
        self.db_deleted_value = []
        self._db_value = value
        self._db_name = name
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        """Hook for copy.copy(); delegates to do_copy() with default args."""
        return DBConfigKey.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBConfigKey(name=self._db_name)
if self._db_value is not None:
cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBConfigKey()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'value' in class_dict:
res = class_dict['value'](old_obj, trans_dict)
new_obj.db_value = res
elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
obj = old_obj.db_value
if obj.vtType == 'config_str':
new_obj.db_add_value(DBConfigStr.update_version(obj, trans_dict))
elif obj.vtType == 'config_int':
new_obj.db_add_value(DBConfigInt.update_version(obj, trans_dict))
elif obj.vtType == 'config_float':
new_obj.db_add_value(DBConfigFloat.update_version(obj, trans_dict))
elif obj.vtType == 'config_bool':
new_obj.db_add_value(DBConfigBool.update_version(obj, trans_dict))
elif obj.vtType == 'configuration':
new_obj.db_add_value(DBConfiguration.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
for obj in old_obj.db_deleted_value:
if obj.vtType == 'config_str':
n_obj = DBConfigStr.update_version(obj, trans_dict)
new_obj.db_deleted_value.append(n_obj)
elif obj.vtType == 'config_int':
n_obj = DBConfigInt.update_version(obj, trans_dict)
new_obj.db_deleted_value.append(n_obj)
elif obj.vtType == 'config_float':
n_obj = DBConfigFloat.update_version(obj, trans_dict)
new_obj.db_deleted_value.append(n_obj)
elif obj.vtType == 'config_bool':
n_obj = DBConfigBool.update_version(obj, trans_dict)
new_obj.db_deleted_value.append(n_obj)
elif obj.vtType == 'configuration':
n_obj = DBConfiguration.update_version(obj, trans_dict)
new_obj.db_deleted_value.append(n_obj)
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
children = []
if self._db_value is not None:
children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
self._db_value = None
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_value)
if remove:
self.db_deleted_value = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_value is not None and self._db_value.has_changes():
return True
return False
def __get_db_value(self):
return self._db_value
def __set_db_value(self, value):
self._db_value = value
self.is_dirty = True
db_value = property(__get_db_value, __set_db_value)
def db_add_value(self, value):
self._db_value = value
def db_change_value(self, value):
self._db_value = value
def db_delete_value(self, value):
if not self.is_new:
self.db_deleted_value.append(self._db_value)
self._db_value = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
class DBMashupAlias(object):
    """A named mashup alias, optionally bound to a DBMashupComponent."""
    vtType = 'mashup_alias'
    def __init__(self, id=None, name=None, component=None):
        self._db_id = id
        self._db_name = name
        self.db_deleted_component = []
        self._db_component = component
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBMashupAlias.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Duplicate this alias; the component child is deep-copied."""
        dup = DBMashupAlias(id=self._db_id, name=self._db_name)
        if self._db_component is not None:
            dup._db_component = self._db_component.do_copy(new_ids, id_scope,
                                                           id_remap)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            key_type = (id_scope.remap[self.vtType]
                        if self.vtType in id_scope.remap else self.vtType)
            id_remap[(key_type, self.db_id)] = fresh
            dup.db_id = fresh
        else:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* across a schema version using *trans_dict*."""
        if new_obj is None:
            new_obj = DBMashupAlias()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        # Plain scalar fields copy straight across (or via a translator).
        for field in ('id', 'name'):
            if field in class_dict:
                setattr(new_obj, 'db_' + field,
                        class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, 'db_' + field, None) is not None:
                setattr(new_obj, 'db_' + field, getattr(old_obj, 'db_' + field))
        if 'component' in class_dict:
            new_obj.db_component = class_dict['component'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_component', None) is not None:
            new_obj.db_add_component(
                DBMashupComponent.update_version(old_obj.db_component,
                                                 trans_dict))
        if hasattr(old_obj, 'db_deleted_component') and hasattr(new_obj, 'db_deleted_component'):
            for obj in old_obj.db_deleted_component:
                new_obj.db_deleted_component.append(
                    DBMashupComponent.update_version(obj, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples, self last."""
        result = []
        if self._db_component is not None:
            result.extend(self._db_component.db_children(
                (self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_component = None
        result.append((self, parent[0], parent[1]))
        return result
    def db_deleted_children(self, remove=False):
        gone = list(self.db_deleted_component)
        if remove:
            self.db_deleted_component = []
        return gone
    def has_changes(self):
        if self.is_dirty:
            return True
        return (self._db_component is not None
                and self._db_component.has_changes())
    # --- 'id' attribute accessors -----------------------------------------
    def _get_id(self):
        return self._db_id
    def _set_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # --- 'name' attribute accessors ---------------------------------------
    def _get_name(self):
        return self._db_name
    def _set_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(_get_name, _set_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    # --- 'component' child accessors --------------------------------------
    def _get_component(self):
        return self._db_component
    def _set_component(self, component):
        self._db_component = component
        self.is_dirty = True
    db_component = property(_get_component, _set_component)
    def db_add_component(self, component):
        self._db_component = component
    def db_change_component(self, component):
        self._db_component = component
    def db_delete_component(self, component):
        # Remember the old child for DB-level deletion unless never saved.
        if not self.is_new:
            self.db_deleted_component.append(self._db_component)
        self._db_component = None
    def getPrimaryKey(self):
        return self._db_id
class DBGroup(object):
    """Domain object for a 'group' (sub-workflow) module.

    Holds an id, an embedded workflow, module metadata (cache, name,
    namespace, package, version), a location child, and two keyed child
    collections -- 'functions' (indexed by db_id) and 'annotations'
    (indexed by db_id and db_key). ``is_dirty``/``is_new`` track
    persistence state.

    Fix vs. the generated original: the Python-2-only
    ``for i in xrange(len(...))`` scans were replaced with
    ``enumerate``/direct iteration -- identical behavior on Python 2,
    and also valid on Python 3.
    """
    vtType = 'group'
    def __init__(self, id=None, workflow=None, cache=None, name=None, namespace=None, package=None, version=None, location=None, functions=None, annotations=None):
        self._db_id = id
        self.db_deleted_workflow = []
        self._db_workflow = workflow
        self._db_cache = cache
        self._db_name = name
        self._db_namespace = namespace
        self._db_package = package
        self._db_version = version
        self.db_deleted_location = []
        self._db_location = location
        self.db_deleted_functions = []
        self.db_functions_id_index = {}
        if functions is None:
            self._db_functions = []
        else:
            self._db_functions = functions
        for v in self._db_functions:
            self.db_functions_id_index[v.db_id] = v
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        self.db_annotations_key_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
        for v in self._db_annotations:
            self.db_annotations_id_index[v.db_id] = v
            self.db_annotations_key_index[v.db_key] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBGroup.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Duplicate this group; children are deep-copied and, when
        *new_ids* is set, re-identified via *id_scope*/*id_remap*.

        NOTE: the embedded workflow is copied WITHOUT id remapping
        (plain ``do_copy()``) -- it has its own id scope.
        """
        cp = DBGroup(id=self._db_id,
                     cache=self._db_cache,
                     name=self._db_name,
                     namespace=self._db_namespace,
                     package=self._db_package,
                     version=self._db_version)
        if self._db_workflow is not None:
            cp._db_workflow = self._db_workflow.do_copy()
        if self._db_location is not None:
            cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap)
        if self._db_functions is None:
            cp._db_functions = []
        else:
            cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* across a schema version using *trans_dict*.

        Each field is produced either by a per-class translator found in
        *trans_dict* or, failing that, copied/recursively updated from
        the old object. Deleted-child lists are carried over too.
        """
        if new_obj is None:
            new_obj = DBGroup()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'workflow' in class_dict:
            res = class_dict['workflow'](old_obj, trans_dict)
            new_obj.db_workflow = res
        elif hasattr(old_obj, 'db_workflow') and old_obj.db_workflow is not None:
            obj = old_obj.db_workflow
            new_obj.db_add_workflow(DBWorkflow.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_workflow') and hasattr(new_obj, 'db_deleted_workflow'):
            for obj in old_obj.db_deleted_workflow:
                n_obj = DBWorkflow.update_version(obj, trans_dict)
                new_obj.db_deleted_workflow.append(n_obj)
        if 'cache' in class_dict:
            res = class_dict['cache'](old_obj, trans_dict)
            new_obj.db_cache = res
        elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None:
            new_obj.db_cache = old_obj.db_cache
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'namespace' in class_dict:
            res = class_dict['namespace'](old_obj, trans_dict)
            new_obj.db_namespace = res
        elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
            new_obj.db_namespace = old_obj.db_namespace
        if 'package' in class_dict:
            res = class_dict['package'](old_obj, trans_dict)
            new_obj.db_package = res
        elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
            new_obj.db_package = old_obj.db_package
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'location' in class_dict:
            res = class_dict['location'](old_obj, trans_dict)
            new_obj.db_location = res
        elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None:
            obj = old_obj.db_location
            new_obj.db_add_location(DBLocation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'):
            for obj in old_obj.db_deleted_location:
                n_obj = DBLocation.update_version(obj, trans_dict)
                new_obj.db_deleted_location.append(n_obj)
        if 'functions' in class_dict:
            res = class_dict['functions'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_function(obj)
        elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None:
            for obj in old_obj.db_functions:
                new_obj.db_add_function(DBFunction.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
            for obj in old_obj.db_deleted_functions:
                n_obj = DBFunction.update_version(obj, trans_dict)
                new_obj.db_deleted_functions.append(n_obj)
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples, self last.

        With *orphan* set, children are detached from this object after
        collection. The embedded workflow is deliberately NOT traversed.
        """
        children = []
        if self._db_location is not None:
            children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_location = None
        to_del = []
        for child in self.db_functions:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_function(child)
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return (and optionally clear) all recorded deleted children."""
        children = []
        children.extend(self.db_deleted_workflow)
        children.extend(self.db_deleted_location)
        children.extend(self.db_deleted_functions)
        children.extend(self.db_deleted_annotations)
        if remove:
            self.db_deleted_workflow = []
            self.db_deleted_location = []
            self.db_deleted_functions = []
            self.db_deleted_annotations = []
        return children
    def has_changes(self):
        """True if this object or any (non-workflow-embedded) child is dirty."""
        if self.is_dirty:
            return True
        if self._db_workflow is not None and self._db_workflow.has_changes():
            return True
        if self._db_location is not None and self._db_location.has_changes():
            return True
        for child in self._db_functions:
            if child.has_changes():
                return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        return False
    # --- 'id' attribute accessors -----------------------------------------
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # --- 'workflow' child accessors ---------------------------------------
    def __get_db_workflow(self):
        return self._db_workflow
    def __set_db_workflow(self, workflow):
        self._db_workflow = workflow
        self.is_dirty = True
    db_workflow = property(__get_db_workflow, __set_db_workflow)
    def db_add_workflow(self, workflow):
        self._db_workflow = workflow
    def db_change_workflow(self, workflow):
        self._db_workflow = workflow
    def db_delete_workflow(self, workflow):
        # Remember the old child for DB-level deletion unless never saved.
        if not self.is_new:
            self.db_deleted_workflow.append(self._db_workflow)
        self._db_workflow = None
    # --- scalar attribute accessors (cache/name/namespace/package/version) -
    def __get_db_cache(self):
        return self._db_cache
    def __set_db_cache(self, cache):
        self._db_cache = cache
        self.is_dirty = True
    db_cache = property(__get_db_cache, __set_db_cache)
    def db_add_cache(self, cache):
        self._db_cache = cache
    def db_change_cache(self, cache):
        self._db_cache = cache
    def db_delete_cache(self, cache):
        self._db_cache = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_namespace(self):
        return self._db_namespace
    def __set_db_namespace(self, namespace):
        self._db_namespace = namespace
        self.is_dirty = True
    db_namespace = property(__get_db_namespace, __set_db_namespace)
    def db_add_namespace(self, namespace):
        self._db_namespace = namespace
    def db_change_namespace(self, namespace):
        self._db_namespace = namespace
    def db_delete_namespace(self, namespace):
        self._db_namespace = None
    def __get_db_package(self):
        return self._db_package
    def __set_db_package(self, package):
        self._db_package = package
        self.is_dirty = True
    db_package = property(__get_db_package, __set_db_package)
    def db_add_package(self, package):
        self._db_package = package
    def db_change_package(self, package):
        self._db_package = package
    def db_delete_package(self, package):
        self._db_package = None
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    # --- 'location' child accessors ---------------------------------------
    def __get_db_location(self):
        return self._db_location
    def __set_db_location(self, location):
        self._db_location = location
        self.is_dirty = True
    db_location = property(__get_db_location, __set_db_location)
    def db_add_location(self, location):
        self._db_location = location
    def db_change_location(self, location):
        self._db_location = location
    def db_delete_location(self, location):
        if not self.is_new:
            self.db_deleted_location.append(self._db_location)
        self._db_location = None
    # --- 'functions' keyed child-list accessors (indexed by db_id) --------
    def __get_db_functions(self):
        return self._db_functions
    def __set_db_functions(self, functions):
        self._db_functions = functions
        self.is_dirty = True
    db_functions = property(__get_db_functions, __set_db_functions)
    def db_get_functions(self):
        return self._db_functions
    def db_add_function(self, function):
        self.is_dirty = True
        self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_change_function(self, function):
        """Replace the entry with the same db_id, or append if absent."""
        self.is_dirty = True
        found = False
        for i, existing in enumerate(self._db_functions):
            if existing.db_id == function.db_id:
                self._db_functions[i] = function
                found = True
                break
        if not found:
            self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_delete_function(self, function):
        """Remove the entry with the same db_id; record it for DB deletion
        unless it was never saved. Raises KeyError if db_id is unknown."""
        self.is_dirty = True
        for i, existing in enumerate(self._db_functions):
            if existing.db_id == function.db_id:
                if not existing.is_new:
                    self.db_deleted_functions.append(existing)
                del self._db_functions[i]
                break
        del self.db_functions_id_index[function.db_id]
    def db_get_function(self, key):
        # Linear scan; db_get_function_by_id uses the index instead.
        for function in self._db_functions:
            if function.db_id == key:
                return function
        return None
    def db_get_function_by_id(self, key):
        return self.db_functions_id_index[key]
    def db_has_function_with_id(self, key):
        return key in self.db_functions_id_index
    # --- 'annotations' keyed child-list accessors (db_id and db_key) ------
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_change_annotation(self, annotation):
        """Replace the entry with the same db_id, or append if absent."""
        self.is_dirty = True
        found = False
        for i, existing in enumerate(self._db_annotations):
            if existing.db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_delete_annotation(self, annotation):
        """Remove the entry with the same db_id; record it for DB deletion
        unless it was never saved. Raises KeyError if db_id is unknown."""
        self.is_dirty = True
        for i, existing in enumerate(self._db_annotations):
            if existing.db_id == annotation.db_id:
                if not existing.is_new:
                    self.db_deleted_annotations.append(existing)
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
        del self.db_annotations_key_index[annotation.db_key]
    def db_get_annotation(self, key):
        # Linear scan; db_get_annotation_by_id uses the index instead.
        for annotation in self._db_annotations:
            if annotation.db_id == key:
                return annotation
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def db_get_annotation_by_key(self, key):
        return self.db_annotations_key_index[key]
    def db_has_annotation_with_key(self, key):
        return key in self.db_annotations_key_index
    def getPrimaryKey(self):
        return self._db_id
class DBOpmWasControlledBy(object):
    """OPM 'wasControlledBy' edge: effect process, role, cause agent,
    plus unkeyed account/start/end time lists."""
    vtType = 'opm_was_controlled_by'
    def __init__(self, effect=None, role=None, cause=None, accounts=None, starts=None, ends=None):
        self.db_deleted_effect = []
        self._db_effect = effect
        self.db_deleted_role = []
        self._db_role = role
        self.db_deleted_cause = []
        self._db_cause = cause
        self.db_deleted_accounts = []
        self._db_accounts = [] if accounts is None else accounts
        self.db_deleted_starts = []
        self._db_starts = [] if starts is None else starts
        self.db_deleted_ends = []
        self._db_ends = [] if ends is None else ends
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmWasControlledBy.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Duplicate this edge; all children are deep-copied."""
        dup = DBOpmWasControlledBy()
        for attr in ('_db_effect', '_db_role', '_db_cause'):
            child = getattr(self, attr)
            if child is not None:
                setattr(dup, attr, child.do_copy(new_ids, id_scope, id_remap))
        for attr in ('_db_accounts', '_db_starts', '_db_ends'):
            seq = getattr(self, attr)
            if seq is None:
                setattr(dup, attr, [])
            else:
                setattr(dup, attr,
                        [v.do_copy(new_ids, id_scope, id_remap) for v in seq])
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            key_type = (id_scope.remap[self.vtType]
                        if self.vtType in id_scope.remap else self.vtType)
            id_remap[(key_type, self.db_id)] = fresh
            dup.db_id = fresh
        else:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* across a schema version using *trans_dict*."""
        if new_obj is None:
            new_obj = DBOpmWasControlledBy()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        # Singleton children: (field name, updater class, adder method).
        for field, ctor, add in (
                ('effect', DBOpmProcessIdEffect, new_obj.db_add_effect),
                ('role', DBOpmRole, new_obj.db_add_role),
                ('cause', DBOpmAgentId, new_obj.db_add_cause)):
            if field in class_dict:
                setattr(new_obj, 'db_' + field,
                        class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, 'db_' + field, None) is not None:
                add(ctor.update_version(getattr(old_obj, 'db_' + field),
                                        trans_dict))
            deleted = 'db_deleted_' + field
            if hasattr(old_obj, deleted) and hasattr(new_obj, deleted):
                for obj in getattr(old_obj, deleted):
                    getattr(new_obj, deleted).append(
                        ctor.update_version(obj, trans_dict))
        # List-valued children, same pattern.
        for field, ctor, add in (
                ('accounts', DBOpmAccountId, new_obj.db_add_account),
                ('starts', DBOpmTime, new_obj.db_add_start),
                ('ends', DBOpmTime, new_obj.db_add_end)):
            if field in class_dict:
                for obj in class_dict[field](old_obj, trans_dict):
                    add(obj)
            elif getattr(old_obj, 'db_' + field, None) is not None:
                for obj in getattr(old_obj, 'db_' + field):
                    add(ctor.update_version(obj, trans_dict))
            deleted = 'db_deleted_' + field
            if hasattr(old_obj, deleted) and hasattr(new_obj, deleted):
                for obj in getattr(old_obj, deleted):
                    getattr(new_obj, deleted).append(
                        ctor.update_version(obj, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples, self last."""
        result = []
        for attr in ('_db_effect', '_db_role', '_db_cause'):
            child = getattr(self, attr)
            if child is not None:
                result.extend(child.db_children((self.vtType, self.db_id),
                                                orphan, for_action))
                if orphan:
                    setattr(self, attr, None)
        for seq, deleter in ((self.db_accounts, self.db_delete_account),
                             (self.db_starts, self.db_delete_start),
                             (self.db_ends, self.db_delete_end)):
            doomed = []
            for child in seq:
                result.extend(child.db_children((self.vtType, self.db_id),
                                                orphan, for_action))
                if orphan:
                    doomed.append(child)
            for child in doomed:
                deleter(child)
        result.append((self, parent[0], parent[1]))
        return result
    def db_deleted_children(self, remove=False):
        gone = (self.db_deleted_effect + self.db_deleted_role +
                self.db_deleted_cause + self.db_deleted_accounts +
                self.db_deleted_starts + self.db_deleted_ends)
        if remove:
            self.db_deleted_effect = []
            self.db_deleted_role = []
            self.db_deleted_cause = []
            self.db_deleted_accounts = []
            self.db_deleted_starts = []
            self.db_deleted_ends = []
        return gone
    def has_changes(self):
        if self.is_dirty:
            return True
        for child in (self._db_effect, self._db_role, self._db_cause):
            if child is not None and child.has_changes():
                return True
        for seq in (self._db_accounts, self._db_starts, self._db_ends):
            for child in seq:
                if child.has_changes():
                    return True
        return False
    # --- 'effect' child accessors -----------------------------------------
    def _get_effect(self):
        return self._db_effect
    def _set_effect(self, effect):
        self._db_effect = effect
        self.is_dirty = True
    db_effect = property(_get_effect, _set_effect)
    def db_add_effect(self, effect):
        self._db_effect = effect
    def db_change_effect(self, effect):
        self._db_effect = effect
    def db_delete_effect(self, effect):
        if not self.is_new:
            self.db_deleted_effect.append(self._db_effect)
        self._db_effect = None
    # --- 'role' child accessors -------------------------------------------
    def _get_role(self):
        return self._db_role
    def _set_role(self, role):
        self._db_role = role
        self.is_dirty = True
    db_role = property(_get_role, _set_role)
    def db_add_role(self, role):
        self._db_role = role
    def db_change_role(self, role):
        self._db_role = role
    def db_delete_role(self, role):
        if not self.is_new:
            self.db_deleted_role.append(self._db_role)
        self._db_role = None
    # --- 'cause' child accessors ------------------------------------------
    def _get_cause(self):
        return self._db_cause
    def _set_cause(self, cause):
        self._db_cause = cause
        self.is_dirty = True
    db_cause = property(_get_cause, _set_cause)
    def db_add_cause(self, cause):
        self._db_cause = cause
    def db_change_cause(self, cause):
        self._db_cause = cause
    def db_delete_cause(self, cause):
        if not self.is_new:
            self.db_deleted_cause.append(self._db_cause)
        self._db_cause = None
    # --- 'accounts' unkeyed child-list accessors --------------------------
    def _get_accounts(self):
        return self._db_accounts
    def _set_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(_get_accounts, _set_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        # "Change" appends for unkeyed children -- generated convention.
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        return None
    # --- 'starts' unkeyed child-list accessors ----------------------------
    def _get_starts(self):
        return self._db_starts
    def _set_starts(self, starts):
        self._db_starts = starts
        self.is_dirty = True
    db_starts = property(_get_starts, _set_starts)
    def db_get_starts(self):
        return self._db_starts
    def db_add_start(self, start):
        self.is_dirty = True
        self._db_starts.append(start)
    def db_change_start(self, start):
        self.is_dirty = True
        self._db_starts.append(start)
    def db_delete_start(self, start):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_start(self, key):
        return None
    # --- 'ends' unkeyed child-list accessors ------------------------------
    def _get_ends(self):
        return self._db_ends
    def _set_ends(self, ends):
        self._db_ends = ends
        self.is_dirty = True
    db_ends = property(_get_ends, _set_ends)
    def db_get_ends(self):
        return self._db_ends
    def db_add_end(self, end):
        self.is_dirty = True
        self._db_ends.append(end)
    def db_change_end(self, end):
        self.is_dirty = True
        self._db_ends.append(end)
    def db_delete_end(self, end):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_end(self, key):
        return None
class DBAdd(object):
    """Change-operation record: the addition of a single child object.

    NOTE(review): this class follows the auto-generated accessor pattern used
    throughout this file (``_db_*`` backing fields, ``db_*`` properties that
    set ``is_dirty``).  ``what`` holds the vtType of the added object,
    ``objectId`` its id, and (``parentObjType``, ``parentObjId``) appear to
    identify the parent it was added under -- confirm against callers.
    """
    vtType = 'add'
    def __init__(self, data=None, id=None, what=None, objectId=None, parentObjId=None, parentObjType=None):
        # Children removed via db_delete_data are retained here until flushed
        # by db_deleted_children(remove=True).
        self.db_deleted_data = []
        self._db_data = data
        self._db_id = id
        self._db_what = what
        self._db_objectId = objectId
        self._db_parentObjId = parentObjId
        self._db_parentObjType = parentObjType
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBAdd.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object and its ``data`` child.

        With ``new_ids`` true, a fresh id is drawn from ``id_scope`` and the
        (type, old id) -> new id mapping is recorded in ``id_remap``; the
        referenced object/parent ids are then rewritten if ``id_remap``
        already holds a translation for them.
        """
        cp = DBAdd(id=self._db_id,
                   what=self._db_what,
                   objectId=self._db_objectId,
                   parentObjId=self._db_parentObjId,
                   parentObjType=self._db_parentObjType)
        if self._db_data is not None:
            cp._db_data = self._db_data.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        if hasattr(self, 'db_objectId') and (self._db_what, self._db_objectId) in id_remap:
            cp._db_objectId = id_remap[(self._db_what, self._db_objectId)]
        if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap:
            cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate ``old_obj`` into this schema version.

        ``trans_dict`` may supply per-field converter callables keyed by
        class name then field name; fields without a converter are copied
        as-is.  The ``data`` child is dispatched on its ``vtType`` to the
        matching class's own ``update_version``.
        """
        if new_obj is None:
            new_obj = DBAdd()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'data' in class_dict:
            res = class_dict['data'](old_obj, trans_dict)
            new_obj.db_data = res
        elif hasattr(old_obj, 'db_data') and old_obj.db_data is not None:
            obj = old_obj.db_data
            if obj.vtType == 'module':
                new_obj.db_add_data(DBModule.update_version(obj, trans_dict))
            elif obj.vtType == 'location':
                new_obj.db_add_data(DBLocation.update_version(obj, trans_dict))
            elif obj.vtType == 'annotation':
                new_obj.db_add_data(DBAnnotation.update_version(obj, trans_dict))
            elif obj.vtType == 'function':
                new_obj.db_add_data(DBFunction.update_version(obj, trans_dict))
            elif obj.vtType == 'connection':
                new_obj.db_add_data(DBConnection.update_version(obj, trans_dict))
            elif obj.vtType == 'port':
                new_obj.db_add_data(DBPort.update_version(obj, trans_dict))
            elif obj.vtType == 'parameter':
                new_obj.db_add_data(DBParameter.update_version(obj, trans_dict))
            elif obj.vtType == 'portSpec':
                new_obj.db_add_data(DBPortSpec.update_version(obj, trans_dict))
            elif obj.vtType == 'abstraction':
                new_obj.db_add_data(DBAbstraction.update_version(obj, trans_dict))
            elif obj.vtType == 'group':
                new_obj.db_add_data(DBGroup.update_version(obj, trans_dict))
            elif obj.vtType == 'other':
                new_obj.db_add_data(DBOther.update_version(obj, trans_dict))
            elif obj.vtType == 'plugin_data':
                new_obj.db_add_data(DBPluginData.update_version(obj, trans_dict))
        # The deleted-children backlog is translated with the same dispatch.
        if hasattr(old_obj, 'db_deleted_data') and hasattr(new_obj, 'db_deleted_data'):
            for obj in old_obj.db_deleted_data:
                if obj.vtType == 'module':
                    n_obj = DBModule.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'location':
                    n_obj = DBLocation.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'annotation':
                    n_obj = DBAnnotation.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'function':
                    n_obj = DBFunction.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'connection':
                    n_obj = DBConnection.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'port':
                    n_obj = DBPort.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'parameter':
                    n_obj = DBParameter.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'portSpec':
                    n_obj = DBPortSpec.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'abstraction':
                    n_obj = DBAbstraction.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'group':
                    n_obj = DBGroup.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'other':
                    n_obj = DBOther.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
                elif obj.vtType == 'plugin_data':
                    n_obj = DBPluginData.update_version(obj, trans_dict)
                    new_obj.db_deleted_data.append(n_obj)
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'what' in class_dict:
            res = class_dict['what'](old_obj, trans_dict)
            new_obj.db_what = res
        elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None:
            new_obj.db_what = old_obj.db_what
        if 'objectId' in class_dict:
            res = class_dict['objectId'](old_obj, trans_dict)
            new_obj.db_objectId = res
        elif hasattr(old_obj, 'db_objectId') and old_obj.db_objectId is not None:
            new_obj.db_objectId = old_obj.db_objectId
        if 'parentObjId' in class_dict:
            res = class_dict['parentObjId'](old_obj, trans_dict)
            new_obj.db_parentObjId = res
        elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None:
            new_obj.db_parentObjId = old_obj.db_parentObjId
        if 'parentObjType' in class_dict:
            res = class_dict['parentObjType'](old_obj, trans_dict)
            new_obj.db_parentObjType = res
        elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None:
            new_obj.db_parentObjType = old_obj.db_parentObjType
        # Flags mirror the source object (property setters above may have
        # temporarily marked new_obj dirty; this restores the old state).
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return [(obj, parent_type, parent_id), ...], self last.

        With ``orphan`` true, the ``data`` reference is detached after its
        children are collected.
        """
        children = []
        if self._db_data is not None:
            children.extend(self._db_data.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_data = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return the deleted-children backlog; optionally clear it."""
        children = []
        children.extend(self.db_deleted_data)
        if remove:
            self.db_deleted_data = []
        return children
    def has_changes(self):
        """True if this object or its ``data`` child is dirty."""
        if self.is_dirty:
            return True
        if self._db_data is not None and self._db_data.has_changes():
            return True
        return False
    def __get_db_data(self):
        return self._db_data
    def __set_db_data(self, data):
        self._db_data = data
        self.is_dirty = True
    db_data = property(__get_db_data, __set_db_data)
    def db_add_data(self, data):
        # Overwrites without tracking the previous value (unlike delete).
        self._db_data = data
    def db_change_data(self, data):
        self._db_data = data
    def db_delete_data(self, data):
        # Persisted objects remember the removed child for later cleanup.
        if not self.is_new:
            self.db_deleted_data.append(self._db_data)
        self._db_data = None
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_what(self):
        return self._db_what
    def __set_db_what(self, what):
        self._db_what = what
        self.is_dirty = True
    db_what = property(__get_db_what, __set_db_what)
    def db_add_what(self, what):
        self._db_what = what
    def db_change_what(self, what):
        self._db_what = what
    def db_delete_what(self, what):
        self._db_what = None
    def __get_db_objectId(self):
        return self._db_objectId
    def __set_db_objectId(self, objectId):
        self._db_objectId = objectId
        self.is_dirty = True
    db_objectId = property(__get_db_objectId, __set_db_objectId)
    def db_add_objectId(self, objectId):
        self._db_objectId = objectId
    def db_change_objectId(self, objectId):
        self._db_objectId = objectId
    def db_delete_objectId(self, objectId):
        self._db_objectId = None
    def __get_db_parentObjId(self):
        return self._db_parentObjId
    def __set_db_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
        self.is_dirty = True
    db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId)
    def db_add_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
    def db_change_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
    def db_delete_parentObjId(self, parentObjId):
        self._db_parentObjId = None
    def __get_db_parentObjType(self):
        return self._db_parentObjType
    def __set_db_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
        self.is_dirty = True
    db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType)
    def db_add_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
    def db_change_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
    def db_delete_parentObjType(self, parentObjType):
        self._db_parentObjType = None
    def getPrimaryKey(self):
        """Return the primary key (``id``) used by the persistence layer."""
        return self._db_id
class DBProvGeneration(object):
    """PROV 'generation' record linking an entity to the activity that produced it.

    NOTE(review): follows the auto-generated accessor pattern of this file.
    ``prov_entity`` / ``prov_activity`` are reference children (DBRefProvEntity
    / DBRefProvActivity); ``prov_role`` is a plain value field.
    """
    vtType = 'prov_generation'
    def __init__(self, prov_entity=None, prov_activity=None, prov_role=None):
        # Removed children are retained until flushed by
        # db_deleted_children(remove=True).
        self.db_deleted_prov_entity = []
        self._db_prov_entity = prov_entity
        self.db_deleted_prov_activity = []
        self._db_prov_activity = prov_activity
        self._db_prov_role = prov_role
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBProvGeneration.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object and both reference children; with ``new_ids``,
        mint a fresh id via ``id_scope`` and record it in ``id_remap``."""
        cp = DBProvGeneration(prov_role=self._db_prov_role)
        if self._db_prov_entity is not None:
            cp._db_prov_entity = self._db_prov_entity.do_copy(new_ids, id_scope, id_remap)
        if self._db_prov_activity is not None:
            cp._db_prov_activity = self._db_prov_activity.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate ``old_obj`` into this schema version.

        ``trans_dict`` may supply per-field converter callables keyed by
        class name then field name; fields without a converter are copied
        as-is (children via their class's own ``update_version``).
        """
        if new_obj is None:
            new_obj = DBProvGeneration()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'prov_entity' in class_dict:
            res = class_dict['prov_entity'](old_obj, trans_dict)
            new_obj.db_prov_entity = res
        elif hasattr(old_obj, 'db_prov_entity') and old_obj.db_prov_entity is not None:
            obj = old_obj.db_prov_entity
            new_obj.db_add_prov_entity(DBRefProvEntity.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_entity') and hasattr(new_obj, 'db_deleted_prov_entity'):
            for obj in old_obj.db_deleted_prov_entity:
                n_obj = DBRefProvEntity.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_entity.append(n_obj)
        if 'prov_activity' in class_dict:
            res = class_dict['prov_activity'](old_obj, trans_dict)
            new_obj.db_prov_activity = res
        elif hasattr(old_obj, 'db_prov_activity') and old_obj.db_prov_activity is not None:
            obj = old_obj.db_prov_activity
            new_obj.db_add_prov_activity(DBRefProvActivity.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_activity') and hasattr(new_obj, 'db_deleted_prov_activity'):
            for obj in old_obj.db_deleted_prov_activity:
                n_obj = DBRefProvActivity.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_activity.append(n_obj)
        if 'prov_role' in class_dict:
            res = class_dict['prov_role'](old_obj, trans_dict)
            new_obj.db_prov_role = res
        elif hasattr(old_obj, 'db_prov_role') and old_obj.db_prov_role is not None:
            new_obj.db_prov_role = old_obj.db_prov_role
        # Restore the source object's flags (setters above marked dirty).
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return [(obj, parent_type, parent_id), ...], self last; with
        ``orphan`` true, children are detached after collection."""
        children = []
        if self._db_prov_entity is not None:
            children.extend(self._db_prov_entity.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_prov_entity = None
        if self._db_prov_activity is not None:
            children.extend(self._db_prov_activity.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_prov_activity = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return the deleted-children backlog; optionally clear it."""
        children = []
        children.extend(self.db_deleted_prov_entity)
        children.extend(self.db_deleted_prov_activity)
        if remove:
            self.db_deleted_prov_entity = []
            self.db_deleted_prov_activity = []
        return children
    def has_changes(self):
        """True if this object or either reference child is dirty."""
        if self.is_dirty:
            return True
        if self._db_prov_entity is not None and self._db_prov_entity.has_changes():
            return True
        if self._db_prov_activity is not None and self._db_prov_activity.has_changes():
            return True
        return False
    def __get_db_prov_entity(self):
        return self._db_prov_entity
    def __set_db_prov_entity(self, prov_entity):
        self._db_prov_entity = prov_entity
        self.is_dirty = True
    db_prov_entity = property(__get_db_prov_entity, __set_db_prov_entity)
    def db_add_prov_entity(self, prov_entity):
        self._db_prov_entity = prov_entity
    def db_change_prov_entity(self, prov_entity):
        self._db_prov_entity = prov_entity
    def db_delete_prov_entity(self, prov_entity):
        # Persisted objects remember the removed child for later cleanup.
        if not self.is_new:
            self.db_deleted_prov_entity.append(self._db_prov_entity)
        self._db_prov_entity = None
    def __get_db_prov_activity(self):
        return self._db_prov_activity
    def __set_db_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
        self.is_dirty = True
    db_prov_activity = property(__get_db_prov_activity, __set_db_prov_activity)
    def db_add_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
    def db_change_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
    def db_delete_prov_activity(self, prov_activity):
        if not self.is_new:
            self.db_deleted_prov_activity.append(self._db_prov_activity)
        self._db_prov_activity = None
    def __get_db_prov_role(self):
        return self._db_prov_role
    def __set_db_prov_role(self, prov_role):
        self._db_prov_role = prov_role
        self.is_dirty = True
    db_prov_role = property(__get_db_prov_role, __set_db_prov_role)
    def db_add_prov_role(self, prov_role):
        self._db_prov_role = prov_role
    def db_change_prov_role(self, prov_role):
        self._db_prov_role = prov_role
    def db_delete_prov_role(self, prov_role):
        self._db_prov_role = None
class DBOpmUsed(object):
    """OPM 'used' edge: a process (effect) used an artifact (cause) in a role.

    NOTE(review): follows the auto-generated accessor pattern of this file.
    ``effect``/``role``/``cause`` are single reference children; ``accounts``
    and ``opm_times`` are non-keyed child lists (append-only; no keyed lookup
    or delete).
    """
    vtType = 'opm_used'
    def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None):
        # Removed children are retained until flushed by
        # db_deleted_children(remove=True).
        self.db_deleted_effect = []
        self._db_effect = effect
        self.db_deleted_role = []
        self._db_role = role
        self.db_deleted_cause = []
        self._db_cause = cause
        self.db_deleted_accounts = []
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        self.db_deleted_opm_times = []
        if opm_times is None:
            self._db_opm_times = []
        else:
            self._db_opm_times = opm_times
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmUsed.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object and all children; with ``new_ids``, mint a fresh
        id via ``id_scope`` and record it in ``id_remap``."""
        cp = DBOpmUsed()
        if self._db_effect is not None:
            cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
        if self._db_role is not None:
            cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
        if self._db_cause is not None:
            cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        if self._db_opm_times is None:
            cp._db_opm_times = []
        else:
            cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate ``old_obj`` into this schema version.

        ``trans_dict`` may supply per-field converter callables keyed by
        class name then field name; fields without a converter are copied
        as-is (children via their class's own ``update_version``).
        """
        if new_obj is None:
            new_obj = DBOpmUsed()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'effect' in class_dict:
            res = class_dict['effect'](old_obj, trans_dict)
            new_obj.db_effect = res
        elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
            obj = old_obj.db_effect
            new_obj.db_add_effect(DBOpmProcessIdEffect.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
            for obj in old_obj.db_deleted_effect:
                n_obj = DBOpmProcessIdEffect.update_version(obj, trans_dict)
                new_obj.db_deleted_effect.append(n_obj)
        if 'role' in class_dict:
            res = class_dict['role'](old_obj, trans_dict)
            new_obj.db_role = res
        elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
            obj = old_obj.db_role
            new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
            for obj in old_obj.db_deleted_role:
                n_obj = DBOpmRole.update_version(obj, trans_dict)
                new_obj.db_deleted_role.append(n_obj)
        if 'cause' in class_dict:
            res = class_dict['cause'](old_obj, trans_dict)
            new_obj.db_cause = res
        elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
            obj = old_obj.db_cause
            new_obj.db_add_cause(DBOpmArtifactIdCause.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
            for obj in old_obj.db_deleted_cause:
                n_obj = DBOpmArtifactIdCause.update_version(obj, trans_dict)
                new_obj.db_deleted_cause.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'opm_times' in class_dict:
            res = class_dict['opm_times'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_opm_time(obj)
        elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None:
            for obj in old_obj.db_opm_times:
                new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'):
            for obj in old_obj.db_deleted_opm_times:
                n_obj = DBOpmTime.update_version(obj, trans_dict)
                new_obj.db_deleted_opm_times.append(n_obj)
        # Restore the source object's flags (setters above marked dirty).
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return [(obj, parent_type, parent_id), ...], self last; with
        ``orphan`` true, children are detached after collection."""
        children = []
        if self._db_effect is not None:
            children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_effect = None
        if self._db_role is not None:
            children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_role = None
        if self._db_cause is not None:
            children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_cause = None
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        to_del = []
        for child in self.db_opm_times:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_opm_time(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return the deleted-children backlog; optionally clear it."""
        children = []
        children.extend(self.db_deleted_effect)
        children.extend(self.db_deleted_role)
        children.extend(self.db_deleted_cause)
        children.extend(self.db_deleted_accounts)
        children.extend(self.db_deleted_opm_times)
        if remove:
            self.db_deleted_effect = []
            self.db_deleted_role = []
            self.db_deleted_cause = []
            self.db_deleted_accounts = []
            self.db_deleted_opm_times = []
        return children
    def has_changes(self):
        """True if this object or any child is dirty."""
        if self.is_dirty:
            return True
        if self._db_effect is not None and self._db_effect.has_changes():
            return True
        if self._db_role is not None and self._db_role.has_changes():
            return True
        if self._db_cause is not None and self._db_cause.has_changes():
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        for child in self._db_opm_times:
            if child.has_changes():
                return True
        return False
    def __get_db_effect(self):
        return self._db_effect
    def __set_db_effect(self, effect):
        self._db_effect = effect
        self.is_dirty = True
    db_effect = property(__get_db_effect, __set_db_effect)
    def db_add_effect(self, effect):
        self._db_effect = effect
    def db_change_effect(self, effect):
        self._db_effect = effect
    def db_delete_effect(self, effect):
        # Persisted objects remember the removed child for later cleanup.
        if not self.is_new:
            self.db_deleted_effect.append(self._db_effect)
        self._db_effect = None
    def __get_db_role(self):
        return self._db_role
    def __set_db_role(self, role):
        self._db_role = role
        self.is_dirty = True
    db_role = property(__get_db_role, __set_db_role)
    def db_add_role(self, role):
        self._db_role = role
    def db_change_role(self, role):
        self._db_role = role
    def db_delete_role(self, role):
        if not self.is_new:
            self.db_deleted_role.append(self._db_role)
        self._db_role = None
    def __get_db_cause(self):
        return self._db_cause
    def __set_db_cause(self, cause):
        self._db_cause = cause
        self.is_dirty = True
    db_cause = property(__get_db_cause, __set_db_cause)
    def db_add_cause(self, cause):
        self._db_cause = cause
    def db_change_cause(self, cause):
        self._db_cause = cause
    def db_delete_cause(self, cause):
        if not self.is_new:
            self.db_deleted_cause.append(self._db_cause)
        self._db_cause = None
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        # NOTE(review): identical to db_add_account -- accounts are non-keyed,
        # so there is no key by which an entry could be located and replaced.
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        # Keyed lookup is unsupported for non-keyed account entries.
        return None
    def __get_db_opm_times(self):
        return self._db_opm_times
    def __set_db_opm_times(self, opm_times):
        self._db_opm_times = opm_times
        self.is_dirty = True
    db_opm_times = property(__get_db_opm_times, __set_db_opm_times)
    def db_get_opm_times(self):
        return self._db_opm_times
    def db_add_opm_time(self, opm_time):
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_change_opm_time(self, opm_time):
        # NOTE(review): appends like db_add_opm_time; see db_change_account.
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_delete_opm_time(self, opm_time):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_opm_time(self, key):
        # Keyed lookup is unsupported for non-keyed opm_time entries.
        return None
class DBOpmArtifactIdCause(object):
    """Causal reference to an OPM artifact, held as a bare artifact id."""

    vtType = 'opm_artifact_id_cause'

    def __init__(self, id=None):
        self._db_id = id
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmArtifactIdCause.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this reference.

        With ``new_ids``, mint a replacement id from ``id_scope`` and record
        the translation in ``id_remap``.  If ``id_remap`` already maps
        ('opm_artifact', old id), the copied id is rewritten to the new one.
        """
        duplicate = DBOpmArtifactIdCause(id=self._db_id)
        if new_ids:
            minted = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                bucket = id_scope.remap[self.vtType]
            else:
                bucket = self.vtType
            id_remap[(bucket, self.db_id)] = minted
            duplicate.db_id = minted
        artifact_key = ('opm_artifact', self._db_id)
        if hasattr(self, 'db_id') and artifact_key in id_remap:
            duplicate._db_id = id_remap[artifact_key]
        if not new_ids:
            # Plain copies keep the original's dirty/new state.
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Carry ``id`` (or its translated value) from ``old_obj`` onto ``new_obj``."""
        if new_obj is None:
            new_obj = DBOpmArtifactIdCause()
        translators = {}
        if new_obj.__class__.__name__ in trans_dict:
            translators = trans_dict[new_obj.__class__.__name__]
        if 'id' in translators:
            new_obj.db_id = translators['id'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_id', None) is not None:
            new_obj.db_id = old_obj.db_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Leaf object: the child list is just this object itself."""
        parent_type, parent_id = parent
        return [(self, parent_type, parent_id)]

    def db_deleted_children(self, remove=False):
        """Leaf object: nothing is ever tracked as deleted."""
        return []

    def has_changes(self):
        """True when this object has been modified since load/save."""
        return bool(self.is_dirty)

    def _read_id(self):
        return self._db_id

    def _write_id(self, id):
        self._db_id = id
        self.is_dirty = True

    db_id = property(_read_id, _write_id)

    def db_add_id(self, id):
        self._db_id = id

    def db_change_id(self, id):
        self._db_id = id

    def db_delete_id(self, id):
        self._db_id = None
class DBRefProvEntity(object):
    """Reference to a PROV entity, held as the bare ``prov_ref`` id."""

    vtType = 'ref_prov_entity'

    def __init__(self, prov_ref=None):
        self._db_prov_ref = prov_ref
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBRefProvEntity.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this reference.

        With ``new_ids``, mint a replacement id from ``id_scope`` and record
        the translation in ``id_remap``.  If ``id_remap`` already maps
        ('prov_entity', old ref), the copied ref is rewritten to the new one.
        """
        duplicate = DBRefProvEntity(prov_ref=self._db_prov_ref)
        if new_ids:
            minted = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                bucket = id_scope.remap[self.vtType]
            else:
                bucket = self.vtType
            id_remap[(bucket, self.db_id)] = minted
            duplicate.db_id = minted
        lookup = ('prov_entity', self._db_prov_ref)
        if hasattr(self, 'db_prov_ref') and lookup in id_remap:
            duplicate._db_prov_ref = id_remap[lookup]
        if not new_ids:
            # Plain copies keep the original's dirty/new state.
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Carry ``prov_ref`` (or its translated value) onto ``new_obj``."""
        if new_obj is None:
            new_obj = DBRefProvEntity()
        translators = {}
        if new_obj.__class__.__name__ in trans_dict:
            translators = trans_dict[new_obj.__class__.__name__]
        if 'prov_ref' in translators:
            new_obj.db_prov_ref = translators['prov_ref'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_prov_ref', None) is not None:
            new_obj.db_prov_ref = old_obj.db_prov_ref
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Leaf object: the child list is just this object itself."""
        parent_type, parent_id = parent
        return [(self, parent_type, parent_id)]

    def db_deleted_children(self, remove=False):
        """Leaf object: nothing is ever tracked as deleted."""
        return []

    def has_changes(self):
        """True when this object has been modified since load/save."""
        return bool(self.is_dirty)

    def _read_prov_ref(self):
        return self._db_prov_ref

    def _write_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
        self.is_dirty = True

    db_prov_ref = property(_read_prov_ref, _write_prov_ref)

    def db_add_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref

    def db_change_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref

    def db_delete_prov_ref(self, prov_ref):
        self._db_prov_ref = None
class DBVtConnection(object):
    """Flat record of a connection between two endpoints.

    All fields are plain values: ``id`` plus source/dest module, port and
    signature strings.  No child objects, so copy/children logic is trivial.
    """

    vtType = 'vt_connection'

    # Field order matters: update_version processes fields in this sequence.
    _FIELDS = ('id', 'vt_source', 'vt_dest', 'vt_source_port',
               'vt_dest_port', 'vt_source_signature', 'vt_dest_signature')

    def __init__(self, id=None, vt_source=None, vt_dest=None, vt_source_port=None, vt_dest_port=None, vt_source_signature=None, vt_dest_signature=None):
        for name, value in (('id', id),
                            ('vt_source', vt_source),
                            ('vt_dest', vt_dest),
                            ('vt_source_port', vt_source_port),
                            ('vt_dest_port', vt_dest_port),
                            ('vt_source_signature', vt_source_signature),
                            ('vt_dest_signature', vt_dest_signature)):
            setattr(self, '_db_' + name, value)
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBVtConnection.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this record; with ``new_ids``, mint a replacement id from
        ``id_scope`` and record the translation in ``id_remap``."""
        twin = DBVtConnection(**{f: getattr(self, '_db_' + f)
                                 for f in DBVtConnection._FIELDS})
        if new_ids:
            minted = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                bucket = id_scope.remap[self.vtType]
            else:
                bucket = self.vtType
            id_remap[(bucket, self.db_id)] = minted
            twin.db_id = minted
        if not new_ids:
            # Plain copies keep the original's dirty/new state.
            twin.is_dirty = self.is_dirty
            twin.is_new = self.is_new
        return twin

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Carry every field (or its translated value) onto ``new_obj``.

        ``trans_dict`` may supply per-field converter callables keyed by
        class name then field name; untranslated non-None fields are copied.
        """
        if new_obj is None:
            new_obj = DBVtConnection()
        per_field = {}
        if new_obj.__class__.__name__ in trans_dict:
            per_field = trans_dict[new_obj.__class__.__name__]
        for field in DBVtConnection._FIELDS:
            if field in per_field:
                setattr(new_obj, 'db_' + field, per_field[field](old_obj, trans_dict))
            else:
                carried = getattr(old_obj, 'db_' + field, None)
                if carried is not None:
                    setattr(new_obj, 'db_' + field, carried)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Leaf object: the child list is just this object itself."""
        parent_type, parent_id = parent
        return [(self, parent_type, parent_id)]

    def db_deleted_children(self, remove=False):
        """Leaf object: nothing is ever tracked as deleted."""
        return []

    def has_changes(self):
        """True when this object has been modified since load/save."""
        return bool(self.is_dirty)

    def _read_id(self):
        return self._db_id

    def _write_id(self, id):
        self._db_id = id
        self.is_dirty = True

    db_id = property(_read_id, _write_id)

    def db_add_id(self, id):
        self._db_id = id

    def db_change_id(self, id):
        self._db_id = id

    def db_delete_id(self, id):
        self._db_id = None

    def _read_vt_source(self):
        return self._db_vt_source

    def _write_vt_source(self, vt_source):
        self._db_vt_source = vt_source
        self.is_dirty = True

    db_vt_source = property(_read_vt_source, _write_vt_source)

    def db_add_vt_source(self, vt_source):
        self._db_vt_source = vt_source

    def db_change_vt_source(self, vt_source):
        self._db_vt_source = vt_source

    def db_delete_vt_source(self, vt_source):
        self._db_vt_source = None

    def _read_vt_dest(self):
        return self._db_vt_dest

    def _write_vt_dest(self, vt_dest):
        self._db_vt_dest = vt_dest
        self.is_dirty = True

    db_vt_dest = property(_read_vt_dest, _write_vt_dest)

    def db_add_vt_dest(self, vt_dest):
        self._db_vt_dest = vt_dest

    def db_change_vt_dest(self, vt_dest):
        self._db_vt_dest = vt_dest

    def db_delete_vt_dest(self, vt_dest):
        self._db_vt_dest = None

    def _read_vt_source_port(self):
        return self._db_vt_source_port

    def _write_vt_source_port(self, vt_source_port):
        self._db_vt_source_port = vt_source_port
        self.is_dirty = True

    db_vt_source_port = property(_read_vt_source_port, _write_vt_source_port)

    def db_add_vt_source_port(self, vt_source_port):
        self._db_vt_source_port = vt_source_port

    def db_change_vt_source_port(self, vt_source_port):
        self._db_vt_source_port = vt_source_port

    def db_delete_vt_source_port(self, vt_source_port):
        self._db_vt_source_port = None

    def _read_vt_dest_port(self):
        return self._db_vt_dest_port

    def _write_vt_dest_port(self, vt_dest_port):
        self._db_vt_dest_port = vt_dest_port
        self.is_dirty = True

    db_vt_dest_port = property(_read_vt_dest_port, _write_vt_dest_port)

    def db_add_vt_dest_port(self, vt_dest_port):
        self._db_vt_dest_port = vt_dest_port

    def db_change_vt_dest_port(self, vt_dest_port):
        self._db_vt_dest_port = vt_dest_port

    def db_delete_vt_dest_port(self, vt_dest_port):
        self._db_vt_dest_port = None

    def _read_vt_source_signature(self):
        return self._db_vt_source_signature

    def _write_vt_source_signature(self, vt_source_signature):
        self._db_vt_source_signature = vt_source_signature
        self.is_dirty = True

    db_vt_source_signature = property(_read_vt_source_signature, _write_vt_source_signature)

    def db_add_vt_source_signature(self, vt_source_signature):
        self._db_vt_source_signature = vt_source_signature

    def db_change_vt_source_signature(self, vt_source_signature):
        self._db_vt_source_signature = vt_source_signature

    def db_delete_vt_source_signature(self, vt_source_signature):
        self._db_vt_source_signature = None

    def _read_vt_dest_signature(self):
        return self._db_vt_dest_signature

    def _write_vt_dest_signature(self, vt_dest_signature):
        self._db_vt_dest_signature = vt_dest_signature
        self.is_dirty = True

    db_vt_dest_signature = property(_read_vt_dest_signature, _write_vt_dest_signature)

    def db_add_vt_dest_signature(self, vt_dest_signature):
        self._db_vt_dest_signature = vt_dest_signature

    def db_change_vt_dest_signature(self, vt_dest_signature):
        self._db_vt_dest_signature = vt_dest_signature

    def db_delete_vt_dest_signature(self, vt_dest_signature):
        self._db_vt_dest_signature = None

    def getPrimaryKey(self):
        """Return the primary key (``id``) used by the persistence layer."""
        return self._db_id
class DBOpmAccount(object):
    """Generated persistence object for an OPM account entry.

    Carries a primary key (``db_id``) and a single ``db_value`` payload.
    Assigning through the ``db_*`` properties flags the instance dirty;
    the ``db_add_*`` / ``db_change_*`` / ``db_delete_*`` helpers mutate
    the backing slot without touching the dirty flag.
    """
    vtType = 'opm_account'

    def __init__(self, id=None, value=None):
        self._db_id = id
        self._db_value = value
        # Freshly constructed objects are considered new and unsaved.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        # copy.copy() delegates to do_copy with default arguments.
        return DBOpmAccount.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this object; with new_ids, allocate a fresh id via id_scope
        and record the old->new mapping in id_remap."""
        dup = DBOpmAccount(id=self._db_id, value=self._db_value)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            key_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(key_type, self.db_id)] = fresh
            dup.db_id = fresh
        else:
            # Plain copies inherit the original's persistence flags.
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, preferring any
        per-field hook registered in trans_dict for this class."""
        if new_obj is None:
            new_obj = DBOpmAccount()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        for field in ('id', 'value'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only reported "child" is the object itself.
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        self._db_value = None

    def getPrimaryKey(self):
        return self._db_id
class DBGroupExec(object):
    """Generated log object recording one execution of a group.

    Owns two child collections -- item_execs (module/group/loop execution
    records, per update_version below) and annotations -- each mirrored in an
    id -> object index dict.  Removed children that were already persisted are
    parked in the db_deleted_* lists until flushed.  Code kept byte-identical:
    Python-2-era generated code (xrange) with order-sensitive list/index
    maintenance.
    """
    vtType = 'group_exec'
    def __init__(self, item_execs=None, id=None, ts_start=None, ts_end=None, cached=None, module_id=None, group_name=None, group_type=None, completed=None, error=None, machine_id=None, annotations=None):
        self.db_deleted_item_execs = []
        self.db_item_execs_id_index = {}
        if item_execs is None:
            self._db_item_execs = []
        else:
            self._db_item_execs = item_execs
            # build the id -> object lookup for the supplied children
            for v in self._db_item_execs:
                self.db_item_execs_id_index[v.db_id] = v
        self._db_id = id
        self._db_ts_start = ts_start
        self._db_ts_end = ts_end
        self._db_cached = cached
        self._db_module_id = module_id
        self._db_group_name = group_name
        self._db_group_type = group_type
        self._db_completed = completed
        self._db_error = error
        self._db_machine_id = machine_id
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
            for v in self._db_annotations:
                self.db_annotations_id_index[v.db_id] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy with default arguments
        return DBGroupExec.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object and its children; with new_ids, allocate
        fresh ids via id_scope, record them in id_remap, and rewrite the
        module/machine foreign keys through id_remap."""
        cp = DBGroupExec(id=self._db_id,
                         ts_start=self._db_ts_start,
                         ts_end=self._db_ts_end,
                         cached=self._db_cached,
                         module_id=self._db_module_id,
                         group_name=self._db_group_name,
                         group_type=self._db_group_type,
                         completed=self._db_completed,
                         error=self._db_error,
                         machine_id=self._db_machine_id)
        if self._db_item_execs is None:
            cp._db_item_execs = []
        else:
            cp._db_item_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_item_execs]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_module_id') and ('module', self._db_module_id) in id_remap:
                cp._db_module_id = id_remap[('module', self._db_module_id)]
            if hasattr(self, 'db_machine_id') and ('machine', self._db_machine_id) in id_remap:
                cp._db_machine_id = id_remap[('machine', self._db_machine_id)]
        # recreate indices and set flags
        cp.db_item_execs_id_index = dict((v.db_id, v) for v in cp._db_item_execs)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, preferring per-field
        hooks registered in trans_dict; child objects are recursively
        translated via their own update_version."""
        if new_obj is None:
            new_obj = DBGroupExec()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'item_execs' in class_dict:
            res = class_dict['item_execs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_item_exec(obj)
        elif hasattr(old_obj, 'db_item_execs') and old_obj.db_item_execs is not None:
            # item_execs are heterogeneous: dispatch on each child's vtType
            for obj in old_obj.db_item_execs:
                if obj.vtType == 'module_exec':
                    new_obj.db_add_item_exec(DBModuleExec.update_version(obj, trans_dict))
                elif obj.vtType == 'group_exec':
                    new_obj.db_add_item_exec(DBGroupExec.update_version(obj, trans_dict))
                elif obj.vtType == 'loop_exec':
                    new_obj.db_add_item_exec(DBLoopExec.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_item_execs') and hasattr(new_obj, 'db_deleted_item_execs'):
            for obj in old_obj.db_deleted_item_execs:
                if obj.vtType == 'module_exec':
                    n_obj = DBModuleExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
                elif obj.vtType == 'group_exec':
                    n_obj = DBGroupExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
                elif obj.vtType == 'loop_exec':
                    n_obj = DBLoopExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'ts_start' in class_dict:
            res = class_dict['ts_start'](old_obj, trans_dict)
            new_obj.db_ts_start = res
        elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
            new_obj.db_ts_start = old_obj.db_ts_start
        if 'ts_end' in class_dict:
            res = class_dict['ts_end'](old_obj, trans_dict)
            new_obj.db_ts_end = res
        elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
            new_obj.db_ts_end = old_obj.db_ts_end
        if 'cached' in class_dict:
            res = class_dict['cached'](old_obj, trans_dict)
            new_obj.db_cached = res
        elif hasattr(old_obj, 'db_cached') and old_obj.db_cached is not None:
            new_obj.db_cached = old_obj.db_cached
        if 'module_id' in class_dict:
            res = class_dict['module_id'](old_obj, trans_dict)
            new_obj.db_module_id = res
        elif hasattr(old_obj, 'db_module_id') and old_obj.db_module_id is not None:
            new_obj.db_module_id = old_obj.db_module_id
        if 'group_name' in class_dict:
            res = class_dict['group_name'](old_obj, trans_dict)
            new_obj.db_group_name = res
        elif hasattr(old_obj, 'db_group_name') and old_obj.db_group_name is not None:
            new_obj.db_group_name = old_obj.db_group_name
        if 'group_type' in class_dict:
            res = class_dict['group_type'](old_obj, trans_dict)
            new_obj.db_group_type = res
        elif hasattr(old_obj, 'db_group_type') and old_obj.db_group_type is not None:
            new_obj.db_group_type = old_obj.db_group_type
        if 'completed' in class_dict:
            res = class_dict['completed'](old_obj, trans_dict)
            new_obj.db_completed = res
        elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
            new_obj.db_completed = old_obj.db_completed
        if 'error' in class_dict:
            res = class_dict['error'](old_obj, trans_dict)
            new_obj.db_error = res
        elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None:
            new_obj.db_error = old_obj.db_error
        if 'machine_id' in class_dict:
            res = class_dict['machine_id'](old_obj, trans_dict)
            new_obj.db_machine_id = res
        elif hasattr(old_obj, 'db_machine_id') and old_obj.db_machine_id is not None:
            new_obj.db_machine_id = old_obj.db_machine_id
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this subtree;
        with orphan=True, children are detached as they are gathered."""
        children = []
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        to_del = []
        for child in self.db_item_execs:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_item_exec(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Gather parked deletions; remove=True also clears the holding lists.
        children = []
        children.extend(self.db_deleted_annotations)
        children.extend(self.db_deleted_item_execs)
        if remove:
            self.db_deleted_annotations = []
            self.db_deleted_item_execs = []
        return children
    def has_changes(self):
        # Dirty if this object or any (non-deleted) child is dirty.
        if self.is_dirty:
            return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        for child in self._db_item_execs:
            if child.has_changes():
                return True
        return False
    def __get_db_item_execs(self):
        return self._db_item_execs
    def __set_db_item_execs(self, item_execs):
        self._db_item_execs = item_execs
        self.is_dirty = True
    db_item_execs = property(__get_db_item_execs, __set_db_item_execs)
    def db_get_item_execs(self):
        return self._db_item_execs
    def db_add_item_exec(self, item_exec):
        self.is_dirty = True
        self._db_item_execs.append(item_exec)
        self.db_item_execs_id_index[item_exec.db_id] = item_exec
    def db_change_item_exec(self, item_exec):
        # Replace the child with a matching db_id, or append if absent.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == item_exec.db_id:
                self._db_item_execs[i] = item_exec
                found = True
                break
        if not found:
            self._db_item_execs.append(item_exec)
        self.db_item_execs_id_index[item_exec.db_id] = item_exec
    def db_delete_item_exec(self, item_exec):
        # Already-persisted children are parked for later DB deletion.
        # NOTE: raises KeyError if no child with that db_id is indexed.
        self.is_dirty = True
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == item_exec.db_id:
                if not self._db_item_execs[i].is_new:
                    self.db_deleted_item_execs.append(self._db_item_execs[i])
                del self._db_item_execs[i]
                break
        del self.db_item_execs_id_index[item_exec.db_id]
    def db_get_item_exec(self, key):
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == key:
                return self._db_item_execs[i]
        return None
    def db_get_item_exec_by_id(self, key):
        return self.db_item_execs_id_index[key]
    def db_has_item_exec_with_id(self, key):
        return key in self.db_item_execs_id_index
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_ts_start(self):
        return self._db_ts_start
    def __set_db_ts_start(self, ts_start):
        self._db_ts_start = ts_start
        self.is_dirty = True
    db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
    def db_add_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_change_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_delete_ts_start(self, ts_start):
        self._db_ts_start = None
    def __get_db_ts_end(self):
        return self._db_ts_end
    def __set_db_ts_end(self, ts_end):
        self._db_ts_end = ts_end
        self.is_dirty = True
    db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
    def db_add_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_change_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_delete_ts_end(self, ts_end):
        self._db_ts_end = None
    def __get_db_cached(self):
        return self._db_cached
    def __set_db_cached(self, cached):
        self._db_cached = cached
        self.is_dirty = True
    db_cached = property(__get_db_cached, __set_db_cached)
    def db_add_cached(self, cached):
        self._db_cached = cached
    def db_change_cached(self, cached):
        self._db_cached = cached
    def db_delete_cached(self, cached):
        self._db_cached = None
    def __get_db_module_id(self):
        return self._db_module_id
    def __set_db_module_id(self, module_id):
        self._db_module_id = module_id
        self.is_dirty = True
    db_module_id = property(__get_db_module_id, __set_db_module_id)
    def db_add_module_id(self, module_id):
        self._db_module_id = module_id
    def db_change_module_id(self, module_id):
        self._db_module_id = module_id
    def db_delete_module_id(self, module_id):
        self._db_module_id = None
    def __get_db_group_name(self):
        return self._db_group_name
    def __set_db_group_name(self, group_name):
        self._db_group_name = group_name
        self.is_dirty = True
    db_group_name = property(__get_db_group_name, __set_db_group_name)
    def db_add_group_name(self, group_name):
        self._db_group_name = group_name
    def db_change_group_name(self, group_name):
        self._db_group_name = group_name
    def db_delete_group_name(self, group_name):
        self._db_group_name = None
    def __get_db_group_type(self):
        return self._db_group_type
    def __set_db_group_type(self, group_type):
        self._db_group_type = group_type
        self.is_dirty = True
    db_group_type = property(__get_db_group_type, __set_db_group_type)
    def db_add_group_type(self, group_type):
        self._db_group_type = group_type
    def db_change_group_type(self, group_type):
        self._db_group_type = group_type
    def db_delete_group_type(self, group_type):
        self._db_group_type = None
    def __get_db_completed(self):
        return self._db_completed
    def __set_db_completed(self, completed):
        self._db_completed = completed
        self.is_dirty = True
    db_completed = property(__get_db_completed, __set_db_completed)
    def db_add_completed(self, completed):
        self._db_completed = completed
    def db_change_completed(self, completed):
        self._db_completed = completed
    def db_delete_completed(self, completed):
        self._db_completed = None
    def __get_db_error(self):
        return self._db_error
    def __set_db_error(self, error):
        self._db_error = error
        self.is_dirty = True
    db_error = property(__get_db_error, __set_db_error)
    def db_add_error(self, error):
        self._db_error = error
    def db_change_error(self, error):
        self._db_error = error
    def db_delete_error(self, error):
        self._db_error = None
    def __get_db_machine_id(self):
        return self._db_machine_id
    def __set_db_machine_id(self, machine_id):
        self._db_machine_id = machine_id
        self.is_dirty = True
    db_machine_id = property(__get_db_machine_id, __set_db_machine_id)
    def db_add_machine_id(self, machine_id):
        self._db_machine_id = machine_id
    def db_change_machine_id(self, machine_id):
        self._db_machine_id = machine_id
    def db_delete_machine_id(self, machine_id):
        self._db_machine_id = None
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_change_annotation(self, annotation):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_delete_annotation(self, annotation):
        # NOTE: raises KeyError if no annotation with that db_id is indexed.
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
    def db_get_annotation(self, key):
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def getPrimaryKey(self):
        return self._db_id
class DBOpmAgentId(object):
    """Generated persistence object holding a reference to an OPM agent id."""
    vtType = 'opm_agent_id'

    def __init__(self, id=None):
        self._db_id = id
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        # copy.copy() delegates to do_copy with default arguments.
        return DBOpmAgentId.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone; with new_ids, allocate a fresh id and follow any
        opm_agent remapping recorded in id_remap."""
        dup = DBOpmAgentId(id=self._db_id)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            key_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(key_type, self.db_id)] = fresh
            dup.db_id = fresh
            # db_id also acts as a foreign key into opm_agent: follow remap.
            if hasattr(self, 'db_id') and ('opm_agent', self._db_id) in id_remap:
                dup._db_id = id_remap[('opm_agent', self._db_id)]
        else:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema via trans_dict hooks."""
        if new_obj is None:
            new_obj = DBOpmAgentId()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        if 'id' in class_dict:
            new_obj.db_id = class_dict['id'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_id', None) is not None:
            new_obj.db_id = old_obj.db_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only reported "child" is the object itself.
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
class DBParameter(object):
    """Generated persistence object for one function parameter value.

    Fields: id (primary key), pos (position within the function), name,
    type, val and alias.  Property assignment marks the object dirty; the
    db_add_/db_change_/db_delete_ helpers do not.
    """
    vtType = 'parameter'

    def __init__(self, id=None, pos=None, name=None, type=None, val=None, alias=None):
        self._db_id = id
        self._db_pos = pos
        self._db_name = name
        self._db_type = type
        self._db_val = val
        self._db_alias = alias
        # Freshly constructed objects are considered new and unsaved.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        # copy.copy() delegates to do_copy with default arguments.
        return DBParameter.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this parameter; with new_ids, allocate a fresh id via
        id_scope and record the old->new mapping in id_remap."""
        dup = DBParameter(id=self._db_id,
                          pos=self._db_pos,
                          name=self._db_name,
                          type=self._db_type,
                          val=self._db_val,
                          alias=self._db_alias)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            key_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(key_type, self.db_id)] = fresh
            dup.db_id = fresh
        else:
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, preferring any
        per-field hook registered in trans_dict for this class."""
        if new_obj is None:
            new_obj = DBParameter()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        for field in ('id', 'pos', 'name', 'type', 'val', 'alias'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only reported "child" is the object itself.
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    def __get_db_pos(self):
        return self._db_pos
    def __set_db_pos(self, pos):
        self._db_pos = pos
        self.is_dirty = True
    db_pos = property(__get_db_pos, __set_db_pos)
    def db_add_pos(self, pos):
        self._db_pos = pos
    def db_change_pos(self, pos):
        self._db_pos = pos
    def db_delete_pos(self, pos):
        self._db_pos = None

    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None

    def __get_db_type(self):
        return self._db_type
    def __set_db_type(self, type):
        self._db_type = type
        self.is_dirty = True
    db_type = property(__get_db_type, __set_db_type)
    def db_add_type(self, type):
        self._db_type = type
    def db_change_type(self, type):
        self._db_type = type
    def db_delete_type(self, type):
        self._db_type = None

    def __get_db_val(self):
        return self._db_val
    def __set_db_val(self, val):
        self._db_val = val
        self.is_dirty = True
    db_val = property(__get_db_val, __set_db_val)
    def db_add_val(self, val):
        self._db_val = val
    def db_change_val(self, val):
        self._db_val = val
    def db_delete_val(self, val):
        self._db_val = None

    def __get_db_alias(self):
        return self._db_alias
    def __set_db_alias(self, alias):
        self._db_alias = alias
        self.is_dirty = True
    db_alias = property(__get_db_alias, __set_db_alias)
    def db_add_alias(self, alias):
        self._db_alias = alias
    def db_change_alias(self, alias):
        self._db_alias = alias
    def db_delete_alias(self, alias):
        self._db_alias = None

    def getPrimaryKey(self):
        return self._db_id
class DBVistrail(object):
vtType = 'vistrail'
    def __init__(self, id=None, entity_type=None, version=None, name=None, last_modified=None, actions=None, tags=None, annotations=None, vistrailVariables=None, parameter_explorations=None, actionAnnotations=None):
        """Build a vistrail container and its child lookup indexes.

        Child lists: actions, tags, annotations, vistrailVariables,
        parameter_explorations and actionAnnotations.  Each gets a
        db_deleted_* holding list plus one or more index dicts (by id,
        name, uuid, key, or composite keys) populated from any supplied
        children.
        """
        self._db_id = id
        self._db_entity_type = entity_type
        self._db_version = version
        self._db_name = name
        self._db_last_modified = last_modified
        self.db_deleted_actions = []
        self.db_actions_id_index = {}
        if actions is None:
            self._db_actions = []
        else:
            self._db_actions = actions
            for v in self._db_actions:
                self.db_actions_id_index[v.db_id] = v
        self.db_deleted_tags = []
        self.db_tags_id_index = {}
        self.db_tags_name_index = {}
        if tags is None:
            self._db_tags = []
        else:
            self._db_tags = tags
            # tags are indexed both by id and by name
            for v in self._db_tags:
                self.db_tags_id_index[v.db_id] = v
                self.db_tags_name_index[v.db_name] = v
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        self.db_annotations_key_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
            for v in self._db_annotations:
                self.db_annotations_id_index[v.db_id] = v
                self.db_annotations_key_index[v.db_key] = v
        self.db_deleted_vistrailVariables = []
        self.db_vistrailVariables_name_index = {}
        self.db_vistrailVariables_uuid_index = {}
        if vistrailVariables is None:
            self._db_vistrailVariables = []
        else:
            self._db_vistrailVariables = vistrailVariables
            for v in self._db_vistrailVariables:
                self.db_vistrailVariables_name_index[v.db_name] = v
                self.db_vistrailVariables_uuid_index[v.db_uuid] = v
        self.db_deleted_parameter_explorations = []
        self.db_parameter_explorations_id_index = {}
        if parameter_explorations is None:
            self._db_parameter_explorations = []
        else:
            self._db_parameter_explorations = parameter_explorations
            for v in self._db_parameter_explorations:
                self.db_parameter_explorations_id_index[v.db_id] = v
        self.db_deleted_actionAnnotations = []
        self.db_actionAnnotations_id_index = {}
        self.db_actionAnnotations_action_id_index = {}
        self.db_actionAnnotations_key_index = {}
        if actionAnnotations is None:
            self._db_actionAnnotations = []
        else:
            self._db_actionAnnotations = actionAnnotations
            # indexed by id, by (action_id, key), and by (key, value)
            for v in self._db_actionAnnotations:
                self.db_actionAnnotations_id_index[v.db_id] = v
                self.db_actionAnnotations_action_id_index[(v.db_action_id,v.db_key)] = v
                self.db_actionAnnotations_key_index[(v.db_key,v.db_value)] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy with default arguments
        return DBVistrail.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this vistrail and all child lists; with new_ids,
        allocate a fresh id via id_scope and record it in id_remap.
        All index dicts are rebuilt from the copied children."""
        cp = DBVistrail(id=self._db_id,
                        entity_type=self._db_entity_type,
                        version=self._db_version,
                        name=self._db_name,
                        last_modified=self._db_last_modified)
        if self._db_actions is None:
            cp._db_actions = []
        else:
            cp._db_actions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actions]
        if self._db_tags is None:
            cp._db_tags = []
        else:
            cp._db_tags = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_tags]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        if self._db_vistrailVariables is None:
            cp._db_vistrailVariables = []
        else:
            cp._db_vistrailVariables = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_vistrailVariables]
        if self._db_parameter_explorations is None:
            cp._db_parameter_explorations = []
        else:
            cp._db_parameter_explorations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_parameter_explorations]
        if self._db_actionAnnotations is None:
            cp._db_actionAnnotations = []
        else:
            cp._db_actionAnnotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actionAnnotations]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_actions_id_index = dict((v.db_id, v) for v in cp._db_actions)
        cp.db_tags_id_index = dict((v.db_id, v) for v in cp._db_tags)
        cp.db_tags_name_index = dict((v.db_name, v) for v in cp._db_tags)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
        cp.db_vistrailVariables_name_index = dict((v.db_name, v) for v in cp._db_vistrailVariables)
        cp.db_vistrailVariables_uuid_index = dict((v.db_uuid, v) for v in cp._db_vistrailVariables)
        cp.db_parameter_explorations_id_index = dict((v.db_id, v) for v in cp._db_parameter_explorations)
        cp.db_actionAnnotations_id_index = dict((v.db_id, v) for v in cp._db_actionAnnotations)
        cp.db_actionAnnotations_action_id_index = dict(((v.db_action_id,v.db_key), v) for v in cp._db_actionAnnotations)
        cp.db_actionAnnotations_key_index = dict(((v.db_key,v.db_value), v) for v in cp._db_actionAnnotations)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, preferring per-field
        hooks registered in trans_dict; each child list (and its parked
        db_deleted_* list) is recursively translated via the child class's
        own update_version."""
        if new_obj is None:
            new_obj = DBVistrail()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'entity_type' in class_dict:
            res = class_dict['entity_type'](old_obj, trans_dict)
            new_obj.db_entity_type = res
        elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
            new_obj.db_entity_type = old_obj.db_entity_type
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'last_modified' in class_dict:
            res = class_dict['last_modified'](old_obj, trans_dict)
            new_obj.db_last_modified = res
        elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
            new_obj.db_last_modified = old_obj.db_last_modified
        if 'actions' in class_dict:
            res = class_dict['actions'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_action(obj)
        elif hasattr(old_obj, 'db_actions') and old_obj.db_actions is not None:
            for obj in old_obj.db_actions:
                new_obj.db_add_action(DBAction.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_actions') and hasattr(new_obj, 'db_deleted_actions'):
            for obj in old_obj.db_deleted_actions:
                n_obj = DBAction.update_version(obj, trans_dict)
                new_obj.db_deleted_actions.append(n_obj)
        if 'tags' in class_dict:
            res = class_dict['tags'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_tag(obj)
        elif hasattr(old_obj, 'db_tags') and old_obj.db_tags is not None:
            for obj in old_obj.db_tags:
                new_obj.db_add_tag(DBTag.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_tags') and hasattr(new_obj, 'db_deleted_tags'):
            for obj in old_obj.db_deleted_tags:
                n_obj = DBTag.update_version(obj, trans_dict)
                new_obj.db_deleted_tags.append(n_obj)
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        if 'vistrailVariables' in class_dict:
            res = class_dict['vistrailVariables'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_vistrailVariable(obj)
        elif hasattr(old_obj, 'db_vistrailVariables') and old_obj.db_vistrailVariables is not None:
            for obj in old_obj.db_vistrailVariables:
                new_obj.db_add_vistrailVariable(DBVistrailVariable.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_vistrailVariables') and hasattr(new_obj, 'db_deleted_vistrailVariables'):
            for obj in old_obj.db_deleted_vistrailVariables:
                n_obj = DBVistrailVariable.update_version(obj, trans_dict)
                new_obj.db_deleted_vistrailVariables.append(n_obj)
        if 'parameter_explorations' in class_dict:
            res = class_dict['parameter_explorations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_parameter_exploration(obj)
        elif hasattr(old_obj, 'db_parameter_explorations') and old_obj.db_parameter_explorations is not None:
            for obj in old_obj.db_parameter_explorations:
                new_obj.db_add_parameter_exploration(DBParameterExploration.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_parameter_explorations') and hasattr(new_obj, 'db_deleted_parameter_explorations'):
            for obj in old_obj.db_deleted_parameter_explorations:
                n_obj = DBParameterExploration.update_version(obj, trans_dict)
                new_obj.db_deleted_parameter_explorations.append(n_obj)
        if 'actionAnnotations' in class_dict:
            res = class_dict['actionAnnotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_actionAnnotation(obj)
        elif hasattr(old_obj, 'db_actionAnnotations') and old_obj.db_actionAnnotations is not None:
            for obj in old_obj.db_actionAnnotations:
                new_obj.db_add_actionAnnotation(DBActionAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_actionAnnotations') and hasattr(new_obj, 'db_deleted_actionAnnotations'):
            for obj in old_obj.db_deleted_actionAnnotations:
                n_obj = DBActionAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_actionAnnotations.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object and
        every descendant in its child collections (actions, tags, annotations,
        vistrailVariables, parameter_explorations, actionAnnotations).

        When `orphan` is True, each visited child is also detached from this
        object afterwards via the matching db_delete_* call.  The object
        itself is appended last, tagged with the caller-supplied parent pair.
        """
        children = []
        to_del = []
        for child in self.db_actions:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        # deletion is deferred until after iteration so the list being
        # iterated is never mutated mid-loop
        for child in to_del:
            self.db_delete_action(child)
        to_del = []
        for child in self.db_tags:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_tag(child)
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        to_del = []
        for child in self.db_vistrailVariables:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_vistrailVariable(child)
        to_del = []
        for child in self.db_parameter_explorations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_parameter_exploration(child)
        to_del = []
        for child in self.db_actionAnnotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_actionAnnotation(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return all children queued for deletion across every child
        collection; with remove=True the deletion queues are cleared after
        being collected (typically once persistence has processed them).
        """
        children = []
        children.extend(self.db_deleted_actions)
        children.extend(self.db_deleted_tags)
        children.extend(self.db_deleted_annotations)
        children.extend(self.db_deleted_vistrailVariables)
        children.extend(self.db_deleted_parameter_explorations)
        children.extend(self.db_deleted_actionAnnotations)
        if remove:
            self.db_deleted_actions = []
            self.db_deleted_tags = []
            self.db_deleted_annotations = []
            self.db_deleted_vistrailVariables = []
            self.db_deleted_parameter_explorations = []
            self.db_deleted_actionAnnotations = []
        return children
    def has_changes(self):
        """Return True if this object or any child in any of its collections
        is dirty (needs to be written back by the persistence layer)."""
        if self.is_dirty:
            return True
        for child in self._db_actions:
            if child.has_changes():
                return True
        for child in self._db_tags:
            if child.has_changes():
                return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        for child in self._db_vistrailVariables:
            if child.has_changes():
                return True
        for child in self._db_parameter_explorations:
            if child.has_changes():
                return True
        for child in self._db_actionAnnotations:
            if child.has_changes():
                return True
        return False
    # --- generated accessors for scalar field 'id' ---
    # The property setter flags the object dirty; the db_add/db_change/
    # db_delete variants deliberately do not (generator convention for
    # scalar fields).
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        # the argument is ignored; the field is simply cleared
        self._db_id = None
    # --- generated accessors for scalar field 'entity_type' ---
    # Only the property setter marks the object dirty.
    def __get_db_entity_type(self):
        return self._db_entity_type
    def __set_db_entity_type(self, entity_type):
        self._db_entity_type = entity_type
        self.is_dirty = True
    db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
    def db_add_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_change_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_delete_entity_type(self, entity_type):
        # argument ignored; field cleared
        self._db_entity_type = None
    # --- generated accessors for scalar field 'version' ---
    # Only the property setter marks the object dirty.
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        # argument ignored; field cleared
        self._db_version = None
    # --- generated accessors for scalar field 'name' ---
    # Only the property setter marks the object dirty.
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        # argument ignored; field cleared
        self._db_name = None
    # --- generated accessors for scalar field 'last_modified' ---
    # Only the property setter marks the object dirty.
    def __get_db_last_modified(self):
        return self._db_last_modified
    def __set_db_last_modified(self, last_modified):
        self._db_last_modified = last_modified
        self.is_dirty = True
    db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
    def db_add_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_change_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_delete_last_modified(self, last_modified):
        # argument ignored; field cleared
        self._db_last_modified = None
    # --- generated accessors for child list 'actions' (indexed by db_id) ---
    def __get_db_actions(self):
        return self._db_actions
    def __set_db_actions(self, actions):
        # NOTE(review): wholesale replacement does not rebuild
        # db_actions_id_index; callers appear expected to use db_add/db_change
        self._db_actions = actions
        self.is_dirty = True
    db_actions = property(__get_db_actions, __set_db_actions)
    def db_get_actions(self):
        return self._db_actions
    def db_add_action(self, action):
        self.is_dirty = True
        self._db_actions.append(action)
        self.db_actions_id_index[action.db_id] = action
    def db_change_action(self, action):
        # replace in place by matching db_id; append when not present
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_actions)):
            if self._db_actions[i].db_id == action.db_id:
                self._db_actions[i] = action
                found = True
                break
        if not found:
            self._db_actions.append(action)
        self.db_actions_id_index[action.db_id] = action
    def db_delete_action(self, action):
        # pre-existing (not is_new) children are queued on db_deleted_actions
        # for the persistence layer; raises KeyError if the id is not indexed
        self.is_dirty = True
        for i in xrange(len(self._db_actions)):
            if self._db_actions[i].db_id == action.db_id:
                if not self._db_actions[i].is_new:
                    self.db_deleted_actions.append(self._db_actions[i])
                del self._db_actions[i]
                break
        del self.db_actions_id_index[action.db_id]
    def db_get_action(self, key):
        # linear scan by id; returns None when absent
        for i in xrange(len(self._db_actions)):
            if self._db_actions[i].db_id == key:
                return self._db_actions[i]
        return None
    def db_get_action_by_id(self, key):
        return self.db_actions_id_index[key]
    def db_has_action_with_id(self, key):
        return key in self.db_actions_id_index
    # --- generated accessors for child list 'tags' (indexed by db_id and
    # db_name); change/delete match entries by db_id ---
    def __get_db_tags(self):
        return self._db_tags
    def __set_db_tags(self, tags):
        self._db_tags = tags
        self.is_dirty = True
    db_tags = property(__get_db_tags, __set_db_tags)
    def db_get_tags(self):
        return self._db_tags
    def db_add_tag(self, tag):
        self.is_dirty = True
        self._db_tags.append(tag)
        self.db_tags_id_index[tag.db_id] = tag
        self.db_tags_name_index[tag.db_name] = tag
    def db_change_tag(self, tag):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_tags)):
            if self._db_tags[i].db_id == tag.db_id:
                self._db_tags[i] = tag
                found = True
                break
        if not found:
            self._db_tags.append(tag)
        self.db_tags_id_index[tag.db_id] = tag
        self.db_tags_name_index[tag.db_name] = tag
    def db_delete_tag(self, tag):
        # queues pre-existing tags on db_deleted_tags; KeyError if unindexed
        self.is_dirty = True
        for i in xrange(len(self._db_tags)):
            if self._db_tags[i].db_id == tag.db_id:
                if not self._db_tags[i].is_new:
                    self.db_deleted_tags.append(self._db_tags[i])
                del self._db_tags[i]
                break
        del self.db_tags_id_index[tag.db_id]
        del self.db_tags_name_index[tag.db_name]
    def db_get_tag(self, key):
        for i in xrange(len(self._db_tags)):
            if self._db_tags[i].db_id == key:
                return self._db_tags[i]
        return None
    def db_get_tag_by_id(self, key):
        return self.db_tags_id_index[key]
    def db_has_tag_with_id(self, key):
        return key in self.db_tags_id_index
    def db_get_tag_by_name(self, key):
        return self.db_tags_name_index[key]
    def db_has_tag_with_name(self, key):
        return key in self.db_tags_name_index
    # --- generated accessors for child list 'annotations' (indexed by db_id
    # and db_key); change/delete match entries by db_id ---
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_change_annotation(self, annotation):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_delete_annotation(self, annotation):
        # queues pre-existing annotations for deletion; KeyError if unindexed
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
        del self.db_annotations_key_index[annotation.db_key]
    def db_get_annotation(self, key):
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def db_get_annotation_by_key(self, key):
        return self.db_annotations_key_index[key]
    def db_has_annotation_with_key(self, key):
        return key in self.db_annotations_key_index
    # --- generated accessors for child list 'vistrailVariables' (indexed by
    # db_name and db_uuid).  Unlike the other collections, change/delete/get
    # match entries by db_name rather than db_id ---
    def __get_db_vistrailVariables(self):
        return self._db_vistrailVariables
    def __set_db_vistrailVariables(self, vistrailVariables):
        self._db_vistrailVariables = vistrailVariables
        self.is_dirty = True
    db_vistrailVariables = property(__get_db_vistrailVariables, __set_db_vistrailVariables)
    def db_get_vistrailVariables(self):
        return self._db_vistrailVariables
    def db_add_vistrailVariable(self, vistrailVariable):
        self.is_dirty = True
        self._db_vistrailVariables.append(vistrailVariable)
        self.db_vistrailVariables_name_index[vistrailVariable.db_name] = vistrailVariable
        self.db_vistrailVariables_uuid_index[vistrailVariable.db_uuid] = vistrailVariable
    def db_change_vistrailVariable(self, vistrailVariable):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_vistrailVariables)):
            if self._db_vistrailVariables[i].db_name == vistrailVariable.db_name:
                self._db_vistrailVariables[i] = vistrailVariable
                found = True
                break
        if not found:
            self._db_vistrailVariables.append(vistrailVariable)
        self.db_vistrailVariables_name_index[vistrailVariable.db_name] = vistrailVariable
        self.db_vistrailVariables_uuid_index[vistrailVariable.db_uuid] = vistrailVariable
    def db_delete_vistrailVariable(self, vistrailVariable):
        # queues pre-existing variables for deletion; KeyError if unindexed
        self.is_dirty = True
        for i in xrange(len(self._db_vistrailVariables)):
            if self._db_vistrailVariables[i].db_name == vistrailVariable.db_name:
                if not self._db_vistrailVariables[i].is_new:
                    self.db_deleted_vistrailVariables.append(self._db_vistrailVariables[i])
                del self._db_vistrailVariables[i]
                break
        del self.db_vistrailVariables_name_index[vistrailVariable.db_name]
        del self.db_vistrailVariables_uuid_index[vistrailVariable.db_uuid]
    def db_get_vistrailVariable(self, key):
        # linear scan by db_name; returns None when absent
        for i in xrange(len(self._db_vistrailVariables)):
            if self._db_vistrailVariables[i].db_name == key:
                return self._db_vistrailVariables[i]
        return None
    def db_get_vistrailVariable_by_name(self, key):
        return self.db_vistrailVariables_name_index[key]
    def db_has_vistrailVariable_with_name(self, key):
        return key in self.db_vistrailVariables_name_index
    def db_get_vistrailVariable_by_uuid(self, key):
        return self.db_vistrailVariables_uuid_index[key]
    def db_has_vistrailVariable_with_uuid(self, key):
        return key in self.db_vistrailVariables_uuid_index
    # --- generated accessors for child list 'parameter_explorations'
    # (indexed by db_id) ---
    def __get_db_parameter_explorations(self):
        return self._db_parameter_explorations
    def __set_db_parameter_explorations(self, parameter_explorations):
        self._db_parameter_explorations = parameter_explorations
        self.is_dirty = True
    db_parameter_explorations = property(__get_db_parameter_explorations, __set_db_parameter_explorations)
    def db_get_parameter_explorations(self):
        return self._db_parameter_explorations
    def db_add_parameter_exploration(self, parameter_exploration):
        self.is_dirty = True
        self._db_parameter_explorations.append(parameter_exploration)
        self.db_parameter_explorations_id_index[parameter_exploration.db_id] = parameter_exploration
    def db_change_parameter_exploration(self, parameter_exploration):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_parameter_explorations)):
            if self._db_parameter_explorations[i].db_id == parameter_exploration.db_id:
                self._db_parameter_explorations[i] = parameter_exploration
                found = True
                break
        if not found:
            self._db_parameter_explorations.append(parameter_exploration)
        self.db_parameter_explorations_id_index[parameter_exploration.db_id] = parameter_exploration
    def db_delete_parameter_exploration(self, parameter_exploration):
        # queues pre-existing entries for deletion; KeyError if unindexed
        self.is_dirty = True
        for i in xrange(len(self._db_parameter_explorations)):
            if self._db_parameter_explorations[i].db_id == parameter_exploration.db_id:
                if not self._db_parameter_explorations[i].is_new:
                    self.db_deleted_parameter_explorations.append(self._db_parameter_explorations[i])
                del self._db_parameter_explorations[i]
                break
        del self.db_parameter_explorations_id_index[parameter_exploration.db_id]
    def db_get_parameter_exploration(self, key):
        for i in xrange(len(self._db_parameter_explorations)):
            if self._db_parameter_explorations[i].db_id == key:
                return self._db_parameter_explorations[i]
        return None
    def db_get_parameter_exploration_by_id(self, key):
        return self.db_parameter_explorations_id_index[key]
    def db_has_parameter_exploration_with_id(self, key):
        return key in self.db_parameter_explorations_id_index
    # --- generated accessors for child list 'actionAnnotations', indexed
    # three ways: by db_id, by (db_action_id, db_key), and by
    # (db_key, db_value) ---
    def __get_db_actionAnnotations(self):
        return self._db_actionAnnotations
    def __set_db_actionAnnotations(self, actionAnnotations):
        self._db_actionAnnotations = actionAnnotations
        self.is_dirty = True
    db_actionAnnotations = property(__get_db_actionAnnotations, __set_db_actionAnnotations)
    def db_get_actionAnnotations(self):
        return self._db_actionAnnotations
    def db_add_actionAnnotation(self, actionAnnotation):
        self.is_dirty = True
        self._db_actionAnnotations.append(actionAnnotation)
        self.db_actionAnnotations_id_index[actionAnnotation.db_id] = actionAnnotation
        self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] = actionAnnotation
        self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] = actionAnnotation
    def db_change_actionAnnotation(self, actionAnnotation):
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_actionAnnotations)):
            if self._db_actionAnnotations[i].db_id == actionAnnotation.db_id:
                self._db_actionAnnotations[i] = actionAnnotation
                found = True
                break
        if not found:
            self._db_actionAnnotations.append(actionAnnotation)
        self.db_actionAnnotations_id_index[actionAnnotation.db_id] = actionAnnotation
        self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] = actionAnnotation
        self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] = actionAnnotation
    def db_delete_actionAnnotation(self, actionAnnotation):
        self.is_dirty = True
        for i in xrange(len(self._db_actionAnnotations)):
            if self._db_actionAnnotations[i].db_id == actionAnnotation.db_id:
                if not self._db_actionAnnotations[i].is_new:
                    self.db_deleted_actionAnnotations.append(self._db_actionAnnotations[i])
                del self._db_actionAnnotations[i]
                break
        del self.db_actionAnnotations_id_index[actionAnnotation.db_id]
        del self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)]
        # key_index removal is best-effort, unlike the other two indices --
        # presumably because (key, value) pairs can collide and the entry may
        # already have been overwritten; confirm against the generator
        try:
            del self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)]
        except KeyError:
            pass
    def db_get_actionAnnotation(self, key):
        for i in xrange(len(self._db_actionAnnotations)):
            if self._db_actionAnnotations[i].db_id == key:
                return self._db_actionAnnotations[i]
        return None
    def db_get_actionAnnotation_by_id(self, key):
        return self.db_actionAnnotations_id_index[key]
    def db_has_actionAnnotation_with_id(self, key):
        return key in self.db_actionAnnotations_id_index
    def db_get_actionAnnotation_by_action_id(self, key):
        return self.db_actionAnnotations_action_id_index[key]
    def db_has_actionAnnotation_with_action_id(self, key):
        return key in self.db_actionAnnotations_action_id_index
    def db_get_actionAnnotation_by_key(self, key):
        return self.db_actionAnnotations_key_index[key]
    def db_has_actionAnnotation_with_key(self, key):
        return key in self.db_actionAnnotations_key_index
    def getPrimaryKey(self):
        # the primary key of this generated domain object is its db id
        return self._db_id
class DBOpmArtifactValue(object):
    """Generated domain object wrapping a single child 'value' (an OPM
    artifact value).  The value child is either a DBPortSpec or a
    DBFunction, per the vtType dispatch in update_version."""
    vtType = 'opm_artifact_value'
    def __init__(self, value=None):
        # children removed before persistence are queued here
        self.db_deleted_value = []
        self._db_value = value
        # freshly constructed objects start out new and dirty
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmArtifactValue.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object; with new_ids, allocate a fresh id from
        id_scope and record the old->new mapping in id_remap."""
        cp = DBOpmArtifactValue()
        if self._db_value is not None:
            cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema version, applying any
        per-field hook functions registered in trans_dict."""
        if new_obj is None:
            new_obj = DBOpmArtifactValue()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'value' in class_dict:
            res = class_dict['value'](old_obj, trans_dict)
            new_obj.db_value = res
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            obj = old_obj.db_value
            # 'value' is polymorphic: dispatch on the child's vtType
            if obj.vtType == 'portSpec':
                new_obj.db_add_value(DBPortSpec.update_version(obj, trans_dict))
            elif obj.vtType == 'function':
                new_obj.db_add_value(DBFunction.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
            for obj in old_obj.db_deleted_value:
                if obj.vtType == 'portSpec':
                    n_obj = DBPortSpec.update_version(obj, trans_dict)
                    new_obj.db_deleted_value.append(n_obj)
                elif obj.vtType == 'function':
                    n_obj = DBFunction.update_version(obj, trans_dict)
                    new_obj.db_deleted_value.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # collect the value child's subtree, then this object itself
        children = []
        if self._db_value is not None:
            children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_value = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_value)
        if remove:
            self.db_deleted_value = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        if self._db_value is not None and self._db_value.has_changes():
            return True
        return False
    # --- generated accessors for child 'value' ---
    # Only the property setter marks the object dirty.
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        # pre-existing values are queued for deletion by persistence
        if not self.is_new:
            self.db_deleted_value.append(self._db_value)
        self._db_value = None
class DBConfigStr(object):
    """Generated domain object holding one scalar string value
    ('config_str' in the schema)."""
    vtType = 'config_str'

    def __init__(self, value=None):
        self._db_value = value
        # a freshly built object is both new and dirty
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBConfigStr.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy; with new_ids, allocate a fresh id via id_scope and
        record the old->new mapping in id_remap."""
        dup = DBConfigStr(value=self._db_value)
        if new_ids:
            fresh_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                src_type = id_scope.remap[self.vtType]
            else:
                src_type = self.vtType
            id_remap[(src_type, self.db_id)] = fresh_id
            dup.db_id = fresh_id
        else:
            # plain copies keep the original's persistence flags
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj across schema versions using the per-field hook
        functions registered (if any) under this class name in trans_dict."""
        if new_obj is None:
            new_obj = DBConfigStr()
        hooks = trans_dict.get(new_obj.__class__.__name__, {})
        if 'value' in hooks:
            new_obj.db_value = hooks['value'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_value', None) is not None:
            new_obj.db_value = old_obj.db_value
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # leaf object: only itself, tagged with its parent's (type, id)
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # scalar-only object: nothing is ever tracked as deleted
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    # --- accessors for scalar field 'value' ---
    # Only the property setter flags the object dirty.
    def __read_value(self):
        return self._db_value

    def __write_value(self, value):
        self._db_value = value
        self.is_dirty = True

    db_value = property(__read_value, __write_value)

    def db_add_value(self, value):
        self._db_value = value

    def db_change_value(self, value):
        self._db_value = value

    def db_delete_value(self, value):
        # the argument is ignored; the field is simply cleared
        self._db_value = None
class DBStartup(object):
    """Generated domain object for the application startup record: a schema
    version plus three optional child objects (configuration, enabled
    packages, disabled packages)."""
    vtType = 'startup'
    def __init__(self, version=None, configuration=None, enabled_packages=None, disabled_packages=None):
        self._db_version = version
        # each single-child field keeps a parallel deletion queue
        self.db_deleted_configuration = []
        self._db_configuration = configuration
        self.db_deleted_enabled_packages = []
        self._db_enabled_packages = enabled_packages
        self.db_deleted_disabled_packages = []
        self._db_disabled_packages = disabled_packages
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBStartup.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object and its three children; with new_ids,
        allocate a fresh id from id_scope and record it in id_remap."""
        cp = DBStartup(version=self._db_version)
        if self._db_configuration is not None:
            cp._db_configuration = self._db_configuration.do_copy(new_ids, id_scope, id_remap)
        if self._db_enabled_packages is not None:
            cp._db_enabled_packages = self._db_enabled_packages.do_copy(new_ids, id_scope, id_remap)
        if self._db_disabled_packages is not None:
            cp._db_disabled_packages = self._db_disabled_packages.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema version, applying any
        per-field hook functions registered in trans_dict and recursively
        updating the child objects."""
        if new_obj is None:
            new_obj = DBStartup()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'configuration' in class_dict:
            res = class_dict['configuration'](old_obj, trans_dict)
            new_obj.db_configuration = res
        elif hasattr(old_obj, 'db_configuration') and old_obj.db_configuration is not None:
            obj = old_obj.db_configuration
            new_obj.db_add_configuration(DBConfiguration.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_configuration') and hasattr(new_obj, 'db_deleted_configuration'):
            for obj in old_obj.db_deleted_configuration:
                n_obj = DBConfiguration.update_version(obj, trans_dict)
                new_obj.db_deleted_configuration.append(n_obj)
        if 'enabled_packages' in class_dict:
            res = class_dict['enabled_packages'](old_obj, trans_dict)
            new_obj.db_enabled_packages = res
        elif hasattr(old_obj, 'db_enabled_packages') and old_obj.db_enabled_packages is not None:
            obj = old_obj.db_enabled_packages
            new_obj.db_add_enabled_packages(DBEnabledPackages.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_enabled_packages') and hasattr(new_obj, 'db_deleted_enabled_packages'):
            for obj in old_obj.db_deleted_enabled_packages:
                n_obj = DBEnabledPackages.update_version(obj, trans_dict)
                new_obj.db_deleted_enabled_packages.append(n_obj)
        if 'disabled_packages' in class_dict:
            res = class_dict['disabled_packages'](old_obj, trans_dict)
            new_obj.db_disabled_packages = res
        elif hasattr(old_obj, 'db_disabled_packages') and old_obj.db_disabled_packages is not None:
            obj = old_obj.db_disabled_packages
            new_obj.db_add_disabled_packages(DBDisabledPackages.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_disabled_packages') and hasattr(new_obj, 'db_deleted_disabled_packages'):
            for obj in old_obj.db_deleted_disabled_packages:
                n_obj = DBDisabledPackages.update_version(obj, trans_dict)
                new_obj.db_deleted_disabled_packages.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object
        and each non-None child subtree; with orphan=True the children are
        detached after being collected."""
        children = []
        if self._db_configuration is not None:
            children.extend(self._db_configuration.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_configuration = None
        if self._db_enabled_packages is not None:
            children.extend(self._db_enabled_packages.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_enabled_packages = None
        if self._db_disabled_packages is not None:
            children.extend(self._db_disabled_packages.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_disabled_packages = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_configuration)
        children.extend(self.db_deleted_enabled_packages)
        children.extend(self.db_deleted_disabled_packages)
        if remove:
            self.db_deleted_configuration = []
            self.db_deleted_enabled_packages = []
            self.db_deleted_disabled_packages = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        if self._db_configuration is not None and self._db_configuration.has_changes():
            return True
        if self._db_enabled_packages is not None and self._db_enabled_packages.has_changes():
            return True
        if self._db_disabled_packages is not None and self._db_disabled_packages.has_changes():
            return True
        return False
    # --- generated accessors for scalar field 'version' ---
    # Only the property setter marks the object dirty.
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        # argument ignored; field cleared
        self._db_version = None
    # --- generated accessors for child 'configuration' ---
    def __get_db_configuration(self):
        return self._db_configuration
    def __set_db_configuration(self, configuration):
        self._db_configuration = configuration
        self.is_dirty = True
    db_configuration = property(__get_db_configuration, __set_db_configuration)
    def db_add_configuration(self, configuration):
        self._db_configuration = configuration
    def db_change_configuration(self, configuration):
        self._db_configuration = configuration
    def db_delete_configuration(self, configuration):
        # pre-existing children are queued for deletion by persistence
        if not self.is_new:
            self.db_deleted_configuration.append(self._db_configuration)
        self._db_configuration = None
    # --- generated accessors for child 'enabled_packages' ---
    def __get_db_enabled_packages(self):
        return self._db_enabled_packages
    def __set_db_enabled_packages(self, enabled_packages):
        self._db_enabled_packages = enabled_packages
        self.is_dirty = True
    db_enabled_packages = property(__get_db_enabled_packages, __set_db_enabled_packages)
    def db_add_enabled_packages(self, enabled_packages):
        self._db_enabled_packages = enabled_packages
    def db_change_enabled_packages(self, enabled_packages):
        self._db_enabled_packages = enabled_packages
    def db_delete_enabled_packages(self, enabled_packages):
        if not self.is_new:
            self.db_deleted_enabled_packages.append(self._db_enabled_packages)
        self._db_enabled_packages = None
    # --- generated accessors for child 'disabled_packages' ---
    def __get_db_disabled_packages(self):
        return self._db_disabled_packages
    def __set_db_disabled_packages(self, disabled_packages):
        self._db_disabled_packages = disabled_packages
        self.is_dirty = True
    db_disabled_packages = property(__get_db_disabled_packages, __set_db_disabled_packages)
    def db_add_disabled_packages(self, disabled_packages):
        self._db_disabled_packages = disabled_packages
    def db_change_disabled_packages(self, disabled_packages):
        self._db_disabled_packages = disabled_packages
    def db_delete_disabled_packages(self, disabled_packages):
        if not self.is_new:
            self.db_deleted_disabled_packages.append(self._db_disabled_packages)
        self._db_disabled_packages = None
class DBModule(object):
vtType = 'module'
    def __init__(self, id=None, cache=None, name=None, namespace=None, package=None, version=None, location=None, functions=None, annotations=None, portSpecs=None):
        """Initialize a module record: scalar identity fields, an optional
        location child, and three child lists (functions, annotations,
        portSpecs), each with lookup indices built from the given lists."""
        self._db_id = id
        self._db_cache = cache
        self._db_name = name
        self._db_namespace = namespace
        self._db_package = package
        self._db_version = version
        self.db_deleted_location = []
        self._db_location = location
        self.db_deleted_functions = []
        self.db_functions_id_index = {}
        if functions is None:
            self._db_functions = []
        else:
            self._db_functions = functions
            # index the supplied children by id for O(1) lookups
            for v in self._db_functions:
                self.db_functions_id_index[v.db_id] = v
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        self.db_annotations_key_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
            for v in self._db_annotations:
                self.db_annotations_id_index[v.db_id] = v
                self.db_annotations_key_index[v.db_key] = v
        self.db_deleted_portSpecs = []
        self.db_portSpecs_id_index = {}
        self.db_portSpecs_name_index = {}
        if portSpecs is None:
            self._db_portSpecs = []
        else:
            self._db_portSpecs = portSpecs
            for v in self._db_portSpecs:
                self.db_portSpecs_id_index[v.db_id] = v
                # port specs are also looked up by (name, type) pairs
                self.db_portSpecs_name_index[(v.db_name,v.db_type)] = v
        # freshly constructed objects start out new and dirty
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to the generated deep-copy without new ids
        return DBModule.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this module and its children; with new_ids, allocate a
        fresh id from id_scope and record the old->new pair in id_remap.
        The lookup indices are rebuilt from the copied child lists."""
        cp = DBModule(id=self._db_id,
                      cache=self._db_cache,
                      name=self._db_name,
                      namespace=self._db_namespace,
                      package=self._db_package,
                      version=self._db_version)
        if self._db_location is not None:
            cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap)
        if self._db_functions is None:
            cp._db_functions = []
        else:
            cp._db_functions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_functions]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        if self._db_portSpecs is None:
            cp._db_portSpecs = []
        else:
            cp._db_portSpecs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_portSpecs]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_functions_id_index = dict((v.db_id, v) for v in cp._db_functions)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
        cp.db_portSpecs_id_index = dict((v.db_id, v) for v in cp._db_portSpecs)
        cp.db_portSpecs_name_index = dict(((v.db_name,v.db_type), v) for v in cp._db_portSpecs)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into a (new-schema) DBModule.

        trans_dict may hold per-class dicts of per-field converter
        callables, keyed by class name then field name.  A converter
        takes precedence; otherwise a field is copied verbatim when
        present and non-None.  Children are translated recursively via
        their own update_version, including pending-delete lists.
        """
        if new_obj is None:
            new_obj = DBModule()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        # Scalar fields: converter wins, else verbatim copy.
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'cache' in class_dict:
            res = class_dict['cache'](old_obj, trans_dict)
            new_obj.db_cache = res
        elif hasattr(old_obj, 'db_cache') and old_obj.db_cache is not None:
            new_obj.db_cache = old_obj.db_cache
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'namespace' in class_dict:
            res = class_dict['namespace'](old_obj, trans_dict)
            new_obj.db_namespace = res
        elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
            new_obj.db_namespace = old_obj.db_namespace
        if 'package' in class_dict:
            res = class_dict['package'](old_obj, trans_dict)
            new_obj.db_package = res
        elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
            new_obj.db_package = old_obj.db_package
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        # Child objects: translate recursively, then carry deleted lists.
        if 'location' in class_dict:
            res = class_dict['location'](old_obj, trans_dict)
            new_obj.db_location = res
        elif hasattr(old_obj, 'db_location') and old_obj.db_location is not None:
            obj = old_obj.db_location
            new_obj.db_add_location(DBLocation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'):
            for obj in old_obj.db_deleted_location:
                n_obj = DBLocation.update_version(obj, trans_dict)
                new_obj.db_deleted_location.append(n_obj)
        if 'functions' in class_dict:
            res = class_dict['functions'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_function(obj)
        elif hasattr(old_obj, 'db_functions') and old_obj.db_functions is not None:
            for obj in old_obj.db_functions:
                new_obj.db_add_function(DBFunction.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
            for obj in old_obj.db_deleted_functions:
                n_obj = DBFunction.update_version(obj, trans_dict)
                new_obj.db_deleted_functions.append(n_obj)
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        if 'portSpecs' in class_dict:
            res = class_dict['portSpecs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_portSpec(obj)
        elif hasattr(old_obj, 'db_portSpecs') and old_obj.db_portSpecs is not None:
            for obj in old_obj.db_portSpecs:
                new_obj.db_add_portSpec(DBPortSpec.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_portSpecs') and hasattr(new_obj, 'db_deleted_portSpecs'):
            for obj in old_obj.db_deleted_portSpecs:
                n_obj = DBPortSpec.update_version(obj, trans_dict)
                new_obj.db_deleted_portSpecs.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) tuples for this
        module and every descendant, children first, self appended last.

        With orphan=True each child is additionally detached from this
        module (via the db_delete_* helpers) after being collected.
        """
        children = []
        if self._db_location is not None:
            children.extend(self._db_location.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_location = None
        to_del = []
        for child in self.db_functions:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_function(child)
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        to_del = []
        for child in self.db_portSpecs:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_portSpec(child)
        children.append((self, parent[0], parent[1]))
        return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_location)
children.extend(self.db_deleted_functions)
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_portSpecs)
if remove:
self.db_deleted_location = []
self.db_deleted_functions = []
self.db_deleted_annotations = []
self.db_deleted_portSpecs = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_location is not None and self._db_location.has_changes():
return True
for child in self._db_functions:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_portSpecs:
if child.has_changes():
return True
return False
    # Boilerplate accessors for the scalar fields.  Pattern per field F:
    #   db_F property        - get/set; the setter marks the object dirty
    #   db_add_F/db_change_F - assign without touching is_dirty
    #   db_delete_F          - ignores its argument and clears the field
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_cache(self):
        return self._db_cache
    def __set_db_cache(self, cache):
        self._db_cache = cache
        self.is_dirty = True
    db_cache = property(__get_db_cache, __set_db_cache)
    def db_add_cache(self, cache):
        self._db_cache = cache
    def db_change_cache(self, cache):
        self._db_cache = cache
    def db_delete_cache(self, cache):
        self._db_cache = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_namespace(self):
        return self._db_namespace
    def __set_db_namespace(self, namespace):
        self._db_namespace = namespace
        self.is_dirty = True
    db_namespace = property(__get_db_namespace, __set_db_namespace)
    def db_add_namespace(self, namespace):
        self._db_namespace = namespace
    def db_change_namespace(self, namespace):
        self._db_namespace = namespace
    def db_delete_namespace(self, namespace):
        self._db_namespace = None
    def __get_db_package(self):
        return self._db_package
    def __set_db_package(self, package):
        self._db_package = package
        self.is_dirty = True
    db_package = property(__get_db_package, __set_db_package)
    def db_add_package(self, package):
        self._db_package = package
    def db_change_package(self, package):
        self._db_package = package
    def db_delete_package(self, package):
        self._db_package = None
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    # 'location' is a single (non-list) child object.
    def __get_db_location(self):
        return self._db_location
    def __set_db_location(self, location):
        self._db_location = location
        self.is_dirty = True
    db_location = property(__get_db_location, __set_db_location)
    def db_add_location(self, location):
        self._db_location = location
    def db_change_location(self, location):
        self._db_location = location
    def db_delete_location(self, location):
        # The argument is ignored: the currently-held location is what
        # gets parked on db_deleted_location (only if this module has
        # been persisted, i.e. is not new).
        if not self.is_new:
            self.db_deleted_location.append(self._db_location)
        self._db_location = None
    # 'functions' is a list-valued child collection mirrored by an
    # id -> object index (db_functions_id_index).
    def __get_db_functions(self):
        return self._db_functions
    def __set_db_functions(self, functions):
        # NOTE: replacing the list wholesale does not rebuild the index.
        self._db_functions = functions
        self.is_dirty = True
    db_functions = property(__get_db_functions, __set_db_functions)
    def db_get_functions(self):
        return self._db_functions
    def db_add_function(self, function):
        self.is_dirty = True
        self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_change_function(self, function):
        # Replace the entry with a matching db_id, or append if absent.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == function.db_id:
                self._db_functions[i] = function
                found = True
                break
        if not found:
            self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_delete_function(self, function):
        # Non-new entries are parked on db_deleted_functions for the
        # store layer; raises KeyError (via the index del) if the id is
        # not in the index.
        self.is_dirty = True
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == function.db_id:
                if not self._db_functions[i].is_new:
                    self.db_deleted_functions.append(self._db_functions[i])
                del self._db_functions[i]
                break
        del self.db_functions_id_index[function.db_id]
    def db_get_function(self, key):
        # Linear scan by db_id; returns None when not found.
        for i in xrange(len(self._db_functions)):
            if self._db_functions[i].db_id == key:
                return self._db_functions[i]
        return None
    def db_get_function_by_id(self, key):
        return self.db_functions_id_index[key]
    def db_has_function_with_id(self, key):
        return key in self.db_functions_id_index
    # 'annotations' is a list-valued child collection mirrored by two
    # indexes: by db_id and by db_key.
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        # NOTE: replacing the list wholesale does not rebuild the indexes.
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_change_annotation(self, annotation):
        # Replace the entry with a matching db_id, or append if absent.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_delete_annotation(self, annotation):
        # Non-new entries are parked on db_deleted_annotations; raises
        # KeyError (via the index dels) if the id/key is not indexed.
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
        del self.db_annotations_key_index[annotation.db_key]
    def db_get_annotation(self, key):
        # Linear scan by db_id; returns None when not found.
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def db_get_annotation_by_key(self, key):
        return self.db_annotations_key_index[key]
    def db_has_annotation_with_key(self, key):
        return key in self.db_annotations_key_index
    # 'portSpecs' is a list-valued child collection mirrored by two
    # indexes: by db_id and by the composite key (db_name, db_type).
    def __get_db_portSpecs(self):
        return self._db_portSpecs
    def __set_db_portSpecs(self, portSpecs):
        # NOTE: replacing the list wholesale does not rebuild the indexes.
        self._db_portSpecs = portSpecs
        self.is_dirty = True
    db_portSpecs = property(__get_db_portSpecs, __set_db_portSpecs)
    def db_get_portSpecs(self):
        return self._db_portSpecs
    def db_add_portSpec(self, portSpec):
        self.is_dirty = True
        self._db_portSpecs.append(portSpec)
        self.db_portSpecs_id_index[portSpec.db_id] = portSpec
        self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
    def db_change_portSpec(self, portSpec):
        # Replace the entry with a matching db_id, or append if absent.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_portSpecs)):
            if self._db_portSpecs[i].db_id == portSpec.db_id:
                self._db_portSpecs[i] = portSpec
                found = True
                break
        if not found:
            self._db_portSpecs.append(portSpec)
        self.db_portSpecs_id_index[portSpec.db_id] = portSpec
        self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
    def db_delete_portSpec(self, portSpec):
        # Non-new entries are parked on db_deleted_portSpecs; raises
        # KeyError (via the index dels) if the id/name is not indexed.
        self.is_dirty = True
        for i in xrange(len(self._db_portSpecs)):
            if self._db_portSpecs[i].db_id == portSpec.db_id:
                if not self._db_portSpecs[i].is_new:
                    self.db_deleted_portSpecs.append(self._db_portSpecs[i])
                del self._db_portSpecs[i]
                break
        del self.db_portSpecs_id_index[portSpec.db_id]
        del self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)]
    def db_get_portSpec(self, key):
        # Linear scan by db_id; returns None when not found.
        for i in xrange(len(self._db_portSpecs)):
            if self._db_portSpecs[i].db_id == key:
                return self._db_portSpecs[i]
        return None
    def db_get_portSpec_by_id(self, key):
        return self.db_portSpecs_id_index[key]
    def db_has_portSpec_with_id(self, key):
        return key in self.db_portSpecs_id_index
    def db_get_portSpec_by_name(self, key):
        return self.db_portSpecs_name_index[key]
    def db_has_portSpec_with_name(self, key):
        return key in self.db_portSpecs_name_index
    def getPrimaryKey(self):
        # The database primary key for a module is its id field.
        return self._db_id
class DBPort(object):
    """One endpoint of a connection: a named port (with signature) on a
    specific module.

    Plain record type with scalar fields only -- no child collections,
    so db_children/db_deleted_children are trivial.
    """
    vtType = 'port'
    def __init__(self, id=None, type=None, moduleId=None, moduleName=None, name=None, signature=None):
        self._db_id = id
        self._db_type = type
        self._db_moduleId = moduleId
        self._db_moduleName = moduleName
        self._db_name = name
        self._db_signature = signature
        # Freshly constructed objects are considered dirty and new.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() hook; delegates to do_copy() without id remapping.
        return DBPort.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy; with new_ids=True draw a fresh id from
        id_scope, record the mapping in id_remap, and remap the
        moduleId cross-reference if the referenced module was remapped.
        """
        cp = DBPort(id=self._db_id,
                    type=self._db_type,
                    moduleId=self._db_moduleId,
                    moduleName=self._db_moduleName,
                    name=self._db_name,
                    signature=self._db_signature)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            # moduleId is a cross-reference: follow the ('module', id) remap.
            if hasattr(self, 'db_moduleId') and ('module', self._db_moduleId) in id_remap:
                cp._db_moduleId = id_remap[('module', self._db_moduleId)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into a DBPort; per-field converters from
        trans_dict win, otherwise non-None fields are copied verbatim."""
        if new_obj is None:
            new_obj = DBPort()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'type' in class_dict:
            res = class_dict['type'](old_obj, trans_dict)
            new_obj.db_type = res
        elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None:
            new_obj.db_type = old_obj.db_type
        if 'moduleId' in class_dict:
            res = class_dict['moduleId'](old_obj, trans_dict)
            new_obj.db_moduleId = res
        elif hasattr(old_obj, 'db_moduleId') and old_obj.db_moduleId is not None:
            new_obj.db_moduleId = old_obj.db_moduleId
        if 'moduleName' in class_dict:
            res = class_dict['moduleName'](old_obj, trans_dict)
            new_obj.db_moduleName = res
        elif hasattr(old_obj, 'db_moduleName') and old_obj.db_moduleName is not None:
            new_obj.db_moduleName = old_obj.db_moduleName
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'signature' in class_dict:
            res = class_dict['signature'](old_obj, trans_dict)
            new_obj.db_signature = res
        elif hasattr(old_obj, 'db_signature') and old_obj.db_signature is not None:
            new_obj.db_signature = old_obj.db_signature
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only "child" tuple is this port itself.
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        # No child collections, so nothing can be pending deletion.
        children = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        return False
    # Boilerplate accessors for the scalar fields.  Pattern per field F:
    #   db_F property        - get/set; the setter marks the object dirty
    #   db_add_F/db_change_F - assign without touching is_dirty
    #   db_delete_F          - ignores its argument and clears the field
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_type(self):
        return self._db_type
    def __set_db_type(self, type):
        self._db_type = type
        self.is_dirty = True
    db_type = property(__get_db_type, __set_db_type)
    def db_add_type(self, type):
        self._db_type = type
    def db_change_type(self, type):
        self._db_type = type
    def db_delete_type(self, type):
        self._db_type = None
    def __get_db_moduleId(self):
        return self._db_moduleId
    def __set_db_moduleId(self, moduleId):
        self._db_moduleId = moduleId
        self.is_dirty = True
    db_moduleId = property(__get_db_moduleId, __set_db_moduleId)
    def db_add_moduleId(self, moduleId):
        self._db_moduleId = moduleId
    def db_change_moduleId(self, moduleId):
        self._db_moduleId = moduleId
    def db_delete_moduleId(self, moduleId):
        self._db_moduleId = None
    def __get_db_moduleName(self):
        return self._db_moduleName
    def __set_db_moduleName(self, moduleName):
        self._db_moduleName = moduleName
        self.is_dirty = True
    db_moduleName = property(__get_db_moduleName, __set_db_moduleName)
    def db_add_moduleName(self, moduleName):
        self._db_moduleName = moduleName
    def db_change_moduleName(self, moduleName):
        self._db_moduleName = moduleName
    def db_delete_moduleName(self, moduleName):
        self._db_moduleName = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_signature(self):
        return self._db_signature
    def __set_db_signature(self, signature):
        self._db_signature = signature
        self.is_dirty = True
    db_signature = property(__get_db_signature, __set_db_signature)
    def db_add_signature(self, signature):
        self._db_signature = signature
    def db_change_signature(self, signature):
        self._db_signature = signature
    def db_delete_signature(self, signature):
        self._db_signature = None
    def getPrimaryKey(self):
        # The database primary key for a port is its id field.
        return self._db_id
class DBOpmAgents(object):
    """Container for OPM agent records, kept in a list plus an
    id -> agent index for O(1) lookup."""
    vtType = 'opm_agents'
    def __init__(self, agents=None):
        # Agents removed before a save are parked on db_deleted_agents.
        self.db_deleted_agents = []
        self._db_agents = [] if agents is None else agents
        self.db_agents_id_index = dict((a.db_id, a) for a in self._db_agents)
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() hook; delegates to do_copy() without id remapping.
        return DBOpmAgents.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy; with new_ids=True draw a fresh id from
        id_scope and record the mapping in id_remap."""
        cp = DBOpmAgents()
        if self._db_agents is None:
            cp._db_agents = []
        else:
            cp._db_agents = [a.do_copy(new_ids, id_scope, id_remap)
                             for a in self._db_agents]
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            key = id_scope.remap[self.vtType] if self.vtType in id_scope.remap \
                else self.vtType
            id_remap[(key, self.db_id)] = fresh
            cp.db_id = fresh
        # Rebuild the id index from the copied children.
        cp.db_agents_id_index = dict((a.db_id, a) for a in cp._db_agents)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into a DBOpmAgents; children go through
        DBOpmAgent.update_version unless a converter is supplied."""
        if new_obj is None:
            new_obj = DBOpmAgents()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'agents' in class_dict:
            for child in class_dict['agents'](old_obj, trans_dict):
                new_obj.db_add_agent(child)
        elif hasattr(old_obj, 'db_agents') and old_obj.db_agents is not None:
            for child in old_obj.db_agents:
                new_obj.db_add_agent(DBOpmAgent.update_version(child, trans_dict))
        if hasattr(old_obj, 'db_deleted_agents') and hasattr(new_obj, 'db_deleted_agents'):
            for child in old_obj.db_deleted_agents:
                new_obj.db_deleted_agents.append(
                    DBOpmAgent.update_version(child, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) tuples, children
        first; with orphan=True children are detached afterwards."""
        collected = []
        doomed = []
        for agent in self.db_agents:
            collected.extend(agent.db_children((self.vtType, self.db_id),
                                               orphan, for_action))
            if orphan:
                doomed.append(agent)
        for agent in doomed:
            self.db_delete_agent(agent)
        collected.append((self, parent[0], parent[1]))
        return collected
    def db_deleted_children(self, remove=False):
        """Return (a copy of) the pending deletions; clear when asked."""
        pending = list(self.db_deleted_agents)
        if remove:
            self.db_deleted_agents = []
        return pending
    def has_changes(self):
        if self.is_dirty:
            return True
        return any(agent.has_changes() for agent in self._db_agents)
    def __get_db_agents(self):
        return self._db_agents
    def __set_db_agents(self, agents):
        self._db_agents = agents
        self.is_dirty = True
    db_agents = property(__get_db_agents, __set_db_agents)
    def db_get_agents(self):
        return self._db_agents
    def db_add_agent(self, agent):
        self.is_dirty = True
        self._db_agents.append(agent)
        self.db_agents_id_index[agent.db_id] = agent
    def db_change_agent(self, agent):
        # Replace the entry with a matching db_id, or append if absent.
        self.is_dirty = True
        for pos, existing in enumerate(self._db_agents):
            if existing.db_id == agent.db_id:
                self._db_agents[pos] = agent
                break
        else:
            self._db_agents.append(agent)
        self.db_agents_id_index[agent.db_id] = agent
    def db_delete_agent(self, agent):
        # Non-new entries are parked on db_deleted_agents; raises
        # KeyError (via the index del) if the id is not indexed.
        self.is_dirty = True
        for pos, existing in enumerate(self._db_agents):
            if existing.db_id == agent.db_id:
                if not existing.is_new:
                    self.db_deleted_agents.append(existing)
                del self._db_agents[pos]
                break
        del self.db_agents_id_index[agent.db_id]
    def db_get_agent(self, key):
        # Linear scan by db_id; returns None when not found.
        for existing in self._db_agents:
            if existing.db_id == key:
                return existing
        return None
    def db_get_agent_by_id(self, key):
        return self.db_agents_id_index[key]
    def db_has_agent_with_id(self, key):
        return key in self.db_agents_id_index
class DBOpmDependencies(object):
    """Container for the heterogeneous OPM dependency records (a tagged
    union of used / wasGeneratedBy / wasTriggeredBy / wasDerivedFrom /
    wasControlledBy children).  Children are non-keyed: there is no id
    index, and in-place change/delete are not supported."""
    vtType = 'opm_dependencies'
    def __init__(self, dependencys=None):
        self.db_deleted_dependencys = []
        self._db_dependencys = [] if dependencys is None else dependencys
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() hook; delegates to do_copy() without id remapping.
        return DBOpmDependencies.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy; with new_ids=True draw a fresh id from
        id_scope and record the mapping in id_remap."""
        cp = DBOpmDependencies()
        if self._db_dependencys is None:
            cp._db_dependencys = []
        else:
            cp._db_dependencys = [d.do_copy(new_ids, id_scope, id_remap)
                                  for d in self._db_dependencys]
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = fresh
            else:
                id_remap[(self.vtType, self.db_id)] = fresh
            cp.db_id = fresh
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into a DBOpmDependencies, dispatching each
        child on its vtType tag; unknown tags are silently skipped."""
        if new_obj is None:
            new_obj = DBOpmDependencies()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        def _translate(obj):
            # vtType -> translator class for the dependency union.
            translators = {'opm_used': DBOpmUsed,
                           'opm_was_generated_by': DBOpmWasGeneratedBy,
                           'opm_was_triggered_by': DBOpmWasTriggeredBy,
                           'opm_was_derived_from': DBOpmWasDerivedFrom,
                           'opm_was_controlled_by': DBOpmWasControlledBy}
            klass = translators.get(obj.vtType)
            if klass is None:
                return None
            return klass.update_version(obj, trans_dict)
        if 'dependencys' in class_dict:
            for obj in class_dict['dependencys'](old_obj, trans_dict):
                new_obj.db_add_dependency(obj)
        elif hasattr(old_obj, 'db_dependencys') and old_obj.db_dependencys is not None:
            for obj in old_obj.db_dependencys:
                translated = _translate(obj)
                if translated is not None:
                    new_obj.db_add_dependency(translated)
        if hasattr(old_obj, 'db_deleted_dependencys') and hasattr(new_obj, 'db_deleted_dependencys'):
            for obj in old_obj.db_deleted_dependencys:
                translated = _translate(obj)
                if translated is not None:
                    new_obj.db_deleted_dependencys.append(translated)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) tuples, children
        first; with orphan=True children are detached afterwards."""
        collected = []
        doomed = []
        for dep in self.db_dependencys:
            collected.extend(dep.db_children((self.vtType, self.db_id),
                                             orphan, for_action))
            if orphan:
                doomed.append(dep)
        for dep in doomed:
            self.db_delete_dependency(dep)
        collected.append((self, parent[0], parent[1]))
        return collected
    def db_deleted_children(self, remove=False):
        """Return (a copy of) the pending deletions; clear when asked."""
        pending = list(self.db_deleted_dependencys)
        if remove:
            self.db_deleted_dependencys = []
        return pending
    def has_changes(self):
        if self.is_dirty:
            return True
        return any(dep.has_changes() for dep in self._db_dependencys)
    def __get_db_dependencys(self):
        return self._db_dependencys
    def __set_db_dependencys(self, dependencys):
        self._db_dependencys = dependencys
        self.is_dirty = True
    db_dependencys = property(__get_db_dependencys, __set_db_dependencys)
    def db_get_dependencys(self):
        return self._db_dependencys
    def db_add_dependency(self, dependency):
        self.is_dirty = True
        self._db_dependencys.append(dependency)
    def db_change_dependency(self, dependency):
        # Non-keyed children cannot be matched in place: change == add.
        self.is_dirty = True
        self._db_dependencys.append(dependency)
    def db_delete_dependency(self, dependency):
        # Marks dirty first (mirroring the generated pattern), then
        # refuses: non-keyed children cannot be individually deleted.
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_dependency(self, key):
        # Non-keyed children: lookup by key is never possible.
        return None
class DBPEFunction(object):
vtType = 'pe_function'
def __init__(self, id=None, module_id=None, port_name=None, is_alias=None, parameters=None):
self._db_id = id
self._db_module_id = module_id
self._db_port_name = port_name
self._db_is_alias = is_alias
self.db_deleted_parameters = []
self.db_parameters_id_index = {}
if parameters is None:
self._db_parameters = []
else:
self._db_parameters = parameters
for v in self._db_parameters:
self.db_parameters_id_index[v.db_id] = v
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBPEFunction.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBPEFunction(id=self._db_id,
module_id=self._db_module_id,
port_name=self._db_port_name,
is_alias=self._db_is_alias)
if self._db_parameters is None:
cp._db_parameters = []
else:
cp._db_parameters = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_parameters]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_module_id') and ('module', self._db_module_id) in id_remap:
cp._db_module_id = id_remap[('module', self._db_module_id)]
# recreate indices and set flags
cp.db_parameters_id_index = dict((v.db_id, v) for v in cp._db_parameters)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBPEFunction()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'module_id' in class_dict:
res = class_dict['module_id'](old_obj, trans_dict)
new_obj.db_module_id = res
elif hasattr(old_obj, 'db_module_id') and old_obj.db_module_id is not None:
new_obj.db_module_id = old_obj.db_module_id
if 'port_name' in class_dict:
res = class_dict['port_name'](old_obj, trans_dict)
new_obj.db_port_name = res
elif hasattr(old_obj, 'db_port_name') and old_obj.db_port_name is not None:
new_obj.db_port_name = old_obj.db_port_name
if 'is_alias' in class_dict:
res = class_dict['is_alias'](old_obj, trans_dict)
new_obj.db_is_alias = res
elif hasattr(old_obj, 'db_is_alias') and old_obj.db_is_alias is not None:
new_obj.db_is_alias = old_obj.db_is_alias
if 'parameters' in class_dict:
res = class_dict['parameters'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_parameter(obj)
elif hasattr(old_obj, 'db_parameters') and old_obj.db_parameters is not None:
for obj in old_obj.db_parameters:
new_obj.db_add_parameter(DBPEParameter.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_parameters') and hasattr(new_obj, 'db_deleted_parameters'):
for obj in old_obj.db_deleted_parameters:
n_obj = DBPEParameter.update_version(obj, trans_dict)
new_obj.db_deleted_parameters.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
children = []
to_del = []
for child in self.db_parameters:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_parameter(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_parameters)
if remove:
self.db_deleted_parameters = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_parameters:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_module_id(self):
return self._db_module_id
def __set_db_module_id(self, module_id):
self._db_module_id = module_id
self.is_dirty = True
db_module_id = property(__get_db_module_id, __set_db_module_id)
def db_add_module_id(self, module_id):
self._db_module_id = module_id
def db_change_module_id(self, module_id):
self._db_module_id = module_id
def db_delete_module_id(self, module_id):
self._db_module_id = None
def __get_db_port_name(self):
return self._db_port_name
def __set_db_port_name(self, port_name):
self._db_port_name = port_name
self.is_dirty = True
db_port_name = property(__get_db_port_name, __set_db_port_name)
def db_add_port_name(self, port_name):
self._db_port_name = port_name
def db_change_port_name(self, port_name):
self._db_port_name = port_name
def db_delete_port_name(self, port_name):
self._db_port_name = None
def __get_db_is_alias(self):
return self._db_is_alias
def __set_db_is_alias(self, is_alias):
self._db_is_alias = is_alias
self.is_dirty = True
db_is_alias = property(__get_db_is_alias, __set_db_is_alias)
def db_add_is_alias(self, is_alias):
self._db_is_alias = is_alias
def db_change_is_alias(self, is_alias):
self._db_is_alias = is_alias
def db_delete_is_alias(self, is_alias):
self._db_is_alias = None
def __get_db_parameters(self):
return self._db_parameters
def __set_db_parameters(self, parameters):
self._db_parameters = parameters
self.is_dirty = True
db_parameters = property(__get_db_parameters, __set_db_parameters)
def db_get_parameters(self):
return self._db_parameters
def db_add_parameter(self, parameter):
self.is_dirty = True
self._db_parameters.append(parameter)
self.db_parameters_id_index[parameter.db_id] = parameter
def db_change_parameter(self, parameter):
self.is_dirty = True
found = False
for i in xrange(len(self._db_parameters)):
if self._db_parameters[i].db_id == parameter.db_id:
self._db_parameters[i] = parameter
found = True
break
if not found:
self._db_parameters.append(parameter)
self.db_parameters_id_index[parameter.db_id] = parameter
def db_delete_parameter(self, parameter):
self.is_dirty = True
for i in xrange(len(self._db_parameters)):
if self._db_parameters[i].db_id == parameter.db_id:
if not self._db_parameters[i].is_new:
self.db_deleted_parameters.append(self._db_parameters[i])
del self._db_parameters[i]
break
del self.db_parameters_id_index[parameter.db_id]
def db_get_parameter(self, key):
for i in xrange(len(self._db_parameters)):
if self._db_parameters[i].db_id == key:
return self._db_parameters[i]
return None
def db_get_parameter_by_id(self, key):
return self.db_parameters_id_index[key]
def db_has_parameter_with_id(self, key):
return key in self.db_parameters_id_index
    def getPrimaryKey(self):
        # The primary key of this object is its database id.
        return self._db_id
class DBWorkflow(object):
    """Auto-generated domain object for a 'workflow' entity.

    Holds scalar fields (id, entity_type, name, version, last_modified,
    vistrail_id) plus five child collections (modules, connections,
    annotations, plugin_datas, others).  Each collection keeps a
    db_id -> object index for O(1) lookup and a companion db_deleted_*
    list that remembers removed, already-persisted children until they
    are flushed.  is_dirty / is_new track persistence state.
    """
    vtType = 'workflow'
    def __init__(self, modules=None, id=None, entity_type=None, name=None, version=None, last_modified=None, connections=None, annotations=None, plugin_datas=None, others=None, vistrail_id=None):
        # Every child collection follows the same pattern: deleted-objects
        # list, id -> object index, then the backing list itself.
        self.db_deleted_modules = []
        self.db_modules_id_index = {}
        if modules is None:
            self._db_modules = []
        else:
            self._db_modules = modules
            for v in self._db_modules:
                self.db_modules_id_index[v.db_id] = v
        self._db_id = id
        self._db_entity_type = entity_type
        self._db_name = name
        self._db_version = version
        self._db_last_modified = last_modified
        self.db_deleted_connections = []
        self.db_connections_id_index = {}
        if connections is None:
            self._db_connections = []
        else:
            self._db_connections = connections
            for v in self._db_connections:
                self.db_connections_id_index[v.db_id] = v
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
            for v in self._db_annotations:
                self.db_annotations_id_index[v.db_id] = v
        self.db_deleted_plugin_datas = []
        self.db_plugin_datas_id_index = {}
        if plugin_datas is None:
            self._db_plugin_datas = []
        else:
            self._db_plugin_datas = plugin_datas
            for v in self._db_plugin_datas:
                self.db_plugin_datas_id_index[v.db_id] = v
        self.db_deleted_others = []
        self.db_others_id_index = {}
        if others is None:
            self._db_others = []
        else:
            self._db_others = others
            for v in self._db_others:
                self.db_others_id_index[v.db_id] = v
        self._db_vistrail_id = vistrail_id
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBWorkflow.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a deep copy of this workflow.

        When new_ids is True, fresh ids are drawn from id_scope and every
        (type, old_id) -> new_id pair is recorded in id_remap so that
        cross-references (e.g. vistrail_id) can be rewritten.
        """
        cp = DBWorkflow(id=self._db_id,
                        entity_type=self._db_entity_type,
                        name=self._db_name,
                        version=self._db_version,
                        last_modified=self._db_last_modified,
                        vistrail_id=self._db_vistrail_id)
        if self._db_modules is None:
            cp._db_modules = []
        else:
            cp._db_modules = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_modules]
        if self._db_connections is None:
            cp._db_connections = []
        else:
            cp._db_connections = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_connections]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        if self._db_plugin_datas is None:
            cp._db_plugin_datas = []
        else:
            cp._db_plugin_datas = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_plugin_datas]
        if self._db_others is None:
            cp._db_others = []
        else:
            cp._db_others = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_others]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_vistrail_id') and ('vistrail', self._db_vistrail_id) in id_remap:
                cp._db_vistrail_id = id_remap[('vistrail', self._db_vistrail_id)]
        # recreate indices and set flags
        cp.db_modules_id_index = dict((v.db_id, v) for v in cp._db_modules)
        cp.db_connections_id_index = dict((v.db_id, v) for v in cp._db_connections)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        cp.db_plugin_datas_id_index = dict((v.db_id, v) for v in cp._db_plugin_datas)
        cp.db_others_id_index = dict((v.db_id, v) for v in cp._db_others)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Copy old_obj into new_obj under the current schema version.

        trans_dict may carry per-class dicts of field-translation
        callables that override the default field-by-field copy; module
        children are dispatched on their vtType ('module', 'abstraction',
        'group').  Returns new_obj (freshly created if not supplied).
        """
        if new_obj is None:
            new_obj = DBWorkflow()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'modules' in class_dict:
            res = class_dict['modules'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_module(obj)
        elif hasattr(old_obj, 'db_modules') and old_obj.db_modules is not None:
            for obj in old_obj.db_modules:
                if obj.vtType == 'module':
                    new_obj.db_add_module(DBModule.update_version(obj, trans_dict))
                elif obj.vtType == 'abstraction':
                    new_obj.db_add_module(DBAbstraction.update_version(obj, trans_dict))
                elif obj.vtType == 'group':
                    new_obj.db_add_module(DBGroup.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_modules') and hasattr(new_obj, 'db_deleted_modules'):
            for obj in old_obj.db_deleted_modules:
                if obj.vtType == 'module':
                    n_obj = DBModule.update_version(obj, trans_dict)
                    new_obj.db_deleted_modules.append(n_obj)
                elif obj.vtType == 'abstraction':
                    n_obj = DBAbstraction.update_version(obj, trans_dict)
                    new_obj.db_deleted_modules.append(n_obj)
                elif obj.vtType == 'group':
                    n_obj = DBGroup.update_version(obj, trans_dict)
                    new_obj.db_deleted_modules.append(n_obj)
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'entity_type' in class_dict:
            res = class_dict['entity_type'](old_obj, trans_dict)
            new_obj.db_entity_type = res
        elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
            new_obj.db_entity_type = old_obj.db_entity_type
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'last_modified' in class_dict:
            res = class_dict['last_modified'](old_obj, trans_dict)
            new_obj.db_last_modified = res
        elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
            new_obj.db_last_modified = old_obj.db_last_modified
        if 'connections' in class_dict:
            res = class_dict['connections'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_connection(obj)
        elif hasattr(old_obj, 'db_connections') and old_obj.db_connections is not None:
            for obj in old_obj.db_connections:
                new_obj.db_add_connection(DBConnection.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_connections') and hasattr(new_obj, 'db_deleted_connections'):
            for obj in old_obj.db_deleted_connections:
                n_obj = DBConnection.update_version(obj, trans_dict)
                new_obj.db_deleted_connections.append(n_obj)
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        if 'plugin_datas' in class_dict:
            res = class_dict['plugin_datas'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_plugin_data(obj)
        elif hasattr(old_obj, 'db_plugin_datas') and old_obj.db_plugin_datas is not None:
            for obj in old_obj.db_plugin_datas:
                new_obj.db_add_plugin_data(DBPluginData.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_plugin_datas') and hasattr(new_obj, 'db_deleted_plugin_datas'):
            for obj in old_obj.db_deleted_plugin_datas:
                n_obj = DBPluginData.update_version(obj, trans_dict)
                new_obj.db_deleted_plugin_datas.append(n_obj)
        if 'others' in class_dict:
            res = class_dict['others'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_other(obj)
        elif hasattr(old_obj, 'db_others') and old_obj.db_others is not None:
            for obj in old_obj.db_others:
                new_obj.db_add_other(DBOther.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_others') and hasattr(new_obj, 'db_deleted_others'):
            for obj in old_obj.db_deleted_others:
                n_obj = DBOther.update_version(obj, trans_dict)
                new_obj.db_deleted_others.append(n_obj)
        if 'vistrail_id' in class_dict:
            res = class_dict['vistrail_id'](old_obj, trans_dict)
            new_obj.db_vistrail_id = res
        elif hasattr(old_obj, 'db_vistrail_id') and old_obj.db_vistrail_id is not None:
            new_obj.db_vistrail_id = old_obj.db_vistrail_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for every child
        of every collection, followed by self; with orphan=True the
        children are detached from this workflow as they are collected.
        """
        children = []
        to_del = []
        for child in self.db_connections:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_connection(child)
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        to_del = []
        for child in self.db_plugin_datas:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_plugin_data(child)
        to_del = []
        for child in self.db_others:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_other(child)
        to_del = []
        for child in self.db_modules:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_module(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return all children deleted since the last flush; with
        remove=True the deleted lists are cleared as well.
        """
        children = []
        children.extend(self.db_deleted_connections)
        children.extend(self.db_deleted_annotations)
        children.extend(self.db_deleted_plugin_datas)
        children.extend(self.db_deleted_others)
        children.extend(self.db_deleted_modules)
        if remove:
            self.db_deleted_connections = []
            self.db_deleted_annotations = []
            self.db_deleted_plugin_datas = []
            self.db_deleted_others = []
            self.db_deleted_modules = []
        return children
    def has_changes(self):
        """True when this workflow or any of its children is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_connections:
            if child.has_changes():
                return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        for child in self._db_plugin_datas:
            if child.has_changes():
                return True
        for child in self._db_others:
            if child.has_changes():
                return True
        for child in self._db_modules:
            if child.has_changes():
                return True
        return False
    # --- 'modules' collection: property, add/change/delete, id lookups ---
    def __get_db_modules(self):
        return self._db_modules
    def __set_db_modules(self, modules):
        # NOTE: replaces the list wholesale without rebuilding the index.
        self._db_modules = modules
        self.is_dirty = True
    db_modules = property(__get_db_modules, __set_db_modules)
    def db_get_modules(self):
        return self._db_modules
    def db_add_module(self, module):
        self.is_dirty = True
        self._db_modules.append(module)
        self.db_modules_id_index[module.db_id] = module
    def db_change_module(self, module):
        # Replace the entry with the same db_id, or append if absent.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_modules)):
            if self._db_modules[i].db_id == module.db_id:
                self._db_modules[i] = module
                found = True
                break
        if not found:
            self._db_modules.append(module)
        self.db_modules_id_index[module.db_id] = module
    def db_delete_module(self, module):
        # Already-persisted entries are remembered in db_deleted_modules.
        self.is_dirty = True
        for i in xrange(len(self._db_modules)):
            if self._db_modules[i].db_id == module.db_id:
                if not self._db_modules[i].is_new:
                    self.db_deleted_modules.append(self._db_modules[i])
                del self._db_modules[i]
                break
        del self.db_modules_id_index[module.db_id]
    def db_get_module(self, key):
        # Linear scan; returns None when absent.
        for i in xrange(len(self._db_modules)):
            if self._db_modules[i].db_id == key:
                return self._db_modules[i]
        return None
    def db_get_module_by_id(self, key):
        return self.db_modules_id_index[key]
    def db_has_module_with_id(self, key):
        return key in self.db_modules_id_index
    # --- scalar field 'id' ---
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # --- scalar field 'entity_type' ---
    def __get_db_entity_type(self):
        return self._db_entity_type
    def __set_db_entity_type(self, entity_type):
        self._db_entity_type = entity_type
        self.is_dirty = True
    db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
    def db_add_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_change_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_delete_entity_type(self, entity_type):
        self._db_entity_type = None
    # --- scalar field 'name' ---
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    # --- scalar field 'version' ---
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    # --- scalar field 'last_modified' ---
    def __get_db_last_modified(self):
        return self._db_last_modified
    def __set_db_last_modified(self, last_modified):
        self._db_last_modified = last_modified
        self.is_dirty = True
    db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
    def db_add_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_change_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_delete_last_modified(self, last_modified):
        self._db_last_modified = None
    # --- 'connections' collection: property, add/change/delete, id lookups ---
    def __get_db_connections(self):
        return self._db_connections
    def __set_db_connections(self, connections):
        # NOTE: replaces the list wholesale without rebuilding the index.
        self._db_connections = connections
        self.is_dirty = True
    db_connections = property(__get_db_connections, __set_db_connections)
    def db_get_connections(self):
        return self._db_connections
    def db_add_connection(self, connection):
        self.is_dirty = True
        self._db_connections.append(connection)
        self.db_connections_id_index[connection.db_id] = connection
    def db_change_connection(self, connection):
        # Replace the entry with the same db_id, or append if absent.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_connections)):
            if self._db_connections[i].db_id == connection.db_id:
                self._db_connections[i] = connection
                found = True
                break
        if not found:
            self._db_connections.append(connection)
        self.db_connections_id_index[connection.db_id] = connection
    def db_delete_connection(self, connection):
        # Already-persisted entries are remembered in db_deleted_connections.
        self.is_dirty = True
        for i in xrange(len(self._db_connections)):
            if self._db_connections[i].db_id == connection.db_id:
                if not self._db_connections[i].is_new:
                    self.db_deleted_connections.append(self._db_connections[i])
                del self._db_connections[i]
                break
        del self.db_connections_id_index[connection.db_id]
    def db_get_connection(self, key):
        # Linear scan; returns None when absent.
        for i in xrange(len(self._db_connections)):
            if self._db_connections[i].db_id == key:
                return self._db_connections[i]
        return None
    def db_get_connection_by_id(self, key):
        return self.db_connections_id_index[key]
    def db_has_connection_with_id(self, key):
        return key in self.db_connections_id_index
    # --- 'annotations' collection: property, add/change/delete, id lookups ---
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        # NOTE: replaces the list wholesale without rebuilding the index.
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_change_annotation(self, annotation):
        # Replace the entry with the same db_id, or append if absent.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_delete_annotation(self, annotation):
        # Already-persisted entries are remembered in db_deleted_annotations.
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
    def db_get_annotation(self, key):
        # Linear scan; returns None when absent.
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    # --- 'plugin_datas' collection: property, add/change/delete, id lookups ---
    def __get_db_plugin_datas(self):
        return self._db_plugin_datas
    def __set_db_plugin_datas(self, plugin_datas):
        # NOTE: replaces the list wholesale without rebuilding the index.
        self._db_plugin_datas = plugin_datas
        self.is_dirty = True
    db_plugin_datas = property(__get_db_plugin_datas, __set_db_plugin_datas)
    def db_get_plugin_datas(self):
        return self._db_plugin_datas
    def db_add_plugin_data(self, plugin_data):
        self.is_dirty = True
        self._db_plugin_datas.append(plugin_data)
        self.db_plugin_datas_id_index[plugin_data.db_id] = plugin_data
    def db_change_plugin_data(self, plugin_data):
        # Replace the entry with the same db_id, or append if absent.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_plugin_datas)):
            if self._db_plugin_datas[i].db_id == plugin_data.db_id:
                self._db_plugin_datas[i] = plugin_data
                found = True
                break
        if not found:
            self._db_plugin_datas.append(plugin_data)
        self.db_plugin_datas_id_index[plugin_data.db_id] = plugin_data
    def db_delete_plugin_data(self, plugin_data):
        # Already-persisted entries are remembered in db_deleted_plugin_datas.
        self.is_dirty = True
        for i in xrange(len(self._db_plugin_datas)):
            if self._db_plugin_datas[i].db_id == plugin_data.db_id:
                if not self._db_plugin_datas[i].is_new:
                    self.db_deleted_plugin_datas.append(self._db_plugin_datas[i])
                del self._db_plugin_datas[i]
                break
        del self.db_plugin_datas_id_index[plugin_data.db_id]
    def db_get_plugin_data(self, key):
        # Linear scan; returns None when absent.
        for i in xrange(len(self._db_plugin_datas)):
            if self._db_plugin_datas[i].db_id == key:
                return self._db_plugin_datas[i]
        return None
    def db_get_plugin_data_by_id(self, key):
        return self.db_plugin_datas_id_index[key]
    def db_has_plugin_data_with_id(self, key):
        return key in self.db_plugin_datas_id_index
    # --- 'others' collection: property, add/change/delete, id lookups ---
    def __get_db_others(self):
        return self._db_others
    def __set_db_others(self, others):
        # NOTE: replaces the list wholesale without rebuilding the index.
        self._db_others = others
        self.is_dirty = True
    db_others = property(__get_db_others, __set_db_others)
    def db_get_others(self):
        return self._db_others
    def db_add_other(self, other):
        self.is_dirty = True
        self._db_others.append(other)
        self.db_others_id_index[other.db_id] = other
    def db_change_other(self, other):
        # Replace the entry with the same db_id, or append if absent.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_others)):
            if self._db_others[i].db_id == other.db_id:
                self._db_others[i] = other
                found = True
                break
        if not found:
            self._db_others.append(other)
        self.db_others_id_index[other.db_id] = other
    def db_delete_other(self, other):
        # Already-persisted entries are remembered in db_deleted_others.
        self.is_dirty = True
        for i in xrange(len(self._db_others)):
            if self._db_others[i].db_id == other.db_id:
                if not self._db_others[i].is_new:
                    self.db_deleted_others.append(self._db_others[i])
                del self._db_others[i]
                break
        del self.db_others_id_index[other.db_id]
    def db_get_other(self, key):
        # Linear scan; returns None when absent.
        for i in xrange(len(self._db_others)):
            if self._db_others[i].db_id == key:
                return self._db_others[i]
        return None
    def db_get_other_by_id(self, key):
        return self.db_others_id_index[key]
    def db_has_other_with_id(self, key):
        return key in self.db_others_id_index
    # --- scalar field 'vistrail_id' ---
    def __get_db_vistrail_id(self):
        return self._db_vistrail_id
    def __set_db_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = vistrail_id
        self.is_dirty = True
    db_vistrail_id = property(__get_db_vistrail_id, __set_db_vistrail_id)
    def db_add_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = vistrail_id
    def db_change_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = vistrail_id
    def db_delete_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = None
    def getPrimaryKey(self):
        # The primary key of this object is its database id.
        return self._db_id
class DBMashupAction(object):
    """Auto-generated domain object for a 'mashup_action' entity.

    Holds scalar fields (id, prevId, date, user) and a single child
    mashup object; db_deleted_mashup remembers a replaced/removed,
    already-persisted mashup until it is flushed.  is_dirty / is_new
    track persistence state.
    """
    vtType = 'mashup_action'
    def __init__(self, id=None, prevId=None, date=None, user=None, mashup=None):
        self._db_id = id
        self._db_prevId = prevId
        self._db_date = date
        self._db_user = user
        self.db_deleted_mashup = []
        self._db_mashup = mashup
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBMashupAction.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a deep copy of this action.

        When new_ids is True, a fresh id is drawn from id_scope, the
        (type, old_id) -> new_id pair is recorded in id_remap, and
        prevId is rewritten if its target action was also remapped.
        """
        cp = DBMashupAction(id=self._db_id,
                            prevId=self._db_prevId,
                            date=self._db_date,
                            user=self._db_user)
        if self._db_mashup is not None:
            cp._db_mashup = self._db_mashup.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_prevId') and ('mashup_action', self._db_prevId) in id_remap:
                cp._db_prevId = id_remap[('mashup_action', self._db_prevId)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Copy old_obj into new_obj under the current schema version.

        trans_dict may carry per-class dicts of field-translation
        callables that override the default field-by-field copy.
        Returns new_obj (freshly created if not supplied).
        """
        if new_obj is None:
            new_obj = DBMashupAction()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'prevId' in class_dict:
            res = class_dict['prevId'](old_obj, trans_dict)
            new_obj.db_prevId = res
        elif hasattr(old_obj, 'db_prevId') and old_obj.db_prevId is not None:
            new_obj.db_prevId = old_obj.db_prevId
        if 'date' in class_dict:
            res = class_dict['date'](old_obj, trans_dict)
            new_obj.db_date = res
        elif hasattr(old_obj, 'db_date') and old_obj.db_date is not None:
            new_obj.db_date = old_obj.db_date
        if 'user' in class_dict:
            res = class_dict['user'](old_obj, trans_dict)
            new_obj.db_user = res
        elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None:
            new_obj.db_user = old_obj.db_user
        if 'mashup' in class_dict:
            res = class_dict['mashup'](old_obj, trans_dict)
            new_obj.db_mashup = res
        elif hasattr(old_obj, 'db_mashup') and old_obj.db_mashup is not None:
            obj = old_obj.db_mashup
            new_obj.db_add_mashup(DBMashup.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_mashup') and hasattr(new_obj, 'db_deleted_mashup'):
            for obj in old_obj.db_deleted_mashup:
                n_obj = DBMashup.update_version(obj, trans_dict)
                new_obj.db_deleted_mashup.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for the child
        mashup (if any) followed by self; with orphan=True the mashup is
        detached from this action.
        """
        children = []
        if self._db_mashup is not None:
            children.extend(self._db_mashup.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_mashup = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return mashups deleted since the last flush; with remove=True
        the deleted list is cleared as well.
        """
        children = []
        children.extend(self.db_deleted_mashup)
        if remove:
            self.db_deleted_mashup = []
        return children
    def has_changes(self):
        """True when this action or its child mashup is dirty."""
        if self.is_dirty:
            return True
        if self._db_mashup is not None and self._db_mashup.has_changes():
            return True
        return False
    # --- scalar field 'id' ---
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # --- scalar field 'prevId' (id of the preceding mashup_action) ---
    def __get_db_prevId(self):
        return self._db_prevId
    def __set_db_prevId(self, prevId):
        self._db_prevId = prevId
        self.is_dirty = True
    db_prevId = property(__get_db_prevId, __set_db_prevId)
    def db_add_prevId(self, prevId):
        self._db_prevId = prevId
    def db_change_prevId(self, prevId):
        self._db_prevId = prevId
    def db_delete_prevId(self, prevId):
        self._db_prevId = None
    # --- scalar field 'date' ---
    def __get_db_date(self):
        return self._db_date
    def __set_db_date(self, date):
        self._db_date = date
        self.is_dirty = True
    db_date = property(__get_db_date, __set_db_date)
    def db_add_date(self, date):
        self._db_date = date
    def db_change_date(self, date):
        self._db_date = date
    def db_delete_date(self, date):
        self._db_date = None
    # --- scalar field 'user' ---
    def __get_db_user(self):
        return self._db_user
    def __set_db_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(__get_db_user, __set_db_user)
    def db_add_user(self, user):
        self._db_user = user
    def db_change_user(self, user):
        self._db_user = user
    def db_delete_user(self, user):
        self._db_user = None
    # --- single child 'mashup' ---
    def __get_db_mashup(self):
        return self._db_mashup
    def __set_db_mashup(self, mashup):
        self._db_mashup = mashup
        self.is_dirty = True
    db_mashup = property(__get_db_mashup, __set_db_mashup)
    def db_add_mashup(self, mashup):
        self._db_mashup = mashup
    def db_change_mashup(self, mashup):
        self._db_mashup = mashup
    def db_delete_mashup(self, mashup):
        # For an already-persisted action, remember the current mashup
        # (whatever it is) so its deletion can be flushed later.
        if not self.is_new:
            self.db_deleted_mashup.append(self._db_mashup)
        self._db_mashup = None
    def getPrimaryKey(self):
        # The primary key of this object is its database id.
        return self._db_id
class DBConfiguration(object):
    """Auto-generated domain object for a 'configuration' entity.

    Holds a list of config-key children indexed by their db_name.
    db_deleted_config_keys remembers removed children until they are
    flushed; is_dirty / is_new track persistence state.
    """
    vtType = 'configuration'
    def __init__(self, config_keys=None):
        self.db_deleted_config_keys = []
        self.db_config_keys_name_index = {}
        if config_keys is None:
            self._db_config_keys = []
        else:
            self._db_config_keys = config_keys
            for v in self._db_config_keys:
                self.db_config_keys_name_index[v.db_name] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBConfiguration.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a deep copy of this configuration.

        When new_ids is True, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id pair is recorded in id_remap.
        """
        cp = DBConfiguration()
        if self._db_config_keys is None:
            cp._db_config_keys = []
        else:
            cp._db_config_keys = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_config_keys]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_config_keys_name_index = dict((v.db_name, v) for v in cp._db_config_keys)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Copy old_obj into new_obj under the current schema version.

        trans_dict may carry per-class dicts of field-translation
        callables that override the default field-by-field copy.
        Returns new_obj (freshly created if not supplied).
        """
        if new_obj is None:
            new_obj = DBConfiguration()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'config_keys' in class_dict:
            res = class_dict['config_keys'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_config_key(obj)
        elif hasattr(old_obj, 'db_config_keys') and old_obj.db_config_keys is not None:
            for obj in old_obj.db_config_keys:
                new_obj.db_add_config_key(DBConfigKey.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_config_keys') and hasattr(new_obj, 'db_deleted_config_keys'):
            for obj in old_obj.db_deleted_config_keys:
                n_obj = DBConfigKey.update_version(obj, trans_dict)
                new_obj.db_deleted_config_keys.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for all config
        keys followed by self; with orphan=True the keys are detached.
        """
        children = []
        to_del = []
        for child in self.db_config_keys:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_config_key(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return config keys deleted since the last flush; with
        remove=True the deleted list is cleared as well.
        """
        children = []
        children.extend(self.db_deleted_config_keys)
        if remove:
            self.db_deleted_config_keys = []
        return children
    def has_changes(self):
        """True when this configuration or any config key is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_config_keys:
            if child.has_changes():
                return True
        return False
    # --- 'config_keys' collection, keyed by db_name ---
    def __get_db_config_keys(self):
        return self._db_config_keys
    def __set_db_config_keys(self, config_keys):
        # NOTE: replaces the list wholesale without rebuilding the index.
        self._db_config_keys = config_keys
        self.is_dirty = True
    db_config_keys = property(__get_db_config_keys, __set_db_config_keys)
    def db_get_config_keys(self):
        return self._db_config_keys
    def db_add_config_key(self, config_key):
        self.is_dirty = True
        self._db_config_keys.append(config_key)
        self.db_config_keys_name_index[config_key.db_name] = config_key
    def db_change_config_key(self, config_key):
        """Replace the entry with the same db_name, or append if absent.

        BUGFIX: this method used to append unconditionally, so changing
        an existing key left a stale duplicate in _db_config_keys while
        the name index pointed at the new object.  It now follows the
        replace-or-append pattern used by the id-keyed db_change_*
        methods elsewhere in this module, keeping list and index in sync.
        """
        self.is_dirty = True
        found = False
        for i, existing in enumerate(self._db_config_keys):
            if existing.db_name == config_key.db_name:
                self._db_config_keys[i] = config_key
                found = True
                break
        if not found:
            self._db_config_keys.append(config_key)
        self.db_config_keys_name_index[config_key.db_name] = config_key
    def db_delete_config_key(self, config_key):
        # Config keys carry no id key, so generated deletion is unsupported.
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_config_key(self, key):
        # Id-based lookup is unsupported for non-keyed children.
        return None
    def db_get_config_key_by_name(self, key):
        """Indexed O(1) lookup by name; raises KeyError if absent."""
        return self.db_config_keys_name_index[key]
    def db_has_config_key_with_name(self, key):
        """True if a config key with this name is indexed."""
        return key in self.db_config_keys_name_index
class DBChange(object):
vtType = 'change'
    def __init__(self, data=None, id=None, what=None, oldObjId=None, newObjId=None, parentObjId=None, parentObjType=None):
        # 'data' is the change's payload object (dispatched on vtType —
        # module, location, annotation, function, connection, etc. — by
        # update_version); db_deleted_data remembers replaced payloads
        # until they are flushed.
        self.db_deleted_data = []
        self._db_data = data
        self._db_id = id
        self._db_what = what
        self._db_oldObjId = oldObjId
        self._db_newObjId = newObjId
        self._db_parentObjId = parentObjId
        self._db_parentObjType = parentObjType
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBChange.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a deep copy of this change.

        When new_ids is True, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id pair is recorded in id_remap; the
        old/new object ids (keyed on db_what) and the parent object id
        (keyed on db_parentObjType) are rewritten if their targets were
        also remapped.
        """
        cp = DBChange(id=self._db_id,
                      what=self._db_what,
                      oldObjId=self._db_oldObjId,
                      newObjId=self._db_newObjId,
                      parentObjId=self._db_parentObjId,
                      parentObjType=self._db_parentObjType)
        if self._db_data is not None:
            cp._db_data = self._db_data.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_oldObjId') and (self._db_what, self._db_oldObjId) in id_remap:
                cp._db_oldObjId = id_remap[(self._db_what, self._db_oldObjId)]
            if hasattr(self, 'db_newObjId') and (self._db_what, self._db_newObjId) in id_remap:
                cp._db_newObjId = id_remap[(self._db_what, self._db_newObjId)]
            if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap:
                cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBChange()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'data' in class_dict:
res = class_dict['data'](old_obj, trans_dict)
new_obj.db_data = res
elif hasattr(old_obj, 'db_data') and old_obj.db_data is not None:
obj = old_obj.db_data
if obj.vtType == 'module':
new_obj.db_add_data(DBModule.update_version(obj, trans_dict))
elif obj.vtType == 'location':
new_obj.db_add_data(DBLocation.update_version(obj, trans_dict))
elif obj.vtType == 'annotation':
new_obj.db_add_data(DBAnnotation.update_version(obj, trans_dict))
elif obj.vtType == 'function':
new_obj.db_add_data(DBFunction.update_version(obj, trans_dict))
elif obj.vtType == 'connection':
new_obj.db_add_data(DBConnection.update_version(obj, trans_dict))
elif obj.vtType == 'port':
new_obj.db_add_data(DBPort.update_version(obj, trans_dict))
elif obj.vtType == 'parameter':
new_obj.db_add_data(DBParameter.update_version(obj, trans_dict))
elif obj.vtType == 'portSpec':
new_obj.db_add_data(DBPortSpec.update_version(obj, trans_dict))
elif obj.vtType == 'abstraction':
new_obj.db_add_data(DBAbstraction.update_version(obj, trans_dict))
elif obj.vtType == 'group':
new_obj.db_add_data(DBGroup.update_version(obj, trans_dict))
elif obj.vtType == 'other':
new_obj.db_add_data(DBOther.update_version(obj, trans_dict))
elif obj.vtType == 'plugin_data':
new_obj.db_add_data(DBPluginData.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_data') and hasattr(new_obj, 'db_deleted_data'):
for obj in old_obj.db_deleted_data:
if obj.vtType == 'module':
n_obj = DBModule.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'location':
n_obj = DBLocation.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'annotation':
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'function':
n_obj = DBFunction.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'connection':
n_obj = DBConnection.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'port':
n_obj = DBPort.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'parameter':
n_obj = DBParameter.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'portSpec':
n_obj = DBPortSpec.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'abstraction':
n_obj = DBAbstraction.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'group':
n_obj = DBGroup.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'other':
n_obj = DBOther.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
elif obj.vtType == 'plugin_data':
n_obj = DBPluginData.update_version(obj, trans_dict)
new_obj.db_deleted_data.append(n_obj)
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'what' in class_dict:
res = class_dict['what'](old_obj, trans_dict)
new_obj.db_what = res
elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None:
new_obj.db_what = old_obj.db_what
if 'oldObjId' in class_dict:
res = class_dict['oldObjId'](old_obj, trans_dict)
new_obj.db_oldObjId = res
elif hasattr(old_obj, 'db_oldObjId') and old_obj.db_oldObjId is not None:
new_obj.db_oldObjId = old_obj.db_oldObjId
if 'newObjId' in class_dict:
res = class_dict['newObjId'](old_obj, trans_dict)
new_obj.db_newObjId = res
elif hasattr(old_obj, 'db_newObjId') and old_obj.db_newObjId is not None:
new_obj.db_newObjId = old_obj.db_newObjId
if 'parentObjId' in class_dict:
res = class_dict['parentObjId'](old_obj, trans_dict)
new_obj.db_parentObjId = res
elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None:
new_obj.db_parentObjId = old_obj.db_parentObjId
if 'parentObjType' in class_dict:
res = class_dict['parentObjType'](old_obj, trans_dict)
new_obj.db_parentObjType = res
elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None:
new_obj.db_parentObjType = old_obj.db_parentObjType
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
children = []
if self._db_data is not None:
children.extend(self._db_data.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
self._db_data = None
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_data)
if remove:
self.db_deleted_data = []
return children
def has_changes(self):
if self.is_dirty:
return True
if self._db_data is not None and self._db_data.has_changes():
return True
return False
def __get_db_data(self):
return self._db_data
def __set_db_data(self, data):
self._db_data = data
self.is_dirty = True
db_data = property(__get_db_data, __set_db_data)
def db_add_data(self, data):
self._db_data = data
def db_change_data(self, data):
self._db_data = data
def db_delete_data(self, data):
if not self.is_new:
self.db_deleted_data.append(self._db_data)
self._db_data = None
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_what(self):
return self._db_what
def __set_db_what(self, what):
self._db_what = what
self.is_dirty = True
db_what = property(__get_db_what, __set_db_what)
def db_add_what(self, what):
self._db_what = what
def db_change_what(self, what):
self._db_what = what
def db_delete_what(self, what):
self._db_what = None
def __get_db_oldObjId(self):
return self._db_oldObjId
def __set_db_oldObjId(self, oldObjId):
self._db_oldObjId = oldObjId
self.is_dirty = True
db_oldObjId = property(__get_db_oldObjId, __set_db_oldObjId)
def db_add_oldObjId(self, oldObjId):
self._db_oldObjId = oldObjId
def db_change_oldObjId(self, oldObjId):
self._db_oldObjId = oldObjId
def db_delete_oldObjId(self, oldObjId):
self._db_oldObjId = None
def __get_db_newObjId(self):
return self._db_newObjId
def __set_db_newObjId(self, newObjId):
self._db_newObjId = newObjId
self.is_dirty = True
db_newObjId = property(__get_db_newObjId, __set_db_newObjId)
def db_add_newObjId(self, newObjId):
self._db_newObjId = newObjId
def db_change_newObjId(self, newObjId):
self._db_newObjId = newObjId
def db_delete_newObjId(self, newObjId):
self._db_newObjId = None
def __get_db_parentObjId(self):
return self._db_parentObjId
def __set_db_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
self.is_dirty = True
db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId)
def db_add_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_change_parentObjId(self, parentObjId):
self._db_parentObjId = parentObjId
def db_delete_parentObjId(self, parentObjId):
self._db_parentObjId = None
def __get_db_parentObjType(self):
return self._db_parentObjType
def __set_db_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
self.is_dirty = True
db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType)
def db_add_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_change_parentObjType(self, parentObjType):
self._db_parentObjType = parentObjType
def db_delete_parentObjType(self, parentObjType):
self._db_parentObjType = None
def getPrimaryKey(self):
return self._db_id
class DBPackage(object):
    """Auto-generated domain object for a 'package' record.

    Holds package metadata (name, identifier, codepath, version, ...) and a
    list of module_descriptor children, indexed both by id and by the
    (name, namespace, version) triple.  is_dirty/is_new plus the
    db_deleted_module_descriptors list drive incremental persistence.
    """
    vtType = 'package'
    def __init__(self, id=None, name=None, identifier=None, codepath=None, load_configuration=None, version=None, description=None, module_descriptors=None):
        self._db_id = id
        self._db_name = name
        self._db_identifier = identifier
        self._db_codepath = codepath
        self._db_load_configuration = load_configuration
        self._db_version = version
        self._db_description = description
        # children removed later are remembered here for DB deletion
        self.db_deleted_module_descriptors = []
        # secondary indices kept in sync with the child list
        self.db_module_descriptors_id_index = {}
        self.db_module_descriptors_name_index = {}
        if module_descriptors is None:
            self._db_module_descriptors = []
        else:
            self._db_module_descriptors = module_descriptors
            for v in self._db_module_descriptors:
                self.db_module_descriptors_id_index[v.db_id] = v
                self.db_module_descriptors_name_index[(v.db_name,v.db_namespace,v.db_version)] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy with default args (no id remap)
        return DBPackage.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this package; with new_ids=True allocate a fresh id from
        id_scope and record old->new in id_remap.  Child indices are rebuilt
        from the copied children."""
        cp = DBPackage(id=self._db_id,
                       name=self._db_name,
                       identifier=self._db_identifier,
                       codepath=self._db_codepath,
                       load_configuration=self._db_load_configuration,
                       version=self._db_version,
                       description=self._db_description)
        if self._db_module_descriptors is None:
            cp._db_module_descriptors = []
        else:
            cp._db_module_descriptors = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_module_descriptors]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_module_descriptors_id_index = dict((v.db_id, v) for v in cp._db_module_descriptors)
        cp.db_module_descriptors_name_index = dict(((v.db_name,v.db_namespace,v.db_version), v) for v in cp._db_module_descriptors)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema version.

        trans_dict maps class name -> {field name -> converter}; fields with
        a converter use it, all others are copied verbatim.  Children (and
        pending-delete children) are converted via
        DBModuleDescriptor.update_version.
        """
        if new_obj is None:
            new_obj = DBPackage()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'identifier' in class_dict:
            res = class_dict['identifier'](old_obj, trans_dict)
            new_obj.db_identifier = res
        elif hasattr(old_obj, 'db_identifier') and old_obj.db_identifier is not None:
            new_obj.db_identifier = old_obj.db_identifier
        if 'codepath' in class_dict:
            res = class_dict['codepath'](old_obj, trans_dict)
            new_obj.db_codepath = res
        elif hasattr(old_obj, 'db_codepath') and old_obj.db_codepath is not None:
            new_obj.db_codepath = old_obj.db_codepath
        if 'load_configuration' in class_dict:
            res = class_dict['load_configuration'](old_obj, trans_dict)
            new_obj.db_load_configuration = res
        elif hasattr(old_obj, 'db_load_configuration') and old_obj.db_load_configuration is not None:
            new_obj.db_load_configuration = old_obj.db_load_configuration
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'description' in class_dict:
            res = class_dict['description'](old_obj, trans_dict)
            new_obj.db_description = res
        elif hasattr(old_obj, 'db_description') and old_obj.db_description is not None:
            new_obj.db_description = old_obj.db_description
        if 'module_descriptors' in class_dict:
            res = class_dict['module_descriptors'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_module_descriptor(obj)
        elif hasattr(old_obj, 'db_module_descriptors') and old_obj.db_module_descriptors is not None:
            for obj in old_obj.db_module_descriptors:
                new_obj.db_add_module_descriptor(DBModuleDescriptor.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_module_descriptors') and hasattr(new_obj, 'db_deleted_module_descriptors'):
            for obj in old_obj.db_deleted_module_descriptors:
                n_obj = DBModuleDescriptor.update_version(obj, trans_dict)
                new_obj.db_deleted_module_descriptors.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return [(obj, parent_type, parent_id), ...] for this object and all
        children; with orphan=True children are detached from this object."""
        children = []
        to_del = []
        for child in self.db_module_descriptors:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_module_descriptor(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return pending-delete children; remove=True also clears the list."""
        children = []
        children.extend(self.db_deleted_module_descriptors)
        if remove:
            self.db_deleted_module_descriptors = []
        return children
    def has_changes(self):
        """True if this object or any child has unsaved modifications."""
        if self.is_dirty:
            return True
        for child in self._db_module_descriptors:
            if child.has_changes():
                return True
        return False
    # --- id ---
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # --- name ---
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    # --- identifier ---
    def __get_db_identifier(self):
        return self._db_identifier
    def __set_db_identifier(self, identifier):
        self._db_identifier = identifier
        self.is_dirty = True
    db_identifier = property(__get_db_identifier, __set_db_identifier)
    def db_add_identifier(self, identifier):
        self._db_identifier = identifier
    def db_change_identifier(self, identifier):
        self._db_identifier = identifier
    def db_delete_identifier(self, identifier):
        self._db_identifier = None
    # --- codepath ---
    def __get_db_codepath(self):
        return self._db_codepath
    def __set_db_codepath(self, codepath):
        self._db_codepath = codepath
        self.is_dirty = True
    db_codepath = property(__get_db_codepath, __set_db_codepath)
    def db_add_codepath(self, codepath):
        self._db_codepath = codepath
    def db_change_codepath(self, codepath):
        self._db_codepath = codepath
    def db_delete_codepath(self, codepath):
        self._db_codepath = None
    # --- load_configuration ---
    def __get_db_load_configuration(self):
        return self._db_load_configuration
    def __set_db_load_configuration(self, load_configuration):
        self._db_load_configuration = load_configuration
        self.is_dirty = True
    db_load_configuration = property(__get_db_load_configuration, __set_db_load_configuration)
    def db_add_load_configuration(self, load_configuration):
        self._db_load_configuration = load_configuration
    def db_change_load_configuration(self, load_configuration):
        self._db_load_configuration = load_configuration
    def db_delete_load_configuration(self, load_configuration):
        self._db_load_configuration = None
    # --- version ---
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    # --- description ---
    def __get_db_description(self):
        return self._db_description
    def __set_db_description(self, description):
        self._db_description = description
        self.is_dirty = True
    db_description = property(__get_db_description, __set_db_description)
    def db_add_description(self, description):
        self._db_description = description
    def db_change_description(self, description):
        self._db_description = description
    def db_delete_description(self, description):
        self._db_description = None
    # --- module_descriptors child collection (list + two indices) ---
    def __get_db_module_descriptors(self):
        return self._db_module_descriptors
    def __set_db_module_descriptors(self, module_descriptors):
        self._db_module_descriptors = module_descriptors
        self.is_dirty = True
    db_module_descriptors = property(__get_db_module_descriptors, __set_db_module_descriptors)
    def db_get_module_descriptors(self):
        return self._db_module_descriptors
    def db_add_module_descriptor(self, module_descriptor):
        self.is_dirty = True
        self._db_module_descriptors.append(module_descriptor)
        self.db_module_descriptors_id_index[module_descriptor.db_id] = module_descriptor
        self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)] = module_descriptor
    def db_change_module_descriptor(self, module_descriptor):
        # replace in place by id; append if no child with that id exists
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_module_descriptors)):
            if self._db_module_descriptors[i].db_id == module_descriptor.db_id:
                self._db_module_descriptors[i] = module_descriptor
                found = True
                break
        if not found:
            self._db_module_descriptors.append(module_descriptor)
        self.db_module_descriptors_id_index[module_descriptor.db_id] = module_descriptor
        self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)] = module_descriptor
    def db_delete_module_descriptor(self, module_descriptor):
        # already-persisted children are queued for DB deletion
        self.is_dirty = True
        for i in xrange(len(self._db_module_descriptors)):
            if self._db_module_descriptors[i].db_id == module_descriptor.db_id:
                if not self._db_module_descriptors[i].is_new:
                    self.db_deleted_module_descriptors.append(self._db_module_descriptors[i])
                del self._db_module_descriptors[i]
                break
        del self.db_module_descriptors_id_index[module_descriptor.db_id]
        del self.db_module_descriptors_name_index[(module_descriptor.db_name,module_descriptor.db_namespace,module_descriptor.db_version)]
    def db_get_module_descriptor(self, key):
        # linear scan variant; returns None when absent (indices raise)
        for i in xrange(len(self._db_module_descriptors)):
            if self._db_module_descriptors[i].db_id == key:
                return self._db_module_descriptors[i]
        return None
    def db_get_module_descriptor_by_id(self, key):
        return self.db_module_descriptors_id_index[key]
    def db_has_module_descriptor_with_id(self, key):
        return key in self.db_module_descriptors_id_index
    def db_get_module_descriptor_by_name(self, key):
        # key is the (name, namespace, version) triple
        return self.db_module_descriptors_name_index[key]
    def db_has_module_descriptor_with_name(self, key):
        return key in self.db_module_descriptors_name_index
    def getPrimaryKey(self):
        """Return the primary-key value used by the persistence layer."""
        return self._db_id
class DBLoopExec(object):
    """Auto-generated domain object for one iteration of a loop execution.

    Holds timing/outcome fields and a heterogeneous list of item_execs
    children (module_exec / group_exec / loop_exec), indexed by id.
    is_dirty/is_new plus db_deleted_item_execs drive incremental persistence.
    """
    vtType = 'loop_exec'
    def __init__(self, item_execs=None, id=None, ts_start=None, ts_end=None, iteration=None, completed=None, error=None):
        # children removed later are remembered here for DB deletion
        self.db_deleted_item_execs = []
        # id -> child index kept in sync with the child list
        self.db_item_execs_id_index = {}
        if item_execs is None:
            self._db_item_execs = []
        else:
            self._db_item_execs = item_execs
            for v in self._db_item_execs:
                self.db_item_execs_id_index[v.db_id] = v
        self._db_id = id
        self._db_ts_start = ts_start
        self._db_ts_end = ts_end
        self._db_iteration = iteration
        self._db_completed = completed
        self._db_error = error
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy with default args (no id remap)
        return DBLoopExec.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this loop_exec; with new_ids=True allocate a fresh id
        from id_scope and record old->new in id_remap.  The id index is
        rebuilt from the copied children."""
        cp = DBLoopExec(id=self._db_id,
                        ts_start=self._db_ts_start,
                        ts_end=self._db_ts_end,
                        iteration=self._db_iteration,
                        completed=self._db_completed,
                        error=self._db_error)
        if self._db_item_execs is None:
            cp._db_item_execs = []
        else:
            cp._db_item_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_item_execs]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_item_execs_id_index = dict((v.db_id, v) for v in cp._db_item_execs)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema version.

        trans_dict maps class name -> {field name -> converter}; fields with
        a converter use it, all others are copied verbatim.  Children are
        dispatched on vtType to the matching class's update_version.
        """
        if new_obj is None:
            new_obj = DBLoopExec()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'item_execs' in class_dict:
            res = class_dict['item_execs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_item_exec(obj)
        elif hasattr(old_obj, 'db_item_execs') and old_obj.db_item_execs is not None:
            for obj in old_obj.db_item_execs:
                if obj.vtType == 'module_exec':
                    new_obj.db_add_item_exec(DBModuleExec.update_version(obj, trans_dict))
                elif obj.vtType == 'group_exec':
                    new_obj.db_add_item_exec(DBGroupExec.update_version(obj, trans_dict))
                elif obj.vtType == 'loop_exec':
                    new_obj.db_add_item_exec(DBLoopExec.update_version(obj, trans_dict))
        # deleted children are translated too so pending deletes survive
        if hasattr(old_obj, 'db_deleted_item_execs') and hasattr(new_obj, 'db_deleted_item_execs'):
            for obj in old_obj.db_deleted_item_execs:
                if obj.vtType == 'module_exec':
                    n_obj = DBModuleExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
                elif obj.vtType == 'group_exec':
                    n_obj = DBGroupExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
                elif obj.vtType == 'loop_exec':
                    n_obj = DBLoopExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'ts_start' in class_dict:
            res = class_dict['ts_start'](old_obj, trans_dict)
            new_obj.db_ts_start = res
        elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
            new_obj.db_ts_start = old_obj.db_ts_start
        if 'ts_end' in class_dict:
            res = class_dict['ts_end'](old_obj, trans_dict)
            new_obj.db_ts_end = res
        elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
            new_obj.db_ts_end = old_obj.db_ts_end
        if 'iteration' in class_dict:
            res = class_dict['iteration'](old_obj, trans_dict)
            new_obj.db_iteration = res
        elif hasattr(old_obj, 'db_iteration') and old_obj.db_iteration is not None:
            new_obj.db_iteration = old_obj.db_iteration
        if 'completed' in class_dict:
            res = class_dict['completed'](old_obj, trans_dict)
            new_obj.db_completed = res
        elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
            new_obj.db_completed = old_obj.db_completed
        if 'error' in class_dict:
            res = class_dict['error'](old_obj, trans_dict)
            new_obj.db_error = res
        elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None:
            new_obj.db_error = old_obj.db_error
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return [(obj, parent_type, parent_id), ...] for this object and all
        children; with orphan=True children are detached from this object."""
        children = []
        to_del = []
        for child in self.db_item_execs:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_item_exec(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return pending-delete children; remove=True also clears the list."""
        children = []
        children.extend(self.db_deleted_item_execs)
        if remove:
            self.db_deleted_item_execs = []
        return children
    def has_changes(self):
        """True if this object or any child has unsaved modifications."""
        if self.is_dirty:
            return True
        for child in self._db_item_execs:
            if child.has_changes():
                return True
        return False
    # --- item_execs child collection (list + id index) ---
    def __get_db_item_execs(self):
        return self._db_item_execs
    def __set_db_item_execs(self, item_execs):
        self._db_item_execs = item_execs
        self.is_dirty = True
    db_item_execs = property(__get_db_item_execs, __set_db_item_execs)
    def db_get_item_execs(self):
        return self._db_item_execs
    def db_add_item_exec(self, item_exec):
        self.is_dirty = True
        self._db_item_execs.append(item_exec)
        self.db_item_execs_id_index[item_exec.db_id] = item_exec
    def db_change_item_exec(self, item_exec):
        # replace in place by id; append if no child with that id exists
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == item_exec.db_id:
                self._db_item_execs[i] = item_exec
                found = True
                break
        if not found:
            self._db_item_execs.append(item_exec)
        self.db_item_execs_id_index[item_exec.db_id] = item_exec
    def db_delete_item_exec(self, item_exec):
        # already-persisted children are queued for DB deletion
        self.is_dirty = True
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == item_exec.db_id:
                if not self._db_item_execs[i].is_new:
                    self.db_deleted_item_execs.append(self._db_item_execs[i])
                del self._db_item_execs[i]
                break
        del self.db_item_execs_id_index[item_exec.db_id]
    def db_get_item_exec(self, key):
        # linear scan variant; returns None when absent (index raises)
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == key:
                return self._db_item_execs[i]
        return None
    def db_get_item_exec_by_id(self, key):
        return self.db_item_execs_id_index[key]
    def db_has_item_exec_with_id(self, key):
        return key in self.db_item_execs_id_index
    # --- id ---
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # --- ts_start ---
    def __get_db_ts_start(self):
        return self._db_ts_start
    def __set_db_ts_start(self, ts_start):
        self._db_ts_start = ts_start
        self.is_dirty = True
    db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
    def db_add_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_change_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_delete_ts_start(self, ts_start):
        self._db_ts_start = None
    # --- ts_end ---
    def __get_db_ts_end(self):
        return self._db_ts_end
    def __set_db_ts_end(self, ts_end):
        self._db_ts_end = ts_end
        self.is_dirty = True
    db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
    def db_add_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_change_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_delete_ts_end(self, ts_end):
        self._db_ts_end = None
    # --- iteration ---
    def __get_db_iteration(self):
        return self._db_iteration
    def __set_db_iteration(self, iteration):
        self._db_iteration = iteration
        self.is_dirty = True
    db_iteration = property(__get_db_iteration, __set_db_iteration)
    def db_add_iteration(self, iteration):
        self._db_iteration = iteration
    def db_change_iteration(self, iteration):
        self._db_iteration = iteration
    def db_delete_iteration(self, iteration):
        self._db_iteration = None
    # --- completed ---
    def __get_db_completed(self):
        return self._db_completed
    def __set_db_completed(self, completed):
        self._db_completed = completed
        self.is_dirty = True
    db_completed = property(__get_db_completed, __set_db_completed)
    def db_add_completed(self, completed):
        self._db_completed = completed
    def db_change_completed(self, completed):
        self._db_completed = completed
    def db_delete_completed(self, completed):
        self._db_completed = None
    # --- error ---
    def __get_db_error(self):
        return self._db_error
    def __set_db_error(self, error):
        self._db_error = error
        self.is_dirty = True
    db_error = property(__get_db_error, __set_db_error)
    def db_add_error(self, error):
        self._db_error = error
    def db_change_error(self, error):
        self._db_error = error
    def db_delete_error(self, error):
        self._db_error = None
    def getPrimaryKey(self):
        """Return the primary-key value used by the persistence layer."""
        return self._db_id
class DBConnection(object):
    """Auto-generated domain object for a workflow 'connection'.

    Holds a list of port children, indexed both by id and by port type.
    is_dirty/is_new plus db_deleted_ports drive incremental persistence.
    """
    vtType = 'connection'
    def __init__(self, id=None, ports=None):
        self._db_id = id
        # children removed later are remembered here for DB deletion
        self.db_deleted_ports = []
        # secondary indices kept in sync with the child list
        self.db_ports_id_index = {}
        self.db_ports_type_index = {}
        if ports is None:
            self._db_ports = []
        else:
            self._db_ports = ports
            for v in self._db_ports:
                self.db_ports_id_index[v.db_id] = v
                self.db_ports_type_index[v.db_type] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy with default args (no id remap)
        return DBConnection.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this connection; with new_ids=True allocate a fresh id
        from id_scope and record old->new in id_remap.  Port indices are
        rebuilt from the copied children."""
        cp = DBConnection(id=self._db_id)
        if self._db_ports is None:
            cp._db_ports = []
        else:
            cp._db_ports = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_ports]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_ports_id_index = dict((v.db_id, v) for v in cp._db_ports)
        cp.db_ports_type_index = dict((v.db_type, v) for v in cp._db_ports)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema version.

        trans_dict maps class name -> {field name -> converter}; fields with
        a converter use it, all others are copied verbatim.  Children (and
        pending-delete children) are converted via DBPort.update_version.
        """
        if new_obj is None:
            new_obj = DBConnection()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'ports' in class_dict:
            res = class_dict['ports'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_port(obj)
        elif hasattr(old_obj, 'db_ports') and old_obj.db_ports is not None:
            for obj in old_obj.db_ports:
                new_obj.db_add_port(DBPort.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_ports') and hasattr(new_obj, 'db_deleted_ports'):
            for obj in old_obj.db_deleted_ports:
                n_obj = DBPort.update_version(obj, trans_dict)
                new_obj.db_deleted_ports.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return [(obj, parent_type, parent_id), ...] for this object and all
        children; with orphan=True children are detached from this object."""
        children = []
        to_del = []
        for child in self.db_ports:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_port(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return pending-delete children; remove=True also clears the list."""
        children = []
        children.extend(self.db_deleted_ports)
        if remove:
            self.db_deleted_ports = []
        return children
    def has_changes(self):
        """True if this object or any child has unsaved modifications."""
        if self.is_dirty:
            return True
        for child in self._db_ports:
            if child.has_changes():
                return True
        return False
    # --- id ---
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # --- ports child collection (list + id index + type index) ---
    def __get_db_ports(self):
        return self._db_ports
    def __set_db_ports(self, ports):
        self._db_ports = ports
        self.is_dirty = True
    db_ports = property(__get_db_ports, __set_db_ports)
    def db_get_ports(self):
        return self._db_ports
    def db_add_port(self, port):
        self.is_dirty = True
        self._db_ports.append(port)
        self.db_ports_id_index[port.db_id] = port
        self.db_ports_type_index[port.db_type] = port
    def db_change_port(self, port):
        # replace in place by id; append if no child with that id exists
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_ports)):
            if self._db_ports[i].db_id == port.db_id:
                self._db_ports[i] = port
                found = True
                break
        if not found:
            self._db_ports.append(port)
        self.db_ports_id_index[port.db_id] = port
        self.db_ports_type_index[port.db_type] = port
    def db_delete_port(self, port):
        # already-persisted children are queued for DB deletion
        self.is_dirty = True
        for i in xrange(len(self._db_ports)):
            if self._db_ports[i].db_id == port.db_id:
                if not self._db_ports[i].is_new:
                    self.db_deleted_ports.append(self._db_ports[i])
                del self._db_ports[i]
                break
        del self.db_ports_id_index[port.db_id]
        del self.db_ports_type_index[port.db_type]
    def db_get_port(self, key):
        # linear scan variant; returns None when absent (indices raise)
        for i in xrange(len(self._db_ports)):
            if self._db_ports[i].db_id == key:
                return self._db_ports[i]
        return None
    def db_get_port_by_id(self, key):
        return self.db_ports_id_index[key]
    def db_has_port_with_id(self, key):
        return key in self.db_ports_id_index
    def db_get_port_by_type(self, key):
        return self.db_ports_type_index[key]
    def db_has_port_with_type(self, key):
        return key in self.db_ports_type_index
    def getPrimaryKey(self):
        """Return the primary-key value used by the persistence layer."""
        return self._db_id
class DBConfigBool(object):
    """Auto-generated domain object wrapping a single boolean config value.

    Carries only ``value`` plus the standard is_dirty/is_new persistence
    flags; it has no children and no id field of its own.
    """
    vtType = 'config_bool'

    def __init__(self, value=None):
        self._db_value = value
        # freshly constructed objects are both new and dirty
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        # copy.copy() delegates to do_copy with default args (no id remap)
        return DBConfigBool.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object; with new_ids=True record an id remapping via
        id_scope (note: this class defines no db_id of its own)."""
        duplicate = DBConfigBool(value=self._db_value)
        # set new ids
        if new_ids:
            fresh_id = id_scope.getNewId(self.vtType)
            remap_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(remap_type, self.db_id)] = fresh_id
            duplicate.db_id = fresh_id
        else:
            # plain copies keep the source's persistence flags
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema version using the
        per-class field converters in trans_dict (verbatim copy otherwise)."""
        target = new_obj if new_obj is not None else DBConfigBool()
        converters = trans_dict.get(target.__class__.__name__, {})
        if 'value' in converters:
            target.db_value = converters['value'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_value', None) is not None:
            target.db_value = old_obj.db_value
        target.is_new = old_obj.is_new
        target.is_dirty = old_obj.is_dirty
        return target

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return the single (obj, parent_type, parent_id) entry; no children."""
        parent_type, parent_id = parent
        return [(self, parent_type, parent_id)]

    def db_deleted_children(self, remove=False):
        """No child collections, so there is never anything pending deletion."""
        return []

    def has_changes(self):
        """True iff this object has unsaved modifications."""
        return bool(self.is_dirty)

    # --- value property: dirty-tracking setter plus raw add/change/delete ---
    def __get_db_value(self):
        return self._db_value

    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True

    db_value = property(__get_db_value, __set_db_value)

    def db_add_value(self, value):
        self._db_value = value

    def db_change_value(self, value):
        self._db_value = value

    def db_delete_value(self, value):
        self._db_value = None
class DBAction(object):
    """Auto-generated persistence class for a versioning action.

    An action owns a list of operation children (add/delete/change) and a
    list of annotation children; each list is mirrored by lookup indices
    (by id, and additionally by key for annotations) that the mutator
    methods keep in sync.

    Fixes relative to the generated original:
      * ``do_copy`` no longer raises TypeError when called with the default
        ``id_remap=None`` (e.g. via ``copy.copy``).
      * Index-based ``xrange(len(...))`` loops replaced with ``enumerate``/
        direct iteration (works on Python 2 and 3).
    """
    vtType = 'action'
    def __init__(self, operations=None, id=None, prevId=None, date=None, session=None, user=None, annotations=None):
        # Children removed after being persisted are queued in the
        # db_deleted_* lists so the deletion can be propagated to storage.
        self.db_deleted_operations = []
        self.db_operations_id_index = {}
        if operations is None:
            self._db_operations = []
        else:
            self._db_operations = operations
        for v in self._db_operations:
            self.db_operations_id_index[v.db_id] = v
        self._db_id = id
        self._db_prevId = prevId
        self._db_date = date
        self._db_session = session
        self._db_user = user
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        self.db_annotations_key_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
        for v in self._db_annotations:
            self.db_annotations_id_index[v.db_id] = v
            self.db_annotations_key_index[v.db_key] = v
        # Fresh objects are always considered modified and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBAction.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this action and its children.

        With new_ids, fresh ids are drawn from id_scope and old->new pairs
        recorded in id_remap; a previously remapped prevId is rewritten.
        """
        cp = DBAction(id=self._db_id,
                      prevId=self._db_prevId,
                      date=self._db_date,
                      session=self._db_session,
                      user=self._db_user)
        if self._db_operations is None:
            cp._db_operations = []
        else:
            cp._db_operations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_operations]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # Bug fix: only consult id_remap when one was supplied -- the
        # original tested membership unconditionally, so a plain
        # copy.copy(action) (id_remap=None) raised TypeError.
        if id_remap is not None and hasattr(self, 'db_prevId') and ('action', self._db_prevId) in id_remap:
            cp._db_prevId = id_remap[('action', self._db_prevId)]
        # recreate indices and set flags
        cp.db_operations_id_index = dict((v.db_id, v) for v in cp._db_operations)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
        if not new_ids:
            # A plain copy keeps the original's persistence flags.
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* into the current schema version using the
        per-class field translators in trans_dict."""
        if new_obj is None:
            new_obj = DBAction()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'operations' in class_dict:
            res = class_dict['operations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_operation(obj)
        elif hasattr(old_obj, 'db_operations') and old_obj.db_operations is not None:
            # Dispatch on the concrete operation type of each child.
            for obj in old_obj.db_operations:
                if obj.vtType == 'add':
                    new_obj.db_add_operation(DBAdd.update_version(obj, trans_dict))
                elif obj.vtType == 'delete':
                    new_obj.db_add_operation(DBDelete.update_version(obj, trans_dict))
                elif obj.vtType == 'change':
                    new_obj.db_add_operation(DBChange.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_operations') and hasattr(new_obj, 'db_deleted_operations'):
            for obj in old_obj.db_deleted_operations:
                if obj.vtType == 'add':
                    n_obj = DBAdd.update_version(obj, trans_dict)
                    new_obj.db_deleted_operations.append(n_obj)
                elif obj.vtType == 'delete':
                    n_obj = DBDelete.update_version(obj, trans_dict)
                    new_obj.db_deleted_operations.append(n_obj)
                elif obj.vtType == 'change':
                    n_obj = DBChange.update_version(obj, trans_dict)
                    new_obj.db_deleted_operations.append(n_obj)
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'prevId' in class_dict:
            res = class_dict['prevId'](old_obj, trans_dict)
            new_obj.db_prevId = res
        elif hasattr(old_obj, 'db_prevId') and old_obj.db_prevId is not None:
            new_obj.db_prevId = old_obj.db_prevId
        if 'date' in class_dict:
            res = class_dict['date'](old_obj, trans_dict)
            new_obj.db_date = res
        elif hasattr(old_obj, 'db_date') and old_obj.db_date is not None:
            new_obj.db_date = old_obj.db_date
        if 'session' in class_dict:
            res = class_dict['session'](old_obj, trans_dict)
            new_obj.db_session = res
        elif hasattr(old_obj, 'db_session') and old_obj.db_session is not None:
            new_obj.db_session = old_obj.db_session
        if 'user' in class_dict:
            res = class_dict['user'](old_obj, trans_dict)
            new_obj.db_user = res
        elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None:
            new_obj.db_user = old_obj.db_user
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) tuples for self and all
        children; with orphan, children are detached from this action."""
        children = []
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        to_del = []
        for child in self.db_operations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_operation(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return children queued for deletion, optionally clearing the queues."""
        children = []
        children.extend(self.db_deleted_annotations)
        children.extend(self.db_deleted_operations)
        if remove:
            self.db_deleted_annotations = []
            self.db_deleted_operations = []
        return children
    def has_changes(self):
        """True when this action or any child has been modified."""
        if self.is_dirty:
            return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        for child in self._db_operations:
            if child.has_changes():
                return True
        return False
    # --- 'operations' children: property, mutators, and index lookups ---
    def __get_db_operations(self):
        return self._db_operations
    def __set_db_operations(self, operations):
        self._db_operations = operations
        self.is_dirty = True
    db_operations = property(__get_db_operations, __set_db_operations)
    def db_get_operations(self):
        return self._db_operations
    def db_add_operation(self, operation):
        self.is_dirty = True
        self._db_operations.append(operation)
        self.db_operations_id_index[operation.db_id] = operation
    def db_change_operation(self, operation):
        """Replace the child with the same db_id, or append it."""
        self.is_dirty = True
        found = False
        for i, existing in enumerate(self._db_operations):
            if existing.db_id == operation.db_id:
                self._db_operations[i] = operation
                found = True
                break
        if not found:
            self._db_operations.append(operation)
        self.db_operations_id_index[operation.db_id] = operation
    def db_delete_operation(self, operation):
        """Remove the child with the same db_id; queue persisted children for
        storage-level deletion.  KeyError when no such child exists."""
        self.is_dirty = True
        for i, existing in enumerate(self._db_operations):
            if existing.db_id == operation.db_id:
                if not existing.is_new:
                    self.db_deleted_operations.append(existing)
                del self._db_operations[i]
                break
        del self.db_operations_id_index[operation.db_id]
    def db_get_operation(self, key):
        """Return the child operation with db_id == key, or None."""
        for existing in self._db_operations:
            if existing.db_id == key:
                return existing
        return None
    def db_get_operation_by_id(self, key):
        return self.db_operations_id_index[key]
    def db_has_operation_with_id(self, key):
        return key in self.db_operations_id_index
    # --- scalar attributes: property plus quiet add/change/delete setters ---
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_prevId(self):
        return self._db_prevId
    def __set_db_prevId(self, prevId):
        self._db_prevId = prevId
        self.is_dirty = True
    db_prevId = property(__get_db_prevId, __set_db_prevId)
    def db_add_prevId(self, prevId):
        self._db_prevId = prevId
    def db_change_prevId(self, prevId):
        self._db_prevId = prevId
    def db_delete_prevId(self, prevId):
        self._db_prevId = None
    def __get_db_date(self):
        return self._db_date
    def __set_db_date(self, date):
        self._db_date = date
        self.is_dirty = True
    db_date = property(__get_db_date, __set_db_date)
    def db_add_date(self, date):
        self._db_date = date
    def db_change_date(self, date):
        self._db_date = date
    def db_delete_date(self, date):
        self._db_date = None
    def __get_db_session(self):
        return self._db_session
    def __set_db_session(self, session):
        self._db_session = session
        self.is_dirty = True
    db_session = property(__get_db_session, __set_db_session)
    def db_add_session(self, session):
        self._db_session = session
    def db_change_session(self, session):
        self._db_session = session
    def db_delete_session(self, session):
        self._db_session = None
    def __get_db_user(self):
        return self._db_user
    def __set_db_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(__get_db_user, __set_db_user)
    def db_add_user(self, user):
        self._db_user = user
    def db_change_user(self, user):
        self._db_user = user
    def db_delete_user(self, user):
        self._db_user = None
    # --- 'annotations' children: property, mutators, id and key indices ---
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_change_annotation(self, annotation):
        """Replace the child with the same db_id, or append it."""
        self.is_dirty = True
        found = False
        for i, existing in enumerate(self._db_annotations):
            if existing.db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_delete_annotation(self, annotation):
        """Remove the child with the same db_id; queue persisted children for
        storage-level deletion.  KeyError when no such child exists."""
        self.is_dirty = True
        for i, existing in enumerate(self._db_annotations):
            if existing.db_id == annotation.db_id:
                if not existing.is_new:
                    self.db_deleted_annotations.append(existing)
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
        del self.db_annotations_key_index[annotation.db_key]
    def db_get_annotation(self, key):
        """Return the child annotation with db_id == key, or None."""
        for existing in self._db_annotations:
            if existing.db_id == key:
                return existing
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def db_get_annotation_by_key(self, key):
        return self.db_annotations_key_index[key]
    def db_has_annotation_with_key(self, key):
        return key in self.db_annotations_key_index
    def getPrimaryKey(self):
        """Return this action's database id (_db_id)."""
        return self._db_id
class DBStartupPackage(object):
    """Auto-generated persistence wrapper for a startup-package entry.

    Holds a name and an optional single configuration child.
    """
    vtType = 'startup_package'
    def __init__(self, name=None, configuration=None):
        self._db_name = name
        self._db_configuration = configuration
        # Configurations removed after being persisted are queued here so
        # the deletion can be propagated to storage.
        self.db_deleted_configuration = []
        # Fresh objects are always considered modified and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBStartupPackage.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object; with new_ids, allocate a fresh id from
        id_scope and record the old->new pair in id_remap."""
        cp = DBStartupPackage(name=self._db_name)
        if self._db_configuration is not None:
            cp._db_configuration = self._db_configuration.do_copy(new_ids, id_scope, id_remap)
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                key_type = id_scope.remap[self.vtType]
            else:
                key_type = self.vtType
            id_remap[(key_type, self.db_id)] = new_id
            cp.db_id = new_id
        else:
            # A plain copy keeps the original's persistence flags.
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* to the current schema version via trans_dict."""
        if new_obj is None:
            new_obj = DBStartupPackage()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        if 'name' in class_dict:
            new_obj.db_name = class_dict['name'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_name', None) is not None:
            new_obj.db_name = old_obj.db_name
        if 'configuration' in class_dict:
            new_obj.db_configuration = class_dict['configuration'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_configuration', None) is not None:
            new_obj.db_add_configuration(
                DBConfiguration.update_version(old_obj.db_configuration, trans_dict))
        if hasattr(old_obj, 'db_deleted_configuration') and hasattr(new_obj, 'db_deleted_configuration'):
            for obj in old_obj.db_deleted_configuration:
                new_obj.db_deleted_configuration.append(
                    DBConfiguration.update_version(obj, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) tuples for self and the
        configuration child; with orphan, the child is detached."""
        children = []
        if self._db_configuration is not None:
            children.extend(self._db_configuration.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_configuration = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return children queued for deletion, optionally clearing the queue."""
        children = list(self.db_deleted_configuration)
        if remove:
            self.db_deleted_configuration = []
        return children
    def has_changes(self):
        """True when this object or its configuration child is dirty."""
        if self.is_dirty:
            return True
        config = self._db_configuration
        return config is not None and config.has_changes()
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self.is_dirty = True
        self._db_name = name
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        # Quiet setters used by the persistence layer: no dirty flag.
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_configuration(self):
        return self._db_configuration
    def __set_db_configuration(self, configuration):
        self.is_dirty = True
        self._db_configuration = configuration
    db_configuration = property(__get_db_configuration, __set_db_configuration)
    def db_add_configuration(self, configuration):
        self._db_configuration = configuration
    def db_change_configuration(self, configuration):
        self._db_configuration = configuration
    def db_delete_configuration(self, configuration):
        # Remember an already-persisted configuration so the deletion can
        # reach storage.
        if not self.is_new:
            self.db_deleted_configuration.append(self._db_configuration)
        self._db_configuration = None
class DBConfigInt(object):
    """Auto-generated persistence wrapper around a single integer value."""
    vtType = 'config_int'
    def __init__(self, value=None):
        self._db_value = value
        # Fresh objects are always considered modified and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBConfigInt.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object; with new_ids, allocate a fresh id via id_scope
        and record the old->new pair in id_remap."""
        cp = DBConfigInt(value=self._db_value)
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                key_type = id_scope.remap[self.vtType]
            else:
                key_type = self.vtType
            id_remap[(key_type, self.db_id)] = new_id
            cp.db_id = new_id
        else:
            # A plain copy keeps the original's persistence flags.
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* to the current schema version via trans_dict."""
        if new_obj is None:
            new_obj = DBConfigInt()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        if 'value' in class_dict:
            new_obj.db_value = class_dict['value'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_value', None) is not None:
            new_obj.db_value = old_obj.db_value
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Leaf object: report only (self, parent_type, parent_id)."""
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        """Leaf object: nothing is ever queued for deletion."""
        return []
    def has_changes(self):
        """True when this object has been modified since the last save."""
        return bool(self.is_dirty)
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self.is_dirty = True
        self._db_value = value
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        # Quiet setters used by the persistence layer: no dirty flag.
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        self._db_value = None
class DBOpmProcessIdEffect(object):
    """Auto-generated reference object: the id of an opm_process effect.

    Fix relative to the generated original: ``do_copy`` no longer raises
    TypeError when called with the default ``id_remap=None`` (e.g. via
    ``copy.copy``).
    """
    vtType = 'opm_process_id_effect'
    def __init__(self, id=None):
        self._db_id = id
        # Fresh objects are always considered modified and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmProcessIdEffect.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object; with new_ids, allocate a fresh id via id_scope.
        A referenced opm_process id already present in id_remap is rewritten."""
        cp = DBOpmProcessIdEffect(id=self._db_id)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # Bug fix: only consult id_remap when one was supplied -- the
        # original tested membership unconditionally, so copy.copy()
        # (id_remap=None) raised TypeError.
        if id_remap is not None and hasattr(self, 'db_id') and ('opm_process', self._db_id) in id_remap:
            cp._db_id = id_remap[('opm_process', self._db_id)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* to the current schema version via trans_dict."""
        if new_obj is None:
            new_obj = DBOpmProcessIdEffect()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Leaf object: report only (self, parent_type, parent_id)."""
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        """Leaf object: nothing is ever queued for deletion."""
        children = []
        return children
    def has_changes(self):
        """True when this object has been modified since the last save."""
        if self.is_dirty:
            return True
        return False
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        # Quiet setters used by the persistence layer: no dirty flag.
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
class DBRefProvPlan(object):
    """Auto-generated reference object pointing at a prov_entity (the plan).

    Fix relative to the generated original: ``do_copy`` no longer raises
    TypeError when called with the default ``id_remap=None`` (e.g. via
    ``copy.copy``).
    """
    vtType = 'ref_prov_plan'
    def __init__(self, prov_ref=None):
        self._db_prov_ref = prov_ref
        # Fresh objects are always considered modified and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBRefProvPlan.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object; with new_ids, allocate a fresh id via id_scope.
        A referenced prov_entity id already present in id_remap is rewritten."""
        cp = DBRefProvPlan(prov_ref=self._db_prov_ref)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # Bug fix: only consult id_remap when one was supplied -- the
        # original tested membership unconditionally, so copy.copy()
        # (id_remap=None) raised TypeError.
        if id_remap is not None and hasattr(self, 'db_prov_ref') and ('prov_entity', self._db_prov_ref) in id_remap:
            cp._db_prov_ref = id_remap[('prov_entity', self._db_prov_ref)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* to the current schema version via trans_dict."""
        if new_obj is None:
            new_obj = DBRefProvPlan()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'prov_ref' in class_dict:
            res = class_dict['prov_ref'](old_obj, trans_dict)
            new_obj.db_prov_ref = res
        elif hasattr(old_obj, 'db_prov_ref') and old_obj.db_prov_ref is not None:
            new_obj.db_prov_ref = old_obj.db_prov_ref
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Leaf object: report only (self, parent_type, parent_id)."""
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        """Leaf object: nothing is ever queued for deletion."""
        children = []
        return children
    def has_changes(self):
        """True when this object has been modified since the last save."""
        if self.is_dirty:
            return True
        return False
    def __get_db_prov_ref(self):
        return self._db_prov_ref
    def __set_db_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
        self.is_dirty = True
    db_prov_ref = property(__get_db_prov_ref, __set_db_prov_ref)
    def db_add_prov_ref(self, prov_ref):
        # Quiet setters used by the persistence layer: no dirty flag.
        self._db_prov_ref = prov_ref
    def db_change_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
    def db_delete_prov_ref(self, prov_ref):
        self._db_prov_ref = None
class DBOpmAccounts(object):
    """Auto-generated persistence class for an OPM accounts collection.

    Holds a list of account children (mirrored by an id-keyed index) and a
    list of non-keyed opm_overlaps children.

    Fix relative to the generated original: index-based ``xrange(len(...))``
    loops replaced with ``enumerate``/direct iteration (works on Python 2
    and 3).
    """
    vtType = 'opm_accounts'
    def __init__(self, accounts=None, opm_overlapss=None):
        # Accounts removed after being persisted are queued here so the
        # deletion can be propagated to storage.
        self.db_deleted_accounts = []
        # Id-keyed index kept in sync with the _db_accounts list.
        self.db_accounts_id_index = {}
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        for v in self._db_accounts:
            self.db_accounts_id_index[v.db_id] = v
        self.db_deleted_opm_overlapss = []
        if opm_overlapss is None:
            self._db_opm_overlapss = []
        else:
            self._db_opm_overlapss = opm_overlapss
        # Fresh objects are always considered modified and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmAccounts.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object and its children; with new_ids, allocate a
        fresh id from id_scope and record the mapping in id_remap."""
        cp = DBOpmAccounts()
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        if self._db_opm_overlapss is None:
            cp._db_opm_overlapss = []
        else:
            cp._db_opm_overlapss = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_overlapss]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_accounts_id_index = dict((v.db_id, v) for v in cp._db_accounts)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* into the current schema version using the
        per-class field translators in trans_dict."""
        if new_obj is None:
            new_obj = DBOpmAccounts()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccount.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccount.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'opm_overlapss' in class_dict:
            res = class_dict['opm_overlapss'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_opm_overlaps(obj)
        elif hasattr(old_obj, 'db_opm_overlapss') and old_obj.db_opm_overlapss is not None:
            for obj in old_obj.db_opm_overlapss:
                new_obj.db_add_opm_overlaps(DBOpmOverlaps.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_opm_overlapss') and hasattr(new_obj, 'db_deleted_opm_overlapss'):
            for obj in old_obj.db_deleted_opm_overlapss:
                n_obj = DBOpmOverlaps.update_version(obj, trans_dict)
                new_obj.db_deleted_opm_overlapss.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) tuples for self and all
        children; with orphan, children are detached from this object."""
        children = []
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        to_del = []
        for child in self.db_opm_overlapss:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_opm_overlaps(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return children queued for deletion, optionally clearing the queues."""
        children = []
        children.extend(self.db_deleted_accounts)
        children.extend(self.db_deleted_opm_overlapss)
        if remove:
            self.db_deleted_accounts = []
            self.db_deleted_opm_overlapss = []
        return children
    def has_changes(self):
        """True when this object or any child has been modified."""
        if self.is_dirty:
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        for child in self._db_opm_overlapss:
            if child.has_changes():
                return True
        return False
    # --- 'accounts' children: property, mutators, and index lookups ---
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
        self.db_accounts_id_index[account.db_id] = account
    def db_change_account(self, account):
        """Replace the child with the same db_id, or append it."""
        self.is_dirty = True
        found = False
        for i, existing in enumerate(self._db_accounts):
            if existing.db_id == account.db_id:
                self._db_accounts[i] = account
                found = True
                break
        if not found:
            self._db_accounts.append(account)
        self.db_accounts_id_index[account.db_id] = account
    def db_delete_account(self, account):
        """Remove the child with the same db_id; queue persisted children for
        storage-level deletion.  KeyError when no such child exists."""
        self.is_dirty = True
        for i, existing in enumerate(self._db_accounts):
            if existing.db_id == account.db_id:
                if not existing.is_new:
                    self.db_deleted_accounts.append(existing)
                del self._db_accounts[i]
                break
        del self.db_accounts_id_index[account.db_id]
    def db_get_account(self, key):
        """Return the child account with db_id == key, or None."""
        for existing in self._db_accounts:
            if existing.db_id == key:
                return existing
        return None
    def db_get_account_by_id(self, key):
        return self.db_accounts_id_index[key]
    def db_has_account_with_id(self, key):
        return key in self.db_accounts_id_index
    # --- 'opm_overlapss' children: non-keyed, so no index or real delete ---
    def __get_db_opm_overlapss(self):
        return self._db_opm_overlapss
    def __set_db_opm_overlapss(self, opm_overlapss):
        self._db_opm_overlapss = opm_overlapss
        self.is_dirty = True
    db_opm_overlapss = property(__get_db_opm_overlapss, __set_db_opm_overlapss)
    def db_get_opm_overlapss(self):
        return self._db_opm_overlapss
    def db_add_opm_overlaps(self, opm_overlaps):
        self.is_dirty = True
        self._db_opm_overlapss.append(opm_overlaps)
    def db_change_opm_overlaps(self, opm_overlaps):
        # Non-keyed children cannot be matched, so 'change' appends
        # (generated-code convention).
        self.is_dirty = True
        self._db_opm_overlapss.append(opm_overlaps)
    def db_delete_opm_overlaps(self, opm_overlaps):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_opm_overlaps(self, key):
        # Non-keyed children cannot be looked up.
        return None
class DBRefProvAgent(object):
    """Auto-generated reference object pointing at a prov_agent.

    Fix relative to the generated original: ``do_copy`` no longer raises
    TypeError when called with the default ``id_remap=None`` (e.g. via
    ``copy.copy``).
    """
    vtType = 'ref_prov_agent'
    def __init__(self, prov_ref=None):
        self._db_prov_ref = prov_ref
        # Fresh objects are always considered modified and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBRefProvAgent.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object; with new_ids, allocate a fresh id via id_scope.
        A referenced prov_agent id already present in id_remap is rewritten."""
        cp = DBRefProvAgent(prov_ref=self._db_prov_ref)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # Bug fix: only consult id_remap when one was supplied -- the
        # original tested membership unconditionally, so copy.copy()
        # (id_remap=None) raised TypeError.
        if id_remap is not None and hasattr(self, 'db_prov_ref') and ('prov_agent', self._db_prov_ref) in id_remap:
            cp._db_prov_ref = id_remap[('prov_agent', self._db_prov_ref)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* to the current schema version via trans_dict."""
        if new_obj is None:
            new_obj = DBRefProvAgent()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'prov_ref' in class_dict:
            res = class_dict['prov_ref'](old_obj, trans_dict)
            new_obj.db_prov_ref = res
        elif hasattr(old_obj, 'db_prov_ref') and old_obj.db_prov_ref is not None:
            new_obj.db_prov_ref = old_obj.db_prov_ref
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Leaf object: report only (self, parent_type, parent_id)."""
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        """Leaf object: nothing is ever queued for deletion."""
        children = []
        return children
    def has_changes(self):
        """True when this object has been modified since the last save."""
        if self.is_dirty:
            return True
        return False
    def __get_db_prov_ref(self):
        return self._db_prov_ref
    def __set_db_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
        self.is_dirty = True
    db_prov_ref = property(__get_db_prov_ref, __set_db_prov_ref)
    def db_add_prov_ref(self, prov_ref):
        # Quiet setters used by the persistence layer: no dirty flag.
        self._db_prov_ref = prov_ref
    def db_change_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
    def db_delete_prov_ref(self, prov_ref):
        self._db_prov_ref = None
class DBPortSpec(object):
    """Auto-generated persistence class for a module port specification.

    NOTE(review): the class body continues beyond this excerpt.
    """
    vtType = 'portSpec'
    def __init__(self, id=None, name=None, type=None, optional=None, sort_key=None, portSpecItems=None, min_conns=None, max_conns=None):
        self._db_id = id
        self._db_name = name
        self._db_type = type
        self._db_optional = optional
        self._db_sort_key = sort_key
        # Children removed after being persisted are queued here so the
        # deletion can be propagated to storage.
        self.db_deleted_portSpecItems = []
        # Id-keyed index kept in sync with the _db_portSpecItems list.
        self.db_portSpecItems_id_index = {}
        if portSpecItems is None:
            self._db_portSpecItems = []
        else:
            self._db_portSpecItems = portSpecItems
        for v in self._db_portSpecItems:
            self.db_portSpecItems_id_index[v.db_id] = v
        self._db_min_conns = min_conns
        self._db_max_conns = max_conns
        # Fresh objects are always considered modified and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBPortSpec.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this spec and its portSpecItems; with new_ids, allocate
        a fresh id from id_scope and record the old->new pair in id_remap."""
        cp = DBPortSpec(id=self._db_id,
                        name=self._db_name,
                        type=self._db_type,
                        optional=self._db_optional,
                        sort_key=self._db_sort_key,
                        min_conns=self._db_min_conns,
                        max_conns=self._db_max_conns)
        if self._db_portSpecItems is None:
            cp._db_portSpecItems = []
        else:
            cp._db_portSpecItems = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_portSpecItems]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_portSpecItems_id_index = dict((v.db_id, v) for v in cp._db_portSpecItems)
        if not new_ids:
            # A plain copy keeps the original's persistence flags.
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* into the current schema version.

        For each field, a translator registered in trans_dict under this
        class's name takes precedence; otherwise the old value is copied
        when present and not None.
        """
        if new_obj is None:
            new_obj = DBPortSpec()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'type' in class_dict:
            res = class_dict['type'](old_obj, trans_dict)
            new_obj.db_type = res
        elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None:
            new_obj.db_type = old_obj.db_type
        if 'optional' in class_dict:
            res = class_dict['optional'](old_obj, trans_dict)
            new_obj.db_optional = res
        elif hasattr(old_obj, 'db_optional') and old_obj.db_optional is not None:
            new_obj.db_optional = old_obj.db_optional
        if 'sort_key' in class_dict:
            res = class_dict['sort_key'](old_obj, trans_dict)
            new_obj.db_sort_key = res
        elif hasattr(old_obj, 'db_sort_key') and old_obj.db_sort_key is not None:
            new_obj.db_sort_key = old_obj.db_sort_key
        if 'portSpecItems' in class_dict:
            res = class_dict['portSpecItems'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_portSpecItem(obj)
        elif hasattr(old_obj, 'db_portSpecItems') and old_obj.db_portSpecItems is not None:
            for obj in old_obj.db_portSpecItems:
                new_obj.db_add_portSpecItem(DBPortSpecItem.update_version(obj, trans_dict))
        # Deletion queues are carried over so pending deletes survive the
        # version translation.
        if hasattr(old_obj, 'db_deleted_portSpecItems') and hasattr(new_obj, 'db_deleted_portSpecItems'):
            for obj in old_obj.db_deleted_portSpecItems:
                n_obj = DBPortSpecItem.update_version(obj, trans_dict)
                new_obj.db_deleted_portSpecItems.append(n_obj)
        if 'min_conns' in class_dict:
            res = class_dict['min_conns'](old_obj, trans_dict)
            new_obj.db_min_conns = res
        elif hasattr(old_obj, 'db_min_conns') and old_obj.db_min_conns is not None:
            new_obj.db_min_conns = old_obj.db_min_conns
        if 'max_conns' in class_dict:
            res = class_dict['max_conns'](old_obj, trans_dict)
            new_obj.db_max_conns = res
        elif hasattr(old_obj, 'db_max_conns') and old_obj.db_max_conns is not None:
            new_obj.db_max_conns = old_obj.db_max_conns
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object
        and, unless for_action is set, all of its portSpecItem children;
        self is appended last."""
        children = []
        if not for_action:
            for child in self.db_portSpecItems:
                children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return children pending persistence-side deletion; clear the
        pending list when remove is True."""
        children = []
        children.extend(self.db_deleted_portSpecItems)
        if remove:
            self.db_deleted_portSpecItems = []
        return children
    def has_changes(self):
        """True if this object or any portSpecItem child is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_portSpecItems:
            if child.has_changes():
                return True
        return False
    # --- generated scalar accessors ---
    # Each field gets a property whose setter marks the object dirty,
    # plus add/change/delete helpers that bypass the dirty flag.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_type(self):
        return self._db_type
    def __set_db_type(self, type):
        self._db_type = type
        self.is_dirty = True
    db_type = property(__get_db_type, __set_db_type)
    def db_add_type(self, type):
        self._db_type = type
    def db_change_type(self, type):
        self._db_type = type
    def db_delete_type(self, type):
        self._db_type = None
    def __get_db_optional(self):
        return self._db_optional
    def __set_db_optional(self, optional):
        self._db_optional = optional
        self.is_dirty = True
    db_optional = property(__get_db_optional, __set_db_optional)
    def db_add_optional(self, optional):
        self._db_optional = optional
    def db_change_optional(self, optional):
        self._db_optional = optional
    def db_delete_optional(self, optional):
        self._db_optional = None
    def __get_db_sort_key(self):
        return self._db_sort_key
    def __set_db_sort_key(self, sort_key):
        self._db_sort_key = sort_key
        self.is_dirty = True
    db_sort_key = property(__get_db_sort_key, __set_db_sort_key)
    def db_add_sort_key(self, sort_key):
        self._db_sort_key = sort_key
    def db_change_sort_key(self, sort_key):
        self._db_sort_key = sort_key
    def db_delete_sort_key(self, sort_key):
        self._db_sort_key = None
    # --- portSpecItems: id-keyed child collection with a secondary index
    # (db_portSpecItems_id_index maps child db_id -> child object) ---
    def __get_db_portSpecItems(self):
        return self._db_portSpecItems
    def __set_db_portSpecItems(self, portSpecItems):
        self._db_portSpecItems = portSpecItems
        self.is_dirty = True
    db_portSpecItems = property(__get_db_portSpecItems, __set_db_portSpecItems)
    def db_get_portSpecItems(self):
        return self._db_portSpecItems
    def db_add_portSpecItem(self, portSpecItem):
        self.is_dirty = True
        self._db_portSpecItems.append(portSpecItem)
        self.db_portSpecItems_id_index[portSpecItem.db_id] = portSpecItem
    def db_change_portSpecItem(self, portSpecItem):
        """Replace the child with the same db_id in place, or append if no
        match exists; the index is updated either way."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_portSpecItems)):
            if self._db_portSpecItems[i].db_id == portSpecItem.db_id:
                self._db_portSpecItems[i] = portSpecItem
                found = True
                break
        if not found:
            self._db_portSpecItems.append(portSpecItem)
        self.db_portSpecItems_id_index[portSpecItem.db_id] = portSpecItem
    def db_delete_portSpecItem(self, portSpecItem):
        """Remove the child with the same db_id; non-new children are
        parked in db_deleted_portSpecItems for persistence-side deletion.
        Raises KeyError if the id is not in the index."""
        self.is_dirty = True
        for i in xrange(len(self._db_portSpecItems)):
            if self._db_portSpecItems[i].db_id == portSpecItem.db_id:
                if not self._db_portSpecItems[i].is_new:
                    self.db_deleted_portSpecItems.append(self._db_portSpecItems[i])
                del self._db_portSpecItems[i]
                break
        del self.db_portSpecItems_id_index[portSpecItem.db_id]
    def db_get_portSpecItem(self, key):
        """Linear scan by db_id; returns None if not found."""
        for i in xrange(len(self._db_portSpecItems)):
            if self._db_portSpecItems[i].db_id == key:
                return self._db_portSpecItems[i]
        return None
    def db_get_portSpecItem_by_id(self, key):
        return self.db_portSpecItems_id_index[key]
    def db_has_portSpecItem_with_id(self, key):
        return key in self.db_portSpecItems_id_index
    # --- min_conns / max_conns: generated scalar accessors ---
    def __get_db_min_conns(self):
        return self._db_min_conns
    def __set_db_min_conns(self, min_conns):
        self._db_min_conns = min_conns
        self.is_dirty = True
    db_min_conns = property(__get_db_min_conns, __set_db_min_conns)
    def db_add_min_conns(self, min_conns):
        self._db_min_conns = min_conns
    def db_change_min_conns(self, min_conns):
        self._db_min_conns = min_conns
    def db_delete_min_conns(self, min_conns):
        self._db_min_conns = None
    def __get_db_max_conns(self):
        return self._db_max_conns
    def __set_db_max_conns(self, max_conns):
        self._db_max_conns = max_conns
        self.is_dirty = True
    db_max_conns = property(__get_db_max_conns, __set_db_max_conns)
    def db_add_max_conns(self, max_conns):
        self._db_max_conns = max_conns
    def db_change_max_conns(self, max_conns):
        self._db_max_conns = max_conns
    def db_delete_max_conns(self, max_conns):
        self._db_max_conns = None
    def getPrimaryKey(self):
        """Primary key for persistence: the db_id field."""
        return self._db_id
class DBEnabledPackages(object):
    """Set of packages enabled in a startup configuration.

    Generated-style domain object: holds a flat list of package objects
    plus a secondary index keyed on each package's ``db_name``, and tracks
    modification state through ``is_dirty`` / ``is_new`` flags.
    """
    vtType = 'enabled_packages'
    def __init__(self, packages=None):
        # Objects removed through the delete protocol are parked here until
        # db_deleted_children(remove=True) drains them.
        self.db_deleted_packages = []
        # Secondary index: package db_name -> package object.
        self.db_packages_name_index = {}
        if packages is None:
            self._db_packages = []
        else:
            self._db_packages = packages
            for v in self._db_packages:
                self.db_packages_name_index[v.db_name] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBEnabledPackages.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object and its packages.

        When new_ids is True, a fresh id is drawn from id_scope and the
        (type, old id) -> new id mapping is recorded in id_remap.
        """
        cp = DBEnabledPackages()
        if self._db_packages is None:
            cp._db_packages = []
        else:
            cp._db_packages = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_packages]
        # set new ids
        if new_ids:
            # NOTE(review): this class defines no db_id attribute, so this
            # branch would raise AttributeError if ever taken -- presumably
            # callers never request new ids for this wrapper; confirm.
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_packages_name_index = dict((v.db_name, v) for v in cp._db_packages)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, honoring per-field
        override callables registered for this class in trans_dict."""
        if new_obj is None:
            new_obj = DBEnabledPackages()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'packages' in class_dict:
            res = class_dict['packages'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_package(obj)
        elif hasattr(old_obj, 'db_packages') and old_obj.db_packages is not None:
            for obj in old_obj.db_packages:
                new_obj.db_add_package(DBStartupPackage.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_packages') and hasattr(new_obj, 'db_deleted_packages'):
            for obj in old_obj.db_deleted_packages:
                n_obj = DBStartupPackage.update_version(obj, trans_dict)
                new_obj.db_deleted_packages.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object
        and all package children, self last.

        NOTE(review): with orphan=True this ends up calling
        db_delete_package, which always raises -- confirm orphan is never
        requested for this type.
        """
        children = []
        to_del = []
        for child in self.db_packages:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_package(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return packages pending deletion; clear the list when remove."""
        children = []
        children.extend(self.db_deleted_packages)
        if remove:
            self.db_deleted_packages = []
        return children
    def has_changes(self):
        """True if this object or any package child is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_packages:
            if child.has_changes():
                return True
        return False
    def __get_db_packages(self):
        return self._db_packages
    def __set_db_packages(self, packages):
        self._db_packages = packages
        self.is_dirty = True
    db_packages = property(__get_db_packages, __set_db_packages)
    def db_get_packages(self):
        return self._db_packages
    def db_add_package(self, package):
        self.is_dirty = True
        self._db_packages.append(package)
        self.db_packages_name_index[package.db_name] = package
    def db_change_package(self, package):
        # NOTE(review): like db_add_package this appends rather than
        # replacing an existing entry with the same name; only the name
        # index entry is overwritten.  Kept as-is for compatibility.
        self.is_dirty = True
        self._db_packages.append(package)
        self.db_packages_name_index[package.db_name] = package
    def db_delete_package(self, package):
        # Packages carry no primary key, so targeted deletion is
        # unsupported.  Fixed: raise *before* touching is_dirty so a
        # failed delete does not mark this object as modified.
        raise Exception('Cannot delete a non-keyed object')
    def db_get_package(self, key):
        # Non-keyed collection: positional lookup is unsupported.
        return None
    def db_get_package_by_name(self, key):
        return self.db_packages_name_index[key]
    def db_has_package_with_name(self, key):
        return key in self.db_packages_name_index
class DBOpmArtifact(object):
    """Artifact record (vtType 'opm_artifact'): an id, a single value
    object, and a list of account ids.

    Generated-style domain object with ``is_dirty`` / ``is_new`` change
    tracking, deep copy, and schema-version translation.
    """
    vtType = 'opm_artifact'
    def __init__(self, id=None, value=None, accounts=None):
        self._db_id = id
        # Values detached via db_delete_value are parked here until
        # db_deleted_children(remove=True) drains them.
        self.db_deleted_value = []
        self._db_value = value
        self.db_deleted_accounts = []
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmArtifact.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this artifact, its value and its accounts; with
        new_ids, draw a fresh id from id_scope and record the remapping."""
        cp = DBOpmArtifact(id=self._db_id)
        if self._db_value is not None:
            cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, honoring per-field
        override callables registered for this class in trans_dict."""
        if new_obj is None:
            new_obj = DBOpmArtifact()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'value' in class_dict:
            res = class_dict['value'](old_obj, trans_dict)
            new_obj.db_value = res
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            obj = old_obj.db_value
            new_obj.db_add_value(DBOpmArtifactValue.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
            for obj in old_obj.db_deleted_value:
                n_obj = DBOpmArtifactValue.update_version(obj, trans_dict)
                new_obj.db_deleted_value.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object,
        its value (if any) and all account children, self last.

        NOTE(review): with orphan=True this ends up calling
        db_delete_account, which always raises -- confirm orphan is never
        requested for this type.
        """
        children = []
        if self._db_value is not None:
            children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_value = None
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return children pending deletion; clear the lists when remove."""
        children = []
        children.extend(self.db_deleted_value)
        children.extend(self.db_deleted_accounts)
        if remove:
            self.db_deleted_value = []
            self.db_deleted_accounts = []
        return children
    def has_changes(self):
        """True if this object, its value, or any account child is dirty."""
        if self.is_dirty:
            return True
        if self._db_value is not None and self._db_value.has_changes():
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        return False
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        # Fixed: only park a real value for later persistence-side
        # deletion; previously a None placeholder could be appended to
        # db_deleted_value when no value was attached.
        if not self.is_new and self._db_value is not None:
            self.db_deleted_value.append(self._db_value)
        self._db_value = None
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        # NOTE(review): appends rather than replacing -- accounts carry no
        # key to match on.  Kept as-is for compatibility.
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        # Accounts carry no primary key, so targeted deletion is
        # unsupported.  Fixed: raise *before* touching is_dirty so a
        # failed delete does not mark this object as modified.
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        # Non-keyed collection: lookup is unsupported.
        return None
    def getPrimaryKey(self):
        """Primary key for persistence: the db_id field."""
        return self._db_id
class DBLog(object):
    """Execution log for a vistrail: workflow executions plus the machines
    they ran on, with bookkeeping metadata (entity type, version, name,
    last-modified timestamp, owning vistrail id).

    Generated-style domain object: tracks modification state via
    ``is_dirty`` / ``is_new`` and keeps id-based secondary indices for its
    two child collections.
    """
    vtType = 'log'
    def __init__(self, id=None, entity_type=None, version=None, name=None, last_modified=None, workflow_execs=None, machines=None, vistrail_id=None):
        self._db_id = id
        self._db_entity_type = entity_type
        self._db_version = version
        self._db_name = name
        self._db_last_modified = last_modified
        # Children removed via db_delete_* are parked in db_deleted_* until
        # db_deleted_children(remove=True) drains them.
        self.db_deleted_workflow_execs = []
        # Secondary index: workflow_exec db_id -> workflow_exec object.
        self.db_workflow_execs_id_index = {}
        if workflow_execs is None:
            self._db_workflow_execs = []
        else:
            self._db_workflow_execs = workflow_execs
            for v in self._db_workflow_execs:
                self.db_workflow_execs_id_index[v.db_id] = v
        self.db_deleted_machines = []
        # Secondary index: machine db_id -> machine object.
        self.db_machines_id_index = {}
        if machines is None:
            self._db_machines = []
        else:
            self._db_machines = machines
            for v in self._db_machines:
                self.db_machines_id_index[v.db_id] = v
        self._db_vistrail_id = vistrail_id
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBLog.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this log and its children.

        With new_ids, a fresh id is drawn from id_scope, the remapping is
        recorded in id_remap, and a previously-remapped vistrail id is
        rewritten on the copy.
        """
        cp = DBLog(id=self._db_id,
                   entity_type=self._db_entity_type,
                   version=self._db_version,
                   name=self._db_name,
                   last_modified=self._db_last_modified,
                   vistrail_id=self._db_vistrail_id)
        if self._db_workflow_execs is None:
            cp._db_workflow_execs = []
        else:
            cp._db_workflow_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_workflow_execs]
        if self._db_machines is None:
            cp._db_machines = []
        else:
            cp._db_machines = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_machines]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_vistrail_id') and ('vistrail', self._db_vistrail_id) in id_remap:
                cp._db_vistrail_id = id_remap[('vistrail', self._db_vistrail_id)]
        # recreate indices and set flags
        cp.db_workflow_execs_id_index = dict((v.db_id, v) for v in cp._db_workflow_execs)
        cp.db_machines_id_index = dict((v.db_id, v) for v in cp._db_machines)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, honoring per-field
        override callables registered for this class in trans_dict."""
        if new_obj is None:
            new_obj = DBLog()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'entity_type' in class_dict:
            res = class_dict['entity_type'](old_obj, trans_dict)
            new_obj.db_entity_type = res
        elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
            new_obj.db_entity_type = old_obj.db_entity_type
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'last_modified' in class_dict:
            res = class_dict['last_modified'](old_obj, trans_dict)
            new_obj.db_last_modified = res
        elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
            new_obj.db_last_modified = old_obj.db_last_modified
        if 'workflow_execs' in class_dict:
            res = class_dict['workflow_execs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_workflow_exec(obj)
        elif hasattr(old_obj, 'db_workflow_execs') and old_obj.db_workflow_execs is not None:
            for obj in old_obj.db_workflow_execs:
                new_obj.db_add_workflow_exec(DBWorkflowExec.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_workflow_execs') and hasattr(new_obj, 'db_deleted_workflow_execs'):
            for obj in old_obj.db_deleted_workflow_execs:
                n_obj = DBWorkflowExec.update_version(obj, trans_dict)
                new_obj.db_deleted_workflow_execs.append(n_obj)
        if 'machines' in class_dict:
            res = class_dict['machines'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_machine(obj)
        elif hasattr(old_obj, 'db_machines') and old_obj.db_machines is not None:
            for obj in old_obj.db_machines:
                new_obj.db_add_machine(DBMachine.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_machines') and hasattr(new_obj, 'db_deleted_machines'):
            for obj in old_obj.db_deleted_machines:
                n_obj = DBMachine.update_version(obj, trans_dict)
                new_obj.db_deleted_machines.append(n_obj)
        if 'vistrail_id' in class_dict:
            res = class_dict['vistrail_id'](old_obj, trans_dict)
            new_obj.db_vistrail_id = res
        elif hasattr(old_obj, 'db_vistrail_id') and old_obj.db_vistrail_id is not None:
            new_obj.db_vistrail_id = old_obj.db_vistrail_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object
        and all children, depth-first, self last; with orphan=True,
        children are removed from this object as they are reported."""
        children = []
        to_del = []
        for child in self.db_workflow_execs:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_workflow_exec(child)
        to_del = []
        for child in self.db_machines:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_machine(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return children pending deletion; clear the lists when remove."""
        children = []
        children.extend(self.db_deleted_workflow_execs)
        children.extend(self.db_deleted_machines)
        if remove:
            self.db_deleted_workflow_execs = []
            self.db_deleted_machines = []
        return children
    def has_changes(self):
        """True if this object or any child is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_workflow_execs:
            if child.has_changes():
                return True
        for child in self._db_machines:
            if child.has_changes():
                return True
        return False
    # --- generated scalar accessors: dirty-tracking property plus
    # add/change/delete helpers (which do not touch is_dirty) ---
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_entity_type(self):
        return self._db_entity_type
    def __set_db_entity_type(self, entity_type):
        self._db_entity_type = entity_type
        self.is_dirty = True
    db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
    def db_add_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_change_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_delete_entity_type(self, entity_type):
        self._db_entity_type = None
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_last_modified(self):
        return self._db_last_modified
    def __set_db_last_modified(self, last_modified):
        self._db_last_modified = last_modified
        self.is_dirty = True
    db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
    def db_add_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_change_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_delete_last_modified(self, last_modified):
        self._db_last_modified = None
    # --- workflow_execs: id-keyed child collection with secondary index ---
    def __get_db_workflow_execs(self):
        return self._db_workflow_execs
    def __set_db_workflow_execs(self, workflow_execs):
        self._db_workflow_execs = workflow_execs
        self.is_dirty = True
    db_workflow_execs = property(__get_db_workflow_execs, __set_db_workflow_execs)
    def db_get_workflow_execs(self):
        return self._db_workflow_execs
    def db_add_workflow_exec(self, workflow_exec):
        self.is_dirty = True
        self._db_workflow_execs.append(workflow_exec)
        self.db_workflow_execs_id_index[workflow_exec.db_id] = workflow_exec
    def db_change_workflow_exec(self, workflow_exec):
        """Replace the child with the same db_id in place, or append."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_workflow_execs)):
            if self._db_workflow_execs[i].db_id == workflow_exec.db_id:
                self._db_workflow_execs[i] = workflow_exec
                found = True
                break
        if not found:
            self._db_workflow_execs.append(workflow_exec)
        self.db_workflow_execs_id_index[workflow_exec.db_id] = workflow_exec
    def db_delete_workflow_exec(self, workflow_exec):
        """Remove the child with the same db_id; non-new children are
        parked for persistence-side deletion."""
        self.is_dirty = True
        for i in xrange(len(self._db_workflow_execs)):
            if self._db_workflow_execs[i].db_id == workflow_exec.db_id:
                if not self._db_workflow_execs[i].is_new:
                    self.db_deleted_workflow_execs.append(self._db_workflow_execs[i])
                del self._db_workflow_execs[i]
                break
        del self.db_workflow_execs_id_index[workflow_exec.db_id]
    def db_get_workflow_exec(self, key):
        for i in xrange(len(self._db_workflow_execs)):
            if self._db_workflow_execs[i].db_id == key:
                return self._db_workflow_execs[i]
        return None
    def db_get_workflow_exec_by_id(self, key):
        return self.db_workflow_execs_id_index[key]
    def db_has_workflow_exec_with_id(self, key):
        return key in self.db_workflow_execs_id_index
    # --- machines: id-keyed child collection with secondary index ---
    def __get_db_machines(self):
        return self._db_machines
    def __set_db_machines(self, machines):
        self._db_machines = machines
        self.is_dirty = True
    db_machines = property(__get_db_machines, __set_db_machines)
    def db_get_machines(self):
        return self._db_machines
    def db_add_machine(self, machine):
        self.is_dirty = True
        self._db_machines.append(machine)
        self.db_machines_id_index[machine.db_id] = machine
    def db_change_machine(self, machine):
        """Replace the child with the same db_id in place, or append."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_machines)):
            if self._db_machines[i].db_id == machine.db_id:
                self._db_machines[i] = machine
                found = True
                break
        if not found:
            self._db_machines.append(machine)
        self.db_machines_id_index[machine.db_id] = machine
    def db_delete_machine(self, machine):
        """Remove the child with the same db_id; non-new children are
        parked for persistence-side deletion."""
        self.is_dirty = True
        for i in xrange(len(self._db_machines)):
            if self._db_machines[i].db_id == machine.db_id:
                if not self._db_machines[i].is_new:
                    self.db_deleted_machines.append(self._db_machines[i])
                del self._db_machines[i]
                break
        del self.db_machines_id_index[machine.db_id]
    def db_get_machine(self, key):
        for i in xrange(len(self._db_machines)):
            if self._db_machines[i].db_id == key:
                return self._db_machines[i]
        return None
    def db_get_machine_by_id(self, key):
        return self.db_machines_id_index[key]
    def db_has_machine_with_id(self, key):
        return key in self.db_machines_id_index
    def __get_db_vistrail_id(self):
        return self._db_vistrail_id
    def __set_db_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = vistrail_id
        self.is_dirty = True
    db_vistrail_id = property(__get_db_vistrail_id, __set_db_vistrail_id)
    def db_add_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = vistrail_id
    def db_change_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = vistrail_id
    def db_delete_vistrail_id(self, vistrail_id):
        self._db_vistrail_id = None
    def getPrimaryKey(self):
        """Primary key for persistence: the db_id field."""
        return self._db_id
class DBOpmProcessIdCause(object):
    """Reference (by id) to the process acting as the cause of an OPM
    dependency.

    Generated-style domain object: a single ``id`` field plus the usual
    dirty/new change-tracking, copy, and schema-translation machinery.
    """
    vtType = 'opm_process_id_cause'

    def __init__(self, id=None):
        self._db_id = id
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmProcessIdCause.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this object; with new_ids, allocate a fresh id from
        id_scope and record the remapping in id_remap."""
        clone = DBOpmProcessIdCause(id=self._db_id)
        if new_ids:
            fresh_id = id_scope.getNewId(self.vtType)
            remapped_type = self.vtType
            if self.vtType in id_scope.remap:
                remapped_type = id_scope.remap[self.vtType]
            id_remap[(remapped_type, self.db_id)] = fresh_id
            clone.db_id = fresh_id
        # Rewrite the reference if the target opm_process was remapped.
        if hasattr(self, 'db_id') and ('opm_process', self._db_id) in id_remap:
            clone._db_id = id_remap[('opm_process', self._db_id)]
        if not new_ids:
            clone.is_dirty = self.is_dirty
            clone.is_new = self.is_new
        return clone

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Carry old_obj's fields into new_obj, honoring any override
        callables registered for this class in trans_dict."""
        if new_obj is None:
            new_obj = DBOpmProcessIdCause()
        overrides = trans_dict.get(new_obj.__class__.__name__, {})
        if 'id' in overrides:
            new_obj.db_id = overrides['id'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_id', None) is not None:
            new_obj.db_id = old_obj.db_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """This object has no children; report only itself."""
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        """No child collections, so nothing is ever pending deletion."""
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    def __get_db_id(self):
        return self._db_id

    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True

    db_id = property(__get_db_id, __set_db_id)

    def db_add_id(self, id):
        self._db_id = id

    def db_change_id(self, id):
        self._db_id = id

    def db_delete_id(self, id):
        self._db_id = None
class DBOpmArtifacts(object):
    """Collection wrapper for artifact objects, indexed by artifact db_id.

    Generated-style domain object with ``is_dirty`` / ``is_new`` change
    tracking.
    """
    vtType = 'opm_artifacts'
    def __init__(self, artifacts=None):
        # Artifacts removed via db_delete_artifact are parked here until
        # db_deleted_children(remove=True) drains them.
        self.db_deleted_artifacts = []
        # Secondary index: artifact db_id -> artifact object.
        self.db_artifacts_id_index = {}
        if artifacts is None:
            self._db_artifacts = []
        else:
            self._db_artifacts = artifacts
            for v in self._db_artifacts:
                self.db_artifacts_id_index[v.db_id] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmArtifacts.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this collection and its artifacts.

        NOTE(review): this class defines no db_id attribute, so the
        new_ids branch would raise AttributeError if ever taken --
        presumably callers never request new ids for this wrapper; confirm.
        """
        cp = DBOpmArtifacts()
        if self._db_artifacts is None:
            cp._db_artifacts = []
        else:
            cp._db_artifacts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_artifacts]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_artifacts_id_index = dict((v.db_id, v) for v in cp._db_artifacts)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, honoring per-field
        override callables registered for this class in trans_dict."""
        if new_obj is None:
            new_obj = DBOpmArtifacts()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'artifacts' in class_dict:
            res = class_dict['artifacts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_artifact(obj)
        elif hasattr(old_obj, 'db_artifacts') and old_obj.db_artifacts is not None:
            for obj in old_obj.db_artifacts:
                new_obj.db_add_artifact(DBOpmArtifact.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_artifacts') and hasattr(new_obj, 'db_deleted_artifacts'):
            for obj in old_obj.db_deleted_artifacts:
                n_obj = DBOpmArtifact.update_version(obj, trans_dict)
                new_obj.db_deleted_artifacts.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object
        and all artifact children, self last; with orphan=True, children
        are removed from this object as they are reported."""
        children = []
        to_del = []
        for child in self.db_artifacts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_artifact(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return artifacts pending deletion; clear the list when remove."""
        children = []
        children.extend(self.db_deleted_artifacts)
        if remove:
            self.db_deleted_artifacts = []
        return children
    def has_changes(self):
        """True if this object or any artifact child is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_artifacts:
            if child.has_changes():
                return True
        return False
    def __get_db_artifacts(self):
        return self._db_artifacts
    def __set_db_artifacts(self, artifacts):
        self._db_artifacts = artifacts
        self.is_dirty = True
    db_artifacts = property(__get_db_artifacts, __set_db_artifacts)
    def db_get_artifacts(self):
        return self._db_artifacts
    def db_add_artifact(self, artifact):
        self.is_dirty = True
        self._db_artifacts.append(artifact)
        self.db_artifacts_id_index[artifact.db_id] = artifact
    def db_change_artifact(self, artifact):
        """Replace the child with the same db_id in place, or append."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_artifacts)):
            if self._db_artifacts[i].db_id == artifact.db_id:
                self._db_artifacts[i] = artifact
                found = True
                break
        if not found:
            self._db_artifacts.append(artifact)
        self.db_artifacts_id_index[artifact.db_id] = artifact
    def db_delete_artifact(self, artifact):
        """Remove the child with the same db_id; non-new children are
        parked for persistence-side deletion."""
        self.is_dirty = True
        for i in xrange(len(self._db_artifacts)):
            if self._db_artifacts[i].db_id == artifact.db_id:
                if not self._db_artifacts[i].is_new:
                    self.db_deleted_artifacts.append(self._db_artifacts[i])
                del self._db_artifacts[i]
                break
        del self.db_artifacts_id_index[artifact.db_id]
    def db_get_artifact(self, key):
        for i in xrange(len(self._db_artifacts)):
            if self._db_artifacts[i].db_id == key:
                return self._db_artifacts[i]
        return None
    def db_get_artifact_by_id(self, key):
        return self.db_artifacts_id_index[key]
    def db_has_artifact_with_id(self, key):
        return key in self.db_artifacts_id_index
class DBPEParameter(object):
    """Single parameter entry (vtType 'pe_parameter') holding a position,
    interpolator, value and dimension -- presumably one parameter of a
    parameter exploration; confirm against callers.

    Generated-style domain object with ``is_dirty`` / ``is_new`` change
    tracking.
    """
    vtType = 'pe_parameter'
    def __init__(self, id=None, pos=None, interpolator=None, value=None, dimension=None):
        self._db_id = id
        self._db_pos = pos
        self._db_interpolator = interpolator
        self._db_value = value
        self._db_dimension = dimension
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBPEParameter.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this parameter; with new_ids, allocate a fresh id via
        id_scope and record the remapping in id_remap."""
        cp = DBPEParameter(id=self._db_id,
                           pos=self._db_pos,
                           interpolator=self._db_interpolator,
                           value=self._db_value,
                           dimension=self._db_dimension)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, honoring per-field
        override callables registered for this class in trans_dict."""
        if new_obj is None:
            new_obj = DBPEParameter()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'pos' in class_dict:
            res = class_dict['pos'](old_obj, trans_dict)
            new_obj.db_pos = res
        elif hasattr(old_obj, 'db_pos') and old_obj.db_pos is not None:
            new_obj.db_pos = old_obj.db_pos
        if 'interpolator' in class_dict:
            res = class_dict['interpolator'](old_obj, trans_dict)
            new_obj.db_interpolator = res
        elif hasattr(old_obj, 'db_interpolator') and old_obj.db_interpolator is not None:
            new_obj.db_interpolator = old_obj.db_interpolator
        if 'value' in class_dict:
            res = class_dict['value'](old_obj, trans_dict)
            new_obj.db_value = res
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            new_obj.db_value = old_obj.db_value
        if 'dimension' in class_dict:
            res = class_dict['dimension'](old_obj, trans_dict)
            new_obj.db_dimension = res
        elif hasattr(old_obj, 'db_dimension') and old_obj.db_dimension is not None:
            new_obj.db_dimension = old_obj.db_dimension
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """This object has no children; report only itself."""
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        """No child collections; always returns an empty list."""
        children = []
        return children
    def has_changes(self):
        """True if this object is dirty (no children to consider)."""
        if self.is_dirty:
            return True
        return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_pos(self):
return self._db_pos
def __set_db_pos(self, pos):
self._db_pos = pos
self.is_dirty = True
db_pos = property(__get_db_pos, __set_db_pos)
def db_add_pos(self, pos):
self._db_pos = pos
def db_change_pos(self, pos):
self._db_pos = pos
def db_delete_pos(self, pos):
self._db_pos = None
def __get_db_interpolator(self):
return self._db_interpolator
def __set_db_interpolator(self, interpolator):
self._db_interpolator = interpolator
self.is_dirty = True
db_interpolator = property(__get_db_interpolator, __set_db_interpolator)
def db_add_interpolator(self, interpolator):
self._db_interpolator = interpolator
def db_change_interpolator(self, interpolator):
self._db_interpolator = interpolator
def db_delete_interpolator(self, interpolator):
self._db_interpolator = None
def __get_db_value(self):
return self._db_value
def __set_db_value(self, value):
self._db_value = value
self.is_dirty = True
db_value = property(__get_db_value, __set_db_value)
def db_add_value(self, value):
self._db_value = value
def db_change_value(self, value):
self._db_value = value
def db_delete_value(self, value):
self._db_value = None
def __get_db_dimension(self):
return self._db_dimension
def __set_db_dimension(self, dimension):
self._db_dimension = dimension
self.is_dirty = True
db_dimension = property(__get_db_dimension, __set_db_dimension)
def db_add_dimension(self, dimension):
self._db_dimension = dimension
def db_change_dimension(self, dimension):
self._db_dimension = dimension
def db_delete_dimension(self, dimension):
self._db_dimension = None
def getPrimaryKey(self):
return self._db_id
class DBWorkflowExec(object):
    """Log record of one workflow execution.

    Holds the executed items (module/group/loop execs), bookkeeping
    metadata (user, ip, session, timestamps, parent workflow/version,
    completion status) and free-form annotations.  Auto-generated
    persistence object: every mutation sets is_dirty so the DB layer
    knows what needs saving; removed children are parked in the
    db_deleted_* lists until persisted.
    """
    vtType = 'workflow_exec'
    def __init__(self, item_execs=None, id=None, user=None, ip=None, session=None, vt_version=None, ts_start=None, ts_end=None, parent_id=None, parent_type=None, parent_version=None, completed=None, name=None, annotations=None):
        # children removed via db_delete_* wait here until persisted
        self.db_deleted_item_execs = []
        # db_id -> object index for O(1) lookup
        self.db_item_execs_id_index = {}
        if item_execs is None:
            self._db_item_execs = []
        else:
            self._db_item_execs = item_execs
        for v in self._db_item_execs:
            self.db_item_execs_id_index[v.db_id] = v
        self._db_id = id
        self._db_user = user
        self._db_ip = ip
        self._db_session = session
        self._db_vt_version = vt_version
        self._db_ts_start = ts_start
        self._db_ts_end = ts_end
        self._db_parent_id = parent_id
        self._db_parent_type = parent_type
        self._db_parent_version = parent_version
        self._db_completed = completed
        self._db_name = name
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
        for v in self._db_annotations:
            self.db_annotations_id_index[v.db_id] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBWorkflowExec.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this execution log, including item execs and
        annotations.  With new_ids, fresh ids are drawn from id_scope
        and the old->new mapping is recorded in id_remap.
        """
        cp = DBWorkflowExec(id=self._db_id,
                            user=self._db_user,
                            ip=self._db_ip,
                            session=self._db_session,
                            vt_version=self._db_vt_version,
                            ts_start=self._db_ts_start,
                            ts_end=self._db_ts_end,
                            parent_id=self._db_parent_id,
                            parent_type=self._db_parent_type,
                            parent_version=self._db_parent_version,
                            completed=self._db_completed,
                            name=self._db_name)
        if self._db_item_execs is None:
            cp._db_item_execs = []
        else:
            cp._db_item_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_item_execs]
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_item_execs_id_index = dict((v.db_id, v) for v in cp._db_item_execs)
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema version.

        trans_dict maps class names to {field_name: translation_fn};
        fields without a registered translation are copied when present
        and non-None.  Item execs are dispatched by vtType to the
        matching exec class (module/group/loop).
        """
        if new_obj is None:
            new_obj = DBWorkflowExec()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'item_execs' in class_dict:
            res = class_dict['item_execs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_item_exec(obj)
        elif hasattr(old_obj, 'db_item_execs') and old_obj.db_item_execs is not None:
            for obj in old_obj.db_item_execs:
                if obj.vtType == 'module_exec':
                    new_obj.db_add_item_exec(DBModuleExec.update_version(obj, trans_dict))
                elif obj.vtType == 'group_exec':
                    new_obj.db_add_item_exec(DBGroupExec.update_version(obj, trans_dict))
                elif obj.vtType == 'loop_exec':
                    new_obj.db_add_item_exec(DBLoopExec.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_item_execs') and hasattr(new_obj, 'db_deleted_item_execs'):
            for obj in old_obj.db_deleted_item_execs:
                if obj.vtType == 'module_exec':
                    n_obj = DBModuleExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
                elif obj.vtType == 'group_exec':
                    n_obj = DBGroupExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
                elif obj.vtType == 'loop_exec':
                    n_obj = DBLoopExec.update_version(obj, trans_dict)
                    new_obj.db_deleted_item_execs.append(n_obj)
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'user' in class_dict:
            res = class_dict['user'](old_obj, trans_dict)
            new_obj.db_user = res
        elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None:
            new_obj.db_user = old_obj.db_user
        if 'ip' in class_dict:
            res = class_dict['ip'](old_obj, trans_dict)
            new_obj.db_ip = res
        elif hasattr(old_obj, 'db_ip') and old_obj.db_ip is not None:
            new_obj.db_ip = old_obj.db_ip
        if 'session' in class_dict:
            res = class_dict['session'](old_obj, trans_dict)
            new_obj.db_session = res
        elif hasattr(old_obj, 'db_session') and old_obj.db_session is not None:
            new_obj.db_session = old_obj.db_session
        if 'vt_version' in class_dict:
            res = class_dict['vt_version'](old_obj, trans_dict)
            new_obj.db_vt_version = res
        elif hasattr(old_obj, 'db_vt_version') and old_obj.db_vt_version is not None:
            new_obj.db_vt_version = old_obj.db_vt_version
        if 'ts_start' in class_dict:
            res = class_dict['ts_start'](old_obj, trans_dict)
            new_obj.db_ts_start = res
        elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
            new_obj.db_ts_start = old_obj.db_ts_start
        if 'ts_end' in class_dict:
            res = class_dict['ts_end'](old_obj, trans_dict)
            new_obj.db_ts_end = res
        elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
            new_obj.db_ts_end = old_obj.db_ts_end
        if 'parent_id' in class_dict:
            res = class_dict['parent_id'](old_obj, trans_dict)
            new_obj.db_parent_id = res
        elif hasattr(old_obj, 'db_parent_id') and old_obj.db_parent_id is not None:
            new_obj.db_parent_id = old_obj.db_parent_id
        if 'parent_type' in class_dict:
            res = class_dict['parent_type'](old_obj, trans_dict)
            new_obj.db_parent_type = res
        elif hasattr(old_obj, 'db_parent_type') and old_obj.db_parent_type is not None:
            new_obj.db_parent_type = old_obj.db_parent_type
        if 'parent_version' in class_dict:
            res = class_dict['parent_version'](old_obj, trans_dict)
            new_obj.db_parent_version = res
        elif hasattr(old_obj, 'db_parent_version') and old_obj.db_parent_version is not None:
            new_obj.db_parent_version = old_obj.db_parent_version
        if 'completed' in class_dict:
            res = class_dict['completed'](old_obj, trans_dict)
            new_obj.db_completed = res
        elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
            new_obj.db_completed = old_obj.db_completed
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this
        object and all descendants; with orphan=True the children are
        detached from this object as they are listed.
        """
        children = []
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        to_del = []
        for child in self.db_item_execs:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_item_exec(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return (and optionally clear) all children parked for deletion."""
        children = []
        children.extend(self.db_deleted_annotations)
        children.extend(self.db_deleted_item_execs)
        if remove:
            self.db_deleted_annotations = []
            self.db_deleted_item_execs = []
        return children
    def has_changes(self):
        """True when this object or any child has been modified."""
        if self.is_dirty:
            return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        for child in self._db_item_execs:
            if child.has_changes():
                return True
        return False
    def __get_db_item_execs(self):
        return self._db_item_execs
    def __set_db_item_execs(self, item_execs):
        self._db_item_execs = item_execs
        self.is_dirty = True
    db_item_execs = property(__get_db_item_execs, __set_db_item_execs)
    def db_get_item_execs(self):
        return self._db_item_execs
    def db_add_item_exec(self, item_exec):
        self.is_dirty = True
        self._db_item_execs.append(item_exec)
        self.db_item_execs_id_index[item_exec.db_id] = item_exec
    def db_change_item_exec(self, item_exec):
        # replaces the item exec with the same id, or appends when absent
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == item_exec.db_id:
                self._db_item_execs[i] = item_exec
                found = True
                break
        if not found:
            self._db_item_execs.append(item_exec)
        self.db_item_execs_id_index[item_exec.db_id] = item_exec
    def db_delete_item_exec(self, item_exec):
        # NOTE(review): the final del runs even when no list entry matched,
        # so an unknown id raises KeyError — appears intentional in this
        # generated code; verify against the generator before changing
        self.is_dirty = True
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == item_exec.db_id:
                if not self._db_item_execs[i].is_new:
                    self.db_deleted_item_execs.append(self._db_item_execs[i])
                del self._db_item_execs[i]
                break
        del self.db_item_execs_id_index[item_exec.db_id]
    def db_get_item_exec(self, key):
        # linear scan by id; returns None when absent
        for i in xrange(len(self._db_item_execs)):
            if self._db_item_execs[i].db_id == key:
                return self._db_item_execs[i]
        return None
    def db_get_item_exec_by_id(self, key):
        return self.db_item_execs_id_index[key]
    def db_has_item_exec_with_id(self, key):
        return key in self.db_item_execs_id_index
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_user(self):
        return self._db_user
    def __set_db_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(__get_db_user, __set_db_user)
    def db_add_user(self, user):
        self._db_user = user
    def db_change_user(self, user):
        self._db_user = user
    def db_delete_user(self, user):
        self._db_user = None
    def __get_db_ip(self):
        return self._db_ip
    def __set_db_ip(self, ip):
        self._db_ip = ip
        self.is_dirty = True
    db_ip = property(__get_db_ip, __set_db_ip)
    def db_add_ip(self, ip):
        self._db_ip = ip
    def db_change_ip(self, ip):
        self._db_ip = ip
    def db_delete_ip(self, ip):
        self._db_ip = None
    def __get_db_session(self):
        return self._db_session
    def __set_db_session(self, session):
        self._db_session = session
        self.is_dirty = True
    db_session = property(__get_db_session, __set_db_session)
    def db_add_session(self, session):
        self._db_session = session
    def db_change_session(self, session):
        self._db_session = session
    def db_delete_session(self, session):
        self._db_session = None
    def __get_db_vt_version(self):
        return self._db_vt_version
    def __set_db_vt_version(self, vt_version):
        self._db_vt_version = vt_version
        self.is_dirty = True
    db_vt_version = property(__get_db_vt_version, __set_db_vt_version)
    def db_add_vt_version(self, vt_version):
        self._db_vt_version = vt_version
    def db_change_vt_version(self, vt_version):
        self._db_vt_version = vt_version
    def db_delete_vt_version(self, vt_version):
        self._db_vt_version = None
    def __get_db_ts_start(self):
        return self._db_ts_start
    def __set_db_ts_start(self, ts_start):
        self._db_ts_start = ts_start
        self.is_dirty = True
    db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
    def db_add_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_change_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_delete_ts_start(self, ts_start):
        self._db_ts_start = None
    def __get_db_ts_end(self):
        return self._db_ts_end
    def __set_db_ts_end(self, ts_end):
        self._db_ts_end = ts_end
        self.is_dirty = True
    db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
    def db_add_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_change_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_delete_ts_end(self, ts_end):
        self._db_ts_end = None
    def __get_db_parent_id(self):
        return self._db_parent_id
    def __set_db_parent_id(self, parent_id):
        self._db_parent_id = parent_id
        self.is_dirty = True
    db_parent_id = property(__get_db_parent_id, __set_db_parent_id)
    def db_add_parent_id(self, parent_id):
        self._db_parent_id = parent_id
    def db_change_parent_id(self, parent_id):
        self._db_parent_id = parent_id
    def db_delete_parent_id(self, parent_id):
        self._db_parent_id = None
    def __get_db_parent_type(self):
        return self._db_parent_type
    def __set_db_parent_type(self, parent_type):
        self._db_parent_type = parent_type
        self.is_dirty = True
    db_parent_type = property(__get_db_parent_type, __set_db_parent_type)
    def db_add_parent_type(self, parent_type):
        self._db_parent_type = parent_type
    def db_change_parent_type(self, parent_type):
        self._db_parent_type = parent_type
    def db_delete_parent_type(self, parent_type):
        self._db_parent_type = None
    def __get_db_parent_version(self):
        return self._db_parent_version
    def __set_db_parent_version(self, parent_version):
        self._db_parent_version = parent_version
        self.is_dirty = True
    db_parent_version = property(__get_db_parent_version, __set_db_parent_version)
    def db_add_parent_version(self, parent_version):
        self._db_parent_version = parent_version
    def db_change_parent_version(self, parent_version):
        self._db_parent_version = parent_version
    def db_delete_parent_version(self, parent_version):
        self._db_parent_version = None
    def __get_db_completed(self):
        return self._db_completed
    def __set_db_completed(self, completed):
        self._db_completed = completed
        self.is_dirty = True
    db_completed = property(__get_db_completed, __set_db_completed)
    def db_add_completed(self, completed):
        self._db_completed = completed
    def db_change_completed(self, completed):
        self._db_completed = completed
    def db_delete_completed(self, completed):
        self._db_completed = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_change_annotation(self, annotation):
        # replaces the annotation with the same id, or appends when absent
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_delete_annotation(self, annotation):
        # NOTE(review): like db_delete_item_exec, an unknown id raises
        # KeyError on the final del
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
    def db_get_annotation(self, key):
        # linear scan by id; returns None when absent
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def getPrimaryKey(self):
        """The primary key of this object is its id field."""
        return self._db_id
class DBLocation(object):
    """A 2-D position (x, y) for a workflow item.

    Auto-generated persistence object: assignments through the db_*
    properties flag the instance dirty so the DB layer can save it.
    """
    vtType = 'location'
    def __init__(self, id=None, x=None, y=None):
        self._db_id = id
        self._db_x = x
        self._db_y = y
        self.is_new = True
        self.is_dirty = True
    def __copy__(self):
        return self.do_copy()
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Duplicate this location; with new_ids, draw a fresh id from
        id_scope and record the old->new mapping in id_remap."""
        dup = DBLocation(id=self._db_id, x=self._db_x, y=self._db_y)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                mapped_type = id_scope.remap[self.vtType]
            else:
                mapped_type = self.vtType
            id_remap[(mapped_type, self.db_id)] = fresh
            dup.db_id = fresh
        else:
            # a plain copy inherits the original's modification flags
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema, field by field,
        applying any per-field hooks registered in trans_dict."""
        if new_obj is None:
            new_obj = DBLocation()
        hooks = {}
        if new_obj.__class__.__name__ in trans_dict:
            hooks = trans_dict[new_obj.__class__.__name__]
        for field in ('id', 'x', 'y'):
            attr = 'db_' + field
            if field in hooks:
                setattr(new_obj, attr, hooks[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """A location is a leaf: report only itself under its parent."""
        parent_type, parent_id = parent
        return [(self, parent_type, parent_id)]
    def db_deleted_children(self, remove=False):
        """Leaf object: there are never deleted children to report."""
        return []
    def has_changes(self):
        """True when this object was modified since it was loaded/saved."""
        return bool(self.is_dirty)
    def _get_id(self):
        """Property getter for the id field."""
        return self._db_id
    def _set_id(self, id):
        """Property setter for the id field; flags the object dirty."""
        self.is_dirty = True
        self._db_id = id
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        # raw assignment used by the serialization layer (no dirty flag)
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        # the argument is ignored; the field is simply cleared
        self._db_id = None
    def _get_x(self):
        """Property getter for the x coordinate."""
        return self._db_x
    def _set_x(self, x):
        """Property setter for the x coordinate; flags the object dirty."""
        self.is_dirty = True
        self._db_x = x
    db_x = property(_get_x, _set_x)
    def db_add_x(self, x):
        self._db_x = x
    def db_change_x(self, x):
        self._db_x = x
    def db_delete_x(self, x):
        self._db_x = None
    def _get_y(self):
        """Property getter for the y coordinate."""
        return self._db_y
    def _set_y(self, y):
        """Property setter for the y coordinate; flags the object dirty."""
        self.is_dirty = True
        self._db_y = y
    db_y = property(_get_y, _set_y)
    def db_add_y(self, y):
        self._db_y = y
    def db_change_y(self, y):
        self._db_y = y
    def db_delete_y(self, y):
        self._db_y = None
    def getPrimaryKey(self):
        """The primary key of this object is its id field."""
        return self.db_id
class DBFunction(object):
    """A function (port name plus ordered parameter list) set on a module.

    Auto-generated persistence object.  Child parameters are tracked
    both in an ordered list and in a db_id -> object index; removed
    children are parked in db_deleted_parameters until persisted.
    """
    vtType = 'function'
    def __init__(self, id=None, pos=None, name=None, parameters=None):
        self._db_id = id
        self._db_pos = pos
        self._db_name = name
        self.db_deleted_parameters = []
        self.db_parameters_id_index = {}
        if parameters is None:
            self._db_parameters = []
        else:
            self._db_parameters = parameters
        for child in self._db_parameters:
            self.db_parameters_id_index[child.db_id] = child
        self.is_new = True
        self.is_dirty = True
    def __copy__(self):
        return self.do_copy()
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this function and its parameters; with new_ids,
        allocate fresh ids from id_scope and record them in id_remap."""
        dup = DBFunction(id=self._db_id,
                         pos=self._db_pos,
                         name=self._db_name)
        if self._db_parameters is None:
            dup._db_parameters = []
        else:
            dup._db_parameters = [child.do_copy(new_ids, id_scope, id_remap)
                                  for child in self._db_parameters]
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                mapped_type = id_scope.remap[self.vtType]
            else:
                mapped_type = self.vtType
            id_remap[(mapped_type, self.db_id)] = fresh
            dup.db_id = fresh
        # rebuild the id index over the copied children
        dup.db_parameters_id_index = dict((child.db_id, child)
                                          for child in dup._db_parameters)
        if not new_ids:
            # a plain copy inherits the original's modification flags
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema version, applying any
        per-field hooks registered in trans_dict."""
        if new_obj is None:
            new_obj = DBFunction()
        hooks = {}
        if new_obj.__class__.__name__ in trans_dict:
            hooks = trans_dict[new_obj.__class__.__name__]
        for field in ('id', 'pos', 'name'):
            attr = 'db_' + field
            if field in hooks:
                setattr(new_obj, attr, hooks[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        if 'parameters' in hooks:
            for child in hooks['parameters'](old_obj, trans_dict):
                new_obj.db_add_parameter(child)
        elif getattr(old_obj, 'db_parameters', None) is not None:
            for child in old_obj.db_parameters:
                new_obj.db_add_parameter(DBParameter.update_version(child, trans_dict))
        if hasattr(old_obj, 'db_deleted_parameters') and hasattr(new_obj, 'db_deleted_parameters'):
            for child in old_obj.db_deleted_parameters:
                new_obj.db_deleted_parameters.append(
                    DBParameter.update_version(child, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this
        function and its parameters; with orphan=True the parameters
        are detached from this function as they are listed."""
        result = []
        doomed = []
        for child in self.db_parameters:
            result.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                doomed.append(child)
        for child in doomed:
            self.db_delete_parameter(child)
        result.append((self, parent[0], parent[1]))
        return result
    def db_deleted_children(self, remove=False):
        """Return (and optionally clear) the parked deleted parameters."""
        result = []
        result.extend(self.db_deleted_parameters)
        if remove:
            self.db_deleted_parameters = []
        return result
    def has_changes(self):
        """True when this function or any of its parameters is dirty."""
        if self.is_dirty:
            return True
        return any(child.has_changes() for child in self._db_parameters)
    def _get_id(self):
        """Property getter for the id field."""
        return self._db_id
    def _set_id(self, id):
        """Property setter for the id field; flags the object dirty."""
        self.is_dirty = True
        self._db_id = id
    db_id = property(_get_id, _set_id)
    def db_add_id(self, id):
        # raw assignment used by the serialization layer (no dirty flag)
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        # the argument is ignored; the field is simply cleared
        self._db_id = None
    def _get_pos(self):
        """Property getter for the pos field."""
        return self._db_pos
    def _set_pos(self, pos):
        """Property setter for the pos field; flags the object dirty."""
        self.is_dirty = True
        self._db_pos = pos
    db_pos = property(_get_pos, _set_pos)
    def db_add_pos(self, pos):
        self._db_pos = pos
    def db_change_pos(self, pos):
        self._db_pos = pos
    def db_delete_pos(self, pos):
        self._db_pos = None
    def _get_name(self):
        """Property getter for the name field."""
        return self._db_name
    def _set_name(self, name):
        """Property setter for the name field; flags the object dirty."""
        self.is_dirty = True
        self._db_name = name
    db_name = property(_get_name, _set_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def _get_parameters(self):
        """Property getter for the parameter list."""
        return self._db_parameters
    def _set_parameters(self, parameters):
        """Property setter for the parameter list; flags the object dirty."""
        self.is_dirty = True
        self._db_parameters = parameters
    db_parameters = property(_get_parameters, _set_parameters)
    def db_get_parameters(self):
        return self._db_parameters
    def db_add_parameter(self, parameter):
        self.is_dirty = True
        self._db_parameters.append(parameter)
        self.db_parameters_id_index[parameter.db_id] = parameter
    def db_change_parameter(self, parameter):
        """Replace the parameter with the same id (append when absent)."""
        self.is_dirty = True
        replaced = False
        for idx, existing in enumerate(self._db_parameters):
            if existing.db_id == parameter.db_id:
                self._db_parameters[idx] = parameter
                replaced = True
                break
        if not replaced:
            self._db_parameters.append(parameter)
        self.db_parameters_id_index[parameter.db_id] = parameter
    def db_delete_parameter(self, parameter):
        """Remove the parameter with the same id; persisted ones are
        parked in db_deleted_parameters.  Raises KeyError when the id
        was never indexed (matches the generated behavior)."""
        self.is_dirty = True
        for idx, existing in enumerate(self._db_parameters):
            if existing.db_id == parameter.db_id:
                if not existing.is_new:
                    self.db_deleted_parameters.append(existing)
                del self._db_parameters[idx]
                break
        del self.db_parameters_id_index[parameter.db_id]
    def db_get_parameter(self, key):
        """Linear scan for the parameter with db_id == key (None if absent)."""
        for existing in self._db_parameters:
            if existing.db_id == key:
                return existing
        return None
    def db_get_parameter_by_id(self, key):
        return self.db_parameters_id_index[key]
    def db_has_parameter_with_id(self, key):
        return key in self.db_parameters_id_index
    def getPrimaryKey(self):
        """The primary key of this object is its id field."""
        return self.db_id
class DBActionAnnotation(object):
    """A key/value annotation attached to a version-tree action.

    Auto-generated persistence object: assignments through the db_*
    properties flag the instance dirty so the DB layer can save it.
    """
    vtType = 'actionAnnotation'
    def __init__(self, id=None, key=None, value=None, action_id=None, date=None, user=None):
        self._db_id = id
        self._db_key = key
        self._db_value = value
        self._db_action_id = action_id
        self._db_date = date
        self._db_user = user
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBActionAnnotation.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this annotation.

        When new_ids is True, a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id mapping is recorded in id_remap; the
        action_id foreign key is also remapped when id_remap contains a
        mapping for the referenced action.

        Bug fix: the action_id remap lookup previously ran even when
        id_remap was None (the default), so a plain copy — e.g.
        copy.copy(obj) via __copy__ — raised TypeError on
        `(...) in None`.  The lookup is now guarded by
        `id_remap is not None`.
        """
        cp = DBActionAnnotation(id=self._db_id,
                                key=self._db_key,
                                value=self._db_value,
                                action_id=self._db_action_id,
                                date=self._db_date,
                                user=self._db_user)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # remap the foreign key into the action table when a mapping exists
        # (guard id_remap: it is None for plain copies such as copy.copy())
        if id_remap is not None and hasattr(self, 'db_action_id') and ('action', self._db_action_id) in id_remap:
            cp._db_action_id = id_remap[('action', self._db_action_id)]
        # recreate indices and set flags
        if not new_ids:
            # a plain copy inherits the original's modification flags
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema version.

        trans_dict maps class names to {field_name: translation_fn};
        fields without a registered translation are copied over
        unchanged when present and non-None on old_obj.
        """
        if new_obj is None:
            new_obj = DBActionAnnotation()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'key' in class_dict:
            res = class_dict['key'](old_obj, trans_dict)
            new_obj.db_key = res
        elif hasattr(old_obj, 'db_key') and old_obj.db_key is not None:
            new_obj.db_key = old_obj.db_key
        if 'value' in class_dict:
            res = class_dict['value'](old_obj, trans_dict)
            new_obj.db_value = res
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            new_obj.db_value = old_obj.db_value
        if 'action_id' in class_dict:
            res = class_dict['action_id'](old_obj, trans_dict)
            new_obj.db_action_id = res
        elif hasattr(old_obj, 'db_action_id') and old_obj.db_action_id is not None:
            new_obj.db_action_id = old_obj.db_action_id
        if 'date' in class_dict:
            res = class_dict['date'](old_obj, trans_dict)
            new_obj.db_date = res
        elif hasattr(old_obj, 'db_date') and old_obj.db_date is not None:
            new_obj.db_date = old_obj.db_date
        if 'user' in class_dict:
            res = class_dict['user'](old_obj, trans_dict)
            new_obj.db_user = res
        elif hasattr(old_obj, 'db_user') and old_obj.db_user is not None:
            new_obj.db_user = old_obj.db_user
        # the translated object keeps the original's modification state
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """An annotation is a leaf: report only itself under its parent."""
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        """Leaf object: there are never deleted children to report."""
        children = []
        return children
    def has_changes(self):
        """True when this object was modified since it was loaded/saved."""
        if self.is_dirty:
            return True
        return False
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        # raw assignment used by the serialization layer (no dirty flag)
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        # the argument is ignored; the field is simply cleared
        self._db_id = None
    def __get_db_key(self):
        return self._db_key
    def __set_db_key(self, key):
        self._db_key = key
        self.is_dirty = True
    db_key = property(__get_db_key, __set_db_key)
    def db_add_key(self, key):
        self._db_key = key
    def db_change_key(self, key):
        self._db_key = key
    def db_delete_key(self, key):
        self._db_key = None
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        self._db_value = None
    def __get_db_action_id(self):
        return self._db_action_id
    def __set_db_action_id(self, action_id):
        self._db_action_id = action_id
        self.is_dirty = True
    db_action_id = property(__get_db_action_id, __set_db_action_id)
    def db_add_action_id(self, action_id):
        self._db_action_id = action_id
    def db_change_action_id(self, action_id):
        self._db_action_id = action_id
    def db_delete_action_id(self, action_id):
        self._db_action_id = None
    def __get_db_date(self):
        return self._db_date
    def __set_db_date(self, date):
        self._db_date = date
        self.is_dirty = True
    db_date = property(__get_db_date, __set_db_date)
    def db_add_date(self, date):
        self._db_date = date
    def db_change_date(self, date):
        self._db_date = date
    def db_delete_date(self, date):
        self._db_date = None
    def __get_db_user(self):
        return self._db_user
    def __set_db_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(__get_db_user, __set_db_user)
    def db_add_user(self, user):
        self._db_user = user
    def db_change_user(self, user):
        self._db_user = user
    def db_delete_user(self, user):
        self._db_user = None
    def getPrimaryKey(self):
        """The primary key of this object is its id field."""
        return self._db_id
class DBProvActivity(object):
    """Generated domain object for a PROV activity record.

    Scalar fields: id, startTime, endTime plus the vt_* execution-log
    values (vt_id, vt_type, vt_cached, vt_completed, vt_machine_id,
    vt_error).  One child reference: is_part_of.  `is_dirty` is raised by
    every property setter and `is_new` marks objects never yet persisted;
    a child removed from a non-new object is parked in
    db_deleted_is_part_of until db_deleted_children(remove=True) drains it.
    """
    vtType = 'prov_activity'
    def __init__(self, id=None, startTime=None, endTime=None, vt_id=None, vt_type=None, vt_cached=None, vt_completed=None, vt_machine_id=None, vt_error=None, is_part_of=None):
        self._db_id = id
        self._db_startTime = startTime
        self._db_endTime = endTime
        self._db_vt_id = vt_id
        self._db_vt_type = vt_type
        self._db_vt_cached = vt_cached
        self._db_vt_completed = vt_completed
        self._db_vt_machine_id = vt_machine_id
        self._db_vt_error = vt_error
        self.db_deleted_is_part_of = []
        self._db_is_part_of = is_part_of
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy() with default arguments.
        return DBProvActivity.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Clone this object, recursively cloning the is_part_of child.
        # With new_ids=True a fresh id is drawn from id_scope and the
        # (type, old_id) -> new_id pair is recorded in id_remap.
        cp = DBProvActivity(id=self._db_id,
                            startTime=self._db_startTime,
                            endTime=self._db_endTime,
                            vt_id=self._db_vt_id,
                            vt_type=self._db_vt_type,
                            vt_cached=self._db_vt_cached,
                            vt_completed=self._db_vt_completed,
                            vt_machine_id=self._db_vt_machine_id,
                            vt_error=self._db_vt_error)
        if self._db_is_part_of is not None:
            cp._db_is_part_of = self._db_is_part_of.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Schema-migration helper: every field is copied from old_obj,
        # unless a per-field converter callable is registered under this
        # class name in trans_dict.  Child objects are migrated recursively.
        if new_obj is None:
            new_obj = DBProvActivity()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'startTime' in class_dict:
            res = class_dict['startTime'](old_obj, trans_dict)
            new_obj.db_startTime = res
        elif hasattr(old_obj, 'db_startTime') and old_obj.db_startTime is not None:
            new_obj.db_startTime = old_obj.db_startTime
        if 'endTime' in class_dict:
            res = class_dict['endTime'](old_obj, trans_dict)
            new_obj.db_endTime = res
        elif hasattr(old_obj, 'db_endTime') and old_obj.db_endTime is not None:
            new_obj.db_endTime = old_obj.db_endTime
        if 'vt_id' in class_dict:
            res = class_dict['vt_id'](old_obj, trans_dict)
            new_obj.db_vt_id = res
        elif hasattr(old_obj, 'db_vt_id') and old_obj.db_vt_id is not None:
            new_obj.db_vt_id = old_obj.db_vt_id
        if 'vt_type' in class_dict:
            res = class_dict['vt_type'](old_obj, trans_dict)
            new_obj.db_vt_type = res
        elif hasattr(old_obj, 'db_vt_type') and old_obj.db_vt_type is not None:
            new_obj.db_vt_type = old_obj.db_vt_type
        if 'vt_cached' in class_dict:
            res = class_dict['vt_cached'](old_obj, trans_dict)
            new_obj.db_vt_cached = res
        elif hasattr(old_obj, 'db_vt_cached') and old_obj.db_vt_cached is not None:
            new_obj.db_vt_cached = old_obj.db_vt_cached
        if 'vt_completed' in class_dict:
            res = class_dict['vt_completed'](old_obj, trans_dict)
            new_obj.db_vt_completed = res
        elif hasattr(old_obj, 'db_vt_completed') and old_obj.db_vt_completed is not None:
            new_obj.db_vt_completed = old_obj.db_vt_completed
        if 'vt_machine_id' in class_dict:
            res = class_dict['vt_machine_id'](old_obj, trans_dict)
            new_obj.db_vt_machine_id = res
        elif hasattr(old_obj, 'db_vt_machine_id') and old_obj.db_vt_machine_id is not None:
            new_obj.db_vt_machine_id = old_obj.db_vt_machine_id
        if 'vt_error' in class_dict:
            res = class_dict['vt_error'](old_obj, trans_dict)
            new_obj.db_vt_error = res
        elif hasattr(old_obj, 'db_vt_error') and old_obj.db_vt_error is not None:
            new_obj.db_vt_error = old_obj.db_vt_error
        if 'is_part_of' in class_dict:
            res = class_dict['is_part_of'](old_obj, trans_dict)
            new_obj.db_is_part_of = res
        elif hasattr(old_obj, 'db_is_part_of') and old_obj.db_is_part_of is not None:
            obj = old_obj.db_is_part_of
            new_obj.db_add_is_part_of(DBIsPartOf.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_is_part_of') and hasattr(new_obj, 'db_deleted_is_part_of'):
            for obj in old_obj.db_deleted_is_part_of:
                n_obj = DBIsPartOf.update_version(obj, trans_dict)
                new_obj.db_deleted_is_part_of.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Collect (object, parent_type, parent_id) triples for this object
        # and its is_part_of child; orphan=True detaches the child.
        children = []
        if self._db_is_part_of is not None:
            children.extend(self._db_is_part_of.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_is_part_of = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Return (and with remove=True clear) children queued for deletion.
        children = []
        children.extend(self.db_deleted_is_part_of)
        if remove:
            self.db_deleted_is_part_of = []
        return children
    def has_changes(self):
        # Dirty if this object or its is_part_of child is dirty.
        if self.is_dirty:
            return True
        if self._db_is_part_of is not None and self._db_is_part_of.has_changes():
            return True
        return False
    # Generated accessor pattern for every field below: the property
    # setter marks the object dirty; db_add_* / db_change_* write the
    # backing slot without touching is_dirty; db_delete_* ignores its
    # argument and clears the slot.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_startTime(self):
        return self._db_startTime
    def __set_db_startTime(self, startTime):
        self._db_startTime = startTime
        self.is_dirty = True
    db_startTime = property(__get_db_startTime, __set_db_startTime)
    def db_add_startTime(self, startTime):
        self._db_startTime = startTime
    def db_change_startTime(self, startTime):
        self._db_startTime = startTime
    def db_delete_startTime(self, startTime):
        self._db_startTime = None
    def __get_db_endTime(self):
        return self._db_endTime
    def __set_db_endTime(self, endTime):
        self._db_endTime = endTime
        self.is_dirty = True
    db_endTime = property(__get_db_endTime, __set_db_endTime)
    def db_add_endTime(self, endTime):
        self._db_endTime = endTime
    def db_change_endTime(self, endTime):
        self._db_endTime = endTime
    def db_delete_endTime(self, endTime):
        self._db_endTime = None
    def __get_db_vt_id(self):
        return self._db_vt_id
    def __set_db_vt_id(self, vt_id):
        self._db_vt_id = vt_id
        self.is_dirty = True
    db_vt_id = property(__get_db_vt_id, __set_db_vt_id)
    def db_add_vt_id(self, vt_id):
        self._db_vt_id = vt_id
    def db_change_vt_id(self, vt_id):
        self._db_vt_id = vt_id
    def db_delete_vt_id(self, vt_id):
        self._db_vt_id = None
    def __get_db_vt_type(self):
        return self._db_vt_type
    def __set_db_vt_type(self, vt_type):
        self._db_vt_type = vt_type
        self.is_dirty = True
    db_vt_type = property(__get_db_vt_type, __set_db_vt_type)
    def db_add_vt_type(self, vt_type):
        self._db_vt_type = vt_type
    def db_change_vt_type(self, vt_type):
        self._db_vt_type = vt_type
    def db_delete_vt_type(self, vt_type):
        self._db_vt_type = None
    def __get_db_vt_cached(self):
        return self._db_vt_cached
    def __set_db_vt_cached(self, vt_cached):
        self._db_vt_cached = vt_cached
        self.is_dirty = True
    db_vt_cached = property(__get_db_vt_cached, __set_db_vt_cached)
    def db_add_vt_cached(self, vt_cached):
        self._db_vt_cached = vt_cached
    def db_change_vt_cached(self, vt_cached):
        self._db_vt_cached = vt_cached
    def db_delete_vt_cached(self, vt_cached):
        self._db_vt_cached = None
    def __get_db_vt_completed(self):
        return self._db_vt_completed
    def __set_db_vt_completed(self, vt_completed):
        self._db_vt_completed = vt_completed
        self.is_dirty = True
    db_vt_completed = property(__get_db_vt_completed, __set_db_vt_completed)
    def db_add_vt_completed(self, vt_completed):
        self._db_vt_completed = vt_completed
    def db_change_vt_completed(self, vt_completed):
        self._db_vt_completed = vt_completed
    def db_delete_vt_completed(self, vt_completed):
        self._db_vt_completed = None
    def __get_db_vt_machine_id(self):
        return self._db_vt_machine_id
    def __set_db_vt_machine_id(self, vt_machine_id):
        self._db_vt_machine_id = vt_machine_id
        self.is_dirty = True
    db_vt_machine_id = property(__get_db_vt_machine_id, __set_db_vt_machine_id)
    def db_add_vt_machine_id(self, vt_machine_id):
        self._db_vt_machine_id = vt_machine_id
    def db_change_vt_machine_id(self, vt_machine_id):
        self._db_vt_machine_id = vt_machine_id
    def db_delete_vt_machine_id(self, vt_machine_id):
        self._db_vt_machine_id = None
    def __get_db_vt_error(self):
        return self._db_vt_error
    def __set_db_vt_error(self, vt_error):
        self._db_vt_error = vt_error
        self.is_dirty = True
    db_vt_error = property(__get_db_vt_error, __set_db_vt_error)
    def db_add_vt_error(self, vt_error):
        self._db_vt_error = vt_error
    def db_change_vt_error(self, vt_error):
        self._db_vt_error = vt_error
    def db_delete_vt_error(self, vt_error):
        self._db_vt_error = None
    def __get_db_is_part_of(self):
        return self._db_is_part_of
    def __set_db_is_part_of(self, is_part_of):
        self._db_is_part_of = is_part_of
        self.is_dirty = True
    db_is_part_of = property(__get_db_is_part_of, __set_db_is_part_of)
    def db_add_is_part_of(self, is_part_of):
        self._db_is_part_of = is_part_of
    def db_change_is_part_of(self, is_part_of):
        self._db_is_part_of = is_part_of
    def db_delete_is_part_of(self, is_part_of):
        # Child field: already-persisted objects remember the deletion so
        # the store can remove the old row.
        if not self.is_new:
            self.db_deleted_is_part_of.append(self._db_is_part_of)
        self._db_is_part_of = None
    def getPrimaryKey(self):
        # The 'id' field is the primary key for this object.
        return self._db_id
class DBProvUsage(object):
    """Generated domain object linking a PROV activity to the entity it used.

    Two child references (prov_activity, prov_entity) and one scalar
    field (prov_role).  `is_dirty` is raised by every property write and
    `is_new` marks unsaved objects; a child removed from a non-new object
    is parked in the matching db_deleted_* list until
    db_deleted_children(remove=True) drains it.
    """
    vtType = 'prov_usage'

    def __init__(self, prov_activity=None, prov_entity=None, prov_role=None):
        self.db_deleted_prov_activity = []
        self._db_prov_activity = prov_activity
        self.db_deleted_prov_entity = []
        self._db_prov_entity = prov_entity
        self._db_prov_role = prov_role
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        # copy.copy() routes through do_copy() with default arguments.
        return DBProvUsage.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this object, recursively cloning both child references.

        With new_ids=True a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id pair recorded in id_remap; otherwise the
        persistence flags are carried over unchanged.
        """
        clone = DBProvUsage(prov_role=self._db_prov_role)
        activity = self._db_prov_activity
        if activity is not None:
            clone._db_prov_activity = activity.do_copy(new_ids, id_scope, id_remap)
        entity = self._db_prov_entity
        if entity is not None:
            clone._db_prov_entity = entity.do_copy(new_ids, id_scope, id_remap)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                remapped_type = id_scope.remap[self.vtType]
            else:
                remapped_type = self.vtType
            id_remap[(remapped_type, self.db_id)] = fresh
            clone.db_id = fresh
        else:
            clone.is_dirty = self.is_dirty
            clone.is_new = self.is_new
        return clone

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Migrate old_obj to the current schema, honouring any per-field
        converter callables registered under this class name in trans_dict.
        """
        if new_obj is None:
            new_obj = DBProvUsage()
        converters = {}
        if new_obj.__class__.__name__ in trans_dict:
            converters = trans_dict[new_obj.__class__.__name__]
        if 'prov_activity' in converters:
            new_obj.db_prov_activity = converters['prov_activity'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_prov_activity') and old_obj.db_prov_activity is not None:
            new_obj.db_add_prov_activity(
                DBRefProvActivity.update_version(old_obj.db_prov_activity, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_activity') and hasattr(new_obj, 'db_deleted_prov_activity'):
            for dead in old_obj.db_deleted_prov_activity:
                new_obj.db_deleted_prov_activity.append(
                    DBRefProvActivity.update_version(dead, trans_dict))
        if 'prov_entity' in converters:
            new_obj.db_prov_entity = converters['prov_entity'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_prov_entity') and old_obj.db_prov_entity is not None:
            new_obj.db_add_prov_entity(
                DBRefProvEntity.update_version(old_obj.db_prov_entity, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_entity') and hasattr(new_obj, 'db_deleted_prov_entity'):
            for dead in old_obj.db_deleted_prov_entity:
                new_obj.db_deleted_prov_entity.append(
                    DBRefProvEntity.update_version(dead, trans_dict))
        if 'prov_role' in converters:
            new_obj.db_prov_role = converters['prov_role'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_prov_role') and old_obj.db_prov_role is not None:
            new_obj.db_prov_role = old_obj.db_prov_role
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Return (object, parent_type, parent_id) triples for this object
        and its children; orphan=True detaches the children afterwards."""
        result = []
        if self._db_prov_activity is not None:
            result.extend(self._db_prov_activity.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_prov_activity = None
        if self._db_prov_entity is not None:
            result.extend(self._db_prov_entity.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_prov_entity = None
        result.append((self, parent[0], parent[1]))
        return result

    def db_deleted_children(self, remove=False):
        """Return (and with remove=True clear) children queued for deletion."""
        removed = list(self.db_deleted_prov_activity)
        removed.extend(self.db_deleted_prov_entity)
        if remove:
            self.db_deleted_prov_activity = []
            self.db_deleted_prov_entity = []
        return removed

    def has_changes(self):
        """True when this object or either child reference is dirty."""
        if self.is_dirty:
            return True
        for ref in (self._db_prov_activity, self._db_prov_entity):
            if ref is not None and ref.has_changes():
                return True
        return False

    @property
    def db_prov_activity(self):
        return self._db_prov_activity

    @db_prov_activity.setter
    def db_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
        self.is_dirty = True

    def db_add_prov_activity(self, prov_activity):
        # add/change bypass the dirty flag, matching the generated API.
        self._db_prov_activity = prov_activity

    def db_change_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity

    def db_delete_prov_activity(self, prov_activity):
        # Only already-persisted objects remember the deleted child.
        if not self.is_new:
            self.db_deleted_prov_activity.append(self._db_prov_activity)
        self._db_prov_activity = None

    @property
    def db_prov_entity(self):
        return self._db_prov_entity

    @db_prov_entity.setter
    def db_prov_entity(self, prov_entity):
        self._db_prov_entity = prov_entity
        self.is_dirty = True

    def db_add_prov_entity(self, prov_entity):
        self._db_prov_entity = prov_entity

    def db_change_prov_entity(self, prov_entity):
        self._db_prov_entity = prov_entity

    def db_delete_prov_entity(self, prov_entity):
        if not self.is_new:
            self.db_deleted_prov_entity.append(self._db_prov_entity)
        self._db_prov_entity = None

    @property
    def db_prov_role(self):
        return self._db_prov_role

    @db_prov_role.setter
    def db_prov_role(self, prov_role):
        self._db_prov_role = prov_role
        self.is_dirty = True

    def db_add_prov_role(self, prov_role):
        self._db_prov_role = prov_role

    def db_change_prov_role(self, prov_role):
        self._db_prov_role = prov_role

    def db_delete_prov_role(self, prov_role):
        # Scalar field: no deleted-list bookkeeping, just clear it.
        self._db_prov_role = None
class DBOpmArtifactIdEffect(object):
    """Reference holding the id of the OPM artifact playing the "effect"
    role on a dependency edge.

    Carries the usual generated-persistence bookkeeping: the `db_id`
    property setter raises `is_dirty`, and `is_new` marks objects never
    yet persisted.
    """
    vtType = 'opm_artifact_id_effect'
    def __init__(self, id=None):
        self._db_id = id
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy() with default arguments.
        return DBOpmArtifactIdEffect.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this reference.

        With new_ids=True a fresh id is drawn from id_scope and the
        (type, old_id) -> new_id pair is recorded in id_remap.
        Independently, if the referenced 'opm_artifact' id was already
        remapped, the copy follows that mapping.

        Fix: the remap lookup is now guarded with `id_remap is not None`.
        Previously it ran unconditionally, so plain copy.copy() (which
        reaches do_copy with the default id_remap=None) raised
        TypeError on the `in None` membership test.  The redundant
        `hasattr(self, 'db_id')` check (always true, since db_id is a
        class-level property) was dropped.
        """
        cp = DBOpmArtifactIdEffect(id=self._db_id)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # Follow a remap of the referenced 'opm_artifact' id, if any.
        if id_remap is not None and ('opm_artifact', self._db_id) in id_remap:
            cp._db_id = id_remap[('opm_artifact', self._db_id)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Migrate old_obj to the current schema, honouring an 'id'
        converter registered under this class name in trans_dict."""
        if new_obj is None:
            new_obj = DBOpmArtifactIdEffect()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only (object, parent_type, parent_id) triple
        # is this object itself.
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        # Leaf object: nothing is ever queued for deletion.
        return []
    def has_changes(self):
        return bool(self.is_dirty)
    # Generated accessors: the property setter marks the object dirty;
    # db_add_/db_change_ write the slot silently; db_delete_ clears it.
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
class DBOpmGraph(object):
    """Generated container object for one OPM provenance graph.

    Five child references: accounts, processes, artifacts, agents and
    dependencies, each paired with a db_deleted_* list that remembers
    children removed from an already-persisted object.  `is_dirty` is
    raised by every property setter; `is_new` marks unsaved objects.
    """
    vtType = 'opm_graph'
    def __init__(self, accounts=None, processes=None, artifacts=None, agents=None, dependencies=None):
        self.db_deleted_accounts = []
        self._db_accounts = accounts
        self.db_deleted_processes = []
        self._db_processes = processes
        self.db_deleted_artifacts = []
        self._db_artifacts = artifacts
        self.db_deleted_agents = []
        self._db_agents = agents
        self.db_deleted_dependencies = []
        self._db_dependencies = dependencies
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy() with default arguments.
        return DBOpmGraph.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Clone this object, recursively cloning all five child refs.
        # With new_ids=True a fresh id is drawn from id_scope and the
        # (type, old_id) -> new_id pair is recorded in id_remap.
        cp = DBOpmGraph()
        if self._db_accounts is not None:
            cp._db_accounts = self._db_accounts.do_copy(new_ids, id_scope, id_remap)
        if self._db_processes is not None:
            cp._db_processes = self._db_processes.do_copy(new_ids, id_scope, id_remap)
        if self._db_artifacts is not None:
            cp._db_artifacts = self._db_artifacts.do_copy(new_ids, id_scope, id_remap)
        if self._db_agents is not None:
            cp._db_agents = self._db_agents.do_copy(new_ids, id_scope, id_remap)
        if self._db_dependencies is not None:
            cp._db_dependencies = self._db_dependencies.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Schema-migration helper: copies each child from old_obj unless a
        # per-field converter is registered in trans_dict; children are
        # migrated recursively via their own update_version.
        if new_obj is None:
            new_obj = DBOpmGraph()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            new_obj.db_accounts = res
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            obj = old_obj.db_accounts
            new_obj.db_add_accounts(DBOpmAccounts.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccounts.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'processes' in class_dict:
            res = class_dict['processes'](old_obj, trans_dict)
            new_obj.db_processes = res
        elif hasattr(old_obj, 'db_processes') and old_obj.db_processes is not None:
            obj = old_obj.db_processes
            new_obj.db_add_processes(DBOpmProcesses.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_processes') and hasattr(new_obj, 'db_deleted_processes'):
            for obj in old_obj.db_deleted_processes:
                n_obj = DBOpmProcesses.update_version(obj, trans_dict)
                new_obj.db_deleted_processes.append(n_obj)
        if 'artifacts' in class_dict:
            res = class_dict['artifacts'](old_obj, trans_dict)
            new_obj.db_artifacts = res
        elif hasattr(old_obj, 'db_artifacts') and old_obj.db_artifacts is not None:
            obj = old_obj.db_artifacts
            new_obj.db_add_artifacts(DBOpmArtifacts.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_artifacts') and hasattr(new_obj, 'db_deleted_artifacts'):
            for obj in old_obj.db_deleted_artifacts:
                n_obj = DBOpmArtifacts.update_version(obj, trans_dict)
                new_obj.db_deleted_artifacts.append(n_obj)
        if 'agents' in class_dict:
            res = class_dict['agents'](old_obj, trans_dict)
            new_obj.db_agents = res
        elif hasattr(old_obj, 'db_agents') and old_obj.db_agents is not None:
            obj = old_obj.db_agents
            new_obj.db_add_agents(DBOpmAgents.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_agents') and hasattr(new_obj, 'db_deleted_agents'):
            for obj in old_obj.db_deleted_agents:
                n_obj = DBOpmAgents.update_version(obj, trans_dict)
                new_obj.db_deleted_agents.append(n_obj)
        if 'dependencies' in class_dict:
            res = class_dict['dependencies'](old_obj, trans_dict)
            new_obj.db_dependencies = res
        elif hasattr(old_obj, 'db_dependencies') and old_obj.db_dependencies is not None:
            obj = old_obj.db_dependencies
            new_obj.db_add_dependencies(DBOpmDependencies.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_dependencies') and hasattr(new_obj, 'db_deleted_dependencies'):
            for obj in old_obj.db_deleted_dependencies:
                n_obj = DBOpmDependencies.update_version(obj, trans_dict)
                new_obj.db_deleted_dependencies.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Collect (object, parent_type, parent_id) triples from all five
        # child refs, then this object; orphan=True detaches the children.
        children = []
        if self._db_accounts is not None:
            children.extend(self._db_accounts.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_accounts = None
        if self._db_processes is not None:
            children.extend(self._db_processes.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_processes = None
        if self._db_artifacts is not None:
            children.extend(self._db_artifacts.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_artifacts = None
        if self._db_agents is not None:
            children.extend(self._db_agents.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_agents = None
        if self._db_dependencies is not None:
            children.extend(self._db_dependencies.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_dependencies = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Return (and with remove=True clear) all queued deletions.
        children = []
        children.extend(self.db_deleted_accounts)
        children.extend(self.db_deleted_processes)
        children.extend(self.db_deleted_artifacts)
        children.extend(self.db_deleted_agents)
        children.extend(self.db_deleted_dependencies)
        if remove:
            self.db_deleted_accounts = []
            self.db_deleted_processes = []
            self.db_deleted_artifacts = []
            self.db_deleted_agents = []
            self.db_deleted_dependencies = []
        return children
    def has_changes(self):
        # Dirty if this object or any child reference is dirty.
        if self.is_dirty:
            return True
        if self._db_accounts is not None and self._db_accounts.has_changes():
            return True
        if self._db_processes is not None and self._db_processes.has_changes():
            return True
        if self._db_artifacts is not None and self._db_artifacts.has_changes():
            return True
        if self._db_agents is not None and self._db_agents.has_changes():
            return True
        if self._db_dependencies is not None and self._db_dependencies.has_changes():
            return True
        return False
    # Generated accessor pattern for every child below: property setter
    # marks the object dirty; db_add_/db_change_ write silently;
    # db_delete_ remembers the old child (unless is_new) then clears it.
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_add_accounts(self, accounts):
        self._db_accounts = accounts
    def db_change_accounts(self, accounts):
        self._db_accounts = accounts
    def db_delete_accounts(self, accounts):
        if not self.is_new:
            self.db_deleted_accounts.append(self._db_accounts)
        self._db_accounts = None
    def __get_db_processes(self):
        return self._db_processes
    def __set_db_processes(self, processes):
        self._db_processes = processes
        self.is_dirty = True
    db_processes = property(__get_db_processes, __set_db_processes)
    def db_add_processes(self, processes):
        self._db_processes = processes
    def db_change_processes(self, processes):
        self._db_processes = processes
    def db_delete_processes(self, processes):
        if not self.is_new:
            self.db_deleted_processes.append(self._db_processes)
        self._db_processes = None
    def __get_db_artifacts(self):
        return self._db_artifacts
    def __set_db_artifacts(self, artifacts):
        self._db_artifacts = artifacts
        self.is_dirty = True
    db_artifacts = property(__get_db_artifacts, __set_db_artifacts)
    def db_add_artifacts(self, artifacts):
        self._db_artifacts = artifacts
    def db_change_artifacts(self, artifacts):
        self._db_artifacts = artifacts
    def db_delete_artifacts(self, artifacts):
        if not self.is_new:
            self.db_deleted_artifacts.append(self._db_artifacts)
        self._db_artifacts = None
    def __get_db_agents(self):
        return self._db_agents
    def __set_db_agents(self, agents):
        self._db_agents = agents
        self.is_dirty = True
    db_agents = property(__get_db_agents, __set_db_agents)
    def db_add_agents(self, agents):
        self._db_agents = agents
    def db_change_agents(self, agents):
        self._db_agents = agents
    def db_delete_agents(self, agents):
        if not self.is_new:
            self.db_deleted_agents.append(self._db_agents)
        self._db_agents = None
    def __get_db_dependencies(self):
        return self._db_dependencies
    def __set_db_dependencies(self, dependencies):
        self._db_dependencies = dependencies
        self.is_dirty = True
    db_dependencies = property(__get_db_dependencies, __set_db_dependencies)
    def db_add_dependencies(self, dependencies):
        self._db_dependencies = dependencies
    def db_change_dependencies(self, dependencies):
        self._db_dependencies = dependencies
    def db_delete_dependencies(self, dependencies):
        if not self.is_new:
            self.db_deleted_dependencies.append(self._db_dependencies)
        self._db_dependencies = None
class DBIsPartOf(object):
    """Generated link object recording that one provenance element is
    part of another, held as a single scalar reference (prov_ref).

    `is_dirty` is raised by the property setter; `is_new` marks objects
    never yet persisted.
    """
    vtType = 'is_part_of'

    def __init__(self, prov_ref=None):
        self._db_prov_ref = prov_ref
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        # copy.copy() routes through do_copy() with default arguments.
        return DBIsPartOf.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this object.  With new_ids=True a fresh id is drawn from
        id_scope and the (type, old_id) -> new_id pair recorded in
        id_remap; otherwise the persistence flags are carried over."""
        clone = DBIsPartOf(prov_ref=self._db_prov_ref)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                remapped_type = id_scope.remap[self.vtType]
            else:
                remapped_type = self.vtType
            id_remap[(remapped_type, self.db_id)] = fresh
            clone.db_id = fresh
        else:
            clone.is_dirty = self.is_dirty
            clone.is_new = self.is_new
        return clone

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Migrate old_obj to the current schema, honouring a 'prov_ref'
        converter registered under this class name in trans_dict."""
        if new_obj is None:
            new_obj = DBIsPartOf()
        converters = {}
        if new_obj.__class__.__name__ in trans_dict:
            converters = trans_dict[new_obj.__class__.__name__]
        if 'prov_ref' in converters:
            new_obj.db_prov_ref = converters['prov_ref'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_prov_ref') and old_obj.db_prov_ref is not None:
            new_obj.db_prov_ref = old_obj.db_prov_ref
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only triple reported is this object itself.
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # Leaf object: nothing is ever queued for deletion.
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    @property
    def db_prov_ref(self):
        return self._db_prov_ref

    @db_prov_ref.setter
    def db_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
        self.is_dirty = True

    def db_add_prov_ref(self, prov_ref):
        # add/change bypass the dirty flag, matching the generated API.
        self._db_prov_ref = prov_ref

    def db_change_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref

    def db_delete_prov_ref(self, prov_ref):
        # Scalar field: the argument is ignored and the slot cleared.
        self._db_prov_ref = None
class DBOpmWasDerivedFrom(object):
    """Generated OPM dependency edge: artifact (effect) was derived from
    artifact (cause), annotated with a role, account ids and timestamps.

    effect/role/cause are single child references with db_deleted_* lists;
    accounts and opm_times are non-keyed child lists.  `is_dirty` is
    raised by property setters and list add/change calls; `is_new` marks
    objects never yet persisted.

    NOTE: this class continues past the end of this chunk; the trailing
    opm_time accessors are defined below.
    """
    vtType = 'opm_was_derived_from'
    def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None):
        self.db_deleted_effect = []
        self._db_effect = effect
        self.db_deleted_role = []
        self._db_role = role
        self.db_deleted_cause = []
        self._db_cause = cause
        self.db_deleted_accounts = []
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        self.db_deleted_opm_times = []
        if opm_times is None:
            self._db_opm_times = []
        else:
            self._db_opm_times = opm_times
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() delegates to do_copy() with default arguments.
        return DBOpmWasDerivedFrom.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Clone this edge, recursively cloning effect/role/cause and the
        # accounts/opm_times lists.  With new_ids=True a fresh id is drawn
        # from id_scope and the old->new pair recorded in id_remap.
        cp = DBOpmWasDerivedFrom()
        if self._db_effect is not None:
            cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
        if self._db_role is not None:
            cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
        if self._db_cause is not None:
            cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        if self._db_opm_times is None:
            cp._db_opm_times = []
        else:
            cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Schema-migration helper: copies each field from old_obj unless a
        # per-field converter is registered in trans_dict; children are
        # migrated recursively via their classes' update_version.
        if new_obj is None:
            new_obj = DBOpmWasDerivedFrom()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'effect' in class_dict:
            res = class_dict['effect'](old_obj, trans_dict)
            new_obj.db_effect = res
        elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
            obj = old_obj.db_effect
            new_obj.db_add_effect(DBOpmArtifactIdEffect.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
            for obj in old_obj.db_deleted_effect:
                n_obj = DBOpmArtifactIdEffect.update_version(obj, trans_dict)
                new_obj.db_deleted_effect.append(n_obj)
        if 'role' in class_dict:
            res = class_dict['role'](old_obj, trans_dict)
            new_obj.db_role = res
        elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
            obj = old_obj.db_role
            new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
            for obj in old_obj.db_deleted_role:
                n_obj = DBOpmRole.update_version(obj, trans_dict)
                new_obj.db_deleted_role.append(n_obj)
        if 'cause' in class_dict:
            res = class_dict['cause'](old_obj, trans_dict)
            new_obj.db_cause = res
        elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
            obj = old_obj.db_cause
            new_obj.db_add_cause(DBOpmArtifactIdCause.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
            for obj in old_obj.db_deleted_cause:
                n_obj = DBOpmArtifactIdCause.update_version(obj, trans_dict)
                new_obj.db_deleted_cause.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'opm_times' in class_dict:
            res = class_dict['opm_times'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_opm_time(obj)
        elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None:
            for obj in old_obj.db_opm_times:
                new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'):
            for obj in old_obj.db_deleted_opm_times:
                n_obj = DBOpmTime.update_version(obj, trans_dict)
                new_obj.db_deleted_opm_times.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Collect (object, parent_type, parent_id) triples from all
        # children, then this object.
        # NOTE(review): with orphan=True this reaches db_delete_account,
        # which unconditionally raises ('Cannot delete a non-keyed
        # object') whenever the accounts list is non-empty.
        children = []
        if self._db_effect is not None:
            children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_effect = None
        if self._db_role is not None:
            children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_role = None
        if self._db_cause is not None:
            children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_cause = None
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        to_del = []
        for child in self.db_opm_times:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_opm_time(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Return (and with remove=True clear) all queued deletions.
        children = []
        children.extend(self.db_deleted_effect)
        children.extend(self.db_deleted_role)
        children.extend(self.db_deleted_cause)
        children.extend(self.db_deleted_accounts)
        children.extend(self.db_deleted_opm_times)
        if remove:
            self.db_deleted_effect = []
            self.db_deleted_role = []
            self.db_deleted_cause = []
            self.db_deleted_accounts = []
            self.db_deleted_opm_times = []
        return children
    def has_changes(self):
        # Dirty if this object or any child (single ref or list member)
        # is dirty.
        if self.is_dirty:
            return True
        if self._db_effect is not None and self._db_effect.has_changes():
            return True
        if self._db_role is not None and self._db_role.has_changes():
            return True
        if self._db_cause is not None and self._db_cause.has_changes():
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        for child in self._db_opm_times:
            if child.has_changes():
                return True
        return False
    # Single-reference accessors: property setter marks dirty;
    # db_add_/db_change_ write silently; db_delete_ remembers the old
    # child (unless is_new) and clears the slot.
    def __get_db_effect(self):
        return self._db_effect
    def __set_db_effect(self, effect):
        self._db_effect = effect
        self.is_dirty = True
    db_effect = property(__get_db_effect, __set_db_effect)
    def db_add_effect(self, effect):
        self._db_effect = effect
    def db_change_effect(self, effect):
        self._db_effect = effect
    def db_delete_effect(self, effect):
        if not self.is_new:
            self.db_deleted_effect.append(self._db_effect)
        self._db_effect = None
    def __get_db_role(self):
        return self._db_role
    def __set_db_role(self, role):
        self._db_role = role
        self.is_dirty = True
    db_role = property(__get_db_role, __set_db_role)
    def db_add_role(self, role):
        self._db_role = role
    def db_change_role(self, role):
        self._db_role = role
    def db_delete_role(self, role):
        if not self.is_new:
            self.db_deleted_role.append(self._db_role)
        self._db_role = None
    def __get_db_cause(self):
        return self._db_cause
    def __set_db_cause(self, cause):
        self._db_cause = cause
        self.is_dirty = True
    db_cause = property(__get_db_cause, __set_db_cause)
    def db_add_cause(self, cause):
        self._db_cause = cause
    def db_change_cause(self, cause):
        self._db_cause = cause
    def db_delete_cause(self, cause):
        if not self.is_new:
            self.db_deleted_cause.append(self._db_cause)
        self._db_cause = None
    # Non-keyed list accessors for 'accounts'.
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        # NOTE(review): change appends rather than replacing -- non-keyed
        # lists cannot locate the element to replace.
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        # Deletion is unsupported for non-keyed list members; this always
        # raises after flipping is_dirty.
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        # Non-keyed list: lookup by key is never possible.
        return None
    # Non-keyed list accessors for 'opm_times'.
    def __get_db_opm_times(self):
        return self._db_opm_times
    def __set_db_opm_times(self, opm_times):
        self._db_opm_times = opm_times
        self.is_dirty = True
    db_opm_times = property(__get_db_opm_times, __set_db_opm_times)
    def db_get_opm_times(self):
        return self._db_opm_times
    def db_add_opm_time(self, opm_time):
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_change_opm_time(self, opm_time):
        # NOTE(review): appends rather than replacing, same as
        # db_change_account above.
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
def db_delete_opm_time(self, opm_time):
self.is_dirty = True
raise Exception('Cannot delete a non-keyed object')
def db_get_opm_time(self, key):
return None
class DBPluginData(object):
    """Domain object for a 'plugin_data' record: an id plus an opaque
    data payload, following the file's generated accessor pattern
    (dirty/new flags, copy helper, schema-migration hook, properties).
    """
    vtType = 'plugin_data'

    def __init__(self, id=None, data=None):
        self._db_id = id
        self._db_data = data
        # Freshly built objects are unsaved and considered modified.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBPluginData.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this object.

        With *new_ids*, a fresh id is allocated from *id_scope* and the
        (type, old id) -> new id translation is recorded in *id_remap*;
        otherwise the dirty/new flags are carried over unchanged.
        """
        duplicate = DBPluginData(id=self._db_id, data=self._db_data)
        if new_ids:
            fresh_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                remap_type = id_scope.remap[self.vtType]
            else:
                remap_type = self.vtType
            id_remap[(remap_type, self.db_id)] = fresh_id
            duplicate.db_id = fresh_id
        else:
            duplicate.is_dirty = self.is_dirty
            duplicate.is_new = self.is_new
        return duplicate

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* to the current schema.

        Per-field translator callables may be supplied under
        trans_dict[class name]; fields without a translator are copied
        verbatim when present and non-None.
        """
        if new_obj is None:
            new_obj = DBPluginData()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        for field in ('id', 'data'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the child list is just this object plus parent info.
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # Leaf object: nothing is ever queued for deletion.
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    # -- 'id' accessors ---------------------------------------------------
    def _read_id(self):
        return self._db_id
    def _write_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(_read_id, _write_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None

    # -- 'data' accessors -------------------------------------------------
    def _read_data(self):
        return self._db_data
    def _write_data(self, data):
        self._db_data = data
        self.is_dirty = True
    db_data = property(_read_data, _write_data)
    def db_add_data(self, data):
        self._db_data = data
    def db_change_data(self, data):
        self._db_data = data
    def db_delete_data(self, data):
        self._db_data = None

    def getPrimaryKey(self):
        return self._db_id
class DBDelete(object):
    """Domain class for a 'delete' schema object: records that the object
    (what, objectId) was removed from parent (parentObjType, parentObjId).
    Follows the file's generated accessor pattern.
    """
    vtType = 'delete'
    def __init__(self, id=None, what=None, objectId=None, parentObjId=None, parentObjType=None):
        self._db_id = id
        self._db_what = what
        self._db_objectId = objectId
        self._db_parentObjId = parentObjId
        self._db_parentObjType = parentObjType
        # Freshly built objects are unsaved and considered modified.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBDelete.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object; with new_ids, allocate a fresh id from
        id_scope and record the (type, old id) -> new id mapping in
        id_remap, then re-point references through id_remap.
        """
        cp = DBDelete(id=self._db_id,
                      what=self._db_what,
                      objectId=self._db_objectId,
                      parentObjId=self._db_parentObjId,
                      parentObjType=self._db_parentObjType)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # Re-point foreign references at ids already translated in id_remap.
        if hasattr(self, 'db_objectId') and (self._db_what, self._db_objectId) in id_remap:
            cp._db_objectId = id_remap[(self._db_what, self._db_objectId)]
        if hasattr(self, 'db_parentObjId') and (self._db_parentObjType, self._db_parentObjId) in id_remap:
            cp._db_parentObjId = id_remap[(self._db_parentObjType, self._db_parentObjId)]
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema; per-field translator
        callables may be supplied under trans_dict['DBDelete'], otherwise
        fields are copied verbatim when present and non-None.
        """
        if new_obj is None:
            new_obj = DBDelete()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'what' in class_dict:
            res = class_dict['what'](old_obj, trans_dict)
            new_obj.db_what = res
        elif hasattr(old_obj, 'db_what') and old_obj.db_what is not None:
            new_obj.db_what = old_obj.db_what
        if 'objectId' in class_dict:
            res = class_dict['objectId'](old_obj, trans_dict)
            new_obj.db_objectId = res
        elif hasattr(old_obj, 'db_objectId') and old_obj.db_objectId is not None:
            new_obj.db_objectId = old_obj.db_objectId
        if 'parentObjId' in class_dict:
            res = class_dict['parentObjId'](old_obj, trans_dict)
            new_obj.db_parentObjId = res
        elif hasattr(old_obj, 'db_parentObjId') and old_obj.db_parentObjId is not None:
            new_obj.db_parentObjId = old_obj.db_parentObjId
        if 'parentObjType' in class_dict:
            res = class_dict['parentObjType'](old_obj, trans_dict)
            new_obj.db_parentObjType = res
        elif hasattr(old_obj, 'db_parentObjType') and old_obj.db_parentObjType is not None:
            new_obj.db_parentObjType = old_obj.db_parentObjType
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only "child" entry is the object itself.
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        children = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        return False
    # -- 'id' accessors ---------------------------------------------------
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # -- 'what' accessors -------------------------------------------------
    def __get_db_what(self):
        return self._db_what
    def __set_db_what(self, what):
        self._db_what = what
        self.is_dirty = True
    db_what = property(__get_db_what, __set_db_what)
    def db_add_what(self, what):
        self._db_what = what
    def db_change_what(self, what):
        self._db_what = what
    def db_delete_what(self, what):
        self._db_what = None
    # -- 'objectId' accessors ---------------------------------------------
    def __get_db_objectId(self):
        return self._db_objectId
    def __set_db_objectId(self, objectId):
        self._db_objectId = objectId
        self.is_dirty = True
    db_objectId = property(__get_db_objectId, __set_db_objectId)
    def db_add_objectId(self, objectId):
        self._db_objectId = objectId
    def db_change_objectId(self, objectId):
        self._db_objectId = objectId
    def db_delete_objectId(self, objectId):
        self._db_objectId = None
    # -- 'parentObjId' accessors ------------------------------------------
    def __get_db_parentObjId(self):
        return self._db_parentObjId
    def __set_db_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
        self.is_dirty = True
    db_parentObjId = property(__get_db_parentObjId, __set_db_parentObjId)
    def db_add_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
    def db_change_parentObjId(self, parentObjId):
        self._db_parentObjId = parentObjId
    def db_delete_parentObjId(self, parentObjId):
        self._db_parentObjId = None
    # -- 'parentObjType' accessors ----------------------------------------
    def __get_db_parentObjType(self):
        return self._db_parentObjType
    def __set_db_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
        self.is_dirty = True
    db_parentObjType = property(__get_db_parentObjType, __set_db_parentObjType)
    def db_add_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
    def db_change_parentObjType(self, parentObjType):
        self._db_parentObjType = parentObjType
    def db_delete_parentObjType(self, parentObjType):
        self._db_parentObjType = None
    def getPrimaryKey(self):
        return self._db_id
class DBVistrailVariable(object):
    """Domain class for a 'vistrailVariable' schema object: a named value
    bound to a (package, module, namespace) triple plus a uuid. The
    primary key is the variable name, not a numeric id.
    """
    vtType = 'vistrailVariable'
    def __init__(self, name=None, uuid=None, package=None, module=None, namespace=None, value=None):
        self._db_name = name
        self._db_uuid = uuid
        self._db_package = package
        self._db_module = module
        self._db_namespace = namespace
        self._db_value = value
        # Freshly built objects are unsaved and considered modified.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBVistrailVariable.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this object; with new_ids, allocate a fresh id from
        id_scope and record the translation in id_remap.
        """
        cp = DBVistrailVariable(name=self._db_name,
                                uuid=self._db_uuid,
                                package=self._db_package,
                                module=self._db_module,
                                namespace=self._db_namespace,
                                value=self._db_value)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema; per-field translator
        callables may be supplied under trans_dict['DBVistrailVariable'].
        """
        if new_obj is None:
            new_obj = DBVistrailVariable()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'uuid' in class_dict:
            res = class_dict['uuid'](old_obj, trans_dict)
            new_obj.db_uuid = res
        elif hasattr(old_obj, 'db_uuid') and old_obj.db_uuid is not None:
            new_obj.db_uuid = old_obj.db_uuid
        if 'package' in class_dict:
            res = class_dict['package'](old_obj, trans_dict)
            new_obj.db_package = res
        elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
            new_obj.db_package = old_obj.db_package
        if 'module' in class_dict:
            res = class_dict['module'](old_obj, trans_dict)
            new_obj.db_module = res
        elif hasattr(old_obj, 'db_module') and old_obj.db_module is not None:
            new_obj.db_module = old_obj.db_module
        if 'namespace' in class_dict:
            res = class_dict['namespace'](old_obj, trans_dict)
            new_obj.db_namespace = res
        elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
            new_obj.db_namespace = old_obj.db_namespace
        if 'value' in class_dict:
            res = class_dict['value'](old_obj, trans_dict)
            new_obj.db_value = res
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            new_obj.db_value = old_obj.db_value
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only "child" entry is the object itself.
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        children = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        return False
    # -- 'name' accessors -------------------------------------------------
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    # -- 'uuid' accessors -------------------------------------------------
    def __get_db_uuid(self):
        return self._db_uuid
    def __set_db_uuid(self, uuid):
        self._db_uuid = uuid
        self.is_dirty = True
    db_uuid = property(__get_db_uuid, __set_db_uuid)
    def db_add_uuid(self, uuid):
        self._db_uuid = uuid
    def db_change_uuid(self, uuid):
        self._db_uuid = uuid
    def db_delete_uuid(self, uuid):
        self._db_uuid = None
    # -- 'package' accessors ----------------------------------------------
    def __get_db_package(self):
        return self._db_package
    def __set_db_package(self, package):
        self._db_package = package
        self.is_dirty = True
    db_package = property(__get_db_package, __set_db_package)
    def db_add_package(self, package):
        self._db_package = package
    def db_change_package(self, package):
        self._db_package = package
    def db_delete_package(self, package):
        self._db_package = None
    # -- 'module' accessors -----------------------------------------------
    def __get_db_module(self):
        return self._db_module
    def __set_db_module(self, module):
        self._db_module = module
        self.is_dirty = True
    db_module = property(__get_db_module, __set_db_module)
    def db_add_module(self, module):
        self._db_module = module
    def db_change_module(self, module):
        self._db_module = module
    def db_delete_module(self, module):
        self._db_module = None
    # -- 'namespace' accessors --------------------------------------------
    def __get_db_namespace(self):
        return self._db_namespace
    def __set_db_namespace(self, namespace):
        self._db_namespace = namespace
        self.is_dirty = True
    db_namespace = property(__get_db_namespace, __set_db_namespace)
    def db_add_namespace(self, namespace):
        self._db_namespace = namespace
    def db_change_namespace(self, namespace):
        self._db_namespace = namespace
    def db_delete_namespace(self, namespace):
        self._db_namespace = None
    # -- 'value' accessors ------------------------------------------------
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value):
        self._db_value = value
    def db_change_value(self, value):
        self._db_value = value
    def db_delete_value(self, value):
        self._db_value = None
    def getPrimaryKey(self):
        # The variable's name serves as its primary key.
        return self._db_name
class DBOpmOverlaps(object):
    """Domain class for an 'opm_overlaps' schema object: a container of
    opm_account_id children (unkeyed list), with deletion bookkeeping.
    """
    vtType = 'opm_overlaps'
    def __init__(self, opm_account_ids=None):
        self.db_deleted_opm_account_ids = []
        if opm_account_ids is None:
            self._db_opm_account_ids = []
        else:
            self._db_opm_account_ids = opm_account_ids
        # Freshly built objects are unsaved and considered modified.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmOverlaps.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object and its children; with new_ids, allocate
        a fresh id from id_scope and record the translation in id_remap.
        """
        cp = DBOpmOverlaps()
        if self._db_opm_account_ids is None:
            cp._db_opm_account_ids = []
        else:
            cp._db_opm_account_ids = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_account_ids]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema, migrating both the
        live children and the pending-deletion list.
        """
        if new_obj is None:
            new_obj = DBOpmOverlaps()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'opm_account_ids' in class_dict:
            res = class_dict['opm_account_ids'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_opm_account_id(obj)
        elif hasattr(old_obj, 'db_opm_account_ids') and old_obj.db_opm_account_ids is not None:
            for obj in old_obj.db_opm_account_ids:
                new_obj.db_add_opm_account_id(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_opm_account_ids') and hasattr(new_obj, 'db_deleted_opm_account_ids'):
            for obj in old_obj.db_deleted_opm_account_ids:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_opm_account_ids.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent type, parent id) triples for this
        object and its children; with orphan, children are detached.
        """
        children = []
        to_del = []
        for child in self.db_opm_account_ids:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_opm_account_id(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        children = []
        children.extend(self.db_deleted_opm_account_ids)
        if remove:
            self.db_deleted_opm_account_ids = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        for child in self._db_opm_account_ids:
            if child.has_changes():
                return True
        return False
    # -- 'opm_account_ids' children: unkeyed list -------------------------
    def __get_db_opm_account_ids(self):
        return self._db_opm_account_ids
    def __set_db_opm_account_ids(self, opm_account_ids):
        self._db_opm_account_ids = opm_account_ids
        self.is_dirty = True
    db_opm_account_ids = property(__get_db_opm_account_ids, __set_db_opm_account_ids)
    def db_get_opm_account_ids(self):
        return self._db_opm_account_ids
    def db_add_opm_account_id(self, opm_account_id):
        self.is_dirty = True
        self._db_opm_account_ids.append(opm_account_id)
    def db_change_opm_account_id(self, opm_account_id):
        # NOTE(review): generated pattern appends without replacing.
        self.is_dirty = True
        self._db_opm_account_ids.append(opm_account_id)
    def db_delete_opm_account_id(self, opm_account_id):
        # Unkeyed children cannot be deleted individually.
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_opm_account_id(self, key):
        # No key index exists for unkeyed children.
        return None
class DBOpmWasTriggeredBy(object):
    """Domain class for an 'opm_was_triggered_by' schema object: links an
    effect process id to a cause process id with a role, plus account-id
    and opm-time child lists. Follows the file's generated pattern with
    per-field deletion bookkeeping.
    """
    vtType = 'opm_was_triggered_by'
    def __init__(self, effect=None, role=None, cause=None, accounts=None, opm_times=None):
        self.db_deleted_effect = []
        self._db_effect = effect
        self.db_deleted_role = []
        self._db_role = role
        self.db_deleted_cause = []
        self._db_cause = cause
        self.db_deleted_accounts = []
        if accounts is None:
            self._db_accounts = []
        else:
            self._db_accounts = accounts
        self.db_deleted_opm_times = []
        if opm_times is None:
            self._db_opm_times = []
        else:
            self._db_opm_times = opm_times
        # Freshly built objects are unsaved and considered modified.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmWasTriggeredBy.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object and all children; with new_ids, allocate
        a fresh id from id_scope and record the translation in id_remap.
        """
        cp = DBOpmWasTriggeredBy()
        if self._db_effect is not None:
            cp._db_effect = self._db_effect.do_copy(new_ids, id_scope, id_remap)
        if self._db_role is not None:
            cp._db_role = self._db_role.do_copy(new_ids, id_scope, id_remap)
        if self._db_cause is not None:
            cp._db_cause = self._db_cause.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_accounts]
        if self._db_opm_times is None:
            cp._db_opm_times = []
        else:
            cp._db_opm_times = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_opm_times]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema, migrating each child
        field (and its pending-deletion list) through the matching DB*
        class's update_version.
        """
        if new_obj is None:
            new_obj = DBOpmWasTriggeredBy()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'effect' in class_dict:
            res = class_dict['effect'](old_obj, trans_dict)
            new_obj.db_effect = res
        elif hasattr(old_obj, 'db_effect') and old_obj.db_effect is not None:
            obj = old_obj.db_effect
            new_obj.db_add_effect(DBOpmProcessIdEffect.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_effect') and hasattr(new_obj, 'db_deleted_effect'):
            for obj in old_obj.db_deleted_effect:
                n_obj = DBOpmProcessIdEffect.update_version(obj, trans_dict)
                new_obj.db_deleted_effect.append(n_obj)
        if 'role' in class_dict:
            res = class_dict['role'](old_obj, trans_dict)
            new_obj.db_role = res
        elif hasattr(old_obj, 'db_role') and old_obj.db_role is not None:
            obj = old_obj.db_role
            new_obj.db_add_role(DBOpmRole.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_role') and hasattr(new_obj, 'db_deleted_role'):
            for obj in old_obj.db_deleted_role:
                n_obj = DBOpmRole.update_version(obj, trans_dict)
                new_obj.db_deleted_role.append(n_obj)
        if 'cause' in class_dict:
            res = class_dict['cause'](old_obj, trans_dict)
            new_obj.db_cause = res
        elif hasattr(old_obj, 'db_cause') and old_obj.db_cause is not None:
            obj = old_obj.db_cause
            new_obj.db_add_cause(DBOpmProcessIdCause.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_cause') and hasattr(new_obj, 'db_deleted_cause'):
            for obj in old_obj.db_deleted_cause:
                n_obj = DBOpmProcessIdCause.update_version(obj, trans_dict)
                new_obj.db_deleted_cause.append(n_obj)
        if 'accounts' in class_dict:
            res = class_dict['accounts'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_account(obj)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for obj in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for obj in old_obj.db_deleted_accounts:
                n_obj = DBOpmAccountId.update_version(obj, trans_dict)
                new_obj.db_deleted_accounts.append(n_obj)
        if 'opm_times' in class_dict:
            res = class_dict['opm_times'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_opm_time(obj)
        elif hasattr(old_obj, 'db_opm_times') and old_obj.db_opm_times is not None:
            for obj in old_obj.db_opm_times:
                new_obj.db_add_opm_time(DBOpmTime.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_opm_times') and hasattr(new_obj, 'db_deleted_opm_times'):
            for obj in old_obj.db_deleted_opm_times:
                n_obj = DBOpmTime.update_version(obj, trans_dict)
                new_obj.db_deleted_opm_times.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent type, parent id) triples for this
        object and all children; with orphan, children are detached.
        """
        children = []
        if self._db_effect is not None:
            children.extend(self._db_effect.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_effect = None
        if self._db_role is not None:
            children.extend(self._db_role.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_role = None
        if self._db_cause is not None:
            children.extend(self._db_cause.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_cause = None
        to_del = []
        for child in self.db_accounts:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_account(child)
        to_del = []
        for child in self.db_opm_times:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_opm_time(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Gather everything queued for deletion; with remove, clear the
        # per-field deletion lists afterwards.
        children = []
        children.extend(self.db_deleted_effect)
        children.extend(self.db_deleted_role)
        children.extend(self.db_deleted_cause)
        children.extend(self.db_deleted_accounts)
        children.extend(self.db_deleted_opm_times)
        if remove:
            self.db_deleted_effect = []
            self.db_deleted_role = []
            self.db_deleted_cause = []
            self.db_deleted_accounts = []
            self.db_deleted_opm_times = []
        return children
    def has_changes(self):
        # Dirty if this object or any live child is dirty.
        if self.is_dirty:
            return True
        if self._db_effect is not None and self._db_effect.has_changes():
            return True
        if self._db_role is not None and self._db_role.has_changes():
            return True
        if self._db_cause is not None and self._db_cause.has_changes():
            return True
        for child in self._db_accounts:
            if child.has_changes():
                return True
        for child in self._db_opm_times:
            if child.has_changes():
                return True
        return False
    # -- 'effect' child: single non-keyed object -------------------------
    def __get_db_effect(self):
        return self._db_effect
    def __set_db_effect(self, effect):
        self._db_effect = effect
        self.is_dirty = True
    db_effect = property(__get_db_effect, __set_db_effect)
    def db_add_effect(self, effect):
        self._db_effect = effect
    def db_change_effect(self, effect):
        self._db_effect = effect
    def db_delete_effect(self, effect):
        # Already-persisted children are remembered for later deletion.
        if not self.is_new:
            self.db_deleted_effect.append(self._db_effect)
        self._db_effect = None
    # -- 'role' child: single non-keyed object ---------------------------
    def __get_db_role(self):
        return self._db_role
    def __set_db_role(self, role):
        self._db_role = role
        self.is_dirty = True
    db_role = property(__get_db_role, __set_db_role)
    def db_add_role(self, role):
        self._db_role = role
    def db_change_role(self, role):
        self._db_role = role
    def db_delete_role(self, role):
        if not self.is_new:
            self.db_deleted_role.append(self._db_role)
        self._db_role = None
    # -- 'cause' child: single non-keyed object --------------------------
    def __get_db_cause(self):
        return self._db_cause
    def __set_db_cause(self, cause):
        self._db_cause = cause
        self.is_dirty = True
    db_cause = property(__get_db_cause, __set_db_cause)
    def db_add_cause(self, cause):
        self._db_cause = cause
    def db_change_cause(self, cause):
        self._db_cause = cause
    def db_delete_cause(self, cause):
        if not self.is_new:
            self.db_deleted_cause.append(self._db_cause)
        self._db_cause = None
    # -- 'accounts' children: unkeyed list -------------------------------
    def __get_db_accounts(self):
        return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self):
        return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        # NOTE(review): generated pattern appends without replacing.
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        # Unkeyed children cannot be deleted individually.
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        return None
    # -- 'opm_times' children: unkeyed list ------------------------------
    def __get_db_opm_times(self):
        return self._db_opm_times
    def __set_db_opm_times(self, opm_times):
        self._db_opm_times = opm_times
        self.is_dirty = True
    db_opm_times = property(__get_db_opm_times, __set_db_opm_times)
    def db_get_opm_times(self):
        return self._db_opm_times
    def db_add_opm_time(self, opm_time):
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_change_opm_time(self, opm_time):
        self.is_dirty = True
        self._db_opm_times.append(opm_time)
    def db_delete_opm_time(self, opm_time):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_opm_time(self, key):
        return None
class DBModuleDescriptor(object):
    """Domain class for a 'module_descriptor' schema object: identifies a
    module by name/package/namespace/version, optionally points at a base
    descriptor, and owns a keyed list of portSpec children indexed both
    by id and by (name, type).
    """
    vtType = 'module_descriptor'
    def __init__(self, id=None, name=None, package=None, namespace=None, package_version=None, version=None, base_descriptor_id=None, portSpecs=None):
        self._db_id = id
        self._db_name = name
        self._db_package = package
        self._db_namespace = namespace
        self._db_package_version = package_version
        self._db_version = version
        self._db_base_descriptor_id = base_descriptor_id
        self.db_deleted_portSpecs = []
        # Secondary indices over portSpecs: by child id and by (name, type).
        self.db_portSpecs_id_index = {}
        self.db_portSpecs_name_index = {}
        if portSpecs is None:
            self._db_portSpecs = []
        else:
            self._db_portSpecs = portSpecs
        for v in self._db_portSpecs:
            self.db_portSpecs_id_index[v.db_id] = v
            self.db_portSpecs_name_index[(v.db_name,v.db_type)] = v
        # Freshly built objects are unsaved and considered modified.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBModuleDescriptor.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this descriptor and its portSpecs; with new_ids,
        allocate a fresh id from id_scope, record the translation in
        id_remap, and re-point base_descriptor_id through id_remap.
        The portSpec indices are rebuilt on the copy.
        """
        cp = DBModuleDescriptor(id=self._db_id,
                                name=self._db_name,
                                package=self._db_package,
                                namespace=self._db_namespace,
                                package_version=self._db_package_version,
                                version=self._db_version,
                                base_descriptor_id=self._db_base_descriptor_id)
        if self._db_portSpecs is None:
            cp._db_portSpecs = []
        else:
            cp._db_portSpecs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_portSpecs]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_base_descriptor_id') and ('module_descriptor', self._db_base_descriptor_id) in id_remap:
                cp._db_base_descriptor_id = id_remap[('module_descriptor', self._db_base_descriptor_id)]
        # recreate indices and set flags
        cp.db_portSpecs_id_index = dict((v.db_id, v) for v in cp._db_portSpecs)
        cp.db_portSpecs_name_index = dict(((v.db_name,v.db_type), v) for v in cp._db_portSpecs)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema; scalar fields are
        copied (or run through trans_dict translators), and portSpec
        children are migrated via DBPortSpec.update_version.
        """
        if new_obj is None:
            new_obj = DBModuleDescriptor()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'package' in class_dict:
            res = class_dict['package'](old_obj, trans_dict)
            new_obj.db_package = res
        elif hasattr(old_obj, 'db_package') and old_obj.db_package is not None:
            new_obj.db_package = old_obj.db_package
        if 'namespace' in class_dict:
            res = class_dict['namespace'](old_obj, trans_dict)
            new_obj.db_namespace = res
        elif hasattr(old_obj, 'db_namespace') and old_obj.db_namespace is not None:
            new_obj.db_namespace = old_obj.db_namespace
        if 'package_version' in class_dict:
            res = class_dict['package_version'](old_obj, trans_dict)
            new_obj.db_package_version = res
        elif hasattr(old_obj, 'db_package_version') and old_obj.db_package_version is not None:
            new_obj.db_package_version = old_obj.db_package_version
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'base_descriptor_id' in class_dict:
            res = class_dict['base_descriptor_id'](old_obj, trans_dict)
            new_obj.db_base_descriptor_id = res
        elif hasattr(old_obj, 'db_base_descriptor_id') and old_obj.db_base_descriptor_id is not None:
            new_obj.db_base_descriptor_id = old_obj.db_base_descriptor_id
        if 'portSpecs' in class_dict:
            res = class_dict['portSpecs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_portSpec(obj)
        elif hasattr(old_obj, 'db_portSpecs') and old_obj.db_portSpecs is not None:
            for obj in old_obj.db_portSpecs:
                new_obj.db_add_portSpec(DBPortSpec.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_portSpecs') and hasattr(new_obj, 'db_deleted_portSpecs'):
            for obj in old_obj.db_deleted_portSpecs:
                n_obj = DBPortSpec.update_version(obj, trans_dict)
                new_obj.db_deleted_portSpecs.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent type, parent id) triples for this
        descriptor and its portSpecs; with orphan, children are detached.
        """
        children = []
        to_del = []
        for child in self.db_portSpecs:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_portSpec(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Return portSpecs queued for deletion; with remove, clear the list.
        children = []
        children.extend(self.db_deleted_portSpecs)
        if remove:
            self.db_deleted_portSpecs = []
        return children
    def has_changes(self):
        # Dirty if this object or any live portSpec child is dirty.
        if self.is_dirty:
            return True
        for child in self._db_portSpecs:
            if child.has_changes():
                return True
        return False
    # -- 'id' accessors ---------------------------------------------------
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # -- 'name' accessors -------------------------------------------------
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    # -- 'package' accessors ----------------------------------------------
    def __get_db_package(self):
        return self._db_package
    def __set_db_package(self, package):
        self._db_package = package
        self.is_dirty = True
    db_package = property(__get_db_package, __set_db_package)
    def db_add_package(self, package):
        self._db_package = package
    def db_change_package(self, package):
        self._db_package = package
    def db_delete_package(self, package):
        self._db_package = None
    # -- 'namespace' accessors --------------------------------------------
    def __get_db_namespace(self):
        return self._db_namespace
    def __set_db_namespace(self, namespace):
        self._db_namespace = namespace
        self.is_dirty = True
    db_namespace = property(__get_db_namespace, __set_db_namespace)
    def db_add_namespace(self, namespace):
        self._db_namespace = namespace
    def db_change_namespace(self, namespace):
        self._db_namespace = namespace
    def db_delete_namespace(self, namespace):
        self._db_namespace = None
    # -- 'package_version' accessors --------------------------------------
    def __get_db_package_version(self):
        return self._db_package_version
    def __set_db_package_version(self, package_version):
        self._db_package_version = package_version
        self.is_dirty = True
    db_package_version = property(__get_db_package_version, __set_db_package_version)
    def db_add_package_version(self, package_version):
        self._db_package_version = package_version
    def db_change_package_version(self, package_version):
        self._db_package_version = package_version
    def db_delete_package_version(self, package_version):
        self._db_package_version = None
    # -- 'version' accessors ----------------------------------------------
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    # -- 'base_descriptor_id' accessors -----------------------------------
    def __get_db_base_descriptor_id(self):
        return self._db_base_descriptor_id
    def __set_db_base_descriptor_id(self, base_descriptor_id):
        self._db_base_descriptor_id = base_descriptor_id
        self.is_dirty = True
    db_base_descriptor_id = property(__get_db_base_descriptor_id, __set_db_base_descriptor_id)
def db_add_base_descriptor_id(self, base_descriptor_id):
self._db_base_descriptor_id = base_descriptor_id
def db_change_base_descriptor_id(self, base_descriptor_id):
self._db_base_descriptor_id = base_descriptor_id
def db_delete_base_descriptor_id(self, base_descriptor_id):
self._db_base_descriptor_id = None
def __get_db_portSpecs(self):
return self._db_portSpecs
def __set_db_portSpecs(self, portSpecs):
self._db_portSpecs = portSpecs
self.is_dirty = True
db_portSpecs = property(__get_db_portSpecs, __set_db_portSpecs)
def db_get_portSpecs(self):
return self._db_portSpecs
def db_add_portSpec(self, portSpec):
self.is_dirty = True
self._db_portSpecs.append(portSpec)
self.db_portSpecs_id_index[portSpec.db_id] = portSpec
self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
def db_change_portSpec(self, portSpec):
self.is_dirty = True
found = False
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == portSpec.db_id:
self._db_portSpecs[i] = portSpec
found = True
break
if not found:
self._db_portSpecs.append(portSpec)
self.db_portSpecs_id_index[portSpec.db_id] = portSpec
self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)] = portSpec
def db_delete_portSpec(self, portSpec):
self.is_dirty = True
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == portSpec.db_id:
if not self._db_portSpecs[i].is_new:
self.db_deleted_portSpecs.append(self._db_portSpecs[i])
del self._db_portSpecs[i]
break
del self.db_portSpecs_id_index[portSpec.db_id]
del self.db_portSpecs_name_index[(portSpec.db_name,portSpec.db_type)]
def db_get_portSpec(self, key):
for i in xrange(len(self._db_portSpecs)):
if self._db_portSpecs[i].db_id == key:
return self._db_portSpecs[i]
return None
def db_get_portSpec_by_id(self, key):
return self.db_portSpecs_id_index[key]
def db_has_portSpec_with_id(self, key):
return key in self.db_portSpecs_id_index
def db_get_portSpec_by_name(self, key):
return self.db_portSpecs_name_index[key]
def db_has_portSpec_with_name(self, key):
return key in self.db_portSpecs_name_index
def getPrimaryKey(self):
return self._db_id
class DBTag(object):
    """Generated domain object for a version 'tag': an (id, name) pair.

    ``is_dirty``/``is_new`` track persistence state, as in all generated
    domain classes in this file.
    """
    vtType = 'tag'

    def __init__(self, id=None, name=None):
        self._db_id = id
        self._db_name = name
        # freshly constructed objects are unsaved and dirty
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBTag.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this tag, optionally allocating a fresh id via `id_scope`
        and recording/looking up translations in `id_remap`."""
        cp = DBTag(id=self._db_id, name=self._db_name)
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # Follow an ('action', id) remap entry when one exists.
        # BUGFIX: guard id_remap against None -- the generated code indexed
        # it unconditionally, so copy.copy() always raised TypeError.
        if id_remap is not None and hasattr(self, 'db_id') and ('action', self._db_id) in id_remap:
            cp._db_id = id_remap[('action', self._db_id)]
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build a current-schema DBTag from `old_obj`, applying per-field
        translators from `trans_dict` when present."""
        if new_obj is None:
            new_obj = DBTag()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        if 'id' in class_dict:
            new_obj.db_id = class_dict['id'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_id', None) is not None:
            new_obj.db_id = old_obj.db_id
        if 'name' in class_dict:
            new_obj.db_name = class_dict['name'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_name', None) is not None:
            new_obj.db_name = old_obj.db_name
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # leaf object: the only child entry is itself
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # tags own no child collections
        return []

    def has_changes(self):
        if self.is_dirty:
            return True
        return False

    @property
    def db_id(self):
        """Database id; assigning it marks the object dirty."""
        return self._db_id

    @db_id.setter
    def db_id(self, id):
        self._db_id = id
        self.is_dirty = True

    def db_add_id(self, id):
        self._db_id = id

    def db_change_id(self, id):
        self._db_id = id

    def db_delete_id(self, id):
        # the argument is ignored; delete just clears the field
        self._db_id = None

    @property
    def db_name(self):
        """Tag text; assigning it marks the object dirty."""
        return self._db_name

    @db_name.setter
    def db_name(self, name):
        self._db_name = name
        self.is_dirty = True

    def db_add_name(self, name):
        self._db_name = name

    def db_change_name(self, name):
        self._db_name = name

    def db_delete_name(self, name):
        # the argument is ignored; delete just clears the field
        self._db_name = None

    def getPrimaryKey(self):
        """Return this object's primary key (the database id)."""
        return self._db_id
class DBOpmRole(object):
    """Generated domain object for an OPM role: a single 'value' field."""
    vtType = 'opm_role'

    def __init__(self, value=None):
        self._db_value = value
        # freshly constructed objects are unsaved and dirty
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmRole.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this role; with `new_ids` a fresh id is allocated from
        `id_scope` and the translation is recorded in `id_remap`."""
        clone = DBOpmRole(value=self._db_value)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                source_type = id_scope.remap[self.vtType]
            else:
                source_type = self.vtType
            id_remap[(source_type, self.db_id)] = fresh
            clone.db_id = fresh
        else:
            clone.is_dirty = self.is_dirty
            clone.is_new = self.is_new
        return clone

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build a current-schema DBOpmRole from `old_obj`, applying a
        per-field translator from `trans_dict` when present."""
        target = DBOpmRole() if new_obj is None else new_obj
        overrides = trans_dict.get(target.__class__.__name__, {})
        if 'value' in overrides:
            target.db_value = overrides['value'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_value', None) is not None:
            target.db_value = old_obj.db_value
        target.is_new = old_obj.is_new
        target.is_dirty = old_obj.is_dirty
        return target

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # leaf object: the only child entry is itself
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    @property
    def db_value(self):
        """Role value; assigning it marks the object dirty."""
        return self._db_value

    @db_value.setter
    def db_value(self, value):
        self._db_value = value
        self.is_dirty = True

    def db_add_value(self, value):
        self._db_value = value

    def db_change_value(self, value):
        self._db_value = value

    def db_delete_value(self, value):
        # the argument is ignored; delete just clears the field
        self._db_value = None
class DBProvDocument(object):
    """Generated domain object for a PROV document.

    Holds four keyed child collections (entities, activities, agents,
    vt_connections), each mirrored by a ``db_id -> object`` index, and three
    non-keyed collections (usages, generations, associations) that only
    support append.  Constructor lists are adopted, not copied.

    Changes vs. the generated original: Python-2-only ``xrange`` index loops
    were replaced with ``enumerate`` (works on Python 2 and 3), and the
    repeated keyed-collection logic was factored into private helpers; all
    observable behavior is preserved.
    """
    vtType = 'prov_document'

    def __init__(self, prov_entitys=None, prov_activitys=None, prov_agents=None, vt_connections=None, prov_usages=None, prov_generations=None, prov_associations=None):
        self.db_deleted_prov_entitys = []
        self.db_prov_entitys_id_index = {}
        self._db_prov_entitys = [] if prov_entitys is None else prov_entitys
        for v in self._db_prov_entitys:
            self.db_prov_entitys_id_index[v.db_id] = v
        self.db_deleted_prov_activitys = []
        self.db_prov_activitys_id_index = {}
        self._db_prov_activitys = [] if prov_activitys is None else prov_activitys
        for v in self._db_prov_activitys:
            self.db_prov_activitys_id_index[v.db_id] = v
        self.db_deleted_prov_agents = []
        self.db_prov_agents_id_index = {}
        self._db_prov_agents = [] if prov_agents is None else prov_agents
        for v in self._db_prov_agents:
            self.db_prov_agents_id_index[v.db_id] = v
        self.db_deleted_vt_connections = []
        self.db_vt_connections_id_index = {}
        self._db_vt_connections = [] if vt_connections is None else vt_connections
        for v in self._db_vt_connections:
            self.db_vt_connections_id_index[v.db_id] = v
        self.db_deleted_prov_usages = []
        self._db_prov_usages = [] if prov_usages is None else prov_usages
        self.db_deleted_prov_generations = []
        self._db_prov_generations = [] if prov_generations is None else prov_generations
        self.db_deleted_prov_associations = []
        self._db_prov_associations = [] if prov_associations is None else prov_associations
        # freshly constructed objects are unsaved and dirty
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBProvDocument.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy the document; children are copied via their own
        do_copy and the id indexes are rebuilt on the copy."""
        def _copy_all(members):
            if members is None:
                return []
            return [v.do_copy(new_ids, id_scope, id_remap) for v in members]
        cp = DBProvDocument()
        cp._db_prov_entitys = _copy_all(self._db_prov_entitys)
        cp._db_prov_activitys = _copy_all(self._db_prov_activitys)
        cp._db_prov_agents = _copy_all(self._db_prov_agents)
        cp._db_vt_connections = _copy_all(self._db_vt_connections)
        cp._db_prov_usages = _copy_all(self._db_prov_usages)
        cp._db_prov_generations = _copy_all(self._db_prov_generations)
        cp._db_prov_associations = _copy_all(self._db_prov_associations)
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            # NOTE(review): this class defines no db_id attribute, so the
            # new_ids branch appears unreachable without error -- preserved
            # from the generated code; confirm against callers.
            cp.db_id = new_id
        cp.db_prov_entitys_id_index = {v.db_id: v for v in cp._db_prov_entitys}
        cp.db_prov_activitys_id_index = {v.db_id: v for v in cp._db_prov_activitys}
        cp.db_prov_agents_id_index = {v.db_id: v for v in cp._db_prov_agents}
        cp.db_vt_connections_id_index = {v.db_id: v for v in cp._db_vt_connections}
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build a current-schema DBProvDocument from `old_obj`, applying
        per-collection translators from `trans_dict` when present."""
        if new_obj is None:
            new_obj = DBProvDocument()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})

        def _migrate(field, add, klass):
            # translate live members, then carry over the deletion log
            if field in class_dict:
                for obj in class_dict[field](old_obj, trans_dict):
                    add(obj)
            elif getattr(old_obj, 'db_' + field, None) is not None:
                for obj in getattr(old_obj, 'db_' + field):
                    add(klass.update_version(obj, trans_dict))
            deleted = 'db_deleted_' + field
            if hasattr(old_obj, deleted) and hasattr(new_obj, deleted):
                bucket = getattr(new_obj, deleted)
                for obj in getattr(old_obj, deleted):
                    bucket.append(klass.update_version(obj, trans_dict))

        _migrate('prov_entitys', new_obj.db_add_prov_entity, DBProvEntity)
        _migrate('prov_activitys', new_obj.db_add_prov_activity, DBProvActivity)
        _migrate('prov_agents', new_obj.db_add_prov_agent, DBProvAgent)
        _migrate('vt_connections', new_obj.db_add_vt_connection, DBVtConnection)
        _migrate('prov_usages', new_obj.db_add_prov_usage, DBProvUsage)
        _migrate('prov_generations', new_obj.db_add_prov_generation, DBProvGeneration)
        _migrate('prov_associations', new_obj.db_add_prov_association, DBProvAssociation)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples for all children;
        with `orphan`, children are removed after being collected (which
        raises for the non-keyed collections, as in the generated code)."""
        children = []
        groups = ((self._db_prov_entitys, self.db_delete_prov_entity),
                  (self._db_prov_activitys, self.db_delete_prov_activity),
                  (self._db_prov_agents, self.db_delete_prov_agent),
                  (self._db_vt_connections, self.db_delete_vt_connection),
                  (self._db_prov_usages, self.db_delete_prov_usage),
                  (self._db_prov_generations, self.db_delete_prov_generation),
                  (self._db_prov_associations, self.db_delete_prov_association))
        for members, remove in groups:
            snapshot = list(members)
            for child in snapshot:
                children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                for child in snapshot:
                    remove(child)
        children.append((self, parent[0], parent[1]))
        return children

    def db_deleted_children(self, remove=False):
        """Return all logged deletions; with `remove` the logs are cleared."""
        buckets = ('db_deleted_prov_entitys', 'db_deleted_prov_activitys',
                   'db_deleted_prov_agents', 'db_deleted_vt_connections',
                   'db_deleted_prov_usages', 'db_deleted_prov_generations',
                   'db_deleted_prov_associations')
        children = []
        for name in buckets:
            children.extend(getattr(self, name))
        if remove:
            for name in buckets:
                setattr(self, name, [])
        return children

    def has_changes(self):
        """True when this object or any child is dirty."""
        if self.is_dirty:
            return True
        for members in (self._db_prov_entitys, self._db_prov_activitys,
                        self._db_prov_agents, self._db_vt_connections,
                        self._db_prov_usages, self._db_prov_generations,
                        self._db_prov_associations):
            for child in members:
                if child.has_changes():
                    return True
        return False

    # ---- shared keyed-collection helpers -------------------------------

    def _replace_keyed(self, members, obj):
        # swap in `obj` for the member with the same db_id, else append
        for i, existing in enumerate(members):
            if existing.db_id == obj.db_id:
                members[i] = obj
                return
        members.append(obj)

    def _remove_keyed(self, members, graveyard, index, obj):
        # drop the member with obj's db_id; already-persisted members are
        # logged in `graveyard`.  The index removal is unconditional and
        # raises KeyError when absent (preserved generated behavior).
        for i, existing in enumerate(members):
            if existing.db_id == obj.db_id:
                if not existing.is_new:
                    graveyard.append(existing)
                del members[i]
                break
        del index[obj.db_id]

    @staticmethod
    def _find_keyed(members, key):
        # linear scan by db_id; None when not found
        for existing in members:
            if existing.db_id == key:
                return existing
        return None

    # ---- prov_entitys ---------------------------------------------------

    @property
    def db_prov_entitys(self):
        return self._db_prov_entitys

    @db_prov_entitys.setter
    def db_prov_entitys(self, prov_entitys):
        self._db_prov_entitys = prov_entitys
        self.is_dirty = True

    def db_get_prov_entitys(self):
        return self._db_prov_entitys

    def db_add_prov_entity(self, prov_entity):
        self.is_dirty = True
        self._db_prov_entitys.append(prov_entity)
        self.db_prov_entitys_id_index[prov_entity.db_id] = prov_entity

    def db_change_prov_entity(self, prov_entity):
        self.is_dirty = True
        self._replace_keyed(self._db_prov_entitys, prov_entity)
        self.db_prov_entitys_id_index[prov_entity.db_id] = prov_entity

    def db_delete_prov_entity(self, prov_entity):
        self.is_dirty = True
        self._remove_keyed(self._db_prov_entitys, self.db_deleted_prov_entitys,
                           self.db_prov_entitys_id_index, prov_entity)

    def db_get_prov_entity(self, key):
        return self._find_keyed(self._db_prov_entitys, key)

    def db_get_prov_entity_by_id(self, key):
        return self.db_prov_entitys_id_index[key]

    def db_has_prov_entity_with_id(self, key):
        return key in self.db_prov_entitys_id_index

    # ---- prov_activitys -------------------------------------------------

    @property
    def db_prov_activitys(self):
        return self._db_prov_activitys

    @db_prov_activitys.setter
    def db_prov_activitys(self, prov_activitys):
        self._db_prov_activitys = prov_activitys
        self.is_dirty = True

    def db_get_prov_activitys(self):
        return self._db_prov_activitys

    def db_add_prov_activity(self, prov_activity):
        self.is_dirty = True
        self._db_prov_activitys.append(prov_activity)
        self.db_prov_activitys_id_index[prov_activity.db_id] = prov_activity

    def db_change_prov_activity(self, prov_activity):
        self.is_dirty = True
        self._replace_keyed(self._db_prov_activitys, prov_activity)
        self.db_prov_activitys_id_index[prov_activity.db_id] = prov_activity

    def db_delete_prov_activity(self, prov_activity):
        self.is_dirty = True
        self._remove_keyed(self._db_prov_activitys, self.db_deleted_prov_activitys,
                           self.db_prov_activitys_id_index, prov_activity)

    def db_get_prov_activity(self, key):
        return self._find_keyed(self._db_prov_activitys, key)

    def db_get_prov_activity_by_id(self, key):
        return self.db_prov_activitys_id_index[key]

    def db_has_prov_activity_with_id(self, key):
        return key in self.db_prov_activitys_id_index

    # ---- prov_agents ----------------------------------------------------

    @property
    def db_prov_agents(self):
        return self._db_prov_agents

    @db_prov_agents.setter
    def db_prov_agents(self, prov_agents):
        self._db_prov_agents = prov_agents
        self.is_dirty = True

    def db_get_prov_agents(self):
        return self._db_prov_agents

    def db_add_prov_agent(self, prov_agent):
        self.is_dirty = True
        self._db_prov_agents.append(prov_agent)
        self.db_prov_agents_id_index[prov_agent.db_id] = prov_agent

    def db_change_prov_agent(self, prov_agent):
        self.is_dirty = True
        self._replace_keyed(self._db_prov_agents, prov_agent)
        self.db_prov_agents_id_index[prov_agent.db_id] = prov_agent

    def db_delete_prov_agent(self, prov_agent):
        self.is_dirty = True
        self._remove_keyed(self._db_prov_agents, self.db_deleted_prov_agents,
                           self.db_prov_agents_id_index, prov_agent)

    def db_get_prov_agent(self, key):
        return self._find_keyed(self._db_prov_agents, key)

    def db_get_prov_agent_by_id(self, key):
        return self.db_prov_agents_id_index[key]

    def db_has_prov_agent_with_id(self, key):
        return key in self.db_prov_agents_id_index

    # ---- vt_connections -------------------------------------------------

    @property
    def db_vt_connections(self):
        return self._db_vt_connections

    @db_vt_connections.setter
    def db_vt_connections(self, vt_connections):
        self._db_vt_connections = vt_connections
        self.is_dirty = True

    def db_get_vt_connections(self):
        return self._db_vt_connections

    def db_add_vt_connection(self, vt_connection):
        self.is_dirty = True
        self._db_vt_connections.append(vt_connection)
        self.db_vt_connections_id_index[vt_connection.db_id] = vt_connection

    def db_change_vt_connection(self, vt_connection):
        self.is_dirty = True
        self._replace_keyed(self._db_vt_connections, vt_connection)
        self.db_vt_connections_id_index[vt_connection.db_id] = vt_connection

    def db_delete_vt_connection(self, vt_connection):
        self.is_dirty = True
        self._remove_keyed(self._db_vt_connections, self.db_deleted_vt_connections,
                           self.db_vt_connections_id_index, vt_connection)

    def db_get_vt_connection(self, key):
        return self._find_keyed(self._db_vt_connections, key)

    def db_get_vt_connection_by_id(self, key):
        return self.db_vt_connections_id_index[key]

    def db_has_vt_connection_with_id(self, key):
        return key in self.db_vt_connections_id_index

    # ---- prov_usages (non-keyed) ----------------------------------------

    @property
    def db_prov_usages(self):
        return self._db_prov_usages

    @db_prov_usages.setter
    def db_prov_usages(self, prov_usages):
        self._db_prov_usages = prov_usages
        self.is_dirty = True

    def db_get_prov_usages(self):
        return self._db_prov_usages

    def db_add_prov_usage(self, prov_usage):
        self.is_dirty = True
        self._db_prov_usages.append(prov_usage)

    def db_change_prov_usage(self, prov_usage):
        # non-keyed: "change" can only append (generated behavior)
        self.is_dirty = True
        self._db_prov_usages.append(prov_usage)

    def db_delete_prov_usage(self, prov_usage):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')

    def db_get_prov_usage(self, key):
        # non-keyed collections have no lookup
        return None

    # ---- prov_generations (non-keyed) -----------------------------------

    @property
    def db_prov_generations(self):
        return self._db_prov_generations

    @db_prov_generations.setter
    def db_prov_generations(self, prov_generations):
        self._db_prov_generations = prov_generations
        self.is_dirty = True

    def db_get_prov_generations(self):
        return self._db_prov_generations

    def db_add_prov_generation(self, prov_generation):
        self.is_dirty = True
        self._db_prov_generations.append(prov_generation)

    def db_change_prov_generation(self, prov_generation):
        # non-keyed: "change" can only append (generated behavior)
        self.is_dirty = True
        self._db_prov_generations.append(prov_generation)

    def db_delete_prov_generation(self, prov_generation):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')

    def db_get_prov_generation(self, key):
        # non-keyed collections have no lookup
        return None

    # ---- prov_associations (non-keyed) ----------------------------------

    @property
    def db_prov_associations(self):
        return self._db_prov_associations

    @db_prov_associations.setter
    def db_prov_associations(self, prov_associations):
        self._db_prov_associations = prov_associations
        self.is_dirty = True

    def db_get_prov_associations(self):
        return self._db_prov_associations

    def db_add_prov_association(self, prov_association):
        self.is_dirty = True
        self._db_prov_associations.append(prov_association)

    def db_change_prov_association(self, prov_association):
        # non-keyed: "change" can only append (generated behavior)
        self.is_dirty = True
        self._db_prov_associations.append(prov_association)

    def db_delete_prov_association(self, prov_association):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')

    def db_get_prov_association(self, key):
        # non-keyed collections have no lookup
        return None
class DBOpmProcesses(object):
    """Generated domain object holding a keyed collection of OPM processes.

    Changes vs. the generated original: Python-2-only ``xrange`` index loops
    were replaced with ``enumerate`` (works on Python 2 and 3); behavior is
    otherwise preserved.
    """
    vtType = 'opm_processes'

    def __init__(self, processs=None):
        self.db_deleted_processs = []
        self.db_processs_id_index = {}
        if processs is None:
            self._db_processs = []
        else:
            # the caller's list is adopted, not copied
            self._db_processs = processs
            for v in self._db_processs:
                self.db_processs_id_index[v.db_id] = v
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmProcesses.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this container; children are copied via their own
        do_copy and the id index is rebuilt on the copy."""
        cp = DBOpmProcesses()
        if self._db_processs is None:
            cp._db_processs = []
        else:
            cp._db_processs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_processs]
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            # NOTE(review): this class defines no db_id attribute, so the
            # new_ids branch appears unreachable without error -- preserved
            # from the generated code; confirm against callers.
            cp.db_id = new_id
        cp.db_processs_id_index = {v.db_id: v for v in cp._db_processs}
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build a current-schema DBOpmProcesses from `old_obj`, applying a
        per-collection translator from `trans_dict` when present."""
        if new_obj is None:
            new_obj = DBOpmProcesses()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        if 'processs' in class_dict:
            for obj in class_dict['processs'](old_obj, trans_dict):
                new_obj.db_add_process(obj)
        elif getattr(old_obj, 'db_processs', None) is not None:
            for obj in old_obj.db_processs:
                new_obj.db_add_process(DBOpmProcess.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_processs') and hasattr(new_obj, 'db_deleted_processs'):
            for obj in old_obj.db_deleted_processs:
                new_obj.db_deleted_processs.append(DBOpmProcess.update_version(obj, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples for this object
        and every child; with `orphan`, children are removed afterwards."""
        children = []
        snapshot = list(self.db_processs)
        for child in snapshot:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
        if orphan:
            for child in snapshot:
                self.db_delete_process(child)
        children.append((self, parent[0], parent[1]))
        return children

    def db_deleted_children(self, remove=False):
        """Return logged deletions; with `remove` the log is cleared."""
        children = list(self.db_deleted_processs)
        if remove:
            self.db_deleted_processs = []
        return children

    def has_changes(self):
        """True when this object or any child process is dirty."""
        if self.is_dirty:
            return True
        return any(child.has_changes() for child in self._db_processs)

    @property
    def db_processs(self):
        """List of child process objects; assigning marks the object dirty."""
        return self._db_processs

    @db_processs.setter
    def db_processs(self, processs):
        self._db_processs = processs
        self.is_dirty = True

    def db_get_processs(self):
        return self._db_processs

    def db_add_process(self, process):
        self.is_dirty = True
        self._db_processs.append(process)
        self.db_processs_id_index[process.db_id] = process

    def db_change_process(self, process):
        """Replace the member with the same db_id, or append when absent."""
        self.is_dirty = True
        for i, existing in enumerate(self._db_processs):
            if existing.db_id == process.db_id:
                self._db_processs[i] = process
                break
        else:
            self._db_processs.append(process)
        self.db_processs_id_index[process.db_id] = process

    def db_delete_process(self, process):
        """Remove the member with process's db_id; persisted members are
        logged.  Index removal is unconditional (KeyError if absent), as in
        the generated code."""
        self.is_dirty = True
        for i, existing in enumerate(self._db_processs):
            if existing.db_id == process.db_id:
                if not existing.is_new:
                    self.db_deleted_processs.append(existing)
                del self._db_processs[i]
                break
        del self.db_processs_id_index[process.db_id]

    def db_get_process(self, key):
        """Linear search by db_id; None when not found."""
        for existing in self._db_processs:
            if existing.db_id == key:
                return existing
        return None

    def db_get_process_by_id(self, key):
        return self.db_processs_id_index[key]

    def db_has_process_with_id(self, key):
        return key in self.db_processs_id_index
class DBOpmAccountId(object):
    """Generated domain object: a reference to an OPM account by id."""
    vtType = 'opm_account_id'

    def __init__(self, id=None):
        self._db_id = id
        # freshly constructed objects are unsaved and dirty
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmAccountId.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy this reference, optionally allocating a fresh id via
        `id_scope` and following an ('opm_account', id) remap entry."""
        cp = DBOpmAccountId(id=self._db_id)
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # BUGFIX: guard id_remap against None -- the generated code indexed
        # it unconditionally, so copy.copy() always raised TypeError.
        if id_remap is not None and hasattr(self, 'db_id') and ('opm_account', self._db_id) in id_remap:
            cp._db_id = id_remap[('opm_account', self._db_id)]
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Build a current-schema DBOpmAccountId from `old_obj`, applying a
        per-field translator from `trans_dict` when present."""
        if new_obj is None:
            new_obj = DBOpmAccountId()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        if 'id' in class_dict:
            new_obj.db_id = class_dict['id'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_id', None) is not None:
            new_obj.db_id = old_obj.db_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # leaf object: the only child entry is itself
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        return []

    def has_changes(self):
        if self.is_dirty:
            return True
        return False

    @property
    def db_id(self):
        """Referenced account id; assigning it marks the object dirty."""
        return self._db_id

    @db_id.setter
    def db_id(self, id):
        self._db_id = id
        self.is_dirty = True

    def db_add_id(self, id):
        self._db_id = id

    def db_change_id(self, id):
        self._db_id = id

    def db_delete_id(self, id):
        # the argument is ignored; delete just clears the field
        self._db_id = None
class DBPortSpecItem(object):
    """Generated persistence wrapper for one port-spec item record."""
    vtType = 'portSpecItem'

    def __init__(self, id=None, pos=None, module=None, package=None, namespace=None, label=None, default=None, values=None, entry_type=None):
        # Stash each constructor argument under its _db_-prefixed slot.
        for field, value in (('id', id), ('pos', pos), ('module', module),
                             ('package', package), ('namespace', namespace),
                             ('label', label), ('default', default),
                             ('values', values), ('entry_type', entry_type)):
            setattr(self, '_db_' + field, value)
        # Fresh objects are considered unsaved and new.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBPortSpecItem.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a field-by-field copy; allocate a fresh id when new_ids."""
        cp = DBPortSpecItem(id=self._db_id,
                            pos=self._db_pos,
                            module=self._db_module,
                            package=self._db_package,
                            namespace=self._db_namespace,
                            label=self._db_label,
                            default=self._db_default,
                            values=self._db_values,
                            entry_type=self._db_entry_type)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = fresh
            else:
                id_remap[(self.vtType, self.db_id)] = fresh
            cp.db_id = fresh
        else:
            # Plain copies keep the original's persistence flags.
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Carry field values from old_obj onto a current-version object,
        letting per-field override callables in trans_dict take precedence."""
        if new_obj is None:
            new_obj = DBPortSpecItem()
        overrides = {}
        if new_obj.__class__.__name__ in trans_dict:
            overrides = trans_dict[new_obj.__class__.__name__]
        for field in ('id', 'pos', 'module', 'package', 'namespace',
                      'label', 'default', 'values', 'entry_type'):
            attr = 'db_' + field
            if field in overrides:
                setattr(new_obj, attr, overrides[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf record: report only its own (object, parent-type, parent-id).
        owner = parent[0]
        owner_id = parent[1]
        return [(self, owner, owner_id)]

    def db_deleted_children(self, remove=False):
        # No owned child collections.
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    # --- per-field accessors: property plus add/change/delete triads.
    # Property assignment marks the object dirty; the triads do not.
    def __set_db_id(self, value):
        self._db_id = value
        self.is_dirty = True
    db_id = property(lambda self: self._db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    db_change_id = db_add_id
    def db_delete_id(self, id):
        self._db_id = None

    def __set_db_pos(self, value):
        self._db_pos = value
        self.is_dirty = True
    db_pos = property(lambda self: self._db_pos, __set_db_pos)
    def db_add_pos(self, pos):
        self._db_pos = pos
    db_change_pos = db_add_pos
    def db_delete_pos(self, pos):
        self._db_pos = None

    def __set_db_module(self, value):
        self._db_module = value
        self.is_dirty = True
    db_module = property(lambda self: self._db_module, __set_db_module)
    def db_add_module(self, module):
        self._db_module = module
    db_change_module = db_add_module
    def db_delete_module(self, module):
        self._db_module = None

    def __set_db_package(self, value):
        self._db_package = value
        self.is_dirty = True
    db_package = property(lambda self: self._db_package, __set_db_package)
    def db_add_package(self, package):
        self._db_package = package
    db_change_package = db_add_package
    def db_delete_package(self, package):
        self._db_package = None

    def __set_db_namespace(self, value):
        self._db_namespace = value
        self.is_dirty = True
    db_namespace = property(lambda self: self._db_namespace, __set_db_namespace)
    def db_add_namespace(self, namespace):
        self._db_namespace = namespace
    db_change_namespace = db_add_namespace
    def db_delete_namespace(self, namespace):
        self._db_namespace = None

    def __set_db_label(self, value):
        self._db_label = value
        self.is_dirty = True
    db_label = property(lambda self: self._db_label, __set_db_label)
    def db_add_label(self, label):
        self._db_label = label
    db_change_label = db_add_label
    def db_delete_label(self, label):
        self._db_label = None

    def __set_db_default(self, value):
        self._db_default = value
        self.is_dirty = True
    db_default = property(lambda self: self._db_default, __set_db_default)
    def db_add_default(self, default):
        self._db_default = default
    db_change_default = db_add_default
    def db_delete_default(self, default):
        self._db_default = None

    def __set_db_values(self, value):
        self._db_values = value
        self.is_dirty = True
    db_values = property(lambda self: self._db_values, __set_db_values)
    def db_add_values(self, values):
        self._db_values = values
    db_change_values = db_add_values
    def db_delete_values(self, values):
        self._db_values = None

    def __set_db_entry_type(self, value):
        self._db_entry_type = value
        self.is_dirty = True
    db_entry_type = property(lambda self: self._db_entry_type, __set_db_entry_type)
    def db_add_entry_type(self, entry_type):
        self._db_entry_type = entry_type
    db_change_entry_type = db_add_entry_type
    def db_delete_entry_type(self, entry_type):
        self._db_entry_type = None

    def getPrimaryKey(self):
        return self._db_id
class DBMashupComponent(object):
    """Generated persistence wrapper for one mashup component record."""
    vtType = 'mashup_component'

    def __init__(self, id=None, vtid=None, vttype=None, vtparent_type=None, vtparent_id=None, vtpos=None, vtmid=None, pos=None, type=None, val=None, minVal=None, maxVal=None, stepSize=None, strvaluelist=None, widget=None, seq=None, parent=None):
        # Stash each constructor argument under its _db_-prefixed slot.
        for field, value in (('id', id), ('vtid', vtid), ('vttype', vttype),
                             ('vtparent_type', vtparent_type),
                             ('vtparent_id', vtparent_id), ('vtpos', vtpos),
                             ('vtmid', vtmid), ('pos', pos), ('type', type),
                             ('val', val), ('minVal', minVal),
                             ('maxVal', maxVal), ('stepSize', stepSize),
                             ('strvaluelist', strvaluelist),
                             ('widget', widget), ('seq', seq),
                             ('parent', parent)):
            setattr(self, '_db_' + field, value)
        # Fresh objects are considered unsaved and new.
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBMashupComponent.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a field-by-field copy; allocate a fresh id when new_ids."""
        cp = DBMashupComponent(
            id=self._db_id, vtid=self._db_vtid, vttype=self._db_vttype,
            vtparent_type=self._db_vtparent_type,
            vtparent_id=self._db_vtparent_id, vtpos=self._db_vtpos,
            vtmid=self._db_vtmid, pos=self._db_pos, type=self._db_type,
            val=self._db_val, minVal=self._db_minVal, maxVal=self._db_maxVal,
            stepSize=self._db_stepSize, strvaluelist=self._db_strvaluelist,
            widget=self._db_widget, seq=self._db_seq, parent=self._db_parent)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = fresh
            else:
                id_remap[(self.vtType, self.db_id)] = fresh
            cp.db_id = fresh
        else:
            # Plain copies keep the original's persistence flags.
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Carry field values from old_obj onto a current-version object,
        letting per-field override callables in trans_dict take precedence."""
        if new_obj is None:
            new_obj = DBMashupComponent()
        overrides = {}
        if new_obj.__class__.__name__ in trans_dict:
            overrides = trans_dict[new_obj.__class__.__name__]
        for field in ('id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id',
                      'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal',
                      'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq',
                      'parent'):
            attr = 'db_' + field
            if field in overrides:
                setattr(new_obj, attr, overrides[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf record: report only its own (object, parent-type, parent-id).
        owner = parent[0]
        owner_id = parent[1]
        return [(self, owner, owner_id)]

    def db_deleted_children(self, remove=False):
        # No owned child collections.
        return []

    def has_changes(self):
        return bool(self.is_dirty)

    # --- per-field accessors: property plus add/change/delete triads.
    # Property assignment marks the object dirty; the triads do not.
    def __set_db_id(self, value):
        self._db_id = value
        self.is_dirty = True
    db_id = property(lambda self: self._db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    db_change_id = db_add_id
    def db_delete_id(self, id):
        self._db_id = None

    def __set_db_vtid(self, value):
        self._db_vtid = value
        self.is_dirty = True
    db_vtid = property(lambda self: self._db_vtid, __set_db_vtid)
    def db_add_vtid(self, vtid):
        self._db_vtid = vtid
    db_change_vtid = db_add_vtid
    def db_delete_vtid(self, vtid):
        self._db_vtid = None

    def __set_db_vttype(self, value):
        self._db_vttype = value
        self.is_dirty = True
    db_vttype = property(lambda self: self._db_vttype, __set_db_vttype)
    def db_add_vttype(self, vttype):
        self._db_vttype = vttype
    db_change_vttype = db_add_vttype
    def db_delete_vttype(self, vttype):
        self._db_vttype = None

    def __set_db_vtparent_type(self, value):
        self._db_vtparent_type = value
        self.is_dirty = True
    db_vtparent_type = property(lambda self: self._db_vtparent_type, __set_db_vtparent_type)
    def db_add_vtparent_type(self, vtparent_type):
        self._db_vtparent_type = vtparent_type
    db_change_vtparent_type = db_add_vtparent_type
    def db_delete_vtparent_type(self, vtparent_type):
        self._db_vtparent_type = None

    def __set_db_vtparent_id(self, value):
        self._db_vtparent_id = value
        self.is_dirty = True
    db_vtparent_id = property(lambda self: self._db_vtparent_id, __set_db_vtparent_id)
    def db_add_vtparent_id(self, vtparent_id):
        self._db_vtparent_id = vtparent_id
    db_change_vtparent_id = db_add_vtparent_id
    def db_delete_vtparent_id(self, vtparent_id):
        self._db_vtparent_id = None

    def __set_db_vtpos(self, value):
        self._db_vtpos = value
        self.is_dirty = True
    db_vtpos = property(lambda self: self._db_vtpos, __set_db_vtpos)
    def db_add_vtpos(self, vtpos):
        self._db_vtpos = vtpos
    db_change_vtpos = db_add_vtpos
    def db_delete_vtpos(self, vtpos):
        self._db_vtpos = None

    def __set_db_vtmid(self, value):
        self._db_vtmid = value
        self.is_dirty = True
    db_vtmid = property(lambda self: self._db_vtmid, __set_db_vtmid)
    def db_add_vtmid(self, vtmid):
        self._db_vtmid = vtmid
    db_change_vtmid = db_add_vtmid
    def db_delete_vtmid(self, vtmid):
        self._db_vtmid = None

    def __set_db_pos(self, value):
        self._db_pos = value
        self.is_dirty = True
    db_pos = property(lambda self: self._db_pos, __set_db_pos)
    def db_add_pos(self, pos):
        self._db_pos = pos
    db_change_pos = db_add_pos
    def db_delete_pos(self, pos):
        self._db_pos = None

    def __set_db_type(self, value):
        self._db_type = value
        self.is_dirty = True
    db_type = property(lambda self: self._db_type, __set_db_type)
    def db_add_type(self, type):
        self._db_type = type
    db_change_type = db_add_type
    def db_delete_type(self, type):
        self._db_type = None

    def __set_db_val(self, value):
        self._db_val = value
        self.is_dirty = True
    db_val = property(lambda self: self._db_val, __set_db_val)
    def db_add_val(self, val):
        self._db_val = val
    db_change_val = db_add_val
    def db_delete_val(self, val):
        self._db_val = None

    def __set_db_minVal(self, value):
        self._db_minVal = value
        self.is_dirty = True
    db_minVal = property(lambda self: self._db_minVal, __set_db_minVal)
    def db_add_minVal(self, minVal):
        self._db_minVal = minVal
    db_change_minVal = db_add_minVal
    def db_delete_minVal(self, minVal):
        self._db_minVal = None

    def __set_db_maxVal(self, value):
        self._db_maxVal = value
        self.is_dirty = True
    db_maxVal = property(lambda self: self._db_maxVal, __set_db_maxVal)
    def db_add_maxVal(self, maxVal):
        self._db_maxVal = maxVal
    db_change_maxVal = db_add_maxVal
    def db_delete_maxVal(self, maxVal):
        self._db_maxVal = None

    def __set_db_stepSize(self, value):
        self._db_stepSize = value
        self.is_dirty = True
    db_stepSize = property(lambda self: self._db_stepSize, __set_db_stepSize)
    def db_add_stepSize(self, stepSize):
        self._db_stepSize = stepSize
    db_change_stepSize = db_add_stepSize
    def db_delete_stepSize(self, stepSize):
        self._db_stepSize = None

    def __set_db_strvaluelist(self, value):
        self._db_strvaluelist = value
        self.is_dirty = True
    db_strvaluelist = property(lambda self: self._db_strvaluelist, __set_db_strvaluelist)
    def db_add_strvaluelist(self, strvaluelist):
        self._db_strvaluelist = strvaluelist
    db_change_strvaluelist = db_add_strvaluelist
    def db_delete_strvaluelist(self, strvaluelist):
        self._db_strvaluelist = None

    def __set_db_widget(self, value):
        self._db_widget = value
        self.is_dirty = True
    db_widget = property(lambda self: self._db_widget, __set_db_widget)
    def db_add_widget(self, widget):
        self._db_widget = widget
    db_change_widget = db_add_widget
    def db_delete_widget(self, widget):
        self._db_widget = None

    def __set_db_seq(self, value):
        self._db_seq = value
        self.is_dirty = True
    db_seq = property(lambda self: self._db_seq, __set_db_seq)
    def db_add_seq(self, seq):
        self._db_seq = seq
    db_change_seq = db_add_seq
    def db_delete_seq(self, seq):
        self._db_seq = None

    def __set_db_parent(self, value):
        self._db_parent = value
        self.is_dirty = True
    db_parent = property(lambda self: self._db_parent, __set_db_parent)
    def db_add_parent(self, parent):
        self._db_parent = parent
    db_change_parent = db_add_parent
    def db_delete_parent(self, parent):
        self._db_parent = None

    def getPrimaryKey(self):
        return self._db_id
class DBMashup(object):
    """Generated persistence wrapper for a mashup definition.

    Scalar fields are stored under _db_-prefixed slots and exposed through
    db_* properties whose setters mark the object dirty.  The aliases child
    collection is mirrored by an id -> alias index (db_aliases_id_index);
    children removed from a persisted mashup are parked in
    db_deleted_aliases until the next database flush.
    """
    vtType = 'mashup'
    def __init__(self, id=None, name=None, version=None, aliases=None, type=None, vtid=None, layout=None, geometry=None, has_seq=None):
        self._db_id = id
        self._db_name = name
        self._db_version = version
        self.db_deleted_aliases = []
        self.db_aliases_id_index = {}
        if aliases is None:
            self._db_aliases = []
        else:
            self._db_aliases = aliases
        for v in self._db_aliases:
            self.db_aliases_id_index[v.db_id] = v
        self._db_type = type
        self._db_vtid = vtid
        self._db_layout = layout
        self._db_geometry = geometry
        self._db_has_seq = has_seq
        # New objects start out dirty (unsaved) and new (no DB row yet).
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() support: delegate to do_copy with default arguments.
        return DBMashup.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a deep copy (alias children included) of this mashup.

        When new_ids is True, fresh ids are drawn from id_scope and every
        (old -> new) pair is recorded in id_remap; otherwise the dirty/new
        flags are carried over unchanged.
        """
        cp = DBMashup(id=self._db_id,
                      name=self._db_name,
                      version=self._db_version,
                      type=self._db_type,
                      vtid=self._db_vtid,
                      layout=self._db_layout,
                      geometry=self._db_geometry,
                      has_seq=self._db_has_seq)
        if self._db_aliases is None:
            cp._db_aliases = []
        else:
            cp._db_aliases = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_aliases]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # BUG FIX: the original consulted id_remap unconditionally, so a
        # plain copy (do_copy() / copy.copy(), where id_remap is None)
        # raised TypeError on the membership test.  Guard against None.
        if id_remap is not None and ('vistrail', self._db_vtid) in id_remap:
            cp._db_vtid = id_remap[('vistrail', self._db_vtid)]
        # recreate indices and set flags
        cp.db_aliases_id_index = dict((v.db_id, v) for v in cp._db_aliases)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema version.

        Per-field override callables registered in trans_dict under this
        class name take precedence; otherwise non-None values are copied
        over.  Alias children (live and deleted) are translated recursively.
        """
        if new_obj is None:
            new_obj = DBMashup()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'aliases' in class_dict:
            res = class_dict['aliases'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_alias(obj)
        elif hasattr(old_obj, 'db_aliases') and old_obj.db_aliases is not None:
            for obj in old_obj.db_aliases:
                new_obj.db_add_alias(DBMashupAlias.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_aliases') and hasattr(new_obj, 'db_deleted_aliases'):
            for obj in old_obj.db_deleted_aliases:
                n_obj = DBMashupAlias.update_version(obj, trans_dict)
                new_obj.db_deleted_aliases.append(n_obj)
        if 'type' in class_dict:
            res = class_dict['type'](old_obj, trans_dict)
            new_obj.db_type = res
        elif hasattr(old_obj, 'db_type') and old_obj.db_type is not None:
            new_obj.db_type = old_obj.db_type
        if 'vtid' in class_dict:
            res = class_dict['vtid'](old_obj, trans_dict)
            new_obj.db_vtid = res
        elif hasattr(old_obj, 'db_vtid') and old_obj.db_vtid is not None:
            new_obj.db_vtid = old_obj.db_vtid
        if 'layout' in class_dict:
            res = class_dict['layout'](old_obj, trans_dict)
            new_obj.db_layout = res
        elif hasattr(old_obj, 'db_layout') and old_obj.db_layout is not None:
            new_obj.db_layout = old_obj.db_layout
        if 'geometry' in class_dict:
            res = class_dict['geometry'](old_obj, trans_dict)
            new_obj.db_geometry = res
        elif hasattr(old_obj, 'db_geometry') and old_obj.db_geometry is not None:
            new_obj.db_geometry = old_obj.db_geometry
        if 'has_seq' in class_dict:
            res = class_dict['has_seq'](old_obj, trans_dict)
            new_obj.db_has_seq = res
        elif hasattr(old_obj, 'db_has_seq') and old_obj.db_has_seq is not None:
            new_obj.db_has_seq = old_obj.db_has_seq
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent-type, parent-id) triples for the subtree.

        When orphan is True the visited alias children are removed from
        this mashup after being collected.
        """
        children = []
        to_del = []
        for child in self.db_aliases:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_alias(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return deleted alias children; clear the parking list if remove."""
        children = []
        children.extend(self.db_deleted_aliases)
        if remove:
            self.db_deleted_aliases = []
        return children
    def has_changes(self):
        # Dirty at this level, or any alias child reports changes.
        if self.is_dirty:
            return True
        for child in self._db_aliases:
            if child.has_changes():
                return True
        return False
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        # Assignment through the property marks the mashup dirty.
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        # add/change/delete bypass the property; is_dirty is left untouched.
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    def __get_db_aliases(self):
        return self._db_aliases
    def __set_db_aliases(self, aliases):
        # NOTE(review): replacing the list does not rebuild
        # db_aliases_id_index — presumably callers use db_add_alias /
        # db_change_alias to keep the index in sync; confirm.
        self._db_aliases = aliases
        self.is_dirty = True
    db_aliases = property(__get_db_aliases, __set_db_aliases)
    def db_get_aliases(self):
        return self._db_aliases
    def db_add_alias(self, alias):
        # Append a child and keep the id -> alias index in sync.
        self.is_dirty = True
        self._db_aliases.append(alias)
        self.db_aliases_id_index[alias.db_id] = alias
    def db_change_alias(self, alias):
        # Replace the alias whose db_id matches, in place; append if absent.
        self.is_dirty = True
        found = False
        for i, existing in enumerate(self._db_aliases):
            if existing.db_id == alias.db_id:
                self._db_aliases[i] = alias
                found = True
                break
        if not found:
            self._db_aliases.append(alias)
        self.db_aliases_id_index[alias.db_id] = alias
    def db_delete_alias(self, alias):
        # Remove the alias whose db_id matches; children that already exist
        # in the database (is_new False) are parked in db_deleted_aliases.
        self.is_dirty = True
        for i, existing in enumerate(self._db_aliases):
            if existing.db_id == alias.db_id:
                if not existing.is_new:
                    self.db_deleted_aliases.append(existing)
                del self._db_aliases[i]
                break
        del self.db_aliases_id_index[alias.db_id]
    def db_get_alias(self, key):
        # Linear scan by db_id; returns None when no child matches.
        for existing in self._db_aliases:
            if existing.db_id == key:
                return existing
        return None
    def db_get_alias_by_id(self, key):
        # O(1) lookup via the id index; raises KeyError on a missing id.
        return self.db_aliases_id_index[key]
    def db_has_alias_with_id(self, key):
        return key in self.db_aliases_id_index
    def __get_db_type(self):
        return self._db_type
    def __set_db_type(self, type):
        self._db_type = type
        self.is_dirty = True
    db_type = property(__get_db_type, __set_db_type)
    def db_add_type(self, type):
        self._db_type = type
    def db_change_type(self, type):
        self._db_type = type
    def db_delete_type(self, type):
        self._db_type = None
    def __get_db_vtid(self):
        return self._db_vtid
    def __set_db_vtid(self, vtid):
        self._db_vtid = vtid
        self.is_dirty = True
    db_vtid = property(__get_db_vtid, __set_db_vtid)
    def db_add_vtid(self, vtid):
        self._db_vtid = vtid
    def db_change_vtid(self, vtid):
        self._db_vtid = vtid
    def db_delete_vtid(self, vtid):
        self._db_vtid = None
    def __get_db_layout(self):
        return self._db_layout
    def __set_db_layout(self, layout):
        self._db_layout = layout
        self.is_dirty = True
    db_layout = property(__get_db_layout, __set_db_layout)
    def db_add_layout(self, layout):
        self._db_layout = layout
    def db_change_layout(self, layout):
        self._db_layout = layout
    def db_delete_layout(self, layout):
        self._db_layout = None
    def __get_db_geometry(self):
        return self._db_geometry
    def __set_db_geometry(self, geometry):
        self._db_geometry = geometry
        self.is_dirty = True
    db_geometry = property(__get_db_geometry, __set_db_geometry)
    def db_add_geometry(self, geometry):
        self._db_geometry = geometry
    def db_change_geometry(self, geometry):
        self._db_geometry = geometry
    def db_delete_geometry(self, geometry):
        self._db_geometry = None
    def __get_db_has_seq(self):
        return self._db_has_seq
    def __set_db_has_seq(self, has_seq):
        self._db_has_seq = has_seq
        self.is_dirty = True
    db_has_seq = property(__get_db_has_seq, __set_db_has_seq)
    def db_add_has_seq(self, has_seq):
        self._db_has_seq = has_seq
    def db_change_has_seq(self, has_seq):
        self._db_has_seq = has_seq
    def db_delete_has_seq(self, has_seq):
        self._db_has_seq = None
    def getPrimaryKey(self):
        return self._db_id
class DBMachine(object):
    """Generated persistence wrapper describing the machine an execution
    ran on (name, os, architecture, processor, ram)."""
    vtType = 'machine'
    def __init__(self, id=None, name=None, os=None, architecture=None, processor=None, ram=None):
        # Scalar machine-description fields, stored under _db_-prefixed slots.
        self._db_id = id
        self._db_name = name
        self._db_os = os
        self._db_architecture = architecture
        self._db_processor = processor
        self._db_ram = ram
        # New objects start out dirty (unsaved) and new (no DB row yet).
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() support: delegate to do_copy with default arguments.
        return DBMachine.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBMachine(id=self._db_id,
name=self._db_name,
os=self._db_os,
architecture=self._db_architecture,
processor=self._db_processor,
ram=self._db_ram)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
if hasattr(self, 'db_vistrailId') and ('vistrail', self._db_vistrailId) in id_remap:
cp._db_vistrailId = id_remap[('vistrail', self._db_vistrailId)]
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBMachine()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'os' in class_dict:
res = class_dict['os'](old_obj, trans_dict)
new_obj.db_os = res
elif hasattr(old_obj, 'db_os') and old_obj.db_os is not None:
new_obj.db_os = old_obj.db_os
if 'architecture' in class_dict:
res = class_dict['architecture'](old_obj, trans_dict)
new_obj.db_architecture = res
elif hasattr(old_obj, 'db_architecture') and old_obj.db_architecture is not None:
new_obj.db_architecture = old_obj.db_architecture
if 'processor' in class_dict:
res = class_dict['processor'](old_obj, trans_dict)
new_obj.db_processor = res
elif hasattr(old_obj, 'db_processor') and old_obj.db_processor is not None:
new_obj.db_processor = old_obj.db_processor
if 'ram' in class_dict:
res = class_dict['ram'](old_obj, trans_dict)
new_obj.db_ram = res
elif hasattr(old_obj, 'db_ram') and old_obj.db_ram is not None:
new_obj.db_ram = old_obj.db_ram
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
return [(self, parent[0], parent[1])]
def db_deleted_children(self, remove=False):
children = []
return children
def has_changes(self):
if self.is_dirty:
return True
return False
    # --- generated scalar accessors -------------------------------------
    # Each column gets the same five entry points:
    #   db_<field> property    - read/write; writing marks the object dirty
    #   db_add/change_<field>  - raw writes that do NOT touch the dirty flag
    #   db_delete_<field>      - clears the value (the argument is ignored)
    # -- 'id' column --
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    # -- 'name' column --
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    # -- 'os' column --
    def __get_db_os(self):
        return self._db_os
    def __set_db_os(self, os):
        self._db_os = os
        self.is_dirty = True
    db_os = property(__get_db_os, __set_db_os)
    def db_add_os(self, os):
        self._db_os = os
    def db_change_os(self, os):
        self._db_os = os
    def db_delete_os(self, os):
        self._db_os = None
    # -- 'architecture' column --
    def __get_db_architecture(self):
        return self._db_architecture
    def __set_db_architecture(self, architecture):
        self._db_architecture = architecture
        self.is_dirty = True
    db_architecture = property(__get_db_architecture, __set_db_architecture)
    def db_add_architecture(self, architecture):
        self._db_architecture = architecture
    def db_change_architecture(self, architecture):
        self._db_architecture = architecture
    def db_delete_architecture(self, architecture):
        self._db_architecture = None
    # -- 'processor' column --
    def __get_db_processor(self):
        return self._db_processor
    def __set_db_processor(self, processor):
        self._db_processor = processor
        self.is_dirty = True
    db_processor = property(__get_db_processor, __set_db_processor)
    def db_add_processor(self, processor):
        self._db_processor = processor
    def db_change_processor(self, processor):
        self._db_processor = processor
    def db_delete_processor(self, processor):
        self._db_processor = None
    # -- 'ram' column --
    def __get_db_ram(self):
        return self._db_ram
    def __set_db_ram(self, ram):
        self._db_ram = ram
        self.is_dirty = True
    db_ram = property(__get_db_ram, __set_db_ram)
    def db_add_ram(self, ram):
        self._db_ram = ram
    def db_change_ram(self, ram):
        self._db_ram = ram
    def db_delete_ram(self, ram):
        self._db_ram = None
    # The primary key is the 'id' column.
    def getPrimaryKey(self):
        return self._db_id
class DBConfigFloat(object):
    """Persistence object wrapping a single float configuration value."""
    vtType = 'config_float'
    def __init__(self, value=None):
        self._db_value = value
        # Freshly built objects start out new and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBConfigFloat.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Duplicate self; with new_ids, allocate a fresh id through
        id_scope and record the old->new mapping in id_remap."""
        cp = DBConfigFloat(value=self._db_value)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            # Some vtTypes alias another type's id space via id_scope.remap.
            key_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(key_type, self.db_id)] = fresh
            cp.db_id = fresh
        else:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Convert old_obj to the current schema version; trans_dict may
        supply a per-field converter callable."""
        if new_obj is None:
            new_obj = DBConfigFloat()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        if 'value' in class_dict:
            new_obj.db_value = class_dict['value'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_value', None) is not None:
            new_obj.db_value = old_obj.db_value
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only "child" reported is self.
        parent_type, parent_id = parent
        return [(self, parent_type, parent_id)]
    def db_deleted_children(self, remove=False):
        return []
    def has_changes(self):
        return bool(self.is_dirty)
    # 'value' accessors: property write marks dirty; add/change/delete do not.
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value): self._db_value = value
    def db_change_value(self, value): self._db_value = value
    def db_delete_value(self, value): self._db_value = None
class DBOther(object):
    """Generic key/value persistence record with its own primary id."""
    vtType = 'other'
    def __init__(self, id=None, key=None, value=None):
        self._db_id = id
        self._db_key = key
        self._db_value = value
        # Freshly built objects start out new and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOther.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Duplicate self; with new_ids, allocate a fresh id through
        id_scope and record the old->new mapping in id_remap."""
        cp = DBOther(id=self._db_id, key=self._db_key, value=self._db_value)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            # Some vtTypes alias another type's id space via id_scope.remap.
            key_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(key_type, self.db_id)] = fresh
            cp.db_id = fresh
        else:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Convert old_obj to the current schema version; trans_dict may
        supply per-field converter callables."""
        if new_obj is None:
            new_obj = DBOther()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        # All columns are scalars, so one generic loop suffices.
        for field in ('id', 'key', 'value'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only "child" reported is self.
        parent_type, parent_id = parent
        return [(self, parent_type, parent_id)]
    def db_deleted_children(self, remove=False):
        return []
    def has_changes(self):
        return bool(self.is_dirty)
    # --- scalar accessors: property write marks dirty; add/change/delete do not
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id): self._db_id = id
    def db_change_id(self, id): self._db_id = id
    def db_delete_id(self, id): self._db_id = None
    def __get_db_key(self):
        return self._db_key
    def __set_db_key(self, key):
        self._db_key = key
        self.is_dirty = True
    db_key = property(__get_db_key, __set_db_key)
    def db_add_key(self, key): self._db_key = key
    def db_change_key(self, key): self._db_key = key
    def db_delete_key(self, key): self._db_key = None
    def __get_db_value(self):
        return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value): self._db_value = value
    def db_change_value(self, value): self._db_value = value
    def db_delete_value(self, value): self._db_value = None
    def getPrimaryKey(self):
        return self._db_id
class DBRefProvActivity(object):
    """Reference (by id) to a prov_activity object."""
    vtType = 'ref_prov_activity'
    def __init__(self, prov_ref=None):
        self._db_prov_ref = prov_ref
        # Freshly built objects start out new and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBRefProvActivity.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Duplicate self; with new_ids, allocate a fresh id and retarget
        the prov_activity reference through id_remap when a mapping exists."""
        cp = DBRefProvActivity(prov_ref=self._db_prov_ref)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            # Some vtTypes alias another type's id space via id_scope.remap.
            key_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(key_type, self.db_id)] = fresh
            cp.db_id = fresh
            # Follow the remapped id of the referenced prov_activity.
            ref_key = ('prov_activity', self._db_prov_ref)
            if hasattr(self, 'db_prov_ref') and ref_key in id_remap:
                cp._db_prov_ref = id_remap[ref_key]
        else:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Convert old_obj to the current schema version."""
        if new_obj is None:
            new_obj = DBRefProvActivity()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        if 'prov_ref' in class_dict:
            new_obj.db_prov_ref = class_dict['prov_ref'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_prov_ref', None) is not None:
            new_obj.db_prov_ref = old_obj.db_prov_ref
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only "child" reported is self.
        parent_type, parent_id = parent
        return [(self, parent_type, parent_id)]
    def db_deleted_children(self, remove=False):
        return []
    def has_changes(self):
        return bool(self.is_dirty)
    # 'prov_ref' accessors: property write marks dirty; add/change/delete do not.
    def __get_db_prov_ref(self):
        return self._db_prov_ref
    def __set_db_prov_ref(self, prov_ref):
        self._db_prov_ref = prov_ref
        self.is_dirty = True
    db_prov_ref = property(__get_db_prov_ref, __set_db_prov_ref)
    def db_add_prov_ref(self, prov_ref): self._db_prov_ref = prov_ref
    def db_change_prov_ref(self, prov_ref): self._db_prov_ref = prov_ref
    def db_delete_prov_ref(self, prov_ref): self._db_prov_ref = None
class DBAbstraction(object):
    """Persistence object for a subworkflow (abstraction) module.

    Owns an optional location child plus lists of functions and
    annotations, each mirrored into dict indices for O(1) lookup.
    """
    vtType = 'abstraction'
    def __init__(self, id=None, cache=None, name=None, namespace=None, package=None, version=None, internal_version=None, location=None, functions=None, annotations=None):
        self._db_id = id
        self._db_cache = cache
        self._db_name = name
        self._db_namespace = namespace
        self._db_package = package
        self._db_version = version
        self._db_internal_version = internal_version
        self.db_deleted_location = []
        self._db_location = location
        # Child functions: list plus id->object index (list is aliased, not copied).
        self.db_deleted_functions = []
        self._db_functions = functions if functions is not None else []
        self.db_functions_id_index = dict((f.db_id, f) for f in self._db_functions)
        # Child annotations: list plus id and key indices.
        self.db_deleted_annotations = []
        self._db_annotations = annotations if annotations is not None else []
        self.db_annotations_id_index = dict((a.db_id, a) for a in self._db_annotations)
        self.db_annotations_key_index = dict((a.db_key, a) for a in self._db_annotations)
        # Freshly built objects start out new and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBAbstraction.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy self and all owned children; with new_ids, allocate a fresh
        id through id_scope and record the old->new mapping in id_remap."""
        cp = DBAbstraction(id=self._db_id,
                           cache=self._db_cache,
                           name=self._db_name,
                           namespace=self._db_namespace,
                           package=self._db_package,
                           version=self._db_version,
                           internal_version=self._db_internal_version)
        if self._db_location is not None:
            cp._db_location = self._db_location.do_copy(new_ids, id_scope, id_remap)
        cp._db_functions = ([f.do_copy(new_ids, id_scope, id_remap)
                             for f in self._db_functions]
                            if self._db_functions is not None else [])
        cp._db_annotations = ([a.do_copy(new_ids, id_scope, id_remap)
                               for a in self._db_annotations]
                              if self._db_annotations is not None else [])
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            # Some vtTypes alias another type's id space via id_scope.remap.
            key_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(key_type, self.db_id)] = fresh
            cp.db_id = fresh
        # Rebuild the lookup indices over the copied children.
        cp.db_functions_id_index = dict((f.db_id, f) for f in cp._db_functions)
        cp.db_annotations_id_index = dict((a.db_id, a) for a in cp._db_annotations)
        cp.db_annotations_key_index = dict((a.db_key, a) for a in cp._db_annotations)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Convert old_obj (an older schema version) into a current
        DBAbstraction, applying any converters given in trans_dict."""
        if new_obj is None:
            new_obj = DBAbstraction()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        # Plain scalar columns are copied (or converted) generically.
        for field in ('id', 'cache', 'name', 'namespace', 'package',
                      'version', 'internal_version'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        # Single child: location (live and pending-delete copies).
        if 'location' in class_dict:
            new_obj.db_location = class_dict['location'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_location', None) is not None:
            new_obj.db_add_location(DBLocation.update_version(old_obj.db_location, trans_dict))
        if hasattr(old_obj, 'db_deleted_location') and hasattr(new_obj, 'db_deleted_location'):
            for child in old_obj.db_deleted_location:
                new_obj.db_deleted_location.append(DBLocation.update_version(child, trans_dict))
        # Child list: functions.
        if 'functions' in class_dict:
            for child in class_dict['functions'](old_obj, trans_dict):
                new_obj.db_add_function(child)
        elif getattr(old_obj, 'db_functions', None) is not None:
            for child in old_obj.db_functions:
                new_obj.db_add_function(DBFunction.update_version(child, trans_dict))
        if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
            for child in old_obj.db_deleted_functions:
                new_obj.db_deleted_functions.append(DBFunction.update_version(child, trans_dict))
        # Child list: annotations.
        if 'annotations' in class_dict:
            for child in class_dict['annotations'](old_obj, trans_dict):
                new_obj.db_add_annotation(child)
        elif getattr(old_obj, 'db_annotations', None) is not None:
            for child in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(child, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for child in old_obj.db_deleted_annotations:
                new_obj.db_deleted_annotations.append(DBAnnotation.update_version(child, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Flatten self and all owned children into (obj, parent_type,
        parent_id) tuples; with orphan, detach the children from self."""
        children = []
        me = (self.vtType, self.db_id)
        if self._db_location is not None:
            children.extend(self._db_location.db_children(me, orphan, for_action))
            if orphan:
                self._db_location = None
        for child_list, deleter in ((self.db_functions, self.db_delete_function),
                                    (self.db_annotations, self.db_delete_annotation)):
            doomed = []
            for child in child_list:
                children.extend(child.db_children(me, orphan, for_action))
                if orphan:
                    doomed.append(child)
            for child in doomed:
                deleter(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        children = (self.db_deleted_location + self.db_deleted_functions
                    + self.db_deleted_annotations)
        if remove:
            self.db_deleted_location = []
            self.db_deleted_functions = []
            self.db_deleted_annotations = []
        return children
    def has_changes(self):
        # Dirty if self or any owned child is dirty.
        if self.is_dirty:
            return True
        if self._db_location is not None and self._db_location.has_changes():
            return True
        return (any(f.has_changes() for f in self._db_functions)
                or any(a.has_changes() for a in self._db_annotations))
    # --- scalar accessors: property write marks dirty; add/change/delete do not
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id): self._db_id = id
    def db_change_id(self, id): self._db_id = id
    def db_delete_id(self, id): self._db_id = None
    def __get_db_cache(self):
        return self._db_cache
    def __set_db_cache(self, cache):
        self._db_cache = cache
        self.is_dirty = True
    db_cache = property(__get_db_cache, __set_db_cache)
    def db_add_cache(self, cache): self._db_cache = cache
    def db_change_cache(self, cache): self._db_cache = cache
    def db_delete_cache(self, cache): self._db_cache = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name): self._db_name = name
    def db_change_name(self, name): self._db_name = name
    def db_delete_name(self, name): self._db_name = None
    def __get_db_namespace(self):
        return self._db_namespace
    def __set_db_namespace(self, namespace):
        self._db_namespace = namespace
        self.is_dirty = True
    db_namespace = property(__get_db_namespace, __set_db_namespace)
    def db_add_namespace(self, namespace): self._db_namespace = namespace
    def db_change_namespace(self, namespace): self._db_namespace = namespace
    def db_delete_namespace(self, namespace): self._db_namespace = None
    def __get_db_package(self):
        return self._db_package
    def __set_db_package(self, package):
        self._db_package = package
        self.is_dirty = True
    db_package = property(__get_db_package, __set_db_package)
    def db_add_package(self, package): self._db_package = package
    def db_change_package(self, package): self._db_package = package
    def db_delete_package(self, package): self._db_package = None
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version): self._db_version = version
    def db_change_version(self, version): self._db_version = version
    def db_delete_version(self, version): self._db_version = None
    def __get_db_internal_version(self):
        return self._db_internal_version
    def __set_db_internal_version(self, internal_version):
        self._db_internal_version = internal_version
        self.is_dirty = True
    db_internal_version = property(__get_db_internal_version, __set_db_internal_version)
    def db_add_internal_version(self, internal_version): self._db_internal_version = internal_version
    def db_change_internal_version(self, internal_version): self._db_internal_version = internal_version
    def db_delete_internal_version(self, internal_version): self._db_internal_version = None
    # --- location child accessors ---
    def __get_db_location(self):
        return self._db_location
    def __set_db_location(self, location):
        self._db_location = location
        self.is_dirty = True
    db_location = property(__get_db_location, __set_db_location)
    def db_add_location(self, location): self._db_location = location
    def db_change_location(self, location): self._db_location = location
    def db_delete_location(self, location):
        # Keep the old location for the store layer unless it was never persisted.
        if not self.is_new:
            self.db_deleted_location.append(self._db_location)
        self._db_location = None
    # --- functions child-list accessors ---
    def __get_db_functions(self):
        return self._db_functions
    def __set_db_functions(self, functions):
        self._db_functions = functions
        self.is_dirty = True
    db_functions = property(__get_db_functions, __set_db_functions)
    def db_get_functions(self):
        return self._db_functions
    def db_add_function(self, function):
        self.is_dirty = True
        self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_change_function(self, function):
        # Replace the function with a matching id, or append if absent.
        self.is_dirty = True
        for pos, existing in enumerate(self._db_functions):
            if existing.db_id == function.db_id:
                self._db_functions[pos] = function
                break
        else:
            self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_delete_function(self, function):
        # Remove by id, remembering the object for the store layer unless new.
        self.is_dirty = True
        for pos, existing in enumerate(self._db_functions):
            if existing.db_id == function.db_id:
                if not existing.is_new:
                    self.db_deleted_functions.append(existing)
                del self._db_functions[pos]
                break
        del self.db_functions_id_index[function.db_id]
    def db_get_function(self, key):
        for f in self._db_functions:
            if f.db_id == key:
                return f
        return None
    def db_get_function_by_id(self, key):
        return self.db_functions_id_index[key]
    def db_has_function_with_id(self, key):
        return key in self.db_functions_id_index
    # --- annotations child-list accessors (indexed by id and by key) ---
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        return self._db_annotations
    def db_add_annotation(self, annotation):
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_change_annotation(self, annotation):
        # Replace the annotation with a matching id, or append if absent.
        self.is_dirty = True
        for pos, existing in enumerate(self._db_annotations):
            if existing.db_id == annotation.db_id:
                self._db_annotations[pos] = annotation
                break
        else:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
        self.db_annotations_key_index[annotation.db_key] = annotation
    def db_delete_annotation(self, annotation):
        # Remove by id, remembering the object for the store layer unless new.
        self.is_dirty = True
        for pos, existing in enumerate(self._db_annotations):
            if existing.db_id == annotation.db_id:
                if not existing.is_new:
                    self.db_deleted_annotations.append(existing)
                del self._db_annotations[pos]
                break
        del self.db_annotations_id_index[annotation.db_id]
        del self.db_annotations_key_index[annotation.db_key]
    def db_get_annotation(self, key):
        for a in self._db_annotations:
            if a.db_id == key:
                return a
        return None
    def db_get_annotation_by_id(self, key):
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        return key in self.db_annotations_id_index
    def db_get_annotation_by_key(self, key):
        return self.db_annotations_key_index[key]
    def db_has_annotation_with_key(self, key):
        return key in self.db_annotations_key_index
    def getPrimaryKey(self):
        return self._db_id
class DBProvAgent(object):
    """PROV agent record: scalar metadata about the executing machine."""
    vtType = 'prov_agent'
    # All columns are scalars; do_copy/update_version treat them uniformly.
    _FIELDS = ('id', 'vt_id', 'prov_type', 'prov_label', 'vt_machine_os',
               'vt_machine_architecture', 'vt_machine_processor',
               'vt_machine_ram')
    def __init__(self, id=None, vt_id=None, prov_type=None, prov_label=None, vt_machine_os=None, vt_machine_architecture=None, vt_machine_processor=None, vt_machine_ram=None):
        self._db_id = id
        self._db_vt_id = vt_id
        self._db_prov_type = prov_type
        self._db_prov_label = prov_label
        self._db_vt_machine_os = vt_machine_os
        self._db_vt_machine_architecture = vt_machine_architecture
        self._db_vt_machine_processor = vt_machine_processor
        self._db_vt_machine_ram = vt_machine_ram
        # Freshly built objects start out new and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBProvAgent.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Duplicate self; with new_ids, allocate a fresh id through
        id_scope and record the old->new mapping in id_remap."""
        kwargs = dict((f, getattr(self, '_db_' + f)) for f in self._FIELDS)
        cp = DBProvAgent(**kwargs)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            # Some vtTypes alias another type's id space via id_scope.remap.
            key_type = id_scope.remap.get(self.vtType, self.vtType)
            id_remap[(key_type, self.db_id)] = fresh
            cp.db_id = fresh
        else:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Convert old_obj to the current schema version; trans_dict may
        supply per-field converter callables."""
        if new_obj is None:
            new_obj = DBProvAgent()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        for field in DBProvAgent._FIELDS:
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Leaf object: the only "child" reported is self.
        parent_type, parent_id = parent
        return [(self, parent_type, parent_id)]
    def db_deleted_children(self, remove=False):
        return []
    def has_changes(self):
        return bool(self.is_dirty)
    # --- scalar accessors: property write marks dirty; add/change/delete do not
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id): self._db_id = id
    def db_change_id(self, id): self._db_id = id
    def db_delete_id(self, id): self._db_id = None
    def __get_db_vt_id(self):
        return self._db_vt_id
    def __set_db_vt_id(self, vt_id):
        self._db_vt_id = vt_id
        self.is_dirty = True
    db_vt_id = property(__get_db_vt_id, __set_db_vt_id)
    def db_add_vt_id(self, vt_id): self._db_vt_id = vt_id
    def db_change_vt_id(self, vt_id): self._db_vt_id = vt_id
    def db_delete_vt_id(self, vt_id): self._db_vt_id = None
    def __get_db_prov_type(self):
        return self._db_prov_type
    def __set_db_prov_type(self, prov_type):
        self._db_prov_type = prov_type
        self.is_dirty = True
    db_prov_type = property(__get_db_prov_type, __set_db_prov_type)
    def db_add_prov_type(self, prov_type): self._db_prov_type = prov_type
    def db_change_prov_type(self, prov_type): self._db_prov_type = prov_type
    def db_delete_prov_type(self, prov_type): self._db_prov_type = None
    def __get_db_prov_label(self):
        return self._db_prov_label
    def __set_db_prov_label(self, prov_label):
        self._db_prov_label = prov_label
        self.is_dirty = True
    db_prov_label = property(__get_db_prov_label, __set_db_prov_label)
    def db_add_prov_label(self, prov_label): self._db_prov_label = prov_label
    def db_change_prov_label(self, prov_label): self._db_prov_label = prov_label
    def db_delete_prov_label(self, prov_label): self._db_prov_label = None
    def __get_db_vt_machine_os(self):
        return self._db_vt_machine_os
    def __set_db_vt_machine_os(self, vt_machine_os):
        self._db_vt_machine_os = vt_machine_os
        self.is_dirty = True
    db_vt_machine_os = property(__get_db_vt_machine_os, __set_db_vt_machine_os)
    def db_add_vt_machine_os(self, vt_machine_os): self._db_vt_machine_os = vt_machine_os
    def db_change_vt_machine_os(self, vt_machine_os): self._db_vt_machine_os = vt_machine_os
    def db_delete_vt_machine_os(self, vt_machine_os): self._db_vt_machine_os = None
    def __get_db_vt_machine_architecture(self):
        return self._db_vt_machine_architecture
    def __set_db_vt_machine_architecture(self, vt_machine_architecture):
        self._db_vt_machine_architecture = vt_machine_architecture
        self.is_dirty = True
    db_vt_machine_architecture = property(__get_db_vt_machine_architecture, __set_db_vt_machine_architecture)
    def db_add_vt_machine_architecture(self, vt_machine_architecture): self._db_vt_machine_architecture = vt_machine_architecture
    def db_change_vt_machine_architecture(self, vt_machine_architecture): self._db_vt_machine_architecture = vt_machine_architecture
    def db_delete_vt_machine_architecture(self, vt_machine_architecture): self._db_vt_machine_architecture = None
    def __get_db_vt_machine_processor(self):
        return self._db_vt_machine_processor
    def __set_db_vt_machine_processor(self, vt_machine_processor):
        self._db_vt_machine_processor = vt_machine_processor
        self.is_dirty = True
    db_vt_machine_processor = property(__get_db_vt_machine_processor, __set_db_vt_machine_processor)
    def db_add_vt_machine_processor(self, vt_machine_processor): self._db_vt_machine_processor = vt_machine_processor
    def db_change_vt_machine_processor(self, vt_machine_processor): self._db_vt_machine_processor = vt_machine_processor
    def db_delete_vt_machine_processor(self, vt_machine_processor): self._db_vt_machine_processor = None
    def __get_db_vt_machine_ram(self):
        return self._db_vt_machine_ram
    def __set_db_vt_machine_ram(self, vt_machine_ram):
        self._db_vt_machine_ram = vt_machine_ram
        self.is_dirty = True
    db_vt_machine_ram = property(__get_db_vt_machine_ram, __set_db_vt_machine_ram)
    def db_add_vt_machine_ram(self, vt_machine_ram): self._db_vt_machine_ram = vt_machine_ram
    def db_change_vt_machine_ram(self, vt_machine_ram): self._db_vt_machine_ram = vt_machine_ram
    def db_delete_vt_machine_ram(self, vt_machine_ram): self._db_vt_machine_ram = None
    def getPrimaryKey(self):
        return self._db_id
class DBMashuptrail(object):
    """Versioned trail of mashup actions, with annotations and per-action
    annotations, each mirrored into dict indices for fast lookup."""
    vtType = 'mashuptrail'
    def __init__(self, id=None, name=None, version=None, vtVersion=None, last_modified=None, actions=None, annotations=None, actionAnnotations=None):
        """Store the scalar columns and index the supplied child lists.

        Child lists are aliased (not copied); None means an empty list.
        """
        self._db_id = id
        self._db_name = name
        self._db_version = version
        self._db_vtVersion = vtVersion
        self._db_last_modified = last_modified
        # Actions, indexed by their id.
        self.db_deleted_actions = []
        self.db_actions_id_index = {}
        if actions is None:
            self._db_actions = []
        else:
            self._db_actions = actions
        for v in self._db_actions:
            self.db_actions_id_index[v.db_id] = v
        # Annotations, indexed by id and by key.
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        self.db_annotations_key_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
        for v in self._db_annotations:
            self.db_annotations_id_index[v.db_id] = v
            self.db_annotations_key_index[v.db_key] = v
        # Action annotations, indexed by id, by (action_id, key) and by
        # (key, value).
        self.db_deleted_actionAnnotations = []
        self.db_actionAnnotations_id_index = {}
        self.db_actionAnnotations_action_id_index = {}
        self.db_actionAnnotations_key_index = {}
        if actionAnnotations is None:
            self._db_actionAnnotations = []
        else:
            self._db_actionAnnotations = actionAnnotations
        for v in self._db_actionAnnotations:
            self.db_actionAnnotations_id_index[v.db_id] = v
            self.db_actionAnnotations_action_id_index[(v.db_action_id,v.db_key)] = v
            self.db_actionAnnotations_key_index[(v.db_key,v.db_value)] = v
        # Freshly built objects start out new and unsaved.
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        # copy.copy() support: delegate to do_copy with default arguments.
        return DBMashuptrail.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBMashuptrail(id=self._db_id,
name=self._db_name,
version=self._db_version,
vtVersion=self._db_vtVersion,
last_modified=self._db_last_modified)
if self._db_actions is None:
cp._db_actions = []
else:
cp._db_actions = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actions]
if self._db_annotations is None:
cp._db_annotations = []
else:
cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
if self._db_actionAnnotations is None:
cp._db_actionAnnotations = []
else:
cp._db_actionAnnotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_actionAnnotations]
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
cp.db_actions_id_index = dict((v.db_id, v) for v in cp._db_actions)
cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
cp.db_annotations_key_index = dict((v.db_key, v) for v in cp._db_annotations)
cp.db_actionAnnotations_id_index = dict((v.db_id, v) for v in cp._db_actionAnnotations)
cp.db_actionAnnotations_action_id_index = dict(((v.db_action_id,v.db_key), v) for v in cp._db_actionAnnotations)
cp.db_actionAnnotations_key_index = dict(((v.db_key,v.db_value), v) for v in cp._db_actionAnnotations)
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBMashuptrail()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'name' in class_dict:
res = class_dict['name'](old_obj, trans_dict)
new_obj.db_name = res
elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
new_obj.db_name = old_obj.db_name
if 'version' in class_dict:
res = class_dict['version'](old_obj, trans_dict)
new_obj.db_version = res
elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
new_obj.db_version = old_obj.db_version
if 'vtVersion' in class_dict:
res = class_dict['vtVersion'](old_obj, trans_dict)
new_obj.db_vtVersion = res
elif hasattr(old_obj, 'db_vtVersion') and old_obj.db_vtVersion is not None:
new_obj.db_vtVersion = old_obj.db_vtVersion
if 'last_modified' in class_dict:
res = class_dict['last_modified'](old_obj, trans_dict)
new_obj.db_last_modified = res
elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
new_obj.db_last_modified = old_obj.db_last_modified
if 'actions' in class_dict:
res = class_dict['actions'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_action(obj)
elif hasattr(old_obj, 'db_actions') and old_obj.db_actions is not None:
for obj in old_obj.db_actions:
new_obj.db_add_action(DBMashupAction.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_actions') and hasattr(new_obj, 'db_deleted_actions'):
for obj in old_obj.db_deleted_actions:
n_obj = DBMashupAction.update_version(obj, trans_dict)
new_obj.db_deleted_actions.append(n_obj)
if 'annotations' in class_dict:
res = class_dict['annotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_annotation(obj)
elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
for obj in old_obj.db_annotations:
new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
for obj in old_obj.db_deleted_annotations:
n_obj = DBAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_annotations.append(n_obj)
if 'actionAnnotations' in class_dict:
res = class_dict['actionAnnotations'](old_obj, trans_dict)
for obj in res:
new_obj.db_add_actionAnnotation(obj)
elif hasattr(old_obj, 'db_actionAnnotations') and old_obj.db_actionAnnotations is not None:
for obj in old_obj.db_actionAnnotations:
new_obj.db_add_actionAnnotation(DBMashupActionAnnotation.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_actionAnnotations') and hasattr(new_obj, 'db_deleted_actionAnnotations'):
for obj in old_obj.db_deleted_actionAnnotations:
n_obj = DBMashupActionAnnotation.update_version(obj, trans_dict)
new_obj.db_deleted_actionAnnotations.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
children = []
to_del = []
for child in self.db_actions:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_action(child)
to_del = []
for child in self.db_annotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_annotation(child)
to_del = []
for child in self.db_actionAnnotations:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_actionAnnotation(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_actions)
children.extend(self.db_deleted_annotations)
children.extend(self.db_deleted_actionAnnotations)
if remove:
self.db_deleted_actions = []
self.db_deleted_annotations = []
self.db_deleted_actionAnnotations = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_actions:
if child.has_changes():
return True
for child in self._db_annotations:
if child.has_changes():
return True
for child in self._db_actionAnnotations:
if child.has_changes():
return True
return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_name(self):
return self._db_name
def __set_db_name(self, name):
self._db_name = name
self.is_dirty = True
db_name = property(__get_db_name, __set_db_name)
def db_add_name(self, name):
self._db_name = name
def db_change_name(self, name):
self._db_name = name
def db_delete_name(self, name):
self._db_name = None
def __get_db_version(self):
return self._db_version
def __set_db_version(self, version):
self._db_version = version
self.is_dirty = True
db_version = property(__get_db_version, __set_db_version)
def db_add_version(self, version):
self._db_version = version
def db_change_version(self, version):
self._db_version = version
def db_delete_version(self, version):
self._db_version = None
def __get_db_vtVersion(self):
return self._db_vtVersion
def __set_db_vtVersion(self, vtVersion):
self._db_vtVersion = vtVersion
self.is_dirty = True
db_vtVersion = property(__get_db_vtVersion, __set_db_vtVersion)
def db_add_vtVersion(self, vtVersion):
self._db_vtVersion = vtVersion
def db_change_vtVersion(self, vtVersion):
self._db_vtVersion = vtVersion
def db_delete_vtVersion(self, vtVersion):
self._db_vtVersion = None
def __get_db_last_modified(self):
return self._db_last_modified
def __set_db_last_modified(self, last_modified):
self._db_last_modified = last_modified
self.is_dirty = True
db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
def db_add_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_change_last_modified(self, last_modified):
self._db_last_modified = last_modified
def db_delete_last_modified(self, last_modified):
self._db_last_modified = None
def __get_db_actions(self):
return self._db_actions
def __set_db_actions(self, actions):
self._db_actions = actions
self.is_dirty = True
db_actions = property(__get_db_actions, __set_db_actions)
def db_get_actions(self):
return self._db_actions
def db_add_action(self, action):
self.is_dirty = True
self._db_actions.append(action)
self.db_actions_id_index[action.db_id] = action
def db_change_action(self, action):
self.is_dirty = True
found = False
for i in xrange(len(self._db_actions)):
if self._db_actions[i].db_id == action.db_id:
self._db_actions[i] = action
found = True
break
if not found:
self._db_actions.append(action)
self.db_actions_id_index[action.db_id] = action
def db_delete_action(self, action):
self.is_dirty = True
for i in xrange(len(self._db_actions)):
if self._db_actions[i].db_id == action.db_id:
if not self._db_actions[i].is_new:
self.db_deleted_actions.append(self._db_actions[i])
del self._db_actions[i]
break
del self.db_actions_id_index[action.db_id]
def db_get_action(self, key):
for i in xrange(len(self._db_actions)):
if self._db_actions[i].db_id == key:
return self._db_actions[i]
return None
def db_get_action_by_id(self, key):
return self.db_actions_id_index[key]
def db_has_action_with_id(self, key):
return key in self.db_actions_id_index
def __get_db_annotations(self):
return self._db_annotations
def __set_db_annotations(self, annotations):
self._db_annotations = annotations
self.is_dirty = True
db_annotations = property(__get_db_annotations, __set_db_annotations)
def db_get_annotations(self):
return self._db_annotations
def db_add_annotation(self, annotation):
self.is_dirty = True
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_change_annotation(self, annotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
self._db_annotations[i] = annotation
found = True
break
if not found:
self._db_annotations.append(annotation)
self.db_annotations_id_index[annotation.db_id] = annotation
self.db_annotations_key_index[annotation.db_key] = annotation
def db_delete_annotation(self, annotation):
self.is_dirty = True
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == annotation.db_id:
if not self._db_annotations[i].is_new:
self.db_deleted_annotations.append(self._db_annotations[i])
del self._db_annotations[i]
break
del self.db_annotations_id_index[annotation.db_id]
del self.db_annotations_key_index[annotation.db_key]
def db_get_annotation(self, key):
for i in xrange(len(self._db_annotations)):
if self._db_annotations[i].db_id == key:
return self._db_annotations[i]
return None
def db_get_annotation_by_id(self, key):
return self.db_annotations_id_index[key]
def db_has_annotation_with_id(self, key):
return key in self.db_annotations_id_index
def db_get_annotation_by_key(self, key):
return self.db_annotations_key_index[key]
def db_has_annotation_with_key(self, key):
return key in self.db_annotations_key_index
def __get_db_actionAnnotations(self):
return self._db_actionAnnotations
def __set_db_actionAnnotations(self, actionAnnotations):
self._db_actionAnnotations = actionAnnotations
self.is_dirty = True
db_actionAnnotations = property(__get_db_actionAnnotations, __set_db_actionAnnotations)
def db_get_actionAnnotations(self):
return self._db_actionAnnotations
def db_add_actionAnnotation(self, actionAnnotation):
self.is_dirty = True
self._db_actionAnnotations.append(actionAnnotation)
self.db_actionAnnotations_id_index[actionAnnotation.db_id] = actionAnnotation
self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] = actionAnnotation
self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] = actionAnnotation
def db_change_actionAnnotation(self, actionAnnotation):
self.is_dirty = True
found = False
for i in xrange(len(self._db_actionAnnotations)):
if self._db_actionAnnotations[i].db_id == actionAnnotation.db_id:
self._db_actionAnnotations[i] = actionAnnotation
found = True
break
if not found:
self._db_actionAnnotations.append(actionAnnotation)
self.db_actionAnnotations_id_index[actionAnnotation.db_id] = actionAnnotation
self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)] = actionAnnotation
self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)] = actionAnnotation
def db_delete_actionAnnotation(self, actionAnnotation):
self.is_dirty = True
for i in xrange(len(self._db_actionAnnotations)):
if self._db_actionAnnotations[i].db_id == actionAnnotation.db_id:
if not self._db_actionAnnotations[i].is_new:
self.db_deleted_actionAnnotations.append(self._db_actionAnnotations[i])
del self._db_actionAnnotations[i]
break
del self.db_actionAnnotations_id_index[actionAnnotation.db_id]
del self.db_actionAnnotations_action_id_index[(actionAnnotation.db_action_id,actionAnnotation.db_key)]
try:
del self.db_actionAnnotations_key_index[(actionAnnotation.db_key,actionAnnotation.db_value)]
except KeyError:
pass
def db_get_actionAnnotation(self, key):
for i in xrange(len(self._db_actionAnnotations)):
if self._db_actionAnnotations[i].db_id == key:
return self._db_actionAnnotations[i]
return None
def db_get_actionAnnotation_by_id(self, key):
return self.db_actionAnnotations_id_index[key]
def db_has_actionAnnotation_with_id(self, key):
return key in self.db_actionAnnotations_id_index
def db_get_actionAnnotation_by_action_id(self, key):
return self.db_actionAnnotations_action_id_index[key]
def db_has_actionAnnotation_with_action_id(self, key):
return key in self.db_actionAnnotations_action_id_index
def db_get_actionAnnotation_by_key(self, key):
return self.db_actionAnnotations_key_index[key]
def db_has_actionAnnotation_with_key(self, key):
return key in self.db_actionAnnotations_key_index
def getPrimaryKey(self):
return self._db_id
class DBRegistry(object):
    """Auto-generated persistence object for a 'registry' entity.

    Same generated pattern as the other DB classes in this file
    (Python 2 -- uses ``xrange``): ``_db_*`` fields behind dirtying
    properties, one child collection (packages) with id and
    (identifier, version) secondary indexes, and a
    ``db_deleted_packages`` backlog for removed, persisted children.
    """
    vtType = 'registry'
    def __init__(self, id=None, entity_type=None, version=None, root_descriptor_id=None, name=None, last_modified=None, packages=None):
        self._db_id = id
        self._db_entity_type = entity_type
        self._db_version = version
        self._db_root_descriptor_id = root_descriptor_id
        self._db_name = name
        self._db_last_modified = last_modified
        self.db_deleted_packages = []
        self.db_packages_id_index = {}
        self.db_packages_identifier_index = {}
        if packages is None:
            self._db_packages = []
        else:
            self._db_packages = packages
        # Packages are indexed by id and by (identifier, version).
        for v in self._db_packages:
            self.db_packages_id_index[v.db_id] = v
            self.db_packages_identifier_index[(v.db_identifier,v.db_version)] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBRegistry.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep copy; with new_ids, fresh ids come from id_scope and the
        # old->new mapping is recorded in id_remap.
        cp = DBRegistry(id=self._db_id,
                        entity_type=self._db_entity_type,
                        version=self._db_version,
                        root_descriptor_id=self._db_root_descriptor_id,
                        name=self._db_name,
                        last_modified=self._db_last_modified)
        if self._db_packages is None:
            cp._db_packages = []
        else:
            cp._db_packages = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_packages]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            # root_descriptor_id is a foreign key into the
            # 'module_descriptor' id space; follow the remap if that
            # descriptor was reassigned a new id.
            if hasattr(self, 'db_root_descriptor_id') and ('module_descriptor', self._db_root_descriptor_id) in id_remap:
                cp._db_root_descriptor_id = id_remap[('module_descriptor', self._db_root_descriptor_id)]
        # recreate indices and set flags
        cp.db_packages_id_index = dict((v.db_id, v) for v in cp._db_packages)
        cp.db_packages_identifier_index = dict(((v.db_identifier,v.db_version), v) for v in cp._db_packages)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Schema-migration helper: per-field translator functions in
        # trans_dict[<class name>] take precedence; otherwise old values
        # (or recursively updated children) are carried over.
        if new_obj is None:
            new_obj = DBRegistry()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'entity_type' in class_dict:
            res = class_dict['entity_type'](old_obj, trans_dict)
            new_obj.db_entity_type = res
        elif hasattr(old_obj, 'db_entity_type') and old_obj.db_entity_type is not None:
            new_obj.db_entity_type = old_obj.db_entity_type
        if 'version' in class_dict:
            res = class_dict['version'](old_obj, trans_dict)
            new_obj.db_version = res
        elif hasattr(old_obj, 'db_version') and old_obj.db_version is not None:
            new_obj.db_version = old_obj.db_version
        if 'root_descriptor_id' in class_dict:
            res = class_dict['root_descriptor_id'](old_obj, trans_dict)
            new_obj.db_root_descriptor_id = res
        elif hasattr(old_obj, 'db_root_descriptor_id') and old_obj.db_root_descriptor_id is not None:
            new_obj.db_root_descriptor_id = old_obj.db_root_descriptor_id
        if 'name' in class_dict:
            res = class_dict['name'](old_obj, trans_dict)
            new_obj.db_name = res
        elif hasattr(old_obj, 'db_name') and old_obj.db_name is not None:
            new_obj.db_name = old_obj.db_name
        if 'last_modified' in class_dict:
            res = class_dict['last_modified'](old_obj, trans_dict)
            new_obj.db_last_modified = res
        elif hasattr(old_obj, 'db_last_modified') and old_obj.db_last_modified is not None:
            new_obj.db_last_modified = old_obj.db_last_modified
        if 'packages' in class_dict:
            res = class_dict['packages'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_package(obj)
        elif hasattr(old_obj, 'db_packages') and old_obj.db_packages is not None:
            for obj in old_obj.db_packages:
                new_obj.db_add_package(DBPackage.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_packages') and hasattr(new_obj, 'db_deleted_packages'):
            for obj in old_obj.db_deleted_packages:
                n_obj = DBPackage.update_version(obj, trans_dict)
                new_obj.db_deleted_packages.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Flatten the subtree into (object, parent_type, parent_id)
        # triples; orphan=True detaches children after collection.
        children = []
        to_del = []
        for child in self.db_packages:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_package(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Return (and optionally clear) the deleted-package backlog.
        children = []
        children.extend(self.db_deleted_packages)
        if remove:
            self.db_deleted_packages = []
        return children
    def has_changes(self):
        # Dirty if this object or any child package is dirty.
        if self.is_dirty:
            return True
        for child in self._db_packages:
            if child.has_changes():
                return True
        return False
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_entity_type(self):
        return self._db_entity_type
    def __set_db_entity_type(self, entity_type):
        self._db_entity_type = entity_type
        self.is_dirty = True
    db_entity_type = property(__get_db_entity_type, __set_db_entity_type)
    def db_add_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_change_entity_type(self, entity_type):
        self._db_entity_type = entity_type
    def db_delete_entity_type(self, entity_type):
        self._db_entity_type = None
    def __get_db_version(self):
        return self._db_version
    def __set_db_version(self, version):
        self._db_version = version
        self.is_dirty = True
    db_version = property(__get_db_version, __set_db_version)
    def db_add_version(self, version):
        self._db_version = version
    def db_change_version(self, version):
        self._db_version = version
    def db_delete_version(self, version):
        self._db_version = None
    def __get_db_root_descriptor_id(self):
        return self._db_root_descriptor_id
    def __set_db_root_descriptor_id(self, root_descriptor_id):
        self._db_root_descriptor_id = root_descriptor_id
        self.is_dirty = True
    db_root_descriptor_id = property(__get_db_root_descriptor_id, __set_db_root_descriptor_id)
    def db_add_root_descriptor_id(self, root_descriptor_id):
        self._db_root_descriptor_id = root_descriptor_id
    def db_change_root_descriptor_id(self, root_descriptor_id):
        self._db_root_descriptor_id = root_descriptor_id
    def db_delete_root_descriptor_id(self, root_descriptor_id):
        self._db_root_descriptor_id = None
    def __get_db_name(self):
        return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name):
        self._db_name = name
    def db_change_name(self, name):
        self._db_name = name
    def db_delete_name(self, name):
        self._db_name = None
    def __get_db_last_modified(self):
        return self._db_last_modified
    def __set_db_last_modified(self, last_modified):
        self._db_last_modified = last_modified
        self.is_dirty = True
    db_last_modified = property(__get_db_last_modified, __set_db_last_modified)
    def db_add_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_change_last_modified(self, last_modified):
        self._db_last_modified = last_modified
    def db_delete_last_modified(self, last_modified):
        self._db_last_modified = None
    def __get_db_packages(self):
        return self._db_packages
    def __set_db_packages(self, packages):
        # NOTE(review): replacing the list wholesale does not rebuild
        # the package indexes; use the db_add/db_change/db_delete
        # helpers to keep them consistent.
        self._db_packages = packages
        self.is_dirty = True
    db_packages = property(__get_db_packages, __set_db_packages)
    def db_get_packages(self):
        return self._db_packages
    def db_add_package(self, package):
        self.is_dirty = True
        self._db_packages.append(package)
        self.db_packages_id_index[package.db_id] = package
        self.db_packages_identifier_index[(package.db_identifier,package.db_version)] = package
    def db_change_package(self, package):
        # Replace the package with a matching id in place, or append if
        # no match; both indexes are updated either way.
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_packages)):
            if self._db_packages[i].db_id == package.db_id:
                self._db_packages[i] = package
                found = True
                break
        if not found:
            self._db_packages.append(package)
        self.db_packages_id_index[package.db_id] = package
        self.db_packages_identifier_index[(package.db_identifier,package.db_version)] = package
    def db_delete_package(self, package):
        # Already-persisted (not is_new) children are queued in
        # db_deleted_packages so the DB layer can delete them later.
        self.is_dirty = True
        for i in xrange(len(self._db_packages)):
            if self._db_packages[i].db_id == package.db_id:
                if not self._db_packages[i].is_new:
                    self.db_deleted_packages.append(self._db_packages[i])
                del self._db_packages[i]
                break
        del self.db_packages_id_index[package.db_id]
        del self.db_packages_identifier_index[(package.db_identifier,package.db_version)]
    def db_get_package(self, key):
        for i in xrange(len(self._db_packages)):
            if self._db_packages[i].db_id == key:
                return self._db_packages[i]
        return None
    def db_get_package_by_id(self, key):
        return self.db_packages_id_index[key]
    def db_has_package_with_id(self, key):
        return key in self.db_packages_id_index
    def db_get_package_by_identifier(self, key):
        return self.db_packages_identifier_index[key]
    def db_has_package_with_identifier(self, key):
        return key in self.db_packages_identifier_index
    def getPrimaryKey(self):
        return self._db_id
class DBOpmAgent(object):
    """Generated persistence object for an 'opm_agent' provenance record.

    Holds an id, a value, and an unkeyed list of account references.
    Mutating a ``db_*`` property flags ``is_dirty`` so the persistence
    layer knows the object must be written back; ``is_new`` marks an
    object that has never been persisted.
    """
    vtType = 'opm_agent'

    def __init__(self, id=None, value=None, accounts=None):
        self._db_id = id
        self._db_value = value
        self.db_deleted_accounts = []
        self._db_accounts = [] if accounts is None else accounts
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmAgent.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this agent; allocate a fresh id via id_scope when new_ids."""
        dup = DBOpmAgent(id=self._db_id, value=self._db_value)
        if self._db_accounts is None:
            dup._db_accounts = []
        else:
            dup._db_accounts = [acct.do_copy(new_ids, id_scope, id_remap)
                                for acct in self._db_accounts]
        if new_ids:
            fresh_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                remapped_type = id_scope.remap[self.vtType]
            else:
                remapped_type = self.vtType
            # record the old -> new id mapping for foreign-key fixups
            id_remap[(remapped_type, self.db_id)] = fresh_id
            dup.db_id = fresh_id
        else:
            # a plain copy keeps the original's persistence flags
            dup.is_dirty = self.is_dirty
            dup.is_new = self.is_new
        return dup

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to the current schema, honoring per-field
        override functions found in trans_dict[<class name>]."""
        if new_obj is None:
            new_obj = DBOpmAgent()
        overrides = {}
        cls_name = new_obj.__class__.__name__
        if cls_name in trans_dict:
            overrides = trans_dict[cls_name]
        if 'id' in overrides:
            new_obj.db_id = overrides['id'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'value' in overrides:
            new_obj.db_value = overrides['value'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            new_obj.db_value = old_obj.db_value
        if 'accounts' in overrides:
            for item in overrides['accounts'](old_obj, trans_dict):
                new_obj.db_add_account(item)
        elif hasattr(old_obj, 'db_accounts') and old_obj.db_accounts is not None:
            for item in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(item, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for item in old_obj.db_deleted_accounts:
                new_obj.db_deleted_accounts.append(
                    DBOpmAccountId.update_version(item, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Collect (object, parent_type, parent_id) triples for this subtree;
        with orphan=True the children are detached after collection."""
        triples = []
        doomed = []
        for acct in self.db_accounts:
            triples.extend(acct.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                doomed.append(acct)
        for acct in doomed:
            self.db_delete_account(acct)
        triples.append((self, parent[0], parent[1]))
        return triples

    def db_deleted_children(self, remove=False):
        """Return (and optionally clear) the deleted-account backlog."""
        backlog = list(self.db_deleted_accounts)
        if remove:
            self.db_deleted_accounts = []
        return backlog

    def has_changes(self):
        """True when this object or any child account is dirty."""
        if self.is_dirty:
            return True
        return any(acct.has_changes() for acct in self._db_accounts)

    @property
    def db_id(self):
        return self._db_id

    @db_id.setter
    def db_id(self, id):
        self._db_id = id
        self.is_dirty = True

    def db_add_id(self, id):
        # raw assignment: does not flag is_dirty (matches generated pattern)
        self._db_id = id

    def db_change_id(self, id):
        self._db_id = id

    def db_delete_id(self, id):
        # argument ignored; field reset to None
        self._db_id = None

    @property
    def db_value(self):
        return self._db_value

    @db_value.setter
    def db_value(self, value):
        self._db_value = value
        self.is_dirty = True

    def db_add_value(self, value):
        self._db_value = value

    def db_change_value(self, value):
        self._db_value = value

    def db_delete_value(self, value):
        self._db_value = None

    @property
    def db_accounts(self):
        return self._db_accounts

    @db_accounts.setter
    def db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True

    def db_get_accounts(self):
        return self._db_accounts

    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)

    def db_change_account(self, account):
        # accounts are unkeyed, so a "change" can only append
        self.is_dirty = True
        self._db_accounts.append(account)

    def db_delete_account(self, account):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')

    def db_get_account(self, key):
        # unkeyed children cannot be looked up
        return None

    def getPrimaryKey(self):
        return self._db_id
class DBProvEntity(object):
vtType = 'prov_entity'
def __init__(self, id=None, prov_type=None, prov_label=None, prov_value=None, vt_id=None, vt_type=None, vt_desc=None, vt_package=None, vt_version=None, vt_cache=None, vt_location_x=None, vt_location_y=None, is_part_of=None):
self._db_id = id
self._db_prov_type = prov_type
self._db_prov_label = prov_label
self._db_prov_value = prov_value
self._db_vt_id = vt_id
self._db_vt_type = vt_type
self._db_vt_desc = vt_desc
self._db_vt_package = vt_package
self._db_vt_version = vt_version
self._db_vt_cache = vt_cache
self._db_vt_location_x = vt_location_x
self._db_vt_location_y = vt_location_y
self.db_deleted_is_part_of = []
self._db_is_part_of = is_part_of
self.is_dirty = True
self.is_new = True
def __copy__(self):
return DBProvEntity.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBProvEntity(id=self._db_id,
prov_type=self._db_prov_type,
prov_label=self._db_prov_label,
prov_value=self._db_prov_value,
vt_id=self._db_vt_id,
vt_type=self._db_vt_type,
vt_desc=self._db_vt_desc,
vt_package=self._db_vt_package,
vt_version=self._db_vt_version,
vt_cache=self._db_vt_cache,
vt_location_x=self._db_vt_location_x,
vt_location_y=self._db_vt_location_y)
if self._db_is_part_of is not None:
cp._db_is_part_of = self._db_is_part_of.do_copy(new_ids, id_scope, id_remap)
# set new ids
if new_ids:
new_id = id_scope.getNewId(self.vtType)
if self.vtType in id_scope.remap:
id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
else:
id_remap[(self.vtType, self.db_id)] = new_id
cp.db_id = new_id
# recreate indices and set flags
if not new_ids:
cp.is_dirty = self.is_dirty
cp.is_new = self.is_new
return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBProvEntity()
class_dict = {}
if new_obj.__class__.__name__ in trans_dict:
class_dict = trans_dict[new_obj.__class__.__name__]
if 'id' in class_dict:
res = class_dict['id'](old_obj, trans_dict)
new_obj.db_id = res
elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
new_obj.db_id = old_obj.db_id
if 'prov_type' in class_dict:
res = class_dict['prov_type'](old_obj, trans_dict)
new_obj.db_prov_type = res
elif hasattr(old_obj, 'db_prov_type') and old_obj.db_prov_type is not None:
new_obj.db_prov_type = old_obj.db_prov_type
if 'prov_label' in class_dict:
res = class_dict['prov_label'](old_obj, trans_dict)
new_obj.db_prov_label = res
elif hasattr(old_obj, 'db_prov_label') and old_obj.db_prov_label is not None:
new_obj.db_prov_label = old_obj.db_prov_label
if 'prov_value' in class_dict:
res = class_dict['prov_value'](old_obj, trans_dict)
new_obj.db_prov_value = res
elif hasattr(old_obj, 'db_prov_value') and old_obj.db_prov_value is not None:
new_obj.db_prov_value = old_obj.db_prov_value
if 'vt_id' in class_dict:
res = class_dict['vt_id'](old_obj, trans_dict)
new_obj.db_vt_id = res
elif hasattr(old_obj, 'db_vt_id') and old_obj.db_vt_id is not None:
new_obj.db_vt_id = old_obj.db_vt_id
if 'vt_type' in class_dict:
res = class_dict['vt_type'](old_obj, trans_dict)
new_obj.db_vt_type = res
elif hasattr(old_obj, 'db_vt_type') and old_obj.db_vt_type is not None:
new_obj.db_vt_type = old_obj.db_vt_type
if 'vt_desc' in class_dict:
res = class_dict['vt_desc'](old_obj, trans_dict)
new_obj.db_vt_desc = res
elif hasattr(old_obj, 'db_vt_desc') and old_obj.db_vt_desc is not None:
new_obj.db_vt_desc = old_obj.db_vt_desc
if 'vt_package' in class_dict:
res = class_dict['vt_package'](old_obj, trans_dict)
new_obj.db_vt_package = res
elif hasattr(old_obj, 'db_vt_package') and old_obj.db_vt_package is not None:
new_obj.db_vt_package = old_obj.db_vt_package
if 'vt_version' in class_dict:
res = class_dict['vt_version'](old_obj, trans_dict)
new_obj.db_vt_version = res
elif hasattr(old_obj, 'db_vt_version') and old_obj.db_vt_version is not None:
new_obj.db_vt_version = old_obj.db_vt_version
if 'vt_cache' in class_dict:
res = class_dict['vt_cache'](old_obj, trans_dict)
new_obj.db_vt_cache = res
elif hasattr(old_obj, 'db_vt_cache') and old_obj.db_vt_cache is not None:
new_obj.db_vt_cache = old_obj.db_vt_cache
if 'vt_location_x' in class_dict:
res = class_dict['vt_location_x'](old_obj, trans_dict)
new_obj.db_vt_location_x = res
elif hasattr(old_obj, 'db_vt_location_x') and old_obj.db_vt_location_x is not None:
new_obj.db_vt_location_x = old_obj.db_vt_location_x
if 'vt_location_y' in class_dict:
res = class_dict['vt_location_y'](old_obj, trans_dict)
new_obj.db_vt_location_y = res
elif hasattr(old_obj, 'db_vt_location_y') and old_obj.db_vt_location_y is not None:
new_obj.db_vt_location_y = old_obj.db_vt_location_y
if 'is_part_of' in class_dict:
res = class_dict['is_part_of'](old_obj, trans_dict)
new_obj.db_is_part_of = res
elif hasattr(old_obj, 'db_is_part_of') and old_obj.db_is_part_of is not None:
obj = old_obj.db_is_part_of
new_obj.db_add_is_part_of(DBIsPartOf.update_version(obj, trans_dict))
if hasattr(old_obj, 'db_deleted_is_part_of') and hasattr(new_obj, 'db_deleted_is_part_of'):
for obj in old_obj.db_deleted_is_part_of:
n_obj = DBIsPartOf.update_version(obj, trans_dict)
new_obj.db_deleted_is_part_of.append(n_obj)
new_obj.is_new = old_obj.is_new
new_obj.is_dirty = old_obj.is_dirty
return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Return a flat list of (object, parent_vtType, parent_id) triples for
        # this object and its single child slot (is_part_of).  When orphan is
        # True, the child reference is detached after being collected.
        children = []
        if self._db_is_part_of is not None:
            children.extend(self._db_is_part_of.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_is_part_of = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        # Return the children queued for deletion; remove=True also clears
        # the queue so they are only reported once.
        children = []
        children.extend(self.db_deleted_is_part_of)
        if remove:
            self.db_deleted_is_part_of = []
        return children
    def has_changes(self):
        # Dirty if this object itself, or its is_part_of child, has unsaved
        # changes.
        if self.is_dirty:
            return True
        if self._db_is_part_of is not None and self._db_is_part_of.has_changes():
            return True
        return False
def __get_db_id(self):
return self._db_id
def __set_db_id(self, id):
self._db_id = id
self.is_dirty = True
db_id = property(__get_db_id, __set_db_id)
def db_add_id(self, id):
self._db_id = id
def db_change_id(self, id):
self._db_id = id
def db_delete_id(self, id):
self._db_id = None
def __get_db_prov_type(self):
return self._db_prov_type
def __set_db_prov_type(self, prov_type):
self._db_prov_type = prov_type
self.is_dirty = True
db_prov_type = property(__get_db_prov_type, __set_db_prov_type)
def db_add_prov_type(self, prov_type):
self._db_prov_type = prov_type
def db_change_prov_type(self, prov_type):
self._db_prov_type = prov_type
def db_delete_prov_type(self, prov_type):
self._db_prov_type = None
def __get_db_prov_label(self):
return self._db_prov_label
def __set_db_prov_label(self, prov_label):
self._db_prov_label = prov_label
self.is_dirty = True
db_prov_label = property(__get_db_prov_label, __set_db_prov_label)
def db_add_prov_label(self, prov_label):
self._db_prov_label = prov_label
def db_change_prov_label(self, prov_label):
self._db_prov_label = prov_label
def db_delete_prov_label(self, prov_label):
self._db_prov_label = None
def __get_db_prov_value(self):
return self._db_prov_value
def __set_db_prov_value(self, prov_value):
self._db_prov_value = prov_value
self.is_dirty = True
db_prov_value = property(__get_db_prov_value, __set_db_prov_value)
def db_add_prov_value(self, prov_value):
self._db_prov_value = prov_value
def db_change_prov_value(self, prov_value):
self._db_prov_value = prov_value
def db_delete_prov_value(self, prov_value):
self._db_prov_value = None
def __get_db_vt_id(self):
return self._db_vt_id
def __set_db_vt_id(self, vt_id):
self._db_vt_id = vt_id
self.is_dirty = True
db_vt_id = property(__get_db_vt_id, __set_db_vt_id)
def db_add_vt_id(self, vt_id):
self._db_vt_id = vt_id
def db_change_vt_id(self, vt_id):
self._db_vt_id = vt_id
def db_delete_vt_id(self, vt_id):
self._db_vt_id = None
def __get_db_vt_type(self):
return self._db_vt_type
def __set_db_vt_type(self, vt_type):
self._db_vt_type = vt_type
self.is_dirty = True
db_vt_type = property(__get_db_vt_type, __set_db_vt_type)
def db_add_vt_type(self, vt_type):
self._db_vt_type = vt_type
def db_change_vt_type(self, vt_type):
self._db_vt_type = vt_type
def db_delete_vt_type(self, vt_type):
self._db_vt_type = None
def __get_db_vt_desc(self):
return self._db_vt_desc
def __set_db_vt_desc(self, vt_desc):
self._db_vt_desc = vt_desc
self.is_dirty = True
db_vt_desc = property(__get_db_vt_desc, __set_db_vt_desc)
def db_add_vt_desc(self, vt_desc):
self._db_vt_desc = vt_desc
def db_change_vt_desc(self, vt_desc):
self._db_vt_desc = vt_desc
def db_delete_vt_desc(self, vt_desc):
self._db_vt_desc = None
def __get_db_vt_package(self):
return self._db_vt_package
def __set_db_vt_package(self, vt_package):
self._db_vt_package = vt_package
self.is_dirty = True
db_vt_package = property(__get_db_vt_package, __set_db_vt_package)
def db_add_vt_package(self, vt_package):
self._db_vt_package = vt_package
def db_change_vt_package(self, vt_package):
self._db_vt_package = vt_package
def db_delete_vt_package(self, vt_package):
self._db_vt_package = None
def __get_db_vt_version(self):
return self._db_vt_version
def __set_db_vt_version(self, vt_version):
self._db_vt_version = vt_version
self.is_dirty = True
db_vt_version = property(__get_db_vt_version, __set_db_vt_version)
def db_add_vt_version(self, vt_version):
self._db_vt_version = vt_version
def db_change_vt_version(self, vt_version):
self._db_vt_version = vt_version
def db_delete_vt_version(self, vt_version):
self._db_vt_version = None
def __get_db_vt_cache(self):
return self._db_vt_cache
def __set_db_vt_cache(self, vt_cache):
self._db_vt_cache = vt_cache
self.is_dirty = True
db_vt_cache = property(__get_db_vt_cache, __set_db_vt_cache)
def db_add_vt_cache(self, vt_cache):
self._db_vt_cache = vt_cache
def db_change_vt_cache(self, vt_cache):
self._db_vt_cache = vt_cache
def db_delete_vt_cache(self, vt_cache):
self._db_vt_cache = None
def __get_db_vt_location_x(self):
return self._db_vt_location_x
def __set_db_vt_location_x(self, vt_location_x):
self._db_vt_location_x = vt_location_x
self.is_dirty = True
db_vt_location_x = property(__get_db_vt_location_x, __set_db_vt_location_x)
def db_add_vt_location_x(self, vt_location_x):
self._db_vt_location_x = vt_location_x
def db_change_vt_location_x(self, vt_location_x):
self._db_vt_location_x = vt_location_x
def db_delete_vt_location_x(self, vt_location_x):
self._db_vt_location_x = None
def __get_db_vt_location_y(self):
return self._db_vt_location_y
def __set_db_vt_location_y(self, vt_location_y):
self._db_vt_location_y = vt_location_y
self.is_dirty = True
db_vt_location_y = property(__get_db_vt_location_y, __set_db_vt_location_y)
def db_add_vt_location_y(self, vt_location_y):
self._db_vt_location_y = vt_location_y
def db_change_vt_location_y(self, vt_location_y):
self._db_vt_location_y = vt_location_y
def db_delete_vt_location_y(self, vt_location_y):
self._db_vt_location_y = None
def __get_db_is_part_of(self):
return self._db_is_part_of
def __set_db_is_part_of(self, is_part_of):
self._db_is_part_of = is_part_of
self.is_dirty = True
db_is_part_of = property(__get_db_is_part_of, __set_db_is_part_of)
def db_add_is_part_of(self, is_part_of):
self._db_is_part_of = is_part_of
def db_change_is_part_of(self, is_part_of):
self._db_is_part_of = is_part_of
def db_delete_is_part_of(self, is_part_of):
if not self.is_new:
self.db_deleted_is_part_of.append(self._db_is_part_of)
self._db_is_part_of = None
def getPrimaryKey(self):
return self._db_id
class DBAnnotation(object):
    """A simple key/value annotation record with a numeric primary key.

    Auto-generated persistence object: property setters flag the instance
    dirty so the save machinery knows it must be written back.
    """
    vtType = 'annotation'

    def __init__(self, id=None, key=None, value=None):
        self._db_id = id
        self._db_key = key
        self._db_value = value
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBAnnotation.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone this annotation.  With new_ids, allocate a fresh id from
        id_scope and record the old->new pair in id_remap."""
        cp = DBAnnotation(id=self._db_id, key=self._db_key,
                          value=self._db_value)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = fresh
            else:
                id_remap[(self.vtType, self.db_id)] = fresh
            cp.db_id = fresh
        else:
            # a plain copy keeps the persistence flags of the source
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, applying any per-field
        converters registered in trans_dict (keyed by class name, then
        field name)."""
        if new_obj is None:
            new_obj = DBAnnotation()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        for field in ('id', 'key', 'value'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # leaf object: just itself, tagged with the parent's type and id
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # annotations track no deletable children
        return []

    def has_changes(self):
        if self.is_dirty:
            return True
        return False

    # --- accessors (property setters mark the object dirty) ---
    def __get_db_id(self): return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id): self._db_id = id
    def db_change_id(self, id): self._db_id = id
    def db_delete_id(self, id): self._db_id = None

    def __get_db_key(self): return self._db_key
    def __set_db_key(self, key):
        self._db_key = key
        self.is_dirty = True
    db_key = property(__get_db_key, __set_db_key)
    def db_add_key(self, key): self._db_key = key
    def db_change_key(self, key): self._db_key = key
    def db_delete_key(self, key): self._db_key = None

    def __get_db_value(self): return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value): self._db_value = value
    def db_change_value(self, value): self._db_value = value
    def db_delete_value(self, value): self._db_value = None

    def getPrimaryKey(self):
        return self._db_id
class DBOpmTime(object):
    """OPM timestamp bounds for an event: it happened no earlier than
    db_no_earlier_than and no later than db_no_later_than, as measured by
    the clock identified by db_clock_id.

    Unlike most generated DB classes, DBOpmTime has no primary key: there
    is no db_id attribute or property on this class.
    """
    vtType = 'opm_time'
    def __init__(self, no_later_than=None, no_earlier_than=None, clock_id=None):
        self._db_no_later_than = no_later_than
        self._db_no_earlier_than = no_earlier_than
        self._db_clock_id = clock_id
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBOpmTime.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Return a copy of this object.

        Bug fix: the generated new_ids branch read self.db_id, but DBOpmTime
        defines no db_id, so do_copy(new_ids=True) always raised
        AttributeError.  There is no id to renumber or remap here, so that
        branch is now skipped entirely.
        """
        cp = DBOpmTime(no_later_than=self._db_no_later_than,
                       no_earlier_than=self._db_no_earlier_than,
                       clock_id=self._db_clock_id)
        if not new_ids:
            # a plain copy keeps the persistence flags of the source
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, applying any per-field
        converters registered in trans_dict (keyed by class name, then
        field name)."""
        if new_obj is None:
            new_obj = DBOpmTime()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'no_later_than' in class_dict:
            res = class_dict['no_later_than'](old_obj, trans_dict)
            new_obj.db_no_later_than = res
        elif hasattr(old_obj, 'db_no_later_than') and old_obj.db_no_later_than is not None:
            new_obj.db_no_later_than = old_obj.db_no_later_than
        if 'no_earlier_than' in class_dict:
            res = class_dict['no_earlier_than'](old_obj, trans_dict)
            new_obj.db_no_earlier_than = res
        elif hasattr(old_obj, 'db_no_earlier_than') and old_obj.db_no_earlier_than is not None:
            new_obj.db_no_earlier_than = old_obj.db_no_earlier_than
        if 'clock_id' in class_dict:
            res = class_dict['clock_id'](old_obj, trans_dict)
            new_obj.db_clock_id = res
        elif hasattr(old_obj, 'db_clock_id') and old_obj.db_clock_id is not None:
            new_obj.db_clock_id = old_obj.db_clock_id
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # leaf object: just itself, tagged with the parent's type and id
        return [(self, parent[0], parent[1])]
    def db_deleted_children(self, remove=False):
        # no deletable children are tracked for this class
        children = []
        return children
    def has_changes(self):
        if self.is_dirty:
            return True
        return False
    # --- accessors (property setters mark the object dirty; the
    #     db_delete_* helpers ignore their argument and clear the slot) ---
    def __get_db_no_later_than(self):
        return self._db_no_later_than
    def __set_db_no_later_than(self, no_later_than):
        self._db_no_later_than = no_later_than
        self.is_dirty = True
    db_no_later_than = property(__get_db_no_later_than, __set_db_no_later_than)
    def db_add_no_later_than(self, no_later_than):
        self._db_no_later_than = no_later_than
    def db_change_no_later_than(self, no_later_than):
        self._db_no_later_than = no_later_than
    def db_delete_no_later_than(self, no_later_than):
        self._db_no_later_than = None
    def __get_db_no_earlier_than(self):
        return self._db_no_earlier_than
    def __set_db_no_earlier_than(self, no_earlier_than):
        self._db_no_earlier_than = no_earlier_than
        self.is_dirty = True
    db_no_earlier_than = property(__get_db_no_earlier_than, __set_db_no_earlier_than)
    def db_add_no_earlier_than(self, no_earlier_than):
        self._db_no_earlier_than = no_earlier_than
    def db_change_no_earlier_than(self, no_earlier_than):
        self._db_no_earlier_than = no_earlier_than
    def db_delete_no_earlier_than(self, no_earlier_than):
        self._db_no_earlier_than = None
    def __get_db_clock_id(self):
        return self._db_clock_id
    def __set_db_clock_id(self, clock_id):
        self._db_clock_id = clock_id
        self.is_dirty = True
    db_clock_id = property(__get_db_clock_id, __set_db_clock_id)
    def db_add_clock_id(self, clock_id):
        self._db_clock_id = clock_id
    def db_change_clock_id(self, clock_id):
        self._db_clock_id = clock_id
    def db_delete_clock_id(self, clock_id):
        self._db_clock_id = None
class DBParameterExploration(object):
    """A saved parameter exploration: the pipeline action it applies to,
    its dimensions and layout, and its list of explored functions
    (children, indexed by id in db_functions_id_index)."""
    vtType = 'parameter_exploration'

    def __init__(self, id=None, action_id=None, name=None, date=None, user=None, dims=None, layout=None, functions=None):
        self._db_id = id
        self._db_action_id = action_id
        self._db_name = name
        self._db_date = date
        self._db_user = user
        self._db_dims = dims
        self._db_layout = layout
        self.db_deleted_functions = []
        self.db_functions_id_index = {}
        self._db_functions = [] if functions is None else functions
        for child in self._db_functions:
            self.db_functions_id_index[child.db_id] = child
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBParameterExploration.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this exploration, children included.  With new_ids,
        allocate fresh ids, record old->new pairs in id_remap, and re-point
        action_id if the referenced action was itself remapped.

        Note: the action_id lookup probes id_remap even when new_ids is
        False, so callers must supply a dict for id_remap.
        """
        cp = DBParameterExploration(id=self._db_id,
                                    action_id=self._db_action_id,
                                    name=self._db_name,
                                    date=self._db_date,
                                    user=self._db_user,
                                    dims=self._db_dims,
                                    layout=self._db_layout)
        if self._db_functions is None:
            cp._db_functions = []
        else:
            cp._db_functions = [child.do_copy(new_ids, id_scope, id_remap)
                                for child in self._db_functions]
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = fresh
            else:
                id_remap[(self.vtType, self.db_id)] = fresh
            cp.db_id = fresh
        if hasattr(self, 'db_action_id') and ('action', self._db_action_id) in id_remap:
            cp._db_action_id = id_remap[('action', self._db_action_id)]
        # rebuild the id index over the copied children
        cp.db_functions_id_index = dict((child.db_id, child) for child in cp._db_functions)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, applying any per-field
        converters registered in trans_dict (keyed by class name, then
        field name)."""
        if new_obj is None:
            new_obj = DBParameterExploration()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        for field in ('id', 'action_id', 'name', 'date', 'user', 'dims', 'layout'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        if 'functions' in class_dict:
            for child in class_dict['functions'](old_obj, trans_dict):
                new_obj.db_add_function(child)
        elif getattr(old_obj, 'db_functions', None) is not None:
            for child in old_obj.db_functions:
                new_obj.db_add_function(DBPEFunction.update_version(child, trans_dict))
        if hasattr(old_obj, 'db_deleted_functions') and hasattr(new_obj, 'db_deleted_functions'):
            for child in old_obj.db_deleted_functions:
                new_obj.db_deleted_functions.append(DBPEFunction.update_version(child, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Flatten this object and its function children into
        # (object, parent_vtType, parent_id) triples; orphan detaches the
        # children after they are collected.
        children = []
        doomed = []
        for child in self.db_functions:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                doomed.append(child)
        for child in doomed:
            self.db_delete_function(child)
        children.append((self, parent[0], parent[1]))
        return children

    def db_deleted_children(self, remove=False):
        # Return children queued for deletion; remove=True also clears
        # the queue.
        children = list(self.db_deleted_functions)
        if remove:
            self.db_deleted_functions = []
        return children

    def has_changes(self):
        if self.is_dirty:
            return True
        return any(child.has_changes() for child in self._db_functions)

    # --- scalar accessors (property setters mark the object dirty;
    #     db_delete_* helpers ignore their argument and clear the slot) ---
    def __get_db_id(self): return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id): self._db_id = id
    def db_change_id(self, id): self._db_id = id
    def db_delete_id(self, id): self._db_id = None
    def __get_db_action_id(self): return self._db_action_id
    def __set_db_action_id(self, action_id):
        self._db_action_id = action_id
        self.is_dirty = True
    db_action_id = property(__get_db_action_id, __set_db_action_id)
    def db_add_action_id(self, action_id): self._db_action_id = action_id
    def db_change_action_id(self, action_id): self._db_action_id = action_id
    def db_delete_action_id(self, action_id): self._db_action_id = None
    def __get_db_name(self): return self._db_name
    def __set_db_name(self, name):
        self._db_name = name
        self.is_dirty = True
    db_name = property(__get_db_name, __set_db_name)
    def db_add_name(self, name): self._db_name = name
    def db_change_name(self, name): self._db_name = name
    def db_delete_name(self, name): self._db_name = None
    def __get_db_date(self): return self._db_date
    def __set_db_date(self, date):
        self._db_date = date
        self.is_dirty = True
    db_date = property(__get_db_date, __set_db_date)
    def db_add_date(self, date): self._db_date = date
    def db_change_date(self, date): self._db_date = date
    def db_delete_date(self, date): self._db_date = None
    def __get_db_user(self): return self._db_user
    def __set_db_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(__get_db_user, __set_db_user)
    def db_add_user(self, user): self._db_user = user
    def db_change_user(self, user): self._db_user = user
    def db_delete_user(self, user): self._db_user = None
    def __get_db_dims(self): return self._db_dims
    def __set_db_dims(self, dims):
        self._db_dims = dims
        self.is_dirty = True
    db_dims = property(__get_db_dims, __set_db_dims)
    def db_add_dims(self, dims): self._db_dims = dims
    def db_change_dims(self, dims): self._db_dims = dims
    def db_delete_dims(self, dims): self._db_dims = None
    def __get_db_layout(self): return self._db_layout
    def __set_db_layout(self, layout):
        self._db_layout = layout
        self.is_dirty = True
    db_layout = property(__get_db_layout, __set_db_layout)
    def db_add_layout(self, layout): self._db_layout = layout
    def db_change_layout(self, layout): self._db_layout = layout
    def db_delete_layout(self, layout): self._db_layout = None

    # --- functions: an id-keyed child collection ---
    def __get_db_functions(self): return self._db_functions
    def __set_db_functions(self, functions):
        self._db_functions = functions
        self.is_dirty = True
    db_functions = property(__get_db_functions, __set_db_functions)
    def db_get_functions(self): return self._db_functions
    def db_add_function(self, function):
        self.is_dirty = True
        self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_change_function(self, function):
        # replace the entry with a matching id, or append if none exists
        self.is_dirty = True
        for pos, existing in enumerate(self._db_functions):
            if existing.db_id == function.db_id:
                self._db_functions[pos] = function
                break
        else:
            self._db_functions.append(function)
        self.db_functions_id_index[function.db_id] = function
    def db_delete_function(self, function):
        # drop the matching entry; non-new entries are archived so their
        # deletion reaches the database
        self.is_dirty = True
        for pos, existing in enumerate(self._db_functions):
            if existing.db_id == function.db_id:
                if not existing.is_new:
                    self.db_deleted_functions.append(existing)
                del self._db_functions[pos]
                break
        del self.db_functions_id_index[function.db_id]
    def db_get_function(self, key):
        for existing in self._db_functions:
            if existing.db_id == key:
                return existing
        return None
    def db_get_function_by_id(self, key): return self.db_functions_id_index[key]
    def db_has_function_with_id(self, key): return key in self.db_functions_id_index
    def getPrimaryKey(self):
        return self._db_id
class DBMashupActionAnnotation(object):
    """Key/value annotation attached to a mashup action, stamped with the
    date and user that created it.  Auto-generated persistence object."""
    vtType = 'mashup_actionAnnotation'

    def __init__(self, id=None, key=None, value=None, action_id=None, date=None, user=None):
        self._db_id = id
        self._db_key = key
        self._db_value = value
        self._db_action_id = action_id
        self._db_date = date
        self._db_user = user
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBMashupActionAnnotation.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Clone the annotation.  With new_ids, allocate a fresh id, record
        the old->new pair in id_remap, and re-point action_id if the
        referenced mashup_action was itself remapped.

        Note: the action_id lookup probes id_remap even when new_ids is
        False, so callers must supply a dict for id_remap.
        """
        cp = DBMashupActionAnnotation(id=self._db_id,
                                      key=self._db_key,
                                      value=self._db_value,
                                      action_id=self._db_action_id,
                                      date=self._db_date,
                                      user=self._db_user)
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = fresh
            else:
                id_remap[(self.vtType, self.db_id)] = fresh
            cp.db_id = fresh
        if hasattr(self, 'db_action_id') and ('mashup_action', self._db_action_id) in id_remap:
            cp._db_action_id = id_remap[('mashup_action', self._db_action_id)]
        if not new_ids:
            # a plain copy keeps the persistence flags of the source
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, applying any per-field
        converters registered in trans_dict (keyed by class name, then
        field name)."""
        if new_obj is None:
            new_obj = DBMashupActionAnnotation()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        for field in ('id', 'key', 'value', 'action_id', 'date', 'user'):
            attr = 'db_' + field
            if field in class_dict:
                setattr(new_obj, attr, class_dict[field](old_obj, trans_dict))
            elif getattr(old_obj, attr, None) is not None:
                setattr(new_obj, attr, getattr(old_obj, attr))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # leaf object: just itself, tagged with the parent's type and id
        return [(self, parent[0], parent[1])]

    def db_deleted_children(self, remove=False):
        # annotations track no deletable children
        return []

    def has_changes(self):
        if self.is_dirty:
            return True
        return False

    # --- accessors (property setters mark the object dirty;
    #     db_delete_* helpers ignore their argument and clear the slot) ---
    def __get_db_id(self): return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id): self._db_id = id
    def db_change_id(self, id): self._db_id = id
    def db_delete_id(self, id): self._db_id = None
    def __get_db_key(self): return self._db_key
    def __set_db_key(self, key):
        self._db_key = key
        self.is_dirty = True
    db_key = property(__get_db_key, __set_db_key)
    def db_add_key(self, key): self._db_key = key
    def db_change_key(self, key): self._db_key = key
    def db_delete_key(self, key): self._db_key = None
    def __get_db_value(self): return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value): self._db_value = value
    def db_change_value(self, value): self._db_value = value
    def db_delete_value(self, value): self._db_value = None
    def __get_db_action_id(self): return self._db_action_id
    def __set_db_action_id(self, action_id):
        self._db_action_id = action_id
        self.is_dirty = True
    db_action_id = property(__get_db_action_id, __set_db_action_id)
    def db_add_action_id(self, action_id): self._db_action_id = action_id
    def db_change_action_id(self, action_id): self._db_action_id = action_id
    def db_delete_action_id(self, action_id): self._db_action_id = None
    def __get_db_date(self): return self._db_date
    def __set_db_date(self, date):
        self._db_date = date
        self.is_dirty = True
    db_date = property(__get_db_date, __set_db_date)
    def db_add_date(self, date): self._db_date = date
    def db_change_date(self, date): self._db_date = date
    def db_delete_date(self, date): self._db_date = None
    def __get_db_user(self): return self._db_user
    def __set_db_user(self, user):
        self._db_user = user
        self.is_dirty = True
    db_user = property(__get_db_user, __set_db_user)
    def db_add_user(self, user): self._db_user = user
    def db_change_user(self, user): self._db_user = user
    def db_delete_user(self, user): self._db_user = None

    def getPrimaryKey(self):
        return self._db_id
class DBOpmProcess(object):
    """OPM process node: wraps a single value child and a non-keyed list of
    account references.  Auto-generated persistence object."""
    vtType = 'opm_process'

    def __init__(self, id=None, value=None, accounts=None):
        self._db_id = id
        self.db_deleted_value = []
        self._db_value = value
        self.db_deleted_accounts = []
        self._db_accounts = [] if accounts is None else accounts
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmProcess.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this process (value child and accounts included); with
        new_ids, allocate a fresh id and record the old->new pair in
        id_remap."""
        cp = DBOpmProcess(id=self._db_id)
        if self._db_value is not None:
            cp._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
        if self._db_accounts is None:
            cp._db_accounts = []
        else:
            cp._db_accounts = [acct.do_copy(new_ids, id_scope, id_remap)
                               for acct in self._db_accounts]
        if new_ids:
            fresh = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = fresh
            else:
                id_remap[(self.vtType, self.db_id)] = fresh
            cp.db_id = fresh
        else:
            # a plain copy keeps the persistence flags of the source
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj into the current schema, applying any per-field
        converters registered in trans_dict (keyed by class name, then
        field name)."""
        if new_obj is None:
            new_obj = DBOpmProcess()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            new_obj.db_id = class_dict['id'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_id', None) is not None:
            new_obj.db_id = old_obj.db_id
        if 'value' in class_dict:
            new_obj.db_value = class_dict['value'](old_obj, trans_dict)
        elif getattr(old_obj, 'db_value', None) is not None:
            new_obj.db_add_value(DBOpmProcessValue.update_version(old_obj.db_value, trans_dict))
        if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
            for child in old_obj.db_deleted_value:
                new_obj.db_deleted_value.append(DBOpmProcessValue.update_version(child, trans_dict))
        if 'accounts' in class_dict:
            for child in class_dict['accounts'](old_obj, trans_dict):
                new_obj.db_add_account(child)
        elif getattr(old_obj, 'db_accounts', None) is not None:
            for child in old_obj.db_accounts:
                new_obj.db_add_account(DBOpmAccountId.update_version(child, trans_dict))
        if hasattr(old_obj, 'db_deleted_accounts') and hasattr(new_obj, 'db_deleted_accounts'):
            for child in old_obj.db_deleted_accounts:
                new_obj.db_deleted_accounts.append(DBOpmAccountId.update_version(child, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        # Flatten this object, its value child, and its accounts into
        # (object, parent_vtType, parent_id) triples; orphan detaches them.
        children = []
        if self._db_value is not None:
            children.extend(self._db_value.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_value = None
        doomed = []
        for acct in self.db_accounts:
            children.extend(acct.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                doomed.append(acct)
        for acct in doomed:
            self.db_delete_account(acct)
        children.append((self, parent[0], parent[1]))
        return children

    def db_deleted_children(self, remove=False):
        # Return children queued for deletion; remove=True also clears
        # both queues.
        children = self.db_deleted_value + self.db_deleted_accounts
        if remove:
            self.db_deleted_value = []
            self.db_deleted_accounts = []
        return children

    def has_changes(self):
        if self.is_dirty:
            return True
        if self._db_value is not None and self._db_value.has_changes():
            return True
        return any(acct.has_changes() for acct in self._db_accounts)

    # --- accessors (property setters mark the object dirty) ---
    def __get_db_id(self): return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id): self._db_id = id
    def db_change_id(self, id): self._db_id = id
    def db_delete_id(self, id): self._db_id = None
    def __get_db_value(self): return self._db_value
    def __set_db_value(self, value):
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)
    def db_add_value(self, value): self._db_value = value
    def db_change_value(self, value): self._db_value = value
    def db_delete_value(self, value):
        # archive the old child so its deletion reaches the database
        if not self.is_new:
            self.db_deleted_value.append(self._db_value)
        self._db_value = None
    def __get_db_accounts(self): return self._db_accounts
    def __set_db_accounts(self, accounts):
        self._db_accounts = accounts
        self.is_dirty = True
    db_accounts = property(__get_db_accounts, __set_db_accounts)
    def db_get_accounts(self): return self._db_accounts
    def db_add_account(self, account):
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_change_account(self, account):
        # accounts are not keyed, so "change" can only append
        self.is_dirty = True
        self._db_accounts.append(account)
    def db_delete_account(self, account):
        self.is_dirty = True
        raise Exception('Cannot delete a non-keyed object')
    def db_get_account(self, key):
        # non-keyed collection: lookup by key is unsupported
        return None

    def getPrimaryKey(self):
        return self._db_id
class DBDisabledPackages(object):
vtType = 'disabled_packages'
    def __init__(self, packages=None):
        # packages: optional list of package objects (each with a db_name);
        # a name -> object index is built over them for fast lookup.
        self.db_deleted_packages = []
        self.db_packages_name_index = {}
        if packages is None:
            self._db_packages = []
        else:
            self._db_packages = packages
            for v in self._db_packages:
                self.db_packages_name_index[v.db_name] = v
        # freshly constructed objects are unsaved and dirty
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBDisabledPackages.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        # Deep-copy this object, copying each package child as well.
        # NOTE(review): this class defines no db_id attribute, so the
        # new_ids branch below (which reads self.db_id) looks like it would
        # raise AttributeError if ever taken -- confirm callers always pass
        # new_ids=False here.
        cp = DBDisabledPackages()
        if self._db_packages is None:
            cp._db_packages = []
        else:
            cp._db_packages = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_packages]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        cp.db_packages_name_index = dict((v.db_name, v) for v in cp._db_packages)
        if not new_ids:
            # a plain copy keeps the persistence flags of the source
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        # Translate old_obj into the current schema, applying any per-field
        # converter functions registered in trans_dict (keyed by class name,
        # then field name).
        if new_obj is None:
            new_obj = DBDisabledPackages()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'packages' in class_dict:
            res = class_dict['packages'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_package(obj)
        elif hasattr(old_obj, 'db_packages') and old_obj.db_packages is not None:
            # no converter: translate each child with its own update_version
            for obj in old_obj.db_packages:
                new_obj.db_add_package(DBStartupPackage.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_packages') and hasattr(new_obj, 'db_deleted_packages'):
            # carry the pending-deletion queue across the translation
            for obj in old_obj.db_deleted_packages:
                n_obj = DBStartupPackage.update_version(obj, trans_dict)
                new_obj.db_deleted_packages.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
def db_children(self, parent=(None,None), orphan=False, for_action=False):
children = []
to_del = []
for child in self.db_packages:
children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
if orphan:
to_del.append(child)
for child in to_del:
self.db_delete_package(child)
children.append((self, parent[0], parent[1]))
return children
def db_deleted_children(self, remove=False):
children = []
children.extend(self.db_deleted_packages)
if remove:
self.db_deleted_packages = []
return children
def has_changes(self):
if self.is_dirty:
return True
for child in self._db_packages:
if child.has_changes():
return True
return False
def __get_db_packages(self):
return self._db_packages
def __set_db_packages(self, packages):
self._db_packages = packages
self.is_dirty = True
db_packages = property(__get_db_packages, __set_db_packages)
def db_get_packages(self):
return self._db_packages
def db_add_package(self, package):
self.is_dirty = True
self._db_packages.append(package)
self.db_packages_name_index[package.db_name] = package
def db_change_package(self, package):
self.is_dirty = True
self._db_packages.append(package)
self.db_packages_name_index[package.db_name] = package
def db_delete_package(self, package):
self.is_dirty = True
raise Exception('Cannot delete a non-keyed object')
def db_get_package(self, key):
return None
def db_get_package_by_name(self, key):
return self.db_packages_name_index[key]
def db_has_package_with_name(self, key):
return key in self.db_packages_name_index
class DBModuleExec(object):
    """Log record for a single execution of a pipeline module.

    Stores timing (ts_start/ts_end), outcome flags (cached, completed,
    error), which module ran (module_id/module_name) and where
    (machine_id), plus two keyed child collections -- annotations and
    loop_execs -- each mirrored by a ``db_id -> object`` index.

    NOTE(review): generated code; uses ``xrange``, so it targets Python 2.
    """
    vtType = 'module_exec'
    def __init__(self, id=None, ts_start=None, ts_end=None, cached=None, module_id=None, module_name=None, completed=None, error=None, machine_id=None, annotations=None, loop_execs=None):
        """Store the scalar fields and index the child collections.

        Child lists are held by reference (not copied); each ``*_id_index``
        maps a child's ``db_id`` to the child object.
        """
        self._db_id = id
        self._db_ts_start = ts_start
        self._db_ts_end = ts_end
        self._db_cached = cached
        self._db_module_id = module_id
        self._db_module_name = module_name
        self._db_completed = completed
        self._db_error = error
        self._db_machine_id = machine_id
        self.db_deleted_annotations = []
        self.db_annotations_id_index = {}
        if annotations is None:
            self._db_annotations = []
        else:
            self._db_annotations = annotations
            for v in self._db_annotations:
                self.db_annotations_id_index[v.db_id] = v
        self.db_deleted_loop_execs = []
        self.db_loop_execs_id_index = {}
        if loop_execs is None:
            self._db_loop_execs = []
        else:
            self._db_loop_execs = loop_execs
            for v in self._db_loop_execs:
                self.db_loop_execs_id_index[v.db_id] = v
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBModuleExec.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy self and all children.

        With ``new_ids``, mint a fresh id via *id_scope*, record the
        old->new pair in *id_remap*, and fix up the copied module_id /
        machine_id foreign keys from any mappings already in *id_remap*.
        """
        cp = DBModuleExec(id=self._db_id,
                          ts_start=self._db_ts_start,
                          ts_end=self._db_ts_end,
                          cached=self._db_cached,
                          module_id=self._db_module_id,
                          module_name=self._db_module_name,
                          completed=self._db_completed,
                          error=self._db_error,
                          machine_id=self._db_machine_id)
        if self._db_annotations is None:
            cp._db_annotations = []
        else:
            cp._db_annotations = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_annotations]
        if self._db_loop_execs is None:
            cp._db_loop_execs = []
        else:
            cp._db_loop_execs = [v.do_copy(new_ids, id_scope, id_remap) for v in self._db_loop_execs]
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
            if hasattr(self, 'db_module_id') and ('module', self._db_module_id) in id_remap:
                cp._db_module_id = id_remap[('module', self._db_module_id)]
            if hasattr(self, 'db_machine_id') and ('machine', self._db_machine_id) in id_remap:
                cp._db_machine_id = id_remap[('machine', self._db_machine_id)]
        # recreate indices and set flags
        cp.db_annotations_id_index = dict((v.db_id, v) for v in cp._db_annotations)
        cp.db_loop_execs_id_index = dict((v.db_id, v) for v in cp._db_loop_execs)
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* into the current schema.

        Per-field hook functions in *trans_dict* (keyed by class name)
        take precedence; otherwise scalar fields are copied across and
        children are converted recursively via their own
        ``update_version``.  Deleted-child queues are converted too.
        """
        if new_obj is None:
            new_obj = DBModuleExec()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'id' in class_dict:
            res = class_dict['id'](old_obj, trans_dict)
            new_obj.db_id = res
        elif hasattr(old_obj, 'db_id') and old_obj.db_id is not None:
            new_obj.db_id = old_obj.db_id
        if 'ts_start' in class_dict:
            res = class_dict['ts_start'](old_obj, trans_dict)
            new_obj.db_ts_start = res
        elif hasattr(old_obj, 'db_ts_start') and old_obj.db_ts_start is not None:
            new_obj.db_ts_start = old_obj.db_ts_start
        if 'ts_end' in class_dict:
            res = class_dict['ts_end'](old_obj, trans_dict)
            new_obj.db_ts_end = res
        elif hasattr(old_obj, 'db_ts_end') and old_obj.db_ts_end is not None:
            new_obj.db_ts_end = old_obj.db_ts_end
        if 'cached' in class_dict:
            res = class_dict['cached'](old_obj, trans_dict)
            new_obj.db_cached = res
        elif hasattr(old_obj, 'db_cached') and old_obj.db_cached is not None:
            new_obj.db_cached = old_obj.db_cached
        if 'module_id' in class_dict:
            res = class_dict['module_id'](old_obj, trans_dict)
            new_obj.db_module_id = res
        elif hasattr(old_obj, 'db_module_id') and old_obj.db_module_id is not None:
            new_obj.db_module_id = old_obj.db_module_id
        if 'module_name' in class_dict:
            res = class_dict['module_name'](old_obj, trans_dict)
            new_obj.db_module_name = res
        elif hasattr(old_obj, 'db_module_name') and old_obj.db_module_name is not None:
            new_obj.db_module_name = old_obj.db_module_name
        if 'completed' in class_dict:
            res = class_dict['completed'](old_obj, trans_dict)
            new_obj.db_completed = res
        elif hasattr(old_obj, 'db_completed') and old_obj.db_completed is not None:
            new_obj.db_completed = old_obj.db_completed
        if 'error' in class_dict:
            res = class_dict['error'](old_obj, trans_dict)
            new_obj.db_error = res
        elif hasattr(old_obj, 'db_error') and old_obj.db_error is not None:
            new_obj.db_error = old_obj.db_error
        if 'machine_id' in class_dict:
            res = class_dict['machine_id'](old_obj, trans_dict)
            new_obj.db_machine_id = res
        elif hasattr(old_obj, 'db_machine_id') and old_obj.db_machine_id is not None:
            new_obj.db_machine_id = old_obj.db_machine_id
        if 'annotations' in class_dict:
            res = class_dict['annotations'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_annotation(obj)
        elif hasattr(old_obj, 'db_annotations') and old_obj.db_annotations is not None:
            for obj in old_obj.db_annotations:
                new_obj.db_add_annotation(DBAnnotation.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_annotations') and hasattr(new_obj, 'db_deleted_annotations'):
            for obj in old_obj.db_deleted_annotations:
                n_obj = DBAnnotation.update_version(obj, trans_dict)
                new_obj.db_deleted_annotations.append(n_obj)
        if 'loop_execs' in class_dict:
            res = class_dict['loop_execs'](old_obj, trans_dict)
            for obj in res:
                new_obj.db_add_loop_exec(obj)
        elif hasattr(old_obj, 'db_loop_execs') and old_obj.db_loop_execs is not None:
            for obj in old_obj.db_loop_execs:
                new_obj.db_add_loop_exec(DBLoopExec.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_loop_execs') and hasattr(new_obj, 'db_deleted_loop_execs'):
            for obj in old_obj.db_deleted_loop_execs:
                n_obj = DBLoopExec.update_version(obj, trans_dict)
                new_obj.db_deleted_loop_execs.append(n_obj)
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Flatten self and all children into (obj, parent_type, parent_id)
        tuples; with ``orphan=True`` each visited child is detached.
        """
        children = []
        to_del = []
        for child in self.db_annotations:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_annotation(child)
        to_del = []
        for child in self.db_loop_execs:
            children.extend(child.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                to_del.append(child)
        for child in to_del:
            self.db_delete_loop_exec(child)
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return children queued for deletion; optionally clear the queues."""
        children = []
        children.extend(self.db_deleted_annotations)
        children.extend(self.db_deleted_loop_execs)
        if remove:
            self.db_deleted_annotations = []
            self.db_deleted_loop_execs = []
        return children
    def has_changes(self):
        """True when this object or any child is dirty."""
        if self.is_dirty:
            return True
        for child in self._db_annotations:
            if child.has_changes():
                return True
        for child in self._db_loop_execs:
            if child.has_changes():
                return True
        return False
    # --- generated scalar-field accessors ---------------------------------
    # Same pattern for every scalar field F below: a property whose setter
    # marks the object dirty, plus db_add_F / db_change_F (plain assignment,
    # no dirty flag) and db_delete_F (clears the slot; the argument is
    # ignored).
    def __get_db_id(self):
        return self._db_id
    def __set_db_id(self, id):
        self._db_id = id
        self.is_dirty = True
    db_id = property(__get_db_id, __set_db_id)
    def db_add_id(self, id):
        self._db_id = id
    def db_change_id(self, id):
        self._db_id = id
    def db_delete_id(self, id):
        self._db_id = None
    def __get_db_ts_start(self):
        return self._db_ts_start
    def __set_db_ts_start(self, ts_start):
        self._db_ts_start = ts_start
        self.is_dirty = True
    db_ts_start = property(__get_db_ts_start, __set_db_ts_start)
    def db_add_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_change_ts_start(self, ts_start):
        self._db_ts_start = ts_start
    def db_delete_ts_start(self, ts_start):
        self._db_ts_start = None
    def __get_db_ts_end(self):
        return self._db_ts_end
    def __set_db_ts_end(self, ts_end):
        self._db_ts_end = ts_end
        self.is_dirty = True
    db_ts_end = property(__get_db_ts_end, __set_db_ts_end)
    def db_add_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_change_ts_end(self, ts_end):
        self._db_ts_end = ts_end
    def db_delete_ts_end(self, ts_end):
        self._db_ts_end = None
    def __get_db_cached(self):
        return self._db_cached
    def __set_db_cached(self, cached):
        self._db_cached = cached
        self.is_dirty = True
    db_cached = property(__get_db_cached, __set_db_cached)
    def db_add_cached(self, cached):
        self._db_cached = cached
    def db_change_cached(self, cached):
        self._db_cached = cached
    def db_delete_cached(self, cached):
        self._db_cached = None
    def __get_db_module_id(self):
        return self._db_module_id
    def __set_db_module_id(self, module_id):
        self._db_module_id = module_id
        self.is_dirty = True
    db_module_id = property(__get_db_module_id, __set_db_module_id)
    def db_add_module_id(self, module_id):
        self._db_module_id = module_id
    def db_change_module_id(self, module_id):
        self._db_module_id = module_id
    def db_delete_module_id(self, module_id):
        self._db_module_id = None
    def __get_db_module_name(self):
        return self._db_module_name
    def __set_db_module_name(self, module_name):
        self._db_module_name = module_name
        self.is_dirty = True
    db_module_name = property(__get_db_module_name, __set_db_module_name)
    def db_add_module_name(self, module_name):
        self._db_module_name = module_name
    def db_change_module_name(self, module_name):
        self._db_module_name = module_name
    def db_delete_module_name(self, module_name):
        self._db_module_name = None
    def __get_db_completed(self):
        return self._db_completed
    def __set_db_completed(self, completed):
        self._db_completed = completed
        self.is_dirty = True
    db_completed = property(__get_db_completed, __set_db_completed)
    def db_add_completed(self, completed):
        self._db_completed = completed
    def db_change_completed(self, completed):
        self._db_completed = completed
    def db_delete_completed(self, completed):
        self._db_completed = None
    def __get_db_error(self):
        return self._db_error
    def __set_db_error(self, error):
        self._db_error = error
        self.is_dirty = True
    db_error = property(__get_db_error, __set_db_error)
    def db_add_error(self, error):
        self._db_error = error
    def db_change_error(self, error):
        self._db_error = error
    def db_delete_error(self, error):
        self._db_error = None
    def __get_db_machine_id(self):
        return self._db_machine_id
    def __set_db_machine_id(self, machine_id):
        self._db_machine_id = machine_id
        self.is_dirty = True
    db_machine_id = property(__get_db_machine_id, __set_db_machine_id)
    def db_add_machine_id(self, machine_id):
        self._db_machine_id = machine_id
    def db_change_machine_id(self, machine_id):
        self._db_machine_id = machine_id
    def db_delete_machine_id(self, machine_id):
        self._db_machine_id = None
    # --- annotations child collection (keyed by db_id) ---------------------
    def __get_db_annotations(self):
        return self._db_annotations
    def __set_db_annotations(self, annotations):
        self._db_annotations = annotations
        self.is_dirty = True
    db_annotations = property(__get_db_annotations, __set_db_annotations)
    def db_get_annotations(self):
        """Return the (live) list of annotation children."""
        return self._db_annotations
    def db_add_annotation(self, annotation):
        """Append *annotation*, index it by id, and mark this object dirty."""
        self.is_dirty = True
        self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_change_annotation(self, annotation):
        """Replace the annotation with the same db_id, or append if absent."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                self._db_annotations[i] = annotation
                found = True
                break
        if not found:
            self._db_annotations.append(annotation)
        self.db_annotations_id_index[annotation.db_id] = annotation
    def db_delete_annotation(self, annotation):
        """Remove the annotation matching ``annotation.db_id``.

        Already-persisted entries are queued in ``db_deleted_annotations``.
        NOTE(review): the index ``del`` runs even when no list entry
        matched, so an unknown id raises KeyError.
        """
        self.is_dirty = True
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == annotation.db_id:
                if not self._db_annotations[i].is_new:
                    self.db_deleted_annotations.append(self._db_annotations[i])
                del self._db_annotations[i]
                break
        del self.db_annotations_id_index[annotation.db_id]
    def db_get_annotation(self, key):
        """Linear scan for the annotation with db_id == *key* (None if absent)."""
        for i in xrange(len(self._db_annotations)):
            if self._db_annotations[i].db_id == key:
                return self._db_annotations[i]
        return None
    def db_get_annotation_by_id(self, key):
        """Indexed lookup by db_id (KeyError if absent)."""
        return self.db_annotations_id_index[key]
    def db_has_annotation_with_id(self, key):
        """True when an annotation with db_id *key* is indexed."""
        return key in self.db_annotations_id_index
    # --- loop_execs child collection (keyed by db_id) ----------------------
    def __get_db_loop_execs(self):
        return self._db_loop_execs
    def __set_db_loop_execs(self, loop_execs):
        self._db_loop_execs = loop_execs
        self.is_dirty = True
    db_loop_execs = property(__get_db_loop_execs, __set_db_loop_execs)
    def db_get_loop_execs(self):
        """Return the (live) list of loop-execution children."""
        return self._db_loop_execs
    def db_add_loop_exec(self, loop_exec):
        """Append *loop_exec*, index it by id, and mark this object dirty."""
        self.is_dirty = True
        self._db_loop_execs.append(loop_exec)
        self.db_loop_execs_id_index[loop_exec.db_id] = loop_exec
    def db_change_loop_exec(self, loop_exec):
        """Replace the loop_exec with the same db_id, or append if absent."""
        self.is_dirty = True
        found = False
        for i in xrange(len(self._db_loop_execs)):
            if self._db_loop_execs[i].db_id == loop_exec.db_id:
                self._db_loop_execs[i] = loop_exec
                found = True
                break
        if not found:
            self._db_loop_execs.append(loop_exec)
        self.db_loop_execs_id_index[loop_exec.db_id] = loop_exec
    def db_delete_loop_exec(self, loop_exec):
        """Remove the loop_exec matching ``loop_exec.db_id``.

        Already-persisted entries are queued in ``db_deleted_loop_execs``.
        NOTE(review): same caveat as db_delete_annotation -- an unknown id
        raises KeyError from the index ``del``.
        """
        self.is_dirty = True
        for i in xrange(len(self._db_loop_execs)):
            if self._db_loop_execs[i].db_id == loop_exec.db_id:
                if not self._db_loop_execs[i].is_new:
                    self.db_deleted_loop_execs.append(self._db_loop_execs[i])
                del self._db_loop_execs[i]
                break
        del self.db_loop_execs_id_index[loop_exec.db_id]
    def db_get_loop_exec(self, key):
        """Linear scan for the loop_exec with db_id == *key* (None if absent)."""
        for i in xrange(len(self._db_loop_execs)):
            if self._db_loop_execs[i].db_id == key:
                return self._db_loop_execs[i]
        return None
    def db_get_loop_exec_by_id(self, key):
        """Indexed lookup by db_id (KeyError if absent)."""
        return self.db_loop_execs_id_index[key]
    def db_has_loop_exec_with_id(self, key):
        """True when a loop_exec with db_id *key* is indexed."""
        return key in self.db_loop_execs_id_index
    def getPrimaryKey(self):
        """Return this record's primary key (the ``id`` field)."""
        return self._db_id
class DBProvAssociation(object):
    """PROV association linking an activity to an agent under a role.

    Holds three optional single-object references (prov_activity,
    prov_agent, prov_plan), each with its own deleted-object queue for
    DB cleanup, plus the scalar ``prov_role`` string.
    """
    vtType = 'prov_association'
    def __init__(self, prov_activity=None, prov_agent=None, prov_plan=None, prov_role=None):
        """Store the three references and the role; start dirty and new."""
        self.db_deleted_prov_activity = []
        self._db_prov_activity = prov_activity
        self.db_deleted_prov_agent = []
        self._db_prov_agent = prov_agent
        self.db_deleted_prov_plan = []
        self._db_prov_plan = prov_plan
        self._db_prov_role = prov_role
        self.is_dirty = True
        self.is_new = True
    def __copy__(self):
        return DBProvAssociation.do_copy(self)
    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy self and any non-None references.

        NOTE(review): the new-ids branch reads ``self.db_id``, which this
        class never defines itself -- it assumes a caller assigned it as a
        plain attribute (the same way ``cp.db_id`` is assigned below).
        """
        cp = DBProvAssociation(prov_role=self._db_prov_role)
        if self._db_prov_activity is not None:
            cp._db_prov_activity = self._db_prov_activity.do_copy(new_ids, id_scope, id_remap)
        if self._db_prov_agent is not None:
            cp._db_prov_agent = self._db_prov_agent.do_copy(new_ids, id_scope, id_remap)
        if self._db_prov_plan is not None:
            cp._db_prov_plan = self._db_prov_plan.do_copy(new_ids, id_scope, id_remap)
        # set new ids
        if new_ids:
            new_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                id_remap[(id_scope.remap[self.vtType], self.db_id)] = new_id
            else:
                id_remap[(self.vtType, self.db_id)] = new_id
            cp.db_id = new_id
        # recreate indices and set flags
        if not new_ids:
            cp.is_dirty = self.is_dirty
            cp.is_new = self.is_new
        return cp
    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* into the current schema.

        Per-field hooks in *trans_dict* (keyed by class name) take
        precedence; otherwise each reference is converted via the
        matching ``DBRefProv*`` class and the role string is copied.
        """
        if new_obj is None:
            new_obj = DBProvAssociation()
        class_dict = {}
        if new_obj.__class__.__name__ in trans_dict:
            class_dict = trans_dict[new_obj.__class__.__name__]
        if 'prov_activity' in class_dict:
            res = class_dict['prov_activity'](old_obj, trans_dict)
            new_obj.db_prov_activity = res
        elif hasattr(old_obj, 'db_prov_activity') and old_obj.db_prov_activity is not None:
            obj = old_obj.db_prov_activity
            new_obj.db_add_prov_activity(DBRefProvActivity.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_activity') and hasattr(new_obj, 'db_deleted_prov_activity'):
            for obj in old_obj.db_deleted_prov_activity:
                n_obj = DBRefProvActivity.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_activity.append(n_obj)
        if 'prov_agent' in class_dict:
            res = class_dict['prov_agent'](old_obj, trans_dict)
            new_obj.db_prov_agent = res
        elif hasattr(old_obj, 'db_prov_agent') and old_obj.db_prov_agent is not None:
            obj = old_obj.db_prov_agent
            new_obj.db_add_prov_agent(DBRefProvAgent.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_agent') and hasattr(new_obj, 'db_deleted_prov_agent'):
            for obj in old_obj.db_deleted_prov_agent:
                n_obj = DBRefProvAgent.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_agent.append(n_obj)
        if 'prov_plan' in class_dict:
            res = class_dict['prov_plan'](old_obj, trans_dict)
            new_obj.db_prov_plan = res
        elif hasattr(old_obj, 'db_prov_plan') and old_obj.db_prov_plan is not None:
            obj = old_obj.db_prov_plan
            new_obj.db_add_prov_plan(DBRefProvPlan.update_version(obj, trans_dict))
        if hasattr(old_obj, 'db_deleted_prov_plan') and hasattr(new_obj, 'db_deleted_prov_plan'):
            for obj in old_obj.db_deleted_prov_plan:
                n_obj = DBRefProvPlan.update_version(obj, trans_dict)
                new_obj.db_deleted_prov_plan.append(n_obj)
        if 'prov_role' in class_dict:
            res = class_dict['prov_role'](old_obj, trans_dict)
            new_obj.db_prov_role = res
        elif hasattr(old_obj, 'db_prov_role') and old_obj.db_prov_role is not None:
            new_obj.db_prov_role = old_obj.db_prov_role
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj
    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Flatten self and any non-None references into
        (obj, parent_type, parent_id) tuples; with ``orphan=True``
        visited references are cleared afterwards.
        """
        children = []
        if self._db_prov_activity is not None:
            children.extend(self._db_prov_activity.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_prov_activity = None
        if self._db_prov_agent is not None:
            children.extend(self._db_prov_agent.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_prov_agent = None
        if self._db_prov_plan is not None:
            children.extend(self._db_prov_plan.db_children((self.vtType, self.db_id), orphan, for_action))
            if orphan:
                self._db_prov_plan = None
        children.append((self, parent[0], parent[1]))
        return children
    def db_deleted_children(self, remove=False):
        """Return objects queued for deletion; optionally clear the queues."""
        children = []
        children.extend(self.db_deleted_prov_activity)
        children.extend(self.db_deleted_prov_agent)
        children.extend(self.db_deleted_prov_plan)
        if remove:
            self.db_deleted_prov_activity = []
            self.db_deleted_prov_agent = []
            self.db_deleted_prov_plan = []
        return children
    def has_changes(self):
        """True when this object or any referenced child is dirty."""
        if self.is_dirty:
            return True
        if self._db_prov_activity is not None and self._db_prov_activity.has_changes():
            return True
        if self._db_prov_agent is not None and self._db_prov_agent.has_changes():
            return True
        if self._db_prov_plan is not None and self._db_prov_plan.has_changes():
            return True
        return False
    # --- single-reference child accessors ----------------------------------
    # Pattern per reference R below: a property whose setter marks the
    # object dirty; db_add_R / db_change_R assign without dirtying;
    # db_delete_R queues the old object in db_deleted_R when this object
    # was already persisted, then clears the slot.
    def __get_db_prov_activity(self):
        return self._db_prov_activity
    def __set_db_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
        self.is_dirty = True
    db_prov_activity = property(__get_db_prov_activity, __set_db_prov_activity)
    def db_add_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
    def db_change_prov_activity(self, prov_activity):
        self._db_prov_activity = prov_activity
    def db_delete_prov_activity(self, prov_activity):
        if not self.is_new:
            self.db_deleted_prov_activity.append(self._db_prov_activity)
        self._db_prov_activity = None
    def __get_db_prov_agent(self):
        return self._db_prov_agent
    def __set_db_prov_agent(self, prov_agent):
        self._db_prov_agent = prov_agent
        self.is_dirty = True
    db_prov_agent = property(__get_db_prov_agent, __set_db_prov_agent)
    def db_add_prov_agent(self, prov_agent):
        self._db_prov_agent = prov_agent
    def db_change_prov_agent(self, prov_agent):
        self._db_prov_agent = prov_agent
    def db_delete_prov_agent(self, prov_agent):
        if not self.is_new:
            self.db_deleted_prov_agent.append(self._db_prov_agent)
        self._db_prov_agent = None
    def __get_db_prov_plan(self):
        return self._db_prov_plan
    def __set_db_prov_plan(self, prov_plan):
        self._db_prov_plan = prov_plan
        self.is_dirty = True
    db_prov_plan = property(__get_db_prov_plan, __set_db_prov_plan)
    def db_add_prov_plan(self, prov_plan):
        self._db_prov_plan = prov_plan
    def db_change_prov_plan(self, prov_plan):
        self._db_prov_plan = prov_plan
    def db_delete_prov_plan(self, prov_plan):
        if not self.is_new:
            self.db_deleted_prov_plan.append(self._db_prov_plan)
        self._db_prov_plan = None
    # prov_role is a plain scalar: delete simply clears the slot.
    def __get_db_prov_role(self):
        return self._db_prov_role
    def __set_db_prov_role(self, prov_role):
        self._db_prov_role = prov_role
        self.is_dirty = True
    db_prov_role = property(__get_db_prov_role, __set_db_prov_role)
    def db_add_prov_role(self, prov_role):
        self._db_prov_role = prov_role
    def db_change_prov_role(self, prov_role):
        self._db_prov_role = prov_role
    def db_delete_prov_role(self, prov_role):
        self._db_prov_role = None
class DBOpmProcessValue(object):
    """OPM process value wrapping one execution item (module/group/loop exec)."""

    vtType = 'opm_process_value'

    def __init__(self, value=None):
        """Store the optional child *value*; start dirty and new."""
        self.db_deleted_value = []
        self._db_value = value
        self.is_dirty = True
        self.is_new = True

    def __copy__(self):
        return DBOpmProcessValue.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Deep-copy this object, optionally minting a new id via *id_scope*.

        NOTE(review): the new-ids branch reads ``self.db_id``, which this
        class never defines itself -- it assumes a caller assigned it as a
        plain attribute (the same way ``clone.db_id`` is assigned below).
        """
        clone = DBOpmProcessValue()
        if self._db_value is not None:
            clone._db_value = self._db_value.do_copy(new_ids, id_scope, id_remap)
        if new_ids:
            # mint a fresh id and record the old -> new mapping
            fresh_id = id_scope.getNewId(self.vtType)
            if self.vtType in id_scope.remap:
                source_type = id_scope.remap[self.vtType]
            else:
                source_type = self.vtType
            id_remap[(source_type, self.db_id)] = fresh_id
            clone.db_id = fresh_id
        else:
            clone.is_dirty = self.is_dirty
            clone.is_new = self.is_new
        return clone

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate *old_obj* to the current schema using *trans_dict* hooks."""
        if new_obj is None:
            new_obj = DBOpmProcessValue()
        class_dict = trans_dict.get(new_obj.__class__.__name__, {})
        # dispatch table: child vtType -> translator class
        translators = {'module_exec': DBModuleExec,
                       'group_exec': DBGroupExec,
                       'loop_exec': DBLoopExec}
        if 'value' in class_dict:
            new_obj.db_value = class_dict['value'](old_obj, trans_dict)
        elif hasattr(old_obj, 'db_value') and old_obj.db_value is not None:
            child = old_obj.db_value
            if child.vtType in translators:
                new_obj.db_add_value(
                    translators[child.vtType].update_version(child, trans_dict))
        if hasattr(old_obj, 'db_deleted_value') and hasattr(new_obj, 'db_deleted_value'):
            for child in old_obj.db_deleted_value:
                if child.vtType in translators:
                    new_obj.db_deleted_value.append(
                        translators[child.vtType].update_version(child, trans_dict))
        new_obj.is_new = old_obj.is_new
        new_obj.is_dirty = old_obj.is_dirty
        return new_obj

    def db_children(self, parent=(None,None), orphan=False, for_action=False):
        """Flatten self (and the child value, if any) into
        (obj, parent_type, parent_id) tuples; ``orphan=True`` detaches
        the visited child.
        """
        rows = []
        if self._db_value is not None:
            rows.extend(self._db_value.db_children((self.vtType, self.db_id),
                                                   orphan, for_action))
            if orphan:
                self._db_value = None
        rows.append((self, parent[0], parent[1]))
        return rows

    def db_deleted_children(self, remove=False):
        """Return objects queued for deletion; optionally clear the queue."""
        queued = list(self.db_deleted_value)
        if remove:
            self.db_deleted_value = []
        return queued

    def has_changes(self):
        """True when this object or its child value is dirty."""
        if self.is_dirty:
            return True
        return bool(self._db_value is not None and self._db_value.has_changes())

    def __get_db_value(self):
        # plain read of the private slot
        return self._db_value
    def __set_db_value(self, value):
        # property writes mark the object dirty
        self._db_value = value
        self.is_dirty = True
    db_value = property(__get_db_value, __set_db_value)

    def db_add_value(self, value):
        """Attach *value* as the child value (does not touch ``is_dirty``)."""
        self._db_value = value

    def db_change_value(self, value):
        """Replace the child value (does not touch ``is_dirty``)."""
        self._db_value = value

    def db_delete_value(self, value):
        """Detach the child value, queueing the old one when persisted."""
        if not self.is_new:
            self.db_deleted_value.append(self._db_value)
        self._db_value = None
|
en
| 0.748124
|
############################################################################### ## ## Copyright (C) 2011-2014, NYU-Poly. ## Copyright (C) 2006-2011, University of Utah. ## All rights reserved. ## Contact: <EMAIL> ## ## This file is part of VisTrails. ## ## "Redistribution and use in source and binary forms, with or without ## modification, are permitted provided that the following conditions are met: ## ## - Redistributions of source code must retain the above copyright notice, ## this list of conditions and the following disclaimer. ## - Redistributions in binary form must reproduce the above copyright ## notice, this list of conditions and the following disclaimer in the ## documentation and/or other materials provided with the distribution. ## - Neither the name of the University of Utah nor the names of its ## contributors may be used to endorse or promote products derived from ## this software without specific prior written permission. ## ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" ## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, ## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR ## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR ## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, ## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, ## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; ## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, ## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR ## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
## ############################################################################### generated automatically by auto_dao.py # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set 
flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # 
recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags # set new ids # recreate indices and set flags
| 0.99918
| 1
|
docs/core/examples/echoclient_udp.py
|
mathieui/twisted
| 1
|
6626833
|
<reponame>mathieui/twisted
#!/usr/bin/env python
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from __future__ import print_function
from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
class EchoClientDatagramProtocol(DatagramProtocol):
    """UDP client that sends each queued string and prints the echoes.

    Connects to the echo server at 127.0.0.1:8000, sends one datagram,
    and sends the next only after a reply arrives; once the queue is
    empty the reactor is stopped.
    """

    # Messages to send, in order.  Kept as a class-level template; each
    # instance takes its own working copy in startProtocol so popping
    # cannot mutate state shared with other instances.
    strings = [
        b"Hello, world!",
        b"What a fine day it is.",
        b"Bye-bye!"
    ]

    def startProtocol(self):
        """Connect the transport and kick off the send loop."""
        # Per-instance copy: pop() below must not corrupt the class attribute.
        self.strings = list(self.strings)
        self.transport.connect('127.0.0.1', 8000)
        self.sendDatagram()

    def sendDatagram(self):
        """Send the next queued datagram, or stop the reactor when done."""
        if self.strings:  # idiomatic truthiness test instead of len(...)
            self.transport.write(self.strings.pop(0))
        else:
            reactor.stop()

    def datagramReceived(self, datagram, host):
        """Print the echoed datagram and continue with the next message."""
        print('Datagram received: ', repr(datagram))
        self.sendDatagram()
def main():
    """Bind an ephemeral UDP port for the echo client and run the reactor."""
    client = EchoClientDatagramProtocol()
    # Port 0 asks the OS for any free local port.
    port = reactor.listenUDP(0, client)
    reactor.run()


if __name__ == '__main__':
    main()
|
#!/usr/bin/env python
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from __future__ import print_function
from twisted.internet.protocol import DatagramProtocol
from twisted.internet import reactor
class EchoClientDatagramProtocol(DatagramProtocol):
    """UDP client that sends a fixed list of datagrams one at a time.

    Each datagram is sent only after the echo of the previous one has been
    received; once the list is exhausted the reactor is stopped.
    """

    # Payloads to send, consumed front-to-back by sendDatagram().
    # NOTE(review): this is a *class* attribute mutated via pop(), so the
    # list is shared by all instances — fine for this one-shot example.
    strings = [
        b"Hello, world!",
        b"What a fine day it is.",
        b"Bye-bye!"
    ]

    def startProtocol(self):
        # Connected UDP: restricts traffic to this peer and allows write()
        # without an explicit address.
        self.transport.connect('127.0.0.1', 8000)
        self.sendDatagram()

    def sendDatagram(self):
        # Send the next pending payload, or stop the reactor when done.
        if len(self.strings):
            datagram = self.strings.pop(0)
            self.transport.write(datagram)
        else:
            reactor.stop()

    def datagramReceived(self, datagram, host):
        # Echo arrived: log it and trigger the next send.
        print('Datagram received: ', repr(datagram))
        self.sendDatagram()
def main():
    """Bind an ephemeral UDP port for the echo client and run the reactor."""
    client = EchoClientDatagramProtocol()
    # Port 0 asks the OS for any free local port.
    port = reactor.listenUDP(0, client)
    reactor.run()


if __name__ == '__main__':
    main()
|
en
| 0.639151
|
#!/usr/bin/env python # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details.
| 2.812813
| 3
|
manageXML/management/commands/find_duplicates.py
|
mokha/verdd
| 5
|
6626834
|
from django.core.management.base import BaseCommand, CommandError
import io, csv, os
from django.db.models import *
from django.db.models.functions import *
from manageXML.utils import get_duplicate_objects, annotate_objects, obj_to_txt
from django.apps import apps
import ast
class Command(BaseCommand):
    """
    This command finds all duplicate items and prints them.

    Usage: python manage.py find_duplicates -m manageXML.Lexeme -d ; --unique lexeme language --fields id lexeme language pos --filters language='fin'
    """

    help = 'This command finds all duplicate items and prints them.'

    def add_arguments(self, parser):
        parser.add_argument('-m', '--model', type=str, help='The model to search for duplicates in. '
                                                            'Format: app.model (e.g. manageXML.Lexeme).', )
        parser.add_argument('-d', '--delimiter', type=str, nargs='?', default=';',
                            help='The delimiter to use when joining fields of duplicate objects.', )
        parser.add_argument('--unique', type=str, nargs='+', help='The unique field names to find duplicates in.')
        parser.add_argument('--annotations', type=str, nargs='+', help='Extra annotations to obtain.')
        parser.add_argument('--fields', type=str, nargs='+', default=('id',), help='Fields to display.')
        parser.add_argument('--filters', type=str, nargs='+', default=tuple(), help='Filters to apply on duplicates.')
        parser.add_argument('-s', '--sort', type=str, nargs='?', default='id',
                            help='Used to sort duplicates.', )

    def handle(self, *args, **options):
        try:
            app_name, model_name = options['model'].split('.')
            delimiter = options['delimiter']
            # --unique and --annotations have no default, so argparse stores
            # None when they are omitted; tuple(None) would raise TypeError.
            unique_fields = tuple(options['unique'] or ())
            annotations = tuple(options['annotations'] or ())
            fields = tuple(options['fields'])
            filters = tuple(options['filters'])
            order_by = options['sort']
            _model = apps.get_model(app_name, model_name)
            duplicates = get_duplicate_objects(model=_model, annotations=annotations, unique_fields=unique_fields)
            if filters:
                # Each filter is a Django lookup, e.g. id__gt=1. Split on the
                # first '=' only so values containing '=' stay intact.
                filters = [_f.split('=', 1) for _f in filters if '=' in _f]
                for _f in filters:
                    try:
                        _f[1] = ast.literal_eval(_f[1])
                    except (ValueError, SyntaxError):  # not a Python literal: keep it as a string
                        pass
                duplicates = duplicates.filter(**dict(filters))
            output = []
            for dd in duplicates:  # for each set of duplicated values
                dup_line = []
                d_objects = annotate_objects(_model, annotations).filter(
                    **{x: dd[x] for x in unique_fields})  # the objects sharing those values
                d_objects = d_objects.order_by(order_by)
                for _d in d_objects:
                    dup_line.append(obj_to_txt(_d, fields=fields, delimiter=delimiter))  # serialize each object
                output.append(delimiter.join(dup_line))  # one line per duplicate group
            self.stdout.write("\n".join(output))  # print final result
        except Exception as e:
            # CLI boundary: report the failure instead of dumping a traceback.
            self.stderr.write(self.style.ERROR(str(e)))
|
from django.core.management.base import BaseCommand, CommandError
import io, csv, os
from django.db.models import *
from django.db.models.functions import *
from manageXML.utils import get_duplicate_objects, annotate_objects, obj_to_txt
from django.apps import apps
import ast
class Command(BaseCommand):
    """
    This command finds all duplicate items and prints them.

    Usage: python manage.py find_duplicates -m manageXML.Lexeme -d ; --unique lexeme language --fields id lexeme language pos --filters language='fin'
    """

    help = 'This command finds all duplicate items and prints them.'

    def add_arguments(self, parser):
        parser.add_argument('-m', '--model', type=str, help='The model to search for duplicates in. '
                                                            'Format: app.model (e.g. manageXML.Lexeme).', )
        parser.add_argument('-d', '--delimiter', type=str, nargs='?', default=';',
                            help='The delimiter to use when joining fields of duplicate objects.', )
        parser.add_argument('--unique', type=str, nargs='+', help='The unique field names to find duplicates in.')
        parser.add_argument('--annotations', type=str, nargs='+', help='Extra annotations to obtain.')
        parser.add_argument('--fields', type=str, nargs='+', default=('id',), help='Fields to display.')
        parser.add_argument('--filters', type=str, nargs='+', default=tuple(), help='Filters to apply on duplicates.')
        parser.add_argument('-s', '--sort', type=str, nargs='?', default='id',
                            help='Used to sort duplicates.', )

    def handle(self, *args, **options):
        try:
            app_name, model_name = options['model'].split('.')
            delimiter = options['delimiter']
            # --unique and --annotations have no default, so argparse stores
            # None when they are omitted; tuple(None) would raise TypeError.
            unique_fields = tuple(options['unique'] or ())
            annotations = tuple(options['annotations'] or ())
            fields = tuple(options['fields'])
            filters = tuple(options['filters'])
            order_by = options['sort']
            _model = apps.get_model(app_name, model_name)
            duplicates = get_duplicate_objects(model=_model, annotations=annotations, unique_fields=unique_fields)
            if filters:
                # Each filter is a Django lookup, e.g. id__gt=1. Split on the
                # first '=' only so values containing '=' stay intact.
                filters = [_f.split('=', 1) for _f in filters if '=' in _f]
                for _f in filters:
                    try:
                        _f[1] = ast.literal_eval(_f[1])
                    except (ValueError, SyntaxError):  # not a Python literal: keep it as a string
                        pass
                duplicates = duplicates.filter(**dict(filters))
            output = []
            for dd in duplicates:  # for each set of duplicated values
                dup_line = []
                d_objects = annotate_objects(_model, annotations).filter(
                    **{x: dd[x] for x in unique_fields})  # the objects sharing those values
                d_objects = d_objects.order_by(order_by)
                for _d in d_objects:
                    dup_line.append(obj_to_txt(_d, fields=fields, delimiter=delimiter))  # serialize each object
                output.append(delimiter.join(dup_line))  # one line per duplicate group
            self.stdout.write("\n".join(output))  # print final result
        except Exception as e:
            # CLI boundary: report the failure instead of dumping a traceback.
            self.stderr.write(self.style.ERROR(str(e)))
|
en
| 0.542618
|
This command finds all duplicate items and prints them. Usage: python manage.py find_duplicates -m manageXML.Lexeme -d ; --unique lexeme language --fields id lexeme language pos --filters language='fin' # id__gt=1 # if failed, treat it as a string # for each duplicate values # get the objects that have them # convert them to text # add duplicate line # print final result
| 2.839667
| 3
|
t3d/__init__.py
|
SamDSchofield/t3d
| 0
|
6626835
|
"""Top-level package for T3D."""
__author__ = """<NAME>"""
__email__ = '<EMAIL>'
__version__ = '0.1.0'
|
"""Top-level package for T3D."""
__author__ = """<NAME>"""
__email__ = '<EMAIL>'
__version__ = '0.1.0'
|
en
| 0.677947
|
Top-level package for T3D. <NAME>
| 0.932157
| 1
|
scripts/script.py
|
thaddywu/RailwayVIS
| 0
|
6626836
|
<gh_stars>0
# -*- coding: utf-8 -*-
"""Build the station/railway graph consumed by the visualisation.

Reads network.json (railway name -> route + metadata), writes the key-station
list to keystations.txt, and dumps {'nodes': {station: [lng, lat, level]},
'links': [{'u', 'v', 'railways'}, ...]} to ../json/tmp.json.
"""
import csv, json, os
from IPython import embed

# Key stations: (name, longitude, latitude, importance level 1|2).
jingwei = [
    ('重庆', 106.549, 29.581, 1),
    ('北京', 116.408, 39.904, 1),
    ('上海', 121.445, 31.213, 1),
    ('广州', 113.265, 23.108, 1),
    ('武汉', 114.279, 30.573, 1),
    ('济南', 117.009, 36.663, 1),
    ('深圳', 114.109, 22.544, 2),
    ('乌鲁木齐', 87.585, 43.781, 1),
    ('兰州', 103.751, 36.068, 1),
    ('南京', 118.769, 32.048, 1),
    ('南宁', 108.295, 22.838, 1),
    ('南昌', 115.889, 28.671, 1),
    ('合肥', 117.275, 31.861, 1),
    ('呼和浩特', 111.686, 40.819, 1),
    ('哈尔滨', 126.645, 45.758, 1),
    ('大连', 121.576, 38.944, 2),
    ('天津', 117.246, 39.117, 1),
    ('太原', 112.551, 37.893, 1),
    ('成都', 104.071, 30.670, 1),
    # ('拉萨', 91.126, 29.656),  # crashes
    ('昆明', 102.702, 25.051, 1),
    ('杭州', 120.165, 30.319, 1),
    ('沈阳', 123.418, 41.799, 1),
    ('石家庄', 114.498, 38.042, 1),
    # ('福州', 119.303, 26.071),  # crashes
    ('西宁', 101.778, 36.621, 1),
    ('贵阳', 106.700, 26.572, 1),
    ('郑州', 113.641, 34.758, 1),
    ('银川', 106.263, 38.468, 1),
    ('长春', 125.324, 43.871, 1),
    ('长沙', 112.967, 28.197, 1),
    ('西安', 108.969, 34.285, 1),
    # ('秦皇岛', 119.601, 39.932),
    # ('厦门', 118.070, 24.445),
    ('赣州', 114.916, 25.832, 2),
    ('上饶', 117.956, 28.451, 2),
    ('徐州', 117.188, 34.263, 2),
    ('连云港', 119.167, 34.599, 2),
    ('青岛', 120.343, 36.088, 2),
    ('襄阳', 112.150, 32.180, 2),
    # # ('巴东', 110.312, 30.813),  # this city is missing from the railway data
    # # ('乐山', 103.747, 29.564),
    ('怀化', 109.945, 27.553, 2),
    # ('汉中', 107.025, 33.071),
    ('广元', 105.819, 32.446, 2),
    ('桂林', 110.277, 25.281, 2),
    ('衡阳', 112.614, 26.902, 2),
    ('包头', 109.809, 40.657, 2),
    # ('宜昌', 111.281, 30.708)
    ('嘉峪关', 98.17, 39.45, 2),
    ('大同', 113.17, 40.06, 2),
    ('南阳', 112.527, 32.996, 2),
    ('信阳', 114.06, 32.125, 2),
    ('张家口', 114.939, 40.764, 2),
    ('宝鸡', 107.149, 34.381, 2),
    # ('天水', 105.709, 34.576),
    # ('洛阳', 112.361, 34.671),
    ('金华', 119.649, 29.107, 2),
    # ('宁波', 121.542, 29.872),
    ('商丘', 115.635, 34.434, 2),
    # ('温州', 120.648, 28.003)
    # ('蚌埠', )
    # ('阜阳', )
    # ('商丘', )
    # ('芜湖', )
    # ('宣城', )
    ('柳州', 109.373, 24.315, 2)
]

# NOTE(review): 'let' below is a typo for 'lat'; harmless since it is unused.
with open('keystations.txt', 'w', encoding='utf-8') as f:
    f.write(str([station for station, lng, let, level in jingwei]))

D = {'nodes': {}, 'links': []}
for station, lng, lat, level in jingwei:
    D['nodes'][station] = [lng, lat, level]

# Map an adjacent-station pair to the list of railways covering that segment.
citypair2railways = {}
with open('network.json', 'r') as f:
    D1 = json.load(f)
for railway_name, D2 in D1.items():
    for i in range(len(D2['route']) - 1):
        uu = D2['route'][i]
        vv = D2['route'][i + 1]
        # Canonicalize the pair order: if the forward key has not been seen
        # yet, try the reversed one (the first occurrence fixes the order
        # used for all later hits of the same segment).
        if (uu, vv) not in citypair2railways:
            tmp = vv
            vv = uu
            uu = tmp
        citypair2railways.setdefault((uu, vv), []).append({
            'name': railway_name,
            'date': D2['date'],
            'service': D2['service'],
            'electrification': D2['electrification']
        })
for k, v in citypair2railways.items():
    D['links'].append({
        'u': k[0],
        'v': k[1],
        'railways': v
    })

with open('../json/tmp.json', 'w') as f:
    # js = json.dumps(D, ensure_ascii=False)
    # shown in chinese character instead of \uxxx
    json.dump(D, f)
|
# -*- coding: utf-8 -*-
"""Build the station/railway graph consumed by the visualisation.

Reads network.json (railway name -> route + metadata), writes the key-station
list to keystations.txt, and dumps {'nodes': {station: [lng, lat, level]},
'links': [{'u', 'v', 'railways'}, ...]} to ../json/tmp.json.
"""
import csv, json, os
from IPython import embed

# Key stations: (name, longitude, latitude, importance level 1|2).
jingwei = [
    ('重庆', 106.549, 29.581, 1),
    ('北京', 116.408, 39.904, 1),
    ('上海', 121.445, 31.213, 1),
    ('广州', 113.265, 23.108, 1),
    ('武汉', 114.279, 30.573, 1),
    ('济南', 117.009, 36.663, 1),
    ('深圳', 114.109, 22.544, 2),
    ('乌鲁木齐', 87.585, 43.781, 1),
    ('兰州', 103.751, 36.068, 1),
    ('南京', 118.769, 32.048, 1),
    ('南宁', 108.295, 22.838, 1),
    ('南昌', 115.889, 28.671, 1),
    ('合肥', 117.275, 31.861, 1),
    ('呼和浩特', 111.686, 40.819, 1),
    ('哈尔滨', 126.645, 45.758, 1),
    ('大连', 121.576, 38.944, 2),
    ('天津', 117.246, 39.117, 1),
    ('太原', 112.551, 37.893, 1),
    ('成都', 104.071, 30.670, 1),
    # ('拉萨', 91.126, 29.656),  # crashes
    ('昆明', 102.702, 25.051, 1),
    ('杭州', 120.165, 30.319, 1),
    ('沈阳', 123.418, 41.799, 1),
    ('石家庄', 114.498, 38.042, 1),
    # ('福州', 119.303, 26.071),  # crashes
    ('西宁', 101.778, 36.621, 1),
    ('贵阳', 106.700, 26.572, 1),
    ('郑州', 113.641, 34.758, 1),
    ('银川', 106.263, 38.468, 1),
    ('长春', 125.324, 43.871, 1),
    ('长沙', 112.967, 28.197, 1),
    ('西安', 108.969, 34.285, 1),
    # ('秦皇岛', 119.601, 39.932),
    # ('厦门', 118.070, 24.445),
    ('赣州', 114.916, 25.832, 2),
    ('上饶', 117.956, 28.451, 2),
    ('徐州', 117.188, 34.263, 2),
    ('连云港', 119.167, 34.599, 2),
    ('青岛', 120.343, 36.088, 2),
    ('襄阳', 112.150, 32.180, 2),
    # # ('巴东', 110.312, 30.813),  # this city is missing from the railway data
    # # ('乐山', 103.747, 29.564),
    ('怀化', 109.945, 27.553, 2),
    # ('汉中', 107.025, 33.071),
    ('广元', 105.819, 32.446, 2),
    ('桂林', 110.277, 25.281, 2),
    ('衡阳', 112.614, 26.902, 2),
    ('包头', 109.809, 40.657, 2),
    # ('宜昌', 111.281, 30.708)
    ('嘉峪关', 98.17, 39.45, 2),
    ('大同', 113.17, 40.06, 2),
    ('南阳', 112.527, 32.996, 2),
    ('信阳', 114.06, 32.125, 2),
    ('张家口', 114.939, 40.764, 2),
    ('宝鸡', 107.149, 34.381, 2),
    # ('天水', 105.709, 34.576),
    # ('洛阳', 112.361, 34.671),
    ('金华', 119.649, 29.107, 2),
    # ('宁波', 121.542, 29.872),
    ('商丘', 115.635, 34.434, 2),
    # ('温州', 120.648, 28.003)
    # ('蚌埠', )
    # ('阜阳', )
    # ('商丘', )
    # ('芜湖', )
    # ('宣城', )
    ('柳州', 109.373, 24.315, 2)
]

# NOTE(review): 'let' below is a typo for 'lat'; harmless since it is unused.
with open('keystations.txt', 'w', encoding='utf-8') as f:
    f.write(str([station for station, lng, let, level in jingwei]))

D = {'nodes': {}, 'links': []}
for station, lng, lat, level in jingwei:
    D['nodes'][station] = [lng, lat, level]

# Map an adjacent-station pair to the list of railways covering that segment.
citypair2railways = {}
with open('network.json', 'r') as f:
    D1 = json.load(f)
for railway_name, D2 in D1.items():
    for i in range(len(D2['route']) - 1):
        uu = D2['route'][i]
        vv = D2['route'][i + 1]
        # Canonicalize the pair order: if the forward key has not been seen
        # yet, try the reversed one (the first occurrence fixes the order
        # used for all later hits of the same segment).
        if (uu, vv) not in citypair2railways:
            tmp = vv
            vv = uu
            uu = tmp
        citypair2railways.setdefault((uu, vv), []).append({
            'name': railway_name,
            'date': D2['date'],
            'service': D2['service'],
            'electrification': D2['electrification']
        })
for k, v in citypair2railways.items():
    D['links'].append({
        'u': k[0],
        'v': k[1],
        'railways': v
    })

with open('../json/tmp.json', 'w') as f:
    # js = json.dumps(D, ensure_ascii=False)
    # shown in chinese character instead of \uxxx
    json.dump(D, f)
|
en
| 0.391856
|
# -*- coding: utf-8 -*- # ('拉萨', 91.126, 29.656), # 炸 # ('福州', 119.303, 26.071), # 炸 # ('秦皇岛', 119.601, 39.932), # ('厦门', 118.070, 24.445), # # ('巴东', 110.312, 30.813), # railway上没这个城市 # # ('乐山', 103.747, 29.564), # ('汉中', 107.025, 33.071), # ('宜昌', 111.281, 30.708) # ('天水', 105.709, 34.576), # ('洛阳', 112.361, 34.671), # ('宁波', 121.542, 29.872), # ('温州', 120.648, 28.003) # ('蚌埠', ) # ('阜阳', ) # ('商丘', ) # ('芜湖', ) # ('宣城', ) # js = json.dumps(D, ensure_ascii=False) # shown in chinese character instead of \uxxx
| 2.119078
| 2
|
wavelet.py
|
MuAuan/Scipy-Swan
| 0
|
6626837
|
<reponame>MuAuan/Scipy-Swan
"""Plot the waveform and Morlet-wavelet scalogram of a WAV clip."""
from swan import pycwt
import numpy as np
import matplotlib.pyplot as plt
import wave

# NOTE(review): the original `from scipy import fromstring, int16` was
# removed: neither name was used (decoding is done with np.frombuffer) and
# both were dropped from modern scipy/numpy, so the import itself crashed.

wavfile = './hirakegoma.wav'
# wavfile = 'ohayo.wav'

wr = wave.open(wavfile, "rb")
ch = wr.getnchannels()
width = wr.getsampwidth()  # bytes per sample
fr = wr.getframerate()
fn = wr.getnframes()
fs = fn / fr  # clip duration in seconds
print('ch', ch)
print('frame', fn)
print('fr', fr)
print('sampling fs ', fs, 'sec')
print('width', width)
origin = wr.readframes(wr.getnframes())
# NOTE(review): this keeps the first fn *bytes*, i.e. fn/(width*ch) frames,
# not the first fn frames — presumably a deliberate truncation of the clip;
# confirm against the original experiment.
data = origin[:fn]
wr.close()

amp = max(data)  # peak byte value of the truncated buffer
print(amp)
print('len of origin', len(origin))
print('len of sampling: ', len(data))

# Interleaved int16 samples -> normalized floats in [-1, 1).
y = np.frombuffer(data, dtype="int16") / 32768.0
# fn // 2 matches len(y) (2 bytes per int16 sample). np.linspace requires an
# integer sample count; the original passed the float fn/2, which raises
# TypeError on modern NumPy.
x = np.linspace(0, fs, fn // 2, endpoint=False)
plt.plot(x, y)
plt.show()

Fs = 44100
omega0 = 2  # Morlet wavelet parameter; values also tried: 0.2, 1, 8

# (1) CWT over an explicit frequency grid (20 Hz .. 20 kHz).
freqs = np.arange(20, 20000, 100)
r = pycwt.cwt_f(y, freqs, Fs, pycwt.Morlet(omega0))
rr = np.abs(r)

plt.rcParams['figure.figsize'] = (10, 6)
fig = plt.figure()
ax1 = fig.add_axes([0.1, 0.75, 0.7, 0.2])
ax2 = fig.add_axes([0.1, 0.1, 0.7, 0.60], sharex=ax1)
ax3 = fig.add_axes([0.83, 0.1, 0.03, 0.6])
ax1.plot(x, y, 'k')
img = ax2.imshow(np.flipud(rr), extent=[0, 5, 20, 20000], aspect='auto', cmap='hsv')
twin_ax = ax2
twin_ax.set_yscale('log')
twin_ax.set_xlim(0, 5)
twin_ax.set_ylim(20, 20000)
ax2.tick_params(which='both', labelleft=False, left=False)
twin_ax.tick_params(which='both', labelleft=True, left=True, labelright=False)
fig.colorbar(img, cax=ax3)
plt.show()
|
"""Plot the waveform and Morlet-wavelet scalogram of a WAV clip."""
from swan import pycwt
import numpy as np
import matplotlib.pyplot as plt
import wave

# NOTE(review): the original `from scipy import fromstring, int16` was
# removed: neither name was used (decoding is done with np.frombuffer) and
# both were dropped from modern scipy/numpy, so the import itself crashed.

wavfile = './hirakegoma.wav'
# wavfile = 'ohayo.wav'

wr = wave.open(wavfile, "rb")
ch = wr.getnchannels()
width = wr.getsampwidth()  # bytes per sample
fr = wr.getframerate()
fn = wr.getnframes()
fs = fn / fr  # clip duration in seconds
print('ch', ch)
print('frame', fn)
print('fr', fr)
print('sampling fs ', fs, 'sec')
print('width', width)
origin = wr.readframes(wr.getnframes())
# NOTE(review): this keeps the first fn *bytes*, i.e. fn/(width*ch) frames,
# not the first fn frames — presumably a deliberate truncation of the clip;
# confirm against the original experiment.
data = origin[:fn]
wr.close()

amp = max(data)  # peak byte value of the truncated buffer
print(amp)
print('len of origin', len(origin))
print('len of sampling: ', len(data))

# Interleaved int16 samples -> normalized floats in [-1, 1).
y = np.frombuffer(data, dtype="int16") / 32768.0
# fn // 2 matches len(y) (2 bytes per int16 sample). np.linspace requires an
# integer sample count; the original passed the float fn/2, which raises
# TypeError on modern NumPy.
x = np.linspace(0, fs, fn // 2, endpoint=False)
plt.plot(x, y)
plt.show()

Fs = 44100
omega0 = 2  # Morlet wavelet parameter; values also tried: 0.2, 1, 8

# (1) CWT over an explicit frequency grid (20 Hz .. 20 kHz).
freqs = np.arange(20, 20000, 100)
r = pycwt.cwt_f(y, freqs, Fs, pycwt.Morlet(omega0))
rr = np.abs(r)

plt.rcParams['figure.figsize'] = (10, 6)
fig = plt.figure()
ax1 = fig.add_axes([0.1, 0.75, 0.7, 0.2])
ax2 = fig.add_axes([0.1, 0.1, 0.7, 0.60], sharex=ax1)
ax3 = fig.add_axes([0.83, 0.1, 0.03, 0.6])
ax1.plot(x, y, 'k')
img = ax2.imshow(np.flipud(rr), extent=[0, 5, 20, 20000], aspect='auto', cmap='hsv')
twin_ax = ax2
twin_ax.set_yscale('log')
twin_ax.set_xlim(0, 5)
twin_ax.set_ylim(20, 20000)
ax2.tick_params(which='both', labelleft=False, left=False)
twin_ax.tick_params(which='both', labelleft=True, left=True, labelright=False)
fig.colorbar(img, cax=ax3)
plt.show()
|
ja
| 0.735401
|
#wavfile = 'ohayo.wav' # ステレオ前提 > monoral #0.2 #1 #2 #8 # (1) Freqを指定してcwt
| 2.280029
| 2
|
pydfs_lineup_optimizer/__init__.py
|
raymondhuynh1988/pydfs-lineup-optimizer
| 1
|
6626838
|
from pydfs_lineup_optimizer.version import __version__
from pydfs_lineup_optimizer.constants import Site, Sport
from pydfs_lineup_optimizer.player import Player
from pydfs_lineup_optimizer.exceptions import LineupOptimizerException, LineupOptimizerIncorrectTeamName, \
LineupOptimizerIncorrectPositionName, LineupOptimizerIncorrectCSV
from pydfs_lineup_optimizer.lineup_optimizer import LineupOptimizer
from pydfs_lineup_optimizer.lineup import Lineup
from pydfs_lineup_optimizer.sites import SitesRegistry
from pydfs_lineup_optimizer.lineup_exporter import CSVLineupExporter, FantasyDraftCSVLineupExporter
from pydfs_lineup_optimizer.tz import set_timezone
__all__ = [
'get_optimizer', 'Site', 'Sport', 'Player', 'LineupOptimizerException', 'LineupOptimizerIncorrectTeamName',
'LineupOptimizerIncorrectPositionName', 'LineupOptimizerIncorrectCSV', 'LineupOptimizer', 'Lineup',
'CSVLineupExporter', 'set_timezone', 'FantasyDraftCSVLineupExporter',
]
def get_optimizer(site: str, sport: str, **kwargs) -> LineupOptimizer:
    """Return a LineupOptimizer configured for the given site/sport pair.

    Args:
        site: A site identifier (see the ``Site`` constants).
        sport: A sport identifier (see the ``Sport`` constants).
        **kwargs: Forwarded verbatim to the ``LineupOptimizer`` constructor.

    Returns:
        A ``LineupOptimizer`` built from the registered settings for the pair.
    """
    return LineupOptimizer(SitesRegistry.get_settings(site, sport), **kwargs)
|
from pydfs_lineup_optimizer.version import __version__
from pydfs_lineup_optimizer.constants import Site, Sport
from pydfs_lineup_optimizer.player import Player
from pydfs_lineup_optimizer.exceptions import LineupOptimizerException, LineupOptimizerIncorrectTeamName, \
LineupOptimizerIncorrectPositionName, LineupOptimizerIncorrectCSV
from pydfs_lineup_optimizer.lineup_optimizer import LineupOptimizer
from pydfs_lineup_optimizer.lineup import Lineup
from pydfs_lineup_optimizer.sites import SitesRegistry
from pydfs_lineup_optimizer.lineup_exporter import CSVLineupExporter, FantasyDraftCSVLineupExporter
from pydfs_lineup_optimizer.tz import set_timezone
__all__ = [
'get_optimizer', 'Site', 'Sport', 'Player', 'LineupOptimizerException', 'LineupOptimizerIncorrectTeamName',
'LineupOptimizerIncorrectPositionName', 'LineupOptimizerIncorrectCSV', 'LineupOptimizer', 'Lineup',
'CSVLineupExporter', 'set_timezone', 'FantasyDraftCSVLineupExporter',
]
def get_optimizer(site: str, sport: str, **kwargs) -> LineupOptimizer:
    """Return a LineupOptimizer configured for the given site/sport pair.

    Args:
        site: A site identifier (see the ``Site`` constants).
        sport: A sport identifier (see the ``Sport`` constants).
        **kwargs: Forwarded verbatim to the ``LineupOptimizer`` constructor.

    Returns:
        A ``LineupOptimizer`` built from the registered settings for the pair.
    """
    return LineupOptimizer(SitesRegistry.get_settings(site, sport), **kwargs)
|
none
| 1
| 1.947397
| 2
|
|
tests/test_server.py
|
CSP197/submission-criteria
| 112
|
6626839
|
#!/usr/bin/env python
"""Test Server."""
# System
import datetime
import os
# Third Party
import requests
# First Party
from submission_criteria.database_manager import DatabaseManager
def fetch_competition(db):
    """Return the competition currently in progress (start < now < end).

    Args:
        db: Mongo-style database handle exposing a ``competitions`` collection.

    Returns:
        The first matching competition document (``None`` if no competition
        is currently active, per ``find_one`` semantics).
    """
    now = datetime.datetime.utcnow()
    return db.competitions.find_one({
        "start_date": {
            "$lt": now
        },
        "end_date": {
            "$gt": now
        }
    })
def test_server(db_manager, comp_id):
    """POST every recent submission for *comp_id* to the local scoring server.

    Args:
        db_manager: DatabaseManager providing submission lookups.
        comp_id: Competition id whose submissions should be replayed.
    """
    submissions = db_manager.get_everyone_elses_recent_submssions(comp_id, '')
    # NOTE(review): the fallback API key was redacted in this dump; the
    # original literal was left unterminated, which is a syntax error. The
    # placeholder string below restores valid syntax.
    api_key = os.environ.get("API_KEY", "<KEY>")
    for submission in submissions:
        s_id = str(submission["submission_id"])
        print(s_id)
        requests.post(
            "http://localhost:5151/",
            json={
                'submission_id': s_id,
                'api_key': api_key
            })
def main():
    """Replay all recent submissions for the currently active competition."""
    db_manager = DatabaseManager()
    # Assumes a competition is active: fetch_competition returning None
    # would raise TypeError on the ["_id"] access.
    cid = str(fetch_competition(db_manager.db)["_id"])
    print(cid)
    test_server(db_manager, cid)


if __name__ == '__main__':
    main()
|
#!/usr/bin/env python
"""Test Server."""
# System
import datetime
import os
# Third Party
import requests
# First Party
from submission_criteria.database_manager import DatabaseManager
def fetch_competition(db):
    """Return the competition currently in progress (start < now < end).

    Args:
        db: Mongo-style database handle exposing a ``competitions`` collection.

    Returns:
        The first matching competition document (``None`` if no competition
        is currently active, per ``find_one`` semantics).
    """
    now = datetime.datetime.utcnow()
    return db.competitions.find_one({
        "start_date": {
            "$lt": now
        },
        "end_date": {
            "$gt": now
        }
    })
def test_server(db_manager, comp_id):
    """POST every recent submission for *comp_id* to the local scoring server.

    Args:
        db_manager: DatabaseManager providing submission lookups.
        comp_id: Competition id whose submissions should be replayed.
    """
    submissions = db_manager.get_everyone_elses_recent_submssions(comp_id, '')
    # NOTE(review): the fallback API key was redacted in this dump; the
    # original literal was left unterminated, which is a syntax error. The
    # placeholder string below restores valid syntax.
    api_key = os.environ.get("API_KEY", "<KEY>")
    for submission in submissions:
        s_id = str(submission["submission_id"])
        print(s_id)
        requests.post(
            "http://localhost:5151/",
            json={
                'submission_id': s_id,
                'api_key': api_key
            })
def main():
    """Replay all recent submissions for the currently active competition."""
    db_manager = DatabaseManager()
    # Assumes a competition is active: fetch_competition returning None
    # would raise TypeError on the ["_id"] access.
    cid = str(fetch_competition(db_manager.db)["_id"])
    print(cid)
    test_server(db_manager, cid)


if __name__ == '__main__':
    main()
|
en
| 0.5258
|
#!/usr/bin/env python Test Server. # System # Third Party # First Party
| 2.426917
| 2
|
ax/models/torch/botorch.py
|
aerometu/Ax
| 1
|
6626840
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from copy import deepcopy
from typing import Any, Callable, Dict, List, Optional, Tuple, cast
import numpy as np
import torch
from ax.core.types import TConfig, TGenMetadata
from ax.models.torch.botorch_defaults import (
get_and_fit_model,
get_NEI,
predict_from_model,
recommend_best_observed_point,
scipy_optimizer,
)
from ax.models.torch.utils import _get_X_pending_and_observed, normalize_indices
from ax.models.torch_base import TorchModel
from ax.utils.common.docutils import copy_doc
from ax.utils.common.typeutils import checked_cast
from botorch.acquisition.acquisition import AcquisitionFunction
from botorch.models.model import Model
from torch import Tensor
from .utils import subset_model
TModelConstructor = Callable[
[
List[Tensor],
List[Tensor],
List[Tensor],
List[int],
List[int],
List[str],
Optional[Dict[str, Tensor]],
Any,
],
Model,
]
TModelPredictor = Callable[[Model, Tensor], Tuple[Tensor, Tensor]]
TAcqfConstructor = Callable[
[
Model,
Tensor,
Optional[Tuple[Tensor, Tensor]],
Optional[Tensor],
Optional[Tensor],
Any,
],
AcquisitionFunction,
]
TOptimizer = Callable[
[
AcquisitionFunction,
Tensor,
int,
Optional[List[Tuple[Tensor, Tensor, float]]],
Optional[Dict[int, float]],
Optional[Callable[[Tensor], Tensor]],
Any,
],
Tuple[Tensor, Tensor],
]
TBestPointRecommender = Callable[
[
TorchModel,
List[Tuple[float, float]],
Tensor,
Optional[Tuple[Tensor, Tensor]],
Optional[Tuple[Tensor, Tensor]],
Optional[Dict[int, float]],
Optional[TConfig],
Optional[Dict[int, float]],
],
Optional[Tensor],
]
class BotorchModel(TorchModel):
r"""
Customizable botorch model.
By default, this uses a noisy Expected Improvement acquisition function on
top of a model made up of separate GPs, one for each outcome. This behavior
can be modified by providing custom implementations of the following
components:
- a `model_constructor` that instantiates and fits a model on data
- a `model_predictor` that predicts outcomes using the fitted model
- a `acqf_constructor` that creates an acquisition function from a fitted model
- a `acqf_optimizer` that optimizes the acquisition function
- a `best_point_recommender` that recommends a current "best" point (i.e.,
what the model recommends if the learning process ended now)
Args:
model_constructor: A callable that instantiates and fits a model on data,
with signature as described below.
model_predictor: A callable that predicts using the fitted model, with
signature as described below.
acqf_constructor: A callable that creates an acquisition function from a
fitted model, with signature as described below.
acqf_optimizer: A callable that optimizes the acquisition function, with
signature as described below.
best_point_recommender: A callable that recommends the best point, with
signature as described below.
refit_on_cv: If True, refit the model for each fold when performing
cross-validation.
refit_on_update: If True, refit the model after updating the training
data using the `update` method.
warm_start_refitting: If True, start model refitting from previous
model parameters in order to speed up the fitting process.
Call signatures:
::
model_constructor(
Xs,
Ys,
Yvars,
task_features,
fidelity_features,
metric_names,
state_dict,
**kwargs,
) -> model
Here `Xs`, `Ys`, `Yvars` are lists of tensors (one element per outcome),
`task_features` identifies columns of Xs that should be modeled as a task,
`fidelity_features` is a list of ints that specify the positions of fidelity
parameters in 'Xs', `metric_names` provides the names of each `Y` in `Ys`,
`state_dict` is a pytorch module state dict, and `model` is a BoTorch `Model`.
Optional kwargs are being passed through from the `BotorchModel` constructor.
This callable is assumed to return a fitted BoTorch model that has the same
dtype and lives on the same device as the input tensors.
::
model_predictor(model, X) -> [mean, cov]
Here `model` is a fitted botorch model, `X` is a tensor of candidate points,
and `mean` and `cov` are the posterior mean and covariance, respectively.
::
acqf_constructor(
model,
objective_weights,
outcome_constraints,
X_observed,
X_pending,
**kwargs,
) -> acq_function
Here `model` is a botorch `Model`, `objective_weights` is a tensor of weights
for the model outputs, `outcome_constraints` is a tuple of tensors describing
the (linear) outcome constraints, `X_observed` are previously observed points,
and `X_pending` are points whose evaluation is pending. `acq_function` is a
BoTorch acquisition function crafted from these inputs. For additional
details on the arguments, see `get_NEI`.
::
acqf_optimizer(
acq_function,
bounds,
n,
inequality_constraints,
fixed_features,
rounding_func,
**kwargs,
) -> candidates
Here `acq_function` is a BoTorch `AcquisitionFunction`, `bounds` is a tensor
containing bounds on the parameters, `n` is the number of candidates to be
generated, `inequality_constraints` are inequality constraints on parameter
values, `fixed_features` specifies features that should be fixed during
generation, and `rounding_func` is a callback that rounds an optimization
result appropriately. `candidates` is a tensor of generated candidates.
For additional details on the arguments, see `scipy_optimizer`.
::
best_point_recommender(
model,
bounds,
objective_weights,
outcome_constraints,
linear_constraints,
fixed_features,
model_gen_options,
target_fidelities,
) -> candidates
Here `model` is a TorchModel, `bounds` is a list of tuples containing bounds
on the parameters, `objective_weights` is a tensor of weights for the model outputs,
`outcome_constraints` is a tuple of tensors describing the (linear) outcome
constraints, `linear_constraints` is a tuple of tensors describing constraints
on the design, `fixed_features` specifies features that should be fixed during
generation, `model_gen_options` is a config dictionary that can contain
model-specific options, and `target_fidelities` is a map from fidelity feature
column indices to their respective target fidelities, used for multi-fidelity
optimization problems. % TODO: refer to an example.
"""
dtype: Optional[torch.dtype]
device: Optional[torch.device]
Xs: List[Tensor]
Ys: List[Tensor]
Yvars: List[Tensor]
def __init__(
    self,
    model_constructor: TModelConstructor = get_and_fit_model,
    model_predictor: TModelPredictor = predict_from_model,
    # pyre-fixme[9]: acqf_constructor has type `Callable[[Model, Tensor,
    # Optional[Tuple[Tensor, Tensor]], Optional[Tensor], Optional[Tensor], Any],
    # AcquisitionFunction]`; used as `Callable[[Model, Tensor,
    # Optional[Tuple[Tensor, Tensor]], Optional[Tensor], Optional[Tensor],
    # **(Any)], AcquisitionFunction]`.
    acqf_constructor: TAcqfConstructor = get_NEI,
    # pyre-fixme[9]: acqf_optimizer has type `Callable[[AcquisitionFunction,
    # Tensor, int, Optional[Dict[int, float]], Optional[Callable[[Tensor],
    # Tensor]], Any], Tensor]`; used as `Callable[[AcquisitionFunction, Tensor,
    # int, Optional[Dict[int, float]], Optional[Callable[[Tensor], Tensor]],
    # **(Any)], Tensor]`.
    acqf_optimizer: TOptimizer = scipy_optimizer,
    best_point_recommender: TBestPointRecommender = recommend_best_observed_point,
    refit_on_cv: bool = False,
    refit_on_update: bool = True,
    warm_start_refitting: bool = True,
    **kwargs: Any,
) -> None:
    """Store the pluggable components; no model is fitted until `fit`."""
    self.model_constructor = model_constructor
    self.model_predictor = model_predictor
    self.acqf_constructor = acqf_constructor
    self.acqf_optimizer = acqf_optimizer
    self.best_point_recommender = best_point_recommender
    # Extra kwargs are forwarded verbatim to model_constructor on every
    # fit / cross_validate / update call.
    self._kwargs = kwargs
    self.refit_on_cv = refit_on_cv
    self.refit_on_update = refit_on_update
    self.warm_start_refitting = warm_start_refitting
    # Training state, populated by fit()/update().
    self.model: Optional[Model] = None
    self.Xs = []
    self.Ys = []
    self.Yvars = []
    self.dtype = None
    self.device = None
    self.task_features: List[int] = []
    self.fidelity_features: List[int] = []
    self.metric_names: List[str] = []
@copy_doc(TorchModel.fit)
def fit(
    self,
    Xs: List[Tensor],
    Ys: List[Tensor],
    Yvars: List[Tensor],
    bounds: List[Tuple[float, float]],
    task_features: List[int],
    feature_names: List[str],
    metric_names: List[str],
    fidelity_features: List[int],
) -> None:
    # dtype/device for later tensor construction are taken from Xs[0];
    # all inputs are assumed to live on the same device with the same dtype.
    self.dtype = Xs[0].dtype
    self.device = Xs[0].device
    self.Xs = Xs
    self.Ys = Ys
    self.Yvars = Yvars
    # ensure indices are non-negative
    self.task_features = normalize_indices(task_features, d=Xs[0].size(-1))
    self.fidelity_features = normalize_indices(fidelity_features, d=Xs[0].size(-1))
    self.metric_names = metric_names
    # Delegate actual model construction/fitting to the pluggable constructor.
    self.model = self.model_constructor(  # pyre-ignore [28]
        Xs=Xs,
        Ys=Ys,
        Yvars=Yvars,
        task_features=self.task_features,
        fidelity_features=self.fidelity_features,
        metric_names=self.metric_names,
        **self._kwargs,
    )
@copy_doc(TorchModel.predict)
def predict(self, X: Tensor) -> Tuple[Tensor, Tensor]:
    # Pure delegation to the pluggable predictor with the model from fit().
    return self.model_predictor(model=self.model, X=X)  # pyre-ignore [28]
@copy_doc(TorchModel.gen)
def gen(
    self,
    n: int,
    bounds: List[Tuple[float, float]],
    objective_weights: Tensor,
    outcome_constraints: Optional[Tuple[Tensor, Tensor]] = None,
    linear_constraints: Optional[Tuple[Tensor, Tensor]] = None,
    fixed_features: Optional[Dict[int, float]] = None,
    pending_observations: Optional[List[Tensor]] = None,
    model_gen_options: Optional[TConfig] = None,
    rounding_func: Optional[Callable[[Tensor], Tensor]] = None,
    target_fidelities: Optional[Dict[int, float]] = None,
) -> Tuple[Tensor, Tensor, TGenMetadata]:
    options = model_gen_options or {}
    acf_options = options.get("acquisition_function_kwargs", {})
    optimizer_options = options.get("optimizer_kwargs", {})
    if target_fidelities:
        raise NotImplementedError(
            "target_fidelities not implemented for base BotorchModel"
        )
    X_pending, X_observed = _get_X_pending_and_observed(
        Xs=self.Xs,
        pending_observations=pending_observations,
        objective_weights=objective_weights,
        outcome_constraints=outcome_constraints,
        bounds=bounds,
        linear_constraints=linear_constraints,
        fixed_features=fixed_features,
    )
    model = self.model
    # subset model only to the outcomes we need for the optimization
    if options.get("subset_model", True):
        model, objective_weights, outcome_constraints = subset_model(
            model=model,  # pyre-ignore [6]
            objective_weights=objective_weights,
            outcome_constraints=outcome_constraints,
        )
    # The optimizer expects bounds as a 2 x d tensor (lower row, upper row).
    bounds_ = torch.tensor(bounds, dtype=self.dtype, device=self.device)
    bounds_ = bounds_.transpose(0, 1)
    if linear_constraints is not None:
        # Convert each `A x <= b` row into the sparse
        # (indices, coefficients, rhs) triple expected downstream; the sign
        # flip rewrites it as `-A x >= -b`.
        A, b = linear_constraints
        inequality_constraints = []
        k, d = A.shape
        for i in range(k):
            indicies = A[i, :].nonzero().view(-1)
            coefficients = -A[i, indicies]
            rhs = -b[i, 0]
            inequality_constraints.append((indicies, coefficients, rhs))
    else:
        inequality_constraints = None
    acquisition_function = self.acqf_constructor(  # pyre-ignore: [28]
        model=model,
        objective_weights=objective_weights,
        outcome_constraints=outcome_constraints,
        X_observed=X_observed,
        X_pending=X_pending,
        **acf_options,
    )
    botorch_rounding_func = get_rounding_func(rounding_func)
    # pyre-ignore: [28]
    candidates, expected_acquisition_value = self.acqf_optimizer(
        acq_function=checked_cast(AcquisitionFunction, acquisition_function),
        bounds=bounds_,
        n=n,
        inequality_constraints=inequality_constraints,
        fixed_features=fixed_features,
        rounding_func=botorch_rounding_func,
        **optimizer_options,
    )
    # Candidates are returned on CPU with unit generation weights.
    return (
        candidates.detach().cpu(),
        torch.ones(n, dtype=self.dtype),
        {"expected_acquisition_value": expected_acquisition_value.tolist()},
    )
@copy_doc(TorchModel.best_point)
def best_point(
self,
bounds: List[Tuple[float, float]],
objective_weights: Tensor,
outcome_constraints: Optional[Tuple[Tensor, Tensor]] = None,
linear_constraints: Optional[Tuple[Tensor, Tensor]] = None,
fixed_features: Optional[Dict[int, float]] = None,
model_gen_options: Optional[TConfig] = None,
target_fidelities: Optional[Dict[int, float]] = None,
) -> Optional[Tensor]:
return self.best_point_recommender( # pyre-ignore [28]
model=self,
bounds=bounds,
objective_weights=objective_weights,
outcome_constraints=outcome_constraints,
linear_constraints=linear_constraints,
fixed_features=fixed_features,
model_gen_options=model_gen_options,
target_fidelities=target_fidelities,
)
@copy_doc(TorchModel.cross_validate)
def cross_validate(
self,
Xs_train: List[Tensor],
Ys_train: List[Tensor],
Yvars_train: List[Tensor],
X_test: Tensor,
) -> Tuple[Tensor, Tensor]:
if self.model is None:
raise RuntimeError("Cannot cross-validate model that has not been fitted")
if self.refit_on_cv:
state_dict = None
else:
state_dict = deepcopy(self.model.state_dict()) # pyre-ignore: [16]
model = self.model_constructor( # pyre-ignore: [28]
Xs=Xs_train,
Ys=Ys_train,
Yvars=Yvars_train,
task_features=self.task_features,
state_dict=state_dict,
fidelity_features=self.fidelity_features,
metric_names=self.metric_names,
**self._kwargs,
)
return self.model_predictor(model=model, X=X_test) # pyre-ignore: [28]
@copy_doc(TorchModel.update)
def update(self, Xs: List[Tensor], Ys: List[Tensor], Yvars: List[Tensor]) -> None:
if self.model is None:
raise RuntimeError("Cannot update model that has not been fitted")
self.Xs = Xs
self.Ys = Ys
self.Yvars = Yvars
if self.refit_on_update and not self.warm_start_refitting:
state_dict = None # pragma: no cover
else:
state_dict = deepcopy(self.model.state_dict()) # pyre-ignore: [16]
self.model = self.model_constructor( # pyre-ignore: [28]
Xs=self.Xs,
Ys=self.Ys,
Yvars=self.Yvars,
task_features=self.task_features,
state_dict=state_dict,
fidelity_features=self.fidelity_features,
metric_names=self.metric_names,
refit_model=self.refit_on_update,
**self._kwargs,
)
def feature_importances(self) -> np.ndarray:
if self.model is None:
raise RuntimeError(
"Cannot calculate feature_importances without a fitted model"
)
else:
ls = self.model.covar_module.base_kernel.lengthscale # pyre-ignore: [16]
return cast(Tensor, (1 / ls)).detach().cpu().numpy()
def get_rounding_func(
    rounding_func: Optional[Callable[[Tensor], Tensor]]
) -> Optional[Callable[[Tensor], Tensor]]:
    """Adapt a point-wise rounding function to batched candidate tensors.

    Args:
        rounding_func: A callable that rounds a single d-dim point, or None.

    Returns:
        None if no rounding function was given; otherwise a callable that
        applies `rounding_func` to every d-dim row of a tensor with an
        arbitrary number of leading (q- and t-) batch dimensions.
    """
    if rounding_func is None:
        return None

    def batched_rounding_func(X: Tensor) -> Tensor:
        # Flatten all batch dimensions, round row by row, restore the shape.
        batch_shape, d = X.shape[:-1], X.shape[-1]
        rounded_rows = [rounding_func(row) for row in X.view(-1, d)]  # pyre-ignore: [16]
        return torch.stack(rounded_rows).view(*batch_shape, d)

    return batched_rounding_func
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from copy import deepcopy
from typing import Any, Callable, Dict, List, Optional, Tuple, cast
import numpy as np
import torch
from ax.core.types import TConfig, TGenMetadata
from ax.models.torch.botorch_defaults import (
get_and_fit_model,
get_NEI,
predict_from_model,
recommend_best_observed_point,
scipy_optimizer,
)
from ax.models.torch.utils import _get_X_pending_and_observed, normalize_indices
from ax.models.torch_base import TorchModel
from ax.utils.common.docutils import copy_doc
from ax.utils.common.typeutils import checked_cast
from botorch.acquisition.acquisition import AcquisitionFunction
from botorch.models.model import Model
from torch import Tensor
from .utils import subset_model
# Signature of the callable that builds and fits the underlying BoTorch model:
# (Xs, Ys, Yvars, task_features, fidelity_features, metric_names, state_dict,
#  **kwargs) -> Model.
TModelConstructor = Callable[
    [
        List[Tensor],
        List[Tensor],
        List[Tensor],
        List[int],
        List[int],
        List[str],
        Optional[Dict[str, Tensor]],
        Any,
    ],
    Model,
]
# (model, X) -> (posterior mean, posterior covariance).
TModelPredictor = Callable[[Model, Tensor], Tuple[Tensor, Tensor]]
# (model, objective_weights, outcome_constraints, X_observed, X_pending,
#  **kwargs) -> acquisition function.
TAcqfConstructor = Callable[
    [
        Model,
        Tensor,
        Optional[Tuple[Tensor, Tensor]],
        Optional[Tensor],
        Optional[Tensor],
        Any,
    ],
    AcquisitionFunction,
]
# (acq_function, bounds, n, inequality_constraints, fixed_features,
#  rounding_func, **kwargs) -> (candidates, acquisition values).
TOptimizer = Callable[
    [
        AcquisitionFunction,
        Tensor,
        int,
        Optional[List[Tuple[Tensor, Tensor, float]]],
        Optional[Dict[int, float]],
        Optional[Callable[[Tensor], Tensor]],
        Any,
    ],
    Tuple[Tensor, Tensor],
]
# (model, bounds, objective_weights, outcome_constraints, linear_constraints,
#  fixed_features, model_gen_options, target_fidelities) -> best point (or None).
TBestPointRecommender = Callable[
    [
        TorchModel,
        List[Tuple[float, float]],
        Tensor,
        Optional[Tuple[Tensor, Tensor]],
        Optional[Tuple[Tensor, Tensor]],
        Optional[Dict[int, float]],
        Optional[TConfig],
        Optional[Dict[int, float]],
    ],
    Optional[Tensor],
]
class BotorchModel(TorchModel):
    r"""
    Customizable botorch model.

    By default, this uses a noisy Expected Improvement acquisition function on
    top of a model made up of separate GPs, one for each outcome. This behavior
    can be modified by providing custom implementations of the following
    components:

    - a `model_constructor` that instantiates and fits a model on data
    - a `model_predictor` that predicts outcomes using the fitted model
    - a `acqf_constructor` that creates an acquisition function from a fitted model
    - a `acqf_optimizer` that optimizes the acquisition function
    - a `best_point_recommender` that recommends a current "best" point (i.e.,
      what the model recommends if the learning process ended now)

    Args:
        model_constructor: A callable that instantiates and fits a model on data,
            with signature as described below.
        model_predictor: A callable that predicts using the fitted model, with
            signature as described below.
        acqf_constructor: A callable that creates an acquisition function from a
            fitted model, with signature as described below.
        acqf_optimizer: A callable that optimizes the acquisition function, with
            signature as described below.
        best_point_recommender: A callable that recommends the best point, with
            signature as described below.
        refit_on_cv: If True, refit the model for each fold when performing
            cross-validation.
        refit_on_update: If True, refit the model after updating the training
            data using the `update` method.
        warm_start_refitting: If True, start model refitting from previous
            model parameters in order to speed up the fitting process.

    Call signatures:

    ::

        model_constructor(
            Xs,
            Ys,
            Yvars,
            task_features,
            fidelity_features,
            metric_names,
            state_dict,
            **kwargs,
        ) -> model

    Here `Xs`, `Ys`, `Yvars` are lists of tensors (one element per outcome),
    `task_features` identifies columns of Xs that should be modeled as a task,
    `fidelity_features` is a list of ints that specify the positions of fidelity
    parameters in 'Xs', `metric_names` provides the names of each `Y` in `Ys`,
    `state_dict` is a pytorch module state dict, and `model` is a BoTorch `Model`.
    Optional kwargs are being passed through from the `BotorchModel` constructor.

    This callable is assumed to return a fitted BoTorch model that has the same
    dtype and lives on the same device as the input tensors.

    ::

        model_predictor(model, X) -> [mean, cov]

    Here `model` is a fitted botorch model, `X` is a tensor of candidate points,
    and `mean` and `cov` are the posterior mean and covariance, respectively.

    ::

        acqf_constructor(
            model,
            objective_weights,
            outcome_constraints,
            X_observed,
            X_pending,
            **kwargs,
        ) -> acq_function

    Here `model` is a botorch `Model`, `objective_weights` is a tensor of weights
    for the model outputs, `outcome_constraints` is a tuple of tensors describing
    the (linear) outcome constraints, `X_observed` are previously observed points,
    and `X_pending` are points whose evaluation is pending. `acq_function` is a
    BoTorch acquisition function crafted from these inputs. For additional
    details on the arguments, see `get_NEI`.

    ::

        acqf_optimizer(
            acq_function,
            bounds,
            n,
            inequality_constraints,
            fixed_features,
            rounding_func,
            **kwargs,
        ) -> candidates

    Here `acq_function` is a BoTorch `AcquisitionFunction`, `bounds` is a tensor
    containing bounds on the parameters, `n` is the number of candidates to be
    generated, `inequality_constraints` are inequality constraints on parameter
    values, `fixed_features` specifies features that should be fixed during
    generation, and `rounding_func` is a callback that rounds an optimization
    result appropriately. `candidates` is a tensor of generated candidates.
    For additional details on the arguments, see `scipy_optimizer`.

    ::

        best_point_recommender(
            model,
            bounds,
            objective_weights,
            outcome_constraints,
            linear_constraints,
            fixed_features,
            model_gen_options,
            target_fidelities,
        ) -> candidates

    Here `model` is a TorchModel, `bounds` is a list of tuples containing bounds
    on the parameters, `objective_weights` is a tensor of weights for the model outputs,
    `outcome_constraints` is a tuple of tensors describing the (linear) outcome
    constraints, `linear_constraints` is a tuple of tensors describing constraints
    on the design, `fixed_features` specifies features that should be fixed during
    generation, `model_gen_options` is a config dictionary that can contain
    model-specific options, and `target_fidelities` is a map from fidelity feature
    column indices to their respective target fidelities, used for multi-fidelity
    optimization problems. % TODO: refer to an example.
    """

    # dtype/device are inferred from the training tensors in `fit`; None until then.
    dtype: Optional[torch.dtype]
    device: Optional[torch.device]
    # Training data, one list element per outcome.
    Xs: List[Tensor]
    Ys: List[Tensor]
    Yvars: List[Tensor]

    def __init__(
        self,
        model_constructor: TModelConstructor = get_and_fit_model,
        model_predictor: TModelPredictor = predict_from_model,
        # pyre-fixme[9]: acqf_constructor has type `Callable[[Model, Tensor,
        #  Optional[Tuple[Tensor, Tensor]], Optional[Tensor], Optional[Tensor], Any],
        #  AcquisitionFunction]`; used as `Callable[[Model, Tensor,
        #  Optional[Tuple[Tensor, Tensor]], Optional[Tensor], Optional[Tensor],
        #  **(Any)], AcquisitionFunction]`.
        acqf_constructor: TAcqfConstructor = get_NEI,
        # pyre-fixme[9]: acqf_optimizer has type `Callable[[AcquisitionFunction,
        #  Tensor, int, Optional[Dict[int, float]], Optional[Callable[[Tensor],
        #  Tensor]], Any], Tensor]`; used as `Callable[[AcquisitionFunction, Tensor,
        #  int, Optional[Dict[int, float]], Optional[Callable[[Tensor], Tensor]],
        #  **(Any)], Tensor]`.
        acqf_optimizer: TOptimizer = scipy_optimizer,
        best_point_recommender: TBestPointRecommender = recommend_best_observed_point,
        refit_on_cv: bool = False,
        refit_on_update: bool = True,
        warm_start_refitting: bool = True,
        **kwargs: Any,
    ) -> None:
        # Store the pluggable components; `kwargs` are forwarded verbatim to
        # every `model_constructor` call.
        self.model_constructor = model_constructor
        self.model_predictor = model_predictor
        self.acqf_constructor = acqf_constructor
        self.acqf_optimizer = acqf_optimizer
        self.best_point_recommender = best_point_recommender
        self._kwargs = kwargs
        self.refit_on_cv = refit_on_cv
        self.refit_on_update = refit_on_update
        self.warm_start_refitting = warm_start_refitting
        # All data-dependent state is populated by `fit`.
        self.model: Optional[Model] = None
        self.Xs = []
        self.Ys = []
        self.Yvars = []
        self.dtype = None
        self.device = None
        self.task_features: List[int] = []
        self.fidelity_features: List[int] = []
        self.metric_names: List[str] = []

    @copy_doc(TorchModel.fit)
    def fit(
        self,
        Xs: List[Tensor],
        Ys: List[Tensor],
        Yvars: List[Tensor],
        bounds: List[Tuple[float, float]],
        task_features: List[int],
        feature_names: List[str],
        metric_names: List[str],
        fidelity_features: List[int],
    ) -> None:
        # Inherit dtype/device from the training data.
        self.dtype = Xs[0].dtype
        self.device = Xs[0].device
        self.Xs = Xs
        self.Ys = Ys
        self.Yvars = Yvars
        # ensure indices are non-negative
        self.task_features = normalize_indices(task_features, d=Xs[0].size(-1))
        self.fidelity_features = normalize_indices(fidelity_features, d=Xs[0].size(-1))
        self.metric_names = metric_names
        self.model = self.model_constructor(  # pyre-ignore [28]
            Xs=Xs,
            Ys=Ys,
            Yvars=Yvars,
            task_features=self.task_features,
            fidelity_features=self.fidelity_features,
            metric_names=self.metric_names,
            **self._kwargs,
        )

    @copy_doc(TorchModel.predict)
    def predict(self, X: Tensor) -> Tuple[Tensor, Tensor]:
        # Delegates to the configured predictor; returns (mean, cov).
        return self.model_predictor(model=self.model, X=X)  # pyre-ignore [28]

    @copy_doc(TorchModel.gen)
    def gen(
        self,
        n: int,
        bounds: List[Tuple[float, float]],
        objective_weights: Tensor,
        outcome_constraints: Optional[Tuple[Tensor, Tensor]] = None,
        linear_constraints: Optional[Tuple[Tensor, Tensor]] = None,
        fixed_features: Optional[Dict[int, float]] = None,
        pending_observations: Optional[List[Tensor]] = None,
        model_gen_options: Optional[TConfig] = None,
        rounding_func: Optional[Callable[[Tensor], Tensor]] = None,
        target_fidelities: Optional[Dict[int, float]] = None,
    ) -> Tuple[Tensor, Tensor, TGenMetadata]:
        options = model_gen_options or {}
        acf_options = options.get("acquisition_function_kwargs", {})
        optimizer_options = options.get("optimizer_kwargs", {})
        if target_fidelities:
            raise NotImplementedError(
                "target_fidelities not implemented for base BotorchModel"
            )
        # Partition the data into points with observed outcomes and points
        # whose evaluation is still pending.
        X_pending, X_observed = _get_X_pending_and_observed(
            Xs=self.Xs,
            pending_observations=pending_observations,
            objective_weights=objective_weights,
            outcome_constraints=outcome_constraints,
            bounds=bounds,
            linear_constraints=linear_constraints,
            fixed_features=fixed_features,
        )
        model = self.model
        # subset model only to the outcomes we need for the optimization
        if options.get("subset_model", True):
            model, objective_weights, outcome_constraints = subset_model(
                model=model,  # pyre-ignore [6]
                objective_weights=objective_weights,
                outcome_constraints=outcome_constraints,
            )
        # Bounds as a 2 x d tensor on the model's dtype/device.
        bounds_ = torch.tensor(bounds, dtype=self.dtype, device=self.device)
        bounds_ = bounds_.transpose(0, 1)
        if linear_constraints is not None:
            # Convert rows of A x <= b into BoTorch's per-constraint
            # (indices, coefficients, rhs) form; signs are flipped because
            # BoTorch expects >=-style inequality constraints.
            A, b = linear_constraints
            inequality_constraints = []
            k, d = A.shape
            for i in range(k):
                indicies = A[i, :].nonzero().view(-1)
                coefficients = -A[i, indicies]
                rhs = -b[i, 0]
                inequality_constraints.append((indicies, coefficients, rhs))
        else:
            inequality_constraints = None
        acquisition_function = self.acqf_constructor(  # pyre-ignore: [28]
            model=model,
            objective_weights=objective_weights,
            outcome_constraints=outcome_constraints,
            X_observed=X_observed,
            X_pending=X_pending,
            **acf_options,
        )
        botorch_rounding_func = get_rounding_func(rounding_func)
        # pyre-ignore: [28]
        candidates, expected_acquisition_value = self.acqf_optimizer(
            acq_function=checked_cast(AcquisitionFunction, acquisition_function),
            bounds=bounds_,
            n=n,
            inequality_constraints=inequality_constraints,
            fixed_features=fixed_features,
            rounding_func=botorch_rounding_func,
            **optimizer_options,
        )
        # Candidate weights are uniform for this model.
        return (
            candidates.detach().cpu(),
            torch.ones(n, dtype=self.dtype),
            {"expected_acquisition_value": expected_acquisition_value.tolist()},
        )

    @copy_doc(TorchModel.best_point)
    def best_point(
        self,
        bounds: List[Tuple[float, float]],
        objective_weights: Tensor,
        outcome_constraints: Optional[Tuple[Tensor, Tensor]] = None,
        linear_constraints: Optional[Tuple[Tensor, Tensor]] = None,
        fixed_features: Optional[Dict[int, float]] = None,
        model_gen_options: Optional[TConfig] = None,
        target_fidelities: Optional[Dict[int, float]] = None,
    ) -> Optional[Tensor]:
        # Delegates entirely to the configured recommender.
        return self.best_point_recommender(  # pyre-ignore [28]
            model=self,
            bounds=bounds,
            objective_weights=objective_weights,
            outcome_constraints=outcome_constraints,
            linear_constraints=linear_constraints,
            fixed_features=fixed_features,
            model_gen_options=model_gen_options,
            target_fidelities=target_fidelities,
        )

    @copy_doc(TorchModel.cross_validate)
    def cross_validate(
        self,
        Xs_train: List[Tensor],
        Ys_train: List[Tensor],
        Yvars_train: List[Tensor],
        X_test: Tensor,
    ) -> Tuple[Tensor, Tensor]:
        if self.model is None:
            raise RuntimeError("Cannot cross-validate model that has not been fitted")
        # Warm-start each fold from the fitted parameters unless a full
        # per-fold refit was requested.
        if self.refit_on_cv:
            state_dict = None
        else:
            state_dict = deepcopy(self.model.state_dict())  # pyre-ignore: [16]
        model = self.model_constructor(  # pyre-ignore: [28]
            Xs=Xs_train,
            Ys=Ys_train,
            Yvars=Yvars_train,
            task_features=self.task_features,
            state_dict=state_dict,
            fidelity_features=self.fidelity_features,
            metric_names=self.metric_names,
            **self._kwargs,
        )
        return self.model_predictor(model=model, X=X_test)  # pyre-ignore: [28]

    @copy_doc(TorchModel.update)
    def update(self, Xs: List[Tensor], Ys: List[Tensor], Yvars: List[Tensor]) -> None:
        if self.model is None:
            raise RuntimeError("Cannot update model that has not been fitted")
        self.Xs = Xs
        self.Ys = Ys
        self.Yvars = Yvars
        # Keep fitted parameters for warm-starting; drop them only when a cold
        # refit was explicitly requested.
        if self.refit_on_update and not self.warm_start_refitting:
            state_dict = None  # pragma: no cover
        else:
            state_dict = deepcopy(self.model.state_dict())  # pyre-ignore: [16]
        self.model = self.model_constructor(  # pyre-ignore: [28]
            Xs=self.Xs,
            Ys=self.Ys,
            Yvars=self.Yvars,
            task_features=self.task_features,
            state_dict=state_dict,
            fidelity_features=self.fidelity_features,
            metric_names=self.metric_names,
            refit_model=self.refit_on_update,
            **self._kwargs,
        )

    def feature_importances(self) -> np.ndarray:
        """Return per-feature importances as inverse kernel lengthscales."""
        if self.model is None:
            raise RuntimeError(
                "Cannot calculate feature_importances without a fitted model"
            )
        else:
            # Short lengthscale => sensitive input => high importance.
            ls = self.model.covar_module.base_kernel.lengthscale  # pyre-ignore: [16]
            return cast(Tensor, (1 / ls)).detach().cpu().numpy()
def get_rounding_func(
    rounding_func: Optional[Callable[[Tensor], Tensor]]
) -> Optional[Callable[[Tensor], Tensor]]:
    """Adapt a point-wise rounding function to batched candidate tensors.

    Args:
        rounding_func: A callable that rounds a single d-dim point, or None.

    Returns:
        None if `rounding_func` is None; otherwise a callable that applies
        `rounding_func` to every d-dim row of a tensor with arbitrary leading
        (q- and t-) batch dimensions.
    """
    if rounding_func is None:
        botorch_rounding_func = rounding_func
    else:
        # make sure rounding_func is properly applied to q- and t-batches
        def botorch_rounding_func(X: Tensor) -> Tensor:
            # Flatten batch dims, round row by row, then restore the shape.
            batch_shape, d = X.shape[:-1], X.shape[-1]
            X_round = torch.stack(
                [rounding_func(x) for x in X.view(-1, d)]  # pyre-ignore: [16]
            )
            return X_round.view(*batch_shape, d)

    return botorch_rounding_func
|
en
| 0.814017
|
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. Customizable botorch model. By default, this uses a noisy Expected Improvement acquisition function on top of a model made up of separate GPs, one for each outcome. This behavior can be modified by providing custom implementations of the following components: - a `model_constructor` that instantiates and fits a model on data - a `model_predictor` that predicts outcomes using the fitted model - a `acqf_constructor` that creates an acquisition function from a fitted model - a `acqf_optimizer` that optimizes the acquisition function - a `best_point_recommender` that recommends a current "best" point (i.e., what the model recommends if the learning process ended now) Args: model_constructor: A callable that instantiates and fits a model on data, with signature as described below. model_predictor: A callable that predicts using the fitted model, with signature as described below. acqf_constructor: A callable that creates an acquisition function from a fitted model, with signature as described below. acqf_optimizer: A callable that optimizes the acquisition function, with signature as described below. best_point_recommender: A callable that recommends the best point, with signature as described below. refit_on_cv: If True, refit the model for each fold when performing cross-validation. refit_on_update: If True, refit the model after updating the training data using the `update` method. warm_start_refitting: If True, start model refitting from previous model parameters in order to speed up the fitting process. 
Call signatures: :: model_constructor( Xs, Ys, Yvars, task_features, fidelity_features, metric_names, state_dict, **kwargs, ) -> model Here `Xs`, `Ys`, `Yvars` are lists of tensors (one element per outcome), `task_features` identifies columns of Xs that should be modeled as a task, `fidelity_features` is a list of ints that specify the positions of fidelity parameters in 'Xs', `metric_names` provides the names of each `Y` in `Ys`, `state_dict` is a pytorch module state dict, and `model` is a BoTorch `Model`. Optional kwargs are being passed through from the `BotorchModel` constructor. This callable is assumed to return a fitted BoTorch model that has the same dtype and lives on the same device as the input tensors. :: model_predictor(model, X) -> [mean, cov] Here `model` is a fitted botorch model, `X` is a tensor of candidate points, and `mean` and `cov` are the posterior mean and covariance, respectively. :: acqf_constructor( model, objective_weights, outcome_constraints, X_observed, X_pending, **kwargs, ) -> acq_function Here `model` is a botorch `Model`, `objective_weights` is a tensor of weights for the model outputs, `outcome_constraints` is a tuple of tensors describing the (linear) outcome constraints, `X_observed` are previously observed points, and `X_pending` are points whose evaluation is pending. `acq_function` is a BoTorch acquisition function crafted from these inputs. For additional details on the arguments, see `get_NEI`. :: acqf_optimizer( acq_function, bounds, n, inequality_constraints, fixed_features, rounding_func, **kwargs, ) -> candidates Here `acq_function` is a BoTorch `AcquisitionFunction`, `bounds` is a tensor containing bounds on the parameters, `n` is the number of candidates to be generated, `inequality_constraints` are inequality constraints on parameter values, `fixed_features` specifies features that should be fixed during generation, and `rounding_func` is a callback that rounds an optimization result appropriately. 
`candidates` is a tensor of generated candidates. For additional details on the arguments, see `scipy_optimizer`. :: best_point_recommender( model, bounds, objective_weights, outcome_constraints, linear_constraints, fixed_features, model_gen_options, target_fidelities, ) -> candidates Here `model` is a TorchModel, `bounds` is a list of tuples containing bounds on the parameters, `objective_weights` is a tensor of weights for the model outputs, `outcome_constraints` is a tuple of tensors describing the (linear) outcome constraints, `linear_constraints` is a tuple of tensors describing constraints on the design, `fixed_features` specifies features that should be fixed during generation, `model_gen_options` is a config dictionary that can contain model-specific options, and `target_fidelities` is a map from fidelity feature column indices to their respective target fidelities, used for multi-fidelity optimization problems. % TODO: refer to an example. # pyre-fixme[9]: acqf_constructor has type `Callable[[Model, Tensor, # Optional[Tuple[Tensor, Tensor]], Optional[Tensor], Optional[Tensor], Any], # AcquisitionFunction]`; used as `Callable[[Model, Tensor, # Optional[Tuple[Tensor, Tensor]], Optional[Tensor], Optional[Tensor], # **(Any)], AcquisitionFunction]`. # pyre-fixme[9]: acqf_optimizer has type `Callable[[AcquisitionFunction, # Tensor, int, Optional[Dict[int, float]], Optional[Callable[[Tensor], # Tensor]], Any], Tensor]`; used as `Callable[[AcquisitionFunction, Tensor, # int, Optional[Dict[int, float]], Optional[Callable[[Tensor], Tensor]], # **(Any)], Tensor]`. 
# ensure indices are non-negative # pyre-ignore [28] # pyre-ignore [28] # subset model only to the outcomes we need for the optimization # pyre-ignore [6] # pyre-ignore: [28] # pyre-ignore: [28] # pyre-ignore [28] # pyre-ignore: [16] # pyre-ignore: [28] # pyre-ignore: [28] # pragma: no cover # pyre-ignore: [16] # pyre-ignore: [28] # pyre-ignore: [16] # make sure rounding_func is properly applied to q- and t-batches # pyre-ignore: [16]
| 1.455575
| 1
|
spark/spark_main.py
|
kcrandall/Kaggle_Mercedes_Manufacturing
| 0
|
6626841
|
# imports
import pandas as pd
import numpy as np
import time
import os
from tabulate import tabulate
import sys
from operator import add
from pyspark import SparkContext
from pyspark.sql import SparkSession
from pyspark.sql import SQLContext
from pyspark.sql import functions as F #https://stackoverflow.com/questions/39504950/python-pyspark-get-sum-of-a-pyspark-dataframe-column-values
from get_type_lists import get_type_lists
from target_encoder import target_encoder
from feature_combiner import feature_combiner
from logging_lib.LoggingController import LoggingController
# S3 bucket used both to load the raw Kaggle data and to store outputs.
S3_BUCKET = 'emr-related-files'
# Custom logger that ships statistics and plots to S3.
logger = LoggingController()
logger.s3_bucket = S3_BUCKET
# Build the Spark session. Cluster sizing options (executor cores, master,
# etc.) are left to the submit-time configuration rather than hard-coded here.
spark = SparkSession.builder \
    .appName("App") \
    .getOrCreate()
spark.sparkContext.setLogLevel('WARN')  # suppress Spark's verbose INFO output
# Target column and row-identifier column of the Mercedes dataset.
Y = 'y'
ID_VAR = 'ID'
# Columns to exclude from modeling.
DROPS = [ID_VAR]
# Feature ranking taken from a previously trained XGBoost model.
# NOTE the top 6 are categorical, might want to look into this.
MOST_IMPORTANT_VARS_ORDERD = ['X5','X0','X8','X3','X1','X2','X314','X47','X118',\
'X315','X29','X127','X236','X115','X383','X152','X151','X351','X327','X77','X104',\
'X267','X95','X142']
# Load train/test CSVs from S3 with schema inference.
train = spark.read.format('com.databricks.spark.csv').options(header='true', inferschema='true').load('s3n://'+S3_BUCKET+'/train.csv')
test = spark.read.format('com.databricks.spark.csv').options(header='true', inferschema='true').load('s3n://'+S3_BUCKET+'/test.csv')
# Give the test frame a dummy target column: h2o glm.predict() requires the
# same column count as the training frame (known H2O bug workaround).
test = test.withColumn(Y,test[ID_VAR])
# Work around a Spark (<= 2.1) randomSplit() issue with wide data: split on
# the ID column alone, then join each ID subset back to the full frame.
(train1,valid1) = train.select(ID_VAR).randomSplit([0.7,0.3], seed=123)
valid = valid1.join(train, ID_VAR,'inner')
train = train1.join(train,ID_VAR,'inner')
# Reorder test columns (ID, y, features) — again for the h2o predict workaround.
test1 = test.select(ID_VAR,Y)
test2 = test.drop(Y)
test = test1.join(test2,ID_VAR,'inner')
# Identify numeric vs. categorical columns, excluding the ID and target.
original_nums, cats = get_type_lists(frame=train, rejects=[ID_VAR, Y], frame_type='spark')
print("Encoding numberic variables...")
# Target-encode every categorical column. Each call returns one small
# (ID, encoded-column) frame per data split; collect them for a single
# join pass afterwards.
training_df_list, test_df_list, valid_df_list = list(), list(), list()
for i, var in enumerate(cats):
    total = len(cats)
    print('Encoding: ' + var + ' (' + str(i+1) + '/' + str(total) + ') ...')
    logger.log_string('Encoding: ' + var + ' (' + str(i+1) + '/' + str(total) + ') ...')
    tr_enc, v_enc, ts_enc = target_encoder(train, test, var, Y, valid_frame=valid, frame_type='spark', id_col=ID_VAR)
    training_df_list.append(tr_enc)
    test_df_list.append(ts_enc)
    valid_df_list.append(v_enc)
# Join the encoded columns back onto each split by ID. Iterate the three
# parallel lists together instead of indexing them by position.
for tr_enc, v_enc, ts_enc in zip(training_df_list, valid_df_list, test_df_list):
    train = train.join(tr_enc, ID_VAR, 'inner')
    valid = valid.join(v_enc, ID_VAR, 'inner')
    test = test.join(ts_enc, ID_VAR, 'inner')
print('Done encoding.')
# Recompute type lists now that the *_Tencode columns exist.
encoded_nums, cats = get_type_lists(frame=train, rejects=[ID_VAR, Y], frame_type='spark')
# Replace categorical names in the importance list with their encoded versions.
for i, v in enumerate(MOST_IMPORTANT_VARS_ORDERD):
    if v in cats:
        MOST_IMPORTANT_VARS_ORDERD[i] = v + '_Tencode'
# Feature combination is currently disabled; to re-enable it, restore:
#   (train, valid, test) = feature_combiner(train, test,
#       MOST_IMPORTANT_VARS_ORDERD, valid_frame=valid, frame_type='spark')
#   encoded_combined_nums, cats = get_type_lists(frame=train,
#       rejects=[ID_VAR, Y], frame_type='spark')
# Until then, alias the name so downstream code that trains on
# `encoded_combined_nums` (the H2O XGBoost model and glm2 below) does not
# crash with a NameError.
encoded_combined_nums = encoded_nums
# PCA in PySpark, for future dimensionality reduction:
# https://stackoverflow.com/questions/31774311/pca-analysis-in-pyspark
################################################################################
#                 DONE WITH PREPROCESSING - START TRAINING                     #
################################################################################
import h2o
h2o.show_progress()  # turn on progress bars
from h2o.estimators.glm import H2OGeneralizedLinearEstimator  # import GLM models
from h2o.estimators.deeplearning import H2ODeepLearningEstimator
from h2o.estimators.gbm import H2OGradientBoostingEstimator
from h2o.estimators.random_forest import H2ORandomForestEstimator
from h2o.grid.grid_search import H2OGridSearch  # grid search
from h2o.estimators.xgboost import H2OXGBoostEstimator
from h2o.estimators.stackedensemble import H2OStackedEnsembleEstimator
import xgboost as xgb
import matplotlib
matplotlib.use('Agg')  # required for matplotlib on a headless server (no display)
from pysparkling import *
# Start an H2O context on top of the Spark cluster (Sparkling Water).
conf = H2OConf(spark=spark)
conf.nthreads = -1
hc = H2OContext.getOrCreate(spark,conf)
# Convert the Spark DataFrames into H2OFrames for model training.
print('Making h2o frames...')
trainHF = hc.as_h2o_frame(train, "trainTable")
validHF = hc.as_h2o_frame(valid, "validTable")
testHF = hc.as_h2o_frame(test, "testTable")
print('Done making h2o frames.')
logger.log_string("Train Summary:")
logger.log_string("Rows:{}".format(trainHF.nrow))
logger.log_string("Cols:{}".format(trainHF.ncol))
# Split train/valid in half: one half for base learners, one half reserved
# for a stacking layer.
base_train, stack_train = trainHF.split_frame([0.5], seed=12345)
base_valid, stack_valid = validHF.split_frame([0.5], seed=12345)
# def upload_submission(sub,predict_column='predict'):
# # create time stamp
# import re
# import time
# time_stamp = re.sub('[: ]', '_', time.asctime())
#
# # save file for submission
# # sub.columns = [ID_VAR, Y]
# sub_fname = 'Submission_'+str(time_stamp) + '.csv'
# # h2o.download_csv(sub, 's3n://'+S3_BUCKET+'/kaggle_submissions/Mercedes/' +sub_fname)
#
# spark_sub_frame = hc.as_spark_frame(sub)
#
# spark_sub_frame.select(ID_VAR,predict_column).coalesce(1).write.option("header","true").csv('s3n://'+S3_BUCKET+'/Kaggle_Submissions/Mercedes/' +sub_fname)
def glm_grid(X, y, train, valid, should_submit = False):
    """ Wrapper function for penalized GLM with alpha and lambda search.

    :param X: List of inputs.
    :param y: Name of target variable.
    :param train: Name of training H2OFrame.
    :param valid: Name of validation H2OFrame.
    :param should_submit: Placeholder for Kaggle submission support; currently
        unused because the upload path is disabled.
    :return: Best H2Omodel from H2OGeneralizedLinearEstimator
    """
    # Grid only over alpha; lambda is handled by H2O's built-in lambda search.
    # (The previously hard-coded `family` list was unused — the family is
    # fixed to gaussian below.)
    alpha_opts = [0.01, 0.25, 0.5, 0.99]  # always keep some L2
    hyper_parameters = {"alpha": alpha_opts}

    # initialize grid search
    grid = H2OGridSearch(
        H2OGeneralizedLinearEstimator(
            family="gaussian",
            lambda_search=True,
            seed=12345),
        hyper_params=hyper_parameters)

    # train grid
    grid.train(y=y,
               x=X,
               training_frame=train,
               validation_frame=valid)

    # show grid search results and pick the top model
    print(grid.show())
    best = grid.get_grid()[0]
    print(best)

    # preview predictions on the validation frame
    print('yhat_frame')
    yhat_frame = valid.cbind(best.predict(valid))
    print(yhat_frame[0:10, [y, 'predict']])

    # plot predictions sorted by predicted value and ship the plot to S3
    yhat_frame_df = yhat_frame[[y, 'predict']].as_data_frame()
    yhat_frame_df.sort_values(by='predict', inplace=True)
    yhat_frame_df.reset_index(inplace=True, drop=True)
    plt = yhat_frame_df.plot(title='Ranked Predictions Plot')
    logger.log_string('Ranked Predictions Plot')
    logger.log_matplotlib_plot(plt)

    # select best model
    return best
def neural_net_grid(X, y, train, valid):
    """Random grid search over H2O deep-learning models; returns the best one.

    :param X: List of input column names.
    :param y: Name of the target variable.
    :param train: Training H2OFrame.
    :param valid: Validation H2OFrame.
    :return: Best model found by the random grid search.
    """
    # Candidate architectures and regularization strengths.
    hyper_parameters = {
        'hidden': [[170, 320], [80, 190], [320, 160, 80], [100], [50, 50, 50, 50]],
        'l1': [s / 1e4 for s in range(0, 1000, 100)],
        'l2': [s / 1e5 for s in range(0, 1000, 100)],
        'input_dropout_ratio': [s / 1e2 for s in range(0, 20, 2)],
    }
    # Random search, capped by model count and wall-clock budget.
    search_criteria = {
        'strategy': 'RandomDiscrete',
        'max_models': 100,
        'max_runtime_secs': 60 * 60 * 2,  # two hours
    }
    gsearch = H2OGridSearch(
        H2ODeepLearningEstimator,
        hyper_params=hyper_parameters,
        search_criteria=search_criteria,
    )
    gsearch.train(
        x=X,
        y=y,
        training_frame=train,
        validation_frame=valid,
        activation='TanhWithDropout',
        epochs=2000,
        stopping_rounds=20,
        sparse=True,  # handles data w/ many zeros more efficiently
        ignore_const_cols=True,
        adaptive_rate=True,
    )
    return gsearch.get_grid()[0]
def gboosting_grid(X, y, train, valid):
    """Random grid search over H2O gradient boosting (GBM) models.

    :param X: List of input column names.
    :param y: Name of the target column.
    :param train: Training H2OFrame.
    :param valid: Validation H2OFrame.
    :return: Best H2OGradientBoostingEstimator found by the search.
    """
    # define random grid search parameters
    # NOTE(review): ntrees and max_depth grids start at 0, which is not a
    # trainable GBM configuration — confirm whether the first step was intended.
    hyper_parameters = {'ntrees':list(range(0, 500, 50)),
                        'max_depth':list(range(0, 20, 2)),
                        'sample_rate':[s/float(10) for s in range(1, 11)],
                        'col_sample_rate':[s/float(10) for s in range(1, 11)]}
    # define search strategy: random sampling, capped at 100 models or 2 hours
    search_criteria = {'strategy':'RandomDiscrete',
                       'max_models':100,
                       'max_runtime_secs':60*60*2, #2 hours
                       }
    # initialize grid search
    gsearch = H2OGridSearch(H2OGradientBoostingEstimator,
                            hyper_params=hyper_parameters,
                            search_criteria=search_criteria)
    # execute training w/ grid search
    gsearch.train(x=X,
                  y=y,
                  training_frame=train,
                  validation_frame=valid)
    # grid results are sorted best-first; take the top model
    best_model = gsearch.get_grid()[0]
    return best_model
h2o_xgb_model = H2OXGBoostEstimator(
ntrees = 10000,
learn_rate = 0.005,
sample_rate = 0.1,
col_sample_rate = 0.8,
max_depth = 5,
nfolds = 3,
keep_cross_validation_predictions=True,
stopping_rounds = 10,
seed = 12345)
# execute training
h2o_xgb_model.train(x=encoded_combined_nums,
y=Y,
training_frame=trainHF,
validation_frame=validHF)
print('Training..')
logger.log_string('glm0')
glm0 = glm_grid(original_nums, Y, base_train, base_valid)
logger.log_string('glm1')
glm1 = glm_grid(encoded_nums, Y, base_train, base_valid)
logger.log_string('glm2')
glm2 = glm_grid(encoded_combined_nums, Y, base_train, base_valid)
#
# logger.log_string('rnn0')
# rnn0 = neural_net_grid(original_nums, Y, base_train, base_valid)
# logger.log_string('rnn1')
# rnn1 = neural_net_grid(encoded_nums, Y, base_train, base_valid)
# logger.log_string('rnn2')
# rnn2 = neural_net_grid(encoded_combined_nums, Y, base_train, base_valid)
#
# logger.log_string('gbm0')
# gbm0 = gboosting_grid(original_nums, Y, base_train, base_valid)
# logger.log_string('gbm1')
# gbm1 = gboosting_grid(encoded_nums, Y, base_train, base_valid)
# logger.log_string('gbm2')
# gbm2 = gboosting_grid(encoded_combined_nums, Y, base_train, base_valid)
print('DONE training.')
stack_train = stack_train.cbind(glm0.predict(stack_train))
stack_valid = stack_valid.cbind(glm0.predict(stack_valid))
stack_train = stack_train.cbind(glm1.predict(stack_train))
stack_valid = stack_valid.cbind(glm1.predict(stack_valid))
stack_train = stack_train.cbind(glm2.predict(stack_train))
stack_valid = stack_valid.cbind(glm2.predict(stack_valid))
#
# stack_train = stack_train.cbind(rnn0.predict(stack_train))
# stack_valid = stack_valid.cbind(rnn0.predict(stack_valid))
# stack_train = stack_train.cbind(rnn1.predict(stack_train))
# stack_valid = stack_valid.cbind(rnn1.predict(stack_valid))
# stack_train = stack_train.cbind(rnn2.predict(stack_train))
# stack_valid = stack_valid.cbind(rnn2.predict(stack_valid))
#
# stack_train = stack_train.cbind(gbm0.predict(stack_train))
# stack_valid = stack_valid.cbind(gbm0.predict(stack_valid))
# stack_train = stack_train.cbind(gbm1.predict(stack_train))
# stack_valid = stack_valid.cbind(gbm1.predict(stack_valid))
# stack_train = stack_train.cbind(gbm2.predict(stack_train))
# stack_valid = stack_valid.cbind(gbm2.predict(stack_valid))
testHF = testHF.cbind(glm0.predict(testHF))
testHF = testHF.cbind(glm1.predict(testHF))
testHF = testHF.cbind(glm2.predict(testHF))
# testHF = testHF.cbind(rnn0.predict(testHF))
# testHF = testHF.cbind(rnn1.predict(testHF))
# testHF = testHF.cbind(rnn2.predict(testHF))
# testHF = testHF.cbind(gbm0.predict(testHF))
# testHF = testHF.cbind(gbm1.predict(testHF))
# testHF = testHF.cbind(gbm2.predict(testHF))
logger.log_string('glm3')
# glm3 = glm_grid(encoded_combined_nums + ['predict', 'predict0','predict1'], Y, stack_train, stack_valid, should_submit=True)
rnn = neural_net_grid(MOST_IMPORTANT_VARS_ORDERD + ['predict', 'predict0', 'predict1','predict2', 'predict3', 'predict4','predict5', 'predict6', 'predict7'], Y, stack_train, stack_valid)
sub = testHF[ID_VAR].cbind(rnn.predict(testHF))
print(sub.head())
# create time stamp
import re
import time
time_stamp = re.sub('[: ]', '_', time.asctime())
# save file for submission
sub.columns = [ID_VAR, Y]
sub_fname = 'Submission_'+str(time_stamp) + '.csv'
# h2o.download_csv(sub, 's3n://'+S3_BUCKET+'/kaggle_submissions/Mercedes/' +sub_fname)
spark_sub_frame = hc.as_spark_frame(sub)
spark_sub_frame.select(ID_VAR,Y).coalesce(1).write.option("header","true").csv('s3n://'+S3_BUCKET+'/Kaggle_Submissions/Mercedes/' +sub_fname)
|
# imports
import pandas as pd
import numpy as np
import time
import os
from tabulate import tabulate
import sys
from operator import add
from pyspark import SparkContext
from pyspark.sql import SparkSession
from pyspark.sql import SQLContext
from pyspark.sql import functions as F #https://stackoverflow.com/questions/39504950/python-pyspark-get-sum-of-a-pyspark-dataframe-column-values
from get_type_lists import get_type_lists
from target_encoder import target_encoder
from feature_combiner import feature_combiner
from logging_lib.LoggingController import LoggingController
#Define your s3 bucket to load and store data
S3_BUCKET = 'emr-related-files'
#Create a custom logger to log statistics and plots
logger = LoggingController()
logger.s3_bucket = S3_BUCKET
#.config('spark.executor.cores','6') \
spark = SparkSession.builder \
.appName("App") \
.getOrCreate()
# .master("local[*]") \
# .config('spark.cores.max','16')
#.master("local") \
# .config("spark.some.config.option", "some-value") \
spark.sparkContext.setLogLevel('WARN') #Get rid of all the junk in output
Y = 'y'
ID_VAR = 'ID'
DROPS = [ID_VAR]
#From an XGBoost model
# NOTE the top 6 are categorical, might want to look into this.
MOST_IMPORTANT_VARS_ORDERD = ['X5','X0','X8','X3','X1','X2','X314','X47','X118',\
'X315','X29','X127','X236','X115','X383','X152','X151','X351','X327','X77','X104',\
'X267','X95','X142']
#Load data from s3
train = spark.read.format('com.databricks.spark.csv').options(header='true', inferschema='true').load('s3n://'+S3_BUCKET+'/train.csv')
test = spark.read.format('com.databricks.spark.csv').options(header='true', inferschema='true').load('s3n://'+S3_BUCKET+'/test.csv')
#this needs to be done for h2o glm.predict() bug (which needs same number of columns)
test = test.withColumn(Y,test[ID_VAR])
#Work around for splitting wide data, you need to split on only an ID varaibles
#Then join back with a train varaible (bug in spark as of 2.1 with randomSplit())
(train1,valid1) = train.select(ID_VAR).randomSplit([0.7,0.3], seed=123)
valid = valid1.join(train, ID_VAR,'inner')
train = train1.join(train,ID_VAR,'inner')
# print('TRAIN DATA')
# train.show(2)
# print('VALID DATA')
# valid.show(2)
#workdaround for h2o predict
test1 = test.select(ID_VAR,Y)
test2 = test.drop(Y)
test = test1.join(test2,ID_VAR,'inner')
original_nums, cats = get_type_lists(frame=train,rejects=[ID_VAR,Y],frame_type='spark')
print("Encoding numberic variables...")
training_df_list, test_df_list,valid_df_list = list(),list(),list()
for i, var in enumerate(cats):
total = len(cats)
print('Encoding: ' + var + ' (' + str(i+1) + '/' + str(total) + ') ...')
logger.log_string('Encoding: ' + var + ' (' + str(i+1) + '/' + str(total) + ') ...')
tr_enc,v_enc, ts_enc = target_encoder(train, test, var, Y,valid_frame=valid,frame_type='spark',id_col=ID_VAR)
training_df_list.append(tr_enc)
test_df_list.append(ts_enc)
valid_df_list.append(v_enc)
#join all the new variables
for i, df in enumerate(training_df_list):
train = train.join(training_df_list[i],ID_VAR,'inner')
valid = valid.join(valid_df_list[i],ID_VAR,'inner')
test = test.join(test_df_list[i],ID_VAR,'inner')
# print('TRAIN DATA')
# train.show(2)
# print('VALID DATA')
# valid.show(2)
# print('TEST DATA')
# test.show(2)
print('Done encoding.')
encoded_nums, cats = get_type_lists(frame=train,rejects=[ID_VAR,Y],frame_type='spark')
#Remplace cats with encoded cats from MOST_IMPORTANT_VARS_ORDERD
for i, v in enumerate(MOST_IMPORTANT_VARS_ORDERD):
if v in cats:
MOST_IMPORTANT_VARS_ORDERD[i] = v + '_Tencode'
#
# print('Combining features....')
# (train, valid, test) = feature_combiner(train, test, MOST_IMPORTANT_VARS_ORDERD, valid_frame = valid, frame_type='spark')
# print('Done combining features.')
#
# encoded_combined_nums, cats = get_type_lists(frame=train,rejects=[ID_VAR,Y],frame_type='spark')
# https://stackoverflow.com/questions/31774311/pca-analysis-in-pyspark
################################################################################
# DONE WITH PREPROCESSING - START TRAINING #
################################################################################
import h2o
h2o.show_progress() # turn on progress bars
from h2o.estimators.glm import H2OGeneralizedLinearEstimator # import GLM models
from h2o.estimators.deeplearning import H2ODeepLearningEstimator
from h2o.estimators.gbm import H2OGradientBoostingEstimator
from h2o.estimators.random_forest import H2ORandomForestEstimator
from h2o.grid.grid_search import H2OGridSearch # grid search
from h2o.estimators.xgboost import H2OXGBoostEstimator
from h2o.estimators.stackedensemble import H2OStackedEnsembleEstimator
import xgboost as xgb
import matplotlib
matplotlib.use('Agg') #Need this if running matplot on a server w/o display
from pysparkling import *
conf = H2OConf(spark=spark)
conf.nthreads = -1
hc = H2OContext.getOrCreate(spark,conf)
print('Making h2o frames...')
trainHF = hc.as_h2o_frame(train, "trainTable")
validHF = hc.as_h2o_frame(valid, "validTable")
testHF = hc.as_h2o_frame(test, "testTable")
print('Done making h2o frames.')
logger.log_string("Train Summary:")
logger.log_string("Rows:{}".format(trainHF.nrow))
logger.log_string("Cols:{}".format(trainHF.ncol))
# print(trainHF.summary(return_data=True))
# logger.log_string(tabulate(trainHF.summary(return_data=True),tablefmt="grid"))
# logger.log_string(trainHF._ex._cache._tabulate('grid',False))
base_train, stack_train = trainHF.split_frame([0.5], seed=12345)
base_valid, stack_valid = validHF.split_frame([0.5], seed=12345)
# def upload_submission(sub,predict_column='predict'):
# # create time stamp
# import re
# import time
# time_stamp = re.sub('[: ]', '_', time.asctime())
#
# # save file for submission
# # sub.columns = [ID_VAR, Y]
# sub_fname = 'Submission_'+str(time_stamp) + '.csv'
# # h2o.download_csv(sub, 's3n://'+S3_BUCKET+'/kaggle_submissions/Mercedes/' +sub_fname)
#
# spark_sub_frame = hc.as_spark_frame(sub)
#
# spark_sub_frame.select(ID_VAR,predict_column).coalesce(1).write.option("header","true").csv('s3n://'+S3_BUCKET+'/Kaggle_Submissions/Mercedes/' +sub_fname)
def glm_grid(X, y, train, valid, should_submit = False):
    """Wrapper function for penalized GLM with alpha and lambda search.

    :param X: List of input column names.
    :param y: Name of the target column.
    :param train: Training H2OFrame.
    :param valid: Validation H2OFrame.
    :param should_submit: Unused; kept for backward compatibility with
        callers (the submission path was removed).
    :return: Best H2OGeneralizedLinearEstimator found by the grid search.
    """
    # Only alpha (the L1/L2 mix) is grid-searched; lambda is chosen by the
    # estimator's built-in lambda_search.  The unused `family` options list
    # that used to sit here was dead code and has been removed.
    alpha_opts = [0.01, 0.25, 0.5, 0.99]  # always keep some L2
    hyper_parameters = {"alpha": alpha_opts}

    # initialize grid search (gaussian family, fixed seed for reproducibility)
    grid = H2OGridSearch(
        H2OGeneralizedLinearEstimator(
            family="gaussian",
            lambda_search=True,
            seed=12345),
        hyper_params=hyper_parameters)

    # train grid
    grid.train(y=y,
               x=X,
               training_frame=train,
               validation_frame=valid)

    # show grid search results; models are sorted best-first
    print(grid.show())
    best = grid.get_grid()[0]
    print(best)

    # preview predictions on the validation frame
    print('yhat_frame')
    yhat_frame = valid.cbind(best.predict(valid))
    print(yhat_frame[0:10, [y, 'predict']])

    # plot sorted predictions and log the figure
    yhat_frame_df = yhat_frame[[y, 'predict']].as_data_frame()
    yhat_frame_df.sort_values(by='predict', inplace=True)
    yhat_frame_df.reset_index(inplace=True, drop=True)
    plt = yhat_frame_df.plot(title='Ranked Predictions Plot')
    logger.log_string('Ranked Predictions Plot')
    logger.log_matplotlib_plot(plt)

    # select best model
    return best
def neural_net_grid(X, y, train, valid):
    """Random grid search over H2O deep learning models.

    :param X: List of input column names.
    :param y: Name of the target column.
    :param train: Training H2OFrame.
    :param valid: Validation H2OFrame.
    :return: Best H2ODeepLearningEstimator found by the search.
    """
    # candidate hyper-parameter values for the random search
    param_space = {
        'hidden': [[170, 320], [80, 190], [320, 160, 80], [100], [50, 50, 50, 50]],
        'l1': [v / 1e4 for v in range(0, 1000, 100)],
        'l2': [v / 1e5 for v in range(0, 1000, 100)],
        'input_dropout_ratio': [v / 1e2 for v in range(0, 20, 2)],
    }
    # random sampling, capped at 100 models or 2 hours of runtime
    strategy = {
        'strategy': 'RandomDiscrete',
        'max_models': 100,
        'max_runtime_secs': 60 * 60 * 2,
    }
    search = H2OGridSearch(H2ODeepLearningEstimator,
                           hyper_params=param_space,
                           search_criteria=strategy)
    # train every sampled configuration
    search.train(x=X,
                 y=y,
                 training_frame=train,
                 validation_frame=valid,
                 activation='TanhWithDropout',
                 epochs=2000,
                 stopping_rounds=20,
                 sparse=True,  # handles data w/ many zeros more efficiently
                 ignore_const_cols=True,
                 adaptive_rate=True)
    # the grid is sorted best-first
    return search.get_grid()[0]
def gboosting_grid(X, y, train, valid):
    """Random grid search over H2O gradient boosting (GBM) models.

    :param X: List of input column names.
    :param y: Name of the target column.
    :param train: Training H2OFrame.
    :param valid: Validation H2OFrame.
    :return: Best H2OGradientBoostingEstimator found by the search.
    """
    # define random grid search parameters
    # Fix: the original ranges started at 0 — ntrees=0 / max_depth=0 are not
    # trainable GBM settings, so start each grid at its first valid step
    # (same number of grid points as before).
    hyper_parameters = {'ntrees': list(range(50, 550, 50)),
                        'max_depth': list(range(2, 22, 2)),
                        'sample_rate': [s / 10.0 for s in range(1, 11)],
                        'col_sample_rate': [s / 10.0 for s in range(1, 11)]}
    # define search strategy: random sampling, capped at 100 models or 2 hours
    search_criteria = {'strategy': 'RandomDiscrete',
                       'max_models': 100,
                       'max_runtime_secs': 60 * 60 * 2,
                       }
    # initialize grid search
    gsearch = H2OGridSearch(H2OGradientBoostingEstimator,
                            hyper_params=hyper_parameters,
                            search_criteria=search_criteria)
    # execute training w/ grid search
    gsearch.train(x=X,
                  y=y,
                  training_frame=train,
                  validation_frame=valid)
    # grid results are sorted best-first; take the top model
    best_model = gsearch.get_grid()[0]
    return best_model
# Stand-alone XGBoost model with 3-fold CV; keep_cross_validation_predictions
# suggests its CV predictions were meant to feed a stacked ensemble.
h2o_xgb_model = H2OXGBoostEstimator(
    ntrees = 10000,
    learn_rate = 0.005,
    sample_rate = 0.1,
    col_sample_rate = 0.8,
    max_depth = 5,
    nfolds = 3,
    keep_cross_validation_predictions=True,
    stopping_rounds = 10,
    seed = 12345)
# execute training
# NOTE(review): `encoded_combined_nums` is only assigned inside a
# commented-out feature_combiner section above — as written this line raises
# NameError at runtime; confirm the intended feature list.
h2o_xgb_model.train(x=encoded_combined_nums,
                    y=Y,
                    training_frame=trainHF,
                    validation_frame=validHF)
print('Training..')
logger.log_string('glm0')
glm0 = glm_grid(original_nums, Y, base_train, base_valid)
logger.log_string('glm1')
glm1 = glm_grid(encoded_nums, Y, base_train, base_valid)
logger.log_string('glm2')
glm2 = glm_grid(encoded_combined_nums, Y, base_train, base_valid)
#
# logger.log_string('rnn0')
# rnn0 = neural_net_grid(original_nums, Y, base_train, base_valid)
# logger.log_string('rnn1')
# rnn1 = neural_net_grid(encoded_nums, Y, base_train, base_valid)
# logger.log_string('rnn2')
# rnn2 = neural_net_grid(encoded_combined_nums, Y, base_train, base_valid)
#
# logger.log_string('gbm0')
# gbm0 = gboosting_grid(original_nums, Y, base_train, base_valid)
# logger.log_string('gbm1')
# gbm1 = gboosting_grid(encoded_nums, Y, base_train, base_valid)
# logger.log_string('gbm2')
# gbm2 = gboosting_grid(encoded_combined_nums, Y, base_train, base_valid)
print('DONE training.')
stack_train = stack_train.cbind(glm0.predict(stack_train))
stack_valid = stack_valid.cbind(glm0.predict(stack_valid))
stack_train = stack_train.cbind(glm1.predict(stack_train))
stack_valid = stack_valid.cbind(glm1.predict(stack_valid))
stack_train = stack_train.cbind(glm2.predict(stack_train))
stack_valid = stack_valid.cbind(glm2.predict(stack_valid))
#
# stack_train = stack_train.cbind(rnn0.predict(stack_train))
# stack_valid = stack_valid.cbind(rnn0.predict(stack_valid))
# stack_train = stack_train.cbind(rnn1.predict(stack_train))
# stack_valid = stack_valid.cbind(rnn1.predict(stack_valid))
# stack_train = stack_train.cbind(rnn2.predict(stack_train))
# stack_valid = stack_valid.cbind(rnn2.predict(stack_valid))
#
# stack_train = stack_train.cbind(gbm0.predict(stack_train))
# stack_valid = stack_valid.cbind(gbm0.predict(stack_valid))
# stack_train = stack_train.cbind(gbm1.predict(stack_train))
# stack_valid = stack_valid.cbind(gbm1.predict(stack_valid))
# stack_train = stack_train.cbind(gbm2.predict(stack_train))
# stack_valid = stack_valid.cbind(gbm2.predict(stack_valid))
testHF = testHF.cbind(glm0.predict(testHF))
testHF = testHF.cbind(glm1.predict(testHF))
testHF = testHF.cbind(glm2.predict(testHF))
# testHF = testHF.cbind(rnn0.predict(testHF))
# testHF = testHF.cbind(rnn1.predict(testHF))
# testHF = testHF.cbind(rnn2.predict(testHF))
# testHF = testHF.cbind(gbm0.predict(testHF))
# testHF = testHF.cbind(gbm1.predict(testHF))
# testHF = testHF.cbind(gbm2.predict(testHF))
logger.log_string('glm3')
# glm3 = glm_grid(encoded_combined_nums + ['predict', 'predict0','predict1'], Y, stack_train, stack_valid, should_submit=True)
rnn = neural_net_grid(MOST_IMPORTANT_VARS_ORDERD + ['predict', 'predict0', 'predict1','predict2', 'predict3', 'predict4','predict5', 'predict6', 'predict7'], Y, stack_train, stack_valid)
sub = testHF[ID_VAR].cbind(rnn.predict(testHF))
print(sub.head())
# create time stamp
import re
import time
time_stamp = re.sub('[: ]', '_', time.asctime())
# save file for submission
sub.columns = [ID_VAR, Y]
sub_fname = 'Submission_'+str(time_stamp) + '.csv'
# h2o.download_csv(sub, 's3n://'+S3_BUCKET+'/kaggle_submissions/Mercedes/' +sub_fname)
spark_sub_frame = hc.as_spark_frame(sub)
spark_sub_frame.select(ID_VAR,Y).coalesce(1).write.option("header","true").csv('s3n://'+S3_BUCKET+'/Kaggle_Submissions/Mercedes/' +sub_fname)
|
en
| 0.39585
|
# imports #https://stackoverflow.com/questions/39504950/python-pyspark-get-sum-of-a-pyspark-dataframe-column-values #Define your s3 bucket to load and store data #Create a custom logger to log statistics and plots #.config('spark.executor.cores','6') \ # .master("local[*]") \ # .config('spark.cores.max','16') #.master("local") \ # .config("spark.some.config.option", "some-value") \ #Get rid of all the junk in output #From an XGBoost model # NOTE the top 6 are categorical, might want to look into this. #Load data from s3 #this needs to be done for h2o glm.predict() bug (which needs same number of columns) #Work around for splitting wide data, you need to split on only an ID varaibles #Then join back with a train varaible (bug in spark as of 2.1 with randomSplit()) # print('TRAIN DATA') # train.show(2) # print('VALID DATA') # valid.show(2) #workdaround for h2o predict #join all the new variables # print('TRAIN DATA') # train.show(2) # print('VALID DATA') # valid.show(2) # print('TEST DATA') # test.show(2) #Remplace cats with encoded cats from MOST_IMPORTANT_VARS_ORDERD # # print('Combining features....') # (train, valid, test) = feature_combiner(train, test, MOST_IMPORTANT_VARS_ORDERD, valid_frame = valid, frame_type='spark') # print('Done combining features.') # # encoded_combined_nums, cats = get_type_lists(frame=train,rejects=[ID_VAR,Y],frame_type='spark') # https://stackoverflow.com/questions/31774311/pca-analysis-in-pyspark ################################################################################ # DONE WITH PREPROCESSING - START TRAINING # ################################################################################ # turn on progress bars # import GLM models # grid search #Need this if running matplot on a server w/o display # print(trainHF.summary(return_data=True)) # logger.log_string(tabulate(trainHF.summary(return_data=True),tablefmt="grid")) # logger.log_string(trainHF._ex._cache._tabulate('grid',False)) # def 
upload_submission(sub,predict_column='predict'): # # create time stamp # import re # import time # time_stamp = re.sub('[: ]', '_', time.asctime()) # # # save file for submission # # sub.columns = [ID_VAR, Y] # sub_fname = 'Submission_'+str(time_stamp) + '.csv' # # h2o.download_csv(sub, 's3n://'+S3_BUCKET+'/kaggle_submissions/Mercedes/' +sub_fname) # # spark_sub_frame = hc.as_spark_frame(sub) # # spark_sub_frame.select(ID_VAR,predict_column).coalesce(1).write.option("header","true").csv('s3n://'+S3_BUCKET+'/Kaggle_Submissions/Mercedes/' +sub_fname) Wrapper function for penalized GLM with alpha and lambda search. :param X: List of inputs. :param y: Name of target variable. :param train: Name of training H2OFrame. :param valid: Name of validation H2OFrame. :return: Best H2Omodel from H2OGeneralizedLinearEstimator # always keep some L2 # initialize grid search # train grid # show grid search results # if should_submit: # sub_frame = testHF[ID_VAR].cbind(best.predict(testHF)) # print(sub_frame.col_names) # print('Submission frame preview:') # print(sub_frame[0:10, [ID_VAR, 'predict']]) # upload_submission(sub_frame,'predict') # plot top frame values # plot sorted predictions # select best model # define random grid search parameters # define search strategy #2 hours # initialize grid search # execute training w/ grid search # handles data w/ many zeros more efficiently # define random grid search parameters # define search strategy #2 hours # initialize grid search # execute training w/ grid search # execute training # # logger.log_string('rnn0') # rnn0 = neural_net_grid(original_nums, Y, base_train, base_valid) # logger.log_string('rnn1') # rnn1 = neural_net_grid(encoded_nums, Y, base_train, base_valid) # logger.log_string('rnn2') # rnn2 = neural_net_grid(encoded_combined_nums, Y, base_train, base_valid) # # logger.log_string('gbm0') # gbm0 = gboosting_grid(original_nums, Y, base_train, base_valid) # logger.log_string('gbm1') # gbm1 = gboosting_grid(encoded_nums, Y, 
base_train, base_valid) # logger.log_string('gbm2') # gbm2 = gboosting_grid(encoded_combined_nums, Y, base_train, base_valid) # # stack_train = stack_train.cbind(rnn0.predict(stack_train)) # stack_valid = stack_valid.cbind(rnn0.predict(stack_valid)) # stack_train = stack_train.cbind(rnn1.predict(stack_train)) # stack_valid = stack_valid.cbind(rnn1.predict(stack_valid)) # stack_train = stack_train.cbind(rnn2.predict(stack_train)) # stack_valid = stack_valid.cbind(rnn2.predict(stack_valid)) # # stack_train = stack_train.cbind(gbm0.predict(stack_train)) # stack_valid = stack_valid.cbind(gbm0.predict(stack_valid)) # stack_train = stack_train.cbind(gbm1.predict(stack_train)) # stack_valid = stack_valid.cbind(gbm1.predict(stack_valid)) # stack_train = stack_train.cbind(gbm2.predict(stack_train)) # stack_valid = stack_valid.cbind(gbm2.predict(stack_valid)) # testHF = testHF.cbind(rnn0.predict(testHF)) # testHF = testHF.cbind(rnn1.predict(testHF)) # testHF = testHF.cbind(rnn2.predict(testHF)) # testHF = testHF.cbind(gbm0.predict(testHF)) # testHF = testHF.cbind(gbm1.predict(testHF)) # testHF = testHF.cbind(gbm2.predict(testHF)) # glm3 = glm_grid(encoded_combined_nums + ['predict', 'predict0','predict1'], Y, stack_train, stack_valid, should_submit=True) # create time stamp # save file for submission # h2o.download_csv(sub, 's3n://'+S3_BUCKET+'/kaggle_submissions/Mercedes/' +sub_fname)
| 2.629231
| 3
|
example.py
|
park-daeun/onlinehd
| 0
|
6626842
|
<gh_stars>0
from time import time
import torch
import sklearn.datasets
import sklearn.preprocessing
import sklearn.model_selection
import numpy as np
import onlinehd
# loads simple mnist dataset
def load():
    """Fetch MNIST from OpenML, split, L2-normalize, and return torch tensors."""
    # fetch the raw dataset
    x, y = sklearn.datasets.fetch_openml('mnist_784', return_X_y=True)
    x = x.astype(float)
    y = np.array(y.astype(int))

    # split, then fit the normalizer on the training portion only
    x, x_test, y, y_test = sklearn.model_selection.train_test_split(x, y)
    normalizer = sklearn.preprocessing.Normalizer().fit(x)
    x = normalizer.transform(x)
    x_test = normalizer.transform(x_test)

    # convert everything to pytorch tensors
    x = torch.from_numpy(x).float()
    x_test = torch.from_numpy(x_test).float()
    y = torch.from_numpy(y).long()
    y_test = torch.from_numpy(y_test).long()
    return x, x_test, y, y_test
# simple OnlineHD training
def main():
    """Train a simple OnlineHD classifier on MNIST and print accuracy/timing."""
    print('Loading...')
    x, x_test, y, y_test = load()
    classes = y.unique().size(0)  # number of distinct labels
    features = x.size(1)  # input dimensionality (784 pixels)
    model = onlinehd.OnlineHD(classes, features)

    # move data and model to GPU when one is available
    if torch.cuda.is_available():
        x = x.cuda()
        y = y.cuda()
        x_test = x_test.cuda()
        y_test = y_test.cuda()
        model = model.to('cuda')
        print('Using GPU!')

    print('Training...')
    t = time()
    model = model.fit(x, y, bootstrap=1.0, lr=0.035, epochs=20)
    t = time() - t  # wall-clock training time in seconds

    # evaluate on both the training and held-out splits
    print('Validating...')
    yhat = model(x)
    yhat_test = model(x_test)
    acc = (y == yhat).float().mean()
    acc_test = (y_test == yhat_test).float().mean()
    print(f'{acc = :6f}')
    print(f'{acc_test = :6f}')
    print(f'{t = :6f}')

if __name__ == '__main__':
    main()
|
from time import time
import torch
import sklearn.datasets
import sklearn.preprocessing
import sklearn.model_selection
import numpy as np
import onlinehd
# loads simple mnist dataset
def load():
    """Fetch MNIST from OpenML, split, L2-normalize, and return torch tensors.

    :return: (x, x_test, y, y_test) — float32 features and int64 labels.
    """
    # fetches data
    x, y = sklearn.datasets.fetch_openml('mnist_784', return_X_y=True)
    x = x.astype(float)
    y = y.astype(int)
    y = np.array(y)

    # split and normalize (normalizer is fitted on the training split only)
    x, x_test, y, y_test = sklearn.model_selection.train_test_split(x, y)
    scaler = sklearn.preprocessing.Normalizer().fit(x)
    x = scaler.transform(x)
    x_test = scaler.transform(x_test)

    # changes data to pytorch's tensors
    x = torch.from_numpy(x).float()
    y = torch.from_numpy(y).long()
    x_test = torch.from_numpy(x_test).float()
    y_test = torch.from_numpy(y_test).long()
    return x, x_test, y, y_test
# simple OnlineHD training
def main():
    """Train a simple OnlineHD classifier on MNIST and print accuracy/timing."""
    print('Loading...')
    x, x_test, y, y_test = load()
    classes = y.unique().size(0)  # number of distinct labels
    features = x.size(1)  # input dimensionality (784 pixels)
    model = onlinehd.OnlineHD(classes, features)

    # move data and model to GPU when one is available
    if torch.cuda.is_available():
        x = x.cuda()
        y = y.cuda()
        x_test = x_test.cuda()
        y_test = y_test.cuda()
        model = model.to('cuda')
        print('Using GPU!')

    print('Training...')
    t = time()
    model = model.fit(x, y, bootstrap=1.0, lr=0.035, epochs=20)
    t = time() - t  # wall-clock training time in seconds

    # evaluate on both the training and held-out splits
    print('Validating...')
    yhat = model(x)
    yhat_test = model(x_test)
    acc = (y == yhat).float().mean()
    acc_test = (y_test == yhat_test).float().mean()
    print(f'{acc = :6f}')
    print(f'{acc_test = :6f}')
    print(f'{t = :6f}')

if __name__ == '__main__':
    main()
|
en
| 0.616775
|
# loads simple mnist dataset # fetches data # split and normalize # changes data to pytorch's tensors # simple OnlineHD training
| 2.771677
| 3
|
piano_notes.py
|
bwalsh62/note-recognition
| 1
|
6626843
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 5 20:56:26 2019
@author: <NAME>
for liloquy
Music from: http://theremin.music.uiowa.edu/MISpiano.html
Last updated: November 13, 2019
# TO DO
# - Expand sound_dict beyone one octave
"""
#%% Import libraries
from pygame import mixer
import os
import sys
# Add custom modules to path
module_path = os.path.abspath(os.path.join('..'))
if module_path not in sys.path:
sys.path.append(module_path)
# from util.music_util import note_to_freq
#%%
mixer.init()

# Paths to piano wav files (University of Iowa piano samples, see module header)
# NOTE(review): absolute user-specific path — breaks on other machines;
# the relative path below was presumably the portable alternative.
#music_fpath = r"..\..\piano-gui\music_files\piano"
MUSIC_FPATH = r"C:\Users\benja\OneDrive\Documents\Python\liloquy-git\piano-gui\music_files\piano"

# One sample file per semitone of the 4th octave.
C_path = os.path.join(MUSIC_FPATH,"Piano.mf.C4_2p4s.wav")
Csharp_path = os.path.join(MUSIC_FPATH,"Piano.mf.Db4_2p5s.wav")
D_path = os.path.join(MUSIC_FPATH,"Piano.mf.D4_2p4s.wav")
Dsharp_path = os.path.join(MUSIC_FPATH,"Piano.mf.Eb4_2p5s.wav")
E_path = os.path.join(MUSIC_FPATH,"Piano.mf.E4_2p4s.wav")
F_path = os.path.join(MUSIC_FPATH,"Piano.mf.F4_2p4s.wav")
Fsharp_path = os.path.join(MUSIC_FPATH,"Piano.mf.Gb4_2p5s.wav")
G_path = os.path.join(MUSIC_FPATH,"Piano.mf.G4_2p4s.wav")
Gsharp_path = os.path.join(MUSIC_FPATH,"Piano.mf.Ab4_2p5s.wav")
A_path = os.path.join(MUSIC_FPATH,"Piano.mf.A4_2p4s.wav")
Asharp_path = os.path.join(MUSIC_FPATH,"Piano.mf.Bb4_2p5s.wav")
B_path = os.path.join(MUSIC_FPATH,"Piano.mf.B4_2p4s.wav")

# Define sounds — loading each wav eagerly at import time.
sound_C = mixer.Sound(C_path)
sound_Csharp = mixer.Sound(Csharp_path)
sound_D = mixer.Sound(D_path)
sound_Dsharp = mixer.Sound(Dsharp_path)
sound_E = mixer.Sound(E_path)
sound_F = mixer.Sound(F_path)
sound_Fsharp = mixer.Sound(Fsharp_path)
sound_G = mixer.Sound(G_path)
sound_Gsharp = mixer.Sound(Gsharp_path)
sound_A = mixer.Sound(A_path)
sound_Asharp = mixer.Sound(Asharp_path)
sound_B = mixer.Sound(B_path)

# Lookup table: note name (e.g. 'C#4') -> pygame Sound object.
# Currently covers a single octave (see TODO in the module docstring).
sound_dict = {
    'C4': sound_C,
    'C#4': sound_Csharp,
    'D4': sound_D,
    'D#4': sound_Dsharp,
    'E4': sound_E,
    'F4': sound_F,
    'F#4': sound_Fsharp,
    'G4': sound_G,
    'G#4': sound_Gsharp,
    'A4': sound_A,
    'A#4': sound_Asharp,
    'B4': sound_B,
    }
#%% Try to combine into class
# MOVING TO music_util
# class Note:
# fs = 44100 # Sampling frequency in Hz
# def __init__(self, note, instr='piano'):
# #self.f0 = f0 # frequency in Hz
# self.note = note # Example C4
# self.f0 = note_to_freq[note]
# self.instr = instr
# self.sound = sound_dict[note]
|
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 5 20:56:26 2019
@author: <NAME>
for liloquy
Music from: http://theremin.music.uiowa.edu/MISpiano.html
Last updated: November 13, 2019
# TO DO
# - Expand sound_dict beyone one octave
"""
#%% Import libraries
from pygame import mixer
import os
import sys
# Add custom modules to path
module_path = os.path.abspath(os.path.join('..'))
if module_path not in sys.path:
sys.path.append(module_path)
# from util.music_util import note_to_freq
#%%
mixer.init()
# Paths to piano wav files
#music_fpath = r"..\..\piano-gui\music_files\piano"
MUSIC_FPATH = r"C:\Users\benja\OneDrive\Documents\Python\liloquy-git\piano-gui\music_files\piano"
C_path = os.path.join(MUSIC_FPATH,"Piano.mf.C4_2p4s.wav")
Csharp_path = os.path.join(MUSIC_FPATH,"Piano.mf.Db4_2p5s.wav")
D_path = os.path.join(MUSIC_FPATH,"Piano.mf.D4_2p4s.wav")
Dsharp_path = os.path.join(MUSIC_FPATH,"Piano.mf.Eb4_2p5s.wav")
E_path = os.path.join(MUSIC_FPATH,"Piano.mf.E4_2p4s.wav")
F_path = os.path.join(MUSIC_FPATH,"Piano.mf.F4_2p4s.wav")
Fsharp_path = os.path.join(MUSIC_FPATH,"Piano.mf.Gb4_2p5s.wav")
G_path = os.path.join(MUSIC_FPATH,"Piano.mf.G4_2p4s.wav")
Gsharp_path = os.path.join(MUSIC_FPATH,"Piano.mf.Ab4_2p5s.wav")
A_path = os.path.join(MUSIC_FPATH,"Piano.mf.A4_2p4s.wav")
Asharp_path = os.path.join(MUSIC_FPATH,"Piano.mf.Bb4_2p5s.wav")
B_path = os.path.join(MUSIC_FPATH,"Piano.mf.B4_2p4s.wav")
# Define sounds
sound_C = mixer.Sound(C_path)
sound_Csharp = mixer.Sound(Csharp_path)
sound_D = mixer.Sound(D_path)
sound_Dsharp = mixer.Sound(Dsharp_path)
sound_E = mixer.Sound(E_path)
sound_F = mixer.Sound(F_path)
sound_Fsharp = mixer.Sound(Fsharp_path)
sound_G = mixer.Sound(G_path)
sound_Gsharp = mixer.Sound(Gsharp_path)
sound_A = mixer.Sound(A_path)
sound_Asharp = mixer.Sound(Asharp_path)
sound_B = mixer.Sound(B_path)
sound_dict = {
'C4': sound_C,
'C#4': sound_Csharp,
'D4': sound_D,
'D#4': sound_Dsharp,
'E4': sound_E,
'F4': sound_F,
'F#4': sound_Fsharp,
'G4': sound_G,
'G#4': sound_Gsharp,
'A4': sound_A,
'A#4': sound_Asharp,
'B4': sound_B,
}
#%% Try to combine into class
# MOVING TO music_util
# class Note:
# fs = 44100 # Sampling frequency in Hz
# def __init__(self, note, instr='piano'):
# #self.f0 = f0 # frequency in Hz
# self.note = note # Example C4
# self.f0 = note_to_freq[note]
# self.instr = instr
# self.sound = sound_dict[note]
|
en
| 0.66039
|
# -*- coding: utf-8 -*- Created on Mon Aug 5 20:56:26 2019 @author: <NAME> for liloquy Music from: http://theremin.music.uiowa.edu/MISpiano.html Last updated: November 13, 2019 # TO DO # - Expand sound_dict beyone one octave #%% Import libraries # Add custom modules to path # from util.music_util import note_to_freq #%% # Paths to piano wav files #music_fpath = r"..\..\piano-gui\music_files\piano" # Define sounds #4': sound_Csharp, #4': sound_Dsharp, #4': sound_Fsharp, #4': sound_Gsharp, #4': sound_Asharp, #%% Try to combine into class # MOVING TO music_util # class Note: # fs = 44100 # Sampling frequency in Hz # def __init__(self, note, instr='piano'): # #self.f0 = f0 # frequency in Hz # self.note = note # Example C4 # self.f0 = note_to_freq[note] # self.instr = instr # self.sound = sound_dict[note]
| 1.975472
| 2
|
feat/serializers.py
|
heobu/swe573
| 1
|
6626844
|
from django.contrib.auth.models import User
from rest_framework import serializers
from feat.models import ConsumerProfile, ProviderProfile, Recipe, Menu, RecipeLike, MenuLike, DailyIntakeFromRecipe
class UserSerializer(serializers.HyperlinkedModelSerializer):
    """DRF serializer for Django auth User accounts."""
    class Meta:
        model = User
        # NOTE(review): '<PASSWORD>' entries look like redacted placeholders,
        # not real field names — confirm the intended fields (e.g. 'password').
        fields = ('username', 'email', '<PASSWORD>', '<PASSWORD>')
class ConsumerProfileSerializer(serializers.HyperlinkedModelSerializer):
    """DRF serializer for ConsumerProfile; exposes date of birth only."""
    class Meta:
        model = ConsumerProfile
        fields = ('date_of_birth',)
class ProviderProfileSerializer(serializers.HyperlinkedModelSerializer):
    """DRF serializer for ProviderProfile; exposes location only."""
    class Meta:
        model = ProviderProfile
        fields = ('location',)
class RecipeSerializer(serializers.HyperlinkedModelSerializer):
    """DRF serializer for Recipe with its descriptive and preparation fields."""
    class Meta:
        model = Recipe
        fields = ('title', 'ingredients', 'description', 'instructions', 'difficulty', 'prepared_in')
class MenuSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Menu
fields = ('title', 'description', 'food_items', 'nutritional_value', 'created_at')
class RecipeLikeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = RecipeLike
fields = ('recipe', 'cprofiles')
class MenuLikeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = MenuLike
fields = ('menu', 'cprofiles')
class DailyIntakeFromRecipeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = DailyIntakeFromRecipe
fields = ('intake_at',)
|
from django.contrib.auth.models import User
from rest_framework import serializers
from feat.models import ConsumerProfile, ProviderProfile, Recipe, Menu, RecipeLike, MenuLike, DailyIntakeFromRecipe
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('username', 'email', '<PASSWORD>', '<PASSWORD>')
class ConsumerProfileSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ConsumerProfile
fields = ('date_of_birth',)
class ProviderProfileSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ProviderProfile
fields = ('location',)
class RecipeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Recipe
fields = ('title', 'ingredients', 'description', 'instructions', 'difficulty', 'prepared_in')
class MenuSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Menu
fields = ('title', 'description', 'food_items', 'nutritional_value', 'created_at')
class RecipeLikeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = RecipeLike
fields = ('recipe', 'cprofiles')
class MenuLikeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = MenuLike
fields = ('menu', 'cprofiles')
class DailyIntakeFromRecipeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = DailyIntakeFromRecipe
fields = ('intake_at',)
|
none
| 1
| 2.078129
| 2
|
|
ukis_pysat/__init__.py
|
Zajquor/ukis-pysat
| 0
|
6626845
|
<gh_stars>0
__version__ = "1.3.4"
|
__version__ = "1.3.4"
|
none
| 1
| 1.039236
| 1
|
|
convertDimension.py
|
vitorpp0/unitsmod
| 0
|
6626846
|
from unitsmod import database as dt
from unitsmod.dimensionClass import dimension as dm
def compareType(unitFrom, unitTo):
for item in unitFrom:
same = False
for term in unitTo:
if(item == term):
same = True
if not same:
return False
return True
def conv(unitFrom, unitTo):
unitFrom = dm(unitFrom)
unitTo = dm(unitTo)
if(compareType(unitFrom.type, unitTo.type)):
return unitFrom.convertConstant/unitTo.convertConstant
else:
message = 'thermo.py: {} and {} are not dimensionally similar'
print(message.format(unitFrom.dType(), unitTo.dType()))
return 0
def convTemp(temp, unitFrom, unitTo):
convConst = [[unitFrom], [unitTo]]
for unit in convConst:
container = dt.database.loc[dt.database['%temperatureUnit']== unit[0]][['temperatureIntervalConv', '%temperatureConv']].values[0]
unit.append(container[0])
unit.append(container[1])
return (temp-convConst[0][2])*(convConst[0][1]/convConst[1][1])+convConst[1][2]
|
from unitsmod import database as dt
from unitsmod.dimensionClass import dimension as dm
def compareType(unitFrom, unitTo):
for item in unitFrom:
same = False
for term in unitTo:
if(item == term):
same = True
if not same:
return False
return True
def conv(unitFrom, unitTo):
unitFrom = dm(unitFrom)
unitTo = dm(unitTo)
if(compareType(unitFrom.type, unitTo.type)):
return unitFrom.convertConstant/unitTo.convertConstant
else:
message = 'thermo.py: {} and {} are not dimensionally similar'
print(message.format(unitFrom.dType(), unitTo.dType()))
return 0
def convTemp(temp, unitFrom, unitTo):
convConst = [[unitFrom], [unitTo]]
for unit in convConst:
container = dt.database.loc[dt.database['%temperatureUnit']== unit[0]][['temperatureIntervalConv', '%temperatureConv']].values[0]
unit.append(container[0])
unit.append(container[1])
return (temp-convConst[0][2])*(convConst[0][1]/convConst[1][1])+convConst[1][2]
|
none
| 1
| 2.512546
| 3
|
|
back-end/erasmail/emails/migrations/0011_emailheaders_co2.py
|
SamirM-BE/ErasMail
| 7
|
6626847
|
<reponame>SamirM-BE/ErasMail<gh_stars>1-10
# Generated by Django 3.1.6 on 2021-03-14 10:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('emails', '0010_auto_20210228_1549'),
]
operations = [
migrations.AddField(
model_name='emailheaders',
name='co2',
field=models.FloatField(default=0),
preserve_default=False,
),
]
|
# Generated by Django 3.1.6 on 2021-03-14 10:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('emails', '0010_auto_20210228_1549'),
]
operations = [
migrations.AddField(
model_name='emailheaders',
name='co2',
field=models.FloatField(default=0),
preserve_default=False,
),
]
|
en
| 0.774507
|
# Generated by Django 3.1.6 on 2021-03-14 10:26
| 1.56757
| 2
|
armi/materials/caH2.py
|
keckler/armi
| 162
|
6626848
|
# Copyright 2019 TerraPower, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Calcium Hydride.
"""
from armi.materials.material import Material
class CaH2(Material):
"""CalciumHydride"""
name = "CaH2"
def setDefaultMassFracs(self):
"""Default mass fractions.
http://atom.kaeri.re.kr/ton/
iso atomic percent abundance and atomic mass of 20-calcium
| 20-Ca-40 96.941% 39.9625912
| 20-Ca-42 0.647% 41.9586183
| 20-Ca-43 0.135% 42.9587668
| 20-Ca-44 2.086% 43.9554811
| 20-Ca-46 0.004% 45.9536928
| 20-Ca-48 0.187% 47.9525335
atomic weight of H2 2.01565
weight of CaH2 42.09367285
| weight% of Ca-40 in CaH2 0.920331558
| weight% of Ca-42 in CaH2 0.006449241
| weight% of Ca-43 in CaH2 0.001377745
| weight% of Ca-44 in CaH2 0.02178264
| weight% of Ca-46 in CaH2 4.3668E-05
| weight% of Ca-48 in CaH2 0.002130278
| weight% of H2 in CaH2 0.047884869
"""
self.setMassFrac("CA", 0.952115131)
self.setMassFrac("H", 0.047884869)
def density(self, Tk=None, Tc=None):
"""Mass density
http://en.wikipedia.org/wiki/Calcium_hydride
Returns
-------
density : float
grams / cc
"""
return 1.70
|
# Copyright 2019 TerraPower, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Calcium Hydride.
"""
from armi.materials.material import Material
class CaH2(Material):
"""CalciumHydride"""
name = "CaH2"
def setDefaultMassFracs(self):
"""Default mass fractions.
http://atom.kaeri.re.kr/ton/
iso atomic percent abundance and atomic mass of 20-calcium
| 20-Ca-40 96.941% 39.9625912
| 20-Ca-42 0.647% 41.9586183
| 20-Ca-43 0.135% 42.9587668
| 20-Ca-44 2.086% 43.9554811
| 20-Ca-46 0.004% 45.9536928
| 20-Ca-48 0.187% 47.9525335
atomic weight of H2 2.01565
weight of CaH2 42.09367285
| weight% of Ca-40 in CaH2 0.920331558
| weight% of Ca-42 in CaH2 0.006449241
| weight% of Ca-43 in CaH2 0.001377745
| weight% of Ca-44 in CaH2 0.02178264
| weight% of Ca-46 in CaH2 4.3668E-05
| weight% of Ca-48 in CaH2 0.002130278
| weight% of H2 in CaH2 0.047884869
"""
self.setMassFrac("CA", 0.952115131)
self.setMassFrac("H", 0.047884869)
def density(self, Tk=None, Tc=None):
"""Mass density
http://en.wikipedia.org/wiki/Calcium_hydride
Returns
-------
density : float
grams / cc
"""
return 1.70
|
en
| 0.742287
|
# Copyright 2019 TerraPower, LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Calcium Hydride. CalciumHydride Default mass fractions. http://atom.kaeri.re.kr/ton/ iso atomic percent abundance and atomic mass of 20-calcium | 20-Ca-40 96.941% 39.9625912 | 20-Ca-42 0.647% 41.9586183 | 20-Ca-43 0.135% 42.9587668 | 20-Ca-44 2.086% 43.9554811 | 20-Ca-46 0.004% 45.9536928 | 20-Ca-48 0.187% 47.9525335 atomic weight of H2 2.01565 weight of CaH2 42.09367285 | weight% of Ca-40 in CaH2 0.920331558 | weight% of Ca-42 in CaH2 0.006449241 | weight% of Ca-43 in CaH2 0.001377745 | weight% of Ca-44 in CaH2 0.02178264 | weight% of Ca-46 in CaH2 4.3668E-05 | weight% of Ca-48 in CaH2 0.002130278 | weight% of H2 in CaH2 0.047884869 Mass density http://en.wikipedia.org/wiki/Calcium_hydride Returns ------- density : float grams / cc
| 1.97248
| 2
|
Helper/RequestContext.py
|
amrishAK/DBB_gateway
| 0
|
6626849
|
<reponame>amrishAK/DBB_gateway<filename>Helper/RequestContext.py
class RequestContext:
def __init__(self,header,path,requestMessage,command):
self.Header = header
self.Path = path
self.RequestMessage = requestMessage
self.Command = command
def SetResponse(self, statusCode, statusReason, payload):
self.ResponseMessage = payload
self.ResponseCode = statusCode
self.ResponseReason = statusReason
|
class RequestContext:
def __init__(self,header,path,requestMessage,command):
self.Header = header
self.Path = path
self.RequestMessage = requestMessage
self.Command = command
def SetResponse(self, statusCode, statusReason, payload):
self.ResponseMessage = payload
self.ResponseCode = statusCode
self.ResponseReason = statusReason
|
none
| 1
| 2.328929
| 2
|
|
main.py
|
stephanebruckert/GoCrawl
| 0
|
6626850
|
<filename>main.py
#!/usr/bin/python
import argparse
from gocrawl.core import core
if __name__ == "__main__":
'''
$ python main.py -h
usage: main.py [-h] -L LINK [--silent] [-W WAIT]
GoCrawl
optional arguments:
-h, --help show this help message and exit
-L LINK, --link LINK Entry point URL
--silent Silent mode
-W WAIT, --wait WAIT Minimum wait time in seconds between each request
'''
parser = argparse.ArgumentParser(description="GoCrawl")
parser.add_argument('-L', '--link', type=str, required=True,
help='Entry point URL')
parser.add_argument('--silent', dest='silent', action='store_false',
help='Silent mode')
parser.add_argument('-W', '--wait', type=int, required=False,
help='Minimum wait time in seconds between each \
request')
parser.set_defaults(progress=True)
args = parser.parse_args()
core(args.link, args.progress, args.wait)
|
<filename>main.py
#!/usr/bin/python
import argparse
from gocrawl.core import core
if __name__ == "__main__":
'''
$ python main.py -h
usage: main.py [-h] -L LINK [--silent] [-W WAIT]
GoCrawl
optional arguments:
-h, --help show this help message and exit
-L LINK, --link LINK Entry point URL
--silent Silent mode
-W WAIT, --wait WAIT Minimum wait time in seconds between each request
'''
parser = argparse.ArgumentParser(description="GoCrawl")
parser.add_argument('-L', '--link', type=str, required=True,
help='Entry point URL')
parser.add_argument('--silent', dest='silent', action='store_false',
help='Silent mode')
parser.add_argument('-W', '--wait', type=int, required=False,
help='Minimum wait time in seconds between each \
request')
parser.set_defaults(progress=True)
args = parser.parse_args()
core(args.link, args.progress, args.wait)
|
en
| 0.31324
|
#!/usr/bin/python $ python main.py -h usage: main.py [-h] -L LINK [--silent] [-W WAIT] GoCrawl optional arguments: -h, --help show this help message and exit -L LINK, --link LINK Entry point URL --silent Silent mode -W WAIT, --wait WAIT Minimum wait time in seconds between each request
| 2.745485
| 3
|