code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
from ..tools.velocity_embedding import velocity_embedding
from ..tools.utils import groups_to_bool
from .utils import default_basis, default_size, default_color, get_components, savefig_or_show, make_unique_list, get_basis
from .velocity_embedding_grid import compute_velocity_on_grid
from .scatter import scatter
from .docs import doc_scatter, doc_params
from matplotlib import rcParams
import matplotlib.pyplot as pl
import numpy as np
@doc_params(scatter=doc_scatter)
def velocity_embedding_stream(adata, basis=None, vkey='velocity', density=None, smooth=None, linewidth=None,
                              n_neighbors=None, X=None, V=None, X_grid=None, V_grid=None, color=None, use_raw=None,
                              layer=None, color_map=None, colorbar=True, palette=None, size=None, alpha=.1, perc=None,
                              sort_order=True, groups=None, components=None, legend_loc='on data',
                              legend_fontsize=None, legend_fontweight=None, right_margin=None, left_margin=None,
                              xlabel=None, ylabel=None, title=None, fontsize=None, figsize=None, dpi=None, frameon=None,
                              show=True, save=None, ax=None, ncols=None, **kwargs):
    """\
    Stream plot of velocities on the embedding.

    Arguments
    ---------
    adata: :class:`~anndata.AnnData`
        Annotated data matrix.
    basis: `str` or `None` (default: `None`)
        Key for the embedding coordinates.
    vkey: `str`, list of `str` or `'all'` (default: `'velocity'`)
        Key(s) for the velocity layer(s); `'all'` selects every velocity layer.
    density: `float` (default: 1)
        Amount of velocities to show - 0 none to 1 all
    smooth: `float` (default: 0.5)
        Multiplication factor for scale in Gaussian kernel around grid point.
    linewidth: `float` (default: 1)
        Line width for streamplot.
    n_neighbors: `int` (default: None)
        Number of neighbors to consider around grid point.
    X: `np.ndarray` (default: None)
        Embedding grid point coordinates
    V: `np.ndarray` (default: None)
        Embedding grid velocity coordinates
    {scatter}

    Returns
    -------
    `matplotlib.Axis` if `show==False`
    """
    basis = default_basis(adata) if basis is None else get_basis(adata, basis)
    # BUGFIX: compare strings with '==' rather than 'is' — identity comparison
    # against a str literal is implementation-dependent (and a SyntaxWarning on
    # modern CPython).
    if vkey == 'all':
        vkey = [key for key in adata.layers.keys() if 'velocity' in key and '_u' not in key]
    colors, layers, vkeys = make_unique_list(color, allow_array=True), make_unique_list(layer), make_unique_list(vkey)

    # Compute any missing embedded velocities before plotting.
    for key in vkeys:
        if key + '_' + basis not in adata.obsm_keys() and V is None:
            velocity_embedding(adata, basis=basis, vkey=key)

    color, layer, vkey = colors[0], layers[0], vkeys[0]
    color = default_color(adata) if color is None else color

    if X_grid is None or V_grid is None:
        # Optionally restrict to the selected groups before gridding.
        _adata = adata[groups_to_bool(adata, groups, groupby=color)] \
            if groups is not None and color in adata.obs.keys() else adata
        X_emb = np.array(_adata.obsm['X_' + basis][:, get_components(components, basis)]) if X is None else X[:, :2]
        V_emb = np.array(_adata.obsm[vkey + '_' + basis][:, get_components(components, basis)]) if V is None else V[:, :2]
        X_grid, V_grid = compute_velocity_on_grid(X_emb=X_emb, V_emb=V_emb, density=1, smooth=smooth,
                                                   n_neighbors=n_neighbors, autoscale=False, adjust_for_stream=True)

    # Scale stream line width with local velocity magnitude (NaNs excluded).
    lengths = np.sqrt((V_grid ** 2).sum(0))
    linewidth = 1 if linewidth is None else linewidth
    linewidth *= 2 * lengths / lengths[~np.isnan(lengths)].max()

    scatter_kwargs = {"basis": basis, "perc": perc, "use_raw": use_raw, "sort_order": sort_order, "alpha": alpha,
                      "components": components, "legend_loc": legend_loc, "groups": groups,
                      "legend_fontsize": legend_fontsize, "legend_fontweight": legend_fontweight, "palette": palette,
                      "color_map": color_map, "frameon": frameon, "xlabel": xlabel, "ylabel": ylabel,
                      "right_margin": right_margin, "left_margin": left_margin, "colorbar": colorbar, "dpi": dpi,
                      "fontsize": fontsize, "show": False, "save": None}

    # If multiple colors/layers/vkeys were given, draw one panel per key by
    # recursing with a single key each time.
    multikey = colors if len(colors) > 1 else layers if len(layers) > 1 else vkeys if len(vkeys) > 1 else None
    if multikey is not None:
        if title is None:
            title = list(multikey)
        elif isinstance(title, (list, tuple)):
            # Repeat the given titles to cover every panel.
            title *= int(np.ceil(len(multikey) / len(title)))
        ncols = len(multikey) if ncols is None else min(len(multikey), ncols)
        nrows = int(np.ceil(len(multikey) / ncols))
        figsize = rcParams['figure.figsize'] if figsize is None else figsize
        ax = []
        for i, gs in enumerate(
                pl.GridSpec(nrows, ncols, pl.figure(None, (figsize[0] * ncols, figsize[1] * nrows), dpi=dpi))):
            if i < len(multikey):
                # The grid must be recomputed per panel when vkey varies.
                ax.append(velocity_embedding_stream(adata, density=density, size=size, smooth=smooth, n_neighbors=n_neighbors,
                                                    linewidth=linewidth, ax=pl.subplot(gs),
                                                    color=colors[i] if len(colors) > 1 else color,
                                                    layer=layers[i] if len(layers) > 1 else layer,
                                                    vkey=vkeys[i] if len(vkeys) > 1 else vkey,
                                                    title=title[i] if isinstance(title, (list, tuple)) else title,
                                                    X_grid=None if len(vkeys) > 1 else X_grid,
                                                    V_grid=None if len(vkeys) > 1 else V_grid, **scatter_kwargs, **kwargs))
        savefig_or_show('' if basis is None else basis, dpi=dpi, save=save, show=show)
        if not show:
            return ax
    else:
        ax = pl.figure(None, figsize, dpi=dpi).gca() if ax is None else ax
        density = 1 if density is None else density
        stream_kwargs = {"linewidth": linewidth, "density": 2 * density}
        stream_kwargs.update(kwargs)
        pl.streamplot(X_grid[0], X_grid[1], V_grid[0], V_grid[1], color='grey', zorder=3, **stream_kwargs)
        size = 4 * default_size(adata) if size is None else size
        ax = scatter(adata, layer=layer, color=color, size=size, title=title, ax=ax, zorder=0, **scatter_kwargs)
        savefig_or_show('' if basis is None else basis, dpi=dpi, save=save, show=show)
        if not show:
            return ax
|
[
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.figure",
"numpy.isnan",
"matplotlib.pyplot.streamplot"
] |
[((6200, 6302), 'matplotlib.pyplot.streamplot', 'pl.streamplot', (['X_grid[0]', 'X_grid[1]', 'V_grid[0]', 'V_grid[1]'], {'color': '"""grey"""', 'zorder': '(3)'}), "(X_grid[0], X_grid[1], V_grid[0], V_grid[1], color='grey',\n zorder=3, **stream_kwargs)\n", (6213, 6302), True, 'import matplotlib.pyplot as pl\n'), ((4875, 4941), 'matplotlib.pyplot.figure', 'pl.figure', (['None', '(figsize[0] * ncols, figsize[1] * nrows)'], {'dpi': 'dpi'}), '(None, (figsize[0] * ncols, figsize[1] * nrows), dpi=dpi)\n', (4884, 4941), True, 'import matplotlib.pyplot as pl\n'), ((5967, 6000), 'matplotlib.pyplot.figure', 'pl.figure', (['None', 'figsize'], {'dpi': 'dpi'}), '(None, figsize, dpi=dpi)\n', (5976, 6000), True, 'import matplotlib.pyplot as pl\n'), ((3652, 3669), 'numpy.isnan', 'np.isnan', (['lengths'], {}), '(lengths)\n', (3660, 3669), True, 'import numpy as np\n'), ((5182, 5196), 'matplotlib.pyplot.subplot', 'pl.subplot', (['gs'], {}), '(gs)\n', (5192, 5196), True, 'import matplotlib.pyplot as pl\n')]
|
# Copyright 2008-2018 Univa Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import pytest
from mock import patch
from tortuga.exceptions.configurationError import ConfigurationError
from tortuga.resourceAdapter.aws import Aws
from tortuga.resourceAdapter.resourceAdapter import ResourceAdapter
@pytest.fixture
def minimal_configDict():
    """Smallest resource adapter configuration expected to validate."""
    return dict(ami='ami-XXXXXXXX')
def test_invalid_adapter_configuration(dbm):
    """Ensure an exception is raised for missing required settings."""
    with dbm.session() as session, \
            pytest.raises(ConfigurationError), \
            patch.object(ResourceAdapter, '_load_config_from_database',
                         return_value={}):
        aws_adapter = Aws()
        aws_adapter.session = session
        aws_adapter.getResourceAdapterConfig()
def test_minimal_config(dbm, minimal_configDict):
    """The minimal configuration validates and fills in sane defaults."""
    with dbm.session() as session, \
            patch.object(ResourceAdapter, '_load_config_from_database',
                         return_value=minimal_configDict):
        aws_adapter = Aws()
        aws_adapter.session = session
        cfg = aws_adapter.getResourceAdapterConfig()
        assert 'ami' in cfg
        assert cfg['ami'] == 'ami-XXXXXXXX'
        # override_dns_domain defaults to a real boolean False.
        assert isinstance(cfg['override_dns_domain'], bool)
        assert not cfg['override_dns_domain']
def test_override_dns_domain_enabled(dbm):
    """Enabling override_dns_domain without dns_domain uses a fallback zone."""
    adapter_settings = {
        'ami': 'ami-XXXXXXXX',
        'override_dns_domain': 'true',
    }
    with dbm.session() as session, \
            patch.object(ResourceAdapter, '_load_config_from_database',
                         return_value=adapter_settings):
        aws_adapter = Aws()
        aws_adapter.session = session
        cfg = aws_adapter.getResourceAdapterConfig()
        assert isinstance(cfg['override_dns_domain'], bool)
        assert cfg['override_dns_domain']
        # When 'dns_domain' is not configured, the current private DNS zone
        # is used instead; any non-empty string is acceptable here.
        assert isinstance(cfg['dns_domain'], str)
        assert cfg['dns_domain']
def test_override_dns_domain_enabled_with_dns_domain(dbm):
    """An explicit dns_domain is honoured when override_dns_domain is set."""
    adapter_settings = {
        'ami': 'ami-XXXXXXXX',
        'override_dns_domain': 'true',
        'dns_domain': 'mydomain',
    }
    with dbm.session() as session, \
            patch.object(ResourceAdapter, '_load_config_from_database',
                         return_value=adapter_settings):
        aws_adapter = Aws()
        aws_adapter.session = session
        cfg = aws_adapter.getResourceAdapterConfig()
        assert isinstance(cfg['override_dns_domain'], bool)
        assert cfg['override_dns_domain']
        assert cfg['dns_domain'] == 'mydomain'
@mock.patch.object(Aws, '_load_config_from_database')
def test_missing_ami_setting(load_config_dict_mock, dbm):
    """A configuration without the required 'ami' key must be rejected."""
    load_config_dict_mock.return_value = {}
    with dbm.session() as session, pytest.raises(ConfigurationError):
        aws_adapter = Aws()
        aws_adapter.session = session
        aws_adapter.getResourceAdapterConfig()
@mock.patch.object(Aws, '_load_config_from_database')
def test_use_instance_hostname(load_config_dict_mock, dbm):
    """dns_domain passes through when use_instance_hostname is disabled."""
    load_config_dict_mock.return_value = {
        'ami': 'ami-XXXXXX',
        'override_dns_domain': 'true',
        'dns_domain': 'cloud.example.com',
        'use_instance_hostname': 'false',
    }
    with dbm.session() as session:
        aws_adapter = Aws()
        aws_adapter.session = session
        cfg = aws_adapter.getResourceAdapterConfig()
        assert cfg['dns_domain'] == 'cloud.example.com'
@mock.patch.object(Aws, '_load_config_from_database')
def test_defaults(load_config_dict_mock, dbm):
    """With only 'ami' supplied, every other setting takes its default."""
    load_config_dict_mock.return_value = {'ami': 'ami-XXXXXXXX'}
    with dbm.session() as session:
        aws_adapter = Aws()
        aws_adapter.session = session
        cfg = aws_adapter.getResourceAdapterConfig()
        assert cfg['ami'] == 'ami-XXXXXXXX'
        assert cfg['use_instance_hostname']
        assert cfg['associate_public_ip_address']
        assert not cfg['cloud_init']
        assert not cfg.get('override_dns_domain', None)
        assert not cfg.get('use_domain_from_dhcp_option_set', None)
        assert cfg['region'] == 'us-east-1'
@mock.patch.object(Aws, '_load_config_from_database')
def test_invalid_settings(load_config_dict_mock, dbm):
    """Unknown keys in the stored configuration raise ConfigurationError."""
    load_config_dict_mock.return_value = {
        'ami': 'ami-XXXXXXXX',
        'unrecognized': 'setting',
        'another_bad_setting': 'value',
    }
    with dbm.session() as session, pytest.raises(ConfigurationError):
        aws_adapter = Aws()
        aws_adapter.session = session
        aws_adapter.getResourceAdapterConfig()
|
[
"mock.patch.object",
"pytest.raises",
"tortuga.resourceAdapter.aws.Aws"
] |
[((3428, 3480), 'mock.patch.object', 'mock.patch.object', (['Aws', '"""_load_config_from_database"""'], {}), "(Aws, '_load_config_from_database')\n", (3445, 3480), False, 'import mock\n'), ((3784, 3836), 'mock.patch.object', 'mock.patch.object', (['Aws', '"""_load_config_from_database"""'], {}), "(Aws, '_load_config_from_database')\n", (3801, 3836), False, 'import mock\n'), ((4309, 4361), 'mock.patch.object', 'mock.patch.object', (['Aws', '"""_load_config_from_database"""'], {}), "(Aws, '_load_config_from_database')\n", (4326, 4361), False, 'import mock\n'), ((5010, 5062), 'mock.patch.object', 'mock.patch.object', (['Aws', '"""_load_config_from_database"""'], {}), "(Aws, '_load_config_from_database')\n", (5027, 5062), False, 'import mock\n'), ((4153, 4158), 'tortuga.resourceAdapter.aws.Aws', 'Aws', ([], {}), '()\n', (4156, 4158), False, 'from tortuga.resourceAdapter.aws import Aws\n'), ((4543, 4548), 'tortuga.resourceAdapter.aws.Aws', 'Aws', ([], {}), '()\n', (4546, 4548), False, 'from tortuga.resourceAdapter.aws import Aws\n'), ((1076, 1109), 'pytest.raises', 'pytest.raises', (['ConfigurationError'], {}), '(ConfigurationError)\n', (1089, 1109), False, 'import pytest\n'), ((1453, 1550), 'mock.patch.object', 'patch.object', (['ResourceAdapter', '"""_load_config_from_database"""'], {'return_value': 'minimal_configDict'}), "(ResourceAdapter, '_load_config_from_database', return_value=\n minimal_configDict)\n", (1465, 1550), False, 'from mock import patch\n'), ((1602, 1607), 'tortuga.resourceAdapter.aws.Aws', 'Aws', ([], {}), '()\n', (1605, 1607), False, 'from tortuga.resourceAdapter.aws import Aws\n'), ((2102, 2191), 'mock.patch.object', 'patch.object', (['ResourceAdapter', '"""_load_config_from_database"""'], {'return_value': 'configDict'}), "(ResourceAdapter, '_load_config_from_database', return_value=\n configDict)\n", (2114, 2191), False, 'from mock import patch\n'), ((2243, 2248), 'tortuga.resourceAdapter.aws.Aws', 'Aws', ([], {}), '()\n', (2246, 2248), False, 
'from tortuga.resourceAdapter.aws import Aws\n'), ((3010, 3099), 'mock.patch.object', 'patch.object', (['ResourceAdapter', '"""_load_config_from_database"""'], {'return_value': 'configDict'}), "(ResourceAdapter, '_load_config_from_database', return_value=\n configDict)\n", (3022, 3099), False, 'from mock import patch\n'), ((3151, 3156), 'tortuga.resourceAdapter.aws.Aws', 'Aws', ([], {}), '()\n', (3154, 3156), False, 'from tortuga.resourceAdapter.aws import Aws\n'), ((3632, 3665), 'pytest.raises', 'pytest.raises', (['ConfigurationError'], {}), '(ConfigurationError)\n', (3645, 3665), False, 'import pytest\n'), ((3689, 3694), 'tortuga.resourceAdapter.aws.Aws', 'Aws', ([], {}), '()\n', (3692, 3694), False, 'from tortuga.resourceAdapter.aws import Aws\n'), ((5322, 5355), 'pytest.raises', 'pytest.raises', (['ConfigurationError'], {}), '(ConfigurationError)\n', (5335, 5355), False, 'import pytest\n'), ((5379, 5384), 'tortuga.resourceAdapter.aws.Aws', 'Aws', ([], {}), '()\n', (5382, 5384), False, 'from tortuga.resourceAdapter.aws import Aws\n'), ((1128, 1204), 'mock.patch.object', 'patch.object', (['ResourceAdapter', '"""_load_config_from_database"""'], {'return_value': '{}'}), "(ResourceAdapter, '_load_config_from_database', return_value={})\n", (1140, 1204), False, 'from mock import patch\n'), ((1253, 1258), 'tortuga.resourceAdapter.aws.Aws', 'Aws', ([], {}), '()\n', (1256, 1258), False, 'from tortuga.resourceAdapter.aws import Aws\n')]
|
import pygame
import logging
from Framework.Shapes.Box import Box
from Framework.Shapes.Circle import Circle
from Framework.Sprite import *
from Framework.SpriteText import *
from Framework.GeometricGroup import GeometricGroup
from Framework.MouseListener import MouseListener
from GameObjects.gacha.stickman import Stickman
# container instance for the introductory fishing animation
class IntroductionAnimation (pygame.sprite.Group):
    """Sprite group driving the introductory fishing animation.

    Builds the scene (background, dock, stickman, water) in back-to-front
    order and advances a time-scripted animation in ``play``.
    """
    def __init__(self, game):
        super().__init__()
        # window dimensions
        self.w, self.h = pygame.display.get_surface().get_size()
        # background, scaled to fill the window
        self.background = Sprite ("img_bg", resources=game.ResourceCache.Resources)
        self.background.Scale (self.w, self.h)
        self.add (self.background)
        # foreground
        # dock
        self.dock = Sprite ("img_dock", resources=game.ResourceCache.Resources)
        self.dock.Scale (self.w, self.h)
        self.add (self.dock)
        # stickman actor, positioned on the dock
        self.stickman = Stickman (game)
        self.stickman.change_pos ((700,285))
        self.add (self.stickman)
        # water, drawn last so it overlaps the stickman's feet
        self.water = Sprite ("img_fgwater", resources=game.ResourceCache.Resources)
        self.water.Scale (self.w, self.h)
        self.add (self.water)
        # tick count when the animation started (ms since pygame init)
        self.startTime = pygame.time.get_ticks ()
        # whether or not the animation is still running
        self.playing = True
    def play(self, parent):
        """Advance one frame of the scripted intro; returns False when done.

        The script is keyed on elapsed seconds: walk (0-3.8s), reset arms
        (3.8-4s), wind up (4-4.5s), hesitate (4.5-5s), cast (5-5.2s), let the
        line drop (5.2-8s), then finish and trigger ``parent.Roll()``.
        """
        # time elapsed since start of animation, in seconds
        self.time = (pygame.time.get_ticks () - self.startTime) / 1000
        # update children
        self.stickman.update()
        # animation script, keyed on elapsed time
        if self.time < 3.8:
            # approaching end of dock
            self.stickman.change_pos_x (1)
        elif self.time > 3.8 and self.time < 4:
            # reset arms
            self.stickman.arm1_angle = 15
            self.stickman.arm2_angle = -15
        elif self.time > 4 and self.time < 4.5:
            # BEGIN CASTING
            # moving arms to initial position and resetting legs
            self.stickman.arm1_angle += 0.5
            self.stickman.arm2_angle += 0.5
            self.stickman.leg1_angle = 15
            self.stickman.leg2_angle = -15
        elif self.time > 4.5 and self.time < 5:
            # hesitating arms
            self.stickman.arm1_angle += 3
            self.stickman.arm2_angle += 3
        elif self.time > 5 and self.time < 5.2:
            # casting rod
            self.stickman.arm1_angle -= 5
            self.stickman.arm2_angle -= 5
        elif self.time > 5.2 and self.time < 8:
            # make the line go down
            self.stickman.rod.casting = True
        elif self.time > 8:
            # make the line stop going down and declare the animation finished
            self.stickman.rod.casting = False
            self.playing = False
            parent.Roll()
        return self.playing
# container instance for the gacha rolling animation
class FishingAnimation (pygame.sprite.LayeredUpdates, MouseListener):
    """Layered sprite group that plays the gacha roll/reveal animation.

    ``Start`` (re)builds the scene for one card; ``play`` advances one frame,
    pulsing an auxiliary circle whose "snaps" count up the revealed rarity
    before showing the card itself.
    """
    def __init__(self, game, parent):
        super().__init__()
        self.game = game
        self.res = self.game.ResourceCache.Resources
        self.parent = parent
        # constants: geometry of the circle/square decorations and the
        # accent colours used for 4- and 5-star rarities
        self.CIRCLE_RADIUS = 500
        self.SQUARE_SIZE = 250
        self.COL_SQUARE_SIZE = 75
        self.RARITY_4_COL = [96, 123, 230]
        self.RARITY_5_COL = [230, 220, 110]
        # window dimensions and screen centre
        self.w, self.h = pygame.display.get_surface().get_size()
        self.center = (self.w - self.w // 2, self.h - self.h // 2)
    def Start (self, card):
        """Reset the group and build the roll scene for *card*."""
        self.empty ()
        # flat-colour background surface
        self.bg_tex = pygame.Surface ((self.w,self.h))
        self.bg_tex.fill ([87, 151, 255])
        self.background = Sprite (img=self.bg_tex)
        self.background.Scale (self.w, self.h)
        self.add (self.background)
        # main circle piece
        self.circle_piece = Circle (self.center[0] - self.CIRCLE_RADIUS, self.center[1] - self.CIRCLE_RADIUS, self.CIRCLE_RADIUS, 30)
        self.add (self.circle_piece)
        # auxiliary circle piece (the one that pulses outwards)
        self.aux_circle_piece = Circle (self.center[0] - self.CIRCLE_RADIUS, self.center[1] - self.CIRCLE_RADIUS, self.CIRCLE_RADIUS, 15)
        self.add (self.aux_circle_piece)
        # squares: one colour-changing centre square plus three rotating ones
        self.coloured_square = Box (pygame.Rect (self.center[0] - self.COL_SQUARE_SIZE // 2, self.center[1] - self.COL_SQUARE_SIZE // 2, self.COL_SQUARE_SIZE , self.COL_SQUARE_SIZE))
        self.square1 = Box (pygame.Rect (self.center[0] - self.SQUARE_SIZE // 2, self.center[1] - self.SQUARE_SIZE // 2, self.SQUARE_SIZE, self.SQUARE_SIZE), 25)
        self.square2 = Box (pygame.Rect (self.center[0] - self.SQUARE_SIZE // 2, self.center[1] - self.SQUARE_SIZE // 2, self.SQUARE_SIZE, self.SQUARE_SIZE), 25)
        self.square3 = Box (pygame.Rect (self.center[0] - self.SQUARE_SIZE // 2, self.center[1] - self.SQUARE_SIZE // 2, self.SQUARE_SIZE, self.SQUARE_SIZE), 25)
        # animation state flags and pulse bookkeeping
        self.finished = False
        self.animating = True
        self.aux_offset = 0
        self.auxrad = 0
        self.aux_accel_offset = 0
        # Card (information)
        self.card = card
        self.rarity = int(self.card.meta["rarity"])
        self.raritycounter = 1
        # Card (sprite), hidden until the reveal
        self.add (self.card)
        self.card.Scale (200,200)
        self.card.rect.center = self.center
        self.move_to_front (self.card)
        self.card.Hide()
        # Card name text, hidden until the reveal
        self.card_name_text = SpriteText (self.card.meta["name"], font = self.res["fnt_Berlin_48"], Background=[64,64,64])
        self.card_name_text.rect.centerx = self.center[0]
        self.card_name_text.rect.centery = self.h - 200
        self.add (self.card_name_text)
        self.card_name_text.Hide()
        self.move_to_front (self.card_name_text)
        # Card stars: one star sprite per rarity point, parked off-screen
        self.stars = GeometricGroup ()
        for star in range (self.rarity): # add stars
            star_sprite = Sprite ("img_star", resources=self.res)
            star_sprite.rect.x += star * 200
            star_sprite.rect.y = self.h - star_sprite.rect.height
            self.stars.add (star_sprite)
        self.add (self.stars)
        self.stars.change_pos_y (self.h + 800)
        # tick count when the roll animation was (re)started
        self.startTime = pygame.time.get_ticks ()
        # SQUARES
        # single square in the middle which changes colour depending on card rarity
        self.add (self.coloured_square)
        # rotating squares
        self.add (self.square1)
        self.add (self.square2)
        self.add (self.square3)
        # move everything to back
        self.move_to_back (self.square1)
        self.move_to_back (self.square2)
        self.move_to_back (self.square3)
        self.move_to_back (self.background)
        # initialise rotation
        self.square1.Rotate (0)
        self.square2.Rotate (0)
        self.square3.Rotate (0)
        # sound effect played when a high-rarity card is revealed
        self.wow = self.res["se_wow"]
    def play(self):
        """Advance one frame; reveal the card and await a click to continue."""
        # time elapsed since start of animation (scaled; 500 ticks = 1 unit)
        self.time = (pygame.time.get_ticks () - self.startTime) / 500
        if self.animating:
            # NOTE: all transformations have an acceleration
            # rotate squares
            self.square1.Rotate (20 * self.time ** 2)
            self.square2.Rotate (25 * self.time ** 2)
            self.square3.Rotate (30 * self.time ** 2)
            # make the main circle grow larger
            self.circle_piece.Scale (self.CIRCLE_RADIUS + 10 * self.time ** 2, self.CIRCLE_RADIUS + 10 * self.time ** 2)
            self.circle_piece.rect.center = self.center
            # ensure the auxilary circle snaps back to the size of the main circle after scaling out of the screen
            self.auxrad = self.circle_piece.rect.width + (400 + self.aux_accel_offset) * (self.time - self.aux_offset) ** 2
            self.aux_circle_piece.Scale (self.auxrad, self.auxrad)
            self.aux_circle_piece.rect.center = self.center
        # snap the auxilary circle back once it has grown past the screen;
        # each snap counts one step of the rarity reveal
        if self.aux_circle_piece.rect.width - self.CIRCLE_RADIUS > self.w:
            self.aux_offset = self.time
            if self.animating:
                self.aux_accel_offset += 600
            self.raritycounter += 1
            # change rarity square's colour
            if self.raritycounter == 3:
                self.coloured_square.image.fill (self.RARITY_4_COL)
            elif (self.raritycounter == 4):
                self.coloured_square.image.fill (self.RARITY_5_COL)
            # show the card after the rarity has been revealed
            if self.raritycounter == self.rarity:
                self.animating = False
                self.card.Show()
                self.card.Scale (180,180)
                self.card.rect.center = self.center
                self.card_name_text.Show()
                self.coloured_square.Hide()
                # play a special sound effect for good cards
                if self.rarity > 3:
                    self.wow.play()
                self.move_to_front (self.card_name_text)
            elif (self.raritycounter == self.rarity + 1):
                self.stars.change_pos_y (-(self.h + 800))
                logging.info ("rolled {0}".format (self.card.meta))
        # continue rolling on a left-click once the reveal has finished
        if pygame.mouse.get_pressed()[0] and self.raritycounter >= self.rarity + 1:
            self.parent.Roll()
|
[
"GameObjects.gacha.stickman.Stickman",
"pygame.Surface",
"pygame.mouse.get_pressed",
"pygame.Rect",
"Framework.Shapes.Circle.Circle",
"pygame.display.get_surface",
"Framework.GeometricGroup.GeometricGroup",
"pygame.time.get_ticks"
] |
[((1007, 1021), 'GameObjects.gacha.stickman.Stickman', 'Stickman', (['game'], {}), '(game)\n', (1015, 1021), False, 'from GameObjects.gacha.stickman import Stickman\n'), ((1370, 1393), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (1391, 1393), False, 'import pygame\n'), ((3751, 3783), 'pygame.Surface', 'pygame.Surface', (['(self.w, self.h)'], {}), '((self.w, self.h))\n', (3765, 3783), False, 'import pygame\n'), ((4016, 4125), 'Framework.Shapes.Circle.Circle', 'Circle', (['(self.center[0] - self.CIRCLE_RADIUS)', '(self.center[1] - self.CIRCLE_RADIUS)', 'self.CIRCLE_RADIUS', '(30)'], {}), '(self.center[0] - self.CIRCLE_RADIUS, self.center[1] - self.\n CIRCLE_RADIUS, self.CIRCLE_RADIUS, 30)\n', (4022, 4125), False, 'from Framework.Shapes.Circle import Circle\n'), ((4221, 4330), 'Framework.Shapes.Circle.Circle', 'Circle', (['(self.center[0] - self.CIRCLE_RADIUS)', '(self.center[1] - self.CIRCLE_RADIUS)', 'self.CIRCLE_RADIUS', '(15)'], {}), '(self.center[0] - self.CIRCLE_RADIUS, self.center[1] - self.\n CIRCLE_RADIUS, self.CIRCLE_RADIUS, 15)\n', (4227, 4330), False, 'from Framework.Shapes.Circle import Circle\n'), ((6024, 6040), 'Framework.GeometricGroup.GeometricGroup', 'GeometricGroup', ([], {}), '()\n', (6038, 6040), False, 'from Framework.GeometricGroup import GeometricGroup\n'), ((6510, 6533), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (6531, 6533), False, 'import pygame\n'), ((4423, 4571), 'pygame.Rect', 'pygame.Rect', (['(self.center[0] - self.COL_SQUARE_SIZE // 2)', '(self.center[1] - self.COL_SQUARE_SIZE // 2)', 'self.COL_SQUARE_SIZE', 'self.COL_SQUARE_SIZE'], {}), '(self.center[0] - self.COL_SQUARE_SIZE // 2, self.center[1] - \n self.COL_SQUARE_SIZE // 2, self.COL_SQUARE_SIZE, self.COL_SQUARE_SIZE)\n', (4434, 4571), False, 'import pygame\n'), ((4601, 4733), 'pygame.Rect', 'pygame.Rect', (['(self.center[0] - self.SQUARE_SIZE // 2)', '(self.center[1] - self.SQUARE_SIZE // 2)', 'self.SQUARE_SIZE', 
'self.SQUARE_SIZE'], {}), '(self.center[0] - self.SQUARE_SIZE // 2, self.center[1] - self.\n SQUARE_SIZE // 2, self.SQUARE_SIZE, self.SQUARE_SIZE)\n', (4612, 4733), False, 'import pygame\n'), ((4763, 4895), 'pygame.Rect', 'pygame.Rect', (['(self.center[0] - self.SQUARE_SIZE // 2)', '(self.center[1] - self.SQUARE_SIZE // 2)', 'self.SQUARE_SIZE', 'self.SQUARE_SIZE'], {}), '(self.center[0] - self.SQUARE_SIZE // 2, self.center[1] - self.\n SQUARE_SIZE // 2, self.SQUARE_SIZE, self.SQUARE_SIZE)\n', (4774, 4895), False, 'import pygame\n'), ((4925, 5057), 'pygame.Rect', 'pygame.Rect', (['(self.center[0] - self.SQUARE_SIZE // 2)', '(self.center[1] - self.SQUARE_SIZE // 2)', 'self.SQUARE_SIZE', 'self.SQUARE_SIZE'], {}), '(self.center[0] - self.SQUARE_SIZE // 2, self.center[1] - self.\n SQUARE_SIZE // 2, self.SQUARE_SIZE, self.SQUARE_SIZE)\n', (4936, 5057), False, 'import pygame\n'), ((548, 576), 'pygame.display.get_surface', 'pygame.display.get_surface', ([], {}), '()\n', (574, 576), False, 'import pygame\n'), ((1573, 1596), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (1594, 1596), False, 'import pygame\n'), ((3549, 3577), 'pygame.display.get_surface', 'pygame.display.get_surface', ([], {}), '()\n', (3575, 3577), False, 'import pygame\n'), ((7275, 7298), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (7296, 7298), False, 'import pygame\n'), ((9530, 9556), 'pygame.mouse.get_pressed', 'pygame.mouse.get_pressed', ([], {}), '()\n', (9554, 9556), False, 'import pygame\n')]
|
import os
class Config:
    """Base configuration shared by every environment."""

    # Destination directory for uploaded photo files.
    UPLOADED_PHOTOS_DEST = 'app/static/photos'

    # Secrets come from the environment; both are None when unset.
    API_KEY = os.getenv('API_KEY')
    SECRET_KEY = os.getenv('SECRET_KEY')
# class TestConfig(Config):
# SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://'
class ProdConfig(Config):
    """Production configuration.

    NOTE(review): this URI embeds live database credentials in source
    control — rotate them and load the value from the environment
    (e.g. a DATABASE_URL variable) instead of hard-coding it.
    """
    SQLALCHEMY_DATABASE_URI = "postgresql://vwdxutaimazqis:200cb9d2da6110fdeefa73ceafa92732f4f8c6e5757af42e705ce23d96edb9d8@ec2-3-234-22-132.compute-1.amazonaws.com:5432/d3sn5mpsvgl0r0?sslmode=require"
class DevConfig(Config):
    """Development configuration: local Postgres database, debug enabled."""
    # NOTE(review): credentials below are for local development only.
    SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://deepeters:password@localhost/currency'
    DEBUG = True
# Map environment names onto their configuration classes.
config_options = dict(
    development=DevConfig,
    production=ProdConfig,
    # The 'test' entry (TestConfig) is intentionally disabled.
)
|
[
"os.environ.get"
] |
[((88, 113), 'os.environ.get', 'os.environ.get', (['"""API_KEY"""'], {}), "('API_KEY')\n", (102, 113), False, 'import os\n'), ((131, 159), 'os.environ.get', 'os.environ.get', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (145, 159), False, 'import os\n')]
|
import os
#--------------------------------------#
# 設定ファイル
#--------------------------------------#
# 環境
# Environment
ENV = 'development'
DEBUG = True

# Database settings: credentials come from the environment, with
# development-friendly fallbacks.
SQLALCHEMY_DATABASE_URI = (
    'mysql+pymysql://{user}:{password}@{host}/{db_name}?charset=utf8'.format(
        user=os.getenv('DB_USER', 'root'),
        password=os.getenv('DB_PASSWORD', ''),
        host=os.getenv('DB_HOST', 'localhost'),
        db_name='flask_chat',
    )
)

# NOTE(review): hard-coded secret key; load from the environment in production.
SECRET_KEY = 'secret!'

# Routing
APPLICATION_ROOT = '/'
|
[
"os.getenv"
] |
[((262, 290), 'os.getenv', 'os.getenv', (['"""DB_USER"""', '"""root"""'], {}), "('DB_USER', 'root')\n", (271, 290), False, 'import os\n'), ((308, 336), 'os.getenv', 'os.getenv', (['"""DB_PASSWORD"""', '""""""'], {}), "('DB_PASSWORD', '')\n", (317, 336), False, 'import os\n'), ((350, 383), 'os.getenv', 'os.getenv', (['"""DB_HOST"""', '"""localhost"""'], {}), "('DB_HOST', 'localhost')\n", (359, 383), False, 'import os\n')]
|
#!/usr/bin/env python3
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: t; c-basic-offset: 4; tab-width: 4 -*-
import os
import sys
import pstats
import io
import cProfile
import gi
# GTK typelib versions must be pinned before the first gi.repository import.
gi.require_version('Gtk', '3.0')
gi.require_version('GdkPixbuf', '2.0')
try:
    # Prefer GtkSource 4; fall back to the 3.0 ABI on older distributions.
    gi.require_version('GtkSource', '4')
except ValueError:
    gi.require_version('GtkSource', '3.0')
gi.require_version('WebKit2', '4.0')
from gi.repository import Gtk, Gio, GLib  # noqa: E402
from pathlib import Path  # noqa: E402
from typing import Type  # noqa: E402
from types import TracebackType  # noqa: E402
# Register the bundled GResource file that ships next to this script.
BASE_PATH = Path(__file__).absolute().parent
resources = Gio.resource_load(str(BASE_PATH / "ui.gresource"))
Gio.resources_register(resources)
# Optional startup profiling, enabled by setting STARTUP_PROFILE in the
# environment; the report is printed by startup_time() once the UI is idle.
PROFILER = None
if 'STARTUP_PROFILE' in os.environ:
    PROFILER = cProfile.Profile()
    PROFILER.enable()
def install_excepthook() -> None:
    """Install an unhandled-exception hook that tears down GTK and exits.

    The previously installed hook runs first (preserving default traceback
    printing); afterwards every nested GTK main loop is quit and the
    process terminates.
    """
    previous_hook = sys.excepthook

    def _quit_on_error(type_: Type[BaseException], value: BaseException, traceback: TracebackType) -> None:
        previous_hook(type_, value, traceback)
        # Unwind any nested Gtk.main() invocations before exiting.
        while Gtk.main_level():
            Gtk.main_quit()
        sys.exit()

    sys.excepthook = _quit_on_error
def startup_time() -> None:
    """Stop the startup profiler and print its 20 hottest functions."""
    PROFILER.disable()
    report = io.StringIO()
    stats = pstats.Stats(PROFILER, stream=report)
    stats.sort_stats(pstats.SortKey.TIME).print_stats(20)
    print(report.getvalue())
def main() -> None:
    """Configure logging and the exception hook, then run the application."""
    if PROFILER:
        # Print the startup profile once the main loop becomes idle.
        GLib.idle_add(startup_time)
    # Imported lazily so logging is configured before the app modules load.
    from ocrd_utils import initLogging
    initLogging()
    from ocrd_browser.application import OcrdBrowserApplication
    install_excepthook()
    app = OcrdBrowserApplication()
    app.run(sys.argv)
if __name__ == "__main__":
    # Make the package importable when this script is run straight from a
    # source checkout (the parent directory holds the package).
    sys.path.append(str(BASE_PATH.parent))
    main()
    sys.exit()
|
[
"gi.require_version",
"io.StringIO",
"ocrd_browser.application.OcrdBrowserApplication",
"pstats.Stats",
"gi.repository.Gtk.main_level",
"ocrd_utils.initLogging",
"cProfile.Profile",
"pathlib.Path",
"gi.repository.Gtk.main_quit",
"gi.repository.Gio.resources_register",
"gi.repository.GLib.idle_add",
"sys.exit"
] |
[((188, 220), 'gi.require_version', 'gi.require_version', (['"""Gtk"""', '"""3.0"""'], {}), "('Gtk', '3.0')\n", (206, 220), False, 'import gi\n'), ((221, 259), 'gi.require_version', 'gi.require_version', (['"""GdkPixbuf"""', '"""2.0"""'], {}), "('GdkPixbuf', '2.0')\n", (239, 259), False, 'import gi\n'), ((369, 405), 'gi.require_version', 'gi.require_version', (['"""WebKit2"""', '"""4.0"""'], {}), "('WebKit2', '4.0')\n", (387, 405), False, 'import gi\n'), ((694, 727), 'gi.repository.Gio.resources_register', 'Gio.resources_register', (['resources'], {}), '(resources)\n', (716, 727), False, 'from gi.repository import Gtk, Gio, GLib\n'), ((269, 305), 'gi.require_version', 'gi.require_version', (['"""GtkSource"""', '"""4"""'], {}), "('GtkSource', '4')\n", (287, 305), False, 'import gi\n'), ((796, 814), 'cProfile.Profile', 'cProfile.Profile', ([], {}), '()\n', (812, 814), False, 'import cProfile\n'), ((1285, 1298), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1296, 1298), False, 'import io\n'), ((1538, 1551), 'ocrd_utils.initLogging', 'initLogging', ([], {}), '()\n', (1549, 1551), False, 'from ocrd_utils import initLogging\n'), ((1651, 1675), 'ocrd_browser.application.OcrdBrowserApplication', 'OcrdBrowserApplication', ([], {}), '()\n', (1673, 1675), False, 'from ocrd_browser.application import OcrdBrowserApplication\n'), ((1802, 1812), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1810, 1812), False, 'import sys\n'), ((329, 367), 'gi.require_version', 'gi.require_version', (['"""GtkSource"""', '"""3.0"""'], {}), "('GtkSource', '3.0')\n", (347, 367), False, 'import gi\n'), ((1128, 1144), 'gi.repository.Gtk.main_level', 'Gtk.main_level', ([], {}), '()\n', (1142, 1144), False, 'from gi.repository import Gtk, Gio, GLib\n'), ((1182, 1192), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1190, 1192), False, 'import sys\n'), ((1467, 1494), 'gi.repository.GLib.idle_add', 'GLib.idle_add', (['startup_time'], {}), '(startup_time)\n', (1480, 1494), False, 'from gi.repository import Gtk, 
Gio, GLib\n'), ((598, 612), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (602, 612), False, 'from pathlib import Path\n'), ((1158, 1173), 'gi.repository.Gtk.main_quit', 'Gtk.main_quit', ([], {}), '()\n', (1171, 1173), False, 'from gi.repository import Gtk, Gio, GLib\n'), ((1308, 1340), 'pstats.Stats', 'pstats.Stats', (['PROFILER'], {'stream': 's'}), '(PROFILER, stream=s)\n', (1320, 1340), False, 'import pstats\n')]
|
# Copyright (C) 2020 GreenWaves Technologies, SAS
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import hashlib
from copy import deepcopy
import numpy as np
from graph.dim import Dim
from graph.types import (ActivationParameters, ConstantInputParameters, NNEdge,
ReshapeParameters)
from importer.common.provisional_dim import ProvisionalDim
from quantization.new_qrec import QRec
from quantization.qtype import QType
from utils.node_id import NodeId
from ..tflite_schema_head.ActivationFunctionType import ActivationFunctionType
from .handler import Handler
class BackendHandler(Handler):
""" This class is base backend handler class.
All backend operator handler class MUST inherit this class.
In backend, operator handler class's name should be pascal case of file name
which should be snake case.
Use ONNX operator name as class name.
"""
VAR_COUNT = 0
TF_ACTIVATIONS = {
ActivationFunctionType.RELU: "relu",
ActivationFunctionType.RELU6: "relu6",
ActivationFunctionType.SIGN_BIT: "sign_bit",
ActivationFunctionType.TANH: "tanh"
}
@classmethod
def _get_real_dim(cls, shape):
return np.array([elem for elem in shape if elem is not None])
@classmethod
def _get_real_dims(cls, dims):
return [cls._get_real_dim(dim.shape) for dim in dims]
@classmethod
def _verify_constant(cls, inp):
if cls._is_constant(inp):
return cls._get_constant(inp)
raise ValueError("expected node %s to be constant input" % inp[0].name)
@classmethod
def _is_constant(cls, inp):
return isinstance(inp[0], ConstantInputParameters)
@classmethod
def _get_constant(cls, inp):
return inp[0].value
@classmethod
def _slice_len(cls, vstart, vend, vstep):
if vstep < 0:
vstart, vend = vend, vstart
vstep = -vstep
return (vend - vstart - 1) // vstep + 1
@classmethod
def fuse_activation(cls, tfl_opts, name, params, **kwargs):
G = kwargs['G']
opts = kwargs['opts']
ext = hashlib.sha1(name.encode(
"UTF-8")).hexdigest()[:8] if opts.get('anonymise') else 'activation'
if opts.get('load_quantization') and NodeId(params) in G.quantization:
node_qrec = G.quantization[NodeId(params)]
else:
node_qrec = None
# if node_qrec is not None and None in node_qrec.in_qs + node_qrec.out_qs:
# # one of the input is a constant or strange behaviour -> may be is something fusions will get rid of
# return add_node(self.G, node)
aparams = None
if tfl_opts.FusedActivationFunction() == ActivationFunctionType.NONE:
if node_qrec is not None and node_qrec.ktype.startswith('scaled'): # and opts.get('insert_relus'):
# here we have no activation in an asymmetric qtype -> may be an omitted relu
if node_qrec.out_qs[0] is not None and node_qrec.out_qs[0].min_val == 0:
if np.all(np.round(node_qrec.out_qs[0].max_val) == 6):
aparams = ActivationParameters.get_activation(
'relu6', name + f"_{ext}")
else:
aparams = ActivationParameters.get_activation(
'relu', name + f"_{ext}")
else:
aparams = ActivationParameters.get_activation(cls.TF_ACTIVATIONS[tfl_opts.FusedActivationFunction()],
name + f"_{ext}")
if aparams:
G.add_edge(NNEdge(from_node=params, to_node=aparams))
if opts.get('load_quantization'):
# In between the fused operation and activation the
# transfer is in int32 representation
node_qrec = G.quantization[NodeId(params)]
ina_qtype = deepcopy(node_qrec.out_qs[0])
outa_qtype = deepcopy(ina_qtype)
G.quantization[NodeId(aparams)] = QRec.scaled(
in_qs=[ina_qtype], out_qs=[outa_qtype])
params = aparams
return params
@classmethod
def remove_unspecified_dim(cls, shape):
return [dim for dim in shape if dim is not None]
@classmethod
def get_all_const_inputs(cls, G, all_nodes, opts, node, params,
exclude=None, names=None,
short_names=None,
adjust_transposes=None,
load_quantization_if_present=False,
skip_empty_tensors=True):
if exclude is None:
exclude = []
if names is None:
names = [None] * len(node.inputs)
if short_names is None:
short_names = [None] * len(node.inputs)
if adjust_transposes is None:
adjust_transposes = [None] * len(node.nputs)
const_params = []
# TODO - this should just be picking up the existing constant nodes not creating new ones.
for idx, tensor in enumerate(node.input):
if tensor is None or idx in exclude or (skip_empty_tensors and not tensor.is_constant):
const_params.append(None)
continue
tensor.used = True
if tensor not in all_nodes:
# this can occur for RNN/LSTM state nodes that have a buffer idx of 0
const_param = ConstantInputParameters(
tensor.name,
dims=Dim.unnamed(tensor.shape),
value=tensor.value)
all_nodes[tensor] = (
const_param,
0,
ProvisionalDim.from_tflite_shape(tensor.shape)
)
else:
const_param = all_nodes[tensor][0]
# some constant nodes can be connected to multiple nodes
# changing their name is not a good idea
if const_param not in G.nodes():
const_param.name = names[idx]
const_param.adjust_transpose = adjust_transposes[idx]
const_param.is_mutated = node.is_mutated(idx)
const_param.is_intermediate = node.is_intermediate(idx)
const_param.short_name = short_names[idx]
const_param.value = np.reshape(tensor.value, tensor.shape)
if opts.get('load_quantization'):
G.quantization[NodeId(const_param)] = QRec.scaled(
in_qs=[tensor.qtype],
out_qs=[tensor.qtype])
if load_quantization_if_present and tensor.qtype:
const_param.value_quantization = tensor.qtype
const_params.append(const_param)
G.add_edge(NNEdge(const_param, params, to_idx=idx))
return const_params
@classmethod
def remove_none_from_constants(cls, inputs, model):
if None not in model:
return
for inp in inputs:
if not isinstance(inp[0], ConstantInputParameters):
continue
val = inp[0].value
if val is None or len(val.shape) != len(model):
continue
assert all(val.shape[idx] == 1 for idx, dim in enumerate(model) if dim is None),\
"value has axis that is larger than one in an unknown dimension"
new_shape = [dim for idx, dim in enumerate(
val.shape) if model[idx] is not None]
inp[0].value = np.reshape(inp[0].value, new_shape)
inp[0].dims = Dim.unnamed(new_shape)
@classmethod
def convert_to_symmetric(cls, qtypes):
return [QType.from_min_max_sq(qtype.min_val, qtype.max_val)
if qtype is not None and (qtype.asymmetric or not qtype.signed) else qtype for qtype in qtypes]
@classmethod
def load_tf_quantization(cls, input_tensors, output_tensors, in_qs=None, out_qs=None, qrec_class=None):
if qrec_class is None:
qrec = QRec.scaled(
in_qs=cls.convert_to_symmetric(
in_qs if in_qs is not None else [tensor.qtype if tensor is not None else None for tensor in input_tensors]),
out_qs=cls.convert_to_symmetric(
out_qs if out_qs is not None else [tensor.qtype for tensor in output_tensors]))
else:
qrec = qrec_class(
in_qs=cls.convert_to_symmetric(
in_qs if in_qs is not None else [tensor.qtype if tensor is not None else None for tensor in input_tensors]),
out_qs=cls.convert_to_symmetric(
out_qs if out_qs is not None else [tensor.qtype for tensor in output_tensors]))
return qrec
@classmethod
def remove_known_batch_dimension(cls, G, x, node, batch_axis=0):
x_shape = x[2].shape
if x_shape[batch_axis] is not None:
if x_shape[0] > 1:
raise ValueError(
f'multi batch (n={x_shape[batch_axis]}) operations are not supported by {node.name}')
rparams = ReshapeParameters(
f'{node.name}_batch',
old_shape=Dim.unnamed(x_shape),
shape=Dim.unnamed(x_shape[0:batch_axis:]+x_shape[batch_axis+1::]))
if G.quantization:
qrec = G.quantization[NodeId(x[0])]
G.quantization[NodeId(rparams)] = QRec.copy_ktype(
qrec,
in_qs=[qrec.out_qs[0]],
out_qs=[qrec.out_qs[0]])
G.add_edge(
NNEdge(from_node=x[0], to_node=rparams, from_idx=x[1], to_idx=0))
return (rparams, 0, ProvisionalDim(x_shape[0:batch_axis:]+[None]+x_shape[batch_axis+1::]))
else:
return x
|
[
"quantization.new_qrec.QRec.copy_ktype",
"utils.node_id.NodeId",
"copy.deepcopy",
"graph.types.NNEdge",
"importer.common.provisional_dim.ProvisionalDim",
"importer.common.provisional_dim.ProvisionalDim.from_tflite_shape",
"numpy.array",
"numpy.reshape",
"quantization.qtype.QType.from_min_max_sq",
"quantization.new_qrec.QRec.scaled",
"numpy.round",
"graph.types.ActivationParameters.get_activation",
"graph.dim.Dim.unnamed"
] |
[((1811, 1865), 'numpy.array', 'np.array', (['[elem for elem in shape if elem is not None]'], {}), '([elem for elem in shape if elem is not None])\n', (1819, 1865), True, 'import numpy as np\n'), ((8214, 8249), 'numpy.reshape', 'np.reshape', (['inp[0].value', 'new_shape'], {}), '(inp[0].value, new_shape)\n', (8224, 8249), True, 'import numpy as np\n'), ((8276, 8298), 'graph.dim.Dim.unnamed', 'Dim.unnamed', (['new_shape'], {}), '(new_shape)\n', (8287, 8298), False, 'from graph.dim import Dim\n'), ((2882, 2896), 'utils.node_id.NodeId', 'NodeId', (['params'], {}), '(params)\n', (2888, 2896), False, 'from utils.node_id import NodeId\n'), ((2955, 2969), 'utils.node_id.NodeId', 'NodeId', (['params'], {}), '(params)\n', (2961, 2969), False, 'from utils.node_id import NodeId\n'), ((4250, 4291), 'graph.types.NNEdge', 'NNEdge', ([], {'from_node': 'params', 'to_node': 'aparams'}), '(from_node=params, to_node=aparams)\n', (4256, 4291), False, 'from graph.types import ActivationParameters, ConstantInputParameters, NNEdge, ReshapeParameters\n'), ((4549, 4578), 'copy.deepcopy', 'deepcopy', (['node_qrec.out_qs[0]'], {}), '(node_qrec.out_qs[0])\n', (4557, 4578), False, 'from copy import deepcopy\n'), ((4608, 4627), 'copy.deepcopy', 'deepcopy', (['ina_qtype'], {}), '(ina_qtype)\n', (4616, 4627), False, 'from copy import deepcopy\n'), ((4678, 4729), 'quantization.new_qrec.QRec.scaled', 'QRec.scaled', ([], {'in_qs': '[ina_qtype]', 'out_qs': '[outa_qtype]'}), '(in_qs=[ina_qtype], out_qs=[outa_qtype])\n', (4689, 4729), False, 'from quantization.new_qrec import QRec\n'), ((7029, 7067), 'numpy.reshape', 'np.reshape', (['tensor.value', 'tensor.shape'], {}), '(tensor.value, tensor.shape)\n', (7039, 7067), True, 'import numpy as np\n'), ((7477, 7516), 'graph.types.NNEdge', 'NNEdge', (['const_param', 'params'], {'to_idx': 'idx'}), '(const_param, params, to_idx=idx)\n', (7483, 7516), False, 'from graph.types import ActivationParameters, ConstantInputParameters, NNEdge, ReshapeParameters\n'), 
((8376, 8427), 'quantization.qtype.QType.from_min_max_sq', 'QType.from_min_max_sq', (['qtype.min_val', 'qtype.max_val'], {}), '(qtype.min_val, qtype.max_val)\n', (8397, 8427), False, 'from quantization.qtype import QType\n'), ((10120, 10190), 'quantization.new_qrec.QRec.copy_ktype', 'QRec.copy_ktype', (['qrec'], {'in_qs': '[qrec.out_qs[0]]', 'out_qs': '[qrec.out_qs[0]]'}), '(qrec, in_qs=[qrec.out_qs[0]], out_qs=[qrec.out_qs[0]])\n', (10135, 10190), False, 'from quantization.new_qrec import QRec\n'), ((10292, 10356), 'graph.types.NNEdge', 'NNEdge', ([], {'from_node': 'x[0]', 'to_node': 'rparams', 'from_idx': 'x[1]', 'to_idx': '(0)'}), '(from_node=x[0], to_node=rparams, from_idx=x[1], to_idx=0)\n', (10298, 10356), False, 'from graph.types import ActivationParameters, ConstantInputParameters, NNEdge, ReshapeParameters\n'), ((10390, 10463), 'importer.common.provisional_dim.ProvisionalDim', 'ProvisionalDim', (['(x_shape[0:batch_axis] + [None] + x_shape[batch_axis + 1:])'], {}), '(x_shape[0:batch_axis] + [None] + x_shape[batch_axis + 1:])\n', (10404, 10463), False, 'from importer.common.provisional_dim import ProvisionalDim\n'), ((4505, 4519), 'utils.node_id.NodeId', 'NodeId', (['params'], {}), '(params)\n', (4511, 4519), False, 'from utils.node_id import NodeId\n'), ((4659, 4674), 'utils.node_id.NodeId', 'NodeId', (['aparams'], {}), '(aparams)\n', (4665, 4674), False, 'from utils.node_id import NodeId\n'), ((6382, 6428), 'importer.common.provisional_dim.ProvisionalDim.from_tflite_shape', 'ProvisionalDim.from_tflite_shape', (['tensor.shape'], {}), '(tensor.shape)\n', (6414, 6428), False, 'from importer.common.provisional_dim import ProvisionalDim\n'), ((7177, 7233), 'quantization.new_qrec.QRec.scaled', 'QRec.scaled', ([], {'in_qs': '[tensor.qtype]', 'out_qs': '[tensor.qtype]'}), '(in_qs=[tensor.qtype], out_qs=[tensor.qtype])\n', (7188, 7233), False, 'from quantization.new_qrec import QRec\n'), ((9882, 9902), 'graph.dim.Dim.unnamed', 'Dim.unnamed', (['x_shape'], {}), 
'(x_shape)\n', (9893, 9902), False, 'from graph.dim import Dim\n'), ((9926, 9987), 'graph.dim.Dim.unnamed', 'Dim.unnamed', (['(x_shape[0:batch_axis] + x_shape[batch_axis + 1:])'], {}), '(x_shape[0:batch_axis] + x_shape[batch_axis + 1:])\n', (9937, 9987), False, 'from graph.dim import Dim\n'), ((10056, 10068), 'utils.node_id.NodeId', 'NodeId', (['x[0]'], {}), '(x[0])\n', (10062, 10068), False, 'from utils.node_id import NodeId\n'), ((10101, 10116), 'utils.node_id.NodeId', 'NodeId', (['rparams'], {}), '(rparams)\n', (10107, 10116), False, 'from utils.node_id import NodeId\n'), ((3760, 3822), 'graph.types.ActivationParameters.get_activation', 'ActivationParameters.get_activation', (['"""relu6"""', "(name + f'_{ext}')"], {}), "('relu6', name + f'_{ext}')\n", (3795, 3822), False, 'from graph.types import ActivationParameters, ConstantInputParameters, NNEdge, ReshapeParameters\n'), ((3912, 3973), 'graph.types.ActivationParameters.get_activation', 'ActivationParameters.get_activation', (['"""relu"""', "(name + f'_{ext}')"], {}), "('relu', name + f'_{ext}')\n", (3947, 3973), False, 'from graph.types import ActivationParameters, ConstantInputParameters, NNEdge, ReshapeParameters\n'), ((6201, 6226), 'graph.dim.Dim.unnamed', 'Dim.unnamed', (['tensor.shape'], {}), '(tensor.shape)\n', (6212, 6226), False, 'from graph.dim import Dim\n'), ((7154, 7173), 'utils.node_id.NodeId', 'NodeId', (['const_param'], {}), '(const_param)\n', (7160, 7173), False, 'from utils.node_id import NodeId\n'), ((3681, 3718), 'numpy.round', 'np.round', (['node_qrec.out_qs[0].max_val'], {}), '(node_qrec.out_qs[0].max_val)\n', (3689, 3718), True, 'import numpy as np\n')]
|
"""
Module to deploy WhoTracksMe site to an s3 bucket.
Usage:
deploy_to_s3 <bucket_name> [<prefix>] [--production]
Options:
-h, --help Show help message.
--production Production deployment (set cache-control metadata)
"""
import os
import boto3
from docopt import docopt
from mimetypes import MimeTypes
from concurrent.futures import ThreadPoolExecutor
def iterate_bucket(s3_client, bucket_name, bucket_prefix):
pageinator = s3_client.get_paginator('list_objects_v2')
for page in pageinator.paginate(Bucket=bucket_name, Prefix=bucket_prefix):
if page['KeyCount'] == 0:
continue
for item in page['Contents']:
yield item['Key']
def get_max_age(path, filename):
if path.startswith('static/'):
# font resources: one month cache
if 'font-awesome-4.7.0' in path or 'fonts' in path:
return 2592000
# minified JS: one week cache
if '.min.js' in filename or '.min.css' in filename:
return 604800
# one week cache
return 604800
elif path.startswith('data/'):
# trackerdb: 1day cache
if 'trackerdb.' in path:
return 86400
# rest of data directory: 1 week cache
return 604800
# default 1day cache for html pages
return 86400
def get_cache_control(path, filename, production=False):
if not production:
# one minute cache
return 'max-age=60'
return f'max-age={get_max_age(path, filename)}'
def get_content_type(local_path):
mime = MimeTypes()
guessed_type = mime.guess_type(local_path)[0]
if guessed_type is None:
if local_path.endswith('.css.map') or local_path.endswith('.js.map'):
return 'application/json'
elif local_path.endswith('.otf'):
return 'font/opentype'
elif local_path.endswith('.eot'):
return 'application/vnd.ms-fontobject'
elif local_path.endswith('.ttf'):
return 'application/font-sfnt'
elif local_path.endswith('.woff'):
return 'application/font-woff'
elif local_path.endswith('.woff2'):
return 'application/font-woff2'
elif local_path.endswith('.zip'):
return 'application/zip';
return 'text/html'
return guessed_type
if __name__ == '__main__':
args = docopt(__doc__)
bucket_name = args['<bucket_name>']
bucket_prefix = args['<prefix>'] or '/'
production = args['--production']
site_dir = './_site'
if bucket_prefix[0] != '/':
bucket_prefix = '/' + bucket_prefix
print('Deploying site to ', f's3://{bucket_name}{bucket_prefix}')
# check site build exists
if not os.path.isdir(site_dir):
print('_site directory does not exist. You need to build the site before uploading')
exit(1)
# connect s3
s3_client = boto3.client('s3')
# list existing bucket contents
existing_keys = set(iterate_bucket(s3_client, bucket_name, bucket_prefix[1:]))
print('Bucket contains', len(existing_keys), 'pages')
uploaded = 0
redirected = 0
def upload_file_to_s3(path, filename):
# path to local file
local_path = os.path.join(path, filename)
# relative folder of file
s3_suffix = path[len(site_dir) + 1:]
# full path from root of bucket
s3_path = os.path.join(bucket_prefix, s3_suffix, filename)[1:]
# metadata to set on file
cache_control = get_cache_control(s3_suffix, filename, production=production)
content_type = get_content_type(local_path)
print('put', local_path, s3_path)
with open(local_path, 'rb') as fp:
s3_client.put_object(Bucket=bucket_name, Key=s3_path, Body=fp,
CacheControl=cache_control,
ContentType=content_type)
# setup redirects
html_path = f'{s3_path}.html'
if html_path in existing_keys:
print(f'redirect {html_path} to /{s3_path}')
s3_client.put_object(Bucket=bucket_name, Key=html_path,
WebsiteRedirectLocation=f'/{s3_path}')
# upload + redirect
return True, True
else:
# upload, no redirect
return True, False
with ThreadPoolExecutor(max_workers=5) as executor:
for (dirpath, dirnames, filenames) in os.walk(site_dir):
print('Enter', dirpath)
files_to_upload = [f for f in filenames if not f[0] == '.']
uploads = executor.map(upload_file_to_s3, [dirpath] * len(files_to_upload), files_to_upload)
for (did_upload, did_rediect) in uploads:
if did_upload:
uploaded += 1
if did_rediect:
redirected += 1
print(f'Complete: uploaded {uploaded}, redirected {redirected}')
|
[
"boto3.client",
"docopt.docopt",
"os.path.isdir",
"os.walk",
"mimetypes.MimeTypes",
"concurrent.futures.ThreadPoolExecutor",
"os.path.join"
] |
[((1578, 1589), 'mimetypes.MimeTypes', 'MimeTypes', ([], {}), '()\n', (1587, 1589), False, 'from mimetypes import MimeTypes\n'), ((2384, 2399), 'docopt.docopt', 'docopt', (['__doc__'], {}), '(__doc__)\n', (2390, 2399), False, 'from docopt import docopt\n'), ((2904, 2922), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (2916, 2922), False, 'import boto3\n'), ((2736, 2759), 'os.path.isdir', 'os.path.isdir', (['site_dir'], {}), '(site_dir)\n', (2749, 2759), False, 'import os\n'), ((3231, 3259), 'os.path.join', 'os.path.join', (['path', 'filename'], {}), '(path, filename)\n', (3243, 3259), False, 'import os\n'), ((4356, 4389), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {'max_workers': '(5)'}), '(max_workers=5)\n', (4374, 4389), False, 'from concurrent.futures import ThreadPoolExecutor\n'), ((4449, 4466), 'os.walk', 'os.walk', (['site_dir'], {}), '(site_dir)\n', (4456, 4466), False, 'import os\n'), ((3397, 3445), 'os.path.join', 'os.path.join', (['bucket_prefix', 's3_suffix', 'filename'], {}), '(bucket_prefix, s3_suffix, filename)\n', (3409, 3445), False, 'import os\n')]
|
import os
from approvaltests import verify
from approvaltests.reporters.python_native_reporter import *
def test_files_identical(tmpdir):
file1 = os.path.join(str(tmpdir), "a.received.txt")
file2 = os.path.join(str(tmpdir), "b.approved.txt")
identical_contents = "abc"
with open(file1, "w") as f1:
f1.write(identical_contents)
with open(file2, "w") as f2:
f2.write(identical_contents)
verify(calculate_diff(file1, file2))
def test_files_differ(tmpdir):
file1 = os.path.join(str(tmpdir), "a.received.txt")
file2 = os.path.join(str(tmpdir), "b.approved.txt")
with open(file1, "w") as f1:
f1.write("abc")
with open(file2, "w") as f2:
f2.write("def")
diff = calculate_diff(file1, file2)
diff = diff.replace(str(tmpdir), "tmpdir") # use scrubber in future
diff = diff.replace('\\', '/')
verify(diff)
|
[
"approvaltests.verify"
] |
[((878, 890), 'approvaltests.verify', 'verify', (['diff'], {}), '(diff)\n', (884, 890), False, 'from approvaltests import verify\n')]
|
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio.Alphabet import IUPAC
from Bio.Alphabet import generic_dna
from Bio.Restriction import *
##################################
#FUNCTIONS:
def obtainFastaSequences(filename):
handle = open(filename)
records = []
for seqrecord in SeqIO.parse(handle, "fasta"):
records.append(seqrecord)
#print(seqrecord.id)
#print(len(seqrecord))
#print(len(records))
return records
def getOligos(filename):
constructs = []
currentconstruct = 'foo'
for seqrec in SeqIO.parse(filename, "fasta"):
if not seqrec.id.startswith(currentconstruct):
currentconstruct = seqrec.id[0:seqrec.id.rfind(';')]
constructs.append([currentconstruct])
constructs[-1].append(seqrec)
else:
constructs[-1].append(seqrec)
return constructs
def checkConstruct(construct,lengthmax,filenum,ampprimf,ampprimr,barcode,assemblyprimf,assemblyprimr,padding_between_btsaI_ampR,fwdre=[],revre=[]):
oligos = construct[1:]
count_first = 0
count_mid = 0
count_last = 0
BspQI_length = 8
BtsaI_length = 6
amppri_length = len(ampprimf)
asmpri_length = len(assemblyprimf)
bc_length = len(barcode)
btsai_from_end_pos = amppri_length + BtsaI_length - 1
bspqi_first = amppri_length + BspQI_length + 1
bspqi_second = amppri_length + BspQI_length + bc_length -2
asmpriFsearch_min_pos = amppri_length + 2*BspQI_length + bc_length +BtsaI_length-1
asmpriRsearch_pos = lengthmax - amppri_length - BtsaI_length - asmpri_length
KpnI_min_from_end = lengthmax - amppri_length - BtsaI_length - asmpri_length
KpnI_min_from_start = len(ampprimf)+ 2*BspQI_length + bc_length + BtsaI_length
#check that lengths all pass
for oligoseqrec in oligos:
if len(oligoseqrec.seq)>lengthmax:
print('Oligo length out of range:')
print(oligoseqrec.id)
print(str(oligoseqrec.seq) + '\t' + str(len(oligoseqrec.seq)))
#check that amplification primers are correct
for oligoseqrec in oligos:
if ampprimf != str(oligoseqrec.seq[0:len(ampprimf)]) or ampprimr != str(oligoseqrec.seq[-1*len(ampprimr):].reverse_complement()):
print('Amp primers not found:')
print(oligoseqrec.id)
print(str(oligoseqrec.seq))
#check that barcode and BspQI sites are correct
for oligoseqrec in oligos:
#first site is 15(amp)+8(BspQI)+1=24
#second site is 15(amp)+8(BspQI)+12(barcode)-2
if BspQI.search(oligoseqrec.seq) != [bspqi_first, bspqi_second]:
print("BspQI SITES ARE WRONG")
print(oligoseqrec.id)
print(oligoseqrec.seq)
print(BspQI.search(oligoseqrec.seq))
#barcode site is 15amp + 8(BspQI)
barcode_pos = amppri_length + BspQI_length
if str(oligoseqrec.seq).find(barcode) != barcode_pos:
print("BARCODE NOT FOUND")
print(oligoseqrec.id)
print(oligoseqrec.seq)
print(barcode)
#check that BtsI sites are correct
for oligoseqrec in oligos:
btssearch = BtsI.search(oligoseqrec.seq)
kpnsearch = KpnI.search(oligoseqrec.seq)
ndesearch = NdeI.search(oligoseqrec.seq)
asmpriFsearch = str(oligoseqrec.seq).find(assemblyprimf)
assemblyprimr_seq = Seq(assemblyprimr, generic_dna)
asmpriRsearch = str(oligoseqrec.seq).find(str(assemblyprimr_seq.reverse_complement()))
#pos end btsaI from end = amp length + BtsaI_length -1
if len(btssearch) !=2 or len(oligoseqrec)-btssearch[1] != btsai_from_end_pos: # btssearch[0] != 43:#
print("End BtsI site bad")
print(oligoseqrec.id)
print(oligoseqrec.seq)
print(barcode)
print(str(btssearch))
first_oligo = 0
if asmpriFsearch != -1:
#this should be the first oligo in assembly
first_oligo = 1
count_first += 1
#min asm search = amplength + 2*bspqilength + bclength+btsai-1
if asmpriFsearch < asmpriFsearch_min_pos or asmpriFsearch > 100:
print("Frist oligo Assembly FWD primer wrong")
print(oligoseqrec.id)
print(oligoseqrec.seq)
if len(ndesearch) != 1 or ndesearch[0] != asmpriFsearch + asmpri_length + 3:
print("First oligo NdeI site bad")
print(oligoseqrec.id)
print(oligoseqrec.seq)
print(ndesearch)
if len(kpnsearch) != 0:
print("First oligo has a KpnI site")
print(oligoseqrec.id)
print(oligoseqrec.seq)
print(kpnsearch)
if btssearch[0] != asmpriFsearch + 3:
print("First oligo BtsI site bad")
print(oligoseqrec.id)
print(oligoseqrec.seq)
last_oligo = 0
if asmpriRsearch != -1: #this should be the last oligo in assembly
last_oligo = 1
count_last += 1
if first_oligo == 1:
print("This oligo contains both FWD and REV assembly primers")
print(oligoseqrec.id)
print(oligoseqrec.seq)
if asmpriRsearch != asmpriRsearch_pos:#200mer: 159 #230mer:189 #calc: total length - 15ampPriLength - 6BtsaI -20asmPriLength
print("Last oligo assembly primer REV wrong")
print(oligoseqrec.id)
print(oligoseqrec.seq)
if len(ndesearch) != 0:
print("Last oligo has an NdeI site")
print(oligoseqrec.id)
print(oligoseqrec.seq)
if len(kpnsearch) != 1 or kpnsearch[0] > KpnI_min_from_end or kpnsearch[0] < KpnI_min_from_start:
print("Last oligo KpnI site bad")
print(oligoseqrec.id)
print(oligoseqrec.seq)
elif first_oligo == 0: # this is middle oligo
count_mid += 1
if len(ndesearch) != 0:
print("Middle oligo has an NdeI site")
print(oligoseqrec.id)
print(oligoseqrec.seq)
if len(kpnsearch) != 0:
print("Middle oligo has a KpnI site. Lib_num:"+str(filenum))
print(oligoseqrec.id)
print(oligoseqrec.seq)
if asmpriFsearch != -1:
print("Middle oligo has an Assembly primer FWD site")
print(oligoseqrec.id)
print(oligoseqrec.seq)
if asmpriRsearch != -1:
print("Middle oligo has an Assembly primer REV site")
print(oligoseqrec.id)
print(oligoseqrec.seq)
return count_first, count_mid, count_last
#####################################
########## OPTIONS ##################
#####################################
inputfiles = ['db_oligo/DHFR_Lib01_4oligo.oligos','db_oligo/DHFR_Lib02_4oligo.oligos','db_oligo/DHFR_Lib03_4oligo.oligos',
'db_oligo/DHFR_Lib04_4oligo.oligos','db_oligo/DHFR_Lib05_4oligo.oligos','db_oligo/DHFR_Lib06_4oligo.oligos',
'db_oligo/DHFR_Lib07_4oligo.oligos','db_oligo/DHFR_Lib08_4oligo.oligos','db_oligo/DHFR_Lib09_4oligo.oligos',
'db_oligo/DHFR_Lib10_4oligo.oligos','db_oligo/DHFR_Lib11_4oligo.oligos','db_oligo/DHFR_Lib12_4oligo.oligos',
'db_oligo/DHFR_Lib13_4oligo.oligos','db_oligo/DHFR_Lib14_5oligo.oligos','db_oligo/DHFR_Lib15_5oligo.oligos',
'db_oligo/DHFR_Lib16_4oligo.oligos','db_oligo/DHFR_Lib17_4oligo.oligos','db_oligo/DHFR_Lib18_4oligo.oligos',
'db_oligo/DHFR_Lib19_4oligo.oligos','db_oligo/DHFR_Lib20_4oligo.oligos','db_oligo/DHFR_Lib21_4oligo.oligos',
'db_oligo/DHFR_Lib22_4oligo.oligos','db_oligo/DHFR_Lib23_4oligo.oligos','db_oligo/DHFR_Lib24_4oligo.oligos',
'db_oligo/DHFR_Lib25_4oligo.oligos','db_oligo/DHFR_Lib26_4oligo.oligos','db_oligo/DHFR_Lib27_4oligo.oligos',
'db_oligo/DHFR_Lib28_4oligo.oligos','db_oligo/DHFR_Lib29_5oligo.oligos','db_oligo/DHFR_Lib30_5oligo.oligos']
#asmF skpp20 from 00_primer_screen.py output #skpp504F
#Primers for alternate codon versions are offset by len(num_oligos) = 15
assemblyprimf = ['ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG',
'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG',
'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG',
'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG',
'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG',
'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG',
'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG',
'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG',
'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG',
'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG', 'ATCGGGGATGGTAACTAACG']
#enter the reverse primers #skpp504R-rc
#Primers for alternate codon versions are offset by len(num_oligos) = 15
assemblyprimr = ['ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT',
'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT',
'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT',
'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT',
'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT',
'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT',
'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT',
'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT',
'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT',
'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT', 'ATAGCTGATTGTCCGTTGGT']
#ampF from 00_primer_screen.py output: skpp15 5##F for amplification primers
ampprimersf = ['CGCAGGGTCCAGAGT', 'GGGTTCGAGCGGGAG', 'ACTCGACGGCCTCTG',
'GCGGCACCACAAACT', 'TCCACCGTCGGCAAG', 'GGCGCGCTCTAACAC',
'AACGCCCAGCCTGTC', 'AGGCACGCTCAACCT', 'CATTGCCGTGCGTGA',
'CGCCGAGCCGTATGA', 'AGCCCACTTGCCCTC', 'GAGGGCTCCGTTCGT',
'CCCTCCCACGGACTT', 'CGTCCGCACAAACCC', 'GAGTCTGAGCGGCGT',
'GCCGGTCCCAACTCT', 'AGTCCAGCGGCTCAC', 'TCTGAGACGGCGAGG',
'CGGGCGCCTCTTGTT', 'AGGCGCTCATGTGGA', 'CGTGCAATGTGGCGT',
'GAGAGCCGGCCTGTG', 'GGGCACGCGGTAAGT', 'GCTCGGCCGTAGTGT',
'ACCTCATGTGGCCGA', 'ACTGATGCGCGGTCT', 'TCCGCGTTCTTGGCT',
'CAGCACATCCCGCCC', 'GGCACCGTCCTGTCT', 'CCTAACTGCGGGCGT',
'ACTAGCCCGCGTTCC', 'GGCCTGCGCGTATCT', 'GCGACCCTCCACTGA', 'CGCAGGTACGGGTCT']
#ampR (not RC) from 00_primer_screen.py output:
ampprimersr = ['GTTCGCGCGAAGGAA', 'TAGCGCGCAGAGAGG', 'ACACGCGCGTTGAAG',
'CGTGGCCTCTGTCCT', 'GGCCGCACCCAGTAG', 'CTCCCTCTCGCAGCA',
'CCGCGTTGCTGAGTG', 'CCTAGGTCGCACGCA', 'GAGGGTTCCCGCTGA',
'GCGCATTGGAGGCTG', 'CCAAGCCGGGTTCCA', 'CGGCCAGGTCAGGTC',
'GGGTCCCTCGTCTCC', 'CCGCATCGTTGACCC', 'GCCTAGCTCGCCTGA',
'CAGCCATGTCTCGCC', 'CCGCCTTCTAGCCCA', 'AGGACGCCCGTAGTG',
'AGCGCGATTCAGCCA', 'ACTCAGCAGCGGGAC', 'CGCTGGACTCGTGGT',
'CACGCAGCCAAACCC', 'TGTGCCGCCAAGACC', 'CGAGTTGTGGCACGG',
'CCAGTGACGCAGGGA', 'ATGAAGGCGGCAGGT', 'GGGACGTTCGGACCA',
'CCCTGGTCGCGTCTG', 'CCATGCCCTCCGACT', 'ACGGCGGCCCTAATG',
'GCCGACAATTCCGCC', 'AGCGTCGCCAAACCC', 'CGTGATCCCGCCAAG', 'CGCGTGGACTTGCTC']
barcodes = obtainFastaSequences('barcodes/filt_prim_12nt_Lev_3_Tm_40_42_GC_45_55_SD_2_mod_restriction.fasta')
oligos_per_construct = [4,4,4,4,4,4,4,4,4,4,4,4,4,5,5,4,4,4,4,4,4,4,4,4,4,4,4,4,5,5]
constructs_per_lib = [384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384, 384]
padding_between_btsaI_ampR = True
oligo_length = 230
#####################################
######### / OPTIONS #################
#####################################
for fileindex in range(len(inputfiles)):
constructs = getOligos(inputfiles[fileindex].replace('.oligos','-finaloligos.fasta'))
first_oligo_counter = 0
middle_oligo_counter = 0
last_oligo_counter = 0
for index in range(len(constructs)):
add_first, add_mid, add_last = checkConstruct(constructs[index],oligo_length,fileindex+1,ampprimersf[fileindex],ampprimersr[fileindex],str(barcodes[index].seq),assemblyprimf[fileindex],assemblyprimr[fileindex],padding_between_btsaI_ampR)
first_oligo_counter += add_first
middle_oligo_counter += add_mid
last_oligo_counter += add_last
print("Lib: " + inputfiles[fileindex])
if first_oligo_counter != constructs_per_lib[fileindex] or middle_oligo_counter != ((oligos_per_construct[fileindex]-2)*constructs_per_lib[fileindex]) or last_oligo_counter != constructs_per_lib[fileindex]:
print(str(first_oligo_counter) + " start oligos. Expect to have " + str(constructs_per_lib[fileindex]))
print(str(middle_oligo_counter) + " middle oligos or " + str(middle_oligo_counter/constructs_per_lib[fileindex]) + " per construct. Expect to have "+ str((oligos_per_construct[fileindex]-2)*constructs_per_lib[fileindex])+ " total middle oligos.")
print(str(last_oligo_counter) + " end oligos. Expect to have "+ str(constructs_per_lib[fileindex]))
else:
print("Everything good.")
|
[
"Bio.Seq.Seq",
"Bio.SeqIO.parse"
] |
[((330, 358), 'Bio.SeqIO.parse', 'SeqIO.parse', (['handle', '"""fasta"""'], {}), "(handle, 'fasta')\n", (341, 358), False, 'from Bio import SeqIO\n'), ((591, 621), 'Bio.SeqIO.parse', 'SeqIO.parse', (['filename', '"""fasta"""'], {}), "(filename, 'fasta')\n", (602, 621), False, 'from Bio import SeqIO\n'), ((3456, 3487), 'Bio.Seq.Seq', 'Seq', (['assemblyprimr', 'generic_dna'], {}), '(assemblyprimr, generic_dna)\n', (3459, 3487), False, 'from Bio.Seq import Seq\n')]
|
from qupulse.pulses import FunctionPT, TablePT
class DataTypes:
""" The possible data types for the pulse creation."""
RAW_DATA = 'rawdata'
QU_PULSE = 'qupulse'
class Templates:
    """Factory helpers that build common qupulse pulse templates."""

    @staticmethod
    def chirp(name):
        """Create a linear chirp signal.

        Args:
            name (str): The user defined name of the pulse template.

        Returns:
            FunctionPT: The pulse template with the chirp signal.
            Parameters of the pulse template are the `duration` (in the same
            unit as time), `omega_0` (in Hz), `delta_omega` (in Hz),
            `amplitude` and `phase`. Time is in ns.
        """
        expression = 'amplitude*cos(2*pi*(omega_0+(t/(2*duration))*delta_omega) *t*1e-9+phase)'
        template = FunctionPT(expression, 'duration', channel=name)
        template.__doc__ = 'Template for linear chirp\nAlso see https://en.wikipedia.org/wiki/Chirp\n\n' + template.__doc__
        return template

    @staticmethod
    def square(name):
        """Create a block-wave qupulse template for sequencing.

        Args:
            name (str): The user defined name of the sequence.

        Returns:
            TablePT: The template with the square wave.
        """
        entries = [(0, 0), ('period/4', 'amplitude'),
                   ('period*3/4', 0), ('period', 0)]
        return TablePT({name: entries})

    @staticmethod
    def sawtooth(name, padding=0):
        """Create a sawtooth qupulse template for sequencing.

        Args:
            name (str): The user defined name of the sequence.
            padding (float): Padding to add at the end of the sawtooth.

        Returns:
            TablePT: The sequence with the sawtooth wave.
        """
        entries = [
            (0, 0),
            ('period*(1-width)/2', '-amplitude', 'linear'),
            ('period*(1-(1-width)/2)', 'amplitude', 'linear'),
            ('period', 0, 'linear'),
        ]
        if padding > 0:
            # Hold zero for the requested extra time after the ramp.
            entries.append((f'period+{padding}', 0, 'hold'))
        return TablePT({name: entries})

    @staticmethod
    def hold(name):
        """Create a DC-offset qupulse template for sequencing.

        Args:
            name (str): The user defined name of the sequence.

        Returns:
            TablePT: The sequence with the wait pulse.
        """
        entries = [(0, 'offset'), ('period', 'offset')]
        return TablePT({name: entries})

    @staticmethod
    def marker(name):
        """Create a TTL pulse qupulse template for sequencing.

        Args:
            name (str): The user defined name of the sequence.

        Returns:
            TablePT: The sequence with the wait pulse.
        """
        entries = [(0, 0), ('offset', 1),
                   ('offset+uptime', 0), ('period', 0)]
        return TablePT({name: entries})

    @staticmethod
    def rollover_marker(name):
        """Create a TTL pulse qupulse template whose high phase rolls over
        into the subsequent period.

        ---------                      ----------
                 |                     |
                 |                     |
                  ----------
        <---------period------------>
        <-----offset----->
        <--------> uptime <--------->

        Args:
            name (str): The user defined name of the sequence.

        Returns:
            TablePT: The sequence with the marker pulse and rollover part of the pulse.
        """
        entries = [
            (0, 1),
            ('offset + uptime - period', 0),
            ('offset', 1),
            ('period', 1),
        ]
        return TablePT({name: entries})

    @staticmethod
    def skewed_sawtooth(name):
        r"""Create a skewed sawtooth qupulse template for sequencing.

        The pulse is symmetric, has total integral zero, and right at
        period/2 it has amplitude 0 and a sharp corner.
        A visual representation of the waveform is:

          A    /\          /\
              /  \        /  \
          0  /    \      /    \
                   \    /
         -A         \  /
                     \/
             T/6
             <->
              T/3
             <------>
               T/2
             <--------->
                  T
             <-------------------->

        T is period and A is the amplitude. A negative amplitude produces an
        inverted pulse.

        Args:
            name (str): The user defined name of the sequence.

        Returns:
            TablePT: The sequence with the skewed sawtooth wave.
            Parameters of the pulse template are the `amplitude` and `period`.
        """
        entries = [
            (0, 0),
            ('period/6', 'amplitude', 'linear'),
            ('period/3', '-amplitude', 'linear'),
            ('period/2', 0, 'linear'),
            ('period*2/3', '-amplitude', 'linear'),
            ('period*5/6', 'amplitude', 'linear'),
            ('period', 0, 'linear'),
        ]
        return TablePT({name: entries})

    @staticmethod
    def pulse_table(name, entries):
        """Wrap arbitrary table entries in a single-channel TablePT."""
        return TablePT({name: entries})
|
[
"qupulse.pulses.FunctionPT",
"qupulse.pulses.TablePT"
] |
[((698, 819), 'qupulse.pulses.FunctionPT', 'FunctionPT', (['"""amplitude*cos(2*pi*(omega_0+(t/(2*duration))*delta_omega) *t*1e-9+phase)"""', '"""duration"""'], {'channel': 'name'}), "(\n 'amplitude*cos(2*pi*(omega_0+(t/(2*duration))*delta_omega) *t*1e-9+phase)',\n 'duration', channel=name)\n", (708, 819), False, 'from qupulse.pulses import FunctionPT, TablePT\n'), ((1295, 1386), 'qupulse.pulses.TablePT', 'TablePT', (["{name: [(0, 0), ('period/4', 'amplitude'), ('period*3/4', 0), ('period', 0)]}"], {}), "({name: [(0, 0), ('period/4', 'amplitude'), ('period*3/4', 0), (\n 'period', 0)]})\n", (1302, 1386), False, 'from qupulse.pulses import FunctionPT, TablePT\n'), ((2038, 2058), 'qupulse.pulses.TablePT', 'TablePT', (['{name: tbl}'], {}), '({name: tbl})\n', (2045, 2058), False, 'from qupulse.pulses import FunctionPT, TablePT\n'), ((2340, 2394), 'qupulse.pulses.TablePT', 'TablePT', (["{name: [(0, 'offset'), ('period', 'offset')]}"], {}), "({name: [(0, 'offset'), ('period', 'offset')]})\n", (2347, 2394), False, 'from qupulse.pulses import FunctionPT, TablePT\n'), ((2678, 2755), 'qupulse.pulses.TablePT', 'TablePT', (["{name: [(0, 0), ('offset', 1), ('offset+uptime', 0), ('period', 0)]}"], {}), "({name: [(0, 0), ('offset', 1), ('offset+uptime', 0), ('period', 0)]})\n", (2685, 2755), False, 'from qupulse.pulses import FunctionPT, TablePT\n'), ((3406, 3499), 'qupulse.pulses.TablePT', 'TablePT', (["{name: [(0, 1), ('offset + uptime - period', 0), ('offset', 1), ('period', 1)]}"], {}), "({name: [(0, 1), ('offset + uptime - period', 0), ('offset', 1), (\n 'period', 1)]})\n", (3413, 3499), False, 'from qupulse.pulses import FunctionPT, TablePT\n'), ((4623, 4867), 'qupulse.pulses.TablePT', 'TablePT', (["{name: [(0, 0), ('period/6', 'amplitude', 'linear'), ('period/3',\n '-amplitude', 'linear'), ('period/2', 0, 'linear'), ('period*2/3',\n '-amplitude', 'linear'), ('period*5/6', 'amplitude', 'linear'), (\n 'period', 0, 'linear')]}"], {}), "({name: [(0, 0), ('period/6', 'amplitude', 
'linear'), ('period/3',\n '-amplitude', 'linear'), ('period/2', 0, 'linear'), ('period*2/3',\n '-amplitude', 'linear'), ('period*5/6', 'amplitude', 'linear'), (\n 'period', 0, 'linear')]})\n", (4630, 4867), False, 'from qupulse.pulses import FunctionPT, TablePT\n'), ((5111, 5135), 'qupulse.pulses.TablePT', 'TablePT', (['{name: entries}'], {}), '({name: entries})\n', (5118, 5135), False, 'from qupulse.pulses import FunctionPT, TablePT\n')]
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from . import ParameterConstraintProvider_pb2 as ParameterConstraintProvider__pb2
# NOTE: gRPC-generated stub class -- code left byte-identical, comments only.
class ParameterConstraintsProviderStub(object):
    """Feature: Parameter Constraint Provider
    Allows a server to apply constraints on specific command parameters after design time. It is recommended to
    specify
    the constraints in the Feature Definitions whenever possible.
    The constraints given by this Feature act as a logical AND to any constraints already defined in the Feature
    Definition.
    As an example: A constrained type has a Pattern Constraint, then the ParameterConstraintProvider specifies an
    additional
    Pattern Constraint - in this case the underlying SiLA Type has to follow BOTH Patterns defined.
    The constrained can be retrieved by reading and subscribing to its dynamic property.
    Example use-case: Loading and setting the positions a mobile robot can move to at runtime.
    """
    def __init__(self, channel):
        """Constructor.
        Args:
            channel: A grpc.Channel.
        """
        # Server-streaming RPC: one request, a stream of constraint updates.
        # Serialization is delegated to the generated protobuf message classes.
        self.Subscribe_ParametersConstraints = channel.unary_stream(
                '/sila2.org.silastandard.core.parameterconstraintsprovider.v1.ParameterConstraintsProvider/Subscribe_ParametersConstraints',
                request_serializer=ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Parameters.SerializeToString,
                response_deserializer=ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Responses.FromString,
                )
# NOTE: gRPC-generated servicer base class -- code left byte-identical.
# Subclass and override Subscribe_ParametersConstraints to implement the feature.
class ParameterConstraintsProviderServicer(object):
    """Feature: Parameter Constraint Provider
    Allows a server to apply constraints on specific command parameters after design time. It is recommended to
    specify
    the constraints in the Feature Definitions whenever possible.
    The constraints given by this Feature act as a logical AND to any constraints already defined in the Feature
    Definition.
    As an example: A constrained type has a Pattern Constraint, then the ParameterConstraintProvider specifies an
    additional
    Pattern Constraint - in this case the underlying SiLA Type has to follow BOTH Patterns defined.
    The constrained can be retrieved by reading and subscribing to its dynamic property.
    Example use-case: Loading and setting the positions a mobile robot can move to at runtime.
    """
    def Subscribe_ParametersConstraints(self, request, context):
        """Parameters Constraints
        Parameter Constraints that constrain any specified parameters within this structure
        """
        # Default generated behavior: report UNIMPLEMENTED to the client and
        # raise locally so an unoverridden method is never silently called.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_ParameterConstraintsProviderServicer_to_server(servicer, server):
    """Register a ParameterConstraintsProviderServicer instance with a grpc.Server.

    Args:
        servicer: An object implementing Subscribe_ParametersConstraints.
        server: The grpc.Server to attach the RPC handlers to.
    """
    # Map each RPC method name to its (de)serializing stream handler.
    rpc_method_handlers = {
            'Subscribe_ParametersConstraints': grpc.unary_stream_rpc_method_handler(
                    servicer.Subscribe_ParametersConstraints,
                    request_deserializer=ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Parameters.FromString,
                    response_serializer=ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Responses.SerializeToString,
            ),
    }
    # Bind the handlers under the fully qualified SiLA service name.
    generic_handler = grpc.method_handlers_generic_handler(
            'sila2.org.silastandard.core.parameterconstraintsprovider.v1.ParameterConstraintsProvider', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
|
[
"grpc.unary_stream_rpc_method_handler",
"grpc.method_handlers_generic_handler"
] |
[((3211, 3370), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""sila2.org.silastandard.core.parameterconstraintsprovider.v1.ParameterConstraintsProvider"""', 'rpc_method_handlers'], {}), "(\n 'sila2.org.silastandard.core.parameterconstraintsprovider.v1.ParameterConstraintsProvider'\n , rpc_method_handlers)\n", (3247, 3370), False, 'import grpc\n'), ((2845, 3170), 'grpc.unary_stream_rpc_method_handler', 'grpc.unary_stream_rpc_method_handler', (['servicer.Subscribe_ParametersConstraints'], {'request_deserializer': 'ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Parameters.FromString', 'response_serializer': 'ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Responses.SerializeToString'}), '(servicer.\n Subscribe_ParametersConstraints, request_deserializer=\n ParameterConstraintProvider__pb2.\n Subscribe_ParametersConstraints_Parameters.FromString,\n response_serializer=ParameterConstraintProvider__pb2.\n Subscribe_ParametersConstraints_Responses.SerializeToString)\n', (2881, 3170), False, 'import grpc\n')]
|
# Script for running distributed clients using torchrun
import sys
from fedml.arguments import load_arguments
import subprocess
import os
from fedml.constants import FEDML_TRAINING_PLATFORM_CROSS_SILO
# env_variables = {
# 'NCCL_DEBUG':'INFO',
# 'NCCL_MIN_NRINGS':1,
# 'NCCL_TREE_THRESHOLD':4294967296,
# 'OMP_NUM_THREADS':8,
# 'NCCL_NSOCKS_PERTHREAD':8,
# 'NCCL_SOCKET_NTHREADS':8,
# 'NCCL_BUFFSIZE':1048576,
# 'NCCL_IB_DISABLE'=1
# 'NCCL_SOCKET_IFNAME'='$NETWORK_INTERFACE'
# 'GLOO_SOCKET_IFNAME'=$'NETWORK_INTERFACE'
# 'TP_SOCKET_IFNAME'=$'NETWORK_INTERFACE'
# }
def launch_dist_trainers():
    """Launch distributed cross-silo trainers on all configured nodes.

    Loads FedML cross-silo arguments, then uses ``pdsh`` (over ssh) to start
    one ``torchrun`` process group per node, each running ``torch_client.py``
    with the original command-line arguments forwarded.
    """
    import shutil  # stdlib; used to resolve torchrun on PATH

    inputs = sys.argv[1:]
    args = load_arguments(FEDML_TRAINING_PLATFORM_CROSS_SILO)
    # pdsh defaults to rsh; force ssh as the remote command transport.
    os.environ['PDSH_RCMD_TYPE'] = 'ssh'
    node_addresses = ",".join(args.node_addresses)
    pdsh_cmd_arguments = ['pdsh', '-w', node_addresses]
    # Resolve torchrun via PATH lookup instead of shelling out to `which`
    # (avoids a subprocess and an empty string when torchrun is missing).
    # Fall back to the bare name so the remote shell can still resolve it.
    torchrun_path = shutil.which('torchrun') or 'torchrun'
    torchrun_cmd_arguments = [
        # cd into the launch directory on every node first.
        f"cd {os.path.abspath('.')};",
        torchrun_path,
        f"--nnodes={args.n_node_in_silo}",
        f"--nproc_per_node={args.n_proc_per_node}",
        "--rdzv_backend=c10d",
        f"--rdzv_endpoint={args.master_address}:{args.launcher_rdzv_port}",
        # %n is expanded by pdsh to the per-node rank.
        "--node_rank=%n",
        "--rdzv_id=hi_fl",
        "torch_client.py",
    ] + inputs
    subprocess.run(pdsh_cmd_arguments + torchrun_cmd_arguments)
|
[
"fedml.arguments.load_arguments",
"os.path.abspath",
"subprocess.run"
] |
[((678, 728), 'fedml.arguments.load_arguments', 'load_arguments', (['FEDML_TRAINING_PLATFORM_CROSS_SILO'], {}), '(FEDML_TRAINING_PLATFORM_CROSS_SILO)\n', (692, 728), False, 'from fedml.arguments import load_arguments\n'), ((1515, 1573), 'subprocess.run', 'subprocess.run', (['(pdsh_cmd_aruments + torchrun_cmd_arguments)'], {}), '(pdsh_cmd_aruments + torchrun_cmd_arguments)\n', (1529, 1573), False, 'import subprocess\n'), ((896, 965), 'subprocess.run', 'subprocess.run', (["['which', 'torchrun']"], {'capture_output': '(True)', 'text': '(True)'}), "(['which', 'torchrun'], capture_output=True, text=True)\n", (910, 965), False, 'import subprocess\n'), ((1165, 1185), 'os.path.abspath', 'os.path.abspath', (['"""."""'], {}), "('.')\n", (1180, 1185), False, 'import os\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import functools
import inspect
import json
import logging
import textwrap
from abc import ABC, abstractmethod
from contextlib import contextmanager
from datetime import datetime, timedelta
from typing import (
Any,
Callable,
cast,
Dict,
Iterator,
Optional,
Type,
TYPE_CHECKING,
Union,
)
from flask import current_app, g, request
from flask_appbuilder.const import API_URI_RIS_KEY
from sqlalchemy.exc import SQLAlchemyError
from typing_extensions import Literal
if TYPE_CHECKING:
from superset.stats_logger import BaseStatsLogger
def collect_request_payload() -> Dict[str, Any]:
    """Collect log payload identifiable from request context."""
    if not request:
        return {}
    # Start from the request path, add POST form fields, then let URL search
    # params overwrite any colliding POST values.
    payload: Dict[str, Any] = {"path": request.path}
    payload.update(request.form.to_dict())
    payload.update(request.args.to_dict())
    # Also record the matched URL rule when it differs from the raw path.
    url_rule = str(request.url_rule)
    if url_rule != request.path:
        payload["url_rule"] = url_rule
    # Prefer the parsed rison object (possibly from `payload_override`) over
    # the raw `q=...` search-param string.
    if "rison" in payload and API_URI_RIS_KEY in payload:
        del payload[API_URI_RIS_KEY]
    # Drop an empty rison object entirely.
    if "rison" in payload and not payload["rison"]:
        del payload["rison"]
    return payload
class AbstractEventLogger(ABC):
    """Base class for Superset event loggers.

    Usable three ways: via the ``log_this*`` decorators, via the
    ``log_context`` context manager, or as a callable-plus-``with`` pair
    (``__call__`` then ``__enter__``/``__exit__``). Subclasses implement
    :meth:`log` to persist the collected records.
    """
    def __call__(
        self,
        action: str,
        object_ref: Optional[str] = None,
        log_to_statsd: bool = True,
        duration: Optional[timedelta] = None,
        **payload_override: Dict[str, Any],
    ) -> object:
        """Stash call arguments on the instance for the subsequent ``with``
        block (see ``__enter__``/``__exit__``), then return self.

        NOTE(review): the ``duration`` argument is accepted but never stored;
        ``__exit__`` recomputes duration from ``__enter__``'s timestamp --
        confirm this is intended.
        """
        # pylint: disable=W0201
        self.action = action
        self.object_ref = object_ref
        self.log_to_statsd = log_to_statsd
        self.payload_override = payload_override
        return self
    def __enter__(self) -> None:
        """Record the start time for duration measurement."""
        # pylint: disable=W0201
        self.start = datetime.now()
    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        """Log the event captured by __call__, with measured duration."""
        # Log data w/ arguments being passed in
        self.log_with_context(
            action=self.action,
            object_ref=self.object_ref,
            log_to_statsd=self.log_to_statsd,
            duration=datetime.now() - self.start,
            **self.payload_override,
        )
    @abstractmethod
    def log(  # pylint: disable=too-many-arguments
        self,
        user_id: Optional[int],
        action: str,
        dashboard_id: Optional[int],
        duration_ms: Optional[int],
        slice_id: Optional[int],
        referrer: Optional[str],
        *args: Any,
        **kwargs: Any,
    ) -> None:
        """Persist one logged event; implemented by concrete subclasses."""
        pass
    def log_with_context(  # pylint: disable=too-many-locals
        self,
        action: str,
        duration: Optional[timedelta] = None,
        object_ref: Optional[str] = None,
        log_to_statsd: bool = True,
        **payload_override: Optional[Dict[str, Any]],
    ) -> None:
        """Gather request-context data (user, dashboard, slice, referrer)
        and forward everything to :meth:`log`.
        """
        # pylint: disable=import-outside-toplevel
        from superset.views.core import get_form_data
        # Cap referrer length; it may be absent outside a request context.
        referrer = request.referrer[:1000] if request and request.referrer else None
        duration_ms = int(duration.total_seconds() * 1000) if duration else None
        # Initial try and grab user_id via flask.g.user
        try:
            user_id = g.user.get_id()
        except Exception:  # pylint: disable=broad-except
            user_id = None
        # Whenever a user is not bounded to a session we
        # need to add them back before logging to capture user_id
        if user_id is None:
            try:
                session = current_app.appbuilder.get_session
                session.add(g.user)
                user_id = g.user.get_id()
            except Exception as ex:  # pylint: disable=broad-except
                logging.warning(ex)
                user_id = None
        payload = collect_request_payload()
        if object_ref:
            payload["object_ref"] = object_ref
        if payload_override:
            payload.update(payload_override)
        # dashboard_id may arrive as a string (or be missing entirely).
        dashboard_id: Optional[int] = None
        try:
            dashboard_id = int(payload.get("dashboard_id"))  # type: ignore
        except (TypeError, ValueError):
            dashboard_id = None
        # Prefer the slice_id embedded in form_data when present.
        if "form_data" in payload:
            form_data, _ = get_form_data()
            payload["form_data"] = form_data
            slice_id = form_data.get("slice_id")
        else:
            slice_id = payload.get("slice_id")
        try:
            slice_id = int(slice_id)  # type: ignore
        except (TypeError, ValueError):
            slice_id = 0
        if log_to_statsd:
            self.stats_logger.incr(action)
        try:
            # bulk insert
            explode_by = payload.get("explode")
            records = json.loads(payload.get(explode_by))  # type: ignore
        except Exception:  # pylint: disable=broad-except
            # No/invalid "explode" key: log the payload as a single record.
            records = [payload]
        self.log(
            user_id,
            action,
            records=records,
            dashboard_id=dashboard_id,
            slice_id=slice_id,
            duration_ms=duration_ms,
            referrer=referrer,
        )
    @contextmanager
    def log_context(  # pylint: disable=too-many-locals
        self, action: str, object_ref: Optional[str] = None, log_to_statsd: bool = True,
    ) -> Iterator[Callable[..., None]]:
        """
        Log an event with additional information from the request context.
        :param action: a name to identify the event
        :param object_ref: reference to the Python object that triggered this action
        :param log_to_statsd: whether to update statsd counter for the action
        """
        payload_override = {}
        start = datetime.now()
        # yield a helper to add additional payload
        yield lambda **kwargs: payload_override.update(kwargs)
        duration = datetime.now() - start
        # take the action from payload_override else take the function param action
        action_str = payload_override.pop("action", action)
        self.log_with_context(
            action_str, duration, object_ref, log_to_statsd, **payload_override
        )
    def _wrapper(
        self,
        f: Callable[..., Any],
        action: Optional[Union[str, Callable[..., str]]] = None,
        object_ref: Optional[Union[str, Callable[..., str], Literal[False]]] = None,
        allow_extra_payload: Optional[bool] = False,
        **wrapper_kwargs: Any,
    ) -> Callable[..., Any]:
        """Shared decorator implementation behind the ``log_this*`` helpers.

        ``action``/``object_ref`` may be callables evaluated with the
        decorated function's arguments; otherwise f's own names are used.
        """
        @functools.wraps(f)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            action_str = (
                action(*args, **kwargs) if callable(action) else action
            ) or f.__name__
            object_ref_str = (
                object_ref(*args, **kwargs) if callable(object_ref) else object_ref
            ) or (f.__qualname__ if object_ref is not False else None)
            with self.log_context(
                action=action_str, object_ref=object_ref_str, **wrapper_kwargs
            ) as log:
                log(**kwargs)
                if allow_extra_payload:
                    # add a payload updater to the decorated function
                    value = f(*args, add_extra_log_payload=log, **kwargs)
                else:
                    value = f(*args, **kwargs)
            return value
        return wrapper
    def log_this(self, f: Callable[..., Any]) -> Callable[..., Any]:
        """Decorator that uses the function name as the action"""
        return self._wrapper(f)
    def log_this_with_context(self, **kwargs: Any) -> Callable[..., Any]:
        """Decorator that can override kwargs of log_context"""
        def func(f: Callable[..., Any]) -> Callable[..., Any]:
            return self._wrapper(f, **kwargs)
        return func
    def log_this_with_extra_payload(self, f: Callable[..., Any]) -> Callable[..., Any]:
        """Decorator that instrument `update_log_payload` to kwargs"""
        return self._wrapper(f, allow_extra_payload=True)
    @property
    def stats_logger(self) -> BaseStatsLogger:
        """Stats logger instance taken from the Flask app config."""
        return current_app.config["STATS_LOGGER"]
def get_event_logger_from_cfg_value(cfg_value: Any) -> AbstractEventLogger:
    """
    This function implements the deprecation of assignment
    of class objects to EVENT_LOGGER configuration, and validates
    type of configured loggers.
    The motivation for this method is to gracefully deprecate the ability to configure
    EVENT_LOGGER with a class type, in favor of preconfigured instances which may have
    required construction-time injection of proprietary or locally-defined dependencies.
    :param cfg_value: The configured EVENT_LOGGER value to be validated
    :return: if cfg_value is a class type, will return a new instance created using a
    default constructor; otherwise the (validated) instance itself
    """
    result: Any = cfg_value
    # Deprecated path: a class was configured -- warn and instantiate it.
    if inspect.isclass(cfg_value):
        logging.warning(
            textwrap.dedent(
                """
                In superset private config, EVENT_LOGGER has been assigned a class
                object. In order to accomodate pre-configured instances without a
                default constructor, assignment of a class is deprecated and may no
                longer work at some point in the future. Please assign an object
                instance of a type that implements
                superset.utils.log.AbstractEventLogger.
                """
            )
        )
        event_logger_type = cast(Type[Any], cfg_value)
        result = event_logger_type()
    # Verify that we have a valid logger impl
    if not isinstance(result, AbstractEventLogger):
        raise TypeError(
            "EVENT_LOGGER must be configured with a concrete instance"
            "of superset.utils.log.AbstractEventLogger."
        )
    logging.info("Configured event logger of type %s", type(result))
    return cast(AbstractEventLogger, result)
class DBEventLogger(AbstractEventLogger):
    """Event logger that commits logs to Superset DB"""

    def log(  # pylint: disable=too-many-arguments,too-many-locals
        self,
        user_id: Optional[int],
        action: str,
        dashboard_id: Optional[int],
        duration_ms: Optional[int],
        slice_id: Optional[int],
        referrer: Optional[str],
        *args: Any,
        **kwargs: Any,
    ) -> None:
        """Persist each record in ``kwargs['records']`` as a ``Log`` row."""
        # pylint: disable=import-outside-toplevel
        from superset.models.core import Log

        log_entries = []
        for record in kwargs.get("records", []):
            serialized: Optional[str]
            try:
                serialized = json.dumps(record)
            except Exception:  # pylint: disable=broad-except
                # Unserializable payloads are stored with a NULL json column.
                serialized = None
            log_entries.append(
                Log(
                    action=action,
                    json=serialized,
                    dashboard_id=dashboard_id,
                    slice_id=slice_id,
                    duration_ms=duration_ms,
                    referrer=referrer,
                    user_id=user_id,
                )
            )
        try:
            # Bulk-save all rows in a single transaction.
            session = current_app.appbuilder.get_session
            session.bulk_save_objects(log_entries)
            session.commit()
        except SQLAlchemyError as ex:
            logging.error("DBEventLogger failed to log event(s)")
            logging.exception(ex)
|
[
"textwrap.dedent",
"logging.error",
"logging.exception",
"superset.models.core.Log",
"typing.cast",
"inspect.isclass",
"flask.request.args.to_dict",
"flask.g.user.get_id",
"superset.views.core.get_form_data",
"logging.warning",
"json.dumps",
"flask.request.form.to_dict",
"functools.wraps",
"datetime.datetime.now"
] |
[((9764, 9790), 'inspect.isclass', 'inspect.isclass', (['cfg_value'], {}), '(cfg_value)\n', (9779, 9790), False, 'import inspect\n'), ((10783, 10816), 'typing.cast', 'cast', (['AbstractEventLogger', 'result'], {}), '(AbstractEventLogger, result)\n', (10787, 10816), False, 'from typing import Any, Callable, cast, Dict, Iterator, Optional, Type, TYPE_CHECKING, Union\n'), ((1619, 1641), 'flask.request.form.to_dict', 'request.form.to_dict', ([], {}), '()\n', (1639, 1641), False, 'from flask import current_app, g, request\n'), ((1705, 1727), 'flask.request.args.to_dict', 'request.args.to_dict', ([], {}), '()\n', (1725, 1727), False, 'from flask import current_app, g, request\n'), ((2827, 2841), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2839, 2841), False, 'from datetime import datetime, timedelta\n'), ((6643, 6657), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6655, 6657), False, 'from datetime import datetime, timedelta\n'), ((7416, 7434), 'functools.wraps', 'functools.wraps', (['f'], {}), '(f)\n', (7431, 7434), False, 'import functools\n'), ((10376, 10402), 'typing.cast', 'cast', (['Type[Any]', 'cfg_value'], {}), '(Type[Any], cfg_value)\n', (10380, 10402), False, 'from typing import Any, Callable, cast, Dict, Iterator, Optional, Type, TYPE_CHECKING, Union\n'), ((4214, 4229), 'flask.g.user.get_id', 'g.user.get_id', ([], {}), '()\n', (4227, 4229), False, 'from flask import current_app, g, request\n'), ((5215, 5230), 'superset.views.core.get_form_data', 'get_form_data', ([], {}), '()\n', (5228, 5230), False, 'from superset.views.core import get_form_data\n'), ((6791, 6805), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6803, 6805), False, 'from datetime import datetime, timedelta\n'), ((9829, 10316), 'textwrap.dedent', 'textwrap.dedent', (['"""\n In superset private config, EVENT_LOGGER has been assigned a class\n object. 
In order to accomodate pre-configured instances without a\n default constructor, assignment of a class is deprecated and may no\n longer work at some point in the future. Please assign an object\n instance of a type that implements\n superset.utils.log.AbstractEventLogger.\n """'], {}), '(\n """\n In superset private config, EVENT_LOGGER has been assigned a class\n object. In order to accomodate pre-configured instances without a\n default constructor, assignment of a class is deprecated and may no\n longer work at some point in the future. Please assign an object\n instance of a type that implements\n superset.utils.log.AbstractEventLogger.\n """\n )\n', (9844, 10316), False, 'import textwrap\n'), ((11658, 11806), 'superset.models.core.Log', 'Log', ([], {'action': 'action', 'json': 'json_string', 'dashboard_id': 'dashboard_id', 'slice_id': 'slice_id', 'duration_ms': 'duration_ms', 'referrer': 'referrer', 'user_id': 'user_id'}), '(action=action, json=json_string, dashboard_id=dashboard_id, slice_id=\n slice_id, duration_ms=duration_ms, referrer=referrer, user_id=user_id)\n', (11661, 11806), False, 'from superset.models.core import Log\n'), ((4607, 4622), 'flask.g.user.get_id', 'g.user.get_id', ([], {}), '()\n', (4620, 4622), False, 'from flask import current_app, g, request\n'), ((11524, 11542), 'json.dumps', 'json.dumps', (['record'], {}), '(record)\n', (11534, 11542), False, 'import json\n'), ((12142, 12195), 'logging.error', 'logging.error', (['"""DBEventLogger failed to log event(s)"""'], {}), "('DBEventLogger failed to log event(s)')\n", (12155, 12195), False, 'import logging\n'), ((12208, 12229), 'logging.exception', 'logging.exception', (['ex'], {}), '(ex)\n', (12225, 12229), False, 'import logging\n'), ((3135, 3149), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3147, 3149), False, 'from datetime import datetime, timedelta\n'), ((4707, 4726), 'logging.warning', 'logging.warning', (['ex'], {}), '(ex)\n', (4722, 4726), False, 'import logging\n')]
|
from bs4 import BeautifulSoup
import urllib.request as req
import os
def _download(url):
    # Fetch a URL and decode the body as UTF-8 text.
    return req.urlopen(url).read().decode("utf8")

# Parse the locally saved game-log page.
with open("./oglethorpe?view=gamelog") as f:
    soup = BeautifulSoup(f.read())
# Table index 7 holds the game log -- found by manual inspection of the HTML.
stats_table = soup.find_all("table")[7]
# Skip the header row and the (currently) useless second row.
rows = stats_table.find_all("tr")[2:]
BOXSCORE_DIR = "./boxscores"
PLAYS_DIR = "./plays"
for directory in (BOXSCORE_DIR, PLAYS_DIR):
    if not os.path.exists(directory):
        os.makedirs(directory)
for k, row in enumerate(rows):
    # The only anchor in the row links to the boxscore page.
    append = os.path.basename(row.a["href"])
    url = "http://gopetrels.com/sports/wvball/2016-17/boxscores/" + append
    boxscore_text = _download(url)
    # The play-by-play view lives at the same URL with a query parameter.
    play_text = _download(url + "?view=plays")
    entries = row.find_all("td")
    # Column positions determined by manual inspection of the HTML.
    date = entries[0].text.strip().replace(" ", "-")
    opponent = entries[1].text.strip().replace(" ", "-")
    file_name = date + "_" + opponent
    with open("{}/{}.xml".format(BOXSCORE_DIR, file_name), "w") as f:
        f.write(boxscore_text)
    with open("./{}/{}_plays.xml".format(PLAYS_DIR, file_name), "w") as f:
        f.write(play_text)
    # Progress indicator.
    print("{}/{}".format(k + 1, len(rows)))
|
[
"urllib.request.urlopen",
"os.path.exists",
"os.makedirs",
"os.path.basename"
] |
[((487, 515), 'os.path.exists', 'os.path.exists', (['BOXSCORE_DIR'], {}), '(BOXSCORE_DIR)\n', (501, 515), False, 'import os\n'), ((521, 546), 'os.makedirs', 'os.makedirs', (['BOXSCORE_DIR'], {}), '(BOXSCORE_DIR)\n', (532, 546), False, 'import os\n'), ((555, 580), 'os.path.exists', 'os.path.exists', (['PLAYS_DIR'], {}), '(PLAYS_DIR)\n', (569, 580), False, 'import os\n'), ((586, 608), 'os.makedirs', 'os.makedirs', (['PLAYS_DIR'], {}), '(PLAYS_DIR)\n', (597, 608), False, 'import os\n'), ((755, 795), 'os.path.basename', 'os.path.basename', (["box_score_link['href']"], {}), "(box_score_link['href'])\n", (771, 795), False, 'import os\n'), ((891, 907), 'urllib.request.urlopen', 'req.urlopen', (['url'], {}), '(url)\n', (902, 907), True, 'import urllib.request as req\n'), ((1048, 1069), 'urllib.request.urlopen', 'req.urlopen', (['play_url'], {}), '(play_url)\n', (1059, 1069), True, 'import urllib.request as req\n')]
|
#!/usr/bin/env python3
# Copyright 2020 The Johns Hopkins University Applied Physics Laboratory LLC
# All rights reserved.
#
# Licensed under the 3-Clause BSD License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch.nn as nn
import lantern
import z3
def main():
    """Lantern demo"""
    # Build a small PyTorch network.
    # Lantern currently supports: Linear, ReLU, Hardtanh, Dropout, Identity
    model = nn.Sequential(
        nn.Linear(2, 5),
        nn.ReLU(),
        nn.Linear(5, 1),
        nn.ReLU())
    print("A PyTorch network:")
    print(model)
    print()
    # Training is skipped for this demo; the randomly initialized weights
    # are sufficient to demonstrate the translation to z3.
    print("Network parameters:")
    print(list(model.parameters()))
    print()
    # lantern.as_z3(model) yields a triple of z3 constraints, input
    # variables, and output variables that mirror the network's behavior.
    # By default, lantern assumes Real-sorted variables.
    clauses, inputs, outputs = lantern.as_z3(model)
    print("Z3 constraints, input variables, output variables (Real-sorted):")
    print(clauses)
    print(inputs)
    print(outputs)
    print()
    # Ask z3 for any satisfying assignment.
    print("A satisfying assignment to the variables in this network:")
    z3.solve(clauses)
    print()
    # Run the network "backwards": find an input (if any) that makes the
    # network output 0.
    clauses.append(outputs[0] == 0)
    print("An assignment such that the output variable is 0:")
    z3.solve(clauses)
    print()
    # Switch to a floating-point sort for a more faithful model; PyTorch
    # defaults to single precision. Rounding truncates each parameter's
    # mantissa (here to 10 bits, i.e. half precision) to speed up solving,
    # while the 11-bit exponent keeps values representable as Python floats.
    truncated_model = lantern.round_model(model, 10)
    clauses, inputs, outputs = lantern.as_z3(truncated_model,
                                             sort=z3.FPSort(11, 10))
    print("Z3 constraints, input, output (FPSort(11, 10)):")
    print(clauses)
    print(inputs)
    print(outputs)
    print()
    # Constrain the output to 0.0 and solve with a FloatingPoint-theory solver.
    print("An assignment such that the output variable is 0.0:")
    clauses.append(outputs[0] == 0.0)
    z3.solve_using(z3.SolverFor("QF_FP"), *clauses)
    print()
    # All constraints and variables above are ordinary Z3Py objects.
    print("Happy hacking!")


if __name__ == "__main__":
    main()
|
[
"z3.solve",
"torch.nn.ReLU",
"lantern.as_z3",
"z3.FPSort",
"torch.nn.Linear",
"lantern.round_model",
"z3.SolverFor"
] |
[((1556, 1574), 'lantern.as_z3', 'lantern.as_z3', (['net'], {}), '(net)\n', (1569, 1574), False, 'import lantern\n'), ((1949, 1970), 'z3.solve', 'z3.solve', (['constraints'], {}), '(constraints)\n', (1957, 1970), False, 'import z3\n'), ((2246, 2267), 'z3.solve', 'z3.solve', (['constraints'], {}), '(constraints)\n', (2254, 2267), False, 'import z3\n'), ((2770, 2798), 'lantern.round_model', 'lantern.round_model', (['net', '(10)'], {}), '(net, 10)\n', (2789, 2798), False, 'import lantern\n'), ((898, 913), 'torch.nn.Linear', 'nn.Linear', (['(2)', '(5)'], {}), '(2, 5)\n', (907, 913), True, 'import torch.nn as nn\n'), ((931, 940), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (938, 940), True, 'import torch.nn as nn\n'), ((958, 973), 'torch.nn.Linear', 'nn.Linear', (['(5)', '(1)'], {}), '(5, 1)\n', (967, 973), True, 'import torch.nn as nn\n'), ((991, 1000), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (998, 1000), True, 'import torch.nn as nn\n'), ((3319, 3340), 'z3.SolverFor', 'z3.SolverFor', (['"""QF_FP"""'], {}), "('QF_FP')\n", (3331, 3340), False, 'import z3\n'), ((2919, 2936), 'z3.FPSort', 'z3.FPSort', (['(11)', '(10)'], {}), '(11, 10)\n', (2928, 2936), False, 'import z3\n')]
|
from flask import flash, redirect, url_for
def flash_and_redirect_to_index(message, category):
    """Flash *message* under *category*, then redirect to the Jekyll welcome page."""
    flash(message, category)
    welcome_url = url_for('jekyll.welcome')
    return redirect(welcome_url)
|
[
"flask.url_for",
"flask.flash"
] |
[((101, 125), 'flask.flash', 'flash', (['message', 'category'], {}), '(message, category)\n', (106, 125), False, 'from flask import flash, redirect, url_for\n'), ((146, 171), 'flask.url_for', 'url_for', (['"""jekyll.welcome"""'], {}), "('jekyll.welcome')\n", (153, 171), False, 'from flask import flash, redirect, url_for\n')]
|
from ..base import MultiGridEnv
from .empty import EmptyMultiGrid
from .doorkey import DoorKeyEnv
from .cluttered import ClutteredMultiGrid
from .goalcycle import ClutteredGoalCycleEnv
from .open_doors import OpenDoorsMultiGrid
from .viz_test import VisibilityTestEnv
from ..agents import GridAgentInterface
from gym.envs.registration import register as gym_register
import sys
import inspect
import random
this_module = sys.modules[__name__]
registered_envs = []
def register_marl_env(
    env_name,
    env_class,
    n_agents,
    grid_size,
    view_size,
    view_tile_size=8,
    view_offset=0,
    restrict_actions=None,
    agent_color=None,
    env_kwargs=None,
):
    """Build a pre-configured subclass of *env_class* and register it with gym.

    Parameters
    ----------
    env_name: gym id to register (e.g. "MarlGrid-2AgentEmpty9x9-v0").
    env_class: environment class to wrap; instantiated with the agents below.
    n_agents: number of agents; capped by the size of the color palette.
    grid_size, view_size, view_tile_size, view_offset, restrict_actions:
        forwarded to each agent's GridAgentInterface / the env constructor.
    agent_color: if given, every agent uses this color instead of one per agent.
    env_kwargs: extra keyword arguments passed to the env constructor.
    """
    # Avoid the shared-mutable-default pitfall; None means "no extra kwargs".
    if env_kwargs is None:
        env_kwargs = {}
    colors = ["red", "blue", "purple", "orange", "olive", "pink"]
    assert n_agents <= len(colors)

    class RegEnv(env_class):
        def __new__(cls):
            instance = super(env_class, RegEnv).__new__(env_class)
            instance.__init__(
                agents=[
                    GridAgentInterface(
                        color=c if agent_color is None else agent_color,
                        view_size=view_size,
                        # BUGFIX: honor the caller's view_tile_size instead of
                        # hard-coding 8 (e.g. "Goalcycle-demo-solo-v0" passes 5
                        # and was silently getting 8).
                        view_tile_size=view_tile_size,
                        view_offset=view_offset,
                        restrict_actions=restrict_actions
                    )
                    for c in colors[:n_agents]
                ],
                grid_size=grid_size,
                **env_kwargs,
            )
            return instance

    # Expose the generated class as a module attribute so gym's entry_point
    # string "marlgrid.envs:env_N" can resolve it.
    env_class_name = f"env_{len(registered_envs)}"
    setattr(this_module, env_class_name, RegEnv)
    registered_envs.append(env_name)
    gym_register(env_name, entry_point=f"marlgrid.envs:{env_class_name}")
def env_from_config(env_config, randomize_seed=True):
    """Instantiate the MultiGridEnv subclass named by env_config['env_class'].

    All other keys of *env_config* are passed to the constructor; when
    *randomize_seed* is true, a random offset is added to the seed.
    """
    candidates = {
        name: cls
        for name, cls in globals().items()
        if inspect.isclass(cls) and issubclass(cls, MultiGridEnv)
    }
    chosen_class = candidates[env_config['env_class']]
    ctor_kwargs = {key: val for key, val in env_config.items() if key != 'env_class'}
    if randomize_seed:
        ctor_kwargs['seed'] = ctor_kwargs.get('seed', 0) + random.randint(0, 1337 * 1337)
    return chosen_class(**ctor_kwargs)
# --- Cluttered environments (random obstacles scattered through the grid) ---
# NOTE(review): the id says 15x15 but grid_size is 11 — looks inconsistent; confirm.
register_marl_env(
    "MarlGrid-1AgentCluttered15x15-v0",
    ClutteredMultiGrid,
    n_agents=1,
    grid_size=11,
    view_size=5,
    env_kwargs={'n_clutter':30}
)
register_marl_env(
    "MarlGrid-3AgentCluttered11x11-v0",
    ClutteredMultiGrid,
    n_agents=3,
    grid_size=11,
    view_size=7,
    env_kwargs={'clutter_density':0.15}
)
register_marl_env(
    "MarlGrid-3AgentCluttered15x15-v0",
    ClutteredMultiGrid,
    n_agents=3,
    grid_size=15,
    view_size=7,
    env_kwargs={'clutter_density':0.15}
)
# --- Empty-room environments with 2-4 agents ---
register_marl_env(
    "MarlGrid-2AgentEmpty9x9-v0", EmptyMultiGrid, n_agents=2, grid_size=9, view_size=7
)
register_marl_env(
    "MarlGrid-3AgentEmpty9x9-v0", EmptyMultiGrid, n_agents=3, grid_size=9, view_size=7
)
register_marl_env(
    "MarlGrid-4AgentEmpty9x9-v0", EmptyMultiGrid, n_agents=4, grid_size=9, view_size=7
)
# --- Goal-cycle demo (single agent, bonus tiles) ---
register_marl_env(
    "Goalcycle-demo-solo-v0",
    ClutteredGoalCycleEnv,
    n_agents=1,
    grid_size=13,
    view_size=7,
    view_tile_size=5,
    view_offset=1,
    env_kwargs={
        'clutter_density':0.1,
        'n_bonus_tiles': 3
    }
)
# --- Door-opening environments (restricted 4-action space) ---
register_marl_env(
    "OpenDoors-agent3-view5-11x11-v0",
    OpenDoorsMultiGrid,
    n_agents=3,
    grid_size=11,
    view_size=5,
    restrict_actions=4,
)
register_marl_env(
    "OpenDoors-agent2-view3-4x4-v0",
    OpenDoorsMultiGrid,
    n_agents=2,
    grid_size=4,
    view_size=3,
    restrict_actions=4,
)
|
[
"inspect.isclass",
"random.randint",
"gym.envs.registration.register"
] |
[((1608, 1677), 'gym.envs.registration.register', 'gym_register', (['env_name'], {'entry_point': 'f"""marlgrid.envs:{env_class_name}"""'}), "(env_name, entry_point=f'marlgrid.envs:{env_class_name}')\n", (1620, 1677), True, 'from gym.envs.registration import register as gym_register\n'), ((2061, 2091), 'random.randint', 'random.randint', (['(0)', '(1337 * 1337)'], {}), '(0, 1337 * 1337)\n', (2075, 2091), False, 'import random\n'), ((1791, 1809), 'inspect.isclass', 'inspect.isclass', (['v'], {}), '(v)\n', (1806, 1809), False, 'import inspect\n')]
|
import gws.tools.units
import _test.util as u
def test_parse():
    """Exercise gws.tools.units.parse: conversions between units and error cases."""
    # (value, allowed units, expected (number, unit)) — no explicit default.
    conversion_cases = [
        ('24.5mm', ['px', 'mm'], (24.5, 'mm')),
        ('24.5 m', ['px', 'mm'], (24500, 'mm')),
        ('1234 mm', ['px', 'm'], (1.234, 'm')),
        ('1234 cm', ['px', 'm'], (12.34, 'm')),
        ('1234 cm', ['px', 'km'], (0.01234, 'km')),
    ]
    for value, units, expected in conversion_cases:
        assert gws.tools.units.parse(value, units=units) == expected
    # Bare numbers fall back to the default unit when one is given.
    assert gws.tools.units.parse(1234, units=['px', 'm'], default='px') == (1234, 'px')
    assert gws.tools.units.parse('1234', units=['px', 'm'], default='px') == (1234, 'px')
    # Without a default, bare numbers and unknown units are rejected.
    for bad_value in ('1234', '1234 in', '1234 BLAH'):
        with u.raises(ValueError):
            gws.tools.units.parse(bad_value, units=['px', 'm'])
|
[
"_test.util.raises"
] |
[((815, 835), '_test.util.raises', 'u.raises', (['ValueError'], {}), '(ValueError)\n', (823, 835), True, 'import _test.util as u\n'), ((913, 933), '_test.util.raises', 'u.raises', (['ValueError'], {}), '(ValueError)\n', (921, 933), True, 'import _test.util as u\n'), ((1014, 1034), '_test.util.raises', 'u.raises', (['ValueError'], {}), '(ValueError)\n', (1022, 1034), True, 'import _test.util as u\n')]
|
# Generated by Django 2.0.8 on 2018-08-31 08:35
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration for the diary app (Django 2.0.8).

    dependencies = [
        ('diary', '0003_auto_20180829_2123'),
    ]

    operations = [
        # Order user answers newest-first by default.
        migrations.AlterModelOptions(
            name='user_answer',
            options={'ordering': ['-created_at']},
        ),
        # Cap question messages at 30 characters.
        migrations.AlterField(
            model_name='question',
            name='message',
            field=models.TextField(max_length=30),
        ),
    ]
|
[
"django.db.models.TextField",
"django.db.migrations.AlterModelOptions"
] |
[((233, 325), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""user_answer"""', 'options': "{'ordering': ['-created_at']}"}), "(name='user_answer', options={'ordering': [\n '-created_at']})\n", (261, 325), False, 'from django.db import migrations, models\n'), ((469, 500), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (485, 500), False, 'from django.db import migrations, models\n')]
|
import sqlite3
def conSQLite(db):
    """Open a SQLite database and return its connection and a cursor.

    Parameters
    ----------
    db: the literal string 'memory' for an in-memory database, or a
        filesystem path passed straight to sqlite3.connect.

    Returns
    -------
    (connection, cursor) on success, or the integer 0 when the database
    could not be opened (kept for backward compatibility with callers
    that compare the result against 0).
    """
    if db == 'memory':
        con = sqlite3.connect(':memory:')
    else:
        try:
            con = sqlite3.connect(db)
        except sqlite3.Error:
            # Narrowed from a bare `except:`: only database errors mean
            # "could not open"; anything else (KeyboardInterrupt, bugs)
            # should propagate instead of being silently swallowed.
            return 0
    cursor = con.cursor()
    return con, cursor
|
[
"sqlite3.connect"
] |
[((78, 105), 'sqlite3.connect', 'sqlite3.connect', (['""":memory:"""'], {}), "(':memory:')\n", (93, 105), False, 'import sqlite3\n'), ((150, 169), 'sqlite3.connect', 'sqlite3.connect', (['db'], {}), '(db)\n', (165, 169), False, 'import sqlite3\n')]
|
# Python program killing
# a thread using ._stop()
# function
# This kills the program after the period of time denoted as sleep_time (In seconds)
# It determines for how long the relay remains on
#
import time
import threading
import RPi.GPIO as GPIO
sleep_time = 5
class Ignition(threading.Thread):
    """Relay-control thread.

    Turns the relay on, keeps it on for ``sleep_time`` seconds, then waits
    until stop() is called before switching the relay off and recoloring
    the pixel strip.
    """

    def __init__(self, actuator, pixels, *args, **kwargs):
        super(Ignition, self).__init__(*args, **kwargs)
        # BUGFIX: threading.Thread defines an internal _stop() *method* that
        # the interpreter calls when a thread finishes; shadowing it with an
        # Event breaks join() with "TypeError: 'Event' object is not callable".
        # Store the event under a non-conflicting name instead.
        self._stop_event = threading.Event()
        self._actuator = actuator
        pixels.fill((255, 0, 0))  # red while the ignition sequence is armed
        self._pixels = pixels

    def stop(self):
        """Request that the relay be switched off."""
        self._stop_event.set()

    def stopped(self):
        """Return True once stop() has been requested."""
        # is_set() replaces the deprecated isSet() alias.
        return self._stop_event.is_set()

    def run(self):
        # Turn the relay on.
        GPIO.output(self._actuator, GPIO.HIGH)
        # Keep it on for the selected period of time (sleep_time).
        time.sleep(sleep_time)
        # Block until stop() is requested instead of busy-spinning at 100% CPU
        # (the original `while True: if self.stopped()` loop never slept).
        self._stop_event.wait()
        GPIO.output(self._actuator, GPIO.LOW)
        # Change the color of the pixel led strip.
        self._pixels.fill((0, 0, 255))
|
[
"RPi.GPIO.output",
"threading.Event",
"time.sleep"
] |
[((574, 591), 'threading.Event', 'threading.Event', ([], {}), '()\n', (589, 591), False, 'import threading\n'), ((891, 929), 'RPi.GPIO.output', 'GPIO.output', (['self._actuator', 'GPIO.HIGH'], {}), '(self._actuator, GPIO.HIGH)\n', (902, 929), True, 'import RPi.GPIO as GPIO\n'), ((993, 1015), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (1003, 1015), False, 'import time\n'), ((1107, 1144), 'RPi.GPIO.output', 'GPIO.output', (['self._actuator', 'GPIO.LOW'], {}), '(self._actuator, GPIO.LOW)\n', (1118, 1144), True, 'import RPi.GPIO as GPIO\n')]
|
import collections
import itertools
import pytest
@pytest.fixture(autouse=True)
def wait_for_first_block(web3, wait_for_block, skip_if_testrpc):
    # Autouse fixture: skip the test on testrpc backends, then block until
    # the chain has produced at least one block so transactions can be mined.
    skip_if_testrpc(web3)
    wait_for_block(web3)
def test_eth_getTransactionFromBlock(web3, extra_accounts, wait_for_transaction):
    """Transactions fetched by (block identifier, index) must match the block's own list."""
    current_block_number = web3.eth.blockNumber
    # Fire off a handful of small transfers.
    transaction_hashes = [
        web3.eth.sendTransaction({
            "from": web3.eth.coinbase,
            "to": extra_accounts[1],
            "value": 1,
        })
        for _ in range(5)
    ]
    # Block until every transaction has been mined.
    for txn_hash in transaction_hashes:
        wait_for_transaction(web3, txn_hash)
    # Receipts grouped by mining block; groupby requires sorted input.
    sorted_receipts = sorted(
        (web3.eth.getTransactionReceipt(txn_hash) for txn_hash in transaction_hashes),
        key=lambda receipt: receipt['blockNumber'],
    )
    receipts_by_block = {
        number: tuple(group)
        for number, group in itertools.groupby(sorted_receipts, lambda r: r['blockNumber'])
    }
    # Each transaction must be retrievable both by block number and block hash.
    for block_number in receipts_by_block:
        block = web3.eth.getBlock(block_number)
        for txn_idx, txn_hash in enumerate(block['transactions']):
            for block_identifier in (block_number, block['hash']):
                txn = web3.eth.getTransactionFromBlock(block_identifier, txn_idx)
                assert txn['hash'] == txn_hash
|
[
"itertools.groupby",
"pytest.fixture"
] |
[((53, 81), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (67, 81), False, 'import pytest\n'), ((979, 1038), 'itertools.groupby', 'itertools.groupby', (['all_receipts', "(lambda r: r['blockNumber'])"], {}), "(all_receipts, lambda r: r['blockNumber'])\n", (996, 1038), False, 'import itertools\n')]
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ecs_service
short_description: create, terminate, start or stop a service in ecs
description:
- Creates or terminates ecs services.
notes:
- the service role specified must be assumable (i.e. have a trust relationship for the ecs service, ecs.amazonaws.com)
- for details of the parameters and returns see U(https://boto3.readthedocs.io/en/latest/reference/services/ecs.html)
- An IAM role must have been previously created
version_added: "2.1"
author:
- "<NAME> (@Java1Guy)"
- "<NAME> (@kaczynskid)"
- "<NAME> (@simplesteph)"
- "<NAME> (@zacblazic)"
requirements: [ json, botocore, boto3 ]
options:
state:
description:
- The desired state of the service
required: true
choices: ["present", "absent", "deleting"]
name:
description:
- The name of the service
required: true
cluster:
description:
- The name of the cluster in which the service exists
required: false
task_definition:
description:
- The task definition the service will run. This parameter is required when state=present
required: false
load_balancers:
description:
- The list of ELBs defined for this service
required: false
desired_count:
description:
- The count of how many instances of the service. This parameter is required when state=present
required: false
client_token:
description:
- Unique, case-sensitive identifier you provide to ensure the idempotency of the request. Up to 32 ASCII characters are allowed.
required: false
role:
description:
- The name or full Amazon Resource Name (ARN) of the IAM role that allows your Amazon ECS container agent to make calls to your load balancer
on your behalf. This parameter is only required if you are using a load balancer with your service, in a network mode other than `awsvpc`.
required: false
delay:
description:
- The time to wait before checking that the service is available
required: false
default: 10
repeat:
description:
- The number of times to check that the service is available
required: false
default: 10
force_new_deployment:
description:
- Force deployment of service even if there are no changes
required: false
version_added: 2.8
type: bool
deployment_configuration:
description:
- Optional parameters that control the deployment_configuration; format is '{"maximum_percent":<integer>, "minimum_healthy_percent":<integer>}
required: false
version_added: 2.3
placement_constraints:
description:
- The placement constraints for the tasks in the service
required: false
version_added: 2.4
placement_strategy:
description:
- The placement strategy objects to use for tasks in your service. You can specify a maximum of 5 strategy rules per service
required: false
version_added: 2.4
network_configuration:
description:
- network configuration of the service. Only applicable for task definitions created with C(awsvpc) I(network_mode).
- assign_public_ip requires botocore >= 1.8.4
suboptions:
subnets:
description:
- A list of subnet IDs to associate with the task
version_added: 2.6
security_groups:
description:
- A list of security group names or group IDs to associate with the task
version_added: 2.6
assign_public_ip:
description:
- Whether the task's elastic network interface receives a public IP address. This option requires botocore >= 1.8.4.
type: bool
version_added: 2.7
launch_type:
description:
- The launch type on which to run your service
required: false
version_added: 2.7
choices: ["EC2", "FARGATE"]
health_check_grace_period_seconds:
description:
- Seconds to wait before health checking the freshly added/updated services. This option requires botocore >= 1.8.20.
required: false
version_added: 2.8
service_registries:
description:
- describes service discovery registries this service will register with.
required: false
version_added: 2.8
suboptions:
container_name:
description:
- container name for service discovery registration
container_port:
description:
- container port for service discovery registration
arn:
description:
- Service discovery registry ARN
scheduling_strategy:
description:
- The scheduling strategy, defaults to "REPLICA" if not given to preserve previous behavior
required: false
version_added: 2.8
choices: ["DAEMON", "REPLICA"]
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Basic provisioning example
- ecs_service:
state: present
name: console-test-service
cluster: new_cluster
task_definition: 'new_cluster-task:1'
desired_count: 0
- name: create ECS service on VPC network
ecs_service:
state: present
name: console-test-service
cluster: new_cluster
task_definition: 'new_cluster-task:1'
desired_count: 0
network_configuration:
subnets:
- subnet-abcd1234
security_groups:
- sg-aaaa1111
- my_security_group
# Simple example to delete
- ecs_service:
name: default
state: absent
cluster: new_cluster
# With custom deployment configuration (added in version 2.3), placement constraints and strategy (added in version 2.4)
- ecs_service:
state: present
name: test-service
cluster: test-cluster
task_definition: test-task-definition
desired_count: 3
deployment_configuration:
minimum_healthy_percent: 75
maximum_percent: 150
placement_constraints:
- type: memberOf
expression: 'attribute:flavor==test'
placement_strategy:
- type: binpack
field: memory
'''
RETURN = '''
service:
description: Details of created service.
returned: when creating a service
type: complex
contains:
clusterArn:
description: The Amazon Resource Name (ARN) of the of the cluster that hosts the service.
returned: always
type: str
desiredCount:
description: The desired number of instantiations of the task definition to keep running on the service.
returned: always
type: int
loadBalancers:
description: A list of load balancer objects
returned: always
type: complex
contains:
loadBalancerName:
description: the name
returned: always
type: str
containerName:
description: The name of the container to associate with the load balancer.
returned: always
type: str
containerPort:
description: The port on the container to associate with the load balancer.
returned: always
type: int
pendingCount:
description: The number of tasks in the cluster that are in the PENDING state.
returned: always
type: int
runningCount:
description: The number of tasks in the cluster that are in the RUNNING state.
returned: always
type: int
serviceArn:
description: The Amazon Resource Name (ARN) that identifies the service. The ARN contains the arn:aws:ecs namespace, followed by the region
of the service, the AWS account ID of the service owner, the service namespace, and then the service name. For example,
arn:aws:ecs:region :012345678910 :service/my-service .
returned: always
type: str
serviceName:
description: A user-generated string used to identify the service
returned: always
type: str
status:
description: The valid values are ACTIVE, DRAINING, or INACTIVE.
returned: always
type: str
taskDefinition:
description: The ARN of a task definition to use for tasks in the service.
returned: always
type: str
deployments:
description: list of service deployments
returned: always
type: list of complex
deploymentConfiguration:
description: dictionary of deploymentConfiguration
returned: always
type: complex
contains:
maximumPercent:
description: maximumPercent param
returned: always
type: int
minimumHealthyPercent:
description: minimumHealthyPercent param
returned: always
type: int
events:
description: list of service events
returned: always
type: list of complex
placementConstraints:
description: List of placement constraints objects
returned: always
type: list of complex
contains:
type:
description: The type of constraint. Valid values are distinctInstance and memberOf.
returned: always
type: str
expression:
description: A cluster query language expression to apply to the constraint. Note you cannot specify an expression if the constraint type is
distinctInstance.
returned: always
type: str
placementStrategy:
description: List of placement strategy objects
returned: always
type: list of complex
contains:
type:
description: The type of placement strategy. Valid values are random, spread and binpack.
returned: always
type: str
field:
description: The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId
(or host, which has the same effect), or any platform or custom attribute that is applied to a container instance,
such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are CPU and MEMORY.
returned: always
type: str
ansible_facts:
description: Facts about deleted service.
returned: when deleting a service
type: complex
contains:
service:
description: Details of deleted service in the same structure described above for service creation.
returned: when service existed and was deleted
type: complex
'''
import time
DEPLOYMENT_CONFIGURATION_TYPE_MAP = {
'maximum_percent': 'int',
'minimum_healthy_percent': 'int'
}
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import ec2_argument_spec
from ansible.module_utils.ec2 import snake_dict_to_camel_dict, map_complex_type, get_ec2_security_group_ids_from_names
try:
import botocore
except ImportError:
pass # handled by AnsibleAWSModule
class EcsServiceManager:
    """Handles ECS Services: describe, create, update and delete operations."""

    def __init__(self, module):
        self.module = module
        self.ecs = module.client('ecs')
        self.ec2 = module.client('ec2')

    def format_network_configuration(self, network_config):
        """Translate the module's network_configuration into boto3's awsvpcConfiguration shape."""
        result = dict()
        if network_config['subnets'] is not None:
            result['subnets'] = network_config['subnets']
        else:
            self.module.fail_json(msg="Network configuration must include subnets")
        if network_config['security_groups'] is not None:
            groups = network_config['security_groups']
            # Security group *names* must be resolved to sg- IDs within the
            # VPC that the first subnet belongs to.
            if any(not sg.startswith('sg-') for sg in groups):
                try:
                    vpc_id = self.ec2.describe_subnets(SubnetIds=[result['subnets'][0]])['Subnets'][0]['VpcId']
                    groups = get_ec2_security_group_ids_from_names(groups, self.ec2, vpc_id)
                except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
                    self.module.fail_json_aws(e, msg="Couldn't look up security groups")
            result['securityGroups'] = groups
        if network_config['assign_public_ip'] is not None:
            # assignPublicIp is only supported by botocore >= 1.8.4.
            if self.module.botocore_at_least('1.8.4'):
                if network_config['assign_public_ip'] is True:
                    result['assignPublicIp'] = "ENABLED"
                else:
                    result['assignPublicIp'] = "DISABLED"
            else:
                self.module.fail_json(msg='botocore needs to be version 1.8.4 or higher to use assign_public_ip in network_configuration')
        return dict(awsvpcConfiguration=result)

    def find_in_array(self, array_of_services, service_name, field_name='serviceArn'):
        """Return the first entry whose *field_name* ends with *service_name*, else None."""
        for c in array_of_services:
            if c[field_name].endswith(service_name):
                return c
        return None

    def describe_service(self, cluster_name, service_name):
        """Return the service description, None when it is reported MISSING, or raise."""
        response = self.ecs.describe_services(
            cluster=cluster_name,
            services=[service_name])
        msg = ''
        if len(response['failures']) > 0:
            c = self.find_in_array(response['failures'], service_name, 'arn')
            # BUGFIX: guard before indexing — find_in_array may return None,
            # and the original read c['reason'] unconditionally, raising
            # TypeError instead of the diagnostic Exception below.
            if c:
                msg += ", failure reason is " + c['reason']
                if c['reason'] == 'MISSING':
                    return None
            # fall thru and look through found ones
        if len(response['services']) > 0:
            c = self.find_in_array(response['services'], service_name)
            if c:
                return c
        raise Exception("Unknown problem describing service %s." % service_name)

    def is_matching_service(self, expected, existing):
        """Compare desired module params against an existing service description."""
        if expected['task_definition'] != existing['taskDefinition']:
            return False

        if (expected['load_balancers'] or []) != existing['loadBalancers']:
            return False

        # expected is params. DAEMON scheduling strategy returns desired count equal to
        # number of instances running; don't check desired count if scheduling strat is daemon
        if (expected['scheduling_strategy'] != 'DAEMON'):
            if (expected['desired_count'] or 0) != existing['desiredCount']:
                return False

        return True

    def create_service(self, service_name, cluster_name, task_definition, load_balancers,
                       desired_count, client_token, role, deployment_configuration,
                       placement_constraints, placement_strategy, health_check_grace_period_seconds,
                       network_configuration, service_registries, launch_type, scheduling_strategy):
        """Create the service, passing optional parameters only when set, and return it JSON-safe."""
        params = dict(
            cluster=cluster_name,
            serviceName=service_name,
            taskDefinition=task_definition,
            loadBalancers=load_balancers,
            clientToken=client_token,
            role=role,
            deploymentConfiguration=deployment_configuration,
            placementConstraints=placement_constraints,
            placementStrategy=placement_strategy
        )
        if network_configuration:
            params['networkConfiguration'] = network_configuration
        if launch_type:
            params['launchType'] = launch_type
        if self.health_check_setable(params) and health_check_grace_period_seconds is not None:
            params['healthCheckGracePeriodSeconds'] = health_check_grace_period_seconds
        if service_registries:
            params['serviceRegistries'] = service_registries

        # desired count is not required if scheduling strategy is daemon
        if desired_count is not None:
            params['desiredCount'] = desired_count

        if scheduling_strategy:
            params['schedulingStrategy'] = scheduling_strategy
        response = self.ecs.create_service(**params)
        return self.jsonize(response['service'])

    def update_service(self, service_name, cluster_name, task_definition,
                       desired_count, deployment_configuration, network_configuration,
                       health_check_grace_period_seconds, force_new_deployment):
        """Update mutable attributes of an existing service and return it JSON-safe."""
        params = dict(
            cluster=cluster_name,
            service=service_name,
            taskDefinition=task_definition,
            deploymentConfiguration=deployment_configuration)
        if network_configuration:
            params['networkConfiguration'] = network_configuration
        if force_new_deployment:
            params['forceNewDeployment'] = force_new_deployment
        if health_check_grace_period_seconds is not None:
            params['healthCheckGracePeriodSeconds'] = health_check_grace_period_seconds
        # desired count is not required if scheduling strategy is daemon
        if desired_count is not None:
            params['desiredCount'] = desired_count

        response = self.ecs.update_service(**params)
        return self.jsonize(response['service'])

    def jsonize(self, service):
        """Stringify datetime fields in-place so the service dict is JSON serializable."""
        # some fields are datetime which is not JSON serializable
        # make them strings
        if 'createdAt' in service:
            service['createdAt'] = str(service['createdAt'])
        if 'deployments' in service:
            for d in service['deployments']:
                if 'createdAt' in d:
                    d['createdAt'] = str(d['createdAt'])
                if 'updatedAt' in d:
                    d['updatedAt'] = str(d['updatedAt'])
        if 'events' in service:
            for e in service['events']:
                if 'createdAt' in e:
                    e['createdAt'] = str(e['createdAt'])
        return service

    def delete_service(self, service, cluster=None):
        """Delete *service* from *cluster* and return the raw API response."""
        return self.ecs.delete_service(cluster=cluster, service=service)

    def ecs_api_handles_network_configuration(self):
        """True when botocore is new enough to accept networkConfiguration."""
        # There doesn't seem to be a nice way to inspect botocore to look
        # for attributes (and networkConfiguration is not an explicit argument
        # to e.g. ecs.run_task, it's just passed as a keyword argument)
        return self.module.botocore_at_least('1.7.44')

    def health_check_setable(self, params):
        """True when the service has load balancers and botocore supports health check grace."""
        load_balancers = params.get('loadBalancers', [])
        # check if botocore (and thus boto3) is new enough for using the healthCheckGracePeriodSeconds parameter
        return len(load_balancers) > 0 and self.module.botocore_at_least('1.8.20')
def main():
    """Entry point: parse module arguments and converge the ECS service state.

    Handles three states: 'present' (create/update), 'absent' (delete) and
    'deleting' (poll until the service goes INACTIVE). Results are returned
    via module.exit_json / fail_json.
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        state=dict(required=True, choices=['present', 'absent', 'deleting']),
        name=dict(required=True, type='str'),
        cluster=dict(required=False, type='str'),
        task_definition=dict(required=False, type='str'),
        load_balancers=dict(required=False, default=[], type='list'),
        desired_count=dict(required=False, type='int'),
        client_token=dict(required=False, default='', type='str'),
        role=dict(required=False, default='', type='str'),
        delay=dict(required=False, type='int', default=10),
        repeat=dict(required=False, type='int', default=10),
        force_new_deployment=dict(required=False, default=False, type='bool'),
        deployment_configuration=dict(required=False, default={}, type='dict'),
        placement_constraints=dict(required=False, default=[], type='list'),
        placement_strategy=dict(required=False, default=[], type='list'),
        health_check_grace_period_seconds=dict(required=False, type='int'),
        network_configuration=dict(required=False, type='dict', options=dict(
            subnets=dict(type='list'),
            security_groups=dict(type='list'),
            assign_public_ip=dict(type='bool')
        )),
        launch_type=dict(required=False, choices=['EC2', 'FARGATE']),
        service_registries=dict(required=False, type='list', default=[]),
        scheduling_strategy=dict(required=False, choices=['DAEMON', 'REPLICA'])
    ))

    module = AnsibleAWSModule(argument_spec=argument_spec,
                              supports_check_mode=True,
                              required_if=[('state', 'present', ['task_definition']),
                                           ('launch_type', 'FARGATE', ['network_configuration'])],
                              required_together=[['load_balancers', 'role']])

    # REPLICA services must state how many tasks to run.
    if module.params['state'] == 'present' and module.params['scheduling_strategy'] == 'REPLICA':
        if module.params['desired_count'] is None:
            module.fail_json(msg='state is present, scheduling_strategy is REPLICA; missing desired_count')

    service_mgr = EcsServiceManager(module)
    if module.params['network_configuration']:
        if not service_mgr.ecs_api_handles_network_configuration():
            module.fail_json(msg='botocore needs to be version 1.7.44 or higher to use network configuration')
        network_configuration = service_mgr.format_network_configuration(module.params['network_configuration'])
    else:
        network_configuration = None

    deployment_configuration = map_complex_type(module.params['deployment_configuration'],
                                                DEPLOYMENT_CONFIGURATION_TYPE_MAP)

    deploymentConfiguration = snake_dict_to_camel_dict(deployment_configuration)
    serviceRegistries = list(map(snake_dict_to_camel_dict, module.params['service_registries']))

    try:
        existing = service_mgr.describe_service(module.params['cluster'], module.params['name'])
    except Exception as e:
        module.fail_json(msg="Exception describing service '" + module.params['name'] + "' in cluster '" + module.params['cluster'] + "': " + str(e))

    results = dict(changed=False)

    # Feature gates: fail early when botocore is too old for a requested option.
    if module.params['launch_type']:
        if not module.botocore_at_least('1.8.4'):
            module.fail_json(msg='botocore needs to be version 1.8.4 or higher to use launch_type')
    if module.params['force_new_deployment']:
        if not module.botocore_at_least('1.8.4'):
            module.fail_json(msg='botocore needs to be version 1.8.4 or higher to use force_new_deployment')
    if module.params['health_check_grace_period_seconds']:
        if not module.botocore_at_least('1.8.20'):
            module.fail_json(msg='botocore needs to be version 1.8.20 or higher to use health_check_grace_period_seconds')

    if module.params['state'] == 'present':

        matching = False
        update = False

        if existing and 'status' in existing and existing['status'] == "ACTIVE":
            if module.params['force_new_deployment']:
                update = True
            elif service_mgr.is_matching_service(module.params, existing):
                matching = True
                results['service'] = existing
            else:
                update = True

        if not matching:
            if not module.check_mode:

                role = module.params['role']
                clientToken = module.params['client_token']

                loadBalancers = []
                for loadBalancer in module.params['load_balancers']:
                    if 'containerPort' in loadBalancer:
                        loadBalancer['containerPort'] = int(loadBalancer['containerPort'])
                    loadBalancers.append(loadBalancer)

                for loadBalancer in loadBalancers:
                    if 'containerPort' in loadBalancer:
                        loadBalancer['containerPort'] = int(loadBalancer['containerPort'])

                if update:
                    # check various parameters and boto versions and give a helpful error in boto is not new enough for feature
                    if module.params['scheduling_strategy']:
                        if not module.botocore_at_least('1.10.37'):
                            module.fail_json(msg='botocore needs to be version 1.10.37 or higher to use scheduling_strategy')
                        elif (existing['schedulingStrategy']) != module.params['scheduling_strategy']:
                            module.fail_json(msg="It is not possible to update the scheduling strategy of an existing service")

                    if module.params['service_registries']:
                        if not module.botocore_at_least('1.9.15'):
                            module.fail_json(msg='botocore needs to be version 1.9.15 or higher to use service_registries')
                        elif (existing['serviceRegistries'] or []) != serviceRegistries:
                            module.fail_json(msg="It is not possible to update the service registries of an existing service")

                    if (existing['loadBalancers'] or []) != loadBalancers:
                        module.fail_json(msg="It is not possible to update the load balancers of an existing service")

                    # update required
                    response = service_mgr.update_service(module.params['name'],
                                                          module.params['cluster'],
                                                          module.params['task_definition'],
                                                          module.params['desired_count'],
                                                          deploymentConfiguration,
                                                          network_configuration,
                                                          module.params['health_check_grace_period_seconds'],
                                                          module.params['force_new_deployment'])
                else:
                    try:
                        response = service_mgr.create_service(module.params['name'],
                                                              module.params['cluster'],
                                                              module.params['task_definition'],
                                                              loadBalancers,
                                                              module.params['desired_count'],
                                                              clientToken,
                                                              role,
                                                              deploymentConfiguration,
                                                              module.params['placement_constraints'],
                                                              module.params['placement_strategy'],
                                                              module.params['health_check_grace_period_seconds'],
                                                              network_configuration,
                                                              serviceRegistries,
                                                              module.params['launch_type'],
                                                              module.params['scheduling_strategy']
                                                              )
                    except botocore.exceptions.ClientError as e:
                        module.fail_json_aws(e, msg="Couldn't create service")

                results['service'] = response

            results['changed'] = True

    elif module.params['state'] == 'absent':
        if not existing:
            pass
        else:
            # it exists, so we should delete it and mark changed.
            # return info about the cluster deleted
            del existing['deployments']
            del existing['events']
            results['ansible_facts'] = existing
            if 'status' in existing and existing['status'] == "INACTIVE":
                results['changed'] = False
            else:
                if not module.check_mode:
                    try:
                        service_mgr.delete_service(
                            module.params['name'],
                            module.params['cluster']
                        )
                    except botocore.exceptions.ClientError as e:
                        module.fail_json_aws(e, msg="Couldn't delete service")
                results['changed'] = True

    elif module.params['state'] == 'deleting':
        if not existing:
            module.fail_json(msg="Service '" + module.params['name'] + " not found.")
            return
        # it exists, so we should delete it and mark changed.
        # return info about the cluster deleted
        delay = module.params['delay']
        repeat = module.params['repeat']
        time.sleep(delay)
        for i in range(repeat):
            existing = service_mgr.describe_service(module.params['cluster'], module.params['name'])
            status = existing['status']
            if status == "INACTIVE":
                results['changed'] = True
                break
            time.sleep(delay)
            # BUGFIX: use value equality, not `is`, for integer comparison —
            # identity only happens to work for CPython's small-int cache.
            if i == repeat - 1:
                module.fail_json(msg="Service still not deleted after " + str(repeat) + " tries of " + str(delay) + " seconds each.")
                return

    module.exit_json(**results)
if __name__ == '__main__':
main()
|
[
"ansible.module_utils.ec2.ec2_argument_spec",
"ansible.module_utils.aws.core.AnsibleAWSModule",
"ansible.module_utils.ec2.get_ec2_security_group_ids_from_names",
"ansible.module_utils.ec2.map_complex_type",
"time.sleep",
"ansible.module_utils.ec2.snake_dict_to_camel_dict"
] |
[((20196, 20215), 'ansible.module_utils.ec2.ec2_argument_spec', 'ec2_argument_spec', ([], {}), '()\n', (20213, 20215), False, 'from ansible.module_utils.ec2 import ec2_argument_spec\n'), ((21706, 21950), 'ansible.module_utils.aws.core.AnsibleAWSModule', 'AnsibleAWSModule', ([], {'argument_spec': 'argument_spec', 'supports_check_mode': '(True)', 'required_if': "[('state', 'present', ['task_definition']), ('launch_type', 'FARGATE', [\n 'network_configuration'])]", 'required_together': "[['load_balancers', 'role']]"}), "(argument_spec=argument_spec, supports_check_mode=True,\n required_if=[('state', 'present', ['task_definition']), ('launch_type',\n 'FARGATE', ['network_configuration'])], required_together=[[\n 'load_balancers', 'role']])\n", (21722, 21950), False, 'from ansible.module_utils.aws.core import AnsibleAWSModule\n'), ((22792, 22890), 'ansible.module_utils.ec2.map_complex_type', 'map_complex_type', (["module.params['deployment_configuration']", 'DEPLOYMENT_CONFIGURATION_TYPE_MAP'], {}), "(module.params['deployment_configuration'],\n DEPLOYMENT_CONFIGURATION_TYPE_MAP)\n", (22808, 22890), False, 'from ansible.module_utils.ec2 import snake_dict_to_camel_dict, map_complex_type, get_ec2_security_group_ids_from_names\n'), ((22966, 23016), 'ansible.module_utils.ec2.snake_dict_to_camel_dict', 'snake_dict_to_camel_dict', (['deployment_configuration'], {}), '(deployment_configuration)\n', (22990, 23016), False, 'from ansible.module_utils.ec2 import snake_dict_to_camel_dict, map_complex_type, get_ec2_security_group_ids_from_names\n'), ((30232, 30249), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (30242, 30249), False, 'import time\n'), ((13671, 13734), 'ansible.module_utils.ec2.get_ec2_security_group_ids_from_names', 'get_ec2_security_group_ids_from_names', (['groups', 'self.ec2', 'vpc_id'], {}), '(groups, self.ec2, vpc_id)\n', (13708, 13734), False, 'from ansible.module_utils.ec2 import snake_dict_to_camel_dict, map_complex_type, 
get_ec2_security_group_ids_from_names\n'), ((30536, 30553), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (30546, 30553), False, 'import time\n')]
|
from _collections import defaultdict
from collections import namedtuple
import operator
import random
import weakref
from protocolbuffers import Outfits_pb2
from animation import get_throwaway_animation_context
from animation.animation_utils import create_run_animation, flush_all_animations
from animation.arb import Arb
from animation.asm import create_asm
from element_utils import build_critical_section
from event_testing.resolver import SingleSimResolver
from gsi_handlers import outfit_change_handlers
from objects import ALL_HIDDEN_REASONS
from sims.outfits.outfit_enums import OutfitCategory, NON_RANDOMIZABLE_OUTFIT_CATEGORIES, OutfitChangeReason, OutfitFilterFlag
from sims.outfits.outfit_tuning import OutfitTuning
from singletons import DEFAULT
import element_utils
import services
import sims4.log
logger = sims4.log.Logger('Outfits', default_owner='epanero')
# A registered default-outfit override: the OutfitChangeReason to apply, its
# priority (highest wins in get_default_outfit), and a weakref to the
# requesting interaction (None when there is no interaction).
OutfitPriority = namedtuple('OutfitPriority', ('change_reason', 'priority', 'interaction_ref'))
class OutfitTrackerMixin:
    """Mixin that manages a sim's outfits: default-outfit priorities, daily
    outfit randomization, clothing-change animation elements, and per-category
    "dirty" flags that force an outfit to be re-applied.

    NOTE(review): this appears to be decompiled game code; several suspicious
    duplicated/early-return branches are flagged inline but preserved as-is.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Registered OutfitPriority tuples (see add_default_outfit_priority).
        self._default_outfit_priorities = []
        # Per-category state driving _get_random_daily_outfit: whether to
        # randomize, when we last randomized, and the chosen daily index.
        self._randomize_daily = defaultdict(lambda : True)
        self._last_randomize = defaultdict(lambda : None)
        self._daily_defaults = {}
        # Categories whose contents changed; being "dirty" makes
        # can_switch_to_outfit allow re-applying the current outfit.
        self._outfit_dirty = set()
    def add_default_outfit_priority(self, interaction, outfit_change_reason, priority):
        """Register a default-outfit override; returns a handle for removal.

        The handle is the id() of the internal tuple and is accepted by
        remove_default_outfit_priority.
        """
        interaction_ref = weakref.ref(interaction) if interaction is not None else None
        outfit_priority = OutfitPriority(outfit_change_reason, priority, interaction_ref)
        self._default_outfit_priorities.append(outfit_priority)
        return id(outfit_priority)
    def add_outfit(self, outfit_category:OutfitCategory, outfit_data):
        """Add outfit_data in the given category; returns (category, index)."""
        (outfit_category, outfit_index) = self._base.add_outfit(outfit_category, outfit_data)
        # Re-wrap the raw value returned by the native layer in the enum type.
        return (OutfitCategory(outfit_category), outfit_index)
    def can_switch_to_outfit(self, outfit_category_and_index) -> bool:
        """Return True unless the sim already wears this (non-dirty) outfit."""
        if outfit_category_and_index is None:
            return False
        if self.outfit_is_dirty(outfit_category_and_index[0]):
            # Dirty outfits must be re-applied even if currently worn.
            return True
        elif self._current_outfit == outfit_category_and_index:
            return False
        return True
    def _get_random_daily_outfit(self, outfit_category):
        """Pick (at most once per day) a random outfit index for the category.

        When re-rolling, the previous day's index is excluded so the outfit
        visibly changes whenever more than one outfit exists.
        """
        current_time = services.time_service().sim_now
        existing_default = outfit_category in self._daily_defaults
        last_randomize_time = self._last_randomize[outfit_category]
        if not existing_default or current_time.absolute_days() - last_randomize_time.absolute_days() >= 1 or current_time.day() != last_randomize_time.day():
            index = 0
            number_of_outfits = self.get_number_of_outfits_in_category(outfit_category)
            if number_of_outfits > 1:
                if existing_default:
                    # Draw from n-1 slots, then shift past the excluded
                    # previous index so it can never be re-selected.
                    index = random.randrange(number_of_outfits - 1)
                    exclusion = self._daily_defaults[outfit_category]
                    if index >= exclusion:
                        index += 1
                else:
                    index = random.randrange(number_of_outfits)
            self._daily_defaults[outfit_category] = index
            self._last_randomize[outfit_category] = current_time
        return (outfit_category, self._daily_defaults[outfit_category])
    def generate_unpopulated_outfits(self, outfit_categories):
        """Ensure each listed category has at least one generated outfit."""
        for outfit_category in outfit_categories:
            if not self.has_outfit((outfit_category, 0)):
                self.generate_outfit(outfit_category=outfit_category)
    def get_all_outfit_entries(self):
        """Yield every (category, index) pair, skipping CURRENT_OUTFIT."""
        for outfit_category in OutfitCategory:
            if outfit_category == OutfitCategory.CURRENT_OUTFIT:
                continue
            for outfit_index in range(self.get_number_of_outfits_in_category(outfit_category)):
                yield (outfit_category, outfit_index)
    def get_all_outfits(self):
        """Yield (category, outfits-in-category) pairs, skipping CURRENT_OUTFIT."""
        for outfit_category in OutfitCategory:
            if outfit_category == OutfitCategory.CURRENT_OUTFIT:
                continue
            yield (outfit_category, self.get_outfits_in_category(outfit_category))
    def get_change_outfit_element(self, outfit_category_and_index, do_spin=True, interaction=None):
        """Return a generator element that performs the outfit change.

        The returned callable runs the change (optionally with the spin
        animation collected into an Arb) when scheduled on a timeline.
        """
        def change_outfit(timeline):
            arb = Arb()
            self.try_set_current_outfit(outfit_category_and_index, do_spin=do_spin, arb=arb, interaction=interaction)
            if not arb.empty:
                clothing_element = create_run_animation(arb)
                yield from element_utils.run_child(timeline, clothing_element)
        return change_outfit
    def get_change_outfit_element_and_archive_change_reason(self, outfit_category_and_index, do_spin=True, interaction=None, change_reason=None):
        """Same as get_change_outfit_element, plus GSI archiving of the reason."""
        if outfit_change_handlers.archiver.enabled:
            outfit_change_handlers.log_outfit_change(self.get_sim_info(), outfit_category_and_index, change_reason)
        return self.get_change_outfit_element(outfit_category_and_index, do_spin, interaction)
    def get_default_outfit(self, interaction=None, resolver=None):
        """Resolve the highest-priority registered default outfit.

        NOTE(review): when no priorities are registered and neither
        interaction nor resolver is given, interaction_ref is None and the
        call below would raise TypeError — presumably callers avoid that.
        """
        default_outfit = OutfitPriority(None, 0, None)
        if self._default_outfit_priorities:
            default_outfit = max(self._default_outfit_priorities, key=operator.attrgetter('priority'))
        if interaction is not None or resolver is not None:
            return self.get_outfit_for_clothing_change(interaction, default_outfit.change_reason, resolver=resolver)
        if default_outfit.interaction_ref() is not None:
            return self.get_outfit_for_clothing_change(default_outfit.interaction_ref(), default_outfit.change_reason)
        return self._current_outfit
    def get_next_outfit_for_category(self, outfit_category):
        """Return the (category, index) slot just past the last existing one."""
        return (outfit_category, self.get_number_of_outfits_in_category(outfit_category))
    def get_number_of_outfits_in_category(self, outfit_category):
        """Return how many outfits exist in the category."""
        return len(self.get_outfits_in_category(outfit_category))
    def get_outfit(self, outfit_category:OutfitCategory, outfit_index:int):
        """Return the outfit at (category, index), generating it if missing."""
        if not self.has_outfit((outfit_category, outfit_index)):
            self.generate_outfit(outfit_category, outfit_index)
        try:
            return self._base.get_outfit(outfit_category, outfit_index)
        except RuntimeError as exception:
            # Re-raised unchanged; the try/except exists as a hook point.
            raise exception
    def get_outfit_change(self, interaction, change_reason, resolver=None, **kwargs):
        """Build a critical-section element performing the change for a reason.

        Returns None implicitly when change_reason is None.
        """
        if change_reason is not None:
            outfit_category_and_index = self.get_outfit_for_clothing_change(interaction, change_reason, resolver=resolver)
            return build_critical_section(self.get_change_outfit_element_and_archive_change_reason(outfit_category_and_index, interaction=interaction, change_reason=change_reason, **kwargs), flush_all_animations)
    def get_outfit_for_clothing_change(self, interaction, reason, resolver=None):
        """Map an OutfitChangeReason to a concrete (category, index) outfit."""
        for trait in self.get_traits():
            # Traits may remap the change reason (e.g. to a trait-specific one).
            reason = trait.get_outfit_change_reason(reason)
            if reason == OutfitChangeReason.Invalid:
                return self._current_outfit
            if reason == OutfitChangeReason.DefaultOutfit:
                return self.get_default_outfit(interaction=interaction, resolver=resolver)
            if reason == OutfitChangeReason.PreviousClothing:
                return self._previous_outfit
            if reason == OutfitChangeReason.RandomOutfit:
                return self.get_random_outfit()
            if reason == OutfitChangeReason.CurrentOutfit:
                return self._current_outfit
            elif reason == OutfitChangeReason.ExitBedNPC:
                if self.is_npc:
                    return self._previous_outfit
                return
            # NOTE(review): unconditional return inside the trait loop — the
            # tuning-based code below only runs when get_traits() is empty;
            # this looks like a decompilation artifact, preserved as-is.
            return
        resolver_to_use = resolver or interaction.get_resolver()
        outfit_change = None
        if reason in OutfitTuning.OUTFIT_CHANGE_REASONS:
            test_group_and_outfit_list = OutfitTuning.OUTFIT_CHANGE_REASONS[reason]
            for test_group_and_outfit in test_group_and_outfit_list:
                outfit_category = test_group_and_outfit.outfit_category
                if outfit_category == OutfitCategory.BATHING and not self.has_outfit_category(OutfitCategory.BATHING):
                    self.generate_outfit(OutfitCategory.BATHING, filter_flag=OutfitFilterFlag.NONE)
                if outfit_category != OutfitCategory.CURRENT_OUTFIT and not self.has_outfit_category(outfit_category):
                    continue
                if test_group_and_outfit.tests:
                    if test_group_and_outfit.tests.run_tests(resolver_to_use):
                        if test_group_and_outfit.outfit_category == OutfitCategory.CURRENT_OUTFIT or test_group_and_outfit.outfit_category == self._current_outfit[0]:
                            outfit_change = self._current_outfit
                        elif self._randomize_daily[outfit_category]:
                            outfit_change = self._get_random_daily_outfit(outfit_category)
                        else:
                            outfit_change = (outfit_category, 0)
                        break
                # NOTE(review): duplicate of the block above that also runs
                # when the tests exist but FAIL (the break above skips it only
                # on success), effectively ignoring the test result; preserved.
                if test_group_and_outfit.outfit_category == OutfitCategory.CURRENT_OUTFIT or test_group_and_outfit.outfit_category == self._current_outfit[0]:
                    outfit_change = self._current_outfit
                elif self._randomize_daily[outfit_category]:
                    outfit_change = self._get_random_daily_outfit(outfit_category)
                else:
                    outfit_change = (outfit_category, 0)
                break
        if outfit_change is None:
            outfit_change = (OutfitCategory.EVERYDAY, 0)
        outfit_change = self._run_weather_fixup(reason, outfit_change, resolver_to_use)
        outfit_change = self._run_career_fixup(outfit_change, interaction)
        return outfit_change
    def _run_weather_fixup(self, reason, outfit_change, resolver):
        """Substitute a weather-appropriate outfit for outdoor sims, if any."""
        weather_service = services.weather_service()
        if weather_service is None:
            return outfit_change
        sim = self.get_sim_instance(allow_hidden_flags=ALL_HIDDEN_REASONS)
        if sim is None:
            return outfit_change
        weather_outfit_change = weather_service.get_weather_outfit_change(resolver, reason=reason)
        if weather_outfit_change is None:
            return outfit_change
        if not sim.is_outside:
            return outfit_change
        elif reason in weather_service.WEATHER_OUFTIT_CHANGE_REASONS_TO_IGNORE:
            return outfit_change
        return weather_outfit_change
    def _run_career_fixup(self, outfit_change, interaction):
        """Regenerate the CAREER outfit for part-time careers when relevant."""
        sim = self.get_sim_instance(allow_hidden_flags=ALL_HIDDEN_REASONS)
        if sim is None:
            return outfit_change
        if not self._career_tracker.has_part_time_career_outfit():
            return outfit_change
        # NOTE(review): '== None' comparison preserved byte-for-byte; the elif
        # below is only reachable when career_uid is not None.
        if outfit_change[0] != OutfitCategory.CAREER or (interaction is None or not hasattr(interaction, 'career_uid')) or interaction.career_uid == None:
            return outfit_change
        elif interaction.career_uid is not None:
            self.remove_outfits_in_category(OutfitCategory.CAREER)
            career = self._career_tracker.get_career_by_uid(interaction.career_uid)
            career.generate_outfit()
            career_outfit_change = (OutfitCategory.CAREER, 0)
            return career_outfit_change
        return outfit_change
    def get_outfits_in_category(self, outfit_category:OutfitCategory):
        """Return the native outfit list for the category."""
        return self._base.get_outfits_in_category(outfit_category)
    def get_random_outfit(self, outfit_categories=()):
        """Pick a random randomizable outfit, optionally limited to categories.

        Falls back to the occult tracker's fallback category when no valid
        outfit exists.
        """
        valid_outfits = []
        for (outfit_category, outfit_index) in self.get_all_outfit_entries():
            if outfit_categories and outfit_category not in outfit_categories:
                continue
            if outfit_category == OutfitCategory.CURRENT_OUTFIT:
                continue
            if outfit_category in NON_RANDOMIZABLE_OUTFIT_CATEGORIES:
                continue
            valid_outfits.append((outfit_category, outfit_index))
        if valid_outfits:
            return random.choice(valid_outfits)
        return (self.occult_tracker.get_fallback_outfit_category(self.current_occult_types), 0)
    def get_sim_info(self):
        """Return self; this mixin is mixed into the sim-info object."""
        return self
    def has_outfit(self, outfit):
        """Return whether the (category, index) pair exists."""
        return self._base.has_outfit(outfit[0], outfit[1])
    def has_outfit_category(self, outfit_category):
        """Return whether the category has at least one outfit."""
        return self.has_outfit((outfit_category, 0))
    def has_cas_part(self, cas_part):
        """Return whether the current outfit contains the given CAS part."""
        try:
            return cas_part in self._base.get_outfit(*self._current_outfit).part_ids
        except RuntimeError as exception:
            logger.exception('Exception encountered trying to get the current outfit: ', exc=exception, level=sims4.log.LEVEL_ERROR)
            return False
    def is_wearing_outfit(self, category_and_index):
        """Return whether the sim currently wears this (non-dirty) outfit."""
        if self.outfit_is_dirty(category_and_index[0]):
            return False
        return self._current_outfit == category_and_index
    def load_outfits(self, outfit_msg):
        """Load outfits from a serialized OutfitList protobuf message."""
        self._base.outfits = outfit_msg.SerializeToString()
    def remove_default_outfit_priority(self, outfit_priority_id):
        """Remove the override previously returned by add_default_outfit_priority."""
        for (index, value) in enumerate(self._default_outfit_priorities):
            if id(value) == outfit_priority_id:
                self._default_outfit_priorities.pop(index)
                break
    def remove_outfit(self, outfit_category:OutfitCategory, outfit_index:int=DEFAULT):
        """Remove an outfit; DEFAULT index removes the category's last outfit."""
        outfit_index = self.get_number_of_outfits_in_category(outfit_category) - 1 if outfit_index is DEFAULT else outfit_index
        return self._base.remove_outfit(outfit_category, outfit_index)
    def remove_outfits_in_category(self, outfit_category:OutfitCategory):
        """Remove every outfit in the category."""
        while self.has_outfit((outfit_category, 0)):
            self.remove_outfit(outfit_category, 0)
    def remove_all_but_one_outfit_in_category(self, outfit_category:OutfitCategory):
        """Remove all outfits in the category except index 0."""
        while self.has_outfit((outfit_category, 1)):
            self.remove_outfit(outfit_category, 1)
    def clear_outfits_to_minimum(self):
        """Reduce outfits to the minimum: one EVERYDAY outfit, nothing else."""
        for (outfit_category, _) in self.get_all_outfits():
            if outfit_category is OutfitCategory.EVERYDAY:
                self.remove_all_but_one_outfit_in_category(outfit_category)
            else:
                self.remove_outfits_in_category(outfit_category)
    def save_outfits(self):
        """Return the outfits as a parsed OutfitList protobuf message."""
        outfits_msg = Outfits_pb2.OutfitList()
        outfits_msg.ParseFromString(self._base.outfits)
        return outfits_msg
    def set_outfit_flags(self, outfit_category:OutfitCategory, outfit_index:int, outfit_flags:int):
        """Set a 128-bit flag mask, split into two 64-bit halves for the native API."""
        outfit_flags_low = int(outfit_flags & 18446744073709551615)
        outfit_flags_high = int(outfit_flags >> 64 & 18446744073709551615)
        return self._base.set_outfit_flags(outfit_category, outfit_index, outfit_flags_low, outfit_flags_high)
    def _apply_on_outfit_changed_loot(self):
        """Apply tuned loot actions that fire whenever the outfit changes."""
        is_sim = getattr(self, 'is_sim', False)
        if is_sim:
            resolver = SingleSimResolver(self)
            for loot_action in OutfitTuning.LOOT_ON_OUTFIT_CHANGE:
                loot_action.apply_to_resolver(resolver)
    def try_set_current_outfit(self, outfit_category_and_index, do_spin=False, arb=None, interaction=None):
        """Switch to the outfit, optionally via the spin animation.

        With do_spin, the actual outfit swap happens in an ARB event handler
        mid-animation; without it (or when no sim instance / ARB is
        available) the outfit is applied immediately.
        """
        sim = self.get_sim_instance()
        if sim is None:
            do_spin = False
        if arb is None:
            logger.error('Must pass in a valid ARB for the clothing spin.')
            do_spin = False
        if self.can_switch_to_outfit(outfit_category_and_index):
            if do_spin:
                did_change = False
                def set_ending(*_, **__):
                    # Fired from the animation; guarded so the swap and its
                    # loot only happen once even if the event fires again.
                    nonlocal did_change
                    if not did_change:
                        laundry_service = services.get_laundry_service()
                        if laundry_service is not None:
                            laundry_service.on_spin_outfit_change(sim, outfit_category_and_index, interaction)
                        if self.set_current_outfit(outfit_category_and_index):
                            self._apply_on_outfit_changed_loot()
                            did_change = True
                arb.register_event_handler(set_ending, handler_id=100)
                if sim is not None:
                    animation_element_tuning = OutfitTuning.OUTFIT_CHANGE_ANIMATION
                    clothing_context = get_throwaway_animation_context()
                    clothing_change_asm = create_asm(animation_element_tuning.asm_key, context=clothing_context)
                    clothing_change_asm.update_locked_params(sim.get_transition_asm_params())
                    result = sim.posture.setup_asm_interaction(clothing_change_asm, sim, None, animation_element_tuning.actor_name, None)
                    sim.set_trait_asm_parameters(clothing_change_asm, animation_element_tuning.actor_name)
                    if not result:
                        logger.error('Could not setup asm for Clothing Change. {}', result)
                    clothing_change_asm.request(animation_element_tuning.begin_states[0], arb)
            elif self.set_current_outfit(outfit_category_and_index):
                self._apply_on_outfit_changed_loot()
    def set_outfit_dirty(self, outfit_category):
        """Mark the category as dirty (forces re-apply on next switch)."""
        self._outfit_dirty.add(outfit_category)
    def clear_outfit_dirty(self, outfit_category):
        """Clear the category's dirty flag."""
        self._outfit_dirty.discard(outfit_category)
    def outfit_is_dirty(self, outfit_category):
        """Return whether the category is flagged dirty."""
        if outfit_category in self._outfit_dirty:
            return True
        return False
|
[
"animation.animation_utils.create_run_animation",
"animation.asm.create_asm",
"animation.arb.Arb",
"animation.get_throwaway_animation_context",
"sims.outfits.outfit_enums.OutfitCategory",
"random.choice",
"services.get_laundry_service",
"services.time_service",
"event_testing.resolver.SingleSimResolver",
"operator.attrgetter",
"services.weather_service",
"random.randrange",
"collections.namedtuple",
"protocolbuffers.Outfits_pb2.OutfitList",
"element_utils.run_child",
"weakref.ref",
"_collections.defaultdict"
] |
[((891, 969), 'collections.namedtuple', 'namedtuple', (['"""OutfitPriority"""', "('change_reason', 'priority', 'interaction_ref')"], {}), "('OutfitPriority', ('change_reason', 'priority', 'interaction_ref'))\n", (901, 969), False, 'from collections import namedtuple\n'), ((1158, 1184), '_collections.defaultdict', 'defaultdict', (['(lambda : True)'], {}), '(lambda : True)\n', (1169, 1184), False, 'from _collections import defaultdict\n'), ((1216, 1242), '_collections.defaultdict', 'defaultdict', (['(lambda : None)'], {}), '(lambda : None)\n', (1227, 1242), False, 'from _collections import defaultdict\n'), ((9904, 9930), 'services.weather_service', 'services.weather_service', ([], {}), '()\n', (9928, 9930), False, 'import services\n'), ((14371, 14395), 'protocolbuffers.Outfits_pb2.OutfitList', 'Outfits_pb2.OutfitList', ([], {}), '()\n', (14393, 14395), False, 'from protocolbuffers import Outfits_pb2\n'), ((1427, 1451), 'weakref.ref', 'weakref.ref', (['interaction'], {}), '(interaction)\n', (1438, 1451), False, 'import weakref\n'), ((1860, 1891), 'sims.outfits.outfit_enums.OutfitCategory', 'OutfitCategory', (['outfit_category'], {}), '(outfit_category)\n', (1874, 1891), False, 'from sims.outfits.outfit_enums import OutfitCategory, NON_RANDOMIZABLE_OUTFIT_CATEGORIES, OutfitChangeReason, OutfitFilterFlag\n'), ((2327, 2350), 'services.time_service', 'services.time_service', ([], {}), '()\n', (2348, 2350), False, 'import services\n'), ((4312, 4317), 'animation.arb.Arb', 'Arb', ([], {}), '()\n', (4315, 4317), False, 'from animation.arb import Arb\n'), ((12070, 12098), 'random.choice', 'random.choice', (['valid_outfits'], {}), '(valid_outfits)\n', (12083, 12098), False, 'import random\n'), ((14970, 14993), 'event_testing.resolver.SingleSimResolver', 'SingleSimResolver', (['self'], {}), '(self)\n', (14987, 14993), False, 'from event_testing.resolver import SingleSimResolver\n'), ((4501, 4526), 'animation.animation_utils.create_run_animation', 'create_run_animation', 
(['arb'], {}), '(arb)\n', (4521, 4526), False, 'from animation.animation_utils import create_run_animation, flush_all_animations\n'), ((2866, 2905), 'random.randrange', 'random.randrange', (['(number_of_outfits - 1)'], {}), '(number_of_outfits - 1)\n', (2882, 2905), False, 'import random\n'), ((3104, 3139), 'random.randrange', 'random.randrange', (['number_of_outfits'], {}), '(number_of_outfits)\n', (3120, 3139), False, 'import random\n'), ((4554, 4605), 'element_utils.run_child', 'element_utils.run_child', (['timeline', 'clothing_element'], {}), '(timeline, clothing_element)\n', (4577, 4605), False, 'import element_utils\n'), ((5283, 5314), 'operator.attrgetter', 'operator.attrgetter', (['"""priority"""'], {}), "('priority')\n", (5302, 5314), False, 'import operator\n'), ((16347, 16380), 'animation.get_throwaway_animation_context', 'get_throwaway_animation_context', ([], {}), '()\n', (16378, 16380), False, 'from animation import get_throwaway_animation_context\n'), ((16423, 16493), 'animation.asm.create_asm', 'create_asm', (['animation_element_tuning.asm_key'], {'context': 'clothing_context'}), '(animation_element_tuning.asm_key, context=clothing_context)\n', (16433, 16493), False, 'from animation.asm import create_asm\n'), ((15732, 15762), 'services.get_laundry_service', 'services.get_laundry_service', ([], {}), '()\n', (15760, 15762), False, 'import services\n')]
|
import json
import os
import time
import tensorflow as tf
import shutil
import zipfile
from convlab2.dst.mdbt.mdbt import MDBT
from convlab2.dst.mdbt.mdbt_util import load_word_vectors, load_ontology, load_woz_data_new
from convlab2.util.dataloader.module_dataloader import AgentDSTDataloader
from convlab2.util.dataloader.dataset_dataloader import MultiWOZDataloader
from convlab2.util.file_util import cached_path
# Training hyper-parameters, kept at module level for reference.  This module
# only exercises inference (restore/update); none of these are read below.
train_batch_size = 1
batches_per_eval = 10
no_epochs = 600
device = "gpu"
start_batch = 0
class MultiWozMDBT(MDBT):
    def __init__(self, data_dir='configs', data=None):
        """Constructor of the MultiWozMDBT dialogue-state tracker.

        Args:
            data_dir (str): Path of the data dir, relative to this file.
            data (dict): Loaded MultiWOZ data; must contain key 'test'
                (constructing this class with data=None raises TypeError).
        """
        self.file_url = 'https://tatk-data.s3-ap-northeast-1.amazonaws.com/mdbt_multiwoz_sys.zip'
        local_path = os.path.dirname(os.path.abspath(__file__))
        self.data_dir = os.path.join(local_path, data_dir)  # abstract data path
        # Despite the *_url names, all of these are local filesystem paths.
        self.validation_url = os.path.join(self.data_dir, 'data/validate.json')
        self.training_url = os.path.join(self.data_dir, 'data/train.json')
        self.testing_url = os.path.join(self.data_dir, 'data/test.json')
        self.word_vectors_url = os.path.join(self.data_dir, 'word-vectors/paragram_300_sl999.txt')
        self.ontology_url = os.path.join(self.data_dir, 'data/ontology.json')
        self.model_url = os.path.join(self.data_dir, 'models/model-1')
        self.graph_url = os.path.join(self.data_dir, 'graphs/graph-1')
        self.results_url = os.path.join(self.data_dir, 'results/log-1.txt')
        self.kb_url = os.path.join(self.data_dir, 'data/')  # not used
        self.train_model_url = os.path.join(self.data_dir, 'train_models/model-1')
        self.train_graph_url = os.path.join(self.data_dir, 'train_graph/graph-1')
        # Fetch model/data/word-vectors if they are not present locally.
        self.auto_download()
        print('Configuring MDBT model...')
        self.word_vectors = load_word_vectors(self.word_vectors_url)
        # Load the ontology and extract the feature vectors
        self.ontology, self.ontology_vectors, self.slots = load_ontology(self.ontology_url, self.word_vectors)
        # Load and process the training data
        self.test_dialogues, self.actual_dialogues = load_woz_data_new(data['test'], self.word_vectors,
                                                                       self.ontology, url=self.testing_url)
        self.no_dialogues = len(self.test_dialogues)
        super(MultiWozMDBT, self).__init__(self.ontology_vectors, self.ontology, self.slots, self.data_dir)
    def auto_download(self):
        """Automatically download the pretrained model and necessary data."""
        if os.path.exists(os.path.join(self.data_dir, 'models')) and \
            os.path.exists(os.path.join(self.data_dir, 'data')) and \
            os.path.exists(os.path.join(self.data_dir, 'word-vectors')):
            return
        cached_path(self.file_url, self.data_dir)
        # cached_path stores the payload under a hash name plus a sidecar
        # '<hash>.json' metadata file; recover the payload name from it.
        files = os.listdir(self.data_dir)
        target_file = ''
        for name in files:
            if name.endswith('.json'):
                target_file = name[:-5]
        try:
            assert target_file in files
        except Exception as e:
            print('allennlp download file error: MDBT Multiwoz data download failed.')
            raise e
        # Copy the (extension-less) cached payload to a .zip and extract it.
        zip_file_path = os.path.join(self.data_dir, target_file+'.zip')
        shutil.copyfile(os.path.join(self.data_dir, target_file), zip_file_path)
        with zipfile.ZipFile(zip_file_path, 'r') as zip_ref:
            zip_ref.extractall(self.data_dir)
def test_update():
    """Smoke-test MultiWozMDBT.update() on a small hand-written history.

    Loads the MultiWOZ data first: MultiWozMDBT.__init__ indexes
    data['test'], so constructing it with the default data=None raised
    TypeError and this function could never run.
    """
    os.environ["CUDA_VISIBLE_DEVICES"] = '0'
    # NOTE(review): this TF1 session config is built but never passed to a
    # session here; kept in case restore() is meant to receive it.
    _config = tf.ConfigProto()
    _config.gpu_options.allow_growth = True
    _config.allow_soft_placement = True
    start_time = time.time()
    # Load the dataset exactly as the __main__ entry point does.
    data = AgentDSTDataloader(MultiWOZDataloader()).load_data()
    mdbt = MultiWozMDBT(data=data)
    print('\tMDBT: model build time: {:.2f} seconds'.format(time.time() - start_time))
    mdbt.restore()
    # demo state history
    mdbt.state['history'] = [['null', 'I\'m trying to find an expensive restaurant in the centre part of town.'],
                             ['The Cambridge Chop House is an good expensive restaurant in the centre of town. Would you like me to book it for you?',
                              'Yes, a table for 1 at 16:15 on sunday. I need the reference number.']]
    new_state = mdbt.update('hi, this is not good')
    print(json.dumps(new_state, indent=4))
    print('all time: {:.2f} seconds'.format(time.time() - start_time))
if __name__ == '__main__':
    # Load MultiWOZ through the agent DST dataloader, then build the tracker
    # (downloads model/data on first run).
    loader = AgentDSTDataloader(MultiWOZDataloader())
    data = loader.load_data()
    model = MultiWozMDBT(data=data)
|
[
"os.path.abspath",
"zipfile.ZipFile",
"convlab2.dst.mdbt.mdbt_util.load_woz_data_new",
"convlab2.dst.mdbt.mdbt_util.load_ontology",
"json.dumps",
"time.time",
"convlab2.util.dataloader.dataset_dataloader.MultiWOZDataloader",
"tensorflow.ConfigProto",
"convlab2.dst.mdbt.mdbt_util.load_word_vectors",
"convlab2.util.file_util.cached_path",
"os.path.join",
"os.listdir"
] |
[((3690, 3706), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {}), '()\n', (3704, 3706), True, 'import tensorflow as tf\n'), ((3808, 3819), 'time.time', 'time.time', ([], {}), '()\n', (3817, 3819), False, 'import time\n'), ((946, 980), 'os.path.join', 'os.path.join', (['local_path', 'data_dir'], {}), '(local_path, data_dir)\n', (958, 980), False, 'import os\n'), ((1034, 1083), 'os.path.join', 'os.path.join', (['self.data_dir', '"""data/validate.json"""'], {}), "(self.data_dir, 'data/validate.json')\n", (1046, 1083), False, 'import os\n'), ((1112, 1158), 'os.path.join', 'os.path.join', (['self.data_dir', '"""data/train.json"""'], {}), "(self.data_dir, 'data/train.json')\n", (1124, 1158), False, 'import os\n'), ((1186, 1231), 'os.path.join', 'os.path.join', (['self.data_dir', '"""data/test.json"""'], {}), "(self.data_dir, 'data/test.json')\n", (1198, 1231), False, 'import os\n'), ((1265, 1331), 'os.path.join', 'os.path.join', (['self.data_dir', '"""word-vectors/paragram_300_sl999.txt"""'], {}), "(self.data_dir, 'word-vectors/paragram_300_sl999.txt')\n", (1277, 1331), False, 'import os\n'), ((1360, 1409), 'os.path.join', 'os.path.join', (['self.data_dir', '"""data/ontology.json"""'], {}), "(self.data_dir, 'data/ontology.json')\n", (1372, 1409), False, 'import os\n'), ((1435, 1480), 'os.path.join', 'os.path.join', (['self.data_dir', '"""models/model-1"""'], {}), "(self.data_dir, 'models/model-1')\n", (1447, 1480), False, 'import os\n'), ((1506, 1551), 'os.path.join', 'os.path.join', (['self.data_dir', '"""graphs/graph-1"""'], {}), "(self.data_dir, 'graphs/graph-1')\n", (1518, 1551), False, 'import os\n'), ((1579, 1627), 'os.path.join', 'os.path.join', (['self.data_dir', '"""results/log-1.txt"""'], {}), "(self.data_dir, 'results/log-1.txt')\n", (1591, 1627), False, 'import os\n'), ((1650, 1686), 'os.path.join', 'os.path.join', (['self.data_dir', '"""data/"""'], {}), "(self.data_dir, 'data/')\n", (1662, 1686), False, 'import os\n'), ((1730, 1781), 'os.path.join', 
'os.path.join', (['self.data_dir', '"""train_models/model-1"""'], {}), "(self.data_dir, 'train_models/model-1')\n", (1742, 1781), False, 'import os\n'), ((1813, 1863), 'os.path.join', 'os.path.join', (['self.data_dir', '"""train_graph/graph-1"""'], {}), "(self.data_dir, 'train_graph/graph-1')\n", (1825, 1863), False, 'import os\n'), ((1966, 2006), 'convlab2.dst.mdbt.mdbt_util.load_word_vectors', 'load_word_vectors', (['self.word_vectors_url'], {}), '(self.word_vectors_url)\n', (1983, 2006), False, 'from convlab2.dst.mdbt.mdbt_util import load_word_vectors, load_ontology, load_woz_data_new\n'), ((2127, 2178), 'convlab2.dst.mdbt.mdbt_util.load_ontology', 'load_ontology', (['self.ontology_url', 'self.word_vectors'], {}), '(self.ontology_url, self.word_vectors)\n', (2140, 2178), False, 'from convlab2.dst.mdbt.mdbt_util import load_word_vectors, load_ontology, load_woz_data_new\n'), ((2278, 2370), 'convlab2.dst.mdbt.mdbt_util.load_woz_data_new', 'load_woz_data_new', (["data['test']", 'self.word_vectors', 'self.ontology'], {'url': 'self.testing_url'}), "(data['test'], self.word_vectors, self.ontology, url=self.\n testing_url)\n", (2295, 2370), False, 'from convlab2.dst.mdbt.mdbt_util import load_word_vectors, load_ontology, load_woz_data_new\n'), ((2944, 2985), 'convlab2.util.file_util.cached_path', 'cached_path', (['self.file_url', 'self.data_dir'], {}), '(self.file_url, self.data_dir)\n', (2955, 2985), False, 'from convlab2.util.file_util import cached_path\n'), ((3002, 3027), 'os.listdir', 'os.listdir', (['self.data_dir'], {}), '(self.data_dir)\n', (3012, 3027), False, 'import os\n'), ((3374, 3423), 'os.path.join', 'os.path.join', (['self.data_dir', "(target_file + '.zip')"], {}), "(self.data_dir, target_file + '.zip')\n", (3386, 3423), False, 'import os\n'), ((4407, 4438), 'json.dumps', 'json.dumps', (['new_state'], {'indent': '(4)'}), '(new_state, indent=4)\n', (4417, 4438), False, 'import json\n'), ((4572, 4592), 
'convlab2.util.dataloader.dataset_dataloader.MultiWOZDataloader', 'MultiWOZDataloader', ([], {}), '()\n', (4590, 4592), False, 'from convlab2.util.dataloader.dataset_dataloader import MultiWOZDataloader\n'), ((895, 920), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (910, 920), False, 'import os\n'), ((3446, 3486), 'os.path.join', 'os.path.join', (['self.data_dir', 'target_file'], {}), '(self.data_dir, target_file)\n', (3458, 3486), False, 'import os\n'), ((3516, 3551), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zip_file_path', '"""r"""'], {}), "(zip_file_path, 'r')\n", (3531, 3551), False, 'import zipfile\n'), ((2729, 2766), 'os.path.join', 'os.path.join', (['self.data_dir', '"""models"""'], {}), "(self.data_dir, 'models')\n", (2741, 2766), False, 'import os\n'), ((2801, 2836), 'os.path.join', 'os.path.join', (['self.data_dir', '"""data"""'], {}), "(self.data_dir, 'data')\n", (2813, 2836), False, 'import os\n'), ((2871, 2914), 'os.path.join', 'os.path.join', (['self.data_dir', '"""word-vectors"""'], {}), "(self.data_dir, 'word-vectors')\n", (2883, 2914), False, 'import os\n'), ((3906, 3917), 'time.time', 'time.time', ([], {}), '()\n', (3915, 3917), False, 'import time\n'), ((4484, 4495), 'time.time', 'time.time', ([], {}), '()\n', (4493, 4495), False, 'import time\n')]
|
# This file is part of the pylint-ignore project
# https://github.com/mbarkhau/pylint-ignore
#
# Copyright (c) 2020 <NAME> (<EMAIL>) - MIT License
# SPDX-License-Identifier: MIT
import re
import shutil
import typing as typ
import hashlib
import logging
import collections
import pylev
import pathlib2 as pl
logger = logging.getLogger('pylint_ignore')
IGNOREFILE_HEADER = """# Pylint-Ignore
**WARNING: This file is programatically generated.**
This file is parsed by [`pylint-ignore`](https://pypi.org/project/pylint-ignore/)
to determine which
[Pylint messages](https://pylint.pycqa.org/en/stable/technical_reference/features.html)
should be ignored.
- Do not edit this file manually.
- To update, use `pylint-ignore --update-ignorefile`
The recommended approach to using `pylint-ignore` is:
1. If a message refers to a valid issue, update your code rather than
ignoring the message.
2. If a message should *always* be ignored (globally), then to do so
via the usual `pylintrc` or `setup.cfg` files rather than this
`pylint-ignore.md` file.
3. If a message is a false positive, add a comment of this form to your code:
`# pylint:disable=<symbol> ; explain why this is a false positive`
"""
ENTRY_TEMPLATE = """
## File {entry.path} - {lineno_txt}{entry.msgid} ({entry.symbol})
- `message: {msg_text}`
- `author : {entry.author}`
- `date : {entry.date}`
{ctx_src_text}
"""
# https://regex101.com/r/ogknXY/8
_ENTRY_HEADER_PATTERN = r"""
^
\#\#\s
File\s(?P<path>.*?)
\s-\s
(?:
Line\s(?P<lineno>\d+)
\s-\s
)?
(?P<msgid>\w\d+)
\s
\((?P<symbol>.*)\)
$
"""
ENTRY_HEADER_RE = re.compile(_ENTRY_HEADER_PATTERN, flags=re.VERBOSE)
# https://regex101.com/r/6JViif/5
_LIST_ITEM_PATTERN = r"""
^
\s*-\s
`
(?P<key>message|author|date)
\s*:\s
(?P<value>.*)
`
$
"""
LIST_ITEM_RE = re.compile(_LIST_ITEM_PATTERN, flags=re.VERBOSE)
# https://regex101.com/r/Cc8w4v/5
_SOURCE_TEXT_PATTERN = r"""
(```|~~~)(?P<language>\w+)?
(
(?:\s+(?P<def_lineno>\d+):\s(?P<def_line>.*))?
\s+\.\.\.
)?
(?:\s+\d+:\s?.*)?
(?:\s+\d+:\s?.*)?
\s*\>\s+(?P<source_lineno>\d+):\s(?P<source_line>.*)
(?:\s+\d+:\s?.*)?
(?:\s+\d+:\s?.*)?
\s*
(```|~~~)
"""
SOURCE_TEXT_RE = re.compile(_SOURCE_TEXT_PATTERN, flags=re.VERBOSE)
class SourceText(typ.NamedTuple):
    """Source-context snippet around the line an ignored message refers to."""
    new_lineno  : int                    # lineno in the current file
    old_lineno  : int                    # lineno recorded in the ignorefile
    source_line : str                    # the flagged line itself
    text        : str                    # joined context lines
    start_idx   : int                    # first context line index (0-based)
    end_idx     : int                    # one past the last context line index
    def_line_idx: typ.Optional[int]      # index of the enclosing def/class line
    def_line    : typ.Optional[str]      # text of that def/class line
# SourceText is almost always Optional
MaybeSourceText = typ.Optional[SourceText]
class Key(typ.NamedTuple):
    """Stable (relatively) key to reference ignorefile.Entry values.
    The ignorefile key is relatively stable, even between edits
    to a file. In particular, it doesn't have the lineno.
    """
    msgid      : str                     # e.g. "E1101"
    path       : str
    symbol     : str                     # e.g. "no-member"
    msg_text   : str                     # matched fuzzily, see _iter_fuzzy_entries
    source_line: str                     # matched fuzzily, see _iter_fuzzy_entries
class Entry(typ.NamedTuple):
    """One parsed entry of the pylint-ignore.md file (see ENTRY_TEMPLATE)."""
    msgid    : str
    path     : str
    symbol   : str
    msg_text : str
    msg_extra: str
    author: str
    date  : str
    srctxt: MaybeSourceText
class ObsoleteEntry(Exception):
    """Raised when an ignorefile entry no longer matches the current source."""
    pass
Catalog = typ.Dict[Key, Entry]
# Fuzzy-match thresholds: a candidate is rejected when the Levenshtein
# distance exceeds the absolute bound, or the percentage bound relative to
# the longer of the two compared strings.
FUZZY_MATCH_MAX_EDIT_DISTANCE_ABS = 8
FUZZY_MATCH_MAX_EDIT_DISTANCE_PCT = 20
def _iter_candidate_keys(catalog: Catalog, search_key: Key) -> typ.Iterable[Key]:
    """Yield keys of *catalog* matching *search_key* on msgid, path and symbol.

    These are the stable fields of a Key; msg_text/source_line are compared
    separately (possibly fuzzily) by the callers.
    """
    wanted = (search_key.msgid, search_key.path, search_key.symbol)
    for candidate in catalog:
        if (candidate.msgid, candidate.path, candidate.symbol) == wanted:
            yield candidate
def _iter_fuzzy_entries(catalog: Catalog, search_key: Key) -> typ.Iterable[Entry]:
    """Yield entries whose msg_text and source_line are close to *search_key*.

    Closeness is Levenshtein distance, bounded both absolutely
    (FUZZY_MATCH_MAX_EDIT_DISTANCE_ABS) and as a percentage of the longer
    string (FUZZY_MATCH_MAX_EDIT_DISTANCE_PCT), so short lines are not
    matched too loosely.
    """
    for key in _iter_candidate_keys(catalog, search_key):
        msg_text_dist = pylev.levenshtein(key.msg_text, search_key.msg_text)
        src_line_dist = pylev.levenshtein(key.source_line, search_key.source_line)
        if msg_text_dist > FUZZY_MATCH_MAX_EDIT_DISTANCE_ABS:
            continue
        if src_line_dist > FUZZY_MATCH_MAX_EDIT_DISTANCE_ABS:
            continue
        # max(..., 1) guards against ZeroDivisionError when both compared
        # strings are empty (the distance is 0 in that case anyway).
        msg_text_dist_pct = 100 * msg_text_dist / max(len(key.msg_text), len(search_key.msg_text), 1)
        src_line_dist_pct = (
            100 * src_line_dist / max(len(key.source_line), len(search_key.source_line), 1)
        )
        if msg_text_dist_pct > FUZZY_MATCH_MAX_EDIT_DISTANCE_PCT:
            continue
        if src_line_dist_pct > FUZZY_MATCH_MAX_EDIT_DISTANCE_PCT:
            continue
        yield catalog[key]
def find_entry(catalog: Catalog, search_key: Key) -> typ.Optional[Entry]:
has_exact_match = search_key in catalog
if has_exact_match:
# exact match
return catalog[search_key]
# try for a fuzzy match
matches = list(_iter_fuzzy_entries(catalog, search_key))
if len(matches) == 1:
return matches[0]
else:
return None
CONTEXT_LINES = 2
_SRC_CACHE: typ.Dict[str, typ.List[str]] = {}
def read_source_lines(path: str) -> typ.List[str]:
if path not in _SRC_CACHE:
if len(_SRC_CACHE) > 2:
_SRC_CACHE.popitem()
with pl.Path(path).open(mode="r", encoding="utf-8") as fobj:
full_src_text = fobj.read()
_keepends = True
lines = full_src_text.splitlines(_keepends)
_SRC_CACHE[path] = lines
return _SRC_CACHE[path]
def find_source_text_lineno(path: str, old_source_line: str, old_lineno: int) -> int:
if not pl.Path(path).exists():
raise ObsoleteEntry("file not found")
old_line_idx = old_lineno - 1
lines = read_source_lines(path)
# NOTE (mb 2020-07-17): It's not too critical that we find the original
# entry. If we don't (and the message is still valid) then it will
# just be replaced by a new entry which will have to be acknowledged
# again. The git diff should make very obvious what happened.
for offset in range(100):
for line_idx in {old_line_idx - offset, old_line_idx + offset}:
is_matching_line = (
0 <= line_idx < len(lines) and lines[line_idx].rstrip() == old_source_line.rstrip()
)
if is_matching_line:
return line_idx + 1
raise ObsoleteEntry("source text not found")
def read_source_text(path: str, new_lineno: int, old_lineno: int) -> SourceText:
lines = read_source_lines(path)
line_idx = new_lineno - 1 # lineno starts at 1
line_indent_lvl = len(lines[line_idx]) - len(lines[line_idx].lstrip())
start_idx = max(0, line_idx - CONTEXT_LINES)
end_idx = min(len(lines), line_idx + CONTEXT_LINES + 1)
src_lines = lines[start_idx:end_idx]
src_text = "".join(src_lines)
source_line = lines[line_idx]
def_line_idx: typ.Optional[int] = None
def_line : typ.Optional[str] = None
maybe_def_idx = line_idx
while maybe_def_idx > 0:
line_text = lines[maybe_def_idx]
indent_lvl = len(line_text) - len(line_text.lstrip())
if line_text.strip() and indent_lvl < line_indent_lvl:
first_token = line_text.lstrip().split()[0]
if first_token in ('def', 'class'):
is_defline_before_ctx_src = 0 <= maybe_def_idx < start_idx
if is_defline_before_ctx_src:
def_line_idx = maybe_def_idx
def_line = lines[maybe_def_idx]
break
maybe_def_idx -= 1
return SourceText(
new_lineno, old_lineno, source_line, src_text, start_idx, end_idx, def_line_idx, def_line
)
EntryValues = typ.Dict[str, str]
def _init_entry_item(entry_vals: EntryValues) -> typ.Tuple[Key, Entry]:
msg_extra: str = ""
if 'ctx_src_text' in entry_vals:
old_ctx_src_text = entry_vals['ctx_src_text']
old_source_text_match = SOURCE_TEXT_RE.match(old_ctx_src_text)
if old_source_text_match is None:
old_source_line = ""
msg_extra = old_ctx_src_text.strip()[3:][:-3].strip()
else:
# NOTE (mb 2020-07-16): The file may have changed in the meantime,
# so we search for the original source text (which may be on a
# different line).
old_source_line = old_source_text_match.group('source_line')
else:
old_source_line = ""
path = entry_vals['path']
srctxt: MaybeSourceText = None
if entry_vals['lineno']:
old_lineno = int(entry_vals['lineno'])
try:
new_lineno = find_source_text_lineno(path, old_source_line, old_lineno)
srctxt = read_source_text(path, new_lineno, old_lineno)
source_line = srctxt.source_line
except ObsoleteEntry:
source_line = old_source_line
else:
source_line = hashlib.sha1(msg_extra.encode("utf-8")).hexdigest()
ignorefile_entry = Entry(
entry_vals['msgid'],
entry_vals['path'],
entry_vals['symbol'],
entry_vals['message'],
msg_extra,
entry_vals['author'],
entry_vals['date'],
srctxt,
)
ignorefile_key = Key(
ignorefile_entry.msgid,
ignorefile_entry.path,
ignorefile_entry.symbol,
ignorefile_entry.msg_text,
source_line,
)
return (ignorefile_key, ignorefile_entry)
def dumps_entry(entry: Entry) -> str:
srctxt = entry.srctxt
if srctxt is None:
lineno = ""
if "\n" in entry.msg_text:
msg_text_parts = entry.msg_text.split("\n", 1)
msg_text = msg_text_parts[0]
ctx_src_text = msg_text_parts[1]
else:
msg_text = entry.msg_text
ctx_src_text = entry.msg_extra or ""
else:
lineno = str(srctxt.new_lineno)
last_ctx_lineno = srctxt.end_idx + 1
padding_size = len(str(last_ctx_lineno))
src_lines: typ.List[str] = []
def_line = srctxt.def_line
def_line_idx = srctxt.def_line_idx
if def_line and def_line_idx:
def_lineno = def_line_idx + 1
line = def_line.rstrip()
src_lines.append(f" {def_lineno:>{padding_size}}: {line}")
if def_lineno + CONTEXT_LINES < srctxt.new_lineno:
src_lines.append(" ...")
for offset, line in enumerate(srctxt.text.splitlines()):
src_lineno = str(srctxt.start_idx + offset + 1)
# padded_line is to avoid trailing whitespace
padded_line = " " + line if line.strip() else ""
if lineno == src_lineno:
dumps_line = f"> {src_lineno:>{padding_size}}:{padded_line}"
else:
dumps_line = f" {src_lineno:>{padding_size}}:{padded_line}"
src_lines.append(dumps_line)
msg_text = entry.msg_text
ctx_src_text = "\n".join(src_lines)
if ctx_src_text.strip():
ctx_src_text = "```\n" + ctx_src_text.rstrip() + "\n```\n\n"
lineno_txt = f"Line {lineno} - " if lineno else ""
entry_text = ENTRY_TEMPLATE.format(
entry=entry, lineno_txt=lineno_txt, msg_text=msg_text, ctx_src_text=ctx_src_text
)
return entry_text.lstrip("\n")
def _parse_ctx_src_text(fence: str, lines: typ.Iterator[typ.Tuple[int, str]]) -> str:
ctx_src_text_lines = [fence + "\n"]
while True:
# consume lines to next fence
_, next_line = next(lines)
ctx_src_text_lines.append(next_line)
is_close_fence = next_line.strip() == fence
if is_close_fence:
break
return "".join(ctx_src_text_lines)
def _iter_entry_values(ignorefile_path: pl.Path) -> typ.Iterable[EntryValues]:
entry_vals: EntryValues = {}
with ignorefile_path.open(mode="r", encoding="utf-8") as fobj:
lines = iter(enumerate(fobj))
try:
while True:
i, line = next(lines)
ignorefile_lineno = i + 1
if line.startswith("```"):
fence = line[:3]
entry_vals['ctx_src_text'] = _parse_ctx_src_text(fence, lines)
continue
entry_header = ENTRY_HEADER_RE.match(line)
if entry_header and 'msgid' in entry_vals:
# new header -> any existing entry is done
yield entry_vals
# new entry
entry_vals = {}
if entry_header:
entry_vals['ignorefile_lineno'] = str(ignorefile_lineno)
entry_vals.update(entry_header.groupdict())
assert 'msgid' in entry_vals
continue
list_item = LIST_ITEM_RE.match(line)
if list_item:
entry_vals[list_item.group('key')] = list_item.group('value')
except StopIteration:
pass
# yield last entry (not followed by a header that would otherwise trigger the yield)
if 'msgid' in entry_vals:
yield entry_vals
MESSAGE_TYPE_PRIORITIES = [
'F', # [F]atal for errors which prevented further processing
'E', # [E]rror for important programming issues (i.e. most probably bug)
'W', # [W]arning for stylistic problems, or minor programming issues
'C', # [C]onvention for coding standard violation
'R', # [R]efactor for a "good practice" metric violation
'I', # [I]nformational messages that Pylint emits (do not contribute to your analysis score)
]
assert MESSAGE_TYPE_PRIORITIES.index('F') < MESSAGE_TYPE_PRIORITIES.index('I')
def _entry_priority(entry: Entry) -> typ.Any:
msg_type_priority = MESSAGE_TYPE_PRIORITIES.index(entry.msgid[:1])
return (
msg_type_priority,
entry.msgid,
entry.srctxt and entry.srctxt.new_lineno,
entry.msg_text,
)
def dumps(ignorefile: Catalog) -> str:
if len(ignorefile) == 0:
return IGNOREFILE_HEADER
entries = sorted(ignorefile.values(), key=_entry_priority)
msgid_count = collections.Counter(e.msgid for e in entries)
overview_chunks: typ.List[str] = [IGNOREFILE_HEADER, "\n# Overview\n\n"]
entry_chunks : typ.List[str] = []
prev_msg_id = None
for entry in entries:
if entry.msgid != prev_msg_id:
prev_msg_id = entry.msgid
num_entries = msgid_count[entry.msgid]
section_text = f"{entry.msgid}: {entry.symbol} ({num_entries}x)"
entry_link = f"#{entry.msgid}-{entry.symbol}".lower()
section = f" - [{section_text}]({entry_link})\n"
overview_chunks.append(section)
entry_chunks.append(f"# {entry.msgid}: {entry.symbol}\n\n")
entry_chunks.append(dumps_entry(entry))
overview_chunks.append("\n\n")
chunks = overview_chunks + entry_chunks
return "".join(chunks)
def dump(ignorefile: Catalog, ignorefile_path: pl.Path) -> None:
ignorefile_text = dumps(ignorefile)
tmp_path = ignorefile_path.parent / (ignorefile_path.name + ".tmp")
with tmp_path.open(mode="w", encoding="utf-8") as fobj:
fobj.write(ignorefile_text)
shutil.move(str(tmp_path), str(ignorefile_path))
def load(ignorefile_path: pl.Path) -> Catalog:
if not ignorefile_path.exists():
return {}
catalog: Catalog = collections.OrderedDict()
for entry_vals in _iter_entry_values(ignorefile_path):
try:
ignorefile_key, ignorefile_entry = _init_entry_item(entry_vals)
catalog[ignorefile_key] = ignorefile_entry
except ObsoleteEntry:
# NOTE (mb 2020-07-17): It is fine for an entry to be obsolete.
# The code may have improved, it may have moved, in any case
# the ignore file is under version control and the change
# will be seen.
pass
except (KeyError, ValueError) as ex:
lineno = entry_vals['ignorefile_lineno']
path = entry_vals['path']
logmsg = f"Error parsing entry on line {lineno} of {path}: {ex}"
logger.error(logmsg, exc_info=True)
return catalog
def load_dir(dirpath: pl.Path) -> Catalog:
"""Load from multiple files.
This is used to read from a temporary directory where
multiple processes write to concurrently. The files then
need to be read back and joined together.
We don't have to care about sorting here as that is done
in the final write
"""
full_catalog: Catalog = {}
for fpath in dirpath.glob("*.md"):
partial_catalog = load(fpath)
full_catalog.update(partial_catalog)
return full_catalog
|
[
"pylev.levenshtein",
"collections.OrderedDict",
"collections.Counter",
"pathlib2.Path",
"logging.getLogger",
"re.compile"
] |
[((318, 352), 'logging.getLogger', 'logging.getLogger', (['"""pylint_ignore"""'], {}), "('pylint_ignore')\n", (335, 352), False, 'import logging\n'), ((1646, 1697), 're.compile', 're.compile', (['_ENTRY_HEADER_PATTERN'], {'flags': 're.VERBOSE'}), '(_ENTRY_HEADER_PATTERN, flags=re.VERBOSE)\n', (1656, 1697), False, 'import re\n'), ((1845, 1893), 're.compile', 're.compile', (['_LIST_ITEM_PATTERN'], {'flags': 're.VERBOSE'}), '(_LIST_ITEM_PATTERN, flags=re.VERBOSE)\n', (1855, 1893), False, 'import re\n'), ((2257, 2307), 're.compile', 're.compile', (['_SOURCE_TEXT_PATTERN'], {'flags': 're.VERBOSE'}), '(_SOURCE_TEXT_PATTERN, flags=re.VERBOSE)\n', (2267, 2307), False, 'import re\n'), ((14087, 14132), 'collections.Counter', 'collections.Counter', (['(e.msgid for e in entries)'], {}), '(e.msgid for e in entries)\n', (14106, 14132), False, 'import collections\n'), ((15376, 15401), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (15399, 15401), False, 'import collections\n'), ((3816, 3868), 'pylev.levenshtein', 'pylev.levenshtein', (['key.msg_text', 'search_key.msg_text'], {}), '(key.msg_text, search_key.msg_text)\n', (3833, 3868), False, 'import pylev\n'), ((3896, 3954), 'pylev.levenshtein', 'pylev.levenshtein', (['key.source_line', 'search_key.source_line'], {}), '(key.source_line, search_key.source_line)\n', (3913, 3954), False, 'import pylev\n'), ((5497, 5510), 'pathlib2.Path', 'pl.Path', (['path'], {}), '(path)\n', (5504, 5510), True, 'import pathlib2 as pl\n'), ((5158, 5171), 'pathlib2.Path', 'pl.Path', (['path'], {}), '(path)\n', (5165, 5171), True, 'import pathlib2 as pl\n')]
|
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
# Data Science
import numpy as np
import pandas as pd
# Visualization
import seaborn as sns
import matplotlib.pyplot as plt
# Tricks
sns.set(style='ticks', context='talk', font_scale=1.15)
# In[ ]:
import os, sys
from skimage.io import imread as skIR
from PIL import Image
# Root directory of the project
ROOT_DIR = os.path.abspath(Mask_RCNN_ROOT)
# Import Mask RCNN
sys.path.append(ROOT_DIR) # To find local version of the library
from mrcnn import visualize
# In[ ]:
CLASS_NAMES = ['BG', 'person', 'bicycle', 'car', 'motorcycle', 'airplane',
'bus', 'train', 'truck', 'boat', 'traffic light',
'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird',
'cat', 'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear',
'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie',
'suitcase', 'frisbee', 'skis', 'snowboard', 'sports ball',
'kite', 'baseball bat', 'baseball glove', 'skateboard',
'surfboard', 'tennis racket', 'bottle', 'wine glass', 'cup',
'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple',
'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza',
'donut', 'cake', 'chair', 'couch', 'potted plant', 'bed',
'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote',
'keyboard', 'cell phone', 'microwave', 'oven', 'toaster',
'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors',
'teddy bear', 'hair drier', 'toothbrush']
VIP_CLASS = ['person','skis','snowboard']
# IMAGE_SHAPE = (467, 700, 3)
IMAGE_SHAPE = (667, 1000, 3)
# ---
# In[ ]:
def Show_Img(obj, showBox=True, showMask=True, getArray=False):
"""
Show image for given image ID.
Parameters (Input)
----------
obj : DataFrame, Series, str
The Mask R-CNN record for a image
or the path to the image file
showBox : bool
Show the Boxes generated by Mask R-CNN
showMask : bool
Show the Masks generated by Mask R-CNN
getArray : bool
Return Array, not show image, will overwrite
showBox=False, showMask=False
Returns
-------
None (Just show the image)
or
Numpy array of the image
"""
assert isinstance( obj, (str, pd.DataFrame, pd.Series) ), 'Input should be a Pandas DataFrame or Series.'
if isinstance(obj, str):
imgFile = obj
elif isinstance(obj['imgID'], str):
imgFile = obj['imgID']
elif isinstance(obj['imgID'], pd.Series):
imgFile = obj['imgID'].unique()[0]
obj = obj.where(obj['imgID']==imgFile).dropna()
else:
assert isinstance( obj['imgID'], (str, pd.Series) ), 'Unable to process:' + type(obj['imgID'])
if not os.path.exists(imgFile):
assert None, 'Not such image! ' + imgFile
image = skIR(imgFile)
if getArray:
return np.array(image)
if isinstance(obj, str):
return visualize.display_instances(
image,
np.zeros((2,2)), # Placeholder, rois
np.zeros((2,2)), # Placeholder, masks
np.zeros((2,2)), # Placeholder, class_ids
np.array(0), # Placeholder, CLASS_NAMES
np.array(0), # Placeholder, scores
figsize=(8,8),
show_mask=False,
show_bbox=False,
)
else:
result = {}
if isinstance( obj, pd.DataFrame ):
result['class_ids'] = np.array( obj['class_ids'].to_list() )
result['scores'] = np.array( obj['scores'].to_list() )
result['rois'] = np.array( obj[['x1','y1','x2','y2']].values)
else:
result['class_ids'] = np.array([obj['class_ids']])
result['scores'] = np.array([obj['scores']])
result['rois'] = np.array( obj[['x1','y1','x2','y2']].values)[np.newaxis, :]
if showMask:
result['masks'] = pd.Series(obj['masks']).apply( lambda row: list(map(int, list(row))) ).tolist()
result['masks'] = np.rollaxis(
np.array(result['masks']).reshape(-1, IMAGE_SHAPE[0], IMAGE_SHAPE[1]), 0, 3
).astype(bool)
else:
result['masks'] = np.zeros((IMAGE_SHAPE[0], IMAGE_SHAPE[1], result['scores'].shape[0]))
return visualize.display_instances(
image,
result['rois'],
result['masks'],
result['class_ids'].astype(int),
CLASS_NAMES,
result['scores'],
figsize=(8,8),
show_mask=showMask,
show_bbox=showBox,
)
# ---
# In[ ]:
def extInBoxPixels(obj, getMask=False, show=False):
"""
Extract InBox pixels from given Box and image ID.
Parameters (Input)
----------
obj : Series
The record for a box
getMask : bool
Only extract the InMask pixels
show : bool
Show the extracted pixels
Returns
-------
ext_Box : Array
Numpy array (Matrix) with InBox pixels
Shape = (Unknown, Unknown, 3)
"""
assert isinstance( obj, pd.Series ), 'Input should be a Pandas Series.'
imgFile = obj['imgID']
if not os.path.exists(imgFile):
assert None, 'Not such image!'
image = skIR(imgFile)
(x1, y1, x2, y2) = obj[['x1','y1','x2','y2']].map(int)
# Check image shape
if image.shape != IMAGE_SHAPE:
# Some are vertical image
image = np.swapaxes(image,0,1)
# Check again
if image.shape != IMAGE_SHAPE:
return None # Placehoder
if not getMask:
ext_Box = image[x1:x2, y1:y2, :]
else:
# Mask Invert
ext_Mask = np.invert(
np.array(
pd.Series(obj['masks'])
.apply( lambda row: list(map(int, list(row))) )
.tolist()
).reshape(-1, IMAGE_SHAPE[0], IMAGE_SHAPE[1]).astype(bool)[0]
)
# First, Make Inverted Mask as a white/snow background (255,255,255)
# Next, Add image to the Inverted Mask
# Then, Clip the overflow (>255) pixels (make them white)
ext_Img = (
255*np.stack( [ext_Mask]*3, axis=2 )
+image
).clip(max=255)
# Finally, Crop the box
ext_Box = ext_Img[x1:x2, y1:y2, :]
if show:
plt.imshow(ext_Box)
return ext_Box
# ---
# In[ ]:
def squareBox (BoxArray):
"""
Reshape a Unknow shape Box with pixels to a square Box with 150x150.
Parameters (Input)
----------
BoxArray : numpy array (Matrix)
Array with InBox pixels
Returns
-------
BoxArraySquared : Array
Numpy array (Matrix) with InBox pixels
Shape = (150, 150, 3)
"""
assert isinstance( BoxArray, np.ndarray ), 'Input should be a Numpy array.'
BoxArraySquared = np.array(
resize_tool(
Image.fromarray(BoxArray.astype('uint8')),
width = 150,
height = 150,
)
)
return BoxArraySquared
################################################################################
def resize_tool(image_pil, width, height):
'''
Resize PIL image keeping ratio and using white background.
From https://stackoverflow.com/questions/44370469/python-image-resizing-keep-proportion-add-white-background
'''
ratio_w = width / image_pil.width
ratio_h = height / image_pil.height
if ratio_w < ratio_h:
# It must be fixed by width
resize_width = width
resize_height = round(ratio_w * image_pil.height)
else:
# Fixed by height
resize_width = round(ratio_h * image_pil.width)
resize_height = height
image_resize = image_pil.resize((resize_width, resize_height), Image.ANTIALIAS)
background = Image.new('RGB', (width, height), (255, 255, 255))
offset = (round((width - resize_width) / 2), round((height - resize_height) / 2))
background.paste(image_resize, offset)
return background
# In[ ]:
|
[
"sys.path.append",
"numpy.stack",
"os.path.abspath",
"PIL.Image.new",
"matplotlib.pyplot.imshow",
"os.path.exists",
"numpy.zeros",
"numpy.array",
"numpy.swapaxes",
"pandas.Series",
"seaborn.set",
"skimage.io.imread"
] |
[((184, 239), 'seaborn.set', 'sns.set', ([], {'style': '"""ticks"""', 'context': '"""talk"""', 'font_scale': '(1.15)'}), "(style='ticks', context='talk', font_scale=1.15)\n", (191, 239), True, 'import seaborn as sns\n'), ((372, 403), 'os.path.abspath', 'os.path.abspath', (['Mask_RCNN_ROOT'], {}), '(Mask_RCNN_ROOT)\n', (387, 403), False, 'import os, sys\n'), ((424, 449), 'sys.path.append', 'sys.path.append', (['ROOT_DIR'], {}), '(ROOT_DIR)\n', (439, 449), False, 'import os, sys\n'), ((2949, 2962), 'skimage.io.imread', 'skIR', (['imgFile'], {}), '(imgFile)\n', (2953, 2962), True, 'from skimage.io import imread as skIR\n'), ((5382, 5395), 'skimage.io.imread', 'skIR', (['imgFile'], {}), '(imgFile)\n', (5386, 5395), True, 'from skimage.io import imread as skIR\n'), ((7912, 7962), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(width, height)', '(255, 255, 255)'], {}), "('RGB', (width, height), (255, 255, 255))\n", (7921, 7962), False, 'from PIL import Image\n'), ((2860, 2883), 'os.path.exists', 'os.path.exists', (['imgFile'], {}), '(imgFile)\n', (2874, 2883), False, 'import os, sys\n'), ((2996, 3011), 'numpy.array', 'np.array', (['image'], {}), '(image)\n', (3004, 3011), True, 'import numpy as np\n'), ((5305, 5328), 'os.path.exists', 'os.path.exists', (['imgFile'], {}), '(imgFile)\n', (5319, 5328), False, 'import os, sys\n'), ((5566, 5590), 'numpy.swapaxes', 'np.swapaxes', (['image', '(0)', '(1)'], {}), '(image, 0, 1)\n', (5577, 5590), True, 'import numpy as np\n'), ((6449, 6468), 'matplotlib.pyplot.imshow', 'plt.imshow', (['ext_Box'], {}), '(ext_Box)\n', (6459, 6468), True, 'import matplotlib.pyplot as plt\n'), ((3117, 3133), 'numpy.zeros', 'np.zeros', (['(2, 2)'], {}), '((2, 2))\n', (3125, 3133), True, 'import numpy as np\n'), ((3166, 3182), 'numpy.zeros', 'np.zeros', (['(2, 2)'], {}), '((2, 2))\n', (3174, 3182), True, 'import numpy as np\n'), ((3216, 3232), 'numpy.zeros', 'np.zeros', (['(2, 2)'], {}), '((2, 2))\n', (3224, 3232), True, 'import numpy as np\n'), ((3270, 
3281), 'numpy.array', 'np.array', (['(0)'], {}), '(0)\n', (3278, 3281), True, 'import numpy as np\n'), ((3326, 3337), 'numpy.array', 'np.array', (['(0)'], {}), '(0)\n', (3334, 3337), True, 'import numpy as np\n'), ((3711, 3757), 'numpy.array', 'np.array', (["obj[['x1', 'y1', 'x2', 'y2']].values"], {}), "(obj[['x1', 'y1', 'x2', 'y2']].values)\n", (3719, 3757), True, 'import numpy as np\n'), ((3804, 3832), 'numpy.array', 'np.array', (["[obj['class_ids']]"], {}), "([obj['class_ids']])\n", (3812, 3832), True, 'import numpy as np\n'), ((3867, 3892), 'numpy.array', 'np.array', (["[obj['scores']]"], {}), "([obj['scores']])\n", (3875, 3892), True, 'import numpy as np\n'), ((4329, 4398), 'numpy.zeros', 'np.zeros', (["(IMAGE_SHAPE[0], IMAGE_SHAPE[1], result['scores'].shape[0])"], {}), "((IMAGE_SHAPE[0], IMAGE_SHAPE[1], result['scores'].shape[0]))\n", (4337, 4398), True, 'import numpy as np\n'), ((3927, 3973), 'numpy.array', 'np.array', (["obj[['x1', 'y1', 'x2', 'y2']].values"], {}), "(obj[['x1', 'y1', 'x2', 'y2']].values)\n", (3935, 3973), True, 'import numpy as np\n'), ((6275, 6307), 'numpy.stack', 'np.stack', (['([ext_Mask] * 3)'], {'axis': '(2)'}), '([ext_Mask] * 3, axis=2)\n', (6283, 6307), True, 'import numpy as np\n'), ((4039, 4062), 'pandas.Series', 'pd.Series', (["obj['masks']"], {}), "(obj['masks'])\n", (4048, 4062), True, 'import pandas as pd\n'), ((4182, 4207), 'numpy.array', 'np.array', (["result['masks']"], {}), "(result['masks'])\n", (4190, 4207), True, 'import numpy as np\n'), ((5849, 5872), 'pandas.Series', 'pd.Series', (["obj['masks']"], {}), "(obj['masks'])\n", (5858, 5872), True, 'import pandas as pd\n')]
|
from sklearn.datasets import make_multilabel_classification
from sklearn.model_selection import train_test_split
from mllearn.problem_transform import BinaryRelevance
from mllearn.metrics import subset_acc
from mllearn.metrics import hamming_loss as hamming_loss
from mllearn.metrics import accuracy
from mllearn.metrics import precision
from mllearn.metrics import recall
from mllearn.metrics import F_beta
X, y = make_multilabel_classification(n_samples=700,
n_features = 80,
n_classes=5,
n_labels=2,
allow_unlabeled=False,
random_state=1)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3)
classif = BinaryRelevance()
classif.fit(X_train, y_train)
predictions = classif.predict(X_test)
print('The subset_acc Result is %f' % subset_acc(y_test, predictions))
print('The hamming_loss Result is %f' % hamming_loss(y_test, predictions))
print('The accuracy Result is %f' % accuracy(y_test, predictions))
print('The precision Result is %f' % precision(y_test, predictions))
print('The recall Result is %f' % recall(y_test, predictions))
print('The F_beta Result is %f' % F_beta(y_test, predictions))
|
[
"mllearn.metrics.accuracy",
"mllearn.metrics.recall",
"sklearn.datasets.make_multilabel_classification",
"sklearn.model_selection.train_test_split",
"mllearn.metrics.precision",
"mllearn.metrics.F_beta",
"mllearn.metrics.hamming_loss",
"mllearn.problem_transform.BinaryRelevance",
"mllearn.metrics.subset_acc"
] |
[((416, 544), 'sklearn.datasets.make_multilabel_classification', 'make_multilabel_classification', ([], {'n_samples': '(700)', 'n_features': '(80)', 'n_classes': '(5)', 'n_labels': '(2)', 'allow_unlabeled': '(False)', 'random_state': '(1)'}), '(n_samples=700, n_features=80, n_classes=5,\n n_labels=2, allow_unlabeled=False, random_state=1)\n', (446, 544), False, 'from sklearn.datasets import make_multilabel_classification\n'), ((803, 840), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.3)'}), '(X, y, test_size=0.3)\n', (819, 840), False, 'from sklearn.model_selection import train_test_split\n'), ((852, 869), 'mllearn.problem_transform.BinaryRelevance', 'BinaryRelevance', ([], {}), '()\n', (867, 869), False, 'from mllearn.problem_transform import BinaryRelevance\n'), ((976, 1007), 'mllearn.metrics.subset_acc', 'subset_acc', (['y_test', 'predictions'], {}), '(y_test, predictions)\n', (986, 1007), False, 'from mllearn.metrics import subset_acc\n'), ((1049, 1082), 'mllearn.metrics.hamming_loss', 'hamming_loss', (['y_test', 'predictions'], {}), '(y_test, predictions)\n', (1061, 1082), True, 'from mllearn.metrics import hamming_loss as hamming_loss\n'), ((1120, 1149), 'mllearn.metrics.accuracy', 'accuracy', (['y_test', 'predictions'], {}), '(y_test, predictions)\n', (1128, 1149), False, 'from mllearn.metrics import accuracy\n'), ((1188, 1218), 'mllearn.metrics.precision', 'precision', (['y_test', 'predictions'], {}), '(y_test, predictions)\n', (1197, 1218), False, 'from mllearn.metrics import precision\n'), ((1254, 1281), 'mllearn.metrics.recall', 'recall', (['y_test', 'predictions'], {}), '(y_test, predictions)\n', (1260, 1281), False, 'from mllearn.metrics import recall\n'), ((1317, 1344), 'mllearn.metrics.F_beta', 'F_beta', (['y_test', 'predictions'], {}), '(y_test, predictions)\n', (1323, 1344), False, 'from mllearn.metrics import F_beta\n')]
|
import matplotlib.pyplot as plt
import matplotlib.ticker as mtick
import numpy as np
import pandas as pd
import seaborn as sns
def __add_name_labels(ax, xs, ys):
last_y_pos = 9999
for i, name in enumerate(xs):
y_pos = ys[name] - 0.1
if np.abs(y_pos - last_y_pos) < 0.1:
y_pos = last_y_pos - 0.1
last_y_pos = y_pos
ax.text(
i, y_pos, name, ha="center", va="center", bbox=dict(
boxstyle="round",
ec=(1, 1, 1, 0),
fc=(1, 1, 1, 0.7),
)
)
# Remove original ticks
ax.set_xticks([])
ax.set_xticks([], minor=True)
def visualize_ratings(file_name, df, x='beer', plot_type="box", show=False, figsize=(16, 9), sort=False):
order = df.groupby(x).median()['normalized rating'].sort_values(ascending=False).index if sort else df[x].unique()
fig = plt.figure(figsize=figsize)
# Plot ratings
if plot_type == "box":
ax = sns.boxplot(data=df, x=x, y='normalized rating', order=order, whis=[0, 100])
elif plot_type == "violin":
ax = sns.violinplot(data=df, x=x, y='normalized rating', order=order, inner="point", bw=0.15, scale="count")
ax.grid(linestyle=':')
# Add nice name labels
__add_name_labels(ax, xs=order, ys=df.groupby(x, sort=False)['normalized rating'].min())
plt.tight_layout()
if file_name is not None:
fig.savefig(file_name)
if show:
plt.show()
plt.close(fig)
def visualize_ratings_per_person(file_name, df, show=False, figsize=(16, 9)):
fig = plt.figure(figsize=figsize)
ax = sns.scatterplot(data=df, x='beer', y='normalized rating', hue='person', s=50, edgecolor=(0, 0, 0, 0))
ax.grid(linestyle=':')
# Add nice name labels
__add_name_labels(ax, xs=df['beer'].unique(), ys=df.groupby('beer', sort=False)['normalized rating'].min())
plt.tight_layout()
if file_name is not None:
fig.savefig(file_name)
if show:
plt.show()
plt.close(fig)
def visualize_ratings_per_price(file_name, df, show=False, figsize=(16, 9)):
fig = plt.figure(figsize=figsize)
data = df.groupby('beer').agg(
price=pd.NamedAgg(column='price', aggfunc="first"),
rating=pd.NamedAgg(column='normalized rating', aggfunc="mean"),
beer=pd.NamedAgg(column='beer', aggfunc="first"),
)
# Plot ratings
ax = sns.scatterplot(data=data, x='price', y='rating', s=50, color="black", edgecolor=(0, 0, 0, 0))
ax.set_xlabel('€ / l')
ax.set_ylabel('normalized rating')
ax.grid(linestyle=':')
for _, price, rating, beer in data.itertuples():
ax.annotate(
beer,
xytext=(8, -5),
textcoords='offset pixels',
xy=(price, rating),
)
ax.set_xlim(right=2)
ax.imshow(
[[1, 0.5], [0.5, 0]],
cmap=plt.cm.RdYlGn,
interpolation='bicubic',
extent=plt.xlim() + plt.ylim(),
aspect="auto"
)
plt.tight_layout()
if file_name is not None:
fig.savefig(file_name)
if show:
plt.show()
plt.close(fig)
def visualize_alcohol_per_beer(file_name, df, show=False, figsize=(16, 9)):
fig = plt.figure(figsize=figsize)
data = df.sort_values(['vol', 'beer']).groupby('beer', sort=False).agg(
beer=pd.NamedAgg(column='beer', aggfunc="first"),
rating=pd.NamedAgg(column='normalized rating', aggfunc="mean"),
vol=pd.NamedAgg(column='vol', aggfunc="first"),
)
# Plot ratings
ax = sns.scatterplot(data=data, x='vol', y='rating', s=50, color="black", edgecolor=(0, 0, 0, 0))
ax.grid(linestyle=':')
ax.xaxis.set_major_formatter(mtick.PercentFormatter())
# Plot trend fit
from sklearn.linear_model import LinearRegression
reg = LinearRegression().fit(data['vol'].values.reshape(-1, 1), data['rating'])
plt.plot(
plt.xlim(),
reg.predict(np.array(plt.xlim()).reshape(-1, 1)),
linewidth=1,
color="black",
linestyle="dashed",
label="Trend"
)
plt.legend()
for _, beer, rating, vol in data.itertuples():
ax.annotate(
beer,
xytext=(8, -5),
textcoords='offset pixels',
xy=(vol, rating),
bbox=dict(
boxstyle="round",
ec=(1, 1, 1, 0),
fc=(1, 1, 1, 0.7),
),
)
plt.tight_layout()
if file_name is not None:
fig.savefig(file_name)
if show:
plt.show()
plt.close(fig)
|
[
"matplotlib.pyplot.xlim",
"pandas.NamedAgg",
"matplotlib.pyplot.show",
"numpy.abs",
"seaborn.scatterplot",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.close",
"matplotlib.pyplot.legend",
"seaborn.violinplot",
"sklearn.linear_model.LinearRegression",
"matplotlib.pyplot.figure",
"seaborn.boxplot",
"matplotlib.pyplot.tight_layout",
"matplotlib.ticker.PercentFormatter"
] |
[((921, 948), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (931, 948), True, 'import matplotlib.pyplot as plt\n'), ((1402, 1420), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1418, 1420), True, 'import matplotlib.pyplot as plt\n'), ((1523, 1537), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (1532, 1537), True, 'import matplotlib.pyplot as plt\n'), ((1632, 1659), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (1642, 1659), True, 'import matplotlib.pyplot as plt\n'), ((1672, 1778), 'seaborn.scatterplot', 'sns.scatterplot', ([], {'data': 'df', 'x': '"""beer"""', 'y': '"""normalized rating"""', 'hue': '"""person"""', 's': '(50)', 'edgecolor': '(0, 0, 0, 0)'}), "(data=df, x='beer', y='normalized rating', hue='person', s=\n 50, edgecolor=(0, 0, 0, 0))\n", (1687, 1778), True, 'import seaborn as sns\n'), ((1952, 1970), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1968, 1970), True, 'import matplotlib.pyplot as plt\n'), ((2073, 2087), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (2082, 2087), True, 'import matplotlib.pyplot as plt\n'), ((2181, 2208), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (2191, 2208), True, 'import matplotlib.pyplot as plt\n'), ((2479, 2577), 'seaborn.scatterplot', 'sns.scatterplot', ([], {'data': 'data', 'x': '"""price"""', 'y': '"""rating"""', 's': '(50)', 'color': '"""black"""', 'edgecolor': '(0, 0, 0, 0)'}), "(data=data, x='price', y='rating', s=50, color='black',\n edgecolor=(0, 0, 0, 0))\n", (2494, 2577), True, 'import seaborn as sns\n'), ((3099, 3117), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (3115, 3117), True, 'import matplotlib.pyplot as plt\n'), ((3220, 3234), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (3229, 3234), True, 'import 
matplotlib.pyplot as plt\n'), ((3327, 3354), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (3337, 3354), True, 'import matplotlib.pyplot as plt\n'), ((3662, 3758), 'seaborn.scatterplot', 'sns.scatterplot', ([], {'data': 'data', 'x': '"""vol"""', 'y': '"""rating"""', 's': '(50)', 'color': '"""black"""', 'edgecolor': '(0, 0, 0, 0)'}), "(data=data, x='vol', y='rating', s=50, color='black',\n edgecolor=(0, 0, 0, 0))\n", (3677, 3758), True, 'import seaborn as sns\n'), ((4212, 4224), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4222, 4224), True, 'import matplotlib.pyplot as plt\n'), ((4584, 4602), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (4600, 4602), True, 'import matplotlib.pyplot as plt\n'), ((4705, 4719), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (4714, 4719), True, 'import matplotlib.pyplot as plt\n'), ((1015, 1091), 'seaborn.boxplot', 'sns.boxplot', ([], {'data': 'df', 'x': 'x', 'y': '"""normalized rating"""', 'order': 'order', 'whis': '[0, 100]'}), "(data=df, x=x, y='normalized rating', order=order, whis=[0, 100])\n", (1026, 1091), True, 'import seaborn as sns\n'), ((1507, 1517), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1515, 1517), True, 'import matplotlib.pyplot as plt\n'), ((2057, 2067), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2065, 2067), True, 'import matplotlib.pyplot as plt\n'), ((3204, 3214), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3212, 3214), True, 'import matplotlib.pyplot as plt\n'), ((3817, 3841), 'matplotlib.ticker.PercentFormatter', 'mtick.PercentFormatter', ([], {}), '()\n', (3839, 3841), True, 'import matplotlib.ticker as mtick\n'), ((4031, 4041), 'matplotlib.pyplot.xlim', 'plt.xlim', ([], {}), '()\n', (4039, 4041), True, 'import matplotlib.pyplot as plt\n'), ((4689, 4699), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4697, 4699), True, 'import matplotlib.pyplot as 
plt\n'), ((273, 299), 'numpy.abs', 'np.abs', (['(y_pos - last_y_pos)'], {}), '(y_pos - last_y_pos)\n', (279, 299), True, 'import numpy as np\n'), ((1139, 1247), 'seaborn.violinplot', 'sns.violinplot', ([], {'data': 'df', 'x': 'x', 'y': '"""normalized rating"""', 'order': 'order', 'inner': '"""point"""', 'bw': '(0.15)', 'scale': '"""count"""'}), "(data=df, x=x, y='normalized rating', order=order, inner=\n 'point', bw=0.15, scale='count')\n", (1153, 1247), True, 'import seaborn as sns\n'), ((2262, 2306), 'pandas.NamedAgg', 'pd.NamedAgg', ([], {'column': '"""price"""', 'aggfunc': '"""first"""'}), "(column='price', aggfunc='first')\n", (2273, 2306), True, 'import pandas as pd\n'), ((2324, 2379), 'pandas.NamedAgg', 'pd.NamedAgg', ([], {'column': '"""normalized rating"""', 'aggfunc': '"""mean"""'}), "(column='normalized rating', aggfunc='mean')\n", (2335, 2379), True, 'import pandas as pd\n'), ((2395, 2438), 'pandas.NamedAgg', 'pd.NamedAgg', ([], {'column': '"""beer"""', 'aggfunc': '"""first"""'}), "(column='beer', aggfunc='first')\n", (2406, 2438), True, 'import pandas as pd\n'), ((3448, 3491), 'pandas.NamedAgg', 'pd.NamedAgg', ([], {'column': '"""beer"""', 'aggfunc': '"""first"""'}), "(column='beer', aggfunc='first')\n", (3459, 3491), True, 'import pandas as pd\n'), ((3509, 3564), 'pandas.NamedAgg', 'pd.NamedAgg', ([], {'column': '"""normalized rating"""', 'aggfunc': '"""mean"""'}), "(column='normalized rating', aggfunc='mean')\n", (3520, 3564), True, 'import pandas as pd\n'), ((3579, 3621), 'pandas.NamedAgg', 'pd.NamedAgg', ([], {'column': '"""vol"""', 'aggfunc': '"""first"""'}), "(column='vol', aggfunc='first')\n", (3590, 3621), True, 'import pandas as pd\n'), ((3933, 3951), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (3949, 3951), False, 'from sklearn.linear_model import LinearRegression\n'), ((3037, 3047), 'matplotlib.pyplot.xlim', 'plt.xlim', ([], {}), '()\n', (3045, 3047), True, 'import matplotlib.pyplot as plt\n'), ((3050, 
3060), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {}), '()\n', (3058, 3060), True, 'import matplotlib.pyplot as plt\n'), ((4073, 4083), 'matplotlib.pyplot.xlim', 'plt.xlim', ([], {}), '()\n', (4081, 4083), True, 'import matplotlib.pyplot as plt\n')]
|
import hashlib
import mock
import unittest
class FilesTestCase(unittest.TestCase):
    """Unit tests for the hash-verification behaviour of
    ``fabtools.require.files.file`` (``verify_remote`` flag)."""

    def _file(self, *args, **kwargs):
        """ Proxy to ensure ImportErrors actually cause test failures rather
        than trashing the test run entirely """
        from fabtools import require
        require.files.file(*args, **kwargs)

    @mock.patch('fabtools.require.files.md5sum')
    @mock.patch('fabtools.require.files.is_file')
    def test_verify_remote_false(self, is_file, md5sum):
        """ If verify_remote is set to False, then we should find that
        only is_file is used to check for the file's existence. Hashlib's
        md5 should not have been called.
        """
        is_file.return_value = True
        self._file(contents='This is a test', verify_remote=False)
        self.assertTrue(is_file.called)
        self.assertFalse(md5sum.called)

    @mock.patch('fabtools.require.files.md5sum')
    @mock.patch('fabtools.require.files.is_file')
    def test_verify_remote_true(self, is_file, md5sum):
        """ If verify_remote is True, then we should find that an MD5 hash is
        used to work out whether the file is different.
        """
        is_file.return_value = True
        # hashlib.md5 requires bytes on Python 3 -- passing a str raises
        # TypeError.  Encoding here is also safe on Python 2 (ASCII payload).
        md5sum.return_value = hashlib.md5('This is a test'.encode()).hexdigest()
        self._file(contents='This is a test', verify_remote=True)
        self.assertTrue(is_file.called)
        self.assertTrue(md5sum.called)
|
[
"hashlib.md5",
"fabtools.require.files.file",
"mock.patch"
] |
[((336, 379), 'mock.patch', 'mock.patch', (['"""fabtools.require.files.md5sum"""'], {}), "('fabtools.require.files.md5sum')\n", (346, 379), False, 'import mock\n'), ((385, 429), 'mock.patch', 'mock.patch', (['"""fabtools.require.files.is_file"""'], {}), "('fabtools.require.files.is_file')\n", (395, 429), False, 'import mock\n'), ((874, 917), 'mock.patch', 'mock.patch', (['"""fabtools.require.files.md5sum"""'], {}), "('fabtools.require.files.md5sum')\n", (884, 917), False, 'import mock\n'), ((923, 967), 'mock.patch', 'mock.patch', (['"""fabtools.require.files.is_file"""'], {}), "('fabtools.require.files.is_file')\n", (933, 967), False, 'import mock\n'), ((294, 329), 'fabtools.require.files.file', 'require.files.file', (['*args'], {}), '(*args, **kwargs)\n', (312, 329), False, 'from fabtools import require\n'), ((1236, 1265), 'hashlib.md5', 'hashlib.md5', (['"""This is a test"""'], {}), "('This is a test')\n", (1247, 1265), False, 'import hashlib\n')]
|
import pytest
import pathlib
import os
pytest_plugins = "pytester"

# Directory containing this conftest file.
path = pathlib.Path(__file__).parent.absolute()

# Bundled device resource files shipped with pytest_libiio, resolved
# relative to this file.
default_resource_dir = os.path.join(
    path, "..", "pytest_libiio", "resources", "devices"
)
def pytest_addoption(parser):
    """Register the pytest_libiio command-line options.

    Options:
        --disable_mock   disable context mocking
        --hw-manual      name of the hardware expected to be present
        --test-uri       libiio URI to run tests against
        --resource-dir   folder holding device resource files
    """
    parser.addoption(
        "--disable_mock", action="store_true", help="Disable mocking"
    )
    parser.addoption(
        "--hw-manual", action="store", dest="hw_map", default=None,
        help="Set expected hardware",
    )
    parser.addoption(
        "--test-uri", action="store", dest="uri_val", default=None,
        help="Set uri to test against",
    )
    parser.addoption(
        "--resource-dir", action="store", dest="resource_dir",
        default=default_resource_dir,
        help="Set path of resource folder",
    )
@pytest.fixture(scope="session")
def use_mocking(request):
    """Session-scoped flag: mocking is enabled unless --disable_mock was given."""
    mock_disabled = request.config.getoption("--disable_mock")
    return not mock_disabled
@pytest.fixture(scope="session")
def hw_select(request):
    """Session-scoped hardware name; --hw-manual overrides the default."""
    selected = request.config.getoption("--hw-manual")
    return selected if selected else "adrv9361"
@pytest.fixture(scope="session")
def uri_select(request):
    """Session-scoped context URI; --test-uri overrides the default."""
    uri = request.config.getoption("--test-uri")
    return uri if uri else "ip:192.168.86.56"
@pytest.fixture(scope="session")
def resource_folder(request):
    """Session-scoped path of the device resource folder (--resource-dir)."""
    folder = request.config.getoption("--resource-dir")
    return folder
|
[
"pathlib.Path",
"pytest.fixture",
"os.path.join"
] |
[((140, 205), 'os.path.join', 'os.path.join', (['path', '""".."""', '"""pytest_libiio"""', '"""resources"""', '"""devices"""'], {}), "(path, '..', 'pytest_libiio', 'resources', 'devices')\n", (152, 205), False, 'import os\n'), ((863, 894), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (877, 894), False, 'import pytest\n'), ((982, 1013), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (996, 1013), False, 'import pytest\n'), ((1162, 1193), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (1176, 1193), False, 'import pytest\n'), ((1350, 1381), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (1364, 1381), False, 'import pytest\n'), ((76, 98), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (88, 98), False, 'import pathlib\n')]
|
"""
Details of how the data model objects are mapped onto the relational database
are encapsulated here.
"""
import logging
import pkg_resources
from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.types import BigInteger
from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper
from sqlalchemy.orm.collections import attribute_mapped_collection
from galaxy import model
from galaxy.model.orm.engine_factory import build_engine
from galaxy.model.orm.now import now
from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType
from galaxy.model.base import ModelMapping
from galaxy.security import GalaxyRBACAgent
# Module-level logger for this mapping module.
log = logging.getLogger( __name__ )
# Shared MetaData registry that collects every Table defined below.
metadata = MetaData()
# Registered user accounts.  ``form_values_id`` points at an optional extra
# registration form; ``disk_usage`` caches the user's total stored size
# (Numeric(15, 0) -- presumably bytes, TODO confirm against usage).
model.User.table = Table( "galaxy_user", metadata,
    Column( "id", Integer, primary_key=True),
    Column( "create_time", DateTime, default=now ),
    Column( "update_time", DateTime, default=now, onupdate=now ),
    Column( "email", TrimmedString( 255 ), index=True, nullable=False ),
    Column( "username", TrimmedString( 255 ), index=True, unique=True ),
    Column( "password", TrimmedString( 255 ), nullable=False ),
    Column( "external", Boolean, default=False ),
    Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
    Column( "deleted", Boolean, index=True, default=False ),
    Column( "purged", Boolean, index=True, default=False ),
    Column( "disk_usage", Numeric( 15, 0 ), index=True ) ,
    Column( "active", Boolean, index=True, default=True, nullable=False ),
    Column( "activation_token", TrimmedString( 64 ), nullable=True, index=True ) )
model.UserAddress.table = Table( "user_address", metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "desc", TrimmedString( 255 )),
Column( "name", TrimmedString( 255 ), nullable=False),
Column( "institution", TrimmedString( 255 )),
Column( "address", TrimmedString( 255 ), nullable=False),
Column( "city", TrimmedString( 255 ), nullable=False),
Column( "state", TrimmedString( 255 ), nullable=False),
Column( "postal_code", TrimmedString( 255 ), nullable=False),
Column( "country", TrimmedString( 255 ), nullable=False),
Column( "phone", TrimmedString( 255 )),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ) )
model.UserOpenID.table = Table( "galaxy_user_openid", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "openid", TEXT, index=True, unique=True ),
Column( "provider", TrimmedString( 255 ) ),
)
model.History.table = Table( "history", metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "name", TrimmedString( 255 ) ),
Column( "hid_counter", Integer, default=1 ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ),
Column( "importing", Boolean, index=True, default=False ),
Column( "genome_build", TrimmedString( 40 ) ),
Column( "importable", Boolean, default=False ),
Column( "slug", TEXT, index=True ),
Column( "published", Boolean, index=True, default=False ) )
model.HistoryUserShareAssociation.table = Table( "history_user_share_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
)
model.HistoryDatasetAssociation.table = Table( "history_dataset_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "state", TrimmedString( 64 ), index=True, key="_state" ),
Column( "copied_from_history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), nullable=True ),
Column( "copied_from_library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True ),
Column( "hid", Integer ),
Column( "name", TrimmedString( 255 ) ),
Column( "info", TrimmedString( 255 ) ),
Column( "blurb", TrimmedString( 255 ) ),
Column( "peek" , TEXT ),
Column( "tool_version" , TEXT ),
Column( "extension", TrimmedString( 64 ) ),
Column( "metadata", MetadataType(), key="_metadata" ),
Column( "parent_id", Integer, ForeignKey( "history_dataset_association.id" ), nullable=True ),
Column( "designation", TrimmedString( 255 ) ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ),
Column( "visible", Boolean ),
Column( "hidden_beneath_collection_instance_id", ForeignKey( "history_dataset_collection_association.id" ), nullable=True ),
Column( "extended_metadata_id", Integer,
ForeignKey( "extended_metadata.id" ), index=True )
)
# Physical dataset records; ``object_store_id`` and ``external_filename``
# locate the underlying file, ``file_size``/``total_size`` cache sizes.
# ``state`` is a short string (e.g. a lifecycle state -- semantics defined
# elsewhere in the model module, not visible here).
model.Dataset.table = Table( "dataset", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "create_time", DateTime, default=now ),
    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
    Column( "state", TrimmedString( 64 ), index=True ),
    Column( "deleted", Boolean, index=True, default=False ),
    Column( "purged", Boolean, index=True, default=False ),
    Column( "purgable", Boolean, default=True ),
    Column( "object_store_id", TrimmedString( 255 ), index=True ),
    Column( "external_filename" , TEXT ),
    Column( "_extra_files_path", TEXT ),
    Column( 'file_size', Numeric( 15, 0 ) ),
    Column( 'total_size', Numeric( 15, 0 ) ),
    Column( 'uuid', UUIDType() ) )
model.HistoryDatasetAssociationDisplayAtAuthorization.table = Table( "history_dataset_association_display_at_authorization", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "site", TrimmedString( 255 ) ) )
model.HistoryDatasetAssociationSubset.table = Table( "history_dataset_association_subset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "history_dataset_association_subset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "location", Unicode(255), index=True) )
model.ImplicitlyConvertedDatasetAssociation.table = Table( "implicitly_converted_dataset_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True ),
Column( "hda_parent_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "ldda_parent_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "metadata_safe", Boolean, index=True, default=True ),
Column( "type", TrimmedString( 255 ) ) )
model.ValidationError.table = Table( "validation_error", metadata,
Column( "id", Integer, primary_key=True ),
Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "message", TrimmedString( 255 ) ),
Column( "err_type", TrimmedString( 64 ) ),
Column( "attributes", TEXT ) )
model.Group.table = Table( "galaxy_group", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", String( 255 ), index=True, unique=True ),
Column( "deleted", Boolean, index=True, default=False ) )
model.UserGroupAssociation.table = Table( "user_group_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ) )
model.UserRoleAssociation.table = Table( "user_role_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ) )
model.GroupRoleAssociation.table = Table( "group_role_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ) )
model.Role.table = Table( "role", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", String( 255 ), index=True, unique=True ),
Column( "description", TEXT ),
Column( "type", String( 40 ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
model.UserQuotaAssociation.table = Table( "user_quota_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ) )
model.GroupQuotaAssociation.table = Table( "group_quota_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ) )
model.Quota.table = Table( "quota", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", String( 255 ), index=True, unique=True ),
Column( "description", TEXT ),
Column( "bytes", BigInteger ),
Column( "operation", String( 8 ) ),
Column( "deleted", Boolean, index=True, default=False ) )
model.DefaultQuotaAssociation.table = Table( "default_quota_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "type", String( 32 ), index=True, unique=True ),
Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ) )
model.DatasetPermissions.table = Table( "dataset_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "action", TEXT ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.LibraryPermissions.table = Table( "library_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "action", TEXT ),
Column( "library_id", Integer, ForeignKey( "library.id" ), nullable=True, index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.LibraryFolderPermissions.table = Table( "library_folder_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "action", TEXT ),
Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.LibraryDatasetPermissions.table = Table( "library_dataset_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "action", TEXT ),
Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), nullable=True, index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.LibraryDatasetDatasetAssociationPermissions.table = Table( "library_dataset_dataset_association_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "action", TEXT ),
Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True, index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.DefaultUserPermissions.table = Table( "default_user_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "action", TEXT ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.DefaultHistoryPermissions.table = Table( "default_history_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "action", TEXT ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.LibraryDataset.table = Table( "library_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name="library_dataset_dataset_association_id_fk" ), nullable=True, index=True ),#current version of dataset, if null, there is not a current version selected
Column( "folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
Column( "order_id", Integer ), #not currently being used, but for possible future use
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), key="_name", index=True ), #when not None/null this will supercede display in library (but not when imported into user's history?)
Column( "info", TrimmedString( 255 ), key="_info" ), #when not None/null this will supercede display in library (but not when imported into user's history?)
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ) )
model.LibraryDatasetDatasetAssociation.table = Table( "library_dataset_dataset_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), index=True ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "state", TrimmedString( 64 ), index=True, key="_state" ),
Column( "copied_from_history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id", use_alter=True, name='history_dataset_association_dataset_id_fkey' ), nullable=True ),
Column( "copied_from_library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name='library_dataset_dataset_association_id_fkey' ), nullable=True ),
Column( "name", TrimmedString( 255 ), index=True ),
Column( "info", TrimmedString( 255 ) ),
Column( "blurb", TrimmedString( 255 ) ),
Column( "peek" , TEXT ),
Column( "tool_version" , TEXT ),
Column( "extension", TrimmedString( 64 ) ),
Column( "metadata", MetadataType(), key="_metadata" ),
Column( "parent_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True ),
Column( "designation", TrimmedString( 255 ) ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "visible", Boolean ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "message", TrimmedString( 255 ) ),
Column( "extended_metadata_id", Integer,
ForeignKey( "extended_metadata.id" ), index=True )
)
model.ExtendedMetadata.table = Table("extended_metadata", metadata,
Column( "id", Integer, primary_key=True ),
Column( "data", JSONType ) )
model.ExtendedMetadataIndex.table = Table("extended_metadata_index", metadata,
Column( "id", Integer, primary_key=True ),
Column( "extended_metadata_id", Integer, ForeignKey("extended_metadata.id",
onupdate="CASCADE",
ondelete="CASCADE" ),
index=True ),
Column( "path", String( 255 )),
Column( "value", TEXT))
model.Library.table = Table( "library", metadata,
Column( "id", Integer, primary_key=True ),
Column( "root_folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", String( 255 ), index=True ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ),
Column( "description", TEXT ),
Column( "synopsis", TEXT ) )
model.LibraryFolder.table = Table( "library_folder", metadata,
Column( "id", Integer, primary_key=True ),
Column( "parent_id", Integer, ForeignKey( "library_folder.id" ), nullable = True, index=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TEXT, index=True ),
Column( "description", TEXT ),
Column( "order_id", Integer ), #not currently being used, but for possible future use
Column( "item_count", Integer ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ),
Column( "genome_build", TrimmedString( 40 ) ) )
model.LibraryInfoAssociation.table = Table( 'library_info_association', metadata,
Column( "id", Integer, primary_key=True ),
Column( "library_id", Integer, ForeignKey( "library.id" ), index=True ),
Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "inheritable", Boolean, index=True, default=False ),
Column( "deleted", Boolean, index=True, default=False ) )
model.LibraryFolderInfoAssociation.table = Table( 'library_folder_info_association', metadata,
Column( "id", Integer, primary_key=True ),
Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "inheritable", Boolean, index=True, default=False ),
Column( "deleted", Boolean, index=True, default=False ) )
model.LibraryDatasetDatasetInfoAssociation.table = Table( 'library_dataset_dataset_info_association', metadata,
Column( "id", Integer, primary_key=True ),
Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True, index=True ),
Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
model.Job.table = Table( "job", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
Column( "tool_id", String( 255 ) ),
Column( "tool_version", TEXT, default="1.0.0" ),
Column( "state", String( 64 ), index=True ),
Column( "info", TrimmedString( 255 ) ),
Column( "command_line", TEXT ),
Column( "param_filename", String( 1024 ) ),
Column( "runner_name", String( 255 ) ),
Column( "stdout", TEXT ),
Column( "stderr", TEXT ),
Column( "exit_code", Integer, nullable=True ),
Column( "traceback", TEXT ),
Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True, nullable=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True ),
Column( "job_runner_name", String( 255 ) ),
Column( "job_runner_external_id", String( 255 ) ),
Column( "destination_id", String( 255 ), nullable=True ),
Column( "destination_params", JSONType, nullable=True ),
Column( "object_store_id", TrimmedString( 255 ), index=True ),
Column( "imported", Boolean, default=False, index=True ),
Column( "params", TrimmedString(255), index=True ),
Column( "handler", TrimmedString( 255 ), index=True ) )
model.JobStateHistory.table = Table( "job_state_history", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "state", String( 64 ), index=True ),
Column( "info", TrimmedString( 255 ) ) )
model.JobParameter.table = Table( "job_parameter", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "name", String(255) ),
Column( "value", TEXT ) )
model.JobToInputDatasetAssociation.table = Table( "job_to_input_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "name", String(255) ) )
model.JobToOutputDatasetAssociation.table = Table( "job_to_output_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "name", String(255) ) )
model.JobToInputDatasetCollectionAssociation.table = Table( "job_to_input_dataset_collection", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
Column( "name", Unicode(255) ) )
model.JobToOutputDatasetCollectionAssociation.table = Table( "job_to_output_dataset_collection", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
Column( "name", Unicode(255) ) )
model.JobToInputLibraryDatasetAssociation.table = Table( "job_to_input_library_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ),
Column( "name", String(255) ) )
model.JobToOutputLibraryDatasetAssociation.table = Table( "job_to_output_library_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True ),
Column( "name", String(255) ) )
model.ImplicitlyCreatedDatasetCollectionInput.table = Table( "implicitly_created_dataset_collection_inputs", metadata,
Column( "id", Integer, primary_key=True ),
Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
Column( "input_dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
Column( "name", Unicode(255) ) )
model.JobExternalOutputMetadata.table = Table( "job_external_output_metadata", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True ),
Column( "filename_in", String( 255 ) ),
Column( "filename_out", String( 255 ) ),
Column( "filename_results_code", String( 255 ) ),
Column( "filename_kwds", String( 255 ) ),
Column( "filename_override_metadata", String( 255 ) ),
Column( "job_runner_external_pid", String( 255 ) ) )
model.JobExportHistoryArchive.table = Table( "job_export_history_archive", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "compressed", Boolean, index=True, default=False ),
Column( "history_attrs_filename", TEXT ),
Column( "datasets_attrs_filename", TEXT ),
Column( "jobs_attrs_filename", TEXT )
)
model.JobImportHistoryArchive.table = Table( "job_import_history_archive", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "archive_dir", TEXT )
)
JOB_METRIC_MAX_LENGTH = 1023
model.JobMetricText.table = Table(
"job_metric_text",
metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "plugin", Unicode(255), ),
Column( "metric_name", Unicode(255), ),
Column( "metric_value", Unicode(JOB_METRIC_MAX_LENGTH), ),
)
model.TaskMetricText.table = Table(
"task_metric_text",
metadata,
Column( "id", Integer, primary_key=True ),
Column( "task_id", Integer, ForeignKey( "task.id" ), index=True ),
Column( "plugin", Unicode(255), ),
Column( "metric_name", Unicode(255), ),
Column( "metric_value", Unicode(JOB_METRIC_MAX_LENGTH), ),
)
model.JobMetricNumeric.table = Table(
"job_metric_numeric",
metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "plugin", Unicode(255), ),
Column( "metric_name", Unicode(255), ),
Column( "metric_value", Numeric( 22, 7 ), ),
)
model.TaskMetricNumeric.table = Table(
"task_metric_numeric",
metadata,
Column( "id", Integer, primary_key=True ),
Column( "task_id", Integer, ForeignKey( "task.id" ), index=True ),
Column( "plugin", Unicode(255), ),
Column( "metric_name", Unicode(255), ),
Column( "metric_value", Numeric( 22, 7 ), ),
)
model.GenomeIndexToolData.table = Table( "genome_index_tool_data", metadata,
Column( "id", Integer, primary_key=True ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "deferred_job_id", Integer, ForeignKey( "deferred_job.id" ), index=True ),
Column( "transfer_job_id", Integer, ForeignKey( "transfer_job.id" ), index=True ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "fasta_path", String( 255 ) ),
Column( "created_time", DateTime, default=now ),
Column( "modified_time", DateTime, default=now, onupdate=now ),
Column( "indexer", String( 64 ) ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
)
model.Task.table = Table( "task", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "execution_time", DateTime ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "state", String( 64 ), index=True ),
Column( "command_line", TEXT ),
Column( "param_filename", String( 1024 ) ),
Column( "runner_name", String( 255 ) ),
Column( "stdout", TEXT ),
Column( "stderr", TEXT ),
Column( "exit_code", Integer, nullable=True ),
Column( "info", TrimmedString ( 255 ) ),
Column( "traceback", TEXT ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True, nullable=False ),
Column( "working_directory", String(1024)),
Column( "task_runner_name", String( 255 ) ),
Column( "task_runner_external_id", String( 255 ) ),
Column( "prepare_input_files_cmd", TEXT ) )
model.PostJobAction.table = Table("post_job_action", metadata,
Column("id", Integer, primary_key=True),
Column("workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True, nullable=False),
Column("action_type", String(255), nullable=False),
Column("output_name", String(255), nullable=True),
Column("action_arguments", JSONType, nullable=True))
model.PostJobActionAssociation.table = Table("post_job_action_association", metadata,
Column("id", Integer, primary_key=True),
Column("job_id", Integer, ForeignKey( "job.id" ), index=True, nullable=False),
Column("post_job_action_id", Integer, ForeignKey( "post_job_action.id" ), index=True, nullable=False))
model.DeferredJob.table = Table( "deferred_job", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "state", String( 64 ), index=True ),
Column( "plugin", String( 128 ), index=True ),
Column( "params", JSONType ) )
model.TransferJob.table = Table( "transfer_job", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "state", String( 64 ), index=True ),
Column( "path", String( 1024 ) ),
Column( "info", TEXT ),
Column( "pid", Integer ),
Column( "socket", Integer ),
Column( "params", JSONType ) )
model.DatasetCollection.table = Table( "dataset_collection", metadata,
Column( "id", Integer, primary_key=True ),
Column( "collection_type", Unicode(255), nullable=False ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
)
model.HistoryDatasetCollectionAssociation.table = Table( "history_dataset_collection_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "name", TrimmedString( 255 ) ),
Column( "hid", Integer ),
Column( "visible", Boolean ),
Column( "deleted", Boolean, default=False ),
Column( "copied_from_history_dataset_collection_association_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), nullable=True ),
Column( "implicit_output_name", Unicode(255), nullable=True ),
)
model.LibraryDatasetCollectionAssociation.table = Table( "library_dataset_collection_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True ),
Column( "folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
Column( "name", TrimmedString( 255 ) ),
Column( "deleted", Boolean, default=False ),
)
model.DatasetCollectionElement.table = Table( "dataset_collection_element", metadata,
Column( "id", Integer, primary_key=True ),
# Parent collection id describing what collection this element belongs to.
Column( "dataset_collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True, nullable=False ),
# Child defined by this association - HDA, LDDA, or another dataset association...
Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
Column( "ldda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True ),
Column( "child_collection_id", Integer, ForeignKey( "dataset_collection.id" ), index=True, nullable=True ),
# Element index and identifier to define this parent-child relationship.
Column( "element_index", Integer ),
Column( "element_identifier", Unicode(255), ),
)
model.Event.table = Table( "event", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True, nullable=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True ),
Column( "message", TrimmedString( 1024 ) ),
Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True, nullable=True ),
Column( "tool_id", String( 255 ) ) )
model.GalaxySession.table = Table( "galaxy_session", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=True ),
Column( "remote_host", String( 255 ) ),
Column( "remote_addr", String( 255 ) ),
Column( "referer", TEXT ),
Column( "current_history_id", Integer, ForeignKey( "history.id" ), nullable=True ),
Column( "session_key", TrimmedString( 255 ), index=True, unique=True ), # unique 128 bit random number coerced to a string
Column( "is_valid", Boolean, default=False ),
Column( "prev_session_id", Integer ), # saves a reference to the previous session so we have a way to chain them together
Column( "disk_usage", Numeric( 15, 0 ), index=True ) )
model.GalaxySessionToHistoryAssociation.table = Table( "galaxy_session_to_history", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ) )
model.StoredWorkflow.table = Table( "stored_workflow", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
Column( "latest_workflow_id", Integer,
ForeignKey( "workflow.id", use_alter=True, name='stored_workflow_latest_workflow_id_fk' ), index=True ),
Column( "name", TEXT ),
Column( "deleted", Boolean, default=False ),
Column( "importable", Boolean, default=False ),
Column( "slug", TEXT, index=True ),
Column( "published", Boolean, index=True, default=False )
)
model.Workflow.table = Table( "workflow", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True, nullable=False ),
Column( "name", TEXT ),
Column( "has_cycles", Boolean ),
Column( "has_errors", Boolean ),
Column( "uuid", UUIDType, nullable=True )
)
model.WorkflowStep.table = Table( "workflow_step", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "workflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=False ),
Column( "type", String(64) ),
Column( "tool_id", TEXT ),
Column( "tool_version", TEXT ), # Reserved for future
Column( "tool_inputs", JSONType ),
Column( "tool_errors", JSONType ),
Column( "position", JSONType ),
Column( "config", JSONType ),
Column( "order_index", Integer ),
## Column( "input_connections", JSONType )
)
model.WorkflowRequestStepState.table = Table(
"workflow_request_step_states", metadata,
Column( "id", Integer, primary_key=True ),
Column( "workflow_invocation_id", Integer, ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE" )),
Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id" )),
Column( "value", JSONType ),
)
model.WorkflowRequestInputParameter.table = Table(
"workflow_request_input_parameters", metadata,
Column( "id", Integer, primary_key=True ),
Column( "workflow_invocation_id", Integer, ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE" )),
Column( "name", Unicode(255) ),
Column( "value", TEXT ),
Column( "type", Unicode(255) ),
)
model.WorkflowRequestToInputDatasetAssociation.table = Table(
"workflow_request_to_input_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "name", String(255) ),
Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id") ),
Column( "dataset_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
)
model.WorkflowRequestToInputDatasetCollectionAssociation.table = Table(
"workflow_request_to_input_collection_dataset", metadata,
Column( "id", Integer, primary_key=True ),
Column( "name", String(255) ),
Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True ),
Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id") ),
Column( "dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
)
model.WorkflowStepConnection.table = Table( "workflow_step_connection", metadata,
Column( "id", Integer, primary_key=True ),
Column( "output_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
Column( "input_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
Column( "output_name", TEXT ),
Column( "input_name", TEXT)
)
model.WorkflowOutput.table = Table( "workflow_output", metadata,
Column( "id", Integer, primary_key=True ),
Column( "workflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True, nullable=False),
Column( "output_name", String(255), nullable=True)
)
model.WorkflowInvocation.table = Table( "workflow_invocation", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "workflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=False ),
Column( "state", TrimmedString( 64 ), index=True ),
Column( "scheduler", TrimmedString( 255 ), index=True ),
Column( "handler", TrimmedString( 255 ), index=True ),
Column( 'uuid', UUIDType() ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
)
model.WorkflowInvocationStep.table = Table( "workflow_invocation_step", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "workflow_invocation_id", Integer, ForeignKey( "workflow_invocation.id" ), index=True, nullable=False ),
Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True, nullable=False ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True, nullable=True ),
Column( "action", JSONType, nullable=True ),
)
model.StoredWorkflowUserShareAssociation.table = Table( "stored_workflow_user_share_connection", metadata,
Column( "id", Integer, primary_key=True ),
Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
)
model.StoredWorkflowMenuEntry.table = Table( "stored_workflow_menu_entry", metadata,
Column( "id", Integer, primary_key=True ),
Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "order_index", Integer ) )
model.MetadataFile.table = Table( "metadata_file", metadata,
Column( "id", Integer, primary_key=True ),
Column( "name", TEXT ),
Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
Column( "lda_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), index=True, nullable=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
Column( "object_store_id", TrimmedString( 255 ), index=True ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ) )
model.FormDefinitionCurrent.table = Table('form_definition_current', metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "latest_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ))
model.FormDefinition.table = Table('form_definition', metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "desc", TEXT ),
Column( "form_definition_current_id",
Integer,
ForeignKey( "form_definition_current.id", name='for_def_form_def_current_id_fk', use_alter=True ),
index=True ),
Column( "fields", JSONType() ),
Column( "type", TrimmedString( 255 ), index=True ),
Column( "layout", JSONType() ), )
model.ExternalService.table = Table( 'external_service', metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "description", TEXT ),
Column( "external_service_type_id", TrimmedString( 255 ), nullable=False ),
Column( "version", TrimmedString( 255 ) ),
Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
model.RequestType.table = Table('request_type', metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "desc", TEXT ),
Column( "request_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "sample_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
model.RequestTypeExternalServiceAssociation.table = Table( "request_type_external_service_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ),
Column( "external_service_id", Integer, ForeignKey( "external_service.id" ), index=True ) )
model.RequestTypePermissions.table = Table( "request_type_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "action", TEXT ),
Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), nullable=True, index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
model.FormValues.table = Table('form_values', metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "content", JSONType()) )
model.Request.table = Table('request', metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "desc", TEXT ),
Column( "notification", JSONType() ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
model.RequestEvent.table = Table('request_event', metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "request_id", Integer, ForeignKey( "request.id" ), index=True ),
Column( "state", TrimmedString( 255 ), index=True ),
Column( "comment", TEXT ) )
model.Sample.table = Table('sample', metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "desc", TEXT ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "request_id", Integer, ForeignKey( "request.id" ), index=True ),
Column( "bar_code", TrimmedString( 255 ), index=True ),
Column( "library_id", Integer, ForeignKey( "library.id" ), index=True ),
Column( "folder_id", Integer, ForeignKey( "library_folder.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "workflow", JSONType, nullable=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), nullable=True) )
model.SampleState.table = Table('sample_state', metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "desc", TEXT ),
Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ) )
model.SampleEvent.table = Table('sample_event', metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True ),
Column( "sample_state_id", Integer, ForeignKey( "sample_state.id" ), index=True ),
Column( "comment", TEXT ) )
model.SampleDataset.table = Table('sample_dataset', metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "file_path", TEXT ),
Column( "status", TrimmedString( 255 ), nullable=False ),
Column( "error_msg", TEXT ),
Column( "size", TrimmedString( 255 ) ),
Column( "external_service_id", Integer, ForeignKey( "external_service.id" ), index=True ) )
model.Run.table = Table( 'run', metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ),
Column( "subindex", TrimmedString( 255 ), index=True ) )
model.RequestTypeRunAssociation.table = Table( "request_type_run_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True, nullable=False ),
Column( "run_id", Integer, ForeignKey( "run.id" ), index=True, nullable=False ) )
model.SampleRunAssociation.table = Table( "sample_run_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True, nullable=False ),
Column( "run_id", Integer, ForeignKey( "run.id" ), index=True, nullable=False ) )
model.Page.table = Table( "page", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
Column( "latest_revision_id", Integer,
ForeignKey( "page_revision.id", use_alter=True, name='page_latest_revision_id_fk' ), index=True ),
Column( "title", TEXT ),
Column( "slug", TEXT, unique=True, index=True ),
Column( "importable", Boolean, index=True, default=False ),
Column( "published", Boolean, index=True, default=False ),
Column( "deleted", Boolean, index=True, default=False ),
)
model.PageRevision.table = Table( "page_revision", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "page_id", Integer, ForeignKey( "page.id" ), index=True, nullable=False ),
Column( "title", TEXT ),
Column( "content", TEXT )
)
model.PageUserShareAssociation.table = Table( "page_user_share_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
)
model.Visualization.table = Table( "visualization", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
Column( "latest_revision_id", Integer,
ForeignKey( "visualization_revision.id", use_alter=True, name='visualization_latest_revision_id_fk' ), index=True ),
Column( "title", TEXT ),
Column( "type", TEXT ),
Column( "dbkey", TEXT, index=True ),
Column( "deleted", Boolean, default=False, index=True ),
Column( "importable", Boolean, default=False, index=True ),
Column( "slug", TEXT, index=True ),
Column( "published", Boolean, default=False, index=True )
)
model.VisualizationRevision.table = Table( "visualization_revision", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True, nullable=False ),
Column( "title", TEXT ),
Column( "dbkey", TEXT, index=True ),
Column( "config", JSONType )
)
model.VisualizationUserShareAssociation.table = Table( "visualization_user_share_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
)
#Data Manager tables
model.DataManagerHistoryAssociation.table = Table( "data_manager_history_association", metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
)
model.DataManagerJobAssociation.table = Table( "data_manager_job_association", metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
Column( "data_manager_id", TEXT, index=True )
)
# Tagging tables.
model.Tag.table = Table( "tag", metadata,
Column( "id", Integer, primary_key=True ),
Column( "type", Integer ),
Column( "parent_id", Integer, ForeignKey( "tag.id" ) ),
Column( "name", TrimmedString(255) ),
UniqueConstraint( "name" ) )
model.HistoryTagAssociation.table = Table( "history_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
Column( "value", TrimmedString(255), index=True),
Column( "user_value", TrimmedString(255), index=True) )
model.DatasetTagAssociation.table = Table( "dataset_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "dataset_id", Integer, ForeignKey( "dataset.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
Column( "value", TrimmedString(255), index=True),
Column( "user_value", TrimmedString(255), index=True) )
model.HistoryDatasetAssociationTagAssociation.table = Table( "history_dataset_association_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
Column( "value", TrimmedString(255), index=True),
Column( "user_value", TrimmedString(255), index=True) )
model.StoredWorkflowTagAssociation.table = Table( "stored_workflow_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", Unicode(255), index=True),
Column( "value", Unicode(255), index=True),
Column( "user_value", Unicode(255), index=True) )
model.PageTagAssociation.table = Table( "page_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
Column( "value", TrimmedString(255), index=True),
Column( "user_value", TrimmedString(255), index=True) )
model.WorkflowStepTagAssociation.table = Table( "workflow_step_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", Unicode(255), index=True),
Column( "value", Unicode(255), index=True),
Column( "user_value", Unicode(255), index=True) )
model.VisualizationTagAssociation.table = Table( "visualization_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
Column( "value", TrimmedString(255), index=True),
Column( "user_value", TrimmedString(255), index=True) )
model.HistoryDatasetCollectionTagAssociation.table = Table( "history_dataset_collection_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
Column( "value", TrimmedString(255), index=True),
Column( "user_value", TrimmedString(255), index=True) )
model.LibraryDatasetCollectionTagAssociation.table = Table( "library_dataset_collection_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "library_dataset_collection_id", Integer, ForeignKey( "library_dataset_collection_association.id" ), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
Column( "value", TrimmedString(255), index=True),
Column( "user_value", TrimmedString(255), index=True) )
model.ToolTagAssociation.table = Table( "tool_tag_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "tool_id", TrimmedString(255), index=True ),
Column( "tag_id", Integer, ForeignKey( "tag.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "user_tname", TrimmedString(255), index=True),
Column( "value", TrimmedString(255), index=True),
Column( "user_value", TrimmedString(255), index=True) )
# Annotation tables.
# Each *_annotation_association table links one annotatable item (history,
# dataset, workflow, page, ...) and the annotating user to a free-text TEXT
# annotation.  All share the same shape, and every column is indexed so
# annotations can be looked up by item, by user, or by annotation text.
model.HistoryAnnotationAssociation.table = Table( "history_annotation_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "annotation", TEXT, index=True) )
model.HistoryDatasetAssociationAnnotationAssociation.table = Table( "history_dataset_association_annotation_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "annotation", TEXT, index=True) )
model.StoredWorkflowAnnotationAssociation.table = Table( "stored_workflow_annotation_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "annotation", TEXT, index=True) )
model.WorkflowStepAnnotationAssociation.table = Table( "workflow_step_annotation_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "workflow_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "annotation", TEXT, index=True) )
model.PageAnnotationAssociation.table = Table( "page_annotation_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "annotation", TEXT, index=True) )
model.VisualizationAnnotationAssociation.table = Table( "visualization_annotation_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "annotation", TEXT, index=True) )
# NOTE: collection annotations point at the *association* tables
# (history_dataset_collection_association.id / library_dataset_collection_association.id),
# not at a bare collection table.
model.HistoryDatasetCollectionAnnotationAssociation.table = Table( "history_dataset_collection_annotation_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "history_dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "annotation", TEXT, index=True) )
model.LibraryDatasetCollectionAnnotationAssociation.table = Table( "library_dataset_collection_annotation_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "library_dataset_collection_id", Integer, ForeignKey( "library_dataset_collection_association.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "annotation", TEXT, index=True) )
# Ratings tables.
# Each *_rating_association table stores one user's Integer rating of an
# item; structurally identical to the annotation tables above except the
# payload column is an indexed Integer instead of TEXT.
model.HistoryRatingAssociation.table = Table( "history_rating_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "rating", Integer, index=True) )
model.HistoryDatasetAssociationRatingAssociation.table = Table( "history_dataset_association_rating_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "rating", Integer, index=True) )
model.StoredWorkflowRatingAssociation.table = Table( "stored_workflow_rating_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "rating", Integer, index=True) )
model.PageRatingAssociation.table = Table( "page_rating_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "page_id", Integer, ForeignKey( "page.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "rating", Integer, index=True) )
model.VisualizationRatingAssociation.table = Table( "visualization_rating_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "visualization_id", Integer, ForeignKey( "visualization.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "rating", Integer, index=True) )
# As with annotations, collection ratings reference the association tables.
model.HistoryDatasetCollectionRatingAssociation.table = Table( "history_dataset_collection_rating_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "history_dataset_collection_id", Integer, ForeignKey( "history_dataset_collection_association.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "rating", Integer, index=True) )
model.LibraryDatasetCollectionRatingAssociation.table = Table( "library_dataset_collection_rating_association", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "library_dataset_collection_id", Integer, ForeignKey( "library_dataset_collection_association.id" ), index=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "rating", Integer, index=True) )
# User tables.
# Per-user key/value preference storage (name is indexed for lookup;
# value is capped at 1024 characters).
model.UserPreference.table = Table( "user_preference", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "name", Unicode( 255 ), index=True),
    Column( "value", Unicode( 1024 ) ) )
# Log of user actions, tied to both the user and the galaxy session in
# which the action occurred; action/context/params are free-form Unicode.
model.UserAction.table = Table( "user_action", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "create_time", DateTime, default=now ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
    Column( "action", Unicode( 255 ) ),
    Column( "context", Unicode( 512 ) ),
    Column( "params", Unicode( 1024 ) ) )
# API keys: one 32-character key per row, unique across the table so a
# key resolves to exactly one user.
model.APIKeys.table = Table( "api_keys", metadata,
    Column( "id", Integer, primary_key=True ),
    Column( "create_time", DateTime, default=now ),
    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
    Column( "key", TrimmedString( 32 ), index=True, unique=True ) )
# With the tables defined we can define the mappers and setup the
# relationships between the model objects.
def simple_mapping( model, **kwds ):
    """Register a classic SQLAlchemy mapper for ``model`` against its
    ``model.table``, passing any keyword arguments through as the mapper's
    ``properties`` dict (typically ``relation()`` definitions)."""
    mapper( model, model.table, properties=kwds )
mapper( model.Sample, model.Sample.table,
properties=dict(
events=relation( model.SampleEvent, backref="sample",
order_by=desc( model.SampleEvent.table.c.update_time ) ),
datasets=relation( model.SampleDataset, backref="sample",
order_by=desc( model.SampleDataset.table.c.update_time ) ),
values=relation( model.FormValues,
primaryjoin=( model.Sample.table.c.form_values_id == model.FormValues.table.c.id ) ),
request=relation( model.Request,
primaryjoin=( model.Sample.table.c.request_id == model.Request.table.c.id ) ),
folder=relation( model.LibraryFolder,
primaryjoin=( model.Sample.table.c.folder_id == model.LibraryFolder.table.c.id ) ),
library=relation( model.Library,
primaryjoin=( model.Sample.table.c.library_id == model.Library.table.c.id ) ),
history=relation( model.History,
primaryjoin=( model.Sample.table.c.history_id == model.History.table.c.id ) ),
) )
mapper( model.FormValues, model.FormValues.table,
properties=dict( form_definition=relation( model.FormDefinition,
primaryjoin=( model.FormValues.table.c.form_definition_id ==
model.FormDefinition.table.c.id ) )
)
)
mapper( model.Request, model.Request.table,
properties=dict( values=relation( model.FormValues,
primaryjoin=( model.Request.table.c.form_values_id == model.FormValues.table.c.id ) ),
type=relation( model.RequestType,
primaryjoin=( model.Request.table.c.request_type_id == model.RequestType.table.c.id ) ),
user=relation( model.User,
primaryjoin=( model.Request.table.c.user_id == model.User.table.c.id ),
backref="requests" ),
samples=relation( model.Sample,
primaryjoin=( model.Request.table.c.id == model.Sample.table.c.request_id ),
order_by=asc( model.Sample.table.c.id ) ),
events=relation( model.RequestEvent, backref="request",
order_by=desc( model.RequestEvent.table.c.update_time ) )
) )
mapper( model.RequestEvent, model.RequestEvent.table,
properties=None )
mapper( model.ExternalService, model.ExternalService.table,
properties=dict( form_definition=relation( model.FormDefinition,
primaryjoin=( model.ExternalService.table.c.form_definition_id ==
model.FormDefinition.table.c.id ) ),
form_values=relation( model.FormValues,
primaryjoin=( model.ExternalService.table.c.form_values_id ==
model.FormValues.table.c.id ) )
) )
mapper( model.RequestType, model.RequestType.table,
properties=dict( states=relation( model.SampleState,
backref="request_type",
primaryjoin=( model.RequestType.table.c.id == model.SampleState.table.c.request_type_id ),
order_by=asc( model.SampleState.table.c.update_time ) ),
request_form=relation( model.FormDefinition,
primaryjoin=( model.RequestType.table.c.request_form_id == model.FormDefinition.table.c.id ) ),
sample_form=relation( model.FormDefinition,
primaryjoin=( model.RequestType.table.c.sample_form_id == model.FormDefinition.table.c.id ) ),
) )
mapper( model.RequestTypeExternalServiceAssociation, model.RequestTypeExternalServiceAssociation.table,
properties=dict(
request_type=relation( model.RequestType,
primaryjoin=( ( model.RequestTypeExternalServiceAssociation.table.c.request_type_id == model.RequestType.table.c.id ) ), backref="external_service_associations" ),
external_service=relation( model.ExternalService,
primaryjoin=( model.RequestTypeExternalServiceAssociation.table.c.external_service_id
== model.ExternalService.table.c.id ) )
)
)
mapper( model.RequestTypePermissions, model.RequestTypePermissions.table,
properties=dict(
request_type=relation( model.RequestType, backref="actions" ),
role=relation( model.Role, backref="request_type_actions" )
)
)
mapper( model.FormDefinition, model.FormDefinition.table,
properties=dict( current=relation( model.FormDefinitionCurrent,
primaryjoin=( model.FormDefinition.table.c.form_definition_current_id == model.FormDefinitionCurrent.table.c.id ) )
) )
mapper( model.FormDefinitionCurrent, model.FormDefinitionCurrent.table,
properties=dict( forms=relation( model.FormDefinition, backref='form_definition_current',
cascade="all, delete-orphan",
primaryjoin=( model.FormDefinitionCurrent.table.c.id == model.FormDefinition.table.c.form_definition_current_id ) ),
latest_form=relation( model.FormDefinition, post_update=True,
primaryjoin=( model.FormDefinitionCurrent.table.c.latest_form_id == model.FormDefinition.table.c.id ) )
) )
mapper( model.SampleEvent, model.SampleEvent.table,
properties=dict( state=relation( model.SampleState,
primaryjoin=( model.SampleEvent.table.c.sample_state_id == model.SampleState.table.c.id ) ),
) )
mapper( model.SampleState, model.SampleState.table,
properties=None )
mapper( model.SampleDataset, model.SampleDataset.table,
properties=dict( external_service=relation( model.ExternalService,
primaryjoin=( model.SampleDataset.table.c.external_service_id == model.ExternalService.table.c.id ) )
)
)
mapper( model.SampleRunAssociation, model.SampleRunAssociation.table,
properties=dict( sample=relation( model.Sample, backref="runs", order_by=desc( model.Run.table.c.update_time ) ),
run=relation( model.Run, backref="samples", order_by=asc( model.Sample.table.c.id ) ) ) )
mapper( model.RequestTypeRunAssociation, model.RequestTypeRunAssociation.table,
properties=dict( request_type=relation( model.RequestType, backref="run" ),
run=relation( model.Run, backref="request_type" ) ) )
mapper( model.Run, model.Run.table,
properties=dict( template=relation( model.FormDefinition,
primaryjoin=( model.Run.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
info=relation( model.FormValues,
primaryjoin=( model.Run.table.c.form_values_id == model.FormValues.table.c.id ) ) ) )
mapper( model.UserAddress, model.UserAddress.table,
properties=dict(
user=relation( model.User,
primaryjoin=( model.UserAddress.table.c.user_id == model.User.table.c.id ),
backref='addresses',
order_by=desc(model.UserAddress.table.c.update_time)),
) )
mapper( model.UserOpenID, model.UserOpenID.table,
properties=dict(
session=relation( model.GalaxySession,
primaryjoin=( model.UserOpenID.table.c.session_id == model.GalaxySession.table.c.id ),
backref='openids',
order_by=desc( model.UserOpenID.table.c.update_time ) ),
user=relation( model.User,
primaryjoin=( model.UserOpenID.table.c.user_id == model.User.table.c.id ),
backref='openids',
order_by=desc( model.UserOpenID.table.c.update_time ) ) ) )
mapper( model.ValidationError, model.ValidationError.table )
simple_mapping( model.HistoryDatasetAssociation,
dataset=relation(
model.Dataset,
primaryjoin=( model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id ), lazy=False ),
# .history defined in History mapper
copied_from_history_dataset_association=relation(
model.HistoryDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_history_dataset_association_id == model.HistoryDatasetAssociation.table.c.id ),
remote_side=[model.HistoryDatasetAssociation.table.c.id],
uselist=False ),
copied_to_history_dataset_associations=relation(
model.HistoryDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_history_dataset_association_id == model.HistoryDatasetAssociation.table.c.id ) ),
copied_from_library_dataset_dataset_association=relation(
model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id == model.LibraryDatasetDatasetAssociation.table.c.id ),
uselist=False ),
copied_to_library_dataset_dataset_associations=relation(
model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id == model.LibraryDatasetDatasetAssociation.table.c.id ) ),
implicitly_converted_datasets=relation(
model.ImplicitlyConvertedDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.hda_parent_id == model.HistoryDatasetAssociation.table.c.id ) ),
implicitly_converted_parent_datasets=relation(
model.ImplicitlyConvertedDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.hda_id == model.HistoryDatasetAssociation.table.c.id ) ),
children=relation(
model.HistoryDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociation.table.c.parent_id == model.HistoryDatasetAssociation.table.c.id ),
backref=backref( "parent", primaryjoin=( model.HistoryDatasetAssociation.table.c.parent_id == model.HistoryDatasetAssociation.table.c.id ), remote_side=[model.HistoryDatasetAssociation.table.c.id], uselist=False ) ),
visible_children=relation(
model.HistoryDatasetAssociation,
primaryjoin=( ( model.HistoryDatasetAssociation.table.c.parent_id == model.HistoryDatasetAssociation.table.c.id ) & ( model.HistoryDatasetAssociation.table.c.visible == True ) ) ),
tags=relation( model.HistoryDatasetAssociationTagAssociation, order_by=model.HistoryDatasetAssociationTagAssociation.table.c.id, backref='history_tag_associations' ),
annotations=relation( model.HistoryDatasetAssociationAnnotationAssociation, order_by=model.HistoryDatasetAssociationAnnotationAssociation.table.c.id, backref="hdas" ),
ratings=relation( model.HistoryDatasetAssociationRatingAssociation, order_by=model.HistoryDatasetAssociationRatingAssociation.table.c.id, backref="hdas" ),
extended_metadata=relation(
model.ExtendedMetadata,
primaryjoin=( ( model.HistoryDatasetAssociation.table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id ) )
),
hidden_beneath_collection_instance=relation(
model.HistoryDatasetCollectionAssociation,
primaryjoin=( ( model.HistoryDatasetAssociation.table.c.hidden_beneath_collection_instance_id == model.HistoryDatasetCollectionAssociation.table.c.id ) ),
uselist=False,
backref="hidden_dataset_instances",
)
)
simple_mapping( model.Dataset,
history_associations=relation(
model.HistoryDatasetAssociation,
primaryjoin=( model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id ) ),
active_history_associations=relation(
model.HistoryDatasetAssociation,
primaryjoin=( ( model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id ) & ( model.HistoryDatasetAssociation.table.c.deleted == False ) & ( model.HistoryDatasetAssociation.table.c.purged == False ) ) ),
purged_history_associations=relation(
model.HistoryDatasetAssociation,
primaryjoin=( ( model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id ) & ( model.HistoryDatasetAssociation.table.c.purged == True ) ) ),
library_associations=relation(
model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.Dataset.table.c.id == model.LibraryDatasetDatasetAssociation.table.c.dataset_id ) ),
active_library_associations=relation(
model.LibraryDatasetDatasetAssociation,
primaryjoin=( ( model.Dataset.table.c.id == model.LibraryDatasetDatasetAssociation.table.c.dataset_id ) & ( model.LibraryDatasetDatasetAssociation.table.c.deleted == False ) ) ),
tags=relation(model.DatasetTagAssociation, order_by=model.DatasetTagAssociation.table.c.id, backref='datasets')
)
mapper( model.HistoryDatasetAssociationDisplayAtAuthorization, model.HistoryDatasetAssociationDisplayAtAuthorization.table,
properties=dict( history_dataset_association = relation( model.HistoryDatasetAssociation ),
user = relation( model.User ) ) )
mapper( model.HistoryDatasetAssociationSubset, model.HistoryDatasetAssociationSubset.table,
properties=dict( hda = relation( model.HistoryDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociationSubset.table.c.history_dataset_association_id == model.HistoryDatasetAssociation.table.c.id ) ),
subset = relation( model.HistoryDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociationSubset.table.c.history_dataset_association_subset_id == model.HistoryDatasetAssociation.table.c.id ) )
) )
mapper( model.ImplicitlyConvertedDatasetAssociation, model.ImplicitlyConvertedDatasetAssociation.table,
properties=dict( parent_hda=relation(
model.HistoryDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.hda_parent_id == model.HistoryDatasetAssociation.table.c.id ) ),
parent_ldda=relation(
model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.ldda_parent_id == model.LibraryDatasetDatasetAssociation.table.c.id ) ),
dataset_ldda=relation(
model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.ldda_id == model.LibraryDatasetDatasetAssociation.table.c.id ) ),
dataset=relation(
model.HistoryDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.hda_id == model.HistoryDatasetAssociation.table.c.id ) ) ) )
# History mapper.  Besides the plain `datasets` relation, it exposes
# filtered, read-only (viewonly=True) variants: `active_*` excludes
# deleted items and `visible_*` additionally requires the visible flag.
# All dataset relations are ordered by hid; exports by descending id.
mapper( model.History, model.History.table,
    properties=dict( galaxy_sessions=relation( model.GalaxySessionToHistoryAssociation ),
                     datasets=relation( model.HistoryDatasetAssociation, backref="history", order_by=asc(model.HistoryDatasetAssociation.table.c.hid) ),
                     exports=relation( model.JobExportHistoryArchive, primaryjoin=( model.JobExportHistoryArchive.table.c.history_id == model.History.table.c.id ), order_by=desc( model.JobExportHistoryArchive.table.c.id ) ),
                     active_datasets=relation(
                         model.HistoryDatasetAssociation,
                         primaryjoin=(
                             ( model.HistoryDatasetAssociation.table.c.history_id == model.History.table.c.id ) & not_( model.HistoryDatasetAssociation.table.c.deleted )
                         ),
                         order_by=asc( model.HistoryDatasetAssociation.table.c.hid ),
                         viewonly=True
                     ),
                     active_dataset_collections=relation(
                         model.HistoryDatasetCollectionAssociation,
                         # NOTE(review): the parentheses below group as
                         # ((history_id) == History.id) & not_(deleted) -- the same
                         # predicate as active_datasets above, just bracketed
                         # confusingly (the paren opened after `primaryjoin=`
                         # closes right after `History.table.c.id`).
                         primaryjoin=(
                             ( model.HistoryDatasetCollectionAssociation.table.c.history_id ) == model.History.table.c.id ) & not_( model.HistoryDatasetCollectionAssociation.table.c.deleted ),
                         order_by=asc( model.HistoryDatasetCollectionAssociation.table.c.hid ),
                         viewonly=True,
                     ),
                     visible_datasets=relation(
                         model.HistoryDatasetAssociation,
                         primaryjoin=( ( model.HistoryDatasetAssociation.table.c.history_id == model.History.table.c.id ) & not_( model.HistoryDatasetAssociation.table.c.deleted ) & model.HistoryDatasetAssociation.table.c.visible ),
                         order_by=asc( model.HistoryDatasetAssociation.table.c.hid ),
                         viewonly=True,
                     ),
                     visible_dataset_collections=relation(
                         model.HistoryDatasetCollectionAssociation,
                         primaryjoin=( ( model.HistoryDatasetCollectionAssociation.table.c.history_id == model.History.table.c.id ) & not_( model.HistoryDatasetCollectionAssociation.table.c.deleted ) & model.HistoryDatasetCollectionAssociation.table.c.visible ),
                         order_by=asc( model.HistoryDatasetCollectionAssociation.table.c.hid ),
                         viewonly=True,
                     ),
                     # Tag/annotation/rating relations all backref "histories".
                     tags=relation( model.HistoryTagAssociation, order_by=model.HistoryTagAssociation.table.c.id, backref="histories" ),
                     annotations=relation( model.HistoryAnnotationAssociation, order_by=model.HistoryAnnotationAssociation.table.c.id, backref="histories" ),
                     ratings=relation( model.HistoryRatingAssociation, order_by=model.HistoryRatingAssociation.table.c.id, backref="histories" ) )
)
# Association proxy so that
#   History.users_shared_with_dot_users
# returns the User objects (rather than the HistoryUserShareAssociation
# rows of the 'users_shared_with' relation) a history is shared with.
model.History.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
mapper( model.HistoryUserShareAssociation, model.HistoryUserShareAssociation.table,
properties=dict( user=relation( model.User, backref='histories_shared_by_others' ),
history=relation( model.History, backref='users_shared_with' )
) )
mapper( model.User, model.User.table,
properties=dict( histories=relation( model.History, backref="user",
order_by=desc(model.History.table.c.update_time) ),
active_histories=relation( model.History, primaryjoin=( ( model.History.table.c.user_id == model.User.table.c.id ) & ( not_( model.History.table.c.deleted ) ) ), order_by=desc( model.History.table.c.update_time ) ),
galaxy_sessions=relation( model.GalaxySession, order_by=desc( model.GalaxySession.table.c.update_time ) ),
stored_workflow_menu_entries=relation( model.StoredWorkflowMenuEntry, backref="user",
cascade="all, delete-orphan",
collection_class=ordering_list( 'order_index' ) ),
_preferences=relation( model.UserPreference, backref="user", collection_class=attribute_mapped_collection('name')),
# addresses=relation( UserAddress,
# primaryjoin=( User.table.c.id == UserAddress.table.c.user_id ) )
values=relation( model.FormValues,
primaryjoin=( model.User.table.c.form_values_id == model.FormValues.table.c.id ) ),
api_keys=relation( model.APIKeys, backref="user", order_by=desc( model.APIKeys.table.c.create_time ) ),
) )
# Set up proxy so that this syntax is possible:
# <user_obj>.preferences[pref_name] = pref_value
model.User.preferences = association_proxy('_preferences', 'value', creator=model.UserPreference)
mapper( model.Group, model.Group.table,
properties=dict( users=relation( model.UserGroupAssociation ) ) )
mapper( model.UserGroupAssociation, model.UserGroupAssociation.table,
properties=dict( user=relation( model.User, backref = "groups" ),
group=relation( model.Group, backref = "members" ) ) )
mapper( model.DefaultUserPermissions, model.DefaultUserPermissions.table,
properties=dict( user=relation( model.User, backref = "default_permissions" ),
role=relation( model.Role ) ) )
mapper( model.DefaultHistoryPermissions, model.DefaultHistoryPermissions.table,
properties=dict( history=relation( model.History, backref = "default_permissions" ),
role=relation( model.Role ) ) )
mapper( model.Role, model.Role.table,
properties=dict(
users=relation( model.UserRoleAssociation ),
groups=relation( model.GroupRoleAssociation )
)
)
mapper( model.UserRoleAssociation, model.UserRoleAssociation.table,
properties=dict(
user=relation( model.User, backref="roles" ),
non_private_roles=relation( model.User,
backref="non_private_roles",
primaryjoin=( ( model.User.table.c.id == model.UserRoleAssociation.table.c.user_id ) & ( model.UserRoleAssociation.table.c.role_id == model.Role.table.c.id ) & not_( model.Role.table.c.name == model.User.table.c.email ) ) ),
role=relation( model.Role )
)
)
mapper( model.GroupRoleAssociation, model.GroupRoleAssociation.table,
properties=dict(
group=relation( model.Group, backref="roles" ),
role=relation( model.Role )
)
)
mapper( model.Quota, model.Quota.table,
properties=dict( users=relation( model.UserQuotaAssociation ),
groups=relation( model.GroupQuotaAssociation ) ) )
mapper( model.UserQuotaAssociation, model.UserQuotaAssociation.table,
properties=dict( user=relation( model.User, backref="quotas" ),
quota=relation( model.Quota ) ) )
mapper( model.GroupQuotaAssociation, model.GroupQuotaAssociation.table,
properties=dict( group=relation( model.Group, backref="quotas" ),
quota=relation( model.Quota ) ) )
mapper( model.DefaultQuotaAssociation, model.DefaultQuotaAssociation.table,
properties=dict( quota=relation( model.Quota, backref="default" ) ) )
mapper( model.DatasetPermissions, model.DatasetPermissions.table,
properties=dict(
dataset=relation( model.Dataset, backref="actions" ),
role=relation( model.Role, backref="dataset_actions" )
)
)
mapper( model.LibraryPermissions, model.LibraryPermissions.table,
properties=dict(
library=relation( model.Library, backref="actions" ),
role=relation( model.Role, backref="library_actions" )
)
)
mapper( model.LibraryFolderPermissions, model.LibraryFolderPermissions.table,
properties=dict(
folder=relation( model.LibraryFolder, backref="actions" ),
role=relation( model.Role, backref="library_folder_actions" )
)
)
mapper( model.LibraryDatasetPermissions, model.LibraryDatasetPermissions.table,
properties=dict(
library_dataset=relation( model.LibraryDataset, backref="actions" ),
role=relation( model.Role, backref="library_dataset_actions" )
)
)
mapper( model.LibraryDatasetDatasetAssociationPermissions, model.LibraryDatasetDatasetAssociationPermissions.table,
properties=dict(
library_dataset_dataset_association = relation( model.LibraryDatasetDatasetAssociation, backref="actions" ),
role=relation( model.Role, backref="library_dataset_dataset_actions" )
)
)
mapper( model.Library, model.Library.table,
properties=dict(
root_folder=relation( model.LibraryFolder, backref=backref( "library_root" ) )
)
)
mapper( model.ExtendedMetadata, model.ExtendedMetadata.table,
properties=dict(
children=relation(
model.ExtendedMetadataIndex,
primaryjoin=( model.ExtendedMetadataIndex.table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id ),
backref=backref("parent",
primaryjoin=( model.ExtendedMetadataIndex.table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id )
)
)
)
)
mapper( model.ExtendedMetadataIndex, model.ExtendedMetadataIndex.table,
properties=dict(
extended_metadata=relation(
model.ExtendedMetadata,
primaryjoin=( ( model.ExtendedMetadataIndex.table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id ) )
)
)
)
mapper( model.LibraryInfoAssociation, model.LibraryInfoAssociation.table,
properties=dict( library=relation( model.Library,
primaryjoin=( ( model.LibraryInfoAssociation.table.c.library_id == model.Library.table.c.id ) & ( not_( model.LibraryInfoAssociation.table.c.deleted ) ) ), backref="info_association" ),
template=relation( model.FormDefinition,
primaryjoin=( model.LibraryInfoAssociation.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
info=relation( model.FormValues,
primaryjoin=( model.LibraryInfoAssociation.table.c.form_values_id == model.FormValues.table.c.id ) )
) )
mapper( model.LibraryFolder, model.LibraryFolder.table,
properties=dict(
folders=relation(
model.LibraryFolder,
primaryjoin=( model.LibraryFolder.table.c.parent_id == model.LibraryFolder.table.c.id ),
order_by=asc( model.LibraryFolder.table.c.name ),
backref=backref( "parent", primaryjoin=( model.LibraryFolder.table.c.parent_id == model.LibraryFolder.table.c.id ), remote_side=[model.LibraryFolder.table.c.id] ) ),
active_folders=relation( model.LibraryFolder,
primaryjoin=( ( model.LibraryFolder.table.c.parent_id == model.LibraryFolder.table.c.id ) & ( not_( model.LibraryFolder.table.c.deleted ) ) ),
order_by=asc( model.LibraryFolder.table.c.name ),
lazy=True, #"""sqlalchemy.exceptions.ArgumentError: Error creating eager relationship 'active_folders' on parent class '<class 'galaxy.model.LibraryFolder'>' to child class '<class 'galaxy.model.LibraryFolder'>': Cant use eager loading on a self referential relationship."""
viewonly=True ),
datasets=relation( model.LibraryDataset,
primaryjoin=( ( model.LibraryDataset.table.c.folder_id == model.LibraryFolder.table.c.id ) ),
order_by=asc( model.LibraryDataset.table.c._name ),
lazy=True,
viewonly=True ),
active_datasets=relation( model.LibraryDataset,
primaryjoin=( ( model.LibraryDataset.table.c.folder_id == model.LibraryFolder.table.c.id ) & ( not_( model.LibraryDataset.table.c.deleted ) ) ),
order_by=asc( model.LibraryDataset.table.c._name ),
lazy=True,
viewonly=True )
) )
mapper( model.LibraryFolderInfoAssociation, model.LibraryFolderInfoAssociation.table,
properties=dict( folder=relation( model.LibraryFolder,
primaryjoin=( ( model.LibraryFolderInfoAssociation.table.c.library_folder_id == model.LibraryFolder.table.c.id ) & ( not_( model.LibraryFolderInfoAssociation.table.c.deleted ) ) ), backref="info_association" ),
template=relation( model.FormDefinition,
primaryjoin=( model.LibraryFolderInfoAssociation.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
info=relation( model.FormValues,
primaryjoin=( model.LibraryFolderInfoAssociation.table.c.form_values_id == model.FormValues.table.c.id ) )
) )
mapper( model.LibraryDataset, model.LibraryDataset.table,
properties=dict(
folder=relation( model.LibraryFolder ),
library_dataset_dataset_association=relation( model.LibraryDatasetDatasetAssociation, primaryjoin=( model.LibraryDataset.table.c.library_dataset_dataset_association_id == model.LibraryDatasetDatasetAssociation.table.c.id ) ),
expired_datasets = relation( model.LibraryDatasetDatasetAssociation, foreign_keys=[model.LibraryDataset.table.c.id,model.LibraryDataset.table.c.library_dataset_dataset_association_id ], primaryjoin=( ( model.LibraryDataset.table.c.id == model.LibraryDatasetDatasetAssociation.table.c.library_dataset_id ) & ( not_( model.LibraryDataset.table.c.library_dataset_dataset_association_id == model.LibraryDatasetDatasetAssociation.table.c.id ) ) ), viewonly=True, uselist=True )
) )
mapper( model.LibraryDatasetDatasetAssociation, model.LibraryDatasetDatasetAssociation.table,
properties=dict(
dataset=relation( model.Dataset ),
library_dataset = relation( model.LibraryDataset,
primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.library_dataset_id == model.LibraryDataset.table.c.id ) ),
#user=relation( model.User.mapper ),
user=relation( model.User ),
copied_from_library_dataset_dataset_association=relation(
model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id == model.LibraryDatasetDatasetAssociation.table.c.id ),
remote_side=[model.LibraryDatasetDatasetAssociation.table.c.id],
uselist=False ),
copied_to_library_dataset_dataset_associations=relation(
model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id == model.LibraryDatasetDatasetAssociation.table.c.id ) ),
copied_from_history_dataset_association=relation(
model.HistoryDatasetAssociation,
primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.copied_from_history_dataset_association_id == model.HistoryDatasetAssociation.table.c.id ),
uselist=False ),
copied_to_history_dataset_associations=relation(
model.HistoryDatasetAssociation,
primaryjoin=( model.HistoryDatasetAssociation.table.c.copied_from_library_dataset_dataset_association_id == model.LibraryDatasetDatasetAssociation.table.c.id ) ),
implicitly_converted_datasets=relation(
model.ImplicitlyConvertedDatasetAssociation,
primaryjoin=( model.ImplicitlyConvertedDatasetAssociation.table.c.ldda_parent_id == model.LibraryDatasetDatasetAssociation.table.c.id ) ),
children=relation(
model.LibraryDatasetDatasetAssociation,
primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.parent_id == model.LibraryDatasetDatasetAssociation.table.c.id ),
backref=backref( "parent", primaryjoin=( model.LibraryDatasetDatasetAssociation.table.c.parent_id == model.LibraryDatasetDatasetAssociation.table.c.id ), remote_side=[model.LibraryDatasetDatasetAssociation.table.c.id] ) ),
visible_children=relation(
model.LibraryDatasetDatasetAssociation,
primaryjoin=( ( model.LibraryDatasetDatasetAssociation.table.c.parent_id == model.LibraryDatasetDatasetAssociation.table.c.id ) & ( model.LibraryDatasetDatasetAssociation.table.c.visible == True ) ) ),
extended_metadata=relation(
model.ExtendedMetadata,
primaryjoin=( ( model.LibraryDatasetDatasetAssociation.table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id ) )
)
))
# Map LibraryDatasetDatasetInfoAssociation: ties an LDDA to its form template
# and submitted form values; the backref join excludes deleted info rows.
mapper( model.LibraryDatasetDatasetInfoAssociation, model.LibraryDatasetDatasetInfoAssociation.table,
    properties=dict( library_dataset_dataset_association=relation( model.LibraryDatasetDatasetAssociation,
                     primaryjoin=( ( model.LibraryDatasetDatasetInfoAssociation.table.c.library_dataset_dataset_association_id == model.LibraryDatasetDatasetAssociation.table.c.id ) & ( not_( model.LibraryDatasetDatasetInfoAssociation.table.c.deleted ) ) ), backref="info_association" ),
                     template=relation( model.FormDefinition,
                                        primaryjoin=( model.LibraryDatasetDatasetInfoAssociation.table.c.form_definition_id == model.FormDefinition.table.c.id ) ),
                     info=relation( model.FormValues,
                                    primaryjoin=( model.LibraryDatasetDatasetInfoAssociation.table.c.form_values_id == model.FormValues.table.c.id ) )
                   ) )
# Mappers joining jobs to their input/output datasets, dataset collections and
# library datasets.  Inputs get a "dependent_jobs" backref on the dataset side;
# lazy=False eagerly loads the dataset with the association row.
mapper( model.JobToInputDatasetAssociation,
    model.JobToInputDatasetAssociation.table, properties=dict(
        job=relation( model.Job ), dataset=relation(
            model.HistoryDatasetAssociation, lazy=False,
            backref="dependent_jobs" ) ) )
mapper( model.JobToOutputDatasetAssociation,
    model.JobToOutputDatasetAssociation.table, properties=dict(
        job=relation( model.Job ), dataset=relation(
            model.HistoryDatasetAssociation, lazy=False ) ) )
mapper( model.JobToInputDatasetCollectionAssociation,
    model.JobToInputDatasetCollectionAssociation.table, properties=dict(
        job=relation( model.Job ), dataset_collection=relation(
            model.HistoryDatasetCollectionAssociation, lazy=False,
            backref="dependent_jobs" ) ) )
mapper( model.JobToOutputDatasetCollectionAssociation,
    model.JobToOutputDatasetCollectionAssociation.table, properties=dict(
        job=relation( model.Job ), dataset_collection=relation(
            model.HistoryDatasetCollectionAssociation, lazy=False ) ) )
mapper( model.JobToInputLibraryDatasetAssociation,
    model.JobToInputLibraryDatasetAssociation.table, properties=dict(
        job=relation( model.Job ), dataset=relation(
            model.LibraryDatasetDatasetAssociation, lazy=False,
            backref="dependent_jobs" ) ) )
mapper( model.JobToOutputLibraryDatasetAssociation,
    model.JobToOutputLibraryDatasetAssociation.table, properties=dict(
        job=relation( model.Job ), dataset=relation(
            model.LibraryDatasetDatasetAssociation, lazy=False ) ) )
# Straightforward mappings (via the local simple_mapping helper) for job state
# history, job/task metrics, and the implicit dataset-collection input record.
simple_mapping(
    model.JobStateHistory,
    job=relation( model.Job, backref="state_history" ),
)
simple_mapping(
    model.JobMetricText,
    job=relation( model.Job, backref="text_metrics" ),
)
simple_mapping(
    model.TaskMetricText,
    task=relation( model.Task, backref="text_metrics" ),
)
simple_mapping(
    model.JobMetricNumeric,
    job=relation( model.Job, backref="numeric_metrics" ),
)
simple_mapping(
    model.TaskMetricNumeric,
    task=relation( model.Task, backref="numeric_metrics" ),
)
simple_mapping(
    model.ImplicitlyCreatedDatasetCollectionInput,
    input_dataset_collection=relation(
        model.HistoryDatasetCollectionAssociation,
        primaryjoin=( ( model.HistoryDatasetCollectionAssociation.table.c.id == model.ImplicitlyCreatedDatasetCollectionInput.table.c.input_dataset_collection_id ) ),
        # backref="implicitly_created_dataset_collections",
    ),
)
# Job parameters, external metadata jobs, history import/export archives,
# genome index jobs, and post-job actions attached to workflow steps.
mapper( model.JobParameter, model.JobParameter.table )
mapper( model.JobExternalOutputMetadata, model.JobExternalOutputMetadata.table,
    properties=dict( job = relation( model.Job ),
                     history_dataset_association = relation( model.HistoryDatasetAssociation, lazy = False ),
                     library_dataset_dataset_association = relation( model.LibraryDatasetDatasetAssociation, lazy = False ) ) )
mapper( model.JobExportHistoryArchive, model.JobExportHistoryArchive.table,
    properties=dict( job = relation( model.Job ),
                     history = relation( model.History ),
                     dataset = relation( model.Dataset ) ) )
mapper( model.JobImportHistoryArchive, model.JobImportHistoryArchive.table,
    properties=dict( job = relation( model.Job ), history = relation( model.History ) ) )
mapper( model.GenomeIndexToolData, model.GenomeIndexToolData.table,
    properties=dict( job = relation( model.Job, backref='job' ),
                     dataset = relation( model.Dataset ),
                     user = relation( model.User ),
                     deferred = relation( model.DeferredJob, backref='deferred_job' ),
                     transfer = relation( model.TransferJob, backref='transfer_job' ) ) )
mapper(model.PostJobAction, model.PostJobAction.table,
    properties=dict(workflow_step = relation( model.WorkflowStep, backref='post_job_actions', primaryjoin=(model.WorkflowStep.table.c.id == model.PostJobAction.table.c.workflow_step_id))))
mapper( model.PostJobActionAssociation, model.PostJobActionAssociation.table,
    properties=dict( job = relation( model.Job ),
                     post_job_action = relation( model.PostJobAction) ) )
# Core Job mapper: wires the job to its owner, session, history, parameters,
# all input/output associations and tasks.  parameters / post_job_actions /
# external_output_metadata are eagerly loaded (lazy=False).
mapper( model.Job, model.Job.table,
    #properties=dict( user=relation( model.User.mapper ),
    properties=dict( user=relation( model.User ),
                     galaxy_session=relation( model.GalaxySession ),
                     history=relation( model.History ),
                     library_folder=relation( model.LibraryFolder ),
                     parameters=relation( model.JobParameter, lazy=False ),
                     input_datasets=relation( model.JobToInputDatasetAssociation ),
                     output_datasets=relation( model.JobToOutputDatasetAssociation ),
                     post_job_actions=relation( model.PostJobActionAssociation, lazy=False ),
                     input_library_datasets=relation( model.JobToInputLibraryDatasetAssociation ),
                     output_library_datasets=relation( model.JobToOutputLibraryDatasetAssociation ),
                     external_output_metadata = relation( model.JobExternalOutputMetadata, lazy = False ),
                     tasks = relation(model.Task) ) )
mapper( model.Task, model.Task.table,
    properties=dict( job = relation( model.Job ) ) )
mapper( model.DeferredJob, model.DeferredJob.table,
    properties = {} )
mapper( model.TransferJob, model.TransferJob.table,
    properties = {} )
# Dataset collection mappings.  A collection owns ordered elements; an element
# may point at an HDA, an LDDA, or a nested child collection.  The history/
# library association rows carry tags, annotations and ratings, plus the
# self-referential copied-from/copied-to lineage for history collections.
simple_mapping( model.DatasetCollection,
    elements=relation(
        model.DatasetCollectionElement,
        primaryjoin=( model.DatasetCollection.table.c.id == model.DatasetCollectionElement.table.c.dataset_collection_id ),
        remote_side=[ model.DatasetCollectionElement.table.c.dataset_collection_id ],
        backref="collection",
        order_by=model.DatasetCollectionElement.table.c.element_index,
    ),
)
simple_mapping( model.HistoryDatasetCollectionAssociation,
    collection=relation( model.DatasetCollection ),
    history=relation( model.History, backref='dataset_collections' ),
    copied_from_history_dataset_collection_association=relation(
        model.HistoryDatasetCollectionAssociation,
        primaryjoin=( model.HistoryDatasetCollectionAssociation.table.c.copied_from_history_dataset_collection_association_id == model.HistoryDatasetCollectionAssociation.table.c.id ),
        remote_side=[model.HistoryDatasetCollectionAssociation.table.c.id],
        uselist=False ),
    copied_to_history_dataset_collection_associations=relation(
        model.HistoryDatasetCollectionAssociation,
        primaryjoin=( model.HistoryDatasetCollectionAssociation.table.c.copied_from_history_dataset_collection_association_id == model.HistoryDatasetCollectionAssociation.table.c.id ) ),
    implicit_input_collections=relation(
        model.ImplicitlyCreatedDatasetCollectionInput,
        primaryjoin=( ( model.HistoryDatasetCollectionAssociation.table.c.id == model.ImplicitlyCreatedDatasetCollectionInput.table.c.dataset_collection_id ) ),
        backref="dataset_collection",
    ),
    tags=relation( model.HistoryDatasetCollectionTagAssociation, order_by=model.HistoryDatasetCollectionTagAssociation.table.c.id, backref='dataset_collections' ),
    annotations=relation( model.HistoryDatasetCollectionAnnotationAssociation, order_by=model.HistoryDatasetCollectionAnnotationAssociation.table.c.id, backref="dataset_collections" ),
    ratings=relation( model.HistoryDatasetCollectionRatingAssociation, order_by=model.HistoryDatasetCollectionRatingAssociation.table.c.id, backref="dataset_collections" ),
)
simple_mapping( model.LibraryDatasetCollectionAssociation,
    collection=relation( model.DatasetCollection ),
    folder=relation( model.LibraryFolder, backref='dataset_collections' ),
    tags=relation( model.LibraryDatasetCollectionTagAssociation, order_by=model.LibraryDatasetCollectionTagAssociation.table.c.id, backref='dataset_collections' ),
    annotations=relation( model.LibraryDatasetCollectionAnnotationAssociation, order_by=model.LibraryDatasetCollectionAnnotationAssociation.table.c.id, backref="dataset_collections" ),
    ratings=relation( model.LibraryDatasetCollectionRatingAssociation, order_by=model.LibraryDatasetCollectionRatingAssociation.table.c.id, backref="dataset_collections" ),
)
simple_mapping( model.DatasetCollectionElement,
    hda=relation(
        model.HistoryDatasetAssociation,
        primaryjoin=( model.DatasetCollectionElement.table.c.hda_id == model.HistoryDatasetAssociation.table.c.id )
    ),
    ldda=relation(
        model.LibraryDatasetDatasetAssociation,
        primaryjoin=( model.DatasetCollectionElement.table.c.ldda_id == model.LibraryDatasetDatasetAssociation.table.c.id )
    ),
    child_collection=relation(
        model.DatasetCollection,
        primaryjoin=( model.DatasetCollectionElement.table.c.child_collection_id == model.DatasetCollection.table.c.id ),
    ),
)
# Events, sessions, workflows and stored workflows.  A StoredWorkflow owns its
# Workflow revisions (delete-orphan cascade) and tracks latest_workflow via a
# post_update relation to break the mutual FK cycle between the two tables.
mapper( model.Event, model.Event.table,
    properties=dict( history=relation( model.History ),
                     galaxy_session=relation( model.GalaxySession ),
                     #user=relation( model.User.mapper ) ) )
                     user=relation( model.User ) ) )
mapper( model.GalaxySession, model.GalaxySession.table,
    properties=dict( histories=relation( model.GalaxySessionToHistoryAssociation ),
                     current_history=relation( model.History ),
                     #user=relation( model.User.mapper ) ) )
                     user=relation( model.User ) ) )
mapper( model.GalaxySessionToHistoryAssociation, model.GalaxySessionToHistoryAssociation.table,
    properties=dict( galaxy_session=relation( model.GalaxySession ),
                     history=relation( model.History ) ) )
mapper( model.Workflow, model.Workflow.table,
    properties=dict( steps=relation( model.WorkflowStep, backref='workflow',
                                      order_by=asc(model.WorkflowStep.table.c.order_index),
                                      cascade="all, delete-orphan",
                                      lazy=False ),
                     ) )
mapper( model.WorkflowStep, model.WorkflowStep.table,
    properties=dict(
        tags=relation(model.WorkflowStepTagAssociation, order_by=model.WorkflowStepTagAssociation.table.c.id, backref="workflow_steps"),
        annotations=relation( model.WorkflowStepAnnotationAssociation, order_by=model.WorkflowStepAnnotationAssociation.table.c.id, backref="workflow_steps" ) )
)
mapper( model.WorkflowOutput, model.WorkflowOutput.table,
    properties=dict(workflow_step = relation( model.WorkflowStep, backref='workflow_outputs', primaryjoin=(model.WorkflowStep.table.c.id == model.WorkflowOutput.table.c.workflow_step_id))))
mapper( model.WorkflowStepConnection, model.WorkflowStepConnection.table,
    properties=dict( input_step=relation( model.WorkflowStep, backref="input_connections", cascade="all",
                                          primaryjoin=( model.WorkflowStepConnection.table.c.input_step_id == model.WorkflowStep.table.c.id ) ),
                     output_step=relation( model.WorkflowStep, backref="output_connections", cascade="all",
                                           primaryjoin=( model.WorkflowStepConnection.table.c.output_step_id == model.WorkflowStep.table.c.id ) ) ) )
mapper( model.StoredWorkflow, model.StoredWorkflow.table,
    properties=dict( user=relation( model.User,
                                    primaryjoin=( model.User.table.c.id == model.StoredWorkflow.table.c.user_id ),
                                    backref='stored_workflows' ),
                     workflows=relation( model.Workflow, backref='stored_workflow',
                                         cascade="all, delete-orphan",
                                         primaryjoin=( model.StoredWorkflow.table.c.id == model.Workflow.table.c.stored_workflow_id ) ),
                     latest_workflow=relation( model.Workflow, post_update=True,
                                               primaryjoin=( model.StoredWorkflow.table.c.latest_workflow_id == model.Workflow.table.c.id ),
                                               lazy=False ),
                     tags=relation( model.StoredWorkflowTagAssociation, order_by=model.StoredWorkflowTagAssociation.table.c.id, backref="stored_workflows" ),
                     owner_tags=relation( model.StoredWorkflowTagAssociation,
                                          primaryjoin=and_( model.StoredWorkflow.table.c.id == model.StoredWorkflowTagAssociation.table.c.stored_workflow_id,
                                                            model.StoredWorkflow.table.c.user_id == model.StoredWorkflowTagAssociation.table.c.user_id ),
                                          order_by=model.StoredWorkflowTagAssociation.table.c.id ),
                     annotations=relation( model.StoredWorkflowAnnotationAssociation, order_by=model.StoredWorkflowAnnotationAssociation.table.c.id, backref="stored_workflows" ),
                     ratings=relation( model.StoredWorkflowRatingAssociation, order_by=model.StoredWorkflowRatingAssociation.table.c.id, backref="stored_workflows" ) )
)
# Set up proxy so that
#   StoredWorkflow.users_shared_with
# returns a list of users that workflow is shared with.
model.StoredWorkflow.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
mapper( model.StoredWorkflowUserShareAssociation, model.StoredWorkflowUserShareAssociation.table,
    properties=dict( user=relation( model.User, backref='workflows_shared_by_others' ),
                     stored_workflow=relation( model.StoredWorkflow, backref='users_shared_with' )
                   ) )
# Workflow menu entries, invocations, and the per-request parameter/state/
# input association rows (mapped through the simple_mapping helper).
mapper( model.StoredWorkflowMenuEntry, model.StoredWorkflowMenuEntry.table,
    properties=dict( stored_workflow=relation( model.StoredWorkflow ) ) )
mapper( model.WorkflowInvocation, model.WorkflowInvocation.table,
    properties=dict(
        history=relation( model.History ),
        input_parameters=relation( model.WorkflowRequestInputParameter ),
        step_states=relation( model.WorkflowRequestStepState ),
        input_datasets=relation( model.WorkflowRequestToInputDatasetAssociation ),
        input_dataset_collections=relation( model.WorkflowRequestToInputDatasetCollectionAssociation ),
        steps=relation( model.WorkflowInvocationStep, backref='workflow_invocation', lazy=False ),
        workflow=relation( model.Workflow ) ) )
mapper( model.WorkflowInvocationStep, model.WorkflowInvocationStep.table,
    properties=dict(
        workflow_step = relation( model.WorkflowStep ),
        job = relation( model.Job, backref=backref( 'workflow_invocation_step', uselist=False ) ) ) )
simple_mapping(
    model.WorkflowRequestInputParameter,
    workflow_invocation=relation( model.WorkflowInvocation ),
)
simple_mapping(
    model.WorkflowRequestStepState,
    workflow_invocation=relation( model.WorkflowInvocation ),
    workflow_step=relation( model.WorkflowStep ),
)
simple_mapping(
    model.WorkflowRequestToInputDatasetAssociation,
    workflow_invocation=relation( model.WorkflowInvocation ),
    workflow_step=relation( model.WorkflowStep ),
    dataset=relation( model.HistoryDatasetAssociation ),
)
simple_mapping(
    model.WorkflowRequestToInputDatasetCollectionAssociation,
    workflow_invocation=relation( model.WorkflowInvocation ),
    workflow_step=relation( model.WorkflowStep ),
    dataset_collection=relation( model.HistoryDatasetCollectionAssociation ),
)
# Metadata files, Pages and Visualizations.  Pages/Visualizations follow the
# same pattern as StoredWorkflow: revisions with delete-orphan cascade, a
# post_update latest_revision relation, tags/annotations/ratings, and an
# association proxy exposing the users the object is shared with.
mapper( model.MetadataFile, model.MetadataFile.table,
    properties=dict( history_dataset=relation( model.HistoryDatasetAssociation ), library_dataset=relation( model.LibraryDatasetDatasetAssociation ) ) )
mapper( model.PageRevision, model.PageRevision.table )
mapper( model.Page, model.Page.table,
    properties=dict( user=relation( model.User ),
                     revisions=relation( model.PageRevision, backref='page',
                                         cascade="all, delete-orphan",
                                         primaryjoin=( model.Page.table.c.id == model.PageRevision.table.c.page_id ) ),
                     latest_revision=relation( model.PageRevision, post_update=True,
                                               primaryjoin=( model.Page.table.c.latest_revision_id == model.PageRevision.table.c.id ),
                                               lazy=False ),
                     tags=relation(model.PageTagAssociation, order_by=model.PageTagAssociation.table.c.id, backref="pages"),
                     annotations=relation( model.PageAnnotationAssociation, order_by=model.PageAnnotationAssociation.table.c.id, backref="pages" ),
                     ratings=relation( model.PageRatingAssociation, order_by=model.PageRatingAssociation.table.c.id, backref="pages" )
                   ) )
# Set up proxy so that
#   Page.users_shared_with
# returns a list of users that page is shared with.
model.Page.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
mapper( model.PageUserShareAssociation, model.PageUserShareAssociation.table,
    properties=dict( user=relation( model.User, backref='pages_shared_by_others' ),
                     page=relation( model.Page, backref='users_shared_with' )
                   ) )
mapper( model.VisualizationRevision, model.VisualizationRevision.table )
mapper( model.Visualization, model.Visualization.table,
    properties=dict( user=relation( model.User ),
                     revisions=relation( model.VisualizationRevision, backref='visualization',
                                         cascade="all, delete-orphan",
                                         primaryjoin=( model.Visualization.table.c.id == model.VisualizationRevision.table.c.visualization_id ) ),
                     latest_revision=relation( model.VisualizationRevision, post_update=True,
                                               primaryjoin=( model.Visualization.table.c.latest_revision_id == model.VisualizationRevision.table.c.id ),
                                               lazy=False ),
                     tags=relation( model.VisualizationTagAssociation, order_by=model.VisualizationTagAssociation.table.c.id, backref="visualizations" ),
                     annotations=relation( model.VisualizationAnnotationAssociation, order_by=model.VisualizationAnnotationAssociation.table.c.id, backref="visualizations" ),
                     ratings=relation( model.VisualizationRatingAssociation, order_by=model.VisualizationRatingAssociation.table.c.id, backref="visualizations" )
                   ) )
# Set up proxy so that
#   Visualization.users_shared_with
# returns a list of users that visualization is shared with.
model.Visualization.users_shared_with_dot_users = association_proxy( 'users_shared_with', 'user' )
mapper( model.VisualizationUserShareAssociation, model.VisualizationUserShareAssociation.table,
    properties=dict( user=relation( model.User, backref='visualizations_shared_by_others' ),
                     visualization=relation( model.Visualization, backref='users_shared_with' )
                   ) )
# Tag tables.
# Tags are hierarchical: a tag's children point back to a single 'parent'.
simple_mapping( model.Tag,
    children=relation(model.Tag, backref=backref( 'parent', remote_side=[model.Tag.table.c.id] ) )
)
def tag_mapping( tag_association_class, backref_name ):
    """Map a tag association class: wire up its Tag (with the given backref
    name on the Tag side) and the owning User.
    """
    relations = dict(
        tag=relation( model.Tag, backref=backref_name ),
        user=relation( model.User ),
    )
    simple_mapping( tag_association_class, **relations )
# Register every tag association table; the second argument is the backref
# name added to model.Tag for the tagged objects.
tag_mapping( model.HistoryTagAssociation, "tagged_histories" )
tag_mapping( model.DatasetTagAssociation, "tagged_datasets" )
tag_mapping( model.HistoryDatasetAssociationTagAssociation, "tagged_history_dataset_associations" )
tag_mapping( model.PageTagAssociation, "tagged_pages" )
tag_mapping( model.StoredWorkflowTagAssociation, "tagged_workflows" )
tag_mapping( model.WorkflowStepTagAssociation, "tagged_workflow_steps" )
tag_mapping( model.VisualizationTagAssociation, "tagged_visualizations" )
tag_mapping( model.HistoryDatasetCollectionTagAssociation, "tagged_history_dataset_collections" )
tag_mapping( model.LibraryDatasetCollectionTagAssociation, "tagged_library_dataset_collections" )
tag_mapping( model.ToolTagAssociation, "tagged_tools" )
# Annotation tables.
def annotation_mapping( annotation_class, **kwds ):
    """Map an annotation association class.

    Each keyword argument maps an annotated-object property name to its model
    class; every value is wrapped in relation(), and a 'user' relation for the
    annotation's owner is always added.
    """
    # Use .items() rather than the Python-2-only .iteritems(): identical
    # behavior here (the dict is tiny) and portable to Python 3.
    kwds = dict( ( key, relation( value ) ) for key, value in kwds.items() )
    simple_mapping( annotation_class, **dict( user=relation( model.User ), **kwds ) )
# Register every annotation association table; the keyword name becomes the
# relation property pointing at the annotated object.
annotation_mapping( model.HistoryAnnotationAssociation, history=model.History )
annotation_mapping( model.HistoryDatasetAssociationAnnotationAssociation, hda=model.HistoryDatasetAssociation )
annotation_mapping( model.StoredWorkflowAnnotationAssociation, stored_workflow=model.StoredWorkflow )
annotation_mapping( model.WorkflowStepAnnotationAssociation, workflow_step=model.WorkflowStep )
annotation_mapping( model.PageAnnotationAssociation, page=model.Page )
annotation_mapping( model.VisualizationAnnotationAssociation, visualization=model.Visualization )
annotation_mapping( model.HistoryDatasetCollectionAnnotationAssociation, history_dataset_collection=model.HistoryDatasetCollectionAssociation )
annotation_mapping( model.LibraryDatasetCollectionAnnotationAssociation, library_dataset_collection=model.LibraryDatasetCollectionAssociation )
# Rating tables.
def rating_mapping( rating_class, **kwds ):
    """Map a rating association class.

    Each keyword argument maps a rated-object property name to its model
    class; every value is wrapped in relation(), and a 'user' relation for the
    rating's owner is always added.
    """
    # Use .items() rather than the Python-2-only .iteritems(): identical
    # behavior here (the dict is tiny) and portable to Python 3.
    kwds = dict( ( key, relation( value ) ) for key, value in kwds.items() )
    simple_mapping( rating_class, **dict( user=relation( model.User ), **kwds ) )
# Register every rating association table; the keyword name becomes the
# relation property pointing at the rated object.
rating_mapping( model.HistoryRatingAssociation, history=model.History )
rating_mapping( model.HistoryDatasetAssociationRatingAssociation, hda=model.HistoryDatasetAssociation )
rating_mapping( model.StoredWorkflowRatingAssociation, stored_workflow=model.StoredWorkflow )
rating_mapping( model.PageRatingAssociation, page=model.Page )
# NOTE(review): 'visualizaiton' below is misspelled; the typo becomes the
# mapped property name, so fixing it would rename a public attribute -- verify
# no caller depends on the misspelling before correcting.
rating_mapping( model.VisualizationRatingAssociation, visualizaiton=model.Visualization )
rating_mapping( model.HistoryDatasetCollectionRatingAssociation, history_dataset_collection=model.HistoryDatasetCollectionAssociation )
# NOTE(review): 'libary_dataset_collection' is also misspelled -- same caveat.
rating_mapping( model.LibraryDatasetCollectionRatingAssociation, libary_dataset_collection=model.LibraryDatasetCollectionAssociation )
# Data Manager tables
mapper( model.DataManagerHistoryAssociation, model.DataManagerHistoryAssociation.table,
    properties=dict( history=relation( model.History ),
                     user=relation( model.User, backref='data_manager_histories' )
                    )
      )
mapper( model.DataManagerJobAssociation, model.DataManagerJobAssociation.table,
    properties=dict( job=relation( model.Job, backref=backref('data_manager_association', uselist=False ), uselist=False ) )
      )
# User tables.
mapper( model.UserPreference, model.UserPreference.table,
    properties = {}
)
mapper( model.UserAction, model.UserAction.table,
  #properties = dict( user=relation( model.User.mapper ) )
  properties = dict( user=relation( model.User ) )
 )
mapper( model.APIKeys, model.APIKeys.table,
    properties = {} )
#model.HistoryDatasetAssociation.mapper.add_property( "creating_job_associations", relation( model.JobToOutputDatasetAssociation ) )
#model.LibraryDatasetDatasetAssociation.mapper.add_property( "creating_job_associations", relation( model.JobToOutputLibraryDatasetAssociation ) )
# Attach "creating_job_associations" after the fact via class_mapper so each
# dataset (collection) association can reach the job that produced it.
class_mapper(model.HistoryDatasetAssociation).add_property( "creating_job_associations", relation( model.JobToOutputDatasetAssociation ) )
class_mapper(model.LibraryDatasetDatasetAssociation).add_property( "creating_job_associations", relation( model.JobToOutputLibraryDatasetAssociation ) )
class_mapper(model.HistoryDatasetCollectionAssociation).add_property( "creating_job_associations", relation( model.JobToOutputDatasetCollectionAssociation ) )
# Helper methods.
def db_next_hid( self ):
    """
    db_next_hid( self )

    Override __next_hid to generate from the database in a concurrency safe way.
    Loads the next history ID from the DB and returns it.
    It also saves the future next_id into the DB.

    :rtype: int
    :returns: the next history id
    """
    conn = object_session( self ).connection()
    table = self.table
    trans = conn.begin()
    try:
        # SELECT ... FOR UPDATE row-locks this history's counter row so two
        # concurrent requests cannot be handed the same hid.
        next_hid = select( [table.c.hid_counter], table.c.id == self.id, for_update=True ).scalar()
        table.update( table.c.id == self.id ).execute( hid_counter = ( next_hid + 1 ) )
        trans.commit()
        return next_hid
    except:
        # Bare except is deliberate: roll back on *any* failure, then re-raise
        # so the caller still sees the original error.
        trans.rollback()
        raise
# Monkeypatch the mapped History class to use the DB-backed hid generator.
model.History._next_hid = db_next_hid
def init( file_path, url, engine_options=None, create_tables=False, map_install_models=False, database_query_profiling_proxy=False, object_store=None, trace_logger=None, use_pbkdf2=True ):
    """Connect mappings to the database.

    :param file_path: base path where dataset files live on disk
    :param url: database connection URL
    :param engine_options: optional dict of extra engine options (default: none)
    :param create_tables: if True, create all mapped tables
    :param map_install_models: also map the tool-shed install models
    :param database_query_profiling_proxy: enable the query-profiling proxy
    :param object_store: object store attached to model.Dataset
    :param trace_logger: optional trace logger passed to the engine factory
    :param use_pbkdf2: whether user passwords use PBKDF2 hashing
    :returns: a ModelMapping over the mapped model modules
    """
    # Fix: the original signature used the mutable default engine_options={},
    # which is shared across calls; use None and create a fresh dict instead.
    if engine_options is None:
        engine_options = {}
    # Connect dataset to the file path
    model.Dataset.file_path = file_path
    # Connect dataset to object store
    model.Dataset.object_store = object_store
    # Use PBKDF2 password hashing?
    model.User.use_pbkdf2 = use_pbkdf2
    # Load the appropriate db module
    engine = build_engine( url, engine_options, database_query_profiling_proxy, trace_logger )
    # Connect the metadata to the database.
    metadata.bind = engine
    model_modules = [model]
    if map_install_models:
        # Importing the mapping module registers the install-model mappers as
        # an import side effect.
        import galaxy.model.tool_shed_install.mapping
        from galaxy.model import tool_shed_install
        model_modules.append(tool_shed_install)
    result = ModelMapping(model_modules, engine=engine)
    # Create tables if needed
    if create_tables:
        metadata.create_all()
        # metadata.engine.commit()
    result.create_tables = create_tables
    # load local galaxy security policy
    result.security_agent = GalaxyRBACAgent( result )
    return result
|
[
"galaxy.model.base.ModelMapping",
"sqlalchemy.orm.mapper",
"sqlalchemy.orm.collections.attribute_mapped_collection",
"galaxy.model.custom_types.JSONType",
"sqlalchemy.orm.object_session",
"sqlalchemy.select",
"sqlalchemy.UniqueConstraint",
"sqlalchemy.Column",
"sqlalchemy.orm.class_mapper",
"sqlalchemy.orm.relation",
"sqlalchemy.not_",
"galaxy.security.GalaxyRBACAgent",
"galaxy.model.custom_types.TrimmedString",
"sqlalchemy.Unicode",
"sqlalchemy.and_",
"sqlalchemy.Numeric",
"sqlalchemy.ext.orderinglist.ordering_list",
"sqlalchemy.String",
"galaxy.model.custom_types.MetadataType",
"sqlalchemy.MetaData",
"galaxy.model.orm.engine_factory.build_engine",
"sqlalchemy.ext.associationproxy.association_proxy",
"sqlalchemy.ForeignKey",
"sqlalchemy.desc",
"sqlalchemy.asc",
"sqlalchemy.orm.backref",
"galaxy.model.custom_types.UUIDType",
"logging.getLogger"
] |
[((921, 948), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (938, 948), False, 'import logging\n'), ((963, 973), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (971, 973), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((71186, 71255), 'sqlalchemy.orm.mapper', 'mapper', (['model.RequestEvent', 'model.RequestEvent.table'], {'properties': 'None'}), '(model.RequestEvent, model.RequestEvent.table, properties=None)\n', (71192, 71255), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((75102, 75169), 'sqlalchemy.orm.mapper', 'mapper', (['model.SampleState', 'model.SampleState.table'], {'properties': 'None'}), '(model.SampleState, model.SampleState.table, properties=None)\n', (75108, 75169), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((77559, 77617), 'sqlalchemy.orm.mapper', 'mapper', (['model.ValidationError', 'model.ValidationError.table'], {}), '(model.ValidationError, model.ValidationError.table)\n', (77565, 77617), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((87793, 87839), 'sqlalchemy.ext.associationproxy.association_proxy', 'association_proxy', (['"""users_shared_with"""', '"""user"""'], {}), "('users_shared_with', 'user')\n", (87810, 87839), False, 'from sqlalchemy.ext.associationproxy import association_proxy\n'), ((89749, 89821), 'sqlalchemy.ext.associationproxy.association_proxy', 'association_proxy', (['"""_preferences"""', '"""value"""'], {'creator': 'model.UserPreference'}), "('_preferences', 'value', creator=model.UserPreference)\n", (89766, 89821), False, 'from sqlalchemy.ext.associationproxy import association_proxy\n'), ((105353, 105405), 'sqlalchemy.orm.mapper', 'mapper', (['model.JobParameter', 'model.JobParameter.table'], {}), 
'(model.JobParameter, model.JobParameter.table)\n', (105359, 105405), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((108192, 108257), 'sqlalchemy.orm.mapper', 'mapper', (['model.DeferredJob', 'model.DeferredJob.table'], {'properties': '{}'}), '(model.DeferredJob, model.DeferredJob.table, properties={})\n', (108198, 108257), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((108267, 108332), 'sqlalchemy.orm.mapper', 'mapper', (['model.TransferJob', 'model.TransferJob.table'], {'properties': '{}'}), '(model.TransferJob, model.TransferJob.table, properties={})\n', (108273, 108332), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((116287, 116333), 'sqlalchemy.ext.associationproxy.association_proxy', 'association_proxy', (['"""users_shared_with"""', '"""user"""'], {}), "('users_shared_with', 'user')\n", (116304, 116333), False, 'from sqlalchemy.ext.associationproxy import association_proxy\n'), ((118664, 118716), 'sqlalchemy.orm.mapper', 'mapper', (['model.PageRevision', 'model.PageRevision.table'], {}), '(model.PageRevision, model.PageRevision.table)\n', (118670, 118716), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((119932, 119978), 'sqlalchemy.ext.associationproxy.association_proxy', 'association_proxy', (['"""users_shared_with"""', '"""user"""'], {}), "('users_shared_with', 'user')\n", (119949, 119978), False, 'from sqlalchemy.ext.associationproxy import association_proxy\n'), ((120243, 120313), 'sqlalchemy.orm.mapper', 'mapper', (['model.VisualizationRevision', 'model.VisualizationRevision.table'], {}), '(model.VisualizationRevision, model.VisualizationRevision.table)\n', (120249, 120313), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((121729, 121775), 'sqlalchemy.ext.associationproxy.association_proxy', 
'association_proxy', (['"""users_shared_with"""', '"""user"""'], {}), "('users_shared_with', 'user')\n", (121746, 121775), False, 'from sqlalchemy.ext.associationproxy import association_proxy\n'), ((125717, 125788), 'sqlalchemy.orm.mapper', 'mapper', (['model.UserPreference', 'model.UserPreference.table'], {'properties': '{}'}), '(model.UserPreference, model.UserPreference.table, properties={})\n', (125723, 125788), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((125987, 126044), 'sqlalchemy.orm.mapper', 'mapper', (['model.APIKeys', 'model.APIKeys.table'], {'properties': '{}'}), '(model.APIKeys, model.APIKeys.table, properties={})\n', (125993, 126044), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((1031, 1070), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (1037, 1070), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((1077, 1121), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (1083, 1121), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((1129, 1187), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (1135, 1187), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((1405, 1447), 'sqlalchemy.Column', 'Column', (['"""external"""', 'Boolean'], {'default': '(False)'}), "('external', Boolean, 
default=False)\n", (1411, 1447), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((1540, 1593), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (1546, 1593), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((1601, 1653), 'sqlalchemy.Column', 'Column', (['"""purged"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('purged', Boolean, index=True, default=False)\n", (1607, 1653), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((1720, 1787), 'sqlalchemy.Column', 'Column', (['"""active"""', 'Boolean'], {'index': '(True)', 'default': '(True)', 'nullable': '(False)'}), "('active', Boolean, index=True, default=True, nullable=False)\n", (1726, 1787), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((1939, 1978), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (1945, 1978), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((1985, 2029), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (1991, 2029), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, 
Unicode, UniqueConstraint\n'), ((2037, 2095), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (2043, 2095), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((2686, 2739), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (2692, 2739), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((2747, 2799), 'sqlalchemy.Column', 'Column', (['"""purged"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('purged', Boolean, index=True, default=False)\n", (2753, 2799), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((2873, 2912), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (2879, 2912), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((2920, 2964), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (2926, 2964), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((2972, 3042), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'index': '(True)', 'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, index=True, 
default=now, onupdate=now)\n", (2978, 3042), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((3212, 3259), 'sqlalchemy.Column', 'Column', (['"""openid"""', 'TEXT'], {'index': '(True)', 'unique': '(True)'}), "('openid', TEXT, index=True, unique=True)\n", (3218, 3259), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((3372, 3411), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (3378, 3411), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((3418, 3462), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (3424, 3462), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((3470, 3540), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'index': '(True)', 'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, index=True, default=now, onupdate=now)\n", (3476, 3540), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((3670, 3711), 'sqlalchemy.Column', 'Column', (['"""hid_counter"""', 'Integer'], {'default': '(1)'}), "('hid_counter', Integer, default=1)\n", (3676, 3711), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, 
UniqueConstraint\n'), ((3719, 3772), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (3725, 3772), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((3780, 3832), 'sqlalchemy.Column', 'Column', (['"""purged"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('purged', Boolean, index=True, default=False)\n", (3786, 3832), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((3840, 3895), 'sqlalchemy.Column', 'Column', (['"""importing"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('importing', Boolean, index=True, default=False)\n", (3846, 3895), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((3954, 3998), 'sqlalchemy.Column', 'Column', (['"""importable"""', 'Boolean'], {'default': '(False)'}), "('importable', Boolean, default=False)\n", (3960, 3998), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((4006, 4038), 'sqlalchemy.Column', 'Column', (['"""slug"""', 'TEXT'], {'index': '(True)'}), "('slug', TEXT, index=True)\n", (4012, 4038), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((4046, 4101), 'sqlalchemy.Column', 'Column', (['"""published"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('published', Boolean, index=True, default=False)\n", (4052, 4101), False, 'from 
sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((4204, 4243), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (4210, 4243), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((4500, 4539), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (4506, 4539), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((4701, 4745), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (4707, 4745), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((4753, 4811), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (4759, 4811), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((5169, 5191), 'sqlalchemy.Column', 'Column', (['"""hid"""', 'Integer'], {}), "('hid', Integer)\n", (5175, 5191), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((5332, 5352), 'sqlalchemy.Column', 'Column', (['"""peek"""', 'TEXT'], {}), "('peek', TEXT)\n", (5338, 5352), False, 'from sqlalchemy import and_, asc, 
Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((5361, 5389), 'sqlalchemy.Column', 'Column', (['"""tool_version"""', 'TEXT'], {}), "('tool_version', TEXT)\n", (5367, 5389), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((5655, 5708), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (5661, 5708), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((5716, 5768), 'sqlalchemy.Column', 'Column', (['"""purged"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('purged', Boolean, index=True, default=False)\n", (5722, 5768), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((5776, 5802), 'sqlalchemy.Column', 'Column', (['"""visible"""', 'Boolean'], {}), "('visible', Boolean)\n", (5782, 5802), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((6100, 6139), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (6106, 6139), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((6147, 6191), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (6153, 6191), False, 'from 
sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((6199, 6269), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'index': '(True)', 'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, index=True, default=now, onupdate=now)\n", (6205, 6269), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((6333, 6386), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (6339, 6386), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((6394, 6446), 'sqlalchemy.Column', 'Column', (['"""purged"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('purged', Boolean, index=True, default=False)\n", (6400, 6446), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((6454, 6495), 'sqlalchemy.Column', 'Column', (['"""purgable"""', 'Boolean'], {'default': '(True)'}), "('purgable', Boolean, default=True)\n", (6460, 6495), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((6570, 6603), 'sqlalchemy.Column', 'Column', (['"""external_filename"""', 'TEXT'], {}), "('external_filename', TEXT)\n", (6576, 6603), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((6612, 6645), 
'sqlalchemy.Column', 'Column', (['"""_extra_files_path"""', 'TEXT'], {}), "('_extra_files_path', TEXT)\n", (6618, 6645), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((6915, 6954), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (6921, 6954), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((6962, 7006), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (6968, 7006), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((7014, 7084), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'index': '(True)', 'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, index=True, default=now, onupdate=now)\n", (7020, 7084), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((7434, 7473), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (7440, 7473), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((7888, 7927), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (7894, 7927), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, 
String, Table, TEXT, Unicode, UniqueConstraint\n'), ((7935, 7979), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (7941, 7979), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((7987, 8045), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (7993, 8045), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((8487, 8540), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (8493, 8540), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((8548, 8606), 'sqlalchemy.Column', 'Column', (['"""metadata_safe"""', 'Boolean'], {'index': '(True)', 'default': '(True)'}), "('metadata_safe', Boolean, index=True, default=True)\n", (8554, 8606), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((8727, 8766), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (8733, 8766), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((8965, 8991), 'sqlalchemy.Column', 'Column', (['"""attributes"""', 'TEXT'], {}), "('attributes', TEXT)\n", (8971, 8991), False, 'from sqlalchemy 
import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((9054, 9093), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (9060, 9093), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((9101, 9145), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (9107, 9145), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((9153, 9211), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (9159, 9211), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((9281, 9334), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (9287, 9334), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((9422, 9461), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (9428, 9461), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((9627, 9671), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], 
{'default': 'now'}), "('create_time', DateTime, default=now)\n", (9633, 9671), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((9679, 9737), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (9685, 9737), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((9823, 9862), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (9829, 9862), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((10019, 10063), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (10025, 10063), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((10071, 10129), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (10077, 10129), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((10217, 10256), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (10223, 10256), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, 
TEXT, Unicode, UniqueConstraint\n'), ((10415, 10459), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (10421, 10459), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((10467, 10525), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (10473, 10525), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((10579, 10618), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (10585, 10618), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((10626, 10670), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (10632, 10670), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((10678, 10736), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (10684, 10736), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((10806, 10833), 'sqlalchemy.Column', 'Column', (['"""description"""', 'TEXT'], {}), "('description', TEXT)\n", (10812, 10833), False, 'from sqlalchemy import and_, asc, 
Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((10889, 10942), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (10895, 10942), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11030, 11069), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (11036, 11069), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11228, 11272), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (11234, 11272), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11280, 11338), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (11286, 11338), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11428, 11467), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (11434, 11467), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11628, 11672), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], 
{'default': 'now'}), "('create_time', DateTime, default=now)\n", (11634, 11672), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11680, 11738), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (11686, 11738), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11794, 11833), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (11800, 11833), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11841, 11885), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (11847, 11885), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11893, 11951), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (11899, 11951), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12021, 12048), 'sqlalchemy.Column', 'Column', (['"""description"""', 'TEXT'], {}), "('description', TEXT)\n", (12027, 12048), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, 
UniqueConstraint\n'), ((12056, 12083), 'sqlalchemy.Column', 'Column', (['"""bytes"""', 'BigInteger'], {}), "('bytes', BigInteger)\n", (12062, 12083), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12131, 12184), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (12137, 12184), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12278, 12317), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (12284, 12317), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12325, 12369), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (12331, 12369), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12377, 12435), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (12383, 12435), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12652, 12691), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (12658, 12691), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, 
ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12699, 12743), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (12705, 12743), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12751, 12809), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (12757, 12809), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12817, 12839), 'sqlalchemy.Column', 'Column', (['"""action"""', 'TEXT'], {}), "('action', TEXT)\n", (12823, 12839), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((13070, 13109), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (13076, 13109), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((13117, 13161), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (13123, 13161), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((13169, 13227), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", 
(13175, 13227), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((13235, 13257), 'sqlalchemy.Column', 'Column', (['"""action"""', 'TEXT'], {}), "('action', TEXT)\n", (13241, 13257), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((13516, 13555), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (13522, 13555), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((13563, 13607), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (13569, 13607), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((13615, 13673), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (13621, 13673), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((13681, 13703), 'sqlalchemy.Column', 'Column', (['"""action"""', 'TEXT'], {}), "('action', TEXT)\n", (13687, 13703), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((13978, 14017), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", 
(13984, 14017), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((14025, 14069), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (14031, 14069), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((14077, 14135), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (14083, 14135), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((14143, 14165), 'sqlalchemy.Column', 'Column', (['"""action"""', 'TEXT'], {}), "('action', TEXT)\n", (14149, 14165), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((14480, 14519), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (14486, 14519), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((14527, 14571), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (14533, 14571), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((14579, 14637), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], 
{'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (14585, 14637), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((14645, 14667), 'sqlalchemy.Column', 'Column', (['"""action"""', 'TEXT'], {}), "('action', TEXT)\n", (14651, 14667), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((14978, 15017), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (14984, 15017), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((15103, 15125), 'sqlalchemy.Column', 'Column', (['"""action"""', 'TEXT'], {}), "('action', TEXT)\n", (15109, 15125), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((15294, 15333), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (15300, 15333), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((15418, 15440), 'sqlalchemy.Column', 'Column', (['"""action"""', 'TEXT'], {}), "('action', TEXT)\n", (15424, 15440), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((15586, 15625), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, 
primary_key=True)\n", (15592, 15625), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((16007, 16034), 'sqlalchemy.Column', 'Column', (['"""order_id"""', 'Integer'], {}), "('order_id', Integer)\n", (16013, 16034), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((16097, 16141), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (16103, 16141), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((16149, 16207), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (16155, 16207), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((16550, 16603), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (16556, 16603), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((16611, 16663), 'sqlalchemy.Column', 'Column', (['"""purged"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('purged', Boolean, index=True, default=False)\n", (16617, 16663), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), 
((16776, 16815), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (16782, 16815), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((16993, 17037), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (16999, 17037), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((17045, 17103), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (17051, 17103), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((17742, 17762), 'sqlalchemy.Column', 'Column', (['"""peek"""', 'TEXT'], {}), "('peek', TEXT)\n", (17748, 17762), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((17771, 17799), 'sqlalchemy.Column', 'Column', (['"""tool_version"""', 'TEXT'], {}), "('tool_version', TEXT)\n", (17777, 17799), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((18073, 18126), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (18079, 18126), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, 
Table, TEXT, Unicode, UniqueConstraint\n'), ((18134, 18160), 'sqlalchemy.Column', 'Column', (['"""visible"""', 'Boolean'], {}), "('visible', Boolean)\n", (18140, 18160), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((18473, 18512), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (18479, 18512), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((18520, 18544), 'sqlalchemy.Column', 'Column', (['"""data"""', 'JSONType'], {}), "('data', JSONType)\n", (18526, 18544), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((18633, 18672), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (18639, 18672), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19009, 19030), 'sqlalchemy.Column', 'Column', (['"""value"""', 'TEXT'], {}), "('value', TEXT)\n", (19015, 19030), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19088, 19127), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (19094, 19127), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19223, 19267), 
'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (19229, 19267), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19275, 19333), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (19281, 19333), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19390, 19443), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (19396, 19443), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19451, 19503), 'sqlalchemy.Column', 'Column', (['"""purged"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('purged', Boolean, index=True, default=False)\n", (19457, 19503), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19511, 19538), 'sqlalchemy.Column', 'Column', (['"""description"""', 'TEXT'], {}), "('description', TEXT)\n", (19517, 19538), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19546, 19570), 'sqlalchemy.Column', 'Column', (['"""synopsis"""', 'TEXT'], {}), "('synopsis', TEXT)\n", (19552, 19570), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, 
Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19643, 19682), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (19649, 19682), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19790, 19834), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (19796, 19834), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19842, 19900), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (19848, 19900), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19908, 19940), 'sqlalchemy.Column', 'Column', (['"""name"""', 'TEXT'], {'index': '(True)'}), "('name', TEXT, index=True)\n", (19914, 19940), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19948, 19975), 'sqlalchemy.Column', 'Column', (['"""description"""', 'TEXT'], {}), "('description', TEXT)\n", (19954, 19975), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19983, 20010), 'sqlalchemy.Column', 'Column', (['"""order_id"""', 'Integer'], {}), "('order_id', Integer)\n", (19989, 20010), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, 
not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((20073, 20102), 'sqlalchemy.Column', 'Column', (['"""item_count"""', 'Integer'], {}), "('item_count', Integer)\n", (20079, 20102), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((20110, 20163), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (20116, 20163), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((20171, 20223), 'sqlalchemy.Column', 'Column', (['"""purged"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('purged', Boolean, index=True, default=False)\n", (20177, 20223), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((20366, 20405), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (20372, 20405), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((20668, 20725), 'sqlalchemy.Column', 'Column', (['"""inheritable"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('inheritable', Boolean, index=True, default=False)\n", (20674, 20725), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((20733, 20786), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, 
index=True, default=False)\n", (20739, 20786), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((20891, 20930), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (20897, 20930), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((21222, 21279), 'sqlalchemy.Column', 'Column', (['"""inheritable"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('inheritable', Boolean, index=True, default=False)\n", (21228, 21279), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((21287, 21340), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (21293, 21340), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((21462, 21501), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (21468, 21501), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((21835, 21888), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (21841, 21888), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, 
Unicode, UniqueConstraint\n'), ((21940, 21979), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (21946, 21979), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((21987, 22031), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (21993, 22031), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22039, 22097), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (22045, 22097), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22313, 22358), 'sqlalchemy.Column', 'Column', (['"""tool_version"""', 'TEXT'], {'default': '"""1.0.0"""'}), "('tool_version', TEXT, default='1.0.0')\n", (22319, 22358), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22459, 22487), 'sqlalchemy.Column', 'Column', (['"""command_line"""', 'TEXT'], {}), "('command_line', TEXT)\n", (22465, 22487), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22587, 22609), 'sqlalchemy.Column', 'Column', (['"""stdout"""', 'TEXT'], {}), "('stdout', TEXT)\n", (22593, 22609), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, 
select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22617, 22639), 'sqlalchemy.Column', 'Column', (['"""stderr"""', 'TEXT'], {}), "('stderr', TEXT)\n", (22623, 22639), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22647, 22690), 'sqlalchemy.Column', 'Column', (['"""exit_code"""', 'Integer'], {'nullable': '(True)'}), "('exit_code', Integer, nullable=True)\n", (22653, 22690), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22698, 22723), 'sqlalchemy.Column', 'Column', (['"""traceback"""', 'TEXT'], {}), "('traceback', TEXT)\n", (22704, 22723), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((23088, 23141), 'sqlalchemy.Column', 'Column', (['"""destination_params"""', 'JSONType'], {'nullable': '(True)'}), "('destination_params', JSONType, nullable=True)\n", (23094, 23141), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((23216, 23270), 'sqlalchemy.Column', 'Column', (['"""imported"""', 'Boolean'], {'default': '(False)', 'index': '(True)'}), "('imported', Boolean, default=False, index=True)\n", (23222, 23270), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((23463, 23502), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (23469, 23502), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, 
Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((23510, 23554), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (23516, 23554), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((23562, 23620), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (23568, 23620), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((23853, 23892), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (23859, 23892), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((24004, 24025), 'sqlalchemy.Column', 'Column', (['"""value"""', 'TEXT'], {}), "('value', TEXT)\n", (24010, 24025), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((24119, 24158), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (24125, 24158), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((24455, 24494), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (24461, 24494), False, 'from sqlalchemy import and_, asc, Boolean, 
Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((24810, 24849), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (24816, 24849), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((25190, 25229), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (25196, 25229), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((25562, 25601), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (25568, 25601), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((25918, 25957), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (25924, 25957), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((26292, 26331), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (26298, 26331), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((26710, 26749), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (26716, 26749), False, 'from 
sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27497, 27536), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (27503, 27536), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27767, 27823), 'sqlalchemy.Column', 'Column', (['"""compressed"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('compressed', Boolean, index=True, default=False)\n", (27773, 27823), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27831, 27869), 'sqlalchemy.Column', 'Column', (['"""history_attrs_filename"""', 'TEXT'], {}), "('history_attrs_filename', TEXT)\n", (27837, 27869), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27877, 27916), 'sqlalchemy.Column', 'Column', (['"""datasets_attrs_filename"""', 'TEXT'], {}), "('datasets_attrs_filename', TEXT)\n", (27883, 27916), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27924, 27959), 'sqlalchemy.Column', 'Column', (['"""jobs_attrs_filename"""', 'TEXT'], {}), "('jobs_attrs_filename', TEXT)\n", (27930, 27959), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28058, 28097), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, 
primary_key=True)\n", (28064, 28097), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28251, 28278), 'sqlalchemy.Column', 'Column', (['"""archive_dir"""', 'TEXT'], {}), "('archive_dir', TEXT)\n", (28257, 28278), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28395, 28434), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (28401, 28434), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28734, 28773), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (28740, 28773), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29080, 29119), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (29086, 29119), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29412, 29451), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (29418, 29451), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29743, 29782), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, 
primary_key=True)\n", (29749, 29782), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30153, 30198), 'sqlalchemy.Column', 'Column', (['"""created_time"""', 'DateTime'], {'default': 'now'}), "('created_time', DateTime, default=now)\n", (30159, 30198), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30206, 30266), 'sqlalchemy.Column', 'Column', (['"""modified_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('modified_time', DateTime, default=now, onupdate=now)\n", (30212, 30266), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30442, 30481), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (30448, 30481), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30489, 30533), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (30495, 30533), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30541, 30575), 'sqlalchemy.Column', 'Column', (['"""execution_time"""', 'DateTime'], {}), "('execution_time', DateTime)\n", (30547, 30575), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30583, 30641), 'sqlalchemy.Column', 
'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (30589, 30641), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30698, 30726), 'sqlalchemy.Column', 'Column', (['"""command_line"""', 'TEXT'], {}), "('command_line', TEXT)\n", (30704, 30726), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30826, 30848), 'sqlalchemy.Column', 'Column', (['"""stdout"""', 'TEXT'], {}), "('stdout', TEXT)\n", (30832, 30848), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30856, 30878), 'sqlalchemy.Column', 'Column', (['"""stderr"""', 'TEXT'], {}), "('stderr', TEXT)\n", (30862, 30878), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30886, 30929), 'sqlalchemy.Column', 'Column', (['"""exit_code"""', 'Integer'], {'nullable': '(True)'}), "('exit_code', Integer, nullable=True)\n", (30892, 30929), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30982, 31007), 'sqlalchemy.Column', 'Column', (['"""traceback"""', 'TEXT'], {}), "('traceback', TEXT)\n", (30988, 31007), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((31253, 31292), 'sqlalchemy.Column', 'Column', (['"""prepare_input_files_cmd"""', 'TEXT'], {}), 
"('prepare_input_files_cmd', TEXT)\n", (31259, 31292), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((31365, 31404), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (31371, 31404), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((31624, 31675), 'sqlalchemy.Column', 'Column', (['"""action_arguments"""', 'JSONType'], {'nullable': '(True)'}), "('action_arguments', JSONType, nullable=True)\n", (31630, 31675), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((31768, 31807), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (31774, 31807), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32063, 32102), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (32069, 32102), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32110, 32154), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (32116, 32154), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32162, 32220), 'sqlalchemy.Column', 
'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (32168, 32220), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32328, 32354), 'sqlalchemy.Column', 'Column', (['"""params"""', 'JSONType'], {}), "('params', JSONType)\n", (32334, 32354), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32423, 32462), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (32429, 32462), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32470, 32514), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (32476, 32514), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32522, 32580), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (32528, 32580), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32675, 32695), 'sqlalchemy.Column', 'Column', (['"""info"""', 'TEXT'], {}), "('info', TEXT)\n", (32681, 32695), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, 
UniqueConstraint\n'), ((32703, 32725), 'sqlalchemy.Column', 'Column', (['"""pid"""', 'Integer'], {}), "('pid', Integer)\n", (32709, 32725), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32733, 32758), 'sqlalchemy.Column', 'Column', (['"""socket"""', 'Integer'], {}), "('socket', Integer)\n", (32739, 32758), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32766, 32792), 'sqlalchemy.Column', 'Column', (['"""params"""', 'JSONType'], {}), "('params', JSONType)\n", (32772, 32792), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32873, 32912), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (32879, 32912), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32983, 33027), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (32989, 33027), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((33035, 33093), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (33041, 33093), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), 
((33213, 33252), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (33219, 33252), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((33472, 33494), 'sqlalchemy.Column', 'Column', (['"""hid"""', 'Integer'], {}), "('hid', Integer)\n", (33478, 33494), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((33502, 33528), 'sqlalchemy.Column', 'Column', (['"""visible"""', 'Boolean'], {}), "('visible', Boolean)\n", (33508, 33528), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((33536, 33577), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'default': '(False)'}), "('deleted', Boolean, default=False)\n", (33542, 33577), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((33918, 33957), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (33924, 33957), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((34183, 34224), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'default': '(False)'}), "('deleted', Boolean, default=False)\n", (34189, 34224), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((34321, 34360), 
'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (34327, 34360), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((35063, 35095), 'sqlalchemy.Column', 'Column', (['"""element_index"""', 'Integer'], {}), "('element_index', Integer)\n", (35069, 35095), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((35203, 35242), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (35209, 35242), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((35250, 35294), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (35256, 35294), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((35302, 35360), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (35308, 35360), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((35805, 35844), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (35811, 35844), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, 
Table, TEXT, Unicode, UniqueConstraint\n'), ((35852, 35896), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (35858, 35896), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((35904, 35962), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (35910, 35962), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((36151, 36174), 'sqlalchemy.Column', 'Column', (['"""referer"""', 'TEXT'], {}), "('referer', TEXT)\n", (36157, 36174), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((36397, 36439), 'sqlalchemy.Column', 'Column', (['"""is_valid"""', 'Boolean'], {'default': '(False)'}), "('is_valid', Boolean, default=False)\n", (36403, 36439), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((36447, 36481), 'sqlalchemy.Column', 'Column', (['"""prev_session_id"""', 'Integer'], {}), "('prev_session_id', Integer)\n", (36453, 36481), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((36727, 36766), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (36733, 36766), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, 
not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((36774, 36818), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (36780, 36818), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37054, 37093), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (37060, 37093), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37101, 37145), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (37107, 37145), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37153, 37211), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (37159, 37211), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37473, 37493), 'sqlalchemy.Column', 'Column', (['"""name"""', 'TEXT'], {}), "('name', TEXT)\n", (37479, 37493), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37501, 37542), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'default': '(False)'}), "('deleted', Boolean, default=False)\n", (37507, 37542), False, 'from sqlalchemy import and_, asc, Boolean, Column, 
DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37550, 37594), 'sqlalchemy.Column', 'Column', (['"""importable"""', 'Boolean'], {'default': '(False)'}), "('importable', Boolean, default=False)\n", (37556, 37594), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37602, 37634), 'sqlalchemy.Column', 'Column', (['"""slug"""', 'TEXT'], {'index': '(True)'}), "('slug', TEXT, index=True)\n", (37608, 37634), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37642, 37697), 'sqlalchemy.Column', 'Column', (['"""published"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('published', Boolean, index=True, default=False)\n", (37648, 37697), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37763, 37802), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (37769, 37802), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37810, 37854), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (37816, 37854), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37862, 37920), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', 
DateTime, default=now, onupdate=now)\n", (37868, 37920), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38037, 38057), 'sqlalchemy.Column', 'Column', (['"""name"""', 'TEXT'], {}), "('name', TEXT)\n", (38043, 38057), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38065, 38094), 'sqlalchemy.Column', 'Column', (['"""has_cycles"""', 'Boolean'], {}), "('has_cycles', Boolean)\n", (38071, 38094), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38102, 38131), 'sqlalchemy.Column', 'Column', (['"""has_errors"""', 'Boolean'], {}), "('has_errors', Boolean)\n", (38108, 38131), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38139, 38178), 'sqlalchemy.Column', 'Column', (['"""uuid"""', 'UUIDType'], {'nullable': '(True)'}), "('uuid', UUIDType, nullable=True)\n", (38145, 38178), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38253, 38292), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (38259, 38292), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38300, 38344), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (38306, 38344), 
False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38352, 38410), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (38358, 38410), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38547, 38570), 'sqlalchemy.Column', 'Column', (['"""tool_id"""', 'TEXT'], {}), "('tool_id', TEXT)\n", (38553, 38570), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38578, 38606), 'sqlalchemy.Column', 'Column', (['"""tool_version"""', 'TEXT'], {}), "('tool_version', TEXT)\n", (38584, 38606), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38636, 38667), 'sqlalchemy.Column', 'Column', (['"""tool_inputs"""', 'JSONType'], {}), "('tool_inputs', JSONType)\n", (38642, 38667), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38675, 38706), 'sqlalchemy.Column', 'Column', (['"""tool_errors"""', 'JSONType'], {}), "('tool_errors', JSONType)\n", (38681, 38706), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38714, 38742), 'sqlalchemy.Column', 'Column', (['"""position"""', 'JSONType'], {}), "('position', JSONType)\n", (38720, 38742), False, 'from sqlalchemy import and_, asc, Boolean, Column, 
DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38750, 38776), 'sqlalchemy.Column', 'Column', (['"""config"""', 'JSONType'], {}), "('config', JSONType)\n", (38756, 38776), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38784, 38814), 'sqlalchemy.Column', 'Column', (['"""order_index"""', 'Integer'], {}), "('order_index', Integer)\n", (38790, 38814), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38969, 39008), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (38975, 39008), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((39218, 39243), 'sqlalchemy.Column', 'Column', (['"""value"""', 'JSONType'], {}), "('value', JSONType)\n", (39224, 39243), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((39357, 39396), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (39363, 39396), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((39567, 39588), 'sqlalchemy.Column', 'Column', (['"""value"""', 'TEXT'], {}), "('value', TEXT)\n", (39573, 39588), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, 
UniqueConstraint\n'), ((39749, 39788), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (39755, 39788), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((40242, 40281), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (40248, 40281), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((40705, 40744), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (40711, 40744), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((40925, 40952), 'sqlalchemy.Column', 'Column', (['"""output_name"""', 'TEXT'], {}), "('output_name', TEXT)\n", (40931, 40952), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((40960, 40986), 'sqlalchemy.Column', 'Column', (['"""input_name"""', 'TEXT'], {}), "('input_name', TEXT)\n", (40966, 40986), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((41064, 41103), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (41070, 41103), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((41348, 
41387), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (41354, 41387), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((41395, 41439), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (41401, 41439), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((41447, 41505), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (41453, 41505), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((41980, 42019), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (41986, 42019), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((42027, 42071), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (42033, 42071), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((42079, 42137), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (42085, 42137), False, 'from sqlalchemy import and_, asc, Boolean, Column, 
DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((42453, 42494), 'sqlalchemy.Column', 'Column', (['"""action"""', 'JSONType'], {'nullable': '(True)'}), "('action', JSONType, nullable=True)\n", (42459, 42494), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((42612, 42651), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (42618, 42651), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((42921, 42960), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (42927, 42960), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((43139, 43169), 'sqlalchemy.Column', 'Column', (['"""order_index"""', 'Integer'], {}), "('order_index', Integer)\n", (43145, 43169), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((43240, 43279), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (43246, 43279), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((43287, 43307), 'sqlalchemy.Column', 'Column', (['"""name"""', 'TEXT'], {}), "('name', TEXT)\n", (43293, 43307), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, 
Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((43539, 43583), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (43545, 43583), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((43591, 43661), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'index': '(True)', 'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, index=True, default=now, onupdate=now)\n", (43597, 43661), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((43736, 43789), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (43742, 43789), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((43797, 43849), 'sqlalchemy.Column', 'Column', (['"""purged"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('purged', Boolean, index=True, default=False)\n", (43803, 43849), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((43938, 43977), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (43944, 43977), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((43984, 44028), 'sqlalchemy.Column', 'Column', 
(['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (43990, 44028), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((44036, 44094), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (44042, 44094), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((44191, 44244), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (44197, 44244), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((44317, 44356), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (44323, 44356), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((44363, 44407), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (44369, 44407), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((44415, 44473), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (44421, 44473), False, 'from sqlalchemy import and_, asc, Boolean, Column, 
DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((44541, 44561), 'sqlalchemy.Column', 'Column', (['"""desc"""', 'TEXT'], {}), "('desc', TEXT)\n", (44547, 44561), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((44967, 45006), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (44973, 45006), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((45014, 45058), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (45020, 45058), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((45066, 45124), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (45072, 45124), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((45192, 45219), 'sqlalchemy.Column', 'Column', (['"""description"""', 'TEXT'], {}), "('description', TEXT)\n", (45198, 45219), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((45532, 45585), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (45538, 45585), False, 'from 
sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((45653, 45692), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (45659, 45692), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((45699, 45743), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (45705, 45743), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((45751, 45809), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (45757, 45809), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((45877, 45897), 'sqlalchemy.Column', 'Column', (['"""desc"""', 'TEXT'], {}), "('desc', TEXT)\n", (45883, 45897), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((46084, 46137), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (46090, 46137), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((46261, 46300), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': 
'(True)'}), "('id', Integer, primary_key=True)\n", (46267, 46300), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((46574, 46613), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (46580, 46613), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((46621, 46665), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (46627, 46665), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((46673, 46731), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (46679, 46731), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((46739, 46761), 'sqlalchemy.Column', 'Column', (['"""action"""', 'TEXT'], {}), "('action', TEXT)\n", (46745, 46761), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((47000, 47039), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (47006, 47039), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((47046, 47090), 'sqlalchemy.Column', 'Column', 
(['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (47052, 47090), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((47098, 47156), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (47104, 47156), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((47344, 47383), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (47350, 47383), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((47390, 47434), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (47396, 47434), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((47442, 47500), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (47448, 47500), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((47568, 47588), 'sqlalchemy.Column', 'Column', (['"""desc"""', 'TEXT'], {}), "('desc', TEXT)\n", (47574, 47588), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, 
TEXT, Unicode, UniqueConstraint\n'), ((47888, 47941), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (47894, 47941), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((48011, 48050), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (48017, 48050), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((48057, 48101), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (48063, 48101), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((48109, 48167), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (48115, 48167), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((48310, 48333), 'sqlalchemy.Column', 'Column', (['"""comment"""', 'TEXT'], {}), "('comment', TEXT)\n", (48316, 48333), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((48390, 48429), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (48396, 48429), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, 
desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((48437, 48481), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (48443, 48481), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((48489, 48547), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (48495, 48547), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((48615, 48635), 'sqlalchemy.Column', 'Column', (['"""desc"""', 'TEXT'], {}), "('desc', TEXT)\n", (48621, 48635), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((49025, 49078), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (49031, 49078), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((49086, 49129), 'sqlalchemy.Column', 'Column', (['"""workflow"""', 'JSONType'], {'nullable': '(True)'}), "('workflow', JSONType, nullable=True)\n", (49092, 49129), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((49276, 49315), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (49282, 
49315), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((49323, 49367), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (49329, 49367), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((49375, 49433), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (49381, 49433), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((49501, 49521), 'sqlalchemy.Column', 'Column', (['"""desc"""', 'TEXT'], {}), "('desc', TEXT)\n", (49507, 49521), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((49676, 49715), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (49682, 49715), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((49723, 49767), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (49729, 49767), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((49775, 49833), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 
'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (49781, 49833), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((50003, 50026), 'sqlalchemy.Column', 'Column', (['"""comment"""', 'TEXT'], {}), "('comment', TEXT)\n", (50009, 50026), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((50098, 50137), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (50104, 50137), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((50145, 50189), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (50151, 50189), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((50197, 50255), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (50203, 50255), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((50398, 50423), 'sqlalchemy.Column', 'Column', (['"""file_path"""', 'TEXT'], {}), "('file_path', TEXT)\n", (50404, 50423), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((50493, 50518), 'sqlalchemy.Column', 'Column', 
(['"""error_msg"""', 'TEXT'], {}), "('error_msg', TEXT)\n", (50499, 50518), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((50709, 50748), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (50715, 50748), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((50756, 50800), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (50762, 50800), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((50808, 50866), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (50814, 50866), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((51052, 51105), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (51058, 51105), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((51264, 51303), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (51270, 51303), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, 
Unicode, UniqueConstraint\n'), ((51579, 51618), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (51585, 51618), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((51848, 51887), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (51854, 51887), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((51895, 51939), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (51901, 51939), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((51947, 52005), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (51953, 52005), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((52261, 52282), 'sqlalchemy.Column', 'Column', (['"""title"""', 'TEXT'], {}), "('title', TEXT)\n", (52267, 52282), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((52290, 52335), 'sqlalchemy.Column', 'Column', (['"""slug"""', 'TEXT'], {'unique': '(True)', 'index': '(True)'}), "('slug', TEXT, unique=True, index=True)\n", (52296, 52335), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, 
Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((52343, 52399), 'sqlalchemy.Column', 'Column', (['"""importable"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('importable', Boolean, index=True, default=False)\n", (52349, 52399), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((52407, 52462), 'sqlalchemy.Column', 'Column', (['"""published"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('published', Boolean, index=True, default=False)\n", (52413, 52462), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((52470, 52523), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'index': '(True)', 'default': '(False)'}), "('deleted', Boolean, index=True, default=False)\n", (52476, 52523), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((52599, 52638), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (52605, 52638), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((52646, 52690), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (52652, 52690), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((52698, 52756), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], 
{'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (52704, 52756), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((52851, 52872), 'sqlalchemy.Column', 'Column', (['"""title"""', 'TEXT'], {}), "('title', TEXT)\n", (52857, 52872), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((52880, 52903), 'sqlalchemy.Column', 'Column', (['"""content"""', 'TEXT'], {}), "('content', TEXT)\n", (52886, 52903), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53004, 53043), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (53010, 53043), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53268, 53307), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (53274, 53307), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53315, 53359), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (53321, 53359), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53367, 53425), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], 
{'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (53373, 53425), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53699, 53720), 'sqlalchemy.Column', 'Column', (['"""title"""', 'TEXT'], {}), "('title', TEXT)\n", (53705, 53720), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53728, 53748), 'sqlalchemy.Column', 'Column', (['"""type"""', 'TEXT'], {}), "('type', TEXT)\n", (53734, 53748), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53756, 53789), 'sqlalchemy.Column', 'Column', (['"""dbkey"""', 'TEXT'], {'index': '(True)'}), "('dbkey', TEXT, index=True)\n", (53762, 53789), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53797, 53850), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'default': '(False)', 'index': '(True)'}), "('deleted', Boolean, default=False, index=True)\n", (53803, 53850), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53858, 53914), 'sqlalchemy.Column', 'Column', (['"""importable"""', 'Boolean'], {'default': '(False)', 'index': '(True)'}), "('importable', Boolean, default=False, index=True)\n", (53864, 53914), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53922, 53954), 
'sqlalchemy.Column', 'Column', (['"""slug"""', 'TEXT'], {'index': '(True)'}), "('slug', TEXT, index=True)\n", (53928, 53954), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53962, 54017), 'sqlalchemy.Column', 'Column', (['"""published"""', 'Boolean'], {'default': '(False)', 'index': '(True)'}), "('published', Boolean, default=False, index=True)\n", (53968, 54017), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((54110, 54149), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (54116, 54149), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((54157, 54201), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (54163, 54201), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((54209, 54267), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (54215, 54267), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((54380, 54401), 'sqlalchemy.Column', 'Column', (['"""title"""', 'TEXT'], {}), "('title', TEXT)\n", (54386, 54401), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, 
Table, TEXT, Unicode, UniqueConstraint\n'), ((54409, 54442), 'sqlalchemy.Column', 'Column', (['"""dbkey"""', 'TEXT'], {'index': '(True)'}), "('dbkey', TEXT, index=True)\n", (54415, 54442), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((54450, 54476), 'sqlalchemy.Column', 'Column', (['"""config"""', 'JSONType'], {}), "('config', JSONType)\n", (54456, 54476), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((54595, 54634), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (54601, 54634), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((54933, 54972), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (54939, 54972), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((54979, 55023), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (54985, 55023), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((55031, 55101), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'index': '(True)', 'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, index=True, default=now, onupdate=now)\n", (55037, 55101), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, 
desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((55359, 55398), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (55365, 55398), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((55405, 55449), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (55411, 55449), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((55457, 55527), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'index': '(True)', 'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, index=True, default=now, onupdate=now)\n", (55463, 55527), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((55604, 55647), 'sqlalchemy.Column', 'Column', (['"""data_manager_id"""', 'TEXT'], {'index': '(True)'}), "('data_manager_id', TEXT, index=True)\n", (55610, 55647), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((55722, 55761), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (55728, 55761), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((55769, 55792), 'sqlalchemy.Column', 'Column', (['"""type"""', 'Integer'], {}), "('type', Integer)\n", 
(55775, 55792), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((55902, 55926), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""name"""'], {}), "('name')\n", (55918, 55926), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((56016, 56055), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (56022, 56055), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((56541, 56580), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (56547, 56580), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((57104, 57143), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (57110, 57143), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((57684, 57723), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (57690, 57723), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((58201, 58240), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (58207, 
58240), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((58731, 58770), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (58737, 58770), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((59262, 59301), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (59268, 59301), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((59835, 59874), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (59841, 59874), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((60446, 60485), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (60452, 60485), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((61015, 61054), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (61021, 61054), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((61556, 61595), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, 
primary_key=True)\n", (61562, 61595), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((61758, 61796), 'sqlalchemy.Column', 'Column', (['"""annotation"""', 'TEXT'], {'index': '(True)'}), "('annotation', TEXT, index=True)\n", (61764, 61796), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((61937, 61976), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (61943, 61976), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((62179, 62217), 'sqlalchemy.Column', 'Column', (['"""annotation"""', 'TEXT'], {'index': '(True)'}), "('annotation', TEXT, index=True)\n", (62185, 62217), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((62335, 62374), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (62341, 62374), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((62553, 62591), 'sqlalchemy.Column', 'Column', (['"""annotation"""', 'TEXT'], {'index': '(True)'}), "('annotation', TEXT, index=True)\n", (62559, 62591), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((62705, 62744), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': 
'(True)'}), "('id', Integer, primary_key=True)\n", (62711, 62744), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((62919, 62957), 'sqlalchemy.Column', 'Column', (['"""annotation"""', 'TEXT'], {'index': '(True)'}), "('annotation', TEXT, index=True)\n", (62925, 62957), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((63054, 63093), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (63060, 63093), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((63250, 63288), 'sqlalchemy.Column', 'Column', (['"""annotation"""', 'TEXT'], {'index': '(True)'}), "('annotation', TEXT, index=True)\n", (63256, 63288), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((63403, 63442), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (63409, 63442), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((63617, 63655), 'sqlalchemy.Column', 'Column', (['"""annotation"""', 'TEXT'], {'index': '(True)'}), "('annotation', TEXT, index=True)\n", (63623, 63655), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((63794, 63833), 'sqlalchemy.Column', 'Column', (['"""id"""', 
'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (63800, 63833), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((64046, 64084), 'sqlalchemy.Column', 'Column', (['"""annotation"""', 'TEXT'], {'index': '(True)'}), "('annotation', TEXT, index=True)\n", (64052, 64084), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((64223, 64262), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (64229, 64262), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((64475, 64513), 'sqlalchemy.Column', 'Column', (['"""annotation"""', 'TEXT'], {'index': '(True)'}), "('annotation', TEXT, index=True)\n", (64481, 64513), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((64626, 64665), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (64632, 64665), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((64828, 64865), 'sqlalchemy.Column', 'Column', (['"""rating"""', 'Integer'], {'index': '(True)'}), "('rating', Integer, index=True)\n", (64834, 64865), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((64998, 65037), 'sqlalchemy.Column', 
'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (65004, 65037), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((65240, 65277), 'sqlalchemy.Column', 'Column', (['"""rating"""', 'Integer'], {'index': '(True)'}), "('rating', Integer, index=True)\n", (65246, 65277), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((65387, 65426), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (65393, 65426), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((65605, 65642), 'sqlalchemy.Column', 'Column', (['"""rating"""', 'Integer'], {'index': '(True)'}), "('rating', Integer, index=True)\n", (65611, 65642), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((65731, 65770), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (65737, 65770), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((65927, 65964), 'sqlalchemy.Column', 'Column', (['"""rating"""', 'Integer'], {'index': '(True)'}), "('rating', Integer, index=True)\n", (65933, 65964), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((66071, 66110), 
'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (66077, 66110), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((66285, 66322), 'sqlalchemy.Column', 'Column', (['"""rating"""', 'Integer'], {'index': '(True)'}), "('rating', Integer, index=True)\n", (66291, 66322), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((66453, 66492), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (66459, 66492), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((66705, 66742), 'sqlalchemy.Column', 'Column', (['"""rating"""', 'Integer'], {'index': '(True)'}), "('rating', Integer, index=True)\n", (66711, 66742), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((66873, 66912), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (66879, 66912), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((67125, 67162), 'sqlalchemy.Column', 'Column', (['"""rating"""', 'Integer'], {'index': '(True)'}), "('rating', Integer, index=True)\n", (67131, 67162), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), 
((67252, 67291), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (67258, 67291), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((67525, 67564), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (67531, 67564), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((67572, 67616), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (67578, 67616), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((67961, 68000), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (67967, 68000), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((68008, 68052), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (68014, 68052), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((68354, 68397), 'sqlalchemy.orm.mapper', 'mapper', (['model', 'model.table'], {'properties': 'kwds'}), '(model, model.table, properties=kwds)\n', (68360, 68397), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((126424, 126469), 
'sqlalchemy.orm.relation', 'relation', (['model.JobToOutputDatasetAssociation'], {}), '(model.JobToOutputDatasetAssociation)\n', (126432, 126469), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((126570, 126622), 'sqlalchemy.orm.relation', 'relation', (['model.JobToOutputLibraryDatasetAssociation'], {}), '(model.JobToOutputLibraryDatasetAssociation)\n', (126578, 126622), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((126727, 126782), 'sqlalchemy.orm.relation', 'relation', (['model.JobToOutputDatasetCollectionAssociation'], {}), '(model.JobToOutputDatasetCollectionAssociation)\n', (126735, 126782), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((128067, 128146), 'galaxy.model.orm.engine_factory.build_engine', 'build_engine', (['url', 'engine_options', 'database_query_profiling_proxy', 'trace_logger'], {}), '(url, engine_options, database_query_profiling_proxy, trace_logger)\n', (128079, 128146), False, 'from galaxy.model.orm.engine_factory import build_engine\n'), ((128444, 128486), 'galaxy.model.base.ModelMapping', 'ModelMapping', (['model_modules'], {'engine': 'engine'}), '(model_modules, engine=engine)\n', (128456, 128486), False, 'from galaxy.model.base import ModelMapping\n'), ((128714, 128737), 'galaxy.security.GalaxyRBACAgent', 'GalaxyRBACAgent', (['result'], {}), '(result)\n', (128729, 128737), False, 'from galaxy.security import GalaxyRBACAgent\n'), ((1212, 1230), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (1225, 1230), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((1288, 1306), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (1301, 1306), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((1361, 1379), 
'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (1374, 1379), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((1490, 1518), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_values.id"""'], {}), "('form_values.id')\n", (1500, 1518), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((1683, 1697), 'sqlalchemy.Numeric', 'Numeric', (['(15)', '(0)'], {}), '(15, 0)\n', (1690, 1697), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((1823, 1840), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(64)'], {}), '(64)\n', (1836, 1840), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((2131, 2159), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (2141, 2159), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((2197, 2215), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (2210, 2215), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((2240, 2258), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (2253, 2258), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((2306, 2324), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (2319, 2324), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((2352, 2370), 
'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (2365, 2370), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((2411, 2429), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (2424, 2429), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((2471, 2489), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (2484, 2489), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((2537, 2555), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (2550, 2555), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((2599, 2617), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (2612, 2617), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((2659, 2677), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (2672, 2677), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((3081, 3112), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_session.id"""'], {}), "('galaxy_session.id')\n", (3091, 3112), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((3162, 3190), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (3172, 3190), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((3287, 3305), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', 
(3300, 3305), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((3576, 3604), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (3586, 3604), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((3642, 3660), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (3655, 3660), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((3927, 3944), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(40)'], {}), '(40)\n', (3940, 3944), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((4282, 4306), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (4292, 4306), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((4356, 4384), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (4366, 4384), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((4578, 4602), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (4588, 4602), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((4655, 4679), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""dataset.id"""'], {}), "('dataset.id')\n", (4665, 4679), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, 
Table, TEXT, Unicode, UniqueConstraint\n'), ((4836, 4853), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(64)'], {}), '(64)\n', (4849, 4853), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((4952, 4996), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (4962, 4996), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((5092, 5144), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {}), "('library_dataset_dataset_association.id')\n", (5102, 5144), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((5215, 5233), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (5228, 5233), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((5259, 5277), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (5272, 5277), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((5304, 5322), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (5317, 5322), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((5419, 5436), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(64)'], {}), '(64)\n', (5432, 5436), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((5466, 5480), 'galaxy.model.custom_types.MetadataType', 'MetadataType', ([], {}), '()\n', (5478, 5480), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, 
UUIDType\n'), ((5535, 5579), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (5545, 5579), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((5627, 5645), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (5640, 5645), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((5859, 5914), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_collection_association.id"""'], {}), "('history_dataset_collection_association.id')\n", (5869, 5914), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((5988, 6022), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""extended_metadata.id"""'], {}), "('extended_metadata.id')\n", (5998, 6022), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((6294, 6311), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(64)'], {}), '(64)\n', (6307, 6311), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((6530, 6548), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (6543, 6548), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((6674, 6688), 'sqlalchemy.Numeric', 'Numeric', (['(15)', '(0)'], {}), '(15, 0)\n', (6681, 6688), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((6720, 6734), 'sqlalchemy.Numeric', 'Numeric', 
(['(15)', '(0)'], {}), '(15, 0)\n', (6727, 6734), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((6760, 6770), 'galaxy.model.custom_types.UUIDType', 'UUIDType', ([], {}), '()\n', (6768, 6770), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((7143, 7187), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (7153, 7187), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((7237, 7265), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (7247, 7265), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((7303, 7321), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (7316, 7321), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((7532, 7576), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (7542, 7576), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((7656, 7700), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (7666, 7700), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((7742, 7754), 'sqlalchemy.Unicode', 
'Unicode', (['(255)'], {}), '(255)\n', (7749, 7754), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((8080, 8124), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (8090, 8124), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((8189, 8241), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {}), "('library_dataset_dataset_association.id')\n", (8199, 8241), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((8312, 8356), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (8322, 8356), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((8413, 8465), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {}), "('library_dataset_dataset_association.id')\n", (8423, 8465), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((8630, 8648), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (8643, 8648), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((8805, 8849), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (8815, 8849), False, 'from 
sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((8890, 8908), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (8903, 8908), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((8938, 8955), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(64)'], {}), '(64)\n', (8951, 8955), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((9235, 9246), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (9241, 9246), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((9497, 9525), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (9507, 9525), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((9576, 9605), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_group.id"""'], {}), "('galaxy_group.id')\n", (9586, 9605), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((9898, 9926), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (9908, 9926), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((9976, 9997), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""role.id"""'], {}), "('role.id')\n", (9986, 9997), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, 
not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((10293, 10322), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_group.id"""'], {}), "('galaxy_group.id')\n", (10303, 10322), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((10372, 10393), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""role.id"""'], {}), "('role.id')\n", (10382, 10393), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((10760, 10771), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (10766, 10771), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((10857, 10867), 'sqlalchemy.String', 'String', (['(40)'], {}), '(40)\n', (10863, 10867), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11105, 11133), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (11115, 11133), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11184, 11206), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""quota.id"""'], {}), "('quota.id')\n", (11194, 11206), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11504, 11533), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_group.id"""'], {}), "('galaxy_group.id')\n", (11514, 11533), False, 'from sqlalchemy import and_, 
asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11584, 11606), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""quota.id"""'], {}), "('quota.id')\n", (11594, 11606), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((11975, 11986), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (11981, 11986), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12112, 12121), 'sqlalchemy.String', 'String', (['(8)'], {}), '(8)\n', (12118, 12121), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12459, 12469), 'sqlalchemy.String', 'String', (['(32)'], {}), '(32)\n', (12465, 12469), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12533, 12555), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""quota.id"""'], {}), "('quota.id')\n", (12543, 12555), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12878, 12902), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""dataset.id"""'], {}), "('dataset.id')\n", (12888, 12902), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((12952, 12973), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""role.id"""'], {}), "('role.id')\n", (12962, 12973), False, 'from sqlalchemy import 
and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((13296, 13320), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library.id"""'], {}), "('library.id')\n", (13306, 13320), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((13385, 13406), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""role.id"""'], {}), "('role.id')\n", (13395, 13406), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((13749, 13780), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_folder.id"""'], {}), "('library_folder.id')\n", (13759, 13780), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((13845, 13866), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""role.id"""'], {}), "('role.id')\n", (13855, 13866), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((14212, 14244), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset.id"""'], {}), "('library_dataset.id')\n", (14222, 14244), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((14309, 14330), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""role.id"""'], {}), "('role.id')\n", (14319, 14330), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((14734, 14786), 
'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {}), "('library_dataset_dataset_association.id')\n", (14744, 14786), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((14851, 14872), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""role.id"""'], {}), "('role.id')\n", (14861, 14872), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((15053, 15081), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (15063, 15081), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((15161, 15182), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""role.id"""'], {}), "('role.id')\n", (15171, 15182), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((15372, 15396), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (15382, 15396), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((15476, 15497), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""role.id"""'], {}), "('role.id')\n", (15486, 15497), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((15692, 15815), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {'use_alter': '(True)', 'name': 
'"""library_dataset_dataset_association_id_fk"""'}), "('library_dataset_dataset_association.id', use_alter=True, name=\n 'library_dataset_dataset_association_id_fk')\n", (15702, 15815), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((15954, 15985), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_folder.id"""'], {}), "('library_folder.id')\n", (15964, 15985), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((16231, 16249), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (16244, 16249), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((16404, 16422), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (16417, 16422), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((16862, 16894), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset.id"""'], {}), "('library_dataset.id')\n", (16872, 16894), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((16947, 16971), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""dataset.id"""'], {}), "('dataset.id')\n", (16957, 16971), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((17128, 17145), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(64)'], {}), '(64)\n', (17141, 17145), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((17244, 17361), 'sqlalchemy.ForeignKey', 
'ForeignKey', (['"""history_dataset_association.id"""'], {'use_alter': '(True)', 'name': '"""history_dataset_association_dataset_id_fkey"""'}), "('history_dataset_association.id', use_alter=True, name=\n 'history_dataset_association_dataset_id_fkey')\n", (17254, 17361), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((17452, 17577), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {'use_alter': '(True)', 'name': '"""library_dataset_dataset_association_id_fkey"""'}), "('library_dataset_dataset_association.id', use_alter=True, name=\n 'library_dataset_dataset_association_id_fkey')\n", (17462, 17577), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((17613, 17631), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (17626, 17631), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((17669, 17687), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (17682, 17687), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((17714, 17732), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (17727, 17732), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((17829, 17846), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(64)'], {}), '(64)\n', (17842, 17846), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((17876, 17890), 'galaxy.model.custom_types.MetadataType', 'MetadataType', ([], {}), '()\n', (17888, 17890), False, 'from galaxy.model.custom_types import 
JSONType, MetadataType, TrimmedString, UUIDType\n'), ((17945, 17997), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {}), "('library_dataset_dataset_association.id')\n", (17955, 17997), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((18045, 18063), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (18058, 18063), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((18196, 18224), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (18206, 18224), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((18265, 18283), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (18278, 18283), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((18342, 18376), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""extended_metadata.id"""'], {}), "('extended_metadata.id')\n", (18352, 18376), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((18721, 18795), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""extended_metadata.id"""'], {'onupdate': '"""CASCADE"""', 'ondelete': '"""CASCADE"""'}), "('extended_metadata.id', onupdate='CASCADE', ondelete='CASCADE')\n", (18731, 18795), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((18989, 19000), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (18995, 19000), False, 
'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19170, 19201), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_folder.id"""'], {}), "('library_folder.id')\n", (19180, 19201), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19357, 19368), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (19363, 19368), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((19720, 19751), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_folder.id"""'], {}), "('library_folder.id')\n", (19730, 19751), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((20255, 20272), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(40)'], {}), '(40)\n', (20268, 20272), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((20444, 20468), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library.id"""'], {}), "('library.id')\n", (20454, 20468), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((20529, 20561), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_definition.id"""'], {}), "('form_definition.id')\n", (20539, 20561), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((20618, 20646), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_values.id"""'], 
{}), "('form_values.id')\n", (20628, 20646), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((20976, 21007), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_folder.id"""'], {}), "('library_folder.id')\n", (20986, 21007), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((21083, 21115), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_definition.id"""'], {}), "('form_definition.id')\n", (21093, 21115), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((21172, 21200), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_values.id"""'], {}), "('form_values.id')\n", (21182, 21200), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((21568, 21620), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {}), "('library_dataset_dataset_association.id')\n", (21578, 21620), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((21696, 21728), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_definition.id"""'], {}), "('form_definition.id')\n", (21706, 21728), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((21785, 21813), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_values.id"""'], {}), "('form_values.id')\n", (21795, 21813), False, 'from sqlalchemy import 
and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22136, 22160), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (22146, 22160), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22220, 22251), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_folder.id"""'], {}), "('library_folder.id')\n", (22230, 22251), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22292, 22303), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (22298, 22303), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22383, 22393), 'sqlalchemy.String', 'String', (['(64)'], {}), '(64)\n', (22389, 22393), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22431, 22449), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (22444, 22449), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((22521, 22533), 'sqlalchemy.String', 'String', (['(1024)'], {}), '(1024)\n', (22527, 22533), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22566, 22577), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (22572, 22577), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, 
MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22762, 22793), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_session.id"""'], {}), "('galaxy_session.id')\n", (22772, 22793), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22858, 22886), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (22868, 22886), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((22950, 22961), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (22956, 22961), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((23005, 23016), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (23011, 23016), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((23052, 23063), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (23058, 23063), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((23176, 23194), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (23189, 23194), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((23296, 23314), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (23309, 23314), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((23353, 23371), 
'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (23366, 23371), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((23655, 23675), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (23665, 23675), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((23714, 23724), 'sqlalchemy.String', 'String', (['(64)'], {}), '(64)\n', (23720, 23724), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((23762, 23780), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (23775, 23780), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((23927, 23947), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (23937, 23947), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((23985, 23996), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (23991, 23996), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((24193, 24213), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (24203, 24213), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((24266, 24310), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (24276, 
24310), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((24348, 24359), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (24354, 24359), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((24529, 24549), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (24539, 24549), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((24602, 24646), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (24612, 24646), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((24684, 24695), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (24690, 24695), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((24884, 24904), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (24894, 24904), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((24968, 25023), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_collection_association.id"""'], {}), "('history_dataset_collection_association.id')\n", (24978, 25023), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, 
Unicode, UniqueConstraint\n'), ((25061, 25073), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (25068, 25073), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((25264, 25284), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (25274, 25284), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((25348, 25403), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_collection_association.id"""'], {}), "('history_dataset_collection_association.id')\n", (25358, 25403), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((25441, 25453), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (25448, 25453), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((25636, 25656), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (25646, 25656), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((25706, 25758), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {}), "('library_dataset_dataset_association.id')\n", (25716, 25758), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((25796, 25807), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (25802, 25807), False, 'from 
sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((25992, 26012), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (26002, 26012), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((26062, 26114), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {}), "('library_dataset_dataset_association.id')\n", (26072, 26114), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((26152, 26163), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (26158, 26163), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((26381, 26436), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_collection_association.id"""'], {}), "('history_dataset_collection_association.id')\n", (26391, 26436), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((26506, 26561), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_collection_association.id"""'], {}), "('history_dataset_collection_association.id')\n", (26516, 26561), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((26599, 26611), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (26606, 26611), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, 
Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((26784, 26804), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (26794, 26804), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((26877, 26921), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (26887, 26921), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27017, 27069), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {}), "('library_dataset_dataset_association.id')\n", (27027, 27069), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27129, 27140), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (27135, 27140), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27174, 27185), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (27180, 27185), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27228, 27239), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (27234, 27239), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27274, 27285), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (27280, 27285), 
False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27333, 27344), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (27339, 27344), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27389, 27400), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (27395, 27400), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27571, 27591), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (27581, 27591), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27644, 27668), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (27654, 27668), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((27721, 27745), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""dataset.id"""'], {}), "('dataset.id')\n", (27731, 27745), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28132, 28152), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (28142, 28152), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28205, 28229), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), 
"('history.id')\n", (28215, 28229), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28469, 28489), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (28479, 28489), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28529, 28541), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (28536, 28541), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28573, 28585), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (28580, 28585), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28618, 28648), 'sqlalchemy.Unicode', 'Unicode', (['JOB_METRIC_MAX_LENGTH'], {}), '(JOB_METRIC_MAX_LENGTH)\n', (28625, 28648), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28809, 28830), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""task.id"""'], {}), "('task.id')\n", (28819, 28830), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28870, 28882), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (28877, 28882), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28914, 28926), 'sqlalchemy.Unicode', 'Unicode', 
(['(255)'], {}), '(255)\n', (28921, 28926), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((28959, 28989), 'sqlalchemy.Unicode', 'Unicode', (['JOB_METRIC_MAX_LENGTH'], {}), '(JOB_METRIC_MAX_LENGTH)\n', (28966, 28989), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29154, 29174), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (29164, 29174), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29214, 29226), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (29221, 29226), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29258, 29270), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (29265, 29270), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29303, 29317), 'sqlalchemy.Numeric', 'Numeric', (['(22)', '(7)'], {}), '(22, 7)\n', (29310, 29317), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29487, 29508), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""task.id"""'], {}), "('task.id')\n", (29497, 29508), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29548, 29560), 'sqlalchemy.Unicode', 
'Unicode', (['(255)'], {}), '(255)\n', (29555, 29560), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29592, 29604), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (29599, 29604), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29637, 29651), 'sqlalchemy.Numeric', 'Numeric', (['(22)', '(7)'], {}), '(22, 7)\n', (29644, 29651), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29817, 29837), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (29827, 29837), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29895, 29924), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""deferred_job.id"""'], {}), "('deferred_job.id')\n", (29905, 29924), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((29982, 30011), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""transfer_job.id"""'], {}), "('transfer_job.id')\n", (29992, 30011), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30064, 30088), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""dataset.id"""'], {}), "('dataset.id')\n", (30074, 30088), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, 
UniqueConstraint\n'), ((30132, 30143), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (30138, 30143), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30293, 30303), 'sqlalchemy.String', 'String', (['(64)'], {}), '(64)\n', (30299, 30303), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30341, 30369), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (30351, 30369), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30666, 30676), 'sqlalchemy.String', 'String', (['(64)'], {}), '(64)\n', (30672, 30676), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30760, 30772), 'sqlalchemy.String', 'String', (['(1024)'], {}), '(1024)\n', (30766, 30772), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30805, 30816), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (30811, 30816), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((30953, 30971), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (30966, 30971), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((31042, 31062), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), 
"('job.id')\n", (31052, 31062), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((31129, 31141), 'sqlalchemy.String', 'String', (['(1024)'], {}), '(1024)\n', (31135, 31141), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((31176, 31187), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (31182, 31187), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((31232, 31243), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (31238, 31243), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((31446, 31476), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_step.id"""'], {}), "('workflow_step.id')\n", (31456, 31476), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((31535, 31546), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (31541, 31546), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((31591, 31602), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (31597, 31602), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((31839, 31859), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (31849, 
31859), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((31934, 31966), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""post_job_action.id"""'], {}), "('post_job_action.id')\n", (31944, 31966), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32245, 32255), 'sqlalchemy.String', 'String', (['(64)'], {}), '(64)\n', (32251, 32255), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32295, 32306), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (32301, 32306), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32605, 32615), 'sqlalchemy.String', 'String', (['(64)'], {}), '(64)\n', (32611, 32615), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32653, 32665), 'sqlalchemy.String', 'String', (['(1024)'], {}), '(1024)\n', (32659, 32665), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((32947, 32959), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (32954, 32959), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((33294, 33329), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""dataset_collection.id"""'], {}), "('dataset_collection.id')\n", 
(33304, 33329), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((33382, 33406), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (33392, 33406), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((33444, 33462), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (33457, 33462), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((33659, 33714), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_collection_association.id"""'], {}), "('history_dataset_collection_association.id')\n", (33669, 33714), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((33771, 33783), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (33778, 33783), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((33999, 34034), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""dataset_collection.id"""'], {}), "('dataset_collection.id')\n", (34009, 34034), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((34086, 34117), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_folder.id"""'], {}), "('library_folder.id')\n", (34096, 34117), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), 
((34155, 34173), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (34168, 34173), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((34489, 34524), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""dataset_collection.id"""'], {}), "('dataset_collection.id')\n", (34499, 34524), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((34676, 34720), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (34686, 34720), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((34785, 34837), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {}), "('library_dataset_dataset_association.id')\n", (34795, 34837), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((34914, 34949), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""dataset_collection.id"""'], {}), "('dataset_collection.id')\n", (34924, 34949), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((35133, 35145), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (35140, 35145), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((35399, 35423), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (35409, 35423), False, 'from sqlalchemy import and_, 
asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((35488, 35516), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (35498, 35516), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((35572, 35591), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(1024)'], {}), '(1024)\n', (35585, 35591), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((35632, 35663), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_session.id"""'], {}), "('galaxy_session.id')\n", (35642, 35663), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((35719, 35730), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (35725, 35730), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((35998, 36026), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (36008, 36026), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((36086, 36097), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (36092, 36097), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((36130, 36141), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (36136, 36141), False, 'from sqlalchemy import and_, asc, Boolean, Column, 
DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((36221, 36245), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (36231, 36245), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((36293, 36311), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (36306, 36311), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((36595, 36609), 'sqlalchemy.Numeric', 'Numeric', (['(15)', '(0)'], {}), '(15, 0)\n', (36602, 36609), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((36857, 36888), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_session.id"""'], {}), "('galaxy_session.id')\n", (36867, 36888), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((36941, 36965), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (36951, 36965), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37247, 37275), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (37257, 37275), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37364, 37456), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow.id"""'], {'use_alter': '(True)', 'name': 
'"""stored_workflow_latest_workflow_id_fk"""'}), "('workflow.id', use_alter=True, name=\n 'stored_workflow_latest_workflow_id_fk')\n", (37374, 37456), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((37967, 37999), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""stored_workflow.id"""'], {}), "('stored_workflow.id')\n", (37977, 37999), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38450, 38475), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow.id"""'], {}), "('workflow.id')\n", (38460, 38475), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((38529, 38539), 'sqlalchemy.String', 'String', (['(64)'], {}), '(64)\n', (38535, 38539), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((39059, 39135), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_invocation.id"""'], {'onupdate': '"""CASCADE"""', 'ondelete': '"""CASCADE"""'}), "('workflow_invocation.id', onupdate='CASCADE', ondelete='CASCADE')\n", (39069, 39135), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((39180, 39210), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_step.id"""'], {}), "('workflow_step.id')\n", (39190, 39210), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((39447, 39523), 'sqlalchemy.ForeignKey', 
'ForeignKey', (['"""workflow_invocation.id"""'], {'onupdate': '"""CASCADE"""', 'ondelete': '"""CASCADE"""'}), "('workflow_invocation.id', onupdate='CASCADE', ondelete='CASCADE')\n", (39457, 39523), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((39547, 39559), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (39554, 39559), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((39612, 39624), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (39619, 39624), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((39812, 39823), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (39818, 39823), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((39874, 39910), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_invocation.id"""'], {}), "('workflow_invocation.id')\n", (39884, 39910), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((39969, 39999), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_step.id"""'], {}), "('workflow_step.id')\n", (39979, 39999), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((40038, 40082), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (40048, 40082), 
False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((40305, 40316), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (40311, 40316), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((40367, 40403), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_invocation.id"""'], {}), "('workflow_invocation.id')\n", (40377, 40403), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((40462, 40492), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_step.id"""'], {}), "('workflow_step.id')\n", (40472, 40492), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((40542, 40597), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_collection_association.id"""'], {}), "('history_dataset_collection_association.id')\n", (40552, 40597), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((40787, 40817), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_step.id"""'], {}), "('workflow_step.id')\n", (40797, 40817), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((40873, 40903), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_step.id"""'], {}), "('workflow_step.id')\n", (40883, 40903), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, 
MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((41148, 41178), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_step.id"""'], {}), "('workflow_step.id')\n", (41158, 41178), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((41236, 41247), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (41242, 41247), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((41545, 41570), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow.id"""'], {}), "('workflow.id')\n", (41555, 41570), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((41625, 41642), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(64)'], {}), '(64)\n', (41638, 41642), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((41685, 41703), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (41698, 41703), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((41744, 41762), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (41757, 41762), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((41800, 41810), 'galaxy.model.custom_types.UUIDType', 'UUIDType', ([], {}), '()\n', (41808, 41810), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((41849, 41873), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (41859, 41873), False, 'from sqlalchemy import and_, asc, 
Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((42188, 42224), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_invocation.id"""'], {}), "('workflow_invocation.id')\n", (42198, 42224), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((42300, 42330), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_step.id"""'], {}), "('workflow_step.id')\n", (42310, 42330), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((42396, 42416), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (42406, 42416), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((42698, 42730), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""stored_workflow.id"""'], {}), "('stored_workflow.id')\n", (42708, 42730), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((42780, 42808), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (42790, 42808), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((43007, 43039), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""stored_workflow.id"""'], {}), "('stored_workflow.id')\n", (43017, 43039), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, 
UniqueConstraint\n'), ((43089, 43117), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (43099, 43117), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((43342, 43386), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (43352, 43386), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((43450, 43502), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {}), "('library_dataset_dataset_association.id')\n", (43460, 43502), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((43696, 43714), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (43709, 43714), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((44137, 44169), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_definition.id"""'], {}), "('form_definition.id')\n", (44147, 44169), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((44497, 44515), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (44510, 44515), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((44640, 44740), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_definition_current.id"""'], {'name': '"""for_def_form_def_current_id_fk"""', 'use_alter': '(True)'}), "('form_definition_current.id', name=\n 
'for_def_form_def_current_id_fk', use_alter=True)\n", (44650, 44740), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((44787, 44797), 'galaxy.model.custom_types.JSONType', 'JSONType', ([], {}), '()\n', (44795, 44797), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((44821, 44839), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (44834, 44839), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((44879, 44889), 'galaxy.model.custom_types.JSONType', 'JSONType', ([], {}), '()\n', (44887, 44889), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((45148, 45166), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (45161, 45166), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((45263, 45281), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (45276, 45281), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((45326, 45344), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (45339, 45344), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((45393, 45425), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_definition.id"""'], {}), "('form_definition.id')\n", (45403, 45425), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((45482, 45510), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_values.id"""'], {}), "('form_values.id')\n", (45492, 45510), False, 'from sqlalchemy 
import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((45833, 45851), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (45846, 45851), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((45941, 45973), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_definition.id"""'], {}), "('form_definition.id')\n", (45951, 45973), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((46030, 46062), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_definition.id"""'], {}), "('form_definition.id')\n", (46040, 46062), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((46344, 46373), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""request_type.id"""'], {}), "('request_type.id')\n", (46354, 46373), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((46435, 46468), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""external_service.id"""'], {}), "('external_service.id')\n", (46445, 46468), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((46805, 46834), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""request_type.id"""'], {}), "('request_type.id')\n", (46815, 46834), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((46899, 46920), 'sqlalchemy.ForeignKey', 
'ForeignKey', (['"""role.id"""'], {}), "('role.id')\n", (46909, 46920), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((47203, 47235), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_definition.id"""'], {}), "('form_definition.id')\n", (47213, 47235), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((47276, 47286), 'galaxy.model.custom_types.JSONType', 'JSONType', ([], {}), '()\n', (47284, 47286), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((47524, 47542), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (47537, 47542), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((47620, 47630), 'galaxy.model.custom_types.JSONType', 'JSONType', ([], {}), '()\n', (47628, 47630), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((47673, 47701), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_values.id"""'], {}), "('form_values.id')\n", (47683, 47701), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((47759, 47788), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""request_type.id"""'], {}), "('request_type.id')\n", (47769, 47788), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((47838, 47866), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (47848, 47866), False, 'from sqlalchemy import and_, asc, Boolean, Column, 
DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((48206, 48230), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""request.id"""'], {}), "('request.id')\n", (48216, 48230), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((48269, 48287), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (48282, 48287), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((48571, 48589), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (48584, 48589), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((48678, 48706), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_values.id"""'], {}), "('form_values.id')\n", (48688, 48706), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((48759, 48783), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""request.id"""'], {}), "('request.id')\n", (48769, 48783), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((48825, 48843), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (48838, 48843), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((48896, 48920), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library.id"""'], {}), "('library.id')\n", (48906, 48920), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), 
((48972, 49003), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_folder.id"""'], {}), "('library_folder.id')\n", (48982, 49003), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((49168, 49192), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (49178, 49192), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((49457, 49475), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (49470, 49475), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((49565, 49594), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""request_type.id"""'], {}), "('request_type.id')\n", (49575, 49594), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((49871, 49894), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""sample.id"""'], {}), "('sample.id')\n", (49881, 49894), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((49952, 49981), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""sample_state.id"""'], {}), "('sample_state.id')\n", (49962, 49981), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((50293, 50316), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""sample.id"""'], {}), "('sample.id')\n", (50303, 50316), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, 
select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((50354, 50372), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (50367, 50372), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((50449, 50467), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (50462, 50467), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((50542, 50560), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (50555, 50560), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((50610, 50643), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""external_service.id"""'], {}), "('external_service.id')\n", (50620, 50643), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((50913, 50945), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_definition.id"""'], {}), "('form_definition.id')\n", (50923, 50945), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((51002, 51030), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""form_values.id"""'], {}), "('form_values.id')\n", (51012, 51030), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((51133, 51151), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (51146, 51151), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((51347, 51376), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""request_type.id"""'], {}), "('request_type.id')\n", 
(51357, 51376), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((51441, 51461), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""run.id"""'], {}), "('run.id')\n", (51451, 51461), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((51656, 51679), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""sample.id"""'], {}), "('sample.id')\n", (51666, 51679), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((51744, 51764), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""run.id"""'], {}), "('run.id')\n", (51754, 51764), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((52041, 52069), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (52051, 52069), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((52158, 52244), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""page_revision.id"""'], {'use_alter': '(True)', 'name': '"""page_latest_revision_id_fk"""'}), "('page_revision.id', use_alter=True, name=\n 'page_latest_revision_id_fk')\n", (52168, 52244), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((52792, 52813), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""page.id"""'], {}), "('page.id')\n", (52802, 52813), False, 'from sqlalchemy import and_, asc, Boolean, Column, 
DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53079, 53100), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""page.id"""'], {}), "('page.id')\n", (53089, 53100), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53150, 53178), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (53160, 53178), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53461, 53489), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (53471, 53489), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((53578, 53682), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""visualization_revision.id"""'], {'use_alter': '(True)', 'name': '"""visualization_latest_revision_id_fk"""'}), "('visualization_revision.id', use_alter=True, name=\n 'visualization_latest_revision_id_fk')\n", (53588, 53682), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((54312, 54342), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""visualization.id"""'], {}), "('visualization.id')\n", (54322, 54342), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((54679, 54709), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""visualization.id"""'], {}), "('visualization.id')\n", (54689, 54709), False, 'from sqlalchemy import and_, asc, Boolean, 
Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((54759, 54787), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (54769, 54787), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((55140, 55164), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (55150, 55164), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((55214, 55242), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (55224, 55242), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((55562, 55582), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""job.id"""'], {}), "('job.id')\n", (55572, 55582), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((55830, 55850), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""tag.id"""'], {}), "('tag.id')\n", (55840, 55850), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((55876, 55894), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (55889, 55894), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((56094, 56118), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (56104, 56118), False, 'from sqlalchemy 
import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((56167, 56187), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""tag.id"""'], {}), "('tag.id')\n", (56177, 56187), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((56237, 56265), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (56247, 56265), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((56309, 56327), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (56322, 56327), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((56363, 56381), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (56376, 56381), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((56422, 56440), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (56435, 56440), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((56619, 56643), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""dataset.id"""'], {}), "('dataset.id')\n", (56629, 56643), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((56692, 56712), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""tag.id"""'], {}), "('tag.id')\n", (56702, 56712), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, 
UniqueConstraint\n'), ((56762, 56790), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (56772, 56790), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((56834, 56852), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (56847, 56852), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((56888, 56906), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (56901, 56906), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((56947, 56965), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (56960, 56965), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((57202, 57246), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (57212, 57246), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((57295, 57315), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""tag.id"""'], {}), "('tag.id')\n", (57305, 57315), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((57365, 57393), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (57375, 57393), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((57437, 57455), 'galaxy.model.custom_types.TrimmedString', 
'TrimmedString', (['(255)'], {}), '(255)\n', (57450, 57455), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((57491, 57509), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (57504, 57509), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((57550, 57568), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (57563, 57568), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((57770, 57802), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""stored_workflow.id"""'], {}), "('stored_workflow.id')\n", (57780, 57802), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((57851, 57871), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""tag.id"""'], {}), "('tag.id')\n", (57861, 57871), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((57921, 57949), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (57931, 57949), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((57993, 58005), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (58000, 58005), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((58041, 58053), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (58048, 58053), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, 
Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((58094, 58106), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (58101, 58106), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((58276, 58297), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""page.id"""'], {}), "('page.id')\n", (58286, 58297), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((58346, 58366), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""tag.id"""'], {}), "('tag.id')\n", (58356, 58366), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((58416, 58444), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (58426, 58444), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((58488, 58506), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (58501, 58506), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((58542, 58560), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (58555, 58560), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((58601, 58619), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (58614, 58619), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((58815, 58845), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_step.id"""'], {}), 
"('workflow_step.id')\n", (58825, 58845), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((58894, 58914), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""tag.id"""'], {}), "('tag.id')\n", (58904, 58914), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((58964, 58992), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (58974, 58992), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((59036, 59048), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (59043, 59048), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((59084, 59096), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (59091, 59096), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((59137, 59149), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (59144, 59149), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((59346, 59376), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""visualization.id"""'], {}), "('visualization.id')\n", (59356, 59376), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((59425, 59445), 
'sqlalchemy.ForeignKey', 'ForeignKey', (['"""tag.id"""'], {}), "('tag.id')\n", (59435, 59445), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((59495, 59523), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (59505, 59523), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((59567, 59585), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (59580, 59585), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((59621, 59639), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (59634, 59639), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((59680, 59698), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (59693, 59698), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((59932, 59987), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_collection_association.id"""'], {}), "('history_dataset_collection_association.id')\n", (59942, 59987), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((60036, 60056), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""tag.id"""'], {}), "('tag.id')\n", (60046, 60056), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((60106, 60134), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), 
"('galaxy_user.id')\n", (60116, 60134), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((60178, 60196), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (60191, 60196), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((60232, 60250), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (60245, 60250), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((60291, 60309), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (60304, 60309), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((60543, 60598), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_collection_association.id"""'], {}), "('library_dataset_collection_association.id')\n", (60553, 60598), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((60647, 60667), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""tag.id"""'], {}), "('tag.id')\n", (60657, 60667), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((60717, 60745), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (60727, 60745), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((60789, 60807), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (60802, 60807), False, 'from 
galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((60843, 60861), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (60856, 60861), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((60902, 60920), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (60915, 60920), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((61081, 61099), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (61094, 61099), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((61146, 61166), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""tag.id"""'], {}), "('tag.id')\n", (61156, 61166), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((61216, 61244), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (61226, 61244), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((61288, 61306), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (61301, 61306), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((61342, 61360), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (61355, 61360), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((61401, 61419), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(255)'], {}), '(255)\n', (61414, 61419), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, 
UUIDType\n'), ((61634, 61658), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (61644, 61658), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((61708, 61736), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (61718, 61736), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((62035, 62079), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (62045, 62079), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((62129, 62157), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (62139, 62157), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((62421, 62453), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""stored_workflow.id"""'], {}), "('stored_workflow.id')\n", (62431, 62453), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((62503, 62531), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (62513, 62531), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((62789, 62819), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""workflow_step.id"""'], {}), 
"('workflow_step.id')\n", (62799, 62819), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((62869, 62897), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (62879, 62897), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((63129, 63150), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""page.id"""'], {}), "('page.id')\n", (63139, 63150), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((63200, 63228), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (63210, 63228), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((63487, 63517), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""visualization.id"""'], {}), "('visualization.id')\n", (63497, 63517), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((63567, 63595), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (63577, 63595), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((63891, 63946), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_collection_association.id"""'], {}), "('history_dataset_collection_association.id')\n", (63901, 63946), False, 'from sqlalchemy import and_, asc, Boolean, Column, 
DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((63996, 64024), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (64006, 64024), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((64320, 64375), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_collection_association.id"""'], {}), "('library_dataset_collection_association.id')\n", (64330, 64375), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((64425, 64453), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (64435, 64453), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((64704, 64728), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history.id"""'], {}), "('history.id')\n", (64714, 64728), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((64778, 64806), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (64788, 64806), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((65096, 65140), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_association.id"""'], {}), "('history_dataset_association.id')\n", (65106, 65140), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, 
select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((65190, 65218), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (65200, 65218), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((65473, 65505), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""stored_workflow.id"""'], {}), "('stored_workflow.id')\n", (65483, 65505), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((65555, 65583), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (65565, 65583), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((65806, 65827), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""page.id"""'], {}), "('page.id')\n", (65816, 65827), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((65877, 65905), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (65887, 65905), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((66155, 66185), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""visualization.id"""'], {}), "('visualization.id')\n", (66165, 66185), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((66235, 66263), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), 
"('galaxy_user.id')\n", (66245, 66263), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((66550, 66605), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""history_dataset_collection_association.id"""'], {}), "('history_dataset_collection_association.id')\n", (66560, 66605), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((66655, 66683), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (66665, 66683), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((66970, 67025), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_collection_association.id"""'], {}), "('library_dataset_collection_association.id')\n", (66980, 67025), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((67075, 67103), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (67085, 67103), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((67327, 67355), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (67337, 67355), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((67393, 67405), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (67400, 67405), False, 'from sqlalchemy import 
and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((67443, 67456), 'sqlalchemy.Unicode', 'Unicode', (['(1024)'], {}), '(1024)\n', (67450, 67456), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((67652, 67680), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (67662, 67680), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((67733, 67764), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_session.id"""'], {}), "('galaxy_session.id')\n", (67743, 67764), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((67804, 67816), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (67811, 67816), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((67845, 67857), 'sqlalchemy.Unicode', 'Unicode', (['(512)'], {}), '(512)\n', (67852, 67857), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((67885, 67898), 'sqlalchemy.Unicode', 'Unicode', (['(1024)'], {}), '(1024)\n', (67892, 67898), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((68088, 68116), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (68098, 
68116), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((68153, 68170), 'galaxy.model.custom_types.TrimmedString', 'TrimmedString', (['(32)'], {}), '(32)\n', (68166, 68170), False, 'from galaxy.model.custom_types import JSONType, MetadataType, TrimmedString, UUIDType\n'), ((77682, 77814), 'sqlalchemy.orm.relation', 'relation', (['model.Dataset'], {'primaryjoin': '(model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id\n )', 'lazy': '(False)'}), '(model.Dataset, primaryjoin=model.Dataset.table.c.id == model.\n HistoryDatasetAssociation.table.c.dataset_id, lazy=False)\n', (77690, 77814), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((77918, 78194), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'primaryjoin': '(model.HistoryDatasetAssociation.table.c.\n copied_from_history_dataset_association_id == model.\n HistoryDatasetAssociation.table.c.id)', 'remote_side': '[model.HistoryDatasetAssociation.table.c.id]', 'uselist': '(False)'}), '(model.HistoryDatasetAssociation, primaryjoin=model.\n HistoryDatasetAssociation.table.c.\n copied_from_history_dataset_association_id == model.\n HistoryDatasetAssociation.table.c.id, remote_side=[model.\n HistoryDatasetAssociation.table.c.id], uselist=False)\n', (77926, 78194), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((78257, 78455), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'primaryjoin': '(model.HistoryDatasetAssociation.table.c.\n copied_from_history_dataset_association_id == model.\n HistoryDatasetAssociation.table.c.id)'}), '(model.HistoryDatasetAssociation, primaryjoin=model.\n HistoryDatasetAssociation.table.c.\n copied_from_history_dataset_association_id == model.\n 
HistoryDatasetAssociation.table.c.id)\n', (78265, 78455), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((78516, 78751), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {'primaryjoin': '(model.HistoryDatasetAssociation.table.c.\n copied_from_library_dataset_dataset_association_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)', 'uselist': '(False)'}), '(model.LibraryDatasetDatasetAssociation, primaryjoin=model.\n HistoryDatasetAssociation.table.c.\n copied_from_library_dataset_dataset_association_id == model.\n LibraryDatasetDatasetAssociation.table.c.id, uselist=False)\n', (78524, 78751), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((78819, 79039), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {'primaryjoin': '(model.HistoryDatasetAssociation.table.c.\n copied_from_library_dataset_dataset_association_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)'}), '(model.LibraryDatasetDatasetAssociation, primaryjoin=model.\n HistoryDatasetAssociation.table.c.\n copied_from_library_dataset_dataset_association_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)\n', (78827, 79039), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((79082, 79270), 'sqlalchemy.orm.relation', 'relation', (['model.ImplicitlyConvertedDatasetAssociation'], {'primaryjoin': '(model.ImplicitlyConvertedDatasetAssociation.table.c.hda_parent_id == model\n .HistoryDatasetAssociation.table.c.id)'}), '(model.ImplicitlyConvertedDatasetAssociation, primaryjoin=model.\n ImplicitlyConvertedDatasetAssociation.table.c.hda_parent_id == model.\n HistoryDatasetAssociation.table.c.id)\n', (79090, 79270), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((79325, 79506), 'sqlalchemy.orm.relation', 'relation', 
(['model.ImplicitlyConvertedDatasetAssociation'], {'primaryjoin': '(model.ImplicitlyConvertedDatasetAssociation.table.c.hda_id == model.\n HistoryDatasetAssociation.table.c.id)'}), '(model.ImplicitlyConvertedDatasetAssociation, primaryjoin=model.\n ImplicitlyConvertedDatasetAssociation.table.c.hda_id == model.\n HistoryDatasetAssociation.table.c.id)\n', (79333, 79506), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((79951, 80178), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'primaryjoin': '((model.HistoryDatasetAssociation.table.c.parent_id == model.\n HistoryDatasetAssociation.table.c.id) & (model.\n HistoryDatasetAssociation.table.c.visible == True))'}), '(model.HistoryDatasetAssociation, primaryjoin=(model.\n HistoryDatasetAssociation.table.c.parent_id == model.\n HistoryDatasetAssociation.table.c.id) & (model.\n HistoryDatasetAssociation.table.c.visible == True))\n', (79959, 80178), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((80200, 80368), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociationTagAssociation'], {'order_by': 'model.HistoryDatasetAssociationTagAssociation.table.c.id', 'backref': '"""history_tag_associations"""'}), "(model.HistoryDatasetAssociationTagAssociation, order_by=model.\n HistoryDatasetAssociationTagAssociation.table.c.id, backref=\n 'history_tag_associations')\n", (80208, 80368), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((80378, 80539), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociationAnnotationAssociation'], {'order_by': 'model.HistoryDatasetAssociationAnnotationAssociation.table.c.id', 'backref': '"""hdas"""'}), "(model.HistoryDatasetAssociationAnnotationAssociation, order_by=\n model.HistoryDatasetAssociationAnnotationAssociation.table.c.id,\n backref='hdas')\n", (80386, 80539), False, 'from 
sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((80546, 80695), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociationRatingAssociation'], {'order_by': 'model.HistoryDatasetAssociationRatingAssociation.table.c.id', 'backref': '"""hdas"""'}), "(model.HistoryDatasetAssociationRatingAssociation, order_by=model.\n HistoryDatasetAssociationRatingAssociation.table.c.id, backref='hdas')\n", (80554, 80695), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((80716, 80869), 'sqlalchemy.orm.relation', 'relation', (['model.ExtendedMetadata'], {'primaryjoin': '(model.HistoryDatasetAssociation.table.c.extended_metadata_id == model.\n ExtendedMetadata.table.c.id)'}), '(model.ExtendedMetadata, primaryjoin=model.\n HistoryDatasetAssociation.table.c.extended_metadata_id == model.\n ExtendedMetadata.table.c.id)\n', (80724, 80869), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((80942, 81204), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetCollectionAssociation'], {'primaryjoin': '(model.HistoryDatasetAssociation.table.c.\n hidden_beneath_collection_instance_id == model.\n HistoryDatasetCollectionAssociation.table.c.id)', 'uselist': '(False)', 'backref': '"""hidden_dataset_instances"""'}), "(model.HistoryDatasetCollectionAssociation, primaryjoin=model.\n HistoryDatasetAssociation.table.c.hidden_beneath_collection_instance_id ==\n model.HistoryDatasetCollectionAssociation.table.c.id, uselist=False,\n backref='hidden_dataset_instances')\n", (80950, 81204), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((81298, 81436), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'primaryjoin': '(model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id\n )'}), '(model.HistoryDatasetAssociation, primaryjoin=model.Dataset.table.c\n .id == 
model.HistoryDatasetAssociation.table.c.dataset_id)\n', (81306, 81436), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((81487, 81758), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'primaryjoin': '((model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.\n dataset_id) & (model.HistoryDatasetAssociation.table.c.deleted == False\n ) & (model.HistoryDatasetAssociation.table.c.purged == False))'}), '(model.HistoryDatasetAssociation, primaryjoin=(model.Dataset.table.\n c.id == model.HistoryDatasetAssociation.table.c.dataset_id) & (model.\n HistoryDatasetAssociation.table.c.deleted == False) & (model.\n HistoryDatasetAssociation.table.c.purged == False))\n', (81495, 81758), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((81805, 82009), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'primaryjoin': '((model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.\n dataset_id) & (model.HistoryDatasetAssociation.table.c.purged == True))'}), '(model.HistoryDatasetAssociation, primaryjoin=(model.Dataset.table.\n c.id == model.HistoryDatasetAssociation.table.c.dataset_id) & (model.\n HistoryDatasetAssociation.table.c.purged == True))\n', (81813, 82009), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((82052, 82204), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {'primaryjoin': '(model.Dataset.table.c.id == model.LibraryDatasetDatasetAssociation.table.c\n .dataset_id)'}), '(model.LibraryDatasetDatasetAssociation, primaryjoin=model.Dataset.\n table.c.id == model.LibraryDatasetDatasetAssociation.table.c.dataset_id)\n', (82060, 82204), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((82255, 82486), 'sqlalchemy.orm.relation', 'relation', 
(['model.LibraryDatasetDatasetAssociation'], {'primaryjoin': '((model.Dataset.table.c.id == model.LibraryDatasetDatasetAssociation.table.\n c.dataset_id) & (model.LibraryDatasetDatasetAssociation.table.c.deleted ==\n False))'}), '(model.LibraryDatasetDatasetAssociation, primaryjoin=(model.Dataset\n .table.c.id == model.LibraryDatasetDatasetAssociation.table.c.\n dataset_id) & (model.LibraryDatasetDatasetAssociation.table.c.deleted ==\n False))\n', (82263, 82486), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((82509, 82620), 'sqlalchemy.orm.relation', 'relation', (['model.DatasetTagAssociation'], {'order_by': 'model.DatasetTagAssociation.table.c.id', 'backref': '"""datasets"""'}), "(model.DatasetTagAssociation, order_by=model.DatasetTagAssociation.\n table.c.id, backref='datasets')\n", (82517, 82620), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((104494, 104538), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {'backref': '"""state_history"""'}), "(model.Job, backref='state_history')\n", (104502, 104538), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((104594, 104637), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {'backref': '"""text_metrics"""'}), "(model.Job, backref='text_metrics')\n", (104602, 104637), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((104695, 104739), 'sqlalchemy.orm.relation', 'relation', (['model.Task'], {'backref': '"""text_metrics"""'}), "(model.Task, backref='text_metrics')\n", (104703, 104739), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((104798, 104844), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {'backref': '"""numeric_metrics"""'}), "(model.Job, backref='numeric_metrics')\n", (104806, 104844), False, 'from sqlalchemy.orm import backref, object_session, 
relation, mapper, class_mapper\n'), ((104905, 104952), 'sqlalchemy.orm.relation', 'relation', (['model.Task'], {'backref': '"""numeric_metrics"""'}), "(model.Task, backref='numeric_metrics')\n", (104913, 104952), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((105055, 105272), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetCollectionAssociation'], {'primaryjoin': '(model.HistoryDatasetCollectionAssociation.table.c.id == model.\n ImplicitlyCreatedDatasetCollectionInput.table.c.input_dataset_collection_id\n )'}), '(model.HistoryDatasetCollectionAssociation, primaryjoin=model.\n HistoryDatasetCollectionAssociation.table.c.id == model.\n ImplicitlyCreatedDatasetCollectionInput.table.c.input_dataset_collection_id\n )\n', (105063, 105272), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((108397, 108730), 'sqlalchemy.orm.relation', 'relation', (['model.DatasetCollectionElement'], {'primaryjoin': '(model.DatasetCollection.table.c.id == model.DatasetCollectionElement.table\n .c.dataset_collection_id)', 'remote_side': '[model.DatasetCollectionElement.table.c.dataset_collection_id]', 'backref': '"""collection"""', 'order_by': 'model.DatasetCollectionElement.table.c.element_index'}), "(model.DatasetCollectionElement, primaryjoin=model.\n DatasetCollection.table.c.id == model.DatasetCollectionElement.table.c.\n dataset_collection_id, remote_side=[model.DatasetCollectionElement.\n table.c.dataset_collection_id], backref='collection', order_by=model.\n DatasetCollectionElement.table.c.element_index)\n", (108405, 108730), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((108842, 108875), 'sqlalchemy.orm.relation', 'relation', (['model.DatasetCollection'], {}), '(model.DatasetCollection)\n', (108850, 108875), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((108891, 108945), 
'sqlalchemy.orm.relation', 'relation', (['model.History'], {'backref': '"""dataset_collections"""'}), "(model.History, backref='dataset_collections')\n", (108899, 108945), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((109004, 109331), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetCollectionAssociation'], {'primaryjoin': '(model.HistoryDatasetCollectionAssociation.table.c.\n copied_from_history_dataset_collection_association_id == model.\n HistoryDatasetCollectionAssociation.table.c.id)', 'remote_side': '[model.HistoryDatasetCollectionAssociation.table.c.id]', 'uselist': '(False)'}), '(model.HistoryDatasetCollectionAssociation, primaryjoin=model.\n HistoryDatasetCollectionAssociation.table.c.\n copied_from_history_dataset_collection_association_id == model.\n HistoryDatasetCollectionAssociation.table.c.id, remote_side=[model.\n HistoryDatasetCollectionAssociation.table.c.id], uselist=False)\n', (109012, 109331), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((109405, 109644), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetCollectionAssociation'], {'primaryjoin': '(model.HistoryDatasetCollectionAssociation.table.c.\n copied_from_history_dataset_collection_association_id == model.\n HistoryDatasetCollectionAssociation.table.c.id)'}), '(model.HistoryDatasetCollectionAssociation, primaryjoin=model.\n HistoryDatasetCollectionAssociation.table.c.\n copied_from_history_dataset_collection_association_id == model.\n HistoryDatasetCollectionAssociation.table.c.id)\n', (109413, 109644), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((109684, 109928), 'sqlalchemy.orm.relation', 'relation', (['model.ImplicitlyCreatedDatasetCollectionInput'], {'primaryjoin': '(model.HistoryDatasetCollectionAssociation.table.c.id == model.\n ImplicitlyCreatedDatasetCollectionInput.table.c.dataset_collection_id)', 
'backref': '"""dataset_collection"""'}), "(model.ImplicitlyCreatedDatasetCollectionInput, primaryjoin=model.\n HistoryDatasetCollectionAssociation.table.c.id == model.\n ImplicitlyCreatedDatasetCollectionInput.table.c.dataset_collection_id,\n backref='dataset_collection')\n", (109692, 109928), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((109964, 110125), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetCollectionTagAssociation'], {'order_by': 'model.HistoryDatasetCollectionTagAssociation.table.c.id', 'backref': '"""dataset_collections"""'}), "(model.HistoryDatasetCollectionTagAssociation, order_by=model.\n HistoryDatasetCollectionTagAssociation.table.c.id, backref=\n 'dataset_collections')\n", (109972, 110125), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((110135, 110310), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetCollectionAnnotationAssociation'], {'order_by': 'model.HistoryDatasetCollectionAnnotationAssociation.table.c.id', 'backref': '"""dataset_collections"""'}), "(model.HistoryDatasetCollectionAnnotationAssociation, order_by=\n model.HistoryDatasetCollectionAnnotationAssociation.table.c.id, backref\n ='dataset_collections')\n", (110143, 110310), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((110316, 110483), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetCollectionRatingAssociation'], {'order_by': 'model.HistoryDatasetCollectionRatingAssociation.table.c.id', 'backref': '"""dataset_collections"""'}), "(model.HistoryDatasetCollectionRatingAssociation, order_by=model.\n HistoryDatasetCollectionRatingAssociation.table.c.id, backref=\n 'dataset_collections')\n", (110324, 110483), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((110554, 110587), 'sqlalchemy.orm.relation', 'relation', (['model.DatasetCollection'], {}), 
'(model.DatasetCollection)\n', (110562, 110587), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((110602, 110662), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryFolder'], {'backref': '"""dataset_collections"""'}), "(model.LibraryFolder, backref='dataset_collections')\n", (110610, 110662), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((110675, 110836), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetCollectionTagAssociation'], {'order_by': 'model.LibraryDatasetCollectionTagAssociation.table.c.id', 'backref': '"""dataset_collections"""'}), "(model.LibraryDatasetCollectionTagAssociation, order_by=model.\n LibraryDatasetCollectionTagAssociation.table.c.id, backref=\n 'dataset_collections')\n", (110683, 110836), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((110846, 111021), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetCollectionAnnotationAssociation'], {'order_by': 'model.LibraryDatasetCollectionAnnotationAssociation.table.c.id', 'backref': '"""dataset_collections"""'}), "(model.LibraryDatasetCollectionAnnotationAssociation, order_by=\n model.LibraryDatasetCollectionAnnotationAssociation.table.c.id, backref\n ='dataset_collections')\n", (110854, 111021), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((111027, 111194), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetCollectionRatingAssociation'], {'order_by': 'model.LibraryDatasetCollectionRatingAssociation.table.c.id', 'backref': '"""dataset_collections"""'}), "(model.LibraryDatasetCollectionRatingAssociation, order_by=model.\n LibraryDatasetCollectionRatingAssociation.table.c.id, backref=\n 'dataset_collections')\n", (111035, 111194), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((111247, 111403), 
'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'primaryjoin': '(model.DatasetCollectionElement.table.c.hda_id == model.\n HistoryDatasetAssociation.table.c.id)'}), '(model.HistoryDatasetAssociation, primaryjoin=model.\n DatasetCollectionElement.table.c.hda_id == model.\n HistoryDatasetAssociation.table.c.id)\n', (111255, 111403), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((111430, 111601), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {'primaryjoin': '(model.DatasetCollectionElement.table.c.ldda_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)'}), '(model.LibraryDatasetDatasetAssociation, primaryjoin=model.\n DatasetCollectionElement.table.c.ldda_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)\n', (111438, 111601), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((111640, 111793), 'sqlalchemy.orm.relation', 'relation', (['model.DatasetCollection'], {'primaryjoin': '(model.DatasetCollectionElement.table.c.child_collection_id == model.\n DatasetCollection.table.c.id)'}), '(model.DatasetCollection, primaryjoin=model.\n DatasetCollectionElement.table.c.child_collection_id == model.\n DatasetCollection.table.c.id)\n', (111648, 111793), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((117735, 117769), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowInvocation'], {}), '(model.WorkflowInvocation)\n', (117743, 117769), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((117852, 117886), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowInvocation'], {}), '(model.WorkflowInvocation)\n', (117860, 117886), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((117908, 117936), 'sqlalchemy.orm.relation', 'relation', 
(['model.WorkflowStep'], {}), '(model.WorkflowStep)\n', (117916, 117936), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((118035, 118069), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowInvocation'], {}), '(model.WorkflowInvocation)\n', (118043, 118069), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((118091, 118119), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowStep'], {}), '(model.WorkflowStep)\n', (118099, 118119), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((118135, 118176), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {}), '(model.HistoryDatasetAssociation)\n', (118143, 118176), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((118286, 118320), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowInvocation'], {}), '(model.WorkflowInvocation)\n', (118294, 118320), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((118342, 118370), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowStep'], {}), '(model.WorkflowStep)\n', (118350, 118370), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((118397, 118448), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetCollectionAssociation'], {}), '(model.HistoryDatasetCollectionAssociation)\n', (118405, 118448), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((126335, 126380), 'sqlalchemy.orm.class_mapper', 'class_mapper', (['model.HistoryDatasetAssociation'], {}), '(model.HistoryDatasetAssociation)\n', (126347, 126380), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((126474, 126526), 'sqlalchemy.orm.class_mapper', 'class_mapper', 
(['model.LibraryDatasetDatasetAssociation'], {}), '(model.LibraryDatasetDatasetAssociation)\n', (126486, 126526), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((126628, 126683), 'sqlalchemy.orm.class_mapper', 'class_mapper', (['model.HistoryDatasetCollectionAssociation'], {}), '(model.HistoryDatasetCollectionAssociation)\n', (126640, 126683), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((122329, 122370), 'sqlalchemy.orm.relation', 'relation', (['model.Tag'], {'backref': 'backref_name'}), '(model.Tag, backref=backref_name)\n', (122337, 122370), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((122378, 122398), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {}), '(model.User)\n', (122386, 122398), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((127130, 127150), 'sqlalchemy.orm.object_session', 'object_session', (['self'], {}), '(self)\n', (127144, 127150), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((68821, 68931), 'sqlalchemy.orm.relation', 'relation', (['model.FormValues'], {'primaryjoin': '(model.Sample.table.c.form_values_id == model.FormValues.table.c.id)'}), '(model.FormValues, primaryjoin=model.Sample.table.c.form_values_id ==\n model.FormValues.table.c.id)\n', (68829, 68931), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((68987, 69087), 'sqlalchemy.orm.relation', 'relation', (['model.Request'], {'primaryjoin': '(model.Sample.table.c.request_id == model.Request.table.c.id)'}), '(model.Request, primaryjoin=model.Sample.table.c.request_id ==\n model.Request.table.c.id)\n', (68995, 69087), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((69142, 69253), 'sqlalchemy.orm.relation', 'relation', 
(['model.LibraryFolder'], {'primaryjoin': '(model.Sample.table.c.folder_id == model.LibraryFolder.table.c.id)'}), '(model.LibraryFolder, primaryjoin=model.Sample.table.c.folder_id ==\n model.LibraryFolder.table.c.id)\n', (69150, 69253), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((69309, 69409), 'sqlalchemy.orm.relation', 'relation', (['model.Library'], {'primaryjoin': '(model.Sample.table.c.library_id == model.Library.table.c.id)'}), '(model.Library, primaryjoin=model.Sample.table.c.library_id ==\n model.Library.table.c.id)\n', (69317, 69409), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((69465, 69565), 'sqlalchemy.orm.relation', 'relation', (['model.History'], {'primaryjoin': '(model.Sample.table.c.history_id == model.History.table.c.id)'}), '(model.History, primaryjoin=model.Sample.table.c.history_id ==\n model.History.table.c.id)\n', (69473, 69565), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((69709, 69836), 'sqlalchemy.orm.relation', 'relation', (['model.FormDefinition'], {'primaryjoin': '(model.FormValues.table.c.form_definition_id == model.FormDefinition.table.c.id\n )'}), '(model.FormDefinition, primaryjoin=model.FormValues.table.c.\n form_definition_id == model.FormDefinition.table.c.id)\n', (69717, 69836), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((70059, 70170), 'sqlalchemy.orm.relation', 'relation', (['model.FormValues'], {'primaryjoin': '(model.Request.table.c.form_values_id == model.FormValues.table.c.id)'}), '(model.FormValues, primaryjoin=model.Request.table.c.form_values_id ==\n model.FormValues.table.c.id)\n', (70067, 70170), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((70260, 70375), 'sqlalchemy.orm.relation', 'relation', (['model.RequestType'], {'primaryjoin': 
'(model.Request.table.c.request_type_id == model.RequestType.table.c.id)'}), '(model.RequestType, primaryjoin=model.Request.table.c.\n request_type_id == model.RequestType.table.c.id)\n', (70268, 70375), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((70462, 70575), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {'primaryjoin': '(model.Request.table.c.user_id == model.User.table.c.id)', 'backref': '"""requests"""'}), "(model.User, primaryjoin=model.Request.table.c.user_id == model.\n User.table.c.id, backref='requests')\n", (70470, 70575), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((71382, 71514), 'sqlalchemy.orm.relation', 'relation', (['model.FormDefinition'], {'primaryjoin': '(model.ExternalService.table.c.form_definition_id == model.FormDefinition.\n table.c.id)'}), '(model.FormDefinition, primaryjoin=model.ExternalService.table.c.\n form_definition_id == model.FormDefinition.table.c.id)\n', (71390, 71514), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((71675, 71795), 'sqlalchemy.orm.relation', 'relation', (['model.FormValues'], {'primaryjoin': '(model.ExternalService.table.c.form_values_id == model.FormValues.table.c.id)'}), '(model.FormValues, primaryjoin=model.ExternalService.table.c.\n form_values_id == model.FormValues.table.c.id)\n', (71683, 71795), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((72430, 72555), 'sqlalchemy.orm.relation', 'relation', (['model.FormDefinition'], {'primaryjoin': '(model.RequestType.table.c.request_form_id == model.FormDefinition.table.c.id)'}), '(model.FormDefinition, primaryjoin=model.RequestType.table.c.\n request_form_id == model.FormDefinition.table.c.id)\n', (72438, 72555), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((72657, 72781), 'sqlalchemy.orm.relation', 
'relation', (['model.FormDefinition'], {'primaryjoin': '(model.RequestType.table.c.sample_form_id == model.FormDefinition.table.c.id)'}), '(model.FormDefinition, primaryjoin=model.RequestType.table.c.\n sample_form_id == model.FormDefinition.table.c.id)\n', (72665, 72781), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((73019, 73210), 'sqlalchemy.orm.relation', 'relation', (['model.RequestType'], {'primaryjoin': '(model.RequestTypeExternalServiceAssociation.table.c.request_type_id ==\n model.RequestType.table.c.id)', 'backref': '"""external_service_associations"""'}), "(model.RequestType, primaryjoin=model.\n RequestTypeExternalServiceAssociation.table.c.request_type_id == model.\n RequestType.table.c.id, backref='external_service_associations')\n", (73027, 73210), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((73268, 73429), 'sqlalchemy.orm.relation', 'relation', (['model.ExternalService'], {'primaryjoin': '(model.RequestTypeExternalServiceAssociation.table.c.external_service_id ==\n model.ExternalService.table.c.id)'}), '(model.ExternalService, primaryjoin=model.\n RequestTypeExternalServiceAssociation.table.c.external_service_id ==\n model.ExternalService.table.c.id)\n', (73276, 73429), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((73627, 73673), 'sqlalchemy.orm.relation', 'relation', (['model.RequestType'], {'backref': '"""actions"""'}), "(model.RequestType, backref='actions')\n", (73635, 73673), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((73690, 73742), 'sqlalchemy.orm.relation', 'relation', (['model.Role'], {'backref': '"""request_type_actions"""'}), "(model.Role, backref='request_type_actions')\n", (73698, 73742), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((73852, 74010), 'sqlalchemy.orm.relation', 
'relation', (['model.FormDefinitionCurrent'], {'primaryjoin': '(model.FormDefinition.table.c.form_definition_current_id == model.\n FormDefinitionCurrent.table.c.id)'}), '(model.FormDefinitionCurrent, primaryjoin=model.FormDefinition.\n table.c.form_definition_current_id == model.FormDefinitionCurrent.table\n .c.id)\n', (73860, 74010), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((74202, 74418), 'sqlalchemy.orm.relation', 'relation', (['model.FormDefinition'], {'backref': '"""form_definition_current"""', 'cascade': '"""all, delete-orphan"""', 'primaryjoin': '(model.FormDefinitionCurrent.table.c.id == model.FormDefinition.table.c.\n form_definition_current_id)'}), "(model.FormDefinition, backref='form_definition_current', cascade=\n 'all, delete-orphan', primaryjoin=model.FormDefinitionCurrent.table.c.\n id == model.FormDefinition.table.c.form_definition_current_id)\n", (74210, 74418), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((74559, 74716), 'sqlalchemy.orm.relation', 'relation', (['model.FormDefinition'], {'post_update': '(True)', 'primaryjoin': '(model.FormDefinitionCurrent.table.c.latest_form_id == model.FormDefinition\n .table.c.id)'}), '(model.FormDefinition, post_update=True, primaryjoin=model.\n FormDefinitionCurrent.table.c.latest_form_id == model.FormDefinition.\n table.c.id)\n', (74567, 74716), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((74894, 75013), 'sqlalchemy.orm.relation', 'relation', (['model.SampleState'], {'primaryjoin': '(model.SampleEvent.table.c.sample_state_id == model.SampleState.table.c.id)'}), '(model.SampleState, primaryjoin=model.SampleEvent.table.c.\n sample_state_id == model.SampleState.table.c.id)\n', (74902, 75013), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((75293, 75426), 'sqlalchemy.orm.relation', 'relation', 
(['model.ExternalService'], {'primaryjoin': '(model.SampleDataset.table.c.external_service_id == model.ExternalService.\n table.c.id)'}), '(model.ExternalService, primaryjoin=model.SampleDataset.table.c.\n external_service_id == model.ExternalService.table.c.id)\n', (75301, 75426), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((75944, 75986), 'sqlalchemy.orm.relation', 'relation', (['model.RequestType'], {'backref': '"""run"""'}), "(model.RequestType, backref='run')\n", (75952, 75986), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((76026, 76069), 'sqlalchemy.orm.relation', 'relation', (['model.Run'], {'backref': '"""request_type"""'}), "(model.Run, backref='request_type')\n", (76034, 76069), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((76155, 76275), 'sqlalchemy.orm.relation', 'relation', (['model.FormDefinition'], {'primaryjoin': '(model.Run.table.c.form_definition_id == model.FormDefinition.table.c.id)'}), '(model.FormDefinition, primaryjoin=model.Run.table.c.\n form_definition_id == model.FormDefinition.table.c.id)\n', (76163, 76275), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((76368, 76475), 'sqlalchemy.orm.relation', 'relation', (['model.FormValues'], {'primaryjoin': '(model.Run.table.c.form_values_id == model.FormValues.table.c.id)'}), '(model.FormValues, primaryjoin=model.Run.table.c.form_values_id ==\n model.FormValues.table.c.id)\n', (76376, 76475), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((79721, 79930), 'sqlalchemy.orm.backref', 'backref', (['"""parent"""'], {'primaryjoin': '(model.HistoryDatasetAssociation.table.c.parent_id == model.\n HistoryDatasetAssociation.table.c.id)', 'remote_side': '[model.HistoryDatasetAssociation.table.c.id]', 'uselist': '(False)'}), "('parent', 
primaryjoin=model.HistoryDatasetAssociation.table.c.\n parent_id == model.HistoryDatasetAssociation.table.c.id, remote_side=[\n model.HistoryDatasetAssociation.table.c.id], uselist=False)\n", (79728, 79930), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((82794, 82835), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {}), '(model.HistoryDatasetAssociation)\n', (82802, 82835), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((82867, 82887), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {}), '(model.User)\n', (82875, 82887), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((83014, 83200), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'primaryjoin': '(model.HistoryDatasetAssociationSubset.table.c.\n history_dataset_association_id == model.HistoryDatasetAssociation.table\n .c.id)'}), '(model.HistoryDatasetAssociation, primaryjoin=model.\n HistoryDatasetAssociationSubset.table.c.history_dataset_association_id ==\n model.HistoryDatasetAssociation.table.c.id)\n', (83022, 83200), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((83253, 83452), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'primaryjoin': '(model.HistoryDatasetAssociationSubset.table.c.\n history_dataset_association_subset_id == model.\n HistoryDatasetAssociation.table.c.id)'}), '(model.HistoryDatasetAssociation, primaryjoin=model.\n HistoryDatasetAssociationSubset.table.c.\n history_dataset_association_subset_id == model.\n HistoryDatasetAssociation.table.c.id)\n', (83261, 83452), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((83629, 83805), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'primaryjoin': 
'(model.ImplicitlyConvertedDatasetAssociation.table.c.hda_parent_id == model\n .HistoryDatasetAssociation.table.c.id)'}), '(model.HistoryDatasetAssociation, primaryjoin=model.\n ImplicitlyConvertedDatasetAssociation.table.c.hda_parent_id == model.\n HistoryDatasetAssociation.table.c.id)\n', (83637, 83805), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((83884, 84075), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {'primaryjoin': '(model.ImplicitlyConvertedDatasetAssociation.table.c.ldda_parent_id ==\n model.LibraryDatasetDatasetAssociation.table.c.id)'}), '(model.LibraryDatasetDatasetAssociation, primaryjoin=model.\n ImplicitlyConvertedDatasetAssociation.table.c.ldda_parent_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)\n', (83892, 84075), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((84155, 84339), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {'primaryjoin': '(model.ImplicitlyConvertedDatasetAssociation.table.c.ldda_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)'}), '(model.LibraryDatasetDatasetAssociation, primaryjoin=model.\n ImplicitlyConvertedDatasetAssociation.table.c.ldda_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)\n', (84163, 84339), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((84414, 84583), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'primaryjoin': '(model.ImplicitlyConvertedDatasetAssociation.table.c.hda_id == model.\n HistoryDatasetAssociation.table.c.id)'}), '(model.HistoryDatasetAssociation, primaryjoin=model.\n ImplicitlyConvertedDatasetAssociation.table.c.hda_id == model.\n HistoryDatasetAssociation.table.c.id)\n', (84422, 84583), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((84714, 84763), 
'sqlalchemy.orm.relation', 'relation', (['model.GalaxySessionToHistoryAssociation'], {}), '(model.GalaxySessionToHistoryAssociation)\n', (84722, 84763), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((87200, 87312), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryTagAssociation'], {'order_by': 'model.HistoryTagAssociation.table.c.id', 'backref': '"""histories"""'}), "(model.HistoryTagAssociation, order_by=model.HistoryTagAssociation.\n table.c.id, backref='histories')\n", (87208, 87312), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((87344, 87470), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryAnnotationAssociation'], {'order_by': 'model.HistoryAnnotationAssociation.table.c.id', 'backref': '"""histories"""'}), "(model.HistoryAnnotationAssociation, order_by=model.\n HistoryAnnotationAssociation.table.c.id, backref='histories')\n", (87352, 87470), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((87498, 87616), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryRatingAssociation'], {'order_by': 'model.HistoryRatingAssociation.table.c.id', 'backref': '"""histories"""'}), "(model.HistoryRatingAssociation, order_by=model.\n HistoryRatingAssociation.table.c.id, backref='histories')\n", (87506, 87616), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((87953, 88011), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {'backref': '"""histories_shared_by_others"""'}), "(model.User, backref='histories_shared_by_others')\n", (87961, 88011), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((88044, 88096), 'sqlalchemy.orm.relation', 'relation', (['model.History'], {'backref': '"""users_shared_with"""'}), "(model.History, backref='users_shared_with')\n", (88052, 88096), False, 'from sqlalchemy.orm import backref, 
object_session, relation, mapper, class_mapper\n'), ((89326, 89434), 'sqlalchemy.orm.relation', 'relation', (['model.FormValues'], {'primaryjoin': '(model.User.table.c.form_values_id == model.FormValues.table.c.id)'}), '(model.FormValues, primaryjoin=model.User.table.c.form_values_id ==\n model.FormValues.table.c.id)\n', (89334, 89434), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((89890, 89926), 'sqlalchemy.orm.relation', 'relation', (['model.UserGroupAssociation'], {}), '(model.UserGroupAssociation)\n', (89898, 89926), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((90030, 90068), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {'backref': '"""groups"""'}), "(model.User, backref='groups')\n", (90038, 90068), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((90101, 90141), 'sqlalchemy.orm.relation', 'relation', (['model.Group'], {'backref': '"""members"""'}), "(model.Group, backref='members')\n", (90109, 90141), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((90251, 90302), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {'backref': '"""default_permissions"""'}), "(model.User, backref='default_permissions')\n", (90259, 90302), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((90334, 90354), 'sqlalchemy.orm.relation', 'relation', (['model.Role'], {}), '(model.Role)\n', (90342, 90354), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((90471, 90525), 'sqlalchemy.orm.relation', 'relation', (['model.History'], {'backref': '"""default_permissions"""'}), "(model.History, backref='default_permissions')\n", (90479, 90525), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((90557, 90577), 'sqlalchemy.orm.relation', 
'relation', (['model.Role'], {}), '(model.Role)\n', (90565, 90577), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((90658, 90693), 'sqlalchemy.orm.relation', 'relation', (['model.UserRoleAssociation'], {}), '(model.UserRoleAssociation)\n', (90666, 90693), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((90712, 90748), 'sqlalchemy.orm.relation', 'relation', (['model.GroupRoleAssociation'], {}), '(model.GroupRoleAssociation)\n', (90720, 90748), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((90862, 90899), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {'backref': '"""roles"""'}), "(model.User, backref='roles')\n", (90870, 90899), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((91290, 91310), 'sqlalchemy.orm.relation', 'relation', (['model.Role'], {}), '(model.Role)\n', (91298, 91310), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((91427, 91465), 'sqlalchemy.orm.relation', 'relation', (['model.Group'], {'backref': '"""roles"""'}), "(model.Group, backref='roles')\n", (91435, 91465), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((91482, 91502), 'sqlalchemy.orm.relation', 'relation', (['model.Role'], {}), '(model.Role)\n', (91490, 91502), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((91581, 91617), 'sqlalchemy.orm.relation', 'relation', (['model.UserQuotaAssociation'], {}), '(model.UserQuotaAssociation)\n', (91589, 91617), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((91649, 91686), 'sqlalchemy.orm.relation', 'relation', (['model.GroupQuotaAssociation'], {}), '(model.GroupQuotaAssociation)\n', (91657, 91686), False, 'from sqlalchemy.orm import backref, 
object_session, relation, mapper, class_mapper\n'), ((91790, 91828), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {'backref': '"""quotas"""'}), "(model.User, backref='quotas')\n", (91798, 91828), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((91859, 91880), 'sqlalchemy.orm.relation', 'relation', (['model.Quota'], {}), '(model.Quota)\n', (91867, 91880), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((91987, 92026), 'sqlalchemy.orm.relation', 'relation', (['model.Group'], {'backref': '"""quotas"""'}), "(model.Group, backref='quotas')\n", (91995, 92026), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((92057, 92078), 'sqlalchemy.orm.relation', 'relation', (['model.Quota'], {}), '(model.Quota)\n', (92065, 92078), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((92189, 92229), 'sqlalchemy.orm.relation', 'relation', (['model.Quota'], {'backref': '"""default"""'}), "(model.Quota, backref='default')\n", (92197, 92229), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((92340, 92382), 'sqlalchemy.orm.relation', 'relation', (['model.Dataset'], {'backref': '"""actions"""'}), "(model.Dataset, backref='actions')\n", (92348, 92382), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((92399, 92446), 'sqlalchemy.orm.relation', 'relation', (['model.Role'], {'backref': '"""dataset_actions"""'}), "(model.Role, backref='dataset_actions')\n", (92407, 92446), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((92561, 92603), 'sqlalchemy.orm.relation', 'relation', (['model.Library'], {'backref': '"""actions"""'}), "(model.Library, backref='actions')\n", (92569, 92603), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, 
class_mapper\n'), ((92620, 92667), 'sqlalchemy.orm.relation', 'relation', (['model.Role'], {'backref': '"""library_actions"""'}), "(model.Role, backref='library_actions')\n", (92628, 92667), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((92793, 92841), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryFolder'], {'backref': '"""actions"""'}), "(model.LibraryFolder, backref='actions')\n", (92801, 92841), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((92858, 92912), 'sqlalchemy.orm.relation', 'relation', (['model.Role'], {'backref': '"""library_folder_actions"""'}), "(model.Role, backref='library_folder_actions')\n", (92866, 92912), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((93049, 93098), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDataset'], {'backref': '"""actions"""'}), "(model.LibraryDataset, backref='actions')\n", (93057, 93098), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((93115, 93170), 'sqlalchemy.orm.relation', 'relation', (['model.Role'], {'backref': '"""library_dataset_actions"""'}), "(model.Role, backref='library_dataset_actions')\n", (93123, 93170), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((93365, 93432), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {'backref': '"""actions"""'}), "(model.LibraryDatasetDatasetAssociation, backref='actions')\n", (93373, 93432), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((93449, 93512), 'sqlalchemy.orm.relation', 'relation', (['model.Role'], {'backref': '"""library_dataset_dataset_actions"""'}), "(model.Role, backref='library_dataset_dataset_actions')\n", (93457, 93512), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, 
class_mapper\n'), ((94275, 94419), 'sqlalchemy.orm.relation', 'relation', (['model.ExtendedMetadata'], {'primaryjoin': '(model.ExtendedMetadataIndex.table.c.extended_metadata_id == model.\n ExtendedMetadata.table.c.id)'}), '(model.ExtendedMetadata, primaryjoin=model.ExtendedMetadataIndex.\n table.c.extended_metadata_id == model.ExtendedMetadata.table.c.id)\n', (94283, 94419), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((94883, 95022), 'sqlalchemy.orm.relation', 'relation', (['model.FormDefinition'], {'primaryjoin': '(model.LibraryInfoAssociation.table.c.form_definition_id == model.\n FormDefinition.table.c.id)'}), '(model.FormDefinition, primaryjoin=model.LibraryInfoAssociation.\n table.c.form_definition_id == model.FormDefinition.table.c.id)\n', (94891, 95022), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((95113, 95240), 'sqlalchemy.orm.relation', 'relation', (['model.FormValues'], {'primaryjoin': '(model.LibraryInfoAssociation.table.c.form_values_id == model.FormValues.\n table.c.id)'}), '(model.FormValues, primaryjoin=model.LibraryInfoAssociation.table.c\n .form_values_id == model.FormValues.table.c.id)\n', (95121, 95240), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((97453, 97603), 'sqlalchemy.orm.relation', 'relation', (['model.FormDefinition'], {'primaryjoin': '(model.LibraryFolderInfoAssociation.table.c.form_definition_id == model.\n FormDefinition.table.c.id)'}), '(model.FormDefinition, primaryjoin=model.\n LibraryFolderInfoAssociation.table.c.form_definition_id == model.\n FormDefinition.table.c.id)\n', (97461, 97603), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((97689, 97822), 'sqlalchemy.orm.relation', 'relation', (['model.FormValues'], {'primaryjoin': '(model.LibraryFolderInfoAssociation.table.c.form_values_id == model.\n 
FormValues.table.c.id)'}), '(model.FormValues, primaryjoin=model.LibraryFolderInfoAssociation.\n table.c.form_values_id == model.FormValues.table.c.id)\n', (97697, 97822), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((98000, 98029), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryFolder'], {}), '(model.LibraryFolder)\n', (98008, 98029), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((98077, 98269), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {'primaryjoin': '(model.LibraryDataset.table.c.library_dataset_dataset_association_id ==\n model.LibraryDatasetDatasetAssociation.table.c.id)'}), '(model.LibraryDatasetDatasetAssociation, primaryjoin=model.\n LibraryDataset.table.c.library_dataset_dataset_association_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)\n', (98085, 98269), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((98892, 98915), 'sqlalchemy.orm.relation', 'relation', (['model.Dataset'], {}), '(model.Dataset)\n', (98900, 98915), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((98945, 99099), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDataset'], {'primaryjoin': '(model.LibraryDatasetDatasetAssociation.table.c.library_dataset_id == model\n .LibraryDataset.table.c.id)'}), '(model.LibraryDataset, primaryjoin=model.\n LibraryDatasetDatasetAssociation.table.c.library_dataset_id == model.\n LibraryDataset.table.c.id)\n', (98953, 99099), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((99163, 99183), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {}), '(model.User)\n', (99171, 99183), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((99243, 99555), 'sqlalchemy.orm.relation', 'relation', 
(['model.LibraryDatasetDatasetAssociation'], {'primaryjoin': '(model.LibraryDatasetDatasetAssociation.table.c.\n copied_from_library_dataset_dataset_association_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)', 'remote_side': '[model.LibraryDatasetDatasetAssociation.table.c.id]', 'uselist': '(False)'}), '(model.LibraryDatasetDatasetAssociation, primaryjoin=model.\n LibraryDatasetDatasetAssociation.table.c.\n copied_from_library_dataset_dataset_association_id == model.\n LibraryDatasetDatasetAssociation.table.c.id, remote_side=[model.\n LibraryDatasetDatasetAssociation.table.c.id], uselist=False)\n', (99251, 99555), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((99646, 99873), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {'primaryjoin': '(model.LibraryDatasetDatasetAssociation.table.c.\n copied_from_library_dataset_dataset_association_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)'}), '(model.LibraryDatasetDatasetAssociation, primaryjoin=model.\n LibraryDatasetDatasetAssociation.table.c.\n copied_from_library_dataset_dataset_association_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)\n', (99654, 99873), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((99938, 100158), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'primaryjoin': '(model.LibraryDatasetDatasetAssociation.table.c.\n copied_from_history_dataset_association_id == model.\n HistoryDatasetAssociation.table.c.id)', 'uselist': '(False)'}), '(model.HistoryDatasetAssociation, primaryjoin=model.\n LibraryDatasetDatasetAssociation.table.c.\n copied_from_history_dataset_association_id == model.\n HistoryDatasetAssociation.table.c.id, uselist=False)\n', (99946, 100158), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((100235, 100448), 'sqlalchemy.orm.relation', 
'relation', (['model.HistoryDatasetAssociation'], {'primaryjoin': '(model.HistoryDatasetAssociation.table.c.\n copied_from_library_dataset_dataset_association_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)'}), '(model.HistoryDatasetAssociation, primaryjoin=model.\n HistoryDatasetAssociation.table.c.\n copied_from_library_dataset_dataset_association_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)\n', (100243, 100448), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((100503, 100699), 'sqlalchemy.orm.relation', 'relation', (['model.ImplicitlyConvertedDatasetAssociation'], {'primaryjoin': '(model.ImplicitlyConvertedDatasetAssociation.table.c.ldda_parent_id ==\n model.LibraryDatasetDatasetAssociation.table.c.id)'}), '(model.ImplicitlyConvertedDatasetAssociation, primaryjoin=model.\n ImplicitlyConvertedDatasetAssociation.table.c.ldda_parent_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)\n', (100511, 100699), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((101199, 101454), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {'primaryjoin': '((model.LibraryDatasetDatasetAssociation.table.c.parent_id == model.\n LibraryDatasetDatasetAssociation.table.c.id) & (model.\n LibraryDatasetDatasetAssociation.table.c.visible == True))'}), '(model.LibraryDatasetDatasetAssociation, primaryjoin=(model.\n LibraryDatasetDatasetAssociation.table.c.parent_id == model.\n LibraryDatasetDatasetAssociation.table.c.id) & (model.\n LibraryDatasetDatasetAssociation.table.c.visible == True))\n', (101207, 101454), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((101501, 101661), 'sqlalchemy.orm.relation', 'relation', (['model.ExtendedMetadata'], {'primaryjoin': '(model.LibraryDatasetDatasetAssociation.table.c.extended_metadata_id ==\n model.ExtendedMetadata.table.c.id)'}), 
'(model.ExtendedMetadata, primaryjoin=model.\n LibraryDatasetDatasetAssociation.table.c.extended_metadata_id == model.\n ExtendedMetadata.table.c.id)\n', (101509, 101661), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((102308, 102465), 'sqlalchemy.orm.relation', 'relation', (['model.FormDefinition'], {'primaryjoin': '(model.LibraryDatasetDatasetInfoAssociation.table.c.form_definition_id ==\n model.FormDefinition.table.c.id)'}), '(model.FormDefinition, primaryjoin=model.\n LibraryDatasetDatasetInfoAssociation.table.c.form_definition_id ==\n model.FormDefinition.table.c.id)\n', (102316, 102465), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((102552, 102698), 'sqlalchemy.orm.relation', 'relation', (['model.FormValues'], {'primaryjoin': '(model.LibraryDatasetDatasetInfoAssociation.table.c.form_values_id == model\n .FormValues.table.c.id)'}), '(model.FormValues, primaryjoin=model.\n LibraryDatasetDatasetInfoAssociation.table.c.form_values_id == model.\n FormValues.table.c.id)\n', (102560, 102698), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((102904, 102923), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {}), '(model.Job)\n', (102912, 102923), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((102935, 103014), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'lazy': '(False)', 'backref': '"""dependent_jobs"""'}), "(model.HistoryDatasetAssociation, lazy=False, backref='dependent_jobs')\n", (102943, 103014), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((103183, 103202), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {}), '(model.Job)\n', (103191, 103202), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((103214, 103267), 
'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'lazy': '(False)'}), '(model.HistoryDatasetAssociation, lazy=False)\n', (103222, 103267), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((103438, 103457), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {}), '(model.Job)\n', (103446, 103457), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((103480, 103574), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetCollectionAssociation'], {'lazy': '(False)', 'backref': '"""dependent_jobs"""'}), "(model.HistoryDatasetCollectionAssociation, lazy=False, backref=\n 'dependent_jobs')\n", (103488, 103574), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((103758, 103777), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {}), '(model.Job)\n', (103766, 103777), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((103800, 103863), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetCollectionAssociation'], {'lazy': '(False)'}), '(model.HistoryDatasetCollectionAssociation, lazy=False)\n', (103808, 103863), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((104028, 104047), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {}), '(model.Job)\n', (104036, 104047), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((104059, 104150), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {'lazy': '(False)', 'backref': '"""dependent_jobs"""'}), "(model.LibraryDatasetDatasetAssociation, lazy=False, backref=\n 'dependent_jobs')\n", (104067, 104150), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((104328, 104347), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], 
{}), '(model.Job)\n', (104336, 104347), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((104359, 104419), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {'lazy': '(False)'}), '(model.LibraryDatasetDatasetAssociation, lazy=False)\n', (104367, 104419), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((105516, 105535), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {}), '(model.Job)\n', (105524, 105535), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((105590, 105643), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {'lazy': '(False)'}), '(model.HistoryDatasetAssociation, lazy=False)\n', (105598, 105643), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((105708, 105768), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {'lazy': '(False)'}), '(model.LibraryDatasetDatasetAssociation, lazy=False)\n', (105716, 105768), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((105881, 105900), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {}), '(model.Job)\n', (105889, 105900), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((105935, 105958), 'sqlalchemy.orm.relation', 'relation', (['model.History'], {}), '(model.History)\n', (105943, 105958), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((105993, 106016), 'sqlalchemy.orm.relation', 'relation', (['model.Dataset'], {}), '(model.Dataset)\n', (106001, 106016), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((106127, 106146), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {}), '(model.Job)\n', (106135, 106146), False, 'from 
sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((106160, 106183), 'sqlalchemy.orm.relation', 'relation', (['model.History'], {}), '(model.History)\n', (106168, 106183), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((106286, 106320), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {'backref': '"""job"""'}), "(model.Job, backref='job')\n", (106294, 106320), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((106355, 106378), 'sqlalchemy.orm.relation', 'relation', (['model.Dataset'], {}), '(model.Dataset)\n', (106363, 106378), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((106410, 106430), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {}), '(model.User)\n', (106418, 106430), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((106466, 106517), 'sqlalchemy.orm.relation', 'relation', (['model.DeferredJob'], {'backref': '"""deferred_job"""'}), "(model.DeferredJob, backref='deferred_job')\n", (106474, 106517), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((106553, 106604), 'sqlalchemy.orm.relation', 'relation', (['model.TransferJob'], {'backref': '"""transfer_job"""'}), "(model.TransferJob, backref='transfer_job')\n", (106561, 106604), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((106703, 106855), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowStep'], {'backref': '"""post_job_actions"""', 'primaryjoin': '(model.WorkflowStep.table.c.id == model.PostJobAction.table.c.workflow_step_id)'}), "(model.WorkflowStep, backref='post_job_actions', primaryjoin=model.\n WorkflowStep.table.c.id == model.PostJobAction.table.c.workflow_step_id)\n", (106711, 106855), False, 'from sqlalchemy.orm import backref, object_session, relation, 
mapper, class_mapper\n'), ((106962, 106981), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {}), '(model.Job)\n', (106970, 106981), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((107024, 107053), 'sqlalchemy.orm.relation', 'relation', (['model.PostJobAction'], {}), '(model.PostJobAction)\n', (107032, 107053), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((107180, 107200), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {}), '(model.User)\n', (107188, 107200), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((107240, 107269), 'sqlalchemy.orm.relation', 'relation', (['model.GalaxySession'], {}), '(model.GalaxySession)\n', (107248, 107269), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((107302, 107325), 'sqlalchemy.orm.relation', 'relation', (['model.History'], {}), '(model.History)\n', (107310, 107325), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((107365, 107394), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryFolder'], {}), '(model.LibraryFolder)\n', (107373, 107394), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((107430, 107470), 'sqlalchemy.orm.relation', 'relation', (['model.JobParameter'], {'lazy': '(False)'}), '(model.JobParameter, lazy=False)\n', (107438, 107470), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((107510, 107554), 'sqlalchemy.orm.relation', 'relation', (['model.JobToInputDatasetAssociation'], {}), '(model.JobToInputDatasetAssociation)\n', (107518, 107554), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((107595, 107640), 'sqlalchemy.orm.relation', 'relation', (['model.JobToOutputDatasetAssociation'], {}), 
'(model.JobToOutputDatasetAssociation)\n', (107603, 107640), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((107682, 107734), 'sqlalchemy.orm.relation', 'relation', (['model.PostJobActionAssociation'], {'lazy': '(False)'}), '(model.PostJobActionAssociation, lazy=False)\n', (107690, 107734), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((107782, 107833), 'sqlalchemy.orm.relation', 'relation', (['model.JobToInputLibraryDatasetAssociation'], {}), '(model.JobToInputLibraryDatasetAssociation)\n', (107790, 107833), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((107882, 107934), 'sqlalchemy.orm.relation', 'relation', (['model.JobToOutputLibraryDatasetAssociation'], {}), '(model.JobToOutputLibraryDatasetAssociation)\n', (107890, 107934), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((107986, 108039), 'sqlalchemy.orm.relation', 'relation', (['model.JobExternalOutputMetadata'], {'lazy': '(False)'}), '(model.JobExternalOutputMetadata, lazy=False)\n', (107994, 108039), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((108074, 108094), 'sqlalchemy.orm.relation', 'relation', (['model.Task'], {}), '(model.Task)\n', (108082, 108094), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((108165, 108184), 'sqlalchemy.orm.relation', 'relation', (['model.Job'], {}), '(model.Job)\n', (108173, 108184), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((111884, 111907), 'sqlalchemy.orm.relation', 'relation', (['model.History'], {}), '(model.History)\n', (111892, 111907), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((111947, 111976), 'sqlalchemy.orm.relation', 'relation', (['model.GalaxySession'], {}), 
'(model.GalaxySession)\n', (111955, 111976), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((112067, 112087), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {}), '(model.User)\n', (112075, 112087), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((112182, 112231), 'sqlalchemy.orm.relation', 'relation', (['model.GalaxySessionToHistoryAssociation'], {}), '(model.GalaxySessionToHistoryAssociation)\n', (112190, 112231), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((112272, 112295), 'sqlalchemy.orm.relation', 'relation', (['model.History'], {}), '(model.History)\n', (112280, 112295), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((112386, 112406), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {}), '(model.User)\n', (112394, 112406), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((112546, 112575), 'sqlalchemy.orm.relation', 'relation', (['model.GalaxySession'], {}), '(model.GalaxySession)\n', (112554, 112575), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((112608, 112631), 'sqlalchemy.orm.relation', 'relation', (['model.History'], {}), '(model.History)\n', (112616, 112631), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((113109, 113236), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowStepTagAssociation'], {'order_by': 'model.WorkflowStepTagAssociation.table.c.id', 'backref': '"""workflow_steps"""'}), "(model.WorkflowStepTagAssociation, order_by=model.\n WorkflowStepTagAssociation.table.c.id, backref='workflow_steps')\n", (113117, 113236), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((113265, 113406), 'sqlalchemy.orm.relation', 'relation', 
(['model.WorkflowStepAnnotationAssociation'], {'order_by': 'model.WorkflowStepAnnotationAssociation.table.c.id', 'backref': '"""workflow_steps"""'}), "(model.WorkflowStepAnnotationAssociation, order_by=model.\n WorkflowStepAnnotationAssociation.table.c.id, backref='workflow_steps')\n", (113273, 113406), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((113519, 113672), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowStep'], {'backref': '"""workflow_outputs"""', 'primaryjoin': '(model.WorkflowStep.table.c.id == model.WorkflowOutput.table.c.workflow_step_id\n )'}), "(model.WorkflowStep, backref='workflow_outputs', primaryjoin=model.\n WorkflowStep.table.c.id == model.WorkflowOutput.table.c.workflow_step_id)\n", (113527, 113672), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((113780, 113958), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowStep'], {'backref': '"""input_connections"""', 'cascade': '"""all"""', 'primaryjoin': '(model.WorkflowStepConnection.table.c.input_step_id == model.WorkflowStep.\n table.c.id)'}), "(model.WorkflowStep, backref='input_connections', cascade='all',\n primaryjoin=model.WorkflowStepConnection.table.c.input_step_id == model\n .WorkflowStep.table.c.id)\n", (113788, 113958), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((114032, 114211), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowStep'], {'backref': '"""output_connections"""', 'cascade': '"""all"""', 'primaryjoin': '(model.WorkflowStepConnection.table.c.output_step_id == model.WorkflowStep.\n table.c.id)'}), "(model.WorkflowStep, backref='output_connections', cascade='all',\n primaryjoin=model.WorkflowStepConnection.table.c.output_step_id ==\n model.WorkflowStep.table.c.id)\n", (114040, 114211), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((114343, 114471), 
'sqlalchemy.orm.relation', 'relation', (['model.User'], {'primaryjoin': '(model.User.table.c.id == model.StoredWorkflow.table.c.user_id)', 'backref': '"""stored_workflows"""'}), "(model.User, primaryjoin=model.User.table.c.id == model.\n StoredWorkflow.table.c.user_id, backref='stored_workflows')\n", (114351, 114471), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((114577, 114757), 'sqlalchemy.orm.relation', 'relation', (['model.Workflow'], {'backref': '"""stored_workflow"""', 'cascade': '"""all, delete-orphan"""', 'primaryjoin': '(model.StoredWorkflow.table.c.id == model.Workflow.table.c.stored_workflow_id)'}), "(model.Workflow, backref='stored_workflow', cascade=\n 'all, delete-orphan', primaryjoin=model.StoredWorkflow.table.c.id ==\n model.Workflow.table.c.stored_workflow_id)\n", (114585, 114757), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((114875, 115024), 'sqlalchemy.orm.relation', 'relation', (['model.Workflow'], {'post_update': '(True)', 'primaryjoin': '(model.StoredWorkflow.table.c.latest_workflow_id == model.Workflow.table.c.id)', 'lazy': '(False)'}), '(model.Workflow, post_update=True, primaryjoin=model.StoredWorkflow\n .table.c.latest_workflow_id == model.Workflow.table.c.id, lazy=False)\n', (114883, 115024), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((115147, 115280), 'sqlalchemy.orm.relation', 'relation', (['model.StoredWorkflowTagAssociation'], {'order_by': 'model.StoredWorkflowTagAssociation.table.c.id', 'backref': '"""stored_workflows"""'}), "(model.StoredWorkflowTagAssociation, order_by=model.\n StoredWorkflowTagAssociation.table.c.id, backref='stored_workflows')\n", (115155, 115280), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((115784, 115931), 'sqlalchemy.orm.relation', 'relation', (['model.StoredWorkflowAnnotationAssociation'], 
{'order_by': 'model.StoredWorkflowAnnotationAssociation.table.c.id', 'backref': '"""stored_workflows"""'}), "(model.StoredWorkflowAnnotationAssociation, order_by=model.\n StoredWorkflowAnnotationAssociation.table.c.id, backref='stored_workflows')\n", (115792, 115931), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((115959, 116098), 'sqlalchemy.orm.relation', 'relation', (['model.StoredWorkflowRatingAssociation'], {'order_by': 'model.StoredWorkflowRatingAssociation.table.c.id', 'backref': '"""stored_workflows"""'}), "(model.StoredWorkflowRatingAssociation, order_by=model.\n StoredWorkflowRatingAssociation.table.c.id, backref='stored_workflows')\n", (115967, 116098), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((116461, 116519), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {'backref': '"""workflows_shared_by_others"""'}), "(model.User, backref='workflows_shared_by_others')\n", (116469, 116519), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((116560, 116619), 'sqlalchemy.orm.relation', 'relation', (['model.StoredWorkflow'], {'backref': '"""users_shared_with"""'}), "(model.StoredWorkflow, backref='users_shared_with')\n", (116568, 116619), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((116759, 116789), 'sqlalchemy.orm.relation', 'relation', (['model.StoredWorkflow'], {}), '(model.StoredWorkflow)\n', (116767, 116789), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((116900, 116923), 'sqlalchemy.orm.relation', 'relation', (['model.History'], {}), '(model.History)\n', (116908, 116923), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((116952, 116997), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowRequestInputParameter'], {}), 
'(model.WorkflowRequestInputParameter)\n', (116960, 116997), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((117021, 117061), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowRequestStepState'], {}), '(model.WorkflowRequestStepState)\n', (117029, 117061), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((117088, 117144), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowRequestToInputDatasetAssociation'], {}), '(model.WorkflowRequestToInputDatasetAssociation)\n', (117096, 117144), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((117182, 117248), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowRequestToInputDatasetCollectionAssociation'], {}), '(model.WorkflowRequestToInputDatasetCollectionAssociation)\n', (117190, 117248), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((117266, 117352), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowInvocationStep'], {'backref': '"""workflow_invocation"""', 'lazy': '(False)'}), "(model.WorkflowInvocationStep, backref='workflow_invocation', lazy=\n False)\n", (117274, 117352), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((117368, 117392), 'sqlalchemy.orm.relation', 'relation', (['model.Workflow'], {}), '(model.Workflow)\n', (117376, 117392), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((117519, 117547), 'sqlalchemy.orm.relation', 'relation', (['model.WorkflowStep'], {}), '(model.WorkflowStep)\n', (117527, 117547), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((118547, 118588), 'sqlalchemy.orm.relation', 'relation', (['model.HistoryDatasetAssociation'], {}), '(model.HistoryDatasetAssociation)\n', (118555, 118588), False, 'from sqlalchemy.orm import backref, 
object_session, relation, mapper, class_mapper\n'), ((118608, 118656), 'sqlalchemy.orm.relation', 'relation', (['model.LibraryDatasetDatasetAssociation'], {}), '(model.LibraryDatasetDatasetAssociation)\n', (118616, 118656), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((118784, 118804), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {}), '(model.User)\n', (118792, 118804), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((118839, 118990), 'sqlalchemy.orm.relation', 'relation', (['model.PageRevision'], {'backref': '"""page"""', 'cascade': '"""all, delete-orphan"""', 'primaryjoin': '(model.Page.table.c.id == model.PageRevision.table.c.page_id)'}), "(model.PageRevision, backref='page', cascade='all, delete-orphan',\n primaryjoin=model.Page.table.c.id == model.PageRevision.table.c.page_id)\n", (118847, 118990), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((119113, 119260), 'sqlalchemy.orm.relation', 'relation', (['model.PageRevision'], {'post_update': '(True)', 'primaryjoin': '(model.Page.table.c.latest_revision_id == model.PageRevision.table.c.id)', 'lazy': '(False)'}), '(model.PageRevision, post_update=True, primaryjoin=model.Page.table\n .c.latest_revision_id == model.PageRevision.table.c.id, lazy=False)\n', (119121, 119260), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((119383, 119485), 'sqlalchemy.orm.relation', 'relation', (['model.PageTagAssociation'], {'order_by': 'model.PageTagAssociation.table.c.id', 'backref': '"""pages"""'}), "(model.PageTagAssociation, order_by=model.PageTagAssociation.table.\n c.id, backref='pages')\n", (119391, 119485), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((119515, 119631), 'sqlalchemy.orm.relation', 'relation', (['model.PageAnnotationAssociation'], {'order_by': 
'model.PageAnnotationAssociation.table.c.id', 'backref': '"""pages"""'}), "(model.PageAnnotationAssociation, order_by=model.\n PageAnnotationAssociation.table.c.id, backref='pages')\n", (119523, 119631), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((119659, 119767), 'sqlalchemy.orm.relation', 'relation', (['model.PageRatingAssociation'], {'order_by': 'model.PageRatingAssociation.table.c.id', 'backref': '"""pages"""'}), "(model.PageRatingAssociation, order_by=model.PageRatingAssociation.\n table.c.id, backref='pages')\n", (119667, 119767), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((120085, 120139), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {'backref': '"""pages_shared_by_others"""'}), "(model.User, backref='pages_shared_by_others')\n", (120093, 120139), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((120168, 120217), 'sqlalchemy.orm.relation', 'relation', (['model.Page'], {'backref': '"""users_shared_with"""'}), "(model.Page, backref='users_shared_with')\n", (120176, 120217), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((120399, 120419), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {}), '(model.User)\n', (120407, 120419), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((120454, 120655), 'sqlalchemy.orm.relation', 'relation', (['model.VisualizationRevision'], {'backref': '"""visualization"""', 'cascade': '"""all, delete-orphan"""', 'primaryjoin': '(model.Visualization.table.c.id == model.VisualizationRevision.table.c.\n visualization_id)'}), "(model.VisualizationRevision, backref='visualization', cascade=\n 'all, delete-orphan', primaryjoin=model.Visualization.table.c.id ==\n model.VisualizationRevision.table.c.visualization_id)\n", (120462, 120655), False, 'from sqlalchemy.orm import 
backref, object_session, relation, mapper, class_mapper\n'), ((120773, 120952), 'sqlalchemy.orm.relation', 'relation', (['model.VisualizationRevision'], {'post_update': '(True)', 'primaryjoin': '(model.Visualization.table.c.latest_revision_id == model.\n VisualizationRevision.table.c.id)', 'lazy': '(False)'}), '(model.VisualizationRevision, post_update=True, primaryjoin=model.\n Visualization.table.c.latest_revision_id == model.VisualizationRevision\n .table.c.id, lazy=False)\n', (120781, 120952), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((121070, 121199), 'sqlalchemy.orm.relation', 'relation', (['model.VisualizationTagAssociation'], {'order_by': 'model.VisualizationTagAssociation.table.c.id', 'backref': '"""visualizations"""'}), "(model.VisualizationTagAssociation, order_by=model.\n VisualizationTagAssociation.table.c.id, backref='visualizations')\n", (121078, 121199), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((121231, 121374), 'sqlalchemy.orm.relation', 'relation', (['model.VisualizationAnnotationAssociation'], {'order_by': 'model.VisualizationAnnotationAssociation.table.c.id', 'backref': '"""visualizations"""'}), "(model.VisualizationAnnotationAssociation, order_by=model.\n VisualizationAnnotationAssociation.table.c.id, backref='visualizations')\n", (121239, 121374), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((121402, 121537), 'sqlalchemy.orm.relation', 'relation', (['model.VisualizationRatingAssociation'], {'order_by': 'model.VisualizationRatingAssociation.table.c.id', 'backref': '"""visualizations"""'}), "(model.VisualizationRatingAssociation, order_by=model.\n VisualizationRatingAssociation.table.c.id, backref='visualizations')\n", (121410, 121537), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((121899, 121962), 'sqlalchemy.orm.relation', 
'relation', (['model.User'], {'backref': '"""visualizations_shared_by_others"""'}), "(model.User, backref='visualizations_shared_by_others')\n", (121907, 121962), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((121999, 122057), 'sqlalchemy.orm.relation', 'relation', (['model.Visualization'], {'backref': '"""users_shared_with"""'}), "(model.Visualization, backref='users_shared_with')\n", (122007, 122057), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((122164, 122217), 'sqlalchemy.orm.backref', 'backref', (['"""parent"""'], {'remote_side': '[model.Tag.table.c.id]'}), "('parent', remote_side=[model.Tag.table.c.id])\n", (122171, 122217), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((123263, 123278), 'sqlalchemy.orm.relation', 'relation', (['value'], {}), '(value)\n', (123271, 123278), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((124350, 124365), 'sqlalchemy.orm.relation', 'relation', (['value'], {}), '(value)\n', (124358, 124365), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((125330, 125353), 'sqlalchemy.orm.relation', 'relation', (['model.History'], {}), '(model.History)\n', (125338, 125353), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((125383, 125437), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {'backref': '"""data_manager_histories"""'}), "(model.User, backref='data_manager_histories')\n", (125391, 125437), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((125947, 125967), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {}), '(model.User)\n', (125955, 125967), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((127242, 127311), 'sqlalchemy.select', 
'select', (['[table.c.hid_counter]', '(table.c.id == self.id)'], {'for_update': '(True)'}), '([table.c.hid_counter], table.c.id == self.id, for_update=True)\n', (127248, 127311), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((123372, 123392), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {}), '(model.User)\n', (123380, 123392), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((124455, 124475), 'sqlalchemy.orm.relation', 'relation', (['model.User'], {}), '(model.User)\n', (124463, 124475), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((68583, 68626), 'sqlalchemy.desc', 'desc', (['model.SampleEvent.table.c.update_time'], {}), '(model.SampleEvent.table.c.update_time)\n', (68587, 68626), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((68743, 68788), 'sqlalchemy.desc', 'desc', (['model.SampleDataset.table.c.update_time'], {}), '(model.SampleDataset.table.c.update_time)\n', (68747, 68788), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((70922, 70950), 'sqlalchemy.asc', 'asc', (['model.Sample.table.c.id'], {}), '(model.Sample.table.c.id)\n', (70925, 70950), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((71102, 71146), 'sqlalchemy.desc', 'desc', (['model.RequestEvent.table.c.update_time'], {}), '(model.RequestEvent.table.c.update_time)\n', (71106, 71146), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, 
ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((72337, 72379), 'sqlalchemy.asc', 'asc', (['model.SampleState.table.c.update_time'], {}), '(model.SampleState.table.c.update_time)\n', (72340, 72379), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((75655, 75690), 'sqlalchemy.desc', 'desc', (['model.Run.table.c.update_time'], {}), '(model.Run.table.c.update_time)\n', (75659, 75690), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((75781, 75809), 'sqlalchemy.asc', 'asc', (['model.Sample.table.c.id'], {}), '(model.Sample.table.c.id)\n', (75784, 75809), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((76873, 76916), 'sqlalchemy.desc', 'desc', (['model.UserAddress.table.c.update_time'], {}), '(model.UserAddress.table.c.update_time)\n', (76877, 76916), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((77251, 77293), 'sqlalchemy.desc', 'desc', (['model.UserOpenID.table.c.update_time'], {}), '(model.UserOpenID.table.c.update_time)\n', (77255, 77293), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((77506, 77548), 'sqlalchemy.desc', 'desc', (['model.UserOpenID.table.c.update_time'], {}), '(model.UserOpenID.table.c.update_time)\n', (77510, 77548), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, 
Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((84868, 84916), 'sqlalchemy.asc', 'asc', (['model.HistoryDatasetAssociation.table.c.hid'], {}), '(model.HistoryDatasetAssociation.table.c.hid)\n', (84871, 84916), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((85093, 85139), 'sqlalchemy.desc', 'desc', (['model.JobExportHistoryArchive.table.c.id'], {}), '(model.JobExportHistoryArchive.table.c.id)\n', (85097, 85139), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((85516, 85564), 'sqlalchemy.asc', 'asc', (['model.HistoryDatasetAssociation.table.c.hid'], {}), '(model.HistoryDatasetAssociation.table.c.hid)\n', (85519, 85564), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((86018, 86076), 'sqlalchemy.asc', 'asc', (['model.HistoryDatasetCollectionAssociation.table.c.hid'], {}), '(model.HistoryDatasetCollectionAssociation.table.c.hid)\n', (86021, 86076), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((86513, 86561), 'sqlalchemy.asc', 'asc', (['model.HistoryDatasetAssociation.table.c.hid'], {}), '(model.HistoryDatasetAssociation.table.c.hid)\n', (86516, 86561), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((87049, 87107), 'sqlalchemy.asc', 'asc', (['model.HistoryDatasetCollectionAssociation.table.c.hid'], {}), '(model.HistoryDatasetCollectionAssociation.table.c.hid)\n', (87052, 87107), 
False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((88283, 88322), 'sqlalchemy.desc', 'desc', (['model.History.table.c.update_time'], {}), '(model.History.table.c.update_time)\n', (88287, 88322), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((88518, 88557), 'sqlalchemy.desc', 'desc', (['model.History.table.c.update_time'], {}), '(model.History.table.c.update_time)\n', (88522, 88557), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((88640, 88685), 'sqlalchemy.desc', 'desc', (['model.GalaxySession.table.c.update_time'], {}), '(model.GalaxySession.table.c.update_time)\n', (88644, 88685), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((88965, 88993), 'sqlalchemy.ext.orderinglist.ordering_list', 'ordering_list', (['"""order_index"""'], {}), "('order_index')\n", (88978, 88993), False, 'from sqlalchemy.ext.orderinglist import ordering_list\n'), ((89098, 89133), 'sqlalchemy.orm.collections.attribute_mapped_collection', 'attribute_mapped_collection', (['"""name"""'], {}), "('name')\n", (89125, 89133), False, 'from sqlalchemy.orm.collections import attribute_mapped_collection\n'), ((89556, 89595), 'sqlalchemy.desc', 'desc', (['model.APIKeys.table.c.create_time'], {}), '(model.APIKeys.table.c.create_time)\n', (89560, 89595), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((93648, 93671), 'sqlalchemy.orm.backref', 
'backref', (['"""library_root"""'], {}), "('library_root')\n", (93655, 93671), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((93979, 94108), 'sqlalchemy.orm.backref', 'backref', (['"""parent"""'], {'primaryjoin': '(model.ExtendedMetadataIndex.table.c.extended_metadata_id == model.\n ExtendedMetadata.table.c.id)'}), "('parent', primaryjoin=model.ExtendedMetadataIndex.table.c.\n extended_metadata_id == model.ExtendedMetadata.table.c.id)\n", (93986, 94108), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((95582, 95619), 'sqlalchemy.asc', 'asc', (['model.LibraryFolder.table.c.name'], {}), '(model.LibraryFolder.table.c.name)\n', (95585, 95619), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((95643, 95800), 'sqlalchemy.orm.backref', 'backref', (['"""parent"""'], {'primaryjoin': '(model.LibraryFolder.table.c.parent_id == model.LibraryFolder.table.c.id)', 'remote_side': '[model.LibraryFolder.table.c.id]'}), "('parent', primaryjoin=model.LibraryFolder.table.c.parent_id ==\n model.LibraryFolder.table.c.id, remote_side=[model.LibraryFolder.table.\n c.id])\n", (95650, 95800), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((96031, 96068), 'sqlalchemy.asc', 'asc', (['model.LibraryFolder.table.c.name'], {}), '(model.LibraryFolder.table.c.name)\n', (96034, 96068), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((96564, 96603), 'sqlalchemy.asc', 'asc', (['model.LibraryDataset.table.c._name'], {}), '(model.LibraryDataset.table.c._name)\n', (96567, 96603), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, 
Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((96893, 96932), 'sqlalchemy.asc', 'asc', (['model.LibraryDataset.table.c._name'], {}), '(model.LibraryDataset.table.c._name)\n', (96896, 96932), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((100959, 101173), 'sqlalchemy.orm.backref', 'backref', (['"""parent"""'], {'primaryjoin': '(model.LibraryDatasetDatasetAssociation.table.c.parent_id == model.\n LibraryDatasetDatasetAssociation.table.c.id)', 'remote_side': '[model.LibraryDatasetDatasetAssociation.table.c.id]'}), "('parent', primaryjoin=model.LibraryDatasetDatasetAssociation.table.\n c.parent_id == model.LibraryDatasetDatasetAssociation.table.c.id,\n remote_side=[model.LibraryDatasetDatasetAssociation.table.c.id])\n", (100966, 101173), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((112808, 112851), 'sqlalchemy.asc', 'asc', (['model.WorkflowStep.table.c.order_index'], {}), '(model.WorkflowStep.table.c.order_index)\n', (112811, 112851), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((115405, 115608), 'sqlalchemy.and_', 'and_', (['(model.StoredWorkflow.table.c.id == model.StoredWorkflowTagAssociation.\n table.c.stored_workflow_id)', '(model.StoredWorkflow.table.c.user_id == model.StoredWorkflowTagAssociation\n .table.c.user_id)'], {}), '(model.StoredWorkflow.table.c.id == model.StoredWorkflowTagAssociation.\n table.c.stored_workflow_id, model.StoredWorkflow.table.c.user_id ==\n model.StoredWorkflowTagAssociation.table.c.user_id)\n', (115409, 115608), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), 
((117594, 117644), 'sqlalchemy.orm.backref', 'backref', (['"""workflow_invocation_step"""'], {'uselist': '(False)'}), "('workflow_invocation_step', uselist=False)\n", (117601, 117644), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((125613, 125663), 'sqlalchemy.orm.backref', 'backref', (['"""data_manager_association"""'], {'uselist': '(False)'}), "('data_manager_association', uselist=False)\n", (125620, 125663), False, 'from sqlalchemy.orm import backref, object_session, relation, mapper, class_mapper\n'), ((85400, 85453), 'sqlalchemy.not_', 'not_', (['model.HistoryDatasetAssociation.table.c.deleted'], {}), '(model.HistoryDatasetAssociation.table.c.deleted)\n', (85404, 85453), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((85918, 85981), 'sqlalchemy.not_', 'not_', (['model.HistoryDatasetCollectionAssociation.table.c.deleted'], {}), '(model.HistoryDatasetCollectionAssociation.table.c.deleted)\n', (85922, 85981), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((88466, 88501), 'sqlalchemy.not_', 'not_', (['model.History.table.c.deleted'], {}), '(model.History.table.c.deleted)\n', (88470, 88501), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((91212, 91269), 'sqlalchemy.not_', 'not_', (['(model.Role.table.c.name == model.User.table.c.email)'], {}), '(model.Role.table.c.name == model.User.table.c.email)\n', (91216, 91269), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((94754, 94804), 
'sqlalchemy.not_', 'not_', (['model.LibraryInfoAssociation.table.c.deleted'], {}), '(model.LibraryInfoAssociation.table.c.deleted)\n', (94758, 94804), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((95961, 96002), 'sqlalchemy.not_', 'not_', (['model.LibraryFolder.table.c.deleted'], {}), '(model.LibraryFolder.table.c.deleted)\n', (95965, 96002), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((96822, 96864), 'sqlalchemy.not_', 'not_', (['model.LibraryDataset.table.c.deleted'], {}), '(model.LibraryDataset.table.c.deleted)\n', (96826, 96864), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((97318, 97374), 'sqlalchemy.not_', 'not_', (['model.LibraryFolderInfoAssociation.table.c.deleted'], {}), '(model.LibraryFolderInfoAssociation.table.c.deleted)\n', (97322, 97374), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((98584, 98714), 'sqlalchemy.not_', 'not_', (['(model.LibraryDataset.table.c.library_dataset_dataset_association_id ==\n model.LibraryDatasetDatasetAssociation.table.c.id)'], {}), '(model.LibraryDataset.table.c.library_dataset_dataset_association_id ==\n model.LibraryDatasetDatasetAssociation.table.c.id)\n', (98588, 98714), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((102165, 102229), 'sqlalchemy.not_', 'not_', (['model.LibraryDatasetDatasetInfoAssociation.table.c.deleted'], {}), 
'(model.LibraryDatasetDatasetInfoAssociation.table.c.deleted)\n', (102169, 102229), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((86371, 86424), 'sqlalchemy.not_', 'not_', (['model.HistoryDatasetAssociation.table.c.deleted'], {}), '(model.HistoryDatasetAssociation.table.c.deleted)\n', (86375, 86424), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n'), ((86887, 86950), 'sqlalchemy.not_', 'not_', (['model.HistoryDatasetCollectionAssociation.table.c.deleted'], {}), '(model.HistoryDatasetCollectionAssociation.table.c.deleted)\n', (86891, 86950), False, 'from sqlalchemy import and_, asc, Boolean, Column, DateTime, desc, ForeignKey, Integer, MetaData, not_, Numeric, select, String, Table, TEXT, Unicode, UniqueConstraint\n')]
|
import tensorflow as tf
import rnn.Configurations
from util.Pose2d import Pose2d
class RNNController(object):
    """Drive a trained RNN motion model.

    Feeds 2D target positions into the network one frame at a time,
    carries the recurrent state between calls, and returns predicted
    3D joint positions transformed into world space via an accumulated
    2D root pose.
    """

    def __init__(self, folder):
        """Load configuration, normalization data and checkpoint.

        folder: directory containing the model configuration, the
            normalization data, and the ``train/ckpt`` checkpoint.
        """
        self.config = rnn.Configurations.get_config(folder)
        self.config.load_normal_data(folder)
        # Accumulated root transform of the character over all steps so far.
        self.pose = Pose2d()
        self.model = self.config.model(1, 1)
        self.sess = tf.Session()
        saver = tf.train.Saver()
        self.sess.run(tf.global_variables_initializer())
        saver.restore(self.sess, "%s/train/ckpt" % folder)
        # RNN state is created lazily by the network on the first step().
        self.state = None
        # Previous network output, fed back as an input on the next step.
        self.current_y = [[0] * self.config.y_normal.size()]

    def step(self, target):
        """Advance the network one frame toward *target*.

        target: 2D target position (x, y) in the character's local frame.

        Returns a list of 3D points: the root position followed by the
        joint positions, all expressed in world coordinates.
        """
        target = self.config.x_normal.normalize_l(target)
        m = self.model
        feed_dict = {m.x: [[target]], m.prev_y: self.current_y}
        # `is not None` instead of `!= None` (identity check on the sentinel).
        if self.state is not None:
            feed_dict[m.initial_state] = self.state
        # x : target x, target y => 2
        # y : foot contact=2, root transform(rotation, tx, ty)=3, root_height, joint pos=3*13=39 => 45
        output, self.state, self.current_y = self.sess.run(
            [m.generated, m.final_state, m.final_y], feed_dict)
        output = output[0][0]
        output = self.config.y_normal.de_normalize_l(output)
        # Drop the two foot-contact values.
        output = output[2:]
        # Fold the predicted root transform (rotation, tx, ty) into the pose.
        self.pose = self.pose.transform(output)
        # Root point: horizontal placement comes from the pose, y is root height.
        points = [[0, output[3], 0]]
        output = output[4:]
        # Remaining values are flattened (x, y, z) joint positions.
        for i in range(int(len(output) / 3)):
            points.append(output[i * 3:(i + 1) * 3])
        # Map every local point into world space through the root pose.
        return [self.pose.global_point_3d(p) for p in points]
|
[
"util.Pose2d.Pose2d",
"tensorflow.Session",
"tensorflow.train.Saver",
"tensorflow.global_variables_initializer"
] |
[((270, 278), 'util.Pose2d.Pose2d', 'Pose2d', ([], {}), '()\n', (276, 278), False, 'from util.Pose2d import Pose2d\n'), ((353, 365), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (363, 365), True, 'import tensorflow as tf\n'), ((382, 398), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (396, 398), True, 'import tensorflow as tf\n'), ((421, 454), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (452, 454), True, 'import tensorflow as tf\n')]
|
from PyQt5 import QtGui, QtCore, QtWidgets
import sys
from Crazystuff import *
class GO(Window):
    """Main window holding a 600x600 View with one red square outlined in blue."""

    def __init__(self):
        super().__init__()
        view = View(600, 600, parent=self)
        self.setFixedSize(view.width(), view.height())
        square = view.stage.addRect(GetRekt(0, 0, 100, 100))
        square.setBrush(Brush(Color(255, 0, 0)))
        square.setPen(Pen(Color(0, 0, 255)))
        self.show()
class View(GView):
    """Graphics view bound to a freshly created Stage (scene) of the given size."""

    def __init__(self, w, h, parent=None):
        # stage is the scene, for holding object trees
        self.stage = Stage(0, 0, w, h, parent)
        # init QGraphicsView
        super(View, self).__init__(self.stage, parent)
        self.setGeometry(0,0, w, h)
if __name__ == '__main__':
    # Standard Qt bootstrap: create the application, show the main window,
    # then block in the event loop until the window closes.
    app = QtWidgets.QApplication(sys.argv)
    gui = GO()
    sys.exit(app.exec_())
|
[
"PyQt5.QtWidgets.QApplication"
] |
[((739, 771), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (761, 771), False, 'from PyQt5 import QtGui, QtCore, QtWidgets\n')]
|
from zipfile import ZipFile
class P:
    """Packs files into a zip archive."""

    def pack(self, folpat, folsave, name):
        """Write *folpat* into ``<folsave>/<name>.zip``.

        Args:
            folpat: Either an iterable of file paths, or a single directory
                path (str) whose files are archived recursively.
            folsave: Directory in which to create the archive.
            name: Archive name without the ``.zip`` extension.
        """
        import os
        self.save = folsave
        self.fname = (name + ".zip")
        self.pat = os.path.join(self.save, self.fname)
        if isinstance(folpat, str):
            # BUG FIX: a bare string used to be iterated character by
            # character, producing one bogus entry per character. Treat a
            # string argument as a directory and archive its files instead.
            files = []
            for root, _dirs, names in os.walk(folpat):
                files.extend(os.path.join(root, n) for n in names)
        else:
            files = list(folpat)
        with ZipFile(self.pat, 'w') as zf:
            for data in files:
                zf.write(data)
        print("All files have been packed successfully")
# Example invocation with machine-specific Windows paths; adjust before running.
unzip = P()
a = "C:/Users/advik.ADVIK-PC/Downloads"
# BUG FIX: "C:\S" contained the invalid escape sequence "\S"
# (a SyntaxWarning on modern Python); a raw string keeps the same value.
b = r"C:\S"
c = "hope"
unzip.pack(a, b, c)
|
[
"zipfile.ZipFile"
] |
[((232, 254), 'zipfile.ZipFile', 'ZipFile', (['self.pat', '"""w"""'], {}), "(self.pat, 'w')\n", (239, 254), False, 'from zipfile import ZipFile\n')]
|
from pandac.PandaModules import *
from direct.interval.IntervalGlobal import Sequence, Parallel, Func, Wait, SoundInterval
from pirates.effects.VolcanoSmoke import VolcanoSmoke
from pirates.effects.EruptionSmoke import EruptionSmoke
from pirates.effects.LavaEruption import LavaEruption
from pirates.effects.LavaSplats import LavaSplats
from pirates.effects.CameraShaker import CameraShaker
from pirates.audio import SoundGlobals
from pirates.audio.SoundGlobals import loadSfx
import random
class VolcanoEffect(NodePath):
    """Ambient volcano smoke plus an occasional full lava eruption.

    The eruption spawns lava, splats, extra smoke and a camera shake; the
    smoke and shake are gated on the user's special-effects quality setting.
    """
    # Lazily-loaded eruption sound; loaded once on first construction.
    eruptionSfx = None

    def __init__(self):
        NodePath.__init__(self, 'VolcanoEffect')
        self.smoke = VolcanoSmoke()
        self.smoke.setEffectScale(1.0)
        self.smoke.reparentTo(self)
        self.eruptionSmoke = None
        self.eruption = None
        self.splats = None
        self.cameraShaker = None
        if not self.eruptionSfx:
            self.eruptionSfx = (
                loadSfx(SoundGlobals.SFX_FX_VOLCANO_ERUPT),)
        # 'pe' attribute on base marks the in-editor environment, which
        # always gets the full effect regardless of quality settings.
        self.inEditor = hasattr(base, 'pe')
        return

    def startLavaEruption(self):
        """Begin a 10-20 second eruption and schedule its automatic stop."""
        self.stopLavaEruption()
        duration = random.randint(10, 20)
        base.playSfx(self.eruptionSfx[0], node=self, cutoff=5000)
        self.eruption = LavaEruption()
        self.eruption.duration = duration
        self.eruption.setEffectScale(1.0)
        self.eruption.reparentTo(self)
        self.eruption.play()
        if self.inEditor or base.options.getSpecialEffectsSetting() >= base.options.SpecialEffectsMedium:
            self.cameraShaker = CameraShaker()
            self.cameraShaker.reparentTo(self)
            self.cameraShaker.shakeSpeed = 0.05
            self.cameraShaker.shakePower = 0.2
            self.cameraShaker.scalePower = True
            self.cameraShaker.numShakes = duration * 10
            self.cameraShaker.play(2200.0)
        if self.inEditor or base.options.getSpecialEffectsSetting() >= base.options.SpecialEffectsHigh:
            self.eruptionSmoke = EruptionSmoke()
            self.eruptionSmoke.duration = duration
            self.eruptionSmoke.reparentTo(self)
            self.eruptionSmoke.setEffectScale(1.0)
            self.eruptionSmoke.play()
        self.splats = LavaSplats()
        self.splats.duration = duration
        self.splats.setEffectScale(1.0)
        self.splats.reparentTo(self)
        self.splats.play()
        taskMgr.doMethodLater(duration + 10.0, self.stopLavaEruption, 'stopLavaEruptionTask')

    def stopLavaEruption(self, task=None):
        """Tear down all eruption sub-effects; safe to call when idle."""
        if self.eruption:
            self.eruption.destroy()
            self.eruption = None
        if self.splats:
            self.splats.destroy()
            self.splats = None
        if self.eruptionSmoke:
            self.eruptionSmoke.destroy()
            self.eruptionSmoke = None
        return

    def enable(self):
        self.smoke.enableEffect()

    def disable(self):
        self.smoke.disableEffect()

    def destroy(self):
        self.disable()
        # BUG FIX: the task name was misspelled ('stopLavataEruptionTask'),
        # so the pending stop task scheduled in startLavaEruption was never
        # cancelled on destroy.
        taskMgr.remove('stopLavaEruptionTask')
        if self.smoke:
            self.smoke.cleanUpEffect()
            self.smoke = None
        self.stopLavaEruption()
        return
|
[
"pirates.audio.SoundGlobals.loadSfx",
"random.randint",
"pirates.effects.LavaEruption.LavaEruption",
"pirates.effects.CameraShaker.CameraShaker",
"pirates.effects.EruptionSmoke.EruptionSmoke",
"pirates.effects.LavaSplats.LavaSplats",
"pirates.effects.VolcanoSmoke.VolcanoSmoke"
] |
[((641, 655), 'pirates.effects.VolcanoSmoke.VolcanoSmoke', 'VolcanoSmoke', ([], {}), '()\n', (653, 655), False, 'from pirates.effects.VolcanoSmoke import VolcanoSmoke\n'), ((1122, 1144), 'random.randint', 'random.randint', (['(10)', '(20)'], {}), '(10, 20)\n', (1136, 1144), False, 'import random\n'), ((1235, 1249), 'pirates.effects.LavaEruption.LavaEruption', 'LavaEruption', ([], {}), '()\n', (1247, 1249), False, 'from pirates.effects.LavaEruption import LavaEruption\n'), ((1540, 1554), 'pirates.effects.CameraShaker.CameraShaker', 'CameraShaker', ([], {}), '()\n', (1552, 1554), False, 'from pirates.effects.CameraShaker import CameraShaker\n'), ((1981, 1996), 'pirates.effects.EruptionSmoke.EruptionSmoke', 'EruptionSmoke', ([], {}), '()\n', (1994, 1996), False, 'from pirates.effects.EruptionSmoke import EruptionSmoke\n'), ((2211, 2223), 'pirates.effects.LavaSplats.LavaSplats', 'LavaSplats', ([], {}), '()\n', (2221, 2223), False, 'from pirates.effects.LavaSplats import LavaSplats\n'), ((933, 975), 'pirates.audio.SoundGlobals.loadSfx', 'loadSfx', (['SoundGlobals.SFX_FX_VOLCANO_ERUPT'], {}), '(SoundGlobals.SFX_FX_VOLCANO_ERUPT)\n', (940, 975), False, 'from pirates.audio.SoundGlobals import loadSfx\n')]
|
# Copyright 2020 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A tool to test sigma rules.
This tool can be used to verify your rules before running an analyzer.
It also does not require you to have a full blown Timesketch instance.
Default this tool will show only the rules that cause problems.
Example way of running the tool:
$ PYTHONPATH=. python3 test_tools/sigma_verify_rules.py --config_file
data/sigma_config.yaml --debug data/sigma/rules/windows/
--move data/sigma/rules/problematic/
"""
import logging
import os
import argparse
import sys
import pandas as pd
from timesketch.lib import sigma_util # pylint: disable=no-name-in-module
logger = logging.getLogger("timesketch.test_tool.sigma-verify")
logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))
def get_sigma_rule_status(rule_status_path="./data/sigma_rule_status.csv"):
    """Get a dataframe of sigma rules status.

    This includes filenames, paths, ids.

    Args:
        rule_status_path(str): Path to a status file.
            The default value is './data/sigma_rule_status.csv'

    Returns:
        Pandas dataframe with rule status

    Raises:
        ValueError: Sigma rule status file is not readable.
    """
    # Covers both None and the empty string; the original code additionally
    # re-checked the path right after assigning this non-empty default,
    # which was unreachable dead code.
    if not rule_status_path:
        rule_status_path = "./data/sigma_rule_status.csv"
    if not os.path.isfile(rule_status_path):
        raise ValueError(
            "Unable to open file: [{0:s}], it does not exist.".format(
                rule_status_path
            )
        )
    if not os.access(rule_status_path, os.R_OK):
        raise ValueError(
            "Unable to open file: [{0:s}], cannot open it for "
            "read, please check permissions.".format(rule_status_path)
        )
    return pd.read_csv(rule_status_path)
def run_verifier(rules_path, config_file_path, rule_status_path=None):
    """Run an sigma parsing test on a dir and returns results from the run.

    Args:
        rules_path (str): Path to the Sigma rules.
        config_file_path (str): Path to a config file with Sigma mapping data.
        rule_status_path (str): Optional path to a status file.
            The default value is none.

    Raises:
        IOError: if the path to either test or analyzer file does not exist
            or if the analyzer module or class cannot be loaded.

    Returns:
        a tuple of lists:
        - sigma_verified_rules with rules that can be added
        - sigma_rules_with_problems with rules that should not be added
    """
    if not config_file_path:
        raise IOError("No config_file_path given")
    if not os.path.isdir(rules_path):
        raise IOError("Rules not found at path: {0:s}".format(rules_path))
    if not os.path.isfile(config_file_path):
        raise IOError(
            "Config file path not found at path: {0:s}".format(
                config_file_path
            )
        )
    sigma_config = sigma_util.get_sigma_config_file(
        config_file=config_file_path
    )
    return_verified_rules = []
    return_rules_with_problems = []
    ignore = get_sigma_rule_status(rule_status_path)
    ignore_list = list(ignore["path"].unique())
    for dirpath, dirnames, files in os.walk(rules_path):
        # BUG FIX: the original matched "deprecated" case-insensitively but
        # then removed the lowercase literal, raising ValueError for e.g.
        # a directory named "Deprecated". Filter in place instead.
        dirnames[:] = [d for d in dirnames if d.lower() != "deprecated"]
        for rule_filename in files:
            if not rule_filename.lower().endswith(".yml"):
                continue
            rule_file_path = os.path.join(dirpath, rule_filename)
            # if a sub dir is found, do not try to parse it.
            if os.path.isdir(rule_file_path):
                continue
            if any(x in rule_file_path for x in ignore_list):
                # Rules black-listed via the status file are reported as
                # problematic without being parsed.
                return_rules_with_problems.append(rule_file_path)
                continue
            try:
                parsed_rule = sigma_util.get_sigma_rule(
                    rule_file_path, sigma_config
                )
                print(parsed_rule)
            # This except is to keep the unknown exceptions
            # this function is made to catch them and document
            # them the broad exception is needed
            except Exception:  # pylint: disable=broad-except
                logger.debug("Rule parsing error", exc_info=True)
                return_rules_with_problems.append(rule_file_path)
                # BUG FIX: the original fell through to the ``parsed_rule``
                # check below with a stale value from a prior iteration
                # (listing a broken rule as verified), or raised NameError
                # when the very first rule failed to parse.
                continue
            if parsed_rule:
                return_verified_rules.append(rule_file_path)
            else:
                return_rules_with_problems.append(rule_file_path)
    return return_verified_rules, return_rules_with_problems
def move_problematic_rule(filepath, move_to_path, reason=None):
    """Moves a problematic rule to a subfolder so it is not used again

    Args:
        filepath: path to the sigma rule that caused problems
        move_to_path: path to move the problematic rules to
        reason: optional reason why file is moved
    """
    logging.info(
        "Moving the rule: {0:s} to {1:s}".format(filepath, move_to_path)
    )
    try:
        os.makedirs(move_to_path, exist_ok=True)
        # Append the file name and reason to a debug log in the target folder.
        with open(os.path.join(move_to_path, "debug.log"), "a") as log_file:
            log_file.write(f"{filepath}\n{reason}\n\n")
        base_path = os.path.basename(filepath)
        logging.info(
            "Moving the rule: {0:s} to {1:s}".format(
                filepath, f"{move_to_path}{base_path}"
            )
        )
        os.rename(filepath, os.path.join(move_to_path, base_path))
    except OSError:
        logger.error("OS Error - rule not moved", exc_info=True)
if __name__ == "__main__":
description = (
"Mock an sigma parser run. This tool is intended for developers "
"of sigma rules as well as Timesketch server admins. "
"The tool can also be used for automatic testing to make sure the "
"rules are still working as intended."
)
epilog = "Remember to feed the tool with proper rule data."
arguments = argparse.ArgumentParser(
description=description, allow_abbrev=True
)
arguments.add_argument(
"--config_file",
"--file",
dest="config_file_path",
action="store",
default="",
type=str,
metavar="PATH_TO_TEST_FILE",
help=("Path to the file containing the config data to feed sigma "),
)
arguments.add_argument(
"--rule_status_file",
dest="rule_status_path",
action="store",
default="",
type=str,
metavar="PATH_TO_STATUS_FILE",
help=("Path to the file containing the rule status"),
)
arguments.add_argument(
"rules_path",
action="store",
default="",
type=str,
metavar="PATH_TO_RULES",
help="Path to the rules to test.",
)
arguments.add_argument(
"--debug", action="store_true", help="print debug messages "
)
arguments.add_argument(
"--info", action="store_true", help="print info messages "
)
arguments.add_argument(
"--move",
dest="move_to_path",
action="store",
default="",
type=str,
help=("Move problematic rules to this path"),
)
try:
options = arguments.parse_args()
except UnicodeEncodeError:
print(arguments.format_help())
sys.exit(1)
if options.debug:
logger.setLevel(logging.DEBUG)
if options.info:
logger.setLevel(logging.INFO)
if not os.path.isfile(options.config_file_path):
print("Config file not found.")
sys.exit(1)
if not os.path.isdir(options.rules_path):
print(
"The path to the rules does not exist ({0:s})".format(
options.rules_path
)
)
sys.exit(1)
if len(options.rule_status_path) > 0:
if not os.path.isfile(options.rule_status_path):
print("rule status file not found.")
sys.exit(1)
sigma_verified_rules, sigma_rules_with_problems = run_verifier(
rules_path=options.rules_path,
config_file_path=options.config_file_path,
rule_status_path=options.rule_status_path,
)
if len(sigma_rules_with_problems) > 0:
print("### Do NOT import below.###")
for badrule in sigma_rules_with_problems:
if options.move_to_path:
move_problematic_rule(
badrule,
options.move_to_path,
"sigma_verify_rules.py found an issue",
)
print(badrule)
if len(sigma_verified_rules) > 0:
logging.info("### You can import the following rules ###")
for goodrule in sigma_verified_rules:
logging.info(goodrule)
|
[
"timesketch.lib.sigma_util.get_sigma_rule",
"argparse.ArgumentParser",
"os.makedirs",
"os.path.basename",
"pandas.read_csv",
"os.path.isdir",
"os.walk",
"timesketch.lib.sigma_util.get_sigma_config_file",
"os.environ.get",
"logging.info",
"os.path.isfile",
"sys.exit",
"os.path.join",
"os.access",
"logging.getLogger"
] |
[((1195, 1249), 'logging.getLogger', 'logging.getLogger', (['"""timesketch.test_tool.sigma-verify"""'], {}), "('timesketch.test_tool.sigma-verify')\n", (1212, 1249), False, 'import logging\n'), ((2403, 2432), 'pandas.read_csv', 'pd.read_csv', (['rule_status_path'], {}), '(rule_status_path)\n', (2414, 2432), True, 'import pandas as pd\n'), ((3576, 3638), 'timesketch.lib.sigma_util.get_sigma_config_file', 'sigma_util.get_sigma_config_file', ([], {'config_file': 'config_file_path'}), '(config_file=config_file_path)\n', (3608, 3638), False, 'from timesketch.lib import sigma_util\n'), ((3860, 3879), 'os.walk', 'os.walk', (['rules_path'], {}), '(rules_path)\n', (3867, 3879), False, 'import os\n'), ((6845, 6912), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'description', 'allow_abbrev': '(True)'}), '(description=description, allow_abbrev=True)\n', (6868, 6912), False, 'import argparse\n'), ((1276, 1310), 'os.environ.get', 'os.environ.get', (['"""LOGLEVEL"""', '"""INFO"""'], {}), "('LOGLEVEL', 'INFO')\n", (1290, 1310), False, 'import os\n'), ((1982, 2014), 'os.path.isfile', 'os.path.isfile', (['rule_status_path'], {}), '(rule_status_path)\n', (1996, 2014), False, 'import os\n'), ((2182, 2218), 'os.access', 'os.access', (['rule_status_path', 'os.R_OK'], {}), '(rule_status_path, os.R_OK)\n', (2191, 2218), False, 'import os\n'), ((3265, 3290), 'os.path.isdir', 'os.path.isdir', (['rules_path'], {}), '(rules_path)\n', (3278, 3290), False, 'import os\n'), ((3378, 3410), 'os.path.isfile', 'os.path.isfile', (['config_file_path'], {}), '(config_file_path)\n', (3392, 3410), False, 'import os\n'), ((5883, 5923), 'os.makedirs', 'os.makedirs', (['move_to_path'], {'exist_ok': '(True)'}), '(move_to_path, exist_ok=True)\n', (5894, 5923), False, 'import os\n'), ((5945, 5984), 'os.path.join', 'os.path.join', (['move_to_path', '"""debug.log"""'], {}), "(move_to_path, 'debug.log')\n", (5957, 5984), False, 'import os\n'), ((6115, 6141), 'os.path.basename', 
'os.path.basename', (['filepath'], {}), '(filepath)\n', (6131, 6141), False, 'import os\n'), ((8342, 8382), 'os.path.isfile', 'os.path.isfile', (['options.config_file_path'], {}), '(options.config_file_path)\n', (8356, 8382), False, 'import os\n'), ((8432, 8443), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8440, 8443), False, 'import sys\n'), ((8456, 8489), 'os.path.isdir', 'os.path.isdir', (['options.rules_path'], {}), '(options.rules_path)\n', (8469, 8489), False, 'import os\n'), ((8640, 8651), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8648, 8651), False, 'import sys\n'), ((9479, 9537), 'logging.info', 'logging.info', (['"""### You can import the following rules ###"""'], {}), "('### You can import the following rules ###')\n", (9491, 9537), False, 'import logging\n'), ((6325, 6362), 'os.path.join', 'os.path.join', (['move_to_path', 'base_path'], {}), '(move_to_path, base_path)\n', (6337, 6362), False, 'import os\n'), ((8196, 8207), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8204, 8207), False, 'import sys\n'), ((8710, 8750), 'os.path.isfile', 'os.path.isfile', (['options.rule_status_path'], {}), '(options.rule_status_path)\n', (8724, 8750), False, 'import os\n'), ((8813, 8824), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8821, 8824), False, 'import sys\n'), ((9596, 9618), 'logging.info', 'logging.info', (['goodrule'], {}), '(goodrule)\n', (9608, 9618), False, 'import logging\n'), ((4273, 4309), 'os.path.join', 'os.path.join', (['dirpath', 'rule_filename'], {}), '(dirpath, rule_filename)\n', (4285, 4309), False, 'import os\n'), ((4171, 4207), 'os.path.join', 'os.path.join', (['dirpath', 'rule_filename'], {}), '(dirpath, rule_filename)\n', (4183, 4207), False, 'import os\n'), ((4658, 4713), 'timesketch.lib.sigma_util.get_sigma_rule', 'sigma_util.get_sigma_rule', (['rule_file_path', 'sigma_config'], {}), '(rule_file_path, sigma_config)\n', (4683, 4713), False, 'from timesketch.lib import sigma_util\n')]
|
import sys
sys.path.append('../')
sys.path.append('/opt/nvidia/deepstream/deepstream/lib')
from time import sleep
import time
import numpy as np
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstVideo', '1.0')
from gi.repository import GObject, Gst, GstVideo
from common.FPS import GETFPS
import pyds
from gstutils import get_num_channels, get_np_dtype
from my_utils import Segmentor
# Class ids emitted by the detectnet_v2 primary detector.
PGIE_CLASS_ID_VEHICLE = 0
PGIE_CLASS_ID_BICYCLE = 1
PGIE_CLASS_ID_PERSON = 2
PGIE_CLASS_ID_ROADSIGN = 3
# Module-level segmentation helper shared by the appsink callback below;
# sized for 720x1280 RGB frames.
segmentor = Segmentor((720, 1280, 3), network_name='fcn-resnet18-cityscapes-1024x512')
def gst_to_np(sample):
    """Copy a Gst.Sample's buffer contents into a numpy array.

    Returns:
        (array, pts): the frame as a squeezed numpy array and the buffer's
        presentation timestamp (nanoseconds).
    """
    buf = sample.get_buffer()
    print(f'from appsink ------- pts: {buf.pts / 1e9}')
    structure = sample.get_caps().get_structure(0)
    video_format = GstVideo.VideoFormat.from_string(
        structure.get_value('format'))
    width = structure.get_value('width')
    height = structure.get_value('height')
    channels = get_num_channels(video_format)
    size = buf.get_size()
    # Use (H, W, C) only when the buffer size matches exactly; otherwise
    # fall back to a flat 1-D view of the raw bytes.
    shape = (height, width, channels) if height * width * channels == size else size
    frame = np.ndarray(shape=shape,
                       buffer=buf.extract_dup(0, size),
                       dtype=get_np_dtype(video_format))
    return np.squeeze(frame), buf.pts
def new_buffer(sink, data):
    """appsink "new-sample" callback: pull one frame and run segmentation."""
    started = time.time()
    frame, pts = gst_to_np(sink.emit("pull-sample"))
    # The pts doubles as a unique per-frame identifier for the segmentor.
    segmentor.do_segmentation(frame, str(pts))
    print(f'--------- segmentation done: {time.time() - started} ----------')
    return Gst.FlowReturn.OK
class Pipeline:
    """File-based DeepStream pipeline.

    Topology: filesrc -> h264parse -> nvv4l2decoder -> nvstreammux ->
    nvinfer -> nvvideoconvert -> capsfilter(RGBA) -> appsink, where the
    appsink hands raw RGBA frames to the module-level ``new_buffer``
    callback for segmentation.
    """

    def __init__(self,
                 input_file_path,
                 model_config_path='./model/config_infer_primary_detectnet_v2.txt',
                 labels_path='./model/detectnet_v2_labels.txt',
                 output_file_path='./out.mp4'):
        """Build and link all pipeline elements for *input_file_path*.

        Args:
            input_file_path: path to an elementary h264 stream.
            model_config_path: nvinfer config file for the primary detector.
            labels_path: detector label file (stored but unused here).
            output_file_path: output path (stored but unused here).
        """
        self.model_config_path = model_config_path
        self.labels_path = labels_path
        self.output_file_path = output_file_path
        self.width = 1280
        self.height = 720
        GObject.threads_init()
        Gst.init(None)
        self.pipeline = Gst.Pipeline()
        if not self.pipeline:
            sys.stderr.write(" Unable to create Pipeline \n")
        self.source, self.h264parser, self.decoder = self._create_source_elements(input_file_path)
        self.streammux, self.pgie = self._create_middle_elements()
        self.nvvidconv, self.capsfilter, self.sink = self._create_sink_elements()
        # Link the elements
        print("Linking elements in the Pipeline \n")
        self._link()
        # osdsinkpad = self.nvosd.get_static_pad("sink")
        # if not osdsinkpad:
        #     sys.stderr.write(" Unable to get sink pad of nvosd \n")
        #
        # osdsinkpad.add_probe(Gst.PadProbeType.BUFFER, self.osd_sink_pad_buffer_probe, 0)
        self.loop = GObject.MainLoop()
        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        # NOTE(review): only "message::eos" is connected, so the WARNING and
        # ERROR branches in _bus_call are never reached via this connection.
        self.bus.connect("message::eos", self._bus_call, self.loop)

    def start(self):
        """Set the pipeline to PLAYING and block in the GLib main loop."""
        # start play back and listen to events
        print("Starting pipeline \n")
        self.pipeline.set_state(Gst.State.PLAYING)
        self.loop.run()

    def _create_source_elements(self, file_path):
        """Create, configure and add filesrc -> h264parse -> nvv4l2decoder."""
        # Source element for reading from the file
        source = Gst.ElementFactory.make("filesrc", "file-source")
        if not source:
            sys.stderr.write(" Unable to create Source \n")
        # Since the data format in the input file is elementary h264 stream,
        # we need a h264parser
        h264parser = Gst.ElementFactory.make("h264parse", "h264-parser")
        if not h264parser:
            sys.stderr.write(" Unable to create h264 parser \n")
        # Use nvdec_h264 for hardware accelerated decode on GPU
        decoder = Gst.ElementFactory.make("nvv4l2decoder", "nvv4l2-decoder")
        if not decoder:
            sys.stderr.write(" Unable to create Nvv4l2 Decoder \n")
        source.set_property('location', file_path)
        self.pipeline.add(source)
        self.pipeline.add(h264parser)
        self.pipeline.add(decoder)
        return source, h264parser, decoder

    def _create_middle_elements(self):
        """Create and configure nvstreammux and the primary nvinfer element."""
        streammux = Gst.ElementFactory.make("nvstreammux", "Stream-muxer")
        if not streammux:
            sys.stderr.write(" Unable to create NvStreamMux \n")
        # Use nvinfer to run inferencing on decoder's output,
        # behaviour of inferencing is set through config file
        pgie = Gst.ElementFactory.make("nvinfer", "primary-inference")
        if not pgie:
            sys.stderr.write(" Unable to create pgie \n")
        # # Use convertor to convert from NV12 to RGBA as required by nvosd
        # nvvidconv = Gst.ElementFactory.make("nvvideoconvert", "convertor")
        # if not nvvidconv:
        #     sys.stderr.write(" Unable to create nvvidconv \n")
        # Create OSD to draw on the converted RGBA buffer
        # nvosd = Gst.ElementFactory.make("nvdsosd", "onscreendisplay")
        # if not nvosd:
        #     sys.stderr.write(" Unable to create nvosd \n")
        #
        # nvosd.set_property('display-clock', 1)  # here: https://docs.nvidia.com/metropolis/deepstream/dev-guide/text/DS_plugin_gst-nvdsosd.html
        streammux.set_property('width', self.width)
        streammux.set_property('height', self.height)
        streammux.set_property('batch-size', 1)
        streammux.set_property('batched-push-timeout', 4000000)
        pgie.set_property('config-file-path', self.model_config_path)
        self.pipeline.add(streammux)
        self.pipeline.add(pgie)
        # self.pipeline.add(nvvidconv)
        # self.pipeline.add(nvosd)
        return streammux, pgie

    def _create_sink_elements(self):
        """Create nvvideoconvert -> RGBA capsfilter -> appsink wired to new_buffer."""
        nvvidconv = Gst.ElementFactory.make("nvvideoconvert", "convertor appsink")
        if not nvvidconv:
            sys.stderr.write(" Unable to create nvvidconv2 \n")
        capsfilter = Gst.ElementFactory.make("capsfilter", "capsfilter")
        if not capsfilter:
            sys.stderr.write(" Unable to create capsfilter \n")
        caps = Gst.Caps.from_string("video/x-raw, format=RGBA")
        capsfilter.set_property("caps", caps)
        sink = Gst.ElementFactory.make("appsink", "sink")
        if not sink:
            sys.stderr.write(" Unable to create appsink \n")
        # emit-signals makes the appsink fire "new-sample" for every frame.
        sink.set_property("emit-signals", True)
        caps = Gst.caps_from_string("video/x-raw, format=RGBA")
        sink.set_property("caps", caps)
        # sink.set_property("drop", True)
        # sink.set_property("max_buffers", 3)
        # sink.set_property("sync", False)
        sink.set_property("wait-on-eos", False)
        sink.connect("new-sample", new_buffer, sink)
        self.pipeline.add(nvvidconv)
        self.pipeline.add(capsfilter)
        self.pipeline.add(sink)
        return nvvidconv, capsfilter, sink

    def _link(self):
        """Link all elements; streammux input goes through a requested sink_0 pad."""
        self.source.link(self.h264parser)
        self.h264parser.link(self.decoder)
        sinkpad = self.streammux.get_request_pad("sink_0")
        if not sinkpad:
            sys.stderr.write(" Unable to get the sink pad of streammux \n")
        srcpad = self.decoder.get_static_pad("src")
        if not srcpad:
            sys.stderr.write(" Unable to get source pad of decoder \n")
        srcpad.link(sinkpad)
        self.streammux.link(self.pgie)
        self.pgie.link(self.nvvidconv)
        self.nvvidconv.link(self.capsfilter)
        self.capsfilter.link(self.sink)

    @staticmethod
    def _bus_call(bus, message, loop):
        """Bus handler: quit the main loop on EOS, log warnings/errors."""
        print('buss called on {}'.format(message))
        t = message.type
        if t == Gst.MessageType.EOS:
            sys.stdout.write("End-of-stream\n")
            loop.quit()
        elif t == Gst.MessageType.WARNING:
            err, debug = message.parse_warning()
            sys.stderr.write("Warning: %s: %s\n" % (err, debug))
        elif t == Gst.MessageType.ERROR:
            err, debug = message.parse_error()
            sys.stderr.write("Error: %s: %s\n" % (err, debug))
            loop.quit()
        return True

    @staticmethod
    def osd_sink_pad_buffer_probe(pad, info, u_data):
        """Buffer probe for the OSD sink pad: counts detected objects per class
        and overlays a summary text on each frame.

        NOTE(review): currently unused in this class (the probe hookup in
        __init__ is commented out). It references a module-global
        ``fps_stream`` that is not defined in this file, so enabling it
        as-is would raise NameError — confirm where fps_stream comes from.
        """
        obj_counter = {
            PGIE_CLASS_ID_VEHICLE: 0,
            PGIE_CLASS_ID_PERSON: 0,
            PGIE_CLASS_ID_BICYCLE: 0,
            PGIE_CLASS_ID_ROADSIGN: 0
        }
        gst_buffer = info.get_buffer()
        if not gst_buffer:
            print("Unable to get GstBuffer ")
            return
        # Retrieve batch metadata from the gst_buffer
        # Note that pyds.gst_buffer_get_nvds_batch_meta() expects the
        # C address of gst_buffer as input, which is obtained with hash(gst_buffer)
        batch_meta = pyds.gst_buffer_get_nvds_batch_meta(hash(gst_buffer))
        l_frame = batch_meta.frame_meta_list
        while l_frame is not None:
            try:
                # Note that l_frame.data needs a cast to pyds.NvDsFrameMeta
                # The casting is done by pyds.glist_get_nvds_frame_meta()
                # The casting also keeps ownership of the underlying memory
                # in the C code, so the Python garbage collector will leave
                # it alone.
                # frame_meta = pyds.glist_get_nvds_frame_meta(l_frame.data)
                frame_meta = pyds.NvDsFrameMeta.cast(l_frame.data)
            except StopIteration:
                break
            frame_number = frame_meta.frame_num
            num_rects = frame_meta.num_obj_meta
            l_obj = frame_meta.obj_meta_list
            while l_obj is not None:
                try:
                    # Casting l_obj.data to pyds.NvDsObjectMeta
                    # obj_meta=pyds.glist_get_nvds_object_meta(l_obj.data)
                    obj_meta = pyds.NvDsObjectMeta.cast(l_obj.data)
                except StopIteration:
                    break
                obj_counter[obj_meta.class_id] += 1
                obj_meta.rect_params.border_color.set(0.0, 0.0, 1.0, 0.0)
                try:
                    l_obj = l_obj.next
                except StopIteration:
                    break
            # Acquiring a display meta object. The memory ownership remains in
            # the C code so downstream plugins can still access it. Otherwise
            # the garbage collector will claim it when this probe function exits.
            display_meta = pyds.nvds_acquire_display_meta_from_pool(batch_meta)
            display_meta.num_labels = 1
            py_nvosd_text_params = display_meta.text_params[0]
            # Setting display text to be shown on screen
            # Note that the pyds module allocates a buffer for the string, and the
            # memory will not be claimed by the garbage collector.
            # Reading the display_text field here will return the C address of the
            # allocated string. Use pyds.get_string() to get the string content.
            fps_stream.get_fps()
            py_nvosd_text_params.display_text = "Frame Number={} Number of Objects={} Vehicle_count={} Person_count={}" \
                .format(frame_number, num_rects, obj_counter[PGIE_CLASS_ID_VEHICLE], obj_counter[PGIE_CLASS_ID_PERSON])
            # Now set the offsets where the string should appear
            py_nvosd_text_params.x_offset = 10
            py_nvosd_text_params.y_offset = 12
            # Font , font-color and font-size
            py_nvosd_text_params.font_params.font_name = "Serif"
            py_nvosd_text_params.font_params.font_size = 10
            # set(red, green, blue, alpha); set to White
            py_nvosd_text_params.font_params.font_color.set(1.0, 1.0, 1.0, 1.0)
            # Text background color
            py_nvosd_text_params.set_bg_clr = 1
            # set(red, green, blue, alpha); set to Black
            py_nvosd_text_params.text_bg_clr.set(0.0, 0.0, 0.0, 1.0)
            # Using pyds.get_string() to get display_text as string
            print(pyds.get_string(py_nvosd_text_params.display_text))
            pyds.nvds_add_display_meta_to_frame(frame_meta, display_meta)
            # if WRITE_FRAMES:
            #     n_frame = pyds.get_nvds_buf_surface(hash(gst_buffer), frame_meta.batch_id)
            #     # convert python array into numy array format.
            #     frame_image = np.array(n_frame, copy=True, order='C')
            #     # covert the array into cv2 default color format
            #     frame_image = cv2.cvtColor(frame_image, cv2.COLOR_RGBA2BGRA)
            #     cv2.imwrite("./frame_" + str(frame_number) + ".jpg",
            #                 frame_image)
            #     print('saved to')
            try:
                l_frame = l_frame.next
            except StopIteration:
                break
        return Gst.PadProbeReturn.OK
class PipelineCamera:
    def __init__(self,
                 model_config_path='./model/config_infer_primary_detectnet_v2.txt',
                 labels_path='./model/detectnet_v2_labels.txt',
                 output_file_path='./out.mp4'):
        """Build the live-camera pipeline: nvarguscamerasrc -> convert/caps ->
        tee splitting into an object-detection branch and a segmentation
        branch, with an OSD probe counting detections per frame."""
        self.model_config_path = model_config_path
        self.labels_path = labels_path
        self.output_file_path = output_file_path
        self.width = 1280
        self.height = 720
        GObject.threads_init()
        Gst.init(None)
        self.pipeline = Gst.Pipeline()
        if not self.pipeline:
            sys.stderr.write(" Unable to create Pipeline \n")
        self.source, self.nvvidconv_src, self.caps_nvvidconv_src = self._create_source_elements()
        self.tee, self.queue_od, self.queue_seg = self._create_branching_elements()
        self.streammux, self.pgie, self.nvvidconvosd, self.nvosd = self._create_middle_elements()
        self.nvvidconv, self.capsfilter, self.sink, self.fake_sink = self._create_sink_elements()
        # Link the elements
        print("Linking elements in the Pipeline \n")
        self._link()
        # Connect the tee's two requested src pads to the per-branch queues.
        od_sink_pad = self.queue_od.get_static_pad("sink")
        seg_sink_pad = self.queue_seg.get_static_pad("sink")
        tee_od_pad = self.tee.get_request_pad("src_%u")
        tee_seg_pad = self.tee.get_request_pad("src_%u")
        if not tee_od_pad or not tee_seg_pad:
            sys.stderr.write("Unable to get request pads\n")
        tee_od_pad.link(od_sink_pad)
        tee_seg_pad.link(seg_sink_pad)
        osdsinkpad = self.nvosd.get_static_pad("sink")
        if not osdsinkpad:
            sys.stderr.write(" Unable to get sink pad of nvosd \n")
        osdsinkpad.add_probe(Gst.PadProbeType.BUFFER, self.osd_sink_pad_buffer_probe, 0)
        self.loop = GObject.MainLoop()
        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        # NOTE(review): only "message::eos" is connected here, so warning and
        # error bus messages are not delivered to the handler.
        self.bus.connect("message::eos", self._bus_call, self.loop)
    def start(self):
        """Set the pipeline to PLAYING and block in the GLib main loop."""
        # start play back and listen to events
        print("Starting pipeline \n")
        self.pipeline.set_state(Gst.State.PLAYING)
        self.loop.run()
def _create_source_elements(self):
source = Gst.ElementFactory.make("nvarguscamerasrc", "src-elem")
if not source:
sys.stderr.write(" Unable to create Source \n")
# Converter to scale the image
nvvidconv_src = Gst.ElementFactory.make("nvvideoconvert", "convertor_src")
if not nvvidconv_src:
sys.stderr.write(" Unable to create nvvidconv_src \n")
# Caps for NVMM and resolution scaling
caps_nvvidconv_src = Gst.ElementFactory.make("capsfilter", "nvmm_caps")
if not caps_nvvidconv_src:
sys.stderr.write(" Unable to create capsfilter \n")
source.set_property('bufapi-version', True)
caps_nvvidconv_src.set_property('caps', Gst.Caps.from_string(
'video/x-raw(memory:NVMM), width={}, height={}'.format(self.width, self.height)))
self.pipeline.add(source)
self.pipeline.add(nvvidconv_src)
self.pipeline.add(caps_nvvidconv_src)
return source, nvvidconv_src, caps_nvvidconv_src
def _create_middle_elements(self):
streammux = Gst.ElementFactory.make("nvstreammux", "Stream-muxer")
if not streammux:
sys.stderr.write(" Unable to create NvStreamMux \n")
# Use nvinfer to run inferencing on decoder's output,
# behaviour of inferencing is set through config file
pgie = Gst.ElementFactory.make("nvinfer", "primary-inference")
if not pgie:
sys.stderr.write(" Unable to create pgie \n")
# Use convertor to convert from NV12 to RGBA as required by nvosd
nvvidconvosd = Gst.ElementFactory.make("nvvideoconvert", "convertor")
if not nvvidconvosd:
sys.stderr.write(" Unable to create nvvidconv \n")
# Create OSD to draw on the converted RGBA buffer
nvosd = Gst.ElementFactory.make("nvdsosd", "onscreendisplay")
if not nvosd:
sys.stderr.write(" Unable to create nvosd \n")
nvosd.set_property('display-clock', 1) # here: https://docs.nvidia.com/metropolis/deepstream/dev-guide/text/DS_plugin_gst-nvdsosd.html
streammux.set_property('width', self.width)
streammux.set_property('height', self.height)
streammux.set_property('batch-size', 1)
streammux.set_property('batched-push-timeout', 4000000)
pgie.set_property('config-file-path', self.model_config_path)
self.pipeline.add(streammux)
self.pipeline.add(pgie)
self.pipeline.add(nvvidconvosd)
self.pipeline.add(nvosd)
return streammux, pgie, nvvidconvosd, nvosd
def _create_sink_elements(self):
nvvidconv = Gst.ElementFactory.make("nvvideoconvert", "convertor appsink")
if not nvvidconv:
sys.stderr.write(" Unable to create nvvidconv2 \n")
capsfilter = Gst.ElementFactory.make("capsfilter", "capsfilter")
if not capsfilter:
sys.stderr.write(" Unable to create capsfilter \n")
caps = Gst.Caps.from_string("video/x-raw, format=RGBA")
capsfilter.set_property("caps", caps)
sink = Gst.ElementFactory.make("appsink", "sink")
if not sink:
sys.stderr.write(" Unable to create appsink \n")
sink.set_property("emit-signals", True)
caps = Gst.caps_from_string("video/x-raw, format=RGBA")
sink.set_property("caps", caps)
sink.set_property("drop", True)
sink.set_property("max_buffers", 1)
# sink.set_property("sync", False)
sink.set_property("wait-on-eos", False)
sink.connect("new-sample", new_buffer, sink)
fakesink = Gst.ElementFactory.make("fakesink", "fakesink")
self.pipeline.add(nvvidconv)
self.pipeline.add(capsfilter)
self.pipeline.add(sink)
self.pipeline.add(fakesink)
return nvvidconv, capsfilter, sink, fakesink
def _create_branching_elements(self):
tee = Gst.ElementFactory.make("tee", "tee")
queue_od = Gst.ElementFactory.make("queue", "object detection queue")
queue_seg = Gst.ElementFactory.make("queue", "segmentation queue")
queue_od.set_property("max-size-buffers", 1)
queue_seg.set_property("max-size-buffers", 1)
queue_seg.set_property("leaky", 2)
self.pipeline.add(tee)
self.pipeline.add(queue_od)
self.pipeline.add(queue_seg)
return tee, queue_od, queue_seg
def _link(self):
self.source.link(self.tee)
self.queue_od.link(self.nvvidconv_src)
self.nvvidconv_src.link(self.caps_nvvidconv_src)
sinkpad = self.streammux.get_request_pad("sink_0")
if not sinkpad:
sys.stderr.write(" Unable to get the sink pad of streammux \n")
srcpad = self.caps_nvvidconv_src.get_static_pad("src")
if not srcpad:
sys.stderr.write(" Unable to get source pad of decoder \n")
srcpad.link(sinkpad)
self.streammux.link(self.pgie)
self.pgie.link(self.nvvidconvosd)
self.nvvidconvosd.link(self.nvosd)
self.nvosd.link(self.fake_sink)
# self.pgie.link(self.nvvidconv)
# self.nvvidconv.link(self.capsfilter)
# self.capsfilter.link(self.sink)
self.queue_seg.link(self.nvvidconv)
self.nvvidconv.link(self.capsfilter)
self.capsfilter.link(self.sink)
@staticmethod
def _bus_call(bus, message, loop):
print('buss called on {}'.format(message))
t = message.type
if t == Gst.MessageType.EOS:
sys.stdout.write("End-of-stream\n")
loop.quit()
elif t == Gst.MessageType.WARNING:
err, debug = message.parse_warning()
sys.stderr.write("Warning: %s: %s\n" % (err, debug))
elif t == Gst.MessageType.ERROR:
err, debug = message.parse_error()
sys.stderr.write("Error: %s: %s\n" % (err, debug))
loop.quit()
return True
@staticmethod
def osd_sink_pad_buffer_probe(pad, info, u_data):
obj_counter = {
PGIE_CLASS_ID_VEHICLE: 0,
PGIE_CLASS_ID_PERSON: 0,
PGIE_CLASS_ID_BICYCLE: 0,
PGIE_CLASS_ID_ROADSIGN: 0
}
gst_buffer = info.get_buffer()
if not gst_buffer:
print("Unable to get GstBuffer ")
return
# Retrieve batch metadata from the gst_buffer
# Note that pyds.gst_buffer_get_nvds_batch_meta() expects the
# C address of gst_buffer as input, which is obtained with hash(gst_buffer)
batch_meta = pyds.gst_buffer_get_nvds_batch_meta(hash(gst_buffer))
l_frame = batch_meta.frame_meta_list
while l_frame is not None:
try:
# Note that l_frame.data needs a cast to pyds.NvDsFrameMeta
# The casting is done by pyds.glist_get_nvds_frame_meta()
# The casting also keeps ownership of the underlying memory
# in the C code, so the Python garbage collector will leave
# it alone.
# frame_meta = pyds.glist_get_nvds_frame_meta(l_frame.data)
frame_meta = pyds.NvDsFrameMeta.cast(l_frame.data)
except StopIteration:
break
frame_number = frame_meta.frame_num
num_rects = frame_meta.num_obj_meta
l_obj = frame_meta.obj_meta_list
pts = frame_meta.buf_pts
print(f'from osd ------- pts: {pts / 1e9}')
while l_obj is not None:
try:
# Casting l_obj.data to pyds.NvDsObjectMeta
# obj_meta=pyds.glist_get_nvds_object_meta(l_obj.data)
obj_meta = pyds.NvDsObjectMeta.cast(l_obj.data)
except StopIteration:
break
obj_counter[obj_meta.class_id] += 1
obj_meta.rect_params.border_color.set(0.0, 0.0, 1.0, 0.0)
try:
l_obj = l_obj.next
except StopIteration:
break
# Acquiring a display meta object. The memory ownership remains in
# the C code so downstream plugins can still access it. Otherwise
# the garbage collector will claim it when this probe function exits.
display_meta = pyds.nvds_acquire_display_meta_from_pool(batch_meta)
display_meta.num_labels = 1
py_nvosd_text_params = display_meta.text_params[0]
# Setting display text to be shown on screen
# Note that the pyds module allocates a buffer for the string, and the
# memory will not be claimed by the garbage collector.
# Reading the display_text field here will return the C address of the
# allocated string. Use pyds.get_string() to get the string content.
fps_stream.get_fps()
py_nvosd_text_params.display_text = "Frame Number={} Number of Objects={} Vehicle_count={} Person_count={}" \
.format(frame_number, num_rects, obj_counter[PGIE_CLASS_ID_VEHICLE], obj_counter[PGIE_CLASS_ID_PERSON])
# Now set the offsets where the string should appear
py_nvosd_text_params.x_offset = 10
py_nvosd_text_params.y_offset = 12
# Font , font-color and font-size
py_nvosd_text_params.font_params.font_name = "Serif"
py_nvosd_text_params.font_params.font_size = 10
# set(red, green, blue, alpha); set to White
py_nvosd_text_params.font_params.font_color.set(1.0, 1.0, 1.0, 1.0)
# Text background color
py_nvosd_text_params.set_bg_clr = 1
# set(red, green, blue, alpha); set to Black
py_nvosd_text_params.text_bg_clr.set(0.0, 0.0, 0.0, 1.0)
# Using pyds.get_string() to get display_text as string
print(pyds.get_string(py_nvosd_text_params.display_text))
pyds.nvds_add_display_meta_to_frame(frame_meta, display_meta)
# if WRITE_FRAMES:
# n_frame = pyds.get_nvds_buf_surface(hash(gst_buffer), frame_meta.batch_id)
# # convert python array into numy array format.
# frame_image = np.array(n_frame, copy=True, order='C')
# # covert the array into cv2 default color format
# frame_image = cv2.cvtColor(frame_image, cv2.COLOR_RGBA2BGRA)
# cv2.imwrite("./frame_" + str(frame_number) + ".jpg",
# frame_image)
# print('saved to')
try:
l_frame = l_frame.next
except StopIteration:
break
return Gst.PadProbeReturn.OK
if __name__ == '__main__':
    # Module-level FPS counter for stream 0; read by the OSD buffer probe.
    fps_stream = GETFPS(0)
    # out_file_name = '{}.mp4'.format(sys.argv[1])
    # in_file_path = sys.argv[2]
    out_file_name = 'out.mp4'
    # pipeline = Pipeline(output_file_path=out_file_name)
    # pipeline = Pipeline(in_file_path, output_file_path=out_file_name)
    pipeline = PipelineCamera(output_file_path=out_file_name)
    try:
        # Blocks in the GLib main loop until EOS/error or Ctrl-C.
        pipeline.start()
    except KeyboardInterrupt as e:
        # On Ctrl-C, push EOS into the pipeline so elements can finalize,
        # then wait for the EOS message to reach the bus before teardown.
        # sink.get_static_pad('sink').send_event(Gst.Event.new_eos())
        # pipeline.send_event(Gst.Event.new_eos())
        # pipeline.set_state(Gst.State.NULL)
        pipeline.pipeline.send_event(Gst.Event.new_eos())
        # Wait for EOS to be caught up by the bus
        msg = pipeline.bus.timed_pop_filtered(
            Gst.CLOCK_TIME_NONE,
            Gst.MessageType.EOS
        )
        print(msg)
        # NOTE(review): fixed grace period before teardown — confirm needed.
        sleep(5)
    except Exception as e:
        print(e)
    finally:
        # Always detach pyds callbacks and stop the pipeline.
        pyds.unset_callback_funcs()
        pipeline.pipeline.set_state(Gst.State.NULL)
|
[
"sys.stdout.write",
"pyds.unset_callback_funcs",
"gstutils.get_np_dtype",
"pyds.get_string",
"my_utils.Segmentor",
"pyds.nvds_add_display_meta_to_frame",
"sys.path.append",
"gi.repository.Gst.Caps.from_string",
"gi.repository.GObject.MainLoop",
"gi.repository.GObject.threads_init",
"pyds.nvds_acquire_display_meta_from_pool",
"time.sleep",
"gi.repository.Gst.init",
"numpy.squeeze",
"gi.require_version",
"gi.repository.Gst.ElementFactory.make",
"pyds.NvDsFrameMeta.cast",
"gstutils.get_num_channels",
"gi.repository.Gst.Pipeline",
"gi.repository.Gst.caps_from_string",
"time.time",
"gi.repository.Gst.Event.new_eos",
"common.FPS.GETFPS",
"sys.stderr.write",
"pyds.NvDsObjectMeta.cast"
] |
[((11, 33), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (26, 33), False, 'import sys\n'), ((34, 90), 'sys.path.append', 'sys.path.append', (['"""/opt/nvidia/deepstream/deepstream/lib"""'], {}), "('/opt/nvidia/deepstream/deepstream/lib')\n", (49, 90), False, 'import sys\n'), ((157, 189), 'gi.require_version', 'gi.require_version', (['"""Gst"""', '"""1.0"""'], {}), "('Gst', '1.0')\n", (175, 189), False, 'import gi\n'), ((190, 227), 'gi.require_version', 'gi.require_version', (['"""GstVideo"""', '"""1.0"""'], {}), "('GstVideo', '1.0')\n", (208, 227), False, 'import gi\n'), ((525, 599), 'my_utils.Segmentor', 'Segmentor', (['(720, 1280, 3)'], {'network_name': '"""fcn-resnet18-cityscapes-1024x512"""'}), "((720, 1280, 3), network_name='fcn-resnet18-cityscapes-1024x512')\n", (534, 599), False, 'from my_utils import Segmentor\n'), ((1693, 1723), 'gstutils.get_num_channels', 'get_num_channels', (['video_format'], {}), '(video_format)\n', (1709, 1723), False, 'from gstutils import get_num_channels, get_np_dtype\n'), ((2055, 2066), 'time.time', 'time.time', ([], {}), '()\n', (2064, 2066), False, 'import time\n'), ((26582, 26591), 'common.FPS.GETFPS', 'GETFPS', (['(0)'], {}), '(0)\n', (26588, 26591), False, 'from common.FPS import GETFPS\n'), ((1978, 1995), 'numpy.squeeze', 'np.squeeze', (['array'], {}), '(array)\n', (1988, 1995), True, 'import numpy as np\n'), ((2919, 2941), 'gi.repository.GObject.threads_init', 'GObject.threads_init', ([], {}), '()\n', (2939, 2941), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((2950, 2964), 'gi.repository.Gst.init', 'Gst.init', (['None'], {}), '(None)\n', (2958, 2964), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((2990, 3004), 'gi.repository.Gst.Pipeline', 'Gst.Pipeline', ([], {}), '()\n', (3002, 3004), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((3728, 3746), 'gi.repository.GObject.MainLoop', 'GObject.MainLoop', ([], {}), '()\n', (3744, 3746), False, 'from 
gi.repository import GObject, Gst, GstVideo\n'), ((4195, 4244), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""filesrc"""', '"""file-source"""'], {}), "('filesrc', 'file-source')\n", (4218, 4244), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((4458, 4509), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""h264parse"""', '"""h264-parser"""'], {}), "('h264parse', 'h264-parser')\n", (4481, 4509), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((4685, 4743), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""nvv4l2decoder"""', '"""nvv4l2-decoder"""'], {}), "('nvv4l2decoder', 'nvv4l2-decoder')\n", (4708, 4743), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((5099, 5153), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""nvstreammux"""', '"""Stream-muxer"""'], {}), "('nvstreammux', 'Stream-muxer')\n", (5122, 5153), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((5385, 5440), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""nvinfer"""', '"""primary-inference"""'], {}), "('nvinfer', 'primary-inference')\n", (5408, 5440), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((6662, 6724), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""nvvideoconvert"""', '"""convertor appsink"""'], {}), "('nvvideoconvert', 'convertor appsink')\n", (6685, 6724), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((6837, 6888), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""capsfilter"""', '"""capsfilter"""'], {}), "('capsfilter', 'capsfilter')\n", (6860, 6888), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((6996, 7044), 'gi.repository.Gst.Caps.from_string', 'Gst.Caps.from_string', (['"""video/x-raw, format=RGBA"""'], {}), "('video/x-raw, format=RGBA')\n", (7016, 7044), False, 'from gi.repository import GObject, Gst, 
GstVideo\n'), ((7107, 7149), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""appsink"""', '"""sink"""'], {}), "('appsink', 'sink')\n", (7130, 7149), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((7295, 7343), 'gi.repository.Gst.caps_from_string', 'Gst.caps_from_string', (['"""video/x-raw, format=RGBA"""'], {}), "('video/x-raw, format=RGBA')\n", (7315, 7343), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((14110, 14132), 'gi.repository.GObject.threads_init', 'GObject.threads_init', ([], {}), '()\n', (14130, 14132), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((14141, 14155), 'gi.repository.Gst.init', 'Gst.init', (['None'], {}), '(None)\n', (14149, 14155), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((14181, 14195), 'gi.repository.Gst.Pipeline', 'Gst.Pipeline', ([], {}), '()\n', (14193, 14195), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((15450, 15468), 'gi.repository.GObject.MainLoop', 'GObject.MainLoop', ([], {}), '()\n', (15466, 15468), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((15855, 15910), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""nvarguscamerasrc"""', '"""src-elem"""'], {}), "('nvarguscamerasrc', 'src-elem')\n", (15878, 15910), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((16058, 16116), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""nvvideoconvert"""', '"""convertor_src"""'], {}), "('nvvideoconvert', 'convertor_src')\n", (16081, 16116), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((16291, 16341), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""capsfilter"""', '"""nvmm_caps"""'], {}), "('capsfilter', 'nvmm_caps')\n", (16314, 16341), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((16897, 16951), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""nvstreammux"""', 
'"""Stream-muxer"""'], {}), "('nvstreammux', 'Stream-muxer')\n", (16920, 16951), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((17183, 17238), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""nvinfer"""', '"""primary-inference"""'], {}), "('nvinfer', 'primary-inference')\n", (17206, 17238), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((17416, 17470), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""nvvideoconvert"""', '"""convertor"""'], {}), "('nvvideoconvert', 'convertor')\n", (17439, 17470), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((17638, 17691), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""nvdsosd"""', '"""onscreendisplay"""'], {}), "('nvdsosd', 'onscreendisplay')\n", (17661, 17691), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((18461, 18523), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""nvvideoconvert"""', '"""convertor appsink"""'], {}), "('nvvideoconvert', 'convertor appsink')\n", (18484, 18523), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((18636, 18687), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""capsfilter"""', '"""capsfilter"""'], {}), "('capsfilter', 'capsfilter')\n", (18659, 18687), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((18795, 18843), 'gi.repository.Gst.Caps.from_string', 'Gst.Caps.from_string', (['"""video/x-raw, format=RGBA"""'], {}), "('video/x-raw, format=RGBA')\n", (18815, 18843), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((18906, 18948), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""appsink"""', '"""sink"""'], {}), "('appsink', 'sink')\n", (18929, 18948), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((19094, 19142), 'gi.repository.Gst.caps_from_string', 'Gst.caps_from_string', (['"""video/x-raw, format=RGBA"""'], {}), 
"('video/x-raw, format=RGBA')\n", (19114, 19142), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((19431, 19478), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""fakesink"""', '"""fakesink"""'], {}), "('fakesink', 'fakesink')\n", (19454, 19478), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((19734, 19771), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""tee"""', '"""tee"""'], {}), "('tee', 'tee')\n", (19757, 19771), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((19791, 19849), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""queue"""', '"""object detection queue"""'], {}), "('queue', 'object detection queue')\n", (19814, 19849), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((19870, 19924), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""queue"""', '"""segmentation queue"""'], {}), "('queue', 'segmentation queue')\n", (19893, 19924), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((27468, 27495), 'pyds.unset_callback_funcs', 'pyds.unset_callback_funcs', ([], {}), '()\n', (27493, 27495), False, 'import pyds\n'), ((1938, 1964), 'gstutils.get_np_dtype', 'get_np_dtype', (['video_format'], {}), '(video_format)\n', (1950, 1964), False, 'from gstutils import get_num_channels, get_np_dtype\n'), ((3047, 3096), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create Pipeline \n"""'], {}), "(' Unable to create Pipeline \\n')\n", (3063, 3096), False, 'import sys\n'), ((4280, 4327), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create Source \n"""'], {}), "(' Unable to create Source \\n')\n", (4296, 4327), False, 'import sys\n'), ((4549, 4601), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create h264 parser \n"""'], {}), "(' Unable to create h264 parser \\n')\n", (4565, 4601), False, 'import sys\n'), ((4780, 4835), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable 
to create Nvv4l2 Decoder \n"""'], {}), "(' Unable to create Nvv4l2 Decoder \\n')\n", (4796, 4835), False, 'import sys\n'), ((5192, 5244), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create NvStreamMux \n"""'], {}), "(' Unable to create NvStreamMux \\n')\n", (5208, 5244), False, 'import sys\n'), ((5474, 5519), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create pgie \n"""'], {}), "(' Unable to create pgie \\n')\n", (5490, 5519), False, 'import sys\n'), ((6763, 6814), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create nvvidconv2 \n"""'], {}), "(' Unable to create nvvidconv2 \\n')\n", (6779, 6814), False, 'import sys\n'), ((6928, 6979), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create capsfilter \n"""'], {}), "(' Unable to create capsfilter \\n')\n", (6944, 6979), False, 'import sys\n'), ((7183, 7231), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create appsink \n"""'], {}), "(' Unable to create appsink \\n')\n", (7199, 7231), False, 'import sys\n'), ((7973, 8036), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to get the sink pad of streammux \n"""'], {}), "(' Unable to get the sink pad of streammux \\n')\n", (7989, 8036), False, 'import sys\n'), ((8124, 8183), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to get source pad of decoder \n"""'], {}), "(' Unable to get source pad of decoder \\n')\n", (8140, 8183), False, 'import sys\n'), ((8560, 8595), 'sys.stdout.write', 'sys.stdout.write', (['"""End-of-stream\n"""'], {}), "('End-of-stream\\n')\n", (8576, 8595), False, 'import sys\n'), ((11260, 11312), 'pyds.nvds_acquire_display_meta_from_pool', 'pyds.nvds_acquire_display_meta_from_pool', (['batch_meta'], {}), '(batch_meta)\n', (11300, 11312), False, 'import pyds\n'), ((12894, 12955), 'pyds.nvds_add_display_meta_to_frame', 'pyds.nvds_add_display_meta_to_frame', (['frame_meta', 'display_meta'], {}), '(frame_meta, display_meta)\n', (12929, 12955), False, 'import pyds\n'), ((14238, 14287), 
'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create Pipeline \n"""'], {}), "(' Unable to create Pipeline \\n')\n", (14254, 14287), False, 'import sys\n'), ((15063, 15111), 'sys.stderr.write', 'sys.stderr.write', (['"""Unable to get request pads\n"""'], {}), "('Unable to get request pads\\n')\n", (15079, 15111), False, 'import sys\n'), ((15283, 15338), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to get sink pad of nvosd \n"""'], {}), "(' Unable to get sink pad of nvosd \\n')\n", (15299, 15338), False, 'import sys\n'), ((15946, 15993), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create Source \n"""'], {}), "(' Unable to create Source \\n')\n", (15962, 15993), False, 'import sys\n'), ((16159, 16213), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create nvvidconv_src \n"""'], {}), "(' Unable to create nvvidconv_src \\n')\n", (16175, 16213), False, 'import sys\n'), ((16389, 16440), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create capsfilter \n"""'], {}), "(' Unable to create capsfilter \\n')\n", (16405, 16440), False, 'import sys\n'), ((16990, 17042), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create NvStreamMux \n"""'], {}), "(' Unable to create NvStreamMux \\n')\n", (17006, 17042), False, 'import sys\n'), ((17272, 17317), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create pgie \n"""'], {}), "(' Unable to create pgie \\n')\n", (17288, 17317), False, 'import sys\n'), ((17512, 17562), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create nvvidconv \n"""'], {}), "(' Unable to create nvvidconv \\n')\n", (17528, 17562), False, 'import sys\n'), ((17726, 17772), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create nvosd \n"""'], {}), "(' Unable to create nvosd \\n')\n", (17742, 17772), False, 'import sys\n'), ((18562, 18613), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create nvvidconv2 \n"""'], {}), "(' Unable to create nvvidconv2 \\n')\n", (18578, 
18613), False, 'import sys\n'), ((18727, 18778), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create capsfilter \n"""'], {}), "(' Unable to create capsfilter \\n')\n", (18743, 18778), False, 'import sys\n'), ((18982, 19030), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to create appsink \n"""'], {}), "(' Unable to create appsink \\n')\n", (18998, 19030), False, 'import sys\n'), ((20479, 20542), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to get the sink pad of streammux \n"""'], {}), "(' Unable to get the sink pad of streammux \\n')\n", (20495, 20542), False, 'import sys\n'), ((20641, 20700), 'sys.stderr.write', 'sys.stderr.write', (['""" Unable to get source pad of decoder \n"""'], {}), "(' Unable to get source pad of decoder \\n')\n", (20657, 20700), False, 'import sys\n'), ((21338, 21373), 'sys.stdout.write', 'sys.stdout.write', (['"""End-of-stream\n"""'], {}), "('End-of-stream\\n')\n", (21354, 21373), False, 'import sys\n'), ((24131, 24183), 'pyds.nvds_acquire_display_meta_from_pool', 'pyds.nvds_acquire_display_meta_from_pool', (['batch_meta'], {}), '(batch_meta)\n', (24171, 24183), False, 'import pyds\n'), ((25765, 25826), 'pyds.nvds_add_display_meta_to_frame', 'pyds.nvds_add_display_meta_to_frame', (['frame_meta', 'display_meta'], {}), '(frame_meta, display_meta)\n', (25800, 25826), False, 'import pyds\n'), ((27394, 27402), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (27399, 27402), False, 'from time import sleep\n'), ((8724, 8776), 'sys.stderr.write', 'sys.stderr.write', (["('Warning: %s: %s\\n' % (err, debug))"], {}), "('Warning: %s: %s\\n' % (err, debug))\n", (8740, 8776), False, 'import sys\n'), ((10178, 10215), 'pyds.NvDsFrameMeta.cast', 'pyds.NvDsFrameMeta.cast', (['l_frame.data'], {}), '(l_frame.data)\n', (10201, 10215), False, 'import pyds\n'), ((12830, 12880), 'pyds.get_string', 'pyds.get_string', (['py_nvosd_text_params.display_text'], {}), '(py_nvosd_text_params.display_text)\n', (12845, 12880), False, 'import 
pyds\n'), ((21502, 21554), 'sys.stderr.write', 'sys.stderr.write', (["('Warning: %s: %s\\n' % (err, debug))"], {}), "('Warning: %s: %s\\n' % (err, debug))\n", (21518, 21554), False, 'import sys\n'), ((22956, 22993), 'pyds.NvDsFrameMeta.cast', 'pyds.NvDsFrameMeta.cast', (['l_frame.data'], {}), '(l_frame.data)\n', (22979, 22993), False, 'import pyds\n'), ((25701, 25751), 'pyds.get_string', 'pyds.get_string', (['py_nvosd_text_params.display_text'], {}), '(py_nvosd_text_params.display_text)\n', (25716, 25751), False, 'import pyds\n'), ((27172, 27191), 'gi.repository.Gst.Event.new_eos', 'Gst.Event.new_eos', ([], {}), '()\n', (27189, 27191), False, 'from gi.repository import GObject, Gst, GstVideo\n'), ((2378, 2389), 'time.time', 'time.time', ([], {}), '()\n', (2387, 2389), False, 'import time\n'), ((8877, 8927), 'sys.stderr.write', 'sys.stderr.write', (["('Error: %s: %s\\n' % (err, debug))"], {}), "('Error: %s: %s\\n' % (err, debug))\n", (8893, 8927), False, 'import sys\n'), ((10642, 10678), 'pyds.NvDsObjectMeta.cast', 'pyds.NvDsObjectMeta.cast', (['l_obj.data'], {}), '(l_obj.data)\n', (10666, 10678), False, 'import pyds\n'), ((21655, 21705), 'sys.stderr.write', 'sys.stderr.write', (["('Error: %s: %s\\n' % (err, debug))"], {}), "('Error: %s: %s\\n' % (err, debug))\n", (21671, 21705), False, 'import sys\n'), ((23513, 23549), 'pyds.NvDsObjectMeta.cast', 'pyds.NvDsObjectMeta.cast', (['l_obj.data'], {}), '(l_obj.data)\n', (23537, 23549), False, 'import pyds\n')]
|
import tensorflow as tf
import h5py
from pathlib import Path
from .reader import get_meta, get_tokens, RecordReader
class DatasetBase:
    """Base class for ``tf.data`` pipelines backed by a single HDF5 file.

    Subclasses describe one named dataset inside the file by overriding
    the ``get_*`` hooks (dataset name, record names/keys/types/shapes,
    paddings).  This base class opens the file, builds per-split record
    pipelines, and handles shuffling, padded batching, caching and
    mapping.

    Parameters
    ----------
    dataset_path : path to the HDF5 file (opened read-only).
    splits : split names to expose; their order defines the order of the
        tuples returned by ``load_data``/``get_batched_data``/etc.
    shuffle_splits : splits whose tokens and records are shuffled.
    max_shuffle_len : upper bound on the record shuffle buffer size.
    prefetch_batch : if True, batched splits are prefetched (AUTOTUNE).
    """

    def __init__(self,
                 dataset_path,
                 splits = ['training', 'validation'],
                 shuffle_splits = ['training'],
                 max_shuffle_len = 10000,
                 prefetch_batch = True,
                 ):
        self.dataset_path = dataset_path
        self.splits = splits
        self.shuffle_splits = shuffle_splits
        self.max_shuffle_len = max_shuffle_len
        self.prefetch_batch = prefetch_batch
        self.db_file = h5py.File(self.dataset_path, 'r')
        self.record_tokens = {}  # split -> token list read from the db
        self.datasets = {}       # split -> tf.data.Dataset, built lazily
        self._shuffle_tokens = True
        self._shuffle_db = True

    def __del__(self):
        # getattr guard: __init__ may have raised before db_file was set
        # (e.g. h5py.File failed); there is nothing to close in that case.
        db_file = getattr(self, 'db_file', None)
        if db_file is not None:
            db_file.close()

    def get_metadata(self):
        """Return the metadata stored for this dataset in the db file."""
        return get_meta(self.db_file, self.get_dataset_name())

    # --- hooks a concrete dataset class must override --------------------
    def get_dataset_name(self):
        raise NotImplementedError

    def get_record_names(self):
        raise NotImplementedError

    def get_record_keys(self):
        raise NotImplementedError

    def get_record_types(self):
        raise NotImplementedError

    def get_record_shapes(self):
        raise NotImplementedError

    def get_paddings(self):
        raise NotImplementedError

    def get_padded_shapes(self):
        raise NotImplementedError

    def is_chunked(self):
        """Whether records are stored chunked (passed to get_tokens)."""
        return False

    # --- small internal helpers ------------------------------------------
    def _maybe_tuple(self, vals):
        """Collapse a sequence: () -> None, (x,) -> x, longer -> tuple."""
        vals = tuple(vals)
        if len(vals) > 1:
            return vals
        elif len(vals) == 1:
            return vals[0]
        elif len(vals) == 0:
            return None

    def _ensure_dict(self, *vals):
        """Broadcast each non-dict value to a {split: value} dict."""
        rvals = []
        for val in vals:
            if not isinstance(val, dict):
                rvals.append(dict((k, val) for k in self.splits))
            else:
                rvals.append(val)
        return self._maybe_tuple(rvals)

    def load_records(self, split):
        """Build the raw record dataset for one split."""
        AT = tf.data.experimental.AUTOTUNE
        record_tokens = get_tokens(self.db_file, self.get_dataset_name(), split,
                self.is_chunked())
        self.record_tokens[split] = record_tokens
        ds_tokens = tf.data.Dataset.from_tensor_slices(record_tokens)
        # Full-length buffer: every epoch sees a fresh permutation.
        if (split in self.shuffle_splits) and self._shuffle_tokens:
            ds_tokens = ds_tokens.shuffle(len(record_tokens))
        record_reader = RecordReader(self.db_file,
                self.get_record_names(), self.get_record_keys(),
                self.get_record_types(), self.get_record_shapes())
        db_records = ds_tokens.map(record_reader, AT)
        return db_records

    def load_split(self, split):
        """Hook: build one split's dataset; defaults to load_records."""
        return self.load_records(split)

    def map_data_split(self, split, data):
        """Hook: post-process one split's dataset; identity by default."""
        return data

    def load_data(self):
        """Build (once) and return the datasets for all splits."""
        for split in self.splits:
            if split not in self.datasets:
                self.datasets[split] = self.map_data_split(split,
                        self.load_split(split))
        return self._maybe_tuple(self.datasets[s] for s in self.splits)

    def get_batched_split(self, split, batch_size, drop_remainder=False):
        """Shuffle (if configured) and padded-batch one split."""
        dataset = self.datasets[split]
        if (split in self.shuffle_splits) and self._shuffle_db:
            buffer_size = min(len(self.record_tokens[split]), self.max_shuffle_len)
            dataset = dataset.shuffle(buffer_size)
        # Restrict paddings/shapes to the keys actually present in the
        # element spec (mapping hooks may have dropped some keys).
        all_paddings = self.get_paddings()
        paddings = dict((k, all_paddings[k]) for k in dataset.element_spec)
        all_shapes = self.get_padded_shapes()
        shapes = dict((k, all_shapes[k]) for k in dataset.element_spec)
        return dataset.padded_batch(batch_size, shapes, paddings,
                drop_remainder)

    def get_batched_data(self, batch_size, drop_remainder=False, map_fns=None):
        """Return batched datasets for all splits.

        Each argument may be a single value (broadcast to every split) or
        a {split: value} dict; ``map_fns`` entries may be None to skip
        mapping for that split.
        """
        batch_size, drop_remainder, map_fns \
                = self._ensure_dict(batch_size, drop_remainder, map_fns)
        self.load_data()
        batched_splits = []
        for split in self.splits:
            batched_split = self.get_batched_split(split, batch_size[split],
                    drop_remainder[split])
            map_fn = map_fns[split]
            if map_fn is not None:
                batched_split = batched_split.map(map_fn)
            if self.prefetch_batch:
                AT = tf.data.experimental.AUTOTUNE
                batched_split = batched_split.prefetch(AT)
            batched_splits.append(batched_split)
        return self._maybe_tuple(batched_splits)

    def cache(self, paths=None, clear=False):
        """Cache each split in memory (paths=None) or on disk under paths.

        paths may be a single base path (a per-split subdirectory is
        created) or a {split: path} dict.  With clear=True, existing tf
        cache shards in the target directory are deleted first.
        """
        self.load_data()
        if paths is None:
            for split in self.splits:
                self.datasets[split] = self.datasets[split].cache()
        else:
            if not isinstance(paths, dict):
                paths = dict((k, Path(paths) / k) for k in self.splits)
            else:
                paths = dict((k, Path(paths[k])) for k in self.splits)
            for split in self.splits:
                path = paths[split]
                if path.exists():
                    if clear:
                        # Only ever delete tf cache shards, never user files.
                        for f in path.glob('*'):
                            assert ('.index' in f.name or '.data' in f.name)
                            f.unlink()
                else:
                    path.mkdir(parents=True, exist_ok=True)
                self.datasets[split] = self.datasets[split].cache(str(path / split))
        return self._maybe_tuple(self.datasets[s] for s in self.splits)

    def map(self, functions):
        """Map each split's dataset through its function.

        ``functions`` may be one callable (broadcast to every split) or a
        {split: callable-or-None} dict; a None entry leaves that split's
        dataset unchanged.  (Previously splits with a None function were
        dropped from ``self.datasets`` entirely, so the final per-split
        lookup raised KeyError.)
        """
        self.load_data()
        functions = self._ensure_dict(functions)
        self.datasets = dict(
                (s, d.map(functions[s]) if functions[s] is not None else d)
                for s, d in self.datasets.items())
        return self._maybe_tuple(self.datasets[s] for s in self.splits)
|
[
"h5py.File",
"tensorflow.data.Dataset.from_tensor_slices",
"pathlib.Path"
] |
[((686, 719), 'h5py.File', 'h5py.File', (['self.dataset_path', '"""r"""'], {}), "(self.dataset_path, 'r')\n", (695, 719), False, 'import h5py\n'), ((2352, 2401), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['record_tokens'], {}), '(record_tokens)\n', (2386, 2401), True, 'import tensorflow as tf\n'), ((5219, 5233), 'pathlib.Path', 'Path', (['paths[k]'], {}), '(paths[k])\n', (5223, 5233), False, 'from pathlib import Path\n'), ((5132, 5143), 'pathlib.Path', 'Path', (['paths'], {}), '(paths)\n', (5136, 5143), False, 'from pathlib import Path\n')]
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""operator dsl function: softplus_grad"""
import akg
from akg import tvm
from akg.ops.math.div import div
from akg.utils.format_transform import get_shape
from akg.utils import validation_check as vc_util, kernel_exec as utils
from akg.utils.dsl_create import produce_shapes
# define a scalar, value = 1
SCALAR_ONE = 1
def softplus_grad_compute(input_gradients, input_features):
"""compute for calculations of softplus gradients"""
shape_dy = get_shape(input_gradients)
shape_x = get_shape(input_features)
dtype = input_gradients.dtype
if list(shape_dy) != list(shape_x):
shape_dy, shape_x, shape_max = produce_shapes(shape_dy, shape_x)
input_gradients = akg.lang.cce.broadcast(
input_gradients, shape_max, dtype)
input_features = akg.lang.cce.broadcast(
input_features, shape_max, dtype)
else:
shape_max = shape_dy
if dtype != "float32":
input_gradients = akg.lang.cce.cast_to(input_gradients, "float32")
input_features = akg.lang.cce.cast_to(
input_features, "float16" if utils.product_is_mini() else "float32")
data_exp_tmp = akg.lang.cce.vexp(input_features)
data_add_tmp = akg.lang.cce.vadds(data_exp_tmp, SCALAR_ONE)
data_div_tmp = div(data_exp_tmp, data_add_tmp)
res_tmp = akg.lang.cce.vmul(input_gradients, data_div_tmp)
if dtype == "float16":
res = akg.lang.cce.cast_to(res_tmp, "float16")
elif dtype == "int32" or dtype == "int8" or dtype == "uint8":
data_zero = akg.lang.cce.broadcast(
tvm.const(0, "float16"), shape_max, "float16")
res_min = akg.lang.cce.vmin(res_tmp, data_zero)
res_max = akg.lang.cce.vmax(res_tmp, data_zero)
res_max_int = akg.lang.cce.floor(res_max)
res_min_int = akg.lang.cce.ceil(res_min)
res = akg.lang.cce.vadd(res_max_int, res_min_int)
else:
res = res_tmp
if dtype == "int8":
res = akg.lang.cce.cast_to(res, "int8")
elif dtype == "uint8":
res = akg.lang.cce.cast_to(res, "uint8")
return res
@vc_util.check_input_type(akg.tvm.tensor.Tensor, akg.tvm.tensor.Tensor)
def softplus_grad(data_dy, data_x):
"""
Computes softplus gradients for a softplus operation.
.. math::
dx = \\dfrac{dy * e^x}{1 + e^x}
Notes:
Some value of result will be one less while dtype is "uint8".
Args:
data_dy (tvm.tensor.Tensor): The backpropagated gradients to
the corresponding softplus operation.
data_x (tvm.tensor.Tensor): The input_features passed as input
to the corresponding softplus operation.
source data type support "float16",
"float32", "int32", "int8", "uint8".
Returns:
tvm.tensor.Tensor as gradients of data_x.
"""
shape_dy = get_shape(data_dy)
dtype_dy = data_dy.dtype
shape_x = get_shape(data_x)
dtype_x = data_x.dtype
if dtype_dy != dtype_x:
raise RuntimeError(
"type of dy and type of x must be same, \
while the types are different")
else:
dtype = dtype_dy
vc_util.check_shape(shape_dy)
vc_util.check_shape(shape_x)
vc_util.ops_dtype_check(
dtype,
(vc_util.DtypeForDavinci.FLOAT16,
vc_util.DtypeForDavinci.FLOAT32,
vc_util.DtypeForDavinci.INT32,
vc_util.DtypeForDavinci.INT8,
vc_util.DtypeForDavinci.UINT8
) if not utils.product_is_mini() else \
(vc_util.DtypeForDavinci.FLOAT16,
vc_util.DtypeForDavinci.FLOAT32))
return softplus_grad_compute(data_dy, data_x)
|
[
"akg.utils.validation_check.check_input_type",
"akg.lang.cce.cast_to",
"akg.lang.cce.vadd",
"akg.lang.cce.vmul",
"akg.lang.cce.vmax",
"akg.utils.format_transform.get_shape",
"akg.utils.validation_check.check_shape",
"akg.utils.dsl_create.produce_shapes",
"akg.tvm.const",
"akg.lang.cce.vmin",
"akg.lang.cce.vadds",
"akg.lang.cce.broadcast",
"akg.lang.cce.vexp",
"akg.lang.cce.floor",
"akg.utils.kernel_exec.product_is_mini",
"akg.ops.math.div.div",
"akg.lang.cce.ceil"
] |
[((2676, 2746), 'akg.utils.validation_check.check_input_type', 'vc_util.check_input_type', (['akg.tvm.tensor.Tensor', 'akg.tvm.tensor.Tensor'], {}), '(akg.tvm.tensor.Tensor, akg.tvm.tensor.Tensor)\n', (2700, 2746), True, 'from akg.utils import validation_check as vc_util, kernel_exec as utils\n'), ((1046, 1072), 'akg.utils.format_transform.get_shape', 'get_shape', (['input_gradients'], {}), '(input_gradients)\n', (1055, 1072), False, 'from akg.utils.format_transform import get_shape\n'), ((1087, 1112), 'akg.utils.format_transform.get_shape', 'get_shape', (['input_features'], {}), '(input_features)\n', (1096, 1112), False, 'from akg.utils.format_transform import get_shape\n'), ((1743, 1776), 'akg.lang.cce.vexp', 'akg.lang.cce.vexp', (['input_features'], {}), '(input_features)\n', (1760, 1776), False, 'import akg\n'), ((1796, 1840), 'akg.lang.cce.vadds', 'akg.lang.cce.vadds', (['data_exp_tmp', 'SCALAR_ONE'], {}), '(data_exp_tmp, SCALAR_ONE)\n', (1814, 1840), False, 'import akg\n'), ((1860, 1891), 'akg.ops.math.div.div', 'div', (['data_exp_tmp', 'data_add_tmp'], {}), '(data_exp_tmp, data_add_tmp)\n', (1863, 1891), False, 'from akg.ops.math.div import div\n'), ((1906, 1954), 'akg.lang.cce.vmul', 'akg.lang.cce.vmul', (['input_gradients', 'data_div_tmp'], {}), '(input_gradients, data_div_tmp)\n', (1923, 1954), False, 'import akg\n'), ((3525, 3543), 'akg.utils.format_transform.get_shape', 'get_shape', (['data_dy'], {}), '(data_dy)\n', (3534, 3543), False, 'from akg.utils.format_transform import get_shape\n'), ((3587, 3604), 'akg.utils.format_transform.get_shape', 'get_shape', (['data_x'], {}), '(data_x)\n', (3596, 3604), False, 'from akg.utils.format_transform import get_shape\n'), ((3828, 3857), 'akg.utils.validation_check.check_shape', 'vc_util.check_shape', (['shape_dy'], {}), '(shape_dy)\n', (3847, 3857), True, 'from akg.utils import validation_check as vc_util, kernel_exec as utils\n'), ((3862, 3890), 'akg.utils.validation_check.check_shape', 'vc_util.check_shape', 
(['shape_x'], {}), '(shape_x)\n', (3881, 3890), True, 'from akg.utils import validation_check as vc_util, kernel_exec as utils\n'), ((1227, 1260), 'akg.utils.dsl_create.produce_shapes', 'produce_shapes', (['shape_dy', 'shape_x'], {}), '(shape_dy, shape_x)\n', (1241, 1260), False, 'from akg.utils.dsl_create import produce_shapes\n'), ((1287, 1344), 'akg.lang.cce.broadcast', 'akg.lang.cce.broadcast', (['input_gradients', 'shape_max', 'dtype'], {}), '(input_gradients, shape_max, dtype)\n', (1309, 1344), False, 'import akg\n'), ((1383, 1439), 'akg.lang.cce.broadcast', 'akg.lang.cce.broadcast', (['input_features', 'shape_max', 'dtype'], {}), '(input_features, shape_max, dtype)\n', (1405, 1439), False, 'import akg\n'), ((1546, 1594), 'akg.lang.cce.cast_to', 'akg.lang.cce.cast_to', (['input_gradients', '"""float32"""'], {}), "(input_gradients, 'float32')\n", (1566, 1594), False, 'import akg\n'), ((1997, 2037), 'akg.lang.cce.cast_to', 'akg.lang.cce.cast_to', (['res_tmp', '"""float16"""'], {}), "(res_tmp, 'float16')\n", (2017, 2037), False, 'import akg\n'), ((2547, 2580), 'akg.lang.cce.cast_to', 'akg.lang.cce.cast_to', (['res', '"""int8"""'], {}), "(res, 'int8')\n", (2567, 2580), False, 'import akg\n'), ((2225, 2262), 'akg.lang.cce.vmin', 'akg.lang.cce.vmin', (['res_tmp', 'data_zero'], {}), '(res_tmp, data_zero)\n', (2242, 2262), False, 'import akg\n'), ((2281, 2318), 'akg.lang.cce.vmax', 'akg.lang.cce.vmax', (['res_tmp', 'data_zero'], {}), '(res_tmp, data_zero)\n', (2298, 2318), False, 'import akg\n'), ((2341, 2368), 'akg.lang.cce.floor', 'akg.lang.cce.floor', (['res_max'], {}), '(res_max)\n', (2359, 2368), False, 'import akg\n'), ((2391, 2417), 'akg.lang.cce.ceil', 'akg.lang.cce.ceil', (['res_min'], {}), '(res_min)\n', (2408, 2417), False, 'import akg\n'), ((2432, 2475), 'akg.lang.cce.vadd', 'akg.lang.cce.vadd', (['res_max_int', 'res_min_int'], {}), '(res_max_int, res_min_int)\n', (2449, 2475), False, 'import akg\n'), ((2622, 2656), 'akg.lang.cce.cast_to', 
'akg.lang.cce.cast_to', (['res', '"""uint8"""'], {}), "(res, 'uint8')\n", (2642, 2656), False, 'import akg\n'), ((1683, 1706), 'akg.utils.kernel_exec.product_is_mini', 'utils.product_is_mini', ([], {}), '()\n', (1704, 1706), True, 'from akg.utils import validation_check as vc_util, kernel_exec as utils\n'), ((2160, 2183), 'akg.tvm.const', 'tvm.const', (['(0)', '"""float16"""'], {}), "(0, 'float16')\n", (2169, 2183), False, 'from akg import tvm\n'), ((4156, 4179), 'akg.utils.kernel_exec.product_is_mini', 'utils.product_is_mini', ([], {}), '()\n', (4177, 4179), True, 'from akg.utils import validation_check as vc_util, kernel_exec as utils\n')]
|
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class VulnerabilityAudit(object):
"""
A Vulnerability Audit associates the Application Dependencies of a project with their associated vulnerabilities.
Each Vulnerability is associated with a score (Common Vulnerability Scoring System V2 or V3).
"""
#: A constant which can be used with the lifecycle_state property of a VulnerabilityAudit.
#: This constant has a value of "ACTIVE"
LIFECYCLE_STATE_ACTIVE = "ACTIVE"
#: A constant which can be used with the lifecycle_state property of a VulnerabilityAudit.
#: This constant has a value of "CREATING"
LIFECYCLE_STATE_CREATING = "CREATING"
#: A constant which can be used with the lifecycle_state property of a VulnerabilityAudit.
#: This constant has a value of "DELETED"
LIFECYCLE_STATE_DELETED = "DELETED"
#: A constant which can be used with the lifecycle_state property of a VulnerabilityAudit.
#: This constant has a value of "DELETING"
LIFECYCLE_STATE_DELETING = "DELETING"
#: A constant which can be used with the lifecycle_state property of a VulnerabilityAudit.
#: This constant has a value of "FAILED"
LIFECYCLE_STATE_FAILED = "FAILED"
#: A constant which can be used with the build_type property of a VulnerabilityAudit.
#: This constant has a value of "MAVEN"
BUILD_TYPE_MAVEN = "MAVEN"
def __init__(self, **kwargs):
"""
Initializes a new VulnerabilityAudit object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param id:
The value to assign to the id property of this VulnerabilityAudit.
:type id: str
:param display_name:
The value to assign to the display_name property of this VulnerabilityAudit.
:type display_name: str
:param knowledge_base_id:
The value to assign to the knowledge_base_id property of this VulnerabilityAudit.
:type knowledge_base_id: str
:param time_created:
The value to assign to the time_created property of this VulnerabilityAudit.
:type time_created: datetime
:param time_updated:
The value to assign to the time_updated property of this VulnerabilityAudit.
:type time_updated: datetime
:param lifecycle_state:
The value to assign to the lifecycle_state property of this VulnerabilityAudit.
Allowed values for this property are: "ACTIVE", "CREATING", "DELETED", "DELETING", "FAILED", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type lifecycle_state: str
:param vulnerabilities:
The value to assign to the vulnerabilities property of this VulnerabilityAudit.
:type vulnerabilities: list[oci.adm.models.Vulnerability]
:param max_observed_cvss_v2_score:
The value to assign to the max_observed_cvss_v2_score property of this VulnerabilityAudit.
:type max_observed_cvss_v2_score: float
:param max_observed_cvss_v3_score:
The value to assign to the max_observed_cvss_v3_score property of this VulnerabilityAudit.
:type max_observed_cvss_v3_score: float
:param vulnerable_artifacts_count:
The value to assign to the vulnerable_artifacts_count property of this VulnerabilityAudit.
:type vulnerable_artifacts_count: int
:param configuration:
The value to assign to the configuration property of this VulnerabilityAudit.
:type configuration: oci.adm.models.VulnerabilityAuditConfiguration
:param is_success:
The value to assign to the is_success property of this VulnerabilityAudit.
:type is_success: bool
:param build_type:
The value to assign to the build_type property of this VulnerabilityAudit.
Allowed values for this property are: "MAVEN", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type build_type: str
:param compartment_id:
The value to assign to the compartment_id property of this VulnerabilityAudit.
:type compartment_id: str
:param freeform_tags:
The value to assign to the freeform_tags property of this VulnerabilityAudit.
:type freeform_tags: dict(str, str)
:param defined_tags:
The value to assign to the defined_tags property of this VulnerabilityAudit.
:type defined_tags: dict(str, dict(str, object))
:param system_tags:
The value to assign to the system_tags property of this VulnerabilityAudit.
:type system_tags: dict(str, dict(str, object))
"""
self.swagger_types = {
'id': 'str',
'display_name': 'str',
'knowledge_base_id': 'str',
'time_created': 'datetime',
'time_updated': 'datetime',
'lifecycle_state': 'str',
'vulnerabilities': 'list[Vulnerability]',
'max_observed_cvss_v2_score': 'float',
'max_observed_cvss_v3_score': 'float',
'vulnerable_artifacts_count': 'int',
'configuration': 'VulnerabilityAuditConfiguration',
'is_success': 'bool',
'build_type': 'str',
'compartment_id': 'str',
'freeform_tags': 'dict(str, str)',
'defined_tags': 'dict(str, dict(str, object))',
'system_tags': 'dict(str, dict(str, object))'
}
self.attribute_map = {
'id': 'id',
'display_name': 'displayName',
'knowledge_base_id': 'knowledgeBaseId',
'time_created': 'timeCreated',
'time_updated': 'timeUpdated',
'lifecycle_state': 'lifecycleState',
'vulnerabilities': 'vulnerabilities',
'max_observed_cvss_v2_score': 'maxObservedCvssV2Score',
'max_observed_cvss_v3_score': 'maxObservedCvssV3Score',
'vulnerable_artifacts_count': 'vulnerableArtifactsCount',
'configuration': 'configuration',
'is_success': 'isSuccess',
'build_type': 'buildType',
'compartment_id': 'compartmentId',
'freeform_tags': 'freeformTags',
'defined_tags': 'definedTags',
'system_tags': 'systemTags'
}
self._id = None
self._display_name = None
self._knowledge_base_id = None
self._time_created = None
self._time_updated = None
self._lifecycle_state = None
self._vulnerabilities = None
self._max_observed_cvss_v2_score = None
self._max_observed_cvss_v3_score = None
self._vulnerable_artifacts_count = None
self._configuration = None
self._is_success = None
self._build_type = None
self._compartment_id = None
self._freeform_tags = None
self._defined_tags = None
self._system_tags = None
@property
def id(self):
"""
**[Required]** Gets the id of this VulnerabilityAudit.
The Oracle Cloud identifier (`OCID`__) of the Vulnerability Audit.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The id of this VulnerabilityAudit.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this VulnerabilityAudit.
The Oracle Cloud identifier (`OCID`__) of the Vulnerability Audit.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param id: The id of this VulnerabilityAudit.
:type: str
"""
self._id = id
@property
def display_name(self):
"""
Gets the display_name of this VulnerabilityAudit.
The name of the Vulnerability Audit.
:return: The display_name of this VulnerabilityAudit.
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""
Sets the display_name of this VulnerabilityAudit.
The name of the Vulnerability Audit.
:param display_name: The display_name of this VulnerabilityAudit.
:type: str
"""
self._display_name = display_name
@property
def knowledge_base_id(self):
"""
**[Required]** Gets the knowledge_base_id of this VulnerabilityAudit.
The Oracle Cloud identifier (`OCID`__) of the Knowledge Base.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The knowledge_base_id of this VulnerabilityAudit.
:rtype: str
"""
return self._knowledge_base_id
@knowledge_base_id.setter
def knowledge_base_id(self, knowledge_base_id):
"""
Sets the knowledge_base_id of this VulnerabilityAudit.
The Oracle Cloud identifier (`OCID`__) of the Knowledge Base.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param knowledge_base_id: The knowledge_base_id of this VulnerabilityAudit.
:type: str
"""
self._knowledge_base_id = knowledge_base_id
@property
def time_created(self):
"""
**[Required]** Gets the time_created of this VulnerabilityAudit.
The creation date and time of the Vulnerability Audit (formatted according to `RFC3339`__).
__ https://datatracker.ietf.org/doc/html/rfc3339
:return: The time_created of this VulnerabilityAudit.
:rtype: datetime
"""
return self._time_created
@time_created.setter
def time_created(self, time_created):
"""
Sets the time_created of this VulnerabilityAudit.
The creation date and time of the Vulnerability Audit (formatted according to `RFC3339`__).
__ https://datatracker.ietf.org/doc/html/rfc3339
:param time_created: The time_created of this VulnerabilityAudit.
:type: datetime
"""
self._time_created = time_created
@property
def time_updated(self):
"""
**[Required]** Gets the time_updated of this VulnerabilityAudit.
The update date and time of the Vulnerability Audit (formatted according to `RFC3339`__).
__ https://datatracker.ietf.org/doc/html/rfc3339
:return: The time_updated of this VulnerabilityAudit.
:rtype: datetime
"""
return self._time_updated
@time_updated.setter
def time_updated(self, time_updated):
"""
Sets the time_updated of this VulnerabilityAudit.
The update date and time of the Vulnerability Audit (formatted according to `RFC3339`__).
__ https://datatracker.ietf.org/doc/html/rfc3339
:param time_updated: The time_updated of this VulnerabilityAudit.
:type: datetime
"""
self._time_updated = time_updated
@property
def lifecycle_state(self):
"""
**[Required]** Gets the lifecycle_state of this VulnerabilityAudit.
The current lifecycle state of the Vulnerability Audit.
Allowed values for this property are: "ACTIVE", "CREATING", "DELETED", "DELETING", "FAILED", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The lifecycle_state of this VulnerabilityAudit.
:rtype: str
"""
return self._lifecycle_state
@lifecycle_state.setter
def lifecycle_state(self, lifecycle_state):
"""
Sets the lifecycle_state of this VulnerabilityAudit.
The current lifecycle state of the Vulnerability Audit.
:param lifecycle_state: The lifecycle_state of this VulnerabilityAudit.
:type: str
"""
allowed_values = ["ACTIVE", "CREATING", "DELETED", "DELETING", "FAILED"]
if not value_allowed_none_or_none_sentinel(lifecycle_state, allowed_values):
lifecycle_state = 'UNKNOWN_ENUM_VALUE'
self._lifecycle_state = lifecycle_state
@property
def vulnerabilities(self):
"""
**[Required]** Gets the vulnerabilities of this VulnerabilityAudit.
List of vulnerabilities found in the Vulnerability Audit.
:return: The vulnerabilities of this VulnerabilityAudit.
:rtype: list[oci.adm.models.Vulnerability]
"""
return self._vulnerabilities
@vulnerabilities.setter
def vulnerabilities(self, vulnerabilities):
"""
Sets the vulnerabilities of this VulnerabilityAudit.
List of vulnerabilities found in the Vulnerability Audit.
:param vulnerabilities: The vulnerabilities of this VulnerabilityAudit.
:type: list[oci.adm.models.Vulnerability]
"""
self._vulnerabilities = vulnerabilities
@property
def max_observed_cvss_v2_score(self):
"""
**[Required]** Gets the max_observed_cvss_v2_score of this VulnerabilityAudit.
Maximum Common Vulnerability Scoring System Version 2 score of vulnerabilities.
:return: The max_observed_cvss_v2_score of this VulnerabilityAudit.
:rtype: float
"""
return self._max_observed_cvss_v2_score
@max_observed_cvss_v2_score.setter
def max_observed_cvss_v2_score(self, max_observed_cvss_v2_score):
"""
Sets the max_observed_cvss_v2_score of this VulnerabilityAudit.
Maximum Common Vulnerability Scoring System Version 2 score of vulnerabilities.
:param max_observed_cvss_v2_score: The max_observed_cvss_v2_score of this VulnerabilityAudit.
:type: float
"""
self._max_observed_cvss_v2_score = max_observed_cvss_v2_score
@property
def max_observed_cvss_v3_score(self):
"""
**[Required]** Gets the max_observed_cvss_v3_score of this VulnerabilityAudit.
Maximum Common Vulnerability Scoring System Version 3 score of vulnerabilities.
:return: The max_observed_cvss_v3_score of this VulnerabilityAudit.
:rtype: float
"""
return self._max_observed_cvss_v3_score
@max_observed_cvss_v3_score.setter
def max_observed_cvss_v3_score(self, max_observed_cvss_v3_score):
"""
Sets the max_observed_cvss_v3_score of this VulnerabilityAudit.
Maximum Common Vulnerability Scoring System Version 3 score of vulnerabilities.
:param max_observed_cvss_v3_score: The max_observed_cvss_v3_score of this VulnerabilityAudit.
:type: float
"""
self._max_observed_cvss_v3_score = max_observed_cvss_v3_score
@property
def vulnerable_artifacts_count(self):
"""
**[Required]** Gets the vulnerable_artifacts_count of this VulnerabilityAudit.
Count of vulnerable artifacts.
:return: The vulnerable_artifacts_count of this VulnerabilityAudit.
:rtype: int
"""
return self._vulnerable_artifacts_count
@vulnerable_artifacts_count.setter
def vulnerable_artifacts_count(self, vulnerable_artifacts_count):
"""
Sets the vulnerable_artifacts_count of this VulnerabilityAudit.
Count of vulnerable artifacts.
:param vulnerable_artifacts_count: The vulnerable_artifacts_count of this VulnerabilityAudit.
:type: int
"""
self._vulnerable_artifacts_count = vulnerable_artifacts_count
@property
def configuration(self):
"""
Gets the configuration of this VulnerabilityAudit.
:return: The configuration of this VulnerabilityAudit.
:rtype: oci.adm.models.VulnerabilityAuditConfiguration
"""
return self._configuration
@configuration.setter
def configuration(self, configuration):
"""
Sets the configuration of this VulnerabilityAudit.
:param configuration: The configuration of this VulnerabilityAudit.
:type: oci.adm.models.VulnerabilityAuditConfiguration
"""
self._configuration = configuration
@property
def is_success(self):
"""
Gets the is_success of this VulnerabilityAudit.
Indicates if an audit succeeded according to the configuration. The value is `null` if the audit is in the `CREATING` state.
:return: The is_success of this VulnerabilityAudit.
:rtype: bool
"""
return self._is_success
@is_success.setter
def is_success(self, is_success):
"""
Sets the is_success of this VulnerabilityAudit.
Indicates if an audit succeeded according to the configuration. The value is `null` if the audit is in the `CREATING` state.
:param is_success: The is_success of this VulnerabilityAudit.
:type: bool
"""
self._is_success = is_success
@property
def build_type(self):
"""
**[Required]** Gets the build_type of this VulnerabilityAudit.
The type of the build tool.
Allowed values for this property are: "MAVEN", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The build_type of this VulnerabilityAudit.
:rtype: str
"""
return self._build_type
@build_type.setter
def build_type(self, build_type):
"""
Sets the build_type of this VulnerabilityAudit.
The type of the build tool.
:param build_type: The build_type of this VulnerabilityAudit.
:type: str
"""
allowed_values = ["MAVEN"]
if not value_allowed_none_or_none_sentinel(build_type, allowed_values):
build_type = 'UNKNOWN_ENUM_VALUE'
self._build_type = build_type
@property
def compartment_id(self):
"""
**[Required]** Gets the compartment_id of this VulnerabilityAudit.
The Oracle Cloud identifier (`OCID`__) of the compartment associated with the Vulnerability Audit.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The compartment_id of this VulnerabilityAudit.
:rtype: str
"""
return self._compartment_id
@compartment_id.setter
def compartment_id(self, compartment_id):
"""
Sets the compartment_id of this VulnerabilityAudit.
The Oracle Cloud identifier (`OCID`__) of the compartment associated with the Vulnerability Audit.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param compartment_id: The compartment_id of this VulnerabilityAudit.
:type: str
"""
self._compartment_id = compartment_id
@property
def freeform_tags(self):
"""
Gets the freeform_tags of this VulnerabilityAudit.
Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only.
Example: `{\"bar-key\": \"value\"}`
:return: The freeform_tags of this VulnerabilityAudit.
:rtype: dict(str, str)
"""
return self._freeform_tags
@freeform_tags.setter
def freeform_tags(self, freeform_tags):
"""
Sets the freeform_tags of this VulnerabilityAudit.
Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only.
Example: `{\"bar-key\": \"value\"}`
:param freeform_tags: The freeform_tags of this VulnerabilityAudit.
:type: dict(str, str)
"""
self._freeform_tags = freeform_tags
@property
def defined_tags(self):
"""
Gets the defined_tags of this VulnerabilityAudit.
Defined tags for this resource. Each key is predefined and scoped to a namespace.
Example: `{\"foo-namespace\": {\"bar-key\": \"value\"}}`
:return: The defined_tags of this VulnerabilityAudit.
:rtype: dict(str, dict(str, object))
"""
return self._defined_tags
@defined_tags.setter
def defined_tags(self, defined_tags):
"""
Sets the defined_tags of this VulnerabilityAudit.
Defined tags for this resource. Each key is predefined and scoped to a namespace.
Example: `{\"foo-namespace\": {\"bar-key\": \"value\"}}`
:param defined_tags: The defined_tags of this VulnerabilityAudit.
:type: dict(str, dict(str, object))
"""
self._defined_tags = defined_tags
@property
def system_tags(self):
"""
Gets the system_tags of this VulnerabilityAudit.
Usage of system tag keys. These predefined keys are scoped to namespaces.
Example: `{\"orcl-cloud\": {\"free-tier-retained\": \"true\"}}`
:return: The system_tags of this VulnerabilityAudit.
:rtype: dict(str, dict(str, object))
"""
return self._system_tags
@system_tags.setter
def system_tags(self, system_tags):
"""
Sets the system_tags of this VulnerabilityAudit.
Usage of system tag keys. These predefined keys are scoped to namespaces.
Example: `{\"orcl-cloud\": {\"free-tier-retained\": \"true\"}}`
:param system_tags: The system_tags of this VulnerabilityAudit.
:type: dict(str, dict(str, object))
"""
self._system_tags = system_tags
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
|
[
"oci.util.formatted_flat_dict",
"oci.util.value_allowed_none_or_none_sentinel"
] |
[((22146, 22171), 'oci.util.formatted_flat_dict', 'formatted_flat_dict', (['self'], {}), '(self)\n', (22165, 22171), False, 'from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel\n'), ((12638, 12706), 'oci.util.value_allowed_none_or_none_sentinel', 'value_allowed_none_or_none_sentinel', (['lifecycle_state', 'allowed_values'], {}), '(lifecycle_state, allowed_values)\n', (12673, 12706), False, 'from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel\n'), ((18336, 18399), 'oci.util.value_allowed_none_or_none_sentinel', 'value_allowed_none_or_none_sentinel', (['build_type', 'allowed_values'], {}), '(build_type, allowed_values)\n', (18371, 18399), False, 'from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel\n')]
|
import warnings
from typing import Callable, Optional
from fedot.core.log import Log
from fedot.core.operations.evaluation. \
operation_implementations.implementation_interfaces import ModelImplementation
from fedot.core.repository.dataset_types import DataTypesEnum
class CustomModelImplementation(ModelImplementation):
"""
Implementation of container for custom model, which is presented as function with
input train_data(np.array), test_data(np.array), parameters(dict)
output type specification DataTypesEnum (string - 'ts', 'table', 'image', 'text')
into parameters dictionary {'model_predict': function, 'model_fit': function}
"""
def __init__(self, params: dict = None, log: Optional[Log] = None):
super().__init__(log)
self.params = params
self.model_fit = None
self.model_predict = None
self.fitted_model = None
if not self.params:
raise ValueError('There is no specified parameters for custom model!')
else:
# init model
if 'model_predict' in self.params.keys():
self.model_predict = self.params.get('model_predict')
if not isinstance(self.model_predict, Callable):
warnings.warn('Input model_predict is not Callable')
else:
raise ValueError('There is no key word "model_predict" for model definition in input dictionary.')
# custom model can be without fitting
if 'model_fit' in self.params.keys():
self.model_fit = self.params.get('model_fit')
if not isinstance(self.model_fit, Callable):
raise ValueError('Input model is not Callable')
def fit(self, input_data):
""" Fit method for custom model implementation """
if self.model_fit:
self.fitted_model = self.model_fit(input_data.idx, input_data.features, input_data.target, self.params)
return self.fitted_model
def predict(self, input_data, is_fit_pipeline_stage: Optional[bool]):
output_type = input_data.data_type
# if there is no need in fitting custom model and it is fit call
if is_fit_pipeline_stage and not self.model_fit:
predict = input_data.features
# If custom model has exceptions inviolate train data goes to Output
# make prediction if predict call or there is need to fit custom model
else:
try:
predict, output_type = self.model_predict(self.fitted_model,
input_data.idx,
input_data.features,
self.params)
if (input_data.data_type == DataTypesEnum.ts and
input_data.target is not None and len(input_data.target.shape) > 1):
# change target after custom model is processed
input_data.target = input_data.target[:, 0]
output_type = DataTypesEnum[output_type]
except Exception as e:
raise TypeError(f'{e}\nInput model has incorrect behaviour. Check type hints for model: \
Callable[[any, np.array, dict], np.array, str]')
output_data = self._convert_to_output(input_data,
predict=predict,
data_type=output_type)
return output_data
def get_params(self):
return self.params
|
[
"warnings.warn"
] |
[((1256, 1308), 'warnings.warn', 'warnings.warn', (['"""Input model_predict is not Callable"""'], {}), "('Input model_predict is not Callable')\n", (1269, 1308), False, 'import warnings\n')]
|
import unittest
import xjpath
class TestXJPath(unittest.TestCase):
    """Unit tests for the ``xjpath`` dictionary/list path-lookup helpers.

    Path literals that contain a backslash are written as raw strings.
    The original plain literals only worked because Python passes
    unrecognised escape sequences through unchanged — behaviour that has
    been deprecated since Python 3.6 and emits SyntaxWarning from 3.12.
    The raw-string forms are byte-identical values, so test behaviour is
    unchanged.
    """

    def test_get_all_dict_values_from_top(self):
        d = {'t1': 1, 't2': 2, 't3': 3, 't4': 4}
        v = xjpath.strict_path_lookup(d, '*')
        self.assertTrue(isinstance(v, tuple))
        self.assertEqual([1, 2, 3, 4], sorted(v))

    def test_get_all_dict_values_from_level_down(self):
        d = {'l1': {'t1': 1, 't2': 2, 't3': 3, 't4': 4}}
        v = xjpath.strict_path_lookup(d, 'l1.*')
        self.assertTrue(isinstance(v, tuple))
        self.assertEqual([1, 2, 3, 4], sorted(v))

    def test_get_all_list_values_copy(self):
        d = {'l1': [1, 2, 3, 4]}
        v = xjpath.strict_path_lookup(d, 'l1.*')
        self.assertEqual((1, 2, 3, 4), v)

    def test_get_all_same_attribute_values_if_list(self):
        d = {'l1': [{'s': 5, 'r': ''}, {'s': 6, 'r': ''}, {'s': 7}]}
        v = xjpath.strict_path_lookup(d, 'l1.*.s')
        self.assertEqual((5, 6, 7), v)

    def test_get_all_same_attribute_values_if_dict(self):
        d = {'l1': {'t0': {'s': 5, 'r': ''},
                    't1': {'s': 6, 'r': ''},
                    't2': {'s': 7}}}
        v = xjpath.strict_path_lookup(d, 'l1.*.s')
        self.assertEqual([5, 6, 7], sorted(v))

    def test_get_last_array_element(self):
        d = [1, 2, 3]
        v = xjpath.strict_path_lookup(d, '@last')
        self.assertEqual(3, v)

    def test_get_first_array_element(self):
        d = [1, 2, 3]
        v = xjpath.strict_path_lookup(d, '@first')
        self.assertEqual(1, v)

    def test_get_second_array_element(self):
        d = [1, 2, 3]
        v = xjpath.strict_path_lookup(d, '@1')
        self.assertEqual(2, v)

    def test_get_element_from_dict_that_is_second_element_in_array(self):
        d = ['1', {'element': 999}, '3']
        v = xjpath.strict_path_lookup(d, '@1.element')
        self.assertEqual(999, v)

    def test_get_element_from_wrong_index_with_exception(self):
        d = [1, 2, 3]
        with self.assertRaises(xjpath.XJPathError):
            xjpath.strict_path_lookup(d, '@10')

    def test_get_element_from_wrong_index_with_no_exception(self):
        d = [1, 2, 3]
        value, exists = xjpath.path_lookup(d, '@10')
        self.assertIsNone(value)
        self.assertFalse(exists)

    def test_get_element_from_dict_using_array_index(self):
        d = {'1': '1', '2': '2'}
        value, exists = xjpath.path_lookup(d, '@1')
        self.assertIsNone(value)
        self.assertFalse(exists)

    def test_test_no_values(self):
        with self.assertRaises(xjpath.XJPathError):
            xjpath.strict_path_lookup({}, 'path')

    def test_test_no_values_deep(self):
        with self.assertRaises(xjpath.XJPathError):
            xjpath.strict_path_lookup({'path': {'test': [1]}}, 'path.test.num')

    def test_with_one_escape(self):
        value, exists = xjpath.path_lookup({'v.v': {'t': 31}}, r'v\.v.t')
        self.assertEqual(31, value)
        self.assertTrue(exists)

    def test_with_two_escape(self):
        value, exists = xjpath.path_lookup({'v.v': {'t.t': 31}}, r'v\.v.t\.t')
        self.assertEqual(31, value)
        self.assertTrue(exists)

    def test_with_last_escape(self):
        value, exists = xjpath.path_lookup({'v': {'t.t': 31}}, r'v.t\.t')
        self.assertEqual(31, value)
        self.assertTrue(exists)

    def test_with_sobachka_escape(self):
        """Sobachka == '@'"""
        value, exists = xjpath.path_lookup({'v': {'@id': 31}}, r'v.\@id')
        self.assertEqual(31, value)
        self.assertTrue(exists)

    def test_with_sobachka_double_escape(self):
        """Sobachka == '@'"""
        value, exists = xjpath.path_lookup({'v': {r'\@id': 31}}, 'v.\\\\@id')
        self.assertEqual(31, value)
        self.assertTrue(exists)

    def test_with_sobachka_triple_escape(self):
        """Sobachka == '@'"""
        value, exists = xjpath.path_lookup({'v': {'\\\\@id': 31}},
                                           'v.\\\\\\\\@id')
        self.assertEqual(31, value)
        self.assertTrue(exists)

    def test_with_star_escape(self):
        value, exists = xjpath.path_lookup({'v': {'*id': 31}}, r'v.\*id')
        self.assertEqual(31, value)
        self.assertTrue(exists)

    def test_escapes_non_special(self):
        value, exists = xjpath.path_lookup({'v': {'\\': 31}}, r'v.\\')
        self.assertEqual(31, value)
        self.assertTrue(exists)

    def test_validate_path_ok(self):
        xjpath.validate_path('x.f.g.@first')
        xjpath.validate_path('x.f.g.@last')
        xjpath.validate_path('x.*.*.@last')
        xjpath.validate_path(r'x.@first.\*.@last')

    def test_validate_path_fail(self):
        with self.assertRaises(xjpath.XJPathError):
            xjpath.validate_path('x.@wedwe')
        with self.assertRaises(xjpath.XJPathError):
            xjpath.validate_path(10)

    def test_auto_dict_creations(self):
        a = {}
        xjpath.path_lookup(a, 'a{}.b{}.c{}', True)
        self.assertEqual({'a': {'b': {'c': {}}}}, a)

    def test_auto_dict_and_last_array_creations(self):
        a = {}
        xjpath.path_lookup(a, 'a{}.b{}.c[]', True)
        self.assertEqual({'a': {'b': {'c': []}}}, a)

    def test_path_create_type_mismatch1(self):
        a = {'a': 1}
        with self.assertRaises(xjpath.XJPathError):
            xjpath.path_lookup(a, 'a[]')

    def test_path_create_type_mismatch2(self):
        a = {'a': []}
        with self.assertRaises(xjpath.XJPathError):
            xjpath.path_lookup(a, 'a{}')

    def test_path_create_type_mismatch3(self):
        a = [{}]
        with self.assertRaises(xjpath.XJPathError):
            xjpath.path_lookup(a, '@first[]')

    def test_path_create_type_mismatch4(self):
        a = [{}]
        with self.assertRaises(xjpath.XJPathError):
            xjpath.path_lookup(a, '@-1[]')

    def test_path_lookup_dict_as_array(self):
        a = []
        with self.assertRaises(xjpath.XJPathError):
            xjpath.path_lookup(a, 'a{}')

    def test_path_lookup_array_as_dict(self):
        a = {}
        with self.assertRaises(xjpath.XJPathError):
            xjpath.path_lookup(a, '@first[]')

    def test_type_escape_for_str(self):
        self.assertEqual(('v', True),
                         xjpath.path_lookup({'a$': 'v'}, r'a\$', True))
        self.assertEqual(('', True),
                         xjpath.path_lookup({'a$': 'a'}, 'a$', True))

    def test_type_escape_for_number(self):
        self.assertEqual((123, True),
                         xjpath.path_lookup({'a#': 123}, r'a\#', True))
        self.assertEqual((0, True),
                         xjpath.path_lookup({'a$': 123}, 'a#', True))

    def test_type_escape_for_float(self):
        self.assertEqual((.1, True),
                         xjpath.path_lookup({'a%': .1}, r'a\%', True))
        self.assertEqual((.0, True),
                         xjpath.path_lookup({'a%': 123}, 'a%', True))

    def test_type_escape_for_dict(self):
        self.assertEqual(({"1": 1}, True),
                         xjpath.path_lookup({'a{}': {"1": 1}}, r'a\{}', True))
        self.assertEqual(({}, True),
                         xjpath.path_lookup({'a{}': {"1": 1}}, 'a{}', True))

    def test_type_escape_for_list(self):
        self.assertEqual(([1], True),
                         xjpath.path_lookup({'a[]': [1]}, r'a\[]', True))
        self.assertEqual(([], True),
                         xjpath.path_lookup({'a[]': [1]}, 'a[]', True))

    def test_XJPath(self):
        d = {'t1': 1, 't2': 2, 't3': 3, 't4': 4}
        x = xjpath.XJPath(d)
        self.assertEqual(1, x['t1'])
        self.assertEqual(None, x.get('t1.1'))
if __name__ == '__main__':
    import logging
    # Raise the root logger level so only CRITICAL records surface while
    # the test suite runs.
    logging.basicConfig(level=logging.CRITICAL)
    unittest.main()
|
[
"unittest.main",
"xjpath.strict_path_lookup",
"xjpath.path_lookup",
"logging.basicConfig",
"xjpath.validate_path",
"xjpath.XJPath"
] |
[((7716, 7759), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.CRITICAL'}), '(level=logging.CRITICAL)\n', (7735, 7759), False, 'import logging\n'), ((7764, 7779), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7777, 7779), False, 'import unittest\n'), ((181, 214), 'xjpath.strict_path_lookup', 'xjpath.strict_path_lookup', (['d', '"""*"""'], {}), "(d, '*')\n", (206, 214), False, 'import xjpath\n'), ((437, 473), 'xjpath.strict_path_lookup', 'xjpath.strict_path_lookup', (['d', '"""l1.*"""'], {}), "(d, 'l1.*')\n", (462, 473), False, 'import xjpath\n'), ((661, 697), 'xjpath.strict_path_lookup', 'xjpath.strict_path_lookup', (['d', '"""l1.*"""'], {}), "(d, 'l1.*')\n", (686, 697), False, 'import xjpath\n'), ((880, 918), 'xjpath.strict_path_lookup', 'xjpath.strict_path_lookup', (['d', '"""l1.*.s"""'], {}), "(d, 'l1.*.s')\n", (905, 918), False, 'import xjpath\n'), ((1156, 1194), 'xjpath.strict_path_lookup', 'xjpath.strict_path_lookup', (['d', '"""l1.*.s"""'], {}), "(d, 'l1.*.s')\n", (1181, 1194), False, 'import xjpath\n'), ((1320, 1357), 'xjpath.strict_path_lookup', 'xjpath.strict_path_lookup', (['d', '"""@last"""'], {}), "(d, '@last')\n", (1345, 1357), False, 'import xjpath\n'), ((1468, 1506), 'xjpath.strict_path_lookup', 'xjpath.strict_path_lookup', (['d', '"""@first"""'], {}), "(d, '@first')\n", (1493, 1506), False, 'import xjpath\n'), ((1618, 1652), 'xjpath.strict_path_lookup', 'xjpath.strict_path_lookup', (['d', '"""@1"""'], {}), "(d, '@1')\n", (1643, 1652), False, 'import xjpath\n'), ((1812, 1854), 'xjpath.strict_path_lookup', 'xjpath.strict_path_lookup', (['d', '"""@1.element"""'], {}), "(d, '@1.element')\n", (1837, 1854), False, 'import xjpath\n'), ((2189, 2217), 'xjpath.path_lookup', 'xjpath.path_lookup', (['d', '"""@10"""'], {}), "(d, '@10')\n", (2207, 2217), False, 'import xjpath\n'), ((2402, 2429), 'xjpath.path_lookup', 'xjpath.path_lookup', (['d', '"""@1"""'], {}), "(d, '@1')\n", (2420, 2429), False, 'import xjpath\n'), ((2868, 2917), 
'xjpath.path_lookup', 'xjpath.path_lookup', (["{'v.v': {'t': 31}}", '"""v\\\\.v.t"""'], {}), "({'v.v': {'t': 31}}, 'v\\\\.v.t')\n", (2886, 2917), False, 'import xjpath\n'), ((3046, 3101), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'v.v': {'t.t': 31}}", '"""v\\\\.v.t\\\\.t"""'], {}), "({'v.v': {'t.t': 31}}, 'v\\\\.v.t\\\\.t')\n", (3064, 3101), False, 'import xjpath\n'), ((3230, 3279), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'v': {'t.t': 31}}", '"""v.t\\\\.t"""'], {}), "({'v': {'t.t': 31}}, 'v.t\\\\.t')\n", (3248, 3279), False, 'import xjpath\n'), ((3443, 3492), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'v': {'@id': 31}}", '"""v.\\\\@id"""'], {}), "({'v': {'@id': 31}}, 'v.\\\\@id')\n", (3461, 3492), False, 'import xjpath\n'), ((3663, 3716), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'v': {'\\\\@id': 31}}", '"""v.\\\\\\\\@id"""'], {}), "({'v': {'\\\\@id': 31}}, 'v.\\\\\\\\@id')\n", (3681, 3716), False, 'import xjpath\n'), ((3887, 3946), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'v': {'\\\\\\\\@id': 31}}", '"""v.\\\\\\\\\\\\\\\\@id"""'], {}), "({'v': {'\\\\\\\\@id': 31}}, 'v.\\\\\\\\\\\\\\\\@id')\n", (3905, 3946), False, 'import xjpath\n'), ((4120, 4169), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'v': {'*id': 31}}", '"""v.\\\\*id"""'], {}), "({'v': {'*id': 31}}, 'v.\\\\*id')\n", (4138, 4169), False, 'import xjpath\n'), ((4302, 4349), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'v': {'\\\\': 31}}", '"""v.\\\\\\\\"""'], {}), "({'v': {'\\\\': 31}}, 'v.\\\\\\\\')\n", (4320, 4349), False, 'import xjpath\n'), ((4463, 4499), 'xjpath.validate_path', 'xjpath.validate_path', (['"""x.f.g.@first"""'], {}), "('x.f.g.@first')\n", (4483, 4499), False, 'import xjpath\n'), ((4508, 4543), 'xjpath.validate_path', 'xjpath.validate_path', (['"""x.f.g.@last"""'], {}), "('x.f.g.@last')\n", (4528, 4543), False, 'import xjpath\n'), ((4552, 4587), 'xjpath.validate_path', 'xjpath.validate_path', (['"""x.*.*.@last"""'], {}), "('x.*.*.@last')\n", (4572, 
4587), False, 'import xjpath\n'), ((4596, 4638), 'xjpath.validate_path', 'xjpath.validate_path', (['"""x.@first.\\\\*.@last"""'], {}), "('x.@first.\\\\*.@last')\n", (4616, 4638), False, 'import xjpath\n'), ((4929, 4971), 'xjpath.path_lookup', 'xjpath.path_lookup', (['a', '"""a{}.b{}.c{}"""', '(True)'], {}), "(a, 'a{}.b{}.c{}', True)\n", (4947, 4971), False, 'import xjpath\n'), ((5104, 5146), 'xjpath.path_lookup', 'xjpath.path_lookup', (['a', '"""a{}.b{}.c[]"""', '(True)'], {}), "(a, 'a{}.b{}.c[]', True)\n", (5122, 5146), False, 'import xjpath\n'), ((7564, 7580), 'xjpath.XJPath', 'xjpath.XJPath', (['d'], {}), '(d)\n', (7577, 7580), False, 'import xjpath\n'), ((2039, 2074), 'xjpath.strict_path_lookup', 'xjpath.strict_path_lookup', (['d', '"""@10"""'], {}), "(d, '@10')\n", (2064, 2074), False, 'import xjpath\n'), ((2596, 2633), 'xjpath.strict_path_lookup', 'xjpath.strict_path_lookup', (['{}', '"""path"""'], {}), "({}, 'path')\n", (2621, 2633), False, 'import xjpath\n'), ((2739, 2806), 'xjpath.strict_path_lookup', 'xjpath.strict_path_lookup', (["{'path': {'test': [1]}}", '"""path.test.num"""'], {}), "({'path': {'test': [1]}}, 'path.test.num')\n", (2764, 2806), False, 'import xjpath\n'), ((4742, 4774), 'xjpath.validate_path', 'xjpath.validate_path', (['"""x.@wedwe"""'], {}), "('x.@wedwe')\n", (4762, 4774), False, 'import xjpath\n'), ((4840, 4864), 'xjpath.validate_path', 'xjpath.validate_path', (['(10)'], {}), '(10)\n', (4860, 4864), False, 'import xjpath\n'), ((5333, 5361), 'xjpath.path_lookup', 'xjpath.path_lookup', (['a', '"""a[]"""'], {}), "(a, 'a[]')\n", (5351, 5361), False, 'import xjpath\n'), ((5496, 5524), 'xjpath.path_lookup', 'xjpath.path_lookup', (['a', '"""a{}"""'], {}), "(a, 'a{}')\n", (5514, 5524), False, 'import xjpath\n'), ((5654, 5687), 'xjpath.path_lookup', 'xjpath.path_lookup', (['a', '"""@first[]"""'], {}), "(a, '@first[]')\n", (5672, 5687), False, 'import xjpath\n'), ((5817, 5847), 'xjpath.path_lookup', 'xjpath.path_lookup', (['a', '"""@-1[]"""'], 
{}), "(a, '@-1[]')\n", (5835, 5847), False, 'import xjpath\n'), ((5974, 6002), 'xjpath.path_lookup', 'xjpath.path_lookup', (['a', '"""a{}"""'], {}), "(a, 'a{}')\n", (5992, 6002), False, 'import xjpath\n'), ((6129, 6162), 'xjpath.path_lookup', 'xjpath.path_lookup', (['a', '"""@first[]"""'], {}), "(a, '@first[]')\n", (6147, 6162), False, 'import xjpath\n'), ((6267, 6312), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'a$': 'v'}", '"""a\\\\$"""', '(True)'], {}), "({'a$': 'v'}, 'a\\\\$', True)\n", (6285, 6312), False, 'import xjpath\n'), ((6375, 6418), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'a$': 'a'}", '"""a$"""', '(True)'], {}), "({'a$': 'a'}, 'a$', True)\n", (6393, 6418), False, 'import xjpath\n'), ((6527, 6572), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'a#': 123}", '"""a\\\\#"""', '(True)'], {}), "({'a#': 123}, 'a\\\\#', True)\n", (6545, 6572), False, 'import xjpath\n'), ((6634, 6677), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'a$': 123}", '"""a#"""', '(True)'], {}), "({'a$': 123}, 'a#', True)\n", (6652, 6677), False, 'import xjpath\n'), ((6784, 6829), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'a%': 0.1}", '"""a\\\\%"""', '(True)'], {}), "({'a%': 0.1}, 'a\\\\%', True)\n", (6802, 6829), False, 'import xjpath\n'), ((6891, 6934), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'a%': 123}", '"""a%"""', '(True)'], {}), "({'a%': 123}, 'a%', True)\n", (6909, 6934), False, 'import xjpath\n'), ((7046, 7098), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'a{}': {'1': 1}}", '"""a\\\\{}"""', '(True)'], {}), "({'a{}': {'1': 1}}, 'a\\\\{}', True)\n", (7064, 7098), False, 'import xjpath\n'), ((7161, 7211), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'a{}': {'1': 1}}", '"""a{}"""', '(True)'], {}), "({'a{}': {'1': 1}}, 'a{}', True)\n", (7179, 7211), False, 'import xjpath\n'), ((7318, 7365), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'a[]': [1]}", '"""a\\\\[]"""', '(True)'], {}), "({'a[]': [1]}, 'a\\\\[]', True)\n", (7336, 7365), False, 
'import xjpath\n'), ((7428, 7473), 'xjpath.path_lookup', 'xjpath.path_lookup', (["{'a[]': [1]}", '"""a[]"""', '(True)'], {}), "({'a[]': [1]}, 'a[]', True)\n", (7446, 7473), False, 'import xjpath\n')]
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2009 by <NAME> <http://gustavonarea.net/>.
#
# This file is part of Booleano <http://code.gustavonarea.net/booleano/>.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, distribute with
# modifications, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# ABOVE COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
# IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Except as contained in this notice, the name(s) of the above copyright
# holders shall not be used in advertising or otherwise to promote the sale,
# use or other dealings in this Software without prior written authorization.
"""
Booleano scope handling.
"""
from logging import getLogger
from booleano.exc import ScopeError
# Names exported when this module is star-imported.
__all__ = ("Bind", "SymbolTable", "Namespace")
# Module-level logger; used to report identifiers missing a localized name.
LOGGER = getLogger(__name__)
class _Identifier(object):
"""
Multilingual identifier.
"""
def __init__(self, global_name, **names):
"""
Create the identifier using ``global_name`` as it's name.
:param global_name: The identifier string (excludes parent symbol
tables, if any).
:type global_name: basestring
Additional keyword arguments represent the translations of the
``global_name`` into other languages.
"""
# By default, identifiers are not bound to a symbol table:
self.symbol_table = None
# Convert the ``names`` to lower-case:
self.global_name = global_name.lower()
for (locale, name) in names.items():
names[locale] = name.lower()
self.names = names
def get_localized_name(self, locale):
"""
Return the localized name of the identifier in ``locale``.
:param locale: The locale of the name.
:type locale: basestring
:return: The name of the identifier in ``locale``; if it's not defined,
the global name is returned.
:rtype: basestring
"""
if locale in self.names:
name = self.names[locale]
else:
LOGGER.warn("%s doesn't have a name in %s; using the global one",
self, locale)
name = self.global_name
return name
def _get_contents(self, locale):
"""
Return the contents being wrapped, filtered by ``locale`` where
relevant.
:param locale: The locale used to filter the contents.
:param locale: basestring
:return: The contents being wrapped; in a binding, it's the operand,
while in a symbol table, it's the namespace for the ``locale``.
"""
raise NotImplementedError()
# { Comparison stuff
def __hash__(self):
"""
Make the identifier hashable based on its global name.
"""
first = ord(self.global_name[0])
last = ord(self.global_name[-1])
hash_ = first * 2 + last * 3 + len(self.global_name)
return hash_
def __eq__(self, other):
"""
Check that the ``other`` identifier is equivalent to this one.
Two identifiers are equivalent if the have the same names.
"""
if isinstance(other, _Identifier) and self.global_name == other.global_name and self.names == other.names:
return True
return False
def __ne__(self, other):
"""
Check that the ``other`` identifier is NOT equivalent to this one.
"""
return not self.__eq__(other)
# { Representations
def __unicode__(self):
"""
Return the Unicode representation for the identifier.
This must be overridden by the specific identifiers.
"""
raise NotImplementedError("Identifiers must set their Unicode "
"representation")
def __str__(self):
"""
Return the ASCII representation for this identifier.
This method returns the same as :meth:`__unicode__`.
"""
return self.__unicode__().encode("utf-8")
# }
class Bind(_Identifier):
    """
    Operand binder.
    A binding assigns one (possibly multilingual) identifier to a single
    operand.
    """
    def __init__(self, global_name, operand, **names):
        """
        :param global_name: The identifier string (excludes parent names, if
            any).
        :type global_name: basestring
        :param operand: The operand to be bound.
        :type operand: :class:`booleano.operations.operands.Operand`
        Additional keyword arguments represent the translations of the
        ``global_name`` into other languages.
        """
        self.operand = operand
        super(Bind, self).__init__(global_name, **names)
    def _get_contents(self, locale):
        """Return the wrapped operand; ``locale`` is irrelevant for bindings."""
        return self.operand
    def __eq__(self, other):
        """
        Check that the ``other`` binding is equivalent to this one.
        Two bindings are equivalent when their names match, even if they do
        not wrap the same operand.
        """
        # Requiring ``other`` to be a Bind prevents a symbol table with the
        # same names from comparing equal to this binding.
        return isinstance(other, Bind) and super(Bind, self).__eq__(other)
    def __hash__(self):
        # Identity hash: bindings are mutable, so value-based hashing is unsafe.
        return id(self)
    def __unicode__(self):
        """
        Return the Unicode representation for this binding, including its
        symbol table (if any).
        """
        description = 'Operand %s bound as "%s"' % (self.operand,
                                                    self.global_name)
        if not self.symbol_table:
            return description
        return "%s (in %s)" % (description, self.symbol_table)
class SymbolTable(_Identifier):
    """
    Symbol table.
    Symbol tables wrap *bound* operands (aka, "bindings").
    """
    def __init__(self, global_name, objects, *subtables, **names):
        """
        :param global_name: The name of the symbol table (excludes parent
            symbol tables, if any).
        :type global_name: basestring
        :param objects: List of bound operands available in this symbol table.
        :type objects: list
        :raises booleano.exc.ScopeError: If an object/subtable is already
            included or already belongs to another symbol table.
        Additional positional arguments represent the sub-tables of this
        symbol table.
        Additional keyword arguments represent the other names this table
        can take in different locales.
        """
        super(SymbolTable, self).__init__(global_name, **names)
        # Objects and sub-tables are kept in sets: membership tests rely on
        # the identifiers' custom __eq__/__hash__.
        self.objects = set()
        self.subtables = set()
        for obj in objects:
            self.add_object(obj)
        for table in subtables:
            self.add_subtable(table)
    def add_object(self, obj):
        """
        Add the ``obj`` object to this symbol table.
        :param obj: The bound operand to be added.
        :type obj: :class:`Bind`
        :raises booleano.exc.ScopeError: If ``obj`` is already included or it
            already belongs to another symbol table.
        """
        # Checking if it's safe to include the object:
        if obj.symbol_table:
            raise ScopeError(u"%s already belongs to %s" % (obj.global_name,
                                                           obj.symbol_table))
        # NOTE(review): ``obj.symbol_table`` is always falsy at this point
        # (checked above), so only the membership test can trigger here.
        if obj in self.objects or obj.symbol_table:
            raise ScopeError(u"An equivalent of %s is already defined in %s" %
                             (obj, self))
        # It's safe to include it!
        obj.symbol_table = self
        self.objects.add(obj)
    def add_subtable(self, table):
        """
        Include ``table`` in the child tables of this symbol table.
        :param table: The symbol table to be added.
        :type table: :class:`SymbolTable`
        :raises booleano.exc.ScopeError: If ``table`` is already included or it
            already belongs to another symbol table.
        """
        # Checking if it's safe to include the sub-table:
        if table.symbol_table:
            raise ScopeError(u"%s already belongs to %s" %
                             (table, table.symbol_table))
        if table in self.subtables:
            raise ScopeError(u"An equivalent of %s is already available in %s" %
                             (table, self))
        # It's safe to include it!
        table.symbol_table = self
        self.subtables.add(table)
    def validate_scope(self):
        """
        Make sure there's no name clash in the symbol table.
        :raise booleano.exc.ScopeError: If a name clash in found, either in the
            global names or with the localized names.
        Users may want to run this in their test suite, instead of in
        production, for performance reasons.
        Note that it's perfectly valid for one object and one sub-table to
        have the same name in the parent symbol table.
        """
        # <--- Checking that there's no name clash among the global names
        unique_objects = set([obj.global_name for obj in self.objects])
        if len(unique_objects) != len(self.objects):
            raise ScopeError("Two or more objects in %s share the same global "
                             "name" % self)
        unique_tables = set([table.global_name for table in self.subtables])
        if len(unique_tables) != len(self.subtables):
            raise ScopeError("Two or more sub-tables in %s share the same "
                             "global name" % self)
        # <--- Checking that there's no name clash in the sub-tables
        for table in self.subtables:
            table.validate_scope()
        # <--- Checking that there's no name clash among the localized names
        # Collecting all the locales used:
        locales = set()
        for id_ in (self.objects | self.subtables):
            locales |= set(id_.names.keys())
        # Now let's see if any of them are duplicate:
        for locale in locales:
            # Checking the objects:
            used_object_names = set()
            for obj in self.objects:
                name = obj.get_localized_name(locale)
                if name in used_object_names:
                    raise ScopeError('The name "%s" is shared by two or more '
                                     'bindings in %s (locale: %s)' %
                                     (name, self, locale))
                used_object_names.add(name)
            # Checking the sub-tables:
            used_table_names = set()
            for table in self.subtables:
                name = table.get_localized_name(locale)
                if name in used_table_names:
                    raise ScopeError('The name "%s" is shared by two or more '
                                     'sub-tables in %s (locale: %s)' %
                                     (name, self, locale))
                used_table_names.add(name)
    def get_namespace(self, locale=None):
        """
        Extract the namespace for this symbol table in the ``locale``.
        :param locale: The locale of the namespace; if ``None``, the global
            names will be used instead.
        :param locale: basestring
        :return: The namespace in ``locale``.
        :rtype: :class:`booleano.parser.scope.Namespace`
        """
        objects = self._get_objects(locale)
        subnamespaces = self._get_subnamespaces(locale)
        return Namespace(objects, subnamespaces)
    def __unicode__(self):
        """
        Return the Unicode representation for this symbol table, including its
        ancestors.
        """
        ancestors = self._get_ancestors_global_names()
        names = u":".join(ancestors)
        return u"Symbol table %s" % names
    def __eq__(self, other):
        """
        Check that the ``other`` symbol table is equivalent to this one.
        Two tables are equivalent if they are equivalent identifiers
        (:meth:`_Identifier.__eq__`) and wrap the same objects and
        sub-tables.
        """
        same_id = super(SymbolTable, self).__eq__(other)
        return (same_id and
                hasattr(other, "subtables") and
                hasattr(other, "objects") and
                other.subtables == self.subtables and
                self.objects == other.objects)
    def __hash__(self):
        # Identity hash: tables are mutable, so value-based hashing is unsafe.
        return id(self)
    def _get_contents(self, locale):
        """Return the namespace for this symbol table in ``locale``."""
        return self.get_namespace(locale)
    def _get_objects(self, locale):
        """
        Return the objects available in this symbol table.
        :param locale: The locale to be used while resolving the names of the
            objects.
        :type locale: basestring
        :return: The operands in this table, in a dictionary whose keys
            are the names of the objects in ``locale``.
        :rtype: dict
        """
        objects = self.__extract_items__(self.objects, locale)
        return objects
    def _get_subnamespaces(self, locale):
        """
        Return the sub-tables available under this symbol table, turned into
        namespaces for the ``locale``.
        :param locale: The locale to be used while resolving the names of the
            sub-tables.
        :type locale: basestring
        :return: The namespaces for the sub-tables under this symbol table,
            in a dictionary whose keys are the namespace strings in
            ``locale``.
        :rtype: dict
        """
        subnamespaces = self.__extract_items__(self.subtables, locale)
        return subnamespaces
    def __extract_items__(self, items, locale):
        """
        Filter the contents of the ``items`` identifiers based on the
        ``locale``.
        :param items: A list of identifiers whose contents should be extracted.
        :type items: list
        :param locale: The locale to be used to filter the contents.
        :type locale: basestring or ``None``
        :return: The contents of each item in ``items``, in a dictionary whose
            keys are the names of such items in the ``locale``.
        :rtype: dict
        """
        extracted_items = {}
        if locale:
            # The items have to be extracted by their localized names:
            for item in items:
                localized_name = item.get_localized_name(locale)
                extracted_items[localized_name] = item._get_contents(locale)
        else:
            # We have to extract the items by their global names:
            for item in items:
                extracted_items[item.global_name] = item._get_contents(locale)
        return extracted_items
    def _get_ancestors_global_names(self):
        """
        Return the global names for the ancestors **and** the current
        symbol table's.
        :return: The list of names, from the topmost table to the current one.
        :rtype: list
        """
        # Recurse towards the root, then append this table's own name.
        if self.symbol_table:
            ancestors = self.symbol_table._get_ancestors_global_names()
        else:
            ancestors = []
        ancestors.append(self.global_name)
        return ancestors
class Namespace(object):
    """
    A namespace for a given locale.
    This is not aimed at end-users, it should only be used internally in
    Booleano.
    The parser only deals with this, not with the symbol table directly.
    A symbol table has one namespace per locale.
    """
    def __init__(self, objects, subnamespaces=None):
        """
        :param objects: The objects that belong to the table.
        :type objects: dict
        :param subnamespaces: The namespaces under this namespace, if any
            (defaults to no sub-namespaces).
        :type subnamespaces: dict
        """
        self.objects = objects
        # A fresh dict per instance: the previous ``subnamespaces={}`` default
        # was a single shared dict, so mutating one namespace's sub-namespaces
        # leaked into every other Namespace created without the argument.
        self.subnamespaces = {} if subnamespaces is None else subnamespaces
    def get_object(self, object_name, namespace_parts=None):
        """
        Return the object identified by ``object_name``, which is under the
        namespace whose names are ``namespace_parts``.
        :param object_name: The name of the object to be returned.
        :type object_name: basestring
        :param namespace_parts: The sub-namespace that contains the object
            identified by ``object_name``, represented by a list of names; or,
            ``None`` if the object is in the current namespace.
        :type namespace_parts: list
        :return: The requested object.
        :rtype: Operand
        :raises ScopeError: If the requested object doesn't exist in the
            namespace, or if the sub-namespace in ``namespace_parts`` doesn't
            exist.
        """
        object_name = object_name.lower()
        ns = self._get_subnamespace(namespace_parts)
        if ns is None:
            raise ScopeError(self._missing_object_msg(object_name,
                                                      namespace_parts))
        if object_name not in ns.objects and namespace_parts:
            # Fallback: ``object_name`` may actually index into an array
            # variable bound in the parent namespace (the last namespace part
            # names the array).  Truthiness (not ``is not None``) is checked
            # so an empty list cannot trigger an IndexError here; the parent
            # namespace is also guarded against being unresolvable.  The
            # import is deferred to avoid a circular module dependency.
            from booleano.operations import ArrayVariable
            parent_ns = self._get_subnamespace(namespace_parts[:-1])
            array_name = namespace_parts[-1].lower()
            if (parent_ns is not None and array_name in parent_ns.objects and
                    isinstance(parent_ns.objects[array_name], ArrayVariable)):
                var = parent_ns.objects[array_name]
                var.set_index(object_name)
                return var
        if object_name not in ns.objects:
            raise ScopeError(self._missing_object_msg(object_name,
                                                      namespace_parts))
        return ns.objects[object_name]
    @staticmethod
    def _missing_object_msg(object_name, namespace_parts):
        """Build the error message for an unresolvable object lookup."""
        msg = u'No such object "%s"' % object_name
        if namespace_parts:
            msg = u'%s in %s' % (msg, u":".join(namespace_parts))
        return msg
    def _get_subnamespace(self, namespace_parts):
        """
        Return the sub-namespace represented by the names in
        ``namespace_parts``.
        :param namespace_parts: The names that resolve a sub-namespace in this
            namespace.
        :type namespace_parts: list
        :return: The namespace represented by the names in
            ``namespace_parts`` or ``None`` if it's not found.
        :rtype: Namespace
        """
        if not namespace_parts:
            return self
        # Work on a copy so the caller's list is never mutated; ``list()``
        # replaces the old Python-2/3 version-dependent copy logic.
        remaining = list(namespace_parts)
        current_part = remaining.pop(0)
        if current_part not in self.subnamespaces:
            return None
        # It's been found!
        return self.subnamespaces[current_part]._get_subnamespace(remaining)
|
[
"booleano.exc.ScopeError",
"logging.getLogger"
] |
[((1634, 1653), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (1643, 1653), False, 'from logging import getLogger\n'), ((8018, 8095), 'booleano.exc.ScopeError', 'ScopeError', (["(u'%s already belongs to %s' % (obj.global_name, obj.symbol_table))"], {}), "(u'%s already belongs to %s' % (obj.global_name, obj.symbol_table))\n", (8028, 8095), False, 'from booleano.exc import ScopeError\n'), ((8202, 8275), 'booleano.exc.ScopeError', 'ScopeError', (["(u'An equivalent of %s is already defined in %s' % (obj, self))"], {}), "(u'An equivalent of %s is already defined in %s' % (obj, self))\n", (8212, 8275), False, 'from booleano.exc import ScopeError\n'), ((8826, 8895), 'booleano.exc.ScopeError', 'ScopeError', (["(u'%s already belongs to %s' % (table, table.symbol_table))"], {}), "(u'%s already belongs to %s' % (table, table.symbol_table))\n", (8836, 8895), False, 'from booleano.exc import ScopeError\n'), ((8955, 9032), 'booleano.exc.ScopeError', 'ScopeError', (["(u'An equivalent of %s is already available in %s' % (table, self))"], {}), "(u'An equivalent of %s is already available in %s' % (table, self))\n", (8965, 9032), False, 'from booleano.exc import ScopeError\n'), ((9852, 9925), 'booleano.exc.ScopeError', 'ScopeError', (["('Two or more objects in %s share the same global name' % self)"], {}), "('Two or more objects in %s share the same global name' % self)\n", (9862, 9925), False, 'from booleano.exc import ScopeError\n'), ((10078, 10154), 'booleano.exc.ScopeError', 'ScopeError', (["('Two or more sub-tables in %s share the same global name' % self)"], {}), "('Two or more sub-tables in %s share the same global name' % self)\n", (10088, 10154), False, 'from booleano.exc import ScopeError\n'), ((17018, 17033), 'booleano.exc.ScopeError', 'ScopeError', (['msg'], {}), '(msg)\n', (17028, 17033), False, 'from booleano.exc import ScopeError\n'), ((17654, 17669), 'booleano.exc.ScopeError', 'ScopeError', (['msg'], {}), '(msg)\n', (17664, 17669), False, 
'from booleano.exc import ScopeError\n'), ((10754, 10866), 'booleano.exc.ScopeError', 'ScopeError', (['(\'The name "%s" is shared by two or more bindings in %s (locale: %s)\' % (\n name, self, locale))'], {}), '(\n \'The name "%s" is shared by two or more bindings in %s (locale: %s)\' %\n (name, self, locale))\n', (10764, 10866), False, 'from booleano.exc import ScopeError\n'), ((11116, 11230), 'booleano.exc.ScopeError', 'ScopeError', (['(\'The name "%s" is shared by two or more sub-tables in %s (locale: %s)\' % (\n name, self, locale))'], {}), '(\n \'The name "%s" is shared by two or more sub-tables in %s (locale: %s)\' %\n (name, self, locale))\n', (11126, 11230), False, 'from booleano.exc import ScopeError\n')]
|
from detectron2.data.datasets.register_coco import register_coco_instances
import os
# The 20 PASCAL VOC object categories with their 1-based dataset ids.
categories = [
    {'id': 1, 'name': 'aeroplane'},
    {'id': 2, 'name': 'bicycle'},
    {'id': 3, 'name': 'bird'},
    {'id': 4, 'name': 'boat'},
    {'id': 5, 'name': 'bottle'},
    {'id': 6, 'name': 'bus'},
    {'id': 7, 'name': 'car'},
    {'id': 8, 'name': 'cat'},
    {'id': 9, 'name': 'chair'},
    {'id': 10, 'name': 'cow'},
    {'id': 11, 'name': 'diningtable'},
    {'id': 12, 'name': 'dog'},
    {'id': 13, 'name': 'horse'},
    {'id': 14, 'name': 'motorbike'},
    {'id': 15, 'name': 'person'},
    {'id': 16, 'name': 'pottedplant'},
    {'id': 17, 'name': 'sheep'},
    {'id': 18, 'name': 'sofa'},
    {'id': 19, 'name': 'train'},
    {'id': 20, 'name': 'tvmonitor'},
]
def _get_builtin_metadata():
thing_dataset_id_to_contiguous_id = {
x['id']: i for i, x in enumerate(sorted(categories, key=lambda x: x['id']))}
thing_classes = [x['name'] for x in sorted(categories, key=lambda x: x['id'])]
return {
"thing_dataset_id_to_contiguous_id": thing_dataset_id_to_contiguous_id,
"thing_classes": thing_classes}
# Split name -> (image root, COCO-format annotation json); both paths are
# relative to the "datasets" directory unless the json is a full URI.
_PREDEFINED_SPLITS_VOC = {
    "voc_cocoformat_test": ("voc/images/", "voc/annotations/pascal_test2007.json"),
}
# Register each split with detectron2 at import time. A json_file containing
# "://" is treated as an already-absolute URI and used as-is.
for key, (image_root, json_file) in _PREDEFINED_SPLITS_VOC.items():
    register_coco_instances(
        key,
        _get_builtin_metadata(),
        os.path.join("datasets", json_file) if "://" not in json_file else json_file,
        os.path.join("datasets", image_root),
    )
|
[
"os.path.join"
] |
[((1537, 1573), 'os.path.join', 'os.path.join', (['"""datasets"""', 'image_root'], {}), "('datasets', image_root)\n", (1549, 1573), False, 'import os\n'), ((1450, 1485), 'os.path.join', 'os.path.join', (['"""datasets"""', 'json_file'], {}), "('datasets', json_file)\n", (1462, 1485), False, 'import os\n')]
|
from django import template
register = template.Library()
@register.inclusion_tag('bookmarks/_tags/collections_modal.html', takes_context=True)
def collections_modal(context):
    """Render the collections modal with every collection of the current user.

    Expects ``user`` in the template context; if ``user`` is absent or None,
    the ``user.collections`` attribute access below will raise.
    """
    user = context.get('user')
    return {'collections': user.collections.all()}
|
[
"django.template.Library"
] |
[((40, 58), 'django.template.Library', 'template.Library', ([], {}), '()\n', (56, 58), False, 'from django import template\n')]
|
from django.urls import path, include
from auth_demos.auth_app.views import UserRegistrationView, UserLoginView, UserLogoutView, RestrictedView
# Routes for the auth demo: registration, login, logout, and a view that is
# restricted to authenticated users.
urlpatterns = [
    path('register/', UserRegistrationView.as_view(), name='register user'),
    path('login/', UserLoginView.as_view(), name='login user'),
    path('logout/', UserLogoutView.as_view(), name='logout user'),
    path('restricted/', RestrictedView.as_view(), name='restricted'),
]
|
[
"auth_demos.auth_app.views.UserRegistrationView.as_view",
"auth_demos.auth_app.views.UserLoginView.as_view",
"auth_demos.auth_app.views.RestrictedView.as_view",
"auth_demos.auth_app.views.UserLogoutView.as_view"
] |
[((184, 214), 'auth_demos.auth_app.views.UserRegistrationView.as_view', 'UserRegistrationView.as_view', ([], {}), '()\n', (212, 214), False, 'from auth_demos.auth_app.views import UserRegistrationView, UserLoginView, UserLogoutView, RestrictedView\n'), ((258, 281), 'auth_demos.auth_app.views.UserLoginView.as_view', 'UserLoginView.as_view', ([], {}), '()\n', (279, 281), False, 'from auth_demos.auth_app.views import UserRegistrationView, UserLoginView, UserLogoutView, RestrictedView\n'), ((323, 347), 'auth_demos.auth_app.views.UserLogoutView.as_view', 'UserLogoutView.as_view', ([], {}), '()\n', (345, 347), False, 'from auth_demos.auth_app.views import UserRegistrationView, UserLoginView, UserLogoutView, RestrictedView\n'), ((394, 418), 'auth_demos.auth_app.views.RestrictedView.as_view', 'RestrictedView.as_view', ([], {}), '()\n', (416, 418), False, 'from auth_demos.auth_app.views import UserRegistrationView, UserLoginView, UserLogoutView, RestrictedView\n')]
|
import os
import re
import json
def getnewname(oldname):
    """Normalize an AV-style video filename to ``CODE-NNN[-suffix].ext``.

    Matches ``<3-4 letters><optional '-'|'00'|'_'><3 digits><optional C/R
    marker>`` in the file stem, upper-cases the letter code and joins the
    parts with dashes. Returns ``oldname`` unchanged when the stem does not
    match or when an error occurs (best-effort, never raises).
    """
    try:
        uppath, filename = os.path.split(oldname)
        stem, exname = os.path.splitext(filename)
        pattern = re.compile(r"([a-zA-Z]{3,4})(-|00|_|)([0-9]{3})((-|_|)[CcRr]){0,}")
        match = pattern.search(stem)
        if match is None:
            return oldname
        av_name = match.group(1).upper()
        av_index = match.group(3)
        # Optional Chinese-sub / re-cut marker; may include its separator char.
        av_sub = match.group(4)
        if av_sub:
            newname = av_name + '-' + av_index + '-' + av_sub + exname
        else:
            newname = av_name + '-' + av_index + exname
        return os.path.join(uppath, newname)
    except (TypeError, AttributeError, re.error):
        # Narrowed from a bare `except:`; keep the original best-effort
        # behavior of falling back to the untouched name.
        print('AVre error')
        return oldname
def getallfile(path):
    """Recursively collect every file path below *path*.

    Directories are walked depth-first in ``os.listdir`` order; only
    regular-file paths are returned.
    """
    collected = []

    def _walk(current):
        for entry in os.listdir(current):
            full = os.path.join(current, entry)
            if os.path.isfile(full):
                collected.append(full)
            else:
                _walk(full)

    _walk(path)
    return collected
def moveCname(namelog):
    """Disambiguate duplicate rename targets in *namelog*.

    When several [old, new] entries map to the same new name, every colliding
    target gets a ``-<k>`` suffix (k = 0, 1, ...) inserted before the
    extension so later renames do not overwrite earlier ones. Mutates and
    returns *namelog*.
    """
    def nameadd(name,add):
        # Insert "-<add>" between the file stem and its extension.
        upname, exname = os.path.splitext(name)
        name = upname+'-'+str(add)+exname
        return name
    newnamelist = [x[1] for x in namelog]
    cname = {}
    # Record the indices of every duplicated target name (quadratic scan;
    # fine for the small rename logs this script produces).
    for index,newname in enumerate(newnamelist):
        if(newnamelist.count(newname) == 1):
            pass
        else:
            if newname in cname.keys():
                pass
            else:
                cname[newname] = []
                for i,n in enumerate(newnamelist):
                    if(newname == n):
                        cname[newname].append(i)
    # Rewrite each colliding target with a unique numeric suffix.
    for basefilename in cname.keys():
        for ifile in range(len(cname[basefilename])):
            namebeadded = nameadd(basefilename,ifile)
            newnamelist[cname[basefilename][ifile]] = namebeadded
    # Copy the disambiguated names back into the (mutated) input log.
    for index, newname in enumerate(newnamelist):
        namelog[index][1] = newname
    return namelog
def rename(logs):
    """Apply a list of ``[old, new]`` rename operations, best-effort.

    Each entry is attempted independently: a failed rename is printed and
    skipped so the remaining entries still run. ``os.renames`` also creates
    intermediate target directories and prunes emptied source directories.
    """
    for log in logs:  # unused enumerate index removed
        try:
            os.renames(log[0], log[1])
        except OSError:
            # Narrowed from a bare `except:`; rename failures are OS-level.
            print('ERROR:', log)
def savejson(log):
    """Serialize *log* as JSON into 'log.josn' in the current directory.

    NOTE: the file name 'log.josn' (sic) is kept as-is; other tooling may
    already depend on it.
    """
    target = os.path.join(os.getcwd(), 'log.josn')
    serialized = json.dumps(log)
    with open(target, 'w', encoding='utf-8') as handle:
        handle.write(serialized)
if __name__ == "__main__":
    # Scan the current working directory tree, normalize video file names,
    # apply the renames, and persist the rename log as JSON.
    path = os.getcwd()
    allfile = getallfile(os.getcwd())
    log = []
    for oldname in allfile:
        houzhui = os.path.splitext(oldname)[1]  # file extension
        if(houzhui in [".mp4",".avi",".wmv",".rmvb"]):
            newname = getnewname(str(oldname))
            if(newname != oldname):
                log.append([oldname,newname])
    # De-duplicate colliding targets before actually renaming anything.
    log = moveCname(log)
    rename(log)
    savejson(log)
|
[
"os.getcwd",
"json.dumps",
"os.path.splitext",
"os.renames",
"os.path.split",
"os.path.join",
"os.listdir",
"re.compile"
] |
[((2229, 2244), 'json.dumps', 'json.dumps', (['log'], {}), '(log)\n', (2239, 2244), False, 'import json\n'), ((2389, 2400), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2398, 2400), False, 'import os\n'), ((96, 118), 'os.path.split', 'os.path.split', (['oldname'], {}), '(oldname)\n', (109, 118), False, 'import os\n'), ((148, 174), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (164, 174), False, 'import os\n'), ((187, 253), 're.compile', 're.compile', (['"""([a-zA-Z]{3,4})(-|00|_|)([0-9]{3})((-|_|)[CcRr]){0,}"""'], {}), "('([a-zA-Z]{3,4})(-|00|_|)([0-9]{3})((-|_|)[CcRr]){0,}')\n", (197, 253), False, 'import re\n'), ((848, 864), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (858, 864), False, 'import os\n'), ((1191, 1213), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (1207, 1213), False, 'import os\n'), ((2426, 2437), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2435, 2437), False, 'import os\n'), ((654, 683), 'os.path.join', 'os.path.join', (['uppath', 'newname'], {}), '(uppath, newname)\n', (666, 683), False, 'import os\n'), ((2121, 2147), 'os.renames', 'os.renames', (['log[0]', 'log[1]'], {}), '(log[0], log[1])\n', (2131, 2147), False, 'import os\n'), ((2498, 2523), 'os.path.splitext', 'os.path.splitext', (['oldname'], {}), '(oldname)\n', (2514, 2523), False, 'import os\n'), ((917, 938), 'os.path.join', 'os.path.join', (['path', 'i'], {}), '(path, i)\n', (929, 938), False, 'import os\n'), ((2272, 2283), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2281, 2283), False, 'import os\n'), ((965, 986), 'os.path.join', 'os.path.join', (['path', 'i'], {}), '(path, i)\n', (977, 986), False, 'import os\n'), ((1033, 1054), 'os.path.join', 'os.path.join', (['path', 'i'], {}), '(path, i)\n', (1045, 1054), False, 'import os\n')]
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import sys
import time
from cinderclient import client as cinderclient
from cinderclient import exceptions as cinder_exections
def cinder_client(session):
    """Return a Cinder block-storage v2 client bound to *session*."""
    return cinderclient.Client(2, session=session)
def get_volume(cinder, name_or_id):
    """Resolve a volume by id first, falling back to lookup by name.

    Raises the cinderclient NotFound exception if neither lookup matches.
    """
    try:
        volume = cinder.volumes.get(name_or_id)
        return volume
    except cinder_exections.NotFound:
        return cinder.volumes.find(name=name_or_id)
def wait_instance(
    cinder,
    instance,
    timeout=300,
    # BUG FIX: was ('creating') — a plain *string*, so the `not in` test below
    # did substring matching (any status that is a substring of "creating"
    # would pass). A 1-tuple gives exact membership semantics.
    transition_states=('creating',),
    target_states=('in-use', 'available', 'downloading'),
    status_attr='status',
):
    """Poll a Cinder volume until its status reaches one of *target_states*.

    Re-fetches the volume every 5 seconds. Raises RuntimeError if the status
    leaves *transition_states* without reaching a target state, or if
    *timeout* seconds elapse.
    """
    _timeout = 0
    status = getattr(instance, status_attr)
    while status not in target_states:
        if status not in transition_states:
            raise RuntimeError(
                'Fail to volume "%s": %s (%s)' % (
                    target_states,
                    instance.name,
                    status
                )
            )
        sys.stderr.write(
            'Waiting volume %s: %s (%s)\n' % (
                target_states,
                instance.name,
                status)
        )
        time.sleep(5)
        _timeout += 5
        if _timeout > timeout:
            raise RuntimeError("Timeout!")
        instance = cinder.volumes.get(instance.id)
        status = getattr(instance, status_attr)
|
[
"cinderclient.client.Client",
"sys.stderr.write",
"time.sleep"
] |
[((782, 821), 'cinderclient.client.Client', 'cinderclient.Client', (['(2)'], {'session': 'session'}), '(2, session=session)\n', (801, 821), True, 'from cinderclient import client as cinderclient\n'), ((1585, 1679), 'sys.stderr.write', 'sys.stderr.write', (["('Waiting volume %s: %s (%s)\\n' % (target_states, instance.name, status))"], {}), "('Waiting volume %s: %s (%s)\\n' % (target_states, instance.\n name, status))\n", (1601, 1679), False, 'import sys\n'), ((1754, 1767), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1764, 1767), False, 'import time\n')]
|
# ------------------------------------------------------------------------
# Copyright (c) Hitachi, Ltd. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 [see LICENSE for details]
# ------------------------------------------------------------------------
import argparse
import torch
from torch import nn
def get_args():
    """Parse command-line options for the checkpoint-conversion script.

    --load_path (required): source checkpoint file
    --save_path (required): destination checkpoint file
    --dataset: target dataset, 'hico' by default
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--load_path', type=str, required=True)
    parser.add_argument('--save_path', type=str, required=True)
    parser.add_argument('--dataset', type=str, default='hico')
    return parser.parse_args()
def main(args):
    """Convert a plain detection checkpoint into an HOI-detector checkpoint.

    Loads the checkpoint at ``args.load_path``, duplicates the shared bbox
    head into subject- and object-specific heads, restricts the classifier
    to the kept COCO categories, optionally inserts an extra V-COCO logit,
    and writes the result to ``args.save_path``.
    """
    ps = torch.load(args.load_path)
    # COCO category ids kept for the object classifier (the COCO id space
    # has gaps, hence the explicit list).
    obj_ids = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
               14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
               24, 25, 27, 28, 31, 32, 33, 34, 35, 36,
               37, 38, 39, 40, 41, 42, 43, 44, 46, 47,
               48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
               58, 59, 60, 61, 62, 63, 64, 65, 67, 70,
               72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
               82, 84, 85, 86, 87, 88, 89, 90]
    # For no pair
    obj_ids.append(91)
    # Clone the shared 3-layer bbox MLP into a subject-specific copy ...
    ps['model']['sub_bbox_embed.layers.0.weight'] = ps['model']['bbox_embed.layers.0.weight'].clone()
    ps['model']['sub_bbox_embed.layers.0.bias'] = ps['model']['bbox_embed.layers.0.bias'].clone()
    ps['model']['sub_bbox_embed.layers.1.weight'] = ps['model']['bbox_embed.layers.1.weight'].clone()
    ps['model']['sub_bbox_embed.layers.1.bias'] = ps['model']['bbox_embed.layers.1.bias'].clone()
    ps['model']['sub_bbox_embed.layers.2.weight'] = ps['model']['bbox_embed.layers.2.weight'].clone()
    ps['model']['sub_bbox_embed.layers.2.bias'] = ps['model']['bbox_embed.layers.2.bias'].clone()
    # ... and an object-specific copy.
    ps['model']['obj_bbox_embed.layers.0.weight'] = ps['model']['bbox_embed.layers.0.weight'].clone()
    ps['model']['obj_bbox_embed.layers.0.bias'] = ps['model']['bbox_embed.layers.0.bias'].clone()
    ps['model']['obj_bbox_embed.layers.1.weight'] = ps['model']['bbox_embed.layers.1.weight'].clone()
    ps['model']['obj_bbox_embed.layers.1.bias'] = ps['model']['bbox_embed.layers.1.bias'].clone()
    ps['model']['obj_bbox_embed.layers.2.weight'] = ps['model']['bbox_embed.layers.2.weight'].clone()
    ps['model']['obj_bbox_embed.layers.2.bias'] = ps['model']['bbox_embed.layers.2.bias'].clone()
    # Keep only the rows of the class head that correspond to obj_ids.
    ps['model']['obj_class_embed.weight'] = ps['model']['class_embed.weight'].clone()[obj_ids]
    ps['model']['obj_class_embed.bias'] = ps['model']['class_embed.bias'].clone()[obj_ids]
    if args.dataset == 'vcoco':
        # Insert one freshly initialized logit just before the final
        # ("no pair") row for the extra V-COCO object class.
        l = nn.Linear(ps['model']['obj_class_embed.weight'].shape[1], 1)
        l.to(ps['model']['obj_class_embed.weight'].device)
        ps['model']['obj_class_embed.weight'] = torch.cat((
            ps['model']['obj_class_embed.weight'][:-1], l.weight, ps['model']['obj_class_embed.weight'][[-1]]))
        ps['model']['obj_class_embed.bias'] = torch.cat(
            (ps['model']['obj_class_embed.bias'][:-1], l.bias, ps['model']['obj_class_embed.bias'][[-1]]))
    torch.save(ps, args.save_path)
if __name__ == '__main__':
args = get_args()
main(args)
|
[
"argparse.ArgumentParser",
"torch.load",
"torch.cat",
"torch.save",
"torch.nn.Linear"
] |
[((358, 383), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (381, 383), False, 'import argparse\n'), ((697, 723), 'torch.load', 'torch.load', (['args.load_path'], {}), '(args.load_path)\n', (707, 723), False, 'import torch\n'), ((3093, 3123), 'torch.save', 'torch.save', (['ps', 'args.save_path'], {}), '(ps, args.save_path)\n', (3103, 3123), False, 'import torch\n'), ((2632, 2692), 'torch.nn.Linear', 'nn.Linear', (["ps['model']['obj_class_embed.weight'].shape[1]", '(1)'], {}), "(ps['model']['obj_class_embed.weight'].shape[1], 1)\n", (2641, 2692), False, 'from torch import nn\n'), ((2800, 2915), 'torch.cat', 'torch.cat', (["(ps['model']['obj_class_embed.weight'][:-1], l.weight, ps['model'][\n 'obj_class_embed.weight'][[-1]])"], {}), "((ps['model']['obj_class_embed.weight'][:-1], l.weight, ps['model'\n ]['obj_class_embed.weight'][[-1]]))\n", (2809, 2915), False, 'import torch\n'), ((2970, 3079), 'torch.cat', 'torch.cat', (["(ps['model']['obj_class_embed.bias'][:-1], l.bias, ps['model'][\n 'obj_class_embed.bias'][[-1]])"], {}), "((ps['model']['obj_class_embed.bias'][:-1], l.bias, ps['model'][\n 'obj_class_embed.bias'][[-1]]))\n", (2979, 3079), False, 'import torch\n')]
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.nova import utils as nova_utils
from rally.task import types
from rally.task import validation
class BrowbeatPlugin(nova_utils.NovaScenario, scenario.OpenStackScenario):
    """Rally scenario plugin: boot a Nova server and leave it running."""

    @types.convert(image={"type": "glance_image"},
                  flavor={"type": "nova_flavor"})
    @validation.image_valid_on_flavor("flavor", "image")
    @validation.required_openstack(users=True)
    @scenario.configure(context={})
    def nova_boot_persist(self, image, flavor, **kwargs):
        """Boot a server from *image*/*flavor* without any cleanup step."""
        self._boot_server(image, flavor)
|
[
"rally.plugins.openstack.scenario.configure",
"rally.task.validation.image_valid_on_flavor",
"rally.task.validation.required_openstack",
"rally.task.types.convert"
] |
[((821, 898), 'rally.task.types.convert', 'types.convert', ([], {'image': "{'type': 'glance_image'}", 'flavor': "{'type': 'nova_flavor'}"}), "(image={'type': 'glance_image'}, flavor={'type': 'nova_flavor'})\n", (834, 898), False, 'from rally.task import types\n'), ((923, 974), 'rally.task.validation.image_valid_on_flavor', 'validation.image_valid_on_flavor', (['"""flavor"""', '"""image"""'], {}), "('flavor', 'image')\n", (955, 974), False, 'from rally.task import validation\n'), ((980, 1021), 'rally.task.validation.required_openstack', 'validation.required_openstack', ([], {'users': '(True)'}), '(users=True)\n', (1009, 1021), False, 'from rally.task import validation\n'), ((1027, 1057), 'rally.plugins.openstack.scenario.configure', 'scenario.configure', ([], {'context': '{}'}), '(context={})\n', (1045, 1057), False, 'from rally.plugins.openstack import scenario\n')]
|
"""
Django settings for project project.
Generated by 'django-admin startproject' using Django 2.1.
SECURITY WARNING :: SEE CHECKLIST BEFORE DEPLOYING IN PRODUCTION
https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
import environ
# build paths inside the project like this: os.path.join(BASE_DIR, ...)
# SETTINGS_DIR is the package containing this settings module; BASE_DIR is
# one level up (the project root).
SETTINGS_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BASE_DIR = os.path.dirname(SETTINGS_DIR)
# load default .env file (casting, default)
env = environ.Env(
    DEBUG=(bool, False),
    DB_PORT=(int, 5432),
)
# Environment files live under <project root>/env; read both the app and the
# database env files into the process environment.
dotenv_dir = os.path.join(BASE_DIR, 'env')
for env_name in [ 'django_app.env', 'django_db.env' ]:
    env_file = os.path.join(dotenv_dir, env_name)
    environ.Env.read_env(env_file=env_file)
SECRET_KEY = env('SECRET_KEY')
DEBUG = env('DEBUG')
# Alias kept for code that reads DJANGO_DEBUG instead of DEBUG.
DJANGO_DEBUG = DEBUG
CSRF_TRUSTED_ORIGINS = [ 'localhost', 'iris.devincive.com' ]
# NOTE(review): '*' accepts any Host header; tighten for production.
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django_extensions',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'whitenoise.runserver_nostatic', # overrides django behavior, to use whitenoise instead
'django.contrib.staticfiles',
'rest_framework.authtoken',
'rest_framework',
'backend.api',
'djoser',
]
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': [
'rest_framework.authentication.TokenAuthentication',
'rest_framework.authentication.SessionAuthentication',
],
# lock API for authenticated users only
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.IsAuthenticated',
],
# pagination
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 50
}
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'backend.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['dist'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'verbose': {
'format':
'%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
},
'handlers': {
'console': {
'level': 'NOTSET',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
}
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'NOTSET',
},
'django.request': {
'handlers': ['console'],
'propagate': False,
'level': 'ERROR'
}
}
}
# WSGI_APPLICATION = 'backend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': env('DB_NAME'),
'USER': env('DB_USER'),
'PASSWORD': env('DB_PASS'),
'HOST': env('DB_HOST'),
'PORT': env('DB_PORT'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = env('TIMEZONE')
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
# When Vue Builds, path will be `/static/css/...` so we will have Django Serve
# In Production, it's recommended use an alternative approach such as:
# http://whitenoise.evans.io/en/stable/django.html?highlight=django
MIDDLEWARE_CLASSES = (
'whitenoise.middleware.WhiteNoiseMiddleware',
)
# Static files
STATICFILES_DIRS = []
STATIC_HOST = os.environ.get('DJANGO_STATIC_HOST', '') # modify host for CDN hosting,
STATIC_URL = os.path.join(STATIC_HOST, 'static') + '/'
STATIC_ROOT = os.path.join(BASE_DIR, 'dist', 'static') # static files in the same location as webpack build files
DATASET_ROOT = os.path.join(STATIC_ROOT, 'data', 'images') # dataset files
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# Insert Whitenoise Middleware at top but below Security Middleware
# MIDDLEWARE.insert(1, 'whitenoise.middleware.WhiteNoiseMiddleware',)
# http://whitenoise.evans.io/en/stable/django.html#make-sure-staticfiles-is-configured-correctly
|
[
"os.path.abspath",
"os.path.dirname",
"environ.Env.read_env",
"os.environ.get",
"os.path.join",
"environ.Env"
] |
[((622, 651), 'os.path.dirname', 'os.path.dirname', (['SETTINGS_DIR'], {}), '(SETTINGS_DIR)\n', (637, 651), False, 'import os\n'), ((703, 756), 'environ.Env', 'environ.Env', ([], {'DEBUG': '(bool, False)', 'DB_PORT': '(int, 5432)'}), '(DEBUG=(bool, False), DB_PORT=(int, 5432))\n', (714, 756), False, 'import environ\n'), ((781, 810), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""env"""'], {}), "(BASE_DIR, 'env')\n", (793, 810), False, 'import os\n'), ((5375, 5415), 'os.environ.get', 'os.environ.get', (['"""DJANGO_STATIC_HOST"""', '""""""'], {}), "('DJANGO_STATIC_HOST', '')\n", (5389, 5415), False, 'import os\n'), ((5538, 5578), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""dist"""', '"""static"""'], {}), "(BASE_DIR, 'dist', 'static')\n", (5550, 5578), False, 'import os\n'), ((5664, 5707), 'os.path.join', 'os.path.join', (['STATIC_ROOT', '"""data"""', '"""images"""'], {}), "(STATIC_ROOT, 'data', 'images')\n", (5676, 5707), False, 'import os\n'), ((881, 915), 'os.path.join', 'os.path.join', (['dotenv_dir', 'env_name'], {}), '(dotenv_dir, env_name)\n', (893, 915), False, 'import os\n'), ((920, 959), 'environ.Env.read_env', 'environ.Env.read_env', ([], {'env_file': 'env_file'}), '(env_file=env_file)\n', (940, 959), False, 'import environ\n'), ((5474, 5509), 'os.path.join', 'os.path.join', (['STATIC_HOST', '"""static"""'], {}), "(STATIC_HOST, 'static')\n", (5486, 5509), False, 'import os\n'), ((583, 608), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (598, 608), False, 'import os\n')]
|
import BaseHTTPServer, SimpleHTTPServer
import ssl
## Variables you can modify
#openssl req -x509 -newkey rsa:4096 -keyout server1.example.com.key -out server1.example.com.pem -days 365 -nodes
bind_to_address = ''  # '' = listen on all interfaces
server_port = 8080
ssl_key_file = "/Users/benjaminbales/Documents/GoTeachMe/ProductDev/WebXR/server1.example.com.key"
ssl_certificate_file = "/Users/benjaminbales/Documents/GoTeachMe/ProductDev/WebXR/server1.example.com.pem"
## Don't modify anything below
# NOTE(review): BaseHTTPServer/SimpleHTTPServer are Python 2 modules
# (http.server in Python 3), and ssl.wrap_socket is deprecated in favor of
# SSLContext.wrap_socket — this script only runs on legacy Python 2.
httpd = BaseHTTPServer.HTTPServer((bind_to_address, server_port), SimpleHTTPServer.SimpleHTTPRequestHandler)
httpd.socket = ssl.wrap_socket (httpd.socket, server_side=True,
        keyfile=ssl_key_file,
        certfile=ssl_certificate_file)
httpd.serve_forever()
|
[
"BaseHTTPServer.HTTPServer",
"ssl.wrap_socket"
] |
[((489, 594), 'BaseHTTPServer.HTTPServer', 'BaseHTTPServer.HTTPServer', (['(bind_to_address, server_port)', 'SimpleHTTPServer.SimpleHTTPRequestHandler'], {}), '((bind_to_address, server_port), SimpleHTTPServer.\n SimpleHTTPRequestHandler)\n', (514, 594), False, 'import BaseHTTPServer, SimpleHTTPServer\n'), ((605, 709), 'ssl.wrap_socket', 'ssl.wrap_socket', (['httpd.socket'], {'server_side': '(True)', 'keyfile': 'ssl_key_file', 'certfile': 'ssl_certificate_file'}), '(httpd.socket, server_side=True, keyfile=ssl_key_file,\n certfile=ssl_certificate_file)\n', (620, 709), False, 'import ssl\n')]
|
"""Download a copy of the CAISO curtailment data.
Oversupply data can be found at the following URL:
https://www.caiso.com/informed/Pages/ManagingOversupply.aspx
"""
import requests
from pathlib import Path
from loguru import logger
from urllib.parse import urlparse
from src.conf import settings
OUTPUT_DIR = settings.DATA_DIR / "raw/caiso/"
base_url = "https://www.caiso.com/Documents/{}"
def generate_urls():
    """Yield the CAISO curtailment workbook URLs covering May 2014 - 2020.

    Two legacy multi-month workbooks are followed by one per-year workbook
    for 2018 through 2020.
    """
    legacy_workbooks = (
        "ProductionAndCurtailmentsData-May1_2014-May31_2017.xlsx",
        "ProductionAndCurtailmentsData-Jun1_2017-Dec31_2017.xlsx",
    )
    for workbook in legacy_workbooks:
        yield base_url.format(workbook)
    for year in (2018, 2019, 2020):
        yield base_url.format(f"ProductionAndCurtailmentsData_{year}.xlsx")
def main():
    """Download each CAISO curtailment workbook into OUTPUT_DIR."""
    for url in generate_urls():
        logger.info("Downloading CAISO Curtailment file at: {url}", url=url)
        # Local file name = last path segment of the URL.
        fn = Path(urlparse(url).path).name
        with requests.get(url) as response:
            response.raise_for_status()
            fp = OUTPUT_DIR / fn
            with open(fp, "wb") as fh:
                # NOTE(review): iter_content() with no chunk_size streams in
                # very small chunks; a larger chunk_size would be faster.
                for chunk in response.iter_content():
                    fh.write(chunk)
if __name__ == "__main__":
OUTPUT_DIR.mkdir(exist_ok=True)
main()
|
[
"loguru.logger.info",
"urllib.parse.urlparse",
"requests.get"
] |
[((840, 908), 'loguru.logger.info', 'logger.info', (['"""Downloading CAISO Curtailment file at: {url}"""'], {'url': 'url'}), "('Downloading CAISO Curtailment file at: {url}', url=url)\n", (851, 908), False, 'from loguru import logger\n'), ((965, 982), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (977, 982), False, 'import requests\n'), ((927, 940), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (935, 940), False, 'from urllib.parse import urlparse\n')]
|
"""This is a cog for a discord.py bot.
It adds Lamp
"""
from discord.ext import commands
class Lamp(commands.Cog, command_attrs=dict(hidden=True)):
    """Hidden cog toggling a Tuya smart lamp via the vendor cloud API.

    Both subcommands POST pre-captured, pre-signed requests to
    a1.tuyaus.com using the bot's shared aiohttp session.
    """
    def __init__(self, client):
        # Bot instance; client.session (aiohttp) is used for the API calls.
        self.client = client
    @commands.group(
        name='lamp',
        hidden=True,
        invoke_without_command=True,
    )
    async def lamp(self, ctx):
        """Commands to control the live stream integration"""
        await ctx.send_help('lamp')
    @lamp.command(
        name='off',
    )
    async def lamp_off(self, ctx):
        """Turn the lamp off (publishes dp {"1": false} for the device)."""
        # NOTE(review): sign/requestId/time query params and the sid below are
        # hard-coded captures that presumably expire — confirm they still work.
        url = 'https://a1.tuyaus.com/api.json?appVersion=3.13.0&appRnVersion=5.18&channel=oem&sign=47e07d9cf53bbab369fc504760c8d3752f0f7c2f8a56fe8c63f28c99d7bb8e1c&platform=ONEPLUS%20A5000&requestId=7c696d1e-8579-4871-b271-71b6a3a093d5&lang=en&a=tuya.m.device.dp.publish&clientId=ekmnwp9f5pnh3trdtpgy&osSystem=9&os=Android&timeZoneId=America%2FChicago&ttid=sdk_tuya%40ekmnwp9f5pnh3trdtpgy&et=0.0.1&v=1.0&sdkVersion=3.13.0&time=1572717891'
        headers = {
            'User-Agent':'TY-UA=APP/Android/3.13.0/SDK/3.13.0',
            'Content-Type':'application/x-www-form-urlencoded',
            'Content-Length':'260',
            'Host':'a1.tuyaus.com',
            'Connection':'Keep-Alive',
            'Accept-Encoding':'gzip',
        }
        data = {
            'postData':'{"devId":"06200623b4e62d1a196d","dps":"{\\"1\\":false}","gwId":"06200623b4e62d1a196d"}',
            'deviceId':'0cbe6a9f082da9d8ad9607677542561f46adb4592222',
            'sid':'az152789n0645407g6y4cy235e9cec2811a8b93caefedeea3c2ce5a8',
        }
        async with self.client.session.post(url, headers=headers, data=data) as response:
            res = await response.json()
            print(res)
            if res['status'] == 'ok':
                await ctx.send('Success')
    @lamp.command(
        name='on',
    )
    async def lamp_on(self, ctx):
        """Turn the lamp on (publishes dp {"1": true} for the device)."""
        print('on')
        url = 'https://a1.tuyaus.com/api.json?appVersion=3.13.0&appRnVersion=5.18&channel=oem&sign=a8a0a9914c77dc5d01f2826a2588bb25151a1d9b46688223b10586a3fc56a4c7&platform=ONEPLUS%20A5000&requestId=3a891769-255a-4a55-971a-551df700252f&lang=en&a=tuya.m.device.dp.publish&clientId=ekmnwp9f5pnh3trdtpgy&osSystem=9&os=Android&timeZoneId=America%2FChicago&ttid=sdk_tuya%40ekmnwp9f5pnh3trdtpgy&et=0.0.1&v=1.0&sdkVersion=3.13.0&time=1572717894'
        headers = {
            'User-Agent':'TY-UA=APP/Android/3.13.0/SDK/3.13.0',
            'Content-Type':'application/x-www-form-urlencoded',
            'Content-Length':'259',
            'Host':'a1.tuyaus.com',
            'Connection':'Keep-Alive',
            'Accept-Encoding':'gzip',
        }
        data = {
            'postData':'{"devId":"06200623b4e62d1a196d","dps":"{\\"1\\":true}","gwId":"06200623b4e62d1a196d"}',
            'deviceId':'0cbe6a9f082da9d8ad9607677542561f46adb4592222',
            'sid':'az152789n0645407g6y4cy235e9cec2811a8b93caefedeea3c2ce5a8',
        }
        print('sending')
        async with self.client.session.post(url, headers=headers, data=data) as response:
            res = await response.json()
            print(res)
            if res['status'] == 'ok':
                await ctx.send('Success')
def setup(client):
    """This is called when the cog is loaded via load_extension"""
    client.add_cog(Lamp(client))
|
[
"discord.ext.commands.group"
] |
[((217, 286), 'discord.ext.commands.group', 'commands.group', ([], {'name': '"""lamp"""', 'hidden': '(True)', 'invoke_without_command': '(True)'}), "(name='lamp', hidden=True, invoke_without_command=True)\n", (231, 286), False, 'from discord.ext import commands\n')]
|
from telethon import events
import asyncio
from userbot.utils import admin_cmd
@borg.on(admin_cmd("dubey"))
async def _(event):
    """Edit the triggering message through a short two-frame text animation."""
    if event.fwd_from:
        return
    animation_interval = 1.0
    animation_ttl = range(0, 36)
    await event.edit("Dubey Joins the chat")
    animation_chars = [
        "Allahabad wale",
        "**Ek Jhapad Mar du toh Saale Mu ke Dant Gaand se nikalenge**"
    ]
    for i in animation_ttl:
        await asyncio.sleep(animation_interval)
        # BUG FIX: the original indexed with `i % 18`, which raises IndexError
        # once i % 18 >= 2 because only two frames exist; use the frame count.
        await event.edit(animation_chars[i % len(animation_chars)])
|
[
"userbot.utils.admin_cmd",
"asyncio.sleep"
] |
[((97, 115), 'userbot.utils.admin_cmd', 'admin_cmd', (['"""dubey"""'], {}), "('dubey')\n", (106, 115), False, 'from userbot.utils import admin_cmd\n'), ((580, 613), 'asyncio.sleep', 'asyncio.sleep', (['animation_interval'], {}), '(animation_interval)\n', (593, 613), False, 'import asyncio\n')]
|
"""
# Sample code to perform I/O:
name = input() # Reading input from STDIN
print('Hi, %s.' % name) # Writing output to STDOUT
# Warning: Printing unwanted or ill-formatted data to output will cause the test cases to fail
"""
# Write your code here
import sys
from bisect import bisect_left
# Read the pile sizes, precompute suffix sums of `a` (stones[k] = total
# stones in the last k+1 piles), then answer each query by parity.
n = int(sys.stdin.readline())
a = list(map(int, sys.stdin.readline().strip().split()))
q = int(sys.stdin.readline())
stones = [a[-1]]
for i in a[-2::-1]:
    stones.append(stones[-1] + i)
for _ in range(q):
    x = int(sys.stdin.readline())
    # bisect_left finds how many suffix piles x stones reach into; the
    # parity of the remaining pile count decides the winner (A on odd).
    print('A' if (n - bisect_left(stones, x)) % 2 else 'B')
# data = list(map(int, sys.stdin.readline().strip().split()))
# n = data[0]
# a = data[1:n + 1]
# q = data[n + 1]
# stones = [a[-1]]
# for i in a[-2::-1]:
#     stones.append(stones[-1] + i)
# for i in range(q):
#     x = data[n + 2 + i]
#     sys.stdout.write('A\n' if (n - bisect_left(stones, x)) % 2 else 'B\n')
|
[
"bisect.bisect_left",
"sys.stdin.readline"
] |
[((328, 348), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (346, 348), False, 'import sys\n'), ((415, 435), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (433, 435), False, 'import sys\n'), ((539, 559), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (557, 559), False, 'import sys\n'), ((583, 605), 'bisect.bisect_left', 'bisect_left', (['stones', 'x'], {}), '(stones, x)\n', (594, 605), False, 'from bisect import bisect_left\n'), ((368, 388), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (386, 388), False, 'import sys\n')]
|
"""
This module contains the functions necessary for the estimation process of transition
probabilities.
"""
import numba
import numpy as np
import pandas as pd
from estimagic.optimization.optimize import minimize
def estimate_transitions(df):
    """Estimate the transition probabilities.

    Manages the estimation of the transition probabilities from the observed
    per-period usage increases.

    Parameters
    ----------
    df : pandas.DataFrame
        see :ref:`df`

    Returns
    -------
    result_transitions : dictionary
        see :ref:`result_trans`
    """
    # Pool the observed state increases, dropping missing observations.
    raw_usage = df["usage"].to_numpy(dtype=float)
    observed = raw_usage[~np.isnan(raw_usage)].astype(int)
    counts = np.bincount(observed)

    # Starting values for estimagic: empirical shares, with exact zeros nudged
    # to a tiny positive value so the log-likelihood stays finite.
    levels = np.arange(1, len(counts) + 1)
    start_index = pd.MultiIndex.from_product(
        [["trans_prob"], levels], names=["name", "number"]
    )
    start_params = pd.DataFrame(
        counts / sum(counts),
        columns=["value"],
        index=start_index,
    )
    start_params.loc[start_params["value"] == 0] = 1e-20

    optimizer_result = minimize(
        criterion=loglike_trans,
        params=start_params,
        algorithm="scipy_lbfgsb",
        constraints=[{"loc": "trans_prob", "type": "probability"}],
        criterion_kwargs={"transition_count": counts},
        logging=False,
    )
    return {
        "trans_count": counts,
        "x": optimizer_result["solution_params"]["value"].to_numpy(),
        "fun": optimizer_result["solution_criterion"],
    }
def loglike_trans_individual(params, transition_count):
    """
    Individual negative log-likelihood of the transition probability estimation.

    Parameters
    ----------
    params : pandas.DataFrame
        The untransformed transition probability guess (rows "trans_prob").
    transition_count : numpy.array
        The pooled count of state increases per period in the data.

    Returns
    -------
    log_like_individual : numpy.array
        The individual negative log-likelihood contributions of the
        transition probabilities.
    """
    probs = params.loc["trans_prob", "value"].to_numpy()
    # Elementwise count * log(prob), negated for minimization.
    return -(transition_count * np.log(probs))
def loglike_trans(params, transition_count):
    """
    Sum the individual negative log-likelihood contributions.

    Parameters
    ----------
    params : pd.DataFrame
        Parameter guess of the transition probabilities.
    transition_count : numpy.array
        The pooled count of state increases per period in the data.

    Returns
    -------
    log_like : float
        The negative log likelihood given some transition probability guess.
    """
    contributions = loglike_trans_individual(params, transition_count)
    return contributions.sum()
def loglike_trans_individual_derivative(params, transition_count):
    """
    Generate the Jacobian of the individual log-likelihood function of the
    transition probabilities. Currently unused; kept for further development
    of the package once estimagic can handle constraints with analytical
    derivatives.

    Parameters
    ----------
    params : pd.DataFrame
        Parameter guess of the transition probabilities.
    transition_count : numpy.array
        The pooled count of state increases per period in the data.

    Returns
    -------
    jacobian : np.array
        A dim(params) x dim(params) matrix containing the Jacobian.
    """
    probs = params.loc["trans_prob", "value"].to_numpy()
    # d/dp of -(count * log(p)) is -count/p; off-diagonal entries are zero.
    diag_entries = -(transition_count * (1 / probs))
    return np.diag(diag_entries)
def loglike_trans_derivative(params, transition_count):
    """Collapse the individual Jacobian row-wise into the gradient vector."""
    jacobian = loglike_trans_individual_derivative(params, transition_count)
    return jacobian.sum(axis=1)
@numba.jit(nopython=True)
def create_transition_matrix(num_states, trans_prob):
    """
    Creating the transition matrix with the assumption, that in every row the state
    increases have the same probability.

    Parameters
    ----------
    num_states : int
        The size of the state space.
    trans_prob : numpy.array
        The probabilities of an state increase.

    Returns
    -------
    trans_mat : numpy.array
        see :ref:`trans_mat`
    """
    trans_mat = np.zeros((num_states, num_states))
    for i in range(num_states):  # Loop over all states.
        for j, p in enumerate(trans_prob):  # Loop over the possible increases.
            if i + j < num_states - 1:
                trans_mat[i, i + j] = p
            elif i + j == num_states - 1:
                # Last column absorbs all probability mass of increases that
                # would leave the state space.
                trans_mat[i, num_states - 1] = trans_prob[j:].sum()
            else:
                # Increases beyond the last state were already folded into the
                # elif branch above; nothing to do here.
                pass
    return trans_mat
|
[
"numpy.multiply",
"numpy.log",
"numpy.zeros",
"numpy.isnan",
"pandas.MultiIndex.from_product",
"numba.jit",
"estimagic.optimization.optimize.minimize",
"numpy.bincount"
] |
[((3902, 3926), 'numba.jit', 'numba.jit', ([], {'nopython': '(True)'}), '(nopython=True)\n', (3911, 3926), False, 'import numba\n'), ((738, 756), 'numpy.bincount', 'np.bincount', (['usage'], {}), '(usage)\n', (749, 756), True, 'import numpy as np\n'), ((887, 955), 'pandas.MultiIndex.from_product', 'pd.MultiIndex.from_product', (['[name, number]'], {'names': "['name', 'number']"}), "([name, number], names=['name', 'number'])\n", (913, 955), True, 'import pandas as pd\n'), ((1216, 1390), 'estimagic.optimization.optimize.minimize', 'minimize', ([], {'criterion': 'loglike_trans', 'params': 'params', 'algorithm': '"""scipy_lbfgsb"""', 'constraints': 'constr', 'criterion_kwargs': "{'transition_count': transition_count}", 'logging': '(False)'}), "(criterion=loglike_trans, params=params, algorithm='scipy_lbfgsb',\n constraints=constr, criterion_kwargs={'transition_count':\n transition_count}, logging=False)\n", (1224, 1390), False, 'from estimagic.optimization.optimize import minimize\n'), ((4387, 4421), 'numpy.zeros', 'np.zeros', (['(num_states, num_states)'], {}), '((num_states, num_states))\n', (4395, 4421), True, 'import numpy as np\n'), ((3625, 3665), 'numpy.multiply', 'np.multiply', (['transition_count', '(1 / p_raw)'], {}), '(transition_count, 1 / p_raw)\n', (3636, 3665), True, 'import numpy as np\n'), ((2275, 2288), 'numpy.log', 'np.log', (['p_raw'], {}), '(p_raw)\n', (2281, 2288), True, 'import numpy as np\n'), ((650, 665), 'numpy.isnan', 'np.isnan', (['usage'], {}), '(usage)\n', (658, 665), True, 'import numpy as np\n')]
|
import os
import torch
import torch.multiprocessing as mp
from dataset.skeleton_multitask import *
from utils.evaluation import MultiAttrDatasetEvaluator
from utils.misc import get_folders_and_files
# Restrict CUDA to the first device and force the 'spawn' start method for
# multiprocessing. NOTE(review): 'spawn' is presumably required because forked
# workers cannot safely inherit CUDA/torch state — confirm.
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
mp.set_start_method('spawn', force=True)
def benchmark(evaluator: MultiAttrDatasetEvaluator, m_path: str, o_path: str) -> None:
    """Point ``evaluator`` at the model checkpoint ``m_path`` and run its
    evaluation. ``o_path`` is forwarded to ``set_model`` — presumably the
    output directory for results; confirm against MultiAttrDatasetEvaluator.
    Executed in a worker process (see the ``mp.Process`` call in __main__).
    """
    evaluator.set_model(m_path, o_path)
    evaluator.run_evaluation()
def kill_all_processes(p_list: list):
    """Terminate every process object in ``p_list``."""
    for proc in p_list:
        proc.terminate()
def chunks(l: list, n: int):
    """Yield successive n-sized chunks from l (the last one may be shorter)."""
    start = 0
    while start < len(l):
        yield l[start:start + n]
        start += n
if __name__ == '__main__':
    # GPU evaluation is disabled; flip to torch.cuda.is_available() to enable.
    use_gpu = False  # torch.cuda.is_available()
    device = torch.device('cuda:0' if use_gpu else 'cpu')
    dataset_path = './dataset/JL_Dataset_translated_crossSample12.skeldat'
    # NOTE(review): the adjacent string literals concatenate to './validation//';
    # a run/model folder name was probably meant between them — confirm the path.
    output_path = ('./validation/'
    '/')
    cde = MultiAttrDatasetEvaluator(dataset_path=dataset_path, device=device)
    # Force a downsample factor of 2 for K3Da-style datasets.
    if isinstance(cde.dataset, SkeletonDatasetK3Da):
        if cde.dataset.downsample_factor != 2:
            cde.dataset.downsample_factor = 2
    # get_folders_and_files returns (folders, files); index 1 selects the files.
    models_for_testing = get_folders_and_files(output_path)[1]
    processes = []
    # Spawn one evaluation process per checkpoint (*.tar) found in output_path.
    for model in models_for_testing:
        if not model.endswith('.tar'):
            continue
        model_path = output_path + model
        p = mp.Process(name=model, target=benchmark, args=(cde, model_path, output_path))
        p.daemon = True  # set false if not running in interactive mode
        p.start()
        processes.append(p)
|
[
"utils.evaluation.MultiAttrDatasetEvaluator",
"torch.multiprocessing.set_start_method",
"torch.multiprocessing.Process",
"utils.misc.get_folders_and_files",
"torch.device"
] |
[((242, 282), 'torch.multiprocessing.set_start_method', 'mp.set_start_method', (['"""spawn"""'], {'force': '(True)'}), "('spawn', force=True)\n", (261, 282), True, 'import torch.multiprocessing as mp\n'), ((764, 808), 'torch.device', 'torch.device', (["('cuda:0' if use_gpu else 'cpu')"], {}), "('cuda:0' if use_gpu else 'cpu')\n", (776, 808), False, 'import torch\n'), ((953, 1020), 'utils.evaluation.MultiAttrDatasetEvaluator', 'MultiAttrDatasetEvaluator', ([], {'dataset_path': 'dataset_path', 'device': 'device'}), '(dataset_path=dataset_path, device=device)\n', (978, 1020), False, 'from utils.evaluation import MultiAttrDatasetEvaluator\n'), ((1192, 1226), 'utils.misc.get_folders_and_files', 'get_folders_and_files', (['output_path'], {}), '(output_path)\n', (1213, 1226), False, 'from utils.misc import get_folders_and_files\n'), ((1399, 1476), 'torch.multiprocessing.Process', 'mp.Process', ([], {'name': 'model', 'target': 'benchmark', 'args': '(cde, model_path, output_path)'}), '(name=model, target=benchmark, args=(cde, model_path, output_path))\n', (1409, 1476), True, 'import torch.multiprocessing as mp\n')]
|
# -*- coding: utf-8 -*-
"""
test_text_chem
~~~~~~~~~~~~~~
Test the text chem package.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import unittest
from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE
# Emit debug-level log output while the regex tests run.
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)
class TestRegex(unittest.TestCase):
    """Regression tests for the chemistry text regular expressions."""

    def test_solvent(self):
        """Test solvent regex."""
        # Solvent names and formulae embedded in running prose.
        self.assertEqual([u'CH2Cl2'], SOLVENT_RE.findall(u'λmax(CH2Cl2)/nm'))
        self.assertEqual([u'acetonitrile', u'C6H6'], SOLVENT_RE.findall(u'Measured in acetonitrile and C6H6'))
        self.assertEqual([u'd2-dichloromethane'], SOLVENT_RE.findall(u'Spectra in d2-dichloromethane'))
        self.assertEqual([u'isopropanol'], SOLVENT_RE.findall(u'The solvent was isopropanol'))
        self.assertEqual([u'1,2-dichlorobenzene'], SOLVENT_RE.findall(u'Mixed with 1,2-dichlorobenzene.'))
        self.assertEqual([u'CHCl3', u'HCl'], SOLVENT_RE.findall(u'The mixture CHCl3/HCl was added.'))
        self.assertEqual([u'Ethyl acetate', u'Diethyl ether'], SOLVENT_RE.findall(u'Ethyl acetate. Diethyl ether.'))
        self.assertEqual([u'Ethylacetate', u'Diethylether'], SOLVENT_RE.findall(u'Ethylacetate. Diethylether.'))
        # A complex ion must not be matched as a solvent.
        self.assertEqual([], SOLVENT_RE.findall(u'[Rh2(dihex)4]2+'))

    def test_inchi(self):
        """Test InChI regex."""
        # Incomplete identifiers must be rejected.
        self.assertFalse(INCHI_RE.match(u'InChI'))
        self.assertFalse(INCHI_RE.match(u'InChI=1S'))
        # Well-formed standard InChI strings of varying complexity must match.
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C7H4N.Li/c1-2-7-4-3-5-8-6-7;/h3-6H;/q-1;+1'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C9H13BO2S/c1-9(2)6-11-10(12-7-9)8-4-3-5-13-8/h3-5H,6-7H2,1-2H3'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C7H12O/c8-6-7-4-2-1-3-5-7/h6-7H,1-5H2'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/Ca.2H2O.2H2/h;2*1H2;2*1H'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/2BrH.Fe/h2*1H;'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C8H2Br2N2/c9-7-1-5(3-11)6(4-12)2-8(7)10/h1-2H'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C9H10O3/c1-11-8-3-4-9(12-2)7(5-8)6-10/h3-6H,1-2H3'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C8H13NOS/c1-6-11-7-8(1)9-2-4-10-5-3-9/h1H,2-7H2'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C7H10N.BrH/c1-2-8-6-4-3-5-7-8;/h3-7H,2H2,1H3;1H/q+1;/p-1'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C13H9N.C2H7N5/c1-2-6-11-10(5-1)9-14-13-8-4-3-7-12(11)13;3-1(4)7-2(5)6/h1-9H;(H7,3,4,5,6,7)'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C26H56N/c1-5-7-9-11-13-15-17-19-21-23-25-27(3,4)26-24-22-20-18-16-14-12-10-8-6-2/h5-26H2,1-4H3/q+1'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C12H10Si/c1-2-5-10-9(4-1)8-12-11(10)6-3-7-13-12/h1-7,13H,8H2'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C7H10N2.Au/c1-9(2)7-3-5-8-6-4-7;/h3-6H,1-2H3;'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C8H6Cl4/c1-3(2)4-5(9)7(11)8(12)6(4)10/h1-2H3'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/BH3IP/c2-1-3/h1H,3H2'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C6H15N3/c1-2-4-8-9-6-5-7-3-1/h7-9H,1-6H2'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/2C8H5.Ru/c2*1-2-8-6-4-3-5-7-8;/h2*3-7H;/q2*-1;+2'))
        # Isotopic layers (/i, D/T markers) must be accepted.
        self.assertTrue(INCHI_RE.match(u'InChI=1S/HI/h1H/i/hD'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/F5P.FH.H3N/c1-6(2,3,4)5;;/h;1H;1H3'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C10H16/c1-8(2)10-6-4-9(3)5-7-10/h4,10H,1,5-7H2,2-3H3'))
        # Inorganic / multi-component formulae with minimal layers.
        self.assertTrue(INCHI_RE.match(u'InChI=1S/Mo.4O/q;;;2*-1'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/In.N'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/Au.H3P/h;1H3'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/Cd.Hg.Te'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/3CH3.In/h3*1H3;'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/In.3H2O/h;3*1H2/q+3;;;/p-3'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/I.W'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/Pt.H/q+1;'))
        # Bare proton with isotope variants (D, H, T).
        self.assertTrue(INCHI_RE.match(u'InChI=1S/p+1/i/hD'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/p+1/i/hH'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/p+1/i/hT'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C5H5N5O/c6-5-9-3-2(4(11)10-5)7-1-8-3/h1H,(H4,6,7,8,9,10,11)'))
        # Protonation and stereo layers (/p, /t, /m, /s).
        self.assertTrue(INCHI_RE.match(u'InChI=1S/C2H4ClNO2/c3-1(4)2(5)6/h1H,4H2,(H,5,6)/p+1/t1-/m1/s1'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/H2/h1H/i1+2T'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/CH2Cl2/c2-1-3/h1H2/i1D2'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/H2/h1H/i1+1D'))
        self.assertTrue(INCHI_RE.match(u'InChI=1S/H2O4S/c1-5(2,3)4/h(H2,1,2,3,4)/i/hD2'))

    def test_smiles(self):
        """Test SMILES regex against a spread of valid notations."""
        self.assertTrue(SMILES_RE.match(u'S=S'))
        self.assertTrue(SMILES_RE.match(u'P1P=P1'))
        self.assertTrue(SMILES_RE.match(u'[V].[Cu+2]'))
        self.assertTrue(SMILES_RE.match(u'O'))
        self.assertTrue(SMILES_RE.match(u'CC1=C(SC=N1)C=CC2=C(NC(SC2)C(C(=O)O)NC(=O)C(=NOC)C3=CSC(=N3)N)C(=O)O'))
        self.assertTrue(SMILES_RE.match(u'C1=CC=C(C=C1)C2=CC=C(C=C2)C3=NN=C(O3)C4=CC=CC=C4'))
        self.assertTrue(SMILES_RE.match(u'CC(=O)OO'))
        # Directional (cis/trans) bonds.
        self.assertTrue(SMILES_RE.match(u'CCCCCCCC/C=C\CCCCCCCCN'))
        self.assertTrue(SMILES_RE.match(u'C[N+](C)(C)CCCCCC[N+](C)(C)C.[Br-]'))
        # Stereo centres in brackets.
        self.assertTrue(SMILES_RE.match(u'C([C@H](C(=O)O)N)F'))
        self.assertTrue(SMILES_RE.match(u'[Ru]'))
        self.assertTrue(SMILES_RE.match(u'[S-2].[Cu+2].[Cu+2]'))
        self.assertTrue(SMILES_RE.match(u'[Cd]=[Te]'))
        self.assertTrue(SMILES_RE.match(u'C1C[C@H](OC1)C(=O)O'))
        self.assertTrue(SMILES_RE.match(u'C(=O)(O)[O-].[OH-].[Zn+2]'))
        self.assertTrue(SMILES_RE.match(u'N#N'))
        self.assertTrue(SMILES_RE.match(u'[HH]'))
        self.assertTrue(SMILES_RE.match(u'[Li+].[Li+].[O-][Ti](=O)[O-]'))
        self.assertTrue(SMILES_RE.match(u'[F-]'))
        self.assertTrue(SMILES_RE.match(u'CC(C)[C@@H](C(=O)O)N'))
        self.assertTrue(SMILES_RE.match(u'CC(C)C(C#C)O'))
        self.assertTrue(SMILES_RE.match(u'CCCC#N'))
        self.assertTrue(SMILES_RE.match(u'C(/C=C\O)Cl'))
if __name__ == '__main__':
    # Allow running this test module directly with `python`.
    unittest.main()
|
[
"unittest.main",
"chemdataextractor.text.chem.INCHI_RE.match",
"chemdataextractor.text.chem.SOLVENT_RE.findall",
"logging.basicConfig",
"chemdataextractor.text.chem.SMILES_RE.match",
"logging.getLogger"
] |
[((367, 407), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (386, 407), False, 'import logging\n'), ((415, 442), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (432, 442), False, 'import logging\n'), ((6416, 6431), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6429, 6431), False, 'import unittest\n'), ((589, 627), 'chemdataextractor.text.chem.SOLVENT_RE.findall', 'SOLVENT_RE.findall', (['u"""λmax(CH2Cl2)/nm"""'], {}), "(u'λmax(CH2Cl2)/nm')\n", (607, 627), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((683, 739), 'chemdataextractor.text.chem.SOLVENT_RE.findall', 'SOLVENT_RE.findall', (['u"""Measured in acetonitrile and C6H6"""'], {}), "(u'Measured in acetonitrile and C6H6')\n", (701, 739), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((792, 844), 'chemdataextractor.text.chem.SOLVENT_RE.findall', 'SOLVENT_RE.findall', (['u"""Spectra in d2-dichloromethane"""'], {}), "(u'Spectra in d2-dichloromethane')\n", (810, 844), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((890, 940), 'chemdataextractor.text.chem.SOLVENT_RE.findall', 'SOLVENT_RE.findall', (['u"""The solvent was isopropanol"""'], {}), "(u'The solvent was isopropanol')\n", (908, 940), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((994, 1048), 'chemdataextractor.text.chem.SOLVENT_RE.findall', 'SOLVENT_RE.findall', (['u"""Mixed with 1,2-dichlorobenzene."""'], {}), "(u'Mixed with 1,2-dichlorobenzene.')\n", (1012, 1048), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((1096, 1151), 'chemdataextractor.text.chem.SOLVENT_RE.findall', 'SOLVENT_RE.findall', (['u"""The mixture CHCl3/HCl was added."""'], {}), "(u'The mixture CHCl3/HCl was added.')\n", (1114, 1151), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, 
SMILES_RE\n'), ((1217, 1269), 'chemdataextractor.text.chem.SOLVENT_RE.findall', 'SOLVENT_RE.findall', (['u"""Ethyl acetate. Diethyl ether."""'], {}), "(u'Ethyl acetate. Diethyl ether.')\n", (1235, 1269), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((1333, 1383), 'chemdataextractor.text.chem.SOLVENT_RE.findall', 'SOLVENT_RE.findall', (['u"""Ethylacetate. Diethylether."""'], {}), "(u'Ethylacetate. Diethylether.')\n", (1351, 1383), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((1415, 1453), 'chemdataextractor.text.chem.SOLVENT_RE.findall', 'SOLVENT_RE.findall', (['u"""[Rh2(dihex)4]2+"""'], {}), "(u'[Rh2(dihex)4]2+')\n", (1433, 1453), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((1543, 1567), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI"""'], {}), "(u'InChI')\n", (1557, 1567), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((1595, 1622), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S"""'], {}), "(u'InChI=1S')\n", (1609, 1622), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((1649, 1719), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C7H4N.Li/c1-2-7-4-3-5-8-6-7;/h3-6H;/q-1;+1"""'], {}), "(u'InChI=1S/C7H4N.Li/c1-2-7-4-3-5-8-6-7;/h3-6H;/q-1;+1')\n", (1663, 1719), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((1746, 1841), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C9H13BO2S/c1-9(2)6-11-10(12-7-9)8-4-3-5-13-8/h3-5H,6-7H2,1-2H3"""'], {}), "(\n u'InChI=1S/C9H13BO2S/c1-9(2)6-11-10(12-7-9)8-4-3-5-13-8/h3-5H,6-7H2,1-2H3')\n", (1760, 1841), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((1863, 1928), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', 
(['u"""InChI=1S/C7H12O/c8-6-7-4-2-1-3-5-7/h6-7H,1-5H2"""'], {}), "(u'InChI=1S/C7H12O/c8-6-7-4-2-1-3-5-7/h6-7H,1-5H2')\n", (1877, 1928), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((1955, 2007), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/Ca.2H2O.2H2/h;2*1H2;2*1H"""'], {}), "(u'InChI=1S/Ca.2H2O.2H2/h;2*1H2;2*1H')\n", (1969, 2007), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((2034, 2076), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/2BrH.Fe/h2*1H;"""'], {}), "(u'InChI=1S/2BrH.Fe/h2*1H;')\n", (2048, 2076), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((2103, 2176), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C8H2Br2N2/c9-7-1-5(3-11)6(4-12)2-8(7)10/h1-2H"""'], {}), "(u'InChI=1S/C8H2Br2N2/c9-7-1-5(3-11)6(4-12)2-8(7)10/h1-2H')\n", (2117, 2176), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((2203, 2280), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C9H10O3/c1-11-8-3-4-9(12-2)7(5-8)6-10/h3-6H,1-2H3"""'], {}), "(u'InChI=1S/C9H10O3/c1-11-8-3-4-9(12-2)7(5-8)6-10/h3-6H,1-2H3')\n", (2217, 2280), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((2307, 2382), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C8H13NOS/c1-6-11-7-8(1)9-2-4-10-5-3-9/h1H,2-7H2"""'], {}), "(u'InChI=1S/C8H13NOS/c1-6-11-7-8(1)9-2-4-10-5-3-9/h1H,2-7H2')\n", (2321, 2382), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((2409, 2498), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C7H10N.BrH/c1-2-8-6-4-3-5-7-8;/h3-7H,2H2,1H3;1H/q+1;/p-1"""'], {}), "(\n u'InChI=1S/C7H10N.BrH/c1-2-8-6-4-3-5-7-8;/h3-7H,2H2,1H3;1H/q+1;/p-1')\n", (2423, 2498), False, 'from 
chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((2520, 2648), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C13H9N.C2H7N5/c1-2-6-11-10(5-1)9-14-13-8-4-3-7-12(11)13;3-1(4)7-2(5)6/h1-9H;(H7,3,4,5,6,7)"""'], {}), "(\n u'InChI=1S/C13H9N.C2H7N5/c1-2-6-11-10(5-1)9-14-13-8-4-3-7-12(11)13;3-1(4)7-2(5)6/h1-9H;(H7,3,4,5,6,7)'\n )\n", (2534, 2648), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((2665, 2801), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C26H56N/c1-5-7-9-11-13-15-17-19-21-23-25-27(3,4)26-24-22-20-18-16-14-12-10-8-6-2/h5-26H2,1-4H3/q+1"""'], {}), "(\n u'InChI=1S/C26H56N/c1-5-7-9-11-13-15-17-19-21-23-25-27(3,4)26-24-22-20-18-16-14-12-10-8-6-2/h5-26H2,1-4H3/q+1'\n )\n", (2679, 2801), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((2818, 2911), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C12H10Si/c1-2-5-10-9(4-1)8-12-11(10)6-3-7-13-12/h1-7,13H,8H2"""'], {}), "(\n u'InChI=1S/C12H10Si/c1-2-5-10-9(4-1)8-12-11(10)6-3-7-13-12/h1-7,13H,8H2')\n", (2832, 2911), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((2933, 3006), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C7H10N2.Au/c1-9(2)7-3-5-8-6-4-7;/h3-6H,1-2H3;"""'], {}), "(u'InChI=1S/C7H10N2.Au/c1-9(2)7-3-5-8-6-4-7;/h3-6H,1-2H3;')\n", (2947, 3006), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((3033, 3105), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C8H6Cl4/c1-3(2)4-5(9)7(11)8(12)6(4)10/h1-2H3"""'], {}), "(u'InChI=1S/C8H6Cl4/c1-3(2)4-5(9)7(11)8(12)6(4)10/h1-2H3')\n", (3047, 3105), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((3132, 3180), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', 
(['u"""InChI=1S/BH3IP/c2-1-3/h1H,3H2"""'], {}), "(u'InChI=1S/BH3IP/c2-1-3/h1H,3H2')\n", (3146, 3180), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((3207, 3275), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C6H15N3/c1-2-4-8-9-6-5-7-3-1/h7-9H,1-6H2"""'], {}), "(u'InChI=1S/C6H15N3/c1-2-4-8-9-6-5-7-3-1/h7-9H,1-6H2')\n", (3221, 3275), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((3302, 3378), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/2C8H5.Ru/c2*1-2-8-6-4-3-5-7-8;/h2*3-7H;/q2*-1;+2"""'], {}), "(u'InChI=1S/2C8H5.Ru/c2*1-2-8-6-4-3-5-7-8;/h2*3-7H;/q2*-1;+2')\n", (3316, 3378), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((3405, 3444), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/HI/h1H/i/hD"""'], {}), "(u'InChI=1S/HI/h1H/i/hD')\n", (3419, 3444), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((3471, 3533), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/F5P.FH.H3N/c1-6(2,3,4)5;;/h;1H;1H3"""'], {}), "(u'InChI=1S/F5P.FH.H3N/c1-6(2,3,4)5;;/h;1H;1H3')\n", (3485, 3533), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((3560, 3645), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C10H16/c1-8(2)10-6-4-9(3)5-7-10/h4,10H,1,5-7H2,2-3H3"""'], {}), "(u'InChI=1S/C10H16/c1-8(2)10-6-4-9(3)5-7-10/h4,10H,1,5-7H2,2-3H3'\n )\n", (3574, 3645), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((3667, 3709), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/Mo.4O/q;;;2*-1"""'], {}), "(u'InChI=1S/Mo.4O/q;;;2*-1')\n", (3681, 3709), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((3736, 3768), 
'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/In.N"""'], {}), "(u'InChI=1S/In.N')\n", (3750, 3768), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((3795, 3835), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/Au.H3P/h;1H3"""'], {}), "(u'InChI=1S/Au.H3P/h;1H3')\n", (3809, 3835), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((3862, 3898), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/Cd.Hg.Te"""'], {}), "(u'InChI=1S/Cd.Hg.Te')\n", (3876, 3898), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((3925, 3968), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/3CH3.In/h3*1H3;"""'], {}), "(u'InChI=1S/3CH3.In/h3*1H3;')\n", (3939, 3968), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((3995, 4049), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/In.3H2O/h;3*1H2/q+3;;;/p-3"""'], {}), "(u'InChI=1S/In.3H2O/h;3*1H2/q+3;;;/p-3')\n", (4009, 4049), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((4076, 4107), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/I.W"""'], {}), "(u'InChI=1S/I.W')\n", (4090, 4107), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((4134, 4171), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/Pt.H/q+1;"""'], {}), "(u'InChI=1S/Pt.H/q+1;')\n", (4148, 4171), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((4198, 4234), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/p+1/i/hD"""'], {}), "(u'InChI=1S/p+1/i/hD')\n", (4212, 4234), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((4261, 4297), 
'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/p+1/i/hH"""'], {}), "(u'InChI=1S/p+1/i/hH')\n", (4275, 4297), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((4324, 4360), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/p+1/i/hT"""'], {}), "(u'InChI=1S/p+1/i/hT')\n", (4338, 4360), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((4387, 4479), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C5H5N5O/c6-5-9-3-2(4(11)10-5)7-1-8-3/h1H,(H4,6,7,8,9,10,11)"""'], {}), "(\n u'InChI=1S/C5H5N5O/c6-5-9-3-2(4(11)10-5)7-1-8-3/h1H,(H4,6,7,8,9,10,11)')\n", (4401, 4479), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((4501, 4586), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/C2H4ClNO2/c3-1(4)2(5)6/h1H,4H2,(H,5,6)/p+1/t1-/m1/s1"""'], {}), "(u'InChI=1S/C2H4ClNO2/c3-1(4)2(5)6/h1H,4H2,(H,5,6)/p+1/t1-/m1/s1'\n )\n", (4515, 4586), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((4608, 4648), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/H2/h1H/i1+2T"""'], {}), "(u'InChI=1S/H2/h1H/i1+2T')\n", (4622, 4648), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((4675, 4726), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/CH2Cl2/c2-1-3/h1H2/i1D2"""'], {}), "(u'InChI=1S/CH2Cl2/c2-1-3/h1H2/i1D2')\n", (4689, 4726), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((4753, 4793), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', (['u"""InChI=1S/H2/h1H/i1+1D"""'], {}), "(u'InChI=1S/H2/h1H/i1+1D')\n", (4767, 4793), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((4820, 4884), 'chemdataextractor.text.chem.INCHI_RE.match', 'INCHI_RE.match', 
(['u"""InChI=1S/H2O4S/c1-5(2,3)4/h(H2,1,2,3,4)/i/hD2"""'], {}), "(u'InChI=1S/H2O4S/c1-5(2,3)4/h(H2,1,2,3,4)/i/hD2')\n", (4834, 4884), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((4941, 4964), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""S=S"""'], {}), "(u'S=S')\n", (4956, 4964), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((4991, 5017), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""P1P=P1"""'], {}), "(u'P1P=P1')\n", (5006, 5017), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5044, 5074), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""[V].[Cu+2]"""'], {}), "(u'[V].[Cu+2]')\n", (5059, 5074), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5101, 5122), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""O"""'], {}), "(u'O')\n", (5116, 5122), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5149, 5242), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""CC1=C(SC=N1)C=CC2=C(NC(SC2)C(C(=O)O)NC(=O)C(=NOC)C3=CSC(=N3)N)C(=O)O"""'], {}), "(\n u'CC1=C(SC=N1)C=CC2=C(NC(SC2)C(C(=O)O)NC(=O)C(=NOC)C3=CSC(=N3)N)C(=O)O')\n", (5164, 5242), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5264, 5332), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""C1=CC=C(C=C1)C2=CC=C(C=C2)C3=NN=C(O3)C4=CC=CC=C4"""'], {}), "(u'C1=CC=C(C=C1)C2=CC=C(C=C2)C3=NN=C(O3)C4=CC=CC=C4')\n", (5279, 5332), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5359, 5387), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""CC(=O)OO"""'], {}), "(u'CC(=O)OO')\n", (5374, 5387), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5414, 5457), 
'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""CCCCCCCC/C=C\\\\CCCCCCCCN"""'], {}), "(u'CCCCCCCC/C=C\\\\CCCCCCCCN')\n", (5429, 5457), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5483, 5537), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""C[N+](C)(C)CCCCCC[N+](C)(C)C.[Br-]"""'], {}), "(u'C[N+](C)(C)CCCCCC[N+](C)(C)C.[Br-]')\n", (5498, 5537), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5564, 5602), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""C([C@H](C(=O)O)N)F"""'], {}), "(u'C([C@H](C(=O)O)N)F')\n", (5579, 5602), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5629, 5653), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""[Ru]"""'], {}), "(u'[Ru]')\n", (5644, 5653), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5680, 5719), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""[S-2].[Cu+2].[Cu+2]"""'], {}), "(u'[S-2].[Cu+2].[Cu+2]')\n", (5695, 5719), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5746, 5775), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""[Cd]=[Te]"""'], {}), "(u'[Cd]=[Te]')\n", (5761, 5775), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5802, 5841), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""C1C[C@H](OC1)C(=O)O"""'], {}), "(u'C1C[C@H](OC1)C(=O)O')\n", (5817, 5841), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5868, 5913), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""C(=O)(O)[O-].[OH-].[Zn+2]"""'], {}), "(u'C(=O)(O)[O-].[OH-].[Zn+2]')\n", (5883, 5913), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5940, 5963), 
'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""N#N"""'], {}), "(u'N#N')\n", (5955, 5963), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((5990, 6014), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""[HH]"""'], {}), "(u'[HH]')\n", (6005, 6014), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((6041, 6089), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""[Li+].[Li+].[O-][Ti](=O)[O-]"""'], {}), "(u'[Li+].[Li+].[O-][Ti](=O)[O-]')\n", (6056, 6089), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((6116, 6140), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""[F-]"""'], {}), "(u'[F-]')\n", (6131, 6140), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((6167, 6207), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""CC(C)[C@@H](C(=O)O)N"""'], {}), "(u'CC(C)[C@@H](C(=O)O)N')\n", (6182, 6207), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((6234, 6266), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""CC(C)C(C#C)O"""'], {}), "(u'CC(C)C(C#C)O')\n", (6249, 6266), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((6293, 6319), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""CCCC#N"""'], {}), "(u'CCCC#N')\n", (6308, 6319), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n'), ((6346, 6378), 'chemdataextractor.text.chem.SMILES_RE.match', 'SMILES_RE.match', (['u"""C(/C=C\\\\O)Cl"""'], {}), "(u'C(/C=C\\\\O)Cl')\n", (6361, 6378), False, 'from chemdataextractor.text.chem import SOLVENT_RE, INCHI_RE, SMILES_RE\n')]
|
# -*- coding: utf-8 -*-
import logging
from enum import EnumMeta
from typing import List
from urllib.parse import urljoin
import allure
from django.urls import reverse
from pytest_django.live_server_helper import LiveServer
from selenium.common.exceptions import StaleElementReferenceException
from selenium.webdriver.remote.webdriver import WebDriver
from tests.browser.util import attach_jpg_screenshot, is_element_visible
logger = logging.getLogger(__name__)
@allure.step('Visit {page_name} page')
def visit_page(
    live_server: LiveServer,
    browser: WebDriver,
    view_name: str,
    page_name: str,
    check_for_errors: bool = True,
    *,
    endpoint: str = None,
):
    """Open a page (by Django view name or raw endpoint), screenshot it and
    optionally run the standard error-marker checks."""
    use_view = bool(view_name) and not endpoint
    path = reverse(view_name) if use_view else endpoint
    target_url = urljoin(live_server.url, path)
    browser.get(target_url)
    attach_jpg_screenshot(browser, f'Visited {page_name}')
    if check_for_errors:
        should_not_see_errors(browser)
@allure.step('Should see all expected page sections')
def should_see_all_expected_page_sections(browser: WebDriver, selector_enums: List[EnumMeta]):
    """Screenshot the whole page, then verify every selector enum in turn."""
    attach_jpg_screenshot(browser, f'View of the whole page: {browser.current_url}')
    for section_enum in selector_enums:
        should_see_all_elements(browser, section_enum)
@allure.step('Should see all elements from: {selectors_enum}')
def should_see_all_elements(browser: WebDriver, selectors_enum: EnumMeta):
    """Assert that every selector from *selectors_enum* is visible on the page.

    Selectors with an empty value or flagged as not visible are skipped.
    Container/modal selectors get an extra screenshot for context, and a
    screenshot is attached whenever an expected element is missing.
    """
    for selector in selectors_enum:
        if not selector.value:
            continue
        if not selector.is_visible:
            continue
        if selector.name in ['CONTAINER', 'MODAL']:
            attach_jpg_screenshot(browser, f'{selectors_enum.__name__} container', selector=selector)
        error = f'Expected element "{selector}" is not visible'
        # Evaluate visibility exactly once: the original code queried the DOM
        # twice (once for the screenshot guard, once for the assert), so a page
        # update between the calls could make the two results disagree.
        visible = is_element_visible(browser, selector)
        if not visible:
            attach_jpg_screenshot(browser, error)
        assert visible, error
    logger.info(f'All elements from {selectors_enum} are visible on {browser.current_url}')
@allure.step('Should not see element: {selector}')
def should_not_see_element(browser, selector):
    """Assert that *selector* is absent from the page, attaching a screenshot
    on failure (or when the element reference goes stale mid-check)."""
    if not selector.is_visible:
        return
    failure_message = f'Unexpected element is visible "{selector}"'
    try:
        assert not is_element_visible(browser, selector), failure_message
    except StaleElementReferenceException:
        attach_jpg_screenshot(browser, 'StaleElementReferenceException')
        raise
    except AssertionError:
        attach_jpg_screenshot(browser, failure_message)
        raise
@allure.step('Should not see elements from: {selectors_enum}')
def should_not_see_any_element(browser, selectors_enum):
    """Assert that no selector from *selectors_enum* is visible.

    Delegates each check to should_not_see_element, which already skips
    selectors flagged as not visible and attaches screenshots on assertion
    failures and stale-element errors — the per-selector body here was a
    verbatim copy of that function.
    """
    for selector in selectors_enum:
        should_not_see_element(browser, selector)
@allure.step('Should not see errors')
def should_not_see_errors(browser):
    """Fail — with a screenshot — if the page source contains any known
    error marker (500/404/unexpected-error banners)."""
    page_source = browser.page_source
    # (marker substring, message template) pairs, checked in the same order
    # as the original chain of asserts.
    checks = (
        ('there is a problem with the service', '500 ISE on {}'),
        ('Internal Server Error', '500 ISE on {}'),
        ('This page cannot be found', '404 Not Found on {}'),
        ('Unexpected Error', 'Unexpected Error on {}'),
        ('Error fetching data', 'Error fetching data on {}'),
        ('A server error occurred', 'A server error occurred on {}'),
    )
    for marker, template in checks:
        assertion_error = template.format(browser.current_url)
        try:
            assert marker not in page_source, assertion_error
        except AssertionError:
            attach_jpg_screenshot(browser, assertion_error)
            raise
|
[
"urllib.parse.urljoin",
"tests.browser.util.is_element_visible",
"allure.step",
"django.urls.reverse",
"tests.browser.util.attach_jpg_screenshot",
"logging.getLogger"
] |
[((437, 464), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (454, 464), False, 'import logging\n'), ((468, 505), 'allure.step', 'allure.step', (['"""Visit {page_name} page"""'], {}), "('Visit {page_name} page')\n", (479, 505), False, 'import allure\n'), ((986, 1038), 'allure.step', 'allure.step', (['"""Should see all expected page sections"""'], {}), "('Should see all expected page sections')\n", (997, 1038), False, 'import allure\n'), ((1319, 1380), 'allure.step', 'allure.step', (['"""Should see all elements from: {selectors_enum}"""'], {}), "('Should see all elements from: {selectors_enum}')\n", (1330, 1380), False, 'import allure\n'), ((2078, 2127), 'allure.step', 'allure.step', (['"""Should not see element: {selector}"""'], {}), "('Should not see element: {selector}')\n", (2089, 2127), False, 'import allure\n'), ((2603, 2664), 'allure.step', 'allure.step', (['"""Should not see elements from: {selectors_enum}"""'], {}), "('Should not see elements from: {selectors_enum}')\n", (2614, 2664), False, 'import allure\n'), ((3232, 3268), 'allure.step', 'allure.step', (['"""Should not see errors"""'], {}), "('Should not see errors')\n", (3243, 3268), False, 'import allure\n'), ((864, 918), 'tests.browser.util.attach_jpg_screenshot', 'attach_jpg_screenshot', (['browser', 'f"""Visited {page_name}"""'], {}), "(browser, f'Visited {page_name}')\n", (885, 918), False, 'from tests.browser.util import attach_jpg_screenshot, is_element_visible\n'), ((1138, 1223), 'tests.browser.util.attach_jpg_screenshot', 'attach_jpg_screenshot', (['browser', 'f"""View of the whole page: {browser.current_url}"""'], {}), "(browser, f'View of the whole page: {browser.current_url}'\n )\n", (1159, 1223), False, 'from tests.browser.util import attach_jpg_screenshot, is_element_visible\n'), ((804, 838), 'urllib.parse.urljoin', 'urljoin', (['live_server.url', 'endpoint'], {}), '(live_server.url, endpoint)\n', (811, 838), False, 'from urllib.parse import urljoin\n'), 
((1938, 1975), 'tests.browser.util.is_element_visible', 'is_element_visible', (['browser', 'selector'], {}), '(browser, selector)\n', (1956, 1975), False, 'from tests.browser.util import attach_jpg_screenshot, is_element_visible\n'), ((760, 778), 'django.urls.reverse', 'reverse', (['view_name'], {}), '(view_name)\n', (767, 778), False, 'from django.urls import reverse\n'), ((1665, 1758), 'tests.browser.util.attach_jpg_screenshot', 'attach_jpg_screenshot', (['browser', 'f"""{selectors_enum.__name__} container"""'], {'selector': 'selector'}), "(browser, f'{selectors_enum.__name__} container',\n selector=selector)\n", (1686, 1758), False, 'from tests.browser.util import attach_jpg_screenshot, is_element_visible\n'), ((1834, 1871), 'tests.browser.util.is_element_visible', 'is_element_visible', (['browser', 'selector'], {}), '(browser, selector)\n', (1852, 1871), False, 'from tests.browser.util import attach_jpg_screenshot, is_element_visible\n'), ((1885, 1922), 'tests.browser.util.attach_jpg_screenshot', 'attach_jpg_screenshot', (['browser', 'error'], {}), '(browser, error)\n', (1906, 1922), False, 'from tests.browser.util import attach_jpg_screenshot, is_element_visible\n'), ((2318, 2355), 'tests.browser.util.is_element_visible', 'is_element_visible', (['browser', 'selector'], {}), '(browser, selector)\n', (2336, 2355), False, 'from tests.browser.util import attach_jpg_screenshot, is_element_visible\n'), ((2408, 2455), 'tests.browser.util.attach_jpg_screenshot', 'attach_jpg_screenshot', (['browser', 'assertion_error'], {}), '(browser, assertion_error)\n', (2429, 2455), False, 'from tests.browser.util import attach_jpg_screenshot, is_element_visible\n'), ((2521, 2585), 'tests.browser.util.attach_jpg_screenshot', 'attach_jpg_screenshot', (['browser', '"""StaleElementReferenceException"""'], {}), "(browser, 'StaleElementReferenceException')\n", (2542, 2585), False, 'from tests.browser.util import attach_jpg_screenshot, is_element_visible\n'), ((4228, 4275), 
'tests.browser.util.attach_jpg_screenshot', 'attach_jpg_screenshot', (['browser', 'assertion_error'], {}), '(browser, assertion_error)\n', (4249, 4275), False, 'from tests.browser.util import attach_jpg_screenshot, is_element_visible\n'), ((2923, 2960), 'tests.browser.util.is_element_visible', 'is_element_visible', (['browser', 'selector'], {}), '(browser, selector)\n', (2941, 2960), False, 'from tests.browser.util import attach_jpg_screenshot, is_element_visible\n'), ((3021, 3068), 'tests.browser.util.attach_jpg_screenshot', 'attach_jpg_screenshot', (['browser', 'assertion_error'], {}), '(browser, assertion_error)\n', (3042, 3068), False, 'from tests.browser.util import attach_jpg_screenshot, is_element_visible\n'), ((3146, 3210), 'tests.browser.util.attach_jpg_screenshot', 'attach_jpg_screenshot', (['browser', '"""StaleElementReferenceException"""'], {}), "(browser, 'StaleElementReferenceException')\n", (3167, 3210), False, 'from tests.browser.util import attach_jpg_screenshot, is_element_visible\n')]
|
import argparse
import logging
import socketserver
import sys
import threading
import time
from google.protobuf import text_format
import numpy as np
from pyqtgraph.Qt import QtCore, QtGui
from math import degrees
from cvra_studio.viewers.LivePlotter2D import LivePlotter2D
import messages
from log_udp_protobuf import parse_packet
def argparser(parser=None):
    """Attach this tool's CLI options to *parser* and return it.

    A fresh ArgumentParser is created when *parser* is None, so the function
    can either stand alone or extend an existing sub-command parser.
    """
    parser = parser or argparse.ArgumentParser(description=__doc__)
    # type=int is required: main() passes args.port straight into
    # socketserver.UDPServer, which needs an integer port. Without it a
    # CLI-supplied "-p 9999" arrived as a string (only the default was int).
    parser.add_argument("--port", "-p", type=int, default=10000, help="Port to listen on (10000)")
    # Each -v lowers the log threshold by 10 in main().
    parser.add_argument("--verbose", "-v", action="count", default=0)
    parser.add_argument(
        "--topic",
        "-t",
        default="/position",
        help="Topic name to listen for the robot position",
    )
    return parser
def main(args):
    """Run the live table viewer: plot static field features plus the robot
    position received as protobuf packets over UDP.

    Args:
        args: parsed CLI namespace with ``port``, ``verbose`` and ``topic``
            attributes (see ``argparser``).
    """
    # More -v flags -> lower threshold -> more verbose logging.
    logging.basicConfig(level=max(logging.CRITICAL - (10 * args.verbose), 0))
    app = QtGui.QApplication(sys.argv)
    app.setFont(QtGui.QFont("Open Sans", pointSize=20))
    # Guards `data`, which is mutated by the UDP handler thread.
    # NOTE(review): live_plot() reads `data` without taking this lock — confirm
    # LivePlotter2D tolerates concurrent dict updates.
    data_lock = threading.RLock()
    # Static features of the playing field, keyed by name. Each entry gives
    # polygon points ('pts', presumably millimetres — TODO confirm) and a
    # fill colour key understood by LivePlotter2D.
    data = {
        "Start1": {"pts": [(0, 300), (0, 600), (450, 600), (450, 300)], "fill": "r"},
        "Start2": {"pts": [(0, 600), (0, 900), (450, 900), (450, 600)], "fill": "g"},
        "Start3": {"pts": [(0, 900), (0, 1200), (450, 1200), (450, 900)], "fill": "b"},
        "Start4": {
            "pts": [(2550, 300), (2550, 600), (3000, 600), (3000, 300)],
            "fill": "r",
        },
        "Start5": {
            "pts": [(2550, 600), (2550, 900), (3000, 900), (3000, 600)],
            "fill": "g",
        },
        "Start6": {
            "pts": [(2550, 900), (2550, 1200), (3000, 1200), (3000, 900)],
            "fill": "b",
        },
        "Ramp": {
            "pts": [(450, 2000), (450, 1622), (2550, 1622), (2550, 2000)],
            "fill": "o",
        },
        "Ramp2": {
            "pts": [(1228, 2000), (1228, 1622), (1772, 1622), (1772, 2000)],
            "fill": "o",
        },
        "MidBeacon": {
            "pts": [(1300, 0), (1700, 0), (1700, -222), (1300, -222)],
            "fill": "grey",
        },
        "Experiment1": {
            "pts": [(0, 0), (0, -222), (450, -222), (450, 0)],
            "fill": "grey",
        },
        "Experiment2": {
            "pts": [(2550, 0), (2550, -222), (3000, -222), (3000, 0)],
            "fill": "grey",
        },
    }
    plot = LivePlotter2D((3000, 2000))
    plot.widget.show()
    curve = plot.getPort()
    def live_plot():
        # Push the shared data dict to the plot at ~10 Hz, forever.
        while True:
            curve.put(data)
            time.sleep(0.1)
    threading.Thread(target=live_plot).start()
    class Handler(socketserver.BaseRequestHandler):
        # Handles one UDP datagram: decode the protobuf packet and, if it is
        # on the configured topic, update the robot pose in `data`.
        def handle(self):
            req = self.request[0]
            header, msg = parse_packet(req)
            if header.name != args.topic:
                return
            with data_lock:
                data.update(
                    {
                        "robot": {
                            "x": msg.x,
                            "y": msg.y,
                            # msg.a is in radians; the plotter expects degrees.
                            "a": degrees(msg.a),
                            "r": 150,
                            "n": 6,
                            "fill": "cvra",
                        },
                    }
                )
    def udp_listener():
        # Blocks forever serving datagrams; runs on its own daemonless thread.
        with socketserver.UDPServer(("0.0.0.0", args.port), Handler) as server:
            server.serve_forever()
    threading.Thread(target=udp_listener).start()
    # Enter the Qt event loop unless running in an interactive PyQt session.
    if (sys.flags.interactive != 1) or not hasattr(QtCore, "PYQT_VERSION"):
        QtGui.QApplication.instance().exec_()
if __name__ == "__main__":
    # Parse CLI options and launch the viewer.
    args = argparser().parse_args()
    main(args)
|
[
"threading.Thread",
"argparse.ArgumentParser",
"pyqtgraph.Qt.QtGui.QApplication.instance",
"threading.RLock",
"math.degrees",
"time.sleep",
"log_udp_protobuf.parse_packet",
"socketserver.UDPServer",
"cvra_studio.viewers.LivePlotter2D.LivePlotter2D",
"pyqtgraph.Qt.QtGui.QApplication",
"pyqtgraph.Qt.QtGui.QFont"
] |
[((871, 899), 'pyqtgraph.Qt.QtGui.QApplication', 'QtGui.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (889, 899), False, 'from pyqtgraph.Qt import QtCore, QtGui\n'), ((973, 990), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (988, 990), False, 'import threading\n'), ((2327, 2354), 'cvra_studio.viewers.LivePlotter2D.LivePlotter2D', 'LivePlotter2D', (['(3000, 2000)'], {}), '((3000, 2000))\n', (2340, 2354), False, 'from cvra_studio.viewers.LivePlotter2D import LivePlotter2D\n'), ((387, 431), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (410, 431), False, 'import argparse\n'), ((916, 954), 'pyqtgraph.Qt.QtGui.QFont', 'QtGui.QFont', (['"""Open Sans"""'], {'pointSize': '(20)'}), "('Open Sans', pointSize=20)\n", (927, 954), False, 'from pyqtgraph.Qt import QtCore, QtGui\n'), ((2488, 2503), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (2498, 2503), False, 'import time\n'), ((2509, 2543), 'threading.Thread', 'threading.Thread', ([], {'target': 'live_plot'}), '(target=live_plot)\n', (2525, 2543), False, 'import threading\n'), ((2691, 2708), 'log_udp_protobuf.parse_packet', 'parse_packet', (['req'], {}), '(req)\n', (2703, 2708), False, 'from log_udp_protobuf import parse_packet\n'), ((3242, 3297), 'socketserver.UDPServer', 'socketserver.UDPServer', (["('0.0.0.0', args.port)", 'Handler'], {}), "(('0.0.0.0', args.port), Handler)\n", (3264, 3297), False, 'import socketserver\n'), ((3349, 3386), 'threading.Thread', 'threading.Thread', ([], {'target': 'udp_listener'}), '(target=udp_listener)\n', (3365, 3386), False, 'import threading\n'), ((3480, 3509), 'pyqtgraph.Qt.QtGui.QApplication.instance', 'QtGui.QApplication.instance', ([], {}), '()\n', (3507, 3509), False, 'from pyqtgraph.Qt import QtCore, QtGui\n'), ((3003, 3017), 'math.degrees', 'degrees', (['msg.a'], {}), '(msg.a)\n', (3010, 3017), False, 'from math import degrees\n')]
|
from unittest import TestCase
from app import create_app
from app.view.conversation_manager import generate_token
import json
class SearchRuleTestCase(TestCase):
    """
    Unit tests for the admin search-rule endpoint.
    LJF: all tests clear 2020-5-13
    """

    def setUp(self):
        # Fresh test client and a valid auth token for every test.
        self.app = create_app().test_client()
        self.myheaders = {'Content-Type': 'application/json'}
        self.token = generate_token(b'buaa', 3600)

    def _search(self, query):
        """GET admin/search_rule with *query* appended; return (response, JSON body)."""
        response = self.app.get('admin/search_rule' + query, headers=self.myheaders)
        return response, json.loads(response.data.decode('utf-8'))

    def test_no_attribute(self):
        response, payload = self._search('')
        self.assertEqual(payload['code'], 10000001)
        self.assertEqual(response.status_code, 400)

    def test_no_username(self):
        response, payload = self._search('?token=<PASSWORD>&id=')
        self.assertEqual(payload['code'], 10000001)
        self.assertEqual(response.status_code, 400)

    def test_no_token(self):
        response, payload = self._search('?username=wechatterbot&id=1')
        self.assertEqual(payload['code'], 10000001)
        self.assertEqual(response.status_code, 400)

    def test_wrong_username(self):
        response, payload = self._search(
            '?username=wechatterwhat&token=' + self.token + '&id=1')
        self.assertEqual(payload['code'], 10000044)
        self.assertEqual(response.status_code, 401)

    def test_wrong_token(self):
        bad_token = generate_token(b'what', 3600)
        response, payload = self._search(
            '?username=wechatterbot&token=' + bad_token + '&id=1')
        self.assertEqual(payload['code'], 10000044)
        self.assertEqual(response.status_code, 401)

    def test_empty_id_and_empty_text(self):
        response, payload = self._search('?username=wechatterbot&token=' + self.token)
        self.assertEqual(payload['code'], 10000001)
        self.assertEqual(response.status_code, 400)

    def test_empty_id_and_no_text(self):
        response, payload = self._search(
            '?username=wechatterbot&token=' + self.token + '&id=')
        self.assertEqual(payload['code'], 10000001)
        self.assertEqual(response.status_code, 400)

    def test_no_id_and_empty_text(self):
        response, payload = self._search(
            '?username=wechatterbot&token=' + self.token + '&text=')
        self.assertEqual(payload['code'], 10000001)
        self.assertEqual(response.status_code, 400)

    def test_no_id_and_no_text(self):
        response, payload = self._search('?username=wechatterbot&token=' + self.token)
        self.assertEqual(payload['code'], 10000001)
        self.assertEqual(response.status_code, 400)

    def test_id_not_a_number(self):
        # NOTE: only the error code is asserted here (no status-code check),
        # matching the original test's behaviour.
        _, payload = self._search(
            '?username=wechatterbot&token=' + self.token + '&id=string')
        self.assertEqual(payload['code'], 10000001)

    def test_successful_search_with_text(self):
        # Create a throwaway rule first so the text search has a hit.
        rule = {
            'response': '临时回复规则',
            'text': '临时规则内容',
            'username': 'wechatterbot',
            'token': self.token
        }
        self.app.post(
            'http://localhost:5000/admin/create_rule',
            data=json.dumps(rule),
            headers=self.myheaders
        )
        response, payload = self._search(
            '?username=wechatterbot&token=' + self.token + '&text=临时规则内容')
        self.assertEqual(payload['rules'][0]['text'], u"临时规则内容")
        self.assertEqual(response.status_code, 200)

    def test_successful_search_with_id(self):
        response, payload = self._search(
            '?username=wechatterbot&token=' + self.token + '&id=1')
        self.assertEqual(payload['rules'][0]['id'], 1)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(payload['number'], 1)
|
[
"app.view.conversation_manager.generate_token",
"app.create_app",
"json.dumps"
] |
[((408, 437), 'app.view.conversation_manager.generate_token', 'generate_token', (["b'buaa'", '(3600)'], {}), "(b'buaa', 3600)\n", (422, 437), False, 'from app.view.conversation_manager import generate_token\n'), ((1748, 1777), 'app.view.conversation_manager.generate_token', 'generate_token', (["b'what'", '(3600)'], {}), "(b'what', 3600)\n", (1762, 1777), False, 'from app.view.conversation_manager import generate_token\n'), ((298, 310), 'app.create_app', 'create_app', ([], {}), '()\n', (308, 310), False, 'from app import create_app\n'), ((4157, 4173), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (4167, 4173), False, 'import json\n')]
|
from __future__ import unicode_literals
from mayan.apps.dependencies.classes import PythonDependency
# Declare the 'graphviz' package, pinned to 0.10.1, as a Python dependency of
# this app — instantiating PythonDependency presumably registers it with
# Mayan's dependency framework (side effect at import time).
PythonDependency(
    module=__name__, name='graphviz', version_string='==0.10.1'
)
|
[
"mayan.apps.dependencies.classes.PythonDependency"
] |
[((103, 180), 'mayan.apps.dependencies.classes.PythonDependency', 'PythonDependency', ([], {'module': '__name__', 'name': '"""graphviz"""', 'version_string': '"""==0.10.1"""'}), "(module=__name__, name='graphviz', version_string='==0.10.1')\n", (119, 180), False, 'from mayan.apps.dependencies.classes import PythonDependency\n')]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider,Rule
from scrapy.http import FormRequest, Request
from urllib import urlencode
import json
import requests
import StringIO
from ..components import get_data_for_post
from ..items import *
# globals
# POST form payload for NetEase's encrypted comment API (built once, reused
# for every FormRequest below).
data = get_data_for_post()
# Request headers mimicking the NetEase web client; the 'appver' cookie is
# presumably required by the API — TODO confirm.
headers = {
    'Cookie': 'appver=1.5.0.75771;',
    'Referer': 'http://music.163.com/',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36'
    ' (KHTML, like Gecko) Chrome/53.0.2785.8 Safari/537.36'
}
# Comment endpoint for playlist A_PL_0_86040100; not referenced elsewhere in
# this file — TODO confirm it is still needed.
url = 'http://music.163.com/weapi/v1/resource/comments/' + 'A_PL_0_86040100' + '/?csrf_token='
# REST endpoints used by the spider; 'song_detail' is a format template
# taking the song id.
_API = {
    'user_playlist': 'http://music.163.com/api/user/playlist?',
    'playlist_detail': 'http://music.163.com/api/playlist/detail?',
    'song_detail': 'http://music.163.com/api/song/detail/?id={0}&ids=%5B{0}%5D'
}
# URL template for artist pages; not referenced in this file — TODO confirm.
_PATH = {
    'artist' : 'http://music.163.com/artist?id={0}',
}
class Music163Spider(CrawlSpider):
    """Crawl music.163.com: follow artist and song pages, download song audio,
    and walk commenters' playlists to discover more songs."""
    name = "song"
    allowed_domains = ["music.163.com"]
    start_urls = ["http://music.163.com/discover/artist"]

    # Raw strings: the original used plain strings with '\?'/'\d', which are
    # invalid escape sequences in Python 3 (same regex value either way).
    rules = [Rule(LinkExtractor(allow=r'/artist\?id=\d+')),
             Rule(LinkExtractor(allow=r'/song\?id=\d+'), callback="parse_song")]

    def parse_song(self, response):
        """For a song page, request the song-detail JSON and its comments."""
        # Song URLs look like http://music.163.com/song?id=NNN; the id starts
        # at offset 29 of the URL — computed once and reused (the original
        # sliced response.url twice).
        song_id = response.url[29:]
        detail_url = _API['song_detail'].format(song_id)
        yield Request(url=detail_url, callback=self.parse_song_detail)
        comments_url = ('http://music.163.com/weapi/v1/resource/comments/R_SO_4_'
                        + song_id + '/?csrf_token=')
        yield FormRequest(comments_url, formdata=data, headers=headers,
                          callback=self.parse_user_id)

    # sample
    # http://music.163.com/api/song/detail?id=17721274&ids=%5B17721274%5D
    def parse_song_detail(self, response):
        """Yield a SongAudioItem carrying the song's names and MP3 bytes."""
        result = json.loads(response.text)['songs'][0]
        item = SongAudioItem()
        item['album_name'] = result['album']['name']
        item['song_name'] = result['name']
        if result['mp3Url'] is None:
            # No audio available: store a placeholder file-like object so the
            # item pipeline always receives something readable.
            item['audio'] = StringIO.StringIO("Not Available.")
        else:
            r = requests.get(url=result['mp3Url'])
            item['audio'] = StringIO.StringIO(r.content)
        yield item

    def parse_user_id(self, response):
        """Schedule playlist listings for every commenter (regular and hot)."""
        result = json.loads(response.text)
        # Regular and hot comments were handled by two identical loops;
        # concatenating the lists preserves the original order
        # (regular comments first, then hot comments).
        for comment in result['comments'] + result['hotComments']:
            user_id = comment['user']['userId']
            req = {"offset": 0, "limit": 100, "uid": user_id}
            playlist_url = _API['user_playlist'] + urlencode(req)
            meta = {'req': req, 'name': comment['user']['nickname']}
            yield Request(url=playlist_url, callback=self.parse_user_playlist, meta=meta)

    # sample
    # http://music.163.com/api/user/playlist?uid=107273856&limit=100&offset=0
    def parse_user_playlist(self, response):
        """Request details for each playlist; follow pagination 100 at a time."""
        json_object = json.loads(response.text)
        for playlist in json_object['playlist']:
            detail_url = _API['playlist_detail'] + urlencode({'id': playlist['id']})
            yield Request(detail_url, callback=self.parse_playlist)
        if json_object['more']:
            req = response.meta['req']
            req['offset'] = int(req['offset']) + 100
            next_url = _API['user_playlist'] + urlencode(req)
            yield Request(url=next_url, callback=self.parse_user_playlist, meta={'req': req})

    # sample
    # http://music.163.com/api/playlist/detail?id=578391
    def parse_playlist(self, response):
        """For each track in the playlist, fetch song detail and comments."""
        result = json.loads(response.text)['result']
        for song in result['tracks']:
            song_id = song['id']
            yield Request(url=_API['song_detail'].format(song_id),
                          callback=self.parse_song_detail)
            comments_url = ('http://music.163.com/weapi/v1/resource/comments/R_SO_4_'
                            + str(song_id) + '/?csrf_token=')
            yield FormRequest(comments_url, formdata=data, headers=headers,
                              callback=self.parse_user_id)
|
[
"scrapy.http.Request",
"json.loads",
"scrapy.http.FormRequest",
"requests.get",
"urllib.urlencode",
"scrapy.linkextractors.LinkExtractor",
"StringIO.StringIO"
] |
[((1796, 1875), 'scrapy.http.FormRequest', 'FormRequest', (['url_2'], {'formdata': 'data', 'headers': 'headers', 'callback': 'self.parse_user_id'}), '(url_2, formdata=data, headers=headers, callback=self.parse_user_id)\n', (1807, 1875), False, 'from scrapy.http import FormRequest, Request\n'), ((2045, 2070), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (2055, 2070), False, 'import json\n'), ((2633, 2658), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (2643, 2658), False, 'import json\n'), ((3600, 3625), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (3610, 3625), False, 'import json\n'), ((4719, 4744), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (4729, 4744), False, 'import json\n'), ((1176, 1216), 'scrapy.linkextractors.LinkExtractor', 'LinkExtractor', ([], {'allow': '"""/artist\\\\?id=\\\\d+"""'}), "(allow='/artist\\\\?id=\\\\d+')\n", (1189, 1216), False, 'from scrapy.linkextractors import LinkExtractor\n'), ((1395, 1433), 'scrapy.linkextractors.LinkExtractor', 'LinkExtractor', ([], {'allow': '"""/song\\\\?id=\\\\d+"""'}), "(allow='/song\\\\?id=\\\\d+')\n", (1408, 1433), False, 'from scrapy.linkextractors import LinkExtractor\n'), ((1621, 1672), 'scrapy.http.Request', 'Request', ([], {'url': 'url_1', 'callback': 'self.parse_song_detail'}), '(url=url_1, callback=self.parse_song_detail)\n', (1628, 1672), False, 'from scrapy.http import FormRequest, Request\n'), ((2302, 2337), 'StringIO.StringIO', 'StringIO.StringIO', (['"""Not Available."""'], {}), "('Not Available.')\n", (2319, 2337), False, 'import StringIO\n'), ((2406, 2440), 'requests.get', 'requests.get', ([], {'url': "result['mp3Url']"}), "(url=result['mp3Url'])\n", (2418, 2440), False, 'import requests\n'), ((2465, 2493), 'StringIO.StringIO', 'StringIO.StringIO', (['r.content'], {}), '(r.content)\n', (2482, 2493), False, 'import StringIO\n'), ((5284, 5361), 'scrapy.http.FormRequest', 
'FormRequest', (['url'], {'formdata': 'data', 'headers': 'headers', 'callback': 'self.parse_user_id'}), '(url, formdata=data, headers=headers, callback=self.parse_user_id)\n', (5295, 5361), False, 'from scrapy.http import FormRequest, Request\n'), ((2925, 2939), 'urllib.urlencode', 'urlencode', (['req'], {}), '(req)\n', (2934, 2939), False, 'from urllib import urlencode\n'), ((3027, 3089), 'scrapy.http.Request', 'Request', ([], {'url': 'url', 'callback': 'self.parse_user_playlist', 'meta': 'meta'}), '(url=url, callback=self.parse_user_playlist, meta=meta)\n', (3034, 3089), False, 'from scrapy.http import FormRequest, Request\n'), ((3277, 3291), 'urllib.urlencode', 'urlencode', (['req'], {}), '(req)\n', (3286, 3291), False, 'from urllib import urlencode\n'), ((3378, 3440), 'scrapy.http.Request', 'Request', ([], {'url': 'url', 'callback': 'self.parse_user_playlist', 'meta': 'meta'}), '(url=url, callback=self.parse_user_playlist, meta=meta)\n', (3385, 3440), False, 'from scrapy.http import FormRequest, Request\n'), ((4088, 4109), 'urllib.urlencode', 'urlencode', (["{'id': id}"], {}), "({'id': id})\n", (4097, 4109), False, 'from urllib import urlencode\n'), ((4128, 4170), 'scrapy.http.Request', 'Request', (['url'], {'callback': 'self.parse_playlist'}), '(url, callback=self.parse_playlist)\n', (4135, 4170), False, 'from scrapy.http import FormRequest, Request\n'), ((4338, 4352), 'urllib.urlencode', 'urlencode', (['req'], {}), '(req)\n', (4347, 4352), False, 'from urllib import urlencode\n'), ((4371, 4441), 'scrapy.http.Request', 'Request', ([], {'url': 'url', 'callback': 'self.parse_user_playlist', 'meta': "{'req': req}"}), "(url=url, callback=self.parse_user_playlist, meta={'req': req})\n", (4378, 4441), False, 'from scrapy.http import FormRequest, Request\n'), ((5113, 5162), 'scrapy.http.Request', 'Request', ([], {'url': 'url', 'callback': 'self.parse_song_detail'}), '(url=url, callback=self.parse_song_detail)\n', (5120, 5162), False, 'from scrapy.http import 
FormRequest, Request\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import sys
if (sys.version_info > (3,)):
from urllib.parse import quote
else:
from urllib import quote
from io import StringIO
import os
import jsonpickle
from cairis.core.DocumentReference import DocumentReference
from cairis.test.CairisDaemonTestCase import CairisDaemonTestCase
from cairis.mio.ModelImport import importModelFile
from cairis.tools.JsonConverter import json_deserialize
import os
__author__ = '<NAME>'
class DocumentReferenceAPITests(CairisDaemonTestCase):
  """CRUD round-trip tests for the /api/document_references endpoints."""

  @classmethod
  def setUpClass(cls):
    # Load the ACME Water exemplar model into the shared 'test' session once.
    importModelFile(os.environ['CAIRIS_SRC'] + '/../examples/exemplars/ACME_Water/ACME_Water.xml',1,'test')

  def setUp(self):
    self.logger = logging.getLogger(__name__)
    # Fixture object posted by the create/update/delete tests.
    self.new_dr = DocumentReference(
      refId = '-1',
      refName = 'Test document reference name',
      docName = 'Alarm handling GT concept',
      cName = 'SF',
      docExc = 'Test text segment')
    self.new_dr_dict = {
      'session_id' : 'test',
      'object': self.new_dr
    }
    # A reference that already exists in the imported exemplar model.
    self.existing_dr_name = 'Role restrictions'

  @staticmethod
  def _response_text(rv):
    """Return the response body as text on both Python 2 and Python 3.

    The original tests repeated this version-check/decode branch seven times.
    """
    if (sys.version_info > (3,)):
      return rv.data.decode('utf-8')
    return rv.data

  def test_get_all(self):
    method = 'test_get_document_references'
    url = '/api/document_references?session_id=test'
    self.logger.info('[%s] URL: %s', method, url)
    rv = self.app.get(url)
    responseData = self._response_text(rv)
    drs = jsonpickle.decode(responseData)
    self.assertIsNotNone(drs, 'No results after deserialization')
    self.assertIsInstance(drs, list, 'The result is not a list as expected')
    self.assertGreater(len(drs), 0, 'No document references in the dictionary')
    self.logger.info('[%s] Document references found: %d', method, len(drs))
    dr = drs[0]
    self.logger.info('[%s] First document reference: %s\n', method, dr['theName'])

  def test_get_by_name(self):
    method = 'test_get_by_name'
    url = '/api/document_references/name/%s?session_id=test' % quote(self.existing_dr_name)
    rv = self.app.get(url)
    responseData = self._response_text(rv)
    self.assertIsNotNone(responseData, 'No response')
    self.logger.debug('[%s] Response data: %s', method, responseData)
    dr = jsonpickle.decode(responseData)
    self.assertIsNotNone(dr, 'No results after deserialization')
    self.logger.info('[%s] Document reference: %s\n', method, dr['theName'])

  def test_post(self):
    method = 'test_post_new'
    rv = self.app.post('/api/document_references', content_type='application/json', data=jsonpickle.encode(self.new_dr_dict))
    responseData = self._response_text(rv)
    self.logger.debug('[%s] Response data: %s', method, responseData)
    json_resp = json_deserialize(responseData)
    self.assertIsNotNone(json_resp, 'No results after deserialization')
    ackMsg = json_resp.get('message', None)
    self.assertEqual(ackMsg, 'Document Reference successfully added')

  def test_put(self):
    method = 'test_put'
    self.new_dr_dict['object'].theExcerpt = 'Updated text segment'
    url = '/api/document_references/name/%s?session_id=test' % quote(self.existing_dr_name)
    rv = self.app.put(url, content_type='application/json', data=jsonpickle.encode(self.new_dr_dict))
    responseData = self._response_text(rv)
    self.logger.debug('[%s] Response data: %s', method, responseData)
    json_resp = json_deserialize(responseData)
    self.assertIsNotNone(json_resp, 'No results after deserialization')
    ackMsg = json_resp.get('message', None)
    self.assertEqual(ackMsg, 'Document Reference successfully updated')

  def test_delete(self):
    method = 'test_delete'
    # Create the reference first so the delete has a target.
    rv = self.app.post('/api/document_references', content_type='application/json', data=jsonpickle.encode(self.new_dr_dict))
    responseData = self._response_text(rv)
    self.logger.debug('[%s] Response data: %s', method, responseData)
    json_resp = json_deserialize(responseData)
    url = '/api/document_references/name/%s?session_id=test' % quote(self.new_dr.theName)
    rv = self.app.delete(url)
    responseData = self._response_text(rv)
    self.logger.debug('[%s] Response data: %s', method, responseData)
    json_resp = json_deserialize(responseData)
    self.assertIsNotNone(json_resp, 'No results after deserialization')
    ackMsg = json_resp.get('message', None)
    self.assertEqual(ackMsg, 'Document Reference successfully deleted')
|
[
"urllib.quote",
"logging.getLogger",
"cairis.mio.ModelImport.importModelFile",
"jsonpickle.decode",
"cairis.core.DocumentReference.DocumentReference",
"cairis.tools.JsonConverter.json_deserialize",
"jsonpickle.encode"
] |
[((1337, 1446), 'cairis.mio.ModelImport.importModelFile', 'importModelFile', (["(os.environ['CAIRIS_SRC'] + '/../examples/exemplars/ACME_Water/ACME_Water.xml')", '(1)', '"""test"""'], {}), "(os.environ['CAIRIS_SRC'] +\n '/../examples/exemplars/ACME_Water/ACME_Water.xml', 1, 'test')\n", (1352, 1446), False, 'from cairis.mio.ModelImport import importModelFile\n'), ((1480, 1507), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1497, 1507), False, 'import logging\n'), ((1526, 1681), 'cairis.core.DocumentReference.DocumentReference', 'DocumentReference', ([], {'refId': '"""-1"""', 'refName': '"""Test document reference name"""', 'docName': '"""Alarm handling GT concept"""', 'cName': '"""SF"""', 'docExc': '"""Test text segment"""'}), "(refId='-1', refName='Test document reference name',\n docName='Alarm handling GT concept', cName='SF', docExc='Test text segment'\n )\n", (1543, 1681), False, 'from cairis.core.DocumentReference import DocumentReference\n'), ((2179, 2210), 'jsonpickle.decode', 'jsonpickle.decode', (['responseData'], {}), '(responseData)\n', (2196, 2210), False, 'import jsonpickle\n'), ((3043, 3074), 'jsonpickle.decode', 'jsonpickle.decode', (['responseData'], {}), '(responseData)\n', (3060, 3074), False, 'import jsonpickle\n'), ((3600, 3630), 'cairis.tools.JsonConverter.json_deserialize', 'json_deserialize', (['responseData'], {}), '(responseData)\n', (3616, 3630), False, 'from cairis.tools.JsonConverter import json_deserialize\n'), ((4329, 4359), 'cairis.tools.JsonConverter.json_deserialize', 'json_deserialize', (['responseData'], {}), '(responseData)\n', (4345, 4359), False, 'from cairis.tools.JsonConverter import json_deserialize\n'), ((4932, 4962), 'cairis.tools.JsonConverter.json_deserialize', 'json_deserialize', (['responseData'], {}), '(responseData)\n', (4948, 4962), False, 'from cairis.tools.JsonConverter import json_deserialize\n'), ((5288, 5318), 'cairis.tools.JsonConverter.json_deserialize', 'json_deserialize', 
(['responseData'], {}), '(responseData)\n', (5304, 5318), False, 'from cairis.tools.JsonConverter import json_deserialize\n'), ((2736, 2764), 'urllib.quote', 'quote', (['self.existing_dr_name'], {}), '(self.existing_dr_name)\n', (2741, 2764), False, 'from urllib import quote\n'), ((3994, 4022), 'urllib.quote', 'quote', (['self.existing_dr_name'], {}), '(self.existing_dr_name)\n', (3999, 4022), False, 'from urllib import quote\n'), ((5027, 5053), 'urllib.quote', 'quote', (['self.new_dr.theName'], {}), '(self.new_dr.theName)\n', (5032, 5053), False, 'from urllib import quote\n'), ((3359, 3394), 'jsonpickle.encode', 'jsonpickle.encode', (['self.new_dr_dict'], {}), '(self.new_dr_dict)\n', (3376, 3394), False, 'import jsonpickle\n'), ((4088, 4123), 'jsonpickle.encode', 'jsonpickle.encode', (['self.new_dr_dict'], {}), '(self.new_dr_dict)\n', (4105, 4123), False, 'import jsonpickle\n'), ((4691, 4726), 'jsonpickle.encode', 'jsonpickle.encode', (['self.new_dr_dict'], {}), '(self.new_dr_dict)\n', (4708, 4726), False, 'import jsonpickle\n')]
|
# -*- coding: utf-8 -*-
#BEGIN_HEADER
import logging
import os
import re
import subprocess
import sys
import shutil
import traceback
import uuid
from datetime import datetime
from pprint import pformat
from installed_clients.WorkspaceClient import Workspace as workspaceService
from installed_clients.KBaseReportClient import KBaseReport
from installed_clients.ReadsUtilsClient import ReadsUtils
from installed_clients.DataFileUtilClient import DataFileUtil
from installed_clients.GenomeFileUtilClient import GenomeFileUtil
from installed_clients.AssemblyUtilClient import AssemblyUtil
#from installed_clients.SetAPIServiceClient import SetAPI_Service # if you want to use the service wizard
from installed_clients.SetAPIClient import SetAPI # if you want to run as SDK_LOCAL
from installed_clients.kb_meta_decoderClient import kb_meta_decoder
#END_HEADER
class kb_StrainFinder:
'''
Module Name:
kb_StrainFinder
Module Description:
A KBase module: kb_StrainFinder
'''
######## WARNING FOR GEVENT USERS ####### noqa
# Since asynchronous IO can lead to methods - even the same method -
# interrupting each other, you must be *very* careful when using global
# state. A method could easily clobber the state set by another while
# the latter method is running.
######################################### noqa
VERSION = "0.1.4"
GIT_URL = "https://github.com/kbaseapps/kb_StrainFinder"
GIT_COMMIT_HASH = "dc761622114212ce15a511d3691f019352fcba92"
#BEGIN_CLASS_HEADER
# binaries
STRAINFINDER_v1_installdir = "/kb/module/strainfinder"
STRAINFINDER_v1_bin = os.path.join(STRAINFINDER_v1_installdir, "strainFinder.py")
STRAINFINDER_v1_RUN_FIT_bin = os.path.join(STRAINFINDER_v1_installdir, "example", "run_fit.py")
#VCFTOOLS_bin = "/usr/local/bin/vcftools"
# timestamp
def now_ISO(self):
now_timestamp = datetime.now()
now_secs_from_epoch = (now_timestamp - datetime(1970,1,1)).total_seconds()
now_timestamp_in_iso = datetime.fromtimestamp(int(now_secs_from_epoch)).strftime('%Y-%m-%d_%T')
return now_timestamp_in_iso
# message logging
def log(self, target, message):
message = '['+self.now_ISO()+'] '+message
if target is not None:
target.append(message)
print(message)
sys.stdout.flush()
def _translate_nuc_to_prot_seq(self, nuc_seq=None, genetic_code=None, keep_stop=False, truncate_to_stop=False):
if not genetic_code:
genetic_code = '11'
if genetic_code != '11':
raise ValueError('Method _translate_nuc_to_prot_seq() only knows genetic code 11')
stop_char = '*'
nuc_seq = nuc_seq.upper()
prot_seq = ''
genetic_code_table = dict()
genetic_code_table['11'] = {
'ATA':'I', 'ATC':'I', 'ATT':'I', 'ATG':'M',
'ACA':'T', 'ACC':'T', 'ACG':'T', 'ACT':'T',
'AAC':'N', 'AAT':'N', 'AAA':'K', 'AAG':'K',
'AGC':'S', 'AGT':'S', 'AGA':'R', 'AGG':'R',
'CTA':'L', 'CTC':'L', 'CTG':'L', 'CTT':'L',
'CCA':'P', 'CCC':'P', 'CCG':'P', 'CCT':'P',
'CAC':'H', 'CAT':'H', 'CAA':'Q', 'CAG':'Q',
'CGA':'R', 'CGC':'R', 'CGG':'R', 'CGT':'R',
'GTA':'V', 'GTC':'V', 'GTG':'V', 'GTT':'V',
'GCA':'A', 'GCC':'A', 'GCG':'A', 'GCT':'A',
'GAC':'D', 'GAT':'D', 'GAA':'E', 'GAG':'E',
'GGA':'G', 'GGC':'G', 'GGG':'G', 'GGT':'G',
'TCA':'S', 'TCC':'S', 'TCG':'S', 'TCT':'S',
'TTC':'F', 'TTT':'F', 'TTA':'L', 'TTG':'L',
'TAC':'Y', 'TAT':'Y', 'TAA':stop_char, 'TAG':stop_char,
'TGC':'C', 'TGT':'C', 'TGA':stop_char, 'TGG':'W'
}
if genetic_code not in genetic_code_table:
raise ValueError ("genetic code '"+str(genetic_code)+"' not configured in genetic_code_table")
prot_seq = ''.join([genetic_code_table[genetic_code].get(nuc_seq[3*i:3*i+3],'X') for i in range(len(nuc_seq)//3)])
if truncate_to_stop:
new_prot_seq = ''
for c in prot_seq:
new_prot_seq += c
if c == stop_char:
break
prot_seq = new_prot_seq
if prot_seq.endswith(stop_char) and not keep_stop:
prot_seq = prot_seq.rstrip(stop_char)
return prot_seq
def _reverse_complement (self, nuc_seq):
nuc_seq = nuc_seq.upper()
complement = { 'G': 'C',
'C': 'G',
'A': 'T',
'T': 'A',
'U': 'A'
}
rev_nuc_seq = ''
for c in nuc_seq[::-1]:
rev_nuc_seq += complement[c]
return rev_nuc_seq
def read_fasta_file (self, fasta_file):
fasta_buf = dict()
headers = dict()
id_order = []
console = []
with open (fasta_file, 'r') as fasta_handle:
last_header = None
last_id = None
seq = ''
for fasta_row in fasta_handle.readlines():
fasta_row = fasta_row.rstrip()
if fasta_row.startswith('>'):
this_header = fasta_row
this_id = this_header.replace('>','',1)
this_id = re.sub(' .*$','',this_id)
id_order.append(this_id)
if last_id != None:
headers[last_id] = last_header
fasta_buf[last_id] = seq
last_header = this_header
last_id = this_id
seq = ''
else:
seq += fasta_row.replace(' ','')
if last_id != None:
headers[last_id] = last_header
fasta_buf[last_id] = seq
last_header = None
last_id = None
seq = ''
return {'fasta': fasta_buf,
'headers': headers,
'id_order': id_order
}
#END_CLASS_HEADER
# config contains contents of config file in a hash or None if it couldn't
# be found
def __init__(self, config):
#BEGIN_CONSTRUCTOR
self.shared_folder = config['scratch']
logging.basicConfig(format='%(created)s %(levelname)s: %(message)s',
level=logging.INFO)
self.config = config
self.workspaceURL = config['workspace-url']
self.shockURL = config['shock-url']
self.handleURL = config['handle-service-url']
self.serviceWizardURL = config['srv-wiz-url']
self.callbackURL = os.environ.get('SDK_CALLBACK_URL')
if self.callbackURL == None:
raise ValueError ("SDK_CALLBACK_URL not set in environment")
self.scratch = os.path.abspath(config['scratch'])
if self.scratch == None:
self.scratch = os.path.join('/kb','module','local_scratch')
if not os.path.exists(self.scratch):
os.makedirs(self.scratch)
self.SE_flag = 'SE'
self.PE_flag = 'PE'
#END_CONSTRUCTOR
pass
def run_StrainFinder_v1(self, ctx, params):
"""
This example function accepts any number of parameters and returns results in a KBaseReport
:param params: instance of type "StrainFinder_v1_InputType" ->
structure: parameter "workspace_name" of String, parameter
"in_genome_ref" of String, parameter "in_readslib_refs" of list of
String, parameter "out_genomeSet_obj_name" of String, parameter
"min_mapping_quality" of Long, parameter "min_depth" of Long
:returns: instance of type "ReportResults" -> structure: parameter
"report_name" of String, parameter "report_ref" of String
"""
# ctx is the context object
# return variables are: output
#BEGIN run_StrainFinder_v1
#### STEP 0: Init
##
DEBUG_MODE = 0
method = 'run_StrainFinder_v1'
console = []
report_text = ''
html_links = []
file_links = []
objects_created = []
self.log(console, 'Running run_StrainFinder_v1() with parameters: ')
self.log(console, "\n"+pformat(params))
token = ctx['token']
headers = {'Authorization': 'OAuth '+token}
env = os.environ.copy()
env['KB_AUTH_TOKEN'] = token
output_dir = os.path.join(self.scratch, 'output_'+str(uuid.uuid4()))
if not os.path.exists(output_dir):
os.makedirs(output_dir)
# object_info tuple
[OBJID_I, NAME_I, TYPE_I, SAVE_DATE_I, VERSION_I, SAVED_BY_I, WSID_I, WORKSPACE_I, CHSUM_I, SIZE_I, META_I] = range(11)
# Client handles
#SERVICE_VER = 'dev' # DEBUG
SERVICE_VER = 'release'
try:
wsClient = workspaceService(self.workspaceURL, token=token)
except Exception as e:
raise ValueError('Unable to get Workspace Client' +"\n" + str(e))
try:
dfuClient = DataFileUtil (url=self.callbackURL, token=token) # SDK local
except Exception as e:
raise ValueError('Unable to get DataFileUtil Client' +"\n" + str(e))
try:
gfuClient = GenomeFileUtil (url=self.callbackURL, token=token) # SDK local
except Exception as e:
raise ValueError('Unable to get GenomeFileUtil Client' +"\n" + str(e))
try:
auClient = AssemblyUtil (url=self.callbackURL, token=token) # SDK local
except Exception as e:
raise ValueError('Unable to get AssemblyUtil Client' +"\n" + str(e))
try:
#setAPI_Client = SetAPI_Service (url=self.serviceWizardURL, token=token, service_ver=SERVICE_VER) # Service
setAPI_Client = SetAPI (url=self.callbackURL, token=token, service_ver=SERVICE_VER) # SDK Local
except Exception as e:
raise ValueError('Unable to get SetAPI Client' +"\n" + str(e))
#### STEP 1: Param checks
##
required_params = ['workspace_name',
'in_genome_ref',
'in_readslib_refs',
'min_mapping_quality',
'min_depth',
'out_genomeSet_obj_name'
]
for required_param in required_params:
if required_param not in params or params[required_param] == None:
raise ValueError ("Must define required param: '"+required_param+"'")
"""
# and param defaults
defaults = { 'split_num': 10
}
for arg in defaults.keys():
if arg not in params or params[arg] == None or params[arg] == '':
params[arg] = defaults[arg]
"""
#### STEP 2: Configure overall provenance
##
provenance = [{}]
if 'provenance' in ctx:
provenance = ctx['provenance']
provenance[0]['input_ws_objects']=[]
provenance[0]['input_ws_objects'].append(params['in_genome_ref'])
provenance[0]['input_ws_objects'].extend(params['in_readslib_refs'])
provenance[0]['service'] = 'kb_StrainFinder'
provenance[0]['method'] = method
#### STEP 3: Get Reads refs
##
expanded_reads = []
input_ref_seen = dict()
SE_types = ['KBaseFile.SingleEndLibrary', 'KBaseAssembly.SingleEndLibrary']
PE_types = ['KBaseFile.PairedEndLibrary', 'KBaseAssembly.PairedEndLibrary']
[OBJID_I, NAME_I, TYPE_I, SAVE_DATE_I, VERSION_I, SAVED_BY_I, WSID_I, WORKSPACE_I, CHSUM_I, SIZE_I, META_I] = range(11) # object_info tuple
for input_ref in params['in_readslib_refs']:
input_info = wsClient.get_object_info3({'objects': [{'ref': input_ref}]})['infos'][0]
obj_name = input_info[NAME_I]
type_name = input_info[TYPE_I].split('-')[0]
# ReadsSet
if type_name in ['KBaseSets.ReadsSet']:
try:
input_readsSet_obj = setAPI_Client.get_reads_set_v1 ({'ref':input_ref,'include_item_info':1})
except Exception as e:
raise ValueError('SetAPI FAILURE: Unable to get read library set object from workspace: (' + str(input_ref)+")\n" + str(e))
for readsLibrary_obj in input_readsSet_obj['data']['items']:
this_reads_ref = readsLibrary_obj['ref']
if this_reads_ref in input_ref_seen:
continue
input_ref_seen[this_reads_ref] = True
this_reads_name = readsLibrary_obj['info'][NAME_I]
reads_item_type = readsLibrary_obj['info'][TYPE_I]
reads_item_type = re.sub ('-[0-9]+\.[0-9]+$', "", reads_item_type) # remove trailing version
if reads_item_type in PE_types:
this_reads_type = self.PE_flag
elif reads_item_type in SE_types:
this_reads_type = self.SE_flag
else:
raise ValueError ("Can't handle read item type '"+reads_item_type+"' obj_name: '"+this_reads_name+" in Set: '"+str(input_ref)+"'")
expanded_reads.append({'ref': this_reads_ref,
'name': this_reads_name,
'type': this_reads_type
})
# SingleEnd Library
elif type_name in SE_types:
this_reads_ref = input_ref
if this_reads_ref in input_ref_seen:
continue
input_ref_seen[this_reads_ref] = True
this_reads_name = obj_name
this_reads_type = self.SE_flag
expanded_reads.append({'ref': this_reads_ref,
'name': this_reads_name,
'type': this_reads_type
})
# PairedEnd Library
elif type_name in PE_types:
this_reads_ref = input_ref
if this_reads_ref in input_ref_seen:
continue
input_ref_seen[this_reads_ref] = True
this_reads_name = obj_name
this_reads_type = self.PE_flag
expanded_reads.append({'ref': this_reads_ref,
'name': this_reads_name,
'type': this_reads_type
})
else:
raise ValueError ("Illegal type in input_refs: "+str(obj_name)+" ("+str(input_ref)+") is of type: '"+str(type_name)+"'")
expanded_reads_refs = []
for reads_element in expanded_reads:
expanded_reads_refs.append(reads_element['ref'])
"""
PairedEndTypes = ["KBaseFile.PairedEndLibrary","KBaseAssembly.PairedEndLibrary"]
SingleEndTypes = ["KBaseFile.SingleEndLibrary","KBaseAssembly.SingleEndLibrary"]
acceptable_types = PairedEndTypes + SingleEndTypes
try:
input_reads_ref = params['in_readslib_ref']
input_reads_obj_info = wsClient.get_object_info_new ({'objects':[{'ref':input_reads_ref}]})[0]
input_reads_obj_type = input_reads_obj_info[TYPE_I]
input_reads_obj_type = re.sub ('-[0-9]+\.[0-9]+$', "", input_reads_obj_type) # remove trailing version
#input_reads_obj_version = input_reads_obj_info[VERSION_I] # this is object version, not type version
except Exception as e:
raise ValueError('Unable to get read library object info from workspace: (' +
str(input_reads_ref) +')' + str(e))
if input_reads_obj_type not in acceptable_types:
raise ValueError ("Input reads of type: '"+input_reads_obj_type+"'. Must be one of "+", ".join(acceptable_types))
# Download Reads
self.log (console, "DOWNLOADING READS") # DEBUG
try:
readsUtils_Client = ReadsUtils (url=self.callbackURL, token=ctx['token']) # SDK local
except Exception as e:
raise ValueError('Unable to get ReadsUtils Client' +"\n" + str(e))
try:
readsLibrary = readsUtils_Client.download_reads ({'read_libraries': [input_reads_ref],
'interleaved': 'true'
})
except Exception as e:
raise ValueError('Unable to download read library sequences from workspace: (' + str(input_reads_ref) +")\n" + str(e))
if input_reads_obj_type in PairedEndTypes:
# Download reads Libs to FASTQ files
in_reads_file_path = readsLibrary['files'][input_reads_ref]['files']['fwd']
else: # if input_reads_obj_type in SingleEndTypes:
in_reads_file_path = readsLibrary['files'][input_reads_ref]['files']['fwd']
"""
#### STEP 4: Get Genome's Assembly ref
##
self.log(console, "GETTING GENOME ASSEMBLY OBJECT")
try:
genome_object_ret = wsClient.get_objects2({'objects':[{'ref':params['in_genome_ref']}]})['data'][0]
except Exception as e:
raise ValueError ("unable to get genome object "+params['in_genome_ref']+". "+str(e))
genome_object = genome_object_ret['data']
genome_obj_info = genome_object_ret['info']
input_genome_obj_name = genome_obj_info[NAME_I]
if not genome_object.get('assembly_ref'):
raise ValueError ('OLD Genome type cannot be used with method. Please update your Genome '+params['in_genome_ref']+' to modern Geomee object format')
assembly_ref = genome_object['assembly_ref']
#### STEP 5: Do Read mapping and get VCF variant info
##
self.log(console, "READ MAPPING PHASE")
ws_info = wsClient.get_workspace_info({'workspace': params['workspace_name']})
workspace_id = ws_info[0]
sub_method = 'call_variants'
call_variants_params = {
'workspace_name': params['workspace_name'],
'workspace_id': workspace_id,
'assembly_ref': assembly_ref,
#'reads_ref': params['in_readslib_ref'],
'reads_refs': expanded_reads_refs,
'min_mapping_quality': params['min_mapping_quality'],
'min_depth': params['min_depth'],
'output_vcf': params['out_genomeSet_obj_name']+'.VCF' # may need to make separate names
}
#MD_SERVICE_VER = 'release'
MD_SERVICE_VER = 'dev'
try:
mdClient = kb_meta_decoder(self.callbackURL, token=token, service_ver=MD_SERVICE_VER)
except Exception as e:
raise ValueError("unable to instantiate metadecoderClient. "+str(e))
try:
self.log(console, "RUNNING call_variants()")
this_retVal = mdClient.call_variants(call_variants_params)
except Exception as e:
raise ValueError ("unable to run "+sub_method+". "+str(e))
try:
this_report_obj = wsClient.get_objects2({'objects':[{'ref':this_retVal['report_ref']}]})['data'][0]['data']
except Exception as e:
raise ValueError("unable to fetch "+sub_method+" report: " + this_retVal['report_ref']+". "+str(e))
# save this report obj for later
metadecoder_call_variants_reportObj = this_report_obj
#self.log(console, pformat(metadecoder_call_variants_reportObj)) # DEBUG
#### STEP 6: Extract VCFs
##
## Note: this should ultimately come from a VCF object with an API to spit it to file
## currently will pull VCF from SHOCK directly.
## from https://github.com/kbaseapps/kb_meta_decoder/blob/master/lib/kb_meta_decoder/kb_meta_decoderImpl.py
## file_links[0] is BAM
## file_links[1] is VCF
##
""" It'll be something like this
vcf_types = ['KBaseVariation.VCF']
for obj in metadecoder_call_variants_reportObj['objects_created']:
obj_info = wsClient.get_obj_info_new({'objects':[{'ref':obj_ref}]})[0]
obj_type = re.sub('-[0-9]+\.[0-9]+$', "", obj_info[TYPE_I]) # remove trailing version
if obj_type in vcf_types:
vcf_ref = obj_ref
break
"""
vcf_files = []
for reads_lib_i,reads_ref in enumerate(expanded_reads_refs):
self.log(console, "EXTRACTING VCF for ReadsLib "+str(reads_lib_i+1))
vcf_shock_handle_id = None
vcf_seen_cnt = 0;
for file_link in metadecoder_call_variants_reportObj['file_links']:
if re.search('VCF', file_link['label']):
vcf_seen_cnt += 1
if vcf_seen_cnt == (reads_lib_i+1):
vcf_shock_handle_id = file_link['handle'] # not 'handle_id'
#self.log(console,"VCF label selected: "+file_link['label']) # DEBUG
break
if not vcf_shock_handle_id:
raise ValueError ("Failure to find VCF in SHOCK from alignment submethod")
vcf_file = os.path.join(self.scratch, params['out_genomeSet_obj_name']+'-'+str(reads_lib_i)+'.vcf')
vcf_dl_result = dfuClient.shock_to_file({'handle_id': vcf_shock_handle_id,
'file_path': vcf_file,
'unpack': 'uncompress'
})
vcf_files.append(vcf_file)
self.log(console,"VCF file: "+vcf_file) # DEBUG
# DEBUG
#with open(vcf_file, 'r') as vcf_file_handle:
# for line in vcf_file_handle.readlines():
# self.log(console, 'VCF_line: '+line)
#### STEP 7: Parse VCF to get polymorphism frequencies
##
run_dirs_list = []
position_row_index = []
for reads_lib_i,reads_ref in enumerate(expanded_reads_refs):
self.log(console, "Parsing VCF to polymorphism frequencies for ReadsLib "+str(reads_lib_i+1))
vcf_file = vcf_files[reads_lib_i]
#self.log(console,"VCF file: "+vcf_file) # DEBUG
vcf_buf = []
SNP_freqs = dict()
with open (vcf_file, 'r') as vcf_handle:
vcf_buf = vcf_handle.readlines()
(CONTIG_ID_I, POS_I, SNP_ID_I, REF_SEQ_I, ALT_SEQ_I, QUAL_I, FILTER_I, INFO_I, FORMAT_I, EXTRA_INFO_I) = range(10)
bases = ['A', 'C', 'G', 'T']
base_i = { 'A': 0,
'C': 1,
'G': 2,
'T': 3,
'U': 3
}
for vcf_line in vcf_buf:
if vcf_line.startswith('#'):
continue
vcf_line = vcf_line.rstrip()
row = vcf_line.split()
contig_id = row[CONTIG_ID_I]
pos = row[POS_I]
ref_seq = row[REF_SEQ_I]
alt_seqs = row[ALT_SEQ_I]
var_info = row[INFO_I]
var_format = row[FORMAT_I]
var_extra_info = row[EXTRA_INFO_I]
# just record SNPs
if not var_format.startswith('GT:PL'): # skip invariant positions
continue
if var_info.startswith('INDEL'): # can't handle INDELs yet
continue
# init
if contig_id not in SNP_freqs:
SNP_freqs[contig_id] = dict()
if pos not in SNP_freqs[contig_id]:
SNP_freqs[contig_id][pos] = []
for i in range(4):
SNP_freqs[contig_id][pos].append(0)
# get counts
counts = []
alt_seq_list = alt_seqs.split(',')
[GT, PL, AD_counts_str] = var_extra_info.split(':')
counts = list(map(int, AD_counts_str.split(',')))
total_counts = 0
for cnt in counts:
total_counts += cnt
SNP_freqs[contig_id][pos][base_i[ref_seq]] = int(0.5 + 100.0 * counts[0] / float(total_counts))
for alt_i,alt_seq in enumerate(alt_seq_list):
SNP_freqs[contig_id][pos][base_i[alt_seq]] = int(0.5 + 100.0 * float(counts[1+alt_i]) / float(total_counts))
# write SNPs in strainfinder format and record position for later contig sequence
#run_dir = os.path.join(self.STRAINFINDER_v1_installdir, 'example')
this_run_dir = os.path.join(self.STRAINFINDER_v1_installdir, 'run_'+str(reads_lib_i))
if not os.path.exists(this_run_dir):
os.makedirs (this_run_dir)
run_dirs_list.append(this_run_dir)
allele_counts_file = os.path.join(this_run_dir, 'allele_counts.txt')
#self.log(console,"ALLELE COUNTS file: "+allele_counts_file) # DEBUG
if os.path.exists(allele_counts_file):
shutil.move(allele_counts_file, allele_counts_file+'.orig-'+str(reads_lib_i))
allele_counts_buf = []
position_row_index.append([])
allele_counts_buf.append("\t".join(['# A', 'C', 'G', 'T'])+"\n")
for contig_id in sorted(SNP_freqs.keys()):
for pos in sorted(SNP_freqs[contig_id].keys()):
position_row_index[reads_lib_i].append("\t".join([contig_id, pos]))
allele_counts_buf.append("\t".join(list(map(str, SNP_freqs[contig_id][pos])))+"\n")
with open (allele_counts_file, 'w') as allele_counts_handle:
allele_counts_handle.writelines(allele_counts_buf)
# DEBUG
#with open(allele_counts_file, 'r') as file_handle:
# for line in file_handle.readlines():
# self.log(console, 'ALLELE_line: '+line)
#### STEP 8: Run StrainFinder
##
fitted_genomes_files = []
abund_vecs = []
for reads_lib_i,reads_lib_ref in enumerate(expanded_reads_refs):
self.log(console, "RUNNING STRAINFINDER for ReadsLib "+str(reads_lib_i+1))
this_run_dir = run_dirs_list[reads_lib_i]
fitted_genomes_file = os.path.join(this_run_dir, 'fitted_genomes.txt')
if os.path.exists(fitted_genomes_file):
os.remove(fitted_genomes_file)
#self.log(console,"FITTED GENOMES file: "+fitted_genomes_file) # DEBUG
# Some subprocesses require shell=True in order to see input data
# also, if you do a redirect to out, you must join command first
this_STRAINFINDER_v1_RUN_FIT_bin = os.path.join(this_run_dir, "run_fit.py")
#shutil.copy(self.STRAINFINDER_v1_RUN_FIT_bin, this_STRAINFINDER_v1_RUN_FIT_bin)
bin_buf = []
with open (self.STRAINFINDER_v1_RUN_FIT_bin, 'r') as src_bin_handle:
for line in src_bin_handle.readlines():
if 'True strain relative abundances' in line:
continue
bin_buf.append(line)
with open (this_STRAINFINDER_v1_RUN_FIT_bin, 'w') as dst_bin_handle:
dst_bin_handle.writelines(bin_buf)
strainfinder_cmd = ['python']
strainfinder_cmd.append(this_STRAINFINDER_v1_RUN_FIT_bin)
#env = os.environ.copy()
#p = subprocess.Popen([joined_cmd], \
p = subprocess.Popen(strainfinder_cmd, \
cwd = this_run_dir, \
stdout = subprocess.PIPE, \
stderr = subprocess.STDOUT, \
shell = False)
#env = env)
# Read output
#
while True:
line = p.stdout.readline()
#line = p.stderr.readline()
if not line: break
self.log(console, line.replace('\n', ''))
if line.startswith('Inferred strain relative abundances = ['):
#self.log(console,line) # DEBUG
inferred_abundances = line.replace('Inferred strain relative abundances = [','')
inferred_abundances = inferred_abundances.replace(']','')
inferred_abundances = inferred_abundances.strip()
inferred_abundances = inferred_abundances.replace(' ',' ')
abund_vec = inferred_abundances.split()
abund_vecs.append(abund_vec)
p.stdout.close()
#p.stderr.close()
p.wait()
self.log(console, 'return code: ' + str(p.returncode))
if p.returncode != 0:
raise ValueError('Error running STRAINFINDER, return code: '+str(p.returncode) +
'\n\n'+ '\n'.join(console))
# Check that STRAINFINDER produced output
#
if not os.path.isfile(fitted_genomes_file):
raise ValueError("failed to create STRAINFINDER output: "+fitted_genomes_file)
fitted_genomes_files.append(fitted_genomes_file)
#### STEP 9: download in genome assembly fasta file, gff file, and read fields from Genome obj
##
## Note: running call_variants() appears to delete the input assembly
## so must download that assembly AFTER (or copy it to somewhere safe)
## we're doing it now.
src_scientific_name = None
src_source = None
src_release = None
src_genetic_code = None
#src_taxon_wsname = None
#src_taxon_id = None
# Get genome obj to read fields to set on GFF upload
try:
input_genome_obj = wsClient.get_objects2({'objects':[{'ref': params['in_genome_ref']}]})['data'][0]['data']
except:
raise ValueError ("unable to get genome obj "+params['in_genome_ref']+". "+str(e))
if 'scientific_name' in input_genome_obj:
src_scientific_name = input_genome_obj['scientific_name']
if 'source' in input_genome_obj:
src_source = input_genome_obj['source']
if 'release' in input_genome_obj:
src_release = input_genome_obj['release']
if 'genetic_code' in input_genome_obj:
src_genetic_code = input_genome_obj['genetic_code']
# Get genome assembly as fasta file
try:
input_genome_fasta_file = auClient.get_assembly_as_fasta({'ref':assembly_ref})['path']
except Exception as e:
raise ValueError ("unable to get genome fasta "+params['in_genome_ref']+". "+str(e))
# DEBUG
if DEBUG_MODE == 1:
input_genome_fasta_file = "/kb/module/dev_test/data/Bin.002.Genome.assembly.fa"
fasta_read = self.read_fasta_file(input_genome_fasta_file)
base_genome_fasta = fasta_read['fasta']
base_genome_headers = fasta_read['headers']
base_genome_contigID_order = fasta_read['id_order']
# Get GFF as file
try:
input_genome_gff_file = gfuClient.genome_to_gff({
'genome_ref': params['in_genome_ref'],
'target_dir': output_dir})['file_path']
except:
raise ValueError ("unable to get genome gff "+params['in_genome_ref']+". "+str(e))
# DEBUG
if DEBUG_MODE == 1:
input_genome_gff_file = "/kb/module/dev_test/data/Bin.002.fasta_assembly.RAST.gff"
#### STEP 10: Create Assembly FASTAs for strain genomes
##
all_new_genome_refs = []
all_new_genome_names = []
all_set_elements = dict()
num_genomes_found_per_readslib = []
num_genomes_found_this_readslib = 0
num_strain_genomes_generated = 0
for reads_lib_i,reads_lib_ref in enumerate(expanded_reads_refs):
self.log(console, "GETTING ALLELES FOR STRAIN MODES for ReadsLib "+str(reads_lib_i+1))
fitted_genomes_file = fitted_genomes_files[reads_lib_i]
fitted_genomes_rows = []
with open (fitted_genomes_file, 'r') as fitted_genomes_handle:
for row in fitted_genomes_handle.readlines():
fitted_genomes_rows.append(row.rstrip().split())
num_genomes_found_this_readslib = len(fitted_genomes_rows[0])
num_genomes_found_per_readslib.append(num_genomes_found_this_readslib)
# only one strain mode (may not be same as reference so do generate strain genome
if num_genomes_found_this_readslib == 1:
msg = "ReadsLib "+str(reads_lib_i)+": StrainFinder found only one strain in the data. Not creating GenomeSet for Reads Library "+str(reads_lib_i+1)
self.log(console, msg)
report_text += msg+"\n"
# multiple strain modes
num_strain_genomes_generated += num_genomes_found_this_readslib
# set provenance to just include this reads lib for input_ws_objects
this_provenance = provenance
this_provenance[0]['input_ws_objects']=[]
this_provenance[0]['input_ws_objects'].append(params['in_genome_ref'])
this_provenance[0]['input_ws_objects'].append(reads_lib_ref)
#### Make revised Assembly Fasta
##
new_fasta_files = []
for genome_i in range(num_genomes_found_per_readslib[reads_lib_i]):
new_genome_fasta = dict()
new_genome_headers = dict()
new_genome_fasta_len = dict()
for contigID in base_genome_contigID_order:
new_genome_fasta[contigID] = base_genome_fasta[contigID]
new_genome_headers[contigID] = base_genome_headers[contigID]
new_genome_fasta_len[contigID] = len(base_genome_fasta[contigID])
for row_i,row in enumerate(fitted_genomes_rows):
this_allele = row[genome_i]
[this_contigID, this_pos_n] = position_row_index[reads_lib_i][row_i].split("\t")
pos_i = int(this_pos_n)-1
if pos_i == 0:
new_genome_fasta[this_contigID] = this_allele + new_genome_fasta[this_contigID][pos_i+1:]
elif pos_i == new_genome_fasta_len[this_contigID]-1:
new_genome_fasta[this_contigID] = new_genome_fasta[this_contigID][:pos_i] + this_allele
else:
new_genome_fasta[this_contigID] = new_genome_fasta[this_contigID][:pos_i] + this_allele + new_genome_fasta[this_contigID][pos_i+1:]
this_fasta_file = os.path.join(output_dir, 'new_fasta-'+str(reads_lib_i)+'-'+str(genome_i)+'-'+str(uuid.uuid4())+'.fasta')
new_fasta_files.append(this_fasta_file)
with open (this_fasta_file, 'w') as this_fasta_handle:
for contigID in base_genome_contigID_order:
this_fasta_handle.write(new_genome_headers[contigID]+"\n")
this_fasta_handle.write(new_genome_fasta[contigID]+"\n")
#### Check for stop codons and if yes, adjust GFFs
##
self.log(console, "READING FEATURES")
SNP_pos = dict()
SNP_CDS = dict()
# initialize and determine SNP positions
for contigID in base_genome_contigID_order:
SNP_pos[contigID] = []
SNP_CDS[contigID] = dict()
for row_i,row in enumerate(fitted_genomes_rows):
[this_contigID, this_pos_n] = position_row_index[reads_lib_i][row_i].split("\t")
SNP_pos[this_contigID].append(int(this_pos_n))
# read base genome feature locations and determine which have SNPs
with open (input_genome_gff_file, 'r') as gff_handle:
for gff_line in gff_handle.readlines():
gff_line = gff_line.rstrip()
if gff_line.startswith('#'):
continue
[contigID, annot_db, locus_type, beg_str, end_str, d1, strand, d2, info] = gff_line.split("\t")
beg = int(beg_str)
end = int(end_str)
feature_has_SNP = False
for pos_n in SNP_pos[contigID]:
if pos_n >= beg and pos_n <= end:
feature_has_SNP = True
break
if feature_has_SNP:
#report_text += gff_line+"\n" # too big to include
if locus_type == 'CDS':
loc = ",".join([str(beg),str(end),strand])
SNP_CDS[contigID][loc] = True
# build strain genomes
new_gff_files = []
for genome_i in range(num_genomes_found_per_readslib[reads_lib_i]):
new_gff_buf = []
with open (input_genome_gff_file, 'r') as gff_handle:
for gff_line in gff_handle.readlines():
gff_line = gff_line.rstrip()
if gff_line.startswith('#'):
new_gff_buf.append(gff_line)
continue
[contigID, annot_db, locus_type, beg_str, end_str, d1, strand, d2, info] = gff_line.split("\t")
# this will cover both CDS and parent gene
loc = ",".join([str(beg),str(end),strand])
if loc not in SNP_CDS[contigID]:
new_gff_buf.append(gff_line)
continue
# else check for altered stop
contig_len = len(base_genome_fasta[contigID])
if end <= contig_len:
nuc_seq = base_genome_fasta[contigID][beg-1:end]
else:
nuc_seq = base_genome_fasta[contigID][beg-1:contig_len] + \
base_genome_fasta[contigID][0:end-contig_len+1]
if strand == '-':
nuc_seq = self._reverse_complement(nuc_seq)
prot_seq = self._translate_nuc_to_prot_seq(nuc_seq=nuc_seq, keep_stop=True, truncate_to_stop=True)
old_len = end - beg + 1
new_len = 3 * len(prot_seq)
if new_len == old_len and prot_seq.endswith('*'):
new_gff_buf.append(gff_line)
continue
if not prot_seq.endswith('*'):
if strand == '-':
new_nuc_seq = self._reverse_complement(base_genome_fasta[contigID][0:end])
else:
new_nuc_seq = base_genome_fasta[contigID][beg-1:contig_len]
new_prot_seq = self._translate_nuc_to_prot_seq(nuc_seq=new_nuc_seq, keep_stop=True, truncate_to_stop=True)
new_len = 3 * len(new_prot_seq)
# adjust feature coords
if strand == '-':
new_beg = end - new_len+1
new_end = end
else:
new_beg = beg
new_end = beg + new_len-1
if new_end <= contig_len:
new_gff_line = "\t".join([contigID, annot_db, locus_type, str(new_beg), str(new_end), d1, strand, d2, info])
new_gff_buf.append(new_gff_line)
else:
if new_beg > contig_len:
new_beg_p1 = new_beg - contig_len
new_end_p1 = new_end - contig_len
new_gff_line = "\t".join([contigID, annot_db, locus_type, str(new_beg_p1), str(new_end_p1), d1, strand, d2, info])
new_gff_buf.append(new_gff_line)
else: # break into two
new_beg_p1 = new_beg
new_end_p1 = contig_len
new_beg_p2 = 1
new_end_p2 = new_end - contig_len
new_gff_line_p1 = "\t".join([contigID, annot_db, locus_type, str(new_beg_p1), str(new_end_p1), d1, strand, d2, info])
new_gff_line_p2 = "\t".join([contigID, annot_db, locus_type, str(new_beg_p2), str(new_end_p2), d1, strand, d2, info])
new_gff_buf.append(new_gff_line_p1)
new_gff_buf.append(new_gff_line_p2)
this_gff_file = os.path.join(output_dir, 'new_gff-'+str(reads_lib_i)+'-'+str(genome_i)+'-'+str(uuid.uuid4())+'.gff')
new_gff_files.append(this_gff_file)
with open (this_gff_file, 'w') as this_gff_handle:
for gff_line in new_gff_buf:
this_gff_handle.write(gff_line+"\n")
#### STEP 12: Upload Strain Genomes and make GenomeSet for this ReadsLib
##
self.log(console, "UPLOADING GENOMES for ReadsLib "+str(reads_lib_i+1))
new_genome_refs = []
new_genome_names = []
set_elements = dict()
#items = []
for genome_i in range(num_genomes_found_per_readslib[reads_lib_i]):
new_genome_obj_name = re.sub(r'\.[^\.]+$','',params['out_genomeSet_obj_name'])
new_genome_obj_name += '-Reads_'+str(reads_lib_i+1)+'-Strain_'+str(genome_i+1)
new_genome_obj_name += ".Genome"
self.log(console, "UPLOADING "+new_genome_obj_name)
new_genome_ref = gfuClient.fasta_gff_to_genome({
'workspace_name': params['workspace_name'],
'fasta_file': {'path': new_fasta_files[genome_i]},
'gff_file': {'path': new_gff_files[genome_i]},
'genome_name': new_genome_obj_name,
'scientific_name': src_scientific_name,
'source': src_source,
'release': src_release,
'genetic_code': src_genetic_code
})['genome_ref']
new_genome_names.append(new_genome_obj_name)
new_genome_refs.append(new_genome_ref)
all_new_genome_names.append(new_genome_obj_name)
all_new_genome_refs.append(new_genome_ref)
set_elements[new_genome_obj_name] = dict()
set_elements[new_genome_obj_name]['ref'] = new_genome_ref
all_set_elements[new_genome_obj_name] = dict()
all_set_elements[new_genome_obj_name]['ref'] = new_genome_ref
# attach created Genome objs to report
for genome_i in range(num_genomes_found_per_readslib[reads_lib_i]):
objects_created.append({'ref': new_genome_refs[genome_i],
'description': new_genome_names[genome_i]+' StrainFinder Genome'})
# DEBUG
#self.log(console, 'SET:'+"\n"+pformat(set_elements))
#self.log(console, 'PROV:'+"\n"+pformat(this_provenance))
# HERE
# create GenomeSet for this ReadsLib
if num_genomes_found_per_readslib[reads_lib_i] > 1 and len(expanded_reads_refs) > 1:
genomeSet_name = params['out_genomeSet_obj_name']+'-Reads_'+str(reads_lib_i+1)
genomeSet_obj = { 'description': 'Strain Genomes of '+input_genome_obj_name+' from ReadsLib '+str(reads_lib_i+1),
#'items': items
'elements': set_elements
}
try:
#output_genomeSet_ref = setAPI_Client.save_genome_set_v1 ({'workspace_name': params['workspace_name'],
# 'output_object_name': genomeSet_name,
# 'data': genomeSet_obj
# })['set_ref']
new_obj_info = wsClient.save_objects({'workspace': params['workspace_name'],
'objects': [{'type': 'KBaseSearch.GenomeSet',
'data': genomeSet_obj,
'name': genomeSet_name,
'meta': {},
'provenance': this_provenance
}]
})[0]
except Exception as e:
raise ValueError('SetAPI FAILURE: Unable to save genome set object to workspace: (' + params['workspace_name']+")\n" + str(e))
genomeSet_ref = '/'.join([str(new_obj_info[WORKSPACE_I]),
str(new_obj_info[OBJID_I]),
str(new_obj_info[VERSION_I])])
objects_created.append({'ref': genomeSet_ref,
'description': 'StrainFinder GenomeSet for ReadsLib '+str(reads_lib_i+1)})
#### STEP 13: Add haplotype impact on genotype to Report? Just minimally for now
##
self.log(console, "ANALYZING GENOTYPES")
for genome_i in range(num_genomes_found_per_readslib[reads_lib_i]):
msg = 'READS LIB '+str(reads_lib_i+1)+' GENOME '+str(genome_i+1)+' RELATIVE ABUNDANCE: '+str(100*(float(abund_vecs[reads_lib_i][genome_i])))+' %'+"\n"
self.log(console, msg)
report_text += msg
#### STEP 14: create GenomeSet for all generated Strains
##
if num_strain_genomes_generated > 1:
genomeSet_name = params['out_genomeSet_obj_name']
genomeSet_obj = { 'description': 'Strain Genomes of '+input_genome_obj_name+' from ALL ReadsLibs',
#'items': items
'elements': all_set_elements
}
try:
#output_genomeSet_ref = setAPI_Client.save_genome_set_v1 ({'workspace_name': params['workspace_name'],
# 'output_object_name': genomeSet_name,
# 'data': genomeSet_obj
# })['set_ref']
new_obj_info = wsClient.save_objects({'workspace': params['workspace_name'],
'objects': [{'type': 'KBaseSearch.GenomeSet',
'data': genomeSet_obj,
'name': genomeSet_name,
'meta': {},
'provenance': provenance
}]
})[0]
except Exception as e:
raise ValueError('SetAPI FAILURE: Unable to save genome set object to workspace: (' + params['workspace_name']+")\n" + str(e))
genomeSet_ref = '/'.join([str(new_obj_info[WORKSPACE_I]),
str(new_obj_info[OBJID_I]),
str(new_obj_info[VERSION_I])])
objects_created.append({'ref': genomeSet_ref,
'description': 'StrainFinder GenomeSet for ALL ReadsLibs'})
#### STEP 15: Create Report
##
self.log(console, "CREATING REPORT")
# instantiate report object
reportName = method+'_report_' + str(uuid.uuid4())
reportObj = {#'objects_created': [],
'direct_html_link_index': 0,
'file_links': [],
'html_links': [],
'workspace_name': params['workspace_name'],
'report_object_name': reportName
}
# can't just copy substructures because format of those fields in report
# object different from the format needed to pass to create_extended_report() method.
# for example, below doesn't work
#for field in ('direct_html_link_index', 'file_links', 'html_links'):
# reportObj[field] = view_tree_reportObj[field]
# self.log<(console, "REPORT "+field+": "+pformat(view_tree_reportObj[field])) # DEBUG
#
# attach created objects and files to final report
#objects_created.append(metadecoder_call_variants_reportObj['objects_created'])
for file_link_item in metadecoder_call_variants_reportObj['file_links']:
#this_shock_id = file_link_item['URL']
this_shock_id = re.sub('^.*/', '', file_link_item['URL'])
new_file_link_item = {'shock_id': this_shock_id,
'name': file_link_item['name'],
'label': file_link_item['label']
}
file_links.append(new_file_link_item)
for html_link_item in metadecoder_call_variants_reportObj['html_links']:
#this_shock_id = html_link_item['URL']
this_shock_id = re.sub('^.*/', '', html_link_item['URL'])
new_html_link_item = {'shock_id': this_shock_id,
'name': html_link_item['name'],
'label': html_link_item['label']
}
html_links.append(new_html_link_item)
# until we replace with StrainFinder report info
reportObj['direct_html_link_index'] = metadecoder_call_variants_reportObj['direct_html_link_index']
reportObj['html_links'] = html_links
reportObj['file_links'] = file_links
if len(objects_created) > 0:
reportObj['objects_created'] = objects_created
#reportObj['objects_created'] = objects_created.reverse() # this displays better order (I think)
if report_text:
reportObj['message'] = report_text
# save report object
try:
reportClient = KBaseReport(self.callbackURL, token=ctx['token'], service_ver=SERVICE_VER)
except:
raise ValueError ("unable to instantiate KBaseReport")
report_info = reportClient.create_extended_report(reportObj)
# Done
self.log(console, "BUILDING RETURN OBJECT")
output = {'report_name': report_info['name'],
'report_ref': report_info['ref']
}
#END run_StrainFinder_v1
# At some point might do deeper type checking...
if not isinstance(output, dict):
raise ValueError('Method run_StrainFinder_v1 return value ' +
'output is not type dict as required.')
# return the results
return [output]
def status(self, ctx):
#BEGIN_STATUS
returnVal = {'state': "OK",
'message': "",
'version': self.VERSION,
'git_url': self.GIT_URL,
'git_commit_hash': self.GIT_COMMIT_HASH}
#END_STATUS
return [returnVal]
|
[
"installed_clients.WorkspaceClient.Workspace",
"pprint.pformat",
"os.remove",
"installed_clients.KBaseReportClient.KBaseReport",
"os.environ.copy",
"installed_clients.AssemblyUtilClient.AssemblyUtil",
"os.path.isfile",
"sys.stdout.flush",
"os.path.join",
"os.path.abspath",
"os.path.exists",
"installed_clients.DataFileUtilClient.DataFileUtil",
"datetime.datetime.now",
"installed_clients.kb_meta_decoderClient.kb_meta_decoder",
"re.sub",
"re.search",
"subprocess.Popen",
"datetime.datetime",
"installed_clients.GenomeFileUtilClient.GenomeFileUtil",
"uuid.uuid4",
"os.makedirs",
"logging.basicConfig",
"os.environ.get",
"installed_clients.SetAPIClient.SetAPI"
] |
[((1646, 1705), 'os.path.join', 'os.path.join', (['STRAINFINDER_v1_installdir', '"""strainFinder.py"""'], {}), "(STRAINFINDER_v1_installdir, 'strainFinder.py')\n", (1658, 1705), False, 'import os\n'), ((1740, 1805), 'os.path.join', 'os.path.join', (['STRAINFINDER_v1_installdir', '"""example"""', '"""run_fit.py"""'], {}), "(STRAINFINDER_v1_installdir, 'example', 'run_fit.py')\n", (1752, 1805), False, 'import os\n'), ((1935, 1949), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1947, 1949), False, 'from datetime import datetime\n'), ((2379, 2397), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2395, 2397), False, 'import sys\n'), ((6406, 6499), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(created)s %(levelname)s: %(message)s"""', 'level': 'logging.INFO'}), "(format='%(created)s %(levelname)s: %(message)s', level=\n logging.INFO)\n", (6425, 6499), False, 'import logging\n'), ((6783, 6817), 'os.environ.get', 'os.environ.get', (['"""SDK_CALLBACK_URL"""'], {}), "('SDK_CALLBACK_URL')\n", (6797, 6817), False, 'import os\n'), ((6952, 6986), 'os.path.abspath', 'os.path.abspath', (["config['scratch']"], {}), "(config['scratch'])\n", (6967, 6986), False, 'import os\n'), ((8517, 8534), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (8532, 8534), False, 'import os\n'), ((7047, 7093), 'os.path.join', 'os.path.join', (['"""/kb"""', '"""module"""', '"""local_scratch"""'], {}), "('/kb', 'module', 'local_scratch')\n", (7059, 7093), False, 'import os\n'), ((7107, 7135), 'os.path.exists', 'os.path.exists', (['self.scratch'], {}), '(self.scratch)\n', (7121, 7135), False, 'import os\n'), ((7149, 7174), 'os.makedirs', 'os.makedirs', (['self.scratch'], {}), '(self.scratch)\n', (7160, 7174), False, 'import os\n'), ((8673, 8699), 'os.path.exists', 'os.path.exists', (['output_dir'], {}), '(output_dir)\n', (8687, 8699), False, 'import os\n'), ((8713, 8736), 'os.makedirs', 'os.makedirs', (['output_dir'], {}), '(output_dir)\n', (8724, 
8736), False, 'import os\n'), ((9034, 9082), 'installed_clients.WorkspaceClient.Workspace', 'workspaceService', (['self.workspaceURL'], {'token': 'token'}), '(self.workspaceURL, token=token)\n', (9050, 9082), True, 'from installed_clients.WorkspaceClient import Workspace as workspaceService\n'), ((9229, 9276), 'installed_clients.DataFileUtilClient.DataFileUtil', 'DataFileUtil', ([], {'url': 'self.callbackURL', 'token': 'token'}), '(url=self.callbackURL, token=token)\n', (9241, 9276), False, 'from installed_clients.DataFileUtilClient import DataFileUtil\n'), ((9440, 9489), 'installed_clients.GenomeFileUtilClient.GenomeFileUtil', 'GenomeFileUtil', ([], {'url': 'self.callbackURL', 'token': 'token'}), '(url=self.callbackURL, token=token)\n', (9454, 9489), False, 'from installed_clients.GenomeFileUtilClient import GenomeFileUtil\n'), ((9654, 9701), 'installed_clients.AssemblyUtilClient.AssemblyUtil', 'AssemblyUtil', ([], {'url': 'self.callbackURL', 'token': 'token'}), '(url=self.callbackURL, token=token)\n', (9666, 9701), False, 'from installed_clients.AssemblyUtilClient import AssemblyUtil\n'), ((9990, 10056), 'installed_clients.SetAPIClient.SetAPI', 'SetAPI', ([], {'url': 'self.callbackURL', 'token': 'token', 'service_ver': 'SERVICE_VER'}), '(url=self.callbackURL, token=token, service_ver=SERVICE_VER)\n', (9996, 10056), False, 'from installed_clients.SetAPIClient import SetAPI\n'), ((19107, 19181), 'installed_clients.kb_meta_decoderClient.kb_meta_decoder', 'kb_meta_decoder', (['self.callbackURL'], {'token': 'token', 'service_ver': 'MD_SERVICE_VER'}), '(self.callbackURL, token=token, service_ver=MD_SERVICE_VER)\n', (19122, 19181), False, 'from installed_clients.kb_meta_decoderClient import kb_meta_decoder\n'), ((25547, 25594), 'os.path.join', 'os.path.join', (['this_run_dir', '"""allele_counts.txt"""'], {}), "(this_run_dir, 'allele_counts.txt')\n", (25559, 25594), False, 'import os\n'), ((25692, 25726), 'os.path.exists', 'os.path.exists', (['allele_counts_file'], {}), 
'(allele_counts_file)\n', (25706, 25726), False, 'import os\n'), ((27002, 27050), 'os.path.join', 'os.path.join', (['this_run_dir', '"""fitted_genomes.txt"""'], {}), "(this_run_dir, 'fitted_genomes.txt')\n", (27014, 27050), False, 'import os\n'), ((27066, 27101), 'os.path.exists', 'os.path.exists', (['fitted_genomes_file'], {}), '(fitted_genomes_file)\n', (27080, 27101), False, 'import os\n'), ((27438, 27478), 'os.path.join', 'os.path.join', (['this_run_dir', '"""run_fit.py"""'], {}), "(this_run_dir, 'run_fit.py')\n", (27450, 27478), False, 'import os\n'), ((28231, 28350), 'subprocess.Popen', 'subprocess.Popen', (['strainfinder_cmd'], {'cwd': 'this_run_dir', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT', 'shell': '(False)'}), '(strainfinder_cmd, cwd=this_run_dir, stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT, shell=False)\n', (28247, 28350), False, 'import subprocess\n'), ((50623, 50664), 're.sub', 're.sub', (['"""^.*/"""', '""""""', "file_link_item['URL']"], {}), "('^.*/', '', file_link_item['URL'])\n", (50629, 50664), False, 'import re\n'), ((51084, 51125), 're.sub', 're.sub', (['"""^.*/"""', '""""""', "html_link_item['URL']"], {}), "('^.*/', '', html_link_item['URL'])\n", (51090, 51125), False, 'import re\n'), ((52000, 52074), 'installed_clients.KBaseReportClient.KBaseReport', 'KBaseReport', (['self.callbackURL'], {'token': "ctx['token']", 'service_ver': 'SERVICE_VER'}), "(self.callbackURL, token=ctx['token'], service_ver=SERVICE_VER)\n", (52011, 52074), False, 'from installed_clients.KBaseReportClient import KBaseReport\n'), ((8396, 8411), 'pprint.pformat', 'pformat', (['params'], {}), '(params)\n', (8403, 8411), False, 'from pprint import pformat\n'), ((21211, 21247), 're.search', 're.search', (['"""VCF"""', "file_link['label']"], {}), "('VCF', file_link['label'])\n", (21220, 21247), False, 'import re\n'), ((25394, 25422), 'os.path.exists', 'os.path.exists', (['this_run_dir'], {}), '(this_run_dir)\n', (25408, 25422), False, 'import os\n'), 
((25440, 25465), 'os.makedirs', 'os.makedirs', (['this_run_dir'], {}), '(this_run_dir)\n', (25451, 25465), False, 'import os\n'), ((27119, 27149), 'os.remove', 'os.remove', (['fitted_genomes_file'], {}), '(fitted_genomes_file)\n', (27128, 27149), False, 'import os\n'), ((29812, 29847), 'os.path.isfile', 'os.path.isfile', (['fitted_genomes_file'], {}), '(fitted_genomes_file)\n', (29826, 29847), False, 'import os\n'), ((42727, 42786), 're.sub', 're.sub', (['"""\\\\.[^\\\\.]+$"""', '""""""', "params['out_genomeSet_obj_name']"], {}), "('\\\\.[^\\\\.]+$', '', params['out_genomeSet_obj_name'])\n", (42733, 42786), False, 'import re\n'), ((49521, 49533), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (49531, 49533), False, 'import uuid\n'), ((1997, 2017), 'datetime.datetime', 'datetime', (['(1970)', '(1)', '(1)'], {}), '(1970, 1, 1)\n', (2005, 2017), False, 'from datetime import datetime\n'), ((5416, 5443), 're.sub', 're.sub', (['""" .*$"""', '""""""', 'this_id'], {}), "(' .*$', '', this_id)\n", (5422, 5443), False, 'import re\n'), ((8643, 8655), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (8653, 8655), False, 'import uuid\n'), ((13046, 13094), 're.sub', 're.sub', (['"""-[0-9]+\\\\.[0-9]+$"""', '""""""', 'reads_item_type'], {}), "('-[0-9]+\\\\.[0-9]+$', '', reads_item_type)\n", (13052, 13094), False, 'import re\n'), ((35692, 35704), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (35702, 35704), False, 'import uuid\n'), ((42022, 42034), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (42032, 42034), False, 'import uuid\n')]
|
import urllib.parse
import datetime
import requests
from plugin import *
class weather(plugin):
    """Bot plugin reporting current weather and multi-day forecasts
    from the openweathermap HTTP API (responses cached for 3 minutes)."""
    def __init__(self, bot):
        super().__init__(bot)
        # %s placeholders are filled with the URL-quoted city name and the API key.
        self.weather_url = r'http://api.openweathermap.org/data/2.5/weather?q=%s' \
                           r'&units=metric' \
                           r'&appid=%s'
        self.forecast_url = r'http://api.openweathermap.org/data/2.5/forecast?q=%s' \
                            r'&units=metric' \
                            r'&appid=%s'
    class forecast_info:
        """Value object: one day's aggregated forecast numbers and conditions."""
        def __init__(self, max_temp, min_temp, avg_wind_speed, avg_humidity, conditions):
            self.max_temp = max_temp
            self.min_temp = min_temp
            self.avg_wind_speed = avg_wind_speed
            self.avg_humidity = avg_humidity
            self.conditions = conditions
    @doc('weather <location>: get current weather conditions in <location> from openweathermap (updated every ~2 hours)')
    @command
    def weather(self, sender_nick, msg, **kwargs):
        """Bot command: announce the latest recorded conditions for the city in *msg*."""
        if not msg: return
        self.logger.info(f'getting weather in {msg} for {sender_nick}')
        weather_info = self.get_weather_info(msg)
        if not weather_info:
            self.bot.say_err()
            return
        prefix = color.orange(f'[Latest recorded weather for {weather_info["name"]}, {weather_info["sys"]["country"]}]')
        results = []
        # every field is optional in the API response, so probe before reading
        if 'main' in weather_info and 'temp' in weather_info['main']:
            results.append(f'{self.colorize_temp(weather_info["main"]["temp"])} °C')
        if 'weather' in weather_info and len(weather_info['weather']) > 0:
            results.append(f'{weather_info["weather"][0]["description"]}')
        if 'main' in weather_info and 'humidity' in weather_info['main']:
            results.append(f'relative humidity: {weather_info["main"]["humidity"]}%')
        if 'wind' in weather_info and 'speed' in weather_info['wind'] and 'deg' in weather_info['wind']:
            results.append(f'wind speed: {weather_info["wind"]["speed"]}mps {self.wind_degree_to_direction(weather_info["wind"]["deg"])}')
        self.bot.say(f'{prefix} {" :: ".join(results)}')
    @doc('forecast <location>: get weather forecast in <location> from openweathermap')
    @command
    @command_alias('weather_forecast')
    def forecast(self, sender_nick, msg, **kwargs):
        """Bot command: announce forecasts for today, tomorrow and the day after."""
        if not msg: return
        self.logger.info(f'getting weather forecast in {msg} for {sender_nick}')
        weather_info = self.get_forecast_info(msg)
        if not weather_info:
            self.bot.say_err()
            return
        # keys are the human-readable day labels used in the reply prefix
        forecasts = {
            'today': self.parse_forecast(weather_info, 0),
            'tomorrow': self.parse_forecast(weather_info, 1),
            (datetime.date.today() + datetime.timedelta(days=2)).strftime(r'%Y-%m-%d'): self.parse_forecast(weather_info, 2)
        }
        for _time, forec in forecasts.items():
            if not forec: continue  # no forecast entries for that day
            prefix = color.orange(f'[Weather forecast for {weather_info["city"]["name"]}, {weather_info["city"]["country"]} for {_time}]')
            responses = [f'{self.colorize_temp(forec.min_temp)} °C to {self.colorize_temp(forec.max_temp)} °C',
                         f'{forec.conditions}']
            if forec.avg_humidity: responses.append(f'average relative humidity: {forec.avg_humidity}%')
            if forec.avg_wind_speed: responses.append(f'average wind speed: {forec.avg_wind_speed}mps')
            self.bot.say(f'{prefix} {" :: ".join(responses)}')
    def get_weather_info(self, city_name):
        """Fetch current weather for *city_name*; on failure retry with national characters stripped."""
        result = self.get_weather_info_impl(city_name)
        if not result:
            # openweathermap behaves strange, sometimes it requires national characters and sometimes not
            city_name = utils.remove_national_chars(city_name)
            self.logger.info(f'getting weather in {city_name}')
            result = self.get_weather_info_impl(city_name)
        return result
    @utils.timed_lru_cache(expiration=datetime.timedelta(minutes=3), typed=True)
    def get_weather_info_impl(self, city_name):
        """Call the current-weather endpoint; return the parsed JSON dict or None on error."""
        ask = urllib.parse.quote(city_name)
        response = requests.get(self.weather_url % (ask, self.config['openweathermap_api_key'])).json()
        if 'cod' not in response or int(response['cod']) != requests.codes.OK:
            # log anything other than a plain "city not found"
            if 'cod' not in response or int(response['cod']) != requests.codes.NOT_FOUND:
                self.logger.warning(f'openweathermap error: {response}')
            # failures must not be cached, or a transient error would stick for 3 minutes
            self.get_weather_info_impl.do_not_cache()
            return None
        return response
    def get_forecast_info(self, city_name):
        """Fetch the forecast for *city_name*; on failure retry with national characters stripped."""
        result = self.get_forecast_info_impl(city_name)
        if not result:
            # openweathermap behaves strange, sometimes it requires national characters and sometimes not
            city_name = utils.remove_national_chars(city_name)
            self.logger.info(f'getting weather forecast in {city_name}')
            result = self.get_forecast_info_impl(city_name)
        return result
    # openweathermap API is really fucked up, I know there's ugly code duplication here...
    @utils.timed_lru_cache(expiration=datetime.timedelta(minutes=3), typed=True)
    def get_forecast_info_impl(self, city_name):
        """Call the forecast endpoint; return the parsed JSON dict or None on error."""
        ask = urllib.parse.quote(city_name)
        response = requests.get(self.forecast_url % (ask, self.config['openweathermap_api_key'])).json()
        if 'cod' not in response or int(response['cod']) != requests.codes.OK:
            if 'cod' not in response or int(response['cod']) != requests.codes.NOT_FOUND:
                self.logger.warning(f'openweathermap error: {response}')
            self.get_forecast_info_impl.do_not_cache()
            return None
        return response
    def parse_forecast(self, weather_info, days):
        """Aggregate the forecast entries *days* days ahead into one forecast_info.

        Returns None when no entries match the target date.
        """
        dt_txt = (datetime.date.today() + datetime.timedelta(days=days)).strftime(r'%Y-%m-%d')
        # NOTE(review): `sys` is not imported at the top of this file — presumably
        # re-exported by `from plugin import *`; confirm.
        min_temp = sys.maxsize
        max_temp = -sys.maxsize
        avg_humidity = 0.
        humidities = 0
        avg_wind_speed = 0.
        wind_speeds = 0
        conditions = []
        for forec in weather_info['list']:
            # keep only entries whose timestamp falls on the target date
            if not forec['dt_txt'].startswith(dt_txt) or 'main' not in forec: continue
            if forec['main']['temp_min'] < min_temp: min_temp = forec['main']['temp_min']
            if forec['main']['temp_max'] > max_temp: max_temp = forec['main']['temp_max']
            if 'humidity' in forec['main']:
                avg_humidity += forec['main']['humidity']
                humidities += 1
            if 'wind' in forec and 'speed' in forec['wind']:
                avg_wind_speed += forec['wind']['speed']
                wind_speeds += 1
            if 'weather' in forec and len(forec['weather']) > 0:
                cond = forec['weather'][0]
                if cond['id'] == 800: continue  # clear sky
                conditions.append(cond['description'])
        if min_temp == sys.maxsize: return None  # no data found
        conditions = list(set(conditions))
        return self.forecast_info(int(max_temp),
                                  int(min_temp),
                                  int(avg_wind_speed / wind_speeds) if wind_speeds != 0 else None,
                                  int(avg_humidity / humidities) if humidities != 0 else None,
                                  ', '.join(conditions if conditions else ['clear sky']))
    def wind_degree_to_direction(self, deg):
        """Map a wind bearing in degrees to one of 16 arrow glyphs (22.5° sectors)."""
        deg = int((deg / 22.5) + .5)
        directions = ['↑', '↗', '↗', '↗',
                      '→', '↘', '↘', '↘',
                      '↓', '↙', '↙', '↙',
                      '←', '↖', '↖', '↖']
        return directions[(deg % 16)]
    def colorize_temp(self, temp):
        """Return *temp* wrapped in a color chosen by temperature band (°C)."""
        temp = float(temp)
        if temp < 0: return color.blue(temp)
        if temp < 10: return color.light_cyan(temp)
        if temp < 15: return color.cyan(temp)
        if temp < 26: return color.yellow(temp)
        if temp < 30: return color.orange(temp)
        else: return color.light_red(temp)
|
[
"datetime.timedelta",
"datetime.date.today",
"requests.get"
] |
[((3999, 4028), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(3)'}), '(minutes=3)\n', (4017, 4028), False, 'import datetime\n'), ((5167, 5196), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(3)'}), '(minutes=3)\n', (5185, 5196), False, 'import datetime\n'), ((4153, 4230), 'requests.get', 'requests.get', (["(self.weather_url % (ask, self.config['openweathermap_api_key']))"], {}), "(self.weather_url % (ask, self.config['openweathermap_api_key']))\n", (4165, 4230), False, 'import requests\n'), ((5322, 5400), 'requests.get', 'requests.get', (["(self.forecast_url % (ask, self.config['openweathermap_api_key']))"], {}), "(self.forecast_url % (ask, self.config['openweathermap_api_key']))\n", (5334, 5400), False, 'import requests\n'), ((5828, 5849), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (5847, 5849), False, 'import datetime\n'), ((5852, 5881), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': 'days'}), '(days=days)\n', (5870, 5881), False, 'import datetime\n'), ((2745, 2766), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (2764, 2766), False, 'import datetime\n'), ((2769, 2795), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(2)'}), '(days=2)\n', (2787, 2795), False, 'import datetime\n')]
|
from rx.core import AnonymousObservable
def create(subscribe):
    """Build an AnonymousObservable around *subscribe*.

    The wrapper accepts (and ignores) the optional second argument that
    the rx subscription machinery may pass, delegating only the observer
    to the user-supplied *subscribe* callable.
    """
    def on_subscribe(observer, scheduler=None):
        # forward the observer; the scheduler slot is deliberately unused
        return subscribe(observer)
    return AnonymousObservable(on_subscribe)
|
[
"rx.core.AnonymousObservable"
] |
[((149, 180), 'rx.core.AnonymousObservable', 'AnonymousObservable', (['_subscribe'], {}), '(_subscribe)\n', (168, 180), False, 'from rx.core import AnonymousObservable\n')]
|
from datetime import datetime
from bson.json_util import dumps
from flask import Flask, render_template, request, redirect, url_for, jsonify
from flask_login import LoginManager, login_user, login_required, logout_user, current_user
from flask_cors import CORS
from pymongo.errors import DuplicateKeyError
from string import Template
from geopy.distance import geodesic
import ast
import json
from db import get_bidders,find_rooms,distance_calc,ended,get_template,get_t,get_distance,get_room_admin,save_param,add_room_member,add_room_members,update_bid, get_closing,get_hb,get_sign,get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room
# Application and extension wiring.
app = Flask(__name__)
cors = CORS(app)  # enable CORS on all routes
# NOTE(review): hard-coded secret key — should come from config/env in production
app.secret_key = "sfdjkafnk"
login_manager = LoginManager()
login_manager.login_view = 'login'  # endpoint unauthenticated users are redirected to
login_manager.init_app(app)
# The login route receives the username and password as a POST request
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Authenticate a user from POSTed form fields `username` / `password`.

    Returns 200 with a message on success (or when already logged in),
    otherwise 400 with an error message (empty for a plain GET).
    """
    if current_user.is_authenticated:
        return {"message": "The user {} is already authenticated".format(current_user)}, 200
    message = ''
    if request.method == 'POST':
        form_username = request.form.get('username')
        form_password = request.form.get('password')
        account = get_user(form_username)
        if account and account.check_password(form_password):
            login_user(account)
            return {"message": "User {} has been authenticated".format(str(account.username))}, 200
        message = 'Failed to login!'
    return message, 400
# Signup function is not habilitated for the time being, users are to be created either
# by function or directly into the database
@app.route('/signup', methods=['GET', 'POST'])
def signup():
    """Register a new user from a JSON POST body.

    Expects JSON fields: username, email, password, sign, location.
    Redirects to the login page on success; re-renders the signup page
    with an error message when the username already exists.
    """
    if current_user.is_authenticated:
        return redirect(url_for('home'))
    message = ''
    if request.method == 'POST':
        username = request.json.get('username')
        email = request.json.get('email')
        password = request.json.get('password')
        sign = request.json.get('sign')
        # BUG FIX: this previously re-read the 'sign' field (copy-paste error),
        # so every account was saved with its sign stored as its location.
        location = request.json.get('location')
        try:
            save_user(username, email, password, sign, location)
            return redirect(url_for('login'))
        except DuplicateKeyError:
            message = "User already exists!"
    return render_template('signup.html', message=message)
##holi={"room_name":"Erics composite auction","members":"","highest_bid":"5000","auction_type":"Ascending","closing_time":"2021-07-06T10:34:20","reference_sector":"Composites","reference_type":"Electronic","quantity":"15","templatetype":"article","articleno":"23dd"}
# A request to this function will log out the user from the server
@app.route("/logout/")
@login_required
def logout():
logout_user()
return {'message':'the user has logged out'},200
# Use a POST request to create a new auction, user has to be logged in
@app.route('/create-room', methods=['GET', 'POST'])
#@login_required
def create_room():
    """Create a new auction room from POSTed form fields.

    The creator is taken from HTTP basic auth; optional comma-separated
    'members' are added to the room (the creator is never double-added).
    Returns 200 with the new room id, or 400 when name/members are empty.
    NOTE(review): a plain GET falls through and implicitly returns None.
    """
    if request.method == 'POST':
        privacy= request.form.get('privacy')
        room_name = request.form.get('room_name')
        print(room_name)
        highest_bid=request.form.get('highest_bid')
        highest_bidder=''
        auction_type=request.form.get('auction_type')
        print(request.form.get('closing_time'))
        # closing_time arrives as an ISO-like string, e.g. 2021-07-06T10:34:20
        closing_time=datetime.strptime(request.form.get('closing_time'), '%Y-%m-%dT%H:%M:%S')
        reference_sector=request.form.get('reference_sector')
        reference_type=request.form.get('reference_type')
        quantity=request.form.get('quantity')
        articleno=request.form.get('articleno')
        # creator identity comes from HTTP basic auth, not the form
        user=request.authorization.username
        print(user)
        sellersign=get_sign(user)
        buyersign=''
        templatetype=request.form.get('templatetype')
        print(templatetype)
        print(request.form.get('members'))
        if(request.form.get('members')):
            usernames = [username.strip() for username in request.form.get('members').split(',')]
        else:
            print(user)
            usernames=[user]
        if len(room_name) and len(usernames):
            room_id = save_room(privacy, room_name, user,auction_type,highest_bid,highest_bidder,closing_time,sellersign,buyersign,templatetype)
            save_param(room_id,user,room_name,reference_sector,reference_type,quantity,articleno)
            # the creator is stored as admin by save_room; avoid adding them twice
            if user in usernames:
                usernames.remove(user)
            print(len(usernames))
            if len(usernames)>=1:
                print('hay')
                print('usernames')
                add_room_members(room_id, room_name, usernames, user)
            return {"message":"The room {} has been created id: {}".format(str(room_name),room_id)},200
        else:
            return {"message":"Unable to create room"},400
# Edit room also is not enabled but should work with little effort if needed
@app.route('/rooms/<room_id>/edit', methods=['GET', 'POST'])
@login_required
def edit_room(room_id):
    """Rename a room and reconcile its member list (room admin only).

    GET renders the edit form; POST applies the new name and membership
    from the JSON body. Returns 404 when the room does not exist or the
    current user is not its admin.
    """
    room = get_room(room_id)
    if not room or not is_room_admin(room_id, current_user.username):
        return "Room not found", 404
    current_members = [member['_id']['username'] for member in get_room_members(room_id)]
    room_members_str = ",".join(current_members)
    message = ''
    if request.method == 'POST':
        new_name = request.json.get('room_name')
        room['name'] = new_name
        update_room(room_id, new_name)
        requested_members = [name.strip() for name in request.json.get('members').split(',')]
        # diff the requested membership against what is stored
        to_add = list(set(requested_members) - set(current_members))
        to_remove = list(set(current_members) - set(requested_members))
        if to_add:
            add_room_members(room_id, new_name, to_add, current_user.username)
        if to_remove:
            remove_room_members(room_id, to_remove)
        message = 'Room edited successfully'
        room_members_str = ",".join(requested_members)
    return render_template('edit_room.html', room=room, room_members_str=room_members_str, message=message)
# GET request to this route has to include room_id for the room you want to join but no aditional parameters are needed
@app.route('/rooms/<room_id>/join', methods=['GET'])
#@login_required
def join_room(room_id):
    """Add the authenticated user (HTTP basic auth) to room *room_id*.

    Returns 200 either way: with an "already in a room" message when the
    user is a member, or a confirmation after joining.
    """
    room = get_room(room_id)
    room_name = room['payload']['name']['val'][0]
    user = request.authorization.username
    current_members = [member['_id']['username'] for member in get_room_members(room_id)]
    if request.method == 'GET':
        joining_user = user
        if joining_user in set(current_members):
            return {"message": "You are already in a room"}, 200
        add_room_member(room_id, room_name, joining_user, user)
    return {"message": "You have joined the room {}".format(str(room_name))}, 200
# A POST request to this route will receive parameter message_input and will generate a bid to the auction
# A GET request will show all the messages submited to this auction.
# A POST request to this route will receive parameter message_input and will generate a bid to the auction
# A GET request will show all the messages submited to this auction.
@app.route('/rooms/<room_id>', methods=['GET','POST'])
#@login_required
def chat(room_id):
    """Submit a bid to an auction room (POST) or list its bids (GET).

    Only room members may interact; the room admin is blocked from bidding.
    POST is rejected with 400 once the closing time has passed.
    """
    room = get_room(room_id)
    # Room display name lives inside the nested payload structure.
    rn=room['payload']['name']['val'][0]
    closing_time=get_closing(room_id)
    user=request.authorization.username
    if room and is_room_member(room_id, user):
        ## The event for the timeout message could go here
        if request.method=='POST':
            bid=request.form.get("message_input")
            # Bids are only accepted while the auction is still open.
            # NOTE(review): closing_time is compared with naive datetime.now() — confirm both are in the same timezone.
            if (closing_time)>datetime.now():
                print(is_room_admin(room_id,user))
                # is_room_admin(...) == 0 means the caller is NOT the admin, so may bid.
                if(is_room_admin(room_id,user)==0):
                    app.logger.info("{} has summited a new bid to the room {}: {}".format(user,
                                                                        rn,
                                                                        bid))
                    sign=get_sign(user)
                    ## Calculation of distance between users done at every bid
                    print(user,get_room_admin(rn))
                    distance=distance_calc(user,get_room_admin(rn))
                    #
                    save_message(str(room['_id']),bid,user,sign,distance)
                else:
                    app.logger.info("Cannot bid if you are Admin")
                    return{"message":"You cannot issue bids as room admin"},400
            else:
                app.logger.info("Auction time has ended")
                return {"message":"The auction {} has already ended".format(str(rn))},400
            # Reached only on a successful (non-admin, in-time) bid.
            return {"message":"You have issued the bid {}".format(str(bid))},200
        elif request.method=='GET':
            messages = get_messages(room_id)
            # NOTE(review): membership was already checked above — this inner check is redundant but kept as-is.
            if room and is_room_member(room_id, user):
                ## Here the bids from all users are shown to the user
                # Only expose these message fields to the caller.
                keys = ['sender','text', 'created_at','distance']
                d=[]
                for message in messages:
                    m_pay=message['payload']
                    filtered_d = dict((k, m_pay[k]) for k in keys if k in m_pay)
                    d.append(filtered_d)
                body = {"Bids": d}
                return jsonify(body),200
    else:
        return "Room not found or user is not member", 404
# A POST request to this auction is used to select the winner with the paremeter "winner" only in case no winner is selected yet
# A GET request in case the auction isnt ended will display the highest bids from all the biders
# and will show the ricardian contract in case the auction is ended
# A POST request to this auction is used to select the winner with the paremeter "winner" only in case no winner is selected yet
# A GET request in case the auction isnt ended will display the highest bids from all the biders
# and will show the ricardian contract in case the auction is ended
@app.route('/rooms/<room_id>/end', methods=['GET','POST'])
##@login_required
def winner(room_id):
    """Select an auction winner (POST, admin-only) or view its outcome (GET).

    POST: after closing time, the admin names the winner; the winning bid is
    recorded via update_bid. GET: admin and winner see the contract; others
    see the winner/pending status.
    """
    closing_time=get_closing(room_id)
    room = get_room(room_id)
    # Room display name lives inside the nested payload structure.
    rn=room['payload']['name']['val'][0]
    user=request.authorization.username
    ## Withing this function the logic for the winner selection is specified, the admin shall input the username of the winner
    if request.method=='POST':
        # is_room_admin(...) == 1 means the caller IS the room admin.
        if(is_room_admin(room_id,user)==1):
            # NOTE(review): naive datetime.now() comparison — confirm timezone consistency with closing_time.
            if (closing_time)>datetime.now(): #Auction hasnt ended
                return{"message":"The specified auction hasnt ended"},400
            if get_hbidder(room_id)=='': ## This would mean the auction doesnt have a winner yet
                winner=request.form.get("winner") #Should be username
                wi=json.loads(get_hb(room_id,winner)) ## Get hb should be changed in case the auction is descending
                if wi:
                    # wi is a list of bid records; record each winning entry.
                    for d in wi:
                        sen=d['sender']
                        bid=d['text']
                        sign=d['sign']
                        update_bid(room['_id'],bid,sen,sign)
                    return {"message":"winner has been selected"},200
                else:
                    return {"message":"User does not participate the auction"},403
            else:
                return {"message":"the winner for this auciton has already been selected"},200
        else: return{"message":"You are not room admin"},400
    elif request.method=='GET':
        print()
        # Admin view: bidders list while open, contract once a winner exists.
        if user == get_room_admin(rn):
            if get_hbidder(room_id)=='': #Winner hasnt been selected
                return get_bidders(room_id),200
            else: #Winner is selected
                response={'contract':ended(room_id)}
                return jsonify(response),200
        # Winner view: the ricardian contract.
        elif (user==get_hbidder(room_id)):
            response={'contract':ended(room_id)}
            return jsonify(response),200
        elif get_hbidder(room_id)=='':
            return {"message":"Winner hasnt been selected"},400
        else:
            # NOTE(review): this reads room['highest_bidder'] directly rather than via get_hbidder — confirm the key exists on the room document.
            return {"message":"The auction has ended, the winner is {}".format(room['highest_bidder'])},400
# A GET request to this route is used to query auction based in the parameters listed below
# A GET request to this route is used to query auction based in the parameters listed below
@app.route('/rooms', methods=['GET'])
#@login_required
def query():
    """Search auctions using the JSON filter fields in the request body."""
    if request.method == 'GET':
        caller = request.authorization.username
        payload = request.json
        name_filter = payload.get("room_name")
        sector = payload.get("reference_sector")
        ref_type = payload.get("reference_type")
        ongoing = payload.get("ongoing")
        max_distance = payload.get("distance")
        # Debug trace kept from the original implementation.
        print(max_distance, caller)
        matches = find_rooms(name_filter, sector, ref_type, ongoing, caller, max_distance)
        return matches, 200
@login_manager.user_loader
def load_user(username):
    """Flask-Login callback: resolve the session's user record by username."""
    user_record = get_user(username)
    return user_record
# Script entry point: start the Flask development server on all interfaces.
# NOTE(review): debug=True combined with host='0.0.0.0' exposes the Werkzeug
# debugger to the network — disable both for any production deployment.
if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True)
|
[
"db.get_user",
"flask.request.form.get",
"flask_cors.CORS",
"db.update_room",
"db.save_param",
"flask.url_for",
"flask.jsonify",
"db.get_messages",
"db.is_room_admin",
"db.get_hb",
"db.save_room",
"db.find_rooms",
"db.ended",
"flask.render_template",
"db.add_room_members",
"datetime.datetime.now",
"db.remove_room_members",
"db.update_bid",
"db.get_room_members",
"flask_login.login_user",
"flask_login.logout_user",
"flask.request.json.get",
"db.save_user",
"flask_login.LoginManager",
"db.get_sign",
"db.get_closing",
"db.add_room_member",
"db.get_room_admin",
"flask.Flask",
"db.get_room",
"db.get_hbidder",
"db.is_room_member",
"db.get_bidders"
] |
[((777, 792), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (782, 792), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((802, 811), 'flask_cors.CORS', 'CORS', (['app'], {}), '(app)\n', (806, 811), False, 'from flask_cors import CORS\n'), ((857, 871), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (869, 871), False, 'from flask_login import LoginManager, login_user, login_required, logout_user, current_user\n'), ((2412, 2459), 'flask.render_template', 'render_template', (['"""signup.html"""'], {'message': 'message'}), "('signup.html', message=message)\n", (2427, 2459), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((2855, 2868), 'flask_login.logout_user', 'logout_user', ([], {}), '()\n', (2866, 2868), False, 'from flask_login import LoginManager, login_user, login_required, logout_user, current_user\n'), ((5133, 5150), 'db.get_room', 'get_room', (['room_id'], {}), '(room_id)\n', (5141, 5150), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((6552, 6569), 'db.get_room', 'get_room', (['room_id'], {}), '(room_id)\n', (6560, 6569), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((7373, 7390), 'db.get_room', 'get_room', (['room_id'], {}), '(room_id)\n', (7381, 
7390), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((7449, 7469), 'db.get_closing', 'get_closing', (['room_id'], {}), '(room_id)\n', (7460, 7469), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((10042, 10062), 'db.get_closing', 'get_closing', (['room_id'], {}), '(room_id)\n', (10053, 10062), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((10079, 10096), 'db.get_room', 'get_room', (['room_id'], {}), '(room_id)\n', (10087, 10096), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((12844, 12862), 'db.get_user', 'get_user', (['username'], {}), '(username)\n', (12852, 12862), 
False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((1267, 1295), 'flask.request.form.get', 'request.form.get', (['"""username"""'], {}), "('username')\n", (1283, 1295), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((1321, 1349), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (1337, 1349), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((1365, 1383), 'db.get_user', 'get_user', (['username'], {}), '(username)\n', (1373, 1383), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((2001, 2029), 'flask.request.json.get', 'request.json.get', (['"""username"""'], {}), "('username')\n", (2017, 2029), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((2046, 2071), 'flask.request.json.get', 'request.json.get', (['"""email"""'], {}), "('email')\n", (2062, 2071), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((2091, 2119), 'flask.request.json.get', 'request.json.get', (['"""password"""'], {}), "('password')\n", (2107, 2119), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((2133, 2157), 'flask.request.json.get', 'request.json.get', 
(['"""sign"""'], {}), "('sign')\n", (2149, 2157), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((2175, 2199), 'flask.request.json.get', 'request.json.get', (['"""sign"""'], {}), "('sign')\n", (2191, 2199), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((3134, 3161), 'flask.request.form.get', 'request.form.get', (['"""privacy"""'], {}), "('privacy')\n", (3150, 3161), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((3182, 3211), 'flask.request.form.get', 'request.form.get', (['"""room_name"""'], {}), "('room_name')\n", (3198, 3211), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((3257, 3288), 'flask.request.form.get', 'request.form.get', (['"""highest_bid"""'], {}), "('highest_bid')\n", (3273, 3288), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((3336, 3368), 'flask.request.form.get', 'request.form.get', (['"""auction_type"""'], {}), "('auction_type')\n", (3352, 3368), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((3536, 3572), 'flask.request.form.get', 'request.form.get', (['"""reference_sector"""'], {}), "('reference_sector')\n", (3552, 3572), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((3596, 3630), 'flask.request.form.get', 'request.form.get', (['"""reference_type"""'], {}), "('reference_type')\n", (3612, 3630), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((3648, 3676), 'flask.request.form.get', 'request.form.get', (['"""quantity"""'], {}), "('quantity')\n", (3664, 3676), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((3695, 3724), 'flask.request.form.get', 'request.form.get', (['"""articleno"""'], {}), "('articleno')\n", (3711, 3724), False, 'from flask 
import Flask, render_template, request, redirect, url_for, jsonify\n'), ((3808, 3822), 'db.get_sign', 'get_sign', (['user'], {}), '(user)\n', (3816, 3822), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((3865, 3897), 'flask.request.form.get', 'request.form.get', (['"""templatetype"""'], {}), "('templatetype')\n", (3881, 3897), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((3980, 4007), 'flask.request.form.get', 'request.form.get', (['"""members"""'], {}), "('members')\n", (3996, 4007), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((5167, 5212), 'db.is_room_admin', 'is_room_admin', (['room_id', 'current_user.username'], {}), '(room_id, current_user.username)\n', (5180, 5212), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((6180, 6281), 'flask.render_template', 'render_template', (['"""edit_room.html"""'], {'room': 'room', 'room_members_str': 'room_members_str', 'message': 'message'}), "('edit_room.html', room=room, room_members_str=\n room_members_str, message=message)\n", (6195, 6281), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((6945, 6999), 'db.add_room_member', 'add_room_member', (['room_id', 'room_name', 'new_members', 'user'], 
{}), '(room_id, room_name, new_members, user)\n', (6960, 6999), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((7527, 7556), 'db.is_room_member', 'is_room_member', (['room_id', 'user'], {}), '(room_id, user)\n', (7541, 7556), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((12384, 12413), 'flask.request.json.get', 'request.json.get', (['"""room_name"""'], {}), "('room_name')\n", (12400, 12413), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((12439, 12475), 'flask.request.json.get', 'request.json.get', (['"""reference_sector"""'], {}), "('reference_sector')\n", (12455, 12475), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((12499, 12533), 'flask.request.json.get', 'request.json.get', (['"""reference_type"""'], {}), "('reference_type')\n", (12515, 12533), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((12550, 12577), 'flask.request.json.get', 'request.json.get', (['"""ongoing"""'], {}), "('ongoing')\n", (12566, 12577), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((12596, 12624), 'flask.request.json.get', 'request.json.get', (['"""distance"""'], {}), "('distance')\n", (12612, 12624), False, 
'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((12672, 12757), 'db.find_rooms', 'find_rooms', (['room_name', 'reference_sector', 'reference_type', 'ongoing', 'user', 'distance'], {}), '(room_name, reference_sector, reference_type, ongoing, user, distance\n )\n', (12682, 12757), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((1454, 1470), 'flask_login.login_user', 'login_user', (['user'], {}), '(user)\n', (1464, 1470), False, 'from flask_login import LoginManager, login_user, login_required, logout_user, current_user\n'), ((1914, 1929), 'flask.url_for', 'url_for', (['"""home"""'], {}), "('home')\n", (1921, 1929), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((2225, 2277), 'db.save_user', 'save_user', (['username', 'email', 'password', 'sign', 'location'], {}), '(username, email, password, sign, location)\n', (2234, 2277), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((3383, 3415), 'flask.request.form.get', 'request.form.get', (['"""closing_time"""'], {}), "('closing_time')\n", (3399, 3415), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((3456, 3488), 'flask.request.form.get', 'request.form.get', (['"""closing_time"""'], {}), 
"('closing_time')\n", (3472, 3488), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((3940, 3967), 'flask.request.form.get', 'request.form.get', (['"""members"""'], {}), "('members')\n", (3956, 3967), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((4268, 4401), 'db.save_room', 'save_room', (['privacy', 'room_name', 'user', 'auction_type', 'highest_bid', 'highest_bidder', 'closing_time', 'sellersign', 'buyersign', 'templatetype'], {}), '(privacy, room_name, user, auction_type, highest_bid,\n highest_bidder, closing_time, sellersign, buyersign, templatetype)\n', (4277, 4401), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((4403, 4498), 'db.save_param', 'save_param', (['room_id', 'user', 'room_name', 'reference_sector', 'reference_type', 'quantity', 'articleno'], {}), '(room_id, user, room_name, reference_sector, reference_type,\n quantity, articleno)\n', (4413, 4498), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((5455, 5484), 'flask.request.json.get', 'request.json.get', (['"""room_name"""'], {}), "('room_name')\n", (5471, 5484), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((5534, 5565), 'db.update_room', 'update_room', (['room_id', 
'room_name'], {}), '(room_id, room_name)\n', (5545, 5565), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((6728, 6753), 'db.get_room_members', 'get_room_members', (['room_id'], {}), '(room_id)\n', (6744, 6753), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((7686, 7719), 'flask.request.form.get', 'request.form.get', (['"""message_input"""'], {}), "('message_input')\n", (7702, 7719), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((10362, 10390), 'db.is_room_admin', 'is_room_admin', (['room_id', 'user'], {}), '(room_id, user)\n', (10375, 10390), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((2304, 2320), 'flask.url_for', 'url_for', (['"""login"""'], {}), "('login')\n", (2311, 2320), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((4710, 4763), 'db.add_room_members', 'add_room_members', (['room_id', 'room_name', 'usernames', 'user'], 
{}), '(room_id, room_name, usernames, user)\n', (4726, 4763), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((5287, 5312), 'db.get_room_members', 'get_room_members', (['room_id'], {}), '(room_id)\n', (5303, 5312), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((5884, 5959), 'db.add_room_members', 'add_room_members', (['room_id', 'room_name', 'members_to_add', 'current_user.username'], {}), '(room_id, room_name, members_to_add, current_user.username)\n', (5900, 5959), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((6015, 6062), 'db.remove_room_members', 'remove_room_members', (['room_id', 'members_to_remove'], {}), '(room_id, members_to_remove)\n', (6034, 6062), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, 
get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((7750, 7764), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7762, 7764), False, 'from datetime import datetime\n'), ((8984, 9005), 'db.get_messages', 'get_messages', (['room_id'], {}), '(room_id)\n', (8996, 9005), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((10438, 10452), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (10450, 10452), False, 'from datetime import datetime\n'), ((10568, 10588), 'db.get_hbidder', 'get_hbidder', (['room_id'], {}), '(room_id)\n', (10579, 10588), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((10673, 10699), 'flask.request.form.get', 'request.form.get', (['"""winner"""'], {}), "('winner')\n", (10689, 10699), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((11484, 11502), 'db.get_room_admin', 'get_room_admin', (['rn'], {}), '(rn)\n', (11498, 11502), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, 
get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((7788, 7816), 'db.is_room_admin', 'is_room_admin', (['room_id', 'user'], {}), '(room_id, user)\n', (7801, 7816), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((7836, 7864), 'db.is_room_admin', 'is_room_admin', (['room_id', 'user'], {}), '(room_id, user)\n', (7849, 7864), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((8152, 8166), 'db.get_sign', 'get_sign', (['user'], {}), '(user)\n', (8160, 8166), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((9030, 9059), 'db.is_room_member', 'is_room_member', (['room_id', 'user'], {}), '(room_id, user)\n', (9044, 9059), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, 
get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((10750, 10773), 'db.get_hb', 'get_hb', (['room_id', 'winner'], {}), '(room_id, winner)\n', (10756, 10773), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((11029, 11068), 'db.update_bid', 'update_bid', (["room['_id']", 'bid', 'sen', 'sign'], {}), "(room['_id'], bid, sen, sign)\n", (11039, 11068), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((11519, 11539), 'db.get_hbidder', 'get_hbidder', (['room_id'], {}), '(room_id)\n', (11530, 11539), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((11777, 11797), 'db.get_hbidder', 'get_hbidder', (['room_id'], {}), '(room_id)\n', (11788, 11797), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, 
update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((8277, 8295), 'db.get_room_admin', 'get_room_admin', (['rn'], {}), '(rn)\n', (8291, 8295), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((8345, 8363), 'db.get_room_admin', 'get_room_admin', (['rn'], {}), '(rn)\n', (8359, 8363), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((9537, 9550), 'flask.jsonify', 'jsonify', (['body'], {}), '(body)\n', (9544, 9550), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((11596, 11616), 'db.get_bidders', 'get_bidders', (['room_id'], {}), '(room_id)\n', (11607, 11616), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((11696, 11710), 'db.ended', 'ended', (['room_id'], {}), '(room_id)\n', (11701, 11710), False, 'from db 
import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((11735, 11752), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (11742, 11752), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((11833, 11847), 'db.ended', 'ended', (['room_id'], {}), '(room_id)\n', (11838, 11847), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((11868, 11885), 'flask.jsonify', 'jsonify', (['response'], {}), '(response)\n', (11875, 11885), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((11903, 11923), 'db.get_hbidder', 'get_hbidder', (['room_id'], {}), '(room_id)\n', (11914, 11923), False, 'from db import get_bidders, find_rooms, distance_calc, ended, get_template, get_t, get_distance, get_room_admin, save_param, add_room_member, add_room_members, update_bid, get_closing, get_hb, get_sign, get_hbidder, get_messages, get_room, get_room_members, get_rooms_for_user, get_user, is_room_admin, is_room_member, remove_room_members, save_message, save_room, save_user, update_room\n'), ((4068, 4095), 'flask.request.form.get', 'request.form.get', (['"""members"""'], {}), "('members')\n", (4084, 4095), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n'), ((5627, 5654), 'flask.request.json.get', 
'request.json.get', (['"""members"""'], {}), "('members')\n", (5643, 5654), False, 'from flask import Flask, render_template, request, redirect, url_for, jsonify\n')]
|
#
# Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved.
# Licensed under the Universal Permissive License v 1.0 as shown at http://oss.oracle.com/licenses/upl.
#
from __future__ import print_function
import tensorflow as tf
from numpy import genfromtxt
import numpy as np
import os
from os.path import dirname
# set logging parameters
# TF_CPP_MIN_LOG_LEVEL='2' silences TensorFlow's C++ INFO and WARNING output;
# must be set before the TF runtime initializes to take effect.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
tf.logging.set_verbosity(tf.logging.ERROR)  # TF1.x Python-side logging API
def oneHotEncoder(data, n_classes):
    """Split class labels out of *data* and one-hot encode them.

    The first element of each row is taken as an integer class label.
    Returns a tuple ``(y, y_onehot)`` where ``y`` is a 1-D numpy array of
    labels and ``y_onehot`` is a list of ``n_classes``-long indicator lists.
    """
    labels = np.array([int(row[0]) for row in data])
    encoded = []
    for label in labels:
        indicator = [0] * n_classes
        indicator[label] = 1
        encoded.append(indicator)
    return (labels, encoded)
# model parameters
n_input = 200   # features per sample; must equal shape1 * shape2 (10 * 20)
n_classes = 2
# tunable hyper-parameters
shape1 = 10     # height the flat 200-feature vector is reshaped to
shape2 = 20     # width the flat 200-feature vector is reshaped to
init_learning_rate = 0.004
training_iters = 40000
batch_size = 256
dropout = 0.5 # probability to keep units (for dropout)
seed = 0
# display test metrics
test_step = 10  # evaluate on the test set every `test_step` training steps
# set random seed for reproducability
tf.set_random_seed(seed)
#NCI109 dataset
# NOTE(review): files are named "NCI09_*" while the comment says NCI109 —
# presumably a typo carried into the filenames; verify against the data dir.
data_dir = os.path.join(dirname(os.getcwd()), "data/")
train_data = genfromtxt(data_dir+"NCI09_train.csv", delimiter=',') # Training data
test_data = genfromtxt(data_dir+"NCI09_test.csv", delimiter=',') # Test data
# column 0 is the class label, the remaining 200 columns are the features
x_train = np.array([ i[1::] for i in train_data])
y_train, y_train_onehot = oneHotEncoder(train_data, n_classes)
x_test = np.array([ i[1::] for i in test_data])
y_test, y_test_onehot = oneHotEncoder(test_data, n_classes)
# tf Graph input
x = tf.placeholder(tf.float32, [None, n_input])
y = tf.placeholder(tf.float32, [None, n_classes])
keep_prob = tf.placeholder(tf.float32)  # dropout keep probability (1.0 = no dropout)
# Conv2D wrapper, with bias and relu activation
def conv2d(x, W, b, strides=1):
    """2-D convolution with SAME padding, followed by bias add and ReLU."""
    stride_spec = [1, strides, strides, 1]
    conv = tf.nn.conv2d(x, W, strides=stride_spec, padding='SAME')
    biased = tf.nn.bias_add(conv, b)
    return tf.nn.relu(biased)
# MaxPool2D wrapper
def pool2d(x, k=2):
    """k x k max-pooling with a matching stride and SAME padding."""
    window = [1, k, k, 1]
    return tf.nn.max_pool(x, ksize=window, strides=window, padding='SAME')
# convnet model: two conv+pool stages, one dense layer, and a linear output
# layer producing class logits (softmax is applied later in the loss).
def conv_net(x, keep_prob):
    # Store layers weight & bias.  Variables are created inside this function,
    # so each call builds a fresh set of parameters in the graph.
    weights = {
        'conv_layer1': tf.Variable(tf.random_normal([5, 5, 1, 32], seed=seed)),
        'conv_layer2': tf.Variable(tf.random_normal([5, 5, 32, 64], seed=seed)),
        'dense_layer': tf.Variable(tf.random_normal([shape1*shape2*64, 1024], seed=seed)),
        'output_layer': tf.Variable(tf.random_normal([1024, n_classes], seed=seed))
    }
    biases = {
        'conv_layer1': tf.Variable(tf.random_normal([32], seed=seed)),
        'conv_layer2': tf.Variable(tf.random_normal([64], seed=seed)),
        'dense_layer': tf.Variable(tf.random_normal([1024], seed=seed)),
        'output_layer': tf.Variable(tf.random_normal([n_classes], seed=seed))
    }
    # Reshape input picture: flat 200-feature vector -> (batch, 10, 20, 1)
    x = tf.reshape(x, shape=[-1, shape1, shape2, 1])
    # Convolution Layer
    conv1 = conv2d(x, weights['conv_layer1'], biases['conv_layer1'])
    # Max Pooling (down-sampling)
    # NOTE(review): k=1 with SAME padding and stride 1 makes pool2d an
    # identity op — no down-sampling actually happens here (or below).
    conv1 = pool2d(conv1, k=1)
    # Convolution Layer
    conv2 = conv2d(conv1, weights['conv_layer2'], biases['conv_layer2'])
    # Max Pooling (down-sampling)
    conv2 = pool2d(conv2, k=1)
    # Flatten conv output to match the dense layer's input dimension
    fc1 = tf.reshape(conv2, [-1, weights['dense_layer'].get_shape().as_list()[0]])
    fc1 = tf.add(tf.matmul(fc1, weights['dense_layer']), biases['dense_layer'])
    fc1 = tf.nn.relu(fc1)
    # Apply Dropout
    fc1 = tf.nn.dropout(fc1, keep_prob, seed=seed)
    # Output, class prediction (raw logits; no activation)
    out = tf.add(tf.matmul(fc1, weights['output_layer']), biases['output_layer'])
    return out
# global_step is incremented by the optimizer and drives the decay schedule:
# learning rate drops to 96% every 10000 optimizer steps.
global_step = tf.Variable(0, trainable=False)
learning_rate = tf.train.exponential_decay(init_learning_rate, global_step, 10000, 0.96, staircase=True)
# construct model with input data
pred = conv_net(x, keep_prob)
# define loss and optimizer (softmax cross-entropy over the raw logits)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost,global_step=global_step)
# evaluate model: fraction of samples whose argmax matches the label
correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
# initializing the variables
init = tf.global_variables_initializer()
# Launch the graph
with tf.Session() as sess:
    sess.run(init)
    step = 1
    # keep training until reach max iterations (counted in samples seen)
    while step * batch_size < training_iters:
        # re-seed per step so batch sampling is reproducible across runs
        np.random.seed(step+seed)
        # sample a batch with replacement across steps (indices may repeat)
        idx = np.random.randint(len(x_train), size=batch_size)
        batch_x = x_train[idx,:]
        batch_y = np.asarray(y_train_onehot)[idx,:]
        # run optimization op (backprop)
        sess.run(optimizer, feed_dict={x: batch_x, y: batch_y, keep_prob: dropout})
        if step % test_step == 0:
            # calculate accuracy for test data (keep_prob=1 disables dropout)
            loss, acc = sess.run([cost, accuracy], feed_dict={x: x_test, y: np.asarray(y_test_onehot), keep_prob: 1.})
            print("Iterations: %s, Test Accuracy: %f" % (str(step*batch_size),acc))
        step += 1
    print("Complete!")
|
[
"numpy.random.seed",
"tensorflow.reshape",
"tensorflow.logging.set_verbosity",
"tensorflow.matmul",
"tensorflow.Variable",
"tensorflow.nn.conv2d",
"tensorflow.nn.relu",
"tensorflow.nn.softmax_cross_entropy_with_logits",
"numpy.genfromtxt",
"tensorflow.set_random_seed",
"tensorflow.placeholder",
"tensorflow.cast",
"tensorflow.nn.bias_add",
"tensorflow.global_variables_initializer",
"numpy.asarray",
"tensorflow.Session",
"tensorflow.nn.max_pool",
"tensorflow.random_normal",
"tensorflow.train.exponential_decay",
"tensorflow.argmax",
"os.getcwd",
"numpy.array",
"tensorflow.train.AdamOptimizer",
"tensorflow.nn.dropout"
] |
[((399, 441), 'tensorflow.logging.set_verbosity', 'tf.logging.set_verbosity', (['tf.logging.ERROR'], {}), '(tf.logging.ERROR)\n', (423, 441), True, 'import tensorflow as tf\n'), ((974, 998), 'tensorflow.set_random_seed', 'tf.set_random_seed', (['seed'], {}), '(seed)\n', (992, 998), True, 'import tensorflow as tf\n'), ((1085, 1140), 'numpy.genfromtxt', 'genfromtxt', (["(data_dir + 'NCI09_train.csv')"], {'delimiter': '""","""'}), "(data_dir + 'NCI09_train.csv', delimiter=',')\n", (1095, 1140), False, 'from numpy import genfromtxt\n'), ((1168, 1222), 'numpy.genfromtxt', 'genfromtxt', (["(data_dir + 'NCI09_test.csv')"], {'delimiter': '""","""'}), "(data_dir + 'NCI09_test.csv', delimiter=',')\n", (1178, 1222), False, 'from numpy import genfromtxt\n'), ((1250, 1287), 'numpy.array', 'np.array', (['[i[1:] for i in train_data]'], {}), '([i[1:] for i in train_data])\n', (1258, 1287), True, 'import numpy as np\n'), ((1363, 1399), 'numpy.array', 'np.array', (['[i[1:] for i in test_data]'], {}), '([i[1:] for i in test_data])\n', (1371, 1399), True, 'import numpy as np\n'), ((1484, 1527), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, n_input]'], {}), '(tf.float32, [None, n_input])\n', (1498, 1527), True, 'import tensorflow as tf\n'), ((1532, 1577), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, n_classes]'], {}), '(tf.float32, [None, n_classes])\n', (1546, 1577), True, 'import tensorflow as tf\n'), ((1590, 1616), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {}), '(tf.float32)\n', (1604, 1616), True, 'import tensorflow as tf\n'), ((3520, 3551), 'tensorflow.Variable', 'tf.Variable', (['(0)'], {'trainable': '(False)'}), '(0, trainable=False)\n', (3531, 3551), True, 'import tensorflow as tf\n'), ((3568, 3660), 'tensorflow.train.exponential_decay', 'tf.train.exponential_decay', (['init_learning_rate', 'global_step', '(10000)', '(0.96)'], {'staircase': '(True)'}), '(init_learning_rate, global_step, 10000, 0.96,\n 
staircase=True)\n', (3594, 3660), True, 'import tensorflow as tf\n'), ((4117, 4150), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (4148, 4150), True, 'import tensorflow as tf\n'), ((1706, 1774), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['x', 'W'], {'strides': '[1, strides, strides, 1]', 'padding': '"""SAME"""'}), "(x, W, strides=[1, strides, strides, 1], padding='SAME')\n", (1718, 1774), True, 'import tensorflow as tf\n'), ((1783, 1803), 'tensorflow.nn.bias_add', 'tf.nn.bias_add', (['x', 'b'], {}), '(x, b)\n', (1797, 1803), True, 'import tensorflow as tf\n'), ((1815, 1828), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (1825, 1828), True, 'import tensorflow as tf\n'), ((1882, 1957), 'tensorflow.nn.max_pool', 'tf.nn.max_pool', (['x'], {'ksize': '[1, k, k, 1]', 'strides': '[1, k, k, 1]', 'padding': '"""SAME"""'}), "(x, ksize=[1, k, k, 1], strides=[1, k, k, 1], padding='SAME')\n", (1896, 1957), True, 'import tensorflow as tf\n'), ((2747, 2791), 'tensorflow.reshape', 'tf.reshape', (['x'], {'shape': '[-1, shape1, shape2, 1]'}), '(x, shape=[-1, shape1, shape2, 1])\n', (2757, 2791), True, 'import tensorflow as tf\n'), ((3288, 3303), 'tensorflow.nn.relu', 'tf.nn.relu', (['fc1'], {}), '(fc1)\n', (3298, 3303), True, 'import tensorflow as tf\n'), ((3334, 3374), 'tensorflow.nn.dropout', 'tf.nn.dropout', (['fc1', 'keep_prob'], {'seed': 'seed'}), '(fc1, keep_prob, seed=seed)\n', (3347, 3374), True, 'import tensorflow as tf\n'), ((3773, 3835), 'tensorflow.nn.softmax_cross_entropy_with_logits', 'tf.nn.softmax_cross_entropy_with_logits', ([], {'logits': 'pred', 'labels': 'y'}), '(logits=pred, labels=y)\n', (3812, 3835), True, 'import tensorflow as tf\n'), ((3982, 4000), 'tensorflow.argmax', 'tf.argmax', (['pred', '(1)'], {}), '(pred, 1)\n', (3991, 4000), True, 'import tensorflow as tf\n'), ((4002, 4017), 'tensorflow.argmax', 'tf.argmax', (['y', '(1)'], {}), '(y, 1)\n', (4011, 4017), True, 'import tensorflow as tf\n'), 
((4045, 4078), 'tensorflow.cast', 'tf.cast', (['correct_pred', 'tf.float32'], {}), '(correct_pred, tf.float32)\n', (4052, 4078), True, 'import tensorflow as tf\n'), ((4176, 4188), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (4186, 4188), True, 'import tensorflow as tf\n'), ((1049, 1060), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1058, 1060), False, 'import os\n'), ((3215, 3253), 'tensorflow.matmul', 'tf.matmul', (['fc1', "weights['dense_layer']"], {}), "(fc1, weights['dense_layer'])\n", (3224, 3253), True, 'import tensorflow as tf\n'), ((3424, 3463), 'tensorflow.matmul', 'tf.matmul', (['fc1', "weights['output_layer']"], {}), "(fc1, weights['output_layer'])\n", (3433, 3463), True, 'import tensorflow as tf\n'), ((3849, 3900), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': 'learning_rate'}), '(learning_rate=learning_rate)\n', (3871, 3900), True, 'import tensorflow as tf\n'), ((4331, 4358), 'numpy.random.seed', 'np.random.seed', (['(step + seed)'], {}), '(step + seed)\n', (4345, 4358), True, 'import numpy as np\n'), ((2088, 2130), 'tensorflow.random_normal', 'tf.random_normal', (['[5, 5, 1, 32]'], {'seed': 'seed'}), '([5, 5, 1, 32], seed=seed)\n', (2104, 2130), True, 'import tensorflow as tf\n'), ((2168, 2211), 'tensorflow.random_normal', 'tf.random_normal', (['[5, 5, 32, 64]'], {'seed': 'seed'}), '([5, 5, 32, 64], seed=seed)\n', (2184, 2211), True, 'import tensorflow as tf\n'), ((2249, 2306), 'tensorflow.random_normal', 'tf.random_normal', (['[shape1 * shape2 * 64, 1024]'], {'seed': 'seed'}), '([shape1 * shape2 * 64, 1024], seed=seed)\n', (2265, 2306), True, 'import tensorflow as tf\n'), ((2341, 2387), 'tensorflow.random_normal', 'tf.random_normal', (['[1024, n_classes]'], {'seed': 'seed'}), '([1024, n_classes], seed=seed)\n', (2357, 2387), True, 'import tensorflow as tf\n'), ((2446, 2479), 'tensorflow.random_normal', 'tf.random_normal', (['[32]'], {'seed': 'seed'}), '([32], seed=seed)\n', (2462, 2479), True, 'import 
tensorflow as tf\n'), ((2517, 2550), 'tensorflow.random_normal', 'tf.random_normal', (['[64]'], {'seed': 'seed'}), '([64], seed=seed)\n', (2533, 2550), True, 'import tensorflow as tf\n'), ((2588, 2623), 'tensorflow.random_normal', 'tf.random_normal', (['[1024]'], {'seed': 'seed'}), '([1024], seed=seed)\n', (2604, 2623), True, 'import tensorflow as tf\n'), ((2662, 2702), 'tensorflow.random_normal', 'tf.random_normal', (['[n_classes]'], {'seed': 'seed'}), '([n_classes], seed=seed)\n', (2678, 2702), True, 'import tensorflow as tf\n'), ((4471, 4497), 'numpy.asarray', 'np.asarray', (['y_train_onehot'], {}), '(y_train_onehot)\n', (4481, 4497), True, 'import numpy as np\n'), ((4787, 4812), 'numpy.asarray', 'np.asarray', (['y_test_onehot'], {}), '(y_test_onehot)\n', (4797, 4812), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
# coding: utf-8
# ## Problem 2 - Plotting temperatures
#
# In this problem we will plot monthly mean temperatures from the Helsinki-Vantaa airpot for the past 30 years.
#
# ## Input data
#
# File `data/helsinki-vantaa.csv` monthly average temperatures from Helsinki Vantaa airport. Column descriptions:
#
# ### Part 1
#
# Load the Helsinki temperature data (`data/helsinki-vantaa.csv`)
#
# - Read the data into variable called `data` using pandas
# - Parse dates from the column `'DATE'` and set the dates as index in the dataframe
# YOUR CODE HERE 1 to read the data into data and parse dates
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# Read monthly Helsinki-Vantaa temperatures, parsing DATE as the index
data=pd.read_csv("data/helsinki-vantaa.csv",parse_dates=['DATE'],index_col='DATE')
# This test print should print first five rows
print(data.head())
# Check the number of rows in the data frame
print(len(data))
# ### Part 2
#
# Select data for a 30 year period (January 1988 - December 2018)
#
# - Store the selection in a new variable `selection`
# YOUR CODE HERE 2
# date_time duplicates the (already datetime) index as a column so the
# .dt accessor can be used for year filtering
data["date_time"]=pd.to_datetime(data.index)
selection=data[(data["date_time"].dt.year>=1988)&(data["date_time"].dt.year<=2018)]
# Check that the data was read in correctly:
selection.head()
# Check how many rows of data you selected:
print("Number of rows:", len(selection))
# ### Part 3
#
# #### Part 3.1
#
# Create a line plot that displays the temperatures (`TEMP_C`) for yeach month in the 30 year time period:
#
# #### Part 3.2
#
# Save your figure as PNG file called `temp_line_plot.png`.
#
# YOUR CODE HERE 3
monthly_data=pd.DataFrame()
# NOTE(review): assigning into `selection` (a filtered view of `data`)
# triggers pandas' SettingWithCopyWarning; a .copy() at selection time
# would silence it — left unchanged here.
# DATA_Month is the 'YYYY-MM' prefix of the timestamp, used as group key
selection['DATA_Month']=selection["date_time"].astype(str).str.slice(start=0,stop=7)
grouped=selection.groupby('DATA_Month')
# NOTE(review): mean() is taken over all columns including date_time;
# newer pandas versions may object to averaging datetimes — verify.
data1=grouped.mean()
monthly_data['temp_celsius_monthly']=data1['TEMP_C']
monthly_data["TIME"]=data1.index
# keep only the 4-digit year for the x-axis labels
monthly_data["TIME"]=monthly_data["TIME"].astype(str).str.slice(start=0,stop=4)
# NOTE(review): start_time/end_time are never used below, and '20201231'
# disagrees with the stated Jan 1988 - Dec 2018 window — dead code to confirm.
start_time=pd.to_datetime('19880101')
end_time=pd.to_datetime('20201231')
monthly_data.plot.line(x='TIME',y='temp_celsius_monthly',style=['k.-'],figsize=(14,6))
plt.title("Helsinki-Vantaa Airport")
plt.xlabel("Time")
plt.ylabel("Temperature(Celsius)")
plt.grid()
# Set output file name
outputfp = "temp_line_plot.png"
# Save plot as image
# YOUR CODE HERE 4
plt.savefig("temp_line_plot.png")
import os
#Check that output file exists (also open the file and check that the plot looks ok!)
os.path.exists(outputfp)
# **REMINDER**: Don't forget to upload your figure and the modified notebook into your personal GitHub repository!
#
# ### Done!
|
[
"matplotlib.pyplot.title",
"pandas.DataFrame",
"pandas.read_csv",
"os.path.exists",
"pandas.to_datetime",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.savefig"
] |
[((703, 782), 'pandas.read_csv', 'pd.read_csv', (['"""data/helsinki-vantaa.csv"""'], {'parse_dates': "['DATE']", 'index_col': '"""DATE"""'}), "('data/helsinki-vantaa.csv', parse_dates=['DATE'], index_col='DATE')\n", (714, 782), True, 'import pandas as pd\n'), ((1089, 1115), 'pandas.to_datetime', 'pd.to_datetime', (['data.index'], {}), '(data.index)\n', (1103, 1115), True, 'import pandas as pd\n'), ((1616, 1630), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (1628, 1630), True, 'import pandas as pd\n'), ((1956, 1982), 'pandas.to_datetime', 'pd.to_datetime', (['"""19880101"""'], {}), "('19880101')\n", (1970, 1982), True, 'import pandas as pd\n'), ((1992, 2018), 'pandas.to_datetime', 'pd.to_datetime', (['"""20201231"""'], {}), "('20201231')\n", (2006, 2018), True, 'import pandas as pd\n'), ((2108, 2144), 'matplotlib.pyplot.title', 'plt.title', (['"""Helsinki-Vantaa Airport"""'], {}), "('Helsinki-Vantaa Airport')\n", (2117, 2144), True, 'import matplotlib.pyplot as plt\n'), ((2145, 2163), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time"""'], {}), "('Time')\n", (2155, 2163), True, 'import matplotlib.pyplot as plt\n'), ((2164, 2198), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Temperature(Celsius)"""'], {}), "('Temperature(Celsius)')\n", (2174, 2198), True, 'import matplotlib.pyplot as plt\n'), ((2199, 2209), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (2207, 2209), True, 'import matplotlib.pyplot as plt\n'), ((2307, 2340), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""temp_line_plot.png"""'], {}), "('temp_line_plot.png')\n", (2318, 2340), True, 'import matplotlib.pyplot as plt\n'), ((2439, 2463), 'os.path.exists', 'os.path.exists', (['outputfp'], {}), '(outputfp)\n', (2453, 2463), False, 'import os\n')]
|
from boto3 import Session
from botocore.exceptions import BotoCoreError, ClientError
import PyPDF2
import sys

# Extract the text of the first PDF page.  Using `with` guarantees the PDF
# handle is closed (the original left it open for the life of the process).
with open('SamplePDF.pdf', 'rb') as pdfFileObj:
    pdfReader = PyPDF2.PdfFileReader(pdfFileObj)
    pageObject = pdfReader.getPage(0)
    page_text = pageObject.extractText()

# Synthesize the extracted text to MP3 with Amazon Polly.
session = Session(profile_name="")
polly = session.client("polly")
try:
    # Request speech synthesis
    response = polly.synthesize_speech(Text=page_text, OutputFormat="mp3",
                                       VoiceId="Joanna")
except (BotoCoreError, ClientError) as error:
    # The service returned an error, exit gracefully
    print(error)
    sys.exit(-1)

# Write the binary audio stream; 'with' closes the file even if the
# write fails, and the name no longer shadows the builtin `file`.
with open('speech.mp3', 'wb') as audio_file:
    audio_file.write(response['AudioStream'].read())
|
[
"PyPDF2.PdfFileReader",
"sys.exit",
"boto3.Session"
] |
[((163, 195), 'PyPDF2.PdfFileReader', 'PyPDF2.PdfFileReader', (['pdfFileObj'], {}), '(pdfFileObj)\n', (183, 195), False, 'import PyPDF2\n'), ((242, 266), 'boto3.Session', 'Session', ([], {'profile_name': '""""""'}), "(profile_name='')\n", (249, 266), False, 'from boto3 import Session\n'), ((605, 617), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (613, 617), False, 'import sys\n')]
|
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import glib
import gtk
import time
import logging
import os
from info_bar_gtk import *
from open_dialog_base import OpenDialogBase
class OpenDialogGtk(gtk.Dialog, OpenDialogBase):
  """GTK front-end for the quick-open dialog.

  Pairs a gtk.Dialog window with the toolkit-independent logic in
  OpenDialogBase: a filter entry at the bottom, a multi-select results
  treeview, a status label, and Reindex / Bad-result buttons.
  """
  def __init__(self, options, db, initial_filter):
    gtk.Dialog.__init__(self)
    OpenDialogBase.__init__(self, options, db, initial_filter)
    self.set_title("Quick open...")
    self.set_size_request(1000,400)
    self.add_button("_Open",gtk.RESPONSE_OK)
    self.add_button("Cancel",gtk.RESPONSE_CANCEL)

    # Single-column model of opaque (path, rank) tuples; display columns
    # pull fields out via the accessor callbacks passed to add_column.
    model = gtk.ListStore(object)
    treeview = gtk.TreeView(model)
    treeview.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
    treeview.get_selection().connect('changed', self._on_treeview_selection_changed)
    self.connect('response', self.response)
    text_cell_renderer = gtk.CellRendererText()
    # Helper: a text column whose cell value is accessor_cb(model row object).
    def add_column(title,accessor_cb):
      column = gtk.TreeViewColumn(title, text_cell_renderer)
      column.set_cell_data_func(text_cell_renderer, lambda column, cell, model, iter: cell.set_property('text', accessor_cb(model.get(iter,0)[0])))
      treeview.append_column(column)
      return column
    add_column("Rank",lambda obj: obj[1])
    add_column("File",lambda obj: os.path.basename(obj[0]))
    add_column("Path",lambda obj: os.path.dirname(obj[0]))
    self.connect('destroy', self.on_destroy)
    # Info bar shown only when the result list was truncated.
    truncated_bar = InfoBarGtk()
    bad_result_button = gtk.Button("Bad result")
    bad_result_button.connect('clicked', lambda *args: self.on_badresult_clicked())
    reindex_button = gtk.Button("_Reindex")
    reindex_button.connect('clicked', lambda *args: self.on_reindex_clicked())
    status_label = gtk.Label()
    self.status_label = status_label
    filter_entry = gtk.Entry()
    filter_entry.set_text(self._filter_text)
    filter_entry.connect('key_press_event', self._on_filter_entry_keypress)
    filter_entry.connect('changed', self._on_filter_text_changed)
    # attach everything up
    vbox = self.vbox
    table_vbox = gtk.VBox()
    treeview_scroll_window = gtk.ScrolledWindow()
    treeview_scroll_window.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
    table_options_hbox = gtk.HBox()
    # NOTE(review): button_hbox is created but never packed — dead widget.
    button_hbox = gtk.HBox()
    vbox.pack_start(table_vbox,True,True,1)
    table_vbox.pack_start(table_options_hbox,False,False,0)
    table_options_hbox.pack_start(status_label,False,False,10)
    table_options_hbox.pack_end(bad_result_button,False,False,0)
    table_options_hbox.pack_end(reindex_button,False,False,0)
    table_vbox.pack_start(treeview_scroll_window,True,True,0)
    table_vbox.pack_start(truncated_bar,False,True,0)
    table_vbox.pack_start(filter_entry,False,True,0)
    treeview_scroll_window.add(treeview)
    vbox.show_all()
    truncated_bar.hide()
    # remember things that need remembering
    self._treeview = treeview
    self._model = model
    self._truncated_bar = truncated_bar
    self._filter_entry = filter_entry
    filter_entry.grab_focus()
    # Cursor placement: select-all for replace mode, cursor-at-end otherwise.
    if self.should_position_cursor_for_replace:
      filter_entry.set_position(0)
      filter_entry.select_region(0, len(self._filter_text))
    else:
      filter_entry.set_position(len(self._filter_text))
    self.show_all()

  def response(self, arg, *rest):
    """Dialog 'response' handler; anything but RESPONSE_OK counts as cancel."""
    canceled = len(rest) > 0 and rest[0] != gtk.RESPONSE_OK
    self.on_done(canceled)

  def on_destroy(self, *args):
    # Window destroyed without a response: treat as cancel.
    self.response(None, gtk.RESPONSE_CANCEL)

  def redirect_to_treeview(self, event):
    """Forward a key event to the treeview, restoring focus afterwards.

    Lets arrow/page keys navigate results while typing stays in the entry.
    Always returns True so the entry does not also handle the event.
    """
    prev = self.get_focus()
    self._treeview.grab_focus()
    ret = self._treeview.emit('key_press_event', event)  # NOTE(review): ret unused
    if prev:
      prev.grab_focus()
    return True

  def _on_filter_entry_keypress(self,entry,event):
    """Emacs-style keybindings for the filter entry.

    Navigation keys go to the treeview; C-n/C-p move the selection;
    C-a/C-e/C-f/C-b/C-k edit within the entry; Return accepts the dialog.
    Returning True stops further handling of the event.
    """
    keyname = gtk.gdk.keyval_name(event.keyval)
    if keyname in ("Up", "Down", "Page_Up", "Page_Down", "Left", "Right"):
      return self.redirect_to_treeview(event)
    elif keyname == "space" and event.state & gtk.gdk.CONTROL_MASK:
      return self.redirect_to_treeview(event)
    elif keyname == 'n' and event.state & gtk.gdk.CONTROL_MASK:
      self.move_selection(1)
      return True
    elif keyname == 'p' and event.state & gtk.gdk.CONTROL_MASK:
      self.move_selection(-1)
      return True
    elif keyname == 'a' and event.state & gtk.gdk.CONTROL_MASK:
      i = self._filter_entry.set_position(0)
      return True
    elif keyname == 'e' and event.state & gtk.gdk.CONTROL_MASK:
      self._filter_entry.set_position(len(self._filter_entry.get_text()))
      return True
    elif keyname == 'f' and event.state & gtk.gdk.CONTROL_MASK:
      i = self._filter_entry.get_position()
      i = min(i + 1, len(self._filter_entry.get_text()))
      self._filter_entry.set_position(i)
      return True
    elif keyname == 'b' and event.state & gtk.gdk.CONTROL_MASK:
      i = self._filter_entry.get_position()
      if i >= 1:
        self._filter_entry.set_position(i - 1)
      return True
    elif keyname == 'k' and event.state & gtk.gdk.CONTROL_MASK:
      # kill-to-cursor: keep only the text left of the cursor
      i = self._filter_entry.get_position()
      t = self._filter_entry.get_text()[:i]
      self._filter_entry.set_text(t)
      self._filter_entry.set_position(len(t))
      return True
    elif keyname == 'Return':
      self.response(gtk.RESPONSE_OK)
      return True

  def _on_filter_text_changed(self,entry):
    # Push the entry contents into the base class, which drives a re-query.
    text = entry.get_text()
    self.set_filter_text(text)

  def set_results_enabled(self, en):
    """Enable/disable the results view and the Open button together."""
    self._treeview.set_sensitive(en)
    self.set_response_sensitive(gtk.RESPONSE_OK, en)

  def status_changed(self):
    # Called by the base class when its status_text property updates.
    self.status_label.set_text(self.status_text)

  # update the model based on result
  def update_results_list(self, files, ranks):
    """Replace the model contents with (file, rank) pairs and select row 0.

    The model is detached and child-notify frozen during the rebuild so the
    treeview repaints once instead of per-row.
    """
    if len(files) == 0:
      self._model.clear()
      return

    start_time = time.time()
    self._treeview.freeze_child_notify()
    self._treeview.set_model(None)
    self._model.clear()
    for i in range(len(files)):
      row = self._model.append()
      self._model.set(row, 0, (files[i], ranks[i]))
    self._treeview.set_model(self._model)
    self._treeview.thaw_child_notify()
    # NOTE(review): truncated is hard-coded False, so the info bar below
    # can never show; presumably a truncation signal was never wired up.
    truncated = False
    if truncated:
      self._truncated_bar.text = "Search was truncated at %i items" % len(files)
      self._truncated_bar.show()
    else:
      self._truncated_bar.hide()
    elapsed = time.time() - start_time  # NOTE(review): measured but never reported
    if len(self._model) > 0:
      if self._treeview.get_selection():
        self._treeview.get_selection().select_path((0,))

  def _on_treeview_selection_changed(self, selection):
    # Open button only makes sense when at least one row is selected.
    self.set_response_sensitive(gtk.RESPONSE_OK,selection.count_selected_rows() != 0)

  def move_selection(self, direction):
    """Move the single selection up (direction<0) or down (direction>0).

    With no current selection, selects row 0. Clamps at both ends.
    """
    sel = self.get_selected_indices()
    if len(sel) == 0:
      if self._model.iter_n_children(None) == 0:
        return
      self.set_selected_indices([0])
      return
    # Anchor on the bottom-most row when moving down, top-most when moving up.
    if direction > 0:
      i = max(sel)
    else:
      i = min(sel)
    i = i + direction
    if i < 0:
      return
    if i >= self._model.iter_n_children(None):
      return
    self.set_selected_indices([i])

  def get_selected_indices(self):
    """Return the selected row indices as a list of ints."""
    model,rows = self._treeview.get_selection().get_selected_rows()
    return [x[0] for x in rows]

  def set_selected_indices(self, indices):
    """Make exactly `indices` selected, unselecting everything else."""
    sel = self._treeview.get_selection()
    for i in self.get_selected_indices():
      sel.unselect_path((i,))
    for i in indices:
      sel.select_path((i,))

  def get_selected_items(self):
    """Return the file paths of all currently selected rows."""
    model,rows = self._treeview.get_selection().get_selected_rows()
    files = []
    for path in rows:
      iter = model.get_iter(path)
      obj = model.get(iter,0)[0][0]
      files.append(obj)
    return files
|
[
"gtk.VBox",
"gtk.HBox",
"os.path.basename",
"gtk.gdk.keyval_name",
"os.path.dirname",
"gtk.TreeViewColumn",
"gtk.ScrolledWindow",
"gtk.CellRendererText",
"open_dialog_base.OpenDialogBase.__init__",
"gtk.ListStore",
"gtk.Dialog.__init__",
"gtk.Label",
"gtk.Entry",
"time.time",
"gtk.Button",
"gtk.TreeView"
] |
[((814, 839), 'gtk.Dialog.__init__', 'gtk.Dialog.__init__', (['self'], {}), '(self)\n', (833, 839), False, 'import gtk\n'), ((844, 902), 'open_dialog_base.OpenDialogBase.__init__', 'OpenDialogBase.__init__', (['self', 'options', 'db', 'initial_filter'], {}), '(self, options, db, initial_filter)\n', (867, 902), False, 'from open_dialog_base import OpenDialogBase\n'), ((1084, 1105), 'gtk.ListStore', 'gtk.ListStore', (['object'], {}), '(object)\n', (1097, 1105), False, 'import gtk\n'), ((1122, 1141), 'gtk.TreeView', 'gtk.TreeView', (['model'], {}), '(model)\n', (1134, 1141), False, 'import gtk\n'), ((1360, 1382), 'gtk.CellRendererText', 'gtk.CellRendererText', ([], {}), '()\n', (1380, 1382), False, 'import gtk\n'), ((1955, 1979), 'gtk.Button', 'gtk.Button', (['"""Bad result"""'], {}), "('Bad result')\n", (1965, 1979), False, 'import gtk\n'), ((2086, 2108), 'gtk.Button', 'gtk.Button', (['"""_Reindex"""'], {}), "('_Reindex')\n", (2096, 2108), False, 'import gtk\n'), ((2208, 2219), 'gtk.Label', 'gtk.Label', ([], {}), '()\n', (2217, 2219), False, 'import gtk\n'), ((2277, 2288), 'gtk.Entry', 'gtk.Entry', ([], {}), '()\n', (2286, 2288), False, 'import gtk\n'), ((2543, 2553), 'gtk.VBox', 'gtk.VBox', ([], {}), '()\n', (2551, 2553), False, 'import gtk\n'), ((2583, 2603), 'gtk.ScrolledWindow', 'gtk.ScrolledWindow', ([], {}), '()\n', (2601, 2603), False, 'import gtk\n'), ((2711, 2721), 'gtk.HBox', 'gtk.HBox', ([], {}), '()\n', (2719, 2721), False, 'import gtk\n'), ((2740, 2750), 'gtk.HBox', 'gtk.HBox', ([], {}), '()\n', (2748, 2750), False, 'import gtk\n'), ((4216, 4249), 'gtk.gdk.keyval_name', 'gtk.gdk.keyval_name', (['event.keyval'], {}), '(event.keyval)\n', (4235, 4249), False, 'import gtk\n'), ((6219, 6230), 'time.time', 'time.time', ([], {}), '()\n', (6228, 6230), False, 'import time\n'), ((1438, 1483), 'gtk.TreeViewColumn', 'gtk.TreeViewColumn', (['title', 'text_cell_renderer'], {}), '(title, text_cell_renderer)\n', (1456, 1483), False, 'import gtk\n'), ((6745, 6756), 
'time.time', 'time.time', ([], {}), '()\n', (6754, 6756), False, 'import time\n'), ((1765, 1789), 'os.path.basename', 'os.path.basename', (['obj[0]'], {}), '(obj[0])\n', (1781, 1789), False, 'import os\n'), ((1825, 1848), 'os.path.dirname', 'os.path.dirname', (['obj[0]'], {}), '(obj[0])\n', (1840, 1848), False, 'import os\n')]
|
from .conftest import GoProCameraTest
from goprocam import GoProCamera
import pytest
from socket import timeout
class GetMediaInfoTest(GoProCameraTest):
    """Exercise GoProCamera.getMediaInfo for Fusion ('FS') dual-lens
    cameras, single-lens cameras, and the failure modes (timeout and
    missing response)."""

    def _patch_fusion(self, m):
        # Make infoCamera report the Fusion model so the dual media
        # listing endpoint (/gp/gpMediaListEx) is used.
        m.setattr(GoProCamera.GoPro, 'infoCamera', lambda s, x: 'FS')

    def _set_fusion_media(self, first, second):
        # Install a canned dual (back/front) media listing response.
        self.responses['/gp/gpMediaListEx'] = [[
            {'media': first},
            {'media': second},
        ]]

    def _set_single_media(self, fs):
        # Install a canned single-lens media listing with one folder.
        self.responses['/gp/gpMediaList'] = {
            'media': [
                {'d': 'folder', 'fs': fs}
            ]}

    def test_get_media_info_empty_no_option(self):
        assert self.goprocam.getMediaInfo(option="") is None

    def test_get_media_info_FS_no_option(self):
        with self.monkeypatch.context() as m:
            self._patch_fusion(m)
            self._set_fusion_media([], [])
            # should really error out
            assert self.goprocam.getMediaInfo(option="") is None

    def test_get_media_info_FS_folder_back_front(self):
        with self.monkeypatch.context() as m:
            self._patch_fusion(m)
            self._set_fusion_media([{'d': 'folderGBACK', 'fs': []}], [])
            assert self.goprocam.getMediaInfo('folder') == [
                'folderGBACK', 'folderGFRNT'
            ]

    def test_get_media_info_FS_folder_front_back(self):
        with self.monkeypatch.context() as m:
            self._patch_fusion(m)
            self._set_fusion_media([{'d': 'folderGFRNT', 'fs': []}], [])
            assert self.goprocam.getMediaInfo('folder') == [
                'folderGFRNT', 'folderGBACK'
            ]

    def test_get_media_info_FS_file_empty_back_front(self):
        with self.monkeypatch.context() as m:
            self._patch_fusion(m)
            self._set_fusion_media([{'d': 'folderGBACK', 'fs': []}], [])
            assert self.goprocam.getMediaInfo('file') == [
                '', ''
            ]

    def test_get_media_info_FS_file_back_front(self):
        with self.monkeypatch.context() as m:
            self._patch_fusion(m)
            self._set_fusion_media(
                [{'d': 'folderGBACK', 'fs': [{'n': 'file1', 's': '3'}]}],
                [{'d': 'folderGFRNT', 'fs': [{'n': 'file2', 's': '4'}]}],
            )
            assert self.goprocam.getMediaInfo('file') == [
                'file1', 'file2'
            ]

    # this is for coverage and the results seem questionable
    def test_get_media_info_FS_multiple_files_back_front(self):
        with self.monkeypatch.context() as m:
            self._patch_fusion(m)
            self._set_fusion_media(
                [
                    {'d': 'folderGBACK', 'fs': [{'n': 'file1', 's': '3'}]},
                    {'d': 'folderGFRNT', 'fs': [{'n': 'file3', 's': '4'}]},
                ],
                [
                    {'d': 'folderGFRNT', 'fs': [{'n': 'file2', 's': '4'}]},
                    {'d': 'folderGBACK', 'fs': [{'n': 'file4', 's': '3'}]},
                ],
            )
            assert self.goprocam.getMediaInfo('file') == [
                'file3', 'file4'
            ]

    def test_get_media_info_FS_size_back_front(self):
        with self.monkeypatch.context() as m:
            self._patch_fusion(m)
            self._set_fusion_media(
                [{'d': 'folderGBACK', 'fs': [{'n': 'file1', 's': '3'}]}],
                [{'d': 'folderGFRNT', 'fs': [{'n': 'file2', 's': '4'}]}],
            )
            assert self.goprocam.getMediaInfo('size') == [
                '3.0B', '4.0B'
            ]

    def test_get_media_info_folder_empty_folder(self):
        self._set_single_media([])
        assert self.goprocam.getMediaInfo('folder') == 'folder'

    def test_get_media_info_file_empty_folder(self):
        self._set_single_media([])
        assert self.goprocam.getMediaInfo('file') == ''

    def test_get_media_info_size_empty_folder(self):
        self._set_single_media([])
        with pytest.raises(ValueError):  # accidentally...
            assert self.goprocam.getMediaInfo('size') == ''

    def test_get_media_info_file(self):
        self._set_single_media([{'n': 'file', 's': '1'}])
        assert self.goprocam.getMediaInfo('file') == 'file'

    def test_get_media_info_size(self):
        self._set_single_media([{'n': 'file', 's': '1'}])
        assert self.goprocam.getMediaInfo('size') == '1.0B'

    def test_get_media_info_timeout(self):
        self.responses['/gp/gpMediaList'] = timeout()
        assert self.goprocam.getMediaInfo('') == ''

    def test_get_media_info_error(self):
        del self.responses['/gp/gpMediaList']
        assert self.goprocam.getMediaInfo('') == ''
|
[
"pytest.raises",
"socket.timeout"
] |
[((5871, 5880), 'socket.timeout', 'timeout', ([], {}), '()\n', (5878, 5880), False, 'from socket import timeout\n'), ((5177, 5202), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (5190, 5202), False, 'import pytest\n')]
|
#!/usr/bin/python
# Exploratory benchmark script for pyMolecular's permutation-invariant
# molecule comparison / hashing utilities.  The earlier experiments are
# kept below as disabled triple-quoted blocks; only the statistical test
# of the plane-wave hash at the bottom is active.
import numpy as np
import pyMolecular as mol
import pyMolecular.testing as moltest
import matplotlib.pyplot as plt
# ==================== Compare two point distributions (permutation invariant)
'''
points_ref = np.array([
    [1.0,0.0,0.0], [-1.0, 0.0, 0.0],
    [0.0,1.0,0.0], [ 0.0,-1.0, 0.0],
    [0.0,0.0,1.0], [ 0.0, 0.0,-1.0]
], dtype=np.float64 )
mol.initComparator( points_ref )
points = points_ref.copy()
dist = mol.compDistance( points ); print( "dist (identical)", dist )
drnd = np.random.rand( points.shape[0], points.shape[1] )
points += drnd*0.01
print "========== drandom"
print points
dist = mol.compDistance( points.copy() ); print( "dist (drandom)", dist )
np.random.shuffle(points)
print points
dist = mol.compDistance( points.copy() ); print( "dist (shuffled)", dist )
'''
# ==================== Compare two TypePoint distributions (like atoms in molecule with different atom types) (permutation invariant)
'''
atoms=np.genfromtxt( "/home/prokop/git/SimpleSimulationEngine/cpp/apps/MolecularEditor/inputs/PTCDA/PTCDA.bas", skip_header=1 )
#print "atoms=", atoms
points_ref = atoms[:,1:4].copy();
types_ref = atoms[:,0 ].astype(np.int32).copy();
print points_ref
mol.initComparatorT ( points_ref, types_ref )
print "========= identical"
points = atoms[:,1:4].copy();
types = atoms[:,0 ].astype(np.int32).copy(); print( "types = ", types)
dist = mol.compDistanceT( points_ref, types_ref ); print " >>> dist = ", dist
print "========= shuffled"
np.random.shuffle(atoms);
points = atoms[:,1:4].copy();
types = atoms[:,0 ].astype(np.int32).copy(); print( "types = ", types)
dist = mol.compDistanceT( points, types ); print " >>> dist = ", dist
print "========= drandom"
drnd = np.random.rand( points.shape[0], points.shape[1] )
points += drnd*0.01
dist = mol.compDistanceT( points, types ); print " >>> dist = ", dist
'''
# ==================== Compute fast Hash by plane waves projection of atomic coordinates (permutation invariant)
'''
atoms=np.genfromtxt( "/home/prokop/git/SimpleSimulationEngine/cpp/apps/MolecularEditor/inputs/PTCDA/PTCDA.bas", skip_header=1 )
ks = np.array([
    [1.0,0.0,0.0],
    [0.0,1.0,0.0],
    [0.0,0.0,1.0]
])
points_ref = atoms[:,1:4].copy();
coefs_ref = mol.getPlaneWaveDescriptor( points_ref, ks ); print "coefs (ref) ", coefs_ref
points_1 = points_ref.copy()
coefs = mol.getPlaneWaveDescriptor( points_1, ks ); print "coefs (identical) ", coefs
np.random.shuffle(points_1); points_1 = points_1.copy()
coefs = mol.getPlaneWaveDescriptor( points_1, ks ); print "coefs (shufled) ", coefs
points_3 = points_ref.copy() + np.random.rand( len(atoms), 3 ) * 0.25
coefs = mol.getPlaneWaveDescriptor( points_3, ks ); print "coefs (drand) ", coefs
'''
# ==================== Testing of statistical properties of plane-wave hash
# nrep repeats per mutation count; natoms random 1-D "coordinates";
# Ns sweeps how many coordinates are mutated; dx is the total mutation
# budget (split per mutated coordinate below); k is the wave number.
nrep = 10
natoms = 100
Ns = range( 1, natoms )
dx = 0.5
k = 3.0
'''
xs = np.linspace(-10.0,10.0,1000)
ys = moltest.saw_sine( xs+100 )
plt.plot( xs, ys )
'''
xs_ref = np.random.rand( natoms ); #print "xs_ref = ", xs_ref
#xs = moltest.mutateN( xs_ref.copy(), 3, 0.1 ); print "xs = ", xs
# Reference hash of the unperturbed coordinates.
coef_ref = moltest.hash_saw( xs_ref, k )
result = np.zeros((len(Ns)*nrep,2))
ires = 0
# For each number of mutated coordinates N, hash nrep independently
# mutated copies and record (N, hash) pairs for plotting.
for N in Ns:
    dx_ = dx/float(N)
    for i in range(nrep):
        xs = moltest.mutateN( xs_ref.copy(), N, dx_ )
        coef = moltest.hash_saw( xs, k )
        result[ ires, 0 ] = N; result[ ires, 1 ] = coef;
        ires+=1
# Scatter of hashes vs. mutation count, with the reference hash as a line.
plt.axhline(coef_ref)
plt.plot( result[:,0], result[:,1], '.' )
plt.show()
|
[
"matplotlib.pyplot.axhline",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"pyMolecular.testing.hash_saw",
"numpy.random.rand"
] |
[((3075, 3097), 'numpy.random.rand', 'np.random.rand', (['natoms'], {}), '(natoms)\n', (3089, 3097), True, 'import numpy as np\n'), ((3208, 3235), 'pyMolecular.testing.hash_saw', 'moltest.hash_saw', (['xs_ref', 'k'], {}), '(xs_ref, k)\n', (3224, 3235), True, 'import pyMolecular.testing as moltest\n'), ((3518, 3539), 'matplotlib.pyplot.axhline', 'plt.axhline', (['coef_ref'], {}), '(coef_ref)\n', (3529, 3539), True, 'import matplotlib.pyplot as plt\n'), ((3544, 3585), 'matplotlib.pyplot.plot', 'plt.plot', (['result[:, 0]', 'result[:, 1]', '"""."""'], {}), "(result[:, 0], result[:, 1], '.')\n", (3552, 3585), True, 'import matplotlib.pyplot as plt\n'), ((3587, 3597), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3595, 3597), True, 'import matplotlib.pyplot as plt\n'), ((3417, 3440), 'pyMolecular.testing.hash_saw', 'moltest.hash_saw', (['xs', 'k'], {}), '(xs, k)\n', (3433, 3440), True, 'import pyMolecular.testing as moltest\n')]
|
import re
import nltk
import string
from nltk.corpus import stopwords
from nltk.stem.porter import PorterStemmer
from nltk.stem import WordNetLemmatizer
def case_normalization(text):
    """Return *text* with every character folded to lower case."""
    lowered = text.lower()
    return lowered
def remove_whitespace(text):
    """Return *text* with leading and trailing whitespace removed."""
    trimmed = text.strip()
    return trimmed
def remove_punctuation(text):
    """Replace every character that is neither a word character nor
    whitespace with a single space."""
    non_word = re.compile(r'[^\w\s]')
    return non_word.sub(' ', text)
def remove_stopwords(text):
    """Drop NLTK English stop words from whitespace-delimited *text*."""
    blocked = frozenset(stopwords.words("english"))
    kept = (token for token in text.split() if token not in blocked)
    return " ".join(kept)
def remove_unicode(text_unicode):
    """Strip non-ASCII characters and normalize internal whitespace.

    Characters outside the ASCII range are dropped entirely
    (``errors="ignore"``); runs of whitespace collapse to single spaces.

    Args:
        text_unicode: the input string, possibly containing non-ASCII.
    Returns:
        An ASCII-only, single-spaced string.
    """
    text_ascii = text_unicode.encode(encoding="ascii", errors="ignore").decode()
    # join(split()) both collapses whitespace and trims the ends;
    # the original's identity list comprehension was redundant.
    return " ".join(text_ascii.split())
def remove_social_data(text):
    """Remove social-media artifacts from *text*.

    Strips @mentions, dollar signs, and http(s) URLs (greedily to the
    end of the line, matching the original behaviour).

    Args:
        text: input string.
    Returns:
        The string with mentions, '$' characters and URLs removed.
    """
    # Raw strings: the original used plain strings with '\S', '\$' and
    # '\/' which are invalid escape sequences (DeprecationWarning, and a
    # SyntaxWarning on modern Python).
    text = re.sub(r"@\S+", "", text)
    text = re.sub(r"\$", "", text)
    text = re.sub(r"https?:\/\/.*[\r\n]*", "", text)
    return text
def stemming(text):
    """Apply the Porter stemmer to each space-separated token."""
    stemmer = PorterStemmer()
    stemmed = [stemmer.stem(token) for token in text.split(" ")]
    return " ".join(stemmed)
def lemmatizer(text):
    """Lemmatize each space-separated token with WordNet."""
    # Distinct local name so the function's own name is not shadowed.
    wnl = WordNetLemmatizer()
    lemmatized = [wnl.lemmatize(token) for token in text.split(" ")]
    return " ".join(lemmatized)
def removeStringsWithNumbers(text):
    """Keep only tokens made purely of alphabetic characters."""
    alpha_only = [token for token in text.split(" ") if token.isalpha()]
    return " ".join(alpha_only)
def cleanString(text):
    """Run the full text-normalization pipeline over *text*.

    Steps: lower-case, trim, strip punctuation, drop non-ASCII,
    remove stop words, drop tokens containing digits, lemmatize.
    """
    pipeline = (
        case_normalization,
        remove_whitespace,
        remove_punctuation,
        remove_unicode,
        # remove_social_data,  # disabled, as in the original pipeline
        remove_stopwords,
        removeStringsWithNumbers,
        lemmatizer,
        # stemming,  # disabled, as in the original pipeline
    )
    for step in pipeline:
        text = step(text)
    return text
if __name__ == "__main__":
    # Quick manual smoke test of the cleaning pipeline.
    print(cleanString("test"))
|
[
"nltk.stem.porter.PorterStemmer",
"re.sub",
"nltk.corpus.stopwords.words",
"nltk.stem.WordNetLemmatizer"
] |
[((305, 335), 're.sub', 're.sub', (['"""[^\\\\w\\\\s]"""', '""" """', 'text'], {}), "('[^\\\\w\\\\s]', ' ', text)\n", (311, 335), False, 'import re\n'), ((783, 808), 're.sub', 're.sub', (['"""@\\\\S+"""', '""""""', 'text'], {}), "('@\\\\S+', '', text)\n", (789, 808), False, 'import re\n'), ((819, 842), 're.sub', 're.sub', (['"""\\\\$"""', '""""""', 'text'], {}), "('\\\\$', '', text)\n", (825, 842), False, 'import re\n'), ((853, 895), 're.sub', 're.sub', (["'https?:\\\\/\\\\/.*[\\r\\n]*'", '""""""', 'text'], {}), "('https?:\\\\/\\\\/.*[\\r\\n]*', '', text)\n", (859, 895), False, 'import re\n'), ((976, 991), 'nltk.stem.porter.PorterStemmer', 'PorterStemmer', ([], {}), '()\n', (989, 991), False, 'from nltk.stem.porter import PorterStemmer\n'), ((1198, 1217), 'nltk.stem.WordNetLemmatizer', 'WordNetLemmatizer', ([], {}), '()\n', (1215, 1217), False, 'from nltk.stem import WordNetLemmatizer\n'), ((385, 411), 'nltk.corpus.stopwords.words', 'stopwords.words', (['"""english"""'], {}), "('english')\n", (400, 411), False, 'from nltk.corpus import stopwords\n')]
|
from setuptools import setup
# NOTE(review): __version__ is imported but never used below; presumably
# it was meant to be passed as setup(version=__version__) — confirm.
from solstice.bootstrap.__version__ import __version__
# Bare setup(): all package metadata must come from declarative config
# (setup.cfg / pyproject.toml), since nothing is passed here.
setup()
|
[
"setuptools.setup"
] |
[((86, 93), 'setuptools.setup', 'setup', ([], {}), '()\n', (91, 93), False, 'from setuptools import setup\n')]
|
# Generated by Django 2.2.1 on 2019-07-23 19:46
from django.db import migrations, models
class Migration(migrations.Migration):
    """Rename ``vippermission.vip_idd`` to ``vip_id`` (apparently a typo
    fix) and add uniqueness constraints to permission/vip fields."""

    dependencies = [
        ('vip', '0001_initial'),
    ]
    operations = [
        # Rename the misspelled field (vip_idd -> vip_id).
        migrations.RenameField(
            model_name='vippermission',
            old_name='vip_idd',
            new_name='vip_id',
        ),
        # Permission names become unique.
        migrations.AlterField(
            model_name='permission',
            name='name',
            field=models.CharField(max_length=32, unique=True),
        ),
        # VIP levels become unique.
        migrations.AlterField(
            model_name='vip',
            name='level',
            field=models.IntegerField(default=0, unique=True),
        ),
        # VIP names become unique.
        migrations.AlterField(
            model_name='vip',
            name='name',
            field=models.CharField(max_length=128, unique=True),
        ),
    ]
|
[
"django.db.models.CharField",
"django.db.models.IntegerField",
"django.db.migrations.RenameField"
] |
[((220, 313), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""vippermission"""', 'old_name': '"""vip_idd"""', 'new_name': '"""vip_id"""'}), "(model_name='vippermission', old_name='vip_idd',\n new_name='vip_id')\n", (242, 313), False, 'from django.db import migrations, models\n'), ((469, 513), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(32)', 'unique': '(True)'}), '(max_length=32, unique=True)\n', (485, 513), False, 'from django.db import migrations, models\n'), ((631, 674), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'unique': '(True)'}), '(default=0, unique=True)\n', (650, 674), False, 'from django.db import migrations, models\n'), ((791, 836), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'unique': '(True)'}), '(max_length=128, unique=True)\n', (807, 836), False, 'from django.db import migrations, models\n')]
|
"""This module evaluates the forecasted trajectories against the ground truth."""
import argparse
from typing import Dict, List, Union
from collections import OrderedDict
import numpy as np
import pandas as pd
import pickle as pkl
from argoverse.evaluation.eval_forecasting import compute_forecasting_metrics
from argoverse.map_representation.map_api import ArgoverseMap
from utils.baseline_config import FEATURE_FORMAT
import matplotlib.pyplot as plt
def viz_predictions(
        input_: np.ndarray,
        output: np.ndarray,
        target: np.ndarray,
        centerlines: np.ndarray,
        city_names: np.ndarray,
        idx=None,
        show: bool = False,
) -> None:
    """Visualize predicted trajectories against observations and ground truth.

    Args:
        input_ (numpy array): Input Trajectory with shape (num_tracks x obs_len x 2)
        output (numpy array of list): Top-k predicted trajectories, each with shape (num_tracks x pred_len x 2)
        target (numpy array): Ground Truth Trajectory with shape (num_tracks x pred_len x 2)
        centerlines (numpy array of list of centerlines): Centerlines (Oracle/Top-k) for each trajectory
        city_names (numpy array): city names for each trajectory
        idx: identifier used for the saved image file name when `show` is True
        show (bool): if True, save the figure to result_images/<idx>.jpg and show it
    """
    num_tracks = input_.shape[0]
    obs_len = input_.shape[1]
    pred_len = target.shape[1]
    # NOTE(review): figure 0 is reused across calls and never cleared —
    # successive calls draw into the same figure; confirm this is intended.
    plt.figure(0, figsize=(8, 7))
    avm = ArgoverseMap()
    for i in range(num_tracks):
        # Observed (past) trajectory in orange, with a marker on its endpoint.
        plt.plot(
            input_[i, :, 0],
            input_[i, :, 1],
            color="#ECA154",
            label="Observed",
            alpha=1,
            linewidth=3,
            zorder=15,
        )
        plt.plot(
            input_[i, -1, 0],
            input_[i, -1, 1],
            "o",
            color="#ECA154",
            label="Observed",
            alpha=1,
            linewidth=3,
            zorder=15,
            markersize=9,
        )
        # Ground-truth future in red, drawn above everything else.
        plt.plot(
            target[i, :, 0],
            target[i, :, 1],
            color="#d33e4c",
            label="Target",
            alpha=1,
            linewidth=3,
            zorder=20,
        )
        plt.plot(
            target[i, -1, 0],
            target[i, -1, 1],
            "o",
            color="#d33e4c",
            label="Target",
            alpha=1,
            linewidth=3,
            zorder=20,
            markersize=9,
        )
        # Candidate centerlines as dashed grey background curves.
        for j in range(len(centerlines[i])):
            plt.plot(
                centerlines[i][j][:, 0],
                centerlines[i][j][:, 1],
                "--",
                color="grey",
                alpha=1,
                linewidth=1,
                zorder=0,
            )
        # Each of the top-k predicted futures in green.
        for j in range(len(output[i])):
            plt.plot(
                output[i][j][:, 0],
                output[i][j][:, 1],
                color="#007672",
                label="Predicted",
                alpha=1,
                linewidth=3,
                zorder=15,
            )
            plt.plot(
                output[i][j][-1, 0],
                output[i][j][-1, 1],
                "o",
                color="#007672",
                label="Predicted",
                alpha=1,
                linewidth=3,
                zorder=15,
                markersize=9,
            )
            # NOTE(review): lane_ids is computed here but never drawn for
            # predicted points (no draw_lane call) — dead work or a bug?
            for k in range(pred_len):
                lane_ids = avm.get_lane_ids_in_xy_bbox(
                    output[i][j][k, 0],
                    output[i][j][k, 1],
                    city_names[i],
                    query_search_range_manhattan=2.5,
                )
        # Draw the lanes near the observed points ...
        for j in range(obs_len):
            lane_ids = avm.get_lane_ids_in_xy_bbox(
                input_[i, j, 0],
                input_[i, j, 1],
                city_names[i],
                query_search_range_manhattan=2.5,
            )
            [avm.draw_lane(lane_id, city_names[i]) for lane_id in lane_ids]
        # ... and near the ground-truth future points.
        for j in range(pred_len):
            lane_ids = avm.get_lane_ids_in_xy_bbox(
                target[i, j, 0],
                target[i, j, 1],
                city_names[i],
                query_search_range_manhattan=2.5,
            )
            [avm.draw_lane(lane_id, city_names[i]) for lane_id in lane_ids]
        plt.axis("equal")
        plt.xticks([])
        plt.yticks([])
        # NOTE(review): by_label is built (deduplicated legend entries)
        # but no plt.legend(...) call uses it — the legend is never shown.
        handles, labels = plt.gca().get_legend_handles_labels()
        by_label = OrderedDict(zip(labels, handles))
        if show:
            plt.savefig('result_images/'+str(idx)+'.jpg')
            plt.show()
def parse_arguments():
    """Build and parse the command-line options of the evaluator.

    Returns:
        argparse.Namespace with all evaluation/visualization options.
    """
    parser = argparse.ArgumentParser()
    add = parser.add_argument
    add("--metrics", action="store_true", help="If true, compute metrics")
    add("--gt", default="", type=str, help="path to gt file")
    add("--forecast", default="", type=str, help="path to forecast file")
    add("--horizon", default="", type=int, help="forecast horizon")
    add("--obs_len", default=20, type=int, help="Observed Length")
    add("--miss_threshold", default=2.0, type=float,
        help="Threshold for miss rate")
    add("--features", default="", type=str,
        help="path to test features pkl file")
    add("--max_n_guesses", default=0, type=int, help="Max number of guesses")
    add("--prune_n_guesses", default=0, type=int,
        help="Pruned number of guesses of non-map baseline using map")
    add("--n_guesses_cl", default=0, type=int,
        help="Number of guesses along each centerline")
    add("--n_cl", default=0, type=int,
        help="Number of centerlines to consider")
    add("--viz", action="store_true", help="If true, visualize predictions")
    add("--viz_seq_id", default="", type=str,
        help="Sequence ids for the trajectories to be visualized")
    add("--max_neighbors_cl", default=3, type=int,
        help="Number of neighbors obtained for each centerline by the baseline")
    return parser.parse_args()
def get_city_names_from_features(features_df: pd.DataFrame) -> Dict[int, str]:
    """Get sequence id to city name mapping from the features.

    Args:
        features_df: DataFrame containing the features
    Returns:
        city_names: Dict mapping sequence id to city name
    """
    city_col = FEATURE_FORMAT["CITY_NAME"]
    # The city name is constant per sequence; read it off the first row
    # of each sequence's feature matrix.
    return {
        row["SEQUENCE"]: row["FEATURES"][0][city_col]
        for _, row in features_df.iterrows()
    }
def get_pruned_guesses(
    forecasted_trajectories: Dict[int, List[np.ndarray]],
    city_names: Dict[int, str],
    gt_trajectories: Dict[int, np.ndarray],
) -> Dict[int, List[np.ndarray]]:
    """Prune the number of guesses using map.

    Keeps the args.prune_n_guesses trajectories whose points most often
    fall on the driveable area of the Argoverse map.

    Args:
        forecasted_trajectories: Trajectories forecasted by the algorithm.
        city_names: Dict mapping sequence id to city name.
        gt_trajectories: Ground Truth trajectories.
    Returns:
        Pruned number of forecasted trajectories.
    """
    args = parse_arguments()
    avm = ArgoverseMap()
    pruned_guesses = {}
    for seq_id, trajectories in forecasted_trajectories.items():
        city_name = city_names[seq_id]
        # Score each guess by how many of its points lie on driveable area.
        da_points = [
            np.sum(avm.get_raster_layer_points_boolean(
                trajectory, city_name, "driveable_area"))
            for trajectory in trajectories
        ]
        # Highest-scoring guesses first.
        best_first = np.argsort(da_points)[::-1]
        pruned_guesses[seq_id] = [
            trajectories[i] for i in best_first[:args.prune_n_guesses]
        ]
    return pruned_guesses
def get_m_trajectories_along_n_cl(
    forecasted_trajectories: Dict[int, List[np.ndarray]]
) -> Dict[int, List[np.ndarray]]:
    """Given forecasted trajectories, get <args.n_guesses_cl> trajectories along each of <args.n_cl> centerlines.

    Args:
        forecasted_trajectories: Trajectories forecasted by the algorithm.
    Returns:
        <args.n_guesses_cl> trajectories along each of <args.n_cl> centerlines.
    """
    args = parse_arguments()
    selected_trajectories = {}
    for seq_id, trajectories in forecasted_trajectories.items():
        # Guesses come grouped per centerline in blocks of
        # args.max_neighbors_cl; keep the first args.n_guesses_cl from
        # each of the first args.n_cl blocks.
        block_limit = min(len(trajectories),
                          args.n_cl * args.max_neighbors_cl)
        selected_trajectories[seq_id] = [
            trajectories[j]
            for block_start in range(0, block_limit, args.max_neighbors_cl)
            for j in range(block_start, block_start + args.n_guesses_cl)
        ]
    return selected_trajectories
def viz_predictions_helper(
        forecasted_trajectories: Dict[int, List[np.ndarray]],
        gt_trajectories: Dict[int, np.ndarray],
        features_df: pd.DataFrame,
        viz_seq_id: Union[None, List[int]],
) -> None:
    """Visualize predictions.

    Args:
        forecasted_trajectories: Trajectories forecasted by the algorithm.
        gt_trajectories: Ground Truth trajectories.
        features_df: DataFrame containing the features
        viz_seq_id: Sequence ids to be visualized; None means all sequences
    """
    args = parse_arguments()
    seq_ids = gt_trajectories.keys() if viz_seq_id is None else viz_seq_id
    for seq_id in seq_ids:
        gt_trajectory = gt_trajectories[seq_id]
        curr_features_df = features_df[features_df["SEQUENCE"] == seq_id]
        # Observed (x, y) positions, sliced out of the raw feature matrix.
        input_trajectory = (
            curr_features_df["FEATURES"].values[0]
            [:args.obs_len, [FEATURE_FORMAT["X"], FEATURE_FORMAT["Y"]]].astype(
                "float"))
        output_trajectories = forecasted_trajectories[seq_id]
        candidate_centerlines = curr_features_df[
            "CANDIDATE_CENTERLINES"].values[0]
        city_name = curr_features_df["FEATURES"].values[0][
            0, FEATURE_FORMAT["CITY_NAME"]]
        # viz_predictions expects a leading num_tracks axis; wrap the
        # single sequence into batch-of-one arrays.
        gt_trajectory = np.expand_dims(gt_trajectory, 0)
        input_trajectory = np.expand_dims(input_trajectory, 0)
        output_trajectories = np.expand_dims(np.array(output_trajectories), 0)
        candidate_centerlines = np.expand_dims(np.array(candidate_centerlines),
                                               0)
        city_name = np.array([city_name])
        viz_predictions(
            input_trajectory,
            output_trajectories,
            gt_trajectory,
            candidate_centerlines,
            city_name,
            idx=seq_id,
            show=False,
        )
if __name__ == "__main__":
    args = parse_arguments()
    # SECURITY NOTE: pickle.load executes arbitrary code from the file;
    # only run this on trusted gt/forecast/feature files.
    with open(args.gt, "rb") as f:
        gt_trajectories: Dict[int, np.ndarray] = pkl.load(f)

    with open(args.forecast, "rb") as f:
        forecasted_trajectories: Dict[int, List[np.ndarray]] = pkl.load(f)

    with open(args.features, "rb") as f:
        features_df: pd.DataFrame = pkl.load(f)

    if args.metrics:
        city_names = get_city_names_from_features(features_df)

        # Get displacement error and dac on multiple guesses along each centerline
        if not args.prune_n_guesses and args.n_cl:
            forecasted_trajectories = get_m_trajectories_along_n_cl(
                forecasted_trajectories)
            num_trajectories = args.n_cl * args.n_guesses_cl

        # Get displacement error and dac on pruned guesses
        elif args.prune_n_guesses:
            forecasted_trajectories = get_pruned_guesses(
                forecasted_trajectories, city_names, gt_trajectories)
            num_trajectories = args.prune_n_guesses

        # Normal case
        else:
            num_trajectories = args.max_n_guesses

        compute_forecasting_metrics(
            forecasted_trajectories,
            gt_trajectories,
            city_names,
            num_trajectories,
            args.horizon,
            args.miss_threshold,
        )

    if args.viz:
        id_for_viz = None
        if args.viz_seq_id:
            # Optional pickled list of sequence ids to restrict the plots.
            with open(args.viz_seq_id, "rb") as f:
                id_for_viz = pkl.load(f)
        viz_predictions_helper(forecasted_trajectories, gt_trajectories,
                               features_df, id_for_viz)
|
[
"matplotlib.pyplot.show",
"argoverse.map_representation.map_api.ArgoverseMap",
"matplotlib.pyplot.plot",
"argparse.ArgumentParser",
"numpy.sum",
"matplotlib.pyplot.yticks",
"matplotlib.pyplot.axis",
"numpy.expand_dims",
"argoverse.evaluation.eval_forecasting.compute_forecasting_metrics",
"numpy.argsort",
"matplotlib.pyplot.figure",
"pickle.load",
"numpy.array",
"matplotlib.pyplot.gca",
"matplotlib.pyplot.xticks"
] |
[((1335, 1364), 'matplotlib.pyplot.figure', 'plt.figure', (['(0)'], {'figsize': '(8, 7)'}), '(0, figsize=(8, 7))\n', (1345, 1364), True, 'import matplotlib.pyplot as plt\n'), ((1375, 1389), 'argoverse.map_representation.map_api.ArgoverseMap', 'ArgoverseMap', ([], {}), '()\n', (1387, 1389), False, 'from argoverse.map_representation.map_api import ArgoverseMap\n'), ((4626, 4651), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4649, 4651), False, 'import argparse\n'), ((7889, 7903), 'argoverse.map_representation.map_api.ArgoverseMap', 'ArgoverseMap', ([], {}), '()\n', (7901, 7903), False, 'from argoverse.map_representation.map_api import ArgoverseMap\n'), ((1430, 1545), 'matplotlib.pyplot.plot', 'plt.plot', (['input_[i, :, 0]', 'input_[i, :, 1]'], {'color': '"""#ECA154"""', 'label': '"""Observed"""', 'alpha': '(1)', 'linewidth': '(3)', 'zorder': '(15)'}), "(input_[i, :, 0], input_[i, :, 1], color='#ECA154', label=\n 'Observed', alpha=1, linewidth=3, zorder=15)\n", (1438, 1545), True, 'import matplotlib.pyplot as plt\n'), ((1644, 1780), 'matplotlib.pyplot.plot', 'plt.plot', (['input_[i, -1, 0]', 'input_[i, -1, 1]', '"""o"""'], {'color': '"""#ECA154"""', 'label': '"""Observed"""', 'alpha': '(1)', 'linewidth': '(3)', 'zorder': '(15)', 'markersize': '(9)'}), "(input_[i, -1, 0], input_[i, -1, 1], 'o', color='#ECA154', label=\n 'Observed', alpha=1, linewidth=3, zorder=15, markersize=9)\n", (1652, 1780), True, 'import matplotlib.pyplot as plt\n'), ((1903, 2015), 'matplotlib.pyplot.plot', 'plt.plot', (['target[i, :, 0]', 'target[i, :, 1]'], {'color': '"""#d33e4c"""', 'label': '"""Target"""', 'alpha': '(1)', 'linewidth': '(3)', 'zorder': '(20)'}), "(target[i, :, 0], target[i, :, 1], color='#d33e4c', label='Target',\n alpha=1, linewidth=3, zorder=20)\n", (1911, 2015), True, 'import matplotlib.pyplot as plt\n'), ((2115, 2249), 'matplotlib.pyplot.plot', 'plt.plot', (['target[i, -1, 0]', 'target[i, -1, 1]', '"""o"""'], {'color': '"""#d33e4c"""', 'label': 
'"""Target"""', 'alpha': '(1)', 'linewidth': '(3)', 'zorder': '(20)', 'markersize': '(9)'}), "(target[i, -1, 0], target[i, -1, 1], 'o', color='#d33e4c', label=\n 'Target', alpha=1, linewidth=3, zorder=20, markersize=9)\n", (2123, 2249), True, 'import matplotlib.pyplot as plt\n'), ((4204, 4221), 'matplotlib.pyplot.axis', 'plt.axis', (['"""equal"""'], {}), "('equal')\n", (4212, 4221), True, 'import matplotlib.pyplot as plt\n'), ((4230, 4244), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[]'], {}), '([])\n', (4240, 4244), True, 'import matplotlib.pyplot as plt\n'), ((4253, 4267), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[]'], {}), '([])\n', (4263, 4267), True, 'import matplotlib.pyplot as plt\n'), ((10794, 10826), 'numpy.expand_dims', 'np.expand_dims', (['gt_trajectory', '(0)'], {}), '(gt_trajectory, 0)\n', (10808, 10826), True, 'import numpy as np\n'), ((10854, 10889), 'numpy.expand_dims', 'np.expand_dims', (['input_trajectory', '(0)'], {}), '(input_trajectory, 0)\n', (10868, 10889), True, 'import numpy as np\n'), ((11119, 11140), 'numpy.array', 'np.array', (['[city_name]'], {}), '([city_name])\n', (11127, 11140), True, 'import numpy as np\n'), ((11516, 11527), 'pickle.load', 'pkl.load', (['f'], {}), '(f)\n', (11524, 11527), True, 'import pickle as pkl\n'), ((11633, 11644), 'pickle.load', 'pkl.load', (['f'], {}), '(f)\n', (11641, 11644), True, 'import pickle as pkl\n'), ((11723, 11734), 'pickle.load', 'pkl.load', (['f'], {}), '(f)\n', (11731, 11734), True, 'import pickle as pkl\n'), ((12498, 12636), 'argoverse.evaluation.eval_forecasting.compute_forecasting_metrics', 'compute_forecasting_metrics', (['forecasted_trajectories', 'gt_trajectories', 'city_names', 'num_trajectories', 'args.horizon', 'args.miss_threshold'], {}), '(forecasted_trajectories, gt_trajectories,\n city_names, num_trajectories, args.horizon, args.miss_threshold)\n', (12525, 12636), False, 'from argoverse.evaluation.eval_forecasting import compute_forecasting_metrics\n'), ((2422, 2537), 
'matplotlib.pyplot.plot', 'plt.plot', (['centerlines[i][j][:, 0]', 'centerlines[i][j][:, 1]', '"""--"""'], {'color': '"""grey"""', 'alpha': '(1)', 'linewidth': '(1)', 'zorder': '(0)'}), "(centerlines[i][j][:, 0], centerlines[i][j][:, 1], '--', color=\n 'grey', alpha=1, linewidth=1, zorder=0)\n", (2430, 2537), True, 'import matplotlib.pyplot as plt\n'), ((2713, 2835), 'matplotlib.pyplot.plot', 'plt.plot', (['output[i][j][:, 0]', 'output[i][j][:, 1]'], {'color': '"""#007672"""', 'label': '"""Predicted"""', 'alpha': '(1)', 'linewidth': '(3)', 'zorder': '(15)'}), "(output[i][j][:, 0], output[i][j][:, 1], color='#007672', label=\n 'Predicted', alpha=1, linewidth=3, zorder=15)\n", (2721, 2835), True, 'import matplotlib.pyplot as plt\n'), ((2970, 3112), 'matplotlib.pyplot.plot', 'plt.plot', (['output[i][j][-1, 0]', 'output[i][j][-1, 1]', '"""o"""'], {'color': '"""#007672"""', 'label': '"""Predicted"""', 'alpha': '(1)', 'linewidth': '(3)', 'zorder': '(15)', 'markersize': '(9)'}), "(output[i][j][-1, 0], output[i][j][-1, 1], 'o', color='#007672',\n label='Predicted', alpha=1, linewidth=3, zorder=15, markersize=9)\n", (2978, 3112), True, 'import matplotlib.pyplot as plt\n'), ((4472, 4482), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4480, 4482), True, 'import matplotlib.pyplot as plt\n'), ((8292, 8313), 'numpy.argsort', 'np.argsort', (['da_points'], {}), '(da_points)\n', (8302, 8313), True, 'import numpy as np\n'), ((10935, 10964), 'numpy.array', 'np.array', (['output_trajectories'], {}), '(output_trajectories)\n', (10943, 10964), True, 'import numpy as np\n'), ((11016, 11047), 'numpy.array', 'np.array', (['candidate_centerlines'], {}), '(candidate_centerlines)\n', (11024, 11047), True, 'import numpy as np\n'), ((4294, 4303), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4301, 4303), True, 'import matplotlib.pyplot as plt\n'), ((8248, 8268), 'numpy.sum', 'np.sum', (['raster_layer'], {}), '(raster_layer)\n', (8254, 8268), True, 'import numpy as np\n'), 
((12868, 12879), 'pickle.load', 'pkl.load', (['f'], {}), '(f)\n', (12876, 12879), True, 'import pickle as pkl\n')]
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines the ShapeSpec object which represents a parsed shape template."""
from typing import List, Union, Dict, Optional, Tuple
from tensorguard import dim_specs
from tensorguard import exception
from tensorguard import shape_spec_parser
# Mixed stream of parser tokens and dimension specs, as produced by the
# shape-spec parser.
EntriesType = List[Union[shape_spec_parser.Token, dim_specs.DimSpec]]
# A concrete runtime shape (tuple or list of ints).
ShapeType = Union[Tuple[int], List[int]]
class ShapeSpec:
    """A parsed shape template.

    Holds the dimension specs extracted from the parser output and, when
    the template contains an ellipsis (...), the entries split into the
    parts to its left and right.
    """

    def __init__(self, entries: EntriesType):
        super().__init__()
        # Parser tokens carry no shape information; keep only dim specs.
        self.entries = [
            entry for entry in entries
            if not isinstance(entry, shape_spec_parser.Token)
        ]
        self.has_ellipsis = dim_specs.ellipsis_dim in self.entries
        if self.has_ellipsis:
            split_at = self.entries.index(dim_specs.ellipsis_dim)
            self.left_entries = self.entries[:split_at]
            self.right_entries = self.entries[split_at + 1:]
        else:
            self.left_entries = self.entries
            self.right_entries = []

    def evaluate(self, known_dims: Dict[str, int] = None) -> List[Optional[int]]:
        """Evaluate every entry to a concrete dimension size.

        Raises:
            exception.UnderspecifiedShapeError: if the template has an
                ellipsis, since its rank is then unknown.
        """
        known_dims = known_dims or {}
        if self.has_ellipsis:
            raise exception.UnderspecifiedShapeError(
                "Template with an ellipsis (...) cannot be fully evaluated."
            )
        return [entry.evaluate(known_dims) for entry in self.entries]

    def partial_evaluate(
        self, known_dims: Dict[str, int] = None
    ) -> List[Union[int, str, None]]:
        """Evaluate entries where possible; fall back to repr() otherwise."""
        known_dims = known_dims or {}
        eval_shape: List[Union[int, str, None]] = []
        for entry in self.entries:
            try:
                value = entry.evaluate(known_dims)
            except exception.UnderspecifiedShapeError:
                value = repr(entry)
            eval_shape.append(value)
        return eval_shape

    def rank_matches(self, shape: ShapeType) -> bool:
        """Return True if *shape* has a rank this template can describe."""
        if self.has_ellipsis:
            # The ellipsis may absorb zero or more dimensions.
            return len(shape) >= len(self.entries) - 1
        return len(shape) == len(self.entries)

    def matches(self, shape, known_dims: Dict[str, int] = None) -> bool:
        """Return True iff the rank fits and no entry conflicts with *shape*."""
        known_dims = known_dims or {}
        rank_ok = self.rank_matches(shape)
        # Evaluate every conflict check (as the original did) before combining.
        conflict_flags = [
            entry.has_conflict(dim, known_dims)
            for dim, entry in self.zip_iter(shape)
        ]
        return rank_ok and not any(conflict_flags)

    def zip_iter(self, shape: ShapeType):
        """Yield (dim, entry) pairs: left entries align with the front of
        *shape*, right-of-ellipsis entries align with its tail."""
        yield from zip(shape, self.left_entries)
        if self.right_entries:
            yield from zip(shape[-len(self.right_entries):],
                           self.right_entries)

    def infer(
        self, shape: ShapeType, known_dims: Dict[str, int] = None
    ) -> Dict[str, int]:
        """Run entry-level inference to a fixed point.

        Returns all learned dimensions merged with the initially known ones.
        """
        current_known = dict(known_dims) if known_dims else {}
        while True:
            newly_inferred = {}
            for dim, entry in self.zip_iter(shape):
                newly_inferred.update(entry.infer(dim, current_known))
            current_known.update(newly_inferred)
            if not newly_inferred:
                break
        return current_known

    def __repr__(self) -> str:
        return f"<{self.entries}>"

    def __len__(self) -> int:
        return len(self.entries)
|
[
"tensorguard.exception.UnderspecifiedShapeError"
] |
[((1661, 1762), 'tensorguard.exception.UnderspecifiedShapeError', 'exception.UnderspecifiedShapeError', (['"""Template with an ellipsis (...) cannot be fully evaluated."""'], {}), "(\n 'Template with an ellipsis (...) cannot be fully evaluated.')\n", (1695, 1762), False, 'from tensorguard import exception\n')]
|
"""
Code modified from allen.
"""
import io
import logging
import itertools
from typing import Optional, Tuple, Iterator, Any
import numpy
import torch
from torch.nn.functional import embedding
from ..common import Vocabulary
from ..common.util import printf, get_file_extension
from ..modules import util
# Module-level logger named after this module's import path.
logger = logging.getLogger(__name__)  # pylint: disable=invalid-name
class Embedding(torch.nn.Module):
    """
    A more featureful embedding module than the default in Pytorch. Adds the ability to:
    1. embed higher-order inputs
    2. pre-specify the weight matrix
    3. use a non-trainable embedding
    4. project the resultant embeddings to some other dimension (which only makes sense with
    non-trainable embeddings).
    Parameters
    ----------
    num_embeddings : int
        Size of the dictionary of embeddings (vocabulary size).
    embedding_dim : int
        The size of each embedding vector.
    weight : torch.FloatTensor, (optional, default=None)
        A pre-initialised weight matrix for the embedding lookup, allowing the use of
        pretrained vectors.
    padding_index : int, (optional, default=0)
        If given, pads the output with zeros whenever it encounters the index.
    trainable : bool, (optional, default=True)
        Whether or not to optimize the embedding parameters.
    max_norm : float, (optional, default=None)
        If given, will renormalize the embeddings to always have a norm lesser than this
    norm_type : float, (optional, default=2)
        The p of the p-norm to compute for the max_norm option
    scale_grad_by_freq : boolean, (optional, default=False)
        If given, this will scale gradients by the frequency of the words in the mini-batch.
    sparse : bool, (optional, default=False)
        Whether or not the Pytorch backend should use a sparse representation of the embedding weight.
    vocab_namespace : str, (optional, default=None)
        In case of fine-tuning/transfer learning, the model's embedding matrix needs to be
        extended according to the size of extended-vocabulary. To be able to know how much to
        extend the embedding-matrix, it's necessary to know which vocab_namspace was used to
        construct it in the original training. We store vocab_namespace used during the original
        training as an attribute, so that it can be retrieved during fine-tuning.
    pretrained_file : str, (optional, default=None)
        Used to keep track of what is the source of the weights and loading more embeddings at test time.
        **It does not load the weights from this pretrained_file.** For that purpose, use
        ``Embedding.from_params``.
    Returns
    -------
    An Embedding module.
    """
    def __init__(self,
                 num_embeddings: int,
                 embedding_dim: int,
                 weight: torch.FloatTensor = None,
                 padding_index: int = 0,
                 trainable: bool = True,
                 max_norm: float = None,
                 norm_type: float = 2.,
                 scale_grad_by_freq: bool = False,
                 sparse: bool = False,
                 **kwargs: Any) -> None:
        # NOTE(review): extra **kwargs are accepted but silently ignored here.
        super(Embedding, self).__init__()
        # Cache the lookup options so forward() can pass them to F.embedding.
        self.num_embeddings = num_embeddings
        self.padding_index = padding_index
        self.max_norm = max_norm
        self.norm_type = norm_type
        self.scale_grad_by_freq = scale_grad_by_freq
        self.sparse = sparse
        self.output_dim = embedding_dim
        if weight is None:
            # No pretrained matrix supplied: allocate and Xavier-initialise.
            weight = torch.FloatTensor(num_embeddings, embedding_dim)
            self.weight = torch.nn.Parameter(weight, requires_grad=trainable)
            torch.nn.init.xavier_uniform_(self.weight)
        else:
            if weight.size() != (num_embeddings, embedding_dim):
                raise Exception("A weight matrix was passed with contradictory embedding shapes.")
            self.weight = torch.nn.Parameter(weight, requires_grad=trainable)
        if self.padding_index is not None:
            # Zero out the padding row so padded positions embed to zeros.
            self.weight.data[self.padding_index].fill_(0)
    def forward(self, inputs, **kwargs):  # pylint: disable=arguments-differ
        # inputs may have extra dimensions (batch_size, d1, ..., dn, sequence_length),
        # but embedding expects (batch_size, sequence_length), so pass inputs to
        # util.combine_initial_dims (which is a no-op if there are no extra dimensions).
        # Remember the original size.
        original_size = inputs.size()
        inputs = util.combine_initial_dims(inputs)
        embedded = embedding(inputs, self.weight,
                             padding_idx=self.padding_index,
                             max_norm=self.max_norm,
                             norm_type=self.norm_type,
                             scale_grad_by_freq=self.scale_grad_by_freq,
                             sparse=self.sparse)
        # Now (if necessary) add back in the extra dimensions.
        embedded = util.uncombine_initial_dims(embedded, original_size)
        return embedded
    @classmethod
    def from_pretrain(cls,
                      vocab: Vocabulary,
                      pretrained_file: str,
                      vocab_namespace: str,
                      padding_index: int = 0,
                      trainable: bool = False,
                      max_norm: float = None,
                      norm_type: float = 2.,
                      scale_grad_by_freq: bool = False,
                      sparse: bool = False
                      ) -> 'Embedding':  # type: ignore
        """
        We need the vocabulary here to know how many items we need to embed, and we look for a
        ``vocab_namespace`` key in the parameter dictionary to know which vocabulary to use. If
        you know beforehand exactly how many embeddings you need, or aren't using a vocabulary
        mapping for the things getting embedded here, then you can pass in the ``num_embeddings``
        key directly, and the vocabulary will be ignored.
        In the configuration file, a file containing pretrained embeddings can be specified
        using the parameter ``"pretrained_file"``.
        It can be the path to a local file.
        Format:
        * text file - an utf-8 encoded text file with space separated fields::
                [word] [dim 1] [dim 2] ...
        The text file can eventually be compressed with gzip, bz2, lzma or zip.
        """
        # If we're loading a saved model, we don't want to actually read a pre-trained
        # embedding file - the embeddings will just be in our saved weights, and we might not
        # have the original embedding file anymore, anyway.
        tokens_to_keep = set(vocab.get_index_to_token_vocabulary(vocab_namespace).values())
        vocab_size = vocab.get_vocab_size(vocab_namespace)
        embeddings = dict()
        # First we read the embeddings from the file, only keeping vectors for the words we need.
        printf("Reading pretrained embeddings from file")
        with EmbeddingsTextFile(pretrained_file) as embeddings_file:
            # NOTE(review): embedding_dim is None when the file has no header
            # line; every row would then fail the length check below and the
            # "No embeddings" exception would be raised — confirm all expected
            # input files declare a header.
            embedding_dim = embeddings_file.embedding_dim
            for line in embeddings_file:
                token = line.split(' ', 1)[0]
                if token in tokens_to_keep:
                    fields = line.rstrip().split(' ')
                    if len(fields) - 1 != embedding_dim:
                        # Sometimes there are funny unicode parsing problems that lead to different
                        # fields lengths (e.g., a word with a unicode space character that splits
                        # into more than one column). We skip those lines. Note that if you have
                        # some kind of long header, this could result in all of your lines getting
                        # skipped. It's hard to check for that here; you just have to look in the
                        # embedding_misses_file and at the model summary to make sure things look
                        # like they are supposed to.
                        logger.warning("Found line with wrong number of dimensions (expected: %d; actual: %d): %s",
                                       embedding_dim, len(fields) - 1, line)
                        continue
                    vector = numpy.asarray(fields[1:], dtype='float32')
                    embeddings[token] = vector
        if not embeddings:
            raise Exception("No embeddings of correct dimension found; you probably "
                            "misspecified your embedding_dim parameter, or didn't "
                            "pre-populate your Vocabulary")
        all_embeddings = numpy.asarray(list(embeddings.values()))
        embeddings_mean = float(numpy.mean(all_embeddings))
        embeddings_std = float(numpy.std(all_embeddings))
        # Now we initialize the weight matrix for an embedding layer, starting with random vectors,
        # then filling in the word vectors we just read.
        printf("Initializing pre-trained embedding layer")
        embedding_matrix = torch.FloatTensor(vocab_size, embedding_dim).normal_(embeddings_mean,
                                                                        embeddings_std)
        num_tokens_found = 0
        index_to_token = vocab.get_index_to_token_vocabulary(vocab_namespace)
        for i in range(vocab_size):
            token = index_to_token[i]
            # If we don't have a pre-trained vector for this word, we'll just leave this row alone,
            # so the word has a random initialization.
            if token in embeddings:
                embedding_matrix[i] = torch.FloatTensor(embeddings[token])
                num_tokens_found += 1
            else:
                logger.debug("Token %s was not found in the embedding file. Initialising randomly.", token)
        printf(f"Pretrained embeddings were found for {num_tokens_found} out of {vocab_size} tokens")
        return cls(num_embeddings=embedding_matrix.size(0),
                   embedding_dim=embedding_matrix.size(1),
                   weight=embedding_matrix,
                   padding_index=padding_index,
                   trainable=trainable,
                   max_norm=max_norm,
                   norm_type=norm_type,
                   scale_grad_by_freq=scale_grad_by_freq,
                   sparse=sparse)
class EmbeddingsTextFile(Iterator[str]):
    """
    Utility class for opening embeddings text files. Handles various compression formats,
    as well as context management.
    Parameters
    ----------
    file_uri: a file system path or a URL of an eventually compressed text file
    encoding: str
    """
    DEFAULT_ENCODING = 'utf-8'
    def __init__(self,
                 file_uri: str,
                 encoding: str = DEFAULT_ENCODING) -> None:
        # All the python packages for compressed files share the same interface of io.open
        extension = get_file_extension(file_uri)
        # Some systems don't have support for all of these libraries, so we import them only
        # when necessary.
        package = None
        if extension in ['.txt', '.vec']:
            package = io
        elif extension == '.gz':
            import gzip
            package = gzip
        elif extension == ".bz2":
            import bz2
            package = bz2
        elif extension == ".lzma":
            import lzma
            package = lzma
        if package is None:
            logger.warning('The embeddings file has an unknown file extension "%s". '
                           'We will assume the file is an (uncompressed) text file', extension)
            package = io
        self._handle = package.open(file_uri, 'rt', encoding=encoding)  # type: ignore
        # To use this with tqdm we'd like to know the number of tokens. It's possible that the
        # first line of the embeddings file contains this: if it does, we want to start iteration
        # from the 2nd line, otherwise we want to start from the 1st.
        # Unfortunately, once we read the first line, we cannot move back the file iterator
        # because the underlying file may be "not seekable"; we use itertools.chain instead.
        first_line = next(self._handle)  # this moves the iterator forward
        self.num_tokens, self.embedding_dim = self._read_first_line(first_line)
        if self.num_tokens:
            # the first line is a header line: start iterating from the 2nd line
            self._iterator = self._handle
        else:
            # the first line is not a header line: start iterating from the 1st line
            self._iterator = itertools.chain([first_line], self._handle)
    def read(self) -> str:
        """Consume and return the remainder of the file as a single string."""
        return ''.join(self._iterator)
    def readline(self) -> str:
        """Return the next line; raises StopIteration at end of file."""
        return next(self._iterator)
    def __enter__(self) -> 'EmbeddingsTextFile':
        return self
    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        self._handle.close()
    def __iter__(self) -> 'EmbeddingsTextFile':
        return self
    def __next__(self) -> str:
        return next(self._iterator)
    def __len__(self) -> Optional[int]:
        """ Hack for tqdm: no need for explicitly passing ``total=file.num_tokens`` """
        if self.num_tokens:
            return self.num_tokens
        raise AttributeError('an object of type EmbeddingsTextFile has "len()" only if the underlying '
                             'text file declares the number of tokens (i.e. the number of lines following)'
                             'in the first line. That is not the case of this particular instance.')
    @staticmethod
    def _read_first_line(line: str) -> Optional[Tuple]:
        """ This function takes in input a string and if it contains 1 or 2 integers, it assumes the
        largest one it the number of tokens. Returns (None, None) if the line doesn't match that pattern. """
        fields = line.split(' ')
        if 1 <= len(fields) <= 2:
            try:
                int_fields = [int(x) for x in fields]
            except ValueError:
                return None, None
            else:
                num_tokens, embedding_dim = max(int_fields), min(int_fields)
                logger.info('Recognized a header line with number of tokens: %d',
                            num_tokens)
                return num_tokens, embedding_dim
        # Bug fix: an ordinary vector line ("word 0.1 0.2 ...") has more than
        # two fields; the previous code raised ValueError('Unrecognized header
        # line!') here, which crashed __init__ on any header-less embeddings
        # file (e.g. GloVe). Per the documented contract, signal "no header"
        # instead so iteration starts from the first line.
        return None, None
|
[
"torch.nn.Parameter",
"numpy.std",
"torch.nn.init.xavier_uniform_",
"numpy.asarray",
"torch.FloatTensor",
"torch.nn.functional.embedding",
"numpy.mean",
"itertools.chain",
"logging.getLogger"
] |
[((319, 346), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (336, 346), False, 'import logging\n'), ((4593, 4779), 'torch.nn.functional.embedding', 'embedding', (['inputs', 'self.weight'], {'padding_idx': 'self.padding_index', 'max_norm': 'self.max_norm', 'norm_type': 'self.norm_type', 'scale_grad_by_freq': 'self.scale_grad_by_freq', 'sparse': 'self.sparse'}), '(inputs, self.weight, padding_idx=self.padding_index, max_norm=\n self.max_norm, norm_type=self.norm_type, scale_grad_by_freq=self.\n scale_grad_by_freq, sparse=self.sparse)\n', (4602, 4779), False, 'from torch.nn.functional import embedding\n'), ((3571, 3619), 'torch.FloatTensor', 'torch.FloatTensor', (['num_embeddings', 'embedding_dim'], {}), '(num_embeddings, embedding_dim)\n', (3588, 3619), False, 'import torch\n'), ((3646, 3697), 'torch.nn.Parameter', 'torch.nn.Parameter', (['weight'], {'requires_grad': 'trainable'}), '(weight, requires_grad=trainable)\n', (3664, 3697), False, 'import torch\n'), ((3710, 3752), 'torch.nn.init.xavier_uniform_', 'torch.nn.init.xavier_uniform_', (['self.weight'], {}), '(self.weight)\n', (3739, 3752), False, 'import torch\n'), ((3957, 4008), 'torch.nn.Parameter', 'torch.nn.Parameter', (['weight'], {'requires_grad': 'trainable'}), '(weight, requires_grad=trainable)\n', (3975, 4008), False, 'import torch\n'), ((8776, 8802), 'numpy.mean', 'numpy.mean', (['all_embeddings'], {}), '(all_embeddings)\n', (8786, 8802), False, 'import numpy\n'), ((8835, 8860), 'numpy.std', 'numpy.std', (['all_embeddings'], {}), '(all_embeddings)\n', (8844, 8860), False, 'import numpy\n'), ((12678, 12721), 'itertools.chain', 'itertools.chain', (['[first_line]', 'self._handle'], {}), '([first_line], self._handle)\n', (12693, 12721), False, 'import itertools\n'), ((9105, 9149), 'torch.FloatTensor', 'torch.FloatTensor', (['vocab_size', 'embedding_dim'], {}), '(vocab_size, embedding_dim)\n', (9122, 9149), False, 'import torch\n'), ((9682, 9718), 'torch.FloatTensor', 
'torch.FloatTensor', (['embeddings[token]'], {}), '(embeddings[token])\n', (9699, 9718), False, 'import torch\n'), ((8329, 8371), 'numpy.asarray', 'numpy.asarray', (['fields[1:]'], {'dtype': '"""float32"""'}), "(fields[1:], dtype='float32')\n", (8342, 8371), False, 'import numpy\n')]
|
# gpl: authors <NAME>, <NAME>
import bpy
import os
from bpy.props import IntProperty
from bpy.types import (
Operator,
Panel,
)
from . import functions
# (identifier, label, description) triples — the shape Blender expects for
# enum items. Presumably quality presets for a proxy setting; not referenced
# in this chunk — verify against the rest of the add-on.
proxy_qualities = [
    ("1", "25%", ""), ("2", "50%", ""),
    ("3", "75%", ""), ("4", "100%", ""),
    ("5", "none", "")
]
# functions
def createdatamosh(context, strip):
    """Run the external ``datamosh`` tool on the strip's movie file.

    Honors the add-on preference ``all_keyframes`` (adds the ``-a`` flag).
    Returns the path of the generated ``*_datamosh.avi`` file.
    """
    import subprocess
    preferences = context.user_preferences
    prefs = preferences.addons[__package__].preferences
    fileinput = bpy.path.abspath(strip.filepath)
    fileoutput = fileinput.rpartition(".")[0] + "_datamosh.avi"
    # Argument-list form instead of os.system: the old single-quoted string
    # broke on paths containing quotes/spaces and allowed shell injection.
    command = ["datamosh", fileinput, "-o", fileoutput]
    if prefs.all_keyframes:
        command.insert(2, "-a")
    print(" ".join(command))
    subprocess.call(command)
    return fileoutput
def createavi(context, strip):
    """Remux the strip's movie file into an AVI container (codec copied).

    Returns the path of the generated ``*_.avi`` file; ffmpeg must be on PATH.
    """
    import subprocess
    fileinput = bpy.path.abspath(strip.filepath)
    fileoutput = fileinput.rpartition(".")[0] + "_.avi"
    # Argument-list form instead of os.system: the old single-quoted string
    # broke on paths containing quotes/spaces and allowed shell injection.
    command = ["ffmpeg", "-i", fileinput, "-vcodec", "copy", fileoutput]
    print(" ".join(command))
    subprocess.call(command)
    return fileoutput
def createavimjpeg(context, strip):
    """Re-encode the strip's movie file to MJPEG in an AVI container.

    Returns the path of the generated ``*_mjpeg.avi`` file; ffmpeg must be
    on PATH.
    """
    import subprocess
    fileinput = bpy.path.abspath(strip.filepath)
    fileoutput = fileinput.rpartition(".")[0] + "_mjpeg.avi"
    # Argument-list form instead of os.system: the old single-quoted string
    # broke on paths containing quotes/spaces and allowed shell injection.
    command = ["ffmpeg", "-i", fileinput, "-vcodec", "mjpeg", "-q:v", "1", fileoutput]
    print(" ".join(command))
    subprocess.call(command)
    return fileoutput
# classes
class CreateAvi(Operator):
    """Operator: rewrap the selected MOVIE strips as AVI files.

    ``size`` selects the flavour: 1 = stream copy (same codec),
    2 = MJPEG re-encode. Each strip's filepath is repointed to the new file.
    """
    bl_idname = "sequencer.createavi"
    bl_label = "Create avi file"
    bl_description = "Create an avi output file"
    bl_options = {'REGISTER', 'UNDO'}
    size = IntProperty(
        name="proxysize",
        default=1
    )
    @classmethod
    def poll(self, context):
        strip = functions.act_strip(context)
        scn = context.scene
        if scn and scn.sequence_editor and scn.sequence_editor.active_strip:
            # Bug fix: `strip.type in ('MOVIE')` tested *substring* membership
            # in the string 'MOVIE' (parentheses do not make a tuple), so e.g.
            # a type of 'M' would also pass. Compare for equality instead.
            return strip.type == 'MOVIE'
        else:
            return False
    def execute(self, context):
        strips = functions.get_selected_strips(context)
        for strip in strips:
            # deselect all other strips
            for i in strips:
                i.select = False
            # select current strip
            strip.select = True
            if strip.type == "MOVIE":
                if self.size == 1:
                    fileoutput = createavi(context, strip)
                elif self.size == 2:
                    fileoutput = createavimjpeg(context, strip)
                else:
                    # Bug fix: an unexpected size previously left `fileoutput`
                    # unbound and raised NameError; skip the strip instead.
                    continue
                strip.filepath = fileoutput
        # select all strips again
        for strip in strips:
            try:
                strip.select = True
            except ReferenceError:
                pass
        bpy.ops.sequencer.reload()
        return {'FINISHED'}
class CreateDatamosh(Operator):
    """Operator: run the external datamosh tool on the selected MOVIE strips.

    If the add-on preference ``load_glitch`` is set, each strip's filepath is
    repointed to the glitched output file.
    """
    bl_idname = "sequencer.createdatamosh"
    bl_label = "Create Datamosh"
    bl_description = "Create Datamosh"
    @classmethod
    def poll(self, context):
        strip = functions.act_strip(context)
        scn = context.scene
        if scn and scn.sequence_editor and scn.sequence_editor.active_strip:
            # Bug fix: `strip.type in ('MOVIE')` tested *substring* membership
            # in the string 'MOVIE' (parentheses do not make a tuple), so e.g.
            # a type of 'M' would also pass. Compare for equality instead.
            return strip.type == 'MOVIE'
        else:
            return False
    def execute(self, context):
        preferences = context.user_preferences
        prefs = preferences.addons[__package__].preferences
        strips = functions.get_selected_strips(context)
        for strip in strips:
            # deselect all other strips
            for i in strips:
                i.select = False
            # select current strip
            strip.select = True
            if strip.type == "MOVIE":
                fileoutput = createdatamosh(context, strip)
                if prefs.load_glitch:
                    strip.filepath = fileoutput
        # select all strips again
        for strip in strips:
            try:
                strip.select = True
            except ReferenceError:
                pass
        bpy.ops.sequencer.reload()
        return {'FINISHED'}
class CreateGlitchToolPanel(Panel):
    """Sequencer sidebar panel exposing the glitch operators and preferences."""
    bl_label = "Glitch Tools"
    bl_idname = "OBJECT_PT_GlitchTool"
    bl_space_type = 'SEQUENCE_EDITOR'
    bl_region_type = 'UI'
    @classmethod
    def poll(self, context):
        if context.space_data.view_type in {'SEQUENCER',
                                            'SEQUENCER_PREVIEW'}:
            strip = functions.act_strip(context)
            scn = context.scene
            preferences = context.user_preferences
            prefs = preferences.addons[__package__].preferences
            if scn and scn.sequence_editor and scn.sequence_editor.active_strip:
                if prefs.use_glitch_panel:
                    # Bug fix: `strip.type in ('MOVIE')` tested *substring*
                    # membership in the string 'MOVIE' (parentheses do not make
                    # a tuple). Compare for equality instead.
                    return strip.type == 'MOVIE'
        else:
            return False
    def draw_header(self, context):
        layout = self.layout
        layout.label(text="", icon="GAME")
    def draw(self, context):
        preferences = context.user_preferences
        prefs = preferences.addons[__package__].preferences
        layout = self.layout
        layout.operator("sequencer.createavi", text="Create avi (same codec)")
        layout.operator("sequencer.createavi", text="Create avi (mjpeg)").size = 2
        layout.prop(prefs, "all_keyframes")
        layout.prop(prefs, "load_glitch")
        layout.operator("sequencer.createdatamosh")
|
[
"bpy.props.IntProperty",
"bpy.path.abspath",
"os.system",
"bpy.ops.sequencer.reload"
] |
[((490, 522), 'bpy.path.abspath', 'bpy.path.abspath', (['strip.filepath'], {}), '(strip.filepath)\n', (506, 522), False, 'import bpy\n'), ((796, 814), 'os.system', 'os.system', (['command'], {}), '(command)\n', (805, 814), False, 'import os\n'), ((886, 918), 'bpy.path.abspath', 'bpy.path.abspath', (['strip.filepath'], {}), '(strip.filepath)\n', (902, 918), False, 'import bpy\n'), ((1079, 1097), 'os.system', 'os.system', (['command'], {}), '(command)\n', (1088, 1097), False, 'import os\n'), ((1175, 1207), 'bpy.path.abspath', 'bpy.path.abspath', (['strip.filepath'], {}), '(strip.filepath)\n', (1191, 1207), False, 'import bpy\n'), ((1381, 1399), 'os.system', 'os.system', (['command'], {}), '(command)\n', (1390, 1399), False, 'import os\n'), ((1632, 1672), 'bpy.props.IntProperty', 'IntProperty', ([], {'name': '"""proxysize"""', 'default': '(1)'}), "(name='proxysize', default=1)\n", (1643, 1672), False, 'from bpy.props import IntProperty\n'), ((2737, 2763), 'bpy.ops.sequencer.reload', 'bpy.ops.sequencer.reload', ([], {}), '()\n', (2761, 2763), False, 'import bpy\n'), ((3982, 4008), 'bpy.ops.sequencer.reload', 'bpy.ops.sequencer.reload', ([], {}), '()\n', (4006, 4008), False, 'import bpy\n')]
|
from eth_utils import decode_hex, encode_hex
from web3 import Web3
from raiden.constants import UINT256_MAX
from raiden.utils.signing import pack_data
from raiden.utils.typing import Address, ChannelID
from raiden_contracts.constants import MessageTypeId
class BalanceProof:
    """ A Balance Proof
    If transferred_amount, locked_amount and locksroot are set, balance_proof hash is
    computed using these values. Otherwise a value stored in _balance_hash is returned.
    Serialization will also add these items only if each of transferred_amount, locked_amount
    and locksroot is set.
    """
    def __init__(
        self,
        channel_identifier: ChannelID,
        token_network_address: Address,
        balance_hash: str = None,
        nonce: int = 0,
        additional_hash: str = "0x%064x" % 0,
        chain_id: int = 1,
        signature: str = None,
        transferred_amount: int = None,
        locked_amount: int = 0,
        locksroot: str = "0x%064x" % 0,
    ):
        self.channel_identifier = channel_identifier
        self.token_network_address = token_network_address
        self._balance_hash = balance_hash
        self.additional_hash = additional_hash
        self.nonce = nonce
        self.chain_id = chain_id
        self.signature = signature
        # NOTE(review): this guard uses truthiness, so a zero
        # transferred_amount / locked_amount (or empty locksroot) skips the
        # consistency check against balance_hash — confirm that is intended.
        if transferred_amount and locked_amount and locksroot and balance_hash:
            assert 0 <= transferred_amount <= UINT256_MAX
            assert 0 <= locked_amount <= UINT256_MAX
            assert (
                self.hash_balance_data(transferred_amount, locked_amount, locksroot)
                == balance_hash
            )
        self.transferred_amount = transferred_amount
        self.locked_amount = locked_amount
        self.locksroot = locksroot
    def serialize_bin(self, msg_type: MessageTypeId = MessageTypeId.BALANCE_PROOF):
        # Packs the fields in a fixed type/value order; do not reorder — the
        # resulting bytes are what gets hashed/signed.
        return pack_data(
            ["address", "uint256", "uint256", "uint256", "bytes32", "uint256", "bytes32"],
            [
                self.token_network_address,
                self.chain_id,
                msg_type.value,
                self.channel_identifier,
                decode_hex(self.balance_hash),
                self.nonce,
                decode_hex(self.additional_hash),
            ],
        )
    @property
    def balance_hash(self) -> str:
        # Prefer an explicitly supplied hash; otherwise derive it from the
        # three balance components when all of them are present.
        if self._balance_hash:
            return self._balance_hash
        if None not in (self.transferred_amount, self.locked_amount, self.locksroot):
            assert isinstance(self.transferred_amount, int)
            return encode_hex(
                self.hash_balance_data(self.transferred_amount, self.locked_amount, self.locksroot)
            )
        raise ValueError("Can't compute balance hash")
    @balance_hash.setter
    def balance_hash(self, value) -> None:
        self._balance_hash = value
    @staticmethod
    def hash_balance_data(transferred_amount: int, locked_amount: int, locksroot: str) -> str:
        # Solidity-style keccak over the tightly packed (uint256, uint256,
        # bytes32) triple.
        return Web3.soliditySha3(  # pylint: disable=no-value-for-parameter
            ["uint256", "uint256", "bytes32"], [transferred_amount, locked_amount, locksroot]
        )
|
[
"eth_utils.decode_hex",
"web3.Web3.soliditySha3"
] |
[((2984, 3088), 'web3.Web3.soliditySha3', 'Web3.soliditySha3', (["['uint256', 'uint256', 'bytes32']", '[transferred_amount, locked_amount, locksroot]'], {}), "(['uint256', 'uint256', 'bytes32'], [transferred_amount,\n locked_amount, locksroot])\n", (3001, 3088), False, 'from web3 import Web3\n'), ((2152, 2181), 'eth_utils.decode_hex', 'decode_hex', (['self.balance_hash'], {}), '(self.balance_hash)\n', (2162, 2181), False, 'from eth_utils import decode_hex, encode_hex\n'), ((2227, 2259), 'eth_utils.decode_hex', 'decode_hex', (['self.additional_hash'], {}), '(self.additional_hash)\n', (2237, 2259), False, 'from eth_utils import decode_hex, encode_hex\n')]
|
#!/usr/bin/env python
# Copyright (C) 2016, <NAME>
from __future__ import print_function
from lasm import A,B,C,D,E,F
def boot(a):
    '''Bootstrap loader
    Loads the initial program from a serial memory
    device that uses the SPI protocol.
    This may be anywhere in RAM, except of course
    where the program is being loaded.
    '''
    # Register assignments
    spw = 1; # address of spiword routine
    rem = 2; # remaining count of words
    start = 3; # start address
    top = 6; # top of loop
    #
    # --- entry point and in-line constant pool ---
    a.L('Boot'); # Start here
    a.addi(4, F, F); a.C('Jump around constants');
    a.dc(0x0300); a.C('SPI read command, high SPI address bits')
    a.dc(0x0000); a.C('Byte address of source')
    a.dc(0x0080); a.C('Start address of loaded program')
    a.dc(0x0800); a.C('Program length, in words')
    a.addi(-2, F, C); a.C('Address of program length')
    a.clr(B); a.C('Initialize B latch')
    a.clr(0); a.C('Clear R0')
    a.addi(1, F, spw); a.C('R1 = address of spiword subr')
    a.jmp('L1'); a.C('Jump around subr')
    # --- spiword subroutine: shift one 16-bit word through the SPI port ---
    a.L('spiword')
    a.addi(-8, 0, B); a.C('B = -8')
    a.addi(-8, B, B); a.C('B = -16 -- bit loop counter')
    a.genop(3,1,F,F); a.C('while B < 0')
    a.genop(-5,6,A,A); a.C(' Shift IO through A')
    a.addi(1, B, B); a.C(' B = B + 1')
    a.addi(-4, F, F); a.C(' endloop')
    a.mov(E, F); a.C('return')
    # --- send the two-word read command, then read header words ---
    a.L('L1')
    a.genop(3,4,0,B); a.C(' Deselect SPI')
    a.mov(D, rem); a.C('Init word count')
    a.addi(-3, C, C); a.C('Point at read command')
    a.mov(D, A); a.C('Load it')
    a.addi(1, F, E); a.C('Return address')
    a.mov(spw, F); a.C('Call spiword')
    a.addi(1, C, C); a.C('Second command word')
    a.mov(D, A); a.C('Load it')
    a.addi(1, F, E); a.C('Return address')
    a.mov(spw, F); a.C('Call spiword')
    a.addi(1, C, C); a.C('Point at start address')
    a.mov(D, start); a.C('Load start address, for later')
    a.mov(start, C); a.C('C will hold current dest addr')
    # --- main copy loop: read one word per iteration until rem is exhausted ---
    a.mov(F, top); a.C('Top of loop')
    a.addi(-1, rem, B); a.C('Test for end')
    a.genop(2,1,F,F); a.C('if done')
    a.genop(3,4,0,B); a.C(' Deselect SPI')
    a.mov(start, F); a.C(' Jump to loaded program')
    a.clr(A); a.C('Clear A for tidiness')
    a.addi(1, F, E); a.C('Return address')
    a.mov(spw, F); a.C('Call spiword')
    a.mov(A, D); a.C('Store the word we just read')
    a.addi(1, C, C); a.C('Bump address')
    a.addi(-1,rem,rem); a.C('Decrement count')
    a.mov(top, F); a.C('Jump to top of loop')
if __name__ == "__main__":
    # Assemble the boot loader and dump a hex listing of the program.
    import lasm
    program = lasm.assemble(boot)
    for addr, word in enumerate(program.inst):
        print('%2X: %04X %s' % (addr, word, program.comments[addr]))
|
[
"lasm.assemble"
] |
[((2747, 2766), 'lasm.assemble', 'lasm.assemble', (['boot'], {}), '(boot)\n', (2760, 2766), False, 'import lasm\n')]
|
"""Pressure Sensor Driver Class."""
from typing import Optional, AsyncIterator
from opentrons_hardware.drivers.can_bus.can_messenger import CanMessenger
from contextlib import asynccontextmanager
from opentrons_hardware.firmware_bindings.constants import (
SensorId,
SensorType,
NodeId,
SensorThresholdMode,
)
from opentrons_hardware.firmware_bindings.constants import SensorOutputBinding
from opentrons_hardware.sensors.utils import (
ReadSensorInformation,
PollSensorInformation,
WriteSensorInformation,
SensorDataType,
SensorThresholdInformation,
)
from opentrons_hardware.firmware_bindings.messages.payloads import (
BindSensorOutputRequestPayload,
)
from opentrons_hardware.firmware_bindings.messages.fields import (
SensorOutputBindingField,
SensorTypeField,
SensorIdField,
)
from opentrons_hardware.firmware_bindings.messages.message_definitions import (
BindSensorOutputRequest,
)
from .sensor_abc import AbstractAdvancedSensor
class PressureSensor(AbstractAdvancedSensor):
    """MMR820C04 Driver."""
    def __init__(
        self,
        sensor_id: SensorId = SensorId.S0,
        zero_threshold: float = 0.0,
        stop_threshold: float = 0.0,
        offset: float = 0.0,
    ) -> None:
        """Constructor."""
        super().__init__(
            zero_threshold, stop_threshold, offset, SensorType.pressure, sensor_id
        )
    async def get_report(
        self,
        node_id: NodeId,
        can_messenger: CanMessenger,
        timeout: int = 1,
    ) -> Optional[SensorDataType]:
        """This function retrieves ReadFromResponse messages.
        This is meant to be called after a bind_to_sync call,
        with the sensor being bound to "report".
        """
        # NOTE(review): `timeout` is accepted but not forwarded to the
        # scheduler read — confirm whether the scheduler applies its own.
        return await self._scheduler.read(can_messenger, node_id)
    async def get_baseline(
        self,
        can_messenger: CanMessenger,
        node_id: NodeId,
        poll_for_ms: int,
        sample_rate: int,
        timeout: int = 1,
    ) -> Optional[SensorDataType]:
        """Poll the pressure sensor for ``poll_for_ms`` and return the result."""
        # NOTE(review): `sample_rate` is accepted but unused here — confirm.
        poll = PollSensorInformation(
            self._sensor_type, self._sensor_id, node_id, poll_for_ms
        )
        return await self._scheduler.run_poll(poll, can_messenger, timeout)
    async def poll_temperature(
        self,
        can_messenger: CanMessenger,
        node_id: NodeId,
        poll_for_ms: int,
        timeout: int = 1,
    ) -> Optional[SensorDataType]:
        """Poll the temperature channel of the pressure sensor."""
        poll = PollSensorInformation(
            SensorType.pressure_temperature, self._sensor_id, node_id, poll_for_ms
        )
        return await self._scheduler.run_poll(poll, can_messenger, timeout)
    async def read(
        self,
        can_messenger: CanMessenger,
        node_id: NodeId,
        offset: bool,
        timeout: int = 1,
    ) -> Optional[SensorDataType]:
        """Read a single pressure value (optionally the stored offset)."""
        read = ReadSensorInformation(
            self._sensor_type, self._sensor_id, node_id, offset
        )
        return await self._scheduler.send_read(read, can_messenger, timeout)
    async def read_temperature(
        self,
        can_messenger: CanMessenger,
        node_id: NodeId,
        offset: bool,
        timeout: int = 1,
    ) -> Optional[SensorDataType]:
        """Read a single temperature value from the pressure sensor."""
        read = ReadSensorInformation(
            SensorType.pressure_temperature, self._sensor_id, node_id, offset
        )
        return await self._scheduler.send_read(read, can_messenger, timeout)
    async def write(
        self,
        can_messenger: CanMessenger,
        node_id: NodeId,
        data: SensorDataType,
    ) -> None:
        """Write to a register of the pressure sensor."""
        write = WriteSensorInformation(
            self._sensor_type, self._sensor_id, node_id, data
        )
        await self._scheduler.send_write(write, can_messenger)
    async def send_zero_threshold(
        self,
        can_messenger: CanMessenger,
        node_id: NodeId,
        threshold: SensorDataType,
        timeout: int = 1,
    ) -> Optional[SensorDataType]:
        """Send the zero threshold which the offset value is compared to."""
        write = SensorThresholdInformation(
            self._sensor_type,
            self._sensor_id,
            node_id,
            threshold,
            SensorThresholdMode.absolute,
        )
        threshold_data = await self._scheduler.send_threshold(
            write, can_messenger, timeout
        )
        # Cache the acknowledged threshold locally so later comparisons use
        # the value the firmware actually accepted.
        if threshold_data:
            self.zero_threshold = threshold_data.to_float()
        return threshold_data
    @asynccontextmanager
    async def bind_output(
        self,
        can_messenger: CanMessenger,
        node_id: NodeId,
        binding: SensorOutputBinding = SensorOutputBinding.sync,
    ) -> AsyncIterator[None]:
        """Send a BindSensorOutputRequest.

        Context manager: binds the sensor output on entry and always rebinds
        it to ``SensorOutputBinding.none`` on exit, even if the body raises.
        """
        try:
            await can_messenger.send(
                node_id=node_id,
                message=BindSensorOutputRequest(
                    payload=BindSensorOutputRequestPayload(
                        sensor=SensorTypeField(self._sensor_type),
                        sensor_id=SensorIdField(self._sensor_id),
                        binding=SensorOutputBindingField(binding),
                    )
                ),
            )
            yield
        finally:
            await can_messenger.send(
                node_id=node_id,
                message=BindSensorOutputRequest(
                    payload=BindSensorOutputRequestPayload(
                        sensor=SensorTypeField(self._sensor_type),
                        sensor_id=SensorIdField(self._sensor_id),
                        binding=SensorOutputBindingField(SensorOutputBinding.none),
                    )
                ),
            )
    async def get_device_status(
        self,
        can_messenger: CanMessenger,
        node_id: NodeId,
        timeout: int = 1,
    ) -> bool:
        """Send a PeripheralStatusRequest and read the response message."""
        return await self._scheduler.request_peripheral_status(
            self._sensor_type, self._sensor_id, node_id, can_messenger, timeout
        )
|
[
"opentrons_hardware.sensors.utils.WriteSensorInformation",
"opentrons_hardware.firmware_bindings.messages.fields.SensorOutputBindingField",
"opentrons_hardware.firmware_bindings.messages.fields.SensorTypeField",
"opentrons_hardware.sensors.utils.PollSensorInformation",
"opentrons_hardware.firmware_bindings.messages.fields.SensorIdField",
"opentrons_hardware.sensors.utils.SensorThresholdInformation",
"opentrons_hardware.sensors.utils.ReadSensorInformation"
] |
[((2099, 2178), 'opentrons_hardware.sensors.utils.PollSensorInformation', 'PollSensorInformation', (['self._sensor_type', 'self._sensor_id', 'node_id', 'poll_for_ms'], {}), '(self._sensor_type, self._sensor_id, node_id, poll_for_ms)\n', (2120, 2178), False, 'from opentrons_hardware.sensors.utils import ReadSensorInformation, PollSensorInformation, WriteSensorInformation, SensorDataType, SensorThresholdInformation\n'), ((2528, 2625), 'opentrons_hardware.sensors.utils.PollSensorInformation', 'PollSensorInformation', (['SensorType.pressure_temperature', 'self._sensor_id', 'node_id', 'poll_for_ms'], {}), '(SensorType.pressure_temperature, self._sensor_id,\n node_id, poll_for_ms)\n', (2549, 2625), False, 'from opentrons_hardware.sensors.utils import ReadSensorInformation, PollSensorInformation, WriteSensorInformation, SensorDataType, SensorThresholdInformation\n'), ((2951, 3025), 'opentrons_hardware.sensors.utils.ReadSensorInformation', 'ReadSensorInformation', (['self._sensor_type', 'self._sensor_id', 'node_id', 'offset'], {}), '(self._sensor_type, self._sensor_id, node_id, offset)\n', (2972, 3025), False, 'from opentrons_hardware.sensors.utils import ReadSensorInformation, PollSensorInformation, WriteSensorInformation, SensorDataType, SensorThresholdInformation\n'), ((3368, 3460), 'opentrons_hardware.sensors.utils.ReadSensorInformation', 'ReadSensorInformation', (['SensorType.pressure_temperature', 'self._sensor_id', 'node_id', 'offset'], {}), '(SensorType.pressure_temperature, self._sensor_id,\n node_id, offset)\n', (3389, 3460), False, 'from opentrons_hardware.sensors.utils import ReadSensorInformation, PollSensorInformation, WriteSensorInformation, SensorDataType, SensorThresholdInformation\n'), ((3773, 3846), 'opentrons_hardware.sensors.utils.WriteSensorInformation', 'WriteSensorInformation', (['self._sensor_type', 'self._sensor_id', 'node_id', 'data'], {}), '(self._sensor_type, self._sensor_id, node_id, data)\n', (3795, 3846), False, 'from 
opentrons_hardware.sensors.utils import ReadSensorInformation, PollSensorInformation, WriteSensorInformation, SensorDataType, SensorThresholdInformation\n'), ((4233, 4349), 'opentrons_hardware.sensors.utils.SensorThresholdInformation', 'SensorThresholdInformation', (['self._sensor_type', 'self._sensor_id', 'node_id', 'threshold', 'SensorThresholdMode.absolute'], {}), '(self._sensor_type, self._sensor_id, node_id,\n threshold, SensorThresholdMode.absolute)\n', (4259, 4349), False, 'from opentrons_hardware.sensors.utils import ReadSensorInformation, PollSensorInformation, WriteSensorInformation, SensorDataType, SensorThresholdInformation\n'), ((5143, 5177), 'opentrons_hardware.firmware_bindings.messages.fields.SensorTypeField', 'SensorTypeField', (['self._sensor_type'], {}), '(self._sensor_type)\n', (5158, 5177), False, 'from opentrons_hardware.firmware_bindings.messages.fields import SensorOutputBindingField, SensorTypeField, SensorIdField\n'), ((5213, 5243), 'opentrons_hardware.firmware_bindings.messages.fields.SensorIdField', 'SensorIdField', (['self._sensor_id'], {}), '(self._sensor_id)\n', (5226, 5243), False, 'from opentrons_hardware.firmware_bindings.messages.fields import SensorOutputBindingField, SensorTypeField, SensorIdField\n'), ((5277, 5310), 'opentrons_hardware.firmware_bindings.messages.fields.SensorOutputBindingField', 'SensorOutputBindingField', (['binding'], {}), '(binding)\n', (5301, 5310), False, 'from opentrons_hardware.firmware_bindings.messages.fields import SensorOutputBindingField, SensorTypeField, SensorIdField\n'), ((5613, 5647), 'opentrons_hardware.firmware_bindings.messages.fields.SensorTypeField', 'SensorTypeField', (['self._sensor_type'], {}), '(self._sensor_type)\n', (5628, 5647), False, 'from opentrons_hardware.firmware_bindings.messages.fields import SensorOutputBindingField, SensorTypeField, SensorIdField\n'), ((5683, 5713), 'opentrons_hardware.firmware_bindings.messages.fields.SensorIdField', 'SensorIdField', (['self._sensor_id'], 
{}), '(self._sensor_id)\n', (5696, 5713), False, 'from opentrons_hardware.firmware_bindings.messages.fields import SensorOutputBindingField, SensorTypeField, SensorIdField\n'), ((5747, 5797), 'opentrons_hardware.firmware_bindings.messages.fields.SensorOutputBindingField', 'SensorOutputBindingField', (['SensorOutputBinding.none'], {}), '(SensorOutputBinding.none)\n', (5771, 5797), False, 'from opentrons_hardware.firmware_bindings.messages.fields import SensorOutputBindingField, SensorTypeField, SensorIdField\n')]
|
import os
import sys
import dronekit  # NOTE(review): imported but unused in this script — confirm before removing
import getpass
from time import sleep

# Number of instances to launch, from the command line: python3 <script> N
N = int(sys.argv[1])

# Each command opens one gnome-terminal window with one tab per instance.
launch_string_mopso = "gnome-terminal"
launch_string = "gnome-terminal"

# All launched scripts live in the user's IAF code base; change directory once
# up front (the original re-ran this chdir on every loop iteration) so the
# relative script paths passed to gnome-terminal resolve correctly.
os.chdir("/home/" + getpass.getuser() + "/IAF/Integrated_CodeBase/")

for i in range(N):
    # One server tab per instance, each receiving its index as argv[1].
    launch_string += " --tab -- python3 modified_server7.py " + str(i)
    sleep(0.1)  # kept from the original; only paces the string building
os.system(launch_string)
sleep(3)  # give the servers time to come up before launching the swarm
print("sleep_over")

for i in range(N):
    # One swarm-controller tab per instance.
    launch_string_mopso += " --tab -- python3 Iteration6_Integrated_Swarm.py " + str(i)
    sleep(0.1)
print("doing_this")
os.system(launch_string_mopso)
print("done")
|
[
"getpass.getuser",
"os.system",
"time.sleep"
] |
[((331, 355), 'os.system', 'os.system', (['launch_string'], {}), '(launch_string)\n', (340, 355), False, 'import os\n'), ((357, 365), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (362, 365), False, 'from time import sleep\n'), ((588, 618), 'os.system', 'os.system', (['launch_string_mopso'], {}), '(launch_string_mopso)\n', (597, 618), False, 'import os\n'), ((319, 329), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (324, 329), False, 'from time import sleep\n'), ((557, 567), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (562, 567), False, 'from time import sleep\n'), ((203, 220), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (218, 220), False, 'import getpass\n'), ((424, 441), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (439, 441), False, 'import getpass\n')]
|
from graphics import GraphWin,Point,Line,Text,update

def trans_X(Xw):
    """Map a world x-coordinate into viewport (pixel) x-coordinate."""
    return int((Xw-Xwmin)/(Xwmax-Xwmin)*(Xvmax-Xvmin)+Xvmin)

def trans_Y(Yw):
    """Map a world y-coordinate into viewport y-coordinate (y axis flipped)."""
    return int(Yvmax - ((Yw-Ywmin)/(Ywmax-Ywmin)*(Yvmax-Yvmin)))

# World-coordinate window bounds.
Xwmin = -4
Xwmax = 4
Ywmax = 6
Ywmin = -2
# Viewport (screen) bounds, taken from the user.
Xvmin = 0
Xvmax = int(input("Enter your screen width : "))
Yvmax = int(input("Enter your screen height: "))
Yvmin = 0
win = GraphWin("<NAME> - 18/424179/PA/18284", Xvmax, Yvmax,autoflush=False)
# Draw the x axis from (-3, 0) to (3, 0).
p1 = Point(trans_X(-3),trans_Y(0))
p2 = Point(trans_X(3),trans_Y(0))
line = Line(p1,p2)
line.draw(win)
# Draw the y axis from (0, 5) to (0, -1).
p1 = Point(trans_X(0),trans_Y(5))
p2 = Point(trans_X(0),trans_Y(-1))
line = Line(p1,p2)
line.draw(win)
# Tick labels along the x axis (skip the origin).
i = -3
while i <= 3:
    if i!=0:
        p1 = Point(trans_X(i),trans_Y(-0.1))
        teks = Text(p1,i)
        teks.draw(win)
    i = i+1
# Tick labels along the y axis (skip the origin).
i=-1
while i<=5:
    if i!=0:
        p1 = Point(trans_X(0.05),trans_Y(i))
        teks = Text(p1,i)
        teks.draw(win)
    i = i+1
# Plot y = x^2 point-by-point over [-2, 2].
x = float(-2)
while x <= 2 :
    nx = x
    ny = x**2
    point = Point(trans_X(nx),trans_Y(ny))
    point.draw(win)
    update(2000)
    x = x + 0.001
win.getMouse()
win.close()
# BUGFIX: removed a stray `write()` call here — the name is undefined and
# raised NameError after the window was closed.
|
[
"graphics.Text",
"graphics.update",
"graphics.Line",
"graphics.GraphWin"
] |
[((398, 468), 'graphics.GraphWin', 'GraphWin', (['"""<NAME> - 18/424179/PA/18284"""', 'Xvmax', 'Yvmax'], {'autoflush': '(False)'}), "('<NAME> - 18/424179/PA/18284', Xvmax, Yvmax, autoflush=False)\n", (406, 468), False, 'from graphics import GraphWin, Point, Line, Text, update\n'), ((549, 561), 'graphics.Line', 'Line', (['p1', 'p2'], {}), '(p1, p2)\n', (553, 561), False, 'from graphics import GraphWin, Point, Line, Text, update\n'), ((656, 668), 'graphics.Line', 'Line', (['p1', 'p2'], {}), '(p1, p2)\n', (660, 668), False, 'from graphics import GraphWin, Point, Line, Text, update\n'), ((1108, 1120), 'graphics.update', 'update', (['(2000)'], {}), '(2000)\n', (1114, 1120), False, 'from graphics import GraphWin, Point, Line, Text, update\n'), ((785, 796), 'graphics.Text', 'Text', (['p1', 'i'], {}), '(p1, i)\n', (789, 796), False, 'from graphics import GraphWin, Point, Line, Text, update\n'), ((930, 941), 'graphics.Text', 'Text', (['p1', 'i'], {}), '(p1, i)\n', (934, 941), False, 'from graphics import GraphWin, Point, Line, Text, update\n')]
|
# Copyright (c) 2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import glob
import pandas as pd
import tensorflow as tf
import matplotlib.pyplot as plt
import seaborn as sns
# Roots of the result trees written by the FL experiments: the site-1 client's
# TensorBoard event files and the server's cross-site validation output.
client_results_root = "./workspaces/results/site1"
server_results_root = "./workspaces/results/server"
# 4.1 Central vs. FedAvg
# Maps a display name to the run directory and the TB scalar tag to plot.
experiments = {"cifar10_central": {"run": "run_1", "tag": "val_acc_local_model"},
               "cifar10_fedavg": {"run": "run_2", "tag": "val_acc_global_model"},
               "cifar10_fedavg_he": {"run": "run_8", "tag": "val_acc_global_model"}}
# # 4.2 Impact of client data heterogeneity
# experiments = {"cifar10_fedavg (alpha=1.0)": {"run": "run_2", "tag": "val_acc_global_model"},
#                "cifar10_fedavg (alpha=0.5)": {"run": "run_3", "tag": "val_acc_global_model"},
#                "cifar10_fedavg (alpha=0.3)": {"run": "run_4", "tag": "val_acc_global_model"},
#                "cifar10_fedavg (alpha=0.1)": {"run": "run_5", "tag": "val_acc_global_model"}}
#
# # 4.3 FedProx vs. FedOpt
# experiments = {"cifar10_fedavg": {"run": "run_5", "tag": "val_acc_global_model"},
#                "cifar10_fedprox": {"run": "run_6", "tag": "val_acc_global_model"},
#                "cifar10_fedopt": {"run": "run_7", "tag": "val_acc_global_model"}}
# When True, also read the server's cross_site_val.json for each run.
add_cross_site_val = True
def read_eventfile(filepath, tags=None):
    """Read scalar summaries from a TensorBoard event file.

    Args:
        filepath: Path to a TF events file.
        tags: List of scalar tags to extract. Defaults to
            ["val_acc_global_model"].

    Returns:
        Dict mapping each tag that was found to a list of
        [step, simple_value] pairs, in file order.
    """
    # Fix: the original used a mutable default argument (tags=[...]);
    # build the default per call instead.
    if tags is None:
        tags = ["val_acc_global_model"]
    data = {}
    for summary in tf.compat.v1.train.summary_iterator(filepath):
        for v in summary.summary.value:
            if v.tag in tags:
                # Accumulate (step, value) pairs per requested tag.
                data.setdefault(v.tag, []).append([summary.step, v.simple_value])
    return data
def add_eventdata(data, config, filepath, tag="val_acc_global_model"):
    """Append one run's scalar curve to the accumulator `data`.

    Reads `tag` from the event file at `filepath` and appends one row
    (Config, Step, Accuracy) per recorded event to the column lists in
    `data`, which is mutated in place.
    """
    events = read_eventfile(filepath, tags=[tag])
    assert len(events[tag]) > 0, f"No data for key {tag}"
    for step, value in events[tag]:
        data["Config"].append(config)
        data["Step"].append(step)
        data["Accuracy"].append(value)
    print(f"added {len(events[tag])} entries for {tag}")
def main():
    """Collect per-run training curves (and optional cross-site validation
    results), print them, and plot accuracy-vs-step per experiment config.
    """
    # Column-oriented accumulator for the training curves of all runs.
    data = {
        "Config": [],
        "Step": [],
        "Accuracy": []
    }
    if add_cross_site_val:
        # Metrics to pull from each run's cross_site_val.json.
        xsite_keys = ["SRV_server", "SRV_server_best"]
        xsite_data = {
            "Config": []
        }
        for k in xsite_keys:
            xsite_data.update({k: []})
    else:
        xsite_data = None
        xsite_keys = None

    # add event files
    for config, exp in experiments.items():
        # Each run directory is expected to contain exactly one events file.
        eventfile = glob.glob(os.path.join(client_results_root, exp["run"] + "/**/events.*"), recursive=True)
        assert len(eventfile) == 1, "No unique event file found!"
        eventfile = eventfile[0]
        print("adding", eventfile)
        add_eventdata(data, config, eventfile, tag=exp["tag"])

        if add_cross_site_val:
            # Likewise, exactly one cross-site validation JSON per run.
            xsite_file = glob.glob(os.path.join(server_results_root, exp["run"] + "/**/cross_site_val.json"), recursive=True)
            assert len(xsite_file) == 1, "No unique x-site file found!"
            with open(xsite_file[0], "r") as f:
                xsite_results = json.load(f)

            xsite_data["Config"].append(config)
            for k in xsite_keys:
                # Only site-1's validation accuracy is tabulated here.
                xsite_data[k].append(xsite_results["site-1"][k]["val_accuracy"])

    print("Training TB data:")
    print(pd.DataFrame(data))

    if xsite_data:
        print("Cross-site val data:")
        print(pd.DataFrame(xsite_data))

    # One line per experiment config, accuracy over training steps.
    sns.lineplot(x="Step", y="Accuracy", hue="Config", data=data)
    plt.show()


if __name__ == "__main__":
    main()
|
[
"pandas.DataFrame",
"seaborn.lineplot",
"json.load",
"matplotlib.pyplot.show",
"tensorflow.compat.v1.train.summary_iterator",
"os.path.join"
] |
[((1927, 1972), 'tensorflow.compat.v1.train.summary_iterator', 'tf.compat.v1.train.summary_iterator', (['filepath'], {}), '(filepath)\n', (1962, 1972), True, 'import tensorflow as tf\n'), ((4150, 4211), 'seaborn.lineplot', 'sns.lineplot', ([], {'x': '"""Step"""', 'y': '"""Accuracy"""', 'hue': '"""Config"""', 'data': 'data'}), "(x='Step', y='Accuracy', hue='Config', data=data)\n", (4162, 4211), True, 'import seaborn as sns\n'), ((4216, 4226), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4224, 4226), True, 'import matplotlib.pyplot as plt\n'), ((4027, 4045), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (4039, 4045), True, 'import pandas as pd\n'), ((3222, 3284), 'os.path.join', 'os.path.join', (['client_results_root', "(exp['run'] + '/**/events.*')"], {}), "(client_results_root, exp['run'] + '/**/events.*')\n", (3234, 3284), False, 'import os\n'), ((4119, 4143), 'pandas.DataFrame', 'pd.DataFrame', (['xsite_data'], {}), '(xsite_data)\n', (4131, 4143), True, 'import pandas as pd\n'), ((3566, 3639), 'os.path.join', 'os.path.join', (['server_results_root', "(exp['run'] + '/**/cross_site_val.json')"], {}), "(server_results_root, exp['run'] + '/**/cross_site_val.json')\n", (3578, 3639), False, 'import os\n'), ((3809, 3821), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3818, 3821), False, 'import json\n')]
|
"""
Preprocess road / stream crossings into data needed by tippecanoe for creating vector tiles.
Input:
* USGS Road / Stream crossings, projected to match SARP standard projection (Albers CONUS).
* pre-processed and snapped small barriers
Outputs:
`data/barriers/intermediate/road_crossings.feather`: road / stream crossing data for merging in with small barriers that do not have networks
"""
from pathlib import Path
from time import time
import warnings
import geopandas as gp
import pygeos as pg
from pyogrio import read_dataframe
from analysis.constants import CRS
from analysis.prep.barriers.lib.duplicates import mark_duplicates
from analysis.prep.barriers.lib.spatial_joins import add_spatial_joins
warnings.filterwarnings("ignore", message=".*initial implementation of Parquet.*")
start = time()
data_dir = Path("data")
boundaries_dir = data_dir / "boundaries"
barriers_dir = data_dir / "barriers"
src_dir = barriers_dir / "source"
qa_dir = barriers_dir / "qa"
print("Reading road crossings")
# rename columns to match small barriers
# NOTE: tiger2020_feature_names is a combination of multiple road names
df = read_dataframe(
src_dir / "stream_crossings_united_states_feb_2022.gpkg",
layer="stream_crossing_sites",
columns=[
"stream_crossing_id",
"tiger2020_feature_names",
"nhdhr_gnis_stream_name",
"crossing_type",
],
).rename(
columns={
"tiger2020_feature_names": "Road",
"nhdhr_gnis_stream_name": "Stream",
"stream_crossing_id": "SARPID",
"crossing_type": "crossingtype",
}
)
print(f"Read {len(df):,} road crossings")
# project HUC4 to match crossings
huc4 = gp.read_feather(boundaries_dir / "huc4.feather", columns=["geometry"]).to_crs(
df.crs
)
tree = pg.STRtree(df.geometry.values.data)
ix = tree.query_bulk(huc4.geometry.values.data, predicate="intersects")[1]
df = df.take(ix).reset_index(drop=True)
print(f"Selected {len(df):,} road crossings in region")
# use original latitude / longitude (NAD83) values
lon, lat = pg.get_coordinates(df.geometry.values.data).astype("float32").T
df["lon"] = lon
df["lat"] = lat
# project to match SARP CRS
df = df.to_crs(CRS)
df["id"] = df.index.astype("uint32")
df = df.set_index("id", drop=False)
# There are a bunch of crossings with identical coordinates, remove them
# NOTE: they have different labels, but that seems like it is a result of
# them methods used to identify the crossings (e.g., named highways, roads, etc)
print("Removing duplicate crossings...")
# round to int
x, y = pg.get_coordinates(df.geometry.values.data).astype("int").T
df["x"] = x
df["y"] = y
keep_ids = df[["x", "y", "id"]].groupby(["x", "y"]).first().reset_index().id
print(f"Dropping {len(df) - len(keep_ids):,} duplicate road crossings")
df = df.loc[keep_ids].copy()
### Remove crossings that are very close
print("Removing nearby road crossings...")
# consider 5 m nearby
df = mark_duplicates(df, 5)
print(f"Dropping {df.duplicate.sum():,} very close road crossings")
df = (
df.loc[~df.duplicate]
.drop(columns=["duplicate", "dup_count", "dup_group"])
.reset_index(drop=True)
)
print(f"now have {len(df):,} road crossings")
# Cleanup fields
df.Stream = df.Stream.str.strip().fillna("")
df.Road = df.Road.str.strip().fillna("")
df.loc[
(df.Stream.str.strip().str.len() > 0) & (df.Road.str.strip().str.len() > 0), "Name"
] = (df.Stream + " / " + df.Road)
df.Name = df.Name.fillna("")
# match dtype of SARPID elsewhere
df.SARPID = "cr" + df.SARPID.round().astype(int).astype(str)
df = add_spatial_joins(df)
print(f"now have {len(df):,} road crossings after spatial joins")
# Cleanup HUC, state, county, and ecoregion columns that weren't assigned
for col in [
"HUC2",
"HUC6",
"HUC8",
"HUC12",
"Basin",
"County",
"COUNTYFIPS",
"STATEFIPS",
"State",
"ECO3",
"ECO4",
]:
df[col] = df[col].fillna("")
df.reset_index(drop=True).to_feather(src_dir / "road_crossings.feather")
print(f"Done in {time() - start:.2f}")
|
[
"warnings.filterwarnings",
"pyogrio.read_dataframe",
"geopandas.read_feather",
"time.time",
"pathlib.Path",
"pygeos.STRtree",
"analysis.prep.barriers.lib.spatial_joins.add_spatial_joins",
"analysis.prep.barriers.lib.duplicates.mark_duplicates",
"pygeos.get_coordinates"
] |
[((715, 802), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'message': '""".*initial implementation of Parquet.*"""'}), "('ignore', message=\n '.*initial implementation of Parquet.*')\n", (738, 802), False, 'import warnings\n'), ((807, 813), 'time.time', 'time', ([], {}), '()\n', (811, 813), False, 'from time import time\n'), ((826, 838), 'pathlib.Path', 'Path', (['"""data"""'], {}), "('data')\n", (830, 838), False, 'from pathlib import Path\n'), ((1776, 1811), 'pygeos.STRtree', 'pg.STRtree', (['df.geometry.values.data'], {}), '(df.geometry.values.data)\n', (1786, 1811), True, 'import pygeos as pg\n'), ((2935, 2957), 'analysis.prep.barriers.lib.duplicates.mark_duplicates', 'mark_duplicates', (['df', '(5)'], {}), '(df, 5)\n', (2950, 2957), False, 'from analysis.prep.barriers.lib.duplicates import mark_duplicates\n'), ((3562, 3583), 'analysis.prep.barriers.lib.spatial_joins.add_spatial_joins', 'add_spatial_joins', (['df'], {}), '(df)\n', (3579, 3583), False, 'from analysis.prep.barriers.lib.spatial_joins import add_spatial_joins\n'), ((1134, 1347), 'pyogrio.read_dataframe', 'read_dataframe', (["(src_dir / 'stream_crossings_united_states_feb_2022.gpkg')"], {'layer': '"""stream_crossing_sites"""', 'columns': "['stream_crossing_id', 'tiger2020_feature_names', 'nhdhr_gnis_stream_name',\n 'crossing_type']"}), "(src_dir / 'stream_crossings_united_states_feb_2022.gpkg',\n layer='stream_crossing_sites', columns=['stream_crossing_id',\n 'tiger2020_feature_names', 'nhdhr_gnis_stream_name', 'crossing_type'])\n", (1148, 1347), False, 'from pyogrio import read_dataframe\n'), ((1676, 1746), 'geopandas.read_feather', 'gp.read_feather', (["(boundaries_dir / 'huc4.feather')"], {'columns': "['geometry']"}), "(boundaries_dir / 'huc4.feather', columns=['geometry'])\n", (1691, 1746), True, 'import geopandas as gp\n'), ((2047, 2090), 'pygeos.get_coordinates', 'pg.get_coordinates', (['df.geometry.values.data'], {}), '(df.geometry.values.data)\n', (2065, 2090), 
True, 'import pygeos as pg\n'), ((2559, 2602), 'pygeos.get_coordinates', 'pg.get_coordinates', (['df.geometry.values.data'], {}), '(df.geometry.values.data)\n', (2577, 2602), True, 'import pygeos as pg\n'), ((4016, 4022), 'time.time', 'time', ([], {}), '()\n', (4020, 4022), False, 'from time import time\n')]
|
# AWS Glue ETL job: extract strike-off objections (and their attachments) from
# the Glue catalog (Mongo extract), flatten the nested documents, and load the
# result into two Redshift tables via a JDBC connection.
import sys
from awsglue.transforms import *
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job

# Job parameters supplied at trigger time.
args = getResolvedOptions(sys.argv, ['TempDir','JOB_NAME','database','s3_staging_path','job_connection'])
sc = SparkContext()
glueContext = GlueContext(sc)
spark = glueContext.spark_session
job = Job(glueContext)
job.init(args['JOB_NAME'], args)
s3_staging_path = args['s3_staging_path']
database = args['database']
job_connection = args['job_connection']
# Target Redshift table names.
dbtable_attachments = "strike_off_objection_attachment"
dbtable_objections = "strike_off_objection"
# Dont Use Bookmarks with this ETL as aws does not upsert instead we have to delete * from table first
preactions_attachments = "delete from strike_off_objection_attachment;"
preactions_objections = "delete from strike_off_objection;"
# TIP - use myDynamicFrame.printSchema() and .show() to log out schema and contents of the dynamic frame
# TIP - If you need to perform sql-like queries on the data you can convert the DynamicFrame to a Apache Spark SQL DataFrame using myDynFrame.toDF()
#       Then you can import the pyspark.sql functions and use them to manipulate the data
#       To convert back to an AWS Glue DynamicFrame use DynamicFrame.fromDF(myDataFrame, glueContext, "<name of resulting DynamicFrame>")
Datasource0 = glueContext.create_dynamic_frame.from_catalog(database = "strike-off-objections-mongo-extract", table_name = "strike_off_objections", transformation_ctx = "Datasource0")
# This will flatten nested schema in a DynamicFrame and pivots out array columns from the flattened frame.
# Also writes temporary files to the staging_path bucket
relationalized_json = Datasource0.relationalize(root_table_name = "root", staging_path = s3_staging_path)
# 'root_attachments' holds the pivoted attachments array; 'root' the objections.
root_df_attachments = relationalized_json.select('root_attachments')
root_df = relationalized_json.select('root')
## Do the Attachments first
# relationalize will create empty attachments record for objections that have no attachments, so filter them out
# (Filter/Join/ApplyMapping/ResolveChoice/DropNullFields come from `awsglue.transforms import *`)
df_filtered_attachments = Filter.apply(frame = root_df_attachments, f = lambda x: x["attachments.val.id"] != '')
# Make the join between the attachments in df_filtered_attachments and the objections in the root_df
df_attachments_joined = Join.apply(df_filtered_attachments, root_df, 'id', 'attachments')
# Map the attachment field names into what will become the column names in Redshift
df_attachments_mapped = ApplyMapping.apply(frame = df_attachments_joined, mappings =
[("`attachments.val.id`", "string", "id", "string"),
("_id", "string", "strike_off_objection_id", "string"),
("`attachments.val.name`", "string", "name", "string"),
("`attachments.val.content_type`", "string", "content_type", "string"),
("`attachments.val.size`", "int", "size", "int"),
("`attachments.val.links.linksMap.self`", "string", "link_self", "string"),
("`attachments.val.links.linksMap.download`", "string", "link_download", "string")], transformation_ctx = "df_attachments_mapped")
# If there are any ambiguous data types, "make_cols" will create new columns for them eg fieldname_date or _string
# Hopefully we won't have anything unexpected as the mapping should resolve this above
df_attachments_resolved = ResolveChoice.apply(frame = df_attachments_mapped, choice = "make_cols", transformation_ctx = "df_attachments_resolved")
# If we have any records that are just nulls then drop them
df_attachments_dropped_null_fields = DropNullFields.apply(frame = df_attachments_resolved, transformation_ctx = "df_attachments_dropped_null_fields")
# Write the attachments data in the df_attachments_dropped_null_fields dynamic frame into Redshift
datasink_attachments = glueContext.write_dynamic_frame.from_jdbc_conf(frame = df_attachments_dropped_null_fields,
catalog_connection = job_connection, connection_options = {"preactions": preactions_attachments, "dbtable": dbtable_attachments, "database": database}, redshift_tmp_dir = args["TempDir"], transformation_ctx = "datasink_attachments")
## Now do the Objections
# Map the objections field names into what will become the column names in Redshift
df_obj_mapped = ApplyMapping.apply(frame = root_df, mappings =
[("_id", "string", "id", "string"),
("`created_on.$date`", "string", "created_on", "timestamp"),
("`created_by.id`", "string", "created_by_id", "string"),
("`created_by.email`", "string", "created_by_email", "string"),
("`created_by.full_name`", "string", "created_by_full_name", "string"),
("`created_by.share_identity`", "boolean", "created_by_share_identity", "boolean"),
("`company_number`", "string", "company_number", "string"),
("`status`", "string", "status", "string"),
("`status_changed_on.$date`", "string", "status_changed_on", "timestamp"),
("`reason`", "string", "reason", "string"),
("`action_code`", "int", "action_code", "int"),
("`http_request_id`", "string", "http_request_id", "string"),
("`links.linksMap.self`", "string", "links_self", "string")], transformation_ctx = "df_obj_mapped")
# If there are any ambiguous data types, "make_cols" will create new columns for them eg fieldname_date or _string
# Hopefully we won't have anything unexpected as the mapping should resolve this above
df_obj_resolved = ResolveChoice.apply(frame = df_obj_mapped, choice = "make_cols", transformation_ctx = "df_obj_resolved")
# If we have any records that are just nulls then drop them
df_obj_dropped_null_fields = DropNullFields.apply(frame = df_obj_resolved, transformation_ctx = "df_obj_dropped_null_fields")
# Write the objections data in the df_obj_dropped_null_fields dynamic frame into Redshift
datasink_objections = glueContext.write_dynamic_frame.from_jdbc_conf(frame = df_obj_dropped_null_fields, catalog_connection = job_connection, connection_options = {"preactions":preactions_objections, "dbtable": dbtable_objections, "database": database}, redshift_tmp_dir = args["TempDir"], transformation_ctx = "datasink_objections")
job.commit()
|
[
"pyspark.context.SparkContext",
"awsglue.context.GlueContext",
"awsglue.utils.getResolvedOptions",
"awsglue.job.Job"
] |
[((206, 312), 'awsglue.utils.getResolvedOptions', 'getResolvedOptions', (['sys.argv', "['TempDir', 'JOB_NAME', 'database', 's3_staging_path', 'job_connection']"], {}), "(sys.argv, ['TempDir', 'JOB_NAME', 'database',\n 's3_staging_path', 'job_connection'])\n", (224, 312), False, 'from awsglue.utils import getResolvedOptions\n'), ((311, 325), 'pyspark.context.SparkContext', 'SparkContext', ([], {}), '()\n', (323, 325), False, 'from pyspark.context import SparkContext\n'), ((340, 355), 'awsglue.context.GlueContext', 'GlueContext', (['sc'], {}), '(sc)\n', (351, 355), False, 'from awsglue.context import GlueContext\n'), ((396, 412), 'awsglue.job.Job', 'Job', (['glueContext'], {}), '(glueContext)\n', (399, 412), False, 'from awsglue.job import Job\n')]
|
# Generated by Django 2.0.6 on 2019-08-04 14:33
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the ``Packet`` model, backed by the ``packet`` table."""

    dependencies = [
        ("hustlers", "0001_initial"),
        ("knowledge", "0005_auto_20180624_1317"),
    ]

    operations = [
        migrations.CreateModel(
            name="Packet",
            fields=[
                # Standard auto-incrementing primary key.
                ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("is_active", models.BooleanField(default=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("modified_at", models.DateTimeField(auto_now=True)),
                ("name", models.CharField(max_length=100)),
                ("slug", models.CharField(blank=True, max_length=100, null=True)),
                ("sequence_no", models.PositiveIntegerField(default=1)),
                # Packet survives deletion of its creator (SET_NULL).
                ("created_by", models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="packet", to="hustlers.Hustler")),
                ("resources", models.ManyToManyField(related_name="packet", to="knowledge.Category")),
            ],
            options={"db_table": "packet"},
        ),
    ]
|
[
"django.db.models.ManyToManyField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.PositiveIntegerField",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.DateTimeField"
] |
[((444, 537), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (460, 537), False, 'from django.db import migrations, models\n'), ((703, 736), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (722, 736), False, 'from django.db import migrations, models\n'), ((770, 809), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (790, 809), False, 'from django.db import migrations, models\n'), ((844, 879), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (864, 879), False, 'from django.db import migrations, models\n'), ((907, 939), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (923, 939), False, 'from django.db import migrations, models\n'), ((967, 1022), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(100)', 'null': '(True)'}), '(blank=True, max_length=100, null=True)\n', (983, 1022), False, 'from django.db import migrations, models\n'), ((1057, 1095), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(1)'}), '(default=1)\n', (1084, 1095), False, 'from django.db import migrations, models\n'), ((1170, 1294), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""packet"""', 'to': '"""hustlers.Hustler"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n related_name='packet', to='hustlers.Hustler')\n", (1187, 1294), False, 'from django.db import migrations, models\n'), ((1501, 1571), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': 
'"""packet"""', 'to': '"""knowledge.Category"""'}), "(related_name='packet', to='knowledge.Category')\n", (1523, 1571), False, 'from django.db import migrations, models\n')]
|
# Lint as: python3
# Copyright 2019 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tf_client."""
from concurrent import futures
import time
import numpy as np
from reverb import client as reverb_client
from reverb import item_selectors
from reverb import rate_limiters
from reverb import server
from reverb import tf_client
import tensorflow.compat.v1 as tf
def _make_table(name):
  """Build one prioritized Table (FIFO remover, MinSize(1) rate limiter)."""
  return server.Table(
      name,
      sampler=item_selectors.Prioritized(priority_exponent=1),
      remover=item_selectors.Fifo(),
      max_size=1000000,
      rate_limiter=rate_limiters.MinSize(1))


def make_tables_and_server():
  """Return (tables, server) with two identically-configured tables.

  The two tables 'dist' and 'dist2' were previously constructed with
  duplicated inline code; the shared configuration now lives in
  `_make_table` so the two definitions cannot drift apart.
  """
  tables = [_make_table('dist'), _make_table('dist2')]
  return tables, server.Server(tables=tables)
class SampleOpTest(tf.test.TestCase):
  """Tests for TFClient's sample op against a live in-process Server."""

  @classmethod
  def setUpClass(cls):
    super().setUpClass()
    # One server/client pair is shared by every test in this class.
    cls._tables, cls._server = make_tables_and_server()
    cls._client = reverb_client.Client(f'localhost:{cls._server.port}')

  def tearDown(self):
    super().tearDown()
    # Empty the table so tests do not observe each other's items.
    self._client.reset('dist')

  @classmethod
  def tearDownClass(cls):
    super().tearDownClass()
    cls._server.stop()

  def test_sets_meta_data_fields(self):
    input_data = [np.ones((81, 81), dtype=np.float64)]
    self._client.insert(input_data, {'dist': 1})
    with self.session() as session:
      client = tf_client.TFClient(self._client.server_address)
      sample = session.run(client.sample('dist', [tf.float64]))
    np.testing.assert_equal(input_data, sample.data)
    self.assertNotEqual(sample.info.key, 0)
    # With a single item of priority 1 in the table, the sample
    # probability must be exactly 1.
    self.assertEqual(sample.info.probability, 1)
    self.assertEqual(sample.info.table_size, 1)
    self.assertEqual(sample.info.priority, 1)

  def test_dtype_mismatch_result_in_error_raised(self):
    data = [np.zeros((81, 81))]
    self._client.insert(data, {'dist': 1})
    with self.session() as session:
      client = tf_client.TFClient(self._client.server_address)
      with self.assertRaises(tf.errors.InternalError):
        # Table holds float64 data but float32 is requested.
        session.run(client.sample('dist', [tf.float32]))

  def test_forwards_server_error(self):
    with self.session() as session:
      client = tf_client.TFClient(self._client.server_address)
      with self.assertRaises(tf.errors.NotFoundError):
        # 'invalid' is not a table on the server.
        session.run(client.sample('invalid', [tf.float64]))

  def test_retries_until_success_or_fatal_error(self):
    with self.session() as session:
      client = tf_client.TFClient(self._client.server_address)
      with futures.ThreadPoolExecutor(max_workers=1) as executor:
        # The table is empty, so this sample call blocks in a worker
        # thread until the insert below makes an item available.
        sample = executor.submit(session.run,
                                client.sample('dist', [tf.float64]))
        input_data = [np.zeros((81, 81))]
        self._client.insert(input_data, {'dist': 1})
      np.testing.assert_equal(input_data, sample.result().data)
class UpdatePrioritiesOpTest(tf.test.TestCase):
  """Tests for TFClient's update_priorities op."""

  @classmethod
  def setUpClass(cls):
    super().setUpClass()
    # One server/client pair is shared by every test in this class.
    cls._tables, cls._server = make_tables_and_server()
    cls._client = reverb_client.Client(f'localhost:{cls._server.port}')

  def tearDown(self):
    super().tearDown()
    # Empty the table so tests do not observe each other's items.
    self._client.reset('dist')

  @classmethod
  def tearDownClass(cls):
    super().tearDownClass()
    cls._server.stop()

  def test_shape_result_in_error_raised(self):
    with self.session() as session:
      client = tf_client.TFClient(self._client.server_address)
      # Two keys but only one priority: mismatched shapes must be rejected.
      update_op = client.update_priorities(
          tf.constant('dist'), tf.constant([1, 2], dtype=tf.uint64),
          tf.constant([1], dtype=tf.float64))
      with self.assertRaises(tf.errors.InvalidArgumentError):
        session.run(update_op)

  def test_priority_update_is_applied(self):
    # Start with uniform distribution
    for i in range(4):
      self._client.insert([np.array([i], dtype=np.uint32)], {'dist': 1})
    # Poll (up to ~1 s) until the server reports all 4 inserts landed.
    for _ in range(100):
      if self._tables[0].info.current_size == 4:
        break
      time.sleep(0.01)
    self.assertEqual(self._tables[0].info.current_size, 4)
    # Until we have received all 4 items.
    items = {}
    while len(items) < 4:
      item = next(self._client.sample('dist'))[0]
      items[item.info.key] = item.info.probability
      # Uniform priorities => each of the 4 items samples with p = 0.25.
      self.assertEqual(item.info.probability, 0.25)
    # Update the priority of one of the items.
    update_key = next(iter(items.keys()))
    with self.session() as session:
      client = tf_client.TFClient(self._client.server_address)
      update_op = client.update_priorities(
          table=tf.constant('dist'),
          keys=tf.constant([update_key], dtype=tf.uint64),
          priorities=tf.constant([3], dtype=tf.float64))
      self.assertIsNone(session.run(update_op))
    # The updated item now has priority 3 and the other 3 items have priority 1
    # each. The probability of sampling the new item should thus be 50%. We
    # sample until the updated item is seen and check that the probability (and
    # thus the priority) has been updated.
    for _ in range(1000):
      item = next(self._client.sample('dist'))[0]
      if item.info.key == update_key:
        self.assertEqual(item.info.probability, 0.5)
        break
    else:
      self.fail('Updated item was not found')
class InsertOpTest(tf.test.TestCase):
    """Tests for TFClient.insert: argument validation and insert/sample round trips.

    NOTE(review): relies on the module-level helper ``make_tables_and_server``;
    assumes it creates tables named 'dist' and 'dist2'.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # One server/client pair is shared by every test in this class.
        cls._tables, cls._server = make_tables_and_server()
        cls._client = reverb_client.Client(f'localhost:{cls._server.port}')

    def tearDown(self):
        super().tearDown()
        # Empty both tables so tests never observe each other's items.
        self._client.reset('dist')
        self._client.reset('dist2')

    @classmethod
    def tearDownClass(cls):
        super().tearDownClass()
        cls._server.stop()

    def setUp(self):
        super().setUp()
        # Single-tensor payload reused by the validation tests below.
        self.data = [tf.constant([1, 2, 3], dtype=tf.int8)]

    def test_checks_that_table_has_rank_1(self):
        """The tables argument must be a rank-1 tensor."""
        client = tf_client.TFClient(self._client.server_address)
        priorities = tf.constant([1.0], dtype=tf.float64)
        # Works for rank 1.
        client.insert(self.data, tf.constant(['dist']), priorities)
        # Does not work for rank > 1.
        with self.assertRaises(ValueError):
            client.insert(self.data, tf.constant([['dist']]), priorities)
        # Does not work for rank < 1.
        with self.assertRaises(ValueError):
            client.insert(self.data, tf.constant('dist'), priorities)

    def test_checks_dtype_of_table_argument(self):
        """A numeric tables argument is rejected."""
        client = tf_client.TFClient(self._client.server_address)
        with self.assertRaises(ValueError):
            client.insert(self.data, tf.constant([1]),
                          tf.constant([1.0], dtype=tf.float64))

    def test_checks_that_priorities_argument_has_rank_1(self):
        """The priorities argument must be a rank-1 tensor."""
        client = tf_client.TFClient(self._client.server_address)
        data = [tf.constant([1, 2])]
        tables = tf.constant(['dist'])
        # Works for rank 1.
        client.insert(data, tables, tf.constant([1.0], dtype=tf.float64))
        # Does not work for rank > 1.
        with self.assertRaises(ValueError):
            client.insert(data, tables, tf.constant([[1.0]], dtype=tf.float64))
        # Does not work for rank < 1.
        with self.assertRaises(ValueError):
            client.insert(data, tables, tf.constant(1.0, dtype=tf.float64))

    def test_checks_that_priorities_argument_has_dtype_float64(self):
        """float32 priorities are rejected; the op requires float64."""
        client = tf_client.TFClient(self._client.server_address)
        with self.assertRaises(ValueError):
            client.insert(self.data, tf.constant(['dist']),
                          tf.constant([1.0], dtype=tf.float32))

    def test_checks_that_tables_and_priorities_arguments_have_same_shape(self):
        """Exactly one priority must be supplied per target table."""
        client = tf_client.TFClient(self._client.server_address)
        with self.assertRaises(ValueError):
            client.insert(self.data, tf.constant(['dist', 'dist2']),
                          tf.constant([1.0], dtype=tf.float64))

    def test_single_table_insert(self):
        """Data inserted into one table can be sampled back unchanged."""
        with self.session() as session:
            client = tf_client.TFClient(self._client.server_address)
            insert_op = client.insert(
                data=[tf.constant([1, 2, 3], dtype=tf.int8)],
                tables=tf.constant(['dist']),
                priorities=tf.constant([1.0], dtype=tf.float64))
            sample_op = client.sample('dist', [tf.int8])
            # Check that insert op succeeds.
            self.assertIsNone(session.run(insert_op))
            # Check that the sampled data matches the inserted.
            sample = session.run(sample_op)
            self.assertLen(sample.data, 1)
            np.testing.assert_equal(
                np.array([1, 2, 3], dtype=np.int8), sample.data[0])

    def test_multi_table_insert(self):
        """A single insert call can fan out to several tables at once."""
        with self.session() as session:
            client = tf_client.TFClient(self._client.server_address)
            insert_op = client.insert(
                data=[tf.constant([1, 2, 3], dtype=tf.int8)],
                tables=tf.constant(['dist', 'dist2']),
                priorities=tf.constant([1.0, 2.0], dtype=tf.float64))
            sample_ops = [
                client.sample('dist', [tf.int8]),
                client.sample('dist2', [tf.int8])
            ]
            # Check that insert op succeeds.
            self.assertIsNone(session.run(insert_op))
            # Check that the sampled data matches the inserted in all tables.
            for sample_op in sample_ops:
                sample = session.run(sample_op)
                self.assertLen(sample.data, 1)
                np.testing.assert_equal(
                    np.array([1, 2, 3], dtype=np.int8), sample.data[0])
if __name__ == '__main__':
    # These tests use TF1-style sessions, so eager execution must be off.
    tf.disable_eager_execution()
    tf.test.main()
|
[
"reverb.item_selectors.Fifo",
"reverb.rate_limiters.MinSize",
"tensorflow.compat.v1.constant",
"numpy.zeros",
"numpy.ones",
"time.sleep",
"tensorflow.compat.v1.disable_eager_execution",
"tensorflow.compat.v1.test.main",
"numpy.array",
"reverb.client.Client",
"numpy.testing.assert_equal",
"concurrent.futures.ThreadPoolExecutor",
"reverb.tf_client.TFClient",
"reverb.server.Server",
"reverb.item_selectors.Prioritized"
] |
[((9890, 9918), 'tensorflow.compat.v1.disable_eager_execution', 'tf.disable_eager_execution', ([], {}), '()\n', (9916, 9918), True, 'import tensorflow.compat.v1 as tf\n'), ((9921, 9935), 'tensorflow.compat.v1.test.main', 'tf.test.main', ([], {}), '()\n', (9933, 9935), True, 'import tensorflow.compat.v1 as tf\n'), ((1419, 1447), 'reverb.server.Server', 'server.Server', ([], {'tables': 'tables'}), '(tables=tables)\n', (1432, 1447), False, 'from reverb import server\n'), ((1626, 1679), 'reverb.client.Client', 'reverb_client.Client', (['f"""localhost:{cls._server.port}"""'], {}), "(f'localhost:{cls._server.port}')\n", (1646, 1679), True, 'from reverb import client as reverb_client\n'), ((3692, 3745), 'reverb.client.Client', 'reverb_client.Client', (['f"""localhost:{cls._server.port}"""'], {}), "(f'localhost:{cls._server.port}')\n", (3712, 3745), True, 'from reverb import client as reverb_client\n'), ((6031, 6084), 'reverb.client.Client', 'reverb_client.Client', (['f"""localhost:{cls._server.port}"""'], {}), "(f'localhost:{cls._server.port}')\n", (6051, 6084), True, 'from reverb import client as reverb_client\n'), ((6444, 6491), 'reverb.tf_client.TFClient', 'tf_client.TFClient', (['self._client.server_address'], {}), '(self._client.server_address)\n', (6462, 6491), False, 'from reverb import tf_client\n'), ((6509, 6545), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1.0]'], {'dtype': 'tf.float64'}), '([1.0], dtype=tf.float64)\n', (6520, 6545), True, 'import tensorflow.compat.v1 as tf\n'), ((6980, 7027), 'reverb.tf_client.TFClient', 'tf_client.TFClient', (['self._client.server_address'], {}), '(self._client.server_address)\n', (6998, 7027), False, 'from reverb import tf_client\n'), ((7250, 7297), 'reverb.tf_client.TFClient', 'tf_client.TFClient', (['self._client.server_address'], {}), '(self._client.server_address)\n', (7268, 7297), False, 'from reverb import tf_client\n'), ((7344, 7365), 'tensorflow.compat.v1.constant', 'tf.constant', (["['dist']"], {}), 
"(['dist'])\n", (7355, 7365), True, 'import tensorflow.compat.v1 as tf\n'), ((7837, 7884), 'reverb.tf_client.TFClient', 'tf_client.TFClient', (['self._client.server_address'], {}), '(self._client.server_address)\n', (7855, 7884), False, 'from reverb import tf_client\n'), ((8129, 8176), 'reverb.tf_client.TFClient', 'tf_client.TFClient', (['self._client.server_address'], {}), '(self._client.server_address)\n', (8147, 8176), False, 'from reverb import tf_client\n'), ((1909, 1944), 'numpy.ones', 'np.ones', (['(81, 81)'], {'dtype': 'np.float64'}), '((81, 81), dtype=np.float64)\n', (1916, 1944), True, 'import numpy as np\n'), ((2046, 2093), 'reverb.tf_client.TFClient', 'tf_client.TFClient', (['self._client.server_address'], {}), '(self._client.server_address)\n', (2064, 2093), False, 'from reverb import tf_client\n'), ((2164, 2212), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['input_data', 'sample.data'], {}), '(input_data, sample.data)\n', (2187, 2212), True, 'import numpy as np\n'), ((2477, 2495), 'numpy.zeros', 'np.zeros', (['(81, 81)'], {}), '((81, 81))\n', (2485, 2495), True, 'import numpy as np\n'), ((2591, 2638), 'reverb.tf_client.TFClient', 'tf_client.TFClient', (['self._client.server_address'], {}), '(self._client.server_address)\n', (2609, 2638), False, 'from reverb import tf_client\n'), ((2843, 2890), 'reverb.tf_client.TFClient', 'tf_client.TFClient', (['self._client.server_address'], {}), '(self._client.server_address)\n', (2861, 2890), False, 'from reverb import tf_client\n'), ((3113, 3160), 'reverb.tf_client.TFClient', 'tf_client.TFClient', (['self._client.server_address'], {}), '(self._client.server_address)\n', (3131, 3160), False, 'from reverb import tf_client\n'), ((4015, 4062), 'reverb.tf_client.TFClient', 'tf_client.TFClient', (['self._client.server_address'], {}), '(self._client.server_address)\n', (4033, 4062), False, 'from reverb import tf_client\n'), ((4590, 4606), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (4600, 
4606), False, 'import time\n'), ((5043, 5090), 'reverb.tf_client.TFClient', 'tf_client.TFClient', (['self._client.server_address'], {}), '(self._client.server_address)\n', (5061, 5090), False, 'from reverb import tf_client\n'), ((6344, 6381), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1, 2, 3]'], {'dtype': 'tf.int8'}), '([1, 2, 3], dtype=tf.int8)\n', (6355, 6381), True, 'import tensorflow.compat.v1 as tf\n'), ((6600, 6621), 'tensorflow.compat.v1.constant', 'tf.constant', (["['dist']"], {}), "(['dist'])\n", (6611, 6621), True, 'import tensorflow.compat.v1 as tf\n'), ((7310, 7329), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1, 2]'], {}), '([1, 2])\n', (7321, 7329), True, 'import tensorflow.compat.v1 as tf\n'), ((7423, 7459), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1.0]'], {'dtype': 'tf.float64'}), '([1.0], dtype=tf.float64)\n', (7434, 7459), True, 'import tensorflow.compat.v1 as tf\n'), ((8428, 8475), 'reverb.tf_client.TFClient', 'tf_client.TFClient', (['self._client.server_address'], {}), '(self._client.server_address)\n', (8446, 8475), False, 'from reverb import tf_client\n'), ((9119, 9166), 'reverb.tf_client.TFClient', 'tf_client.TFClient', (['self._client.server_address'], {}), '(self._client.server_address)\n', (9137, 9166), False, 'from reverb import tf_client\n'), ((1005, 1052), 'reverb.item_selectors.Prioritized', 'item_selectors.Prioritized', ([], {'priority_exponent': '(1)'}), '(priority_exponent=1)\n', (1031, 1052), False, 'from reverb import item_selectors\n'), ((1072, 1093), 'reverb.item_selectors.Fifo', 'item_selectors.Fifo', ([], {}), '()\n', (1091, 1093), False, 'from reverb import item_selectors\n'), ((1146, 1170), 'reverb.rate_limiters.MinSize', 'rate_limiters.MinSize', (['(1)'], {}), '(1)\n', (1167, 1170), False, 'from reverb import rate_limiters\n'), ((1230, 1277), 'reverb.item_selectors.Prioritized', 'item_selectors.Prioritized', ([], {'priority_exponent': '(1)'}), '(priority_exponent=1)\n', (1256, 1277), False, 
'from reverb import item_selectors\n'), ((1297, 1318), 'reverb.item_selectors.Fifo', 'item_selectors.Fifo', ([], {}), '()\n', (1316, 1318), False, 'from reverb import item_selectors\n'), ((1371, 1395), 'reverb.rate_limiters.MinSize', 'rate_limiters.MinSize', (['(1)'], {}), '(1)\n', (1392, 1395), False, 'from reverb import rate_limiters\n'), ((3172, 3213), 'concurrent.futures.ThreadPoolExecutor', 'futures.ThreadPoolExecutor', ([], {'max_workers': '(1)'}), '(max_workers=1)\n', (3198, 3213), False, 'from concurrent import futures\n'), ((4117, 4136), 'tensorflow.compat.v1.constant', 'tf.constant', (['"""dist"""'], {}), "('dist')\n", (4128, 4136), True, 'import tensorflow.compat.v1 as tf\n'), ((4138, 4174), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1, 2]'], {'dtype': 'tf.uint64'}), '([1, 2], dtype=tf.uint64)\n', (4149, 4174), True, 'import tensorflow.compat.v1 as tf\n'), ((4186, 4220), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1]'], {'dtype': 'tf.float64'}), '([1], dtype=tf.float64)\n', (4197, 4220), True, 'import tensorflow.compat.v1 as tf\n'), ((6741, 6764), 'tensorflow.compat.v1.constant', 'tf.constant', (["[['dist']]"], {}), "([['dist']])\n", (6752, 6764), True, 'import tensorflow.compat.v1 as tf\n'), ((6884, 6903), 'tensorflow.compat.v1.constant', 'tf.constant', (['"""dist"""'], {}), "('dist')\n", (6895, 6903), True, 'import tensorflow.compat.v1 as tf\n'), ((7099, 7115), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1]'], {}), '([1])\n', (7110, 7115), True, 'import tensorflow.compat.v1 as tf\n'), ((7137, 7173), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1.0]'], {'dtype': 'tf.float64'}), '([1.0], dtype=tf.float64)\n', (7148, 7173), True, 'import tensorflow.compat.v1 as tf\n'), ((7570, 7608), 'tensorflow.compat.v1.constant', 'tf.constant', (['[[1.0]]'], {'dtype': 'tf.float64'}), '([[1.0]], dtype=tf.float64)\n', (7581, 7608), True, 'import tensorflow.compat.v1 as tf\n'), ((7719, 7753), 'tensorflow.compat.v1.constant', 'tf.constant', 
(['(1.0)'], {'dtype': 'tf.float64'}), '(1.0, dtype=tf.float64)\n', (7730, 7753), True, 'import tensorflow.compat.v1 as tf\n'), ((7956, 7977), 'tensorflow.compat.v1.constant', 'tf.constant', (["['dist']"], {}), "(['dist'])\n", (7967, 7977), True, 'import tensorflow.compat.v1 as tf\n'), ((7999, 8035), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1.0]'], {'dtype': 'tf.float32'}), '([1.0], dtype=tf.float32)\n', (8010, 8035), True, 'import tensorflow.compat.v1 as tf\n'), ((8248, 8278), 'tensorflow.compat.v1.constant', 'tf.constant', (["['dist', 'dist2']"], {}), "(['dist', 'dist2'])\n", (8259, 8278), True, 'import tensorflow.compat.v1 as tf\n'), ((8300, 8336), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1.0]'], {'dtype': 'tf.float64'}), '([1.0], dtype=tf.float64)\n', (8311, 8336), True, 'import tensorflow.compat.v1 as tf\n'), ((8978, 9012), 'numpy.array', 'np.array', (['[1, 2, 3]'], {'dtype': 'np.int8'}), '([1, 2, 3], dtype=np.int8)\n', (8986, 9012), True, 'import numpy as np\n'), ((3365, 3383), 'numpy.zeros', 'np.zeros', (['(81, 81)'], {}), '((81, 81))\n', (3373, 3383), True, 'import numpy as np\n'), ((4449, 4479), 'numpy.array', 'np.array', (['[i]'], {'dtype': 'np.uint32'}), '([i], dtype=np.uint32)\n', (4457, 4479), True, 'import numpy as np\n'), ((5151, 5170), 'tensorflow.compat.v1.constant', 'tf.constant', (['"""dist"""'], {}), "('dist')\n", (5162, 5170), True, 'import tensorflow.compat.v1 as tf\n'), ((5187, 5229), 'tensorflow.compat.v1.constant', 'tf.constant', (['[update_key]'], {'dtype': 'tf.uint64'}), '([update_key], dtype=tf.uint64)\n', (5198, 5229), True, 'import tensorflow.compat.v1 as tf\n'), ((5252, 5286), 'tensorflow.compat.v1.constant', 'tf.constant', (['[3]'], {'dtype': 'tf.float64'}), '([3], dtype=tf.float64)\n', (5263, 5286), True, 'import tensorflow.compat.v1 as tf\n'), ((8582, 8603), 'tensorflow.compat.v1.constant', 'tf.constant', (["['dist']"], {}), "(['dist'])\n", (8593, 8603), True, 'import tensorflow.compat.v1 as tf\n'), ((8626, 
8662), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1.0]'], {'dtype': 'tf.float64'}), '([1.0], dtype=tf.float64)\n', (8637, 8662), True, 'import tensorflow.compat.v1 as tf\n'), ((9273, 9303), 'tensorflow.compat.v1.constant', 'tf.constant', (["['dist', 'dist2']"], {}), "(['dist', 'dist2'])\n", (9284, 9303), True, 'import tensorflow.compat.v1 as tf\n'), ((9326, 9367), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1.0, 2.0]'], {'dtype': 'tf.float64'}), '([1.0, 2.0], dtype=tf.float64)\n', (9337, 9367), True, 'import tensorflow.compat.v1 as tf\n'), ((9807, 9841), 'numpy.array', 'np.array', (['[1, 2, 3]'], {'dtype': 'np.int8'}), '([1, 2, 3], dtype=np.int8)\n', (9815, 9841), True, 'import numpy as np\n'), ((8525, 8562), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1, 2, 3]'], {'dtype': 'tf.int8'}), '([1, 2, 3], dtype=tf.int8)\n', (8536, 8562), True, 'import tensorflow.compat.v1 as tf\n'), ((9216, 9253), 'tensorflow.compat.v1.constant', 'tf.constant', (['[1, 2, 3]'], {'dtype': 'tf.int8'}), '([1, 2, 3], dtype=tf.int8)\n', (9227, 9253), True, 'import tensorflow.compat.v1 as tf\n')]
|
from django import forms
class AddressToDoForm(forms.Form):
    """Form collecting an address to-do entry: a promoter, an address, and
    two image uploads.

    NOTE(review): 'promouter' looks like a misspelling of 'promoter', but the
    field name is part of the form's public interface (POST keys), so it is
    kept as-is.
    """
    # Text fields (required by default for Django form fields).
    promouter = forms.CharField()
    address = forms.CharField()
    # Image uploads — names suggest entrance and mailbox photos; confirm
    # semantics against the template/view that uses this form.
    entrance_img = forms.ImageField()
    mailbox_img = forms.ImageField()
|
[
"django.forms.CharField",
"django.forms.ImageField"
] |
[((77, 94), 'django.forms.CharField', 'forms.CharField', ([], {}), '()\n', (92, 94), False, 'from django import forms\n'), ((109, 126), 'django.forms.CharField', 'forms.CharField', ([], {}), '()\n', (124, 126), False, 'from django import forms\n'), ((146, 164), 'django.forms.ImageField', 'forms.ImageField', ([], {}), '()\n', (162, 164), False, 'from django import forms\n'), ((183, 201), 'django.forms.ImageField', 'forms.ImageField', ([], {}), '()\n', (199, 201), False, 'from django import forms\n')]
|
import sys
import os
def run():
    """Put the project root on ``sys.path`` and delegate to ``pip.main()``.

    Returns whatever ``pip.main()`` returns (its exit status).
    """
    project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    # FIXME: this is kind of crude; if we could create a fake pip module,
    # then exec into it and update pip.__path__ properly, we wouldn't have
    # to update sys.path:
    sys.path.insert(0, project_root)
    import pip
    return pip.main()
if __name__ == '__main__':
    # Use a name that does not shadow the ``exit`` builtin; forward a truthy
    # status code to sys.exit so failures propagate to the shell, while a
    # clean run (falsy status) exits with code 0.
    status = run()
    if status:
        sys.exit(status)
|
[
"sys.exit",
"os.path.abspath",
"sys.path.insert",
"pip.main"
] |
[((287, 311), 'sys.path.insert', 'sys.path.insert', (['(0)', 'base'], {}), '(0, base)\n', (302, 311), False, 'import sys\n'), ((338, 348), 'pip.main', 'pip.main', ([], {}), '()\n', (346, 348), False, 'import pip\n'), ((416, 430), 'sys.exit', 'sys.exit', (['exit'], {}), '(exit)\n', (424, 430), False, 'import sys\n'), ((77, 102), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (92, 102), False, 'import os\n')]
|
from kernel_tuner import tune_kernel
import numpy
import argparse
import json
def generate_code(tuning_parameters):
    """Render the CUDA source for the ``fct_ale_c_horizontal`` kernel.

    The template placeholders are filled from *tuning_parameters*:
    ``block_size_x`` and ``tiling_x`` determine the loop stride and how many
    unrolled (guarded) copies of the compute block are emitted, while
    ``int_type`` (underscores become spaces) and ``real_type`` select the
    integer and floating-point C types.

    Returns the generated kernel source as a string.
    """
    template = (
        "__global__ void fct_ale_c_horizontal(const int maxLevels, const int * __restrict__ nLevels, const int * __restrict__ nodesPerEdge, const int * __restrict__ elementsPerEdge, <%REAL_TYPE%> * __restrict__ del_ttf_advhoriz, const <%REAL_TYPE%> * __restrict__ fct_adf_h, const <%REAL_TYPE%> dt, const <%REAL_TYPE%> * __restrict__ area)\n"
        "{\n"
        "const <%INT_TYPE%> edge = blockIdx.x * 2;\n"
        "<%INT_TYPE%> levelBound = 0;\n"
        "const <%INT_TYPE%> nodeOne = (nodesPerEdge[edge] - 1) * maxLevels;\n"
        "const <%INT_TYPE%> nodeTwo = (nodesPerEdge[edge + 1] - 1) * maxLevels;\n"
        "\n"
        "/* Compute the upper bound for the level */\n"
        "levelBound = elementsPerEdge[edge + 1];\n"
        "if ( levelBound > 0 )\n"
        "{\n"
        "levelBound = max(nLevels[(elementsPerEdge[edge]) - 1], nLevels[levelBound - 1]);\n"
        "}\n"
        "else\n"
        "{\n"
        "levelBound = max(nLevels[(elementsPerEdge[edge]) - 1], 0);\n"
        "}\n"
        "\n"
        "for ( <%INT_TYPE%> level = threadIdx.x; level < levelBound - 1; level += <%BLOCK_SIZE%> )\n"
        "{\n"
        "<%REAL_TYPE%> fct_adf_h_item = 0;\n"
        "<%COMPUTE_BLOCK%>"
        "}\n"
        "}\n"
    )
    compute_block = (
        "fct_adf_h_item = fct_adf_h[(blockIdx.x * maxLevels) + level + <%OFFSET%>];\n"
        "atomicAdd(&(del_ttf_advhoriz[nodeOne + level + <%OFFSET%>]), (fct_adf_h_item * (dt / area[nodeOne + level + <%OFFSET%>])));\n"
        "atomicAdd(&(del_ttf_advhoriz[nodeTwo + level + <%OFFSET%>]), -(fct_adf_h_item * (dt / area[nodeTwo + level + <%OFFSET%>])));\n"
    )
    block_size = tuning_parameters["block_size_x"]
    tiling = tuning_parameters["tiling_x"]
    # With tiling each thread handles tiling_x consecutive levels per stride.
    stride = block_size * tiling if tiling > 1 else block_size
    pieces = []
    for tile in range(tiling):
        if tile == 0:
            # The first copy uses the plain index (no "+ offset" term).
            pieces.append(compute_block.replace(" + <%OFFSET%>", ""))
        else:
            # Later copies are guarded so they never run past levelBound.
            offset = block_size * tile
            body = compute_block.replace("<%OFFSET%>", str(offset))
            pieces.append("if ( level + {} < (levelBound - 1) )\n{{\n{}}}\n".format(offset, body))
    code = template.replace("<%BLOCK_SIZE%>", str(stride))
    code = code.replace("<%COMPUTE_BLOCK%>", "".join(pieces))
    code = code.replace("<%INT_TYPE%>", tuning_parameters["int_type"].replace("_", " "))
    code = code.replace("<%REAL_TYPE%>", tuning_parameters["real_type"])
    return code
def reference(edges, nodes_per_edge, elements_per_edge, levels, max_levels, del_ttf_advhoriz, fct_adf_h, dt, area, numpy_real_type):
    """Host-side reference for the ``fct_ale_c_horizontal`` kernel.

    Updates ``del_ttf_advhoriz`` in place (adding the per-edge flux term to
    one node, subtracting it from the other) and returns the number of bytes
    the kernel is modeled to move, used later to compute bandwidth.
    """
    item_size = numpy.dtype(numpy_real_type).itemsize  # loop-invariant
    total_bytes = 0
    for edge in range(edges):
        # Per edge: three 4-byte index reads (two nodes, first element).
        total_bytes += 3 * 4
        node_a = nodes_per_edge[edge * 2] - 1
        node_b = nodes_per_edge[(edge * 2) + 1] - 1
        elem_a = elements_per_edge[edge * 2] - 1
        elem_b = elements_per_edge[(edge * 2) + 1] - 1
        if elem_b < 0:
            # Boundary edge: only one adjacent element contributes.
            total_bytes += 4
            level_count = max(levels[elem_a], 0)
        else:
            total_bytes += 2 * 4
            level_count = max(levels[elem_a], levels[elem_b])
        for level in range(level_count - 1):
            # Per level: seven real-typed accesses in the kernel model.
            total_bytes += 7 * item_size
            flux = fct_adf_h[(edge * max_levels) + level]
            idx_a = (node_a * max_levels) + level
            idx_b = (node_b * max_levels) + level
            del_ttf_advhoriz[idx_a] = del_ttf_advhoriz[idx_a] + (flux * (dt / area[idx_a]))
            del_ttf_advhoriz[idx_b] = del_ttf_advhoriz[idx_b] - (flux * (dt / area[idx_b]))
    return total_bytes
def tune(nodes, edges, elements, max_levels, max_tile, real_type, quiet=True):
    """Auto-tune ``fct_ale_c_horizontal`` over random inputs.

    Builds random mesh/field data, computes a CPU reference answer, runs
    kernel_tuner over block size / tiling / type combinations, and attaches
    a ``memory_bandwidth`` entry (bytes per second) to every result.

    Raises ValueError for an unsupported *real_type*.
    """
    real_types = {"float": numpy.float32, "double": numpy.float64}
    if real_type not in real_types:
        raise ValueError
    numpy_real_type = real_types[real_type]
    # Tuning and code generation parameters.
    tuning_parameters = {
        "int_type": ["unsigned_int", "int"],
        "real_type": [real_type],
        "max_levels": [str(max_levels)],
        "block_size_x": [32 * i for i in range(1, 33)],
        "tiling_x": list(range(1, max_tile)),
    }
    constraints = ["block_size_x * tiling_x <= max_levels"]
    # Memory allocation and initialization (same call order as before so any
    # externally-seeded RNG state produces identical data).
    del_ttf_advhoriz = numpy.random.randn(nodes * max_levels).astype(numpy_real_type)
    del_ttf_advhoriz_control = numpy.copy(del_ttf_advhoriz)
    fct_adf_h = numpy.random.randn(edges * max_levels).astype(numpy_real_type)
    area = numpy.random.randn(nodes * max_levels).astype(numpy_real_type)
    dt = numpy.random.random()
    levels = numpy.zeros(elements).astype(numpy.int32)
    for element in range(elements):
        levels[element] = numpy.random.randint(3, max_levels)
    nodes_per_edge = numpy.zeros(edges * 2).astype(numpy.int32)
    elements_per_edge = numpy.zeros(edges * 2).astype(numpy.int32)
    for edge in range(edges):
        nodes_per_edge[edge * 2] = numpy.random.randint(1, nodes + 1)
        nodes_per_edge[(edge * 2) + 1] = numpy.random.randint(1, nodes + 1)
        # A second element index of 0 marks a boundary edge downstream.
        elements_per_edge[edge * 2] = numpy.random.randint(1, elements + 1)
        elements_per_edge[(edge * 2) + 1] = numpy.random.randint(0, elements + 1)
    arguments = [numpy.int32(max_levels), levels, nodes_per_edge, elements_per_edge,
                 del_ttf_advhoriz, fct_adf_h, numpy_real_type(dt), area]
    # CPU reference answer; also yields the modeled byte traffic.
    memory_bytes = reference(edges, nodes_per_edge, elements_per_edge, levels, max_levels,
                             del_ttf_advhoriz_control, fct_adf_h, dt, area, numpy_real_type)
    arguments_control = [None, None, None, None, del_ttf_advhoriz_control, None, None, None]
    # Tuning.
    results, _ = tune_kernel("fct_ale_c_horizontal", generate_code,
                             "{} * block_size_x".format(edges), arguments, tuning_parameters,
                             lang="CUDA", answer=arguments_control, restrictions=constraints,
                             quiet=quiet, atol=1e-03)
    # Derive effective memory bandwidth from the measured time (milliseconds).
    for result in results:
        result["memory_bandwidth"] = memory_bytes / (result["time"] / 10**3)
    return results
def parse_command_line(args=None):
    """Parse the tuning script's command-line options.

    Args:
        args: Optional list of argument strings; defaults to ``sys.argv[1:]``.
            Added (backward-compatibly) so the parser can be unit-tested.

    Returns:
        argparse.Namespace with nodes/edges/elements/max_levels/max_tile/
        real_type/verbose/store attributes.
    """
    parser = argparse.ArgumentParser(description="FESOM2 FCT ALE C HORIZONTAL")
    parser.add_argument("--nodes", help="The number of nodes.", type=int, required=True)
    parser.add_argument("--edges", help="The number of edges.", type=int, required=True)
    parser.add_argument("--elements", help="The number of elements.", type=int, required=True)
    parser.add_argument("--max_levels", help="The maximum number of horizontal levels per node.", type=int, required=True)
    parser.add_argument("--max_tile", help="The maximum tiling factor.", type=int, default=2)
    parser.add_argument("--real_type", help="The floating point type to use.", choices=["float", "double"], type=str, required=True)
    # NOTE(review): --verbose uses action="store_false" with default=True, so
    # passing the flag yields False; the caller forwards this value as the
    # 'quiet' parameter of tune(). Kept as-is for backward compatibility.
    parser.add_argument("--verbose", help="Print all kernel configurations.", default=True, action="store_false")
    parser.add_argument("--store", help="Store performance results in a JSON file.", default=False, action="store_true")
    return parser.parse_args(args)
if __name__ == "__main__":
    command_line = parse_command_line()
    # NOTE(review): command_line.verbose is forwarded as tune()'s 'quiet'
    # parameter; --verbose (store_false) therefore turns quiet off.
    results = tune(command_line.nodes, command_line.edges, command_line.elements, command_line.max_levels, command_line.max_tile, command_line.real_type, command_line.verbose)
    # Report the fastest configuration as C comments plus its kernel source.
    best_configuration = min(results, key=lambda x : x["time"])
    print("/* Memory bandwidth: {:.2f} GB/s */".format(best_configuration["memory_bandwidth"] / 10**9))
    print("/* Block size X: {} */".format(best_configuration["block_size_x"]))
    print(generate_code(best_configuration))
    if command_line.store:
        try:
            # Mode "x" fails when the file already exists, so earlier
            # experiment results are never overwritten.
            with open("fct_ale_c_horizontal_{}_{}_{}_{}_{}.json".format(command_line.nodes, command_line.elements, command_line.edges, command_line.max_levels, command_line.real_type), "x") as fp:
                json.dump(results, fp)
        except FileExistsError:
            print("Impossible to save the results, a results file already exists for a similar experiment.")
|
[
"json.dump",
"argparse.ArgumentParser",
"numpy.copy",
"numpy.random.randn",
"numpy.float32",
"numpy.dtype",
"numpy.zeros",
"numpy.random.random",
"numpy.random.randint",
"numpy.int32",
"numpy.float64"
] |
[((4888, 4916), 'numpy.copy', 'numpy.copy', (['del_ttf_advhoriz'], {}), '(del_ttf_advhoriz)\n', (4898, 4916), False, 'import numpy\n'), ((5079, 5100), 'numpy.random.random', 'numpy.random.random', ([], {}), '()\n', (5098, 5100), False, 'import numpy\n'), ((6793, 6859), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""FESOM2 FCT ALE C HORIZONTAL"""'}), "(description='FESOM2 FCT ALE C HORIZONTAL')\n", (6816, 6859), False, 'import argparse\n'), ((5221, 5256), 'numpy.random.randint', 'numpy.random.randint', (['(3)', 'max_levels'], {}), '(3, max_levels)\n', (5241, 5256), False, 'import numpy\n'), ((5456, 5490), 'numpy.random.randint', 'numpy.random.randint', (['(1)', '(nodes + 1)'], {}), '(1, nodes + 1)\n', (5476, 5490), False, 'import numpy\n'), ((5532, 5566), 'numpy.random.randint', 'numpy.random.randint', (['(1)', '(nodes + 1)'], {}), '(1, nodes + 1)\n', (5552, 5566), False, 'import numpy\n'), ((5605, 5642), 'numpy.random.randint', 'numpy.random.randint', (['(1)', '(elements + 1)'], {}), '(1, elements + 1)\n', (5625, 5642), False, 'import numpy\n'), ((5687, 5724), 'numpy.random.randint', 'numpy.random.randint', (['(0)', '(elements + 1)'], {}), '(0, elements + 1)\n', (5707, 5724), False, 'import numpy\n'), ((4794, 4832), 'numpy.random.randn', 'numpy.random.randn', (['(nodes * max_levels)'], {}), '(nodes * max_levels)\n', (4812, 4832), False, 'import numpy\n'), ((4933, 4971), 'numpy.random.randn', 'numpy.random.randn', (['(edges * max_levels)'], {}), '(edges * max_levels)\n', (4951, 4971), False, 'import numpy\n'), ((5007, 5045), 'numpy.random.randn', 'numpy.random.randn', (['(nodes * max_levels)'], {}), '(nodes * max_levels)\n', (5025, 5045), False, 'import numpy\n'), ((5114, 5135), 'numpy.zeros', 'numpy.zeros', (['elements'], {}), '(elements)\n', (5125, 5135), False, 'import numpy\n'), ((5278, 5300), 'numpy.zeros', 'numpy.zeros', (['(edges * 2)'], {}), '(edges * 2)\n', (5289, 5300), False, 'import numpy\n'), ((5345, 5367), 'numpy.zeros', 
'numpy.zeros', (['(edges * 2)'], {}), '(edges * 2)\n', (5356, 5367), False, 'import numpy\n'), ((5775, 5798), 'numpy.int32', 'numpy.int32', (['max_levels'], {}), '(max_levels)\n', (5786, 5798), False, 'import numpy\n'), ((5872, 5889), 'numpy.float32', 'numpy.float32', (['dt'], {}), '(dt)\n', (5885, 5889), False, 'import numpy\n'), ((5950, 5973), 'numpy.int32', 'numpy.int32', (['max_levels'], {}), '(max_levels)\n', (5961, 5973), False, 'import numpy\n'), ((6047, 6064), 'numpy.float64', 'numpy.float64', (['dt'], {}), '(dt)\n', (6060, 6064), False, 'import numpy\n'), ((8538, 8560), 'json.dump', 'json.dump', (['results', 'fp'], {}), '(results, fp)\n', (8547, 8560), False, 'import json\n'), ((3506, 3534), 'numpy.dtype', 'numpy.dtype', (['numpy_real_type'], {}), '(numpy_real_type)\n', (3517, 3534), False, 'import numpy\n')]
|
#!/usr/bin/env python
from __future__ import absolute_import, division, print_function
import argparse
import csv
import io
import os
from datetime import datetime
from threading import Thread
import cv2
import requests
from PIL import Image
# Tell OpenCV's FFMPEG backend to use UDP transport for RTSP captures.
os.environ["OPENCV_FFMPEG_CAPTURE_OPTIONS"] = "rtsp_transport;udp"
def parse_arguments(argv=None):
    """Parse command-line options for the plate-recognition stream reader.

    Args:
        argv: Optional list of argument strings; defaults to ``sys.argv[1:]``.
            Added (backward-compatibly) so the parser can be unit-tested.

    Returns:
        argparse.Namespace with api_key/camera/regions/output/show_image/
        inference_server attributes.
    """
    parser = argparse.ArgumentParser(
        description=
        'Read license plates from a RTSP stream and save the result in a CSV file.',
        epilog=
        'For example: anpr_camera_stream.py --camera rtsp://192.168.1.2:5554/camera --api-key TOKEN --regions fr --output /path/to/output.csv'
    )
    parser.add_argument('--api-key', help='Your API key.', required=True)
    parser.add_argument('--camera', help='RTSP stream url.', required=True)
    parser.add_argument(
        '--regions',
        help='Regions http://docs.platerecognizer.com/#regions-supported.',
        required=False)
    parser.add_argument('--output', help='CSV output file.', required=True)
    parser.add_argument(
        '--show-image',
        help='Show a window with the frame being sent for recognition.',
        action='store_true')
    parser.add_argument(
        '--inference-server',
        help='Server used for recognition. Default to cloud server.',
        default='https://api.platerecognizer.com/v1/plate-reader/')
    return parser.parse_args(argv)
class ThreadedCamera(object):
    """Reads frames from an RTSP stream on a daemon background thread.

    The latest frame and read status are exposed via ``get_frame``.
    """

    def __init__(self, args):
        self.capture = cv2.VideoCapture(args.camera, cv2.CAP_FFMPEG)
        self.capture.set(cv2.CAP_PROP_BUFFERSIZE, 1)
        if not self.capture.isOpened():
            print('No stream available: ' + args.camera)
        # BUGFIX: initialize the shared frame state BEFORE starting the
        # reader thread. The original assigned these after start(), so the
        # thread's first (frame, status) write could be clobbered by the
        # constructor's `None`/`False` assignments.
        self.frame = None
        self.status = False
        self.thread = Thread(target=self.update, args=())
        self.thread.daemon = True
        self.thread.start()

    def update(self):
        """Continuously pull frames while the capture stays open."""
        while self.capture.isOpened():
            (self.status, self.frame) = self.capture.read()

    def get_frame(self,):
        """Return the most recent frame, or None if no good frame exists."""
        if self.frame is None or not self.status:
            return
        cv2.waitKey(1)
        return self.frame
def capture(args, writer):
    """Stream frames from the camera and write recognized plates to *writer*.

    Runs until the underlying capture closes. Each frame is JPEG-encoded in
    memory and POSTed to the recognition server; one CSV row is written per
    recognized plate.
    """
    camera = ThreadedCamera(args)
    while camera.capture.isOpened():
        frame = camera.get_frame()
        if frame is None:
            # No usable frame yet (stream warming up or a failed read).
            continue
        if args.show_image:
            cv2.imshow('frame', frame)
        # JPEG-encode the frame in memory before uploading.
        buffer = io.BytesIO()
        im = Image.fromarray(frame)
        im.save(buffer, 'JPEG')
        buffer.seek(0)
        response = requests.post(
            args.inference_server,
            files=dict(upload=buffer),
            data=dict(regions=args.regions or ''),
            headers={'Authorization': 'Token ' + args.api_key})
        res = response.json()
        # NOTE(review): assumes the response always carries a 'results' list;
        # an API error payload would raise KeyError here — confirm upstream.
        for result in res['results']:
            writer.writerow(
                dict(date=datetime.today().strftime('%x %X'),
                     license_plate=result['plate'],
                     score=result['score'],
                     dscore=result['dscore'],
                     vehicle_type=result['vehicle']['type']))
def main():
    """Entry point: parse arguments, open the CSV output, start capturing."""
    args = parse_arguments()
    # newline='' is the documented way to open a file handed to the csv
    # module; without it the writer emits extra blank lines on Windows.
    with open(args.output, 'w', newline='') as output:
        fields = ['date', 'license_plate', 'score', 'dscore', 'vehicle_type']
        writer = csv.DictWriter(output, fieldnames=fields)
        writer.writeheader()
        capture(args, writer)
if __name__ == "__main__":
    # Script entry point.
    main()
|
[
"threading.Thread",
"io.BytesIO",
"argparse.ArgumentParser",
"datetime.datetime.today",
"cv2.waitKey",
"cv2.VideoCapture",
"PIL.Image.fromarray",
"cv2.imshow",
"csv.DictWriter"
] |
[((350, 625), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Read license plates from a RTSP stream and save the result in a CSV file."""', 'epilog': '"""For example: anpr_camera_stream.py --camera rtsp://192.168.1.2:5554/camera --api-key TOKEN --regions fr --output /path/to/output.csv"""'}), "(description=\n 'Read license plates from a RTSP stream and save the result in a CSV file.'\n , epilog=\n 'For example: anpr_camera_stream.py --camera rtsp://192.168.1.2:5554/camera --api-key TOKEN --regions fr --output /path/to/output.csv'\n )\n", (373, 625), False, 'import argparse\n'), ((1479, 1524), 'cv2.VideoCapture', 'cv2.VideoCapture', (['args.camera', 'cv2.CAP_FFMPEG'], {}), '(args.camera, cv2.CAP_FFMPEG)\n', (1495, 1524), False, 'import cv2\n'), ((1697, 1732), 'threading.Thread', 'Thread', ([], {'target': 'self.update', 'args': '()'}), '(target=self.update, args=())\n', (1703, 1732), False, 'from threading import Thread\n'), ((2075, 2089), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (2086, 2089), False, 'import cv2\n'), ((2383, 2395), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (2393, 2395), False, 'import io\n'), ((2409, 2431), 'PIL.Image.fromarray', 'Image.fromarray', (['frame'], {}), '(frame)\n', (2424, 2431), False, 'from PIL import Image\n'), ((3254, 3295), 'csv.DictWriter', 'csv.DictWriter', (['output'], {'fieldnames': 'fields'}), '(output, fieldnames=fields)\n', (3268, 3295), False, 'import csv\n'), ((2338, 2364), 'cv2.imshow', 'cv2.imshow', (['"""frame"""', 'frame'], {}), "('frame', frame)\n", (2348, 2364), False, 'import cv2\n'), ((2833, 2849), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (2847, 2849), False, 'from datetime import datetime\n')]
|
"""Request/Response Schemas are defined here"""
# pylint: disable=invalid-name
from marshmallow import Schema, fields, validate
from todo.constants import TO_DO, IN_PROGRESS, DONE
class TaskSchema(Schema):
    """Schema for serializing an instance of Task"""
    # Every field is required: the schema describes a complete Task record.
    id = fields.Int(required=True)
    title = fields.Str(required=True)
    description = fields.Str(required=True)
    # Status is restricted to the workflow states imported from
    # todo.constants; the error template interpolates choices and input.
    status = fields.Str(
        required=True,
        validate=validate.OneOf(
            choices=[TO_DO, IN_PROGRESS, DONE],
            error="Status must be one of {choices} (given: {input})"))
    # Presumably the task's ordinal within its board — confirm against model.
    number = fields.Int(required=True)
    created_at = fields.DateTime(required=True)
    updated_at = fields.DateTime(required=True)
class BoardSchema(Schema):
    """Schema for serializing an instance of Board"""
    # All fields required; tasks are exposed only by BoardDetailsSchema.
    id = fields.Int(required=True)
    name = fields.Str(required=True)
    created_at = fields.DateTime(required=True)
    updated_at = fields.DateTime(required=True)
class BoardDetailsSchema(BoardSchema):
    """Serializer for a Board together with its nested tasks.

    Inherits every plain Board field and additionally renders each of
    the board's tasks through TaskSchema.
    """
    tasks = fields.Nested(TaskSchema, many=True)
|
[
"marshmallow.fields.Int",
"marshmallow.validate.OneOf",
"marshmallow.fields.DateTime",
"marshmallow.fields.Str",
"marshmallow.fields.Nested"
] |
[((272, 297), 'marshmallow.fields.Int', 'fields.Int', ([], {'required': '(True)'}), '(required=True)\n', (282, 297), False, 'from marshmallow import Schema, fields, validate\n'), ((310, 335), 'marshmallow.fields.Str', 'fields.Str', ([], {'required': '(True)'}), '(required=True)\n', (320, 335), False, 'from marshmallow import Schema, fields, validate\n'), ((354, 379), 'marshmallow.fields.Str', 'fields.Str', ([], {'required': '(True)'}), '(required=True)\n', (364, 379), False, 'from marshmallow import Schema, fields, validate\n'), ((593, 618), 'marshmallow.fields.Int', 'fields.Int', ([], {'required': '(True)'}), '(required=True)\n', (603, 618), False, 'from marshmallow import Schema, fields, validate\n'), ((636, 666), 'marshmallow.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)'}), '(required=True)\n', (651, 666), False, 'from marshmallow import Schema, fields, validate\n'), ((684, 714), 'marshmallow.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)'}), '(required=True)\n', (699, 714), False, 'from marshmallow import Schema, fields, validate\n'), ((807, 832), 'marshmallow.fields.Int', 'fields.Int', ([], {'required': '(True)'}), '(required=True)\n', (817, 832), False, 'from marshmallow import Schema, fields, validate\n'), ((844, 869), 'marshmallow.fields.Str', 'fields.Str', ([], {'required': '(True)'}), '(required=True)\n', (854, 869), False, 'from marshmallow import Schema, fields, validate\n'), ((887, 917), 'marshmallow.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)'}), '(required=True)\n', (902, 917), False, 'from marshmallow import Schema, fields, validate\n'), ((935, 965), 'marshmallow.fields.DateTime', 'fields.DateTime', ([], {'required': '(True)'}), '(required=True)\n', (950, 965), False, 'from marshmallow import Schema, fields, validate\n'), ((1087, 1123), 'marshmallow.fields.Nested', 'fields.Nested', (['TaskSchema'], {'many': '(True)'}), '(TaskSchema, many=True)\n', (1100, 1123), False, 'from marshmallow import 
Schema, fields, validate\n'), ((445, 558), 'marshmallow.validate.OneOf', 'validate.OneOf', ([], {'choices': '[TO_DO, IN_PROGRESS, DONE]', 'error': '"""Status must be one of {choices} (given: {input})"""'}), "(choices=[TO_DO, IN_PROGRESS, DONE], error=\n 'Status must be one of {choices} (given: {input})')\n", (459, 558), False, 'from marshmallow import Schema, fields, validate\n')]
|
import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import numpy as np
import datetime
from Hyperparameters import args
class Encoder(nn.Module):
    """Recurrent sequence encoder (LSTM or GRU, optionally bidirectional).

    The recurrent unit type and all sizes are read from the global
    ``args`` hyperparameter dict.
    """

    def __init__(self, w2i, i2w, embedding, bidirectional=False):
        """
        Args:
            w2i: word -> index vocabulary mapping (kept for reference)
            i2w: index -> word vocabulary mapping (kept for reference)
            embedding: nn.Embedding used to embed input token ids
            bidirectional: run the RNN in both directions if True

        Raises:
            ValueError: if ``args['encunit']`` is neither 'lstm' nor 'gru'.
        """
        super(Encoder, self).__init__()
        print("Encoder creation...")
        self.word2index = w2i
        self.index2word = i2w
        self.max_length = args['maxLengthDeco']
        self.dtype = 'float32'
        self.embedding = embedding
        self.bidirectional = bidirectional
        if args['encunit'] == 'lstm':
            self.enc_unit = nn.LSTM(input_size=args['embeddingSize'], hidden_size=args['hiddenSize'],
                                    num_layers=args['enc_numlayer'], bidirectional=bidirectional)
        elif args['encunit'] == 'gru':
            self.enc_unit = nn.GRU(input_size=args['embeddingSize'], hidden_size=args['hiddenSize'],
                                   num_layers=args['enc_numlayer'], bidirectional=bidirectional)
        else:
            # Fail fast instead of leaving self.enc_unit undefined and
            # crashing later with an AttributeError in forward().
            raise ValueError("Unknown encoder unit type: %r" % args['encunit'])
        self.element_len = args['hiddenSize']

    def forward(self, encoderInputs, encoder_lengths):
        '''
        :param encoderInputs: [batch, enc_len] tensor of token ids
        :param encoder_lengths: sequence lengths (stored, not otherwise used)
        :return: (outputs [batch, seq, hidden * num_directions],
                  final hidden state of the recurrent unit)
        '''
        self.encoderInputs = encoderInputs
        self.encoder_lengths = encoder_lengths
        self.batch_size = self.encoderInputs.size()[0]
        self.enc_len = self.encoderInputs.size()[1]
        enc_input_embed = self.embedding(self.encoderInputs)
        # encode() works seq-first; transpose back to batch-first for callers.
        en_outputs, en_state = self.encode(enc_input_embed, self.batch_size)
        en_outputs = torch.transpose(en_outputs, 0, 1)
        return en_outputs, en_state

    def encode(self, inputs, batch_size):
        """Run the RNN over batch-first embedded inputs.

        Returns seq-first outputs and the final hidden state.
        """
        inputs = torch.transpose(inputs, 0, 1)  # -> [seq, batch, emb]
        bidirec = 2 if self.bidirectional else 1
        shape = (args['enc_numlayer'] * bidirec, batch_size, args['hiddenSize'])
        h0 = torch.randn(*shape).to(args['device'])
        if isinstance(self.enc_unit, nn.LSTM):
            # LSTM expects a (hidden, cell) pair as its initial state.
            c0 = torch.randn(*shape).to(args['device'])
            hidden = (h0, c0)
        else:
            # BUGFIX: GRU takes a single hidden Tensor; the original code
            # passed the LSTM-style tuple here, which raises at runtime.
            hidden = h0
        packed_out, hidden = self.enc_unit(inputs, hidden)
        return packed_out, hidden
|
[
"torch.nn.GRU",
"torch.nn.LSTM",
"torch.randn",
"torch.transpose"
] |
[((1884, 1917), 'torch.transpose', 'torch.transpose', (['en_outputs', '(0)', '(1)'], {}), '(en_outputs, 0, 1)\n', (1899, 1917), False, 'import torch\n'), ((2014, 2043), 'torch.transpose', 'torch.transpose', (['inputs', '(0)', '(1)'], {}), '(inputs, 0, 1)\n', (2029, 2043), False, 'import torch\n'), ((777, 916), 'torch.nn.LSTM', 'nn.LSTM', ([], {'input_size': "args['embeddingSize']", 'hidden_size': "args['hiddenSize']", 'num_layers': "args['enc_numlayer']", 'bidirectional': 'bidirectional'}), "(input_size=args['embeddingSize'], hidden_size=args['hiddenSize'],\n num_layers=args['enc_numlayer'], bidirectional=bidirectional)\n", (784, 916), True, 'import torch.nn as nn\n'), ((1018, 1156), 'torch.nn.GRU', 'nn.GRU', ([], {'input_size': "args['embeddingSize']", 'hidden_size': "args['hiddenSize']", 'num_layers': "args['enc_numlayer']", 'bidirectional': 'bidirectional'}), "(input_size=args['embeddingSize'], hidden_size=args['hiddenSize'],\n num_layers=args['enc_numlayer'], bidirectional=bidirectional)\n", (1024, 1156), True, 'import torch.nn as nn\n'), ((2138, 2213), 'torch.randn', 'torch.randn', (["(args['enc_numlayer'] * bidirec)", 'batch_size', "args['hiddenSize']"], {}), "(args['enc_numlayer'] * bidirec, batch_size, args['hiddenSize'])\n", (2149, 2213), False, 'import torch\n'), ((2259, 2334), 'torch.randn', 'torch.randn', (["(args['enc_numlayer'] * bidirec)", 'batch_size', "args['hiddenSize']"], {}), "(args['enc_numlayer'] * bidirec, batch_size, args['hiddenSize'])\n", (2270, 2334), False, 'import torch\n')]
|