Dataset schema (29 columns):

- hexsha: string, length 40
- size: int64, 1 to 1.03M
- ext: string, 10 classes
- lang: string, 1 class
- max_stars_repo_path: string, length 3 to 239
- max_stars_repo_name: string, length 5 to 130
- max_stars_repo_head_hexsha: string, length 40 to 78
- max_stars_repo_licenses: list, length 1 to 10
- max_stars_count: int64, 1 to 191k, nullable
- max_stars_repo_stars_event_min_datetime: string, length 24, nullable
- max_stars_repo_stars_event_max_datetime: string, length 24, nullable
- max_issues_repo_path: string, length 3 to 239
- max_issues_repo_name: string, length 5 to 130
- max_issues_repo_head_hexsha: string, length 40 to 78
- max_issues_repo_licenses: list, length 1 to 10
- max_issues_count: int64, 1 to 67k, nullable
- max_issues_repo_issues_event_min_datetime: string, length 24, nullable
- max_issues_repo_issues_event_max_datetime: string, length 24, nullable
- max_forks_repo_path: string, length 3 to 239
- max_forks_repo_name: string, length 5 to 130
- max_forks_repo_head_hexsha: string, length 40 to 78
- max_forks_repo_licenses: list, length 1 to 10
- max_forks_count: int64, 1 to 105k, nullable
- max_forks_repo_forks_event_min_datetime: string, length 24, nullable
- max_forks_repo_forks_event_max_datetime: string, length 24, nullable
- content: string, length 1 to 1.03M
- avg_line_length: float64, 1 to 958k
- max_line_length: int64, 1 to 1.03M
- alphanum_fraction: float64, 0 to 1

Each record below lists the metadata fields, then the file content, then the per-file statistics.
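A minimal sketch of how rows with this schema might be filtered, assuming they have been loaded into a pandas DataFrame whose columns match the names above (the file name `code_rows.parquet` is only a placeholder):

```python
import pandas as pd

# Hypothetical input file; any source that yields the columns listed above works.
df = pd.read_parquet("code_rows.parquet")

# Keep Python files that have received at least one star.
py_rows = df[(df["ext"] == "py") & df["max_stars_count"].notna()]

# The ten most-starred files, with the repositories they came from.
top = py_rows.nlargest(10, "max_stars_count")[
    ["max_stars_repo_name", "max_stars_repo_path", "max_stars_count", "avg_line_length"]
]
print(top.to_string(index=False))
```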
hexsha: 794846e46e6531ec69b94830ceff280a92bc4e6d | size: 511 | ext: py | lang: Python
max_stars: Lib/site-packages/plotly/validators/histogram/marker/line/_reversescale.py | tytanya/my-first-blog | 2b40adb0816c3546e90ad6ca1e7fb50d924c1536 | ["bzip2-1.0.6"] | count: 12 | 2020-04-18T18:10:22.000Z | 2021-12-06T10:11:15.000Z
max_issues: plotly/validators/histogram/marker/line/_reversescale.py | Vesauza/plotly.py | e53e626d59495d440341751f60aeff73ff365c28 | ["MIT"] | count: 27 | 2020-04-28T21:23:12.000Z | 2021-06-25T15:36:38.000Z
max_forks: plotly/validators/histogram/marker/line/_reversescale.py | Vesauza/plotly.py | e53e626d59495d440341751f60aeff73ff365c28 | ["MIT"] | count: 6 | 2020-04-18T23:07:08.000Z | 2021-11-18T07:53:06.000Z
content:
import _plotly_utils.basevalidators
class ReversescaleValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(
self,
plotly_name='reversescale',
parent_name='histogram.marker.line',
**kwargs
):
super(ReversescaleValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'plot'),
role=kwargs.pop('role', 'style'),
**kwargs
)
avg_line_length: 26.894737 | max_line_length: 75 | alphanum_fraction: 0.620352

hexsha: 79484759270586212bbf1b6aebe1d1168aae1382 | size: 1,189 | ext: py | lang: Python
max_stars: FavoriteFilesLib/file_strip/json.py | jasondavis/FavoriteFiles | be088259ac36383399eebe85d8d5b35e235d25b0 | ["MIT", "Unlicense"] | count: 1 | 2019-04-27T20:13:19.000Z | 2019-04-27T20:13:19.000Z
max_issues: FavoriteFilesLib/file_strip/json.py | jasondavis/FavoriteFiles | be088259ac36383399eebe85d8d5b35e235d25b0 | ["MIT", "Unlicense"] | count: null | null | null
max_forks: FavoriteFilesLib/file_strip/json.py | jasondavis/FavoriteFiles | be088259ac36383399eebe85d8d5b35e235d25b0 | ["MIT", "Unlicense"] | count: null | null | null
content:
'''
Favorite Files
Licensed under MIT
Copyright (c) 2012 Isaac Muse <isaacmuse@gmail.com>
'''
import re
from comments import Comments
def strip_dangling_commas(text, preserve_lines=False):
regex = re.compile(
# ([1st group] dangling commas) | ([8th group] everything else)
r"""((,([\s\r\n]*)(\]))|(,([\s\r\n]*)(\})))|("(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|.[^,"']*)""",
re.MULTILINE | re.DOTALL
)
def remove_comma(m, preserve_lines=False):
if preserve_lines:
# ,] -> ] else ,} -> }
return m.group(3) + m.group(4) if m.group(2) else m.group(6) + m.group(7)
else:
# ,] -> ] else ,} -> }
return m.group(4) if m.group(2) else m.group(7)
return (
''.join(
map(
lambda m: m.group(8) if m.group(8) else remove_comma(m, preserve_lines),
regex.finditer(text)
)
)
)
def strip_comments(text, preserve_lines=False):
return Comments('json', preserve_lines).strip(text)
def sanitize_json(text, preserve_lines=False):
return strip_dangling_commas(Comments('json', preserve_lines).strip(text), preserve_lines)
avg_line_length: 28.309524 | max_line_length: 102 | alphanum_fraction: 0.552565

hexsha: 794847f22d9b011e7fce5e58e89a0a07ff42ed68 | size: 205 | ext: py | lang: Python
max_stars: src/python/paperetl/file/__main__.py | Abhimalya/paperetl | 99a76ab1c9267fe44b9e56739545cbd831d0dce4 | ["Apache-2.0"] | count: 95 | 2020-07-10T17:51:14.000Z | 2022-03-30T21:03:59.000Z
max_issues: src/python/paperetl/file/__main__.py | Abhimalya/paperetl | 99a76ab1c9267fe44b9e56739545cbd831d0dce4 | ["Apache-2.0"] | count: 40 | 2020-07-24T20:07:44.000Z | 2021-12-17T18:09:53.000Z
max_forks: src/python/paperetl/file/__main__.py | Abhimalya/paperetl | 99a76ab1c9267fe44b9e56739545cbd831d0dce4 | ["Apache-2.0"] | count: 12 | 2020-07-16T01:16:51.000Z | 2022-02-19T19:01:48.000Z
content:
"""
Defines main entry point for ETL process.
"""
import sys
from .execute import Execute
if __name__ == "__main__":
if len(sys.argv) > 3:
Execute.run(sys.argv[1], sys.argv[2], sys.argv[3])
avg_line_length: 17.083333 | max_line_length: 58 | alphanum_fraction: 0.643902

hexsha: 794848183302c39cde630c6c86f0d09293f854b8 | size: 2,650 | ext: py | lang: Python
max_stars: moldesign/_tests/test_qmmm.py | Autodesk/molecular-design-toolkit | 5f45a47fea21d3603899a6366cb163024f0e2ec4 | ["Apache-2.0"] | count: 147 | 2016-07-15T18:53:55.000Z | 2022-01-30T04:36:39.000Z
max_issues: moldesign/_tests/test_qmmm.py | cherishyli/molecular-design-toolkit | 5f45a47fea21d3603899a6366cb163024f0e2ec4 | ["Apache-2.0"] | count: 151 | 2016-07-15T21:35:11.000Z | 2019-10-10T08:57:29.000Z
max_forks: moldesign/_tests/test_qmmm.py | cherishyli/molecular-design-toolkit | 5f45a47fea21d3603899a6366cb163024f0e2ec4 | ["Apache-2.0"] | count: 33 | 2016-08-02T00:04:51.000Z | 2021-09-02T10:05:04.000Z
content:
import pytest
import moldesign as mdt
from moldesign import units as u
from . import helpers
# Tests:
# 1. internal bonds on QM region are removed in all cases
# 2. wavefunction is perturbed for electrostatic embedding
# 3.
@pytest.fixture
def h2params():
mol = mdt.from_smiles('[H][H]')
mol.atoms[0].name = 'HA'
mol.atoms[1].name = 'HB'
mol.residues[0].name = 'H2'
params = mdt.create_ff_parameters(mol, charges='gasteiger')
return mol, params
@pytest.fixture(scope='function')
def h2_h2_with_ff(h2params):
ma, params = h2params
ma.residues[0].resname = 'UNL'
ma.atoms[0].name = 'HA'
ma.atoms[1].name = 'HB'
list(ma.bonds)[0].align('x')
mb = ma.copy()
mb.translate([0.0, 2.0, 0.0]*u.angstrom)
mol = ma.combine(mb)
params.assign(mol)
return mol
@pytest.fixture
def h2_mm(h2params):
mol, params = h2params
params.assign(mol)
mol.set_energy_model(mdt.models.OpenMMPotential)
return mol
@pytest.fixture
def h2_qm(h2params):
mol, params = h2params
mol.set_energy_model(mdt.models.RHF, basis='sto-3g')
return mol
@pytest.fixture
def h2_h2_mm(h2_h2_with_ff):
h2_h2_with_ff.set_energy_model(mdt.models.OpenMMPotential)
return h2_h2_with_ff
@pytest.fixture
def h2_h2_rhf(h2_h2_with_ff):
h2_h2_with_ff.set_energy_model(mdt.models.RHF, basis='sto-3g')
return h2_h2_with_ff
@pytest.fixture
def h2_h2_mechanical_embedding_rhf(h2_h2_with_ff):
mol = h2_h2_with_ff
mol.set_energy_model(mdt.models.MechanicalEmbeddingQMMM,
qm_atom_indices=[0, 1],
qm_model=mdt.models.RHF(basis='sto-3g'),
mm_model=mdt.models.OpenMMPotential)
return mol
@pytest.fixture
def h2_h2_mechanical_embedding_zeroqm(h2_h2_with_ff):
mol = h2_h2_with_ff
mol.set_energy_model(mdt.models.MechanicalEmbeddingQMMM,
qm_atom_indices=[0, 1],
qm_model=helpers.ZeroEnergy,
mm_model=mdt.models.OpenMMPotential)
return mol
def test_mechanical_embedding_wfn(h2_h2_mechanical_embedding_rhf):
mol = h2_h2_mechanical_embedding_rhf
mol.calculate()
qmprops = mol.properties.qmprops
mmprops = mol.properties.mmprops
h2_qm = mdt.Molecule(mol.residues[0])
h2_qm.set_energy_model(mdt.models.RHF, basis='sto-3g')
h2_qm.calculate()
assert abs(h2_qm.potential_energy - qmprops.potential_energy) < 1e-8 * u.hartree
helpers.assert_almost_equal(h2_qm.wfn.fock_ao, qmprops.wfn.fock_ao)
assert qmprops.potential_energy + mmprops.potential_energy == mol.potential_energy
avg_line_length: 25.728155 | max_line_length: 86 | alphanum_fraction: 0.688679

hexsha: 7948485e6fa9776c962180094d82469ba0f55c4e | size: 8,149 | ext: py | lang: Python
max_stars: close_crawl/modules/cleaner.py | kug3lblitz/maryland-foreclosure-scraper | d1b8851efb4f82bc305f5d23c079b1f83ef73be4 | ["MIT"] | count: 6 | 2018-04-24T05:35:38.000Z | 2021-04-03T23:53:24.000Z
max_issues: close_crawl/modules/cleaner.py | kug3lblitz/maryland-foreclosure-scraper | d1b8851efb4f82bc305f5d23c079b1f83ef73be4 | ["MIT"] | count: 22 | 2016-12-27T16:58:01.000Z | 2017-04-14T15:15:46.000Z
max_forks: close_crawl/modules/cleaner.py | BNIA/maryland-foreclosure-scraper | d1b8851efb4f82bc305f5d23c079b1f83ef73be4 | ["MIT"] | count: 4 | 2016-12-22T22:07:18.000Z | 2020-01-03T04:31:08.000Z
content:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Cleaner
This module implements post-scraping cleaning processes on the raw initial
dataset. Processes include stripping excess strings off Address values,
removing Zip Code and Partial Cost values mislabeled as Address, and merging
rows containing blank values in alternating features.
The script works as an internal module for Close Crawl, but can be executed
as a standalone to manually process datasets:
$ python cleaner.py <path/to/old/dataset> <path/of/new/dataset>
"""
from __future__ import absolute_import, print_function, unicode_literals
from pandas import DataFrame, concat, read_csv, to_datetime
from .patterns import NULL_ADDR, STRIP_ADDR, filter_addr, punctuation
class Cleaner(object):
"""Class object for cleaning the raw dataset extracted after the initial
scraping
"""
def __init__(self, path):
"""Constructor for Cleaner
Args:
path (`str`): path to input CSV dataset
Attributes:
df (`pandas.core.frame.DataFrame`): initial DataFrame
columns (`list` of `str`): columns of the DataFrame
clean_df (`pandas.core.frame.DataFrame`): final DataFrame to be
outputted
"""
self.df = self.prettify(read_csv(path))
self.columns = list(self.df)
self.clean_df = []
@staticmethod
def prettify(df, internal=True):
"""Drops duplicates, sorts and fills missing values in the DataFrame
to make it manageable.
Args:
df (`pandas.core.frame.DataFrame`): DataFrame to be managed
internal (`bool`, optional): flag for determining state of
DataFrame
Returns:
df (`pandas.core.frame.DataFrame`): organized DataFrame
"""
df.drop_duplicates(inplace=True, keep=False)
df["Filing Date"] = to_datetime(df["Filing Date"])
df.sort_values(
["Filing Date", "Case Number", "Address"],
ascending=[True] * 3,
inplace=True
)
if internal:
df["Zip Code"] = df["Zip Code"].fillna(0.0).astype(int)
df["Zip Code"] = df["Zip Code"].replace(0, '')
return df
def clean_addr(self):
"""Cleans excess strings off Address values and removes Zip Code and
Partial Cost values mislabeled as Address.
Args:
None
Returns:
None
"""
def clean_string(addr):
"""Applies regular expressions and other filters on Address
values
Args:
addr (`str`): Address value to be filtered
Returns:
addr (`str`): filtered Address value
"""
# if value does not match the street_address pattern
if not filter_addr(addr): # patterns.filter_addr
if NULL_ADDR.sub('', addr): # value may contain valid Address
return unicode(
STRIP_ADDR.sub(
'', addr) # strip off Zip Code and Partial Cost
).translate(
{ord(c): None for c in punctuation}
).strip() # strip off punctuations
return addr
print("Cleaning addresses...", end=" ")
self.df["Address"] = self.df["Address"].apply(
lambda x: clean_string(x)
)
self.df["Address"] = self.df["Address"].apply(
lambda x: NULL_ADDR.sub('', x)
)
# replace empty string values with NULL
self.df["Zip Code"] = self.df["Zip Code"].replace('', float("nan"))
self.df["Address"] = self.df["Address"].replace('', float("nan"))
print("Done")
@staticmethod
def combine_rows(row):
"""Merges rows after filtering out common values
Args:
row (`list` of `list` of `str`): groupby("Case Number") rows
Returns:
(`list` of `str`): merged row
"""
def __filter_tuple(col):
"""Filters common values from rows
Args:
col (`tuple` of `str`): values per column
Returns:
value (`str`): common value found per mergeable rows
"""
for value in set(col):
if value == value: # equivalent to value != NaN
return value
return [__filter_tuple(x) for x in zip(*row)]
@staticmethod
def mergeable(bool_vec):
"""Determines if groupby("Case Number") rows are mergeable
Example:
bool_vec = [
[True, True, True, True, True, True, False, True, True],
[True, True, True, True, True, True, True, False, False],
[True, True, True, True, True, True, False, False, False]
]
__sum_col(bool_vec) -> [3, 3, 3, 3, 3, 3, 1, 1, 1]
__bool_pat(__sum_col(bool_vec)) -> True
Args:
bool_vec (`list` of `bool`): represents non-NULL values
Returns:
(`bool`): True if rows are mergeable
"""
def __sum_col():
"""Sums columns
Args:
None
Returns:
(`list` of `int`): sum of columns
"""
return [sum(x) for x in zip(*bool_vec)]
def __bool_pat(row):
"""Determines mergeability
Args:
None
Returns:
(`bool`): True if rows are mergeable
"""
return set(row[-3:]) == set([1]) and set(row[:-3]) != set([1])
return True if __bool_pat(__sum_col()) else False
def merge_nulls(self):
"""Splits DataFrames into those with NULL values to be merged, and then
later merged with the original DataFrame
Args:
None
Returns:
None
"""
print("Merging rows...", end=" ")
# filter out rows with any NULL values
origin_df = self.df.dropna()
# filter out rows only with NULL values
null_df = self.df[self.df.isnull().any(axis=1)]
# boolean representation of the DataFrame with NULL values
bool_df = null_df.notnull()
# (`list` of `dict` of `str` : `str`) to be converted to a DataFrame
new_df = []
for i in null_df["Case Number"].unique():
bool_row = bool_df[null_df["Case Number"] == i]
new_row = null_df[null_df["Case Number"] == i]
# if the rows are mergeable, combine them
if self.mergeable(bool_row.values):
new_row = self.combine_rows(new_row.values.tolist())
new_df.append(
{
feature: value
for feature, value in zip(self.columns, new_row)
}
)
# else, treat them individually
else:
new_row = new_row.values.tolist()
for row in new_row:
new_df.append(
{
feature: value
for feature, value in zip(self.columns, row)
}
)
# merge the DataFrames back
self.clean_df = concat(
[origin_df, DataFrame(new_df)]
).reset_index(drop=True)
# prettify the new DataFrame
self.clean_df = self.prettify(
self.clean_df[self.columns], internal=False
)
print("Done")
def init_clean(self):
"""Initializes cleaning process
Args:
None
Returns:
None
"""
self.clean_addr()
self.merge_nulls()
def download(self, output_name):
"""Downloads the cleaned and manipulated DataFrame into a CSV file
Args:
output_name (`str`): path of the new output file
Returns:
None
"""
self.clean_df.to_csv(output_name, index=False)
avg_line_length: 28.393728 | max_line_length: 79 | alphanum_fraction: 0.53344

hexsha: 79484907978ebd8618833a7e02f8bf39e168c71e | size: 614 | ext: py | lang: Python
max_stars: utils/get_discount_sum_15.py | Cratosart/test_disc | 565a86c0f0da15208b4822c5157d655ffaa86451 | ["MIT"] | count: null | null | null
max_issues: utils/get_discount_sum_15.py | Cratosart/test_disc | 565a86c0f0da15208b4822c5157d655ffaa86451 | ["MIT"] | count: null | null | null
max_forks: utils/get_discount_sum_15.py | Cratosart/test_disc | 565a86c0f0da15208b4822c5157d655ffaa86451 | ["MIT"] | count: null | null | null
content:
def get_discount_sum_15(state, price):
if state['what_to_store'] == 'season_things':
thing = state['thing']
final_sum = price[thing]['week'] * state['weeks_amount'] + \
price[thing]['month'] * state['month_amount']
final_sum = final_sum * 0.85
else:
first_month_sum = price['ceel']['first_sq_m'] * state['cell_size']
other_month_sum = price['ceel']['other_sq_m'] * \
state['cell_size'] * (state['cell_period'] - 1)
final_sum = first_month_sum + other_month_sum
final_sum = final_sum * 0.85
return final_sum
avg_line_length: 40.933333 | max_line_length: 75 | alphanum_fraction: 0.594463
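A short usage sketch for the `get_discount_sum_15` record above; the `state` and `price` dictionaries are invented for illustration and only mirror the keys the function reads (including the `'ceel'` spelling used in the source):

```python
# Hypothetical inputs mirroring the keys read by get_discount_sum_15.
price = {
    "skis": {"week": 100, "month": 350},
    "ceel": {"first_sq_m": 600, "other_sq_m": 200},  # key spelled as in the function
}

season_state = {
    "what_to_store": "season_things",
    "thing": "skis",
    "weeks_amount": 2,
    "month_amount": 1,
}
cell_state = {"what_to_store": "other", "cell_size": 2, "cell_period": 3}

# Seasonal storage: (2*100 + 1*350) * 0.85 = 467.5
print(get_discount_sum_15(season_state, price))
# Cell storage: (600*2 + 200*2*(3-1)) * 0.85 = 1700.0
print(get_discount_sum_15(cell_state, price))
```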

hexsha: 7948497338aa016c7400850d6cec327f8a251279 | size: 3,571 | ext: py | lang: Python
max_stars: ironic/conf/opts.py | armohamm/ironic | 21093ca886ed736a7a25bf5e71e05d41e132fd2f | ["Apache-2.0"] | count: 2 | 2019-06-17T21:37:53.000Z | 2020-07-11T03:58:39.000Z
max_issues: ironic/conf/opts.py | armohamm/ironic | 21093ca886ed736a7a25bf5e71e05d41e132fd2f | ["Apache-2.0"] | count: 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z
max_forks: ironic/conf/opts.py | armohamm/ironic | 21093ca886ed736a7a25bf5e71e05d41e132fd2f | ["Apache-2.0"] | count: 6 | 2019-06-13T12:49:33.000Z | 2021-04-17T16:33:19.000Z
content:
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
from oslo_log import log
import ironic.conf
_default_opt_lists = [
ironic.conf.default.api_opts,
ironic.conf.default.driver_opts,
ironic.conf.default.exc_log_opts,
ironic.conf.default.hash_opts,
ironic.conf.default.image_opts,
ironic.conf.default.img_cache_opts,
ironic.conf.default.netconf_opts,
ironic.conf.default.notification_opts,
ironic.conf.default.path_opts,
ironic.conf.default.portgroup_opts,
ironic.conf.default.service_opts,
ironic.conf.default.utils_opts,
]
_opts = [
('DEFAULT', itertools.chain(*_default_opt_lists)),
('agent', ironic.conf.agent.opts),
('ansible', ironic.conf.ansible.opts),
('api', ironic.conf.api.opts),
('audit', ironic.conf.audit.opts),
('cimc', ironic.conf.cisco.cimc_opts),
('cinder', ironic.conf.cinder.list_opts()),
('cisco_ucs', ironic.conf.cisco.ucsm_opts),
('conductor', ironic.conf.conductor.opts),
('console', ironic.conf.console.opts),
('database', ironic.conf.database.opts),
('deploy', ironic.conf.deploy.opts),
('dhcp', ironic.conf.dhcp.opts),
('drac', ironic.conf.drac.opts),
('glance', ironic.conf.glance.list_opts()),
('healthcheck', ironic.conf.healthcheck.opts),
('ilo', ironic.conf.ilo.opts),
('inspector', ironic.conf.inspector.list_opts()),
('ipmi', ironic.conf.ipmi.opts),
('irmc', ironic.conf.irmc.opts),
('iscsi', ironic.conf.iscsi.opts),
('json_rpc', ironic.conf.json_rpc.list_opts()),
('metrics', ironic.conf.metrics.opts),
('metrics_statsd', ironic.conf.metrics_statsd.opts),
('neutron', ironic.conf.neutron.list_opts()),
('pxe', ironic.conf.pxe.opts),
('service_catalog', ironic.conf.service_catalog.list_opts()),
('snmp', ironic.conf.snmp.opts),
('swift', ironic.conf.swift.list_opts()),
('xclarity', ironic.conf.xclarity.opts),
]
def list_opts():
"""Return a list of oslo.config options available in Ironic code.
The returned list includes all oslo.config options. Each element of
the list is a tuple. The first element is the name of the group, the
second element is the options.
The function is discoverable via the 'ironic' entry point under the
'oslo.config.opts' namespace.
The function is used by Oslo sample config file generator to discover the
options.
:returns: a list of (group, options) tuples
"""
return _opts
def update_opt_defaults():
log.set_defaults(
default_log_levels=[
'amqp=WARNING',
'amqplib=WARNING',
'qpid.messaging=INFO',
'oslo.messaging=INFO',
'sqlalchemy=WARNING',
'stevedore=INFO',
'eventlet.wsgi.server=INFO',
'iso8601=WARNING',
'requests=WARNING',
'neutronclient=WARNING',
'glanceclient=WARNING',
'urllib3.connectionpool=WARNING',
'keystonemiddleware.auth_token=INFO',
'keystoneauth.session=INFO',
]
)
avg_line_length: 34.009524 | max_line_length: 77 | alphanum_fraction: 0.672641

hexsha: 79484baceacf87b4181d697ae171edcfb66776e7 | size: 6,243 | ext: py | lang: Python
max_stars: route/login_check_key.py | norhu1130/openNAMU | 332ffd4d571cc1f28de427761ef926a015ea3862 | ["BSD-3-Clause"] | count: null | null | null
max_issues: route/login_check_key.py | norhu1130/openNAMU | 332ffd4d571cc1f28de427761ef926a015ea3862 | ["BSD-3-Clause"] | count: null | null | null
max_forks: route/login_check_key.py | norhu1130/openNAMU | 332ffd4d571cc1f28de427761ef926a015ea3862 | ["BSD-3-Clause"] | count: null | null | null
content:
from .tool.func import *
def login_check_key_2(conn, tool):
curs = conn.cursor()
if flask.request.method == 'POST' or \
('c_key' in flask.session and flask.session['c_key'] == 'email_pass'):
re_set_list = ['c_id', 'c_pw', 'c_ans', 'c_que', 'c_key', 'c_type', 'c_email']
ip = ip_check()
input_key = flask.request.form.get('key', '')
user_agent = flask.request.headers.get('User-Agent', '')
if 'c_type' in flask.session and \
flask.session['c_type'] == 'pass_find' and \
flask.session['c_key'] == input_key:
curs.execute(db_change("update user set pw = ? where id = ?"), [pw_encode(flask.session['c_key']), flask.session['c_id']])
conn.commit()
user_id = flask.session['c_id']
user_pw = flask.session['c_key']
for i in re_set_list:
flask.session.pop(i, None)
curs.execute(db_change('select data from other where name = "reset_user_text"'))
sql_d = curs.fetchall()
b_text = (sql_d[0][0] + '<hr class="main_hr">') if sql_d and sql_d[0][0] != '' else ''
curs.execute(db_change('select data from user_set where name = "2fa" and id = ?'), [user_id])
if curs.fetchall():
curs.execute(db_change("update user_set set data = '' where name = '2fa' and id = ?"), [user_id])
return easy_minify(flask.render_template(skin_check(),
imp = [load_lang('reset_user_ok'), wiki_set(), custom(), other2([0, 0])],
data = b_text + load_lang('id') + ' : ' + user_id + '<br>' + load_lang('password') + ' : ' + user_pw,
menu = [['user', load_lang('return')]]
))
elif 'c_type' in flask.session and \
(flask.session['c_key'] == input_key or flask.session['c_key'] == 'email_pass'):
curs.execute(db_change('select data from other where name = "encode"'))
db_data = curs.fetchall()
if flask.session['c_type'] == 'register':
if flask.session['c_key'] == 'email_pass':
flask.session['c_email'] = ''
curs.execute(db_change("select id from user limit 1"))
first = 1 if not curs.fetchall() else 0
curs.execute(db_change("select id from user where id = ?"), [flask.session['c_id']])
if curs.fetchall():
for i in re_set_list:
flask.session.pop(i, None)
return re_error('/error/6')
curs.execute(db_change("select id from user_application where id = ?"), [flask.session['c_id']])
if curs.fetchall():
for i in re_set_list:
flask.session.pop(i, None)
return re_error('/error/6')
curs.execute(db_change('select data from other where name = "requires_approval"'))
requires_approval = curs.fetchall()
if requires_approval and requires_approval[0][0] == 'on':
application_token = load_random_key(32)
curs.execute(db_change(
"insert into user_application (id, pw, date, encode, question, answer, token, ip, ua, email) " + \
"values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
), [
flask.session['c_id'],
flask.session['c_pw'],
get_time(),
db_data[0][0],
flask.session['c_que'],
flask.session['c_ans'],
application_token,
ip,
user_agent,
flask.session['c_email']
])
conn.commit()
for i in re_set_list:
flask.session.pop(i, None)
return redirect('/application_submitted')
else:
curs.execute(db_change("insert into user (id, pw, acl, date, encode) values (?, ?, ?, ?, ?)"), [
flask.session['c_id'],
flask.session['c_pw'],
'user' if first == 0 else 'owner',
get_time(),
db_data[0][0]
])
curs.execute(db_change("insert into user_set (name, id, data) values ('email', ?, ?)"), [
flask.session['c_id'],
flask.session['c_email']
])
ua_plus(flask.session['c_id'], ip, user_agent, get_time())
flask.session['id'] = flask.session['c_id']
flask.session['head'] = ''
conn.commit()
else:
curs.execute(db_change('delete from user_set where name = "email" and id = ?'), [ip])
curs.execute(db_change('insert into user_set (name, id, data) values ("email", ?, ?)'), [ip, flask.session['c_email']])
first = 0
for i in re_set_list:
flask.session.pop(i, None)
return redirect('/change') if first == 0 else redirect('/setting')
else:
for i in re_set_list:
flask.session.pop(i, None)
return redirect('/user')
else:
curs.execute(db_change('select data from other where name = "check_key_text"'))
sql_d = curs.fetchall()
b_text = (sql_d[0][0] + '<hr class="main_hr">') if sql_d and sql_d[0][0] != '' else ''
return easy_minify(flask.render_template(skin_check(),
imp = [load_lang('check_key'), wiki_set(), custom(), other2([0, 0])],
data = '''
<form method="post">
''' + b_text + '''
<input placeholder="''' + load_lang('key') + '''" name="key" type="text">
<hr class="main_hr">
<button type="submit">''' + load_lang('save') + '''</button>
</form>
''',
menu = [['user', load_lang('return')]]
))
avg_line_length: 45.23913 | max_line_length: 135 | alphanum_fraction: 0.480218

hexsha: 79484bcff99111edf0a6adabf1c6dbbc5518d8b9 | size: 593 | ext: bzl | lang: Python
max_stars: tensorflow/core/platform/default/build_config_root.bzl | jingjunLi/tensorflow-multi-stream | d3b75ab179e1a62300739084b48816d2467b0cee | ["Apache-2.0"] | count: null | null | null
max_issues: tensorflow/core/platform/default/build_config_root.bzl | jingjunLi/tensorflow-multi-stream | d3b75ab179e1a62300739084b48816d2467b0cee | ["Apache-2.0"] | count: null | null | null
max_forks: tensorflow/core/platform/default/build_config_root.bzl | jingjunLi/tensorflow-multi-stream | d3b75ab179e1a62300739084b48816d2467b0cee | ["Apache-2.0"] | count: 1 | 2020-02-11T10:29:06.000Z | 2020-02-11T10:29:06.000Z
content:
# Lower-level functionality for build config.
# The functions in this file might be referred by tensorflow.bzl. They have to
# be separate to avoid cyclic references.
WITH_XLA_SUPPORT = True
def tf_cuda_tests_tags():
return ["local"]
def tf_sycl_tests_tags():
return ["local"]
def tf_additional_plugin_deps():
deps = []
if WITH_XLA_SUPPORT:
deps.append("//tensorflow/compiler/jit")
return deps
def tf_additional_xla_deps_py():
return []
def tf_additional_license_deps():
licenses = []
if WITH_XLA_SUPPORT:
licenses.append("@llvm//:LICENSE.TXT")
return licenses
avg_line_length: 21.962963 | max_line_length: 78 | alphanum_fraction: 0.738617

hexsha: 79484c5a1b374dcefde83d7298e30e0030c47886 | size: 1,651 | ext: py | lang: Python
max_stars: scripts/Alec/7ghz/7ghz_pyEPR.py | nikosavola/pyEPR | fb9eac3eae5522da80d9ea112bc39ae025000afb | ["BSD-3-Clause"] | count: 1 | 2020-03-02T22:09:44.000Z | 2020-03-02T22:09:44.000Z
max_issues: scripts/Alec/7ghz/7ghz_pyEPR.py | nikosavola/pyEPR | fb9eac3eae5522da80d9ea112bc39ae025000afb | ["BSD-3-Clause"] | count: null | null | null
max_forks: scripts/Alec/7ghz/7ghz_pyEPR.py | nikosavola/pyEPR | fb9eac3eae5522da80d9ea112bc39ae025000afb | ["BSD-3-Clause"] | count: null | null | null
content:
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 23 10:34:00 2017
@author: alec-eickbusch
"""
from pyEPR import *
if 1:
# Specify the HFSS project to be analyzed
project_info = ProjectInfo(r"C:\Users\awe4\Documents\Simulations\HFSS\11ghz\\")
project_info.project_name = '2017_08_Zlatko_Shyam_AutStab' # Name of the project file (string). "None" will get the current active one.
project_info.design_name = 'pyEPR_2_chips' # Name of the design file (string). "None" will get the current active one.
project_info.setup_name = None # Name of the setup(string). "None" will get the current active one.
## Describe the junctions in the HFSS design
project_info.junctions['jAlice'] = {'rect':'qubitAlice', 'line': 'alice_line', 'Lj_variable':'LJAlice', 'length':0.0001}
project_info.junctions['jBob'] = {'rect':'qubitBob', 'line': 'bob_line', 'Lj_variable':'LJBob', 'length':0.0001}
# Dissipative elements EPR
project_info.dissipative['dielectric_surfaces'] = None # supply names here, there are more options in project_info.dissipative.
# Run analysis
epr_hfss = DistributedAnalysis(project_info)
epr_hfss.do_EPR_analysis()
if 0: # Hamiltonian analysis
filename = epr_hfss.data_filename
#filename = r'C:\\Users\\rslqulab\\Desktop\\zkm\\2017_pyEPR_data\\\\/2017_08_Zlatko_Shyam_AutStab/2 pyEPR/2 pyEPR_20170825_170550.hdf5'
epr = QuantumAnalysis(filename)
epr._renorm_pj = False
#result = epr.analyze_variation('1', cos_trunc = 8, fock_trunc = 7)
epr.analyze_all_variations(cos_trunc = 8, fock_trunc = 7)
epr.plot_hamiltonian_results()
avg_line_length: 47.171429 | max_line_length: 141 | alphanum_fraction: 0.705027

hexsha: 79484c7872b7d42736d3b5bbb4b26e72b09145e2 | size: 1,398 | ext: py | lang: Python
max_stars: integration/combination/test_state_machine_with_cwe_dlq_and_retry_policy.py | hawflau/serverless-application-model | d2cf4b7e23d26cdf677c564d53bb58e6a5b6cac2 | ["Apache-2.0"] | count: null | null | null
max_issues: integration/combination/test_state_machine_with_cwe_dlq_and_retry_policy.py | hawflau/serverless-application-model | d2cf4b7e23d26cdf677c564d53bb58e6a5b6cac2 | ["Apache-2.0"] | count: 1 | 2020-03-03T01:46:46.000Z | 2020-03-03T01:46:46.000Z
max_forks: integration/combination/test_state_machine_with_cwe_dlq_and_retry_policy.py | hawflau/serverless-application-model | d2cf4b7e23d26cdf677c564d53bb58e6a5b6cac2 | ["Apache-2.0"] | count: null | null | null
content:
from unittest.case import skipIf
from integration.helpers.base_test import BaseTest
from integration.helpers.resource import current_region_does_not_support
from integration.config.service_names import CWE_CWS_DLQ
@skipIf(current_region_does_not_support([CWE_CWS_DLQ]), "CweCwsDlq is not supported in this testing region")
class TestStateMachineWithCweDlqAndRetryPolicy(BaseTest):
def test_state_machine_with_api(self):
self.create_and_verify_stack("combination/state_machine_with_cwe_with_dlq_and_retry_policy")
outputs = self.get_stack_outputs()
state_machine_arn = outputs["MyStateMachineArn"]
rule_name = outputs["MyEventName"]
state_machine_target_dlq_arn = outputs["MyDLQArn"]
cloud_watch_event_client = self.client_provider.cloudwatch_event_client
# checking if the target's DLQ and RetryPolicy properties are correct
targets = cloud_watch_event_client.list_targets_by_rule(Rule=rule_name)["Targets"]
self.assertEqual(len(targets), 1, "Rule should contain a single target")
target = targets[0]
self.assertEqual(target["Arn"], state_machine_arn)
self.assertEqual(target["DeadLetterConfig"]["Arn"], state_machine_target_dlq_arn)
self.assertEqual(target["RetryPolicy"]["MaximumEventAgeInSeconds"], 400)
self.assertEqual(target["RetryPolicy"]["MaximumRetryAttempts"], 5)
avg_line_length: 46.6 | max_line_length: 108 | alphanum_fraction: 0.771102

hexsha: 79484cb2eeb577394002a8f1a4c401d78e490202 | size: 778 | ext: py | lang: Python
max_stars: tests/denoise/test_nlm.py | gmazzamuto/gputools | 73a4dee76a119f94d8163781a85b691fd080d506 | ["BSD-3-Clause"] | count: 89 | 2015-08-28T14:17:33.000Z | 2022-01-20T16:19:34.000Z
max_issues: tests/denoise/test_nlm.py | gmazzamuto/gputools | 73a4dee76a119f94d8163781a85b691fd080d506 | ["BSD-3-Clause"] | count: 24 | 2015-08-28T19:06:22.000Z | 2022-02-21T21:10:13.000Z
max_forks: tests/denoise/test_nlm.py | gmazzamuto/gputools | 73a4dee76a119f94d8163781a85b691fd080d506 | ["BSD-3-Clause"] | count: 17 | 2015-08-28T18:56:43.000Z | 2021-09-15T23:15:36.000Z
content:
from __future__ import print_function, unicode_literals, absolute_import, division
from gputools.denoise import nlm2, nlm3
import numpy as np
def test_2d(fac = .3):
from scipy.misc import ascent
d = ascent().astype(np.float32)
d = d[:,:-10]
sig = .2*np.amax(d)
y = d+sig*np.random.uniform(0,1.,d.shape)
out = nlm2(y.astype(np.float32),fac*sig,3,5)
return y, out
def test_3d(fac = .3):
x = np.linspace(-1,1,100)
R = np.sqrt(np.sum([X**2 for X in np.meshgrid(x,x,x,indexing="ij")],axis=0))
d = 1.*(R<.4)
sig = .2*np.amax(d)
y = d+sig*np.random.uniform(0,1.,d.shape)
out = nlm3(y.astype(np.float32),fac*sig,3,5)
return y, out
if __name__ == '__main__':
y2, out2 = test_2d()
#y3, out3 = test_3d(10)
avg_line_length: 18.97561 | max_line_length: 82 | alphanum_fraction: 0.609254

hexsha: 79484d2a2b8e3aff4522eb9bd7a71da9d7f75f91 | size: 7,492 | ext: py | lang: Python
max_stars: tests/test_colormap.py | kevinyu/inspec | 28e7fd224fdeae50fb1ce0e74eb02d940c1eab5a | ["MIT"] | count: 1 | 2021-03-01T07:37:13.000Z | 2021-03-01T07:37:13.000Z
max_issues: tests/test_colormap.py | kevinyu/inspec | 28e7fd224fdeae50fb1ce0e74eb02d940c1eab5a | ["MIT"] | count: null | null | null
max_forks: tests/test_colormap.py | kevinyu/inspec | 28e7fd224fdeae50fb1ce0e74eb02d940c1eab5a | ["MIT"] | count: null | null | null
content:
import unittest
from unittest import mock
import numpy as np
from inspec.colormap import (
_registered_colormaps,
Colormap,
ColormapNotFound,
CursesColormapSingleton,
PairedColormap,
VALID_CMAPS,
curses_cmap,
load_cmap,
)
class TestPairedColormap(unittest.TestCase):
def test_init_colors(self):
cmap = PairedColormap(range(10))
for i, color in enumerate(cmap.colors):
self.assertTrue(isinstance(color, Colormap.Color256))
self.assertEqual(color.idx, i)
def test_default_bin_edges(self):
colors = [0, 1, 2, 3]
expected_edges = np.array([0.25, 0.5, 0.75])
np.testing.assert_array_equal(
PairedColormap.default_bin_edges(colors),
expected_edges,
)
colors = list(range(10))
expected_edges = np.array(np.arange(1, 10) / 10)
np.testing.assert_array_equal(
PairedColormap.default_bin_edges(colors),
expected_edges,
)
def test_invalid_colors(self):
too_few = [0]
with self.assertRaises(ValueError):
PairedColormap(too_few)
too_few = [1]
with self.assertRaises(ValueError):
PairedColormap(too_few)
too_many = list(range(23))
with self.assertRaises(ValueError):
PairedColormap(too_many)
def test_scale(self):
# Should have 10 levels
colors = list(range(10))
cmap = PairedColormap(range(10))
expected_edges = np.array(np.arange(1, 10) / 10)
assert len(colors) == 10
np.testing.assert_array_equal(
cmap.bin_edges,
expected_edges,
)
self.assertEqual(cmap.scale(0), colors[0])
self.assertEqual(cmap.scale(1.0), colors[-1])
for i in range(len(expected_edges)):
# Test boundaries of edges and that being right on an edge is consistent
self.assertEqual(cmap.scale(expected_edges[i] - 0.01), colors[i])
self.assertEqual(cmap.scale(expected_edges[i]), colors[i])
self.assertEqual(cmap.scale(expected_edges[i] + 0.01), colors[i + 1])
class TestCursesColormapSingleton(unittest.TestCase):
def setUp(self):
self.dummy_cmap = PairedColormap(colors=[1, 2, 4, 3])
self.expected_bins_to_slot_mappings = (
((1, 0), 1),
((2, 0), 2),
((3, 0), 3),
((2, 1), 4),
((3, 1), 5),
((3, 2), 6),
)
self.expected_colors_to_slot_mappings = (
((2, 1), 1),
((4, 1), 2),
((3, 1), 3),
((4, 2), 4),
((3, 2), 5),
((3, 4), 6),
)
self.preinstalled_cmap_name = VALID_CMAPS[0]
def test_singleton(self):
"""Test the mapping between fg and bg bins (0-1) each to a color slot (0-255)
"""
curses_cmap_new = CursesColormapSingleton()
self.assertIs(curses_cmap, curses_cmap_new, "Singleton should not create a new instance")
@mock.patch("curses.init_pair")
def test_bins_to_color_slot(self, p):
"""Test the mapping between fg and bg bins (0-1) each to a color slot (0-255)
"""
curses_cmap.init_colormap(self.dummy_cmap)
assert self.dummy_cmap is curses_cmap.cmap
with self.assertRaises(ValueError):
curses_cmap._color_bins_to_slot(1, 1)
with self.assertRaises(ValueError):
curses_cmap._color_bins_to_slot(0, 1)
with self.assertRaises(ValueError):
curses_cmap._color_bins_to_slot(1, -1)
with self.assertRaises(ValueError):
curses_cmap._color_bins_to_slot(len(self.dummy_cmap.colors), 0)
self.assertEqual(curses_cmap._color_bins_to_slot(1, 0), 1)
self.assertEqual(curses_cmap._color_bins_to_slot(2, 0), 2)
self.assertEqual(curses_cmap._color_bins_to_slot(3, 0), 3)
self.assertEqual(curses_cmap._color_bins_to_slot(2, 1), 4)
self.assertEqual(curses_cmap._color_bins_to_slot(3, 1), 5)
self.assertEqual(curses_cmap._color_bins_to_slot(3, 2), 6)
@mock.patch("curses.init_pair")
def test_get_slot(self, p):
"""Test the mapping between fg and bg bins (0-1) each to a color slot (0-255)
"""
curses_cmap.init_colormap(self.dummy_cmap)
assert self.dummy_cmap is curses_cmap.cmap
for (i0, i1), slot in self.expected_bins_to_slot_mappings:
self.assertEqual(
curses_cmap.get_slot(
curses_cmap.cmap.colors[i0],
curses_cmap.cmap.colors[i1],
),
(slot, False)
)
self.assertEqual(
curses_cmap.get_slot(
curses_cmap.cmap.colors[i1],
curses_cmap.cmap.colors[i0],
),
(slot, True),
"get_slot() is not returning the invert bool correctly"
)
self.assertEqual(
curses_cmap.get_slot(
curses_cmap.cmap.colors[0],
curses_cmap.cmap.colors[0],
),
(1, False)
)
self.assertEqual(
curses_cmap.get_slot(
curses_cmap.cmap.colors[1],
curses_cmap.cmap.colors[1],
),
(1, False)
)
self.assertEqual(
curses_cmap.get_slot(
curses_cmap.cmap.colors[2],
curses_cmap.cmap.colors[2],
),
(4, False)
)
def test_init_colormap(self):
expected_init_pair_calls = [
mock.call(slot, color0, color1)
for (color0, color1), slot
in self.expected_colors_to_slot_mappings
]
with mock.patch("curses.init_pair") as p:
curses_cmap.init_colormap(self.dummy_cmap)
p.assert_has_calls(expected_init_pair_calls, any_order=True)
self.assertEqual(p.call_count, len(expected_init_pair_calls))
self.assertIs(curses_cmap.cmap, self.dummy_cmap)
@mock.patch("curses.init_pair")
def test_init_colormap_by_name_not_existing(self, p):
with self.assertRaises(ColormapNotFound):
curses_cmap.init_colormap("thiscmapdoesnotexist")
@mock.patch("curses.init_pair")
def test_init_colormap_by_name(self, p):
existing_cmap = _registered_colormaps[self.preinstalled_cmap_name]
curses_cmap.init_colormap(self.preinstalled_cmap_name)
self.assertIs(curses_cmap.cmap, existing_cmap)
class TestLoadCmap(unittest.TestCase):
def setUp(self):
self.dummy_cmap = PairedColormap(colors=[1, 2, 4, 3])
def test_load_cmap_by_cmap(self):
self.assertIs(self.dummy_cmap, load_cmap(self.dummy_cmap))
for preinstalled_cmap_name in VALID_CMAPS:
existing_cmap = _registered_colormaps[preinstalled_cmap_name]
self.assertIs(load_cmap(existing_cmap), existing_cmap)
def test_load_cmap_by_name(self):
for preinstalled_cmap_name in VALID_CMAPS:
existing_cmap = _registered_colormaps[preinstalled_cmap_name]
self.assertIs(load_cmap(preinstalled_cmap_name), existing_cmap)
with self.assertRaises(ColormapNotFound):
load_cmap("thiscmapdoesnotexist")
def test_load_cmap_bad_input(self):
with self.assertRaises(ColormapNotFound):
load_cmap(1000.0)
avg_line_length: 33.297778 | max_line_length: 97 | alphanum_fraction: 0.60598

hexsha: 79484d5f64bf398eaca009197c9d37cb83594838 | size: 4,346 | ext: py | lang: Python
max_stars: sympy/geometry/tests/test_curve.py | msgoff/sympy | 1e7daef7514902f5e89718fa957b7b36c6669a10 | ["BSD-3-Clause"] | count: null | null | null
max_issues: sympy/geometry/tests/test_curve.py | msgoff/sympy | 1e7daef7514902f5e89718fa957b7b36c6669a10 | ["BSD-3-Clause"] | count: null | null | null
max_forks: sympy/geometry/tests/test_curve.py | msgoff/sympy | 1e7daef7514902f5e89718fa957b7b36c6669a10 | ["BSD-3-Clause"] | count: null | null | null
content:
from sympy import Symbol, pi, symbols, Tuple, S, sqrt, asinh, Rational
from sympy.geometry import (
Curve,
Line,
Point,
Ellipse,
Ray,
Segment,
Circle,
Polygon,
RegularPolygon,
)
from sympy.testing.pytest import raises, slow
def test_curve():
x = Symbol("x", real=True)
s = Symbol("s")
z = Symbol("z")
# this curve is independent of the indicated parameter
c = Curve([2 * s, s ** 2], (z, 0, 2))
assert c.parameter == z
assert c.functions == (2 * s, s ** 2)
assert c.arbitrary_point() == Point(2 * s, s ** 2)
assert c.arbitrary_point(z) == Point(2 * s, s ** 2)
# this is how it is normally used
c = Curve([2 * s, s ** 2], (s, 0, 2))
assert c.parameter == s
assert c.functions == (2 * s, s ** 2)
t = Symbol("t")
# the t returned as assumptions
assert c.arbitrary_point() != Point(2 * t, t ** 2)
t = Symbol("t", real=True)
# now t has the same assumptions so the test passes
assert c.arbitrary_point() == Point(2 * t, t ** 2)
assert c.arbitrary_point(z) == Point(2 * z, z ** 2)
assert c.arbitrary_point(c.parameter) == Point(2 * s, s ** 2)
assert c.arbitrary_point(None) == Point(2 * s, s ** 2)
assert c.plot_interval() == [t, 0, 2]
assert c.plot_interval(z) == [z, 0, 2]
assert (
Curve([x, x], (x, 0, 1))
.rotate(pi / 2, (1, 2))
.scale(2, 3)
.translate(1, 3)
.arbitrary_point(s)
== Line((0, 0), (1, 1))
.rotate(pi / 2, (1, 2))
.scale(2, 3)
.translate(1, 3)
.arbitrary_point(s)
== Point(-2 * s + 7, 3 * s + 6)
)
raises(ValueError, lambda: Curve((s), (s, 1, 2)))
raises(ValueError, lambda: Curve((x, x * 2), (1, x)))
raises(ValueError, lambda: Curve((s, s + t), (s, 1, 2)).arbitrary_point())
raises(ValueError, lambda: Curve((s, s + t), (t, 1, 2)).arbitrary_point(s))
@slow
def test_free_symbols():
a, b, c, d, e, f, s = symbols("a:f,s")
assert Point(a, b).free_symbols == {a, b}
assert Line((a, b), (c, d)).free_symbols == {a, b, c, d}
assert Ray((a, b), (c, d)).free_symbols == {a, b, c, d}
assert Ray((a, b), angle=c).free_symbols == {a, b, c}
assert Segment((a, b), (c, d)).free_symbols == {a, b, c, d}
assert Line((a, b), slope=c).free_symbols == {a, b, c}
assert Curve((a * s, b * s), (s, c, d)).free_symbols == {a, b, c, d}
assert Ellipse((a, b), c, d).free_symbols == {a, b, c, d}
assert Ellipse((a, b), c, eccentricity=d).free_symbols == {a, b, c, d}
assert Ellipse((a, b), vradius=c, eccentricity=d).free_symbols == {a, b, c, d}
assert Circle((a, b), c).free_symbols == {a, b, c}
assert Circle((a, b), (c, d), (e, f)).free_symbols == {e, d, c, b, f, a}
assert Polygon((a, b), (c, d), (e, f)).free_symbols == {e, b, d, f, a, c}
assert RegularPolygon((a, b), c, d, e).free_symbols == {e, a, b, c, d}
def test_transform():
x = Symbol("x", real=True)
y = Symbol("y", real=True)
c = Curve((x, x ** 2), (x, 0, 1))
cout = Curve((2 * x - 4, 3 * x ** 2 - 10), (x, 0, 1))
pts = [Point(0, 0), Point(S.Half, Rational(1, 4)), Point(1, 1)]
pts_out = [Point(-4, -10), Point(-3, Rational(-37, 4)), Point(-2, -7)]
assert c.scale(2, 3, (4, 5)) == cout
assert [c.subs(x, xi / 2) for xi in Tuple(0, 1, 2)] == pts
assert [cout.subs(x, xi / 2) for xi in Tuple(0, 1, 2)] == pts_out
assert Curve((x + y, 3 * x), (x, 0, 1)).subs(y, S.Half) == Curve(
(x + S.Half, 3 * x), (x, 0, 1)
)
assert Curve((x, 3 * x), (x, 0, 1)).translate(4, 5) == Curve(
(x + 4, 3 * x + 5), (x, 0, 1)
)
def test_length():
t = Symbol("t", real=True)
c1 = Curve((t, 0), (t, 0, 1))
assert c1.length == 1
c2 = Curve((t, t), (t, 0, 1))
assert c2.length == sqrt(2)
c3 = Curve((t ** 2, t), (t, 2, 5))
assert c3.length == -sqrt(17) - asinh(4) / 4 + asinh(10) / 4 + 5 * sqrt(101) / 2
def test_parameter_value():
t = Symbol("t")
C = Curve([2 * t, t ** 2], (t, 0, 2))
assert C.parameter_value((2, 1), t) == {t: 1}
raises(ValueError, lambda: C.parameter_value((2, 0), t))
def test_issue_17997():
t, s = symbols("t s")
c = Curve((t, t ** 2), (t, 0, 10))
p = Curve([2 * s, s ** 2], (s, 0, 2))
assert c(2) == Point(2, 4)
assert p(1) == Point(2, 1)
avg_line_length: 33.175573 | max_line_length: 84 | alphanum_fraction: 0.518408

hexsha: 79484e29b73f55be596b08d46cfd75eed61e3a7e | size: 2,796 | ext: py | lang: Python
max_stars: integrationtest/vm/hybrid/test_sync_delete_vpc_vpn_gateway_local.py | bgerxx/woodpecker | fdc51245945cc9be4d1f028988079213eb99b2ad | ["Apache-2.0"] | count: null | null | null
max_issues: integrationtest/vm/hybrid/test_sync_delete_vpc_vpn_gateway_local.py | bgerxx/woodpecker | fdc51245945cc9be4d1f028988079213eb99b2ad | ["Apache-2.0"] | count: null | null | null
max_forks: integrationtest/vm/hybrid/test_sync_delete_vpc_vpn_gateway_local.py | bgerxx/woodpecker | fdc51245945cc9be4d1f028988079213eb99b2ad | ["Apache-2.0"] | count: null | null | null
content:
'''
New Integration Test for hybrid.
@author: Legion
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.operations.hybrid_operations as hyb_ops
import zstackwoodpecker.operations.resource_operations as res_ops
import time
import os
date_s = time.strftime('%m%d-%H%M%S', time.localtime())
test_obj_dict = test_state.TestStateDict()
ks_inv = None
datacenter_inv = None
def test():
global ks_inv
global datacenter_inv
datacenter_type = os.getenv('datacenterType')
ks_existed = hyb_ops.query_aliyun_key_secret()
if not ks_existed:
ks_inv = hyb_ops.add_aliyun_key_secret('test_hybrid', 'test for hybrid', os.getenv('aliyunKey'), os.getenv('aliyunSecret'))
# Clear datacenter remained in local
datacenter_local = hyb_ops.query_datacenter_local()
if datacenter_local:
for d in datacenter_local:
hyb_ops.del_datacenter_in_local(d.uuid)
datacenter_list = hyb_ops.get_datacenter_from_remote(datacenter_type)
regions = [ i.regionId for i in datacenter_list]
for r in regions:
datacenter_inv = hyb_ops.add_datacenter_from_remote(datacenter_type, r, 'datacenter for test')
# Add Identity Zone
iz_list = hyb_ops.get_identity_zone_from_remote(datacenter_type, r)
vpn_gateway_list = []
for iz in iz_list:
if not iz.availableInstanceTypes:
continue
iz_inv = hyb_ops.add_identity_zone_from_remote(datacenter_type, datacenter_inv.uuid, iz.zoneId)
vpn_gateway_list = hyb_ops.sync_vpc_vpn_gateway_from_remote(datacenter_inv.uuid)
if vpn_gateway_list:
break
else:
hyb_ops.del_identity_zone_in_local(iz_inv.uuid)
if vpn_gateway_list:
break
else:
hyb_ops.del_datacenter_in_local(datacenter_inv.uuid)
if not vpn_gateway_list:
test_util.test_fail("VpnGate was not found in all available dataCenter")
vpc_vpn_gw_local = hyb_ops.query_vpc_vpn_gateway_local()
assert len(vpc_vpn_gw_local) > 0
for gw in vpc_vpn_gw_local:
hyb_ops.del_vpc_vpn_gateway_local(gw.uuid)
vpc_vpn_gw_local_after = hyb_ops.query_vpc_vpn_gateway_local()
assert len(vpc_vpn_gw_local_after) == 0
test_util.test_pass('Sync Delete Vpc Vpn Gateway Local Test Success')
def env_recover():
global datacenter_inv
if datacenter_inv:
hyb_ops.del_datacenter_in_local(datacenter_inv.uuid)
global ks_inv
if ks_inv:
hyb_ops.del_aliyun_key_secret(ks_inv.uuid)
#Will be called only if exception happens in test().
def error_cleanup():
global test_obj_dict
test_lib.lib_error_cleanup(test_obj_dict)
avg_line_length: 36.789474 | max_line_length: 131 | alphanum_fraction: 0.731044

hexsha: 7948503824e4fb456500aa275c6ffc26a2e1bdc0 | size: 10,071 | ext: py | lang: Python
max_stars: test/model/test_models.py | strawberrypie/gluon-ts | 1d27423478f1dc4621f81c4659d8ba78f88ee89b | ["Apache-2.0"] | count: 1 | 2019-11-10T03:56:28.000Z | 2019-11-10T03:56:28.000Z
max_issues: test/model/test_models.py | strawberrypie/gluon-ts | 1d27423478f1dc4621f81c4659d8ba78f88ee89b | ["Apache-2.0"] | count: null | null | null
max_forks: test/model/test_models.py | strawberrypie/gluon-ts | 1d27423478f1dc4621f81c4659d8ba78f88ee89b | ["Apache-2.0"] | count: null | null | null
content:
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
# Standard library imports
import tempfile
from pathlib import Path
# Third-party imports
import pytest
from flaky import flaky
# First-party imports
from gluonts import time_feature
from gluonts.core.serde import load_code
from gluonts.dataset.artificial import constant_dataset
from gluonts.evaluation.backtest import backtest_metrics
from gluonts.model.deepar import DeepAREstimator
from gluonts.model.deep_factor import DeepFactorEstimator
from gluonts.model.gp_forecaster import GaussianProcessEstimator
from gluonts.model.npts import NPTSEstimator
from gluonts.model.predictor import Predictor
from gluonts.model.seasonal_naive import SeasonalNaiveEstimator
from gluonts.model.seq2seq import (
MQCNNEstimator,
MQRNNEstimator,
Seq2SeqEstimator,
)
from gluonts.model.simple_feedforward import SimpleFeedForwardEstimator
from gluonts.model.transformer import TransformerEstimator
from gluonts.model.canonical._estimator import (
CanonicalRNNEstimator,
MLPForecasterEstimator,
)
dataset_info, train_ds, test_ds = constant_dataset()
freq = dataset_info.metadata.freq
prediction_length = dataset_info.prediction_length
cardinality = int(dataset_info.metadata.feat_static_cat[0].cardinality)
# FIXME: Time features should not be needed for GP
time_features = [time_feature.DayOfWeek(), time_feature.HourOfDay()]
num_parallel_samples = 2
epochs = 1
def seq2seq_base(seq2seq_model, hybridize: bool = True, batches_per_epoch=1):
return (
seq2seq_model,
dict(
ctx="cpu",
epochs=epochs,
learning_rate=1e-2,
hybridize=hybridize,
prediction_length=prediction_length,
context_length=prediction_length,
num_batches_per_epoch=batches_per_epoch,
quantiles=[0.1, 0.5, 0.9],
use_symbol_block_predictor=True,
num_parallel_samples=num_parallel_samples,
),
)
def mqcnn_estimator(hybridize: bool = True, batches_per_epoch=1):
return seq2seq_base(MQCNNEstimator, hybridize, batches_per_epoch)
def mqrnn_estimator(hybridize: bool = True, batches_per_epoch=1):
return seq2seq_base(MQRNNEstimator, hybridize, batches_per_epoch)
def rnn_estimator(hybridize: bool = False, batches_per_epoch=1):
return (
CanonicalRNNEstimator,
dict(
ctx="cpu",
epochs=epochs,
learning_rate=1e-2,
hybridize=hybridize,
num_cells=2,
num_layers=1,
prediction_length=prediction_length,
context_length=2,
num_batches_per_epoch=batches_per_epoch,
use_symbol_block_predictor=False,
num_parallel_samples=2,
),
)
def mlp_estimator(hybridize: bool = False, batches_per_epoch=1):
return (
MLPForecasterEstimator,
dict(
ctx="cpu",
epochs=epochs,
learning_rate=1e-2,
hybridize=hybridize,
num_cells=2,
num_layers=1,
prediction_length=prediction_length,
context_length=2,
num_batches_per_epoch=batches_per_epoch,
use_symbol_block_predictor=False,
num_parallel_samples=2,
),
)
def npts_estimator():
return (
NPTSEstimator,
dict(
kernel_type="uniform",
use_default_features=True,
prediction_length=prediction_length,
num_parallel_samples=num_parallel_samples,
),
)
def simple_seq2seq_estimator(hybridize: bool = True, batches_per_epoch=1):
return seq2seq_base(Seq2SeqEstimator, hybridize, batches_per_epoch)
def simple_feedforward_estimator(hybridize: bool = True, batches_per_epoch=1):
return (
SimpleFeedForwardEstimator,
dict(
ctx="cpu",
epochs=epochs,
learning_rate=1e-2,
hybridize=hybridize,
num_hidden_dimensions=[3],
prediction_length=prediction_length,
num_batches_per_epoch=batches_per_epoch,
use_symbol_block_predictor=True,
num_parallel_samples=num_parallel_samples,
),
)
def deep_factor_estimator(hybridize: bool = True, batches_per_epoch=1):
return (
DeepFactorEstimator,
dict(
ctx="cpu",
epochs=epochs,
learning_rate=1e-2,
hybridize=hybridize,
prediction_length=prediction_length,
cardinality=[cardinality],
num_batches_per_epoch=batches_per_epoch,
use_symbol_block_predictor=False,
num_parallel_samples=num_parallel_samples,
),
)
def gp_estimator(hybridize: bool = True, batches_per_epoch=1):
return (
GaussianProcessEstimator,
dict(
ctx="cpu",
epochs=epochs,
learning_rate=1e-2,
hybridize=hybridize,
prediction_length=prediction_length,
cardinality=cardinality,
num_batches_per_epoch=batches_per_epoch,
time_features=time_features,
use_symbol_block_predictor=False,
num_parallel_samples=num_parallel_samples,
# FIXME: test_shell fails with use_symbol_block_predictor=True
# FIXME and float_type = np.float64
),
)
def deepar_estimator(hybridize: bool = True, batches_per_epoch=1):
return (
DeepAREstimator,
dict(
ctx="cpu",
epochs=epochs,
learning_rate=1e-2,
hybridize=hybridize,
num_cells=2,
num_layers=1,
prediction_length=prediction_length,
context_length=2,
num_batches_per_epoch=batches_per_epoch,
use_symbol_block_predictor=False,
num_parallel_samples=2,
),
)
def transformer_estimator(hybridize: bool = False, batches_per_epoch=1):
return (
TransformerEstimator,
dict(
ctx="cpu",
epochs=epochs,
learning_rate=1e-2,
hybridize=hybridize,
model_dim=4,
inner_ff_dim_scale=1,
num_heads=2,
prediction_length=prediction_length,
context_length=2,
num_batches_per_epoch=batches_per_epoch,
use_symbol_block_predictor=False,
num_parallel_samples=2,
),
)
def seasonal_estimator():
return SeasonalNaiveEstimator, dict(prediction_length=prediction_length)
@flaky(max_runs=3, min_passes=1)
@pytest.mark.timeout(10) # DeepAR occasionally fails with the 5 second timeout
@pytest.mark.parametrize(
"Estimator, hyperparameters, accuracy",
[
estimator
for hyb in [False, True]
for estimator in [
deepar_estimator(hybridize=hyb, batches_per_epoch=50)
+ (1.5,), # large value as this test is breaking frequently
deep_factor_estimator(hybridize=hyb, batches_per_epoch=200)
+ (0.3,),
gp_estimator(hybridize=hyb, batches_per_epoch=200) + (0.2,),
mlp_estimator(hybridize=hyb) + (10.0,),
mqcnn_estimator(hybridize=hyb, batches_per_epoch=200) + (0.2,),
mqrnn_estimator(hybridize=hyb, batches_per_epoch=200) + (0.2,),
rnn_estimator(hybridize=hyb) + (10.0,),
simple_feedforward_estimator(hybridize=hyb, batches_per_epoch=200)
+ (0.3,),
transformer_estimator(hybridize=hyb, batches_per_epoch=80)
+ (0.2,),
]
]
+ [npts_estimator() + (0.0,), seasonal_estimator() + (0.0,)],
)
def test_accuracy(Estimator, hyperparameters, accuracy):
estimator = Estimator.from_hyperparameters(freq=freq, **hyperparameters)
agg_metrics, item_metrics = backtest_metrics(
train_dataset=train_ds, test_dataset=test_ds, forecaster=estimator
)
assert agg_metrics["ND"] <= accuracy
@pytest.mark.parametrize(
"Estimator, hyperparameters",
[
simple_feedforward_estimator(),
deepar_estimator(),
deep_factor_estimator(),
npts_estimator(),
seasonal_estimator(),
mqcnn_estimator(),
mqrnn_estimator(),
gp_estimator(),
transformer_estimator(),
],
)
def test_repr(Estimator, hyperparameters):
estimator = Estimator.from_hyperparameters(freq=freq, **hyperparameters)
assert repr(estimator) == repr(load_code(repr(estimator)))
@pytest.mark.parametrize(
"Estimator, hyperparameters",
[
simple_feedforward_estimator(),
deepar_estimator(),
# TODO: Enable this test: Error: assert <gluonts.model.predictor.RepresentableBlockPredictor object at
# TODO: 0x124701240> == <gluonts.model.predictor.RepresentableBlockPredictor object at 0x124632940>
# TODO: deep_factor_estimator(),
npts_estimator(),
seasonal_estimator(),
mqcnn_estimator(),
mqrnn_estimator(),
gp_estimator(),
transformer_estimator(),
],
)
def test_serialize(Estimator, hyperparameters):
estimator = Estimator.from_hyperparameters(freq=freq, **hyperparameters)
with tempfile.TemporaryDirectory() as temp_dir:
predictor_act = estimator.train(train_ds)
predictor_act.serialize(Path(temp_dir))
predictor_exp = Predictor.deserialize(Path(temp_dir))
# TODO: DeepFactorEstimator does not pass this assert
assert predictor_act == predictor_exp
avg_line_length: 32.592233 | max_line_length: 111 | alphanum_fraction: 0.665972

hexsha: 794851b6177836b786383d5bc09a5f570367a71b | size: 659 | ext: py | lang: Python
max_stars: flatpage_main/migrations/0002_auto_20210622_1405.py | davidkartuzinski/ellieplatformsite | 63a41cb2a15ae81a7cd3cdf68d783398b3205ce2 | ["MIT"] | count: null | null | null
max_issues: flatpage_main/migrations/0002_auto_20210622_1405.py | davidkartuzinski/ellieplatformsite | 63a41cb2a15ae81a7cd3cdf68d783398b3205ce2 | ["MIT"] | count: 2 | 2021-06-25T15:30:34.000Z | 2021-06-29T13:55:02.000Z
max_forks: flatpage_main/migrations/0002_auto_20210622_1405.py | davidkartuzinski/ellieplatformsite | 63a41cb2a15ae81a7cd3cdf68d783398b3205ce2 | ["MIT"] | count: null | null | null
content:
# Generated by Django 3.2.4 on 2021-06-22 12:05
import ckeditor_uploader.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('flatpage_main', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='newflatpage',
options={},
),
migrations.RemoveField(
model_name='newflatpage',
name='content',
),
migrations.AddField(
model_name='newflatpage',
name='content_section',
field=ckeditor_uploader.fields.RichTextUploadingField(default=''),
),
]
avg_line_length: 23.535714 | max_line_length: 78 | alphanum_fraction: 0.588771

hexsha: 794852b648e68112e8bbdebb48f095c51dc307d2 | size: 1,043 | ext: py | lang: Python
max_stars: app/core/migrations/0004_recipe.py | Saurav-Shrivastav/recipe-app-api | bbb024828784ca91c34b81cdf1aacfd5a3021f30 | ["MIT"] | count: null | null | null
max_issues: app/core/migrations/0004_recipe.py | Saurav-Shrivastav/recipe-app-api | bbb024828784ca91c34b81cdf1aacfd5a3021f30 | ["MIT"] | count: null | null | null
max_forks: app/core/migrations/0004_recipe.py | Saurav-Shrivastav/recipe-app-api | bbb024828784ca91c34b81cdf1aacfd5a3021f30 | ["MIT"] | count: null | null | null
content:
# Generated by Django 2.1.15 on 2020-10-09 05:25
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0003_ingredient'),
]
operations = [
migrations.CreateModel(
name='Recipe',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255)),
('time_minutes', models.IntegerField()),
('price', models.DecimalField(decimal_places=2, max_digits=5)),
('link', models.CharField(blank=True, max_length=255)),
('ingredients', models.ManyToManyField(to='core.Ingredient')),
('tags', models.ManyToManyField(to='core.Tag')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
avg_line_length: 35.965517 | max_line_length: 118 | alphanum_fraction: 0.604027

hexsha: 794853da5596187ea115fba1a7d868665ad671b3 | size: 4,331 | ext: py | lang: Python
max_stars: prepare_trainset.py | bond005/runne_contrastive_ner | ec94cdde296ca4b394f65b206fdc0d9877a025ba | ["Apache-2.0"] | count: null | null | null
max_issues: prepare_trainset.py | bond005/runne_contrastive_ner | ec94cdde296ca4b394f65b206fdc0d9877a025ba | ["Apache-2.0"] | count: null | null | null
max_forks: prepare_trainset.py | bond005/runne_contrastive_ner | ec94cdde296ca4b394f65b206fdc0d9877a025ba | ["Apache-2.0"] | count: null | null | null
content:
import codecs
import os
import pickle
import random
import sys
from transformers import BertTokenizer
from io_utils.io_utils import load_data
from trainset_building.trainset_building import build_trainset_for_siam
from trainset_building.trainset_building import build_trainset_for_ner
def main():
random.seed(42)
if len(sys.argv) < 2:
err_msg = 'The source training file is not specified!'
raise ValueError(err_msg)
src_fname = os.path.normpath(sys.argv[1])
if len(sys.argv) < 3:
err_msg = 'The destination training file is not specified!'
raise ValueError(err_msg)
dst_fname = os.path.normpath(sys.argv[2])
if len(sys.argv) < 4:
err_msg = 'The NER vocabulary file is not specified!'
raise ValueError(err_msg)
ners_fname = os.path.normpath(sys.argv[3])
if len(sys.argv) < 5:
err_msg = 'The training mode is not specified!'
raise ValueError(err_msg)
training_mode = sys.argv[4].strip().lower()
if len(training_mode) == 0:
err_msg = 'The training mode is not specified!'
raise ValueError(err_msg)
if len(sys.argv) < 6:
err_msg = 'The maximal sequence length is not specified!'
raise ValueError(err_msg)
try:
max_len = int(sys.argv[5])
    except ValueError:
max_len = 0
if max_len < 1:
err_msg = f'{sys.argv[5]} is inadmissible value ' \
f'of the maximal sequence length!'
raise ValueError(err_msg)
if len(sys.argv) < 7:
err_msg = 'The pre-trained BERT model is not specified!'
raise ValueError(err_msg)
pretrained_model = sys.argv[6]
if training_mode == 'siamese':
if len(sys.argv) < 8:
err_msg = 'The maximal number of samples is not specified!'
raise ValueError(err_msg)
try:
max_samples = int(sys.argv[7])
        except ValueError:
max_samples = 0
if max_samples < 1:
err_msg = f'{sys.argv[7]} is inadmissible value ' \
f'of the maximal number of samples!'
raise ValueError(err_msg)
else:
max_samples = 0
if not os.path.isfile(src_fname):
raise IOError(f'The file {src_fname} does not exist!')
if not os.path.isfile(ners_fname):
raise IOError(f'The file {ners_fname} does not exist!')
dname = os.path.dirname(dst_fname)
if len(dname) > 0:
if not os.path.isdir(dname):
raise IOError(f'The directory {dname} does not exist!')
if training_mode not in {'siamese', 'ner'}:
err_msg = f'The training mode {training_mode} is unknown! ' \
f'Possible values: siamese, ner.'
raise ValueError(err_msg)
with codecs.open(ners_fname, mode='r', encoding='utf-8') as fp:
possible_named_entities = list(filter(
lambda it2: len(it2) > 0,
map(
lambda it1: it1.strip(),
fp.readlines()
)
))
if len(possible_named_entities) == 0:
err_msg = f'The file {ners_fname} is empty!'
raise IOError(err_msg)
if len(possible_named_entities) != len(set(possible_named_entities)):
        err_msg = f'The file {ners_fname} contains wrong data! ' \
f'Some entities are duplicated!'
raise IOError(err_msg)
source_data = load_data(src_fname)
bert_tokenizer = BertTokenizer.from_pretrained(pretrained_model)
if training_mode == 'ner':
prep_data = build_trainset_for_ner(
data=source_data,
tokenizer=bert_tokenizer,
entities=possible_named_entities,
max_seq_len=max_len
)
print('')
print(f'X.shape = {prep_data[0].shape}')
for output_idx in range(len(prep_data[1])):
print(f'y[{output_idx}].shape = {prep_data[1][output_idx].shape}')
else:
prep_data = build_trainset_for_siam(
data=source_data,
tokenizer=bert_tokenizer,
entities=possible_named_entities,
max_seq_len=max_len,
max_samples=max_samples
)
with open(dst_fname, 'wb') as fp:
pickle.dump(
file=fp,
obj=prep_data,
protocol=pickle.HIGHEST_PROTOCOL
)
if __name__ == '__main__':
main()
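# A minimal invocation sketch (hypothetical file names; the positional arguments follow the
# sys.argv checks in main()):
#   python prepare_trainset.py train.json train_ner.pkl ner_vocab.txt ner 128 bert-base-multilingual-cased
#   python prepare_trainset.py train.json train_siam.pkl ner_vocab.txt siamese 128 bert-base-multilingual-cased 10000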
| 34.102362
| 78
| 0.611175
|
7948548d1317c0fa4341dc312be9a5949cb8f656
| 248
|
py
|
Python
|
features_extraction/regular_features.py
|
eyalho/NetML-Competition2020
|
cdf7b21642a8ce1ff8cc4c3ba7ed7fc6e1a91a81
|
[
"BSD-2-Clause"
] | null | null | null |
features_extraction/regular_features.py
|
eyalho/NetML-Competition2020
|
cdf7b21642a8ce1ff8cc4c3ba7ed7fc6e1a91a81
|
[
"BSD-2-Clause"
] | null | null | null |
features_extraction/regular_features.py
|
eyalho/NetML-Competition2020
|
cdf7b21642a8ce1ff8cc4c3ba7ed7fc6e1a91a81
|
[
"BSD-2-Clause"
] | null | null | null |
import numpy as np
from pandas import DataFrame
from features_extraction.abs_features_extraction import ABSFeatureExtraction
class RegularFeatures(ABSFeatureExtraction):
    def extract(self, df: DataFrame) -> np.ndarray:
return df.values
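# A minimal usage sketch (synthetic DataFrame; assumes pandas is installed):
#   import pandas as pd
#   df = pd.DataFrame({'pkt_len': [60, 1500], 'ttl': [64, 128]})
#   X = RegularFeatures().extract(df)  # simply returns df.values as a (2, 2) ndarray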
| 24.8
| 76
| 0.798387
|
794854dc90f2d4e1c72a532492b9adbecbe551cd
| 470
|
py
|
Python
|
IWESEP2016/kousatsu2/bug_ranking.py
|
hideshis/scripts_for_research
|
f633bdef0f9b959d7b18c8b95f169306eb8bb50d
|
[
"MIT"
] | null | null | null |
IWESEP2016/kousatsu2/bug_ranking.py
|
hideshis/scripts_for_research
|
f633bdef0f9b959d7b18c8b95f169306eb8bb50d
|
[
"MIT"
] | null | null | null |
IWESEP2016/kousatsu2/bug_ranking.py
|
hideshis/scripts_for_research
|
f633bdef0f9b959d7b18c8b95f169306eb8bb50d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import sys
import os
import subprocess
import csv
os.chdir("./evolution_info")
csv_list = os.listdir("./")
rank = []
for csv_file in csv_list:
    result = subprocess.check_output('grep "bug" ' + csv_file + " | wc -l", shell=True)
    result = result.decode().strip()  # check_output returns bytes on Python 3
    rank.append([int(result), csv_file])
rank.sort(reverse=True)
for x in range(10):
    print(rank[x])
os.chdir("..")
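# A pure-Python sketch of the same per-file count (hypothetical helper; avoids shell=True):
#   def count_bug_lines(path):
#       # equivalent of: grep "bug" <path> | wc -l
#       with open(path) as fp:
#           return sum(1 for line in fp if 'bug' in line)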
| 22.380952
| 84
| 0.648936
|
794855d07b967464fa463b2ba9dd7683a00f2311
| 3,466
|
py
|
Python
|
kw3pan/pancakeswap/factory/core/pancakeswap_factory.py
|
kkristof200/py_web3_pancakeswap
|
ae9dc7021b7da2365ce675f29f89e103fe44d77f
|
[
"MIT"
] | 6
|
2021-05-09T12:43:37.000Z
|
2021-12-07T01:56:02.000Z
|
kw3pan/pancakeswap/factory/core/pancakeswap_factory.py
|
kkristof200/py_web3_pancakeswap
|
ae9dc7021b7da2365ce675f29f89e103fe44d77f
|
[
"MIT"
] | null | null | null |
kw3pan/pancakeswap/factory/core/pancakeswap_factory.py
|
kkristof200/py_web3_pancakeswap
|
ae9dc7021b7da2365ce675f29f89e103fe44d77f
|
[
"MIT"
] | null | null | null |
# ------------------------------------------------------------ Imports ----------------------------------------------------------- #
# System
from typing import Optional
# Pip
from kw3 import WrappedContract, Web3
from kw3.constants import Constants as KW3Constants
# Local
from ._abi import pancakeswap_factory_abi
from ...liquidity_pool import PancakeswapLiquidityPool, PancakeswapBusdLiquidityPool, PancakeswapWbnbLiquidityPool
from ...constants import Constants
# -------------------------------------------------------------------------------------------------------------------------------- #
# --------------------------------------------------- class: PancakeswapFactory -------------------------------------------------- #
class PancakeswapFactory(WrappedContract):
# --------------------------------------------------------- Init --------------------------------------------------------- #
def __init__(
self,
web3: Web3
):
super().__init__(
web3=web3,
address=Constants.ADDRESS_PANCAKESWAP_FACTORY,
abi=pancakeswap_factory_abi
)
# ---------------------------------------------------- Public methods ---------------------------------------------------- #
# Forwarders
def liquidityPoolAddressesLength(self) -> int:
return self.functions.allPairsLength().call()
def liquidityPoolAddressAtIndex(
self,
index: int
) -> str:
return self.functions.allPairs(index).call()
def liquidityPoolAtIndex(
self,
index: int
) -> PancakeswapLiquidityPool:
return PancakeswapBusdLiquidityPool(
web3=self._web3,
address=self.liquidityPoolAddressAtIndex(
index=index
)
)
# Custom
def getPairAddress(
self,
address0: str,
address1: str
) -> Optional[str]:
return self.functions.getPair(
Web3.toChecksumAddress(address0),
Web3.toChecksumAddress(address1)
).call()
def getPair(
self,
address0: str,
address1: str
) -> Optional[PancakeswapLiquidityPool]:
return self.__getPair(
PancakeswapLiquidityPool,
address0=address0,
address1=address1
)
def getWbnbPair(
self,
token_address: str
) -> Optional[PancakeswapWbnbLiquidityPool]:
return self.__getPair(
PancakeswapWbnbLiquidityPool,
address0=KW3Constants.WBNB.ADDRESS,
address1=token_address
)
def getBusdPair(
self,
token_address: str
) -> Optional[PancakeswapBusdLiquidityPool]:
return self.__getPair(
PancakeswapBusdLiquidityPool,
address0=KW3Constants.BUSD.ADDRESS,
address1=token_address
)
# ---------------------------------------------------- Private methods --------------------------------------------------- #
def __getPair(
self,
_type,
address0: str,
address1: str
) -> Optional[PancakeswapLiquidityPool]:
pair_address = self.getPairAddress(address0, address1)
return _type(
self._web3,
pair_address
) if pair_address else None
# -------------------------------------------------------------------------------------------------------------------------------- #
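# A minimal usage sketch (hypothetical BSC RPC endpoint and token address; kw3.Web3 is
# assumed to accept a web3.py-style HTTP provider):
#   from kw3 import Web3
#   web3 = Web3(Web3.HTTPProvider('https://bsc-dataseed.binance.org'))
#   factory = PancakeswapFactory(web3=web3)
#   pool = factory.getWbnbPair('0x0000000000000000000000000000000000000000')  # Optional[PancakeswapWbnbLiquidityPool]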
| 28.409836
| 132
| 0.467398
|
79485621667c6770026cf4bc4d62b13ae4f28526
| 47,272
|
py
|
Python
|
scripts/old_scripts/analyze_results_by_classification.py
|
quimaguirre/diana
|
930da0ea91ad87e354061af18db6c437a3318366
|
[
"MIT"
] | 3
|
2019-07-11T05:32:13.000Z
|
2021-03-12T01:10:21.000Z
|
scripts/old_scripts/analyze_results_by_classification.py
|
quimaguirre/diana
|
930da0ea91ad87e354061af18db6c437a3318366
|
[
"MIT"
] | null | null | null |
scripts/old_scripts/analyze_results_by_classification.py
|
quimaguirre/diana
|
930da0ea91ad87e354061af18db6c437a3318366
|
[
"MIT"
] | null | null | null |
import argparse
import copy
import cPickle
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pylab
#from pylab import plot, show, savefig, xlim, figure, hold, ylim, legend, boxplot, setp, axes, xlabel, ylabel
import scipy.stats
import time
import sys, os, re
from sklearn.decomposition import PCA
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.gaussian_process.kernels import RBF
from sklearn import metrics
from sklearn.metrics import roc_curve, auc
from sklearn.naive_bayes import GaussianNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from context import diana
import diana.classes.analysis as diana_analysis
def main():
options = parse_user_arguments()
analysis_results(options)
def parse_user_arguments(*args, **kwds):
"""
Parses the arguments of the program
"""
parser = argparse.ArgumentParser(
description = "Generate the profiles of the input drug",
epilog = "@oliva's lab 2017")
parser.add_argument('-th','--threshold_list',dest='threshold_list',action = 'store',
help = """List of percentages that will be used as cut-offs to define the profiles of the drugs. It has to be a file containing:
- Different numbers that will be the threshold values separated by newline characters.
For example, a file called "top_threshold.list" containing:
0.1
0.5
1
5
10
""")
parser.add_argument('-cl','--classification',dest='classification',action = 'store',default='dcdb',
help = """Define the type of classification that will be used. It can be: dcdb, biological_process, pathway""")
parser.add_argument('-se','--consider_se',dest='consider_se',action = 'store_true',
help = """" Consider Side Effects / ATCs. """)
parser.add_argument('-datc','--different_atc',dest='different_atc',action = 'store_true',
help = """ Consider only drug combinations with different ATCs. """)
parser.add_argument('-pca','--pca',dest='pca',action = 'store_true',
help = """" Make a PCA to reduce dimensionality. """)
parser.add_argument('-ws','--workspace',dest='workspace',action = 'store',default=os.path.join(os.path.join(os.path.dirname(__file__), '..'), 'workspace'),
help = """Define the workspace directory where the data directory and the results directory will be created""")
options=parser.parse_args()
return options
#################
#################
# MAIN FUNCTION #
#################
#################
def analysis_results(options):
"""
Analyzes the results of the comparisons
"""
# Start marker for time measure
start = time.time()
print("\n\t\t-------------------------------------------------------------------------------------------------------------------------------\n")
print("\t\tStarting Drug Interactions ANAlysis (DIANA), a program created by @OLIVA'S LAB. Analysis of results: Analysis by classification\n")
print("\t\t-------------------------------------------------------------------------------------------------------------------------------\n")
# Get the script path
main_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
toolbox_dir = os.path.join(main_path, 'diana/toolbox')
# Check the directory of the profiles, comparisons and analysis
data_dir = os.path.join(options.workspace, "profiles")
check_directory(data_dir)
results_dir = os.path.join(options.workspace, "comparisons")
check_directory(results_dir)
analysis_dir = os.path.join(options.workspace, "analysis")
check_directory(analysis_dir)
# Get the list of thresholds to create the profiles
if options.threshold_list and fileExist(options.threshold_list):
threshold_list = get_values_from_threshold_file(options.threshold_list)
else:
threshold_list = [1, 5, 10, 20, 50]
# Do we consider Side Effects/ATC?
if options.consider_se:
consider_se = True
else:
consider_se = False
# Get the names of the columns
columns = diana_analysis.obtain_columns(threshold_list, ATC_SE=consider_se)
#-----------------------------------------------------#
# PARSE THE RESULTS AND CREATE A PANDAS DATAFRAME #
#-----------------------------------------------------#
diana_id_to_drugbank_file = os.path.join(toolbox_dir, 'diana_id_to_drugbank.pcl')
diana_id_to_drugbank = cPickle.load(open(diana_id_to_drugbank_file))
pair2comb_file = os.path.join(toolbox_dir, 'pair2comb.pcl')
pair2comb = cPickle.load(open(pair2comb_file))
ddi = sum(1 for x in pair2comb.values() if x == 1)
non_ddi = sum(1 for x in pair2comb.values() if x == 0)
print('NUMBER OF DRUG COMBINATIONS:\t\t{}\n'.format(ddi))
print('NUMBER OF NON-DRUG COMBINATIONS:\t{}\n'.format(non_ddi))
output_dataframe = os.path.join(analysis_dir, 'dcdb_comparisons.csv')
if not fileExist(output_dataframe):
# Create a data frame to store the results
df = pd.DataFrame(columns=columns)
# Obtain all the results subfolders of the results main folder
results_dir_list = [f for f in os.listdir(results_dir) if os.path.isdir(os.path.join(results_dir, f))]
for comparison in results_dir_list:
drug_id1, drug_id2 = comparison.split('---')
comparison_dir = os.path.join(results_dir, comparison)
results_table = os.path.join(comparison_dir, 'results_table.tsv')
# Add the Comb field (if it is drug combination or not)
drug1 = diana_id_to_drugbank[drug_id1].upper()
drug2 = diana_id_to_drugbank[drug_id2].upper()
comparison_without_id = '{}---{}'.format(drug1, drug2)
if comparison_without_id in pair2comb:
combination_field = pair2comb[comparison_without_id]
else:
print('The comparison {} is not in the pair2comb dictionary!\n'.format(comparison_without_id))
print(pair2comb)
sys.exit(10)
if not fileExist(results_table):
print('The comparison {} has not been executed properly!\n'.format(comparison))
sys.exit(10)
results = diana_analysis.get_results_from_table(results_table, columns, combination_field)
df2 = pd.DataFrame([results], columns=columns, index=[comparison])
# Add the information to the main data frame
df = df.append(df2)
# Output the Pandas dataframe in a CSV file
df.to_csv(output_dataframe)
else:
df = pd.read_csv(output_dataframe, index_col=0)
#---------------------------#
# REMOVE MISSING VALUES #
#---------------------------#
# Replace the None values in dcstructure by nan
    if (df['dcstructure'] == 'None').any():
        df = df.replace(to_replace={'dcstructure':{'None':np.nan}})
# Remove the nan values in dcstructure
df = df.dropna()
# Count the number of drug combinations / non-drug combinations
dc_data = df[df['combination'] == 1]
ndc_data = df[df['combination'] == 0]
num_dc = len(dc_data.index)
num_ndc = len(ndc_data.index)
print('Number of drug combinations after removing missing values:\t{}\n'.format(num_dc))
print('Number of non-drug combinations after removing missing values:\t{}\n'.format(num_ndc))
#---------------------------#
# IDENTIFY ME-TOO DRUGS #
#---------------------------#
me_too_dir = os.path.join(analysis_dir, 'me_too_drugs')
create_directory(me_too_dir)
me_too_drugs_table = os.path.join(me_too_dir, 'me_too_drugs.tsv')
me_too_drug_combs_table = os.path.join(me_too_dir, 'me_too_drug_combinations.tsv')
me_too_drug_pairs_file = os.path.join(me_too_dir, 'me_too_drug_pairs.pcl')
me_too_drug_comb_pairs_file = os.path.join(me_too_dir, 'me_too_drug_comb_pairs.pcl')
if not fileExist(me_too_drug_pairs_file) or not fileExist(me_too_drug_comb_pairs_file):
df_struc = df[['dcstructure']]
df_struc = df_struc.astype(float)
me_too_drug_pairs, me_too_drug_comb_pairs = diana_analysis.obtain_me_too_drugs_and_combinations(df_struc, columns, me_too_drugs_table, me_too_drug_combs_table)
cPickle.dump(me_too_drug_pairs, open(me_too_drug_pairs_file, 'w'))
cPickle.dump(me_too_drug_comb_pairs, open(me_too_drug_comb_pairs_file, 'w'))
else:
me_too_drug_pairs = cPickle.load(open(me_too_drug_pairs_file))
me_too_drug_comb_pairs = cPickle.load(open(me_too_drug_comb_pairs_file))
# Process me-too drug combination pairs
me_too_drug_combinations = set()
drug_pair_to_me_too_times = {}
for pair in me_too_drug_comb_pairs:
drug_comb1, drug_comb2 = pair.split('___')
me_too_drug_combinations.add(frozenset([drug_comb1, drug_comb2]))
drug_pair_to_me_too_times.setdefault(drug_comb1, 0)
drug_pair_to_me_too_times.setdefault(drug_comb2, 0)
drug_pair_to_me_too_times[drug_comb1] += 1
drug_pair_to_me_too_times[drug_comb2] += 1
removed_drug_pairs = set()
for pair in me_too_drug_comb_pairs:
drug_comb1, drug_comb2 = pair.split('___')
if drug_comb1 in removed_drug_pairs or drug_comb2 in removed_drug_pairs:
continue
if drug_pair_to_me_too_times[drug_comb1] > drug_pair_to_me_too_times[drug_comb2]:
removed_drug_pairs.add(drug_comb1)
else:
removed_drug_pairs.add(drug_comb2)
# Remove the drug pairs which appear in me-too pairs of drug pairs more times
df = df.loc[~df.index.isin(list(removed_drug_pairs))]
# Count the number of drug combinations / non-drug combinations
dc_data = df[df['combination'] == 1]
ndc_data = df[df['combination'] == 0]
num_dc = len(dc_data.index)
num_ndc = len(ndc_data.index)
print('Number of drug combinations after removing me-too conflictive drug pairs:\t{}\n'.format(num_dc))
print('Number of non-drug combinations after removing me-too conflictive drug pairs:\t{}\n'.format(num_ndc))
#------------------------------------------------------------------#
# EVALUATE PERFORMANCE BY CLASSIFICATION OF DRUGS COMBINATIONS #
#------------------------------------------------------------------#
img_dir = os.path.join(analysis_dir, 'figures')
create_directory(img_dir)
fig_format = 'png'
tables_dir = os.path.join(analysis_dir, 'tables')
create_directory(tables_dir)
# Names of the methods
if consider_se:
if options.different_atc:
types_analysis = ['dctargets', 'dcguild', 'dcstructure', 'dcse', 'random']
types_analysis2 = ['dctargets', 'dcguild', 'dcstructure', 'dcse'] # Without random!!
#types_analysis_labels = ['dcTargets', 'dcGUILD', 'dcStructure', 'dcSE', 'Random']
types_analysis_labels = [ 'Target', 'PPI','Structure', 'Side Effects', 'Random']
else:
types_analysis = ['dctargets', 'dcguild', 'dcstructure', 'dcatc', 'dcse', 'random']
types_analysis2 = ['dctargets', 'dcguild', 'dcstructure', 'dcatc', 'dcse'] # Without random!!
#types_analysis_labels = ['dcTargets', 'dcGUILD', 'dcStructure', 'dcATC', 'dcSE', 'Random']
types_analysis_labels = [ 'Target', 'PPI','Structure', 'ATC', 'Side Effects', 'Random']
else:
types_analysis = ['dctargets', 'dcguild', 'dcstructure', 'random']
types_analysis2 = ['dctargets', 'dcguild', 'dcstructure'] # Without random!!
        #types_analysis_labels = ['dcTargets', 'dcGUILD', 'dcStructure', 'Random']
        types_analysis_labels = ['Target', 'PPI', 'Structure', 'Random']
# Define the type of classification
if options.classification == 'dcdb':
classifications = ['Different targets in different biological processes',
'Different targets in related biological processes',
'Different targets in same biological process',
'Same target']
classifications_labels = ['Class 1', 'Class 2', 'Class 3', 'Class 4']
type_classification = '_dcdb'
elif options.classification == 'biological_process':
classifications = ['different_targets_different_bp',
'different_targets_similar_bp',
'similar_targets']
classifications_labels = ['Class 1', 'Class 2', 'Class 3']
type_classification = '_bp'
classification_file = os.path.join(toolbox_dir, 'classification_targets_bp.pcl')
classification_dict = cPickle.load(open(classification_file))
elif options.classification == 'pathway':
classifications = ['different_targets_different_pathways',
'different_targets_similar_pathways',
'similar_targets']
classifications_labels = ['Class 1', 'Class 2', 'Class 3']
type_classification = '_pathway'
classification_file = os.path.join(toolbox_dir, 'classification_targets_pathways.pcl')
classification_dict = cPickle.load(open(classification_file))
else:
raise IncorrectClassificationType(options.classification)
# Machine learning parameters
    repetitions = 25 # Number of repetitions
n_fold = 2 # Number of folds
min_num_dc_group = 10
classifier = 'SVC best 1'
classifiers = {
'KNeighbors' : KNeighborsClassifier(3),
'SVC' : SVC(probability=True),
'SVC linear' : SVC(kernel="linear", C=0.025),
'SVC rbf' : SVC(gamma=2, C=1),
'DecisionTree' : DecisionTreeClassifier(max_depth=5),
'RandomForest' : RandomForestClassifier(max_depth=5, n_estimators=10, max_features=1),
'MLP' : MLPClassifier(alpha=1),
'AdaBoost' : AdaBoostClassifier(),
'GaussianNB' : GaussianNB(),
'QuadraticDiscr.' : QuadraticDiscriminantAnalysis(),
'SVC best 1' : SVC(kernel="rbf", gamma=0.01, C=100, probability=True),
'SVC best 2' : SVC(kernel="rbf", gamma=0.1, C=1.0, probability=True)
}
if options.pca:
pca_str = '_withPCA'
else:
pca_str = '_withoutPCA'
if options.different_atc:
atc_str = '_diff_ATC'
else:
atc_str = ''
# Plot of distributions of AUC
plot_auc_distribution = os.path.join(img_dir, 'classification{}_auc_distribution{}{}.{}'.format(type_classification, atc_str, pca_str, fig_format))
# Plot of accuracy/sensitivity name
acc_sens_dctargets = os.path.join(img_dir, 'classification{}_accsens_dctargets{}{}.{}'.format(type_classification, atc_str, pca_str, fig_format))
acc_sens_dcguild = os.path.join(img_dir, 'classification{}_accsens_dcguild{}{}.{}'.format(type_classification, atc_str, pca_str, fig_format))
acc_sens_dcstructure = os.path.join(img_dir, 'classification{}_accsens_dcstructure{}{}.{}'.format(type_classification, atc_str, pca_str, fig_format))
acc_sens_dcatc = os.path.join(img_dir, 'classification{}_accsens_dcatc{}{}.{}'.format(type_classification, atc_str, pca_str, fig_format))
acc_sens_dcse = os.path.join(img_dir, 'classification{}_accsens_dcse{}{}.{}'.format(type_classification, atc_str, pca_str, fig_format))
# Results table
results_table = os.path.join(tables_dir, 'classification{}_auc_table{}{}.txt'.format(type_classification, atc_str, pca_str))
# Accuracy/Sensitivity results table
prec_rec_table = os.path.join(tables_dir, 'classification{}_accsens_table{}{}.txt'.format(type_classification, atc_str, pca_str))
# File with results of Mann Whitney tests
mannwhitney_file = os.path.join(tables_dir, 'classification{}_mannwhitney{}{}.txt'.format(type_classification, atc_str, pca_str))
# Get the classification files
drug_int_2_drugs_file = os.path.join(toolbox_dir, 'drug_int_2_drugs.pcl')
drug_int_2_drugs = cPickle.load(open(drug_int_2_drugs_file))
drug_int_2_info_file = os.path.join(toolbox_dir, 'drug_int_2_info.pcl')
drug_int_2_info = cPickle.load(open(drug_int_2_info_file))
drugbank_to_dcdb_file = os.path.join(toolbox_dir, 'drugbank_to_dcdb.pcl')
drugbank_to_dcdb = cPickle.load(open(drugbank_to_dcdb_file))
drugbank_to_atcs_file = os.path.join(toolbox_dir, 'drugbank_to_atcs.pcl')
drugbank_to_atcs = cPickle.load(open(drugbank_to_atcs_file))
#-------------------------------------------------#
# SELECT DRUG COMBINATIONS WITH DIFFERENT ATC #
#-------------------------------------------------#
if options.different_atc:
selected_rows = []
for index, row in df.iterrows():
(drug_id1, drug_id2) = index.split('---')
drug1 = diana_id_to_drugbank[drug_id1].upper()
drug2 = diana_id_to_drugbank[drug_id2].upper()
atcs_drug1 = set([ atc[0] for atc in drugbank_to_atcs[drug1] ])
atcs_drug2 = set([ atc[0] for atc in drugbank_to_atcs[drug2] ])
intersection = atcs_drug1 & atcs_drug2
if len(intersection) == 0:
selected_rows.append(index)
df = df.ix[selected_rows]
dc_data = df[df['combination'] == 1]
ndc_data = df[df['combination'] == 0]
num_dc = len(dc_data.index)
num_ndc = len(ndc_data.index)
print('Num drug combinations after removing the ones with same ATC in training: {}'.format(num_dc))
print('Num non-drug combinations after removing the ones with same ATC in training: {}'.format(num_ndc))
analysis_results = {} # Defining the dictionary that will store the results
if consider_se:
dct_columns, dcg_columns, dcs_columns, dcatc_columns, dcse_columns = diana_analysis.obtain_method_to_columns(threshold_list, ATC_SE=consider_se)
else:
dct_columns, dcg_columns, dcs_columns = diana_analysis.obtain_method_to_columns(threshold_list, ATC_SE=consider_se)
for classification in classifications:
        print('\nANALYZING CLASSIFICATION: {}\n'.format(classification))
class_rows = []
no_class_rows = []
for index, row in df.iterrows():
(drug_id1, drug_id2) = index.split('---')
drug1 = diana_id_to_drugbank[drug_id1].upper()
drug2 = diana_id_to_drugbank[drug_id2].upper()
di1_bool = False
di2_bool = False
di_bool = False
if options.classification == 'dcdb':
for DI in drug_int_2_drugs:
# If it is drug interaction...
if drug1 in drugbank_to_dcdb and drug2 in drugbank_to_dcdb:
for db_id in drugbank_to_dcdb[drug1]:
if db_id in drug_int_2_drugs[DI]:
di1_bool = True
for db_id in drugbank_to_dcdb[drug2]:
if db_id in drug_int_2_drugs[DI]:
di2_bool = True
if di1_bool and di2_bool:
di_bool = True
# If it is from the classification of interest we store it in the class group
if classification == drug_int_2_info[DI]['classification']:
class_rows.append(index)
break
# If it is NOT from the classification of interest we store it in the no class group
else:
no_class_rows.append(index)
break
if di_bool == False:
# If it not drug interaction, we store it in both groups
class_rows.append(index)
no_class_rows.append(index)
elif options.classification == 'pathway' or options.classification == 'biological_process':
if index in classification_dict:
if classification == classification_dict[index]:
class_rows.append(index)
else:
no_class_rows.append(index)
else:
class_rows.append(index)
no_class_rows.append(index)
df_class = df.ix[class_rows] # Create a table with the rows selected (DDIs of the class and non-DDIs)
df_noclass = df.ix[no_class_rows] # Create a table with the DDIs not of the class and non-DDIs
# Get the number of DC for the given classification
dc_data = df_class[df_class['combination'] == 1]
num_dc = len(dc_data.index)
print('Number of DC for {}: {}'.format(classification, num_dc))
if consider_se:
if options.different_atc:
list_methods = [ ['dctargets', dct_columns], ['dcguild', dcg_columns], ['dcstructure', dcs_columns], ['dcse', dcse_columns], ['random', columns] ]
else:
list_methods = [ ['dctargets', dct_columns], ['dcguild', dcg_columns], ['dcstructure', dcs_columns], ['dcatc', dcatc_columns], ['dcse', dcse_columns], ['random', columns] ]
else:
list_methods = [ ['dctargets', dct_columns], ['dcguild', dcg_columns], ['dcstructure', dcs_columns], ['random', columns] ]
for method, columns_method in list_methods:
print('Evaluating classification {} with method {}\n'.format(classification, method))
#------------------------------------------------------------------#
# SELECT RELEVANT FEATURES / REDUCE DIMENSIONALITY OF THE DATA #
#------------------------------------------------------------------#
if options.pca:
variance_cut_off = 0.01
num_components = 0
df_method = df_class[columns_method]
df_raw = df_method.drop('combination', axis=1)
raw_columns = copy.copy(columns_method)
raw_columns.remove('combination')
pca = PCA(n_components=None)
pca.fit(df_raw)
values_trans = pca.transform(df_raw)
explained_variance = pca.explained_variance_ratio_
for column, var in sorted(zip(raw_columns, explained_variance), key=lambda x: x[1], reverse=True):
#print(column, var)
if var > variance_cut_off:
num_components += 1
if num_components < len(raw_columns):
print('Number of features:\t{}\n'.format(len(raw_columns)))
print('Reduction to {} components\n'.format(num_components))
pca = PCA(n_components=num_components)
pca.fit(df_raw)
values_trans = pca.transform(df_raw)
indexes = df_method.index.values
df_trans = pd.DataFrame.from_records(values_trans, index=indexes)
df_comb = df_method[['combination']]
df_new = pd.concat([df_trans, df_comb], axis=1)
df_method = df_new
else:
# Manually introduced features
guild_thresholds = [1, 5]
rank_scoring = ['spearman', 'dot_product']
list_scoring = ['jaccard']
if method == 'Combination' or method == 'random':
selected_columns = diana_analysis.obtain_columns_best_features(guild_thresholds, rank_scoring, list_scoring, ATC_SE=consider_se)
else:
selected_columns = diana_analysis.obtain_columns_best_features_for_specific_method(method, guild_thresholds, rank_scoring, list_scoring)
# Remove ATC columns if different ATC
if options.different_atc and consider_se:
selected_columns = [col for col in selected_columns if col not in dcatc_columns or col == 'combination']
print('Selected columns: {}\n'.format(', '.join(selected_columns)))
print('Number of selected features: {}\n'.format(len(selected_columns)-1)) # We take away the combinations column
# Define the new table with the selected columns
df_method = df_class[selected_columns]
dc_data = df_method[df_method['combination'] == 1]
ndc_data = df_method[df_method['combination'] == 0]
num_dc = len(dc_data.index)
num_ndc = len(ndc_data.index)
#------------------------------------------------------------------#
dc_data = df_method[df_method['combination'] == 1]
ndc_data = df_method[df_method['combination'] == 0]
num_dc = len(dc_data.index)
num_ndc = len(ndc_data.index)
# Stop if the number of drug interactions is smaller than the number of cross-validations!!
if num_dc < n_fold:
print('Not possible to do the analysis for classification {}. The number of positive samples is {} and the n-fold is {}\n'.format(classification, num_dc, n_fold))
analysis_results.setdefault(classification, {})
                analysis_results[classification].setdefault(method, {})
analysis_results[classification][method] = {'mean':'-','std':'-','num_dc':int(num_dc),'all_aucs':'-'}
#analysis_results = {'in classification' : {'mean':'-','std':'-','num_dc':int(num_dc)}, 'not in classification' : {'mean':'-','std':'-','num_dc':int(num_dc)}} # Defining the variable
continue
# Stop if the number of drug interactions is smaller than the minimum number given!!
if num_dc < min_num_dc_group:
print('Not possible to do the analysis for classification {}. The number of positive samples is {} and the minimum number per group is {}\n'.format(classification, num_dc, min_num_dc_group))
analysis_results.setdefault(classification, {})
analysis_results[classification].setdefault(method, {})
analysis_results[classification][method] = {'mean':'-','std':'-','num_dc':int(num_dc),'all_aucs':'-'}
#analysis_results = {'in classification' : {'mean':'-','std':'-','num_dc':int(num_dc)}, 'not in classification' : {'mean':'-','std':'-','num_dc':int(num_dc)}} # Defining the variable
continue
# Obtain the different non-drug combination groups to repeat the analysis
# We will use the number of drug combinations when classification in order to have same number of samples
print('Building {} repetition groups of {} (same) DC and {} (different) non-DC'.format(repetitions,num_dc,num_dc))
ndc_repetitions = diana_analysis.obtain_n_groups_of_k_length(ndc_data, repetitions, num_dc) # Obtain n number of groups containing different non-drug combinations to repeat the analysis n times
#print(ndc_repetitions)
mean_aucs = [] # Here we will store the means of AUCs from the cross-validations
std_aucs = [] # Here we will store the standard deviations of the AUCs from the cross-validations
all_aucs = [] # Here we will store ALL the AUCs
all_probs = [] # Here we store all the probabilities and labels
num_repetitions=0
for ndc_data_equal in ndc_repetitions:
num_items_group = int( float(num_dc) / float(n_fold) ) # Calculate the number of items in each group of the cross-validation
num_repetitions+=1
if num_repetitions == 1:
print('Building {} fold groups of {} DC and {} non-DC'.format(n_fold,num_items_group,num_items_group))
dc_groups = diana_analysis.obtain_n_groups_of_k_length(dc_data, n_fold, num_items_group, me_too_drug_combinations) # Defining the drug combination groups in each cross-validation step
ndc_groups = diana_analysis.obtain_n_groups_of_k_length(ndc_data_equal, n_fold, num_items_group, me_too_drug_combinations) # Defining the non-drug combination groups in each cross-validation step
merged_groups = [pd.concat([x,y]) for x,y in zip(dc_groups, ndc_groups)]
if method == 'random':
mean, var, std, list_auc, list_prob = diana_analysis.run_nfold_crossvalidation_dummy(n_fold, merged_groups, classifiers[classifier])
else:
mean, var, std, list_auc, list_prob = diana_analysis.run_nfold_crossvalidation_scikit_with_prob(n_fold, merged_groups, classifiers[classifier])
mean_aucs.append(mean)
std_aucs.append(std)
all_aucs = all_aucs + list_auc
all_probs = all_probs + list_prob
final_mean = np.mean(all_aucs)
#final_mean = np.mean(mean_aucs)
std = np.std(all_aucs)
mean_std = np.mean(std_aucs)
std_means = np.std(mean_aucs)
print('FINAL MEAN: {}'.format(final_mean))
print('STD: {}\n'.format(std))
#print('MEAN of STD: {}'.format(mean_std))
analysis_results.setdefault(classification, {})
analysis_results[classification].setdefault(method, {})
analysis_results[classification][method]['mean'] = final_mean
analysis_results[classification][method]['std'] = std
analysis_results[classification][method]['num_dc'] = int(num_dc)
analysis_results[classification][method]['all_aucs'] = all_aucs
analysis_results[classification][method]['all_probs'] = all_probs
#------------------------------------#
# PLOT PRECISION VS. SENSITIVITY #
#------------------------------------#
analysis_results = plot_precision_sensitivity(analysis_results, 'dctargets', classifications, classifications_labels, acc_sens_dctargets)
analysis_results = plot_precision_sensitivity(analysis_results, 'dcguild', classifications, classifications_labels, acc_sens_dcguild)
analysis_results = plot_precision_sensitivity(analysis_results, 'dcstructure', classifications, classifications_labels, acc_sens_dcstructure)
if consider_se:
if options.different_atc:
analysis_results = plot_precision_sensitivity(analysis_results, 'dcse', classifications, classifications_labels, acc_sens_dcse)
else:
analysis_results = plot_precision_sensitivity(analysis_results, 'dcatc', classifications, classifications_labels, acc_sens_dcatc)
analysis_results = plot_precision_sensitivity(analysis_results, 'dcse', classifications, classifications_labels, acc_sens_dcse)
#-------------------------------------------------#
# PLOT DISTRIBUTION OF AUC PER CLASSIFICATION #
#-------------------------------------------------#
plot_auc_distributions(analysis_results, classifications, classifications_labels, types_analysis, types_analysis_labels, plot_auc_distribution, fig_format=fig_format, consider_se=consider_se, different_atc=options.different_atc)
#--------------------------------------------------------#
# TABLE OF DISTRIBUTION OF AUC PER NUMBER OF TARGETS #
#--------------------------------------------------------#
with open(results_table, 'w') as results_table_fd:
# Header
results_table_fd.write(' ')
for method in types_analysis_labels:
results_table_fd.write('\t{}\t \t '.format(method))
results_table_fd.write('\n')
for classification in classifications:
results_table_fd.write('{}'.format(classification))
for method in types_analysis:
mean = analysis_results[classification][method]['mean']
std = analysis_results[classification][method]['std']
num_dc = analysis_results[classification][method]['num_dc']
results_table_fd.write('\t{}\t{}\t{}'.format(mean, std, num_dc))
results_table_fd.write('\n')
#----------------------------------------#
# TABLE OF PRECISION VS. SENSITIVITY #
#----------------------------------------#
with open(prec_rec_table, 'w') as prec_rec_table_fd:
# Header
prec_rec_table_fd.write(' ')
for method in types_analysis2:
prec_rec_table_fd.write('\t{}\t '.format(method))
prec_rec_table_fd.write('\n')
for classification in classifications:
prec_rec_table_fd.write('{}'.format(classification))
for method in types_analysis2:
cut_off = analysis_results[classification][method]['cut_off']
value = analysis_results[classification][method]['value']
prec_rec_table_fd.write('\t{}\t{}'.format(cut_off, value))
prec_rec_table_fd.write('\n')
#-------------------------------------------------------------------#
# TABLE OF COMPARISON OF AUC DISTRIBUTIONS USING MANN WHITNEY U #
#-------------------------------------------------------------------#
with open(mannwhitney_file, 'w') as mannwhitney_fd:
mann_results = {}
mannwhitney_fd.write(' \t ')
for method in types_analysis_labels:
mannwhitney_fd.write('\t{}'.format(method))
mannwhitney_fd.write('\n')
# Perform the comparisons
for classification in classifications:
mann_results.setdefault(classification, {})
for method1 in types_analysis:
mann_results[classification].setdefault(method1, {})
for method2 in types_analysis:
if method1 == method2:
mann_results[classification][method1][method2] = '-'
else:
method1_dist = analysis_results[classification][method1]['all_aucs']
method2_dist = analysis_results[classification][method2]['all_aucs']
stat, pval = scipy.stats.mannwhitneyu(method1_dist, method2_dist)
mann_results[classification][method1][method2] = [stat, pval]
# Write the table of crossings
for classification in classifications:
for method1 in types_analysis:
mannwhitney_fd.write('{}\t{}'.format(classification, method1))
for method2 in types_analysis:
if method1 == method2:
mannwhitney_fd.write('\t-')
else:
stat, pval = mann_results[classification][method1][method2]
mannwhitney_fd.write('\t{}, {:.2e}'.format(stat,pval))
mannwhitney_fd.write('\n')
# End marker for time
end = time.time()
print('\n DIANA INFO:\tTIME OF EXECUTION: {:.3f} seconds or {:.3f} minutes.\n'.format(end - start, (end - start) / 60))
return
#######################
#######################
# SECONDARY FUNCTIONS #
#######################
#######################
def fileExist(file):
"""
Checks if a file exists AND is a file
"""
return os.path.exists(file) and os.path.isfile(file)
def check_file(file):
"""
Checks if a file exists and if not, raises FileNotFound exception
"""
if not fileExist(file):
raise FileNotFound(file)
def create_directory(directory):
"""
Checks if a directory exists and if not, creates it
"""
try:
os.stat(directory)
    except OSError:
os.mkdir(directory)
return
def check_directory(directory):
"""
Checks if a directory exists and if not, raises DirNotFound exception
"""
try:
os.stat(directory)
    except OSError:
raise DirNotFound(directory)
class FileNotFound(Exception):
"""
Exception raised when a file is not found.
"""
def __init__(self, file):
self.file = file
def __str__(self):
return 'The file {} has not been found.\nTherefore, the comparison cannot be performed. Please, check that all the profiles have been correctly generated.\n'.format(self.file)
class DirNotFound(Exception):
"""
Exception raised when a directory is not found.
"""
def __init__(self, directory):
self.directory = directory
def __str__(self):
return 'The directory {} has not been found.\nTherefore, the comparison cannot be performed. Please, check that all the parameters have been correctly introduced and the profiles have been correctly generated.\n'.format(self.directory)
class IncorrectClassificationType(Exception):
"""
Exception raised when the classification type is not correct.
"""
def __init__(self, classification):
self.classification = classification
def __str__(self):
return 'The classification {} is not correct.\nPlease, introduce "dcdb", "biological_process", or "pathway".\n'.format(self.classification)
def plot_precision_sensitivity(analysis_results, type_analysis, classifications, classifications_labels, name_plot, fig_format='png'):
"""
Plots the precision vs. sensitivity curve.
"""
cut_offs = frange(0,1,0.005)
fig, ax = plt.subplots()
prec_colors = ['#a6d0ff','#4199fd','#005bc1','#003169']
#prec_colors = ['#a6d0ff','#003169']
sens_colors = ['#a6ff9e','#15ff00','#0d9f00','#085c00']
#sens_colors = ['#a6ff9e','#085c00']
point_colors = ['#ffb1b1','#ff3737','#d40303','#890000']
#point_colors = ['#ffb1b1','#890000']
c = 0
for classification in classifications:
precision = []
sensitivity = []
print(type_analysis)
print(classification)
all_probs = analysis_results[classification][type_analysis]['all_probs']
for cut_off in cut_offs:
tp,fp,tn,fn = calculate_tp_fp_tn_fn(all_probs, cut_off)
try:
prec_val = float(tp)/(float(tp)+float(fp))
            except ZeroDivisionError:
prec_val = 1
sens_val = float(tp)/(float(tp)+float(fn))
precision.append(prec_val)
sensitivity.append(sens_val)
ax.plot( cut_offs, precision, '-', color=prec_colors[c])
ax.plot( cut_offs, sensitivity, '-', color=sens_colors[c])
# Find the intersection point
idx = np.argwhere(np.diff(np.sign(np.array(precision) - np.array(sensitivity))) != 0).reshape(-1) + 0
# Plot the intersection point
ax.plot(cut_offs[idx[0]], precision[idx[0]], 'o', color=point_colors[c])
pylab.xlabel('Probability thresholds')
pylab.ylabel('Precision-Recall')
analysis_results[classification][type_analysis]['cut_off'] = cut_offs[idx[0]]
analysis_results[classification][type_analysis]['value'] = precision[idx[0]]
c+=1
classifications_labels = classifications_labels + ['Precision', 'Recall']
# draw temporary color lines and use them to create a legend
hB, = pylab.plot([1,1],'o', color='#ffb1b1')
hG, = pylab.plot([1,1],'o', color='#ff3737')
hY, = pylab.plot([1,1],'o', color='#d40303')
hR, = pylab.plot([1,1],'o', color='#890000')
hPr, = pylab.plot([1,1],'-', color='#0078ff')
hRe, = pylab.plot([1,1],'-', color='#0d9f00')
lgd = ax.legend(handles=(hB, hG, hY, hR, hPr, hRe), labels=classifications_labels, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
hB.set_visible(False)
hG.set_visible(False)
hY.set_visible(False)
hR.set_visible(False)
hPr.set_visible(False)
hRe.set_visible(False)
pylab.savefig(name_plot, format=fig_format, dpi=300, bbox_extra_artists=(lgd,), bbox_inches='tight')
#pylab.show()
return analysis_results
def calculate_tp_fp_tn_fn(all_probs, cut_off):
tp=0
fp=0
tn=0
fn=0
for prob, label, _ in all_probs:
if prob > cut_off:
if label == 1:
tp+=1
else:
fp+=1
else:
if label == 1:
fn+=1
else:
tn+=1
return tp,fp,tn,fn
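# A quick sanity check for calculate_tp_fp_tn_fn (synthetic (probability, label, _) tuples):
#   probs = [(0.9, 1, None), (0.4, 1, None), (0.8, 0, None), (0.1, 0, None)]
#   calculate_tp_fp_tn_fn(probs, 0.5)  -> (1, 1, 1, 1), i.e. (tp, fp, tn, fn)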
def frange(x,y,jump):
    array = []
    while x <= y:
        array.append(x)
        x += jump
    return array
def plot_auc_distributions(analysis_results, classifications, classifications_labels, types_analysis, types_analysis_labels, plot_name, fig_format='png', consider_se=False, different_atc=False):
fig = pylab.figure(dpi=300)
ax = pylab.axes()
#pylab.hold(True)
pos = 2
xticks = [] # Define the places in which the labels will be
for classification in classifications:
positions = []
for x in xrange(len(types_analysis)):
positions.append(pos) # Define the positions of the boxplots
pos+=1
pos+=1 # Add separation between boxplot groups
pylab.plt.axvline(x=pos,linewidth=0.3,linestyle='--',color='black',dashes=(1, 1))
pos+=2 # Add separation between boxplot groups
data = []
for method in types_analysis:
data.append(analysis_results[classification][method]['all_aucs']) # Get the groups of plots that we will add
# Boxplot group
#bp = boxplot(data, positions = positions, widths = 0.6)
bp = pylab.boxplot(data, positions = positions, widths = 0.6, patch_artist=True)
setBoxColors(bp, len(types_analysis), consider_se, different_atc)
tick = np.mean(positions) # The label will be at the mean of the positions (in the middle)
xticks.append(tick)
# Set axes limits and labels
pylab.xlim(0,pos-2)
pylab.ylim(0,1)
axes_labels = classifications_labels
ax.set_xticklabels(axes_labels)
ax.set_xticks(xticks)
ax.yaxis.grid(True)
pylab.ylabel('Distribution of AUC values')
# draw temporary color lines and use them to create a legend
hR, = pylab.plot([1,1],'-', color='red')
hG, = pylab.plot([1,1],'-', color='green')
hB, = pylab.plot([1,1],'-', color='black')
hW, = pylab.plot([1,1],'-', color='#aeaeae') # grey
if consider_se:
hBl, = pylab.plot([1,1],'-', color='#22a9bd') # blue
hO, = pylab.plot([1,1],'-', color='#e59600') # orange
lgd = ax.legend(handles=(hR, hG, hB, hBl, hO, hW), labels=types_analysis_labels, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
hBl.set_visible(False)
hO.set_visible(False)
else:
lgd = ax.legend(handles=(hR, hG, hB, hW), labels=types_analysis_labels, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
hR.set_visible(False)
hG.set_visible(False)
hB.set_visible(False)
hW.set_visible(False)
pylab.savefig(plot_name, format=fig_format, bbox_extra_artists=(lgd,), bbox_inches='tight')
pylab.show()
return
def setBoxColors(bp, bg_color, consider_se, different_atc):
"""
Set the colors of the box plots groups
Code from: http://stackoverflow.com/questions/16592222/matplotlib-group-boxplots
"""
pylab.setp(bp['boxes'][0], color='#b80000')
pylab.setp(bp['caps'][0], color='#b80000')
pylab.setp(bp['caps'][1], color='#b80000')
pylab.setp(bp['whiskers'][0], color='#b80000')
pylab.setp(bp['whiskers'][1], color='#b80000')
pylab.setp(bp['medians'][0], color='black')
bp['boxes'][0].set_facecolor('#ff7373') #red
pylab.setp(bp['boxes'][1], color='green')
pylab.setp(bp['caps'][2], color='green')
pylab.setp(bp['caps'][3], color='green')
pylab.setp(bp['whiskers'][2], color='green')
pylab.setp(bp['whiskers'][3], color='green')
pylab.setp(bp['medians'][1], color='black')
bp['boxes'][1].set_facecolor('#32f232') #green
pylab.setp(bp['boxes'][2], color='black')
pylab.setp(bp['caps'][4], color='black')
pylab.setp(bp['caps'][5], color='black')
pylab.setp(bp['whiskers'][4], color='black')
pylab.setp(bp['whiskers'][5], color='black')
pylab.setp(bp['medians'][2], color='#fbc562')
bp['boxes'][2].set_facecolor('#4f4f4f') #black
if consider_se:
if not different_atc:
# BLUE (dcATC)
pylab.setp(bp['boxes'][3], color='#0049e5')
pylab.setp(bp['caps'][6], color='#0049e5')
pylab.setp(bp['caps'][7], color='#0049e5')
pylab.setp(bp['whiskers'][6], color='#0049e5')
pylab.setp(bp['whiskers'][7], color='#0049e5') #dark blue
pylab.setp(bp['medians'][3], color='black')
bp['boxes'][3].set_facecolor('#22a9bd') #blue
# ORANGE (dcSE)
pylab.setp(bp['boxes'][4], color='#966200')
pylab.setp(bp['caps'][8], color='#966200')
pylab.setp(bp['caps'][9], color='#966200')
pylab.setp(bp['whiskers'][8], color='#966200')
pylab.setp(bp['whiskers'][9], color='#966200') #brown
pylab.setp(bp['medians'][4], color='black')
bp['boxes'][4].set_facecolor('#e59600') #orange
# GREY (Random)
pylab.setp(bp['boxes'][5], color='black')
pylab.setp(bp['caps'][10], color='black')
pylab.setp(bp['caps'][11], color='black')
pylab.setp(bp['whiskers'][10], color='black')
pylab.setp(bp['whiskers'][11], color='black')
pylab.setp(bp['medians'][5], color='#fbc562')
bp['boxes'][5].set_facecolor('#aeaeae') #grey
else:
# ORANGE (dcSE)
pylab.setp(bp['boxes'][3], color='#966200')
pylab.setp(bp['caps'][6], color='#966200')
pylab.setp(bp['caps'][7], color='#966200')
pylab.setp(bp['whiskers'][6], color='#966200')
pylab.setp(bp['whiskers'][7], color='#966200') #brown
pylab.setp(bp['medians'][3], color='black')
bp['boxes'][3].set_facecolor('#e59600') #orange
# GREY (Random)
pylab.setp(bp['boxes'][4], color='black')
pylab.setp(bp['caps'][8], color='black')
pylab.setp(bp['caps'][9], color='black')
pylab.setp(bp['whiskers'][8], color='black')
pylab.setp(bp['whiskers'][9], color='black')
pylab.setp(bp['medians'][4], color='#fbc562')
bp['boxes'][4].set_facecolor('#aeaeae') #grey
else:
# GREY (Random)
pylab.setp(bp['boxes'][3], color='black')
pylab.setp(bp['caps'][6], color='black')
pylab.setp(bp['caps'][7], color='black')
pylab.setp(bp['whiskers'][6], color='black')
        pylab.setp(bp['whiskers'][7], color='black')
        pylab.setp(bp['medians'][3], color='#fbc562')
        bp['boxes'][3].set_facecolor('#aeaeae') #grey
return
if __name__ == "__main__":
main()
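# A minimal invocation sketch (hypothetical workspace path; the script relies on the
# Python 2 stack it imports, e.g. cPickle and pandas .ix):
#   python analyze_results_by_classification.py -cl dcdb -se -ws /path/to/workspace -th top_threshold.list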
| 43.170776
| 243
| 0.609684
|
794856cdab68d2694768924e6f6d61a8baf7891d
| 563
|
py
|
Python
|
tests/pyre/schemata/decimals.py
|
BryanRiel/pyre
|
179359634a7091979cced427b6133dd0ec4726ea
|
[
"BSD-3-Clause"
] | null | null | null |
tests/pyre/schemata/decimals.py
|
BryanRiel/pyre
|
179359634a7091979cced427b6133dd0ec4726ea
|
[
"BSD-3-Clause"
] | null | null | null |
tests/pyre/schemata/decimals.py
|
BryanRiel/pyre
|
179359634a7091979cced427b6133dd0ec4726ea
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis
# orthologue
# (c) 1998-2018 all rights reserved
#
"""
Verify that decimal conversions work as expected
"""
def test():
import decimal
import pyre.schemata
# create a descriptor
descriptor = pyre.schemata.decimal()
# check
assert descriptor.coerce("1.20") == decimal.Decimal("1.20")
return
# main
if __name__ == "__main__":
# skip pyre initialization since we don't rely on the executive
pyre_noboot = True
# do...
test()
# end of file
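# Note: decimal.Decimal('1.20') is numerically equal to Decimal('1.2') but keeps the
# trailing zero in its string form, which is why the test coerces from the string '1.20':
#   decimal.Decimal('1.20') == decimal.Decimal('1.2')  -> True   (numeric equality)
#   str(decimal.Decimal('1.20'))                       -> '1.20' (significance preserved)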
| 15.216216
| 67
| 0.639432
|
794856f275f3e101f28ef1bfae2eddf8d9ab52e9
| 16,447
|
py
|
Python
|
dev-files/targets/RP2040/generators-service/svc-dual-core-v2/service_generator.py
|
pierremolinaro/real-time-kernel-pi-pico
|
581360dd1135e17fe0c4ddabbe74052a366de7d6
|
[
"MIT"
] | 3
|
2021-05-05T19:40:01.000Z
|
2021-05-08T06:40:35.000Z
|
dev-files/targets/RP2040/generators-service/svc-dual-core-v2/service_generator.py
|
pierremolinaro/real-time-kernel-pi-pico
|
581360dd1135e17fe0c4ddabbe74052a366de7d6
|
[
"MIT"
] | null | null | null |
dev-files/targets/RP2040/generators-service/svc-dual-core-v2/service_generator.py
|
pierremolinaro/real-time-kernel-pi-pico
|
581360dd1135e17fe0c4ddabbe74052a366de7d6
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python3
# -*- coding: UTF-8 -*-
#---------------------------------------------------------------------------------------------------
def asSeparator () :
return "//" + ("-" * 78) + "\n"
#---------------------------------------------------------------------------------------------------
def generate_svc_handler () :
sCode = asSeparator ()
sCode += "// SVC HANDLE (DOUBLE STACK MODE)\n"
sCode += asSeparator ()
sCode += "//\n"
sCode += "// PSP+32 -> | |\n"
sCode += "// |----------------------------| -\n"
sCode += "// PSP+28 -> | xPSR | |\n"
sCode += "// |----------------------------| |\n"
sCode += "// PSP+24 -> | PC (after SVC instruction) | |\n"
sCode += "// |----------------------------| |\n"
sCode += "// PSP+20 -> | LR | |\n"
sCode += "// |----------------------------| |\n"
sCode += "// PSP+16 -> | R12 | | Saved by interrupt response\n"
sCode += "// |----------------------------| |\n"
sCode += "// PSP+12 -> | R3 | |\n"
sCode += "// |----------------------------| |\n"
sCode += "// PSP+8 -> | R2 | |\n"
sCode += "// |----------------------------| |\n"
sCode += "// PSP+4 -> | R1 | |\n"
sCode += "// |----------------------------| |\n"
sCode += "// /--- PSP ----> | R0 | |\n"
sCode += "// | |----------------------------| -\n"
sCode += "// | | |\n"
sCode += "// |\n"
sCode += "// | *---------------------*\n"
sCode += "// | | LR return code | +36 [ 9]\n"
sCode += "// | *---------------------*\n"
sCode += "// \----------------------------------------- | R13 (PSP) | +32 [ 8]\n"
sCode += "// *---------------------*\n"
sCode += "// | R11 | +28 [ 7]\n"
sCode += "// *---------------------*\n"
sCode += "// | R10 | +24 [ 6]\n"
sCode += "// *---------------------*\n"
sCode += "// | R9 | +20 [ 5]\n"
sCode += "// *---------------------*\n"
sCode += "// | R8 | +16 [ 4]\n"
sCode += "// *---------------------*\n"
sCode += "// | R7 | +12 [ 3]\n"
sCode += "// *---------------------*\n"
sCode += "// | R6 | + 8 [ 2]\n"
sCode += "// *---------------------*\n"
sCode += "// | R5 | + 4 [ 1]\n"
sCode += "// *------------------------------------* *---------------------*\n"
sCode += "// | var.running.task.control.block.ptr +------> | R4 | + 0 [ 0]\n"
sCode += "// *------------------------------------* *---------------------*\n"
sCode += "//\n"
sCode += asSeparator () + "\n"
sCode += " .section .bss.var.background.tasks.context, \"aw\", %nobits\n"
sCode += " .align 3\n\n"
sCode += "var.background.tasks.context:\n"
sCode += " .space 8 // uint32_t [2]\n\n"
sCode += asSeparator () + "\n"
sCode += " .section .text.interrupt.SVC, \"ax\", %progbits\n\n"
sCode += " .global interrupt.SVC\n"
sCode += " .type interrupt.SVC, %function\n\n"
sCode += "interrupt.SVC:\n"
sCode += "//----------------------------------------- R3 <- Running task var pointer\n"
sCode += " ldr r2, = 0xD0000000 + 0x000 // Address of SIO CPUID control register\n"
sCode += " ldr r1, [r2] // R1 <- 0 for CPU0, 1 for CPU 1\n"
sCode += " lsls r1, r1, # 2 // R1 <- 0 for CPU0, 4 for CPU 1\n"
sCode += " ldr r3, = var.running.tasks.control.block.ptr\n"
sCode += " add r3, r1\n"
sCode += "//----------------------------------------- R0 <- calling task context\n"
sCode += " ldr r0, [r3]\n"
sCode += "//----------------------------------------- Save calling task context\n"
sCode += " movs r1, r0 // R1 <- calling task context, update condition flags\n"
sCode += " beq context.has.been.partially.saved\n"
sCode += "//--- Save registers r4 to r7\n"
sCode += " stm r0!, {r4, r5, r6, r7}\n"
sCode += "context.has.been.partially.saved:\n"
sCode += "//----------------------------------------- R7 <- Current task descriptor pointer\n"
sCode += " mov r7, r1\n"
sCode += "//----------------------------------------- R5 <- Running task var pointer\n"
sCode += " mov r5, r3\n"
sCode += "//----------------------------------------- R6 <- PSP\n"
sCode += " mrs r6, psp\n"
sCode += "//----------------------------------------- R0 <- Address of instruction following SVC\n"
sCode += " ldr r0, [r6, #24] // 24 : 6 stacked registers before saved PC\n"
sCode += "//----------------------------------------- R0 <- bits 0-7 of SVC instruction\n"
sCode += " subs r0, #2 // R0 <- Address of SVC instruction\n"
sCode += " ldrb r0, [r0] // R0 is service call index\n"
sCode += "//----------------------------------------- R1 <- address of dispatcher table\n"
sCode += " ldr r1, = svc.dispatcher.table\n"
sCode += "//----------------------------------------- R12 <- address of routine to call\n"
sCode += " lsls r0, r0, # 2 // R0 = (R0 << 2)\n"
sCode += " ldr r1, [r1, r0]\n"
sCode += " mov r12, r1\n"
sCode += "//----------------------------------------- Restore R0, R1, R2 and R3 from saved stack\n"
sCode += " ldmia r6!, {r0, r1, r2, r3}\n"
sCode += "//----------------------------------------- Get spinlock 0\n"
sCode += "//--- R6 <- Address of SPINLOCK 0 (rp2040 datasheet, 2.3.1.7, page 42)\n"
sCode += " ldr r6, = 0xD0000000 + 0x100\n"
sCode += "//--- Read: attempt to claim the lock. Read value is nonzero if the lock was\n"
sCode += "// successfully claimed, or zero if the lock had already been claimed\n"
sCode += "// by a previous read (rp2040 datasheet, section 2.3.1.3 page 30).\n"
sCode += "svc.spinlock.busy.wait:\n"
sCode += " ldr r4, [r6]\n"
sCode += " cmp r4, # 0\n"
sCode += " beq svc.spinlock.busy.wait\n"
sCode += "//----------------------------------------- R4 <- Saved LR\n"
sCode += " mov r4, lr\n"
sCode += "//----------------------------------------- Call service routine\n"
sCode += " blx r12\n"
sCode += "//--- Continues in sequence to handle.context.switch\n\n"
sCode += asSeparator ()
sCode += "// HANDLE CONTEXT SWITCH (DOUBLE STACK MODE)\n"
sCode += "// - R4 contains the saved LR\n"
sCode += "// - R5 contains the running task var pointer\n"
sCode += "// - R6 contains the spinlock #0 address\n"
sCode += "// - R7 contains the current task descriptor pointer\n"
sCode += asSeparator () + "\n"
sCode += "handle.context.switch:\n"
sCode += "//----------------------------------------- Select task to run\n"
sCode += " bl kernel.select.task.to.run.and.notify.other.cpu\n"
sCode += "//----------------------------------------- Restore LR from R4\n"
sCode += " mov lr, r4\n"
sCode += "//----------------------------------------- Release spinlock\n"
sCode += "//--- Write (any value): release the lock (rp2040 datasheet, section 2.3.1.3 page 30).\n"
sCode += "// The next attempt to claim the lock will be successful.\n"
sCode += " str r4, [r6]\n"
sCode += "//----------------------------------------- R0 <- new task context\n"
sCode += " ldr r0, [r5]\n"
sCode += "//----------------------------------------- Current task did change ?\n"
sCode += " cmp r0, r7 // R0: new task context, R7: current task context\n"
sCode += " bne current.task.did.change\n"
sCode += "//----------------------------------------- Restore R4, R5, R6, R7 ?\n"
sCode += " movs r1, r7 // R1 <- calling task context, update condition flags\n"
sCode += " beq background.task.is.current.task\n"
sCode += " ldm r1!, {r4, r5, r6, r7}\n"
sCode += "background.task.is.current.task:\n"
sCode += " bx lr\n"
sCode += "//----------------------------------------- Yes, current task did change\n"
sCode += "current.task.did.change:\n"
sCode += "//----------------------------------------- Save current task context\n"
sCode += " mrs r6, psp\n"
sCode += " movs r1, r7\n"
sCode += " beq save.background.task.context\n"
  sCode += "  adds r1, # 16 // R4 to R7 have already been saved\n"
sCode += " mov r2, r8\n"
sCode += " mov r3, r9\n"
sCode += " mov r4, r10\n"
sCode += " mov r5, r11\n"
sCode += " mov r7, lr\n"
sCode += " stm r1!, {r2, r3, r4, r5, r6, r7} // R6 contains PSP\n"
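  # Task context layout (10 words): [0..3] R4-R7, [4..7] R8-R11, [8] PSP, [9] LR
  # (EXC_RETURN). R4-R7 were stored at exception entry, hence the 'adds r1, #16' above.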
sCode += "context.has.been.saved:\n"
sCode += "//----------------------------------------- Restore new task context\n"
sCode += " cmp r0, # 0\n"
sCode += " beq restore.background.task.context\n"
sCode += " ldm r0!, {r4, r5, r6, r7} // Restore R4 to R7\n"
sCode += " ldm r0!, {r1, r2, r3} // Restore R8 to R10\n"
sCode += " mov r8, r1\n"
sCode += " mov r9, r2\n"
sCode += " mov r10, r3\n"
sCode += " ldm r0!, {r1, r2, r3} // Restore R11, PSP, LR\n"
sCode += " mov r11, r1\n"
sCode += " msr psp, r2\n"
sCode += " bx r3\n"
sCode += "//----------------------------------------- Restore background task context\n"
sCode += "restore.background.task.context:\n"
sCode += " ldr r1, = 0xD0000000 + 0x000 // Address of SIO CPUID control register\n"
sCode += " ldr r1, [r1] // R1 <- 0 for CPU0, 1 for CPU 1\n"
  sCode += "  lsls r1, r1, # 2 // R1 <- 0 for CPU0, 4 for CPU 1\n"
sCode += " ldr r0, = var.background.tasks.context\n"
sCode += " ldr r2, [r0, r1]\n"
sCode += " msr psp, r2\n"
sCode += "//--- R3 <- 0xFFFFFFFD \n"
sCode += " movs r3, # ~ 0xFFFFFFFD\n"
sCode += " rsbs r3, r3, # 0\n"
sCode += "//--- Return from exception\n"
sCode += " bx r3\n"
sCode += "//----------------------------------------- Save background context\n"
sCode += "save.background.task.context:\n"
sCode += " ldr r1, = 0xD0000000 + 0x000 // Address of SIO CPUID control register\n"
sCode += " ldr r1, [r1] // R1 <- 0 for CPU0, 1 for CPU 1\n"
sCode += " lsls r1, r1, # 2 // R1 <- 0 for CPU0, 4 for CPU 1\n"
sCode += " ldr r2, = var.background.tasks.context\n"
sCode += " str r6, [r2, r1] // R6 contains PSP\n"
sCode += " b context.has.been.saved\n\n"
return sCode
#---------------------------------------------------------------------------------------------------
# ENTRY POINT
#---------------------------------------------------------------------------------------------------
def buildServiceCode (serviceList, boolServiceSet, interruptServiceList, interruptDictionary) :
sFile = generate_svc_handler ()
del interruptDictionary ["SVC"]
sFile += asSeparator ()
sFile += "// SERVICES\n"
idx = 2
for service in serviceList :
sFile += asSeparator () + "\n"
sFile += " .section .text." + service + ", \"ax\", %progbits\n"
sFile += " .global " + service +"\n"
sFile += " .align 1\n"
sFile += " .type " + service +", %function\n\n"
sFile += service +":\n"
sFile += " .fnstart\n"
sFile += " svc #" + str (idx) + "\n"
if service in boolServiceSet :
sFile += " ldr r3, = get.user.result\n"
sFile += " bx r3\n\n"
else:
sFile += " bx lr\n\n"
sFile += ".Lfunc_end_" + service +":\n"
sFile += " .size " + service +", .Lfunc_end_" + service +" - " + service +"\n"
sFile += " .cantunwind\n"
sFile += " .fnend\n\n"
idx += 1
sFile += asSeparator ()
sFile += "// SERVICE DISPATCHER TABLE\n"
sFile += asSeparator () + "\n"
sFile += " .align 2\n"
sFile += " .global svc.dispatcher.table\n\n"
sFile += "svc.dispatcher.table:\n"
sFile += " .word cpu.0.phase3.init // 0\n"
sFile += " .word cpu.1.phase3.init // 1\n"
idx = 2
for service in serviceList :
sFile += " .word service." + service + " // " + str (idx) + "\n"
idx += 1
sFile += "\n"
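  #------------------------------ Dispatcher table layout: indices 0 and 1 are reserved
  # for the per-CPU phase-3 init routines; service N ('svc #N' in the stubs above)
  # selects word N, which the SVC handler fetches after 'lsls r0, r0, #2'.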
#------------------------------ Interrupts as service
for interruptServiceName in interruptServiceList :
sFile += asSeparator ()
sFile += "// INTERRUPT - SERVICE: " + interruptServiceName + "\n"
sFile += asSeparator () + "\n"
sFile += " .section .text.interrupt." + interruptServiceName + ", \"ax\", %progbits\n\n"
sFile += " .align 1\n"
sFile += " .global interrupt." + interruptServiceName + "\n"
sFile += " .type interrupt." + interruptServiceName + ", %function\n\n"
sFile += "interrupt." + interruptServiceName + ":\n"
sFile += "//----------------------------------------- R2 <- CPU INDEX\n"
sFile += " ldr r3, = 0xD0000000 + 0x000 // Address of SIO CPUID control register\n"
sFile += " ldr r2, [r3] // R2 <- 0 for CPU0, 1 for CPU 1\n"
sFile += "//----------------------------------------- Activity led On\n"
sFile += "// Uses R2, do not change it\n"
sFile += " MACRO_ACTIVITY_LED_0_OR_1_ON\n"
sFile += "//----------------------------------------- R3 <- Running task var pointer\n"
sFile += " lsls r1, r2, # 2 // R1 <- 0 for CPU0, 4 for CPU 1\n"
sFile += " ldr r3, = var.running.tasks.control.block.ptr\n"
sFile += " add r3, r1\n"
sFile += "//----------------------------------------- R0 <- calling task context\n"
sFile += " ldr r0, [r3]\n"
sFile += "//----------------------------------------- Save calling task context\n"
sFile += " movs r1, r0\n"
sFile += " beq context.has.been.partially.saved." + interruptServiceName + "\n"
sFile += "//--- Save registers r4 to r7\n"
sFile += " stm r0!, {r4, r5, r6, r7}\n"
sFile += "context.has.been.partially.saved." + interruptServiceName + ":\n"
sFile += "//----------------------------------------- R7 <- Current task descriptor pointer\n"
sFile += " mov r7, r1\n"
sFile += "//----------------------------------------- R5 <- Running task var pointer\n"
sFile += " mov r5, r3\n"
sFile += "//----------------------------------------- Get spinlock 0\n"
sFile += "//--- R6 <- Address of SPINLOCK 0 (rp2040 datasheet, 2.3.1.7, page 42)\n"
sFile += " ldr r6, = 0xD0000000 + 0x100\n"
sFile += "//--- Read: attempt to claim the lock. Read value is nonzero if the lock was\n"
sFile += "// successfully claimed, or zero if the lock had already been claimed\n"
sFile += "// by a previous read (rp2040 datasheet, section 2.3.1.3 page 30).\n"
sFile += interruptServiceName + ".spinlock.busy.wait:\n"
sFile += " ldr r4, [r6]\n"
sFile += " cmp r4, # 0\n"
sFile += " beq " + interruptServiceName +".spinlock.busy.wait\n"
sFile += "//----------------------------------------- R4 <- Saved LR\n"
sFile += " mov r4, lr\n"
sFile += "//----------------------------------------- Call Interrupt handler\n"
sFile += " bl interrupt.service." + interruptServiceName + "\n"
sFile += "//----------------------------------------- Perform the context switch, if needed\n"
sFile += "// - R4 contains the saved LR\n"
sFile += "// - R5 contains the running task var pointer\n"
sFile += "// - R6 contains the spinlock #0 address\n"
sFile += "// - R7 contains the current task descriptor pointer\n"
sFile += " b handle.context.switch\n\n"
cppFile = ""
return (cppFile, sFile, interruptDictionary)
#---------------------------------------------------------------------------------------------------
| 56.910035
| 101
| 0.407977
|
7948575250c4618b719d86f58b3c04c98641b623
| 1,809
|
py
|
Python
|
sqlalchemy_utils/types/locale.py
|
jarkkorantala/sqlalchemy-utils
|
7cee65f0a3074245b853425e19a732aa274bfa3e
|
[
"BSD-3-Clause"
] | 11
|
2016-09-14T21:59:55.000Z
|
2019-01-28T21:58:31.000Z
|
sqlalchemy_utils/types/locale.py
|
jarkkorantala/sqlalchemy-utils
|
7cee65f0a3074245b853425e19a732aa274bfa3e
|
[
"BSD-3-Clause"
] | 11
|
2019-12-26T17:21:03.000Z
|
2022-03-21T22:17:07.000Z
|
sqlalchemy_utils/types/locale.py
|
jarkkorantala/sqlalchemy-utils
|
7cee65f0a3074245b853425e19a732aa274bfa3e
|
[
"BSD-3-Clause"
] | 3
|
2021-03-22T14:24:40.000Z
|
2021-04-02T08:05:27.000Z
|
import six
from sqlalchemy import types
from ..exceptions import ImproperlyConfigured
from .scalar_coercible import ScalarCoercible
babel = None
try:
import babel
except ImportError:
pass
class LocaleType(types.TypeDecorator, ScalarCoercible):
"""
    LocaleType saves Babel_ Locale objects into the database. The Locale
    objects are converted to strings on the way in and back to objects on the
    way out.
In order to use LocaleType you need to install Babel_ first.
.. _Babel: http://babel.pocoo.org/
::
from sqlalchemy_utils import LocaleType
from babel import Locale
class User(Base):
__tablename__ = 'user'
id = sa.Column(sa.Integer, autoincrement=True)
name = sa.Column(sa.Unicode(50))
locale = sa.Column(LocaleType)
user = User()
user.locale = Locale('en_US')
session.add(user)
session.commit()
Like many other types this type also supports scalar coercion:
::
user.locale = 'de_DE'
user.locale # Locale('de', territory='DE')
"""
impl = types.Unicode(10)
def __init__(self):
if babel is None:
raise ImproperlyConfigured(
                'The Babel package is required to use LocaleType.'
)
def process_bind_param(self, value, dialect):
if isinstance(value, babel.Locale):
return six.text_type(value)
if isinstance(value, six.string_types):
return value
def process_result_value(self, value, dialect):
if value is not None:
return babel.Locale.parse(value)
def _coerce(self, value):
if value is not None and not isinstance(value, babel.Locale):
return babel.Locale.parse(value)
return value
| 23.802632
| 76
| 0.632946
|
794857d1ce05cbb50b4b476265d374584128148b
| 998
|
py
|
Python
|
doc/examples/box.py
|
djkool/OcempGUI3
|
43a68033cb0dbad10654231299cb762cd18b7c25
|
[
"BSD-2-Clause"
] | null | null | null |
doc/examples/box.py
|
djkool/OcempGUI3
|
43a68033cb0dbad10654231299cb762cd18b7c25
|
[
"BSD-2-Clause"
] | null | null | null |
doc/examples/box.py
|
djkool/OcempGUI3
|
43a68033cb0dbad10654231299cb762cd18b7c25
|
[
"BSD-2-Clause"
] | null | null | null |
# Box examples.
from ocempgui.widgets import *
from ocempgui.widgets.Constants import *
def create_box_view ():
frame = VFrame (Label ("Box example"))
frame.topleft = 10, 10
# The Box with 200x200 pixels in size.
box = Box (200, 200)
# Widgets to place into it.
label = ImageLabel ("image.png")
label.topleft = 10, 10
button = Button ("A Button")
button.topleft = 30, 30
frame1 = VFrame (Label ("A VFrame"))
frame1.add_child (Label ("Label in the VFrame"))
frame1.topleft = 60, 80
chk = CheckButton ("A CheckButton")
chk.topleft = 130, 110
box.children = label, button, frame1, chk
frame.add_child (box)
return frame
if __name__ == "__main__":
# Initialize the drawing window.
re = Renderer ()
re.create_screen (300, 300)
re.title = "Box examples"
re.color = (234, 228, 223)
re.show_layer_info = True
re.add_widget (create_box_view ())
# Start the main rendering loop.
re.start ()
| 24.341463
| 52
| 0.634269
|
79485ae75eab2d18044d9795502f67c41838d1b6
| 2,572
|
py
|
Python
|
aliyun-python-sdk-vs/aliyunsdkvs/request/v20181212/UpdateVsPullStreamInfoConfigRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 1,001
|
2015-07-24T01:32:41.000Z
|
2022-03-25T01:28:18.000Z
|
aliyun-python-sdk-vs/aliyunsdkvs/request/v20181212/UpdateVsPullStreamInfoConfigRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 363
|
2015-10-20T03:15:00.000Z
|
2022-03-08T12:26:19.000Z
|
aliyun-python-sdk-vs/aliyunsdkvs/request/v20181212/UpdateVsPullStreamInfoConfigRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 682
|
2015-09-22T07:19:02.000Z
|
2022-03-22T09:51:46.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvs.endpoint import endpoint_data
class UpdateVsPullStreamInfoConfigRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'vs', '2018-12-12', 'UpdateVsPullStreamInfoConfig')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_StartTime(self):
return self.get_query_params().get('StartTime')
def set_StartTime(self,StartTime):
self.add_query_param('StartTime',StartTime)
def get_AppName(self):
return self.get_query_params().get('AppName')
def set_AppName(self,AppName):
self.add_query_param('AppName',AppName)
def get_StreamName(self):
return self.get_query_params().get('StreamName')
def set_StreamName(self,StreamName):
self.add_query_param('StreamName',StreamName)
def get_Always(self):
return self.get_query_params().get('Always')
def set_Always(self,Always):
self.add_query_param('Always',Always)
def get_DomainName(self):
return self.get_query_params().get('DomainName')
def set_DomainName(self,DomainName):
self.add_query_param('DomainName',DomainName)
def get_EndTime(self):
return self.get_query_params().get('EndTime')
def set_EndTime(self,EndTime):
self.add_query_param('EndTime',EndTime)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
def get_SourceUrl(self):
return self.get_query_params().get('SourceUrl')
def set_SourceUrl(self,SourceUrl):
self.add_query_param('SourceUrl',SourceUrl)
| 32.15
| 80
| 0.756998
|
79485b6d052bcafbb5dad4d3b8993f92574c2554
| 331
|
py
|
Python
|
appmode/__init__.py
|
KathyaFigueroa/Indifference-curves
|
ecf2fdc8adbfb745c09621b77e51080152b158d9
|
[
"MIT"
] | 408
|
2017-11-09T12:06:31.000Z
|
2022-03-30T23:13:37.000Z
|
appmode/__init__.py
|
KathyaFigueroa/Indifference-curves
|
ecf2fdc8adbfb745c09621b77e51080152b158d9
|
[
"MIT"
] | 54
|
2017-11-09T12:20:54.000Z
|
2021-07-22T16:38:56.000Z
|
appmode/__init__.py
|
KathyaFigueroa/Indifference-curves
|
ecf2fdc8adbfb745c09621b77e51080152b158d9
|
[
"MIT"
] | 83
|
2017-11-09T10:49:20.000Z
|
2022-02-27T23:59:29.000Z
|
# -*- coding: utf-8 -*-
__version__ = '0.8.0'
# Jupyter Extension points
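# _jupyter_nbextension_paths tells `jupyter nbextension install --py appmode`
# where the frontend assets live (src), under which name to serve them (dest),
# and which AMD module the notebook frontend loads (require).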
def _jupyter_nbextension_paths():
return [dict(
section="notebook",
src="static",
dest="appmode",
require="appmode/main")]
def _jupyter_server_extension_paths():
return [{"module":"appmode.server_extension"}]
#EOF
| 19.470588
| 50
| 0.634441
|
79485b96bb876adb845a2fe839881a7c53ea1c1d
| 9,069
|
py
|
Python
|
mmdet/apis/train.py
|
AndreasKriegler/mmdetection
|
ce1d418f520ddeeb9bcb0fabbee48e76439ef6ec
|
[
"Apache-2.0"
] | 483
|
2019-09-04T01:01:26.000Z
|
2022-03-30T06:28:16.000Z
|
mmdet/apis/train.py
|
AndreasKriegler/mmdetection
|
ce1d418f520ddeeb9bcb0fabbee48e76439ef6ec
|
[
"Apache-2.0"
] | 47
|
2019-09-07T07:04:45.000Z
|
2022-03-08T08:22:21.000Z
|
mmdet/apis/train.py
|
AndreasKriegler/mmdetection
|
ce1d418f520ddeeb9bcb0fabbee48e76439ef6ec
|
[
"Apache-2.0"
] | 94
|
2019-09-04T04:56:29.000Z
|
2022-03-19T05:45:07.000Z
|
from __future__ import division
import re
from collections import OrderedDict
import torch
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import DistSamplerSeedHook, Runner, obj_from_dict
from mmdet import datasets
from mmdet.core import (CocoDistEvalmAPHook, CocoDistEvalRecallHook,
DistEvalmAPHook, DistOptimizerHook, Fp16OptimizerHook)
from mmdet.datasets import DATASETS, build_dataloader
from mmdet.models import RPN
from .env import get_root_logger
def parse_losses(losses):
log_vars = OrderedDict()
for loss_name, loss_value in losses.items():
if isinstance(loss_value, torch.Tensor):
log_vars[loss_name] = loss_value.mean()
elif isinstance(loss_value, list):
log_vars[loss_name] = sum(_loss.mean() for _loss in loss_value)
else:
raise TypeError(
'{} is not a tensor or list of tensors'.format(loss_name))
loss = sum(_value for _key, _value in log_vars.items() if 'loss' in _key)
log_vars['loss'] = loss
for name in log_vars:
log_vars[name] = log_vars[name].item()
return loss, log_vars
def batch_processor(model, data, train_mode):
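    # Contract expected by mmcv's Runner: for each batch, return a dict holding
    # 'loss' (a scalar tensor used for backward), 'log_vars' and 'num_samples'.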
losses = model(**data)
loss, log_vars = parse_losses(losses)
outputs = dict(
loss=loss, log_vars=log_vars, num_samples=len(data['img'].data))
return outputs
def train_detector(model,
dataset,
cfg,
distributed=False,
validate=False,
logger=None):
if logger is None:
logger = get_root_logger(cfg.log_level)
# start training
if distributed:
_dist_train(model, dataset, cfg, validate=validate)
else:
_non_dist_train(model, dataset, cfg, validate=validate)
def build_optimizer(model, optimizer_cfg):
"""Build optimizer from configs.
Args:
model (:obj:`nn.Module`): The model with parameters to be optimized.
optimizer_cfg (dict): The config dict of the optimizer.
Positional fields are:
- type: class name of the optimizer.
- lr: base learning rate.
Optional fields are:
- any arguments of the corresponding optimizer type, e.g.,
weight_decay, momentum, etc.
- paramwise_options: a dict with 3 accepted fileds
(bias_lr_mult, bias_decay_mult, norm_decay_mult).
`bias_lr_mult` and `bias_decay_mult` will be multiplied to
the lr and weight decay respectively for all bias parameters
(except for the normalization layers), and
`norm_decay_mult` will be multiplied to the weight decay
for all weight and bias parameters of normalization layers.
Returns:
torch.optim.Optimizer: The initialized optimizer.
Example:
>>> model = torch.nn.modules.Conv1d(1, 1, 1)
>>> optimizer_cfg = dict(type='SGD', lr=0.01, momentum=0.9,
>>> weight_decay=0.0001)
>>> optimizer = build_optimizer(model, optimizer_cfg)
"""
if hasattr(model, 'module'):
model = model.module
optimizer_cfg = optimizer_cfg.copy()
paramwise_options = optimizer_cfg.pop('paramwise_options', None)
# if no paramwise option is specified, just use the global setting
if paramwise_options is None:
return obj_from_dict(optimizer_cfg, torch.optim,
dict(params=model.parameters()))
else:
assert isinstance(paramwise_options, dict)
# get base lr and weight decay
base_lr = optimizer_cfg['lr']
base_wd = optimizer_cfg.get('weight_decay', None)
# weight_decay must be explicitly specified if mult is specified
if ('bias_decay_mult' in paramwise_options
or 'norm_decay_mult' in paramwise_options):
assert base_wd is not None
# get param-wise options
bias_lr_mult = paramwise_options.get('bias_lr_mult', 1.)
bias_decay_mult = paramwise_options.get('bias_decay_mult', 1.)
norm_decay_mult = paramwise_options.get('norm_decay_mult', 1.)
# set param-wise lr and weight decay
params = []
for name, param in model.named_parameters():
param_group = {'params': [param]}
if not param.requires_grad:
# FP16 training needs to copy gradient/weight between master
# weight copy and model weight, it is convenient to keep all
# parameters here to align with model.parameters()
params.append(param_group)
continue
# for norm layers, overwrite the weight decay of weight and bias
# TODO: obtain the norm layer prefixes dynamically
            if re.search(r'(bn|gn)(\d+)?\.(weight|bias)', name):
if base_wd is not None:
param_group['weight_decay'] = base_wd * norm_decay_mult
# for other layers, overwrite both lr and weight decay of bias
elif name.endswith('.bias'):
param_group['lr'] = base_lr * bias_lr_mult
if base_wd is not None:
param_group['weight_decay'] = base_wd * bias_decay_mult
# otherwise use the global settings
params.append(param_group)
optimizer_cls = getattr(torch.optim, optimizer_cfg.pop('type'))
return optimizer_cls(params, **optimizer_cfg)
def _dist_train(model, dataset, cfg, validate=False):
# prepare data loaders
dataset = dataset if isinstance(dataset, (list, tuple)) else [dataset]
data_loaders = [
build_dataloader(
ds, cfg.data.imgs_per_gpu, cfg.data.workers_per_gpu, dist=True)
for ds in dataset
]
# put model on gpus
model = MMDistributedDataParallel(model.cuda())
# build runner
optimizer = build_optimizer(model, cfg.optimizer)
runner = Runner(model, batch_processor, optimizer, cfg.work_dir,
cfg.log_level)
# fp16 setting
fp16_cfg = cfg.get('fp16', None)
if fp16_cfg is not None:
optimizer_config = Fp16OptimizerHook(**cfg.optimizer_config,
**fp16_cfg)
else:
optimizer_config = DistOptimizerHook(**cfg.optimizer_config)
# register hooks
runner.register_training_hooks(cfg.lr_config, optimizer_config,
cfg.checkpoint_config, cfg.log_config)
runner.register_hook(DistSamplerSeedHook())
# register eval hooks
if validate:
val_dataset_cfg = cfg.data.val
eval_cfg = cfg.get('evaluation', {})
if isinstance(model.module, RPN):
# TODO: implement recall hooks for other datasets
runner.register_hook(
CocoDistEvalRecallHook(val_dataset_cfg, **eval_cfg))
else:
dataset_type = DATASETS.get(val_dataset_cfg.type)
if issubclass(dataset_type, datasets.CocoDataset):
runner.register_hook(
CocoDistEvalmAPHook(val_dataset_cfg, **eval_cfg))
else:
runner.register_hook(
DistEvalmAPHook(val_dataset_cfg, **eval_cfg))
if cfg.resume_from:
runner.resume(cfg.resume_from)
elif cfg.load_from:
runner.load_checkpoint(cfg.load_from)
runner.run(data_loaders, cfg.workflow, cfg.total_epochs)
def _non_dist_train(model, dataset, cfg, validate=False):
if validate:
raise NotImplementedError('Built-in validation is not implemented '
                                  'yet in non-distributed training. Use '
'distributed training or test.py and '
'*eval.py scripts instead.')
# prepare data loaders
dataset = dataset if isinstance(dataset, (list, tuple)) else [dataset]
data_loaders = [
build_dataloader(
ds,
cfg.data.imgs_per_gpu,
cfg.data.workers_per_gpu,
cfg.gpus,
dist=False) for ds in dataset
]
# put model on gpus
model = MMDataParallel(model, device_ids=range(cfg.gpus)).cuda()
# build runner
optimizer = build_optimizer(model, cfg.optimizer)
runner = Runner(model, batch_processor, optimizer, cfg.work_dir,
cfg.log_level)
# fp16 setting
fp16_cfg = cfg.get('fp16', None)
if fp16_cfg is not None:
optimizer_config = Fp16OptimizerHook(
**cfg.optimizer_config, **fp16_cfg, distributed=False)
else:
optimizer_config = cfg.optimizer_config
runner.register_training_hooks(cfg.lr_config, optimizer_config,
cfg.checkpoint_config, cfg.log_config)
if cfg.resume_from:
runner.resume(cfg.resume_from)
elif cfg.load_from:
runner.load_checkpoint(cfg.load_from)
runner.run(data_loaders, cfg.workflow, cfg.total_epochs)
| 38.922747
| 78
| 0.626971
|
79485ca92f3adf8a4a21885885e1babf8b74a66c
| 6,271
|
py
|
Python
|
tools_webrtc/autoroller/unittests/roll_deps_test.py
|
lianhuaren/webrtc
|
096f18c11d8acb0d92820f75fdf934607f424cfc
|
[
"DOC",
"BSD-3-Clause"
] | 2,151
|
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
tools_webrtc/autoroller/unittests/roll_deps_test.py
|
lianhuaren/webrtc
|
096f18c11d8acb0d92820f75fdf934607f424cfc
|
[
"DOC",
"BSD-3-Clause"
] | 395
|
2020-04-18T08:22:18.000Z
|
2021-12-08T13:04:49.000Z
|
tools_webrtc/autoroller/unittests/roll_deps_test.py
|
lianhuaren/webrtc
|
096f18c11d8acb0d92820f75fdf934607f424cfc
|
[
"DOC",
"BSD-3-Clause"
] | 338
|
2020-04-18T08:03:10.000Z
|
2022-03-29T12:33:22.000Z
|
#!/usr/bin/env python
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import glob
import os
import shutil
import sys
import tempfile
import unittest
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARENT_DIR = os.path.join(SCRIPT_DIR, os.pardir)
sys.path.append(PARENT_DIR)
import roll_deps
from roll_deps import CalculateChangedDeps, ChooseCQMode, \
GetMatchingDepsEntries, ParseDepsDict, ParseLocalDepsFile, UpdateDepsFile, \
ChromiumRevisionUpdate
TEST_DATA_VARS = {
'chromium_git': 'https://chromium.googlesource.com',
'chromium_revision': '1b9c098a08e40114e44b6c1ec33ddf95c40b901d',
}
DEPS_ENTRIES = {
'src/build': 'https://build.com',
'src/buildtools': 'https://buildtools.com',
'src/testing/gtest': 'https://gtest.com',
'src/testing/gmock': 'https://gmock.com',
}
BUILD_OLD_REV = '52f7afeca991d96d68cf0507e20dbdd5b845691f'
BUILD_NEW_REV = 'HEAD'
BUILDTOOLS_OLD_REV = '64e38f0cebdde27aa0cfb405f330063582f9ac76'
BUILDTOOLS_NEW_REV = '55ad626b08ef971fd82a62b7abb325359542952b'
class TestError(Exception):
pass
class FakeCmd(object):
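  # Stands in for roll_deps._RunCommand: expected calls are queued with
  # AddExpectation() and consumed in FIFO order; a mismatched call raises
  # TestError, and leftover expectations fail the test in tearDown().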
def __init__(self):
self.expectations = []
def AddExpectation(self, *args, **kwargs):
returns = kwargs.pop('_returns', None)
self.expectations.append((args, kwargs, returns))
def __call__(self, *args, **kwargs):
if not self.expectations:
raise TestError('Got unexpected\n%s\n%s' % (args, kwargs))
exp_args, exp_kwargs, exp_returns = self.expectations.pop(0)
if args != exp_args or kwargs != exp_kwargs:
message = 'Expected:\n args: %s\n kwargs: %s\n' % (exp_args, exp_kwargs)
message += 'Got:\n args: %s\n kwargs: %s\n' % (args, kwargs)
raise TestError(message)
return exp_returns
class TestRollChromiumRevision(unittest.TestCase):
def setUp(self):
self._output_dir = tempfile.mkdtemp()
test_data_dir = os.path.join(SCRIPT_DIR, 'testdata', 'roll_deps')
for test_file in glob.glob(os.path.join(test_data_dir, '*')):
shutil.copy(test_file, self._output_dir)
self._webrtc_depsfile = os.path.join(self._output_dir, 'DEPS')
self._new_cr_depsfile = os.path.join(self._output_dir, 'DEPS.chromium.new')
self.fake = FakeCmd()
self.old_run_command = getattr(roll_deps, '_RunCommand')
setattr(roll_deps, '_RunCommand', self.fake)
def tearDown(self):
shutil.rmtree(self._output_dir, ignore_errors=True)
self.assertEqual(self.fake.expectations, [])
setattr(roll_deps, '_RunCommand', self.old_run_command)
def testVarLookup(self):
local_scope = {'foo': 'wrong', 'vars': {'foo': 'bar'}}
lookup = roll_deps.VarLookup(local_scope)
self.assertEquals(lookup('foo'), 'bar')
def testUpdateDepsFile(self):
new_rev = 'aaaaabbbbbcccccdddddeeeeefffff0000011111'
current_rev = TEST_DATA_VARS['chromium_revision']
UpdateDepsFile(self._webrtc_depsfile,
ChromiumRevisionUpdate(
current_rev, new_rev, current_rev, new_rev),
[])
with open(self._webrtc_depsfile) as deps_file:
deps_contents = deps_file.read()
self.assertTrue(new_rev in deps_contents,
'Failed to find %s in\n%s' % (new_rev, deps_contents))
def testParseDepsDict(self):
with open(self._webrtc_depsfile) as deps_file:
deps_contents = deps_file.read()
local_scope = ParseDepsDict(deps_contents)
vars_dict = local_scope['vars']
def AssertVar(variable_name):
self.assertEquals(vars_dict[variable_name], TEST_DATA_VARS[variable_name])
AssertVar('chromium_git')
AssertVar('chromium_revision')
self.assertEquals(len(local_scope['deps']), 3)
self.assertEquals(len(local_scope['deps_os']), 1)
def testGetMatchingDepsEntriesReturnsPathInSimpleCase(self):
entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing/gtest')
self.assertEquals(len(entries), 1)
self.assertEquals(entries[0], DEPS_ENTRIES['src/testing/gtest'])
def testGetMatchingDepsEntriesHandlesSimilarStartingPaths(self):
entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing')
self.assertEquals(len(entries), 2)
def testGetMatchingDepsEntriesHandlesTwoPathsWithIdenticalFirstParts(self):
entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/build')
self.assertEquals(len(entries), 1)
self.assertEquals(entries[0], DEPS_ENTRIES['src/build'])
def testCalculateChangedDeps(self):
_SetupGitLsRemoteCall(self.fake,
'https://chromium.googlesource.com/chromium/src/build', BUILD_NEW_REV)
webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile)
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile)
changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)
self.assertEquals(len(changed_deps), 3)
self.assertEquals(changed_deps[0].path, 'src/build')
self.assertEquals(changed_deps[0].current_rev, BUILD_OLD_REV)
self.assertEquals(changed_deps[0].new_rev, BUILD_NEW_REV)
self.assertEquals(changed_deps[1].path, 'src/buildtools')
self.assertEquals(changed_deps[1].current_rev, BUILDTOOLS_OLD_REV)
self.assertEquals(changed_deps[1].new_rev, BUILDTOOLS_NEW_REV)
self.assertEquals(changed_deps[2].path, 'src/third_party/xstream')
self.assertEquals(changed_deps[2].package, 'chromium/third_party/xstream')
self.assertEquals(changed_deps[2].current_version, '1.4.8-cr0')
self.assertEquals(changed_deps[2].new_version, '1.10.0-cr0')
class TestChooseCQMode(unittest.TestCase):
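  # Return codes exercised below: 0 = skip the CQ, 1 = CQ dry run, 2 = submit.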
def testSkip(self):
self.assertEquals(ChooseCQMode(True, 99, 500000, 500100), 0)
def testDryRun(self):
self.assertEquals(ChooseCQMode(False, 101, 500000, 500100), 1)
def testSubmit(self):
self.assertEquals(ChooseCQMode(False, 100, 500000, 500100), 2)
def _SetupGitLsRemoteCall(cmd_fake, url, revision):
cmd = ['git', 'ls-remote', url, revision]
cmd_fake.AddExpectation(cmd, _returns=(revision, None))
if __name__ == '__main__':
unittest.main()
| 36.888235
| 80
| 0.73513
|
79485e57fbce5cd8c3453807e54303cbe75b2bd0
| 19,547
|
py
|
Python
|
Python/libraries/recognizers-number/recognizers_number/resources/french_numeric.py
|
vijayraavi/Recognizers-Text
|
bfdd32440ef033c969839332d8be75162d1b241a
|
[
"MIT"
] | null | null | null |
Python/libraries/recognizers-number/recognizers_number/resources/french_numeric.py
|
vijayraavi/Recognizers-Text
|
bfdd32440ef033c969839332d8be75162d1b241a
|
[
"MIT"
] | null | null | null |
Python/libraries/recognizers-number/recognizers_number/resources/french_numeric.py
|
vijayraavi/Recognizers-Text
|
bfdd32440ef033c969839332d8be75162d1b241a
|
[
"MIT"
] | null | null | null |
# ------------------------------------------------------------------------------
# <auto-generated>
# This code was generated by a tool.
# Changes to this file may cause incorrect behavior and will be lost if
# the code is regenerated.
# </auto-generated>
# ------------------------------------------------------------------------------
from .base_numbers import BaseNumbers
# pylint: disable=line-too-long
class FrenchNumeric:
LangMarker = 'Fr'
RoundNumberIntegerRegex = f'(cent|mille|millions|million|milliard|milliards|billion|billions)'
ZeroToNineIntegerRegex = f'(et un|un|une|deux|trois|quatre|cinq|six|sept|huit|neuf)'
TenToNineteenIntegerRegex = f'((seize|quinze|quatorze|treize|douze|onze)|dix(\\Wneuf|\\Whuit|\\Wsept)?)'
TensNumberIntegerRegex = f'(quatre\\Wvingt(s|\\Wdix)?|soixante\\Wdix|vingt|trente|quarante|cinquante|soixante|septante|octante|huitante|nonante)'
DigitsNumberRegex = f'\\d|\\d{{1,3}}(\\.\\d{{3}})'
NegativeNumberTermsRegex = f'^[.]'
NegativeNumberSignRegex = f'^({NegativeNumberTermsRegex}\\s+).*'
HundredsNumberIntegerRegex = f'(({ZeroToNineIntegerRegex}(\\s+cent))|cent|((\\s+cent\\s)+{TensNumberIntegerRegex}))'
BelowHundredsRegex = f'(({TenToNineteenIntegerRegex}|({TensNumberIntegerRegex}([-\\s]+({TenToNineteenIntegerRegex}|{ZeroToNineIntegerRegex}))?))|{ZeroToNineIntegerRegex})'
BelowThousandsRegex = f'(({HundredsNumberIntegerRegex}(\\s+{BelowHundredsRegex})?|{BelowHundredsRegex}|{TenToNineteenIntegerRegex})|cent\\s+{TenToNineteenIntegerRegex})'
SupportThousandsRegex = f'(({BelowThousandsRegex}|{BelowHundredsRegex})\\s+{RoundNumberIntegerRegex}(\\s+{RoundNumberIntegerRegex})?)'
SeparaIntRegex = f'({SupportThousandsRegex}(\\s+{SupportThousandsRegex})*(\\s+{BelowThousandsRegex})?|{BelowThousandsRegex})'
AllIntRegex = f'({SeparaIntRegex}|mille(\\s+{BelowThousandsRegex})?)'
NumbersWithPlaceHolder = lambda placeholder: f'(((?<!\\d+\\s*)-\\s*)|(?<=\\b))\\d+(?!([,\\.]\\d+[a-zA-Z]))(?={placeholder})'
NumbersWithSuffix = f'(((?<=\\W|^)-\\s*)|(?<=\\b))\\d+\\s*{BaseNumbers.NumberMultiplierRegex}(?=\\b)'
RoundNumberIntegerRegexWithLocks = f'(?<=\\b)({DigitsNumberRegex})+\\s+{RoundNumberIntegerRegex}(?=\\b)'
NumbersWithDozenSuffix = f'(((?<!\\d+\\s*)-\\s*)|(?<=\\b))\\d+\\s+douzaine(s)?(?=\\b)'
AllIntRegexWithLocks = f'((?<=\\b){AllIntRegex}(?=\\b))'
AllIntRegexWithDozenSuffixLocks = f'(?<=\\b)(((demi\\s+)?\\s+douzaine)|({AllIntRegex}\\s+douzaines?))(?=\\b)'
SimpleRoundOrdinalRegex = f'(centi[eè]me|milli[eè]me|millioni[eè]me|milliardi[eè]me|billioni[eè]me)'
OneToNineOrdinalRegex = f'(premier|premi[eè]re|deuxi[eè]me|second[e]|troisi[eè]me|tiers|tierce|quatri[eè]me|cinqui[eè]me|sixi[eè]me|septi[eè]me|huiti[eè]me|neuvi[eè]me)'
SpecialUnderHundredOrdinalRegex = f'(onzi[eè]me|douzi[eè]me)'
TensOrdinalRegex = f'(quatre-vingt-dixi[eè]me|quatre-vingti[eè]me|huitanti[eè]me|octanti[eè]me|soixante-dixi[eè]me|septanti[eè]me|soixanti[eè]me|cinquanti[eè]me|quaranti[eè]me|trenti[eè]me|vingti[eè]me)'
HundredOrdinalRegex = f'({AllIntRegex}(\\s+(centi[eè]me\\s)))'
UnderHundredOrdinalRegex = f'((({AllIntRegex}(\\W)?)?{OneToNineOrdinalRegex})|({TensNumberIntegerRegex}(\\W)?)?{OneToNineOrdinalRegex}|{TensOrdinalRegex}|{SpecialUnderHundredOrdinalRegex})'
UnderThousandOrdinalRegex = f'((({HundredOrdinalRegex}(\\s)?)?{UnderHundredOrdinalRegex})|(({AllIntRegex}(\\W)?)?{SimpleRoundOrdinalRegex})|{HundredOrdinalRegex})'
OverThousandOrdinalRegex = f'(({AllIntRegex})(i[eè]me))'
ComplexOrdinalRegex = f'(({OverThousandOrdinalRegex}(\\s)?)?{UnderThousandOrdinalRegex}|{OverThousandOrdinalRegex}|{UnderHundredOrdinalRegex})'
SuffixOrdinalRegex = f'(({AllIntRegex})({SimpleRoundOrdinalRegex}))'
ComplexRoundOrdinalRegex = f'((({SuffixOrdinalRegex}(\\s)?)?{ComplexOrdinalRegex})|{SuffixOrdinalRegex})'
AllOrdinalRegex = f'({ComplexOrdinalRegex}|{SimpleRoundOrdinalRegex}|{ComplexRoundOrdinalRegex})'
PlaceHolderPureNumber = f'\\b'
PlaceHolderDefault = f'\\D|\\b'
OrdinalSuffixRegex = f'(?<=\\b)((\\d*(1er|2e|2eme|3e|3eme|4e|4eme|5e|5eme|6e|6eme|7e|7eme|8e|8eme|9e|9eme|0e|0eme))|(11e|11eme|12e|12eme))(?=\\b)'
OrdinalFrenchRegex = f'(?<=\\b){AllOrdinalRegex}(?=\\b)'
FractionNotationWithSpacesRegex = f'(((?<=\\W|^)-\\s*)|(?<=\\b))\\d+\\s+\\d+[/]\\d+(?=(\\b[^/]|$))'
FractionNotationRegex = f'(((?<=\\W|^)-\\s*)|(?<=\\b))\\d+[/]\\d+(?=(\\b[^/]|$))'
FractionNounRegex = f'(?<=\\b)({AllIntRegex}\\s+((et)\\s+)?)?({AllIntRegex})(\\s+((et)\\s)?)((({AllOrdinalRegex})s?|({SuffixOrdinalRegex})s?)|demis?|tiers?|quarts?)(?=\\b)'
FractionNounWithArticleRegex = f'(?<=\\b)({AllIntRegex}\\s+(et\\s+)?)?(un|une)(\\s+)(({AllOrdinalRegex})|({SuffixOrdinalRegex})|(et\\s+)?demis?)(?=\\b)'
FractionPrepositionRegex = f'(?<=\\b)(?<numerator>({AllIntRegex})|((?<!\\.)\\d+))\\s+sur\\s+(?<denominator>({AllIntRegex})|((\\d+)(?!\\.)))(?=\\b)'
AllPointRegex = f'((\\s+{ZeroToNineIntegerRegex})+|(\\s+{SeparaIntRegex}))'
AllFloatRegex = f'({AllIntRegex}(\\s+(virgule|point)){AllPointRegex})'
DoubleDecimalPointRegex = lambda placeholder: f'(((?<!\\d+\\s*)-\\s*)|((?<=\\b)(?<!\\d+[,\\.])))\\d+[,\\.]\\d+(?!([,\\.]\\d+))(?={placeholder})'
DoubleWithoutIntegralRegex = lambda placeholder: f'(?<=\\s|^)(?<!(\\d+))[,\\.]\\d+(?!([,\\.]\\d+))(?={placeholder})'
DoubleWithMultiplierRegex = f'(((?<!\\d+\\s*)-\\s*)|((?<=\\b)(?<!\\d+\\[,\\.])))\\d+[,\\.]\\d+\\s*{BaseNumbers.NumberMultiplierRegex}(?=\\b)'
DoubleWithRoundNumber = f'(((?<!\\d+\\s*)-\\s*)|((?<=\\b)(?<!\\d+\\[,\\.])))\\d+[,\\.]\\d+\\s+{RoundNumberIntegerRegex}(?=\\b)'
DoubleAllFloatRegex = f'((?<=\\b){AllFloatRegex}(?=\\b))'
DoubleExponentialNotationRegex = f'(((?<!\\d+\\s*)-\\s*)|((?<=\\b)(?<!\\d+[,\\.])))(\\d+([,\\.]\\d+)?)e([+-]*[1-9]\\d*)(?=\\b)'
DoubleCaretExponentialNotationRegex = f'(((?<!\\d+\\s*)-\\s*)|((?<=\\b)(?<!\\d+[,\\.])))(\\d+([,\\.]\\d+)?)\\^([+-]*[1-9]\\d*)(?=\\b)'
NumberWithSuffixPercentage = f'(?<!%)({BaseNumbers.NumberReplaceToken})(\\s*)(%(?!{BaseNumbers.NumberReplaceToken})|(pourcentages|pourcents|pourcentage|pourcent)\\b)'
NumberWithPrefixPercentage = f'((?<!{BaseNumbers.NumberReplaceToken})%|pourcent|pourcent des|pourcentage de)(\\s*)({BaseNumbers.NumberReplaceToken})(?=\\s|$)'
DecimalSeparatorChar = ','
FractionMarkerToken = 'sur'
NonDecimalSeparatorChar = '.'
HalfADozenText = 'six'
WordSeparatorToken = 'et'
WrittenDecimalSeparatorTexts = [r'virgule']
WrittenGroupSeparatorTexts = [r'point', r'points']
WrittenIntegerSeparatorTexts = [r'et', r'-']
WrittenFractionSeparatorTexts = [r'et', r'sur']
HalfADozenRegex = f'(?<=\\b)+demi\\s+douzaine'
DigitalNumberRegex = f'((?<=\\b)(cent|mille|million|millions|milliard|milliards|billions|billion|douzaine(s)?)(?=\\b))|((?<=(\\d|\\b)){BaseNumbers.MultiplierLookupRegex}(?=\\b))'
AmbiguousFractionConnectorsRegex = f'^[.]'
CardinalNumberMap = dict([("zéro", 0),
("zero", 0),
("un", 1),
("une", 1),
("deux", 2),
("trois", 3),
("quatre", 4),
("cinq", 5),
("six", 6),
("sept", 7),
("huit", 8),
("neuf", 9),
("dix", 10),
("onze", 11),
("douze", 12),
("treize", 13),
("quatorze", 14),
("quinze", 15),
("seize", 16),
("dix-sept", 17),
("dix-huit", 18),
("dix-neuf", 19),
("vingt", 20),
("trente", 30),
("quarante", 40),
("cinquante", 50),
("soixante", 60),
("soixante-dix", 70),
("septante", 70),
("quatre-vingts", 80),
("quatre-vingt", 80),
("quatre vingts", 80),
("quatre vingt", 80),
("quatre-vingt-dix", 90),
("quatre-vingt dix", 90),
("quatre vingt dix", 90),
("quatre-vingts-dix", 90),
("quatre-vingts-onze", 91),
("quatre-vingt-onze", 91),
("quatre-vingts-douze", 92),
("quatre-vingt-douze", 92),
("quatre-vingts-treize", 93),
("quatre-vingt-treize", 93),
("quatre-vingts-quatorze", 94),
("quatre-vingt-quatorze", 94),
("quatre-vingts-quinze", 95),
("quatre-vingt-quinze", 95),
("quatre-vingts-seize", 96),
("quatre-vingt-seize", 96),
("huitante", 80),
("octante", 80),
("nonante", 90),
("cent", 100),
("mille", 1000),
("un million", 1000000),
("million", 1000000),
("millions", 1000000),
("un milliard", 1000000000),
("milliard", 1000000000),
("milliards", 1000000000),
("un mille milliards", 1000000000000),
("un billion", 1000000000000)])
OrdinalNumberMap = dict([("premier", 1),
("première", 1),
("premiere", 1),
("deuxième", 2),
("deuxieme", 2),
("second", 2),
("seconde", 2),
("troisième", 3),
("demi", 2),
("tiers", 3),
("tierce", 3),
("quart", 4),
("quarts", 4),
("troisieme", 3),
("quatrième", 4),
("quatrieme", 4),
("cinquième", 5),
("cinquieme", 5),
("sixième", 6),
("sixieme", 6),
("septième", 7),
("septieme", 7),
("huitième", 8),
("huitieme", 8),
("neuvième", 9),
("neuvieme", 9),
("dixième", 10),
("dixieme", 10),
("onzième", 11),
("onzieme", 11),
("douzième", 12),
("douzieme", 12),
("treizième", 13),
("treizieme", 13),
("quatorzième", 14),
("quatorizieme", 14),
("quinzième", 15),
("quinzieme", 15),
("seizième", 16),
("seizieme", 16),
("dix-septième", 17),
("dix-septieme", 17),
("dix-huitième", 18),
("dix-huitieme", 18),
("dix-neuvième", 19),
("dix-neuvieme", 19),
("vingtième", 20),
("vingtieme", 20),
("trentième", 30),
("trentieme", 30),
("quarantième", 40),
("quarantieme", 40),
("cinquantième", 50),
("cinquantieme", 50),
("soixantième", 60),
("soixantieme", 60),
("soixante-dixième", 70),
("soixante-dixieme", 70),
("septantième", 70),
("septantieme", 70),
("quatre-vingtième", 80),
("quatre-vingtieme", 80),
("huitantième", 80),
("huitantieme", 80),
("octantième", 80),
("octantieme", 80),
("quatre-vingt-dixième", 90),
("quatre-vingt-dixieme", 90),
("nonantième", 90),
("nonantieme", 90),
("centième", 100),
("centieme", 100),
("millième", 1000),
("millieme", 1000),
("millionième", 1000000),
("millionieme", 1000000),
("milliardième", 1000000000),
("milliardieme", 1000000000),
("billionieme", 1000000000000),
("billionième", 1000000000000),
("trillionième", 1000000000000000000),
("trillionieme", 1000000000000000000)])
PrefixCardinalMap = dict([("deux", 2),
("trois", 3),
("quatre", 4),
("cinq", 5),
("six", 6),
("sept", 7),
("huit", 8),
("neuf", 9),
("dix", 10),
("onze", 11),
("douze", 12),
("treize", 13),
("quatorze", 14),
("quinze", 15),
("seize", 16),
("dix sept", 17),
("dix-sept", 17),
("dix-huit", 18),
("dix huit", 18),
("dix-neuf", 19),
("dix neuf", 19),
("vingt", 20),
("vingt-et-un", 21),
("vingt et un", 21),
("vingt-deux", 21),
("vingt deux", 22),
("vingt-trois", 23),
("vingt trois", 23),
("vingt-quatre", 24),
("vingt quatre", 24),
("vingt-cinq", 25),
("vingt cinq", 25),
("vingt-six", 26),
("vingt six", 26),
("vingt-sept", 27),
("vingt sept", 27),
("vingt-huit", 28),
("vingt huit", 28),
("vingt-neuf", 29),
("vingt neuf", 29),
("trente", 30),
("quarante", 40),
("cinquante", 50),
("soixante", 60),
("soixante-dix", 70),
("soixante dix", 70),
("septante", 70),
("quatre-vingt", 80),
("quatre vingt", 80),
("huitante", 80),
("octante", 80),
("nonante", 90),
("quatre vingt dix", 90),
("quatre-vingt-dix", 90),
("cent", 100),
("deux cent", 200),
("trois cents", 300),
("quatre cents", 400),
("cinq cent", 500),
("six cent", 600),
("sept cent", 700),
("huit cent", 800),
("neuf cent", 900)])
SuffixOrdinalMap = dict([("millième", 1000),
("million", 1000000),
("milliardième", 1000000000000)])
RoundNumberMap = dict([("cent", 100),
("mille", 1000),
("million", 1000000),
("millions", 1000000),
("milliard", 1000000000),
("milliards", 1000000000),
("billion", 1000000000000),
("billions", 1000000000000),
("centieme", 100),
("centième", 100),
("millieme", 1000),
("millième", 1000),
("millionième", 1000000),
("millionieme", 1000000),
("milliardième", 1000000000),
("milliardieme", 1000000000),
("billionième", 1000000000000),
("billionieme", 1000000000000),
("centiemes", 100),
("centièmes", 100),
("millièmes", 1000),
("milliemes", 1000),
("millionièmes", 1000000),
("millioniemes", 1000000),
("milliardièmes", 1000000000),
("milliardiemes", 1000000000),
("billionièmes", 1000000000000),
("billioniemes", 1000000000000),
("douzaine", 12),
("douzaines", 12),
("k", 1000),
("m", 1000000),
("g", 1000000000),
("b", 1000000000),
("t", 1000000000000)])
RelativeReferenceMap = dict([("", "")])
# pylint: enable=line-too-long
| 60.517028
| 208
| 0.383742
|
79485f6dcc5ecb2c2cb69418d49a7b55ff9bdf80
| 205
|
py
|
Python
|
big_o_notation/space_complexity/constant_space.py
|
LauraBeatris/algorithms-and-data-structures
|
fe8c2f096bf409a6ac7bfbc55c63a023a3f3ce60
|
[
"MIT"
] | 11
|
2021-04-09T15:51:00.000Z
|
2021-07-18T11:40:07.000Z
|
big_o_notation/space_complexity/constant_space.py
|
LauraBeatris/algorithms-and-data-structures
|
fe8c2f096bf409a6ac7bfbc55c63a023a3f3ce60
|
[
"MIT"
] | null | null | null |
big_o_notation/space_complexity/constant_space.py
|
LauraBeatris/algorithms-and-data-structures
|
fe8c2f096bf409a6ac7bfbc55c63a023a3f3ce60
|
[
"MIT"
] | 2
|
2021-04-13T03:00:09.000Z
|
2022-01-03T22:37:00.000Z
|
def constant_space(numbers_list): # Big O => O(1) => the allocated space stays constant regardless of the input size
    unused_variable = 1 # O(1) space
    for number in numbers_list: # O(1) extra space (just the loop variable); time is O(n)
        print(number)
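# For contrast (a sketch, not part of the original file): building an output
# list makes the allocated space grow linearly with the input size.
def linear_space(numbers_list): # Big O => O(n) => space grows with the input size
    doubled = [] # O(n) once filled
    for number in numbers_list: # O(n) time, O(1) extra space per step
        doubled.append(number * 2)
    return doubled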
| 51.25
| 121
| 0.712195
|
79485fb7358e03d727a23837e8da701ed1630813
| 4,961
|
py
|
Python
|
isi_sdk_8_1_1/isi_sdk_8_1_1/models/node_state.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 24
|
2018-06-22T14:13:23.000Z
|
2022-03-23T01:21:26.000Z
|
isi_sdk_8_1_1/isi_sdk_8_1_1/models/node_state.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 46
|
2018-04-30T13:28:22.000Z
|
2022-03-21T21:11:07.000Z
|
isi_sdk_8_1_1/isi_sdk_8_1_1/models/node_state.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 29
|
2018-06-19T00:14:04.000Z
|
2022-02-08T17:51:19.000Z
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 6
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from isi_sdk_8_1_1.models.node_drives_purposelist_error import NodeDrivesPurposelistError # noqa: F401,E501
from isi_sdk_8_1_1.models.node_state_node import NodeStateNode # noqa: F401,E501
class NodeState(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'errors': 'list[NodeDrivesPurposelistError]',
'nodes': 'list[NodeStateNode]',
'total': 'int'
}
attribute_map = {
'errors': 'errors',
'nodes': 'nodes',
'total': 'total'
}
def __init__(self, errors=None, nodes=None, total=None): # noqa: E501
"""NodeState - a model defined in Swagger""" # noqa: E501
self._errors = None
self._nodes = None
self._total = None
self.discriminator = None
if errors is not None:
self.errors = errors
if nodes is not None:
self.nodes = nodes
if total is not None:
self.total = total
@property
def errors(self):
"""Gets the errors of this NodeState. # noqa: E501
A list of errors encountered by the individual nodes involved in this request, or an empty list if there were no errors. # noqa: E501
:return: The errors of this NodeState. # noqa: E501
:rtype: list[NodeDrivesPurposelistError]
"""
return self._errors
@errors.setter
def errors(self, errors):
"""Sets the errors of this NodeState.
A list of errors encountered by the individual nodes involved in this request, or an empty list if there were no errors. # noqa: E501
:param errors: The errors of this NodeState. # noqa: E501
:type: list[NodeDrivesPurposelistError]
"""
self._errors = errors
@property
def nodes(self):
"""Gets the nodes of this NodeState. # noqa: E501
The responses from the individual nodes involved in this request. # noqa: E501
:return: The nodes of this NodeState. # noqa: E501
:rtype: list[NodeStateNode]
"""
return self._nodes
@nodes.setter
def nodes(self, nodes):
"""Sets the nodes of this NodeState.
The responses from the individual nodes involved in this request. # noqa: E501
:param nodes: The nodes of this NodeState. # noqa: E501
:type: list[NodeStateNode]
"""
self._nodes = nodes
@property
def total(self):
"""Gets the total of this NodeState. # noqa: E501
The total number of nodes responding. # noqa: E501
:return: The total of this NodeState. # noqa: E501
:rtype: int
"""
return self._total
@total.setter
def total(self, total):
"""Sets the total of this NodeState.
The total number of nodes responding. # noqa: E501
:param total: The total of this NodeState. # noqa: E501
:type: int
"""
self._total = total
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, NodeState):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 28.511494
| 142
| 0.581334
|
7948601946d027fc723aa0bbfd1da9aa6b09c634
| 417
|
py
|
Python
|
pyspedas/stereo/config.py
|
pulupa/pyspedas
|
7228199cf16eca2a27d130f1e4985ef1e69462ea
|
[
"MIT"
] | 75
|
2019-02-22T12:59:33.000Z
|
2022-02-26T15:33:20.000Z
|
pyspedas/stereo/config.py
|
pulupa/pyspedas
|
7228199cf16eca2a27d130f1e4985ef1e69462ea
|
[
"MIT"
] | 40
|
2019-07-02T07:46:34.000Z
|
2022-02-23T21:48:50.000Z
|
pyspedas/stereo/config.py
|
pulupa/pyspedas
|
7228199cf16eca2a27d130f1e4985ef1e69462ea
|
[
"MIT"
] | 43
|
2019-02-22T13:03:41.000Z
|
2022-01-24T19:26:59.000Z
|
import os
CONFIG = {'local_data_dir': 'stereo_data/',
'remote_data_dir': 'http://sprg.ssl.berkeley.edu/data/misc/stereo/'}
# override local data directory with environment variables
if os.environ.get('SPEDAS_DATA_DIR'):
CONFIG['local_data_dir'] = os.sep.join([os.environ['SPEDAS_DATA_DIR'], 'stereo'])
if os.environ.get('STEREO_DATA_DIR'):
CONFIG['local_data_dir'] = os.environ['STEREO_DATA_DIR']
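# STEREO_DATA_DIR (mission-specific) takes precedence over SPEDAS_DATA_DIR
# (global) because it is applied last, e.g. `export STEREO_DATA_DIR=/data/stereo`.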
| 37.909091
| 85
| 0.721823
|
79486077ddd1bbd31f60ff896632cd92ff8b50e9
| 16,990
|
py
|
Python
|
python/ccxt/cex.py
|
flexycode/ccxt
|
bb3500f921d2a2fc23d61ab31e8cf1645a5102c3
|
[
"MIT"
] | 1
|
2019-03-12T09:30:24.000Z
|
2019-03-12T09:30:24.000Z
|
python/ccxt/cex.py
|
flexycode/ccxt
|
bb3500f921d2a2fc23d61ab31e8cf1645a5102c3
|
[
"MIT"
] | null | null | null |
python/ccxt/cex.py
|
flexycode/ccxt
|
bb3500f921d2a2fc23d61ab31e8cf1645a5102c3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import math
import json
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import InvalidOrder
class cex (Exchange):
def describe(self):
return self.deep_extend(super(cex, self).describe(), {
'id': 'cex',
'name': 'CEX.IO',
'countries': ['GB', 'EU', 'CY', 'RU'],
'rateLimit': 1500,
'has': {
'CORS': True,
'fetchTickers': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrders': True,
},
'timeframes': {
'1m': '1m',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766442-8ddc33b0-5ed8-11e7-8b98-f786aef0f3c9.jpg',
'api': 'https://cex.io/api',
'www': 'https://cex.io',
'doc': 'https://cex.io/cex-api',
'fees': [
'https://cex.io/fee-schedule',
'https://cex.io/limits-commissions',
],
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
'uid': True,
},
'api': {
'public': {
'get': [
'currency_limits/',
'last_price/{pair}/',
'last_prices/{currencies}/',
'ohlcv/hd/{yyyymmdd}/{pair}',
'order_book/{pair}/',
'ticker/{pair}/',
'tickers/{currencies}/',
'trade_history/{pair}/',
],
'post': [
'convert/{pair}',
'price_stats/{pair}',
],
},
'private': {
'post': [
'active_orders_status/',
'archived_orders/{pair}/',
'balance/',
'cancel_order/',
'cancel_orders/{pair}/',
'cancel_replace_order/{pair}/',
'close_position/{pair}/',
'get_address/',
'get_myfee/',
'get_order/',
'get_order_tx/',
'open_orders/{pair}/',
'open_orders/',
'open_position/{pair}/',
'open_positions/{pair}/',
'place_order/{pair}/',
],
},
},
'fees': {
'trading': {
'maker': 0.16 / 100,
'taker': 0.25 / 100,
},
'funding': {
'withdraw': {
# 'USD': None,
# 'EUR': None,
# 'RUB': None,
# 'GBP': None,
'BTC': 0.001,
'ETH': 0.01,
'BCH': 0.001,
'DASH': 0.01,
'BTG': 0.001,
'ZEC': 0.001,
'XRP': 0.02,
'XLM': None,
},
'deposit': {
# 'USD': amount => amount * 0.035 + 0.25,
# 'EUR': amount => amount * 0.035 + 0.24,
# 'RUB': amount => amount * 0.05 + 15.57,
# 'GBP': amount => amount * 0.035 + 0.2,
'BTC': 0.0,
'ETH': 0.0,
'BCH': 0.0,
'DASH': 0.0,
'BTG': 0.0,
'ZEC': 0.0,
'XRP': 0.0,
'XLM': 0.0,
},
},
},
})
def fetch_markets(self):
markets = self.publicGetCurrencyLimits()
result = []
for p in range(0, len(markets['data']['pairs'])):
market = markets['data']['pairs'][p]
id = market['symbol1'] + '/' + market['symbol2']
symbol = id
base, quote = symbol.split('/')
result.append({
'id': id,
'info': market,
'symbol': symbol,
'base': base,
'quote': quote,
'lot': market['minLotSize'],
'precision': {
'price': self.precision_from_string(market['minPrice']),
'amount': -1 * math.log10(market['minLotSize']),
},
'limits': {
'amount': {
'min': market['minLotSize'],
'max': market['maxLotSize'],
},
'price': {
'min': float(market['minPrice']),
'max': float(market['maxPrice']),
},
'cost': {
'min': market['minLotSizeS2'],
'max': None,
},
},
})
return result
def fetch_balance(self, params={}):
self.load_markets()
response = self.privatePostBalance()
result = {'info': response}
        omitted = ['username', 'timestamp']
        balances = self.omit(response, omitted)
currencies = list(balances.keys())
for i in range(0, len(currencies)):
currency = currencies[i]
if currency in balances:
account = {
'free': self.safe_float(balances[currency], 'available', 0.0),
'used': self.safe_float(balances[currency], 'orders', 0.0),
'total': 0.0,
}
account['total'] = self.sum(account['free'], account['used'])
result[currency] = account
return self.parse_balance(result)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
orderbook = self.publicGetOrderBookPair(self.extend({
'pair': self.market_id(symbol),
}, params))
timestamp = orderbook['timestamp'] * 1000
return self.parse_order_book(orderbook, timestamp)
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
return [
ohlcv[0] * 1000,
ohlcv[1],
ohlcv[2],
ohlcv[3],
ohlcv[4],
ohlcv[5],
]
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
if not since:
since = self.milliseconds() - 86400000 # yesterday
ymd = self.ymd(since)
ymd = ymd.split('-')
ymd = ''.join(ymd)
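        # e.g. '2018-06-15' becomes '20180615' for the {yyyymmdd} path component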
request = {
'pair': market['id'],
'yyyymmdd': ymd,
}
response = self.publicGetOhlcvHdYyyymmddPair(self.extend(request, params))
key = 'data' + self.timeframes[timeframe]
ohlcvs = json.loads(response[key])
return self.parse_ohlcvs(ohlcvs, market, timeframe, since, limit)
def parse_ticker(self, ticker, market=None):
timestamp = None
iso8601 = None
if 'timestamp' in ticker:
timestamp = int(ticker['timestamp']) * 1000
iso8601 = self.iso8601(timestamp)
volume = self.safe_float(ticker, 'volume')
high = self.safe_float(ticker, 'high')
low = self.safe_float(ticker, 'low')
bid = self.safe_float(ticker, 'bid')
ask = self.safe_float(ticker, 'ask')
last = self.safe_float(ticker, 'last')
symbol = None
if market:
symbol = market['symbol']
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': iso8601,
'high': high,
'low': low,
'bid': bid,
'ask': ask,
'vwap': None,
'open': None,
'close': None,
'first': None,
'last': last,
'change': None,
'percentage': None,
'average': None,
'baseVolume': volume,
'quoteVolume': None,
'info': ticker,
}
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
currencies = list(self.currencies.keys())
response = self.publicGetTickersCurrencies(self.extend({
'currencies': '/'.join(currencies),
}, params))
tickers = response['data']
result = {}
for t in range(0, len(tickers)):
ticker = tickers[t]
symbol = ticker['pair'].replace(':', '/')
market = self.markets[symbol]
result[symbol] = self.parse_ticker(ticker, market)
return result
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
ticker = self.publicGetTickerPair(self.extend({
'pair': market['id'],
}, params))
return self.parse_ticker(ticker, market)
def parse_trade(self, trade, market=None):
timestamp = int(trade['date']) * 1000
return {
'info': trade,
'id': trade['tid'],
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': market['symbol'],
'type': None,
'side': trade['type'],
'price': float(trade['price']),
'amount': float(trade['amount']),
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
response = self.publicGetTradeHistoryPair(self.extend({
'pair': market['id'],
}, params))
return self.parse_trades(response, market, since, limit)
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
order = {
'pair': self.market_id(symbol),
'type': side,
'amount': amount,
}
if type == 'limit':
order['price'] = price
else:
# for market buy CEX.io requires the amount of quote currency to spend
if side == 'buy':
if not price:
raise InvalidOrder('For market buy orders ' + self.id + " requires the amount of quote currency to spend, to calculate proper costs call createOrder(symbol, 'market', 'buy', amount, price)")
order['amount'] = amount * price
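                # e.g. create_order('BTC/USD', 'market', 'buy', 2, 8000) submits a
                # spend of 16000 USD (amount * price), not a quantity of 2 BTC.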
order['order_type'] = type
response = self.privatePostPlaceOrderPair(self.extend(order, params))
return {
'info': response,
'id': response['id'],
}
def cancel_order(self, id, symbol=None, params={}):
self.load_markets()
return self.privatePostCancelOrder({'id': id})
def parse_order(self, order, market=None):
timestamp = int(order['time'])
symbol = None
if not market:
symbol = order['symbol1'] + '/' + order['symbol2']
if symbol in self.markets:
market = self.market(symbol)
status = order['status']
if status == 'a':
status = 'open' # the unified status
elif status == 'cd':
status = 'canceled'
elif status == 'c':
status = 'canceled'
elif status == 'd':
status = 'closed'
price = self.safe_float(order, 'price')
amount = self.safe_float(order, 'amount')
remaining = self.safe_float(order, 'pending')
if not remaining:
remaining = self.safe_float(order, 'remains')
filled = amount - remaining
fee = None
cost = None
if market:
symbol = market['symbol']
cost = self.safe_float(order, 'ta:' + market['quote'])
if cost is None:
cost = self.safe_float(order, 'tta:' + market['quote'])
baseFee = 'fa:' + market['base']
baseTakerFee = 'tfa:' + market['base']
quoteFee = 'fa:' + market['quote']
quoteTakerFee = 'tfa:' + market['quote']
feeRate = self.safe_float(order, 'tradingFeeMaker')
if not feeRate:
feeRate = self.safe_float(order, 'tradingFeeTaker', feeRate)
if feeRate:
feeRate /= 100.0 # convert to mathematically-correct percentage coefficients: 1.0 = 100%
if (baseFee in list(order.keys())) or (baseTakerFee in list(order.keys())):
baseFeeCost = self.safe_float(order, baseFee)
if baseFeeCost is None:
baseFeeCost = self.safe_float(order, baseTakerFee)
fee = {
'currency': market['base'],
'rate': feeRate,
'cost': baseFeeCost,
}
elif (quoteFee in list(order.keys())) or (quoteTakerFee in list(order.keys())):
quoteFeeCost = self.safe_float(order, quoteFee)
if quoteFeeCost is None:
quoteFeeCost = self.safe_float(order, quoteTakerFee)
fee = {
'currency': market['quote'],
'rate': feeRate,
'cost': quoteFeeCost,
}
if not cost:
cost = price * filled
return {
'id': order['id'],
'datetime': self.iso8601(timestamp),
'timestamp': timestamp,
'status': status,
'symbol': symbol,
'type': None,
'side': order['type'],
'price': price,
'cost': cost,
'amount': amount,
'filled': filled,
'remaining': remaining,
'trades': None,
'fee': fee,
'info': order,
}
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
request = {}
method = 'privatePostOpenOrders'
market = None
if symbol:
market = self.market(symbol)
request['pair'] = market['id']
method += 'Pair'
orders = getattr(self, method)(self.extend(request, params))
for i in range(0, len(orders)):
orders[i] = self.extend(orders[i], {'status': 'open'})
return self.parse_orders(orders, market, since, limit)
def fetch_order(self, id, symbol=None, params={}):
self.load_markets()
response = self.privatePostGetOrder(self.extend({
'id': str(id),
}, params))
return self.parse_order(response)
def nonce(self):
return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
if api == 'public':
if query:
url += '?' + self.urlencode(query)
else:
self.check_required_credentials()
nonce = str(self.nonce())
auth = nonce + self.uid + self.apiKey
signature = self.hmac(self.encode(auth), self.encode(self.secret))
body = self.urlencode(self.extend({
'key': self.apiKey,
'signature': signature.upper(),
'nonce': nonce,
}, query))
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
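    # Signing sketch for private endpoints (values illustrative only): with
    # nonce='1512345678901', uid='up100000000' and apiKey='ABC', the string
    # auth = '1512345678901up100000000ABC' is HMAC-signed with the secret,
    # the hex digest is upper-cased, and key/signature/nonce are form-encoded
    # into the request body together with the remaining query parameters.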
def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = self.fetch2(path, api, method, params, headers, body)
if not response:
raise ExchangeError(self.id + ' returned ' + self.json(response))
elif response is True:
return response
elif 'e' in response:
if 'ok' in response:
if response['ok'] == 'ok':
return response
raise ExchangeError(self.id + ' ' + self.json(response))
elif 'error' in response:
if response['error']:
raise ExchangeError(self.id + ' ' + self.json(response))
return response
| 37.340659
| 210
| 0.459211
|
794860e302dff64adc78e1b68f55e1be887d053a
| 6,361
|
py
|
Python
|
serotonin_inference/tasks/_iblrig_tasks_passive_opto_long/session_params.py
|
int-brain-lab/personal_project_protocols
|
c20f923c1596384ba585164d65c3a40d58d8cbb1
|
[
"MIT"
] | null | null | null |
serotonin_inference/tasks/_iblrig_tasks_passive_opto_long/session_params.py
|
int-brain-lab/personal_project_protocols
|
c20f923c1596384ba585164d65c3a40d58d8cbb1
|
[
"MIT"
] | null | null | null |
serotonin_inference/tasks/_iblrig_tasks_passive_opto_long/session_params.py
|
int-brain-lab/personal_project_protocols
|
c20f923c1596384ba585164d65c3a40d58d8cbb1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Niccolò Bonacchi
# @Date: 2018-02-02 17:19:09
import logging
import os
import tkinter as tk
from pathlib import Path
from sys import platform
from tkinter import messagebox
from pythonosc import udp_client
import iblrig.adaptive as adaptive
import iblrig.ambient_sensor as ambient_sensor
import iblrig.bonsai as bonsai
import iblrig.frame2TTL as frame2TTL
import iblrig.iotasks as iotasks
import iblrig.misc as misc
import iblrig.sound as sound
import iblrig.user_input as user_input
from iblrig.path_helper import SessionPathCreator
from iblrig.rotary_encoder import MyRotaryEncoder
log = logging.getLogger("iblrig")
class SessionParamHandler(object):
"""Session object imports user_settings and task_settings
will and calculates other secondary session parameters,
runs Bonsai and saves all params in a settings file.json"""
def __init__(self, task_settings, user_settings, debug=False, fmake=True):
self.DEBUG = debug
        make = fmake  # honor the fmake flag (False disables folder/file creation)
self.IBLRIG_FOLDER = "C:\\iblrig"
self.IBLRIG_DATA_FOLDER = None # ..\\iblrig_data if None
# =====================================================================
# IMPORT task_settings, user_settings, and SessionPathCreator params
# =====================================================================
ts = {
i: task_settings.__dict__[i]
for i in [x for x in dir(task_settings) if "__" not in x]
}
self.__dict__.update(ts)
us = {
i: user_settings.__dict__[i]
for i in [x for x in dir(user_settings) if "__" not in x]
}
self.__dict__.update(us)
self = iotasks.deserialize_pybpod_user_settings(self)
spc = SessionPathCreator(
self.PYBPOD_SUBJECTS[0], protocol=self.PYBPOD_PROTOCOL, make=make
)
self.__dict__.update(spc.__dict__)
# =====================================================================
# SETTINGS
# =====================================================================
self.RECORD_SOUND = False
self.RECORD_AMBIENT_SENSOR_DATA = True
self.RECORD_VIDEO = False
self.OPEN_CAMERA_VIEW = False # Always True if RECORD_VIDEO is True
# =====================================================================
# SUBJECT
# =====================================================================
# self.SUBJECT_WEIGHT = self.ask_subject_weight()
self.POOP_COUNT = False
# =====================================================================
# OSC CLIENT
# =====================================================================
self.OSC_CLIENT_PORT = 7110
self.OSC_CLIENT_IP = "127.0.0.1"
self.OSC_CLIENT = udp_client.SimpleUDPClient(
self.OSC_CLIENT_IP, self.OSC_CLIENT_PORT
)
# =====================================================================
# SAVE SETTINGS FILE AND TASK CODE
# =====================================================================
if not self.DEBUG:
iotasks.save_session_settings(self)
iotasks.copy_task_code(self)
iotasks.save_task_code(self)
self.bpod_lights(0)
# =========================================================================
# METHODS
# =========================================================================
def patch_settings_file(self, patch):
self.__dict__.update(patch)
misc.patch_settings_file(self.SETTINGS_FILE_PATH, patch)
def save_ambient_sensor_reading(self, bpod_instance):
return ambient_sensor.get_reading(
bpod_instance, save_to=self.SESSION_RAW_DATA_FOLDER
)
def bpod_lights(self, command: int):
fpath = Path(self.IBLRIG_FOLDER) / "scripts" / "bpod_lights.py"
os.system(f"python {fpath} {command}")
def get_port_events(self, events, name=""):
return misc.get_port_events(events, name=name)
# =========================================================================
# JSON ENCODER PATCHES
# =========================================================================
def reprJSON(self):
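        # Build a JSON-serializable copy of the parameters: the OSC client is
        # replaced by its string representation and the bulky subject fields
        # (weighings, water administrations) are blanked out below.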
def remove_from_dict(sx):
if "weighings" in sx.keys():
sx["weighings"] = None
if "water_administration" in sx.keys():
sx["water_administration"] = None
return sx
d = self.__dict__.copy()
d["OSC_CLIENT"] = str(d["OSC_CLIENT"])
if isinstance(d["PYBPOD_SUBJECT_EXTRA"], list):
sub = []
for sx in d["PYBPOD_SUBJECT_EXTRA"]:
sub.append(remove_from_dict(sx))
d["PYBPOD_SUBJECT_EXTRA"] = sub
elif isinstance(d["PYBPOD_SUBJECT_EXTRA"], dict):
d["PYBPOD_SUBJECT_EXTRA"] = remove_from_dict(d["PYBPOD_SUBJECT_EXTRA"])
return d
if __name__ == "__main__":
"""
SessionParamHandler fmake flag=False disables:
making folders/files;
SessionParamHandler debug flag disables:
running auto calib;
calling bonsai
turning off lights of bpod board
"""
import task_settings as _task_settings
import iblrig.fake_user_settings as _user_settings
import datetime
dt = datetime.datetime.now()
dt = [
str(dt.year),
str(dt.month),
str(dt.day),
str(dt.hour),
str(dt.minute),
str(dt.second),
]
dt = [x if int(x) >= 10 else "0" + x for x in dt]
dt.insert(3, "-")
_user_settings.PYBPOD_SESSION = "".join(dt)
_user_settings.PYBPOD_SETUP = "ephysChoiceWorld"
_user_settings.PYBPOD_PROTOCOL = "_iblrig_tasks_passive_opto_long"
if platform == "linux":
_task_settings.AUTOMATIC_CALIBRATION = False
_task_settings.USE_VISUAL_STIMULUS = False
sph = SessionParamHandler(_task_settings, _user_settings, debug=False, fmake=True)
for k in sph.__dict__:
if sph.__dict__[k] is None:
print(f"{k}: {sph.__dict__[k]}")
self = sph
print("Done!")
| 38.08982
| 87
| 0.519101
|
794861a89e2a72904301f0eb55ad249cc5cfddfb
| 1,268
|
py
|
Python
|
migrations/versions/d9fde69982c2_first_migration.py
|
SophieO1970/Personal-Blog
|
24c5906a6420004206f889379fd89392f37884a6
|
[
"MIT"
] | null | null | null |
migrations/versions/d9fde69982c2_first_migration.py
|
SophieO1970/Personal-Blog
|
24c5906a6420004206f889379fd89392f37884a6
|
[
"MIT"
] | null | null | null |
migrations/versions/d9fde69982c2_first_migration.py
|
SophieO1970/Personal-Blog
|
24c5906a6420004206f889379fd89392f37884a6
|
[
"MIT"
] | null | null | null |
"""first migration
Revision ID: d9fde69982c2
Revises:
Create Date: 2020-10-30 22:24:40.554718
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd9fde69982c2'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('users',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('bio', sa.String(length=255), nullable=True),
sa.Column('profile_pic_path', sa.String(), nullable=True),
sa.Column('pass_secure', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_users_username'), table_name='users')
op.drop_index(op.f('ix_users_email'), table_name='users')
op.drop_table('users')
# ### end Alembic commands ###
| 30.926829
| 83
| 0.682177
|
7948637ad48acbaeddba8a2cc37343184dc4bda7
| 5,662
|
py
|
Python
|
B1B0_fuel.py
|
Mrluo123/rooster
|
871af75d1847f5a21776178fba09e186afa58837
|
[
"MIT"
] | 2
|
2022-02-21T10:10:55.000Z
|
2022-02-21T10:11:00.000Z
|
B1B0_fuel.py
|
Mrluo123/rooster
|
871af75d1847f5a21776178fba09e186afa58837
|
[
"MIT"
] | null | null | null |
B1B0_fuel.py
|
Mrluo123/rooster
|
871af75d1847f5a21776178fba09e186afa58837
|
[
"MIT"
] | null | null | null |
from B1B0A_fuelgrain import FuelGrain
import math
import sys
#--------------------------------------------------------------------------------------------------
class Fuel:
#----------------------------------------------------------------------------------------------
# constructor: self is a 'fuel' object created in B1B,
# indx is the axial index of this object in the fuel rod with index indxfuelrod
def __init__(self, indx, indxfuelrod, dz, reactor):
# INITIALIZATION
# dictionary of the fuel rod to which the fuel belongs
dictfuelrod = reactor.control.input['fuelrod'][indxfuelrod]
# current fuel id
fuelid = dictfuelrod['fuelid'][indx]
# radial power peaking factor of fuel rod
self.kr = dictfuelrod['kr'][indx]
# axial power peaking factor of fuel
self.kz = dictfuelrod['kz'][indx]
# list of fuel dictionaries specified in input
        fuel_list = reactor.control.input['fuel']
        # index of the current fuel in the list of fuel dictionaries
        i = [x['id'] for x in fuel_list].index(fuelid)
        # fuel inner radius
        self.ri = fuel_list[i]['ri']
        # fuel outer radius
        self.ro = fuel_list[i]['ro']
        # number of fuel radial nodes
        self.nr = fuel_list[i]['nr']
        # fuel material id
        matid = fuel_list[i]['matid']
# find the fuel material id in the list of materials
try:
ifuel = [x['id'] for x in reactor.control.input['mat']].index(matid)
        except ValueError:
print('****ERROR: fuel material id ' + matid + ' is not specified in the \'mat\' card of input.')
sys.exit()
# dictionary of material properties of the current fuel
mat = reactor.control.input['mat'][ifuel]
# material type of fuel
self.type = mat['type']
# list of Pu content in fuel radial nodes
self.pu = [mat['pu']]*self.nr
# list of fuel burnup in fuel radial nodes
self.b = [mat['b']]*self.nr
# list of deviation from stoechiometry in fuel radial nodes
self.x = [mat['x']]*self.nr
# list of porosity in fuel radial nodes
self.por = [mat['por']]*self.nr
# list of initial temperatures in fuel radial nodes
self.temp = [mat['temp0']]*self.nr
# mesh grid step
self.dr = (self.ro - self.ri)/(self.nr-1)
# list of node radii (size = nr)
self.r = [self.ri + i*self.dr for i in range(self.nr)]
# list of node boundary radii (size = nr-1)
self.rb = [self.r[i]+self.dr/2 for i in range(self.nr-1)]
# list of node volume (size = nr)
self.vol = [self.rb[0]**2 - self.r[0]**2] + [self.rb[i]**2 - self.rb[i-1]**2 for i in range(1, self.nr-1)] + [self.r[self.nr-1]**2 - self.rb[self.nr-2]**2]
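        # Note: self.vol stores r**2 differences (annulus cross-sections with
        # the pi factor omitted); pi is reintroduced later when the volumetric
        # heat source is derived from the total power.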
if 'fuelgrain' in reactor.solve:
# create an object fuel grain for every radial node of fuel
self.fuelgrain = []
for i in range(self.nr):
self.fuelgrain.append(FuelGrain(i, indx, indxfuelrod, reactor))
#----------------------------------------------------------------------------------------------
# create right-hand side list: self is a 'fuel' object created in B1B
# indx is the axial index of this object in the fuel rod with index indxfuelrod
def calculate_rhs(self, indx, indxfuelrod, reactor, t):
# construct right-hand side list
rhs = []
if 'fuelgrain' in reactor.solve and indx == 0 and indxfuelrod == 0:
for i in range(self.nr):
if i == 0:
rhs += self.fuelgrain[indx].calculate_rhs(reactor, t)
# FUEL PROPERTIES:
self.prop = {'rho':[], 'cp':[], 'k':[]}
for j in range(self.nr):
# call material property function
pro = reactor.data.matpro( {'type':self.type, 't':self.temp[j], 'b':self.b[j], 'por':self.por[j], 'pu':self.pu[j], 'x':self.x[j]} )
# density (kg/m3)
self.prop['rho'].append(pro['rho'])
# specific heat (J/kg-K)
self.prop['cp'].append(pro['cp'])
# thermal conductivity (W/m-K)
self.prop['k'].append(pro['k'])
# TIME DERIVATIVE OF FUEL TEMPERATURE:
# inner gas object
innergas = reactor.solid.fuelrod[indxfuelrod].innergas
# gap conductance list
hgap = innergas.calculate_hgap(indxfuelrod, reactor, t)
# clad object
clad = reactor.solid.fuelrod[indxfuelrod].clad[indx]
# fuel thermal conductivity between nodes
kb = [0.5*(self.prop['k'][i] + self.prop['k'][i+1]) for i in range(self.nr-1)]
# heat flux (W/m**2) times heat transfer area per unit height at node boundaries: 2*rb * kb * dT/dr (size = nr-1)
Q = [0] + [2*self.rb[i]*kb[i]*(self.temp[i] - self.temp[i+1])/self.dr for i in range(self.nr-1)]
# add heat flux (W/m**2) times heat transfer area per unit height from fuel to clad
Q += [0.5*(self.ro + clad.ri) * hgap[indx] * (self.temp[self.nr-1] - clad.temp[0])]
# power density
if 'pointkinetics' in reactor.solve:
qv = reactor.core.qv_average * self.kr * self.kz
else:
qv = reactor.control.input['power0']/(math.pi * sum(self.vol)) * self.kr * self.kz
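        # Explicit node energy balance used below: for radial node i,
        #     rho_i * cp_i * V_i * dT_i/dt = Q_i - Q_{i+1} + qv * V_i
        # with Q the boundary heat flows per unit height and qv the volumetric
        # heat source scaled by the radial (kr) and axial (kz) peaking factors.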
rhocpv = [self.prop['rho'][i]*self.prop['cp'][i]*self.vol[i] for i in range(self.nr)]
dTdt = [(Q[i] - Q[i+1] + qv*self.vol[i])/rhocpv[i] for i in range(self.nr)]
rhs += dTdt
return rhs
| 46.793388
| 171
| 0.532851
|
7948641b8c98eecefe70e7e53b102c4c48d4ea5e
| 5,526
|
py
|
Python
|
mmdet/core/evaluation/class_names.py
|
jodistiara/mmdetection
|
e0ad079832ea51210f557959c51f2f6e146c75bc
|
[
"Apache-2.0"
] | null | null | null |
mmdet/core/evaluation/class_names.py
|
jodistiara/mmdetection
|
e0ad079832ea51210f557959c51f2f6e146c75bc
|
[
"Apache-2.0"
] | null | null | null |
mmdet/core/evaluation/class_names.py
|
jodistiara/mmdetection
|
e0ad079832ea51210f557959c51f2f6e146c75bc
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) OpenMMLab. All rights reserved.
import mmcv
def wider_face_classes():
return ['face']
def voc_classes():
return [
'aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat',
'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person',
'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor'
]
def imagenet_det_classes():
return [
'accordion', 'airplane', 'ant', 'antelope', 'apple', 'armadillo',
'artichoke', 'axe', 'baby_bed', 'backpack', 'bagel', 'balance_beam',
'banana', 'band_aid', 'banjo', 'baseball', 'basketball', 'bathing_cap',
'beaker', 'bear', 'bee', 'bell_pepper', 'bench', 'bicycle', 'binder',
'bird', 'bookshelf', 'bow_tie', 'bow', 'bowl', 'brassiere', 'burrito',
'bus', 'butterfly', 'camel', 'can_opener', 'car', 'cart', 'cattle',
'cello', 'centipede', 'chain_saw', 'chair', 'chime', 'cocktail_shaker',
'coffee_maker', 'computer_keyboard', 'computer_mouse', 'corkscrew',
'cream', 'croquet_ball', 'crutch', 'cucumber', 'cup_or_mug', 'diaper',
'digital_clock', 'dishwasher', 'dog', 'domestic_cat', 'dragonfly',
'drum', 'dumbbell', 'electric_fan', 'elephant', 'face_powder', 'fig',
'filing_cabinet', 'flower_pot', 'flute', 'fox', 'french_horn', 'frog',
'frying_pan', 'giant_panda', 'goldfish', 'golf_ball', 'golfcart',
'guacamole', 'guitar', 'hair_dryer', 'hair_spray', 'hamburger',
'hammer', 'hamster', 'harmonica', 'harp', 'hat_with_a_wide_brim',
'head_cabbage', 'helmet', 'hippopotamus', 'horizontal_bar', 'horse',
'hotdog', 'iPod', 'isopod', 'jellyfish', 'koala_bear', 'ladle',
'ladybug', 'lamp', 'laptop', 'lemon', 'lion', 'lipstick', 'lizard',
'lobster', 'maillot', 'maraca', 'microphone', 'microwave', 'milk_can',
'miniskirt', 'monkey', 'motorcycle', 'mushroom', 'nail', 'neck_brace',
'oboe', 'orange', 'otter', 'pencil_box', 'pencil_sharpener', 'perfume',
'person', 'piano', 'pineapple', 'ping-pong_ball', 'pitcher', 'pizza',
'plastic_bag', 'plate_rack', 'pomegranate', 'popsicle', 'porcupine',
'power_drill', 'pretzel', 'printer', 'puck', 'punching_bag', 'purse',
'rabbit', 'racket', 'ray', 'red_panda', 'refrigerator',
'remote_control', 'rubber_eraser', 'rugby_ball', 'ruler',
'salt_or_pepper_shaker', 'saxophone', 'scorpion', 'screwdriver',
'seal', 'sheep', 'ski', 'skunk', 'snail', 'snake', 'snowmobile',
'snowplow', 'soap_dispenser', 'soccer_ball', 'sofa', 'spatula',
'squirrel', 'starfish', 'stethoscope', 'stove', 'strainer',
'strawberry', 'stretcher', 'sunglasses', 'swimming_trunks', 'swine',
'syringe', 'table', 'tape_player', 'tennis_ball', 'tick', 'tie',
'tiger', 'toaster', 'traffic_light', 'train', 'trombone', 'trumpet',
'turtle', 'tv_or_monitor', 'unicycle', 'vacuum', 'violin',
'volleyball', 'waffle_iron', 'washer', 'water_bottle', 'watercraft',
'whale', 'wine_bottle', 'zebra'
]
def imagenet_vid_classes():
return [
'airplane', 'antelope', 'bear', 'bicycle', 'bird', 'bus', 'car',
'cattle', 'dog', 'domestic_cat', 'elephant', 'fox', 'giant_panda',
'hamster', 'horse', 'lion', 'lizard', 'monkey', 'motorcycle', 'rabbit',
'red_panda', 'sheep', 'snake', 'squirrel', 'tiger', 'train', 'turtle',
'watercraft', 'whale', 'zebra'
]
def coco_classes():
return ['module']
# return [
# 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train',
# 'truck', 'boat', 'traffic_light', 'fire_hydrant', 'stop_sign',
# 'parking_meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep',
# 'cow', 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella',
# 'handbag', 'tie', 'suitcase', 'frisbee', 'skis', 'snowboard',
# 'sports_ball', 'kite', 'baseball_bat', 'baseball_glove', 'skateboard',
# 'surfboard', 'tennis_racket', 'bottle', 'wine_glass', 'cup', 'fork',
# 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich', 'orange',
# 'broccoli', 'carrot', 'hot_dog', 'pizza', 'donut', 'cake', 'chair',
# 'couch', 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv',
# 'laptop', 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave',
# 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase',
# 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'
# ]
def cityscapes_classes():
return [
'person', 'rider', 'car', 'truck', 'bus', 'train', 'motorcycle',
'bicycle'
]
dataset_aliases = {
'voc': ['voc', 'pascal_voc', 'voc07', 'voc12'],
'imagenet_det': ['det', 'imagenet_det', 'ilsvrc_det'],
'imagenet_vid': ['vid', 'imagenet_vid', 'ilsvrc_vid'],
'coco': ['coco', 'mscoco', 'ms_coco'],
'wider_face': ['WIDERFaceDataset', 'wider_face', 'WIDERFace'],
'cityscapes': ['cityscapes']
}
def get_classes(dataset):
"""Get class names of a dataset."""
alias2name = {}
for name, aliases in dataset_aliases.items():
for alias in aliases:
alias2name[alias] = name
if mmcv.is_str(dataset):
if dataset in alias2name:
labels = eval(alias2name[dataset] + '_classes()')
else:
raise ValueError(f'Unrecognized dataset: {dataset}')
else:
raise TypeError(f'dataset must a str, but got {type(dataset)}')
return labels
| 46.436975
| 80
| 0.5789
|
79486429bb9d22cd84a6996dc271bfc9858a2762
| 4,401
|
py
|
Python
|
data/celeba_cropped.py
|
Gerryflap/master_thesis
|
5dc16e21b23837fee8a4532679bb5cb961af0b7c
|
[
"MIT"
] | null | null | null |
data/celeba_cropped.py
|
Gerryflap/master_thesis
|
5dc16e21b23837fee8a4532679bb5cb961af0b7c
|
[
"MIT"
] | null | null | null |
data/celeba_cropped.py
|
Gerryflap/master_thesis
|
5dc16e21b23837fee8a4532679bb5cb961af0b7c
|
[
"MIT"
] | null | null | null |
# This loader contains code from https://github.com/pytorch/vision/blob/master/torchvision/datasets/celeba.py
from functools import partial
import PIL
import pandas
import torch
from torchvision.datasets import VisionDataset, CelebA
import os
from torchvision.datasets.utils import verify_str_arg
import data.data_prep.face_cropper as face_cropper
from shutil import copyfile
from data.data_prep.celeba_sideways_detector import gen_aligned_faces
assert os.path.isdir("data")
class CelebaCropped(VisionDataset):
cropped_base_folder = "celeba_cropped/img_align/"
def __init__(self, split="train", transform=None, target_transform=None, download=False, morgan_like_filtering=False, validate_files=False, use_pair_split=True):
super().__init__("data", transforms=None, transform=transform, target_transform=target_transform)
# This option enables the train/valid/test splits used for the thesis
self.use_pair_split = use_pair_split
if not os.path.isdir("data/celeba"):
# try to download celeba
celeba = CelebA("data", split=split, transform=transform, target_transform=target_transform, download=download)
# Check if files exist
if not os.path.isdir("data/" + self.cropped_base_folder) or validate_files:
if not download:
raise IOError("Download is False, but the data does not exist")
self.crop()
if not os.path.isfile("data/celeba_cropped/list_eval_partition.txt"):
with open("data/celeba/list_eval_partition.txt", "r") as f:
lines = f.readlines()
splitted = [line.split(" ") for line in lines]
outlines = []
for fname, n in splitted:
if not os.path.isfile("data/" + self.cropped_base_folder + fname):
continue
outlines.append("%s %s" % (fname, n))
with open("data/celeba_cropped/list_eval_partition.txt", "w") as f:
f.writelines(outlines)
if not use_pair_split and morgan_like_filtering and not os.path.isfile("data/celeba_cropped/list_eval_partition_filtered.txt"):
# Get all aligned faces
aligned = gen_aligned_faces()
with open("data/celeba/list_eval_partition.txt", "r") as f:
lines = f.readlines()
splitted = [line.split(" ") for line in lines]
outlines = []
for fname, n in splitted:
if not os.path.isfile("data/" + self.cropped_base_folder + fname) or fname not in aligned:
continue
outlines.append("%s %s" % (fname, n))
with open("data/celeba_cropped/list_eval_partition_filtered.txt", "w") as f:
f.writelines(outlines)
split_map = {
"train": 0,
"valid": 1,
"test": 2,
"all": None,
}
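        # Each line of the partition file is "<filename> <index>", where the
        # index follows the mapping above (e.g. "000001.jpg 0" marks a
        # training image); file names are assumed to match CelebA's
        # img_align naming.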
split = split_map[verify_str_arg(split.lower(), "split",
("train", "valid", "test", "all"))]
fn = partial(os.path.join, self.root)
if use_pair_split:
partition_file = "celeba_cropped/list_eval_partition_morphing.txt"
else:
partition_file = "celeba_cropped/list_eval_partition.txt" if not morgan_like_filtering else \
"celeba_cropped/list_eval_partition_filtered.txt"
splits = pandas.read_csv(fn(partition_file), delim_whitespace=True, header=None,
index_col=0)
mask = slice(None) if split is None else (splits[1] == split)
self.filename = splits[mask].index.values
def __getitem__(self, index):
X = PIL.Image.open(os.path.join(self.root, self.cropped_base_folder, self.filename[index]))
if self.transform is not None:
X = self.transform(X)
return X, []
def __len__(self):
return len(self.filename)
def crop(self):
# Create the data directory
if not os.path.exists("data/" + self.cropped_base_folder):
os.mkdir("data/" + self.cropped_base_folder)
# Crop images
face_cropper.crop_images("data/celeba/img_align_celeba/", "data/" + self.cropped_base_folder + "/")
if __name__ == "__main__":
ds = CelebaCropped(download=True, validate_files=True)
| 37.29661
| 165
| 0.623949
|
79486493dee3ec7bc27e043d0a43144435c1d66a
| 30,783
|
py
|
Python
|
theano/compile/ops.py
|
jych/Theano
|
d7d722faa96aac95c19f460bf60e8e8654ff58df
|
[
"BSD-3-Clause"
] | 1
|
2021-07-01T02:51:08.000Z
|
2021-07-01T02:51:08.000Z
|
theano/compile/ops.py
|
mayunpeng/Theano
|
c74da33de3768e231ffa0d92d9d11667a2a5aedb
|
[
"BSD-3-Clause"
] | null | null | null |
theano/compile/ops.py
|
mayunpeng/Theano
|
c74da33de3768e231ffa0d92d9d11667a2a5aedb
|
[
"BSD-3-Clause"
] | null | null | null |
"""This file contains auxiliary Ops, used during the compilation phase
and Ops building class (:class:`FromFunctionOp`) and decorator
(:func:`as_op`) that help make new Ops more rapidly.
"""
import copy
import six.moves.cPickle as pickle
import warnings
import theano
from theano import gof
from six import iteritems
from six.moves import xrange
import numpy
def register_view_op_c_code(type, code, version=()):
""" Tell ViewOp how to generate C code for a Theano Type
:param type: A Theano type. It must be the Theano class itself and not an
instance of the class.
:param code: C code that returns a view for the Theano type 'type'.
Use %(iname)s and %(oname)s for the input and output C
variable names respectively.
:param version: A number indicating the version of the code, for cache.
"""
ViewOp.c_code_and_version[type] = (code, version)
class ViewOp(gof.Op):
"""
Returns an inplace view of the input. Used internally by Theano.
"""
view_map = {0: [0]}
# Mapping from Type to C code (and version) to use.
# In the C code, the name of the input variable is %(iname)s,
# the output variable is %(oname)s.
c_code_and_version = {}
def make_node(self, x):
return gof.Apply(self, [x], [x.type()])
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
def perform(self, node, inp, out):
x, = inp
z, = out
z[0] = x
def __str__(self):
return '%s' % self.__class__.__name__
def c_code(self, node, nodename, inp, out, sub):
iname, = inp
oname, = out
fail = sub['fail']
itype = node.inputs[0].type.__class__
if itype in self.c_code_and_version:
code, version = self.c_code_and_version[itype]
return code % locals()
# Else, no C code
return super(ViewOp, self).c_code(node, nodename, inp, out, sub)
def c_code_cache_version(self):
version = []
        # If any of the c code is unversioned, we have to return ()
# Else, we will return a list of (type name, version) pairs.
for t, (c, v) in sorted(iteritems(self.c_code_and_version),
key=lambda pair: str(pair[0])):
if not v:
warnings.warn("Type %s has C code for ViewOp, but it has no "
"version. You should add a 'version' keyword "
"arg when calling register_view_op_c_code." % t,
stacklevel=2)
return ()
version.append((str(t), v))
return tuple(version)
def infer_shape(self, node, input_shapes):
return input_shapes
def grad(self, args, g_outs):
return g_outs
view_op = ViewOp()
class OutputGuard(ViewOp):
"""
This op is used only internally by Theano.
Only the AddDestroyHandler optimizer tries to insert them in the graph.
This Op is declared as destructive while it is not destroying
anything. It returns a view. This is used to prevent destruction of
the output variables of a Theano function.
There is a mechanism in Theano that should prevent this, but the use
of OutputGuard adds a safeguard: it may be possible for some optimization
run before the add_destroy_handler phase to bypass this mechanism, by
making in-place optimizations.
TODO: find a current full explanation.
"""
destroy_map = {0: [0]}
check_input = False
_output_guard = OutputGuard()
def register_deep_copy_op_c_code(typ, code, version=()):
""" Tell DeepCopyOp how to generate C code for a Theano Type
:param typ: A Theano type. It must be the Theano class itself and not an
instance of the class.
:param code: C code that deep copies the Theano type 'typ'.
Use %(iname)s and %(oname)s for the input and output C
variable names respectively.
:param version: A number indicating the version of the code, for cache.
"""
DeepCopyOp.c_code_and_version[typ] = (code, version)
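# A schematic registration (the C snippet is a placeholder illustrating the
# %(iname)s/%(oname)s/%(fail)s substitution, not a real Theano type
# implementation):
#     register_deep_copy_op_c_code(
#         MyType,
#         "%(oname)s = my_deep_copy(%(iname)s); if (!%(oname)s) %(fail)s;",
#         version=(1,))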
class DeepCopyOp(gof.Op):
# Mapping from Type to C code (and version) to use.
# In the C code, the name of the input variable is %(iname)s,
# the output variable is %(oname)s.
c_code_and_version = {}
check_input = False
def __init__(self):
pass
def __str__(self):
return self.__class__.__name__
def __hash__(self):
return hash(type(self))
def __eq__(self, other):
return type(self) == type(other)
def make_node(self, x):
return gof.Apply(self, [x], [x.type()])
def perform(self, node, args, outs):
if hasattr(args[0], 'copy'):
            # When args[0] is an ndarray of 0 dimensions, deepcopy returns
            # a numpy dtype and not an ndarray, so when the argument has a
            # copy attribute we use it instead, as it does not have this
            # problem.
outs[0][0] = args[0].copy()
else:
outs[0][0] = copy.deepcopy(args[0])
def c_code_cache_version(self):
version = []
        # If any of the c code is unversioned, we have to return ()
# Else, we will return a list of (type name, version) pairs.
for t, (c, v) in sorted(iteritems(self.c_code_and_version),
key=lambda pair: str(pair[0])):
if not v:
warnings.warn("Type %s has C code for DeepCopyOp, but it has "
"no version. You should add a 'version' keyword"
" arg when calling "
"register_deep_copy_op_c_code." % t,
stacklevel=2)
return ()
version.append((str(t), v))
if version:
version.append(1)
return tuple(version)
def c_code(self, node, name, inames, onames, sub):
iname, = inames
oname, = onames
fail = sub['fail']
itype = node.inputs[0].type.__class__
if itype in self.c_code_and_version:
code, version = self.c_code_and_version[itype]
return code % locals()
# Else, no C code
return super(DeepCopyOp, self).c_code(node, name, inames, onames, sub)
deep_copy_op = DeepCopyOp()
def register_shape_c_code(type, code, version=()):
""" Tell Shape Op how to generate C code for a Theano Type
    :param type: A Theano type. It must be the Theano class itself and not an
                 instance of the class.
    :param code: C code that returns a vector representing the shape
                 for the Theano type 'type'.
Use %(iname)s and %(oname)s for the input and output C
variable names respectively.
:param version: A number indicating the version of the code, for cache.
"""
Shape.c_code_and_version[type] = (code, version)
class Shape(gof.Op):
"""
L{Op} to return the shape of a matrix.
@note: Non-differentiable.
"""
_f16_ok = True
# Mapping from Type to C code (and version) to use.
# In the C code, the name of the input variable is %(iname)s,
# the output variable is %(oname)s.
c_code_and_version = {}
check_input = False
def __hash__(self):
return hash(type(self))
def __eq__(self, other):
return type(self) == type(other)
def __str__(self):
return self.__class__.__name__
def make_node(self, x):
# Must work for all type that have a shape attribute.
# This will fail at execution time.
if not isinstance(x, theano.Variable):
x = theano.tensor.as_tensor_variable(x)
return gof.Apply(self, [x], [theano.tensor.lvector()])
def perform(self, node, inp, out_):
x, = inp
out, = out_
out[0] = theano._asarray(x.shape, dtype='int64')
def infer_shape(self, node, in_shapes):
return [[len(in_shapes[0])]]
def connection_pattern(self, node):
# the grad returns the gradient with respect to the
# elements of a tensor variable
# the elements of the tensor variable do not participate
# in the computation of the shape, so they are not really
# part of the graph
return [[False]]
def grad(self, inp, grads):
# the grad returns the gradient with respect to the
# elements of a tensor variable
# the elements of the tensor variable do not participate
# in the computation of the shape, so they are not really
# part of the graph
return [theano.gradient.DisconnectedType()()]
def R_op(self, inputs, eval_points):
return [None]
def c_code(self, node, name, inames, onames, sub):
iname, = inames
oname, = onames
fail = sub['fail']
itype = node.inputs[0].type.__class__
if itype in self.c_code_and_version:
code, version = self.c_code_and_version[itype]
return code % locals()
# Else, no C code
return super(Shape, self).c_code(node, name, inames, onames, sub)
def c_code_cache_version(self):
version = []
        # If any of the c code is unversioned, we have to return ()
# Else, we will return a list of (type name, version) pairs.
for t, (c, v) in sorted(iteritems(self.c_code_and_version),
key=lambda pair: str(pair[0])):
if not v:
warnings.warn("Type %s has C code for Shape, but it has no "
"version. You should add a 'version' keyword "
"arg when calling register_shape_c_code." % t,
stacklevel=2)
return ()
version.append((str(t), v))
if version:
version.append(1)
return tuple(version)
shape = Shape()
_shape = shape # was used in the past, now use shape directly.
class Shape_i(gof.Op):
"""
L{Op} to return the shape of a matrix.
@note: Non-differentiable.
"""
_f16_ok = True
# Mapping from Type to C code (and version) to use.
# In the C code, the name of the input variable is %(iname)s,
# the output variable is %(oname)s.
c_code_and_version = {}
check_input = False
__props__ = ("i",)
def __init__(self, i):
# As i will be used in the hash and that ndarray are not hashable,
# we need to convert it to an int as it is hashable.
if isinstance(i, numpy.ndarray):
assert "int" in str(i.dtype)
assert i == int(i)
i = int(i)
self.i = i
def __str__(self):
return '%s{%i}' % (self.__class__.__name__, self.i)
def make_node(self, x):
# x could be one of a number of types
# the only thing we require is that the variable have a .ndim,
# and that the value have a .shape
if not isinstance(x, theano.Variable):
raise TypeError('x must be Variable with ndim attribute', x)
if x.ndim <= self.i:
raise TypeError('x has too few dimensions for Shape_i',
(x, self.i))
return theano.Apply(self, [x], [theano.tensor.lscalar()])
def perform(self, node, inp, out_):
x, = inp
out, = out_
if out[0] is None:
out[0] = theano._asarray(x.shape[self.i], dtype='int64')
else:
out[0][...] = x.shape[self.i]
def c_code_cache_version(self):
version = []
        # If any of the c code is unversioned, we have to return ()
# Else, we will return a list of (type name, version) pairs.
for t, (c, ci, v) in sorted(iteritems(self.c_code_and_version),
key=lambda pair: str(pair[0])):
if not v:
warnings.warn("Type %s has C code for Shape_i, but it has "
"no version. You should add a 'version' keyword "
"arg when calling register_shape_i_c_code." % t,
stacklevel=2)
return ()
version.append((str(t), v))
if version:
version.append(1)
return tuple(version)
def c_code(self, node, name, inames, onames, sub):
iname, = inames
oname, = onames
fail = sub['fail']
i = self.i
itype = node.inputs[0].type.__class__
if itype in self.c_code_and_version:
code, check_input, version = self.c_code_and_version[itype]
return (check_input + code) % locals()
# Else, no C code
return super(Shape_i, self).c_code(node, name, inames, onames, sub)
def infer_shape(self, node, input_shapes):
return [()]
def grad(self, inp, grads):
return [theano.gradient.grad_not_implemented(
op=self, x_pos=0, x=inp[0],
comment=("No gradient for the shape of a matrix "
"is implemented."))]
def shape_i(var, i, fgraph=None):
"""Equivalent of var.shape[i], but apply if possible the shape
feature optimization
This is useful in optimization that need to get the shape. This
remove the need of the following shape_feature optimization that
convert it. So this speed up optimization and remove Equilibrium
max iteration problems.
:param var: the variable we want to take the shape of
:param i: The shape dimensions we want
:param fgraph: optional. If var.fgraph do not exist, the fgraph that
have the shape_feature to introduce var in to get the optimized shape.
"""
if fgraph is None and hasattr(var, 'fgraph'):
fgraph = var.fgraph
if fgraph and hasattr(fgraph, 'shape_feature'):
shape_feature = fgraph.shape_feature
shape_of = shape_feature.shape_of
def recur(node):
if not hasattr(node.outputs[0], 'fgraph'):
for inp in node.inputs:
if inp.owner:
recur(inp.owner)
# If the output var isn't marked as being in the graph,
                # we need to add it to the ShapeFeature.
shape_feature.on_import(fgraph, node,
'gof.ops.shape_i')
if var not in shape_of:
recur(var.owner)
return shape_of[var][i]
# If we are not able to use the shape feature, we should not put
# Shape_i in the graph. Otherwise, the shape feature optimization
# won't get applied.
return var.shape[i]
def register_shape_i_c_code(typ, code, check_input, version=()):
""" Tell Shape_i how to generate C code for a Theano Type
:param typ: A Theano type. It must be the Theano class itself and not
an instance of the class.
    :param code: C code that gets the shape of dimension %(i)s for the
    Theano type 'typ'.
    Use %(iname)s and %(oname)s for the input and output C
    variable names respectively.
    :param check_input: C code that validates the input; it is prepended
    to 'code' when the C implementation is generated.
    :param version: A number indicating the version of the code, for cache.
"""
Shape_i.c_code_and_version[typ] = (code, check_input, version)
# List of Theano Types that one can add an extra dimension and for which
# Scan can deal with.
expandable_types = ()
def load_back(mod, name):
__import__(mod)
import sys
module = sys.modules[mod]
obj = getattr(module, name)
return obj
class FromFunctionOp(gof.Op):
"""
Build a basic Theano Op around a function.
Since the resulting Op is very basic and is missing most of the
optional functionalities, some optimizations may not apply. If you
want to help, you can supply an infer_shape function that computes
the shapes of the output given the shapes of the inputs.
Also the gradient is undefined in the resulting op and Theano will
raise an error if you attempt to get the gradient of a graph
containing this op.
"""
def __init__(self, fn, itypes, otypes, infer_shape):
self.__fn = fn
self.itypes = itypes
self.otypes = otypes
self.__infer_shape = infer_shape
if self.__infer_shape is not None:
self.infer_shape = self._infer_shape
def __eq__(self, other):
return (type(self) == type(other) and
self.__fn == other.__fn)
def __hash__(self):
return hash(type(self)) ^ hash(self.__fn)
def __str__(self):
return 'FromFunctionOp{%s}' % self.__fn.__name__
def make_node(self, *inputs):
if len(inputs) != len(self.itypes):
raise ValueError("We expected %d inputs but got %d." %
(len(self.itypes), len(inputs)))
if not all(inp.type == it for inp, it in zip(inputs, self.itypes)):
raise TypeError(
"We expected inputs of types '%s' but got types '%s' " %
                (str(self.itypes), str([inp.type for inp in inputs])))
return theano.Apply(self, inputs, [o() for o in self.otypes])
def perform(self, node, inputs, outputs):
outs = self.__fn(*inputs)
if not isinstance(outs, (list, tuple)):
outs = (outs,)
assert len(outs) == len(outputs)
for i in range(len(outs)):
outputs[i][0] = outs[i]
def __reduce__(self):
mod = self.__fn.__module__
name = self.__fn.__name__
try:
obj = load_back(mod, name)
except (ImportError, KeyError, AttributeError):
raise pickle.PicklingError(
"Can't pickle as_op(), not found as %s.%s" %
(mod, name))
else:
if obj is not self:
raise pickle.PicklingError(
"Can't pickle as_op(), not the object "
"at %s.%s" % (mod, name))
return load_back, (mod, name)
def _infer_shape(self, node, input_shapes):
return self.__infer_shape(node, input_shapes)
def as_op(itypes, otypes, infer_shape=None):
"""
Decorator that converts a function into a basic Theano op that
will call the supplied function as its implementation.
It takes an optional infer_shape parameter that should be a
callable with this signature:
def infer_shape(node, input_shapes):
...
return output_shapes
Here `input_shapes` and `output_shapes` are lists of tuples that
represent the shape of the corresponding inputs/outputs.
This should not be used when performance is a concern since the
very basic nature of the resulting Op may interfere with certain
graph optimizations.
Example usage:
@as_op(itypes=[theano.tensor.fmatrix, theano.tensor.fmatrix],
otypes=[theano.tensor.fmatrix])
def numpy_dot(a, b):
return numpy.dot(a, b)
"""
if not isinstance(itypes, (list, tuple)):
itypes = [itypes]
if any(not isinstance(t, theano.Type) for t in itypes):
raise TypeError("itypes has to be a list of Theano types")
if not isinstance(otypes, (list, tuple)):
otypes = [otypes]
if any(not isinstance(t, theano.Type) for t in otypes):
raise TypeError("otypes has to be a list of Theano types")
# make sure they are lists and not tuples
itypes = list(itypes)
otypes = list(otypes)
if infer_shape is not None and not callable(infer_shape):
raise TypeError("infer_shape needs to be a callable")
def make_op(fn):
return FromFunctionOp(fn, itypes, otypes, infer_shape)
return make_op
def register_rebroadcast_c_code(typ, code, version=()):
"""Tell Rebroadcast how to generate C code for a Theano Type
:param typ: A Theano type. It must be the Theano class itself and not an
instance of the class.
:param code: C code that checks if the dimension %(axis)s is of
shape 1 for the Theano type 'typ'. Use %(iname)s and
%(oname)s for the input and output C variable names
respectively, and %(axis)s for the axis that we need to
check. This code is put in a loop for all axes.
:param version: A number indicating the version of the code, for cache.
"""
Rebroadcast.c_code_and_version[typ] = (code, version)
class Rebroadcast(gof.Op):
"""
Change the input's broadcastable fields in some predetermined way.
:code:`Rebroadcast((0, True), (1, False))(x)` would make :code:`x`
broadcastable in axis 0 and not broadcastable in axis 1
.. seealso::
:func:`unbroadcast <theano.tensor.unbroadcast>`
:func:`addbroadcast <theano.tensor.addbroadcast>`
:func:`patternbroadcast <theano.tensor.patternbroadcast>`
    .. note:: works inplace and works for CudaNdarrayType
"""
view_map = {0: [0]}
_f16_ok = True
# Mapping from Type to C code (and version) to use.
# In the C code, the name of the input variable is %(iname)s,
# the output variable is %(oname)s.
c_code_and_version = {}
check_input = False
def __init__(self, *axis):
self.axis = dict(axis)
for axis, broad in iteritems(self.axis):
assert isinstance(axis, (numpy.integer, int)), (
"Rebroadcast needs integer axes. Got ", axis)
def __eq__(self, other):
return type(self) == type(other) and self.axis == other.axis
def __hash__(self):
# no ambiguity because each item key is unique
items = sorted(iteritems(self.axis))
return hash((type(self), tuple(items)))
def __str__(self):
if len(self.axis) == 0:
broadcast_pattern = []
else:
broadcast_pattern = ['?' for i
in xrange(1 + max(self.axis.keys()))]
for k, v in iteritems(self.axis):
broadcast_pattern[k] = str(int(v))
return '%s{%s}' % (self.__class__.__name__,
','.join(broadcast_pattern))
def make_node(self, x):
if self.axis.keys() and (x.ndim <= max(self.axis.keys())):
raise ValueError('Trying to rebroadcast non-existent dimension')
t = x.type.clone(
broadcastable=[self.axis.get(i, b)
for i, b in enumerate(x.type.broadcastable)])
return gof.Apply(self, [x], [t()])
def perform(self, node, inp, out_):
x, = inp
out, = out_
for axis, value in iteritems(self.axis):
if value and x.shape[axis] != 1:
raise ValueError('Dimension %s in Rebroadcast\'s input was'
' supposed to be 1 (got %s instead)' %
(axis, x.shape[axis]))
out[0] = x
def grad(self, inp, grads):
x, = inp
gz, = grads
# restore the broadcasting pattern of the input
return Rebroadcast(*[(axis, x.type.broadcastable[axis])
for axis, value in iteritems(self.axis)])(gz),
def infer_shape(self, node, ishapes):
assert len(ishapes) == 1
l = []
one = theano.tensor.basic.constant(1)
for ax in xrange(len(ishapes[0])):
if self.axis.get(ax, False):
l.append(one)
else:
l.append(ishapes[0][ax])
return [tuple(l)]
def R_op(self, inputs, eval_points):
if eval_points[0] is None:
return [None]
return self(*eval_points, **dict(return_list=True))
def c_code(self, node, nodename, inp, out, sub):
iname, = inp
oname, = out
fail = sub['fail']
itype = node.inputs[0].type.__class__
if itype in self.c_code_and_version:
code, version = self.c_code_and_version[itype]
final_code = ""
for axis, value in iteritems(self.axis):
if value:
final_code += code % locals()
return final_code + """
Py_XDECREF(%(oname)s);
%(oname)s = %(iname)s;
Py_XINCREF(%(oname)s);
""" % locals()
return super(Rebroadcast, self).c_code(node, nodename, inp, out, sub)
def c_code_cache_version(self):
version = []
        # If any of the c code is unversioned, we have to return ()
# Else, we will return a list of (type name, version) pairs.
for t, (c, v) in sorted(iteritems(self.c_code_and_version),
key=lambda pair: str(pair[0])):
if not v:
warnings.warn("Type %s has C code for Rebroadcast, but it "
"has no version. You should add a 'version' "
"keyword arg when calling "
"register_rebroadcast_c_code." % t,
stacklevel=2)
return ()
version.append((str(t), v))
if version:
version.append(1)
return tuple(version)
def register_specify_shape_c_code(typ, code, version=(),
c_support_code_apply=None):
""" Tell SpecifyShape how to generate C code for a Theano Type
:param typ: A Theano type. It must be the Theano class itself and
not an instance of the class.
:param code: C code that checks the shape and returns a view for
the Theano type 'typ'. Use %(iname)s and %(oname)s
for the input and output C variable names
respectively. %(shape)s is the vector of shape of
%(iname)s. Check that its length is good.
:param version: A number indicating the version of the code, for cache.
:param c_support_code_apply: extra code.
"""
SpecifyShape.c_code_and_version[typ] = (code, version,
c_support_code_apply)
class SpecifyShape(gof.Op):
"""
L{Op} that puts into the graph the user-provided shape.
In the case where this op stays in the final graph, we assert the shape.
For this the output of this op must be used in the graph. This is not
the case most of the time if we only take the shape of the output.
Maybe there are other optimizations that will mess with this.
@note: Maybe in the future we will never do the assert!
@note: We currently don't support specifying partial shape information.
@todo: test this op with sparse and cuda ndarray.
Do C code for them too.
"""
view_map = {0: [0]}
# Mapping from Type to C code (and version) to use.
# In the C code, the name of the input variable is %(iname)s,
# the output variable is %(oname)s.
c_code_and_version = {}
def __hash__(self):
return hash(type(self))
def __eq__(self, other):
return type(self) == type(other)
def __str__(self):
return self.__class__.__name__
def make_node(self, x, shape):
if not isinstance(x, gof.Variable):
x = theano.tensor.as_tensor_variable(x)
shape = theano.tensor.as_tensor_variable(shape)
assert shape.ndim == 1
assert "int" in shape.dtype
if isinstance(shape, theano.tensor.TensorConstant):
assert shape.data.size == x.ndim
return gof.Apply(self, [x, shape], [x.type()])
def perform(self, node, inp, out_):
x, shape = inp
out, = out_
assert x.ndim == shape.size
assert numpy.all(x.shape == shape), ("got shape", x.shape,
"expected", shape)
out[0] = x
def infer_shape(self, node, shapes):
xshape, sshape = shapes
new_shape = []
for dim in xrange(node.inputs[0].ndim):
try:
s = theano.tensor.get_scalar_constant_value(
node.inputs[1][dim])
s = theano.tensor.as_tensor_variable(s)
new_shape.append(s)
except theano.tensor.NotScalarConstantError:
new_shape.append(node.inputs[1][dim])
assert len(new_shape) == len(xshape)
return [new_shape]
def connection_pattern(self, node):
return [[True], [False]]
def grad(self, inp, grads):
x, s = inp
gz, = grads
        # Ideally gz would be wrapped in a SpecifyShape as well (i.e.
        # `return [specify_shape(gz, s), ...]`), but that is not done yet
        # because an extra optimization would be needed to remove that op
        # from the graph so it does not block other optimizations.
        return [gz, theano.gradient.DisconnectedType()()]
def R_op(self, inputs, eval_points):
if eval_points[0] is None:
# It means that the this op sits on top of a non-differentiable
# path
return [None]
return self.make_node(eval_points[0], *inputs[1:]).outputs
def c_support_code_apply(self, node, name):
itype = node.inputs[0].type.__class__
if itype in self.c_code_and_version:
_, _, support_code = self.c_code_and_version[itype]
if support_code:
return support_code
return super(SpecifyShape, self).c_support_code_apply(node, name)
def c_code(self, node, name, inames, onames, sub):
iname, shape = inames
oname, = onames
fail = sub['fail']
itype = node.inputs[0].type.__class__
if itype in self.c_code_and_version:
code, version, _ = self.c_code_and_version[itype]
return code % locals()
        return super(SpecifyShape, self).c_code(node, name, inames,
onames, sub)
def c_code_cache_version(self):
version = []
        # If any of the c code is unversioned, we have to return ()
# Else, we will return a list of (type name, version) pairs.
for t, (c, v, _) in sorted(iteritems(self.c_code_and_version),
key=lambda pair: str(pair[0])):
if not v:
warnings.warn("Type %s has C code for SpecifyShape, but it "
"has no version. You should add a 'version' "
"keyword arg when calling "
"register_specify_shape_c_code." % t,
stacklevel=2)
return ()
version.append((str(t), v))
return tuple(version)
specify_shape = SpecifyShape()
| 35.180571
| 79
| 0.586947
|
7948657f750ba2e2397af79069b01666fb0b00e7
| 5,392
|
py
|
Python
|
third_party/virtualbox/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
|
Fimbure/icebox-1
|
0b81992a53e1b410955ca89bdb6f8169d6f2da86
|
[
"MIT"
] | 521
|
2019-03-29T15:44:08.000Z
|
2022-03-22T09:46:19.000Z
|
third_party/virtualbox/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
|
Fimbure/icebox-1
|
0b81992a53e1b410955ca89bdb6f8169d6f2da86
|
[
"MIT"
] | 30
|
2019-06-04T17:00:49.000Z
|
2021-09-08T20:44:19.000Z
|
third_party/virtualbox/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
|
Fimbure/icebox-1
|
0b81992a53e1b410955ca89bdb6f8169d6f2da86
|
[
"MIT"
] | 99
|
2019-03-29T16:04:13.000Z
|
2022-03-28T16:59:34.000Z
|
## @file
# process OptionROM generation from INF statement
#
# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##
# Import Modules
#
import RuleSimpleFile
import RuleComplexFile
import Section
import OptionRom
import Common.GlobalData as GlobalData
from Common.DataType import *
from Common.String import *
from FfsInfStatement import FfsInfStatement
from GenFdsGlobalVariable import GenFdsGlobalVariable
##
#
#
class OptRomInfStatement (FfsInfStatement):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
FfsInfStatement.__init__(self)
self.OverrideAttribs = None
## __GetOptRomParams() method
#
# Parse inf file to get option ROM related parameters
#
# @param self The object pointer
#
def __GetOptRomParams(self):
        if self.OverrideAttribs is None:
            self.OverrideAttribs = OptionRom.OverrideAttribs()
        if self.OverrideAttribs.NeedCompress is None:
            self.OverrideAttribs.NeedCompress = self.OptRomDefs.get('PCI_COMPRESS')
            if self.OverrideAttribs.NeedCompress is not None:
                if self.OverrideAttribs.NeedCompress.upper() not in ('TRUE', 'FALSE'):
                    GenFdsGlobalVariable.ErrorLogger("Expected TRUE/FALSE for PCI_COMPRESS: %s" % self.InfFileName)
                self.OverrideAttribs.NeedCompress = \
                    self.OverrideAttribs.NeedCompress.upper() == 'TRUE'
        if self.OverrideAttribs.PciVendorId is None:
            self.OverrideAttribs.PciVendorId = self.OptRomDefs.get('PCI_VENDOR_ID')
        if self.OverrideAttribs.PciClassCode is None:
            self.OverrideAttribs.PciClassCode = self.OptRomDefs.get('PCI_CLASS_CODE')
        if self.OverrideAttribs.PciDeviceId is None:
            self.OverrideAttribs.PciDeviceId = self.OptRomDefs.get('PCI_DEVICE_ID')
        if self.OverrideAttribs.PciRevision is None:
            self.OverrideAttribs.PciRevision = self.OptRomDefs.get('PCI_REVISION')
# InfObj = GenFdsGlobalVariable.WorkSpace.BuildObject[self.PathClassObj, self.CurrentArch]
# RecordList = InfObj._RawData[MODEL_META_DATA_HEADER, InfObj._Arch, InfObj._Platform]
# for Record in RecordList:
# Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
# Name = Record[0]
## GenFfs() method
#
# Generate FFS
#
# @param self The object pointer
# @retval string Generated .efi file name
#
def GenFfs(self):
#
# Parse Inf file get Module related information
#
self.__InfParse__()
self.__GetOptRomParams()
#
# Get the rule of how to generate Ffs file
#
Rule = self.__GetRule__()
GenFdsGlobalVariable.VerboseLogger( "Packing binaries from inf file : %s" %self.InfFileName)
#FileType = Ffs.Ffs.ModuleTypeToFileType[Rule.ModuleType]
#
# For the rule only has simpleFile
#
if isinstance (Rule, RuleSimpleFile.RuleSimpleFile) :
EfiOutputList = self.__GenSimpleFileSection__(Rule)
return EfiOutputList
#
# For Rule has ComplexFile
#
elif isinstance(Rule, RuleComplexFile.RuleComplexFile):
EfiOutputList = self.__GenComplexFileSection__(Rule)
return EfiOutputList
## __GenSimpleFileSection__() method
#
# Get .efi files according to simple rule.
#
# @param self The object pointer
# @param Rule The rule object used to generate section
# @retval string File name of the generated section file
#
def __GenSimpleFileSection__(self, Rule):
#
# Prepare the parameter of GenSection
#
OutputFileList = []
        if Rule.FileName is not None:
GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
OutputFileList.append(GenSecInputFile)
else:
OutputFileList, IsSect = Section.Section.GetFileList(self, '', Rule.FileExtension)
return OutputFileList
## __GenComplexFileSection__() method
#
# Get .efi by sections in complex Rule
#
# @param self The object pointer
# @param Rule The rule object used to generate section
# @retval string File name of the generated section file
#
def __GenComplexFileSection__(self, Rule):
OutputFileList = []
for Sect in Rule.SectionList:
if Sect.SectionType == 'PE32':
                if Sect.FileName is not None:
GenSecInputFile = self.__ExtendMacro__(Sect.FileName)
OutputFileList.append(GenSecInputFile)
else:
FileList, IsSect = Section.Section.GetFileList(self, '', Sect.FileExtension)
OutputFileList.extend(FileList)
return OutputFileList
| 34.787097
| 115
| 0.648182
|
7948659d614246078f34235e52dcb4383d957696
| 1,385
|
py
|
Python
|
ooobuild/dyn/xml/crypto/x_cipher_context.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
ooobuild/dyn/xml/crypto/x_cipher_context.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
ooobuild/dyn/xml/crypto/x_cipher_context.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Interface Class
# this is an auto-generated file generated by Cheetah
# Libre Office Version: 7.3
# Namespace: com.sun.star.xml.crypto
from typing import TYPE_CHECKING
from ooo.oenv.env_const import UNO_ENVIRONMENT, UNO_RUNTIME
_DYNAMIC = False
if (not TYPE_CHECKING) and UNO_RUNTIME and UNO_ENVIRONMENT:
_DYNAMIC = True
if not TYPE_CHECKING and _DYNAMIC:
from com.sun.star.xml.crypto import XCipherContext as XCipherContext
setattr(XCipherContext, '__ooo_ns__', 'com.sun.star.xml.crypto')
setattr(XCipherContext, '__ooo_full_ns__', 'com.sun.star.xml.crypto.XCipherContext')
setattr(XCipherContext, '__ooo_type_name__', 'interface')
else:
from ....lo.xml.crypto.x_cipher_context import XCipherContext as XCipherContext
__all__ = ['XCipherContext']
| 37.432432
| 88
| 0.768231
|
7948669e41fe1e35880eb66642e79f1f66bb1e75
| 10,449
|
py
|
Python
|
doc/conf.py
|
dburkhardt/slalom
|
547a56316e5c3ccc63e592eb907dc53b00212466
|
[
"Apache-2.0"
] | 24
|
2017-10-30T13:58:51.000Z
|
2021-08-14T17:07:46.000Z
|
doc/conf.py
|
dburkhardt/slalom
|
547a56316e5c3ccc63e592eb907dc53b00212466
|
[
"Apache-2.0"
] | 6
|
2017-11-11T04:49:01.000Z
|
2019-12-24T13:24:50.000Z
|
doc/conf.py
|
dburkhardt/slalom
|
547a56316e5c3ccc63e592eb907dc53b00212466
|
[
"Apache-2.0"
] | 5
|
2018-01-09T12:19:31.000Z
|
2019-11-26T17:01:57.000Z
|
# -*- coding: utf-8 -*-
#
# slalom documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 1 14:32:47 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
sys.path.insert(0, os.path.abspath('py/'))
from slalom import __version__
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
'sphinx.ext.napoleon'
]
napoleon_google_docstring = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'slalom'
copyright = u'2016-2017, Florian Buettner and Oliver Stegle'
author = u'Florian Buettner'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
__version__ = '1.0.0.dev10'
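# NOTE: this hard-coded string shadows the __version__ imported from slalom above.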
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'slalom v1.0.0'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'slalomdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'slalom.tex', u'slalom Documentation',
u'Florian Buettner', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'slalom', u'slalom Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'slalom', u'slalom Documentation',
author, 'slalom', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
'numpy': ('http://docs.scipy.org/doc/numpy/', None),
'scipy': ('http://docs.scipy.org/doc/scipy/reference/', None),
'matplotlib': ('http://matplotlib.sourceforge.net/', None)
}
| 28.944598
| 80
| 0.702172
|
794866da9ceb005f8956225f710b22b28a1cd5e3
| 7,990
|
py
|
Python
|
qa/rpc-tests/txn_clone.py
|
boItcurrency/BOLT
|
09b7263fbf4b3f30f106c0f1ee36ba89c8eb8d94
|
[
"MIT"
] | 4
|
2018-01-21T14:27:51.000Z
|
2020-02-13T16:52:21.000Z
|
qa/rpc-tests/txn_clone.py
|
boItcurrency/BOLT
|
09b7263fbf4b3f30f106c0f1ee36ba89c8eb8d94
|
[
"MIT"
] | 1
|
2018-02-01T11:53:40.000Z
|
2018-02-02T01:29:57.000Z
|
qa/rpc-tests/txn_clone.py
|
boItcurrency/BOLT
|
09b7263fbf4b3f30f106c0f1ee36ba89c8eb8d94
|
[
"MIT"
] | 4
|
2018-01-26T02:23:40.000Z
|
2020-03-05T13:32:10.000Z
|
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test proper accounting with an equivalent malleability clone
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class TxnMallTest(BitcoinTestFramework):
def add_options(self, parser):
parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
def setup_network(self):
# Start with split network:
return super(TxnMallTest, self).setup_network(True)
def run_test(self):
# All nodes should start with 12,500 BOLT:
starting_balance = 12500
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
# Assign coins to foo and bar accounts:
self.nodes[0].settxfee(.001)
node0_address_foo = self.nodes[0].getnewaddress("foo")
fund_foo_txid = self.nodes[0].sendfrom("", node0_address_foo, 12190)
fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid)
node0_address_bar = self.nodes[0].getnewaddress("bar")
fund_bar_txid = self.nodes[0].sendfrom("", node0_address_bar, 290)
fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid)
assert_equal(self.nodes[0].getbalance(""),
starting_balance - 12190 - 290 + fund_foo_tx["fee"] + fund_bar_tx["fee"])
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress("from0")
# Send tx1, and another transaction tx2 that won't be cloned
txid1 = self.nodes[0].sendfrom("foo", node1_address, 400, 0)
txid2 = self.nodes[0].sendfrom("bar", node1_address, 200, 0)
# Construct a clone of tx1, to be malleated
rawtx1 = self.nodes[0].getrawtransaction(txid1,1)
clone_inputs = [{"txid":rawtx1["vin"][0]["txid"],"vout":rawtx1["vin"][0]["vout"]}]
clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][0]["value"],
rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]:rawtx1["vout"][1]["value"]}
clone_raw = self.nodes[0].createrawtransaction(clone_inputs, clone_outputs)
# 3 hex manipulations on the clone are required
# manipulation 1. sequence is at version+#inputs+input+sigstub
posseq = 2*(4+1+36+1)
seqbe = '%08x' % rawtx1["vin"][0]["sequence"]
clone_raw = clone_raw[:posseq] + seqbe[6:8] + seqbe[4:6] + seqbe[2:4] + seqbe[0:2] + clone_raw[posseq + 8:]
# manipulation 2. createrawtransaction randomizes the order of its outputs, so swap them if necessary.
# output 0 is at version+#inputs+input+sigstub+sequence+#outputs
# 400 BOLT serialized is 00902f5009000000
pos0 = 2*(4+1+36+1+4+1)
hex400 = "00902f5009000000"
output_len = 16 + 2 + 2 * int("0x" + clone_raw[pos0 + 16 : pos0 + 16 + 2], 0)
if (rawtx1["vout"][0]["value"] == 400 and clone_raw[pos0 : pos0 + 16] != hex400 or
rawtx1["vout"][0]["value"] != 400 and clone_raw[pos0 : pos0 + 16] == hex400):
output0 = clone_raw[pos0 : pos0 + output_len]
output1 = clone_raw[pos0 + output_len : pos0 + 2 * output_len]
clone_raw = clone_raw[:pos0] + output1 + output0 + clone_raw[pos0 + 2 * output_len:]
# manipulation 3. locktime is after outputs
poslt = pos0 + 2 * output_len
ltbe = '%08x' % rawtx1["locktime"]
clone_raw = clone_raw[:poslt] + ltbe[6:8] + ltbe[4:6] + ltbe[2:4] + ltbe[0:2] + clone_raw[poslt + 8:]
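        # Worked example of the offset arithmetic above (comments only, nothing
        # here is executed): a serialized transaction starts with a 4-byte
        # version, a 1-byte input count, a 36-byte outpoint (32-byte txid +
        # 4-byte vout) and a 1-byte script-length stub, so the sequence field
        # begins at byte 4+1+36+1 = 42, i.e. hex-string offset 2*42 = 84 ==
        # posseq. Output 0 then follows the 4-byte sequence and the 1-byte
        # output count at 2*(4+1+36+1+4+1) = 94 == pos0, and the locktime sits
        # immediately after the two outputs (poslt).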
# Use a different signature hash type to sign. This creates an equivalent but malleated clone.
# Don't send the clone anywhere yet
tx1_clone = self.nodes[0].signrawtransaction(clone_raw, None, None, "ALL|ANYONECANPAY")
assert_equal(tx1_clone["complete"], True)
# Have node0 mine a block, if requested:
if (self.options.mine_block):
self.nodes[0].generate(1)
sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
        # Node0's balance should be starting balance, plus 500 BOLT for another
# matured block, minus tx1 and tx2 amounts, and minus transaction fees:
expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
if self.options.mine_block: expected += 500
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
# foo and bar accounts should be debited:
assert_equal(self.nodes[0].getbalance("foo", 0), 12190 + tx1["amount"] + tx1["fee"])
assert_equal(self.nodes[0].getbalance("bar", 0), 290 + tx2["amount"] + tx2["fee"])
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
# Node1's "from0" balance should be both transaction amounts:
assert_equal(self.nodes[1].getbalance("from0"), -(tx1["amount"] + tx2["amount"]))
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Send clone and its parent to miner
self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"])
# ... mine a block...
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
connect_nodes(self.nodes[1], 2)
self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
self.nodes[2].sendrawtransaction(tx2["hex"])
self.nodes[2].generate(1) # Mine another block to make sure we sync
sync_blocks(self.nodes)
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx1_clone = self.nodes[0].gettransaction(txid1_clone)
tx2 = self.nodes[0].gettransaction(txid2)
# Verify expected confirmations
assert_equal(tx1["confirmations"], -2)
assert_equal(tx1_clone["confirmations"], 2)
assert_equal(tx2["confirmations"], 1)
# Check node0's total balance; should be same as before the clone, + 1000 BOLT for 2 matured,
# less possible orphaned matured subsidy
expected += 1000
if (self.options.mine_block):
expected -= 500
assert_equal(self.nodes[0].getbalance(), expected)
assert_equal(self.nodes[0].getbalance("*", 0), expected)
# Check node0's individual account balances.
# "foo" should have been debited by the equivalent clone of tx1
assert_equal(self.nodes[0].getbalance("foo"), 12190 + tx1["amount"] + tx1["fee"])
# "bar" should have been debited by (possibly unconfirmed) tx2
assert_equal(self.nodes[0].getbalance("bar", 0), 290 + tx2["amount"] + tx2["fee"])
# "" should have starting balance, less funding txes, plus subsidies
assert_equal(self.nodes[0].getbalance("", 0), starting_balance
- 12190
+ fund_foo_tx["fee"]
- 290
+ fund_bar_tx["fee"]
+ 1000)
# Node1's "from0" account balance
assert_equal(self.nodes[1].getbalance("from0", 0), -(tx1["amount"] + tx2["amount"]))
if __name__ == '__main__':
TxnMallTest().main()
| 48.13253
| 115
| 0.609387
|
794867ff0188dd7325ed263b2adc119dca41cd98
| 67
|
py
|
Python
|
Chapter 09/Chap09_Example9.50.py
|
bpbpublications/Programming-Techniques-using-Python
|
49b785f37e95a3aad1d36cef51e219ac56e5e9f0
|
[
"MIT"
] | null | null | null |
Chapter 09/Chap09_Example9.50.py
|
bpbpublications/Programming-Techniques-using-Python
|
49b785f37e95a3aad1d36cef51e219ac56e5e9f0
|
[
"MIT"
] | null | null | null |
Chapter 09/Chap09_Example9.50.py
|
bpbpublications/Programming-Techniques-using-Python
|
49b785f37e95a3aad1d36cef51e219ac56e5e9f0
|
[
"MIT"
] | null | null | null |
#TypeError
from datetime import time
mytime = time(hour = '23')
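# datetime.time expects integer components, so passing the string '23' raises
# a TypeError; that is the error this example is meant to demonstrate. The
# corrected call would be: mytime = time(hour=23)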
| 16.75
| 27
| 0.701493
|
79486808f028fbb96a41218757f575977ed80f5c
| 390
|
py
|
Python
|
Alexio/wsgi.py
|
MonkeyAndres/AlexioProject
|
561fdbbfb561bb2ee40c6e90696ba4759029959d
|
[
"MIT"
] | null | null | null |
Alexio/wsgi.py
|
MonkeyAndres/AlexioProject
|
561fdbbfb561bb2ee40c6e90696ba4759029959d
|
[
"MIT"
] | null | null | null |
Alexio/wsgi.py
|
MonkeyAndres/AlexioProject
|
561fdbbfb561bb2ee40c6e90696ba4759029959d
|
[
"MIT"
] | null | null | null |
"""
WSGI config for Alexio project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Alexio.settings")
application = get_wsgi_application()
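# A WSGI server imports this module and looks up ``application``; for example,
# a (hypothetical for this project) gunicorn invocation would be:
#   gunicorn Alexio.wsgi:application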
| 22.941176
| 78
| 0.784615
|
7948687308a5e04d65c5bf8c18d13bcf7139029b
| 10,519
|
py
|
Python
|
doc/conf.py
|
dhuppenkothen/altair
|
6846d79418145e91c69fd183cbebd1a321b6a969
|
[
"BSD-3-Clause"
] | 1,134
|
2015-09-19T05:38:36.000Z
|
2021-09-21T15:15:11.000Z
|
doc/conf.py
|
dhuppenkothen/altair
|
6846d79418145e91c69fd183cbebd1a321b6a969
|
[
"BSD-3-Clause"
] | 165
|
2015-09-19T05:09:33.000Z
|
2018-10-08T19:42:28.000Z
|
doc/conf.py
|
dhuppenkothen/altair
|
6846d79418145e91c69fd183cbebd1a321b6a969
|
[
"BSD-3-Clause"
] | 75
|
2015-09-19T03:30:25.000Z
|
2018-11-19T05:37:38.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# altair documentation build configuration file, created by
# sphinx-quickstart on Wed Sep 7 12:52:48 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.githubpages',
'numpydoc.numpydoc',
'altair.sphinxext.altairplot',
'altair.sphinxext.altairgallery',
'altair.sphinxext.schematable'
]
altair_plot_links = {'editor': True, 'source': False, 'export': False}
autodoc_default_flags = ['members']
autodoc_member_order = 'groupwise'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Altair'
copyright = '2016-2018, Altair Developers'
author = 'Brian Granger and Jake VanderPlas'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.1.0dev'
# The full version, including alpha/beta/rc tags.
release = '2.1.0dev0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#html_title = 'altair v1.0.0'
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = 'Altair'
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '_static/altair-logo-light.png'
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = '_static/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static', '_images']
# adapted from: http://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html
# and
# https://github.com/rtfd/sphinx_rtd_theme/issues/117
def setup(app):
app.add_stylesheet('theme_overrides.css')
app.add_stylesheet('custom.css')
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'altairdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'altair.tex', 'altair Documentation',
'Brian Granger and Jake VanderPlas', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'altair', 'altair Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'altair', 'altair Documentation',
author, 'altair', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Hide extra class members
numpydoc_show_class_members = False
# For the altairplot extension
altairplot_links = {'editor': True, 'source': True, 'export': True}
altairplot_vega_js_url = "https://cdn.jsdelivr.net/npm/vega@3.3"
altairplot_vegalite_js_url = "https://cdn.jsdelivr.net/npm/vega-lite@2.4"
altairplot_vegaembed_js_url = "https://cdn.jsdelivr.net/npm/vega-embed@3.6"
| 32.974922
| 80
| 0.720221
|
794868922a6d9deb7a050e48be8d0c5f2b5c804e
| 204
|
py
|
Python
|
migration/20210727_01_Lim9D-add-new-event-type-uuid.py
|
randomicu/database
|
42e42f52c70297332e12ded243a7cac8660d5825
|
[
"Unlicense"
] | null | null | null |
migration/20210727_01_Lim9D-add-new-event-type-uuid.py
|
randomicu/database
|
42e42f52c70297332e12ded243a7cac8660d5825
|
[
"Unlicense"
] | 9
|
2020-10-27T19:13:10.000Z
|
2021-07-19T04:16:11.000Z
|
migration/20210727_01_Lim9D-add-new-event-type-uuid.py
|
randomicu/database
|
42e42f52c70297332e12ded243a7cac8660d5825
|
[
"Unlicense"
] | null | null | null |
"""
Add new event type: uuid
"""
from yoyo import step
__depends__ = {'20210718_01_E6H4i-add-view-with-event-summary'}
steps = [
step("ALTER TYPE \"event_type\" ADD VALUE IF NOT EXISTS 'uuid';")
]
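# Only the forward SQL is supplied to step() here, so this migration defines
# the ALTER TYPE change without a corresponding rollback statement.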
| 17
| 69
| 0.681373
|
7948693d15c647f2c5ce8a02e951b4bde6b6bb66
| 379,876
|
py
|
Python
|
vistrails/db/versions/v1_0_3/persistence/sql/auto_gen.py
|
remram44/VisTrails-mybinder
|
ee7477b471920d738f3ac430932f01901b56ed44
|
[
"BSD-3-Clause"
] | 83
|
2015-01-05T14:50:50.000Z
|
2021-09-17T19:45:26.000Z
|
vistrails/db/versions/v1_0_3/persistence/sql/auto_gen.py
|
remram44/VisTrails-mybinder
|
ee7477b471920d738f3ac430932f01901b56ed44
|
[
"BSD-3-Clause"
] | 254
|
2015-01-02T20:39:19.000Z
|
2018-11-28T17:16:44.000Z
|
vistrails/db/versions/v1_0_3/persistence/sql/auto_gen.py
|
remram44/VisTrails-mybinder
|
ee7477b471920d738f3ac430932f01901b56ed44
|
[
"BSD-3-Clause"
] | 40
|
2015-04-17T16:46:36.000Z
|
2021-09-28T22:43:24.000Z
|
###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""generated automatically by auto_dao.py"""
from __future__ import division
from sql_dao import SQLDAO
from vistrails.db.versions.v1_0_3.domain import *
class DBMashupAliasSQLDAOBase(SQLDAO):
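    # DAO for the mashup_alias table: converts result rows into DBMashupAlias
    # domain objects and builds the matching SELECT/INSERT/UPDATE/DELETE
    # commands. The other generated *SQLDAOBase classes below follow the same
    # pattern for their tables.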
def __init__(self, daoList):
self.daoList = daoList
self.table = 'mashup_alias'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_alias'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
parent = self.convertFromDB(row[2], 'long', 'int')
entity_id = self.convertFromDB(row[3], 'long', 'int')
entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
mashup_alias = DBMashupAlias(name=name,
id=id)
mashup_alias.db_parent = parent
mashup_alias.db_entity_id = entity_id
mashup_alias.db_entity_type = entity_type
mashup_alias.is_dirty = False
res[('mashup_alias', id)] = mashup_alias
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_alias'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
parent = self.convertFromDB(row[2], 'long', 'int')
entity_id = self.convertFromDB(row[3], 'long', 'int')
entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
mashup_alias = DBMashupAlias(name=name,
id=id)
mashup_alias.db_parent = parent
mashup_alias.db_entity_id = entity_id
mashup_alias.db_entity_type = entity_type
mashup_alias.is_dirty = False
res[('mashup_alias', id)] = mashup_alias
return res
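    # from_sql_fast re-attaches a freshly loaded object to its parent in the
    # in-memory object graph; to_sql_fast stamps the owning object's id (and
    # parent type, where the schema records it) onto its children before they
    # are written back out.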
def from_sql_fast(self, obj, all_objects):
if ('mashup', obj.db_parent) in all_objects:
p = all_objects[('mashup', obj.db_parent)]
p.db_add_alias(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_alias'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_alias'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_component is not None:
child = obj.db_component
child.db_mashup_alias = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'mashup_alias'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBGroupSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'group_tbl'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_tbl'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
cache = self.convertFromDB(row[1], 'int', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package = self.convertFromDB(row[4], 'str', 'varchar(511)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
group = DBGroup(cache=cache,
name=name,
namespace=namespace,
package=package,
version=version,
id=id)
group.db_parentType = parentType
group.db_entity_id = entity_id
group.db_entity_type = entity_type
group.db_parent = parent
group.is_dirty = False
res[('group', id)] = group
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_tbl'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
cache = self.convertFromDB(row[1], 'int', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package = self.convertFromDB(row[4], 'str', 'varchar(511)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
group = DBGroup(cache=cache,
name=name,
namespace=namespace,
package=package,
version=version,
id=id)
group.db_parentType = parentType
group.db_entity_id = entity_id
group.db_entity_type = entity_type
group.db_parent = parent
group.is_dirty = False
res[('group', id)] = group
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_module(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_workflow is not None:
child = obj.db_workflow
child.db_group = obj.db_id
if obj.db_location is not None:
child = obj.db_location
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_functions:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'group_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBAddSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'add_tbl'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'add_tbl'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
what = self.convertFromDB(row[1], 'str', 'varchar(255)')
objectId = self.convertFromDB(row[2], 'long', 'int')
parentObjId = self.convertFromDB(row[3], 'long', 'int')
parentObjType = self.convertFromDB(row[4], 'str', 'char(16)')
action = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
add = DBAdd(what=what,
objectId=objectId,
parentObjId=parentObjId,
parentObjType=parentObjType,
id=id)
add.db_action = action
add.db_entity_id = entity_id
add.db_entity_type = entity_type
add.is_dirty = False
res[('add', id)] = add
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'add_tbl'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
what = self.convertFromDB(row[1], 'str', 'varchar(255)')
objectId = self.convertFromDB(row[2], 'long', 'int')
parentObjId = self.convertFromDB(row[3], 'long', 'int')
parentObjType = self.convertFromDB(row[4], 'str', 'char(16)')
action = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
add = DBAdd(what=what,
objectId=objectId,
parentObjId=parentObjId,
parentObjType=parentObjType,
id=id)
add.db_action = action
add.db_entity_id = entity_id
add.db_entity_type = entity_type
add.is_dirty = False
res[('add', id)] = add
return res
def from_sql_fast(self, obj, all_objects):
if ('action', obj.db_action) in all_objects:
p = all_objects[('action', obj.db_action)]
p.db_add_operation(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'add_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_what') and obj.db_what is not None:
columnMap['what'] = \
self.convertToDB(obj.db_what, 'str', 'varchar(255)')
if hasattr(obj, 'db_objectId') and obj.db_objectId is not None:
columnMap['object_id'] = \
self.convertToDB(obj.db_objectId, 'long', 'int')
if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
columnMap['par_obj_id'] = \
self.convertToDB(obj.db_parentObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
columnMap['par_obj_type'] = \
self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
if hasattr(obj, 'db_action') and obj.db_action is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'add_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_what') and obj.db_what is not None:
columnMap['what'] = \
self.convertToDB(obj.db_what, 'str', 'varchar(255)')
if hasattr(obj, 'db_objectId') and obj.db_objectId is not None:
columnMap['object_id'] = \
self.convertToDB(obj.db_objectId, 'long', 'int')
if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
columnMap['par_obj_id'] = \
self.convertToDB(obj.db_parentObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
columnMap['par_obj_type'] = \
self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
if hasattr(obj, 'db_action') and obj.db_action is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_data is not None:
child = obj.db_data
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'add_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBGroupExecSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'group_exec'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_exec'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
cached = self.convertFromDB(row[3], 'int', 'int')
module_id = self.convertFromDB(row[4], 'long', 'int')
group_name = self.convertFromDB(row[5], 'str', 'varchar(255)')
group_type = self.convertFromDB(row[6], 'str', 'varchar(255)')
completed = self.convertFromDB(row[7], 'int', 'int')
error = self.convertFromDB(row[8], 'str', 'varchar(1023)')
machine_id = self.convertFromDB(row[9], 'long', 'int')
parentType = self.convertFromDB(row[10], 'str', 'char(32)')
entity_id = self.convertFromDB(row[11], 'long', 'int')
entity_type = self.convertFromDB(row[12], 'str', 'char(16)')
parent = self.convertFromDB(row[13], 'long', 'long')
group_exec = DBGroupExec(ts_start=ts_start,
ts_end=ts_end,
cached=cached,
module_id=module_id,
group_name=group_name,
group_type=group_type,
completed=completed,
error=error,
machine_id=machine_id,
id=id)
group_exec.db_parentType = parentType
group_exec.db_entity_id = entity_id
group_exec.db_entity_type = entity_type
group_exec.db_parent = parent
group_exec.is_dirty = False
res[('group_exec', id)] = group_exec
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_exec'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
cached = self.convertFromDB(row[3], 'int', 'int')
module_id = self.convertFromDB(row[4], 'long', 'int')
group_name = self.convertFromDB(row[5], 'str', 'varchar(255)')
group_type = self.convertFromDB(row[6], 'str', 'varchar(255)')
completed = self.convertFromDB(row[7], 'int', 'int')
error = self.convertFromDB(row[8], 'str', 'varchar(1023)')
machine_id = self.convertFromDB(row[9], 'long', 'int')
parentType = self.convertFromDB(row[10], 'str', 'char(32)')
entity_id = self.convertFromDB(row[11], 'long', 'int')
entity_type = self.convertFromDB(row[12], 'str', 'char(16)')
parent = self.convertFromDB(row[13], 'long', 'long')
group_exec = DBGroupExec(ts_start=ts_start,
ts_end=ts_end,
cached=cached,
module_id=module_id,
group_name=group_name,
group_type=group_type,
completed=completed,
error=error,
machine_id=machine_id,
id=id)
group_exec.db_parentType = parentType
group_exec.db_entity_id = entity_id
group_exec.db_entity_type = entity_type
group_exec.db_parent = parent
group_exec.is_dirty = False
res[('group_exec', id)] = group_exec
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow_exec':
p = all_objects[('workflow_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'loop_exec':
p = all_objects[('loop_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'group_exec':
p = all_objects[('group_exec', obj.db_parent)]
p.db_add_item_exec(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_cached') and obj.db_cached is not None:
columnMap['cached'] = \
self.convertToDB(obj.db_cached, 'int', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_group_name') and obj.db_group_name is not None:
columnMap['group_name'] = \
self.convertToDB(obj.db_group_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_group_type') and obj.db_group_type is not None:
columnMap['group_type'] = \
self.convertToDB(obj.db_group_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None:
columnMap['machine_id'] = \
self.convertToDB(obj.db_machine_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
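        # New or copied objects become INSERTs; existing dirty objects become
        # UPDATEs keyed on the whereMap built above.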
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_cached') and obj.db_cached is not None:
columnMap['cached'] = \
self.convertToDB(obj.db_cached, 'int', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_group_name') and obj.db_group_name is not None:
columnMap['group_name'] = \
self.convertToDB(obj.db_group_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_group_type') and obj.db_group_type is not None:
columnMap['group_type'] = \
self.convertToDB(obj.db_group_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None:
columnMap['machine_id'] = \
self.convertToDB(obj.db_machine_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_item_execs:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'group_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
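# DAO for the 'parameter' table: loads and stores DBParameter objects
# (position, name, type, value, alias) together with the parent link that
# attaches each parameter to its owning function, add, or change operation.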
class DBParameterSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'parameter'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'parameter'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
type = self.convertFromDB(row[3], 'str', 'varchar(255)')
val = self.convertFromDB(row[4], 'str', 'mediumtext')
alias = self.convertFromDB(row[5], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
parameter = DBParameter(pos=pos,
name=name,
type=type,
val=val,
alias=alias,
id=id)
parameter.db_parentType = parentType
parameter.db_entity_id = entity_id
parameter.db_entity_type = entity_type
parameter.db_parent = parent
parameter.is_dirty = False
res[('parameter', id)] = parameter
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'parameter'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
type = self.convertFromDB(row[3], 'str', 'varchar(255)')
val = self.convertFromDB(row[4], 'str', 'mediumtext')
alias = self.convertFromDB(row[5], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
parameter = DBParameter(pos=pos,
name=name,
type=type,
val=val,
alias=alias,
id=id)
parameter.db_parentType = parentType
parameter.db_entity_id = entity_id
parameter.db_entity_type = entity_type
parameter.db_parent = parent
parameter.is_dirty = False
res[('parameter', id)] = parameter
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'function':
p = all_objects[('function', obj.db_parent)]
p.db_add_parameter(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_val') and obj.db_val is not None:
columnMap['val'] = \
self.convertToDB(obj.db_val, 'str', 'mediumtext')
if hasattr(obj, 'db_alias') and obj.db_alias is not None:
columnMap['alias'] = \
self.convertToDB(obj.db_alias, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_val') and obj.db_val is not None:
columnMap['val'] = \
self.convertToDB(obj.db_val, 'str', 'mediumtext')
if hasattr(obj, 'db_alias') and obj.db_alias is not None:
columnMap['alias'] = \
self.convertToDB(obj.db_alias, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
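# DAO for the 'vistrail' table. Vistrails are top-level entities: loading or
# saving one also writes its entity_id/entity_type into global_props so that
# child DAOs are scoped to the same entity.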
class DBVistrailSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'vistrail'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'entity_type', 'version', 'name', 'last_modified']
table = 'vistrail'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
name = self.convertFromDB(row[3], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
vistrail = DBVistrail(entity_type=entity_type,
version=version,
name=name,
last_modified=last_modified,
id=id)
vistrail.is_dirty = False
res[('vistrail', id)] = vistrail
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'entity_type', 'version', 'name', 'last_modified']
table = 'vistrail'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
name = self.convertFromDB(row[3], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
vistrail = DBVistrail(entity_type=entity_type,
version=version,
name=name,
last_modified=last_modified,
id=id)
vistrail.is_dirty = False
res[('vistrail', id)] = vistrail
return res
def from_sql_fast(self, obj, all_objects):
pass
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'entity_type', 'version', 'name', 'last_modified']
table = 'vistrail'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'entity_type', 'version', 'name', 'last_modified']
table = 'vistrail'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def to_sql_fast(self, obj, do_copy=True):
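        # Before saving, push this vistrail's id down to all children so their
        # foreign-key columns point back at it.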
for child in obj.db_actions:
child.db_vistrail = obj.db_id
for child in obj.db_tags:
child.db_vistrail = obj.db_id
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_vistrailVariables:
child.db_vistrail = obj.db_id
for child in obj.db_parameter_explorations:
child.db_vistrail = obj.db_id
for child in obj.db_actionAnnotations:
child.db_vistrail = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'vistrail'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
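# DAO for the 'module' table: persists DBModule objects (cache flag, name,
# namespace, package, version) plus the parent link to a workflow or to an
# add/change operation.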
class DBModuleSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'module'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
cache = self.convertFromDB(row[1], 'int', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package = self.convertFromDB(row[4], 'str', 'varchar(511)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
module = DBModule(cache=cache,
name=name,
namespace=namespace,
package=package,
version=version,
id=id)
module.db_parentType = parentType
module.db_entity_id = entity_id
module.db_entity_type = entity_type
module.db_parent = parent
module.is_dirty = False
res[('module', id)] = module
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
cache = self.convertFromDB(row[1], 'int', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package = self.convertFromDB(row[4], 'str', 'varchar(511)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
module = DBModule(cache=cache,
name=name,
namespace=namespace,
package=package,
version=version,
id=id)
module.db_parentType = parentType
module.db_entity_id = entity_id
module.db_entity_type = entity_type
module.db_parent = parent
module.is_dirty = False
res[('module', id)] = module
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_module(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_location is not None:
child = obj.db_location
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_functions:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_portSpecs:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'module'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
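# DAO for the 'port' table: persists connection endpoints (type, module id and
# name, port name, signature) and re-links them to their connection or
# add/change operation on load.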
class DBPortSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'port'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
type = self.convertFromDB(row[1], 'str', 'varchar(255)')
moduleId = self.convertFromDB(row[2], 'long', 'int')
moduleName = self.convertFromDB(row[3], 'str', 'varchar(255)')
name = self.convertFromDB(row[4], 'str', 'varchar(255)')
signature = self.convertFromDB(row[5], 'str', 'varchar(4095)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
port = DBPort(type=type,
moduleId=moduleId,
moduleName=moduleName,
name=name,
signature=signature,
id=id)
port.db_parentType = parentType
port.db_entity_id = entity_id
port.db_entity_type = entity_type
port.db_parent = parent
port.is_dirty = False
res[('port', id)] = port
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
type = self.convertFromDB(row[1], 'str', 'varchar(255)')
moduleId = self.convertFromDB(row[2], 'long', 'int')
moduleName = self.convertFromDB(row[3], 'str', 'varchar(255)')
name = self.convertFromDB(row[4], 'str', 'varchar(255)')
signature = self.convertFromDB(row[5], 'str', 'varchar(4095)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
port = DBPort(type=type,
moduleId=moduleId,
moduleName=moduleName,
name=name,
signature=signature,
id=id)
port.db_parentType = parentType
port.db_entity_id = entity_id
port.db_entity_type = entity_type
port.db_parent = parent
port.is_dirty = False
res[('port', id)] = port
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'connection':
p = all_objects[('connection', obj.db_parent)]
p.db_add_port(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_moduleId') and obj.db_moduleId is not None:
columnMap['moduleId'] = \
self.convertToDB(obj.db_moduleId, 'long', 'int')
if hasattr(obj, 'db_moduleName') and obj.db_moduleName is not None:
columnMap['moduleName'] = \
self.convertToDB(obj.db_moduleName, 'str', 'varchar(255)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_signature') and obj.db_signature is not None:
columnMap['signature'] = \
self.convertToDB(obj.db_signature, 'str', 'varchar(4095)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_moduleId') and obj.db_moduleId is not None:
columnMap['moduleId'] = \
self.convertToDB(obj.db_moduleId, 'long', 'int')
if hasattr(obj, 'db_moduleName') and obj.db_moduleName is not None:
columnMap['moduleName'] = \
self.convertToDB(obj.db_moduleName, 'str', 'varchar(255)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_signature') and obj.db_signature is not None:
columnMap['signature'] = \
self.convertToDB(obj.db_signature, 'str', 'varchar(4095)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'port'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
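# DAO for the 'pe_function' table: persists parameter-exploration functions,
# keyed to their parameter_exploration parent through the parent_id column.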
class DBPEFunctionSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'pe_function'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_function'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
module_id = self.convertFromDB(row[1], 'long', 'int')
port_name = self.convertFromDB(row[2], 'str', 'varchar(255)')
is_alias = self.convertFromDB(row[3], 'long', 'int')
parentType = self.convertFromDB(row[4], 'str', 'char(32)')
parameter_exploration = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
            pe_function = DBPEFunction(module_id=module_id,
                                       port_name=port_name,
                                       is_alias=is_alias,
                                       id=id)
pe_function.db_parentType = parentType
pe_function.db_parameter_exploration = parameter_exploration
pe_function.db_entity_id = entity_id
pe_function.db_entity_type = entity_type
pe_function.is_dirty = False
res[('pe_function', id)] = pe_function
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_function'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
module_id = self.convertFromDB(row[1], 'long', 'int')
port_name = self.convertFromDB(row[2], 'str', 'varchar(255)')
is_alias = self.convertFromDB(row[3], 'long', 'int')
parentType = self.convertFromDB(row[4], 'str', 'char(32)')
parameter_exploration = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
            pe_function = DBPEFunction(module_id=module_id,
                                       port_name=port_name,
                                       is_alias=is_alias,
                                       id=id)
pe_function.db_parentType = parentType
pe_function.db_parameter_exploration = parameter_exploration
pe_function.db_entity_id = entity_id
pe_function.db_entity_type = entity_type
pe_function.is_dirty = False
res[('pe_function', id)] = pe_function
return res
def from_sql_fast(self, obj, all_objects):
if ('parameter_exploration', obj.db_parameter_exploration) in all_objects:
p = all_objects[('parameter_exploration', obj.db_parameter_exploration)]
p.db_add_function(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_function'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_port_name') and obj.db_port_name is not None:
columnMap['port_name'] = \
self.convertToDB(obj.db_port_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_is_alias') and obj.db_is_alias is not None:
columnMap['is_alias'] = \
self.convertToDB(obj.db_is_alias, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_parameter_exploration') and obj.db_parameter_exploration is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parameter_exploration, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_function'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_port_name') and obj.db_port_name is not None:
columnMap['port_name'] = \
self.convertToDB(obj.db_port_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_is_alias') and obj.db_is_alias is not None:
columnMap['is_alias'] = \
self.convertToDB(obj.db_is_alias, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_parameter_exploration') and obj.db_parameter_exploration is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parameter_exploration, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_parameters:
child.db_pe_function = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'pe_function'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
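# DAO for the 'workflow' table. Like vistrails, workflows are top-level
# entities, so saving one feeds its id and entity_type back into global_props;
# a workflow may also be owned by a group via the parent_id column.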
class DBWorkflowSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'workflow'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id']
table = 'workflow'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_id = self.convertFromDB(row[1], 'long', 'int')
entity_type = self.convertFromDB(row[2], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
name = self.convertFromDB(row[3], 'str', 'varchar(255)')
version = self.convertFromDB(row[4], 'str', 'char(16)')
last_modified = self.convertFromDB(row[5], 'datetime', 'datetime')
vistrail_id = self.convertFromDB(row[6], 'long', 'int')
group = self.convertFromDB(row[7], 'long', 'int')
workflow = DBWorkflow(entity_type=entity_type,
name=name,
version=version,
last_modified=last_modified,
vistrail_id=vistrail_id,
id=id)
workflow.db_entity_id = entity_id
workflow.db_group = group
workflow.is_dirty = False
res[('workflow', id)] = workflow
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id']
table = 'workflow'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_id = self.convertFromDB(row[1], 'long', 'int')
entity_type = self.convertFromDB(row[2], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
name = self.convertFromDB(row[3], 'str', 'varchar(255)')
version = self.convertFromDB(row[4], 'str', 'char(16)')
last_modified = self.convertFromDB(row[5], 'datetime', 'datetime')
vistrail_id = self.convertFromDB(row[6], 'long', 'int')
group = self.convertFromDB(row[7], 'long', 'int')
workflow = DBWorkflow(entity_type=entity_type,
name=name,
version=version,
last_modified=last_modified,
vistrail_id=vistrail_id,
id=id)
workflow.db_entity_id = entity_id
workflow.db_group = group
workflow.is_dirty = False
res[('workflow', id)] = workflow
return res
def from_sql_fast(self, obj, all_objects):
if ('group', obj.db_group) in all_objects:
p = all_objects[('group', obj.db_group)]
p.db_add_workflow(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id']
table = 'workflow'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None:
columnMap['vistrail_id'] = \
self.convertToDB(obj.db_vistrail_id, 'long', 'int')
if hasattr(obj, 'db_group') and obj.db_group is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_group, 'long', 'int')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id']
table = 'workflow'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None:
columnMap['vistrail_id'] = \
self.convertToDB(obj.db_vistrail_id, 'long', 'int')
if hasattr(obj, 'db_group') and obj.db_group is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_group, 'long', 'int')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_connections:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_plugin_datas:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_others:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_modules:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'workflow'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
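# DAO for the 'mashup_action' table: persists mashup version-tree actions
# (prev_id, date, user) and re-links each action to its mashuptrail on load.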
class DBMashupActionSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'mashup_action'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
prevId = self.convertFromDB(row[1], 'long', 'int')
date = self.convertFromDB(row[2], 'datetime', 'datetime')
user = self.convertFromDB(row[3], 'str', 'varchar(255)')
mashuptrail = self.convertFromDB(row[4], 'long', 'int')
entity_id = self.convertFromDB(row[5], 'long', 'int')
entity_type = self.convertFromDB(row[6], 'str', 'char(16)')
mashup_action = DBMashupAction(prevId=prevId,
date=date,
user=user,
id=id)
mashup_action.db_mashuptrail = mashuptrail
mashup_action.db_entity_id = entity_id
mashup_action.db_entity_type = entity_type
mashup_action.is_dirty = False
res[('mashup_action', id)] = mashup_action
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
prevId = self.convertFromDB(row[1], 'long', 'int')
date = self.convertFromDB(row[2], 'datetime', 'datetime')
user = self.convertFromDB(row[3], 'str', 'varchar(255)')
mashuptrail = self.convertFromDB(row[4], 'long', 'int')
entity_id = self.convertFromDB(row[5], 'long', 'int')
entity_type = self.convertFromDB(row[6], 'str', 'char(16)')
mashup_action = DBMashupAction(prevId=prevId,
date=date,
user=user,
id=id)
mashup_action.db_mashuptrail = mashuptrail
mashup_action.db_entity_id = entity_id
mashup_action.db_entity_type = entity_type
mashup_action.is_dirty = False
res[('mashup_action', id)] = mashup_action
return res
def from_sql_fast(self, obj, all_objects):
if ('mashuptrail', obj.db_mashuptrail) in all_objects:
p = all_objects[('mashuptrail', obj.db_mashuptrail)]
p.db_add_action(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_prevId') and obj.db_prevId is not None:
columnMap['prev_id'] = \
self.convertToDB(obj.db_prevId, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_mashuptrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_prevId') and obj.db_prevId is not None:
columnMap['prev_id'] = \
self.convertToDB(obj.db_prevId, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_mashuptrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_mashup is not None:
child = obj.db_mashup
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'mashup_action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
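# DAO for the 'change_tbl' table: persists 'change' operations of the version
# tree (what changed, old/new object ids, parent object) and re-links each
# one to its owning action on load.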
class DBChangeSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'change_tbl'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'what', 'old_obj_id', 'new_obj_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'change_tbl'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
what = self.convertFromDB(row[1], 'str', 'varchar(255)')
oldObjId = self.convertFromDB(row[2], 'long', 'int')
newObjId = self.convertFromDB(row[3], 'long', 'int')
parentObjId = self.convertFromDB(row[4], 'long', 'int')
parentObjType = self.convertFromDB(row[5], 'str', 'char(16)')
action = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
change = DBChange(what=what,
oldObjId=oldObjId,
newObjId=newObjId,
parentObjId=parentObjId,
parentObjType=parentObjType,
id=id)
change.db_action = action
change.db_entity_id = entity_id
change.db_entity_type = entity_type
change.is_dirty = False
res[('change', id)] = change
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'what', 'old_obj_id', 'new_obj_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'change_tbl'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
what = self.convertFromDB(row[1], 'str', 'varchar(255)')
oldObjId = self.convertFromDB(row[2], 'long', 'int')
newObjId = self.convertFromDB(row[3], 'long', 'int')
parentObjId = self.convertFromDB(row[4], 'long', 'int')
parentObjType = self.convertFromDB(row[5], 'str', 'char(16)')
action = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
change = DBChange(what=what,
oldObjId=oldObjId,
newObjId=newObjId,
parentObjId=parentObjId,
parentObjType=parentObjType,
id=id)
change.db_action = action
change.db_entity_id = entity_id
change.db_entity_type = entity_type
change.is_dirty = False
res[('change', id)] = change
return res
def from_sql_fast(self, obj, all_objects):
if ('action', obj.db_action) in all_objects:
p = all_objects[('action', obj.db_action)]
p.db_add_operation(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'what', 'old_obj_id', 'new_obj_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'change_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_what') and obj.db_what is not None:
columnMap['what'] = \
self.convertToDB(obj.db_what, 'str', 'varchar(255)')
if hasattr(obj, 'db_oldObjId') and obj.db_oldObjId is not None:
columnMap['old_obj_id'] = \
self.convertToDB(obj.db_oldObjId, 'long', 'int')
if hasattr(obj, 'db_newObjId') and obj.db_newObjId is not None:
columnMap['new_obj_id'] = \
self.convertToDB(obj.db_newObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
columnMap['par_obj_id'] = \
self.convertToDB(obj.db_parentObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
columnMap['par_obj_type'] = \
self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
if hasattr(obj, 'db_action') and obj.db_action is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'what', 'old_obj_id', 'new_obj_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'change_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_what') and obj.db_what is not None:
columnMap['what'] = \
self.convertToDB(obj.db_what, 'str', 'varchar(255)')
if hasattr(obj, 'db_oldObjId') and obj.db_oldObjId is not None:
columnMap['old_obj_id'] = \
self.convertToDB(obj.db_oldObjId, 'long', 'int')
if hasattr(obj, 'db_newObjId') and obj.db_newObjId is not None:
columnMap['new_obj_id'] = \
self.convertToDB(obj.db_newObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
columnMap['par_obj_id'] = \
self.convertToDB(obj.db_parentObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
columnMap['par_obj_type'] = \
self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
if hasattr(obj, 'db_action') and obj.db_action is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_data is not None:
child = obj.db_data
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'change_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBPackageSQLDAOBase(SQLDAO):
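    # DAO for the 'package' table: maps DBPackage rows (name, identifier,
    # codepath, version, ...) and links each package back to its registry
    # in from_sql_fast.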
def __init__(self, daoList):
self.daoList = daoList
self.table = 'package'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'name', 'identifier', 'codepath', 'load_configuration', 'version', 'description', 'parent_id', 'entity_id', 'entity_type']
table = 'package'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
identifier = self.convertFromDB(row[2], 'str', 'varchar(1023)')
codepath = self.convertFromDB(row[3], 'str', 'varchar(1023)')
load_configuration = self.convertFromDB(row[4], 'int', 'int')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
description = self.convertFromDB(row[6], 'str', 'varchar(1023)')
registry = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
package = DBPackage(name=name,
identifier=identifier,
codepath=codepath,
load_configuration=load_configuration,
version=version,
description=description,
id=id)
package.db_registry = registry
package.db_entity_id = entity_id
package.db_entity_type = entity_type
package.is_dirty = False
res[('package', id)] = package
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'name', 'identifier', 'codepath', 'load_configuration', 'version', 'description', 'parent_id', 'entity_id', 'entity_type']
table = 'package'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
identifier = self.convertFromDB(row[2], 'str', 'varchar(1023)')
codepath = self.convertFromDB(row[3], 'str', 'varchar(1023)')
load_configuration = self.convertFromDB(row[4], 'int', 'int')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
description = self.convertFromDB(row[6], 'str', 'varchar(1023)')
registry = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
package = DBPackage(name=name,
identifier=identifier,
codepath=codepath,
load_configuration=load_configuration,
version=version,
description=description,
id=id)
package.db_registry = registry
package.db_entity_id = entity_id
package.db_entity_type = entity_type
package.is_dirty = False
res[('package', id)] = package
return res
def from_sql_fast(self, obj, all_objects):
if ('registry', obj.db_registry) in all_objects:
p = all_objects[('registry', obj.db_registry)]
p.db_add_package(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'identifier', 'codepath', 'load_configuration', 'version', 'description', 'parent_id', 'entity_id', 'entity_type']
table = 'package'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_identifier') and obj.db_identifier is not None:
columnMap['identifier'] = \
self.convertToDB(obj.db_identifier, 'str', 'varchar(1023)')
if hasattr(obj, 'db_codepath') and obj.db_codepath is not None:
columnMap['codepath'] = \
self.convertToDB(obj.db_codepath, 'str', 'varchar(1023)')
if hasattr(obj, 'db_load_configuration') and obj.db_load_configuration is not None:
columnMap['load_configuration'] = \
self.convertToDB(obj.db_load_configuration, 'int', 'int')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_description') and obj.db_description is not None:
columnMap['description'] = \
self.convertToDB(obj.db_description, 'str', 'varchar(1023)')
if hasattr(obj, 'db_registry') and obj.db_registry is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_registry, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'identifier', 'codepath', 'load_configuration', 'version', 'description', 'parent_id', 'entity_id', 'entity_type']
table = 'package'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_identifier') and obj.db_identifier is not None:
columnMap['identifier'] = \
self.convertToDB(obj.db_identifier, 'str', 'varchar(1023)')
if hasattr(obj, 'db_codepath') and obj.db_codepath is not None:
columnMap['codepath'] = \
self.convertToDB(obj.db_codepath, 'str', 'varchar(1023)')
if hasattr(obj, 'db_load_configuration') and obj.db_load_configuration is not None:
columnMap['load_configuration'] = \
self.convertToDB(obj.db_load_configuration, 'int', 'int')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_description') and obj.db_description is not None:
columnMap['description'] = \
self.convertToDB(obj.db_description, 'str', 'varchar(1023)')
if hasattr(obj, 'db_registry') and obj.db_registry is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_registry, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_module_descriptors:
child.db_package = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'package'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBLoopExecSQLDAOBase(SQLDAO):
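    # DAO for the 'loop_exec' table: maps DBLoopExec rows and re-attaches
    # each loop execution to its parent workflow_exec, group_exec,
    # loop_exec, or module_exec in from_sql_fast.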
def __init__(self, daoList):
self.daoList = daoList
self.table = 'loop_exec'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'loop_exec'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
iteration = self.convertFromDB(row[3], 'int', 'int')
completed = self.convertFromDB(row[4], 'int', 'int')
error = self.convertFromDB(row[5], 'str', 'varchar(1023)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
loop_exec = DBLoopExec(ts_start=ts_start,
ts_end=ts_end,
iteration=iteration,
completed=completed,
error=error,
id=id)
loop_exec.db_parentType = parentType
loop_exec.db_entity_id = entity_id
loop_exec.db_entity_type = entity_type
loop_exec.db_parent = parent
loop_exec.is_dirty = False
res[('loop_exec', id)] = loop_exec
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'loop_exec'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
iteration = self.convertFromDB(row[3], 'int', 'int')
completed = self.convertFromDB(row[4], 'int', 'int')
error = self.convertFromDB(row[5], 'str', 'varchar(1023)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
loop_exec = DBLoopExec(ts_start=ts_start,
ts_end=ts_end,
iteration=iteration,
completed=completed,
error=error,
id=id)
loop_exec.db_parentType = parentType
loop_exec.db_entity_id = entity_id
loop_exec.db_entity_type = entity_type
loop_exec.db_parent = parent
loop_exec.is_dirty = False
res[('loop_exec', id)] = loop_exec
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow_exec':
p = all_objects[('workflow_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'group_exec':
p = all_objects[('group_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'loop_exec':
p = all_objects[('loop_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'module_exec':
p = all_objects[('module_exec', obj.db_parent)]
p.db_add_loop_exec(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'loop_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_iteration') and obj.db_iteration is not None:
columnMap['iteration'] = \
self.convertToDB(obj.db_iteration, 'int', 'int')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'loop_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_iteration') and obj.db_iteration is not None:
columnMap['iteration'] = \
self.convertToDB(obj.db_iteration, 'int', 'int')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_item_execs:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'loop_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBConnectionSQLDAOBase(SQLDAO):
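    # DAO for the 'connection_tbl' table: maps DBConnection rows and
    # re-attaches each connection to its parent workflow or add/change
    # operation in from_sql_fast.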
def __init__(self, daoList):
self.daoList = daoList
self.table = 'connection_tbl'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'connection_tbl'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
parentType = self.convertFromDB(row[1], 'str', 'char(32)')
entity_id = self.convertFromDB(row[2], 'long', 'int')
entity_type = self.convertFromDB(row[3], 'str', 'char(16)')
parent = self.convertFromDB(row[4], 'long', 'long')
connection = DBConnection(id=id)
connection.db_parentType = parentType
connection.db_entity_id = entity_id
connection.db_entity_type = entity_type
connection.db_parent = parent
connection.is_dirty = False
res[('connection', id)] = connection
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'connection_tbl'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
parentType = self.convertFromDB(row[1], 'str', 'char(32)')
entity_id = self.convertFromDB(row[2], 'long', 'int')
entity_type = self.convertFromDB(row[3], 'str', 'char(16)')
parent = self.convertFromDB(row[4], 'long', 'long')
connection = DBConnection(id=id)
connection.db_parentType = parentType
connection.db_entity_id = entity_id
connection.db_entity_type = entity_type
connection.db_parent = parent
connection.is_dirty = False
res[('connection', id)] = connection
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_connection(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'connection_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'connection_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_ports:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'connection_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBActionSQLDAOBase(SQLDAO):
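    # DAO for the 'action' table: maps DBAction rows (prev_id, date,
    # session, user) and links each action back to its vistrail in
    # from_sql_fast.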
def __init__(self, daoList):
self.daoList = daoList
self.table = 'action'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
prevId = self.convertFromDB(row[1], 'long', 'int')
date = self.convertFromDB(row[2], 'datetime', 'datetime')
session = self.convertFromDB(row[3], 'long', 'int')
user = self.convertFromDB(row[4], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
action = DBAction(prevId=prevId,
date=date,
session=session,
user=user,
id=id)
action.db_vistrail = vistrail
action.db_entity_id = entity_id
action.db_entity_type = entity_type
action.is_dirty = False
res[('action', id)] = action
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
prevId = self.convertFromDB(row[1], 'long', 'int')
date = self.convertFromDB(row[2], 'datetime', 'datetime')
session = self.convertFromDB(row[3], 'long', 'int')
user = self.convertFromDB(row[4], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
action = DBAction(prevId=prevId,
date=date,
session=session,
user=user,
id=id)
action.db_vistrail = vistrail
action.db_entity_id = entity_id
action.db_entity_type = entity_type
action.is_dirty = False
res[('action', id)] = action
return res
def from_sql_fast(self, obj, all_objects):
if ('vistrail', obj.db_vistrail) in all_objects:
p = all_objects[('vistrail', obj.db_vistrail)]
p.db_add_action(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_prevId') and obj.db_prevId is not None:
columnMap['prev_id'] = \
self.convertToDB(obj.db_prevId, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_session') and obj.db_session is not None:
columnMap['session'] = \
self.convertToDB(obj.db_session, 'long', 'int')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_prevId') and obj.db_prevId is not None:
columnMap['prev_id'] = \
self.convertToDB(obj.db_prevId, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_session') and obj.db_session is not None:
columnMap['session'] = \
self.convertToDB(obj.db_session, 'long', 'int')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_operations:
child.db_action = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBPortSpecSQLDAOBase(SQLDAO):
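    # DAO for the 'port_spec' table: maps DBPortSpec rows and re-attaches
    # each port spec to its parent module, module_descriptor, or
    # add/change operation in from_sql_fast.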
def __init__(self, daoList):
self.daoList = daoList
self.table = 'port_spec'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'name', 'type', 'optional', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port_spec'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
type = self.convertFromDB(row[2], 'str', 'varchar(255)')
optional = self.convertFromDB(row[3], 'int', 'int')
sort_key = self.convertFromDB(row[4], 'int', 'int')
min_conns = self.convertFromDB(row[5], 'int', 'int')
max_conns = self.convertFromDB(row[6], 'int', 'int')
parentType = self.convertFromDB(row[7], 'str', 'char(32)')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
parent = self.convertFromDB(row[10], 'long', 'long')
portSpec = DBPortSpec(name=name,
type=type,
optional=optional,
sort_key=sort_key,
min_conns=min_conns,
max_conns=max_conns,
id=id)
portSpec.db_parentType = parentType
portSpec.db_entity_id = entity_id
portSpec.db_entity_type = entity_type
portSpec.db_parent = parent
portSpec.is_dirty = False
res[('portSpec', id)] = portSpec
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'name', 'type', 'optional', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port_spec'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
type = self.convertFromDB(row[2], 'str', 'varchar(255)')
optional = self.convertFromDB(row[3], 'int', 'int')
sort_key = self.convertFromDB(row[4], 'int', 'int')
min_conns = self.convertFromDB(row[5], 'int', 'int')
max_conns = self.convertFromDB(row[6], 'int', 'int')
parentType = self.convertFromDB(row[7], 'str', 'char(32)')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
parent = self.convertFromDB(row[10], 'long', 'long')
portSpec = DBPortSpec(name=name,
type=type,
optional=optional,
sort_key=sort_key,
min_conns=min_conns,
max_conns=max_conns,
id=id)
portSpec.db_parentType = parentType
portSpec.db_entity_id = entity_id
portSpec.db_entity_type = entity_type
portSpec.db_parent = parent
portSpec.is_dirty = False
res[('portSpec', id)] = portSpec
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'module':
p = all_objects[('module', obj.db_parent)]
p.db_add_portSpec(obj)
elif obj.db_parentType == 'module_descriptor':
p = all_objects[('module_descriptor', obj.db_parent)]
p.db_add_portSpec(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'type', 'optional', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port_spec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_optional') and obj.db_optional is not None:
columnMap['optional'] = \
self.convertToDB(obj.db_optional, 'int', 'int')
if hasattr(obj, 'db_sort_key') and obj.db_sort_key is not None:
columnMap['sort_key'] = \
self.convertToDB(obj.db_sort_key, 'int', 'int')
if hasattr(obj, 'db_min_conns') and obj.db_min_conns is not None:
columnMap['min_conns'] = \
self.convertToDB(obj.db_min_conns, 'int', 'int')
if hasattr(obj, 'db_max_conns') and obj.db_max_conns is not None:
columnMap['max_conns'] = \
self.convertToDB(obj.db_max_conns, 'int', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'type', 'optional', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port_spec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_optional') and obj.db_optional is not None:
columnMap['optional'] = \
self.convertToDB(obj.db_optional, 'int', 'int')
if hasattr(obj, 'db_sort_key') and obj.db_sort_key is not None:
columnMap['sort_key'] = \
self.convertToDB(obj.db_sort_key, 'int', 'int')
if hasattr(obj, 'db_min_conns') and obj.db_min_conns is not None:
columnMap['min_conns'] = \
self.convertToDB(obj.db_min_conns, 'int', 'int')
if hasattr(obj, 'db_max_conns') and obj.db_max_conns is not None:
columnMap['max_conns'] = \
self.convertToDB(obj.db_max_conns, 'int', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_portSpecItems:
child.db_portSpec = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'port_spec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBLogSQLDAOBase(SQLDAO):
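    # DAO for the 'log_tbl' table: maps top-level DBLog rows; when a log is
    # read or written it also copies its id and entity_type into
    # global_props for use by the other DAOs in the same load/save pass.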
def __init__(self, daoList):
self.daoList = daoList
self.table = 'log_tbl'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
table = 'log_tbl'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
name = self.convertFromDB(row[3], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
vistrail_id = self.convertFromDB(row[5], 'long', 'int')
log = DBLog(entity_type=entity_type,
version=version,
name=name,
last_modified=last_modified,
vistrail_id=vistrail_id,
id=id)
log.is_dirty = False
res[('log', id)] = log
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
table = 'log_tbl'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
name = self.convertFromDB(row[3], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
vistrail_id = self.convertFromDB(row[5], 'long', 'int')
log = DBLog(entity_type=entity_type,
version=version,
name=name,
last_modified=last_modified,
vistrail_id=vistrail_id,
id=id)
log.is_dirty = False
res[('log', id)] = log
return res
def from_sql_fast(self, obj, all_objects):
pass
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
table = 'log_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None:
columnMap['vistrail_id'] = \
self.convertToDB(obj.db_vistrail_id, 'long', 'int')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
table = 'log_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None:
columnMap['vistrail_id'] = \
self.convertToDB(obj.db_vistrail_id, 'long', 'int')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_workflow_execs:
child.db_log = obj.db_id
for child in obj.db_machines:
child.db_log = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'log_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBPEParameterSQLDAOBase(SQLDAO):
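    # DAO for the 'pe_parameter' table: maps DBPEParameter rows and
    # re-attaches each parameter to its parent pe_function in
    # from_sql_fast.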
def __init__(self, daoList):
self.daoList = daoList
self.table = 'pe_parameter'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_parameter'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
interpolator = self.convertFromDB(row[2], 'str', 'varchar(255)')
value = self.convertFromDB(row[3], 'str', 'mediumtext')
dimension = self.convertFromDB(row[4], 'long', 'int')
parentType = self.convertFromDB(row[5], 'str', 'char(32)')
pe_function = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
pe_parameter = DBPEParameter(pos=pos,
interpolator=interpolator,
value=value,
dimension=dimension,
id=id)
pe_parameter.db_parentType = parentType
pe_parameter.db_pe_function = pe_function
pe_parameter.db_entity_id = entity_id
pe_parameter.db_entity_type = entity_type
pe_parameter.is_dirty = False
res[('pe_parameter', id)] = pe_parameter
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_parameter'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
interpolator = self.convertFromDB(row[2], 'str', 'varchar(255)')
value = self.convertFromDB(row[3], 'str', 'mediumtext')
dimension = self.convertFromDB(row[4], 'long', 'int')
parentType = self.convertFromDB(row[5], 'str', 'char(32)')
pe_function = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
pe_parameter = DBPEParameter(pos=pos,
interpolator=interpolator,
value=value,
dimension=dimension,
id=id)
pe_parameter.db_parentType = parentType
pe_parameter.db_pe_function = pe_function
pe_parameter.db_entity_id = entity_id
pe_parameter.db_entity_type = entity_type
pe_parameter.is_dirty = False
res[('pe_parameter', id)] = pe_parameter
return res
def from_sql_fast(self, obj, all_objects):
if ('pe_function', obj.db_pe_function) in all_objects:
p = all_objects[('pe_function', obj.db_pe_function)]
p.db_add_parameter(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_interpolator') and obj.db_interpolator is not None:
columnMap['interpolator'] = \
self.convertToDB(obj.db_interpolator, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'mediumtext')
if hasattr(obj, 'db_dimension') and obj.db_dimension is not None:
columnMap['dimension'] = \
self.convertToDB(obj.db_dimension, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_pe_function') and obj.db_pe_function is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_pe_function, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_interpolator') and obj.db_interpolator is not None:
columnMap['interpolator'] = \
self.convertToDB(obj.db_interpolator, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'mediumtext')
if hasattr(obj, 'db_dimension') and obj.db_dimension is not None:
columnMap['dimension'] = \
self.convertToDB(obj.db_dimension, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_pe_function') and obj.db_pe_function is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_pe_function, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'pe_parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBWorkflowExecSQLDAOBase(SQLDAO):
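    # DAO for the 'workflow_exec' table: maps DBWorkflowExec rows and
    # links each workflow execution back to its log in from_sql_fast.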
def __init__(self, daoList):
self.daoList = daoList
self.table = 'workflow_exec'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'user', 'ip', 'session', 'vt_version', 'ts_start', 'ts_end', 'parent_id', 'parent_type', 'parent_version', 'completed', 'name', 'log_id', 'entity_id', 'entity_type']
table = 'workflow_exec'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
user = self.convertFromDB(row[1], 'str', 'varchar(255)')
ip = self.convertFromDB(row[2], 'str', 'varchar(255)')
session = self.convertFromDB(row[3], 'long', 'int')
vt_version = self.convertFromDB(row[4], 'str', 'varchar(255)')
ts_start = self.convertFromDB(row[5], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[6], 'datetime', 'datetime')
parent_id = self.convertFromDB(row[7], 'long', 'int')
parent_type = self.convertFromDB(row[8], 'str', 'varchar(255)')
parent_version = self.convertFromDB(row[9], 'long', 'int')
completed = self.convertFromDB(row[10], 'int', 'int')
name = self.convertFromDB(row[11], 'str', 'varchar(255)')
log = self.convertFromDB(row[12], 'long', 'int')
entity_id = self.convertFromDB(row[13], 'long', 'int')
entity_type = self.convertFromDB(row[14], 'str', 'char(16)')
workflow_exec = DBWorkflowExec(user=user,
ip=ip,
session=session,
vt_version=vt_version,
ts_start=ts_start,
ts_end=ts_end,
parent_id=parent_id,
parent_type=parent_type,
parent_version=parent_version,
completed=completed,
name=name,
id=id)
workflow_exec.db_log = log
workflow_exec.db_entity_id = entity_id
workflow_exec.db_entity_type = entity_type
workflow_exec.is_dirty = False
res[('workflow_exec', id)] = workflow_exec
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'user', 'ip', 'session', 'vt_version', 'ts_start', 'ts_end', 'parent_id', 'parent_type', 'parent_version', 'completed', 'name', 'log_id', 'entity_id', 'entity_type']
table = 'workflow_exec'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
user = self.convertFromDB(row[1], 'str', 'varchar(255)')
ip = self.convertFromDB(row[2], 'str', 'varchar(255)')
session = self.convertFromDB(row[3], 'long', 'int')
vt_version = self.convertFromDB(row[4], 'str', 'varchar(255)')
ts_start = self.convertFromDB(row[5], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[6], 'datetime', 'datetime')
parent_id = self.convertFromDB(row[7], 'long', 'int')
parent_type = self.convertFromDB(row[8], 'str', 'varchar(255)')
parent_version = self.convertFromDB(row[9], 'long', 'int')
completed = self.convertFromDB(row[10], 'int', 'int')
name = self.convertFromDB(row[11], 'str', 'varchar(255)')
log = self.convertFromDB(row[12], 'long', 'int')
entity_id = self.convertFromDB(row[13], 'long', 'int')
entity_type = self.convertFromDB(row[14], 'str', 'char(16)')
workflow_exec = DBWorkflowExec(user=user,
ip=ip,
session=session,
vt_version=vt_version,
ts_start=ts_start,
ts_end=ts_end,
parent_id=parent_id,
parent_type=parent_type,
parent_version=parent_version,
completed=completed,
name=name,
id=id)
workflow_exec.db_log = log
workflow_exec.db_entity_id = entity_id
workflow_exec.db_entity_type = entity_type
workflow_exec.is_dirty = False
res[('workflow_exec', id)] = workflow_exec
return res
def from_sql_fast(self, obj, all_objects):
if ('log', obj.db_log) in all_objects:
p = all_objects[('log', obj.db_log)]
p.db_add_workflow_exec(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'user', 'ip', 'session', 'vt_version', 'ts_start', 'ts_end', 'parent_id', 'parent_type', 'parent_version', 'completed', 'name', 'log_id', 'entity_id', 'entity_type']
table = 'workflow_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_ip') and obj.db_ip is not None:
columnMap['ip'] = \
self.convertToDB(obj.db_ip, 'str', 'varchar(255)')
if hasattr(obj, 'db_session') and obj.db_session is not None:
columnMap['session'] = \
self.convertToDB(obj.db_session, 'long', 'int')
if hasattr(obj, 'db_vt_version') and obj.db_vt_version is not None:
columnMap['vt_version'] = \
self.convertToDB(obj.db_vt_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_parent_id') and obj.db_parent_id is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent_id, 'long', 'int')
if hasattr(obj, 'db_parent_type') and obj.db_parent_type is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parent_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_parent_version') and obj.db_parent_version is not None:
columnMap['parent_version'] = \
self.convertToDB(obj.db_parent_version, 'long', 'int')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_log') and obj.db_log is not None:
columnMap['log_id'] = \
self.convertToDB(obj.db_log, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'user', 'ip', 'session', 'vt_version', 'ts_start', 'ts_end', 'parent_id', 'parent_type', 'parent_version', 'completed', 'name', 'log_id', 'entity_id', 'entity_type']
table = 'workflow_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_ip') and obj.db_ip is not None:
columnMap['ip'] = \
self.convertToDB(obj.db_ip, 'str', 'varchar(255)')
if hasattr(obj, 'db_session') and obj.db_session is not None:
columnMap['session'] = \
self.convertToDB(obj.db_session, 'long', 'int')
if hasattr(obj, 'db_vt_version') and obj.db_vt_version is not None:
columnMap['vt_version'] = \
self.convertToDB(obj.db_vt_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_parent_id') and obj.db_parent_id is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent_id, 'long', 'int')
if hasattr(obj, 'db_parent_type') and obj.db_parent_type is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parent_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_parent_version') and obj.db_parent_version is not None:
columnMap['parent_version'] = \
self.convertToDB(obj.db_parent_version, 'long', 'int')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_log') and obj.db_log is not None:
columnMap['log_id'] = \
self.convertToDB(obj.db_log, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_item_execs:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'workflow_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBLocationSQLDAOBase(SQLDAO):
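    """DAO for the 'location' table: builds DBLocation objects.  A location
    belongs to a module, abstraction, or group, or travels as the payload of
    an add/change operation (see from_sql_fast).
    """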
def __init__(self, daoList):
self.daoList = daoList
self.table = 'location'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'x', 'y', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'location'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
x = self.convertFromDB(row[1], 'float', 'DECIMAL(18,12)')
y = self.convertFromDB(row[2], 'float', 'DECIMAL(18,12)')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
location = DBLocation(x=x,
y=y,
id=id)
location.db_parentType = parentType
location.db_entity_id = entity_id
location.db_entity_type = entity_type
location.db_parent = parent
location.is_dirty = False
res[('location', id)] = location
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'x', 'y', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'location'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
x = self.convertFromDB(row[1], 'float', 'DECIMAL(18,12)')
y = self.convertFromDB(row[2], 'float', 'DECIMAL(18,12)')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
location = DBLocation(x=x,
y=y,
id=id)
location.db_parentType = parentType
location.db_entity_id = entity_id
location.db_entity_type = entity_type
location.db_parent = parent
location.is_dirty = False
res[('location', id)] = location
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'module':
p = all_objects[('module', obj.db_parent)]
p.db_add_location(obj)
elif obj.db_parentType == 'abstraction':
p = all_objects[('abstraction', obj.db_parent)]
p.db_add_location(obj)
elif obj.db_parentType == 'group':
p = all_objects[('group', obj.db_parent)]
p.db_add_location(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'x', 'y', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'location'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_x') and obj.db_x is not None:
columnMap['x'] = \
self.convertToDB(obj.db_x, 'float', 'DECIMAL(18,12)')
if hasattr(obj, 'db_y') and obj.db_y is not None:
columnMap['y'] = \
self.convertToDB(obj.db_y, 'float', 'DECIMAL(18,12)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'x', 'y', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'location'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_x') and obj.db_x is not None:
columnMap['x'] = \
self.convertToDB(obj.db_x, 'float', 'DECIMAL(18,12)')
if hasattr(obj, 'db_y') and obj.db_y is not None:
columnMap['y'] = \
self.convertToDB(obj.db_y, 'float', 'DECIMAL(18,12)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'location'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBFunctionSQLDAOBase(SQLDAO):
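    """DAO for the 'function' table: builds DBFunction objects, which attach
    to a module, abstraction, or group, or to an add/change operation;
    to_sql_fast() pushes the function's type/id down to its parameters.
    """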
def __init__(self, daoList):
self.daoList = daoList
self.table = 'function'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'pos', 'name', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'function'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
function = DBFunction(pos=pos,
name=name,
id=id)
function.db_parentType = parentType
function.db_entity_id = entity_id
function.db_entity_type = entity_type
function.db_parent = parent
function.is_dirty = False
res[('function', id)] = function
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'pos', 'name', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'function'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
function = DBFunction(pos=pos,
name=name,
id=id)
function.db_parentType = parentType
function.db_entity_id = entity_id
function.db_entity_type = entity_type
function.db_parent = parent
function.is_dirty = False
res[('function', id)] = function
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'module':
p = all_objects[('module', obj.db_parent)]
p.db_add_function(obj)
elif obj.db_parentType == 'abstraction':
p = all_objects[('abstraction', obj.db_parent)]
p.db_add_function(obj)
elif obj.db_parentType == 'group':
p = all_objects[('group', obj.db_parent)]
p.db_add_function(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'pos', 'name', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'function'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'pos', 'name', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'function'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_parameters:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'function'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBActionAnnotationSQLDAOBase(SQLDAO):
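    """DAO for the 'action_annotation' table: builds DBActionAnnotation
    objects owned by a DBVistrail.  The annotation key is stored in column
    'akey', presumably to sidestep the SQL reserved word KEY.
    """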
def __init__(self, daoList):
self.daoList = daoList
self.table = 'action_annotation'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action_annotation'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'varchar(8191)')
action_id = self.convertFromDB(row[3], 'long', 'int')
date = self.convertFromDB(row[4], 'datetime', 'datetime')
user = self.convertFromDB(row[5], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
actionAnnotation = DBActionAnnotation(key=key,
value=value,
action_id=action_id,
date=date,
user=user,
id=id)
actionAnnotation.db_vistrail = vistrail
actionAnnotation.db_entity_id = entity_id
actionAnnotation.db_entity_type = entity_type
actionAnnotation.is_dirty = False
res[('actionAnnotation', id)] = actionAnnotation
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action_annotation'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'varchar(8191)')
action_id = self.convertFromDB(row[3], 'long', 'int')
date = self.convertFromDB(row[4], 'datetime', 'datetime')
user = self.convertFromDB(row[5], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
actionAnnotation = DBActionAnnotation(key=key,
value=value,
action_id=action_id,
date=date,
user=user,
id=id)
actionAnnotation.db_vistrail = vistrail
actionAnnotation.db_entity_id = entity_id
actionAnnotation.db_entity_type = entity_type
actionAnnotation.is_dirty = False
res[('actionAnnotation', id)] = actionAnnotation
return res
def from_sql_fast(self, obj, all_objects):
if ('vistrail', obj.db_vistrail) in all_objects:
p = all_objects[('vistrail', obj.db_vistrail)]
p.db_add_actionAnnotation(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action_annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['akey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action_id, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action_annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['akey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action_id, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'action_annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBPluginDataSQLDAOBase(SQLDAO):
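    """DAO for the 'plugin_data' table: builds DBPluginData objects, which
    hang off a workflow or off an add/change operation (see from_sql_fast).
    """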
def __init__(self, daoList):
self.daoList = daoList
self.table = 'plugin_data'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'data', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'plugin_data'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
data = self.convertFromDB(row[1], 'str', 'varchar(8191)')
parentType = self.convertFromDB(row[2], 'str', 'char(32)')
entity_id = self.convertFromDB(row[3], 'long', 'int')
entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
parent = self.convertFromDB(row[5], 'long', 'long')
plugin_data = DBPluginData(data=data,
id=id)
plugin_data.db_parentType = parentType
plugin_data.db_entity_id = entity_id
plugin_data.db_entity_type = entity_type
plugin_data.db_parent = parent
plugin_data.is_dirty = False
res[('plugin_data', id)] = plugin_data
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'data', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'plugin_data'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
data = self.convertFromDB(row[1], 'str', 'varchar(8191)')
parentType = self.convertFromDB(row[2], 'str', 'char(32)')
entity_id = self.convertFromDB(row[3], 'long', 'int')
entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
parent = self.convertFromDB(row[5], 'long', 'long')
plugin_data = DBPluginData(data=data,
id=id)
plugin_data.db_parentType = parentType
plugin_data.db_entity_id = entity_id
plugin_data.db_entity_type = entity_type
plugin_data.db_parent = parent
plugin_data.is_dirty = False
res[('plugin_data', id)] = plugin_data
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_plugin_data(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'data', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'plugin_data'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_data') and obj.db_data is not None:
columnMap['data'] = \
self.convertToDB(obj.db_data, 'str', 'varchar(8191)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'data', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'plugin_data'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_data') and obj.db_data is not None:
columnMap['data'] = \
self.convertToDB(obj.db_data, 'str', 'varchar(8191)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'plugin_data'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBDeleteSQLDAOBase(SQLDAO):
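    """DAO for delete operations, stored in 'delete_tbl' (presumably named to
    avoid the SQL reserved word DELETE); each DBDelete is re-attached to its
    DBAction via db_action in from_sql_fast().
    """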
def __init__(self, daoList):
self.daoList = daoList
self.table = 'delete_tbl'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'delete_tbl'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
what = self.convertFromDB(row[1], 'str', 'varchar(255)')
objectId = self.convertFromDB(row[2], 'long', 'int')
parentObjId = self.convertFromDB(row[3], 'long', 'int')
parentObjType = self.convertFromDB(row[4], 'str', 'char(16)')
action = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
delete = DBDelete(what=what,
objectId=objectId,
parentObjId=parentObjId,
parentObjType=parentObjType,
id=id)
delete.db_action = action
delete.db_entity_id = entity_id
delete.db_entity_type = entity_type
delete.is_dirty = False
res[('delete', id)] = delete
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'delete_tbl'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
what = self.convertFromDB(row[1], 'str', 'varchar(255)')
objectId = self.convertFromDB(row[2], 'long', 'int')
parentObjId = self.convertFromDB(row[3], 'long', 'int')
parentObjType = self.convertFromDB(row[4], 'str', 'char(16)')
action = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
delete = DBDelete(what=what,
objectId=objectId,
parentObjId=parentObjId,
parentObjType=parentObjType,
id=id)
delete.db_action = action
delete.db_entity_id = entity_id
delete.db_entity_type = entity_type
delete.is_dirty = False
res[('delete', id)] = delete
return res
def from_sql_fast(self, obj, all_objects):
if ('action', obj.db_action) in all_objects:
p = all_objects[('action', obj.db_action)]
p.db_add_operation(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'delete_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_what') and obj.db_what is not None:
columnMap['what'] = \
self.convertToDB(obj.db_what, 'str', 'varchar(255)')
if hasattr(obj, 'db_objectId') and obj.db_objectId is not None:
columnMap['object_id'] = \
self.convertToDB(obj.db_objectId, 'long', 'int')
if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
columnMap['par_obj_id'] = \
self.convertToDB(obj.db_parentObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
columnMap['par_obj_type'] = \
self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
if hasattr(obj, 'db_action') and obj.db_action is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'delete_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_what') and obj.db_what is not None:
columnMap['what'] = \
self.convertToDB(obj.db_what, 'str', 'varchar(255)')
if hasattr(obj, 'db_objectId') and obj.db_objectId is not None:
columnMap['object_id'] = \
self.convertToDB(obj.db_objectId, 'long', 'int')
if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
columnMap['par_obj_id'] = \
self.convertToDB(obj.db_parentObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
columnMap['par_obj_type'] = \
self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
if hasattr(obj, 'db_action') and obj.db_action is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'delete_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBVistrailVariableSQLDAOBase(SQLDAO):
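    """DAO for the 'vistrail_variable' table.  Unlike most tables here, the
    primary key is the textual 'name' column, so updates and deletes key
    their WHERE clause on db_name rather than a numeric id.
    """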
def __init__(self, daoList):
self.daoList = daoList
self.table = 'vistrail_variable'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['name', 'uuid', 'package', 'module', 'namespace', 'value', 'parent_id', 'entity_id', 'entity_type']
table = 'vistrail_variable'
whereMap = global_props
orderBy = 'name'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
name = self.convertFromDB(row[0], 'str', 'varchar(255)')
uuid = self.convertFromDB(row[1], 'str', 'char(36)')
package = self.convertFromDB(row[2], 'str', 'varchar(255)')
module = self.convertFromDB(row[3], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[4], 'str', 'varchar(255)')
value = self.convertFromDB(row[5], 'str', 'varchar(8191)')
vistrail = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
vistrailVariable = DBVistrailVariable(uuid=uuid,
package=package,
module=module,
namespace=namespace,
value=value,
name=name)
vistrailVariable.db_vistrail = vistrail
vistrailVariable.db_entity_id = entity_id
vistrailVariable.db_entity_type = entity_type
vistrailVariable.is_dirty = False
res[('vistrailVariable', name)] = vistrailVariable
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['name', 'uuid', 'package', 'module', 'namespace', 'value', 'parent_id', 'entity_id', 'entity_type']
table = 'vistrail_variable'
whereMap = global_props
orderBy = 'name'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
name = self.convertFromDB(row[0], 'str', 'varchar(255)')
uuid = self.convertFromDB(row[1], 'str', 'char(36)')
package = self.convertFromDB(row[2], 'str', 'varchar(255)')
module = self.convertFromDB(row[3], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[4], 'str', 'varchar(255)')
value = self.convertFromDB(row[5], 'str', 'varchar(8191)')
vistrail = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
vistrailVariable = DBVistrailVariable(uuid=uuid,
package=package,
module=module,
namespace=namespace,
value=value,
name=name)
vistrailVariable.db_vistrail = vistrail
vistrailVariable.db_entity_id = entity_id
vistrailVariable.db_entity_type = entity_type
vistrailVariable.is_dirty = False
res[('vistrailVariable', name)] = vistrailVariable
return res
def from_sql_fast(self, obj, all_objects):
if ('vistrail', obj.db_vistrail) in all_objects:
p = all_objects[('vistrail', obj.db_vistrail)]
p.db_add_vistrailVariable(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['name', 'uuid', 'package', 'module', 'namespace', 'value', 'parent_id', 'entity_id', 'entity_type']
table = 'vistrail_variable'
whereMap = {}
whereMap.update(global_props)
if obj.db_name is not None:
keyStr = self.convertToDB(obj.db_name, 'str', 'varchar(255)')
whereMap['name'] = keyStr
columnMap = {}
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_uuid') and obj.db_uuid is not None:
columnMap['uuid'] = \
self.convertToDB(obj.db_uuid, 'str', 'char(36)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(255)')
if hasattr(obj, 'db_module') and obj.db_module is not None:
columnMap['module'] = \
self.convertToDB(obj.db_module, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['name', 'uuid', 'package', 'module', 'namespace', 'value', 'parent_id', 'entity_id', 'entity_type']
table = 'vistrail_variable'
whereMap = {}
whereMap.update(global_props)
if obj.db_name is not None:
keyStr = self.convertToDB(obj.db_name, 'str', 'varchar(255)')
whereMap['name'] = keyStr
columnMap = {}
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_uuid') and obj.db_uuid is not None:
columnMap['uuid'] = \
self.convertToDB(obj.db_uuid, 'str', 'char(36)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(255)')
if hasattr(obj, 'db_module') and obj.db_module is not None:
columnMap['module'] = \
self.convertToDB(obj.db_module, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'vistrail_variable'
whereMap = {}
whereMap.update(global_props)
if obj.db_name is not None:
keyStr = self.convertToDB(obj.db_name, 'str', 'varchar(255)')
whereMap['name'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBModuleDescriptorSQLDAOBase(SQLDAO):
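    """DAO for the 'module_descriptor' table: builds DBModuleDescriptor
    objects owned by a DBPackage.  Note that db_package ends up carrying the
    owning package's numeric id (the 'parent_id' column), while the textual
    'package' column feeds the constructor; see get_sql_columns().
    """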
def __init__(self, daoList):
self.daoList = daoList
self.table = 'module_descriptor'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
table = 'module_descriptor'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
package = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package_version = self.convertFromDB(row[4], 'str', 'varchar(255)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
base_descriptor_id = self.convertFromDB(row[6], 'long', 'int')
            # Read the 'parent_id' column into its own variable so it no
            # longer shadows the textual 'package' column read above;
            # db_package ends up holding the owning DBPackage's id, which
            # from_sql_fast() uses to re-attach this descriptor.
            parent_package_id = self.convertFromDB(row[7], 'long', 'int')
            entity_id = self.convertFromDB(row[8], 'long', 'int')
            entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
            module_descriptor = DBModuleDescriptor(name=name,
                                                   package=package,
                                                   namespace=namespace,
                                                   package_version=package_version,
                                                   version=version,
                                                   base_descriptor_id=base_descriptor_id,
                                                   id=id)
            module_descriptor.db_package = parent_package_id
module_descriptor.db_entity_id = entity_id
module_descriptor.db_entity_type = entity_type
module_descriptor.is_dirty = False
res[('module_descriptor', id)] = module_descriptor
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
table = 'module_descriptor'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
package = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package_version = self.convertFromDB(row[4], 'str', 'varchar(255)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
base_descriptor_id = self.convertFromDB(row[6], 'long', 'int')
            # As in get_sql_columns(), keep the 'parent_id' value in its own
            # variable instead of clobbering the textual 'package' column.
            parent_package_id = self.convertFromDB(row[7], 'long', 'int')
            entity_id = self.convertFromDB(row[8], 'long', 'int')
            entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
            module_descriptor = DBModuleDescriptor(name=name,
                                                   package=package,
                                                   namespace=namespace,
                                                   package_version=package_version,
                                                   version=version,
                                                   base_descriptor_id=base_descriptor_id,
                                                   id=id)
            module_descriptor.db_package = parent_package_id
module_descriptor.db_entity_id = entity_id
module_descriptor.db_entity_type = entity_type
module_descriptor.is_dirty = False
res[('module_descriptor', id)] = module_descriptor
return res
def from_sql_fast(self, obj, all_objects):
if ('package', obj.db_package) in all_objects:
p = all_objects[('package', obj.db_package)]
p.db_add_module_descriptor(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
table = 'module_descriptor'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package_version') and obj.db_package_version is not None:
columnMap['package_version'] = \
self.convertToDB(obj.db_package_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_base_descriptor_id') and obj.db_base_descriptor_id is not None:
columnMap['base_descriptor_id'] = \
self.convertToDB(obj.db_base_descriptor_id, 'long', 'int')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_package, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
table = 'module_descriptor'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package_version') and obj.db_package_version is not None:
columnMap['package_version'] = \
self.convertToDB(obj.db_package_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_base_descriptor_id') and obj.db_base_descriptor_id is not None:
columnMap['base_descriptor_id'] = \
self.convertToDB(obj.db_base_descriptor_id, 'long', 'int')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_package, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_portSpecs:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'module_descriptor'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBTagSQLDAOBase(SQLDAO):
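    """DAO for the 'tag' table: builds DBTag objects and re-attaches them to
    their owning DBVistrail via db_vistrail in from_sql_fast().
    """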
def __init__(self, daoList):
self.daoList = daoList
self.table = 'tag'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'tag'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[2], 'long', 'int')
entity_id = self.convertFromDB(row[3], 'long', 'int')
entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
tag = DBTag(name=name,
id=id)
tag.db_vistrail = vistrail
tag.db_entity_id = entity_id
tag.db_entity_type = entity_type
tag.is_dirty = False
res[('tag', id)] = tag
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'tag'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[2], 'long', 'int')
entity_id = self.convertFromDB(row[3], 'long', 'int')
entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
tag = DBTag(name=name,
id=id)
tag.db_vistrail = vistrail
tag.db_entity_id = entity_id
tag.db_entity_type = entity_type
tag.is_dirty = False
res[('tag', id)] = tag
return res
def from_sql_fast(self, obj, all_objects):
if ('vistrail', obj.db_vistrail) in all_objects:
p = all_objects[('vistrail', obj.db_vistrail)]
p.db_add_tag(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'tag'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'tag'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'tag'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
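# DAO for the 'port_spec_item' table: converts rows to DBPortSpecItem objects,
# links each item to its parent DBPortSpec via the parent_id column, and builds
# the corresponding SELECT/INSERT/UPDATE/DELETE statements.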
class DBPortSpecItemSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'port_spec_item'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'pos', 'module', 'package', 'namespace', 'label', '_default', '_values', 'entry_type', 'parent_id', 'entity_id', 'entity_type']
table = 'port_spec_item'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
module = self.convertFromDB(row[2], 'str', 'varchar(255)')
package = self.convertFromDB(row[3], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[4], 'str', 'varchar(255)')
label = self.convertFromDB(row[5], 'str', 'varchar(4095)')
default = self.convertFromDB(row[6], 'str', 'varchar(4095)')
values = self.convertFromDB(row[7], 'str', 'mediumtext')
entry_type = self.convertFromDB(row[8], 'str', 'varchar(255)')
portSpec = self.convertFromDB(row[9], 'long', 'int')
entity_id = self.convertFromDB(row[10], 'long', 'int')
entity_type = self.convertFromDB(row[11], 'str', 'char(16)')
portSpecItem = DBPortSpecItem(pos=pos,
module=module,
package=package,
namespace=namespace,
label=label,
default=default,
values=values,
entry_type=entry_type,
id=id)
portSpecItem.db_portSpec = portSpec
portSpecItem.db_entity_id = entity_id
portSpecItem.db_entity_type = entity_type
portSpecItem.is_dirty = False
res[('portSpecItem', id)] = portSpecItem
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'pos', 'module', 'package', 'namespace', 'label', '_default', '_values', 'entry_type', 'parent_id', 'entity_id', 'entity_type']
table = 'port_spec_item'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
module = self.convertFromDB(row[2], 'str', 'varchar(255)')
package = self.convertFromDB(row[3], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[4], 'str', 'varchar(255)')
label = self.convertFromDB(row[5], 'str', 'varchar(4095)')
default = self.convertFromDB(row[6], 'str', 'varchar(4095)')
values = self.convertFromDB(row[7], 'str', 'mediumtext')
entry_type = self.convertFromDB(row[8], 'str', 'varchar(255)')
portSpec = self.convertFromDB(row[9], 'long', 'int')
entity_id = self.convertFromDB(row[10], 'long', 'int')
entity_type = self.convertFromDB(row[11], 'str', 'char(16)')
portSpecItem = DBPortSpecItem(pos=pos,
module=module,
package=package,
namespace=namespace,
label=label,
default=default,
values=values,
entry_type=entry_type,
id=id)
portSpecItem.db_portSpec = portSpec
portSpecItem.db_entity_id = entity_id
portSpecItem.db_entity_type = entity_type
portSpecItem.is_dirty = False
res[('portSpecItem', id)] = portSpecItem
return res
def from_sql_fast(self, obj, all_objects):
if ('portSpec', obj.db_portSpec) in all_objects:
p = all_objects[('portSpec', obj.db_portSpec)]
p.db_add_portSpecItem(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'pos', 'module', 'package', 'namespace', 'label', '_default', '_values', 'entry_type', 'parent_id', 'entity_id', 'entity_type']
table = 'port_spec_item'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_module') and obj.db_module is not None:
columnMap['module'] = \
self.convertToDB(obj.db_module, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_label') and obj.db_label is not None:
columnMap['label'] = \
self.convertToDB(obj.db_label, 'str', 'varchar(4095)')
if hasattr(obj, 'db_default') and obj.db_default is not None:
columnMap['_default'] = \
self.convertToDB(obj.db_default, 'str', 'varchar(4095)')
if hasattr(obj, 'db_values') and obj.db_values is not None:
columnMap['_values'] = \
self.convertToDB(obj.db_values, 'str', 'mediumtext')
if hasattr(obj, 'db_entry_type') and obj.db_entry_type is not None:
columnMap['entry_type'] = \
self.convertToDB(obj.db_entry_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_portSpec') and obj.db_portSpec is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_portSpec, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'pos', 'module', 'package', 'namespace', 'label', '_default', '_values', 'entry_type', 'parent_id', 'entity_id', 'entity_type']
table = 'port_spec_item'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_module') and obj.db_module is not None:
columnMap['module'] = \
self.convertToDB(obj.db_module, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_label') and obj.db_label is not None:
columnMap['label'] = \
self.convertToDB(obj.db_label, 'str', 'varchar(4095)')
if hasattr(obj, 'db_default') and obj.db_default is not None:
columnMap['_default'] = \
self.convertToDB(obj.db_default, 'str', 'varchar(4095)')
if hasattr(obj, 'db_values') and obj.db_values is not None:
columnMap['_values'] = \
self.convertToDB(obj.db_values, 'str', 'mediumtext')
if hasattr(obj, 'db_entry_type') and obj.db_entry_type is not None:
columnMap['entry_type'] = \
self.convertToDB(obj.db_entry_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_portSpec') and obj.db_portSpec is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_portSpec, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'port_spec_item'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
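# DAO for the 'mashup_component' table: converts rows to DBMashupComponent
# objects, attaches each component to its parent mashup alias via the alias_id
# column, and builds the corresponding SQL commands.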
class DBMashupComponentSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'mashup_component'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id', 'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal', 'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq', 'parent', 'alias_id', 'entity_id', 'entity_type']
table = 'mashup_component'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
vtid = self.convertFromDB(row[1], 'long', 'int')
vttype = self.convertFromDB(row[2], 'str', 'varchar(255)')
vtparent_type = self.convertFromDB(row[3], 'str', 'char(32)')
vtparent_id = self.convertFromDB(row[4], 'long', 'int')
vtpos = self.convertFromDB(row[5], 'long', 'int')
vtmid = self.convertFromDB(row[6], 'long', 'int')
pos = self.convertFromDB(row[7], 'long', 'int')
type = self.convertFromDB(row[8], 'str', 'varchar(255)')
val = self.convertFromDB(row[9], 'str', 'mediumtext')
minVal = self.convertFromDB(row[10], 'str', 'varchar(255)')
maxVal = self.convertFromDB(row[11], 'str', 'varchar(255)')
stepSize = self.convertFromDB(row[12], 'str', 'varchar(255)')
strvaluelist = self.convertFromDB(row[13], 'str', 'mediumtext')
widget = self.convertFromDB(row[14], 'str', 'varchar(255)')
seq = self.convertFromDB(row[15], 'int', 'int')
parent = self.convertFromDB(row[16], 'str', 'varchar(255)')
mashup_alias = self.convertFromDB(row[17], 'long', 'int')
entity_id = self.convertFromDB(row[18], 'long', 'int')
entity_type = self.convertFromDB(row[19], 'str', 'char(16)')
mashup_component = DBMashupComponent(vtid=vtid,
vttype=vttype,
vtparent_type=vtparent_type,
vtparent_id=vtparent_id,
vtpos=vtpos,
vtmid=vtmid,
pos=pos,
type=type,
val=val,
minVal=minVal,
maxVal=maxVal,
stepSize=stepSize,
strvaluelist=strvaluelist,
widget=widget,
seq=seq,
parent=parent,
id=id)
mashup_component.db_mashup_alias = mashup_alias
mashup_component.db_entity_id = entity_id
mashup_component.db_entity_type = entity_type
mashup_component.is_dirty = False
res[('mashup_component', id)] = mashup_component
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id', 'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal', 'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq', 'parent', 'alias_id', 'entity_id', 'entity_type']
table = 'mashup_component'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
vtid = self.convertFromDB(row[1], 'long', 'int')
vttype = self.convertFromDB(row[2], 'str', 'varchar(255)')
vtparent_type = self.convertFromDB(row[3], 'str', 'char(32)')
vtparent_id = self.convertFromDB(row[4], 'long', 'int')
vtpos = self.convertFromDB(row[5], 'long', 'int')
vtmid = self.convertFromDB(row[6], 'long', 'int')
pos = self.convertFromDB(row[7], 'long', 'int')
type = self.convertFromDB(row[8], 'str', 'varchar(255)')
val = self.convertFromDB(row[9], 'str', 'mediumtext')
minVal = self.convertFromDB(row[10], 'str', 'varchar(255)')
maxVal = self.convertFromDB(row[11], 'str', 'varchar(255)')
stepSize = self.convertFromDB(row[12], 'str', 'varchar(255)')
strvaluelist = self.convertFromDB(row[13], 'str', 'mediumtext')
widget = self.convertFromDB(row[14], 'str', 'varchar(255)')
seq = self.convertFromDB(row[15], 'int', 'int')
parent = self.convertFromDB(row[16], 'str', 'varchar(255)')
mashup_alias = self.convertFromDB(row[17], 'long', 'int')
entity_id = self.convertFromDB(row[18], 'long', 'int')
entity_type = self.convertFromDB(row[19], 'str', 'char(16)')
mashup_component = DBMashupComponent(vtid=vtid,
vttype=vttype,
vtparent_type=vtparent_type,
vtparent_id=vtparent_id,
vtpos=vtpos,
vtmid=vtmid,
pos=pos,
type=type,
val=val,
minVal=minVal,
maxVal=maxVal,
stepSize=stepSize,
strvaluelist=strvaluelist,
widget=widget,
seq=seq,
parent=parent,
id=id)
mashup_component.db_mashup_alias = mashup_alias
mashup_component.db_entity_id = entity_id
mashup_component.db_entity_type = entity_type
mashup_component.is_dirty = False
res[('mashup_component', id)] = mashup_component
return res
def from_sql_fast(self, obj, all_objects):
if ('mashup_alias', obj.db_mashup_alias) in all_objects:
p = all_objects[('mashup_alias', obj.db_mashup_alias)]
p.db_add_component(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id', 'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal', 'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq', 'parent', 'alias_id', 'entity_id', 'entity_type']
table = 'mashup_component'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_vtid') and obj.db_vtid is not None:
columnMap['vtid'] = \
self.convertToDB(obj.db_vtid, 'long', 'int')
if hasattr(obj, 'db_vttype') and obj.db_vttype is not None:
columnMap['vttype'] = \
self.convertToDB(obj.db_vttype, 'str', 'varchar(255)')
if hasattr(obj, 'db_vtparent_type') and obj.db_vtparent_type is not None:
columnMap['vtparent_type'] = \
self.convertToDB(obj.db_vtparent_type, 'str', 'char(32)')
if hasattr(obj, 'db_vtparent_id') and obj.db_vtparent_id is not None:
columnMap['vtparent_id'] = \
self.convertToDB(obj.db_vtparent_id, 'long', 'int')
if hasattr(obj, 'db_vtpos') and obj.db_vtpos is not None:
columnMap['vtpos'] = \
self.convertToDB(obj.db_vtpos, 'long', 'int')
if hasattr(obj, 'db_vtmid') and obj.db_vtmid is not None:
columnMap['vtmid'] = \
self.convertToDB(obj.db_vtmid, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_val') and obj.db_val is not None:
columnMap['val'] = \
self.convertToDB(obj.db_val, 'str', 'mediumtext')
if hasattr(obj, 'db_minVal') and obj.db_minVal is not None:
columnMap['minVal'] = \
self.convertToDB(obj.db_minVal, 'str', 'varchar(255)')
if hasattr(obj, 'db_maxVal') and obj.db_maxVal is not None:
columnMap['maxVal'] = \
self.convertToDB(obj.db_maxVal, 'str', 'varchar(255)')
if hasattr(obj, 'db_stepSize') and obj.db_stepSize is not None:
columnMap['stepSize'] = \
self.convertToDB(obj.db_stepSize, 'str', 'varchar(255)')
if hasattr(obj, 'db_strvaluelist') and obj.db_strvaluelist is not None:
columnMap['strvaluelist'] = \
self.convertToDB(obj.db_strvaluelist, 'str', 'mediumtext')
if hasattr(obj, 'db_widget') and obj.db_widget is not None:
columnMap['widget'] = \
self.convertToDB(obj.db_widget, 'str', 'varchar(255)')
if hasattr(obj, 'db_seq') and obj.db_seq is not None:
columnMap['seq'] = \
self.convertToDB(obj.db_seq, 'int', 'int')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent'] = \
self.convertToDB(obj.db_parent, 'str', 'varchar(255)')
if hasattr(obj, 'db_mashup_alias') and obj.db_mashup_alias is not None:
columnMap['alias_id'] = \
self.convertToDB(obj.db_mashup_alias, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id', 'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal', 'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq', 'parent', 'alias_id', 'entity_id', 'entity_type']
table = 'mashup_component'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_vtid') and obj.db_vtid is not None:
columnMap['vtid'] = \
self.convertToDB(obj.db_vtid, 'long', 'int')
if hasattr(obj, 'db_vttype') and obj.db_vttype is not None:
columnMap['vttype'] = \
self.convertToDB(obj.db_vttype, 'str', 'varchar(255)')
if hasattr(obj, 'db_vtparent_type') and obj.db_vtparent_type is not None:
columnMap['vtparent_type'] = \
self.convertToDB(obj.db_vtparent_type, 'str', 'char(32)')
if hasattr(obj, 'db_vtparent_id') and obj.db_vtparent_id is not None:
columnMap['vtparent_id'] = \
self.convertToDB(obj.db_vtparent_id, 'long', 'int')
if hasattr(obj, 'db_vtpos') and obj.db_vtpos is not None:
columnMap['vtpos'] = \
self.convertToDB(obj.db_vtpos, 'long', 'int')
if hasattr(obj, 'db_vtmid') and obj.db_vtmid is not None:
columnMap['vtmid'] = \
self.convertToDB(obj.db_vtmid, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_val') and obj.db_val is not None:
columnMap['val'] = \
self.convertToDB(obj.db_val, 'str', 'mediumtext')
if hasattr(obj, 'db_minVal') and obj.db_minVal is not None:
columnMap['minVal'] = \
self.convertToDB(obj.db_minVal, 'str', 'varchar(255)')
if hasattr(obj, 'db_maxVal') and obj.db_maxVal is not None:
columnMap['maxVal'] = \
self.convertToDB(obj.db_maxVal, 'str', 'varchar(255)')
if hasattr(obj, 'db_stepSize') and obj.db_stepSize is not None:
columnMap['stepSize'] = \
self.convertToDB(obj.db_stepSize, 'str', 'varchar(255)')
if hasattr(obj, 'db_strvaluelist') and obj.db_strvaluelist is not None:
columnMap['strvaluelist'] = \
self.convertToDB(obj.db_strvaluelist, 'str', 'mediumtext')
if hasattr(obj, 'db_widget') and obj.db_widget is not None:
columnMap['widget'] = \
self.convertToDB(obj.db_widget, 'str', 'varchar(255)')
if hasattr(obj, 'db_seq') and obj.db_seq is not None:
columnMap['seq'] = \
self.convertToDB(obj.db_seq, 'int', 'int')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent'] = \
self.convertToDB(obj.db_parent, 'str', 'varchar(255)')
if hasattr(obj, 'db_mashup_alias') and obj.db_mashup_alias is not None:
columnMap['alias_id'] = \
self.convertToDB(obj.db_mashup_alias, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'mashup_component'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
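# DAO for the 'mashup' table: converts rows to DBMashup objects, attaches each
# mashup to its parent mashup_action via parent_id, and propagates the mashup id
# to its child aliases in to_sql_fast before they are written.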
class DBMashupSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'mashup'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'name', 'version', 'type', 'vtid', 'layout', 'geometry', 'has_seq', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
version = self.convertFromDB(row[2], 'long', 'int')
type = self.convertFromDB(row[3], 'str', 'varchar(255)')
vtid = self.convertFromDB(row[4], 'long', 'int')
layout = self.convertFromDB(row[5], 'str', 'mediumtext')
geometry = self.convertFromDB(row[6], 'str', 'mediumtext')
has_seq = self.convertFromDB(row[7], 'int', 'int')
parent = self.convertFromDB(row[8], 'long', 'int')
entity_id = self.convertFromDB(row[9], 'long', 'int')
entity_type = self.convertFromDB(row[10], 'str', 'char(16)')
mashup = DBMashup(name=name,
version=version,
type=type,
vtid=vtid,
layout=layout,
geometry=geometry,
has_seq=has_seq,
id=id)
mashup.db_parent = parent
mashup.db_entity_id = entity_id
mashup.db_entity_type = entity_type
mashup.is_dirty = False
res[('mashup', id)] = mashup
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'name', 'version', 'type', 'vtid', 'layout', 'geometry', 'has_seq', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
version = self.convertFromDB(row[2], 'long', 'int')
type = self.convertFromDB(row[3], 'str', 'varchar(255)')
vtid = self.convertFromDB(row[4], 'long', 'int')
layout = self.convertFromDB(row[5], 'str', 'mediumtext')
geometry = self.convertFromDB(row[6], 'str', 'mediumtext')
has_seq = self.convertFromDB(row[7], 'int', 'int')
parent = self.convertFromDB(row[8], 'long', 'int')
entity_id = self.convertFromDB(row[9], 'long', 'int')
entity_type = self.convertFromDB(row[10], 'str', 'char(16)')
mashup = DBMashup(name=name,
version=version,
type=type,
vtid=vtid,
layout=layout,
geometry=geometry,
has_seq=has_seq,
id=id)
mashup.db_parent = parent
mashup.db_entity_id = entity_id
mashup.db_entity_type = entity_type
mashup.is_dirty = False
res[('mashup', id)] = mashup
return res
def from_sql_fast(self, obj, all_objects):
if ('mashup_action', obj.db_parent) in all_objects:
p = all_objects[('mashup_action', obj.db_parent)]
p.db_add_mashup(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'version', 'type', 'vtid', 'layout', 'geometry', 'has_seq', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'long', 'int')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_vtid') and obj.db_vtid is not None:
columnMap['vtid'] = \
self.convertToDB(obj.db_vtid, 'long', 'int')
if hasattr(obj, 'db_layout') and obj.db_layout is not None:
columnMap['layout'] = \
self.convertToDB(obj.db_layout, 'str', 'mediumtext')
if hasattr(obj, 'db_geometry') and obj.db_geometry is not None:
columnMap['geometry'] = \
self.convertToDB(obj.db_geometry, 'str', 'mediumtext')
if hasattr(obj, 'db_has_seq') and obj.db_has_seq is not None:
columnMap['has_seq'] = \
self.convertToDB(obj.db_has_seq, 'int', 'int')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'version', 'type', 'vtid', 'layout', 'geometry', 'has_seq', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'long', 'int')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_vtid') and obj.db_vtid is not None:
columnMap['vtid'] = \
self.convertToDB(obj.db_vtid, 'long', 'int')
if hasattr(obj, 'db_layout') and obj.db_layout is not None:
columnMap['layout'] = \
self.convertToDB(obj.db_layout, 'str', 'mediumtext')
if hasattr(obj, 'db_geometry') and obj.db_geometry is not None:
columnMap['geometry'] = \
self.convertToDB(obj.db_geometry, 'str', 'mediumtext')
if hasattr(obj, 'db_has_seq') and obj.db_has_seq is not None:
columnMap['has_seq'] = \
self.convertToDB(obj.db_has_seq, 'int', 'int')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_aliases:
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'mashup'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
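# DAO for the 'machine' table: converts rows to DBMachine objects, attaches each
# machine to its parent DBLog via the log_id column, and builds the matching
# SELECT/INSERT/UPDATE/DELETE statements.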
class DBMachineSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'machine'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'name', 'os', 'architecture', 'processor', 'ram', 'vt_id', 'log_id', 'entity_id', 'entity_type']
table = 'machine'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
os = self.convertFromDB(row[2], 'str', 'varchar(255)')
architecture = self.convertFromDB(row[3], 'str', 'varchar(255)')
processor = self.convertFromDB(row[4], 'str', 'varchar(255)')
ram = self.convertFromDB(row[5], 'int', 'bigint')
vistrailId = self.convertFromDB(row[6], 'long', 'int')
log = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
machine = DBMachine(name=name,
os=os,
architecture=architecture,
processor=processor,
ram=ram,
id=id)
machine.db_vistrailId = vistrailId
machine.db_log = log
machine.db_entity_id = entity_id
machine.db_entity_type = entity_type
machine.is_dirty = False
res[('machine', id)] = machine
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'name', 'os', 'architecture', 'processor', 'ram', 'vt_id', 'log_id', 'entity_id', 'entity_type']
table = 'machine'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
os = self.convertFromDB(row[2], 'str', 'varchar(255)')
architecture = self.convertFromDB(row[3], 'str', 'varchar(255)')
processor = self.convertFromDB(row[4], 'str', 'varchar(255)')
ram = self.convertFromDB(row[5], 'int', 'bigint')
vistrailId = self.convertFromDB(row[6], 'long', 'int')
log = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
machine = DBMachine(name=name,
os=os,
architecture=architecture,
processor=processor,
ram=ram,
id=id)
machine.db_vistrailId = vistrailId
machine.db_log = log
machine.db_entity_id = entity_id
machine.db_entity_type = entity_type
machine.is_dirty = False
res[('machine', id)] = machine
return res
def from_sql_fast(self, obj, all_objects):
if ('log', obj.db_log) in all_objects:
p = all_objects[('log', obj.db_log)]
p.db_add_machine(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'os', 'architecture', 'processor', 'ram', 'vt_id', 'log_id', 'entity_id', 'entity_type']
table = 'machine'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_os') and obj.db_os is not None:
columnMap['os'] = \
self.convertToDB(obj.db_os, 'str', 'varchar(255)')
if hasattr(obj, 'db_architecture') and obj.db_architecture is not None:
columnMap['architecture'] = \
self.convertToDB(obj.db_architecture, 'str', 'varchar(255)')
if hasattr(obj, 'db_processor') and obj.db_processor is not None:
columnMap['processor'] = \
self.convertToDB(obj.db_processor, 'str', 'varchar(255)')
if hasattr(obj, 'db_ram') and obj.db_ram is not None:
columnMap['ram'] = \
self.convertToDB(obj.db_ram, 'int', 'bigint')
if hasattr(obj, 'db_vistrailId') and obj.db_vistrailId is not None:
columnMap['vt_id'] = \
self.convertToDB(obj.db_vistrailId, 'long', 'int')
if hasattr(obj, 'db_log') and obj.db_log is not None:
columnMap['log_id'] = \
self.convertToDB(obj.db_log, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'os', 'architecture', 'processor', 'ram', 'vt_id', 'log_id', 'entity_id', 'entity_type']
table = 'machine'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_os') and obj.db_os is not None:
columnMap['os'] = \
self.convertToDB(obj.db_os, 'str', 'varchar(255)')
if hasattr(obj, 'db_architecture') and obj.db_architecture is not None:
columnMap['architecture'] = \
self.convertToDB(obj.db_architecture, 'str', 'varchar(255)')
if hasattr(obj, 'db_processor') and obj.db_processor is not None:
columnMap['processor'] = \
self.convertToDB(obj.db_processor, 'str', 'varchar(255)')
if hasattr(obj, 'db_ram') and obj.db_ram is not None:
columnMap['ram'] = \
self.convertToDB(obj.db_ram, 'int', 'bigint')
if hasattr(obj, 'db_vistrailId') and obj.db_vistrailId is not None:
columnMap['vt_id'] = \
self.convertToDB(obj.db_vistrailId, 'long', 'int')
if hasattr(obj, 'db_log') and obj.db_log is not None:
columnMap['log_id'] = \
self.convertToDB(obj.db_log, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'machine'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
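# DAO for the 'other' table: key/value entries (the key is stored in the 'okey'
# column) that may belong to a workflow, add, or change parent, as dispatched in
# from_sql_fast based on parent_type.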
class DBOtherSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'other'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'okey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'other'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
other = DBOther(key=key,
value=value,
id=id)
other.db_parentType = parentType
other.db_entity_id = entity_id
other.db_entity_type = entity_type
other.db_parent = parent
other.is_dirty = False
res[('other', id)] = other
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'okey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'other'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
other = DBOther(key=key,
value=value,
id=id)
other.db_parentType = parentType
other.db_entity_id = entity_id
other.db_entity_type = entity_type
other.db_parent = parent
other.is_dirty = False
res[('other', id)] = other
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_other(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'okey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'other'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['okey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'okey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'other'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['okey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'other'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
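# DAO for the 'abstraction' table: converts rows to DBAbstraction modules,
# attaches them to a workflow, add, or change parent depending on parent_type,
# and pushes the abstraction id down to its location, functions, and annotations
# in to_sql_fast.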
class DBAbstractionSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'abstraction'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'internal_version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'abstraction'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
cache = self.convertFromDB(row[1], 'int', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package = self.convertFromDB(row[4], 'str', 'varchar(511)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
internal_version = self.convertFromDB(row[6], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[7], 'str', 'char(32)')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
parent = self.convertFromDB(row[10], 'long', 'long')
abstraction = DBAbstraction(cache=cache,
name=name,
namespace=namespace,
package=package,
version=version,
internal_version=internal_version,
id=id)
abstraction.db_parentType = parentType
abstraction.db_entity_id = entity_id
abstraction.db_entity_type = entity_type
abstraction.db_parent = parent
abstraction.is_dirty = False
res[('abstraction', id)] = abstraction
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'internal_version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'abstraction'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
cache = self.convertFromDB(row[1], 'int', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package = self.convertFromDB(row[4], 'str', 'varchar(511)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
internal_version = self.convertFromDB(row[6], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[7], 'str', 'char(32)')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
parent = self.convertFromDB(row[10], 'long', 'long')
abstraction = DBAbstraction(cache=cache,
name=name,
namespace=namespace,
package=package,
version=version,
internal_version=internal_version,
id=id)
abstraction.db_parentType = parentType
abstraction.db_entity_id = entity_id
abstraction.db_entity_type = entity_type
abstraction.db_parent = parent
abstraction.is_dirty = False
res[('abstraction', id)] = abstraction
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_module(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'internal_version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'abstraction'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_internal_version') and obj.db_internal_version is not None:
columnMap['internal_version'] = \
self.convertToDB(obj.db_internal_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'internal_version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'abstraction'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_internal_version') and obj.db_internal_version is not None:
columnMap['internal_version'] = \
self.convertToDB(obj.db_internal_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_location is not None:
child = obj.db_location
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_functions:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'abstraction'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
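# DAO for the 'mashuptrail' table: a top-level entity, so reading or writing a
# row also records its id in global_props['entity_id'] for use by child DAOs;
# to_sql_fast propagates the trail id to its actions, annotations, and action
# annotations.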
class DBMashuptrailSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'mashuptrail'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'name', 'version', 'vt_version', 'last_modified', 'entity_type']
table = 'mashuptrail'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'char(36)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
vtVersion = self.convertFromDB(row[3], 'long', 'int')
last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
mashuptrail = DBMashuptrail(name=name,
version=version,
vtVersion=vtVersion,
last_modified=last_modified,
id=id)
mashuptrail.db_entity_type = entity_type
mashuptrail.is_dirty = False
res[('mashuptrail', id)] = mashuptrail
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'name', 'version', 'vt_version', 'last_modified', 'entity_type']
table = 'mashuptrail'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'char(36)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
vtVersion = self.convertFromDB(row[3], 'long', 'int')
last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
mashuptrail = DBMashuptrail(name=name,
version=version,
vtVersion=vtVersion,
last_modified=last_modified,
id=id)
mashuptrail.db_entity_type = entity_type
mashuptrail.is_dirty = False
res[('mashuptrail', id)] = mashuptrail
return res
def from_sql_fast(self, obj, all_objects):
pass
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'version', 'vt_version', 'last_modified', 'entity_type']
table = 'mashuptrail'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'char(36)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_vtVersion') and obj.db_vtVersion is not None:
columnMap['vt_version'] = \
self.convertToDB(obj.db_vtVersion, 'long', 'int')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'version', 'vt_version', 'last_modified', 'entity_type']
table = 'mashuptrail'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'char(36)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_vtVersion') and obj.db_vtVersion is not None:
columnMap['vt_version'] = \
self.convertToDB(obj.db_vtVersion, 'long', 'int')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_actions:
child.db_mashuptrail = obj.db_id
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_actionAnnotations:
child.db_mashuptrail = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'mashuptrail'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
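# DAO for the 'registry' table: another top-level entity; reading or writing a
# row records both entity_id and entity_type in global_props so that child DAOs
# scope their queries to this registry.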
class DBRegistrySQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'registry'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified']
table = 'registry'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
root_descriptor_id = self.convertFromDB(row[3], 'long', 'int')
name = self.convertFromDB(row[4], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[5], 'datetime', 'datetime')
registry = DBRegistry(entity_type=entity_type,
version=version,
root_descriptor_id=root_descriptor_id,
name=name,
last_modified=last_modified,
id=id)
registry.is_dirty = False
res[('registry', id)] = registry
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified']
table = 'registry'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
root_descriptor_id = self.convertFromDB(row[3], 'long', 'int')
name = self.convertFromDB(row[4], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[5], 'datetime', 'datetime')
registry = DBRegistry(entity_type=entity_type,
version=version,
root_descriptor_id=root_descriptor_id,
name=name,
last_modified=last_modified,
id=id)
registry.is_dirty = False
res[('registry', id)] = registry
return res
def from_sql_fast(self, obj, all_objects):
pass
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified']
table = 'registry'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_root_descriptor_id') and obj.db_root_descriptor_id is not None:
columnMap['root_descriptor_id'] = \
self.convertToDB(obj.db_root_descriptor_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified']
table = 'registry'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_root_descriptor_id') and obj.db_root_descriptor_id is not None:
columnMap['root_descriptor_id'] = \
self.convertToDB(obj.db_root_descriptor_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_packages:
child.db_registry = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'registry'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBAnnotationSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'annotation'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'annotation'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'mediumtext')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
annotation = DBAnnotation(key=key,
value=value,
id=id)
annotation.db_parentType = parentType
annotation.db_entity_id = entity_id
annotation.db_entity_type = entity_type
annotation.db_parent = parent
annotation.is_dirty = False
res[('annotation', id)] = annotation
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'annotation'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'mediumtext')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
annotation = DBAnnotation(key=key,
value=value,
id=id)
annotation.db_parentType = parentType
annotation.db_entity_id = entity_id
annotation.db_entity_type = entity_type
annotation.db_parent = parent
annotation.is_dirty = False
res[('annotation', id)] = annotation
return res
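# from_sql_fast reattaches a freshly loaded annotation to its parent object by
# dispatching on the stored parent_type; note that 'add' and 'change'
# operations attach the annotation as their data payload (db_add_data) rather
# than as a plain annotation.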
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'vistrail':
p = all_objects[('vistrail', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'module':
p = all_objects[('module', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'workflow_exec':
p = all_objects[('workflow_exec', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'module_exec':
p = all_objects[('module_exec', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'group_exec':
p = all_objects[('group_exec', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'action':
p = all_objects[('action', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'abstraction':
p = all_objects[('abstraction', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'mashuptrail':
p = all_objects[('mashuptrail', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'group':
p = all_objects[('group', obj.db_parent)]
p.db_add_annotation(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['akey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'mediumtext')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['akey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'mediumtext')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBParameterExplorationSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'parameter_exploration'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type']
table = 'parameter_exploration'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
action_id = self.convertFromDB(row[1], 'long', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
date = self.convertFromDB(row[3], 'datetime', 'datetime')
user = self.convertFromDB(row[4], 'str', 'varchar(255)')
dims = self.convertFromDB(row[5], 'str', 'varchar(255)')
layout = self.convertFromDB(row[6], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
parameter_exploration = DBParameterExploration(action_id=action_id,
name=name,
date=date,
user=user,
dims=dims,
layout=layout,
id=id)
parameter_exploration.db_vistrail = vistrail
parameter_exploration.db_entity_id = entity_id
parameter_exploration.db_entity_type = entity_type
parameter_exploration.is_dirty = False
res[('parameter_exploration', id)] = parameter_exploration
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type']
table = 'parameter_exploration'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
action_id = self.convertFromDB(row[1], 'long', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
date = self.convertFromDB(row[3], 'datetime', 'datetime')
user = self.convertFromDB(row[4], 'str', 'varchar(255)')
dims = self.convertFromDB(row[5], 'str', 'varchar(255)')
layout = self.convertFromDB(row[6], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
parameter_exploration = DBParameterExploration(action_id=action_id,
name=name,
date=date,
user=user,
dims=dims,
layout=layout,
id=id)
parameter_exploration.db_vistrail = vistrail
parameter_exploration.db_entity_id = entity_id
parameter_exploration.db_entity_type = entity_type
parameter_exploration.is_dirty = False
res[('parameter_exploration', id)] = parameter_exploration
return res
def from_sql_fast(self, obj, all_objects):
if ('vistrail', obj.db_vistrail) in all_objects:
p = all_objects[('vistrail', obj.db_vistrail)]
p.db_add_parameter_exploration(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type']
table = 'parameter_exploration'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_dims') and obj.db_dims is not None:
columnMap['dims'] = \
self.convertToDB(obj.db_dims, 'str', 'varchar(255)')
if hasattr(obj, 'db_layout') and obj.db_layout is not None:
columnMap['layout'] = \
self.convertToDB(obj.db_layout, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type']
table = 'parameter_exploration'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_dims') and obj.db_dims is not None:
columnMap['dims'] = \
self.convertToDB(obj.db_dims, 'str', 'varchar(255)')
if hasattr(obj, 'db_layout') and obj.db_layout is not None:
columnMap['layout'] = \
self.convertToDB(obj.db_layout, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_functions:
child.db_parameter_exploration = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'parameter_exploration'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBMashupActionAnnotationSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'mashup_action_annotation'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action_annotation'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'varchar(8191)')
action_id = self.convertFromDB(row[3], 'long', 'int')
date = self.convertFromDB(row[4], 'datetime', 'datetime')
user = self.convertFromDB(row[5], 'str', 'varchar(255)')
mashuptrail = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
mashup_actionAnnotation = DBMashupActionAnnotation(key=key,
value=value,
action_id=action_id,
date=date,
user=user,
id=id)
mashup_actionAnnotation.db_mashuptrail = mashuptrail
mashup_actionAnnotation.db_entity_id = entity_id
mashup_actionAnnotation.db_entity_type = entity_type
mashup_actionAnnotation.is_dirty = False
res[('mashup_actionAnnotation', id)] = mashup_actionAnnotation
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action_annotation'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'varchar(8191)')
action_id = self.convertFromDB(row[3], 'long', 'int')
date = self.convertFromDB(row[4], 'datetime', 'datetime')
user = self.convertFromDB(row[5], 'str', 'varchar(255)')
mashuptrail = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
mashup_actionAnnotation = DBMashupActionAnnotation(key=key,
value=value,
action_id=action_id,
date=date,
user=user,
id=id)
mashup_actionAnnotation.db_mashuptrail = mashuptrail
mashup_actionAnnotation.db_entity_id = entity_id
mashup_actionAnnotation.db_entity_type = entity_type
mashup_actionAnnotation.is_dirty = False
res[('mashup_actionAnnotation', id)] = mashup_actionAnnotation
return res
def from_sql_fast(self, obj, all_objects):
if ('mashuptrail', obj.db_mashuptrail) in all_objects:
p = all_objects[('mashuptrail', obj.db_mashuptrail)]
p.db_add_actionAnnotation(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action_annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['akey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action_id, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_mashuptrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action_annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['akey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action_id, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_mashuptrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'mashup_action_annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBModuleExecSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'module_exec'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module_exec'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
cached = self.convertFromDB(row[3], 'int', 'int')
module_id = self.convertFromDB(row[4], 'long', 'int')
module_name = self.convertFromDB(row[5], 'str', 'varchar(255)')
completed = self.convertFromDB(row[6], 'int', 'int')
error = self.convertFromDB(row[7], 'str', 'varchar(1023)')
machine_id = self.convertFromDB(row[8], 'long', 'int')
parentType = self.convertFromDB(row[9], 'str', 'char(32)')
entity_id = self.convertFromDB(row[10], 'long', 'int')
entity_type = self.convertFromDB(row[11], 'str', 'char(16)')
parent = self.convertFromDB(row[12], 'long', 'long')
module_exec = DBModuleExec(ts_start=ts_start,
ts_end=ts_end,
cached=cached,
module_id=module_id,
module_name=module_name,
completed=completed,
error=error,
machine_id=machine_id,
id=id)
module_exec.db_parentType = parentType
module_exec.db_entity_id = entity_id
module_exec.db_entity_type = entity_type
module_exec.db_parent = parent
module_exec.is_dirty = False
res[('module_exec', id)] = module_exec
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module_exec'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
cached = self.convertFromDB(row[3], 'int', 'int')
module_id = self.convertFromDB(row[4], 'long', 'int')
module_name = self.convertFromDB(row[5], 'str', 'varchar(255)')
completed = self.convertFromDB(row[6], 'int', 'int')
error = self.convertFromDB(row[7], 'str', 'varchar(1023)')
machine_id = self.convertFromDB(row[8], 'long', 'int')
parentType = self.convertFromDB(row[9], 'str', 'char(32)')
entity_id = self.convertFromDB(row[10], 'long', 'int')
entity_type = self.convertFromDB(row[11], 'str', 'char(16)')
parent = self.convertFromDB(row[12], 'long', 'long')
module_exec = DBModuleExec(ts_start=ts_start,
ts_end=ts_end,
cached=cached,
module_id=module_id,
module_name=module_name,
completed=completed,
error=error,
machine_id=machine_id,
id=id)
module_exec.db_parentType = parentType
module_exec.db_entity_id = entity_id
module_exec.db_entity_type = entity_type
module_exec.db_parent = parent
module_exec.is_dirty = False
res[('module_exec', id)] = module_exec
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow_exec':
p = all_objects[('workflow_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'group_exec':
p = all_objects[('group_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'loop_exec':
p = all_objects[('loop_exec', obj.db_parent)]
p.db_add_item_exec(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_cached') and obj.db_cached is not None:
columnMap['cached'] = \
self.convertToDB(obj.db_cached, 'int', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_module_name') and obj.db_module_name is not None:
columnMap['module_name'] = \
self.convertToDB(obj.db_module_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None:
columnMap['machine_id'] = \
self.convertToDB(obj.db_machine_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_cached') and obj.db_cached is not None:
columnMap['cached'] = \
self.convertToDB(obj.db_cached, 'int', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_module_name') and obj.db_module_name is not None:
columnMap['module_name'] = \
self.convertToDB(obj.db_module_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None:
columnMap['machine_id'] = \
self.convertToDB(obj.db_machine_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_loop_execs:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'module_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
"""generated automatically by auto_dao.py"""
class SQLDAOListBase(dict):
def __init__(self, daos=None):
if daos is not None:
dict.update(self, daos)
if 'mashup_alias' not in self:
self['mashup_alias'] = DBMashupAliasSQLDAOBase(self)
if 'group' not in self:
self['group'] = DBGroupSQLDAOBase(self)
if 'add' not in self:
self['add'] = DBAddSQLDAOBase(self)
if 'group_exec' not in self:
self['group_exec'] = DBGroupExecSQLDAOBase(self)
if 'parameter' not in self:
self['parameter'] = DBParameterSQLDAOBase(self)
if 'vistrail' not in self:
self['vistrail'] = DBVistrailSQLDAOBase(self)
if 'module' not in self:
self['module'] = DBModuleSQLDAOBase(self)
if 'port' not in self:
self['port'] = DBPortSQLDAOBase(self)
if 'pe_function' not in self:
self['pe_function'] = DBPEFunctionSQLDAOBase(self)
if 'workflow' not in self:
self['workflow'] = DBWorkflowSQLDAOBase(self)
if 'mashup_action' not in self:
self['mashup_action'] = DBMashupActionSQLDAOBase(self)
if 'change' not in self:
self['change'] = DBChangeSQLDAOBase(self)
if 'package' not in self:
self['package'] = DBPackageSQLDAOBase(self)
if 'loop_exec' not in self:
self['loop_exec'] = DBLoopExecSQLDAOBase(self)
if 'connection' not in self:
self['connection'] = DBConnectionSQLDAOBase(self)
if 'action' not in self:
self['action'] = DBActionSQLDAOBase(self)
if 'portSpec' not in self:
self['portSpec'] = DBPortSpecSQLDAOBase(self)
if 'log' not in self:
self['log'] = DBLogSQLDAOBase(self)
if 'pe_parameter' not in self:
self['pe_parameter'] = DBPEParameterSQLDAOBase(self)
if 'workflow_exec' not in self:
self['workflow_exec'] = DBWorkflowExecSQLDAOBase(self)
if 'location' not in self:
self['location'] = DBLocationSQLDAOBase(self)
if 'function' not in self:
self['function'] = DBFunctionSQLDAOBase(self)
if 'actionAnnotation' not in self:
self['actionAnnotation'] = DBActionAnnotationSQLDAOBase(self)
if 'plugin_data' not in self:
self['plugin_data'] = DBPluginDataSQLDAOBase(self)
if 'delete' not in self:
self['delete'] = DBDeleteSQLDAOBase(self)
if 'vistrailVariable' not in self:
self['vistrailVariable'] = DBVistrailVariableSQLDAOBase(self)
if 'module_descriptor' not in self:
self['module_descriptor'] = DBModuleDescriptorSQLDAOBase(self)
if 'tag' not in self:
self['tag'] = DBTagSQLDAOBase(self)
if 'portSpecItem' not in self:
self['portSpecItem'] = DBPortSpecItemSQLDAOBase(self)
if 'mashup_component' not in self:
self['mashup_component'] = DBMashupComponentSQLDAOBase(self)
if 'mashup' not in self:
self['mashup'] = DBMashupSQLDAOBase(self)
if 'machine' not in self:
self['machine'] = DBMachineSQLDAOBase(self)
if 'other' not in self:
self['other'] = DBOtherSQLDAOBase(self)
if 'abstraction' not in self:
self['abstraction'] = DBAbstractionSQLDAOBase(self)
if 'mashuptrail' not in self:
self['mashuptrail'] = DBMashuptrailSQLDAOBase(self)
if 'registry' not in self:
self['registry'] = DBRegistrySQLDAOBase(self)
if 'annotation' not in self:
self['annotation'] = DBAnnotationSQLDAOBase(self)
if 'parameter_exploration' not in self:
self['parameter_exploration'] = DBParameterExplorationSQLDAOBase(self)
if 'mashup_actionAnnotation' not in self:
self['mashup_actionAnnotation'] = DBMashupActionAnnotationSQLDAOBase(self)
if 'module_exec' not in self:
self['module_exec'] = DBModuleExecSQLDAOBase(self)
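# Illustrative sketch (not part of the generated module): SQLDAOListBase maps
# vtType names to DAO instances that all share the same registry, so one DAO
# can delegate to another via getDao(). The `db` connection below is assumed
# to be supplied by the caller.
#
# daos = SQLDAOListBase()
# annotation_dao = daos['annotation']   # a DBAnnotationSQLDAOBase
# rows = annotation_dao.get_sql_columns(db, {'entity_id': key, 'entity_type': etype})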
| 47.555834
| 229
| 0.568141
|
794869dc40acd2cd56a451ac4fe0be0c41f55476
| 1,884
|
py
|
Python
|
tests/test_ops/test_upfirdn2d.py
|
BIGWangYuDong/mmcv
|
c46deb0576edaff5cd5a7d384c617478c7a73a70
|
[
"Apache-2.0"
] | 1
|
2022-03-18T02:41:11.000Z
|
2022-03-18T02:41:11.000Z
|
tests/test_ops/test_upfirdn2d.py
|
BIGWangYuDong/mmcv
|
c46deb0576edaff5cd5a7d384c617478c7a73a70
|
[
"Apache-2.0"
] | null | null | null |
tests/test_ops/test_upfirdn2d.py
|
BIGWangYuDong/mmcv
|
c46deb0576edaff5cd5a7d384c617478c7a73a70
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) OpenMMLab. All rights reserved.
import pytest
import torch
_USING_PARROTS = True
try:
from parrots.autograd import gradcheck
except ImportError:
from torch.autograd import gradcheck, gradgradcheck
_USING_PARROTS = False
class TestUpFirDn2d(object):
"""Unit test for UpFirDn2d.
Here, we just test the basic upsample case. More general tests
will be included in other unit tests for the UpFirDnUpsample and
UpFirDnDownSample modules.
"""
@classmethod
def setup_class(cls):
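# Build a normalized 4x4 separable binomial (1, 3, 3, 1) smoothing kernel,
# then derive the asymmetric padding ((pad + 1) // 2 + factor - 1, pad // 2)
# conventionally paired with 2x upsampling in upfirdn2d.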
kernel_1d = torch.tensor([1., 3., 3., 1.])
cls.kernel = kernel_1d[:, None] * kernel_1d[None, :]
cls.kernel = cls.kernel / cls.kernel.sum()
cls.factor = 2
pad = cls.kernel.shape[0] - cls.factor
cls.pad = ((pad + 1) // 2 + cls.factor - 1, pad // 2)
cls.input_tensor = torch.randn((2, 3, 4, 4), requires_grad=True)
@pytest.mark.skipif(not torch.cuda.is_available(), reason='requires cuda')
def test_upfirdn2d(self):
from mmcv.ops import upfirdn2d
if _USING_PARROTS:
gradcheck(
upfirdn2d,
(self.input_tensor.cuda(),
self.kernel.type_as(
self.input_tensor).cuda(), self.factor, 1, self.pad),
delta=1e-4,
pt_atol=1e-3)
else:
gradcheck(
upfirdn2d,
(self.input_tensor.cuda(),
self.kernel.type_as(
self.input_tensor).cuda(), self.factor, 1, self.pad),
eps=1e-4,
atol=1e-3)
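# The second-order check is torch-only: the parrots branch above imports
# only gradcheck, so gradgradcheck is exercised solely in this fallback.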
gradgradcheck(
upfirdn2d,
(self.input_tensor.cuda(),
self.kernel.type_as(
self.input_tensor).cuda(), self.factor, 1, self.pad),
eps=1e-4,
atol=1e-3)
| 31.932203
| 78
| 0.556794
|
79486b01aa4a0dcf22f8695a93e058bbbac8f077
| 25
|
py
|
Python
|
pylib/pymex/uniprot/__init__.py
|
IMExConsortium/pymex
|
5fb1d155f630c6225a3e635a402fbe9feebc1c15
|
[
"MIT"
] | 2
|
2020-09-26T08:17:00.000Z
|
2021-03-25T16:02:15.000Z
|
pylib/pymex/uniprot/__init__.py
|
IMExConsortium/pymex
|
5fb1d155f630c6225a3e635a402fbe9feebc1c15
|
[
"MIT"
] | 6
|
2020-09-21T01:09:35.000Z
|
2021-06-04T06:06:56.000Z
|
pylib/pymex/uniprot/__init__.py
|
IMExConsortium/pymex
|
5fb1d155f630c6225a3e635a402fbe9feebc1c15
|
[
"MIT"
] | 2
|
2020-05-14T06:41:21.000Z
|
2020-06-30T04:58:54.000Z
|
from .unirecord import *
| 12.5
| 24
| 0.76
|
79486b428a48226fd9426a2be2928f0aef7d0b79
| 3,715
|
py
|
Python
|
tests/json_model_diff_test.py
|
adamknox-wf/json-regex-difftool
|
cc59ccd8f41f94955bfb516ec4a7d86b7572e675
|
[
"Apache-2.0"
] | 9
|
2015-10-22T14:19:23.000Z
|
2021-04-19T06:45:36.000Z
|
tests/json_model_diff_test.py
|
adamknox-wf/json-regex-difftool
|
cc59ccd8f41f94955bfb516ec4a7d86b7572e675
|
[
"Apache-2.0"
] | 4
|
2015-03-06T21:04:58.000Z
|
2016-11-30T19:44:20.000Z
|
tests/json_model_diff_test.py
|
adamknox-wf/json-regex-difftool
|
cc59ccd8f41f94955bfb516ec4a7d86b7572e675
|
[
"Apache-2.0"
] | 7
|
2015-03-20T20:23:33.000Z
|
2021-06-30T02:59:59.000Z
|
import sys
from .test_helper import TestHelper
from json_regex_diff.jsondiff import JsonDiff
class JsonModelDiffTest(TestHelper):
def test_simple_regex_no_difference(self):
"""
With no difference we should have an empty list
"""
new_file = self.write_string_to_file('["test"]', "item1")
old_file = self.write_string_to_file('["(.*)"]', "item2")
comparison_tool = JsonDiff.from_file(new_file, old_file)
self.assertEqual(comparison_tool.diff(use_model=True), [])
self.cleanup()
def test_simple_regex_difference(self):
"""
With no match we should show a list addition followed by a list deletion.
"""
new_file = self.write_string_to_file('["test"]', "item1")
old_file = self.write_string_to_file('["[0-9]+"]', "item2")
comparison_tool = JsonDiff.from_file(new_file, old_file)
self.assertEqual(comparison_tool.diff(use_model=True),
[u'+: [0]=test', u'-: [0]=[0-9]+'])
self.cleanup()
def test_ambiguous_regex(self):
"""
With an ambiguous regex, we should match the first item in the list
"""
new_file = self.write_string_to_file('["test1", "test2"]', "item1")
old_file = self.write_string_to_file('["(.*)"]', "item2")
comparison_tool = JsonDiff.from_file(new_file, old_file)
self.assertEqual(comparison_tool.diff(use_model=True),
[u'+: [1]=test2'])
self.cleanup()
def test_list_order_with_regex(self):
"""
Regex matching should match the first item of the list, and then treat
the rest as out of order
"""
new_file = self.write_string_to_file('["test1", "test2"]', "item1")
old_file = self.write_string_to_file('["test2", "(.*)"]', "item2")
comparison_tool = JsonDiff.from_file(new_file, old_file)
self.assertEqual(comparison_tool.diff(use_model=True),
[u'+: [1]=test2', u'-: [0]=test2'])
self.cleanup()
def test_regex_integer_match(self):
"""
Test to ensure that we can match integers even though their type
is not text
"""
new_file = self.write_string_to_file('[42]', "item1")
old_file = self.write_string_to_file('["[0-9]+"]', "item2")
comparison_tool = JsonDiff.from_file(new_file, old_file)
self.assertEqual(comparison_tool.diff(use_model=True), [])
self.cleanup()
def test_regex_match_value(self):
filename1 = self.write_string_to_file('{"key":"value"}', "item1")
filename2 = self.write_string_to_file('{"key":"(.*)"}', "item2")
comparison_tool = JsonDiff.from_file(filename1, filename2)
self.assertEqual(comparison_tool.diff(use_model=True), [])
self.cleanup()
def test_regex_for_map_type_difference(self):
"""
Trying to match a regular expression with a dictionary should result
in a type difference
"""
filename1 = self.write_string_to_file('{"key1":{"key2":"value"}}',
"item1")
filename2 = self.write_string_to_file('{"key1":"(.*)"}', "item2")
comparison_tool = JsonDiff.from_file(filename1, filename2)
if sys.version_info.major == 3:
self.assertEqual(comparison_tool.diff(use_model=True), [
"TypeDifference : key1 - dict: ({'key2': 'value'}), "
"str: ((.*))"])
else:
self.assertEqual(comparison_tool.diff(use_model=True), [
"TypeDifference : key1 - dict: ({u'key2': u'value'}), "
"unicode: ((.*))"])
self.cleanup()
| 41.741573
| 78
| 0.597308
|
79486b8fade8ff7a785f1df72b680ff0950fd407
| 1,237
|
py
|
Python
|
diagrams/ibm/security.py
|
ryancheley/diagrams
|
8188b4d1ea47e0da832b95cd8a27adf52b26fac1
|
[
"MIT"
] | 1
|
2021-07-08T19:42:39.000Z
|
2021-07-08T19:42:39.000Z
|
diagrams/ibm/security.py
|
ryancheley/diagrams
|
8188b4d1ea47e0da832b95cd8a27adf52b26fac1
|
[
"MIT"
] | 1
|
2020-12-28T19:55:10.000Z
|
2020-12-29T18:29:53.000Z
|
diagrams/ibm/security.py
|
ryancheley/diagrams
|
8188b4d1ea47e0da832b95cd8a27adf52b26fac1
|
[
"MIT"
] | null | null | null |
# This module is automatically generated by autogen.sh. DO NOT EDIT.
from . import _IBM
class _Security(_IBM):
_type = "security"
_icon_dir = "resources/ibm/security"
class ApiSecurity(_Security):
_icon = "api-security.png"
class BlockchainSecurityService(_Security):
_icon = "blockchain-security-service.png"
class DataSecurity(_Security):
_icon = "data-security.png"
class Firewall(_Security):
_icon = "firewall.png"
class Gateway(_Security):
_icon = "gateway.png"
class GovernanceRiskCompliance(_Security):
_icon = "governance-risk-compliance.png"
class IdentityAccessManagement(_Security):
_icon = "identity-access-management.png"
class IdentityProvider(_Security):
_icon = "identity-provider.png"
class InfrastructureSecurity(_Security):
_icon = "infrastructure-security.png"
class PhysicalSecurity(_Security):
_icon = "physical-security.png"
class SecurityMonitoringIntelligence(_Security):
_icon = "security-monitoring-intelligence.png"
class SecurityServices(_Security):
_icon = "security-services.png"
class TrustendComputing(_Security):
_icon = "trustend-computing.png"
class Vpn(_Security):
_icon = "vpn.png"
# Aliases
VPN = Vpn
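# Illustrative sketch (assumes the public API of the `diagrams` package; not
# part of this generated module): nodes defined above are used inside a
# Diagram context and wired with >> edges.
#
# from diagrams import Diagram
# from diagrams.ibm.security import Firewall, Vpn
#
# with Diagram("Edge security", show=False):
#     Firewall("fw") >> Vpn("tunnel")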
| 17.927536
| 68
| 0.736459
|
79486c6088334168d96fd0402f0b155991bb5f32
| 9,987
|
py
|
Python
|
test/rlai/gpi/monte_carlo/iteration_test.py
|
MatthewGerber/rlai
|
f390433c3adc285e1e9cc113deed7009b2e6dd5a
|
[
"MIT"
] | 9
|
2021-05-09T22:30:42.000Z
|
2021-12-27T19:42:56.000Z
|
test/rlai/gpi/monte_carlo/iteration_test.py
|
MatthewGerber/rlai
|
f390433c3adc285e1e9cc113deed7009b2e6dd5a
|
[
"MIT"
] | 13
|
2020-11-18T03:30:39.000Z
|
2021-12-12T04:19:16.000Z
|
test/rlai/gpi/monte_carlo/iteration_test.py
|
MatthewGerber/rlai
|
f390433c3adc285e1e9cc113deed7009b2e6dd5a
|
[
"MIT"
] | 1
|
2021-06-24T16:48:59.000Z
|
2021-06-24T16:48:59.000Z
|
import os
import pickle
import tempfile
import time
from threading import Thread
import pytest
from numpy.random import RandomState
from rlai.agents.mdp import StochasticMdpAgent
from rlai.environments.gridworld import Gridworld, GridworldFeatureExtractor
from rlai.environments.mdp import TrajectorySamplingMdpPlanningEnvironment
from rlai.gpi import PolicyImprovementEvent
from rlai.gpi.monte_carlo.iteration import iterate_value_q_pi
from rlai.gpi.utils import update_policy_iteration_plot, plot_policy_iteration
from rlai.planning.environment_models import StochasticEnvironmentModel
from rlai.policies.tabular import TabularPolicy
from rlai.q_S_A.function_approximation.estimators import ApproximateStateActionValueEstimator
from rlai.q_S_A.function_approximation.models.sklearn import SKLearnSGD
from rlai.q_S_A.tabular import TabularStateActionValueEstimator
from rlai.utils import RunThreadManager
from test.rlai.utils import tabular_estimator_legacy_eq, tabular_pi_legacy_eq
def test_iterate_value_q_pi():
random_state = RandomState(12345)
mdp_environment: Gridworld = Gridworld.example_4_1(random_state, None)
q_S_A = TabularStateActionValueEstimator(mdp_environment, 0.1, None)
mdp_agent = StochasticMdpAgent(
'test',
random_state,
q_S_A.get_initial_policy(),
1
)
iterate_value_q_pi(
agent=mdp_agent,
environment=mdp_environment,
num_improvements=3000,
num_episodes_per_improvement=1,
update_upon_every_visit=False,
planning_environment=None,
make_final_policy_greedy=False,
q_S_A=q_S_A
)
# uncomment the following line and run test to update fixture
# with open(f'{os.path.dirname(__file__)}/fixtures/test_monte_carlo_iteration_of_value_q_pi.pickle', 'wb') as file:
# pickle.dump((mdp_agent.pi, q_S_A), file)
with open(f'{os.path.dirname(__file__)}/fixtures/test_monte_carlo_iteration_of_value_q_pi.pickle', 'rb') as file:
pi_fixture, q_S_A_fixture = pickle.load(file)
assert tabular_pi_legacy_eq(mdp_agent.pi, pi_fixture) and tabular_estimator_legacy_eq(q_S_A, q_S_A_fixture)
def test_off_policy_monte_carlo():
random_state = RandomState(12345)
mdp_environment: Gridworld = Gridworld.example_4_1(random_state, None)
q_S_A = TabularStateActionValueEstimator(mdp_environment, 0.0, None)
# target agent
mdp_agent = StochasticMdpAgent(
'test',
random_state,
q_S_A.get_initial_policy(),
1
)
# episode generation (behavior) policy
off_policy_agent = StochasticMdpAgent(
'test',
random_state,
q_S_A.get_initial_policy(),
1
)
iterate_value_q_pi(
agent=mdp_agent,
environment=mdp_environment,
num_improvements=100,
num_episodes_per_improvement=1,
update_upon_every_visit=True,
planning_environment=None,
make_final_policy_greedy=False,
q_S_A=q_S_A,
off_policy_agent=off_policy_agent
)
# uncomment the following line and run test to update fixture
# with open(f'{os.path.dirname(__file__)}/fixtures/test_monte_carlo_off_policy_iteration_of_value_q_pi.pickle', 'wb') as file:
# pickle.dump((mdp_agent.pi, q_S_A), file)
with open(f'{os.path.dirname(__file__)}/fixtures/test_monte_carlo_off_policy_iteration_of_value_q_pi.pickle', 'rb') as file:
pi_fixture, q_S_A_fixture = pickle.load(file)
assert tabular_pi_legacy_eq(mdp_agent.pi, pi_fixture) and tabular_estimator_legacy_eq(q_S_A, q_S_A_fixture)
def test_off_policy_monte_carlo_with_function_approximation():
random_state = RandomState(12345)
mdp_environment: Gridworld = Gridworld.example_4_1(random_state, None)
q_S_A = ApproximateStateActionValueEstimator(
mdp_environment,
0.05,
SKLearnSGD(random_state=random_state, scale_eta0_for_y=False),
GridworldFeatureExtractor(mdp_environment),
None,
False,
None,
None
)
# target agent
mdp_agent = StochasticMdpAgent(
'test',
random_state,
q_S_A.get_initial_policy(),
1
)
# episode generation (behavior) policy
off_policy_agent = StochasticMdpAgent(
'test',
random_state,
TabularPolicy(None, None),
1
)
iterate_value_q_pi(
agent=mdp_agent,
environment=mdp_environment,
num_improvements=100,
num_episodes_per_improvement=1,
update_upon_every_visit=True,
planning_environment=None,
make_final_policy_greedy=False,
q_S_A=q_S_A,
off_policy_agent=off_policy_agent
)
# uncomment the following line and run test to update fixture
# with open(f'{os.path.dirname(__file__)}/fixtures/test_off_policy_monte_carlo_with_function_approximationo.pickle', 'wb') as file:
# pickle.dump((mdp_agent.pi, q_S_A), file)
with open(f'{os.path.dirname(__file__)}/fixtures/test_off_policy_monte_carlo_with_function_approximationo.pickle', 'rb') as file:
pi_fixture, q_S_A_fixture = pickle.load(file)
assert mdp_agent.pi == pi_fixture and q_S_A == q_S_A_fixture
assert str(mdp_agent.pi.estimator[mdp_environment.SS[5]][mdp_environment.SS[5].AA[1]]).startswith('-1.4524')
# make greedy
q_S_A.epsilon = 0.0
assert q_S_A.improve_policy(mdp_agent, None, PolicyImprovementEvent.MAKING_POLICY_GREEDY) == -1
assert mdp_agent.pi.estimator.epsilon == 0.0
def test_invalid_iterate_value_q_pi():
random_state = RandomState(12345)
mdp_environment: Gridworld = Gridworld.example_4_1(random_state, None)
q_S_A = TabularStateActionValueEstimator(mdp_environment, 0.0, None)
# target agent
mdp_agent = StochasticMdpAgent(
'test',
random_state,
q_S_A.get_initial_policy(),
1
)
# episode generation (behavior) policy
off_policy_agent = StochasticMdpAgent(
'test',
random_state,
q_S_A.get_initial_policy(),
1
)
with pytest.raises(ValueError, match='Planning environments are not currently supported for Monte Carlo iteration.'):
iterate_value_q_pi(
agent=mdp_agent,
environment=mdp_environment,
num_improvements=100,
num_episodes_per_improvement=1,
update_upon_every_visit=True,
planning_environment=TrajectorySamplingMdpPlanningEnvironment('foo', random_state, StochasticEnvironmentModel(), 100, None),
make_final_policy_greedy=False,
q_S_A=q_S_A,
off_policy_agent=off_policy_agent
)
# test warning...no off-policy agent with epsilon=0.0
q_S_A.epsilon = 0.0
iterate_value_q_pi(
agent=mdp_agent,
environment=mdp_environment,
num_improvements=100,
num_episodes_per_improvement=1,
update_upon_every_visit=True,
planning_environment=None,
make_final_policy_greedy=False,
q_S_A=q_S_A,
off_policy_agent=None
)
def test_iterate_value_q_pi_with_pdf():
random_state = RandomState(12345)
mdp_environment: Gridworld = Gridworld.example_4_1(random_state, None)
q_S_A = TabularStateActionValueEstimator(mdp_environment, 0.1, None)
mdp_agent = StochasticMdpAgent(
'test',
random_state,
q_S_A.get_initial_policy(),
1
)
iterate_value_q_pi(
agent=mdp_agent,
environment=mdp_environment,
num_improvements=3000,
num_episodes_per_improvement=1,
update_upon_every_visit=False,
planning_environment=None,
make_final_policy_greedy=False,
q_S_A=q_S_A,
num_improvements_per_plot=1500,
pdf_save_path=tempfile.NamedTemporaryFile(delete=False).name
)
with pytest.raises(ValueError, match='Epsilon must be >= 0'):
q_S_A.epsilon = -1.0
q_S_A.improve_policy(mdp_agent, states=None, event=PolicyImprovementEvent.MAKING_POLICY_GREEDY)
q_S_A.epsilon = 0.0
assert q_S_A.improve_policy(mdp_agent, None, PolicyImprovementEvent.MAKING_POLICY_GREEDY) == 14
def test_iterate_value_q_pi_multi_threaded():
thread_manager = RunThreadManager(True)
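# RunThreadManager gates the training loop: the main thread keeps a handle so
# it can abort iteration later (see thread_manager.abort near the end).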
def train_thread_target():
random_state = RandomState(12345)
mdp_environment: Gridworld = Gridworld.example_4_1(random_state, None)
q_S_A = TabularStateActionValueEstimator(mdp_environment, 0.1, None)
mdp_agent = StochasticMdpAgent(
'test',
random_state,
q_S_A.get_initial_policy(),
1
)
iterate_value_q_pi(
agent=mdp_agent,
environment=mdp_environment,
num_improvements=1000000,
num_episodes_per_improvement=10,
update_upon_every_visit=False,
planning_environment=None,
make_final_policy_greedy=False,
q_S_A=q_S_A,
thread_manager=thread_manager,
num_improvements_per_plot=10
)
# premature update should have no effect
assert update_policy_iteration_plot() is None
# initialize plot from main thread
plot_policy_iteration(
iteration_average_reward=[],
iteration_total_states=[],
iteration_num_states_improved=[],
elapsed_seconds_average_rewards={},
pdf=None
)
# run training thread
run_thread = Thread(target=train_thread_target)
run_thread.start()
time.sleep(1)
# update plot asynchronously
update_policy_iteration_plot()
time.sleep(1)
# should not be allowed to update the plot from a non-main thread
def bad_update():
with pytest.raises(ValueError, match='Can only update plot on main thread.'):
update_policy_iteration_plot()
bad_thread = Thread(target=bad_update)
bad_thread.start()
bad_thread.join()
thread_manager.abort = True
run_thread.join()
| 31.30721
| 136
| 0.705717
|
79486c6e45022e1d6d75239d4b1e58f9e1ef45bf
| 2,035
|
py
|
Python
|
dedomeno/houses/migrations/0084_room.py
|
ginopalazzo/dedomeno
|
e43df365849102016c8819b2082d2cde9109360f
|
[
"MIT"
] | 38
|
2018-03-19T12:52:17.000Z
|
2022-02-17T14:45:57.000Z
|
dedomeno/houses/migrations/0084_room.py
|
ginopalazzo/dedomeno
|
e43df365849102016c8819b2082d2cde9109360f
|
[
"MIT"
] | 7
|
2020-02-11T23:01:40.000Z
|
2020-08-06T13:30:58.000Z
|
dedomeno/houses/migrations/0084_room.py
|
ginopalazzo/dedomeno
|
e43df365849102016c8819b2082d2cde9109360f
|
[
"MIT"
] | 12
|
2019-02-23T22:10:34.000Z
|
2022-03-24T12:01:38.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-08 17:34
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('houses', '0083_auto_20170108_1208'),
]
operations = [
migrations.CreateModel(
name='Room',
fields=[
('property_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='houses.Property')),
('m2_total', models.IntegerField(blank=True, null=True)),
('flat_num', models.CharField(blank=True, max_length=200, null=True)),
('elevator', models.NullBooleanField()),
('wc', models.IntegerField(blank=True, null=True)),
('min_month_stay', models.IntegerField(blank=True, null=True)),
('people_max', models.IntegerField(blank=True, null=True)),
('people_now_living_gender', models.CharField(blank=True, max_length=200, null=True)),
('people_now_living_age_min', models.IntegerField(blank=True, null=True)),
('people_now_living_age_max', models.IntegerField(blank=True, null=True)),
('smoking_allowed', models.NullBooleanField()),
('pet_allowed', models.NullBooleanField()),
('looking_for', models.CharField(blank=True, max_length=200, null=True)),
('gay_friendly', models.NullBooleanField()),
('working', models.NullBooleanField()),
('air_conditioner', models.NullBooleanField()),
('internet', models.NullBooleanField()),
('builtin_wardrobes', models.NullBooleanField()),
('furnished', models.NullBooleanField()),
('house_cleaners', models.NullBooleanField()),
],
bases=('houses.property',),
),
]
| 47.325581
| 194
| 0.606388
|
79486de21fd08e9dfa2b2a725aca619153eefb17
| 330
|
py
|
Python
|
jd/api/rest/KplOpenKeplerCartUncheckskuRequest.py
|
fengjinqi/linjuanbang
|
8cdc4e81df73ccd737ac547da7f2c7dca545862a
|
[
"MIT"
] | 5
|
2019-10-30T01:16:30.000Z
|
2020-06-14T03:32:19.000Z
|
jd/api/rest/KplOpenKeplerCartUncheckskuRequest.py
|
fengjinqi/linjuanbang
|
8cdc4e81df73ccd737ac547da7f2c7dca545862a
|
[
"MIT"
] | 2
|
2020-10-12T07:12:48.000Z
|
2021-06-02T03:15:47.000Z
|
jd/api/rest/KplOpenKeplerCartUncheckskuRequest.py
|
fengjinqi/linjuanbang
|
8cdc4e81df73ccd737ac547da7f2c7dca545862a
|
[
"MIT"
] | 3
|
2019-12-06T17:33:49.000Z
|
2021-03-01T13:24:22.000Z
|
from jd.api.base import RestApi
class KplOpenKeplerCartUncheckskuRequest(RestApi):
def __init__(self,domain='gw.api.360buy.com',port=80):
RestApi.__init__(self,domain, port)
self.commlist = None
self.userid = None
self.locationid = None
def getapiname(self):
return 'jd.kpl.open.kepler.cart.unchecksku'
| 18.333333
| 56
| 0.733333
|
79486f972430e6e4954e4cba8de54e5a7dc5340d
| 14,474
|
py
|
Python
|
blender/arm/exporter_opt.py
|
ValtoGameEngines/Armory
|
ad3d3c63e64e9225e62b414b7ec4dd9fb93fab32
|
[
"Zlib"
] | 1
|
2020-09-08T08:38:04.000Z
|
2020-09-08T08:38:04.000Z
|
blender/arm/exporter_opt.py
|
ValtoGameEngines/Armory
|
ad3d3c63e64e9225e62b414b7ec4dd9fb93fab32
|
[
"Zlib"
] | null | null | null |
blender/arm/exporter_opt.py
|
ValtoGameEngines/Armory
|
ad3d3c63e64e9225e62b414b7ec4dd9fb93fab32
|
[
"Zlib"
] | 1
|
2020-06-29T07:54:21.000Z
|
2020-06-29T07:54:21.000Z
|
import bpy
import numpy as np
from mathutils import *
import arm.log as log
import arm.utils
# Exports smaller geometry but is slower
# To be replaced with https://github.com/zeux/meshoptimizer
class Vertex:
__slots__ = ("co", "normal", "uvs", "col", "loop_indices", "index", "bone_weights", "bone_indices", "bone_count", "vertex_index")
def __init__(self, mesh, loop):
self.vertex_index = loop.vertex_index
loop_idx = loop.index
self.co = mesh.vertices[self.vertex_index].co[:]
self.normal = loop.normal[:]
self.uvs = tuple(layer.data[loop_idx].uv[:] for layer in mesh.uv_layers)
self.col = [0.0, 0.0, 0.0]
if len(mesh.vertex_colors) > 0:
self.col = mesh.vertex_colors[0].data[loop_idx].color[:]
self.loop_indices = [loop_idx]
self.index = 0
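    # Hash on (position, normal, uvs) only; __eq__ additionally compares color
    # and, on a match, merges the two vertices' loop index lists so a dict keyed
    # on Vertex collapses duplicate loops into a single exported vertex.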
def __hash__(self):
return hash((self.co, self.normal, self.uvs))
def __eq__(self, other):
eq = (
(self.co == other.co) and
(self.normal == other.normal) and
(self.uvs == other.uvs) and
(self.col == other.col)
)
if eq:
indices = self.loop_indices + other.loop_indices
self.loop_indices = indices
other.loop_indices = indices
return eq
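# calc_tangents: accumulate each triangle's UV-space tangent into its three
# corner vertices, then Gram-Schmidt orthogonalize against the vertex normal
# (n.xy comes from nora; n.z is unpacked from pos.w via scale_pos).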
def calc_tangents(posa, nora, uva, ias, scale_pos):
num_verts = int(len(posa) / 4)
    tangents = np.zeros(num_verts * 3, dtype='<f4')  # accumulated below, so must start at zero
# bitangents = np.empty(num_verts * 3, dtype='<f4')
for ar in ias:
ia = ar['values']
num_tris = int(len(ia) / 3)
for i in range(0, num_tris):
i0 = ia[i * 3 ]
i1 = ia[i * 3 + 1]
i2 = ia[i * 3 + 2]
v0 = Vector((posa[i0 * 4], posa[i0 * 4 + 1], posa[i0 * 4 + 2]))
v1 = Vector((posa[i1 * 4], posa[i1 * 4 + 1], posa[i1 * 4 + 2]))
v2 = Vector((posa[i2 * 4], posa[i2 * 4 + 1], posa[i2 * 4 + 2]))
uv0 = Vector((uva[i0 * 2], uva[i0 * 2 + 1]))
uv1 = Vector((uva[i1 * 2], uva[i1 * 2 + 1]))
uv2 = Vector((uva[i2 * 2], uva[i2 * 2 + 1]))
deltaPos1 = v1 - v0
deltaPos2 = v2 - v0
deltaUV1 = uv1 - uv0
deltaUV2 = uv2 - uv0
d = (deltaUV1.x * deltaUV2.y - deltaUV1.y * deltaUV2.x)
if d != 0:
r = 1.0 / d
else:
r = 1.0
tangent = (deltaPos1 * deltaUV2.y - deltaPos2 * deltaUV1.y) * r
# bitangent = (deltaPos2 * deltaUV1.x - deltaPos1 * deltaUV2.x) * r
tangents[i0 * 3 ] += tangent.x
tangents[i0 * 3 + 1] += tangent.y
tangents[i0 * 3 + 2] += tangent.z
tangents[i1 * 3 ] += tangent.x
tangents[i1 * 3 + 1] += tangent.y
tangents[i1 * 3 + 2] += tangent.z
tangents[i2 * 3 ] += tangent.x
tangents[i2 * 3 + 1] += tangent.y
tangents[i2 * 3 + 2] += tangent.z
# bitangents[i0 * 3 ] += bitangent.x
# bitangents[i0 * 3 + 1] += bitangent.y
# bitangents[i0 * 3 + 2] += bitangent.z
# bitangents[i1 * 3 ] += bitangent.x
# bitangents[i1 * 3 + 1] += bitangent.y
# bitangents[i1 * 3 + 2] += bitangent.z
# bitangents[i2 * 3 ] += bitangent.x
# bitangents[i2 * 3 + 1] += bitangent.y
# bitangents[i2 * 3 + 2] += bitangent.z
# Orthogonalize
for i in range(0, num_verts):
t = Vector((tangents[i * 3], tangents[i * 3 + 1], tangents[i * 3 + 2]))
# b = Vector((bitangents[i * 3], bitangents[i * 3 + 1], bitangents[i * 3 + 2]))
n = Vector((nora[i * 2], nora[i * 2 + 1], posa[i * 4 + 3] / scale_pos))
v = t - n * n.dot(t)
v.normalize()
# Calculate handedness
# cnv = n.cross(v)
# if cnv.dot(b) < 0.0:
# v = v * -1.0
tangents[i * 3 ] = v.x
tangents[i * 3 + 1] = v.y
tangents[i * 3 + 2] = v.z
return tangents
def export_mesh_data(self, exportMesh, bobject, o, has_armature=False):
exportMesh.calc_normals_split()
# exportMesh.calc_loop_triangles()
vert_list = { Vertex(exportMesh, loop) : 0 for loop in exportMesh.loops}.keys()
num_verts = len(vert_list)
num_uv_layers = len(exportMesh.uv_layers)
has_tex = self.get_export_uvs(exportMesh) == True and num_uv_layers > 0
if self.has_baked_material(bobject, exportMesh.materials):
has_tex = True
has_tex1 = has_tex == True and num_uv_layers > 1
num_colors = len(exportMesh.vertex_colors)
has_col = self.get_export_vcols(exportMesh) == True and num_colors > 0
has_tang = self.has_tangents(exportMesh)
pdata = np.empty(num_verts * 4, dtype='<f4') # p.xyz, n.z
ndata = np.empty(num_verts * 2, dtype='<f4') # n.xy
if has_tex:
# Get active uvmap
t0map = 0
uv_layers = exportMesh.uv_layers
if uv_layers != None:
if 'UVMap_baked' in uv_layers:
for i in range(0, len(uv_layers)):
if uv_layers[i].name == 'UVMap_baked':
t0map = i
break
else:
for i in range(0, len(uv_layers)):
if uv_layers[i].active_render:
t0map = i
break
t1map = 1 if t0map == 0 else 0
# Alloc data
t0data = np.empty(num_verts * 2, dtype='<f4')
if has_tex1:
t1data = np.empty(num_verts * 2, dtype='<f4')
if has_col:
cdata = np.empty(num_verts * 3, dtype='<f4')
if has_tex:
# Scale for packed coords
maxdim = 1.0
lay0 = exportMesh.uv_layers[t0map]
for v in lay0.data:
if abs(v.uv[0]) > maxdim:
maxdim = abs(v.uv[0])
if abs(v.uv[1]) > maxdim:
maxdim = abs(v.uv[1])
if maxdim > 1:
o['scale_tex'] = maxdim
invscale_tex = (1 / o['scale_tex']) * 32767
else:
invscale_tex = 1 * 32767
# TODO: handle t1map
# Save aabb
aabb_center = 0.125 * sum((Vector(b) for b in bobject.bound_box), Vector())
bobject.data.arm_aabb = [ \
abs((bobject.bound_box[6][0] - bobject.bound_box[0][0]) / 2 + abs(aabb_center[0])) * 2, \
abs((bobject.bound_box[6][1] - bobject.bound_box[0][1]) / 2 + abs(aabb_center[1])) * 2, \
abs((bobject.bound_box[6][2] - bobject.bound_box[0][2]) / 2 + abs(aabb_center[2])) * 2 \
]
# Scale for packed coords
maxdim = max(bobject.data.arm_aabb[0], max(bobject.data.arm_aabb[1], bobject.data.arm_aabb[2]))
if maxdim > 2:
o['scale_pos'] = maxdim / 2
else:
o['scale_pos'] = 1.0
if has_armature: # Allow up to 2x bigger bounds for skinned mesh
o['scale_pos'] *= 2.0
scale_pos = o['scale_pos']
invscale_pos = (1 / scale_pos) * 32767
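    # short-norm packing: divide by the chosen scale and map [-1, 1] onto
    # [-32767, 32767] so positions/uvs fit signed 16-bit normalized attributes.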
# Make arrays
for i, v in enumerate(vert_list):
v.index = i
co = v.co
normal = v.normal
i4 = i * 4
i2 = i * 2
pdata[i4 ] = co[0]
pdata[i4 + 1] = co[1]
pdata[i4 + 2] = co[2]
pdata[i4 + 3] = normal[2] * scale_pos # Cancel scale
ndata[i2 ] = normal[0]
ndata[i2 + 1] = normal[1]
if has_tex:
uv = v.uvs[t0map]
t0data[i2 ] = uv[0]
t0data[i2 + 1] = 1.0 - uv[1] # Reverse Y
if has_tex1:
uv = v.uvs[t1map]
t1data[i2 ] = uv[0]
t1data[i2 + 1] = 1.0 - uv[1]
if has_col:
i3 = i * 3
cdata[i3 ] = v.col[0]
cdata[i3 + 1] = v.col[1]
cdata[i3 + 2] = v.col[2]
# Indices
prims = {ma.name if ma else '': [] for ma in exportMesh.materials}
if not prims:
prims = {'': []}
vert_dict = {i : v for v in vert_list for i in v.loop_indices}
for poly in exportMesh.polygons:
first = poly.loop_start
if len(exportMesh.materials) == 0:
prim = prims['']
else:
mat = exportMesh.materials[min(poly.material_index, len(exportMesh.materials) - 1)]
prim = prims[mat.name if mat else '']
indices = [vert_dict[i].index for i in range(first, first+poly.loop_total)]
if poly.loop_total == 3:
prim += indices
elif poly.loop_total > 3:
for i in range(poly.loop_total-2):
prim += (indices[-1], indices[i], indices[i + 1])
# Write indices
o['index_arrays'] = []
for mat, prim in prims.items():
idata = [0] * len(prim)
for i, v in enumerate(prim):
idata[i] = v
if len(idata) == 0: # No face assigned
continue
ia = {}
ia['values'] = idata
ia['material'] = 0
# Find material index for multi-mat mesh
if len(exportMesh.materials) > 1:
for i in range(0, len(exportMesh.materials)):
if (exportMesh.materials[i] != None and mat == exportMesh.materials[i].name) or \
(exportMesh.materials[i] == None and mat == ''): # Default material for empty slots
ia['material'] = i
break
o['index_arrays'].append(ia)
if has_tang:
tangdata = calc_tangents(pdata, ndata, t0data, o['index_arrays'], scale_pos)
pdata *= invscale_pos
ndata *= 32767
pdata = np.array(pdata, dtype='<i2')
ndata = np.array(ndata, dtype='<i2')
if has_tex:
t0data *= invscale_tex
t0data = np.array(t0data, dtype='<i2')
if has_tex1:
t1data *= invscale_tex
t1data = np.array(t1data, dtype='<i2')
if has_col:
cdata *= 32767
cdata = np.array(cdata, dtype='<i2')
if has_tang:
tangdata *= 32767
tangdata = np.array(tangdata, dtype='<i2')
# Output
o['vertex_arrays'] = []
o['vertex_arrays'].append({ 'attrib': 'pos', 'values': pdata, 'data': 'short4norm' })
o['vertex_arrays'].append({ 'attrib': 'nor', 'values': ndata, 'data': 'short2norm' })
if has_tex:
o['vertex_arrays'].append({ 'attrib': 'tex', 'values': t0data, 'data': 'short2norm' })
if has_tex1:
o['vertex_arrays'].append({ 'attrib': 'tex1', 'values': t1data, 'data': 'short2norm' })
if has_col:
o['vertex_arrays'].append({ 'attrib': 'col', 'values': cdata, 'data': 'short4norm', 'padding': 1 })
if has_tang:
o['vertex_arrays'].append({ 'attrib': 'tang', 'values': tangdata, 'data': 'short4norm', 'padding': 1 })
return vert_list
def export_skin(self, bobject, armature, vert_list, o):
# This function exports all skinning data, which includes the skeleton
# and per-vertex bone influence data
oskin = {}
o['skin'] = oskin
# Write the skin bind pose transform
otrans = {}
oskin['transform'] = otrans
otrans['values'] = self.write_matrix(bobject.matrix_world)
# Write the bone object reference array
oskin['bone_ref_array'] = []
oskin['bone_len_array'] = []
bone_array = armature.data.bones
bone_count = len(bone_array)
rpdat = arm.utils.get_rp()
max_bones = rpdat.arm_skin_max_bones
if bone_count > max_bones:
log.warn(bobject.name + ' - ' + str(bone_count) + ' bones found, exceeds maximum of ' + str(max_bones) + ' bones defined - raise the value in Camera Data - Armory Render Props - Max Bones')
for i in range(bone_count):
boneRef = self.find_bone(bone_array[i].name)
if boneRef:
oskin['bone_ref_array'].append(boneRef[1]["structName"])
oskin['bone_len_array'].append(bone_array[i].length)
else:
oskin['bone_ref_array'].append("")
oskin['bone_len_array'].append(0.0)
# Write the bind pose transform array
oskin['transformsI'] = []
for i in range(bone_count):
skeletonI = (armature.matrix_world @ bone_array[i].matrix_local).inverted_safe()
skeletonI = (skeletonI @ bobject.matrix_world)
oskin['transformsI'].append(self.write_matrix(skeletonI))
# Export the per-vertex bone influence data
group_remap = []
for group in bobject.vertex_groups:
for i in range(bone_count):
if bone_array[i].name == group.name:
group_remap.append(i)
break
else:
group_remap.append(-1)
bone_count_array = np.empty(len(vert_list), dtype='<i2')
bone_index_array = np.empty(len(vert_list) * 4, dtype='<i2')
bone_weight_array = np.empty(len(vert_list) * 4, dtype='<i2')
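    # Keep at most the 4 strongest bone influences per vertex; weights are
    # stored as 16-bit fixed point (weight * 32767) and renormalized below when
    # the kept weights do not sum to 1.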
vertices = bobject.data.vertices
count = 0
for index, v in enumerate(vert_list):
bone_count = 0
total_weight = 0.0
bone_values = []
for g in vertices[v.vertex_index].groups:
bone_index = group_remap[g.group]
bone_weight = g.weight
if bone_index >= 0: #and bone_weight != 0.0:
bone_values.append((bone_weight, bone_index))
total_weight += bone_weight
bone_count += 1
if bone_count > 4:
bone_count = 4
bone_values.sort(reverse=True)
bone_values = bone_values[:4]
bone_count_array[index] = bone_count
for bv in bone_values:
bone_weight_array[count] = bv[0] * 32767
bone_index_array[count] = bv[1]
count += 1
if total_weight != 0.0 and total_weight != 1.0:
normalizer = 1.0 / total_weight
for i in range(bone_count):
bone_weight_array[count - i - 1] *= normalizer
oskin['bone_count_array'] = bone_count_array
oskin['bone_index_array'] = bone_index_array[:count]
oskin['bone_weight_array'] = bone_weight_array[:count]
# Bone constraints
for bone in armature.pose.bones:
if len(bone.constraints) > 0:
if 'constraints' not in oskin:
oskin['constraints'] = []
self.add_constraints(bone, oskin, bone=True)
| 38.80429
| 198
| 0.529778
|
794870e5d6c6b435787fecefa77587db9259f304
| 10,245
|
py
|
Python
|
gluoncv/model_zoo/shufflenet.py
|
RafLit/gluon-cv
|
dae504a4de8fff1421fd4fe398accbe396c504cc
|
[
"Apache-2.0"
] | 1
|
2022-03-29T12:59:51.000Z
|
2022-03-29T12:59:51.000Z
|
gluoncv/model_zoo/shufflenet.py
|
RafLit/gluon-cv
|
dae504a4de8fff1421fd4fe398accbe396c504cc
|
[
"Apache-2.0"
] | null | null | null |
gluoncv/model_zoo/shufflenet.py
|
RafLit/gluon-cv
|
dae504a4de8fff1421fd4fe398accbe396c504cc
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable= line-too-long,arguments-differ,unused-argument,missing-docstring,too-many-function-args
# pylint: disable= line-too-long
"""ShuffleNetV1 and ShuffleNetV2, implemented in Gluon."""
from mxnet.context import cpu
from mxnet.gluon import nn
from mxnet.gluon.nn import BatchNorm
from mxnet.gluon.block import HybridBlock
__all__ = [
'ShuffleNetV1',
'shufflenet_v1',
'get_shufflenet_v1',
'ShuffleNetV2',
'shufflenet_v2',
'get_shufflenet_v2']
def _conv2d(channel, kernel=1, padding=0, stride=1, num_group=1, use_act=True, use_bias=True, norm_layer=BatchNorm, norm_kwargs=None):
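    # Conv -> BatchNorm -> optional ReLU block shared by both ShuffleNet variants.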
cell = nn.HybridSequential(prefix='')
cell.add(nn.Conv2D(channel, kernel_size=kernel, strides=stride, padding=padding, groups=num_group, use_bias=use_bias))
cell.add(norm_layer(epsilon=1e-5, momentum=0.9, **({} if norm_kwargs is None else norm_kwargs)))
if use_act:
cell.add(nn.Activation('relu'))
return cell
class shuffleUnit(HybridBlock):
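    # 'add' units keep spatial size (stride-1 depthwise conv, residual sum);
    # 'concat' units downsample (stride 2) and concatenate with an avg-pooled
    # shortcut, so the main branch produces out_channels - in_channels channels.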
def __init__(self, in_channels, out_channels, combine_type, groups=3, grouped_conv=True,
norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
super(shuffleUnit, self).__init__(**kwargs)
if combine_type == 'add':
self.DWConv_stride = 1
elif combine_type == 'concat':
self.DWConv_stride = 2
out_channels -= in_channels
self.first_groups = groups if grouped_conv else 1
self.bottleneck_channels = out_channels // 4
self.grouped_conv = grouped_conv
self.output_channel = out_channels
self.groups = groups
self.combine_type = combine_type
with self.name_scope():
self.conv_beforshuffle = nn.HybridSequential()
self.conv_beforshuffle.add(_conv2d(channel=self.bottleneck_channels, kernel=1, stride=1, num_group=self.first_groups))
self.conv_aftershuffle = nn.HybridSequential()
self.conv_aftershuffle.add(_conv2d(channel=self.bottleneck_channels, kernel=3, padding=1, stride=self.DWConv_stride, num_group=self.bottleneck_channels, use_act=False))
self.conv_aftershuffle.add(_conv2d(channel=self.output_channel, kernel=1, stride=1, num_group=groups, use_act=False))
def combine(self, F, branch1, branch2, combine):
if combine == 'add':
data = branch1 + branch2
data = F.Activation(data, act_type='relu')
elif combine == 'concat':
data = F.concat(branch1, branch2, dim=1)
data = F.Activation(data, act_type='relu')
return data
def channel_shuffle(self, F, data, groups):
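        # mxnet reshape codes: 0 keeps the batch dim, -4 splits the channel dim
        # into (groups, channels // groups), -1 infers, -2 copies the remaining
        # dims, -3 merges two dims back; swapping axes 1 and 2 does the shuffle.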
data = F.reshape(data, shape=(0, -4, groups, -1, -2))
data = F.swapaxes(data, 1, 2)
data = F.reshape(data, shape=(0, -3, -2))
return data
def hybrid_forward(self, F, x):
res = x
x = self.conv_beforshuffle(x)
if self.grouped_conv:
x = self.channel_shuffle(F, x, groups=self.groups)
x = self.conv_aftershuffle(x)
if self.combine_type == 'concat':
res = F.Pooling(data=res, kernel=(3, 3), pool_type='avg', stride=(2, 2), pad=(1, 1))
x = self.combine(F, res, x, combine=self.combine_type)
return x
class ShuffleNetV1(HybridBlock):
def __init__(self, groups=3, classes=1000, norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
super(ShuffleNetV1, self).__init__(**kwargs)
with self.name_scope():
self.features = nn.HybridSequential(prefix='')
self.features.add(nn.Conv2D(24, kernel_size=3, strides=2, padding=1))
self.features.add(nn.MaxPool2D(pool_size=3, strides=2, padding=1))
            self.features.add(self.make_stage(2, groups))
            self.features.add(self.make_stage(3, groups))
            self.features.add(self.make_stage(4, groups))
self.features.add(nn.GlobalAvgPool2D())
self.output = nn.Dense(classes)
def make_stage(self, stage, groups=3):
stage_repeats = [3, 7, 3]
grouped_conv = stage > 2
if groups == 1:
            out_channels = [-1, 24, 144, 288, 576]
elif groups == 2:
out_channels = [-1, 24, 200, 400, 800]
elif groups == 3:
out_channels = [-1, 24, 240, 480, 960]
elif groups == 4:
out_channels = [-1, 24, 272, 544, 1088]
elif groups == 8:
out_channels = [-1, 24, 384, 768, 1536]
body = nn.HybridSequential()
body.add(shuffleUnit(out_channels[stage - 1], out_channels[stage], 'concat', groups, grouped_conv))
for i in range(stage_repeats[stage - 2]):
body.add(shuffleUnit(out_channels[stage], out_channels[stage], 'add', groups, True))
return body
def hybrid_forward(self, F, x):
x = self.features(x)
x = self.output(x)
return x
class shuffleUnitV2(HybridBlock):
def __init__(self, in_channels, out_channels, split, norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
super(shuffleUnitV2, self).__init__(**kwargs)
self.in_channels = in_channels
self.equal_channels = out_channels // 2
self.split = split
if split:
self.DWConv_stride = 1
else:
self.DWConv_stride = 2
with self.name_scope():
self.branch1_conv = nn.HybridSequential()
self.branch1_conv.add(_conv2d(channel=self.in_channels, kernel=3, padding=1, stride=self.DWConv_stride, num_group=self.in_channels, use_act=False, use_bias=False))
self.branch1_conv.add(_conv2d(channel=self.equal_channels, kernel=1, stride=1, use_act=True, use_bias=False))
with self.name_scope():
self.branch2_conv = nn.HybridSequential()
self.branch2_conv.add(_conv2d(channel=self.equal_channels, kernel=1, stride=1, use_act=True, use_bias=False))
self.branch2_conv.add(_conv2d(channel=self.equal_channels, kernel=3, padding=1, stride=self.DWConv_stride, num_group=self.equal_channels, use_act=False, use_bias=False))
self.branch2_conv.add(_conv2d(channel=self.equal_channels, kernel=1, stride=1, use_act=True, use_bias=False))
def channel_shuffle(self, F, data, groups):
data = F.reshape(data, shape=(0, -4, groups, -1, -2))
data = F.swapaxes(data, 1, 2)
data = F.reshape(data, shape=(0, -3, -2))
return data
def hybrid_forward(self, F, x):
if self.split:
branch1 = F.slice_axis(x, axis=1, begin=0, end=self.in_channels // 2)
branch2 = F.slice_axis(x, axis=1, begin=self.in_channels // 2, end=self.in_channels)
else:
branch1 = x
branch2 = x
branch1 = self.branch1_conv(branch1)
branch2 = self.branch2_conv(branch2)
x = F.concat(branch1, branch2, dim=1)
x = self.channel_shuffle(F, data=x, groups=2)
return x
class ShuffleNetV2(HybridBlock):
def __init__(self, classes=1000, norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
super(ShuffleNetV2, self).__init__(**kwargs)
with self.name_scope():
self.features = nn.HybridSequential(prefix='')
self.features.add(_conv2d(channel=24, kernel=3, stride=2, padding=1, use_act=True, use_bias=False))
self.features.add(nn.MaxPool2D(pool_size=3, strides=2, padding=1))
self.features.add(self.make_stage(2))
self.features.add(self.make_stage(3))
self.features.add(self.make_stage(4))
self.features.add(_conv2d(channel=1024, kernel=1, stride=1, use_act=True, use_bias=False))
self.features.add(nn.GlobalAvgPool2D())
self.output = nn.Dense(classes)
def make_stage(self, stage, multiplier=1):
stage_repeats = [3, 7, 3]
if multiplier == 0.5:
out_channels = [-1, 24, 48, 96, 192]
elif multiplier == 1:
out_channels = [-1, 24, 116, 232, 464]
elif multiplier == 1.5:
out_channels = [-1, 24, 176, 352, 704]
elif multiplier == 2:
out_channels = [-1, 24, 244, 488, 976]
body = nn.HybridSequential()
body.add(shuffleUnitV2(out_channels[stage - 1], out_channels[stage], split=False))
for i in range(stage_repeats[stage - 2]):
body.add(shuffleUnitV2(out_channels[stage], out_channels[stage], split=True))
return body
def hybrid_forward(self, F, x):
x = self.features(x)
x = self.output(x)
return x
def get_shufflenet_v1(pretrained=False, root='~/.mxnet/models', ctx=cpu(), norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
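    # NOTE: pretrained, root and ctx are accepted for gluoncv API compatibility
    # but are unused here; no pretrained weights are loaded by this function.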
net = ShuffleNetV1(norm_layer=norm_layer, norm_kwargs=norm_kwargs, **kwargs)
from ..data import ImageNet1kAttr
attrib = ImageNet1kAttr()
net.synset = attrib.synset
net.classes = attrib.classes
net.classes_long = attrib.classes_long
return net
def shufflenet_v1(**kwargs):
return get_shufflenet_v1(**kwargs)
def get_shufflenet_v2(pretrained=False, root='~/.mxnet/models', ctx=cpu(), norm_layer=BatchNorm, norm_kwargs=None, **kwargs):
net = ShuffleNetV2(norm_layer=norm_layer, norm_kwargs=norm_kwargs, **kwargs)
from ..data import ImageNet1kAttr
attrib = ImageNet1kAttr()
net.synset = attrib.synset
net.classes = attrib.classes
net.classes_long = attrib.classes_long
return net
def shufflenet_v2(**kwargs):
return get_shufflenet_v2(**kwargs)
| 44.543478
| 181
| 0.652904
|
7948716cac2b5ca107e700ec1400b82288fa2deb
| 536
|
py
|
Python
|
setup.py
|
csullivannet/buffer
|
caa2b5e16dc21623fd52735cc79e0f45d63fdead
|
[
"MIT"
] | null | null | null |
setup.py
|
csullivannet/buffer
|
caa2b5e16dc21623fd52735cc79e0f45d63fdead
|
[
"MIT"
] | 8
|
2021-04-10T00:33:46.000Z
|
2021-04-12T00:16:55.000Z
|
setup.py
|
csullivannet/buffer
|
caa2b5e16dc21623fd52735cc79e0f45d63fdead
|
[
"MIT"
] | null | null | null |
import setuptools
setuptools.setup(
name="diagnose",
version="1.0",
author="Christopher Sullivan",
author_email="csullivannet@users.noreply.github.com",
description="Diagnoses issues with pods running on a kubernetes cluster",
packages=setuptools.find_packages(),
entry_points={"console_scripts": ["diagnose = diagnose.diagnose:main"]},
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
| 31.529412
| 77
| 0.675373
|
794871a0c5099f775a131f1f0e20f60da7a98e8f
| 93
|
py
|
Python
|
refex/errors.py
|
d-e-h-i-o/legal-reference-extraction
|
a9f7793124818d5710315f75e52b077a232acccf
|
[
"MIT"
] | null | null | null |
refex/errors.py
|
d-e-h-i-o/legal-reference-extraction
|
a9f7793124818d5710315f75e52b077a232acccf
|
[
"MIT"
] | null | null | null |
refex/errors.py
|
d-e-h-i-o/legal-reference-extraction
|
a9f7793124818d5710315f75e52b077a232acccf
|
[
"MIT"
] | null | null | null |
class RefExError(ValueError):
pass
class AmbiguousReferenceError(RefExError):
pass
| 13.285714
| 42
| 0.763441
|
794871ba5c1c0b5057e0a8ec77ce36bb03851754
| 6,335
|
py
|
Python
|
python/paddle/fluid/tests/unittests/mkldnn/test_pool2d_bf16_mkldnn_op.py
|
L-Net-1992/Paddle
|
4d0ca02ba56760b456f3d4b42a538555b9b6c307
|
[
"Apache-2.0"
] | 11
|
2016-08-29T07:43:26.000Z
|
2016-08-29T07:51:24.000Z
|
python/paddle/fluid/tests/unittests/mkldnn/test_pool2d_bf16_mkldnn_op.py
|
L-Net-1992/Paddle
|
4d0ca02ba56760b456f3d4b42a538555b9b6c307
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/fluid/tests/unittests/mkldnn/test_pool2d_bf16_mkldnn_op.py
|
L-Net-1992/Paddle
|
4d0ca02ba56760b456f3d4b42a538555b9b6c307
|
[
"Apache-2.0"
] | 1
|
2021-09-24T11:23:36.000Z
|
2021-09-24T11:23:36.000Z
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid.core as core
from paddle.fluid.tests.unittests.op_test import OpTest, OpTestTool, convert_float_to_uint16
from paddle.fluid.tests.unittests.test_pool2d_op import TestPool2D_Op_Mixin, max_pool2D_forward_naive
from paddle.fluid.tests.unittests.npu.test_pool2d_op_npu import pool2d_backward_navie as pool2d_backward_naive
from paddle import enable_static
@OpTestTool.skip_if_not_cpu_bf16()
class TestPoolBf16MklDNNOpGrad(TestPool2D_Op_Mixin, OpTest):
def init_kernel_type(self):
self.use_mkldnn = True
def init_data_type(self):
self.dtype = np.uint16
def setUp(self):
super(TestPoolBf16MklDNNOpGrad, self).setUp()
self.attrs['mkldnn_data_type'] = "bfloat16"
self.x_fp32 = np.random.random(self.shape).astype(np.float32)
output = self.pool2D_forward_naive(self.x_fp32, self.ksize,
self.strides, self.paddings,
self.global_pool, self.ceil_mode,
self.exclusive, self.adaptive,
"float32").astype(np.float32)
self.inputs = {'X': convert_float_to_uint16(self.x_fp32)}
self.outputs = {'Out': convert_float_to_uint16(output)}
def test_check_output(self):
self.check_output_with_place(core.CPUPlace())
def test_check_grad(self):
x_grad = pool2d_backward_naive(self.x_fp32,
ksize=self.ksize,
strides=self.strides,
paddings=self.paddings,
global_pool=self.global_pool,
ceil_mode=False,
exclusive=self.exclusive,
adaptive=self.adaptive,
data_format=self.data_format,
pool_type=self.pool_type,
padding_algorithm=self.padding_algorithm)
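        # check_grad checks the gradient of the averaged output, so the naive
        # full gradient is scaled by 1 / numel(Out) to match.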
x_grad = x_grad / np.prod(self.outputs['Out'].shape)
self.check_grad_with_place(core.CPUPlace(),
set(['X']),
'Out',
user_defined_grads=[x_grad])
@OpTestTool.skip_if_not_cpu_bf16()
class TestPoolBf16MklDNNOp(TestPool2D_Op_Mixin, OpTest):
def init_kernel_type(self):
self.use_mkldnn = True
def setUp(self):
TestPool2D_Op_Mixin.setUp(self)
self.dtype = np.uint16
input = np.random.random(self.shape).astype(np.float32)
output = (self.pool2D_forward_naive(input, self.ksize, self.strides,
self.paddings, self.global_pool,
self.ceil_mode, self.exclusive,
self.adaptive,
"float32")).astype(np.float32)
self.inputs = {'X': convert_float_to_uint16(input)}
self.outputs = {'Out': convert_float_to_uint16(output)}
def test_check_output(self):
self.check_output_with_place(core.CPUPlace())
def test_check_grad(self):
pass
class TestCase1Avg(TestPoolBf16MklDNNOp):
def init_test_case(self):
self.shape = [2, 3, 7, 7]
self.ksize = [3, 3]
self.strides = [1, 1]
self.paddings = [0, 0]
def init_global_pool(self):
self.global_pool = False
def init_exclusive(self):
self.exclusive = True
class TestCase2Avg(TestPoolBf16MklDNNOp):
def init_test_case(self):
self.shape = [2, 3, 7, 7]
self.ksize = [3, 3]
self.strides = [1, 1]
self.paddings = [1, 1]
def init_global_pool(self):
self.global_pool = False
def init_exclusive(self):
self.exclusive = False
class TestCase0Max(TestPoolBf16MklDNNOp):
def init_pool_type(self):
self.pool_type = "max"
self.pool2D_forward_naive = max_pool2D_forward_naive
class TestCase1Max(TestCase1Avg):
def init_pool_type(self):
self.pool_type = "max"
self.pool2D_forward_naive = max_pool2D_forward_naive
class TestCase2Max(TestCase2Avg):
def init_pool_type(self):
self.pool_type = "max"
self.pool2D_forward_naive = max_pool2D_forward_naive
class TestCase1PadZeroExclusiveAvgGrad(TestPoolBf16MklDNNOpGrad):
def init_test_case(self):
self.ksize = [3, 3]
self.strides = [1, 1]
def init_shape(self):
self.shape = [2, 3, 7, 7]
def init_paddings(self):
self.paddings = [0, 0]
def init_global_pool(self):
self.global_pool = False
def init_exclusive(self):
self.exclusive = True
class TestCase2PadOneNonExclusiveAvgGrad(TestCase1PadZeroExclusiveAvgGrad):
def init_exclusive(self):
self.exclusive = False
class TestCase0InitialMaxGrad(TestPoolBf16MklDNNOpGrad):
def init_pool_type(self):
self.pool_type = "max"
self.pool2D_forward_naive = max_pool2D_forward_naive
class TestCase1PadZeroExclusiveMaxGrad(TestCase1PadZeroExclusiveAvgGrad):
def init_pool_type(self):
self.pool_type = "max"
self.pool2D_forward_naive = max_pool2D_forward_naive
class TestCase2PadOneNonExclusiveMaxGrad(TestCase2PadOneNonExclusiveAvgGrad):
def init_pool_type(self):
self.pool_type = "max"
self.pool2D_forward_naive = max_pool2D_forward_naive
if __name__ == "__main__":
enable_static()
unittest.main()
| 31.994949
| 110
| 0.623678
|
7948720e84b78526e5e70515d37bbf49ee5ce19a
| 2,101
|
py
|
Python
|
py2jdbc/__init__.py
|
swstephe/py2jdbc
|
515d06a48e54ca9f690f94fd795e49ba7758fcd4
|
[
"MIT-0"
] | 9
|
2018-12-28T12:42:21.000Z
|
2021-11-10T10:55:48.000Z
|
py2jdbc/__init__.py
|
swstephe/py2jdbc
|
515d06a48e54ca9f690f94fd795e49ba7758fcd4
|
[
"MIT-0"
] | 8
|
2018-12-26T10:01:51.000Z
|
2021-11-25T01:52:11.000Z
|
py2jdbc/__init__.py
|
swstephe/py2jdbc
|
515d06a48e54ca9f690f94fd795e49ba7758fcd4
|
[
"MIT-0"
] | 1
|
2019-09-24T15:12:35.000Z
|
2019-09-24T15:12:35.000Z
|
# -*- coding: utf8 -*-
from py2jdbc.dbi import (
apilevel,
threadsafety,
paramstyle,
ARRAY,
BIGINT,
BINARY,
BIT,
BLOB,
BOOLEAN,
CHAR,
CLOB,
DATALINK,
DATE,
DECIMAL,
DISTINCT,
DOUBLE,
FLOAT,
INTEGER,
JAVA_OBJECT,
LONGNVARCHAR,
LONGVARBINARY,
LONGVARCHAR,
NCHAR,
NCLOB,
NULL,
NUMERIC,
NVARCHAR,
OTHER,
REAL,
REF,
REF_CURSOR,
ROWID,
SMALLINT,
SQLXML,
STRUCT,
TIME,
TIME_WITH_TIMEZONE,
TIMESTAMP,
TIMESTAMP_WITH_TIMEZONE,
TINYINT,
VARBINARY,
VARCHAR,
# noinspection PyShadowingBuiltins
Warning,
Error,
InterfaceError,
DatabaseError,
DataError,
OperationalError,
IntegrityError,
InternalError,
ProgrammingError,
NotSupportedError,
Cursor,
Connection,
connect,
Date,
Time,
Timestamp,
DateFromTicks,
TimeFromTicks,
TimestampFromTicks,
Binary,
)
__all__ = (
'__version__',
'apilevel',
'Binary',
'connect',
'Connection',
'Cursor',
'DatabaseError',
'DataError',
'Date',
'DateFromTicks',
'Error',
'IntegrityError',
'InterfaceError',
'InternalError',
'NotSupportedError',
'OperationalError',
'paramstyle',
'ProgrammingError',
'threadsafety',
'Time',
'TimeFromTicks',
'Timestamp',
'TimestampFromTicks',
'version',
'Warning',
'ARRAY',
'BIGINT',
'BINARY',
'BIT',
'BLOB',
'BOOLEAN',
'CHAR',
'CLOB',
'DATALINK',
'DATE',
'DECIMAL',
'DISTINCT',
'DOUBLE',
'FLOAT',
'INTEGER',
'JAVA_OBJECT',
'LONGNVARCHAR',
'LONGVARBINARY',
'LONGVARCHAR',
'NCHAR',
'NCLOB',
'NULL',
'NUMERIC',
'NVARCHAR',
'OTHER',
'REAL',
'REF',
'REF_CURSOR',
'ROWID',
'SMALLINT',
'SQLXML',
'STRUCT',
'TIME',
'TIME_WITH_TIMEZONE',
'TIMESTAMP',
'TIMESTAMP_WITH_TIMEZONE',
'TINYINT',
'VARBINARY',
'VARCHAR',
)
__version__ = version = '0.0.6'
| 14.900709
| 38
| 0.553546
|
79487284c043e31677621fc3e79ac1555ae43c8c
| 150
|
py
|
Python
|
flask_oauthlib/contrib/client/exceptions.py
|
PCMan/flask-oauthlib
|
3735210211ac0e50c4d32b887bbd61722dd175c7
|
[
"BSD-3-Clause"
] | 1,292
|
2015-01-04T03:20:35.000Z
|
2022-03-23T11:08:15.000Z
|
flask_oauthlib/contrib/client/exceptions.py
|
PCMan/flask-oauthlib
|
3735210211ac0e50c4d32b887bbd61722dd175c7
|
[
"BSD-3-Clause"
] | 217
|
2015-01-05T09:51:41.000Z
|
2020-09-05T04:41:52.000Z
|
flask_oauthlib/contrib/client/exceptions.py
|
PCMan/flask-oauthlib
|
3735210211ac0e50c4d32b887bbd61722dd175c7
|
[
"BSD-3-Clause"
] | 496
|
2015-01-04T03:20:35.000Z
|
2022-03-19T08:31:42.000Z
|
__all__ = ['OAuthException', 'AccessTokenNotFound']
class OAuthException(Exception):
pass
class AccessTokenNotFound(OAuthException):
pass
| 15
| 51
| 0.76
|
7948730671953d1af6efd33761dfe6dffe6f8bf6
| 13,999
|
py
|
Python
|
tests/strings_test.py
|
mathdan/vaex
|
16e91add76d2570f2f976e5936d8e4721ea3dd16
|
[
"MIT"
] | 1
|
2019-06-05T00:10:36.000Z
|
2019-06-05T00:10:36.000Z
|
tests/strings_test.py
|
alimcmaster1/vaex
|
7043df824cd8915b2cca773854023bb10b5ef538
|
[
"MIT"
] | 1
|
2019-06-03T21:25:01.000Z
|
2019-06-03T21:25:01.000Z
|
tests/strings_test.py
|
mathdan/vaex
|
16e91add76d2570f2f976e5936d8e4721ea3dd16
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import sys
import re
import vaex
import numpy as np
import pytest
try:
unicode
str_kind = 'S'
except:
str_kind = 'U'
@pytest.mark.skipif(sys.version_info < (3,3),
                    reason="requires python3.3 or higher")
def test_dtype_object_string(tmpdir):
x = np.arange(8,12)
s = np.array(list(map(str, x)), dtype='O')
df = vaex.from_arrays(x=x, s=s)
assert df.columns['s'].dtype.kind == 'O'
path = str(tmpdir.join('test.arrow'))
df.export(path)
df_read = vaex.open(path)
# the data type of s can be different
assert df_read.compare(df) == ([], [], [], [])
def test_export_arrow_strings_to_hdf5(tmpdir):
df = vaex.from_arrays(names=np.array(['hi', 'is', 'l2', np.nan], dtype='O'))
path = str(tmpdir.join('test.arrow'))
df.export(path)
df_read_arrow = vaex.open(path)
path = str(tmpdir.join('test.hdf5'))
df.export(path)
df_read_hdf5 = vaex.open(path)
assert df_read_hdf5.compare(df_read_arrow) == ([], [], [], [])
def test_arrow_strings_concat(tmpdir):
df = vaex.from_arrays(names=['hi', 'is', 'l2'])
path = str(tmpdir.join('test.arrow'))
df.export(path)
df_read_arrow = vaex.open(path)
path = str(tmpdir.join('test.hdf5'))
df_read_arrow.export(path)
df_read_hdf5 = vaex.open(path)
assert df_read_hdf5.compare(df_read_arrow) == ([], [], [], [])
def test_concat():
ds1 = vaex.from_arrays(names=['hi', 'is', 'l2'])
ds2 = vaex.from_arrays(names=['hello', 'this', 'is', 'long'])
ds = ds1.concat(ds2)
assert len(ds) == len(ds1) + len(ds2)
assert ds.dtype('names') == vaex.column.str_type
assert ds.dtype('names') != np.object
def test_string_count_stat():
ds = vaex.from_arrays(names=['hello', 'this', 'is', 'long'])
assert ds.count(ds.names) == 4
ds = vaex.from_arrays(names=np.ma.array(['hello', 'this', 'is', 'long'], mask=[0, 0, 1, 0]))
assert ds.count(ds.names) == 3
df = vaex.from_arrays(names=np.array(['hi', 'is', 'l2', np.nan], dtype='O'))
    assert df.count(df.names) == 3
names = vaex.string_column(['hello', 'this', None, 'long'])
x = np.arange(len(names))
df = vaex.from_arrays(names=names, x=x)
    assert df.count(df.names, binby='x', limits=[0, 100], shape=1).tolist() == [3]
@pytest.mark.skip
def test_string_dtype_with_none():
ds = vaex.from_arrays(names=['hello', 'this', 'is', None])
assert ds.count(ds.names) == 3
def test_unicode_dtype():
ds = vaex.from_arrays(names=['bla\u1234'])
assert ds.names.dtype.kind == 'U'
ds = vaex.from_arrays(names=['bla'])
assert ds.names.dtype.kind == 'U'
@pytest.mark.skipif(sys.version_info < (3,3),
                    reason="requires python3.3 or higher")
def test_concat_mixed():
# this can happen when you want to concat multiple csv files
# and pandas makes one have nans, since they all have missing values
# and the other string
ds1 = vaex.from_arrays(names=['not', 'missing'])
ds2 = vaex.from_arrays(names=[np.nan, np.nan])
assert ds1.dtype(ds1.names) == str
assert ds2.dtype(ds2.names) == np.float64
ds = ds1.concat(ds2)
assert len(ds) == len(ds1) + len(ds2)
assert ds.dtype(ds.names) == ds1.names.dtype
def test_strip():
ds = vaex.from_arrays(names=['this ', ' has', ' space'])
ds['stripped'] = ds.names.str.strip()
    assert ds.stripped.tolist() == ['this', 'has', 'space']
@pytest.mark.skipif(sys.version_info < (3,3),
                    reason="requires python3.3 or higher")
def test_unicode(tmpdir):
path = str(tmpdir.join('utf32.hdf5'))
ds = vaex.from_arrays(names=["vaex", "or", "væx!"])
assert ds.names.dtype == vaex.column.str_type
ds.export_hdf5(path)
ds = vaex.open(path)
assert ds.names.dtype == vaex.column.str_type
assert ds.names.tolist() == ["vaex", "or", "væx!"]
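# Every string test below runs against two backends via the parametrized `dfs`
# fixture: arrow-backed string columns and numpy object arrays.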
@pytest.fixture(params=['dfs_arrow', 'dfs_array'])
def dfs(request, dfs_arrow, dfs_array):
named = dict(dfs_arrow=dfs_arrow, dfs_array=dfs_array)
return named[request.param]
string_list = ["vaex", " \tor", "VæX! ", "vaex or VÆX!", "Æ and", "æ are weird", "12", "æ", "a1", "a1æ", "\t "]
unicode_compat = lambda x: x
try:
unicode
unicode_compat = lambda x: x.decode('utf8')
string_list = map(unicode_compat, string_list)
except NameError:
pass
string_list_reverse = string_list[::-1]
@pytest.fixture()
def dfs_arrow():
return vaex.from_arrays(s=vaex.string_column(string_list), sr=vaex.string_column(string_list_reverse))
def test_null_values():
df = vaex.from_arrays(s=vaex.string_column(['aap', None, 'mies']), x=[0, 1, 2])
assert df.count() == 3
assert df.count(df.s) == 2
assert df.count(df.s, selection=df.x > 0) == 1
@pytest.fixture()
def dfs_array():
return vaex.from_arrays(s=np.array(string_list, dtype='O'), sr=np.array(string_list_reverse, dtype='O'))
def test_byte_length(dfs):
assert dfs.s.str.byte_length().tolist() == [len(k.encode('utf8')) for k in string_list]
def test_string_capitalize(dfs):
assert dfs.s.str.capitalize().tolist() == dfs.s.str_pandas.capitalize().tolist()
def test_string_cat(dfs):
c = [s1+s2 for s1, s2 in zip(string_list, string_list_reverse)]
assert dfs.s.str.cat(dfs.sr).tolist() == c
assert dfs.s.str_pandas.cat(dfs.sr).tolist() == c
def test_string_contains(dfs):
assert dfs.s.str.contains('v', regex=False).tolist() == [True, False, False, True, False, False, False, False, False, False, False]
assert dfs.s.str.contains('æ', regex=False).tolist() == [False, False, True, False, False, True, False, True, False, True, False]
assert dfs.s.str.contains('Æ', regex=False).tolist() == [False, False, False, True, True, False, False, False, False, False, False]
@pytest.mark.parametrize("width", [2, 10])
def test_string_center(dfs, width):
assert dfs.s.str.center(width).tolist() == dfs.s.str_pandas.center(width).tolist()
def test_string_counts(dfs):
assert dfs.s.str.count("v", regex=False).tolist() == dfs.s.str_pandas.count("v").tolist()
assert dfs.s.str.count("[va]", regex=True).tolist() == dfs.s.str_pandas.count("[va]").tolist()
def test_string_endswith(dfs):
assert dfs.s.str.endswith("x").tolist() == dfs.s.str_pandas.endswith("x").tolist()
@pytest.mark.parametrize("sub", ["v", unicode_compat("æ")])
@pytest.mark.parametrize("start", [0, 3, 5])
@pytest.mark.parametrize("end", [-1, 3, 5, 10])
def test_string_find(dfs, sub, start, end):
assert dfs.s.str.find(sub, start, end).tolist() == dfs.s.str_pandas.find(sub, start, end).tolist()
@pytest.mark.parametrize("i", [-1, 3, 5, 10])
def test_string_get(dfs, i):
x = dfs.s.str_pandas.get(i).values.values
assert dfs.s.str.get(i).tolist() == [k[i] if i < len(k) else '' for k in string_list]
@pytest.mark.parametrize("sub", ["v", unicode_compat("æ")])
@pytest.mark.parametrize("start", [0, 3, 5])
@pytest.mark.parametrize("end", [-1, 3, 5, 10])
def test_string_index(dfs, sub, start, end):
assert dfs.s.str.find(sub, start, end).tolist() == dfs.s.str.index(sub, start, end).tolist()
@pytest.mark.parametrize("pattern", [None, ' '])
def test_string_join(dfs, pattern):
assert dfs.s.str.split(pattern).str.join('-').tolist() == dfs.s.str.split(pattern).str.join('-').tolist()
def test_string_len(dfs):
assert dfs.s.str.len().astype('i4').tolist() == [len(k) for k in string_list]
assert dfs.s.str_pandas.len().astype('i4').tolist() == [len(k) for k in string_list]
@pytest.mark.parametrize("width", [2, 10])
def test_string_ljust(dfs, width):
assert dfs.s.str.ljust(width).tolist() == dfs.s.str_pandas.ljust(width).tolist()
def test_string_lower(dfs):
assert dfs.s.str.lower().tolist() == dfs.s.str_pandas.lower().tolist()
def test_string_lstrip(dfs):
assert dfs.s.str.lstrip().tolist() == dfs.s.str_pandas.lstrip().tolist()
assert dfs.s.str.lstrip('vV ').tolist() == dfs.s.str_pandas.lstrip('vV ').tolist()
def test_string_match(dfs):
assert dfs.s.str.match('^v.*').tolist() == dfs.s.str_pandas.match('^v.*').tolist()
assert dfs.s.str.match('^v.*').tolist() == [k.startswith('v') for k in string_list]
# TODO: normalize
@pytest.mark.parametrize("width", [2, 10])
@pytest.mark.parametrize("side", ['left', 'right', 'both'])
def test_string_pad(dfs, width, side):
assert dfs.s.str.pad(width, side=side).tolist() == dfs.s.str_pandas.pad(width, side=side).tolist()
# TODO: partition
@pytest.mark.parametrize("repeats", [1, 3])
def test_string_repeat(dfs, repeats):
assert dfs.s.str.repeat(repeats).tolist() == dfs.s.str_pandas.repeat(repeats).tolist()
@pytest.mark.parametrize("pattern", ["v", " ", unicode_compat("VæX")])
@pytest.mark.parametrize("replacement", ["?", unicode_compat("VæX")])
@pytest.mark.parametrize("n", [-1, 1])
def test_string_replace(dfs, pattern, replacement, n):
assert dfs.s.str.replace(pattern, replacement, n).tolist() == dfs.s.str_pandas.replace(pattern, replacement, n).tolist()
@pytest.mark.parametrize("pattern", ["v", " "])
@pytest.mark.parametrize("replacement", ["?", unicode_compat("VæX")])
@pytest.mark.parametrize("flags", [0, int(re.IGNORECASE)])
def test_string_replace_regex(dfs, pattern, replacement, flags):
assert dfs.s.str.replace(pattern, replacement, flags=flags, regex=True).tolist() == dfs.s.str_pandas.replace(pattern, replacement, flags=flags, regex=True).tolist()
@pytest.mark.xfail(reason='unicode not supported fully in regex')
@pytest.mark.parametrize("pattern", [unicode_compat("VæX")])
@pytest.mark.parametrize("replacement", ["?", unicode_compat("VæX")])
@pytest.mark.parametrize("flags", [0, int(re.IGNORECASE)])
def test_string_replace_regex_unicode(dfs, pattern, replacement, flags):
assert dfs.s.str.replace(pattern, replacement, flags=flags, regex=True).tolist() == dfs.s.str_pandas.replace(pattern, replacement, flags=flags, regex=True).tolist()
@pytest.mark.parametrize("sub", ["v", unicode_compat("æ")])
@pytest.mark.parametrize("start", [0, 3, 5])
@pytest.mark.parametrize("end", [-1, 3, 5, 10])
def test_string_rfind(dfs, sub, start, end):
assert dfs.s.str.rfind(sub, start, end).tolist() == dfs.s.str_pandas.rfind(sub, start, end).tolist()
@pytest.mark.parametrize("sub", ["v", unicode_compat("æ")])
@pytest.mark.parametrize("start", [0, 3, 5])
@pytest.mark.parametrize("end", [-1, 3, 5, 10])
def test_string_rindex(dfs, sub, start, end):
assert dfs.s.str.rindex(sub, start, end).tolist() == dfs.s.str_pandas.rfind(sub, start, end).tolist()
@pytest.mark.parametrize("width", [2, 10])
def test_string_rjust(dfs, width):
assert dfs.s.str.rjust(width).tolist() == dfs.s.str_pandas.rjust(width).tolist()
def test_string_rstrip(dfs):
assert dfs.s.str.rstrip().tolist() == dfs.s.str_pandas.rstrip().tolist()
assert dfs.s.str.rstrip('x! ').tolist() == dfs.s.str_pandas.rstrip('x! ').tolist()
# @pytest.mark.parametrize("start", [0, 3, 5])
# @pytest.mark.parametrize("end", [-1, 3, 5, 10])
@pytest.mark.parametrize("start", [0, -1, -5, 10])
@pytest.mark.parametrize("end", [None, -1, 3, 1000])
def test_string_slice(dfs, start, end):
assert dfs.s.str.slice(start, end).tolist() == dfs.s.str_pandas.slice(start, end).tolist()
def test_string_startswith(dfs):
assert dfs.s.str.startswith("x").tolist() == dfs.s.str_pandas.startswith("x").tolist()
def test_string_strip(dfs):
assert dfs.s.str.rstrip().tolist() == dfs.s.str_pandas.rstrip().tolist()
assert dfs.s.str.rstrip('vx! ').tolist() == dfs.s.str_pandas.rstrip('vx! ').tolist()
def test_string_title(dfs):
assert dfs.s.str.title().tolist() == dfs.s.str_pandas.title().tolist()
def test_string_isalnum(dfs):
assert dfs.s.str.isalnum().tolist() == dfs.s.str_pandas.isalnum().tolist()
def test_string_isalpha(dfs):
assert dfs.s.str.isalpha().tolist() == dfs.s.str_pandas.isalpha().tolist()
def test_string_isdigit(dfs):
assert dfs.s.str.isdigit().tolist() == dfs.s.str_pandas.isdigit().tolist()
def test_string_isspace(dfs):
assert dfs.s.str.isspace().tolist() == dfs.s.str_pandas.isspace().tolist()
def test_string_islower(dfs):
assert dfs.s.str.islower().tolist() == dfs.s.str_pandas.islower().tolist()
assert dfs.s.str.lower().str.islower().tolist() == dfs.s.str_pandas.lower().str_pandas.islower().tolist()
def test_string_isupper(dfs):
assert dfs.s.str.isupper().tolist() == dfs.s.str_pandas.isupper().tolist()
assert dfs.s.str.upper().str.isupper().tolist() == dfs.s.str_pandas.upper().str_pandas.isupper().tolist()
# def test_string_istitle(dfs):
# assert dfs.s.str.istitle().tolist() == dfs.s.str_pandas.istitle().tolist()
# assert dfs.s.str.title.istitle().tolist() == dfs.s.str_pandas.title().str_pandas.istitle().tolist()
@pytest.mark.parametrize("width", [2, 10])
def test_string_zfill(dfs, width):
assert dfs.s.str.zfill(width).tolist() == dfs.s.str_pandas.zfill(width).tolist()
def test_to_string():
x = np.arange(1, 4, dtype='f4')
df = vaex.from_arrays(x=x)
df['s'] = df.x.to_string()
assert df.s.tolist() == ["%f" % k for k in x]
def test_format():
x = np.arange(1, 4, dtype='f4')
df = vaex.from_arrays(x=x)
df['s'] = df.x.format("%g")
assert df.s.tolist() == ["%g" % k for k in x]
def test_string_strip_special_case():
strings = ["Explanation\nWhy the edits made under my username Hardcore Metallica Fan were reverted? They weren't vandalisms, just closure on some GAs after I voted at New York Dolls FAC. And please don't remove the template from the talk page since I'm retired now.89.205.38.27"]
df = vaex.from_arrays(s=vaex.string_column(strings))
df.s.str.strip(' ').values#.get(0)
def test_string_upper_special_case():
strings = ['The eunuch in question left me no choice but to reinsert it. Take action as you see fit.·snunɐw·']
df = vaex.from_arrays(s=vaex.string_column(strings))
assert df.s.str.upper().tolist() == df.s.str_pandas.upper().tolist()
@pytest.mark.xfail(reason='we need to fix this, similar to upper and lower')
def test_string_capitalize_special_case():
strings = ['ɐa', 'aap']
df = vaex.from_arrays(s=vaex.string_column(strings))
assert df.s.str.capitalize().tolist() == df.s.str_pandas.capitalize().tolist()
def test_string_slice_repr():
s = ['Here', 'is', 'a', 'simple', 'unit-test']
df = vaex.from_arrays(s=s)
df['sliced_s'] = df.s.str.slice(start=2, stop=5)
repr(df['sliced_s'])
| 40.111748
| 280
| 0.688978
|
7948737c85f3ca06ee4c0ab28b541197bee0405c
| 17,737
|
py
|
Python
|
src/multihead_attention.py
|
a414351664/TRAB-IKE
|
3dd07221e1854c974127d7f6d0d95779a25166c0
|
[
"MIT"
] | 1
|
2021-06-03T10:22:07.000Z
|
2021-06-03T10:22:07.000Z
|
src/multihead_attention.py
|
LibertFan/EKI-BART
|
b822384cf9d4aa9adda46f7f306c024782fa5f15
|
[
"MIT"
] | null | null | null |
src/multihead_attention.py
|
LibertFan/EKI-BART
|
b822384cf9d4aa9adda46f7f306c024782fa5f15
|
[
"MIT"
] | null | null | null |
import math
import torch
from torch import nn
from torch.nn import Parameter
import torch.nn.functional as F
from fairseq import utils
def Embedding(num_embeddings, embedding_dim, padding_idx=None, init_scale=None):
m = nn.Embedding(num_embeddings, embedding_dim, padding_idx=padding_idx)
nn.init.normal_(m.weight, mean=0, std=embedding_dim ** -0.5 if init_scale is None else init_scale)
if padding_idx is not None:
nn.init.constant_(m.weight[padding_idx], 0)
return m
class MultiheadAttention(nn.Module):
"""Multi-headed attention.
See "Attention Is All You Need" for more details.
"""
def __init__(self, embed_dim, num_heads, kdim=None, vdim=None, dropout=0., bias=True,
add_bias_kv=False, add_zero_attn=False, self_attention=False,
encoder_decoder_attention=False,
apply_chunk_mask=False,
add_encoder_position=False, max_positions=1024, position_embedding_init_scale=None):
super().__init__()
self.embed_dim = embed_dim
self.kdim = kdim if kdim is not None else embed_dim
self.vdim = vdim if vdim is not None else embed_dim
self.qkv_same_dim = self.kdim == embed_dim and self.vdim == embed_dim
self.num_heads = num_heads
self.dropout = dropout
self.head_dim = embed_dim // num_heads
assert self.head_dim * num_heads == self.embed_dim, "embed_dim must be divisible by num_heads"
self.scaling = self.head_dim ** -0.5
self.self_attention = self_attention
self.encoder_decoder_attention = encoder_decoder_attention
assert not self.self_attention or self.qkv_same_dim, 'Self-attention requires query, key and ' \
'value to be of the same size'
self.k_proj = nn.Linear(self.kdim, embed_dim, bias=bias)
self.v_proj = nn.Linear(self.vdim, embed_dim, bias=bias)
self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
if add_bias_kv:
self.bias_k = Parameter(torch.Tensor(1, 1, embed_dim))
self.bias_v = Parameter(torch.Tensor(1, 1, embed_dim))
else:
self.bias_k = self.bias_v = None
self.add_zero_attn = add_zero_attn
self.apply_chunk_mask = apply_chunk_mask
self.add_encoder_position = add_encoder_position
if self.add_encoder_position:
self.encoder_position_embedding = Embedding(
max_positions, embed_dim, padding_idx=None, init_scale=position_embedding_init_scale
)
else:
self.encoder_position_embedding = None
self.reset_parameters()
self.onnx_trace = False
self.enable_torch_version = False
if hasattr(F, "multi_head_attention_forward"):
self.enable_torch_version = True
else:
self.enable_torch_version = False
def prepare_for_onnx_export_(self):
self.onnx_trace = True
def reset_parameters(self):
if self.qkv_same_dim:
# Empirically observed the convergence to be much better with
# the scaled initialization
nn.init.xavier_uniform_(self.k_proj.weight, gain=1/math.sqrt(2))
nn.init.xavier_uniform_(self.v_proj.weight, gain=1/math.sqrt(2))
nn.init.xavier_uniform_(self.q_proj.weight, gain=1/math.sqrt(2))
else:
nn.init.xavier_uniform_(self.k_proj.weight)
nn.init.xavier_uniform_(self.v_proj.weight)
nn.init.xavier_uniform_(self.q_proj.weight)
nn.init.xavier_uniform_(self.out_proj.weight)
nn.init.constant_(self.out_proj.bias, 0.)
if self.bias_k is not None:
nn.init.xavier_normal_(self.bias_k)
if self.bias_v is not None:
nn.init.xavier_normal_(self.bias_v)
def forward(
self,
query, key, value,
key_padding_mask=None,
incremental_state=None,
need_weights=True,
static_kv=False,
attn_mask=None,
before_softmax=False,
need_head_weights=False,
chunk_mask=None,
positions=None,
):
"""Input shape: Time x Batch x Channel
Args:
key_padding_mask (ByteTensor, optional): mask to exclude
keys that are pads, of shape `(batch, src_len)`, where
padding elements are indicated by 1s.
need_weights (bool, optional): return the attention weights,
averaged over heads (default: False).
attn_mask (ByteTensor, optional): typically used to
implement causal attention, where the mask prevents the
attention from looking forward in time (default: None).
before_softmax (bool, optional): return the raw attention
weights and values before the attention softmax.
need_head_weights (bool, optional): return the attention
weights for each head. Implies *need_weights*. Default:
return the average attention weights over all heads.
"""
if need_head_weights:
need_weights = True
tgt_len, bsz, embed_dim = query.size()
assert embed_dim == self.embed_dim
assert list(query.size()) == [tgt_len, bsz, embed_dim]
if incremental_state is not None:
saved_state = self._get_input_buffer(incremental_state)
if 'prev_key' in saved_state:
# previous time steps are cached - no need to recompute
# key and value if they are static
if static_kv:
assert self.encoder_decoder_attention and not self.self_attention
key = value = None
else:
saved_state = None
if self.self_attention:
q = self.q_proj(query)
k = self.k_proj(query)
v = self.v_proj(query)
elif self.encoder_decoder_attention:
# encoder-decoder attention
q = self.q_proj(query)
if key is None:
assert value is None
k = v = None
else:
k = self.k_proj(key)
v = self.v_proj(key)
else:
q = self.q_proj(query)
k = self.k_proj(key)
v = self.v_proj(value)
q *= self.scaling
if self.bias_k is not None:
assert self.bias_v is not None
k = torch.cat([k, self.bias_k.repeat(1, bsz, 1)])
v = torch.cat([v, self.bias_v.repeat(1, bsz, 1)])
if attn_mask is not None:
attn_mask = torch.cat([attn_mask, attn_mask.new_zeros(attn_mask.size(0), 1)], dim=1)
if key_padding_mask is not None:
key_padding_mask = torch.cat(
[key_padding_mask, key_padding_mask.new_zeros(key_padding_mask.size(0), 1)], dim=1)
q = q.contiguous().view(tgt_len, bsz * self.num_heads, self.head_dim).transpose(0, 1)
if k is not None:
k = k.contiguous().view(-1, bsz * self.num_heads, self.head_dim).transpose(0, 1)
if v is not None:
v = v.contiguous().view(-1, bsz * self.num_heads, self.head_dim).transpose(0, 1)
if saved_state is not None:
# saved states are stored with shape (bsz, num_heads, seq_len, head_dim)
if 'prev_key' in saved_state:
prev_key = saved_state['prev_key'].view(bsz * self.num_heads, -1, self.head_dim)
if static_kv:
k = prev_key
else:
k = torch.cat((prev_key, k), dim=1)
if 'prev_value' in saved_state:
prev_value = saved_state['prev_value'].view(bsz * self.num_heads, -1, self.head_dim)
if static_kv:
v = prev_value
else:
v = torch.cat((prev_value, v), dim=1)
key_padding_mask = self._append_prev_key_padding_mask(
key_padding_mask=key_padding_mask,
prev_key_padding_mask=saved_state.get('prev_key_padding_mask', None),
batch_size=bsz,
src_len=k.size(1),
static_kv=static_kv,
)
saved_state['prev_key'] = k.view(bsz, self.num_heads, -1, self.head_dim)
saved_state['prev_value'] = v.view(bsz, self.num_heads, -1, self.head_dim)
saved_state['prev_key_padding_mask'] = key_padding_mask
self._set_input_buffer(incremental_state, saved_state)
src_len = k.size(1)
# This is part of a workaround to get around fork/join parallelism
# not supporting Optional types.
if key_padding_mask is not None and key_padding_mask.shape == torch.Size([]):
key_padding_mask = None
if key_padding_mask is not None:
assert key_padding_mask.size(0) == bsz
assert key_padding_mask.size(1) == src_len
if self.add_zero_attn:
src_len += 1
k = torch.cat([k, k.new_zeros((k.size(0), 1) + k.size()[2:])], dim=1)
v = torch.cat([v, v.new_zeros((v.size(0), 1) + v.size()[2:])], dim=1)
if attn_mask is not None:
attn_mask = torch.cat([attn_mask, attn_mask.new_zeros(attn_mask.size(0), 1)], dim=1)
if key_padding_mask is not None:
key_padding_mask = torch.cat(
[key_padding_mask, torch.zeros(key_padding_mask.size(0), 1).type_as(key_padding_mask)], dim=1)
attn_weights = torch.bmm(q, k.transpose(1, 2))
if positions is not None and self.encoder_position_embedding is not None:
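            # Learned per-position attention bias: score q against an embedding
            # for every source position, then gather, for each (query, key)
            # pair, the column matching that key's position id and add it to
            # the content-based attention weights.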
# print(' attn-weights: ', attn_weights.min(), attn_weights.max())
max_position = positions.max().item() + 1
# print('| multihead', positions.size(0), positions.dim())
if positions.dim() == 2:
positions = positions.unsqueeze(1).repeat(1, tgt_len, 1)
# print('| max_positions: ', max_position)
position_embeddings = self.encoder_position_embedding(
torch.arange(max_position, dtype=torch.long, device=q.device)).\
view(max_position, self.num_heads, self.head_dim).\
unsqueeze(0).repeat(bsz, 1, 1, 1).\
transpose(1, 2).contiguous().\
view(bsz*self.num_heads, max_position, self.head_dim)
# print('| sel-position_embeddings: ', position_embeddings.size(), position_embeddings.min(), position_embeddings.max())
attn_position_weights = torch.bmm(q, position_embeddings.transpose(1, 2))
# print('| pre-attn-position-weights: ', attn_position_weights.size(), attn_position_weights.min(), attn_position_weights.max())
# print('| attn_position_weights: ', attn_position_weights.size())
positions = positions.unsqueeze(1).repeat(1, self.num_heads, 1, 1).\
view(bsz*self.num_heads, tgt_len, src_len)
# print('| positions: ', positions.min(), positions.max())
# print('| positions: ', positions.min(), positions.max())
attn_position_weights = attn_position_weights.gather(-1, positions)
# print('| post-attn-position-weights: ', attn_position_weights.min(), attn_position_weights.max())
# print('-'*50)
# print('| attn_position_weights: ', attn_position_weights.min(), attn_position_weights.max())
# print('| attn_weights: ', attn_weights.min(), attn_weights.max())
# print('| attn_position_weights: ', attn_position_weights[0])
attn_weights += attn_position_weights
attn_weights = self.apply_sparse_mask(attn_weights, tgt_len, src_len, bsz)
assert list(attn_weights.size()) == [bsz * self.num_heads, tgt_len, src_len]
if attn_mask is not None:
attn_mask = attn_mask.unsqueeze(0)
if self.onnx_trace:
attn_mask = attn_mask.repeat(attn_weights.size(0), 1, 1)
attn_weights += attn_mask
if self.apply_chunk_mask and chunk_mask is not None:
attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
attn_weights = attn_weights.masked_fill(chunk_mask.unsqueeze(1), float("-inf"))
attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
if key_padding_mask is not None:
# don't attend to padding symbols
attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
attn_weights = attn_weights.masked_fill(
key_padding_mask.unsqueeze(1).unsqueeze(2),
float('-inf'),
)
attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
if before_softmax:
return attn_weights, v
attn_weights_float = utils.softmax(attn_weights, dim=-1, onnx_trace=self.onnx_trace)
attn_weights = attn_weights_float.type_as(attn_weights)
attn_probs = F.dropout(attn_weights_float.type_as(attn_weights), p=self.dropout, training=self.training)
attn = torch.bmm(attn_probs, v)
assert list(attn.size()) == [bsz * self.num_heads, tgt_len, self.head_dim]
if (self.onnx_trace and attn.size(1) == 1):
# when ONNX tracing a single decoder step (sequence length == 1)
# the transpose is a no-op copy before view, thus unnecessary
attn = attn.contiguous().view(tgt_len, bsz, embed_dim)
else:
attn = attn.transpose(0, 1).contiguous().view(tgt_len, bsz, embed_dim)
attn = self.out_proj(attn)
if need_weights:
attn_weights = attn_weights_float.view(bsz, self.num_heads, tgt_len, src_len).transpose(1, 0)
if not need_head_weights:
# average attention weights over heads
attn_weights = attn_weights.mean(dim=0)
else:
attn_weights = None
return attn, attn_weights
@staticmethod
def _append_prev_key_padding_mask(
key_padding_mask,
prev_key_padding_mask,
batch_size,
src_len,
static_kv,
):
# saved key padding masks have shape (bsz, seq_len)
if prev_key_padding_mask is not None and static_kv:
key_padding_mask = prev_key_padding_mask
elif prev_key_padding_mask is not None and key_padding_mask is not None:
key_padding_mask = torch.cat((prev_key_padding_mask, key_padding_mask), dim=1)
# During incremental decoding, as the padding token enters and
# leaves the frame, there will be a time when prev or current
# is None
elif prev_key_padding_mask is not None:
filler = torch.zeros(batch_size, src_len - prev_key_padding_mask.size(1)).bool()
if prev_key_padding_mask.is_cuda:
filler = filler.cuda()
key_padding_mask = torch.cat((prev_key_padding_mask, filler), dim=1)
elif key_padding_mask is not None:
filler = torch.zeros(batch_size, src_len - key_padding_mask.size(1)).bool()
if key_padding_mask.is_cuda:
filler = filler.cuda()
key_padding_mask = torch.cat((filler, key_padding_mask), dim=1)
return key_padding_mask
def reorder_incremental_state(self, incremental_state, new_order):
"""Reorder buffered internal state (for incremental generation)."""
input_buffer = self._get_input_buffer(incremental_state)
if input_buffer is not None:
for k in input_buffer.keys():
if input_buffer[k] is not None:
input_buffer[k] = input_buffer[k].index_select(0, new_order)
self._set_input_buffer(incremental_state, input_buffer)
def _get_input_buffer(self, incremental_state):
return utils.get_incremental_state(
self,
incremental_state,
'attn_state',
) or {}
def _set_input_buffer(self, incremental_state, buffer):
utils.set_incremental_state(
self,
incremental_state,
'attn_state',
buffer,
)
def apply_sparse_mask(self, attn_weights, tgt_len, src_len, bsz):
return attn_weights
def upgrade_state_dict_named(self, state_dict, name):
prefix = name + '.' if name != '' else ''
items_to_add = {}
keys_to_remove = []
for k in state_dict.keys():
if k.endswith(prefix + 'in_proj_weight'):
# in_proj_weight used to be q + k + v with same dimensions
dim = int(state_dict[k].shape[0] / 3)
items_to_add[prefix + 'q_proj.weight'] = state_dict[k][:dim]
items_to_add[prefix + 'k_proj.weight'] = state_dict[k][dim:2*dim]
items_to_add[prefix + 'v_proj.weight'] = state_dict[k][2*dim:]
keys_to_remove.append(k)
k_bias = prefix + 'in_proj_bias'
if k_bias in state_dict.keys():
dim = int(state_dict[k].shape[0] / 3)
items_to_add[prefix + 'q_proj.bias'] = state_dict[k_bias][:dim]
items_to_add[prefix + 'k_proj.bias'] = state_dict[k_bias][dim:2*dim]
items_to_add[prefix + 'v_proj.bias'] = state_dict[k_bias][2*dim:]
keys_to_remove.append(prefix + 'in_proj_bias')
for k in keys_to_remove:
del state_dict[k]
for key, value in items_to_add.items():
state_dict[key] = value
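# --- Hedged illustrative sketch (not part of the original fairseq module) ---
# Shows, standalone, the same split that upgrade_state_dict_named performs above:
# a legacy combined 'in_proj_weight' of shape (3*embed_dim, embed_dim) becomes
# separate q/k/v projection weights. The key name 'attn.in_proj_weight' and
# embed_dim=4 are made up purely for illustration.
if __name__ == '__main__':
    import torch
    embed_dim = 4
    legacy = {'attn.in_proj_weight': torch.randn(3 * embed_dim, embed_dim)}
    weight = legacy.pop('attn.in_proj_weight')
    dim = weight.shape[0] // 3
    legacy['attn.q_proj.weight'] = weight[:dim]
    legacy['attn.k_proj.weight'] = weight[dim:2 * dim]
    legacy['attn.v_proj.weight'] = weight[2 * dim:]
    assert all(w.shape == (embed_dim, embed_dim) for w in legacy.values())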
| 44.677582
| 140
| 0.611208
|
794873c8b74607ff71092388f155d5b0e53c1eca
| 5,028
|
py
|
Python
|
src/PlotGrids.py
|
LukeMcCulloch/PyCFD
|
6720e6575e25f8c274ef591d6c215de90a740935
|
[
"MIT"
] | 1
|
2020-07-04T15:42:15.000Z
|
2020-07-04T15:42:15.000Z
|
src/PlotGrids.py
|
LukeMcCulloch/PyCFD
|
6720e6575e25f8c274ef591d6c215de90a740935
|
[
"MIT"
] | null | null | null |
src/PlotGrids.py
|
LukeMcCulloch/PyCFD
|
6720e6575e25f8c274ef591d6c215de90a740935
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 5 16:04:47 2020
@author: lukemcculloch
"""
import numpy as np
# imports for plotting:
import matplotlib.pyplot as plt
class PlotGrid(object):
def __init__(self, grid):
self.grid = grid
def plot_cells(self, canvas = None,
alpha=.1):
if canvas is None:
fig, ax = plt.subplots()
ax.axis('equal')
else:
ax = canvas
grid = self.grid
#for cell in grid.cellList[:1]:
for cell in grid.cellList:
for face in cell.faces:
n0 = face.nodes[0]
n1 = face.nodes[1]
ax.plot(n0.x0,n0.x1,
color='red',
marker='o',
alpha = alpha)
ax.plot(n1.x0,n1.x1,
color='red',
marker='o',
alpha = alpha)
x = [n0.x0,n1.x0]
y = [n0.x1,n1.x1]
ax.plot(x, y,
color='black',
alpha = alpha)
return ax
def plot_centroids(self, canvas = None,
alpha=.1):
if canvas is None:
fig, ax = plt.subplots()
else:
ax = canvas
grid = self.grid
#for cell in grid.cellList[:1]:
for cell in grid.cellList:
ax.plot(cell.centroid[0],
cell.centroid[1],
color='green',
marker='o',
alpha = alpha,)
return ax
def plot_face_centers(self, canvas = None,
alpha=.1):
if canvas is None:
fig, ax = plt.subplots()
else:
ax = canvas
grid = self.grid
#for cell in grid.cellList[:1]:
for cell in grid.cellList:
for face in cell.faces:
                # the face center is plotted once; normal-vector offsets are not
                # needed here (see plot_normals for the normals themselves)
                ax.plot(face.center[0],
                        face.center[1],
                        color='yellow',
                        marker='o',
                        alpha = alpha)
return ax
def plot_normals(self, canvas = None,
alpha=.4):
"""
debugging:
ax = axTri
"""
if canvas is None:
fig, ax = plt.subplots()
else:
ax = canvas
grid = self.grid
#for cell in grid.cellList[:1]:
for cell in grid.cellList:
for face in cell.faces:
# print 'new face'
# print '\n Normal vector'
# print face.normal_vector
# print '\n center'
# print face.center
fnorm = face.normal_vector
#norm0 = .5*face.normal_vector*face.area**2 + face.center
#norm0 = norm0*face.area
norm = 2.*np.linalg.norm(face.normal_vector)*face.area
#ax.plot([ norm0[0],face.center[0] ],
# [ norm0[1],face.center[1] ],
# color='purple',
# marker='o',
# alpha = alpha)
#scalearrow = np.linalg.norm(norm0)
#dx = (fnorm[0]-face.center[0])/norm
#dy = (fnorm[1]-face.center[1])/norm
plt.arrow(x=face.center[0],
y=face.center[1],
dx=fnorm[0]/norm ,
dy=fnorm[1]/norm )
# bad!
# plt.arrow(x=face.center[0],
# y=face.center[1],
# dx=dx ,
# dy=dy )
return ax
def plot_cell(self, cell, canvas = None,
alpha=.4):
if canvas is None:
fig, ax = plt.subplots()
else:
ax = canvas
        # plot only the faces of the requested cell; the original loop over
        # grid.cellList shadowed the `cell` argument and drew every cell instead
        for face in cell.faces:
            norm0 = face.normal_vector + face.center
            ax.plot([ norm0[0],face.center[0] ],
                    [ norm0[1],face.center[1] ],
                    color='black',
                    marker='o',
                    alpha = alpha)
        return ax
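# --- Hedged, self-contained sketch (not part of the original PlotGrids module) ---
# Demonstrates the matplotlib arrow technique that plot_normals relies on: draw an
# edge and an outward unit normal at its midpoint. All coordinates are made up.
if __name__ == '__main__':
    n0, n1 = np.array([0.0, 0.0]), np.array([1.0, 0.0])   # edge endpoints
    center = 0.5 * (n0 + n1)                               # edge midpoint
    edge = n1 - n0
    normal = np.array([edge[1], -edge[0]])                 # rotate edge by -90 degrees
    normal /= np.linalg.norm(normal)                       # unit length
    fig, ax = plt.subplots()
    ax.plot([n0[0], n1[0]], [n0[1], n1[1]], color='black')
    ax.arrow(x=center[0], y=center[1],
             dx=0.2 * normal[0], dy=0.2 * normal[1],
             head_width=0.03, color='purple')
    ax.axis('equal')
    plt.show()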
| 29.063584
| 73
| 0.380072
|
794873d4278049da9787f828b1a122a0907f6cf0
| 32,014
|
py
|
Python
|
glance/tests/unit/common/test_wsgi.py
|
petrutlucian94/glance
|
fb2c2e7de1bc4fd4eead3632de9358d1a7037c9f
|
[
"Apache-2.0"
] | null | null | null |
glance/tests/unit/common/test_wsgi.py
|
petrutlucian94/glance
|
fb2c2e7de1bc4fd4eead3632de9358d1a7037c9f
|
[
"Apache-2.0"
] | null | null | null |
glance/tests/unit/common/test_wsgi.py
|
petrutlucian94/glance
|
fb2c2e7de1bc4fd4eead3632de9358d1a7037c9f
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2010-2011 OpenStack Foundation
# Copyright 2014 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import gettext
import os
import socket
from babel import localedata
import eventlet.patcher
import fixtures
import mock
from oslo_concurrency import processutils
from oslo_serialization import jsonutils
import routes
import six
from six.moves import http_client as http
import webob
from glance.api.v2 import router as router_v2
from glance.common import exception
from glance.common import utils
from glance.common import wsgi
from glance import i18n
from glance.tests import utils as test_utils
class RequestTest(test_utils.BaseTestCase):
def _set_expected_languages(self, all_locales=None, avail_locales=None):
if all_locales is None:
all_locales = []
# Override localedata.locale_identifiers to return some locales.
def returns_some_locales(*args, **kwargs):
return all_locales
self.mock_object(localedata, 'locale_identifiers',
returns_some_locales)
# Override gettext.find to return other than None for some languages.
def fake_gettext_find(lang_id, *args, **kwargs):
found_ret = '/glance/%s/LC_MESSAGES/glance.mo' % lang_id
if avail_locales is None:
# All locales are available.
return found_ret
languages = kwargs['languages']
if languages[0] in avail_locales:
return found_ret
return None
self.mock_object(gettext, 'find', fake_gettext_find)
def test_content_range(self):
request = wsgi.Request.blank('/tests/123')
request.headers["Content-Range"] = 'bytes 10-99/*'
range_ = request.get_range_from_request(120)
self.assertEqual(10, range_.start)
self.assertEqual(100, range_.stop) # non-inclusive
self.assertIsNone(range_.length)
def test_content_range_invalid(self):
request = wsgi.Request.blank('/tests/123')
request.headers["Content-Range"] = 'bytes=0-99'
self.assertRaises(webob.exc.HTTPRequestRangeNotSatisfiable,
request.get_range_from_request, 120)
def test_range(self):
request = wsgi.Request.blank('/tests/123')
request.headers["Range"] = 'bytes=10-99'
range_ = request.get_range_from_request(120)
self.assertEqual(10, range_.start)
self.assertEqual(100, range_.end) # non-inclusive
def test_range_invalid(self):
request = wsgi.Request.blank('/tests/123')
request.headers["Range"] = 'bytes=150-'
self.assertRaises(webob.exc.HTTPRequestRangeNotSatisfiable,
request.get_range_from_request, 120)
def test_content_type_missing(self):
request = wsgi.Request.blank('/tests/123')
self.assertRaises(exception.InvalidContentType,
request.get_content_type, ('application/xml',))
def test_content_type_unsupported(self):
request = wsgi.Request.blank('/tests/123')
request.headers["Content-Type"] = "text/html"
self.assertRaises(exception.InvalidContentType,
request.get_content_type, ('application/xml',))
def test_content_type_with_charset(self):
request = wsgi.Request.blank('/tests/123')
request.headers["Content-Type"] = "application/json; charset=UTF-8"
result = request.get_content_type(('application/json',))
self.assertEqual("application/json", result)
def test_params(self):
if six.PY2:
expected = webob.multidict.NestedMultiDict({
'limit': '20', 'name':
'\xd0\x9f\xd1\x80\xd0\xb8\xd0\xb2\xd0\xb5\xd1\x82',
'sort_key': 'name', 'sort_dir': 'asc'})
else:
expected = webob.multidict.NestedMultiDict({
'limit': '20', 'name': 'Привет', 'sort_key': 'name',
'sort_dir': 'asc'})
request = wsgi.Request.blank("/?limit=20&name=%D0%9F%D1%80%D0%B8"
"%D0%B2%D0%B5%D1%82&sort_key=name"
"&sort_dir=asc")
actual = request.params
self.assertEqual(expected, actual)
def test_content_type_from_accept_xml(self):
request = wsgi.Request.blank('/tests/123')
request.headers["Accept"] = "application/xml"
result = request.best_match_content_type()
self.assertEqual("application/json", result)
def test_content_type_from_accept_json(self):
request = wsgi.Request.blank('/tests/123')
request.headers["Accept"] = "application/json"
result = request.best_match_content_type()
self.assertEqual("application/json", result)
def test_content_type_from_accept_xml_json(self):
request = wsgi.Request.blank('/tests/123')
request.headers["Accept"] = "application/xml, application/json"
result = request.best_match_content_type()
self.assertEqual("application/json", result)
def test_content_type_from_accept_json_xml_quality(self):
request = wsgi.Request.blank('/tests/123')
request.headers["Accept"] = ("application/json; q=0.3, "
"application/xml; q=0.9")
result = request.best_match_content_type()
self.assertEqual("application/json", result)
def test_content_type_accept_default(self):
request = wsgi.Request.blank('/tests/123.unsupported')
request.headers["Accept"] = "application/unsupported1"
result = request.best_match_content_type()
self.assertEqual("application/json", result)
def test_language_accept_default(self):
request = wsgi.Request.blank('/tests/123')
request.headers["Accept-Language"] = "zz-ZZ,zz;q=0.8"
result = request.best_match_language()
self.assertIsNone(result)
def test_language_accept_none(self):
request = wsgi.Request.blank('/tests/123')
result = request.best_match_language()
self.assertIsNone(result)
def test_best_match_language_expected(self):
# If Accept-Language is a supported language, best_match_language()
# returns it.
self._set_expected_languages(all_locales=['it'])
req = wsgi.Request.blank('/', headers={'Accept-Language': 'it'})
self.assertEqual('it', req.best_match_language())
def test_request_match_language_unexpected(self):
# If Accept-Language is a language we do not support,
# best_match_language() returns None.
self._set_expected_languages(all_locales=['it'])
req = wsgi.Request.blank('/', headers={'Accept-Language': 'unknown'})
self.assertIsNone(req.best_match_language())
@mock.patch.object(webob.acceptparse.AcceptLanguageValidHeader, 'lookup')
def test_best_match_language_unknown(self, mock_lookup):
# Test that we are actually invoking language negotiation by WebOb
request = wsgi.Request.blank('/')
accepted = 'unknown-lang'
request.headers = {'Accept-Language': accepted}
# Bug #1765748: see comment in code in the function under test
# to understand why this is the correct return value for the
# webob 1.8.x mock
mock_lookup.return_value = 'fake_LANG'
self.assertIsNone(request.best_match_language())
mock_lookup.assert_called_once()
# If Accept-Language is missing or empty, match should be None
request.headers = {'Accept-Language': ''}
self.assertIsNone(request.best_match_language())
request.headers.pop('Accept-Language')
self.assertIsNone(request.best_match_language())
def test_http_error_response_codes(self):
        """Make sure v2 disallowed methods return 405."""
        sample_id, member_id, tag_val, task_id = 'abc', '123', '1', '2'
unallowed_methods = [
('/schemas/image', ['POST', 'PUT', 'DELETE', 'PATCH', 'HEAD']),
('/schemas/images', ['POST', 'PUT', 'DELETE', 'PATCH', 'HEAD']),
('/schemas/member', ['POST', 'PUT', 'DELETE', 'PATCH', 'HEAD']),
('/schemas/members', ['POST', 'PUT', 'DELETE', 'PATCH', 'HEAD']),
('/schemas/task', ['POST', 'PUT', 'DELETE', 'PATCH', 'HEAD']),
('/schemas/tasks', ['POST', 'PUT', 'DELETE', 'PATCH', 'HEAD']),
('/images', ['PUT', 'DELETE', 'PATCH', 'HEAD']),
('/images/%s' % sample_id, ['POST', 'PUT', 'HEAD']),
('/images/%s/file' % sample_id,
['POST', 'DELETE', 'PATCH', 'HEAD']),
('/images/%s/tags/%s' % (sample_id, tag_val),
['GET', 'POST', 'PATCH', 'HEAD']),
('/images/%s/members' % sample_id,
['PUT', 'DELETE', 'PATCH', 'HEAD']),
('/images/%s/members/%s' % (sample_id, member_id),
['POST', 'PATCH', 'HEAD']),
('/tasks', ['PUT', 'DELETE', 'PATCH', 'HEAD']),
('/tasks/%s' % task_id, ['POST', 'PUT', 'PATCH', 'HEAD']),
]
api = test_utils.FakeAuthMiddleware(router_v2.API(routes.Mapper()))
for uri, methods in unallowed_methods:
for method in methods:
req = webob.Request.blank(uri)
req.method = method
res = req.get_response(api)
self.assertEqual(http.METHOD_NOT_ALLOWED, res.status_int)
# Makes sure not implemented methods return 405
req = webob.Request.blank('/schemas/image')
req.method = 'NonexistentMethod'
res = req.get_response(api)
self.assertEqual(http.METHOD_NOT_ALLOWED, res.status_int)
class ResourceTest(test_utils.BaseTestCase):
def test_get_action_args(self):
env = {
'wsgiorg.routing_args': [
None,
{
'controller': None,
'format': None,
'action': 'update',
'id': 12,
},
],
}
expected = {'action': 'update', 'id': 12}
actual = wsgi.Resource(None, None, None).get_action_args(env)
self.assertEqual(expected, actual)
def test_get_action_args_invalid_index(self):
env = {'wsgiorg.routing_args': []}
expected = {}
actual = wsgi.Resource(None, None, None).get_action_args(env)
self.assertEqual(expected, actual)
def test_get_action_args_del_controller_error(self):
actions = {'format': None,
'action': 'update',
'id': 12}
env = {'wsgiorg.routing_args': [None, actions]}
expected = {'action': 'update', 'id': 12}
actual = wsgi.Resource(None, None, None).get_action_args(env)
self.assertEqual(expected, actual)
def test_get_action_args_del_format_error(self):
actions = {'action': 'update', 'id': 12}
env = {'wsgiorg.routing_args': [None, actions]}
expected = {'action': 'update', 'id': 12}
actual = wsgi.Resource(None, None, None).get_action_args(env)
self.assertEqual(expected, actual)
def test_dispatch(self):
class Controller(object):
def index(self, shirt, pants=None):
return (shirt, pants)
resource = wsgi.Resource(None, None, None)
actual = resource.dispatch(Controller(), 'index', 'on', pants='off')
expected = ('on', 'off')
self.assertEqual(expected, actual)
def test_dispatch_default(self):
class Controller(object):
def default(self, shirt, pants=None):
return (shirt, pants)
resource = wsgi.Resource(None, None, None)
actual = resource.dispatch(Controller(), 'index', 'on', pants='off')
expected = ('on', 'off')
self.assertEqual(expected, actual)
def test_dispatch_no_default(self):
class Controller(object):
def show(self, shirt, pants=None):
return (shirt, pants)
resource = wsgi.Resource(None, None, None)
self.assertRaises(AttributeError, resource.dispatch, Controller(),
'index', 'on', pants='off')
def test_call(self):
class FakeController(object):
def index(self, shirt, pants=None):
return (shirt, pants)
resource = wsgi.Resource(FakeController(), None, None)
def dispatch(self, obj, action, *args, **kwargs):
if isinstance(obj, wsgi.JSONRequestDeserializer):
return []
if isinstance(obj, wsgi.JSONResponseSerializer):
raise webob.exc.HTTPForbidden()
self.mock_object(wsgi.Resource, 'dispatch', dispatch)
request = wsgi.Request.blank('/')
response = resource.__call__(request)
self.assertIsInstance(response, webob.exc.HTTPForbidden)
self.assertEqual(http.FORBIDDEN, response.status_code)
def test_call_raises_exception(self):
class FakeController(object):
def index(self, shirt, pants=None):
return (shirt, pants)
resource = wsgi.Resource(FakeController(), None, None)
def dispatch(self, obj, action, *args, **kwargs):
raise Exception("test exception")
self.mock_object(wsgi.Resource, 'dispatch', dispatch)
request = wsgi.Request.blank('/')
response = resource.__call__(request)
self.assertIsInstance(response, webob.exc.HTTPInternalServerError)
self.assertEqual(http.INTERNAL_SERVER_ERROR, response.status_code)
@mock.patch.object(wsgi, 'translate_exception')
def test_resource_call_error_handle_localized(self,
mock_translate_exception):
class Controller(object):
def delete(self, req, identity):
raise webob.exc.HTTPBadRequest(explanation='Not Found')
actions = {'action': 'delete', 'identity': 12}
env = {'wsgiorg.routing_args': [None, actions]}
request = wsgi.Request.blank('/tests/123', environ=env)
message_es = 'No Encontrado'
resource = wsgi.Resource(Controller(),
wsgi.JSONRequestDeserializer(),
None)
translated_exc = webob.exc.HTTPBadRequest(message_es)
mock_translate_exception.return_value = translated_exc
e = self.assertRaises(webob.exc.HTTPBadRequest,
resource, request)
self.assertEqual(message_es, str(e))
@mock.patch.object(webob.acceptparse.AcceptLanguageValidHeader, 'lookup')
@mock.patch.object(i18n, 'translate')
def test_translate_exception(self, mock_translate, mock_lookup):
mock_translate.return_value = 'No Encontrado'
mock_lookup.return_value = 'de'
req = wsgi.Request.blank('/tests/123')
req.headers["Accept-Language"] = "de"
e = webob.exc.HTTPNotFound(explanation='Not Found')
e = wsgi.translate_exception(req, e)
self.assertEqual('No Encontrado', e.explanation)
def test_response_headers_encoded(self):
# prepare environment
for_openstack_comrades = \
u'\u0417\u0430 \u043e\u043f\u0435\u043d\u0441\u0442\u0435\u043a, ' \
u'\u0442\u043e\u0432\u0430\u0440\u0438\u0449\u0438'
class FakeController(object):
def index(self, shirt, pants=None):
return (shirt, pants)
class FakeSerializer(object):
def index(self, response, result):
response.headers['unicode_test'] = for_openstack_comrades
# make request
resource = wsgi.Resource(FakeController(), None, FakeSerializer())
actions = {'action': 'index'}
env = {'wsgiorg.routing_args': [None, actions]}
request = wsgi.Request.blank('/tests/123', environ=env)
response = resource.__call__(request)
# ensure it has been encoded correctly
value = (response.headers['unicode_test'].decode('utf-8')
if six.PY2 else response.headers['unicode_test'])
self.assertEqual(for_openstack_comrades, value)
class JSONResponseSerializerTest(test_utils.BaseTestCase):
def test_to_json(self):
fixture = {"key": "value"}
expected = b'{"key": "value"}'
actual = wsgi.JSONResponseSerializer().to_json(fixture)
self.assertEqual(expected, actual)
def test_to_json_with_date_format_value(self):
fixture = {"date": datetime.datetime(1901, 3, 8, 2)}
expected = b'{"date": "1901-03-08T02:00:00.000000"}'
actual = wsgi.JSONResponseSerializer().to_json(fixture)
self.assertEqual(expected, actual)
def test_to_json_with_more_deep_format(self):
fixture = {"is_public": True, "name": [{"name1": "test"}]}
expected = {"is_public": True, "name": [{"name1": "test"}]}
actual = wsgi.JSONResponseSerializer().to_json(fixture)
actual = jsonutils.loads(actual)
for k in expected:
self.assertEqual(expected[k], actual[k])
def test_to_json_with_set(self):
fixture = set(["foo"])
expected = b'["foo"]'
actual = wsgi.JSONResponseSerializer().to_json(fixture)
self.assertEqual(expected, actual)
def test_default(self):
fixture = {"key": "value"}
response = webob.Response()
wsgi.JSONResponseSerializer().default(response, fixture)
self.assertEqual(http.OK, response.status_int)
content_types = [h for h in response.headerlist
if h[0] == 'Content-Type']
self.assertEqual(1, len(content_types))
self.assertEqual('application/json', response.content_type)
self.assertEqual(b'{"key": "value"}', response.body)
class JSONRequestDeserializerTest(test_utils.BaseTestCase):
def test_has_body_no_content_length(self):
request = wsgi.Request.blank('/')
request.method = 'POST'
request.body = b'asdf'
request.headers.pop('Content-Length')
self.assertFalse(wsgi.JSONRequestDeserializer().has_body(request))
def test_has_body_zero_content_length(self):
request = wsgi.Request.blank('/')
request.method = 'POST'
request.body = b'asdf'
request.headers['Content-Length'] = 0
self.assertFalse(wsgi.JSONRequestDeserializer().has_body(request))
def test_has_body_has_content_length(self):
request = wsgi.Request.blank('/')
request.method = 'POST'
request.body = b'asdf'
self.assertIn('Content-Length', request.headers)
self.assertTrue(wsgi.JSONRequestDeserializer().has_body(request))
def test_no_body_no_content_length(self):
request = wsgi.Request.blank('/')
self.assertFalse(wsgi.JSONRequestDeserializer().has_body(request))
def test_from_json(self):
fixture = '{"key": "value"}'
expected = {"key": "value"}
actual = wsgi.JSONRequestDeserializer().from_json(fixture)
self.assertEqual(expected, actual)
def test_from_json_malformed(self):
fixture = 'kjasdklfjsklajf'
self.assertRaises(webob.exc.HTTPBadRequest,
wsgi.JSONRequestDeserializer().from_json, fixture)
def test_default_no_body(self):
request = wsgi.Request.blank('/')
actual = wsgi.JSONRequestDeserializer().default(request)
expected = {}
self.assertEqual(expected, actual)
def test_default_with_body(self):
request = wsgi.Request.blank('/')
request.method = 'POST'
request.body = b'{"key": "value"}'
actual = wsgi.JSONRequestDeserializer().default(request)
expected = {"body": {"key": "value"}}
self.assertEqual(expected, actual)
def test_has_body_has_transfer_encoding(self):
self.assertTrue(self._check_transfer_encoding(
transfer_encoding='chunked'))
def test_has_body_multiple_transfer_encoding(self):
self.assertTrue(self._check_transfer_encoding(
transfer_encoding='chunked, gzip'))
def test_has_body_invalid_transfer_encoding(self):
self.assertFalse(self._check_transfer_encoding(
transfer_encoding='invalid', content_length=0))
def test_has_body_invalid_transfer_encoding_no_content_len_and_body(self):
self.assertFalse(self._check_transfer_encoding(
transfer_encoding='invalid', include_body=False))
def test_has_body_invalid_transfer_encoding_no_content_len_but_body(self):
self.assertTrue(self._check_transfer_encoding(
transfer_encoding='invalid', include_body=True))
def test_has_body_invalid_transfer_encoding_with_content_length(self):
self.assertTrue(self._check_transfer_encoding(
transfer_encoding='invalid', content_length=5))
def test_has_body_valid_transfer_encoding_with_content_length(self):
self.assertTrue(self._check_transfer_encoding(
transfer_encoding='chunked', content_length=1))
def test_has_body_valid_transfer_encoding_without_content_length(self):
self.assertTrue(self._check_transfer_encoding(
transfer_encoding='chunked'))
def _check_transfer_encoding(self, transfer_encoding=None,
content_length=None, include_body=True):
request = wsgi.Request.blank('/')
request.method = 'POST'
if include_body:
request.body = b'fake_body'
request.headers['transfer-encoding'] = transfer_encoding
if content_length is not None:
request.headers['content-length'] = content_length
return wsgi.JSONRequestDeserializer().has_body(request)
def test_get_bind_addr_default_value(self):
expected = ('0.0.0.0', '123456')
actual = wsgi.get_bind_addr(default_port="123456")
self.assertEqual(expected, actual)
class ServerTest(test_utils.BaseTestCase):
def test_create_pool(self):
"""Ensure the wsgi thread pool is an eventlet.greenpool.GreenPool."""
actual = wsgi.Server(threads=1).create_pool()
self.assertIsInstance(actual, eventlet.greenpool.GreenPool)
@mock.patch.object(wsgi.Server, 'configure_socket')
def test_http_keepalive(self, mock_configure_socket):
self.config(http_keepalive=False)
self.config(workers=0)
server = wsgi.Server(threads=1)
server.sock = 'fake_socket'
# mocking eventlet.wsgi server method to check it is called with
# configured 'http_keepalive' value.
with mock.patch.object(eventlet.wsgi,
'server') as mock_server:
fake_application = "fake-application"
server.start(fake_application, 0)
server.wait()
mock_server.assert_called_once_with('fake_socket',
fake_application,
log=server._logger,
debug=False,
custom_pool=server.pool,
keepalive=False,
socket_timeout=900)
def test_number_of_workers(self):
"""Ensure the number of workers matches num cpus limited to 8."""
def pid():
i = 1
while True:
i = i + 1
yield i
with mock.patch.object(os, 'fork') as mock_fork:
with mock.patch('oslo_concurrency.processutils.get_worker_count',
return_value=4):
mock_fork.side_effect = pid
server = wsgi.Server()
server.configure = mock.Mock()
fake_application = "fake-application"
server.start(fake_application, None)
self.assertEqual(4, len(server.children))
with mock.patch('oslo_concurrency.processutils.get_worker_count',
return_value=24):
mock_fork.side_effect = pid
server = wsgi.Server()
server.configure = mock.Mock()
fake_application = "fake-application"
server.start(fake_application, None)
self.assertEqual(8, len(server.children))
mock_fork.side_effect = pid
server = wsgi.Server()
server.configure = mock.Mock()
fake_application = "fake-application"
server.start(fake_application, None)
cpus = processutils.get_worker_count()
expected_workers = cpus if cpus < 8 else 8
self.assertEqual(expected_workers,
len(server.children))
class TestHelpers(test_utils.BaseTestCase):
def test_headers_are_unicode(self):
"""
Verifies that the headers returned by conversion code are unicode.
Headers are passed via http in non-testing mode, which automatically
converts them to unicode. Verifying that the method does the
conversion proves that we aren't passing data that works in tests
but will fail in production.
"""
fixture = {'name': 'fake public image',
'is_public': True,
'size': 19,
'location': "file:///tmp/glance-tests/2",
'properties': {'distro': 'Ubuntu 10.04 LTS'}}
headers = utils.image_meta_to_http_headers(fixture)
for k, v in six.iteritems(headers):
self.assertIsInstance(v, six.text_type)
def test_data_passed_properly_through_headers(self):
"""
Verifies that data is the same after being passed through headers
"""
fixture = {'is_public': True,
'deleted': False,
'name': None,
'size': 19,
'location': "file:///tmp/glance-tests/2",
'properties': {'distro': 'Ubuntu 10.04 LTS'}}
headers = utils.image_meta_to_http_headers(fixture)
class FakeResponse(object):
pass
response = FakeResponse()
response.headers = headers
result = utils.get_image_meta_from_headers(response)
for k, v in six.iteritems(fixture):
if v is not None:
self.assertEqual(v, result[k])
else:
self.assertNotIn(k, result)
class GetSocketTestCase(test_utils.BaseTestCase):
def setUp(self):
super(GetSocketTestCase, self).setUp()
self.useFixture(fixtures.MonkeyPatch(
"glance.common.wsgi.get_bind_addr",
lambda x: ('192.168.0.13', 1234)))
addr_info_list = [(2, 1, 6, '', ('192.168.0.13', 80)),
(2, 2, 17, '', ('192.168.0.13', 80)),
(2, 3, 0, '', ('192.168.0.13', 80))]
self.useFixture(fixtures.MonkeyPatch(
"glance.common.wsgi.socket.getaddrinfo",
lambda *x: addr_info_list))
self.useFixture(fixtures.MonkeyPatch(
"glance.common.wsgi.time.time",
mock.Mock(side_effect=[0, 1, 5, 10, 20, 35])))
self.useFixture(fixtures.MonkeyPatch(
"glance.common.wsgi.utils.validate_key_cert",
lambda *x: None))
wsgi.CONF.cert_file = '/etc/ssl/cert'
wsgi.CONF.key_file = '/etc/ssl/key'
wsgi.CONF.ca_file = '/etc/ssl/ca_cert'
wsgi.CONF.tcp_keepidle = 600
def test_correct_configure_socket(self):
mock_socket = mock.Mock()
self.useFixture(fixtures.MonkeyPatch(
'glance.common.wsgi.ssl.wrap_socket',
mock_socket))
self.useFixture(fixtures.MonkeyPatch(
'glance.common.wsgi.eventlet.listen',
lambda *x, **y: mock_socket))
server = wsgi.Server()
server.default_port = 1234
server.configure_socket()
self.assertIn(mock.call.setsockopt(
socket.SOL_SOCKET,
socket.SO_REUSEADDR,
1), mock_socket.mock_calls)
self.assertIn(mock.call.setsockopt(
socket.SOL_SOCKET,
socket.SO_KEEPALIVE,
1), mock_socket.mock_calls)
if hasattr(socket, 'TCP_KEEPIDLE'):
self.assertIn(mock.call().setsockopt(
socket.IPPROTO_TCP,
socket.TCP_KEEPIDLE,
wsgi.CONF.tcp_keepidle), mock_socket.mock_calls)
def test_get_socket_without_all_ssl_reqs(self):
wsgi.CONF.key_file = None
self.assertRaises(RuntimeError, wsgi.get_socket, 1234)
def test_get_socket_with_bind_problems(self):
self.useFixture(fixtures.MonkeyPatch(
'glance.common.wsgi.eventlet.listen',
mock.Mock(side_effect=(
[wsgi.socket.error(socket.errno.EADDRINUSE)] * 3 + [None]))))
self.useFixture(fixtures.MonkeyPatch(
'glance.common.wsgi.ssl.wrap_socket',
lambda *x, **y: None))
self.assertRaises(RuntimeError, wsgi.get_socket, 1234)
def test_get_socket_with_unexpected_socket_errno(self):
self.useFixture(fixtures.MonkeyPatch(
'glance.common.wsgi.eventlet.listen',
mock.Mock(side_effect=wsgi.socket.error(socket.errno.ENOMEM))))
self.useFixture(fixtures.MonkeyPatch(
'glance.common.wsgi.ssl.wrap_socket',
lambda *x, **y: None))
self.assertRaises(wsgi.socket.error, wsgi.get_socket, 1234)
def _cleanup_uwsgi():
wsgi.uwsgi = None
class Test_UwsgiChunkedFile(test_utils.BaseTestCase):
def test_read_no_data(self):
reader = wsgi._UWSGIChunkFile()
wsgi.uwsgi = mock.MagicMock()
self.addCleanup(_cleanup_uwsgi)
def fake_read():
return None
wsgi.uwsgi.chunked_read = fake_read
out = reader.read()
self.assertEqual(out, b'')
def test_read_data_no_length(self):
reader = wsgi._UWSGIChunkFile()
wsgi.uwsgi = mock.MagicMock()
self.addCleanup(_cleanup_uwsgi)
values = iter([b'a', b'b', b'c', None])
def fake_read():
return next(values)
wsgi.uwsgi.chunked_read = fake_read
out = reader.read()
self.assertEqual(out, b'abc')
def test_read_zero_length(self):
reader = wsgi._UWSGIChunkFile()
self.assertEqual(b'', reader.read(length=0))
def test_read_data_length(self):
reader = wsgi._UWSGIChunkFile()
wsgi.uwsgi = mock.MagicMock()
self.addCleanup(_cleanup_uwsgi)
values = iter([b'a', b'b', b'c', None])
def fake_read():
return next(values)
wsgi.uwsgi.chunked_read = fake_read
out = reader.read(length=2)
self.assertEqual(out, b'ab')
def test_read_data_negative_length(self):
reader = wsgi._UWSGIChunkFile()
wsgi.uwsgi = mock.MagicMock()
self.addCleanup(_cleanup_uwsgi)
values = iter([b'a', b'b', b'c', None])
def fake_read():
return next(values)
wsgi.uwsgi.chunked_read = fake_read
out = reader.read(length=-2)
self.assertEqual(out, b'abc')
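# --- Hedged sketch (not part of the glance test module) ---
# A minimal stand-in illustrating the chunked-read contract exercised by the
# _UWSGIChunkFile tests above: keep calling the reader until it returns None,
# optionally stopping once `length` bytes have been collected. `read_chunked`
# is a made-up helper for illustration only.
def read_chunked(fake_read, length=None):
    out = b''
    if length == 0:
        return out
    while True:
        chunk = fake_read()
        if chunk is None:
            return out
        out += chunk
        if length is not None and length >= 0 and len(out) >= length:
            return out[:length]
_values1 = iter([b'a', b'b', b'c', None])
assert read_chunked(lambda: next(_values1)) == b'abc'
_values2 = iter([b'a', b'b', b'c', None])
assert read_chunked(lambda: next(_values2), length=2) == b'ab'
_values3 = iter([b'a', b'b', b'c', None])
assert read_chunked(lambda: next(_values3), length=-2) == b'abc'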
| 39.572311
| 80
| 0.614231
|
7948776eb00a77a3ee28880d53d84f3c919dd023
| 631
|
py
|
Python
|
api/topic.py
|
qianbin01/lagou_python_api
|
84c0d21cd6a2296efb974dbf7c07cc074106d799
|
[
"MIT"
] | 8
|
2018-09-10T06:30:56.000Z
|
2021-03-11T19:16:32.000Z
|
api/topic.py
|
qianbin01/lagou_python_api
|
84c0d21cd6a2296efb974dbf7c07cc074106d799
|
[
"MIT"
] | null | null | null |
api/topic.py
|
qianbin01/lagou_python_api
|
84c0d21cd6a2296efb974dbf7c07cc074106d799
|
[
"MIT"
] | 5
|
2018-10-12T12:37:16.000Z
|
2020-05-16T03:17:40.000Z
|
import models.topic as topic
from flask import Blueprint, jsonify, request
topic_blue_print = Blueprint('topic', __name__)
status = {
'msg': '请求成功',
'code': 1000
}
topic_doc = topic.get_topic_count()
@topic_blue_print.route('/lists')
def lists():
page = request.args.get('page')
topic_doc['index'] = page
return jsonify(
{'status': status,
'pageInfo': topic_doc,
'dataList': topic.get_topic_list(page)
})
@topic_blue_print.route('/single')
def single_news():
tid = request.args.get('tid')
return jsonify({'status': status, 'object': topic.get_single_topic(tid)})
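# --- Hedged usage sketch (not part of the original api/topic.py) ---
# Shows how this blueprint is typically mounted on a Flask app and exercised with
# the test client; it assumes the `models.topic` backend imported above is usable.
if __name__ == '__main__':
    from flask import Flask
    app = Flask(__name__)
    app.register_blueprint(topic_blue_print, url_prefix='/topic')
    with app.test_client() as client:
        print(client.get('/topic/lists?page=1').get_json())
        print(client.get('/topic/single?tid=1').get_json())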
| 23.37037
| 77
| 0.654517
|
7948782479b7f6cb8e90694b8c1f1d59fd2263a9
| 15,683
|
py
|
Python
|
homeassistant/components/template/light.py
|
jasperro/core
|
26d7b2164e8a971506790ae5af06f31abdf278b5
|
[
"Apache-2.0"
] | 4
|
2016-12-23T10:36:36.000Z
|
2021-04-22T12:38:16.000Z
|
homeassistant/components/template/light.py
|
jasperro/core
|
26d7b2164e8a971506790ae5af06f31abdf278b5
|
[
"Apache-2.0"
] | 9
|
2022-01-27T06:32:10.000Z
|
2022-03-31T07:07:51.000Z
|
homeassistant/components/template/light.py
|
jasperro/core
|
26d7b2164e8a971506790ae5af06f31abdf278b5
|
[
"Apache-2.0"
] | 1
|
2020-03-07T10:43:50.000Z
|
2020-03-07T10:43:50.000Z
|
"""Support for Template lights."""
import logging
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
ENTITY_ID_FORMAT,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
Light,
)
from homeassistant.const import (
CONF_ENTITY_ID,
CONF_ENTITY_PICTURE_TEMPLATE,
CONF_FRIENDLY_NAME,
CONF_ICON_TEMPLATE,
CONF_LIGHTS,
CONF_VALUE_TEMPLATE,
EVENT_HOMEASSISTANT_START,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import callback
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.helpers.event import async_track_state_change
from homeassistant.helpers.script import Script
from . import extract_entities, initialise_templates
from .const import CONF_AVAILABILITY_TEMPLATE
_LOGGER = logging.getLogger(__name__)
_VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"]
CONF_ON_ACTION = "turn_on"
CONF_OFF_ACTION = "turn_off"
CONF_LEVEL_ACTION = "set_level"
CONF_LEVEL_TEMPLATE = "level_template"
CONF_TEMPERATURE_TEMPLATE = "temperature_template"
CONF_TEMPERATURE_ACTION = "set_temperature"
CONF_COLOR_TEMPLATE = "color_template"
CONF_COLOR_ACTION = "set_color"
LIGHT_SCHEMA = vol.Schema(
{
vol.Required(CONF_ON_ACTION): cv.SCRIPT_SCHEMA,
vol.Required(CONF_OFF_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_ICON_TEMPLATE): cv.template,
vol.Optional(CONF_ENTITY_PICTURE_TEMPLATE): cv.template,
vol.Optional(CONF_AVAILABILITY_TEMPLATE): cv.template,
vol.Optional(CONF_LEVEL_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_LEVEL_TEMPLATE): cv.template,
vol.Optional(CONF_FRIENDLY_NAME): cv.string,
vol.Optional(CONF_ENTITY_ID): cv.entity_ids,
vol.Optional(CONF_TEMPERATURE_TEMPLATE): cv.template,
vol.Optional(CONF_TEMPERATURE_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_COLOR_TEMPLATE): cv.template,
vol.Optional(CONF_COLOR_ACTION): cv.SCRIPT_SCHEMA,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_LIGHTS): cv.schema_with_slug_keys(LIGHT_SCHEMA)}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Template Lights."""
lights = []
for device, device_config in config[CONF_LIGHTS].items():
friendly_name = device_config.get(CONF_FRIENDLY_NAME, device)
state_template = device_config.get(CONF_VALUE_TEMPLATE)
icon_template = device_config.get(CONF_ICON_TEMPLATE)
entity_picture_template = device_config.get(CONF_ENTITY_PICTURE_TEMPLATE)
availability_template = device_config.get(CONF_AVAILABILITY_TEMPLATE)
on_action = device_config[CONF_ON_ACTION]
off_action = device_config[CONF_OFF_ACTION]
level_action = device_config.get(CONF_LEVEL_ACTION)
level_template = device_config.get(CONF_LEVEL_TEMPLATE)
temperature_action = device_config.get(CONF_TEMPERATURE_ACTION)
temperature_template = device_config.get(CONF_TEMPERATURE_TEMPLATE)
color_action = device_config.get(CONF_COLOR_ACTION)
color_template = device_config.get(CONF_COLOR_TEMPLATE)
templates = {
CONF_VALUE_TEMPLATE: state_template,
CONF_ICON_TEMPLATE: icon_template,
CONF_ENTITY_PICTURE_TEMPLATE: entity_picture_template,
CONF_AVAILABILITY_TEMPLATE: availability_template,
CONF_LEVEL_TEMPLATE: level_template,
CONF_TEMPERATURE_TEMPLATE: temperature_template,
CONF_COLOR_TEMPLATE: color_template,
}
initialise_templates(hass, templates)
entity_ids = extract_entities(
device, "light", device_config.get(CONF_ENTITY_ID), templates
)
lights.append(
LightTemplate(
hass,
device,
friendly_name,
state_template,
icon_template,
entity_picture_template,
availability_template,
on_action,
off_action,
level_action,
level_template,
entity_ids,
temperature_action,
temperature_template,
color_action,
color_template,
)
)
async_add_entities(lights)
class LightTemplate(Light):
"""Representation of a templated Light, including dimmable."""
def __init__(
self,
hass,
device_id,
friendly_name,
state_template,
icon_template,
entity_picture_template,
availability_template,
on_action,
off_action,
level_action,
level_template,
entity_ids,
temperature_action,
temperature_template,
color_action,
color_template,
):
"""Initialize the light."""
self.hass = hass
self.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, device_id, hass=hass
)
self._name = friendly_name
self._template = state_template
self._icon_template = icon_template
self._entity_picture_template = entity_picture_template
self._availability_template = availability_template
self._on_script = Script(hass, on_action)
self._off_script = Script(hass, off_action)
self._level_script = None
if level_action is not None:
self._level_script = Script(hass, level_action)
self._level_template = level_template
self._temperature_script = None
if temperature_action is not None:
self._temperature_script = Script(hass, temperature_action)
self._temperature_template = temperature_template
self._color_script = None
if color_action is not None:
self._color_script = Script(hass, color_action)
self._color_template = color_template
self._state = False
self._icon = None
self._entity_picture = None
self._brightness = None
self._temperature = None
self._color = None
self._entities = entity_ids
self._available = True
@property
def brightness(self):
"""Return the brightness of the light."""
return self._brightness
@property
def color_temp(self):
"""Return the CT color value in mireds."""
return self._temperature
@property
def hs_color(self):
"""Return the hue and saturation color value [float, float]."""
return self._color
@property
def name(self):
"""Return the display name of this light."""
return self._name
@property
def supported_features(self):
"""Flag supported features."""
supported_features = 0
if self._level_script is not None:
supported_features |= SUPPORT_BRIGHTNESS
if self._temperature_script is not None:
supported_features |= SUPPORT_COLOR_TEMP
if self._color_script is not None:
supported_features |= SUPPORT_COLOR
return supported_features
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return self._icon
@property
def entity_picture(self):
"""Return the entity picture to use in the frontend, if any."""
return self._entity_picture
@property
def available(self) -> bool:
"""Return if the device is available."""
return self._available
async def async_added_to_hass(self):
"""Register callbacks."""
@callback
def template_light_state_listener(entity, old_state, new_state):
"""Handle target device state changes."""
self.async_schedule_update_ha_state(True)
@callback
def template_light_startup(event):
"""Update template on startup."""
if (
self._template is not None
or self._level_template is not None
or self._temperature_template is not None
or self._color_template is not None
or self._availability_template is not None
):
async_track_state_change(
self.hass, self._entities, template_light_state_listener
)
self.async_schedule_update_ha_state(True)
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, template_light_startup
)
async def async_turn_on(self, **kwargs):
"""Turn the light on."""
optimistic_set = False
# set optimistic states
if self._template is None:
self._state = True
optimistic_set = True
if self._level_template is None and ATTR_BRIGHTNESS in kwargs:
_LOGGER.info(
"Optimistically setting brightness to %s", kwargs[ATTR_BRIGHTNESS]
)
self._brightness = kwargs[ATTR_BRIGHTNESS]
optimistic_set = True
if self._temperature_template is None and ATTR_COLOR_TEMP in kwargs:
_LOGGER.info(
"Optimistically setting color temperature to %s",
kwargs[ATTR_COLOR_TEMP],
)
self._temperature = kwargs[ATTR_COLOR_TEMP]
optimistic_set = True
if ATTR_BRIGHTNESS in kwargs and self._level_script:
await self._level_script.async_run(
{"brightness": kwargs[ATTR_BRIGHTNESS]}, context=self._context
)
elif ATTR_COLOR_TEMP in kwargs and self._temperature_script:
await self._temperature_script.async_run(
{"color_temp": kwargs[ATTR_COLOR_TEMP]}, context=self._context
)
elif ATTR_HS_COLOR in kwargs and self._color_script:
hs_value = kwargs[ATTR_HS_COLOR]
await self._color_script.async_run(
{"hs": hs_value, "h": int(hs_value[0]), "s": int(hs_value[1])},
context=self._context,
)
else:
await self._on_script.async_run()
if optimistic_set:
self.async_schedule_update_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the light off."""
await self._off_script.async_run(context=self._context)
if self._template is None:
self._state = False
self.async_schedule_update_ha_state()
async def async_update(self):
"""Update from templates."""
self.update_state()
self.update_brightness()
self.update_temperature()
self.update_color()
for property_name, template in (
("_icon", self._icon_template),
("_entity_picture", self._entity_picture_template),
("_available", self._availability_template),
):
if template is None:
continue
try:
value = template.async_render()
if property_name == "_available":
value = value.lower() == "true"
setattr(self, property_name, value)
except TemplateError as ex:
friendly_property_name = property_name[1:].replace("_", " ")
if ex.args and ex.args[0].startswith(
"UndefinedError: 'None' has no attribute"
):
# Common during HA startup - so just a warning
_LOGGER.warning(
"Could not render %s template %s, the state is unknown.",
friendly_property_name,
self._name,
)
return
try:
setattr(self, property_name, getattr(super(), property_name))
except AttributeError:
_LOGGER.error(
"Could not render %s template %s: %s",
friendly_property_name,
self._name,
ex,
)
@callback
def update_brightness(self):
"""Update the brightness from the template."""
if self._level_template is None:
return
try:
brightness = self._level_template.async_render()
if 0 <= int(brightness) <= 255:
self._brightness = int(brightness)
else:
_LOGGER.error(
"Received invalid brightness : %s. Expected: 0-255", brightness
)
self._brightness = None
except TemplateError as ex:
_LOGGER.error(ex)
self._state = None
@callback
def update_state(self):
"""Update the state from the template."""
if self._template is None:
return
try:
state = self._template.async_render().lower()
if state in _VALID_STATES:
self._state = state in ("true", STATE_ON)
else:
_LOGGER.error(
"Received invalid light is_on state: %s. Expected: %s",
state,
", ".join(_VALID_STATES),
)
self._state = None
except TemplateError as ex:
_LOGGER.error(ex)
self._state = None
@callback
def update_temperature(self):
"""Update the temperature from the template."""
if self._temperature_template is None:
return
try:
temperature = int(self._temperature_template.async_render())
if self.min_mireds <= temperature <= self.max_mireds:
self._temperature = temperature
else:
_LOGGER.error(
"Received invalid color temperature : %s. Expected: 0-%s",
temperature,
self.max_mireds,
)
self._temperature = None
except TemplateError:
_LOGGER.error("Cannot evaluate temperature template", exc_info=True)
self._temperature = None
@callback
def update_color(self):
"""Update the hs_color from the template."""
if self._color_template is None:
return
self._color = None
try:
render = self._color_template.async_render()
h_str, s_str = map(
float, render.replace("(", "").replace(")", "").split(",", 1)
)
if (
h_str is not None
and s_str is not None
and 0 <= h_str <= 360
and 0 <= s_str <= 100
):
self._color = (h_str, s_str)
elif h_str is not None and s_str is not None:
_LOGGER.error(
"Received invalid hs_color : (%s, %s). Expected: (0-360, 0-100)",
h_str,
s_str,
)
else:
_LOGGER.error("Received invalid hs_color : (%s)", render)
except TemplateError as ex:
_LOGGER.error(ex)
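# --- Hedged, self-contained sketch (not part of the integration) ---
# The same parsing and range validation that update_color applies to a rendered
# template string such as "(120, 50)". `parse_hs_color` is a made-up helper name.
def parse_hs_color(render):
    h_str, s_str = map(
        float, render.replace("(", "").replace(")", "").split(",", 1)
    )
    if 0 <= h_str <= 360 and 0 <= s_str <= 100:
        return (h_str, s_str)
    return None
assert parse_hs_color("(120, 50)") == (120.0, 50.0)
assert parse_hs_color("(400, 50)") is None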
| 33.945887
| 86
| 0.603583
|
794878bb92267289cfea7181bfd87ed03e684ab0
| 258
|
py
|
Python
|
doc/groktut/a_view_for_a_model2/src/sample/app.py
|
zopefoundation/groktoolkit
|
6d295997510139b0588b2401d5f54fca8234a15e
|
[
"ZPL-2.1"
] | 2
|
2015-12-05T05:46:55.000Z
|
2017-07-03T17:39:32.000Z
|
doc/groktut/a_view_for_a_model2/src/sample/app.py
|
zopefoundation/groktoolkit
|
6d295997510139b0588b2401d5f54fca8234a15e
|
[
"ZPL-2.1"
] | 7
|
2017-07-06T15:07:31.000Z
|
2021-04-20T14:38:21.000Z
|
doc/groktut/a_view_for_a_model2/src/sample/app.py
|
zopefoundation/groktoolkit
|
6d295997510139b0588b2401d5f54fca8234a15e
|
[
"ZPL-2.1"
] | 2
|
2016-03-20T18:03:15.000Z
|
2018-01-11T04:33:40.000Z
|
import grok
class Sample(grok.Application, grok.Container):
def information(self):
return "This is important information!"
class Index(grok.View):
def reversed_information(self):
return ''.join(reversed(self.context.information()))
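# Hedged sketch (not part of the Grok tutorial file): the reversal idiom used by
# Index.reversed_information, shown standalone on the string that
# Sample.information() returns.
assert ''.join(reversed("This is important information!")) == "!noitamrofni tnatropmi si sihT"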
| 25.8
| 60
| 0.713178
|
794878e99fe131c991195ea18fc24da22367f6bb
| 28,923
|
py
|
Python
|
cloudvolume/frontends/precomputed.py
|
SridharJagannathan/cloud-volume
|
ae4e5d8f245aacf451404e91f75e6da5182ac090
|
[
"BSD-3-Clause"
] | 1
|
2021-02-12T12:15:33.000Z
|
2021-02-12T12:15:33.000Z
|
cloudvolume/frontends/precomputed.py
|
SridharJagannathan/cloud-volume
|
ae4e5d8f245aacf451404e91f75e6da5182ac090
|
[
"BSD-3-Clause"
] | null | null | null |
cloudvolume/frontends/precomputed.py
|
SridharJagannathan/cloud-volume
|
ae4e5d8f245aacf451404e91f75e6da5182ac090
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import print_function
import itertools
import gevent.socket
import json
import os
import sys
import uuid
import socket
import fastremap
from six.moves import range
import numpy as np
from tqdm import tqdm
from six import string_types
import multiprocessing as mp
from .. import lib
from ..cacheservice import CacheService
from .. import exceptions
from ..lib import (
colorize, red, mkdir,
Vec, Bbox, jsonify
)
from ..datasource import autocropfn
from ..datasource.precomputed import PrecomputedMetadata
from ..paths import strict_extract
from ..provenance import DataLayerProvenance
from ..storage import SimpleStorage, Storage, reset_connection_pools
from ..volumecutout import VolumeCutout
from .. import sharedmemory
def warn(text):
print(colorize('yellow', text))
class CloudVolumePrecomputed(object):
def __init__(self,
meta, cache, config,
image=None, mesh=None, skeleton=None,
mip=0
):
self.config = config
self.cache = cache
self.meta = meta
self.image = image
self.mesh = mesh
self.skeleton = skeleton
self.green_threads = self.config.green # display warning message
# needs to be set after info is defined since
# its setter is based off of scales
self.mip = mip
self.pid = os.getpid()
@property
def autocrop(self):
return self.image.autocrop
@autocrop.setter
def autocrop(self, val):
self.image.autocrop = val
@property
def background_color(self):
return self.image.background_color
@background_color.setter
def background_color(self, val):
self.image.background_color = val
@property
def bounded(self):
return self.image.bounded
@bounded.setter
def bounded(self, val):
self.image.bounded = val
@property
def fill_missing(self):
return self.image.fill_missing
@fill_missing.setter
def fill_missing(self, val):
self.image.fill_missing = val
@property
def green_threads(self):
return self.config.green
@green_threads.setter
def green_threads(self, val):
if val and socket.socket is not gevent.socket.socket:
warn("""
WARNING: green_threads is set but this process is
not monkey patched. This will cause severely degraded
performance.
CloudVolume uses gevent for cooperative (green)
threading but it requires patching the Python standard
library to perform asynchronous IO. Add this code to
the top of your program (before any other imports):
import gevent.monkey
gevent.monkey.patch_all(threads=False)
More Information:
http://www.gevent.org/intro.html#monkey-patching
""")
self.config.green = bool(val)
@property
def non_aligned_writes(self):
return self.image.non_aligned_writes
@non_aligned_writes.setter
def non_aligned_writes(self, val):
self.image.non_aligned_writes = val
@property
def delete_black_uploads(self):
return self.image.delete_black_uploads
@delete_black_uploads.setter
def delete_black_uploads(self, val):
self.image.delete_black_uploads = val
@property
def parallel(self):
return self.config.parallel
@parallel.setter
def parallel(self, num_processes):
if type(num_processes) == bool:
num_processes = mp.cpu_count() if num_processes == True else 1
elif num_processes <= 0:
raise ValueError('Number of processes must be >= 1. Got: ' + str(num_processes))
else:
num_processes = int(num_processes)
self.config.parallel = num_processes
@property
def cdn_cache(self):
return self.config.cdn_cache
@cdn_cache.setter
def cdn_cache(self, val):
self.config.cdn_cache = val
@property
def compress(self):
return self.config.compress
@compress.setter
def compress(self, val):
self.config.compress = val
@property
def progress(self):
return self.config.progress
@progress.setter
def progress(self, val):
self.config.progress = bool(val)
@property
def info(self):
return self.meta.info
@info.setter
def info(self, val):
self.meta.info = val
@property
def provenance(self):
return self.meta.provenance
@provenance.setter
def provenance(self, val):
self.meta.provenance = val
def __setstate__(self, d):
"""Called when unpickling which is integral to multiprocessing."""
self.__dict__ = d
pid = os.getpid()
if 'pid' in d and d['pid'] != pid:
# otherwise the pickle might have references to old connections
reset_connection_pools()
self.pid = pid
@classmethod
def create_new_info(cls,
num_channels, layer_type, data_type, encoding,
resolution, voxel_offset, volume_size,
mesh=None, skeletons=None, chunk_size=(64,64,64),
compressed_segmentation_block_size=(8,8,8),
max_mip=0, factor=Vec(2,2,1), redirect=None,
*args, **kwargs
):
"""
Create a new neuroglancer Precomputed info file.
Required:
num_channels: (int) 1 for grayscale, 3 for RGB
layer_type: (str) typically "image" or "segmentation"
data_type: (str) e.g. "uint8", "uint16", "uint32", "float32"
encoding: (str) "raw" for binaries like numpy arrays, "jpeg"
resolution: int (x,y,z), x,y,z voxel dimensions in nanometers
voxel_offset: int (x,y,z), beginning of dataset in positive cartesian space
volume_size: int (x,y,z), extent of dataset in cartesian space from voxel_offset
Optional:
mesh: (str) name of mesh directory, typically "mesh"
skeletons: (str) name of skeletons directory, typically "skeletons"
chunk_size: int (x,y,z), dimensions of each downloadable 3D image chunk in voxels
compressed_segmentation_block_size: (x,y,z) dimensions of each compressed sub-block
(only used when encoding is 'compressed_segmentation')
max_mip: (int), the maximum mip level id.
factor: (Vec), the downsampling factor for each mip level
redirect: If this volume has moved, you can set an automatic redirect
by specifying a cloudpath here.
Returns: dict representing a single mip level that's JSON encodable
"""
return PrecomputedMetadata.create_info(
num_channels, layer_type, data_type, encoding,
resolution, voxel_offset, volume_size,
mesh, skeletons, chunk_size,
compressed_segmentation_block_size,
max_mip, factor,
*args, **kwargs
)
def refresh_info(self):
"""Restore the current info from cache or storage."""
return self.meta.refresh_info()
def commit_info(self):
return self.meta.commit_info()
def refresh_provenance(self):
return self.meta.refresh_provenance()
def commit_provenance(self):
return self.meta.commit_provenance()
@property
def dataset_name(self):
return self.meta.dataset
@property
def layer(self):
return self.meta.layer
@property
def mip(self):
return self.config.mip
@mip.setter
def mip(self, mip):
self.config.mip = self.meta.to_mip(mip)
@property
def scales(self):
return self.meta.scales
@scales.setter
def scales(self, val):
self.meta.scales = val
@property
def scale(self):
return self.meta.scale(self.mip)
@scale.setter
def scale(self, val):
self.info['scales'][self.mip] = val
def mip_scale(self, mip):
return self.meta.scale(mip)
@property
def basepath(self):
return self.meta.basepath
@property
def layerpath(self):
return self.meta.layerpath
@property
def base_cloudpath(self):
return self.meta.base_cloudpath
@property
def cloudpath(self):
return self.layer_cloudpath
@property
def layer_cloudpath(self):
return self.meta.cloudpath
@property
def info_cloudpath(self):
return self.meta.infopath
@property
def cache_path(self):
return self.cache.path
@property
def ndim(self):
return len(self.shape)
def mip_ndim(self, mip):
return len(self.meta.shape(mip))
@property
def shape(self):
"""Returns Vec(x,y,z,channels) shape of the volume similar to numpy."""
return tuple(self.meta.shape(self.mip))
def mip_shape(self, mip):
return tuple(self.meta.shape(mip))
@property
def volume_size(self):
"""Returns Vec(x,y,z) shape of the volume (i.e. shape - channels)."""
return self.meta.volume_size(self.mip)
def mip_volume_size(self, mip):
return self.meta.volume_size(mip)
@property
def available_mips(self):
"""Returns a list of mip levels that are defined."""
return self.meta.available_mips
@property
def available_resolutions(self):
"""Returns a list of defined resolutions."""
return (s["resolution"] for s in self.scales)
@property
def layer_type(self):
"""e.g. 'image' or 'segmentation'"""
return self.meta.layer_type
@property
def dtype(self):
"""e.g. 'uint8'"""
return self.meta.dtype
@property
def data_type(self):
return self.meta.data_type
@property
def encoding(self):
"""e.g. 'raw' or 'jpeg'"""
return self.meta.encoding(self.mip)
def mip_encoding(self, mip):
return self.meta.encoding(mip)
@property
def compressed_segmentation_block_size(self):
return self.mip_compressed_segmentation_block_size(self.mip)
def mip_compressed_segmentation_block_size(self, mip):
if 'compressed_segmentation_block_size' in self.info['scales'][mip]:
return self.info['scales'][mip]['compressed_segmentation_block_size']
return None
@property
def num_channels(self):
return self.meta.num_channels
@property
def voxel_offset(self):
"""Vec(x,y,z) start of the dataset in voxels"""
return self.meta.voxel_offset(self.mip)
def mip_voxel_offset(self, mip):
return self.meta.voxel_offset(mip)
@property
def resolution(self):
"""Vec(x,y,z) dimensions of each voxel in nanometers"""
return self.meta.resolution(self.mip)
def mip_resolution(self, mip):
return self.meta.resolution(mip)
@property
def downsample_ratio(self):
"""Describes how downsampled the current mip level is as an (x,y,z) factor triple."""
return self.meta.downsample_ratio(self.mip)
@property
def chunk_size(self):
"""Underlying chunk size dimensions in voxels. Synonym for underlying."""
return self.meta.chunk_size(self.mip)
def mip_chunk_size(self, mip):
return self.meta.chunk_size(mip)
@property
def underlying(self):
"""Underlying chunk size dimensions in voxels. Synonym for chunk_size."""
return self.meta.chunk_size(self.mip)
def mip_underlying(self, mip):
return self.meta.chunk_size(mip)
@property
def key(self):
"""The subdirectory within the data layer containing the chunks for this mip level"""
return self.meta.key(self.mip)
def mip_key(self, mip):
return self.meta.key(mip)
@property
def bounds(self):
"""Returns a bounding box for the dataset with dimensions in voxels"""
return self.meta.bounds(self.mip)
def mip_bounds(self, mip):
offset = self.meta.voxel_offset(mip)
shape = self.meta.volume_size(mip)
return Bbox( offset, offset + shape )
def point_to_mip(self, pt, mip, to_mip):
return self.meta.point_to_mip(pt, mip, to_mip)
def bbox_to_mip(self, bbox, mip, to_mip):
"""Convert bbox or slices from one mip level to another."""
return self.meta.bbox_to_mip(bbox, mip, to_mip)
def slices_to_global_coords(self, slices):
"""
Used to convert from a higher mip level into mip 0 resolution.
"""
bbox = self.meta.bbox_to_mip(slices, self.mip, 0)
return bbox.to_slices()
def slices_from_global_coords(self, slices):
"""
Used for converting from mip 0 coordinates to upper mip level
coordinates. This is mainly useful for debugging since the neuroglancer
client displays the mip 0 coordinates for your cursor.
"""
bbox = self.meta.bbox_to_mip(slices, 0, self.mip)
return bbox.to_slices()
def reset_scales(self):
"""Used for manually resetting downsamples if something messed up."""
self.meta.reset_scales()
return self.commit_info()
def add_scale(self, factor, encoding=None, chunk_size=None, info=None):
"""
Generate a new downsample scale for the info file and return an updated dictionary.
You'll still need to call self.commit_info() to make it permanent.
Required:
factor: int (x,y,z), e.g. (2,2,1) would represent a reduction of 2x in x and y
Optional:
encoding: force new layer to e.g. jpeg or compressed_segmentation
chunk_size: force new layer to new chunk size
Returns: info dict
"""
return self.meta.add_scale(factor, encoding, chunk_size, info)
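  # Usage sketch (hypothetical factor): register a 2x2x1 downsample level on an
  # existing instance `vol`, then persist the updated info file.
  #
  #   vol.add_scale((2, 2, 1))
  #   vol.commit_info()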
def exists(self, bbox_or_slices):
"""
Produce a summary of whether all the requested chunks exist.
bbox_or_slices: accepts either a Bbox or a tuple of slices representing
the requested volume.
Returns: { chunk_file_name: boolean, ... }
"""
return self.image.exists(bbox_or_slices)
def delete(self, bbox_or_slices):
"""
Delete the files within the bounding box.
bbox_or_slices: accepts either a Bbox or a tuple of slices representing
the requested volume.
"""
return self.image.delete(bbox_or_slices)
def transfer_to(self, cloudpath, bbox, block_size=None, compress=True):
"""
Transfer files from one storage location to another, bypassing
volume painting. This enables using a single CloudVolume instance
to transfer big volumes. In some cases, gsutil or aws s3 cli tools
may be more appropriate. This method is provided for convenience. It
may be optimized for better performance over time as demand requires.
cloudpath (str): path to storage layer
bbox (Bbox object): ROI to transfer
block_size (int): number of file chunks to transfer per I/O batch.
compress (bool): Set to False to upload as uncompressed
"""
return self.image.transfer_to(cloudpath, bbox, self.mip, block_size, compress)
def __getitem__(self, slices):
if type(slices) == Bbox:
slices = slices.to_slices()
slices = self.meta.bbox(self.mip).reify_slices(slices, bounded=self.bounded)
steps = Vec(*[ slc.step for slc in slices ])
slices = [ slice(slc.start, slc.stop) for slc in slices ]
channel_slice = slices.pop()
requested_bbox = Bbox.from_slices(slices)
img = self.download(requested_bbox, self.mip)
return img[::steps.x, ::steps.y, ::steps.z, channel_slice]
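  # Usage sketch (hypothetical coordinates): slice notation routes through
  # __getitem__ and ultimately calls download() on the requested region.
  #
  #   cutout = vol[0:512, 0:512, 0:64]               # 4D array (x, y, z, channels)
  #   cutout = vol[Bbox((0, 0, 0), (512, 512, 64))]  # equivalent, using a Bbox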
def download(
self, bbox, mip=None, parallel=None,
segids=None, preserve_zeros=False,
# Absorbing polymorphic Graphene calls
agglomerate=None, timestamp=None, stop_layer=None,
# new download arguments
renumber=False
):
"""
Downloads segmentation from the indicated cutout
region.
bbox: specifies cutout to fetch
mip: which resolution level to get (default self.mip)
parallel: what parallel level to use (default self.parallel)
segids: agglomerate the leaves of these segids from the graph
server and label them with the given segid.
preserve_zeros: If segids is not None:
False: mask other segids with zero
True: mask other segids with the largest integer value
contained by the image data type and leave zero as is.
renumber: dynamically rewrite downloaded segmentation into
a more compact data type. Only compatible with single-process
non-sharded download.
agglomerate, timestamp, and stop_layer are just there to
absorb arguments to what could be a graphene frontend.
Returns: img
"""
bbox = Bbox.create(
bbox, context=self.bounds,
bounded=self.bounded,
autocrop=self.autocrop
)
if mip is None:
mip = self.mip
if parallel is None:
parallel = self.parallel
tup = self.image.download(bbox, mip, parallel=parallel, renumber=bool(renumber))
if renumber:
img, remap = tup
else:
remap = {}
img = tup
if segids is None:
return tup
mask_value = 0
if preserve_zeros:
mask_value = np.inf
if np.issubdtype(self.dtype, np.integer):
mask_value = np.iinfo(self.dtype).max
segids.append(0)
img = fastremap.mask_except(img, segids, in_place=True, value=mask_value)
img = VolumeCutout.from_volume(
self.meta, mip, img, bbox
)
if renumber:
return img, remap
else:
return img
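  # Usage sketch (hypothetical segment ids): download a labeled cutout and mask
  # everything except the segments of interest.
  #
  #   bbox = Bbox((0, 0, 0), (256, 256, 32))
  #   img = vol.download(bbox, mip=0, segids=[1234, 5678], preserve_zeros=True)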
def download_point(
self, pt, size=256,
mip=None, parallel=None,
coord_resolution=None,
**kwargs
):
"""
Download a cutout centered on the given point (specified in mip 0 coords).
Useful for quickly visualizing a neuroglancer coordinate
at an arbitrary mip level.
pt: (x,y,z)
size: int or (sx,sy,sz)
mip: int representing resolution level
parallel: number of processes to launch (0 means all cores)
coord_resolution: (rx,ry,rz) the coordinate resolution of the input point.
Sometimes Neuroglancer is working in the resolution of another
higher res layer and this can help correct that.
Return: image
"""
if isinstance(size, int):
size = Vec(size, size, size)
else:
size = Vec(*size)
if mip is None:
mip = self.mip
mip = self.meta.to_mip(mip)
size2 = size // 2
if coord_resolution is not None:
factor = self.meta.resolution(0) / Vec(*coord_resolution)
pt = Vec(*pt) / factor
pt = self.point_to_mip(pt, mip=0, to_mip=mip)
bbox = Bbox(pt - size2, pt + size2).astype(np.int64)
for i, sz in enumerate(size):
if sz == 1:
bbox.minpt[i] = pt[i]
bbox.maxpt[i] = pt[i] + 1
if self.autocrop:
bbox = Bbox.intersection(bbox, self.meta.bounds(mip))
bbox = bbox.astype(np.int32)
if parallel is None:
parallel = self.parallel
return self.image.download(bbox, mip, parallel=parallel, **kwargs)
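  # Usage sketch (hypothetical point): fetch a 256-voxel cube centered on a
  # neuroglancer coordinate reported at another layer's resolution.
  #
  #   img = vol.download_point((35000, 21000, 1500), size=256, mip=2,
  #                            coord_resolution=(4, 4, 40))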
def unlink_shared_memory(self):
"""Unlink the current shared memory location from the filesystem."""
return self.image.unlink_shared_memory()
def download_to_shared_memory(self, slices, location=None, mip=None):
"""
Download images to a shared memory array.
https://github.com/seung-lab/cloud-volume/wiki/Advanced-Topic:-Shared-Memory
tip: If you want to use slice notation, np.s_[...] will help in a pinch.
MEMORY LIFECYCLE WARNING: You are responsible for managing the lifecycle of the
shared memory. CloudVolume will merely write to it, it will not unlink the
memory automatically. To fully clear the shared memory you must unlink the
location and close any mmap file handles. You can use `cloudvolume.sharedmemory.unlink(...)`
to help you unlink the shared memory file or `vol.unlink_shared_memory()` if you do
not specify location (meaning the default instance location is used).
EXPERT MODE WARNING: If you aren't sure you need this function (e.g. to relieve
memory pressure or improve performance in some way) you should use the ordinary
download method of img = vol[:]. A typical use case is transferring arrays between
different processes without making copies. For reference, this feature was created
for downloading a 62 GB array and working with it in Julia.
Required:
slices: (Bbox or list of slices) the bounding box the shared array represents. For instance
if you have a 1024x1024x128 volume and you're downloading only a 512x512x64 corner
touching the origin, your Bbox would be `Bbox( (0,0,0), (512,512,64) )`.
Optional:
location: (str) Defaults to self.shared_memory_id. Shared memory location
e.g. 'cloudvolume-shm-RANDOM-STRING' This typically corresponds to a file
in `/dev/shm` or `/run/shm/`. It can also be a file if you're using that for mmap.
Returns: ndarray backed by shared memory
"""
if mip is None:
mip = self.mip
slices = self.meta.bbox(mip).reify_slices(slices, bounded=self.bounded)
steps = Vec(*[ slc.step for slc in slices ])
channel_slice = slices.pop()
requested_bbox = Bbox.from_slices(slices)
if self.autocrop:
requested_bbox = Bbox.intersection(requested_bbox, self.bounds)
img = self.image.download(
requested_bbox, mip, parallel=self.parallel,
location=location, retain=True, use_shared_memory=True
)
return img[::steps.x, ::steps.y, ::steps.z, channel_slice]
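  # Usage sketch (hypothetical location name): download into shared memory, use the
  # array (possibly from another process), then unlink the segment explicitly.
  #
  #   arr = vol.download_to_shared_memory(np.s_[0:512, 0:512, 0:64],
  #                                       location='cloudvolume-shm-example')
  #   # ... use arr ...
  #   # cloudvolume.sharedmemory.unlink('cloudvolume-shm-example')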
def download_to_file(self, path, bbox, mip=None):
"""
Download images directly to a file.
Required:
bbox: (Bbox) the bounding box the downloaded array represents. For instance
if you have a 1024x1024x128 volume and you're downloading only a 512x512x64 corner
touching the origin, your Bbox would be `Bbox( (0,0,0), (512,512,64) )`.
path: (str)
Optional:
mip: (int; default: self.mip) The current resolution level.
Returns: ndarray backed by an mmapped file
"""
if mip is None:
mip = self.mip
slices = self.meta.bbox(mip).reify_slices(bbox, bounded=self.bounded)
steps = Vec(*[ slc.step for slc in slices ])
channel_slice = slices.pop()
requested_bbox = Bbox.from_slices(slices)
if self.autocrop:
requested_bbox = Bbox.intersection(requested_bbox, self.bounds)
img = self.image.download(
requested_bbox, mip, parallel=self.parallel,
location=lib.toabs(path), retain=True, use_file=True
)
return img[::steps.x, ::steps.y, ::steps.z, channel_slice]
def __setitem__(self, slices, img):
if type(slices) == Bbox:
slices = slices.to_slices()
slices = self.meta.bbox(self.mip).reify_slices(slices, bounded=self.bounded)
bbox = Bbox.from_slices(slices)
slice_shape = list(bbox.size())
bbox = Bbox.from_slices(slices[:3])
if np.isscalar(img):
img = np.zeros(slice_shape, dtype=self.dtype) + img
imgshape = list(img.shape)
if len(imgshape) == 3:
imgshape = imgshape + [ self.num_channels ]
if not np.array_equal(imgshape, slice_shape):
raise exceptions.AlignmentError("""
Input image shape does not match slice shape.
Image Shape: {}
Slice Shape: {}
""".format(imgshape, slice_shape))
if self.autocrop:
if not self.bounds.contains_bbox(bbox):
img, bbox = autocropfn(self.meta, img, bbox, self.mip)
if bbox.subvoxel():
return
self.image.upload(img, bbox.minpt, self.mip, parallel=self.parallel)
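  # Usage sketch (hypothetical data): slice assignment routes through __setitem__;
  # the uploaded array's shape must match the slice shape exactly.
  #
  #   chunk = np.zeros((512, 512, 64, 1), dtype=vol.dtype)
  #   vol[0:512, 0:512, 0:64] = chunk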
def upload_from_shared_memory(self, location, bbox, order='F', cutout_bbox=None):
"""
Upload from a shared memory array.
https://github.com/seung-lab/cloud-volume/wiki/Advanced-Topic:-Shared-Memory
tip: If you want to use slice notation, np.s_[...] will help in a pinch.
MEMORY LIFECYCLE WARNING: You are responsible for managing the lifecycle of the
shared memory. CloudVolume will merely read from it, it will not unlink the
memory automatically. To fully clear the shared memory you must unlink the
location and close any mmap file handles. You can use `cloudvolume.sharedmemory.unlink(...)`
to help you unlink the shared memory file.
EXPERT MODE WARNING: If you aren't sure you need this function (e.g. to relieve
memory pressure or improve performance in some way) you should use the ordinary
upload method of vol[:] = img. A typical use case is transferring arrays between
different processes without making copies. For reference, this feature was created
for uploading a 62 GB array that originated in Julia.
Required:
location: (str) Shared memory location e.g. 'cloudvolume-shm-RANDOM-STRING'
This typically corresponds to a file in `/dev/shm` or `/run/shm/`. It can
also be a file if you're using that for mmap.
bbox: (Bbox or list of slices) the bounding box the shared array represents. For instance
if you have a 1024x1024x128 volume and you're uploading only a 512x512x64 corner
touching the origin, your Bbox would be `Bbox( (0,0,0), (512,512,64) )`.
Optional:
cutout_bbox: (bbox or list of slices) If you only want to upload a section of the
array, give the bbox in volume coordinates (not image coordinates) that should
be cut out. For example, if you only want to upload 256x256x32 of the upper
rightmost corner of the above example but the entire 512x512x64 array is stored
in memory, you would provide: `Bbox( (256, 256, 32), (512, 512, 64) )`
By default, just upload the entire image.
Returns: void
"""
bbox = Bbox.create(bbox)
cutout_bbox = Bbox.create(cutout_bbox) if cutout_bbox else bbox.clone()
if not bbox.contains_bbox(cutout_bbox):
raise exceptions.AlignmentError("""
The provided cutout is not wholly contained in the given array.
Bbox: {}
Cutout: {}
""".format(bbox, cutout_bbox))
if self.autocrop:
cutout_bbox = Bbox.intersection(cutout_bbox, self.bounds)
if cutout_bbox.subvoxel():
return
shape = list(bbox.size3()) + [ self.num_channels ]
mmap_handle, shared_image = sharedmemory.ndarray(
location=location, shape=shape,
dtype=self.dtype, order=order,
readonly=True
)
delta_box = cutout_bbox.clone() - bbox.minpt
cutout_image = shared_image[ delta_box.to_slices() ]
self.image.upload(
cutout_image, cutout_bbox.minpt, self.mip,
parallel=self.parallel,
location=location,
location_bbox=bbox,
order=order,
use_shared_memory=True,
)
mmap_handle.close()
def upload_from_file(self, location, bbox, order='F', cutout_bbox=None):
"""
Upload from an mmapped file.
tip: If you want to use slice notation, np.s_[...] will help in a pinch.
Required:
location: (str) Shared memory location e.g. 'cloudvolume-shm-RANDOM-STRING'
This typically corresponds to a file in `/dev/shm` or `/run/shm/`. It can
also be a file if you're using that for mmap.
bbox: (Bbox or list of slices) the bounding box the shared array represents. For instance
if you have a 1024x1024x128 volume and you're uploading only a 512x512x64 corner
touching the origin, your Bbox would be `Bbox( (0,0,0), (512,512,64) )`.
Optional:
cutout_bbox: (bbox or list of slices) If you only want to upload a section of the
array, give the bbox in volume coordinates (not image coordinates) that should
be cut out. For example, if you only want to upload 256x256x32 of the upper
rightmost corner of the above example but the entire 512x512x64 array is stored
in memory, you would provide: `Bbox( (256, 256, 32), (512, 512, 64) )`
By default, just upload the entire image.
Returns: void
"""
bbox = Bbox.create(bbox)
cutout_bbox = Bbox.create(cutout_bbox) if cutout_bbox else bbox.clone()
if not bbox.contains_bbox(cutout_bbox):
raise exceptions.AlignmentError("""
The provided cutout is not wholly contained in the given array.
Bbox: {}
Cutout: {}
""".format(bbox, cutout_bbox))
if self.autocrop:
cutout_bbox = Bbox.intersection(cutout_bbox, self.bounds)
if cutout_bbox.subvoxel():
return
shape = list(bbox.size3()) + [ self.num_channels ]
mmap_handle, shared_image = sharedmemory.ndarray_fs(
location=lib.toabs(location), shape=shape,
dtype=self.dtype, order=order,
readonly=True, lock=None
)
delta_box = cutout_bbox.clone() - bbox.minpt
cutout_image = shared_image[ delta_box.to_slices() ]
self.image.upload(
cutout_image, cutout_bbox.minpt, self.mip,
parallel=self.parallel,
location=lib.toabs(location),
location_bbox=bbox,
order=order,
use_file=True,
)
mmap_handle.close()
def viewer(self, port=1337):
import cloudvolume.server
cloudvolume.server.view(self.cloudpath, port=port)
def to_dask(self, chunks=None, name=None):
"""Return a dask array for this volume.
Parameters
----------
chunks: tuple of ints or tuples of ints
Passed to ``da.from_array``, allows setting the chunks on
initialisation, if the chunking scheme in the stored dataset is not
optimal for the calculations to follow. Note that the chunking should
be compatible with an underlying 4d array.
name: str, optional
An optional keyname for the array. Defaults to hashing the input
Returns
-------
Dask array
"""
import dask.array as da
from dask.base import tokenize
if chunks is None:
chunks = tuple(self.chunk_size) + (self.num_channels, )
if name is None:
name = 'to-dask-' + tokenize(self, chunks)
return da.from_array(self, chunks, name=name)
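  # Usage sketch (hypothetical reduction): wrap the volume as a lazy dask array and
  # compute a small statistic without materializing the full dataset.
  #
  #   darr = vol.to_dask()                    # chunked along the volume's chunk_size
  #   mean = darr[..., 0].mean().compute()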
| 30.834755 | 98 | 0.685095 |
794878eb395b07b22b3fa9104adfcb208ec0f2f0 | 5,235 | py | Python | third_party/gsutil/gslib/commands/setcors.py | bdero/depot_tools | 685577439cbf9cb8c660e3da39bdcbb64c197c95 | ["BSD-3-Clause"] | 45 | 2015-10-12T10:02:11.000Z | 2021-12-14T07:10:47.000Z | third_party/gsutil/gslib/commands/setcors.py | bdero/depot_tools | 685577439cbf9cb8c660e3da39bdcbb64c197c95 | ["BSD-3-Clause"] | 1 | 2019-01-14T00:36:35.000Z | 2019-01-14T00:36:35.000Z | third_party/gsutil/gslib/commands/setcors.py | bdero/depot_tools | 685577439cbf9cb8c660e3da39bdcbb64c197c95 | ["BSD-3-Clause"] | 38 | 2016-06-25T05:57:35.000Z | 2021-12-30T04:58:10.000Z |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import xml.sax
from boto import handler
from boto.gs.cors import Cors
from gslib.command import Command
from gslib.command import COMMAND_NAME
from gslib.command import COMMAND_NAME_ALIASES
from gslib.command import CONFIG_REQUIRED
from gslib.command import FILE_URIS_OK
from gslib.command import MAX_ARGS
from gslib.command import MIN_ARGS
from gslib.command import PROVIDER_URIS_OK
from gslib.command import SUPPORTED_SUB_ARGS
from gslib.command import URIS_START_ARG
from gslib.exception import CommandException
from gslib.help_provider import HELP_NAME
from gslib.help_provider import HELP_NAME_ALIASES
from gslib.help_provider import HELP_ONE_LINE_SUMMARY
from gslib.help_provider import HELP_TEXT
from gslib.help_provider import HelpType
from gslib.help_provider import HELP_TYPE
from gslib.util import NO_MAX
_detailed_help_text = ("""
<B>SYNOPSIS</B>
gsutil setcors cors-xml-file uri...
<B>DESCRIPTION</B>
Sets the Cross-Origin Resource Sharing (CORS) configuration on one or more
buckets. This command is supported for buckets only, not objects. The
cors-xml-file specified on the command line should be a path to a local
file containing an XML document with the following structure:
<?xml version="1.0" ?>
<CorsConfig>
<Cors>
<Origins>
<Origin>http://origin1.example.com</Origin>
</Origins>
<Methods>
<Method>GET</Method>
</Methods>
<ResponseHeaders>
<ResponseHeader>Content-Type</ResponseHeader>
</ResponseHeaders>
</Cors>
</CorsConfig>
The above XML document explicitly allows cross-origin GET requests from
http://origin1.example.com and may include the Content-Type response header.
For more info about CORS, see http://www.w3.org/TR/cors/.
""")
class SetCorsCommand(Command):
"""Implementation of gsutil setcors command."""
# Command specification (processed by parent class).
command_spec = {
# Name of command.
COMMAND_NAME : 'setcors',
# List of command name aliases.
COMMAND_NAME_ALIASES : [],
# Min number of args required by this command.
MIN_ARGS : 2,
# Max number of args required by this command, or NO_MAX.
MAX_ARGS : NO_MAX,
# Getopt-style string specifying acceptable sub args.
SUPPORTED_SUB_ARGS : '',
# True if file URIs acceptable for this command.
FILE_URIS_OK : False,
# True if provider-only URIs acceptable for this command.
PROVIDER_URIS_OK : False,
# Index in args of first URI arg.
URIS_START_ARG : 1,
# True if must configure gsutil before running command.
CONFIG_REQUIRED : True,
}
help_spec = {
# Name of command or auxiliary help info for which this help applies.
HELP_NAME : 'setcors',
# List of help name aliases.
HELP_NAME_ALIASES : ['cors', 'cross-origin'],
# Type of help.
HELP_TYPE : HelpType.COMMAND_HELP,
# One line summary of this help.
HELP_ONE_LINE_SUMMARY : 'Set a CORS XML document for one or more buckets',
# The full help text.
HELP_TEXT : _detailed_help_text,
}
# Command entry point.
def RunCommand(self):
cors_arg = self.args[0]
uri_args = self.args[1:]
# Disallow multi-provider setcors requests.
storage_uri = self.UrisAreForSingleProvider(uri_args)
if not storage_uri:
raise CommandException('"%s" command spanning providers not allowed.' %
self.command_name)
# Open, read and parse file containing XML document.
cors_file = open(cors_arg, 'r')
cors_txt = cors_file.read()
cors_file.close()
cors_obj = Cors()
# Parse XML document and convert into Cors object.
h = handler.XmlHandler(cors_obj, None)
try:
xml.sax.parseString(cors_txt, h)
except xml.sax._exceptions.SAXParseException, e:
raise CommandException('Requested CORS is invalid: %s at line %s, '
'column %s' % (e.getMessage(), e.getLineNumber(),
e.getColumnNumber()))
# Iterate over URIs, expanding wildcards, and setting the CORS on each.
some_matched = False
for uri_str in uri_args:
for blr in self.WildcardIterator(uri_str):
uri = blr.GetUri()
if not uri.names_bucket():
raise CommandException('URI %s must name a bucket for the %s command'
% (str(uri), self.command_name))
some_matched = True
print 'Setting CORS on %s...' % uri
uri.set_cors(cors_obj, False, self.headers)
if not some_matched:
raise CommandException('No URIs matched')
return 0
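# Usage sketch (hypothetical bucket and file names): apply a local CORS document
# to one or more buckets.
#
#   gsutil setcors cors.xml gs://example-bucket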
| 35.856164 | 79 | 0.696657 |
79487929ec8acb89278a89ad1e58d8fb20b83249 | 2,799 | py | Python | pdf/utils/utils.py | nightohl/phoneypdf | 5edf945f33d8d775409dfea89462b5cb8bbddeff | ["BSD-3-Clause"] | 16 | 2015-03-04T15:27:21.000Z | 2021-08-10T01:22:13.000Z | pdf/utils/utils.py | nightohl/phoneypdf | 5edf945f33d8d775409dfea89462b5cb8bbddeff | ["BSD-3-Clause"] | null | null | null | pdf/utils/utils.py | nightohl/phoneypdf | 5edf945f33d8d775409dfea89462b5cb8bbddeff | ["BSD-3-Clause"] | 5 | 2015-10-12T08:52:11.000Z | 2021-09-15T06:38:13.000Z |
import re
from constants import *
def CopyWithoutWhiteSpace(content):
result = []
for token in content:
if token[0] != CHAR_WHITESPACE:
result.append(token)
return result
def Obj2Str(content):
return ''.join(map(lambda x: repr(x[1])[1:-1], CopyWithoutWhiteSpace(content)))
def IsNumeric(str):
return re.match('^[0-9]+', str)
def TrimLWhiteSpace(data):
while data[0][0] == CHAR_WHITESPACE:
data = data[1:]
return data
def TrimRWhiteSpace(data):
while data[-1][0] == CHAR_WHITESPACE:
data = data[:-1]
return data
def FormatOutput(data, raw):
if raw:
if type(data) == type([]):
return ''.join(map(lambda x: x[1], data))
else:
return data
else:
return repr(data)
def Canonicalize(sIn):
if sIn == "":
return sIn
elif sIn[0] != '/':
return sIn
elif sIn.find('#') == -1:
return sIn
else:
i = 0
iLen = len(sIn)
sCanonical = ''
while i < iLen:
if sIn[i] == '#' and i < iLen - 2:
try:
sCanonical += chr(int(sIn[i+1:i+3], 16))
i += 2
except:
sCanonical += sIn[i]
else:
sCanonical += sIn[i]
i += 1
return sCanonical
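# Usage sketch: PDF name tokens may escape characters as '#' plus two hex digits;
# Canonicalize decodes them so comparisons see the plain name.
#
#   Canonicalize('/F#6fnt')     # -> '/Font' ('#6f' decodes to 'o')
#   Canonicalize('/JavaScript') # unchanged, no '#' present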
def EqualCanonical(s1, s2):
return Canonicalize(s1) == s2
def ConditionalCanonicalize(sIn, nocanonicalizedoutput):
if nocanonicalizedoutput:
return sIn
else:
return Canonicalize(sIn)
## Following utils are marked for deletion in the next major update
def PrintObject(object, options):
print 'obj %d %d' % (object.id, object.version)
print ' Type: %s' % ConditionalCanonicalize(object.GetType(), options.nocanonicalizedoutput)
print ' Referencing: %s' % ', '.join(map(lambda x: '%s %s %s' % x, object.GetReferences()))
stream = object.ContainsStream()
oPDFParseDictionary = None
if stream:
print ' Contains stream'
print ' %s' % FormatOutput(stream, options.raw)
oPDFParseDictionary = cPDFParseDictionary(stream, options.nocanonicalizedoutput)
else:
print ' %s' % FormatOutput(object.content, options.raw)
oPDFParseDictionary = cPDFParseDictionary(object.content, options.nocanonicalizedoutput)
print
oPDFParseDictionary.PrettyPrint(' ')
print
if options.filter and not options.dump:
filtered = object.Stream()
if filtered == []:
print ' %s' % FormatOutput(object.content, options.raw)
else:
print ' %s' % FormatOutput(filtered, options.raw)
if options.dump:
print 'Start dump:'
print object.Stream(False)
print 'End dump'
print
return
| 28.272727 | 96 | 0.582351 |
79487a100202f038a5808f0bc28a8dc24080f807 | 13,572 | py | Python | cdna-detector.py | rheinbaylab/cDNA-detector | 72c9415cea8568c11edf696542b8baa967e392ec | ["BSD-3-Clause"] | 1 | 2021-07-30T06:42:23.000Z | 2021-07-30T06:42:23.000Z | cdna-detector.py | rheinbaylab/cDNA-detector | 72c9415cea8568c11edf696542b8baa967e392ec | ["BSD-3-Clause"] | null | null | null | cdna-detector.py | rheinbaylab/cDNA-detector | 72c9415cea8568c11edf696542b8baa967e392ec | ["BSD-3-Clause"] | 1 | 2021-08-23T20:33:07.000Z | 2021-08-23T20:33:07.000Z |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import argparse
import os
import sys
import re
from argparse import RawTextHelpFormatter
def get_script_path():
return os.path.dirname(os.path.realpath(sys.argv[0]))
def prepare(args):
from scripts.prepare import step1
from scripts.prepare import global_para
global_para.gtf_file = args.annotation
global_para.output_dir = args.output_dir
global_para.chr_exclude = args.chr_exclude
global_para.chr_include = args.chr_include
global_para.genome_fasta = args.genome
global_para.format = args.format
global_para.source = args.source
global_para.feature_type_cds = args.feature_type_cds
global_para.featureid_gene_id = args.featureid_gene_id
global_para.featureid_gene_name = args.featureid_gene_name
global_para.featureid_cds_ranking = args.featureid_cds_ranking
global_para.featureid_transcript_id = args.featureid_transcript_id
step1.convert_gff2saf(global_para)
def detect(args):
from scripts.detect import step2
from scripts.detect import global_para
global_para.script_path = get_script_path()
global_para.gtf_gene_unique_file, global_para.blastn_database, global_para.file_source_known = global_para.f_get_input_files(args, global_para)
global_para.genome_bam_file = args.bam
global_para.sampleid = args.sample_id
global_para.min_quality = int(args.min_quality)
global_para.output_dir = args.output_dir
global_para.n_threads = int(args.n_threads)
global_para.cutoff_num_exon_unaligned_reads = args.median_exon_cdna_reads
global_para.num_initial_potential_cdna = args.num_initial_potential_cdna
global_para.cutoff_pvalue = args.pvalue
global_para.cutoff_ratio_gene = args.min_ratio
global_para.exclude_ehc = True if args.exclude_ehc == "True" else False
global_para.ratio_ehc = args.ratio_ehc
global_para.count_ehc = args.count_ehc
global_para.source_inference_include = args.inferred_source_include
global_para.source_inference_exclude = args.inferred_source_exclude
global_para.source_known_databases_include = args.known_source_include
global_para.source_known_databases_exclude = args.known_source_exclude
# output files
global_para.out_exon_stat = os.path.join(global_para.output_dir, (global_para.sampleid + '.exon_statistics.tsv'))
global_para.out_gene_stat = os.path.join(global_para.output_dir, (global_para.sampleid + '.gene_statistics.tsv'))
global_para.out_exon_stat_filter = os.path.join(global_para.output_dir, (global_para.sampleid + '.exon_statistics.filtered.tsv'))
global_para.out_gene_stat_filter = os.path.join(global_para.output_dir, (global_para.sampleid + '.gene_statistics.filtered.tsv'))
global_para.out_gene_stat_filter_source = os.path.join(global_para.output_dir, (global_para.sampleid + '.gene_statistics.filtered.source_filtered.tsv'))
global_para.out_bed_merge = os.path.join(global_para.output_dir, (global_para.sampleid + '.merge_region.tsv'))
global_para.out_log = os.path.join(global_para.output_dir, (global_para.sampleid + '.log'))
global_para.out_blastn_seq = os.path.join(global_para.output_dir, (global_para.sampleid + '.clipped_seq'))
global_para.out_blastn_seq_table = global_para.out_blastn_seq + ".blastn.tsv"
global_para.out_blastn_seq_source = global_para.out_blastn_seq + ".source_inference.tsv"
step2.detect_cdna(global_para)
def clean(args):
from scripts.clean import step3
from scripts.clean import global_para
# global_para = global_para;
global_para.file_region = args.region
global_para.file_bam = args.bam
# global_para.genome_bam_file = args.bam
global_para.sampleid = args.sample_id
global_para.bl_gDNA_remove = args.gDNA_remove
global_para.clean_way = args.clean_way
global_para.output_dir = args.output_dir
global_para.file_bam_clean = os.path.join(global_para.output_dir, (global_para.sampleid + '.clean.bam'))
global_para.file_region_clean = os.path.join(global_para.output_dir,(global_para.sampleid+'.clean_region.tsv'))
global_para.out_log = os.path.join(global_para.output_dir, (global_para.sampleid + '.clean.log'))
step3.cdna_remove_region(global_para)
def main():
description_str = '''
cdna-detector.py is a tool to detect and clean cDNA contamination in DNA-Seq.
Version: 0.1.0
Code: https://github.com/rheinbaylab/cDNA_detector.git
Mail: mqi3@mgh.harvard.edu
Usage:
cdna-detector.py <subcommand> [options]
Example:
cdna-detector.py prepare -h
cdna-detector.py detect -h
cdna-detector.py clean -h
'''
parser = argparse.ArgumentParser(usage = argparse.SUPPRESS,add_help = True,description = description_str,formatter_class=RawTextHelpFormatter)
subparsers = parser.add_subparsers(title = "sub-commands include",metavar = "-------------------")
# create the parser for the "step1: create gene model" command
parser_a = subparsers.add_parser('prepare', help='prepare a gene model annotation file. Input file: NCBI gff3',usage = "cdna-detector.py prepare --annotation <gtf/bed> --genome <genome sequence> [options]",add_help = True, formatter_class=argparse.RawTextHelpFormatter)
parser_a.set_defaults(func=prepare)
required_a = parser_a.add_argument_group('required arguments')
required_a.add_argument('--annotation', metavar = "", help="gene annotation files\ninput format: gtf/bed\n",required = True)
required_a.add_argument('--genome', type=str, metavar = '', help='genome fasta file\ninput format: fa/fasta', required = True)
parser_a.add_argument('--format', type=str, metavar = '', help='input annotation format: gtf or bed\n If format is "bed", only 3 or 4 columns will be used.\n- default: gtf', default = "gtf")
parser_a.add_argument('--output_dir', type=str, metavar = '', help='output directory\n- default: .',default = '.')
group = parser_a.add_mutually_exclusive_group()
group.add_argument("--chr_exclude", metavar = '', help = 'exclude chromosomes, multiple chromosomes are separated by ","\n- conflict with --chr_include\n- default: chrM\n- example: --chr_exclude chrX,chrY,chrM',default = "chrM")
group.add_argument("--chr_include", metavar = '', help = 'only include chromosomes, multiple chromosomes are separated by ","\n- conflict with --chr_exclude\n- example: --chr_include chr1,chr2',default = '')
parser_a.add_argument('--source',metavar = '', default = "all", help = 'the program that generated this feature, it is located in 2nd column of gtf. multiple sources are separated by ","\n- default: all source\n- example: --source havana')
parser_a.add_argument('--feature_type_cds',metavar = '', default = "CDS", help = 'feature name for identifying CDS regions. It is located in 3rd column of gtf. Multiple features are separated by ","\n- default: CDS')
parser_a.add_argument('--featureid_gene_id',metavar = '', default = "gene_id", help = 'attribute to show gene id. \n- default: gene_id')
parser_a.add_argument('--featureid_gene_name',metavar = '', default = "gene_name", help = 'attribute to show gene name. \n- default: gene_name')
parser_a.add_argument('--featureid_cds_ranking',metavar = '', default = "exon_number", help = 'attribute to show exon number.\n- default exon_number')
parser_a.add_argument('--featureid_transcript_id',metavar = '', default = "transcript_id", help = 'attribute to show transcript id. \n- default transcript_id')
parser_a._action_groups.reverse()
# create the parser for the "step2: detect cDNA in DNA sequencing datasets" command
parser_b = subparsers.add_parser('detect', help='detect possible cDNA regions',usage = "cdna-detector.py detect --bam <bam> --sample_id <sample_id> --gene_model <gene_model> [options]",add_help = True, formatter_class=argparse.RawTextHelpFormatter)
parser_b.set_defaults(func=detect)
required_b = parser_b.add_argument_group('required arguments')
required_b.add_argument('--bam',metavar = "", help='The input file is in BAM format. Recommend results from software bwa',required = True)
required_b.add_argument('--sample_id',metavar = "", help='Identifier. Used as output prefix',required = True)
required_b.add_argument('--gene_model',metavar = "", help='Link to gene model.\n - INPUT: hg19/hg38/mm10/(gene model generated from subcommand "prepare")')
parser_b.add_argument('--min_quality',metavar = "", type = int, default = 0, help='Minimum read mapping quality.\n- integer\n- default: 0\n')
parser_b.add_argument('--pvalue',metavar = "", type = float, default = 0.05, help='significant p values.\n- float\n- default: 0.05')
parser_b.add_argument('--min_ratio_transcript',dest = "min_ratio",metavar = "", type = float, default = 0.3, help = "minimum ratio of detected exons of one transcript\n- float\n- default: 0.3")
parser_b.add_argument('--output_dir',metavar = "", default = '.', help = 'output directory\n- default: "."')
parser_b.add_argument('--n_threads',metavar = "", type = int, default = 1, help = 'number of threads\n- integer\n- default: 1')
parser_b.add_argument('--median_exon_cdna_reads',metavar = "", type = int, default = 0, help='minimum median number of reads which may come from cDNA for each exon.\n- integer\n- default: 0')
parser_b.add_argument('--num_initial_potential_cdna',metavar = "", type = int, default = 500, help='minimum number of potential cDNAs which should be evaluated .\n- integer\n- default: 500')
parser_b.add_argument('--exclude_ehc',default = "True", choices = ["True", "False"], help='to exclude extremely high coverage of clipped reads for single gene from background calculation.\n- string\n- default: True')
parser_b.add_argument('--ratio_ehc',metavar = "", type = float, default = 0.05, help='cutoff for ratio of extremely high coverage of clipped reads for single gene.\n- float\n- default: 0.05')
parser_b.add_argument('--count_ehc',metavar = "", type = float, default = 10000, help='cutoff for read count of extremely high coverage of clipped reads for single gene.\n- integer\n- default: 10000')
parser_b.add_argument('--blastn_database', dest = "blastn_database", metavar = "", default = "", help = "databases for cDNA source inference.\n- default: corresponding databases for build-in gene models or default databases")
parser_b.add_argument('--file_source_known', dest = "file_source_known", metavar = "", default = "", help = "file represent gene class.\n- default: for human, genes from retrocopy and blacklist are listed.")
group_b_inference_source = parser_b.add_mutually_exclusive_group()
group_b_inference_source.add_argument("--inferred_source_include", metavar = "", help = 'only include cDNAs with inferred sources, multiple input are separated by ","\n- conflict with --inferred_source_exclude\n- example: --inferred_source_include vector,unknown\n- default: output all cDNAs with inferred sources', default = '')
group_b_inference_source.add_argument("--inferred_source_exclude", metavar = "", help = 'only exclude cDNAs with inference sources, multiple input are separated by ","\n- conflict with --inferred_source_include\n- example: --inferred_source_exclude retrocopy\n- default: null value', default = 'retrocopy')
group_b_known_source = parser_b.add_mutually_exclusive_group()
group_b_known_source.add_argument("--known_source_include", metavar = "", help = 'only include cDNAs with known sources, multiple input are separated by ","\n- conflict with --known_source_exclude\n- example: --known_source_include retrocopy\n- default: null value', default = '')
group_b_known_source.add_argument("--known_source_exclude", metavar = "", help = 'only exclude cDNAs with inference sources, multiple input are separated by ","\n- conflict with --known_source_include\n- example: --known_source_exclude blacklist\n- default: blacklist', default = 'blacklist')
parser_b._action_groups.reverse()
# create the parser for the "step3: remove cDNA in DNA sequencing datasets" command
parser_c = subparsers.add_parser('clean', help='remove possible cDNA from bam files',usage = "cdna-detector.py clean --bam <bam> --region <region_file> --sample_id <sample_id> [options]",add_help = True, formatter_class=argparse.RawTextHelpFormatter)
parser_c.set_defaults(func=clean)
required_c = parser_c.add_argument_group('required arguments')
required_c.add_argument('--bam', metavar = "", help='The input file is bam format\n- Note: must be sorted',required = True)
required_c.add_argument('--sample_id',metavar = "", help='Identifier. Used as output prefix',required = True)
required_c.add_argument('--region', metavar = "", help='files which show contaminated regions\n- which should be output of subcommand "detect"',required = True)
group_c = parser_c.add_mutually_exclusive_group()
group_c.add_argument('--method',dest = "clean_way",default = "automatic", choices = ["automatic","rpm","fraction"], help='method of removing reads from bam files\n- value: automatic, fraction, rpm\n- automatic: estimate cDNA based on cDNA in exon boundaries\n- fraction: fraction of reads kept after cleaning in exon regions\n- rpm: reads per million kept after cleaning in exon regions\n- default: automatic')
parser_c.add_argument('--output_dir', default = os.getcwd(), metavar = "", help = 'output directory\n- default: "."')
# parser_c.add_argument('--paired', default = 'False', action = 'store_true', help='if bam files are paired-end, if not set, bam files are recognized as single-end files')
parser_c.add_argument('--gDNA_remove', default = 'False', action = 'store_true', help='if set, gDNA will be removed if satisfied cutoff')
parser_c._action_groups.reverse()
return(parser)
if __name__ == "__main__":
parser = main()
parser._action_groups.reverse()
args = parser.parse_args()
if len(sys.argv)==1:
parser.print_help()
else:
args.func(args)
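# Usage sketch (hypothetical file names): a typical three-step run of the tool.
#
#   python cdna-detector.py prepare --annotation genes.gtf --genome genome.fa --output_dir gene_model
#   python cdna-detector.py detect --bam sample.bam --sample_id sample01 --gene_model hg38
#   python cdna-detector.py clean --bam sample.bam --region sample01.merge_region.tsv --sample_id sample01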
| 75.821229 | 411 | 0.768273 |
79487b1f6bfd3027a29821b3732ea0a323e2c785 | 314 | py | Python | flavorsync/parser/xml/xml_parser.py | Fiware/ops.Flavor-sync | d4f3eb1ff3a76eb824b327f5b23632f09ebf2cda | ["Apache-2.0"] | null | null | null | flavorsync/parser/xml/xml_parser.py | Fiware/ops.Flavor-sync | d4f3eb1ff3a76eb824b327f5b23632f09ebf2cda | ["Apache-2.0"] | null | null | null | flavorsync/parser/xml/xml_parser.py | Fiware/ops.Flavor-sync | d4f3eb1ff3a76eb824b327f5b23632f09ebf2cda | ["Apache-2.0"] | null | null | null |
from flavorsync.parser.parser import Parser
from lxml import etree
from lxml import objectify
class XMLParser(Parser):
def __init__(self):
self.xml = ""
self.dict = {}
def _remove_xml_namespaces(self):
objectify.deannotate(self.xml)
etree.cleanup_namespaces(self.xml)
| 24.153846 | 43 | 0.684713 |
79487b2a7975d4287682db99438c41888d9dc4e2 | 10,187 | py | Python | test/ext/test_horizontal_shard.py | lgo/sqlalchemy | eea048ff28751d4a3553c4c8e0bcca005065dcfc | ["MIT"] | null | null | null | test/ext/test_horizontal_shard.py | lgo/sqlalchemy | eea048ff28751d4a3553c4c8e0bcca005065dcfc | ["MIT"] | null | null | null | test/ext/test_horizontal_shard.py | lgo/sqlalchemy | eea048ff28751d4a3553c4c8e0bcca005065dcfc | ["MIT"] | null | null | null |
import datetime
import os
from sqlalchemy import *
from sqlalchemy import event
from sqlalchemy import sql, util
from sqlalchemy.orm import *
from sqlalchemy.ext.horizontal_shard import ShardedSession
from sqlalchemy.sql import operators
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.engines import testing_engine
from sqlalchemy.testing import eq_
# TODO: ShardTest can be turned into a base for further subclasses
class ShardTest(object):
__skip_if__ = (lambda: util.win32,)
__requires__ = 'sqlite',
schema = None
def setUp(self):
global db1, db2, db3, db4, weather_locations, weather_reports
db1, db2, db3, db4 = self._init_dbs()
meta = MetaData()
ids = Table('ids', meta,
Column('nextid', Integer, nullable=False))
def id_generator(ctx):
# in reality, might want to use a separate transaction for this.
c = db1.contextual_connect()
nextid = c.execute(ids.select(for_update=True)).scalar()
c.execute(ids.update(values={ids.c.nextid: ids.c.nextid + 1}))
return nextid
weather_locations = Table(
"weather_locations", meta,
Column('id', Integer, primary_key=True, default=id_generator),
Column('continent', String(30), nullable=False),
Column('city', String(50), nullable=False),
schema=self.schema
)
weather_reports = Table(
'weather_reports',
meta,
Column('id', Integer, primary_key=True),
Column('location_id', Integer,
ForeignKey(weather_locations.c.id)),
Column('temperature', Float),
Column('report_time', DateTime,
default=datetime.datetime.now),
schema=self.schema
)
for db in (db1, db2, db3, db4):
meta.create_all(db)
db1.execute(ids.insert(), nextid=1)
self.setup_session()
self.setup_mappers()
@classmethod
def setup_session(cls):
global create_session
shard_lookup = {
'North America': 'north_america',
'Asia': 'asia',
'Europe': 'europe',
'South America': 'south_america',
}
def shard_chooser(mapper, instance, clause=None):
if isinstance(instance, WeatherLocation):
return shard_lookup[instance.continent]
else:
return shard_chooser(mapper, instance.location)
def id_chooser(query, ident):
return ['north_america', 'asia', 'europe', 'south_america']
def query_chooser(query):
ids = []
class FindContinent(sql.ClauseVisitor):
def visit_binary(self, binary):
if binary.left.shares_lineage(
weather_locations.c.continent):
if binary.operator == operators.eq:
ids.append(shard_lookup[binary.right.value])
elif binary.operator == operators.in_op:
for bind in binary.right.clauses:
ids.append(shard_lookup[bind.value])
if query._criterion is not None:
FindContinent().traverse(query._criterion)
if len(ids) == 0:
return ['north_america', 'asia', 'europe',
'south_america']
else:
return ids
create_session = sessionmaker(class_=ShardedSession,
autoflush=True, autocommit=False)
create_session.configure(shards={
'north_america': db1,
'asia': db2,
'europe': db3,
'south_america': db4,
}, shard_chooser=shard_chooser, id_chooser=id_chooser,
query_chooser=query_chooser)
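    # Usage sketch (follows the fixtures above): with the chooser callbacks
    # configured, a continent filter only touches the matching shard's engine.
    #
    #   sess = create_session()
    #   asia = sess.query(WeatherLocation).filter(
    #       WeatherLocation.continent == 'Asia')   # query_chooser -> ['asia'] -> db2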
@classmethod
def setup_mappers(cls):
global WeatherLocation, Report
class WeatherLocation(object):
def __init__(self, continent, city):
self.continent = continent
self.city = city
class Report(object):
def __init__(self, temperature):
self.temperature = temperature
mapper(WeatherLocation, weather_locations, properties={
'reports': relationship(Report, backref='location'),
'city': deferred(weather_locations.c.city),
})
mapper(Report, weather_reports)
def _fixture_data(self):
tokyo = WeatherLocation('Asia', 'Tokyo')
newyork = WeatherLocation('North America', 'New York')
toronto = WeatherLocation('North America', 'Toronto')
london = WeatherLocation('Europe', 'London')
dublin = WeatherLocation('Europe', 'Dublin')
brasilia = WeatherLocation('South America', 'Brasila')
quito = WeatherLocation('South America', 'Quito')
tokyo.reports.append(Report(80.0))
newyork.reports.append(Report(75))
quito.reports.append(Report(85))
sess = create_session()
for c in [
tokyo,
newyork,
toronto,
london,
dublin,
brasilia,
quito,
]:
sess.add(c)
sess.commit()
sess.close()
return sess
def test_roundtrip(self):
sess = self._fixture_data()
tokyo = sess.query(WeatherLocation).filter_by(city="Tokyo").one()
tokyo.city # reload 'city' attribute on tokyo
sess.expunge_all()
eq_(db2.execute(weather_locations.select()).fetchall(), [(1,
'Asia', 'Tokyo')])
eq_(db1.execute(weather_locations.select()).fetchall(), [(2,
'North America', 'New York'), (3, 'North America', 'Toronto')])
eq_(sess.execute(weather_locations.select(), shard_id='asia')
.fetchall(), [(1, 'Asia', 'Tokyo')])
t = sess.query(WeatherLocation).get(tokyo.id)
eq_(t.city, tokyo.city)
eq_(t.reports[0].temperature, 80.0)
north_american_cities = \
sess.query(WeatherLocation).filter(
WeatherLocation.continent == 'North America')
eq_(set([c.city for c in north_american_cities]),
set(['New York', 'Toronto']))
asia_and_europe = \
sess.query(WeatherLocation).filter(
WeatherLocation.continent.in_(['Europe', 'Asia']))
eq_(set([c.city for c in asia_and_europe]), set(['Tokyo',
'London', 'Dublin']))
def test_get_baked_query(self):
sess = self._fixture_data()
tokyo = sess.query(WeatherLocation).filter_by(city="Tokyo").one()
tokyo.city
sess.expunge_all()
from sqlalchemy.ext.baked import BakedQuery
bakery = BakedQuery.bakery()
bq = bakery(lambda session: session.query(WeatherLocation))
t = bq(sess).get(tokyo.id)
eq_(t.city, tokyo.city)
def test_get_baked_query_shard_id(self):
sess = self._fixture_data()
tokyo = sess.query(WeatherLocation).filter_by(city="Tokyo").one()
tokyo.city
sess.expunge_all()
from sqlalchemy.ext.baked import BakedQuery
bakery = BakedQuery.bakery()
bq = bakery(lambda session: session.query(WeatherLocation))
t = bq(sess).with_post_criteria(
lambda q: q.set_shard("asia")).get(tokyo.id)
eq_(t.city, tokyo.city)
def test_filter_baked_query_shard_id(self):
sess = self._fixture_data()
tokyo = sess.query(WeatherLocation).filter_by(city="Tokyo").one()
tokyo.city
sess.expunge_all()
from sqlalchemy.ext.baked import BakedQuery
bakery = BakedQuery.bakery()
bq = bakery(lambda session: session.query(WeatherLocation)).\
with_criteria(lambda q: q.filter_by(id=tokyo.id))
t = bq(sess).with_post_criteria(
lambda q: q.set_shard("asia")).one()
eq_(t.city, tokyo.city)
def test_shard_id_event(self):
canary = []
def load(instance, ctx):
canary.append(ctx.attributes["shard_id"])
event.listen(WeatherLocation, "load", load)
sess = self._fixture_data()
tokyo = sess.query(WeatherLocation).\
filter_by(city="Tokyo").set_shard("asia").one()
sess.query(WeatherLocation).all()
eq_(
canary,
['asia', 'north_america', 'north_america',
'europe', 'europe', 'south_america',
'south_america']
)
class DistinctEngineShardTest(ShardTest, fixtures.TestBase):
def _init_dbs(self):
db1 = testing_engine('sqlite:///shard1.db',
options=dict(pool_threadlocal=True))
db2 = testing_engine('sqlite:///shard2.db')
db3 = testing_engine('sqlite:///shard3.db')
db4 = testing_engine('sqlite:///shard4.db')
return db1, db2, db3, db4
def tearDown(self):
clear_mappers()
for db in (db1, db2, db3, db4):
db.connect().invalidate()
for i in range(1, 5):
os.remove("shard%d.db" % i)
class AttachedFileShardTest(ShardTest, fixtures.TestBase):
schema = "changeme"
def _init_dbs(self):
db1 = testing_engine('sqlite://', options={"execution_options":
{"shard_id": "shard1"}})
db2 = db1.execution_options(shard_id="shard2")
db3 = db1.execution_options(shard_id="shard3")
db4 = db1.execution_options(shard_id="shard4")
import re
@event.listens_for(db1, "before_cursor_execute", retval=True)
def _switch_shard(conn, cursor, stmt, params, context, executemany):
shard_id = conn._execution_options['shard_id']
# because SQLite can't just give us a "use" statement, we have
# to use the schema hack to locate table names
if shard_id:
stmt = re.sub(r"\"?changeme\"?\.", shard_id + "_", stmt)
return stmt, params
return db1, db2, db3, db4
| 33.843854 | 76 | 0.579268 |
79487b7c7daf01fb4706740beeff1602391efe90 | 706 | py | Python | setup.py | cyber-raskolnikov/bertviz | 4f4b170c17a2e63b46c73be7ee729e9dcce54ec1 | ["Apache-2.0"] | null | null | null | setup.py | cyber-raskolnikov/bertviz | 4f4b170c17a2e63b46c73be7ee729e9dcce54ec1 | ["Apache-2.0"] | null | null | null | setup.py | cyber-raskolnikov/bertviz | 4f4b170c17a2e63b46c73be7ee729e9dcce54ec1 | ["Apache-2.0"] | null | null | null |
import pathlib
from setuptools import setup
# The directory containing this file
HERE = pathlib.Path(__file__).parent
# The text of the README file
README = (HERE / "README.md").read_text()
# This call to setup() does all the work
setup(
name="bertviz",
version="1.4.0",
description="Attention visualization tool for NLP Transformer models.",
long_description=README,
long_description_content_type="text/markdown",
url="https://github.com/jessevig/bertviz",
author="Jesse Vig",
license="Apache 2.0",
packages=["bertviz"],
include_package_data=True,
install_requires=["transformers>=2.0", "torch>=1.0", "tqdm", "boto3", "requests", "regex", "sentencepiece"],
)
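# Usage sketch (standard packaging commands, not specific to this project):
#
#   pip install .                       # install from this checkout
#   python setup.py sdist bdist_wheel   # build distributable archives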
| 30.695652 | 112 | 0.70255 |
79487c38c8ae934612d4a06df21192d727ed8774 | 1,270 | py | Python | thefuck/rules/whois.py | Archstacker/thefuck | ebe53f0d181c28ec2f7a86f46d7d51a7d48bbd9e | ["MIT"] | 1 | 2021-05-08T23:24:17.000Z | 2021-05-08T23:24:17.000Z | thefuck/rules/whois.py | Archstacker/thefuck | ebe53f0d181c28ec2f7a86f46d7d51a7d48bbd9e | ["MIT"] | null | null | null | thefuck/rules/whois.py | Archstacker/thefuck | ebe53f0d181c28ec2f7a86f46d7d51a7d48bbd9e | ["MIT"] | 1 | 2021-06-21T09:01:08.000Z | 2021-06-21T09:01:08.000Z |
# -*- encoding: utf-8 -*-
from six.moves.urllib.parse import urlparse
def match(command, settings):
"""
What the `whois` command returns depends on the 'Whois server' it contacted
and is not consistent through different servers. But there can be only two
types of errors I can think of with `whois`:
- `whois https://en.wikipedia.org/` → `whois en.wikipedia.org`;
- `whois en.wikipedia.org` → `whois wikipedia.org`.
So we match any `whois` command and then:
- if there is a slash: keep only the FQDN;
- if there is no slash but there is a point: removes the left-most
subdomain.
We cannot either remove all subdomains because we cannot know which part is
the subdomains and which is the domain, consider:
- www.google.fr → subdomain: www, domain: 'google.fr';
- google.co.uk → subdomain: None, domain; 'google.co.uk'.
"""
return 'whois ' in command.script.strip()
def get_new_command(command, settings):
url = command.script.split()[1]
if '/' in command.script:
return 'whois ' + urlparse(url).netloc
elif '.' in command.script:
path = urlparse(url).path.split('.')
return ['whois ' + '.'.join(path[n:]) for n in range(1, len(path))]
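# Behavior sketch for the rules above (values show the command's .script text):
#
#   'whois https://en.wikipedia.org/'  ->  'whois en.wikipedia.org'
#   'whois en.wikipedia.org'           ->  ['whois wikipedia.org', 'whois org']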
| 38.484848 | 79 | 0.643307 |
79487ce616bcbd6376870598ff35408df1b72e26 | 11,373 | py | Python | src/handler.py | ManyRaptors/lykos | 1359b7435b070fd8ecab32ccc5b92722098165f2 | ["BSD-2-Clause"] | null | null | null | src/handler.py | ManyRaptors/lykos | 1359b7435b070fd8ecab32ccc5b92722098165f2 | ["BSD-2-Clause"] | null | null | null | src/handler.py | ManyRaptors/lykos | 1359b7435b070fd8ecab32ccc5b92722098165f2 | ["BSD-2-Clause"] | null | null | null |
# The bot commands implemented in here are present no matter which module is loaded
import base64
import socket
import sys
import threading
import time
import traceback
import functools
import botconfig
import src.settings as var
from src import decorators, wolfgame, events, channels, hooks, users, errlog as log, stream_handler as alog
from src.messages import messages
from src.utilities import reply, get_role, get_templates
from src.functions import get_participants, get_all_roles
from src.dispatcher import MessageDispatcher
from src.decorators import handle_error
cmd = decorators.cmd
hook = decorators.hook
@handle_error
def on_privmsg(cli, rawnick, chan, msg, *, notice=False, force_role=None):
if notice and "!" not in rawnick or not rawnick: # server notice; we don't care about those
return
user = users._get(rawnick, allow_none=True) # FIXME
if users.equals(chan, users.Bot.nick): # PM
target = users.Bot
else:
target = channels.get(chan, allow_none=True)
if user is None or target is None:
return
wrapper = MessageDispatcher(user, target)
if wrapper.public and botconfig.IGNORE_HIDDEN_COMMANDS and not chan.startswith(tuple(hooks.Features["CHANTYPES"])):
return
if (notice and ((wrapper.public and not botconfig.ALLOW_NOTICE_COMMANDS) or
(wrapper.private and not botconfig.ALLOW_PRIVATE_NOTICE_COMMANDS))):
return # not allowed in settings
if force_role is None: # if force_role isn't None, that indicates recursion; don't fire these off twice
for fn in decorators.COMMANDS[""]:
fn.caller(cli, rawnick, chan, msg)
parts = msg.split(sep=" ", maxsplit=1)
key = parts[0].lower()
if len(parts) > 1:
message = parts[1].lstrip()
else:
message = ""
if wrapper.public and not key.startswith(botconfig.CMD_CHAR):
return # channel message but no prefix; ignore
if key.startswith(botconfig.CMD_CHAR):
key = key[len(botconfig.CMD_CHAR):]
if not key: # empty key ("") already handled above
return
# Don't change this into decorators.COMMANDS[key] even though it's a defaultdict,
# as we don't want to insert bogus command keys into the dict.
cmds = []
phase = var.PHASE
if user in get_participants():
roles = get_all_roles(user)
# A user can be a participant but not have a role, for example, dead vengeful ghost
has_roles = len(roles) != 0
if force_role is not None:
roles &= {force_role} # only fire off role commands for the forced role
common_roles = set(roles) # roles shared by every eligible role command
have_role_cmd = False
for fn in decorators.COMMANDS.get(key, []):
if not fn.roles:
cmds.append(fn)
continue
if roles.intersection(fn.roles):
have_role_cmd = True
cmds.append(fn)
common_roles.intersection_update(fn.roles)
if force_role is not None and not have_role_cmd:
# Trying to force a non-role command with a role.
# We allow non-role commands to execute if a role is forced if a role
# command is also executed, as this would allow (for example) a bot admin
# to add extra effects to all "kill" commands without needing to continually
# update the list of roles which can use "kill". However, we don't want to
# allow things like "wolf pstats" because that just doesn't make sense.
return
if has_roles and not common_roles:
# getting here means that at least one of the role_cmds is disjoint
# from the others. For example, augur see vs seer see when a bare see
# is executed. In this event, display a helpful error message instructing
# the user to resolve the ambiguity.
common_roles = set(roles)
info = [0,0]
for fn in cmds:
fn_roles = roles.intersection(fn.roles)
if not fn_roles:
continue
for role1 in common_roles:
info[0] = role1
break
for role2 in fn_roles:
info[1] = role2
break
common_roles &= fn_roles
if not common_roles:
break
wrapper.pm(messages["ambiguous_command"].format(key, info[0], info[1]))
return
elif force_role is None:
cmds = decorators.COMMANDS.get(key, [])
for fn in cmds:
if phase == var.PHASE:
# FIXME: pass in var, wrapper, message instead of cli, rawnick, chan, message
fn.caller(cli, rawnick, chan, message)
def unhandled(cli, prefix, cmd, *args):
for fn in decorators.HOOKS.get(cmd, []):
fn.caller(cli, prefix, *args)
def ping_server(cli):
cli.send("PING :{0}".format(time.time()))
@cmd("latency", pm=True)
def latency(cli, nick, chan, rest):
ping_server(cli)
@hook("pong", hookid=300)
def latency_pong(cli, server, target, ts):
lat = round(time.time() - float(ts), 3)
reply(cli, nick, chan, messages["latency"].format(lat, "" if lat == 1 else "s"))
hook.unhook(300)
def connect_callback(cli):
regaincount = 0
releasecount = 0
@hook("endofmotd", hookid=294)
@hook("nomotd", hookid=294)
def prepare_stuff(cli, prefix, *args):
alog("Received end of MOTD from {0}".format(prefix))
# This callback only sets up event listeners
wolfgame.connect_callback()
# just in case we haven't managed to successfully auth yet
if botconfig.PASS and not botconfig.SASL_AUTHENTICATION:
cli.ns_identify(botconfig.USERNAME or botconfig.NICK,
botconfig.PASS,
nickserv=var.NICKSERV,
command=var.NICKSERV_IDENTIFY_COMMAND)
channels.Main = channels.add(botconfig.CHANNEL, cli)
channels.Dummy = channels.add("*", cli)
if botconfig.ALT_CHANNELS:
for chan in botconfig.ALT_CHANNELS.split(","):
channels.add(chan, cli)
if botconfig.DEV_CHANNEL:
channels.Dev = channels.add(botconfig.DEV_CHANNEL, cli)
if var.LOG_CHANNEL:
channels.add(var.LOG_CHANNEL, cli)
#if var.CHANSERV_OP_COMMAND: # TODO: Add somewhere else if needed
# cli.msg(var.CHANSERV, var.CHANSERV_OP_COMMAND.format(channel=botconfig.CHANNEL))
users.Bot.change_nick(botconfig.NICK)
if var.SERVER_PING_INTERVAL > 0:
def ping_server_timer(cli):
ping_server(cli)
t = threading.Timer(var.SERVER_PING_INTERVAL, ping_server_timer, args=(cli,))
t.daemon = True
t.start()
ping_server_timer(cli)
def setup_handler(evt, var, target):
target.client.command_handler["privmsg"] = on_privmsg
target.client.command_handler["notice"] = functools.partial(on_privmsg, notice=True)
events.remove_listener("who_end", setup_handler)
events.add_listener("who_end", setup_handler)
def mustregain(cli, server, bot_nick, nick, msg):
nonlocal regaincount
if not botconfig.PASS or bot_nick == nick or regaincount > 3:
return
if var.NICKSERV_REGAIN_COMMAND:
cli.ns_regain(nick=botconfig.NICK, password=botconfig.PASS, nickserv=var.NICKSERV, command=var.NICKSERV_REGAIN_COMMAND)
else:
cli.ns_ghost(nick=botconfig.NICK, password=botconfig.PASS, nickserv=var.NICKSERV, command=var.NICKSERV_GHOST_COMMAND)
# it is possible (though unlikely) that regaining the nick fails for some reason and we would loop infinitely
# as such, keep track of a count of how many times we regain, and after 3 times we no longer attempt to regain nicks
# Since we'd only be regaining on initial connect, this should be safe. The same trick is used below for release as well
regaincount += 1
users.Bot.change_nick(botconfig.NICK)
def mustrelease(cli, server, bot_nick, nick, msg):
nonlocal releasecount
if not botconfig.PASS or bot_nick == nick or releasecount > 3:
return # prevents the bot from trying to release without a password
if var.NICKSERV_RELEASE_COMMAND:
            cli.ns_release(nick=botconfig.NICK, password=botconfig.PASS, nickserv=var.NICKSERV, command=var.NICKSERV_RELEASE_COMMAND)
else:
cli.ns_ghost(nick=botconfig.NICK, password=botconfig.PASS, nickserv=var.NICKSERV, command=var.NICKSERV_GHOST_COMMAND)
releasecount += 1
users.Bot.change_nick(botconfig.NICK)
@hook("unavailresource", hookid=239)
@hook("nicknameinuse", hookid=239)
def must_use_temp_nick(cli, *etc):
users.Bot.nick += "_"
users.Bot.change_nick()
cli.user(botconfig.NICK, "") # TODO: can we remove this?
hook.unhook(239)
hook("unavailresource", hookid=240)(mustrelease)
hook("nicknameinuse", hookid=241)(mustregain)
request_caps = {"account-notify", "extended-join", "multi-prefix"}
if botconfig.SASL_AUTHENTICATION:
request_caps.add("sasl")
supported_caps = set()
@hook("cap")
def on_cap(cli, svr, mynick, cmd, caps, star=None):
if cmd == "LS":
if caps == "*":
# Multi-line LS
supported_caps.update(star.split())
else:
supported_caps.update(caps.split())
if botconfig.SASL_AUTHENTICATION and "sasl" not in supported_caps:
alog("Server does not support SASL authentication")
cli.quit()
common_caps = request_caps & supported_caps
if common_caps:
cli.send("CAP REQ " ":{0}".format(" ".join(common_caps)))
elif cmd == "ACK":
if "sasl" in caps:
cli.send("AUTHENTICATE PLAIN")
else:
cli.send("CAP END")
elif cmd == "NAK":
# This isn't supposed to happen. The server claimed to support a
# capability but now claims otherwise.
alog("Server refused capabilities: {0}".format(" ".join(caps)))
if botconfig.SASL_AUTHENTICATION:
@hook("authenticate")
def auth_plus(cli, something, plus):
if plus == "+":
account = (botconfig.USERNAME or botconfig.NICK).encode("utf-8")
password = botconfig.PASS.encode("utf-8")
auth_token = base64.b64encode(b"\0".join((account, account, password))).decode("utf-8")
cli.send("AUTHENTICATE " + auth_token)
@hook("903")
def on_successful_auth(cli, blah, blahh, blahhh):
cli.send("CAP END")
@hook("904")
@hook("905")
@hook("906")
@hook("907")
def on_failure_auth(cli, *etc):
alog("Authentication failed. Did you fill the account name "
"in botconfig.USERNAME if it's different from the bot nick?")
cli.quit()
users.Bot = users.BotUser(cli, botconfig.NICK)
# vim: set sw=4 expandtab:
| 38.292929
| 131
| 0.621736
|
79487cebe6f17d3be0cc2debe1c694640e3d3f56
| 5,952
|
py
|
Python
|
zvmsdk/tests/unit/test_networkops.py
|
jichenjc/python-zvm-sdk
|
c081805c6079107b4823af898babdf92cf5577ee
|
[
"Apache-2.0"
] | null | null | null |
zvmsdk/tests/unit/test_networkops.py
|
jichenjc/python-zvm-sdk
|
c081805c6079107b4823af898babdf92cf5577ee
|
[
"Apache-2.0"
] | null | null | null |
zvmsdk/tests/unit/test_networkops.py
|
jichenjc/python-zvm-sdk
|
c081805c6079107b4823af898babdf92cf5577ee
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from zvmsdk.tests.unit import base
from zvmsdk import networkops
class SDKNetworkOpsTestCase(base.SDKTestCase):
def setUp(self):
self.networkops = networkops.get_networkops()
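    # The networkops layer is a thin pass-through to zvmclient, so every test
    # below patches the matching zvmclient method and only asserts that it was
    # invoked with the expected arguments.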
@mock.patch.object(networkops.get_networkops().zvmclient, 'create_nic')
def test_create_nic(self, create_nic):
self.networkops.create_nic("fakeid", '1000', 'Fake_nic_id',
ip_addr="ipaddr",
active=True)
create_nic.assert_called_with("fakeid", vdev='1000',
nic_id='Fake_nic_id',
mac_addr=None, ip_addr="ipaddr",
active=True)
@mock.patch.object(networkops.get_networkops().zvmclient,
'get_vm_nic_vswitch_info')
def test_get_vm_nic_vswitch_info(self, get_nic_vswitch_info):
self.networkops.get_vm_nic_vswitch_info("fakenode")
get_nic_vswitch_info.assert_called_with("fakenode")
@mock.patch.object(networkops.get_networkops().zvmclient,
'get_vswitch_list')
def test_get_vswitch_list(self, get_vswitch_list):
self.networkops.get_vswitch_list()
get_vswitch_list.assert_called_with()
@mock.patch.object(networkops.get_networkops().zvmclient,
'couple_nic_to_vswitch')
def test_couple_nic_to_vswitch(self, couple_nic_to_vswitch):
self.networkops.couple_nic_to_vswitch("fake_userid", "nic_vdev",
"fake_VS_name",
True)
couple_nic_to_vswitch.assert_called_with("fake_userid",
"nic_vdev",
"fake_VS_name",
active=True)
@mock.patch.object(networkops.get_networkops().zvmclient,
'uncouple_nic_from_vswitch')
def test_uncouple_nic_from_vswitch(self, uncouple_nic_from_vswitch):
self.networkops.uncouple_nic_from_vswitch("fake_userid",
"nic_vdev",
True)
uncouple_nic_from_vswitch.assert_called_with("fake_userid",
"nic_vdev",
active=True)
@mock.patch.object(networkops.get_networkops().zvmclient, 'add_vswitch')
def test_add_vswitch(self, add_vswitch):
self.networkops.add_vswitch("fakename", "fakerdev",
controller='*',
connection='CONNECT', network_type='IP',
router="NONROUTER", vid='UNAWARE',
port_type='ACCESS', gvrp='GVRP',
queue_mem=8, native_vid=2, persist=False)
add_vswitch.assert_called_with("fakename", rdev="fakerdev",
controller='*', connection='CONNECT',
network_type='IP', router="NONROUTER",
vid='UNAWARE', port_type='ACCESS',
gvrp='GVRP', queue_mem=8,
native_vid=2, persist=False)
@mock.patch.object(networkops.get_networkops().zvmclient,
'grant_user_to_vswitch')
def test_grant_user_to_vswitch(self, grant_user):
self.networkops.grant_user_to_vswitch("vswitch_name", "userid")
grant_user.assert_called_with("vswitch_name", "userid")
@mock.patch.object(networkops.get_networkops().zvmclient,
'revoke_user_from_vswitch')
def test_revoke_user_from_vswitch(self, revoke_user):
self.networkops.revoke_user_from_vswitch("vswitch_name", "userid")
revoke_user.assert_called_with("vswitch_name", "userid")
@mock.patch.object(networkops.get_networkops().zvmclient,
'set_vswitch_port_vlan_id')
def test_set_vswitch_port_vlan_id(self, set_vswitch):
self.networkops.set_vswitch_port_vlan_id("vswitch_name",
"userid", "vlan_id")
set_vswitch.assert_called_with("vswitch_name", "userid", "vlan_id")
@mock.patch.object(networkops.get_networkops().zvmclient, 'set_vswitch')
def test_set_vswitch(self, set_vswitch):
self.networkops.set_vswitch("vswitch_name", grant_userid='fake_id')
set_vswitch.assert_called_with("vswitch_name", grant_userid='fake_id')
@mock.patch.object(networkops.get_networkops().zvmclient, 'delete_vswitch')
def test_delete_vswitch(self, delete_vswitch):
self.networkops.delete_vswitch("vswitch_name", True)
delete_vswitch.assert_called_with("vswitch_name", True)
@mock.patch.object(networkops.get_networkops().zvmclient, 'delete_nic')
def test_delete_nic(self, delete_nic):
self.networkops.delete_nic("userid", "vdev", True)
delete_nic.assert_called_with("userid", "vdev",
active=True)
| 50.440678
| 80
| 0.579637
|
79487cef795b3b05a7ec6da869b93b6ed4124c30
| 6,430
|
py
|
Python
|
deprecated_nets/net_focal.py
|
danielmk/pyDentateeLife2020
|
b4a9f2beaa0c74dbc9583e2cf228856612596f8a
|
[
"MIT"
] | 1
|
2022-02-24T20:39:46.000Z
|
2022-02-24T20:39:46.000Z
|
deprecated_nets/net_focal.py
|
danielmk/pyDentateeLife2020
|
b4a9f2beaa0c74dbc9583e2cf228856612596f8a
|
[
"MIT"
] | null | null | null |
deprecated_nets/net_focal.py
|
danielmk/pyDentateeLife2020
|
b4a9f2beaa0c74dbc9583e2cf228856612596f8a
|
[
"MIT"
] | 4
|
2020-02-18T09:25:20.000Z
|
2021-11-20T23:52:29.000Z
|
# -*- coding: utf-8 -*-
"""
This module implements the class TunedNetwork.
TunedNetwork creates a ring network as defined in Santhakumar et al. 2005
with some changes as in Yim et al. 2015.
See the TunedNetwork docstring for details.
Created on Tue Nov 28 13:01:38 2017
@author: DanielM
"""
from neuron import h, gui
import ouropy
import numpy as np
from granulecell import GranuleCell
from mossycell_cat import MossyCell
from basketcell import BasketCell
from hippcell import HippCell
class TunedNetwork(ouropy.gennetwork.GenNetwork):
""" This model implements the ring model from Santhakumar et al. 2005.
with some changes as in Yim et al. 2015.
It features inhibition but omits the MC->GC connection.
"""
name = "TunedNetwork"
def __init__(self, seed=None, temporal_patterns=np.array([]),
spatial_patterns_gcs=np.array([]),
spatial_patterns_bcs=np.array([])):
self.init_params = locals()
self.init_params['self'] = str(self.init_params['self'])
# Setup cells
self.mk_population(GranuleCell, 2000)
self.mk_population(MossyCell, 60)
self.mk_population(BasketCell, 24)
self.mk_population(HippCell, 24)
# Set seed for reproducibility
if seed:
self.set_numpy_seed(seed)
# Setup recordings
self.populations[0].record_aps()
self.populations[1].record_aps()
self.populations[2].record_aps()
self.populations[3].record_aps()
temporal_patterns = np.array(temporal_patterns)
print(np.shape(temporal_patterns))
#temporal_patterns = np.atleast_2d(temporal_patterns)
if type(spatial_patterns_gcs) == np.ndarray and type(temporal_patterns) == np.ndarray:
#spatial_patterns_gcs = np.atleast_2d(spatial_patterns_gcs)
for pat in range(len(spatial_patterns_gcs)):
# PP -> GC
#Original
ouropy.gennetwork.PerforantPathPoissonTmgsyn(self.populations[0],
temporal_patterns[pat],
spatial_patterns_gcs[pat],
'midd', 5.5, 0, 1, 0, 0, 1.25*10**(-3))
if type(spatial_patterns_bcs) == np.ndarray and type(temporal_patterns) == np.ndarray:
#spatial_patterns_bcs = np.atleast_2d(spatial_patterns_bcs)
for pat in range(len(spatial_patterns_bcs)):
# PP -> BC
ouropy.gennetwork.PerforantPathPoissonTmgsyn(self.populations[2],
temporal_patterns[pat],
spatial_patterns_bcs[pat],
'ddend', 6.3, 0, 1, 0, 0, 1*10**(-3))
# GC -> MC
ouropy.gennetwork.tmgsynConnection(self.populations[0], self.populations[1],
12, 'proxd',
1, 6.2, 500, 0.1, 0, 0, 10, 1.5, 0.2*10**(-2) * 10)
# GC -> BC
#Weight x4, target_pool = 2
ouropy.gennetwork.tmgsynConnection(self.populations[0], self.populations[2],
8, 'proxd',
1, 0.6, 500, 0.1, 0, 0, 10, 0.8, 18.8*10**(-2))
# GC -> HC
# Divergence x4; Weight doubled; Connected randomly.
ouropy.gennetwork.tmgsynConnection(self.populations[0], self.populations[3],
24, 'proxd',
12, 0.6, 500, 0.1, 0, 0, 10, 1.5, 1.5*10**(-2))
# MC -> MC
        ouropy.gennetwork.tmgsynConnection(self.populations[1], self.populations[1],
24, 'proxd',
3, 2.2, 0, 1, 0, 0, 10, 2, 0.5*10**(-3))
# MC -> BC
ouropy.gennetwork.tmgsynConnection(self.populations[1], self.populations[2],
12, 'proxd',
1, 0.1, 0, 1, 0, 0, 10, 3, 0.3*10**(-3))
# MC -> HC
ouropy.gennetwork.tmgsynConnection(self.populations[1], self.populations[3],
20, 'midd',
2, 3.6, 0, 1, 0, 0, 10, 3, 0.2*10**(-3))
# BC -> GC
# Nr. synapses x3; Weight *1/4; changed from 5.5 to 20 (Hefft & Jonas, 2005)
ouropy.gennetwork.tmgsynConnection(self.populations[2], self.populations[0],
400, 'soma',
400, 20, 0, 1, 0, -70, 10, 0.85, 1.2*10**(-3))
# We reseed here to make sure that those connections are consistent
# between this and net_global. The only connection that differs between
# net_tuned and net_global will be the BC -> GC connection.
if seed:
self.set_numpy_seed(seed)
# BC -> MC
ouropy.gennetwork.tmgsynConnection(self.populations[2], self.populations[1],
28, 'proxd',
3, 3.3, 0, 1, 0, -70, -10, 1.5, 1.5*10**(-3))
# BC -> BC
ouropy.gennetwork.tmgsynConnection(self.populations[2], self.populations[2],
                                           12, 'proxd',
                                           2, 1.8, 0, 1, 0, -70, -10, 0.8, 7.6*10**(-3))
# HC -> GC
# Weight x10; Nr synapses x4; changed from 6 to 20 (Hefft & Jonas, 2005)
ouropy.gennetwork.tmgsynConnection(self.populations[3], self.populations[0],
2000, 'dd',
640, 20, 0, 1, 0, -70, 10, 1.6, 0.6*10**(-2))
# HC -> MC
ouropy.gennetwork.tmgsynConnection(self.populations[3], self.populations[1],
60, ['mid1d', 'mid2d'],
4, 6, 0, 1, 0, -70, 10, 1, 1.5*10**(-3))
# HC -> BC
ouropy.gennetwork.tmgsynConnection(self.populations[3], self.populations[2],
24, 'ddend',
4, 5.8, 0, 1, 0, -70, 10, 1.6, 0.5*10**(-3))
| 45.928571
| 98
| 0.494712
|
79487ea03abce6670e8d67a0190d2b23885b6b09
| 484
|
py
|
Python
|
apps/controllerx/cx_core/action_type/scene_action_type.py
|
xaviml/z2m_ikea_controller
|
e612af5a913e8b4784dcaa23ea5319115427d083
|
[
"MIT"
] | 19
|
2019-11-21T19:51:40.000Z
|
2020-01-14T09:24:33.000Z
|
apps/controllerx/cx_core/action_type/scene_action_type.py
|
xaviml/z2m_ikea_controller
|
e612af5a913e8b4784dcaa23ea5319115427d083
|
[
"MIT"
] | 11
|
2019-11-20T16:43:35.000Z
|
2020-01-17T16:23:06.000Z
|
apps/controllerx/cx_core/action_type/scene_action_type.py
|
xaviml/z2m_ikea_controller
|
e612af5a913e8b4784dcaa23ea5319115427d083
|
[
"MIT"
] | 5
|
2019-12-20T21:31:07.000Z
|
2020-01-06T18:49:52.000Z
|
from typing import Any, Optional
from cx_core.action_type.base import ActionType
from cx_core.integration import EventData
class SceneActionType(ActionType):
scene: str
def initialize(self, **kwargs: Any) -> None:
self.scene = kwargs["scene"]
async def run(self, extra: Optional[EventData] = None) -> None:
await self.controller.call_service("scene/turn_on", entity_id=self.scene)
def __str__(self) -> str:
return f"Scene ({self.scene})"
| 26.888889
| 81
| 0.698347
|
79487fb0bbd3bbc063df9ae6b52c149cf2402698
| 2,448
|
py
|
Python
|
userbot/utils/tools.py
|
techyminati/ProjectFizilion
|
2a7a63519b93fb9e424377500cf4fdded6408bfa
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 77
|
2020-02-09T19:23:30.000Z
|
2021-12-21T16:13:43.000Z
|
userbot/utils/tools.py
|
techyminati/ProjectFizilion
|
2a7a63519b93fb9e424377500cf4fdded6408bfa
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 20
|
2020-03-23T15:22:09.000Z
|
2021-09-28T09:30:54.000Z
|
userbot/utils/tools.py
|
techyminati/ProjectFizilion
|
2a7a63519b93fb9e424377500cf4fdded6408bfa
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1,011
|
2020-02-01T10:04:35.000Z
|
2022-03-26T13:08:07.000Z
|
# Copyright (C) 2019 Adek Maulana
#
# Licensed under the Raphielscape Public License, Version 1.d (the "License");
# you may not use this file except in compliance with the License.
#
import re
import hashlib
from telethon.tl.types import DocumentAttributeFilename
async def md5(fname: str) -> str:
hash_md5 = hashlib.md5()
with open(fname, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest()
def humanbytes(size: int) -> str:
if size is None or isinstance(size, str):
return ""
power = 2**10
raised_to_pow = 0
dict_power_n = {0: "", 1: "Ki", 2: "Mi", 3: "Gi", 4: "Ti"}
while size > power:
size /= power
raised_to_pow += 1
return str(round(size, 2)) + " " + dict_power_n[raised_to_pow] + "B"
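# e.g. humanbytes(1536) -> "1.5 KiB", humanbytes(5) -> "5 B"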
def time_formatter(seconds: int) -> str:
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
tmp = (
((str(days) + " day(s), ") if days else "") +
((str(hours) + " hour(s), ") if hours else "") +
((str(minutes) + " minute(s), ") if minutes else "") +
((str(seconds) + " second(s), ") if seconds else "")
)
return tmp[:-2]
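# e.g. time_formatter(3725) -> "1 hour(s), 2 minute(s), 5 second(s)"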
def human_to_bytes(size: str) -> int:
    units = {
        "K": 2**10, "KB": 2**10,
        "M": 2**20, "MB": 2**20,
        "G": 2**30, "GB": 2**30,
        "T": 2**40, "TB": 2**40
    }
size = size.upper()
    if not re.search(r'\s', size):  # no separator between number and unit yet
        size = re.sub(r'([KMGT])', r' \1', size)
number, unit = [string.strip() for string in size.split()]
return int(float(number) * units[unit])
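# e.g. human_to_bytes("1.5G") == human_to_bytes("1.5 GB") == int(1.5 * 2**30)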
async def check_media(reply_message):
if reply_message and reply_message.media:
if reply_message.photo:
data = reply_message.photo
elif reply_message.document:
if (
DocumentAttributeFilename(file_name="AnimatedSticker.tgs")
in reply_message.media.document.attributes
):
return False
if (
reply_message.gif
or reply_message.video
or reply_message.audio
or reply_message.voice
):
return False
data = reply_message.media.document
else:
return False
else:
return False
if not data or data is None:
return False
else:
return data
| 27.818182
| 78
| 0.557598
|
79488020c71ba7a0abc2a63cef22c737c0d48e0c
| 752
|
py
|
Python
|
opencanary/iphelper.py
|
p1r473/opencanary
|
516cd80e1f21179e87f114356215a1df6ecf9774
|
[
"BSD-3-Clause"
] | 1,412
|
2015-08-06T03:54:37.000Z
|
2022-03-29T07:30:54.000Z
|
opencanary/iphelper.py
|
p1r473/opencanary
|
516cd80e1f21179e87f114356215a1df6ecf9774
|
[
"BSD-3-Clause"
] | 170
|
2015-08-11T00:33:06.000Z
|
2022-03-30T05:11:38.000Z
|
opencanary/iphelper.py
|
p1r473/opencanary
|
516cd80e1f21179e87f114356215a1df6ecf9774
|
[
"BSD-3-Clause"
] | 297
|
2015-08-17T09:25:41.000Z
|
2022-03-31T08:25:41.000Z
|
import struct
import socket
def ip2int(addr):
"""
Convert an IP in string format to decimal format
"""
return struct.unpack("!I", socket.inet_aton(addr))[0]
def check_ip(ip, network_range):
"""
Test if the IP is in range
Range is expected to be in CIDR notation format. If no MASK is
given /32 is used. It return True if the IP is in the range.
"""
netItem = str(network_range).split('/')
rangeIP = netItem[0]
if len(netItem) == 2:
rangeMask = int(netItem[1])
else:
rangeMask = 32
try:
ripInt = ip2int(rangeIP)
ipInt = ip2int(ip)
        result = not ((ipInt ^ ripInt) & (0xFFFFFFFF << (32 - rangeMask)))
    except (OSError, TypeError):  # malformed address strings
        result = False
return result
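# Examples:
#   check_ip('192.168.1.5', '192.168.1.0/24')  # True  (same /24 network)
#   check_ip('192.168.2.5', '192.168.1.0/24')  # False
#   check_ip('10.0.0.1', '10.0.0.1')           # True  (no mask defaults to /32)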
| 22.117647
| 73
| 0.599734
|
7948805f24dc955c83b98f705e15daa05cb1126c
| 676
|
py
|
Python
|
users/forms.py
|
Syntaxii/Sosa-local
|
7c9ed939c11cae92cb78028c06b51682f1cf50e6
|
[
"MIT"
] | 3
|
2018-11-15T00:47:31.000Z
|
2018-11-27T20:57:57.000Z
|
users/forms.py
|
Syntaxii/Sosa-local
|
7c9ed939c11cae92cb78028c06b51682f1cf50e6
|
[
"MIT"
] | 7
|
2020-06-06T02:01:59.000Z
|
2022-02-10T15:09:16.000Z
|
users/forms.py
|
Syntaxii/Sosa-local
|
7c9ed939c11cae92cb78028c06b51682f1cf50e6
|
[
"MIT"
] | null | null | null |
from django import forms
from pages import models
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from .models import CustomUser
class CustomUserCreationForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
model = CustomUser
fields = ('email', 'username',)
class CustomUserChangeForm(UserChangeForm):
class Meta:
model = CustomUser
fields = ('email', 'username',)
# class PostBoard(forms.ModelForm):
#
# class Meta:
# model = models.board
# fields = ('boardR', 'boardB', 'boardG', 'backgroundR', 'backgroundG', 'backgroundB', 'board_tilt_x',
# 'board_tilt_y', 'admin_ID')
| 28.166667
| 110
| 0.680473
|
794880624067d3f2afb6326fed151a7683b316a1
| 5,655
|
py
|
Python
|
experiment/code - old/rsft-gain-loss-experiment-master/mpl/config.py
|
JanaJarecki/Goals-and-experience
|
4946999f971abb6b7986eedafed4bb29e36f1d0e
|
[
"MIT"
] | 1
|
2021-11-02T10:21:16.000Z
|
2021-11-02T10:21:16.000Z
|
experiment/code - old/rsft-gain-loss-experiment-master/mpl/config.py
|
JanaJarecki/Goals-and-experience
|
4946999f971abb6b7986eedafed4bb29e36f1d0e
|
[
"MIT"
] | null | null | null |
experiment/code - old/rsft-gain-loss-experiment-master/mpl/config.py
|
JanaJarecki/Goals-and-experience
|
4946999f971abb6b7986eedafed4bb29e36f1d0e
|
[
"MIT"
] | 1
|
2021-11-02T10:21:22.000Z
|
2021-11-02T10:21:22.000Z
|
# <imports>
from otree.api import Currency as c
from otree.constants import BaseConstants
# </imports>
# ******************************************************************************************************************** #
# *** CLASS CONSTANTS *** #
# ******************************************************************************************************************** #
class Constants(BaseConstants):
# ---------------------------------------------------------------------------------------------------------------- #
# --- Task-specific Settings --- #
# ---------------------------------------------------------------------------------------------------------------- #
# lottery payoffs
# "high" and "low" outcomes (in currency units set in settings.py) of "lottery A" and "lottery B"
# note that payoffs are identical for all choices and only probabilities of "high" and "low" outcomes change
lottery_a_hi = 2.00
lottery_a_lo = 1.60
lottery_b_hi = 3.85
lottery_b_lo = 0.10
# number of binary choices between "lottery A" and "lottery B"
# note that the number of choices determines the probabilities of high and low outcomes of lotteries "A" and "B"
# for <num_choices = X>, the probability of outcome "high" is 1/X for the first choice, 2/X for the second, etc.
num_choices = 10
# include 'certain' choice (** only applies if <variation_type = 'probability'> **)
# if <certain_choice = True>, the binary choice with probability of the outcome "high" being equal to 1 is included
# if <certain_choice = False>, the list only contains (<num_choices> - 1) binary decision pairs
# note, however, that the probability of outcome "high" is set by <num_choices>, not (<num_choices> - 1), though
# i.e., if <certain_choice = False>, the last choice implies a probability of (X - 1)/X (given <num_choices = X>)
certain_choice = True
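    # e.g. with <num_choices = 10> and <certain_choice = True>, the implied probability
    # of the "high" outcome over the ten rows is [i / 10 for i in range(1, 11)],
    # i.e. 0.1, 0.2, ..., 1.0; with <certain_choice = False> the 1.0 row is dropped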
# ---------------------------------------------------------------------------------------------------------------- #
# --- Overall Settings and Appearance --- #
# ---------------------------------------------------------------------------------------------------------------- #
# show each lottery pair on a separate page
# if <one_choice_per_page = True>, each single binary choice between lottery "A" and "B" is shown on a separate page
# if <one_choice_per_page = False>, all <num_choices> choices are displayed in a table on one page
one_choice_per_page = False
# order choices between lottery pairs randomly
# if <random_order = True>, the ordering of binary decisions is randomized for display
# if <random_order = False>, binary choices are listed in ascending order of the probability of the "high" outcome
random_order = False
# enforce consistency, i.e. only allow for a single switching point
# if <enforce_consistency = True>, all options "A" above a selected option "A" are automatically selected
# similarly, all options "B" below a selected option "B" are automatically checked, implying consistent choices
# note that <enforce_consistency> is only implemented if <one_choice_per_page = False> and <random_order = False>
enforce_consistency = False
# depict probabilities as percentage numbers
# if <percentage = True>, the probability of outcome "high" will be displayed as percentage number
# if <percentage = False>, the probabilities will be displayed as fractions, i.e. "1/X", "2/X", etc.
percentage = True
# show small pie charts for each lottery
# if <small_pies = True>, a pie chart depicting the probabilities of outcomes is rendered next to each lottery
# if <small_pies = False>, no graphical representation of probabilities is displayed
small_pies = True
# display lotteries in terms of large pie charts
# if <large_pies = True>, lotteries are depicted as pie charts; if <large_pies = False> lotteries are list items
# note that <large_pies = True> only affects the task's appearance if <one_choice_per_page = True>
large_pies = True
# show progress bar
# if <progress_bar = True> and <one_choice_per_page = True>, a progress bar is rendered
# if <progress_bar = False>, no information with respect to the advance within the task is displayed
    # the progress bar graphically depicts the advance within the task in terms of how many decisions have been made
# further, information in terms of "page x out of <num_choices>" (with x denoting the current choice) is provided
progress_bar = True
# show instructions page
# if <instructions = True>, a separate template "Instructions.html" is rendered prior to the task
# if <instructions = False>, the task starts immediately (e.g. in case of printed instructions)
instructions = True
# show results page summarizing the task's outcome including payoff information
# if <results = True>, a separate page containing all relevant information is displayed after finishing the task
# if <results = False>, the template "Decision.html" will not be rendered
results = True
# ---------------------------------------------------------------------------------------------------------------- #
# --- oTree Settings (Don't Modify) --- #
# ---------------------------------------------------------------------------------------------------------------- #
name_in_url = 'mpl'
players_per_group = None
if one_choice_per_page:
if certain_choice:
num_rounds = num_choices
else:
num_rounds = num_choices - 1
else:
num_rounds = 1
| 55.441176
| 120
| 0.586914
|
794880bd7c9d1bfe5ce13943dcf142a965006f31
| 5,316
|
py
|
Python
|
jtnn/jtmpn.py
|
alvin-hsu/icml18-jtnn
|
da024c7fe6d56d61ec4e0fa30ad175f4e54ef1e5
|
[
"MIT"
] | 5
|
2018-04-09T03:02:28.000Z
|
2019-03-20T06:08:28.000Z
|
jtnn/jtmpn.py
|
cyclone923/jtvae
|
0292265bb97c31a925a03b18a3d3b38c560c89c6
|
[
"MIT"
] | 1
|
2021-03-31T08:30:23.000Z
|
2021-03-31T08:30:23.000Z
|
jtnn/jtmpn.py
|
kamikaze0923/jtvae
|
0292265bb97c31a925a03b18a3d3b38c560c89c6
|
[
"MIT"
] | 2
|
2020-02-29T05:15:08.000Z
|
2020-02-29T19:23:38.000Z
|
import torch
import torch.nn as nn
from .nnutils import create_var, index_select_ND
from .chemutils import get_mol
#from mpn import atom_features, bond_features, ATOM_FDIM, BOND_FDIM
import rdkit.Chem as Chem
ELEM_LIST = ['C', 'N', 'O', 'S', 'F', 'Si', 'P', 'Cl', 'Br', 'Mg', 'Na', 'Ca', 'Fe', 'Al', 'I', 'B', 'K', 'Se', 'Zn', 'H', 'Cu', 'Mn', 'unknown']
ATOM_FDIM = len(ELEM_LIST) + 6 + 5 + 1
BOND_FDIM = 5
MAX_NB = 10
def onek_encoding_unk(x, allowable_set):
if x not in allowable_set:
x = allowable_set[-1]
return [x == s for s in allowable_set]
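# e.g. onek_encoding_unk('N', ELEM_LIST) flags only the 'N' position as True,
# while an unlisted symbol falls back to the trailing 'unknown' slot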
def atom_features(atom):
return torch.Tensor(onek_encoding_unk(atom.GetSymbol(), ELEM_LIST)
+ onek_encoding_unk(atom.GetDegree(), [0,1,2,3,4,5])
+ onek_encoding_unk(atom.GetFormalCharge(), [-1,-2,1,2,0])
+ [atom.GetIsAromatic()])
def bond_features(bond):
bt = bond.GetBondType()
return torch.Tensor([bt == Chem.rdchem.BondType.SINGLE, bt == Chem.rdchem.BondType.DOUBLE, bt == Chem.rdchem.BondType.TRIPLE, bt == Chem.rdchem.BondType.AROMATIC, bond.IsInRing()])
class JTMPN(nn.Module):
def __init__(self, hidden_size, depth):
super(JTMPN, self).__init__()
self.hidden_size = hidden_size
self.depth = depth
self.W_i = nn.Linear(ATOM_FDIM + BOND_FDIM, hidden_size, bias=False)
self.W_h = nn.Linear(hidden_size, hidden_size, bias=False)
self.W_o = nn.Linear(ATOM_FDIM + hidden_size, hidden_size)
def forward(self, cand_batch, tree_mess):
fatoms,fbonds = [],[]
in_bonds,all_bonds = [],[]
mess_dict,all_mess = {},[create_var(torch.zeros(self.hidden_size))] #Ensure index 0 is vec(0)
total_atoms = 0
scope = []
for e,vec in tree_mess.items():
mess_dict[e] = len(all_mess)
all_mess.append(vec)
for mol,all_nodes,ctr_node in cand_batch:
n_atoms = mol.GetNumAtoms()
ctr_bid = ctr_node.idx
for atom in mol.GetAtoms():
fatoms.append( atom_features(atom) )
in_bonds.append([])
for bond in mol.GetBonds():
a1 = bond.GetBeginAtom()
a2 = bond.GetEndAtom()
x = a1.GetIdx() + total_atoms
y = a2.GetIdx() + total_atoms
#Here x_nid,y_nid could be 0
x_nid,y_nid = a1.GetAtomMapNum(),a2.GetAtomMapNum()
x_bid = all_nodes[x_nid - 1].idx if x_nid > 0 else -1
y_bid = all_nodes[y_nid - 1].idx if y_nid > 0 else -1
bfeature = bond_features(bond)
                b = len(all_mess) + len(all_bonds) #bond idx offset by len(all_mess)
all_bonds.append((x,y))
fbonds.append( torch.cat([fatoms[x], bfeature], 0) )
in_bonds[y].append(b)
b = len(all_mess) + len(all_bonds)
all_bonds.append((y,x))
fbonds.append( torch.cat([fatoms[y], bfeature], 0) )
in_bonds[x].append(b)
if x_bid >= 0 and y_bid >= 0 and x_bid != y_bid:
if (x_bid,y_bid) in mess_dict:
mess_idx = mess_dict[(x_bid,y_bid)]
in_bonds[y].append(mess_idx)
if (y_bid,x_bid) in mess_dict:
mess_idx = mess_dict[(y_bid,x_bid)]
in_bonds[x].append(mess_idx)
scope.append((total_atoms,n_atoms))
total_atoms += n_atoms
total_bonds = len(all_bonds)
total_mess = len(all_mess)
fatoms = torch.stack(fatoms, 0)
fbonds = torch.stack(fbonds, 0)
agraph = torch.zeros(total_atoms,MAX_NB).long()
bgraph = torch.zeros(total_bonds,MAX_NB).long()
tree_message = torch.stack(all_mess, dim=0)
for a in range(total_atoms):
for i,b in enumerate(in_bonds[a]):
agraph[a,i] = b
for b1 in range(total_bonds):
x,y = all_bonds[b1]
            for i,b2 in enumerate(in_bonds[x]): #b2 is offset by len(all_mess)
if b2 < total_mess or all_bonds[b2-total_mess][0] != y:
bgraph[b1,i] = b2
fatoms = create_var(fatoms)
fbonds = create_var(fbonds)
agraph = create_var(agraph)
bgraph = create_var(bgraph)
binput = self.W_i(fbonds)
graph_message = nn.ReLU()(binput)
for i in range(self.depth - 1):
message = torch.cat([tree_message,graph_message], dim=0)
nei_message = index_select_ND(message, 0, bgraph)
nei_message = nei_message.sum(dim=1)
nei_message = self.W_h(nei_message)
graph_message = nn.ReLU()(binput + nei_message)
message = torch.cat([tree_message,graph_message], dim=0)
nei_message = index_select_ND(message, 0, agraph)
nei_message = nei_message.sum(dim=1)
ainput = torch.cat([fatoms, nei_message], dim=1)
atom_hiddens = nn.ReLU()(self.W_o(ainput))
mol_vecs = []
for st,le in scope:
mol_vec = atom_hiddens.narrow(0, st, le).sum(dim=0) / le
mol_vecs.append(mol_vec)
mol_vecs = torch.stack(mol_vecs, dim=0)
return mol_vecs
| 38.244604
| 184
| 0.569037
|
794880c6b08068f326ee6775f22957e220bd764c
| 2,482
|
py
|
Python
|
Python Games/Colour Game in python Gui.py
|
lazydinoz/HackFest21
|
84bfbfbb2c75a6511226a87d2e947984db878ba1
|
[
"MIT"
] | 6
|
2022-01-01T17:12:32.000Z
|
2022-02-22T07:42:57.000Z
|
Python Games/Colour Game in python Gui.py
|
lazydinoz/HackFest21
|
84bfbfbb2c75a6511226a87d2e947984db878ba1
|
[
"MIT"
] | 95
|
2021-10-21T21:16:28.000Z
|
2021-11-02T13:32:00.000Z
|
Python Games/Colour Game in python Gui.py
|
lazydinoz/HackFest21
|
84bfbfbb2c75a6511226a87d2e947984db878ba1
|
[
"MIT"
] | 25
|
2021-10-03T07:21:58.000Z
|
2021-10-31T15:31:18.000Z
|
# import the modules
import tkinter
import random
# list of possible colour.
colours = ['Red','Blue','Green','Pink','Black',
'Yellow','Orange','White','Purple','Brown']
score = 0
# the game time left, initially 30 seconds.
timeleft = 30
# function that will start the game.
def startGame(event):
if timeleft == 30:
# start the countdown timer.
countdown()
# run the function to
# choose the next colour.
nextColour()
# Function to choose and
# display the next colour.
def nextColour():
# use the globally declared 'score'
# and 'play' variables above.
global score
global timeleft
# if a game is currently in play
if timeleft > 0:
# make the text entry box active.
e.focus_set()
# if the colour typed is equal
# to the colour of the text
if e.get().lower() == colours[1].lower():
score += 1
# clear the text entry box.
e.delete(0, tkinter.END)
random.shuffle(colours)
# change the colour to type, by changing the
# text _and_ the colour to a random colour value
label.config(fg = str(colours[1]), text = str(colours[0]))
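        # note: the displayed word is colours[0] while its font colour is
        # colours[1], so the answer checked above is always colours[1]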
# update the score.
scoreLabel.config(text = "Score: " + str(score))
# Countdown timer function
def countdown():
global timeleft
# if a game is in play
if timeleft > 0:
# decrement the timer.
timeleft -= 1
# update the time left label
timeLabel.config(text = "Time left: "
+ str(timeleft))
# run the function again after 1 second.
timeLabel.after(1000, countdown)
# Driver Code
# create a GUI window
root = tkinter.Tk()
# set the title
root.title("COLORGAME")
# set the size
root.geometry("375x200")
# add an instructions label
instructions = tkinter.Label(root, text = "Type in the colour"
"of the words, and not the word text!",
font = ('Helvetica', 12))
instructions.pack()
# add a score label
scoreLabel = tkinter.Label(root, text = "Press enter to start",
font = ('Helvetica', 12))
scoreLabel.pack()
# add a time left label
timeLabel = tkinter.Label(root, text = "Time left: " +
str(timeleft), font = ('Helvetica', 12))
timeLabel.pack()
# add a label for displaying the colours
label = tkinter.Label(root, font = ('Helvetica', 60))
label.pack()
# add a text entry box for
# typing in colours
e = tkinter.Entry(root)
# run the 'startGame' function
# when the enter key is pressed
root.bind('<Return>', startGame)
e.pack()
# set focus on the entry box
e.focus_set()
# start the GUI
root.mainloop()
| 20.016129
| 63
| 0.67083
|
794880f14a683e629c74c76f09884b014cbc846d
| 6,210
|
py
|
Python
|
test/server_tests.py
|
pmandera/snaut
|
19f32b204e6fbaf5162f5f788d2128e769bccdb2
|
[
"Apache-2.0"
] | 2
|
2016-04-27T14:00:23.000Z
|
2019-06-24T16:08:43.000Z
|
test/server_tests.py
|
pmandera/snaut
|
19f32b204e6fbaf5162f5f788d2128e769bccdb2
|
[
"Apache-2.0"
] | null | null | null |
test/server_tests.py
|
pmandera/snaut
|
19f32b204e6fbaf5162f5f788d2128e769bccdb2
|
[
"Apache-2.0"
] | 1
|
2019-06-25T20:15:02.000Z
|
2019-06-25T20:15:02.000Z
|
from configparser import RawConfigParser
import snaut.snaut as snaut
import unittest
import json
import csv
import io
from example_space import example_semspace
class SnautTestCase(unittest.TestCase):
def setUp(self):
conf = RawConfigParser()
conf.add_section('semantic_space')
conf.set('semantic_space', 'semspaces_dir', './data')
conf.set('semantic_space', 'preload_space', 'no')
conf.set('semantic_space', 'prenormalize', 'no')
conf.set('semantic_space', 'numpy_dtype', 'float64')
conf.set('semantic_space', 'matrix_size_limit', '-1')
conf.set('semantic_space', 'allow_space_change', 'no')
conf.add_section('server')
conf.set('server', 'doc_dir', './doc')
        conf.set('server', 'static_dir', './snaut/static')
        conf.set('server', 'template_dir', './snaut/templates')
conf.set('server', 'log_name', 'snaut')
conf.set('server', 'log_file', '')
conf.set('server', 'log_level', 'critical')
conf.set('server', 'root_prefix', '')
self.app = snaut.app_factory(conf, example_semspace).test_client()
def tearDown(self):
pass
def test_status(self):
response = self.app.get('/status', follow_redirects=True)
status = json.loads(response.data)
assert status['semspaceLoaded'] is True
assert status['semspaceTitle'] == 'Random semantic space'
assert status['semspaceDesc'] == 'Demo semantic space description.'
def test_similar_words1(self):
data = {'words1': ['first', 'fifth', 'twelfth'], 'metric': 'cosine'}
data_json = json.dumps(data)
response = self.app.post('/similar/', data=data_json,
follow_redirects=True,
content_type='application/json')
print(response)
result = json.loads(response.data)
print(result)
assert result['notDefined']['words1'] == ['twelfth']
assert result['notDefined']['words2'] is None
sims = result['similarities']
for w, nns in list(sims.items()):
assert nns[0][0] == w
assert len(nns) == 10
def test_similar_words1_words2(self):
data = {'words1': ['first', 'fifth', 'twelfth'], 'words2': ['third',
'second', 'fifth', 'thirteenth'], 'metric': 'cosine'}
data_json = json.dumps(data)
response = self.app.post('/similar/', data=data_json,
follow_redirects=True,
content_type='application/json')
print(response)
result = json.loads(response.data)
print(result)
assert result['notDefined']['words1'] == ['twelfth']
assert result['notDefined']['words2'] == ['thirteenth']
sims = result['similarities']
for w, nns in list(sims.items()):
assert len(nns) == 3
def test_similarity_matrix_words1(self):
data = {'words1': ['first', 'fifth', 'twelfth'], 'metric': 'cosine'}
data_json = json.dumps(data)
response = self.app.post('/similarity-matrix/',
data=dict(data=data_json),
follow_redirects=True)
content_disposition = response.headers["Content-Disposition"]
assert content_disposition == "attachment; filename=similarities.csv"
        f = io.StringIO(response.data.decode('utf-8'))
reader = list(csv.reader(f, delimiter=','))
cols = reader[0][1:]
assert cols == ['first', 'fifth']
rows = [r[0] for r in reader[1:]]
assert rows == example_semspace.words
for row in reader[1:]:
row_word = row[0]
row_distances = list(zip(cols, row[1:]))
for col_word, dist in row_distances:
pair_dist = example_semspace.pair_distance(row_word, col_word)
print((col_word, row_word, float(dist), pair_dist))
assert float(dist) - pair_dist < 10e-10
def test_similarity_matrix_words1_words2(self):
data = {'words1': ['first', 'fifth', 'twelfth'], 'words2': ['third',
'second', 'fifth', 'thirteenth'], 'metric': 'cosine'}
data_json = json.dumps(data)
response = self.app.post('/similarity-matrix/',
data=dict(data=data_json),
follow_redirects=True)
content_disposition = response.headers["Content-Disposition"]
assert content_disposition == "attachment; filename=similarities.csv"
        f = io.StringIO(response.data.decode('utf-8'))
reader = list(csv.reader(f, delimiter=','))
cols = reader[0][1:]
assert cols == ['first', 'fifth']
rows = [r[0] for r in reader[1:]]
assert rows == ['third', 'second', 'fifth']
for row in reader[1:]:
row_word = row[0]
row_distances = list(zip(cols, row[1:]))
for col_word, dist in row_distances:
pair_dist = example_semspace.pair_distance(row_word, col_word)
print((col_word, row_word, float(dist), pair_dist))
assert float(dist) - pair_dist < 10e-10
def test_pairs(self):
data = {'wordPairs': [
[['first'], ['second']],
[['fifth'], ['sixth']],
[['twelfth'], ['eleventh']]],
'metric': 'cosine'}
data_json = json.dumps(data)
print(data_json)
response = self.app.post('/pairs/', data=dict(data=data_json),
follow_redirects=True)
content_disposition = response.headers["Content-Disposition"]
assert content_disposition == "attachment; filename=word-pairs.csv"
        f = io.StringIO(response.data.decode('utf-8'))
reader = list(csv.reader(f, delimiter=','))
cols = reader[0]
print(reader)
assert cols == ['word_1', 'word_2', 'distance']
for w1, w2, dist in reader[1:]:
pair_dist = example_semspace.pair_distance(w1, w2)
print((w1, w2, float(dist), pair_dist))
assert float(dist) - pair_dist < 10e-10
if __name__ == '__main__':
unittest.main()
| 37.409639
| 78
| 0.571014
|
7948814ac614bf86596f3c8684099ea1804c1be9
| 1,807
|
py
|
Python
|
self_supervision/embedding_reducer.py
|
Viole-Grace/AdNAN
|
b21fb781b363f03d49b0de5744f732e0d5c4c42e
|
[
"Apache-2.0"
] | 1
|
2020-02-02T11:21:49.000Z
|
2020-02-02T11:21:49.000Z
|
self_supervision/embedding_reducer.py
|
Viole-Grace/AdNAN
|
b21fb781b363f03d49b0de5744f732e0d5c4c42e
|
[
"Apache-2.0"
] | null | null | null |
self_supervision/embedding_reducer.py
|
Viole-Grace/AdNAN
|
b21fb781b363f03d49b0de5744f732e0d5c4c42e
|
[
"Apache-2.0"
] | null | null | null |
import umap
class EmbeddingReducer:
"""
Class to convert high dimensional embeddings into lower dimensional embeddings.
Current implementation uses only UMAP to do so
@TODO:
- add t-SNE mode
- add isomap mode
- add locally linear embedding mode
"""
def __init__(self, embedding_to_reduce, dimension=None, mode=None, optimal_size=10):
"""
Initialize parameters for dimensionality reduction model
Args:
embedding_to_reduce (list | torch.Tensor | np.array): list | tensor | array of high dimensional embeddings
dimension (int, optional): output number of dimensions. Defaults to None.
            mode (str, optional): reduction algorithm to use; only "umap" is currently implemented. Defaults to None (treated as "umap").
            optimal_size (int, optional): number of points to look at to form a local representation. Larger value makes less fine grained distinctions in data. Defaults to 10.
"""
self.embeddings = embedding_to_reduce
self.dimensions = dimension
self.mode = mode
self.optimal_size = optimal_size
        if self.dimensions is None:
self.dimensions = 2
def umap_dimensionality_reduction(self):
return umap.UMAP(n_components=self.dimensions,
n_neighbors=3*self.optimal_size,
min_dist=0.0,
metric='cosine',
random_state=0,
low_memory=False)
def get_output(self):
mode = self.mode
if mode == None or mode == "umap":
self.dimension_reduction_model = self.umap_dimensionality_reduction()
# print(type(self.embeddings))
reduced_embeddings = self.dimension_reduction_model.fit_transform(X=self.embeddings)
return reduced_embeddings
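# Minimal usage sketch (assumes `vectors` is an (n_samples, n_features) array
# of embeddings):
#   reducer = EmbeddingReducer(vectors, dimension=2, mode="umap")
#   low_dim = reducer.get_output()  # -> array of shape (n_samples, 2)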
| 36.14
| 176
| 0.614831
|
794881c75a86937d68af0dcac474af498222b631
| 294
|
py
|
Python
|
socket/echo_client.py
|
LaurenceYang/learn-python
|
819994039abd3af298f73b1a73976eaa95071096
|
[
"Apache-2.0"
] | 12
|
2017-10-01T00:20:37.000Z
|
2017-10-02T10:42:18.000Z
|
socket/echo_client.py
|
LaurenceYang/learn-python
|
819994039abd3af298f73b1a73976eaa95071096
|
[
"Apache-2.0"
] | null | null | null |
socket/echo_client.py
|
LaurenceYang/learn-python
|
819994039abd3af298f73b1a73976eaa95071096
|
[
"Apache-2.0"
] | null | null | null |
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Establish the connection:
s.connect(('127.0.0.1', 9999))
# Receive the welcome message:
print(s.recv(1024).decode('utf-8'))
for data in [b'Michael', b'Tracy', b'Sarah']:
    # Send data:
s.send(data)
print(s.recv(1024).decode('utf-8'))
s.send(b'exit')
s.close()
| 22.615385
| 53
| 0.642857
|
794885b67a539c8b02a13d477e425428aed33209
| 3,587
|
py
|
Python
|
lib/invoke_tasks.py
|
thesmarthomeninja/Video_Gaming_ML
|
e9c147f33a790a9cd3e4ee631ddbf6bbf91c3921
|
[
"MIT"
] | null | null | null |
lib/invoke_tasks.py
|
thesmarthomeninja/Video_Gaming_ML
|
e9c147f33a790a9cd3e4ee631ddbf6bbf91c3921
|
[
"MIT"
] | 4
|
2020-09-25T22:39:46.000Z
|
2022-02-09T23:39:43.000Z
|
lib/invoke_tasks.py
|
AsimKhan2019/Serpent-AI
|
e9c147f33a790a9cd3e4ee631ddbf6bbf91c3921
|
[
"MIT"
] | null | null | null |
from invoke import task
from lib.frame_grabber import FrameGrabber
from lib.games import *
import lib.datasets
from lib.machine_learning.context_classification.context_classifiers import SVMContextClassifier
from lib.machine_learning.context_classification.context_classifiers import CNNInceptionV3ContextClassifier
import numpy as np
import skimage.io
import skimage.transform
import sklearn.linear_model
import os
import h5py
import pickle
@task
def start_frame_grabber(ctx, width=640, height=480, x_offset=0, y_offset=0):
frame_grabber = FrameGrabber(
width=width,
height=height,
x_offset=x_offset,
y_offset=y_offset
)
frame_grabber.start()
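# Usage sketch (assumes invoke's default underscore-to-dash task-name mangling):
#   invoke start-frame-grabber --width 1280 --height 720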
# Shortcut Tasks
@task
def hexagon(ctx):
game = SuperHexagonGame()
game.launch()
game.play(game_agent_class_name="SuperHexagonGameAgent")
@task
def isaac(ctx):
game = BindingOfIsaacRebirthGame()
game.launch()
game.play(game_agent_class_name="BindingOfIsaacRebirthGameAgent")
@task
def isaac_launch(ctx):
game = BindingOfIsaacRebirthGame()
game.launch()
@task
def isaac_play(ctx):
game = BindingOfIsaacRebirthGame()
game.launch(dry_run=True)
game.play(game_agent_class_name="BindingOfIsaacRebirthGameAgent")
@task
def boat_launch(ctx):
game = YouMustBuildABoatGame()
game.launch()
@task
def boat_play(ctx):
game = YouMustBuildABoatGame()
game.launch(dry_run=True)
game.play(game_agent_class_name="YouMustBuildABoatGameAgent")
@task
def boat_context_train(ctx):
lib.datasets.create_training_and_validation_sets(
[
"datasets/collect_frames_for_context/game_over",
"datasets/collect_frames_for_context/level_gallery",
"datasets/collect_frames_for_context/level_hell",
"datasets/collect_frames_for_context/level_jail",
"datasets/collect_frames_for_context/level_pagoda",
"datasets/collect_frames_for_context/level_pyramid",
"datasets/collect_frames_for_context/level_ruins",
"datasets/collect_frames_for_context/level_sewers",
"datasets/collect_frames_for_context/level_thicket",
"datasets/collect_frames_for_context/level_tower",
"datasets/collect_frames_for_context/level_vault",
]
)
context_classifier = CNNInceptionV3ContextClassifier(input_shape=(384, 512, 3))
context_classifier.train()
# context_classifier = SVMContextClassifier()
# context_classifier.train(preprocessing_func=boat_context_preprocess)
# context_classifier.validate(preprocessing_func=boat_context_preprocess)
context_classifier.save_classifier("datasets/you_must_build_a_boat_context.model")
def boat_context_preprocess(frame):
return skimage.transform.resize(frame,(frame.shape[0] // 32, frame.shape[1] // 32))
@task
def boat_train_model(ctx):
model = sklearn.linear_model.SGDRegressor()
    data_path = "datasets/ymbab"
data = list()
scores = list()
if os.path.isdir(data_path):
files = os.scandir(data_path)
for file in files:
if file.name.endswith(".h5"):
with h5py.File(f"datasets/ymbab/{file.name}", "r") as f:
count = len(f.items()) // 2
for i in range(count):
data.append(f[f"{i}"][:].flatten())
scores.append(f[f"{i}_score"].value)
model.fit(data, scores)
serialized_model = pickle.dumps(model)
with open("datasets/ymbab_matching.model", "wb") as f:
f.write(serialized_model)
| 26.375
| 107
| 0.706719
|
794886d947c77a5b1c014a6465a523d9bc3ae40e
| 168
|
py
|
Python
|
helloworld/stock.py
|
SeungmanLee/pythonPractice
|
791b41471167b22108e5fb8a0f06489f85a12da2
|
[
"MIT"
] | null | null | null |
helloworld/stock.py
|
SeungmanLee/pythonPractice
|
791b41471167b22108e5fb8a0f06489f85a12da2
|
[
"MIT"
] | null | null | null |
helloworld/stock.py
|
SeungmanLee/pythonPractice
|
791b41471167b22108e5fb8a0f06489f85a12da2
|
[
"MIT"
] | null | null | null |
def cal_upper(price):
offset = price * 0.3
return price + offset
def cal_lower(price):
offset = price * 0.3
return price - offset
author = "Pystock"
| 15.272727
| 25
| 0.636905
|
7948874b4f51aea3922d8601e9bdaf9cdce889ce
| 5,045
|
py
|
Python
|
dictionary.py
|
flav-io/flavio-eos
|
98fd95c0599c924feb7155feb3fb8505623bee65
|
[
"MIT"
] | null | null | null |
dictionary.py
|
flav-io/flavio-eos
|
98fd95c0599c924feb7155feb3fb8505623bee65
|
[
"MIT"
] | null | null | null |
dictionary.py
|
flav-io/flavio-eos
|
98fd95c0599c924feb7155feb3fb8505623bee65
|
[
"MIT"
] | null | null | null |
# this is written as a list of tuples as 1 parameter in code A
# could map to 2 parameters in code B. Dictionaries are constructed below
eos_flavio_parameters = [
("GSW::sin^2(theta)", "s2w"),
# ("CKM::A", "A"),
# ("CKM::lambda", "laC"),
# ("CKM::rhobar", "rhobar"),
# ("CKM::etabar", "etabar"),
("QCD::alpha_s(MZ)", "alpha_s"),
("G_Fermi", "GF"),
("mass::e", "m_e"),
("mass::mu", "m_mu"),
("mass::tau", "m_tau"),
("mass::d(2GeV)", "m_d"),
("mass::s(2GeV)", "m_s"),
("mass::c", "m_c"),
("mass::b(MSbar)", "m_b"),
("mass::t(pole)", "m_t"),
("mass::pi^0", "m_pi0"),
("mass::pi^+", "m_pi+"),
("mass::K_d", "m_K0"),
("mass::K_u", "m_K+"),
("mass::K^*_d", "m_K*0"),
("mass::K^*_u", "m_K*+"),
("mass::D^+", "m_D+"),
("mass::D^0", "m_D0"),
("mass::B_d", "m_B0"),
("mass::B_u", "m_B+"),
("mass::B_s", "m_Bs"),
("mass::Lambda", "m_Lambda"),
("mass::Lambda_b", "m_Lambdab"),
("mass::W", "m_W"),
("mass::Z", "m_Z"),
("decay-constant::B_d", "f_B0"),
("decay-constant::B_u", "f_B+"),
("decay-constant::B_s", "f_Bs"),
("decay-constant::K_d", "f_K0"),
("decay-constant::K_u", "f_K+"),
("decay-constant::pi", "f_pi+"),
# ("life_time::B_d", "tau_B0"),
# ("life_time::B_u", "tau_B+"),
# ("life_time::B_s", "tau_Bs"),
# ("life_time::Lambda_b", "tau_Lambdab"),
("Lambda::alpha", "Lambda->ppi alpha_-"),
("Lambda_b->Lambda::a_0_time^V@DM2016", "Lambdab->Lambda SSE a0_fVt"),
("Lambda_b->Lambda::a_1_time^V@DM2016", "Lambdab->Lambda SSE a1_fVt"),
("Lambda_b->Lambda::a_2_time^V@DM2016", "Lambdab->Lambda SSE a2_fVt"),
("Lambda_b->Lambda::a_0_time^A@DM2016", "Lambdab->Lambda SSE a0_fAt"),
("Lambda_b->Lambda::a_1_time^A@DM2016", "Lambdab->Lambda SSE a1_fAt"),
("Lambda_b->Lambda::a_2_time^A@DM2016", "Lambdab->Lambda SSE a2_fAt"),
("Lambda_b->Lambda::a_0_long^V@DM2016", "Lambdab->Lambda SSE a0_fV0"),
("Lambda_b->Lambda::a_1_long^V@DM2016", "Lambdab->Lambda SSE a1_fV0"),
("Lambda_b->Lambda::a_2_long^V@DM2016", "Lambdab->Lambda SSE a2_fV0"),
("Lambda_b->Lambda::a_0_perp^V@DM2016", "Lambdab->Lambda SSE a0_fVperp"),
("Lambda_b->Lambda::a_1_perp^V@DM2016", "Lambdab->Lambda SSE a1_fAperp"),
("Lambda_b->Lambda::a_2_perp^V@DM2016", "Lambdab->Lambda SSE a2_fAperp"),
("Lambda_b->Lambda::a_0_long^A@DM2016", "Lambdab->Lambda SSE a0_fA0"),
("Lambda_b->Lambda::a_1_long^A@DM2016", "Lambdab->Lambda SSE a1_fA0"),
("Lambda_b->Lambda::a_2_long^A@DM2016", "Lambdab->Lambda SSE a2_fA0"),
("Lambda_b->Lambda::a_1_perp^A@DM2016", "Lambdab->Lambda SSE a1_fAperp"),
("Lambda_b->Lambda::a_2_perp^A@DM2016", "Lambdab->Lambda SSE a2_fAperp"),
("Lambda_b->Lambda::a_0_long^T@DM2016", "Lambdab->Lambda SSE a0_fT0"),
("Lambda_b->Lambda::a_1_long^T@DM2016", "Lambdab->Lambda SSE a1_fT0"),
("Lambda_b->Lambda::a_2_long^T@DM2016", "Lambdab->Lambda SSE a2_fT0"),
("Lambda_b->Lambda::a_0_perp^T@DM2016", "Lambdab->Lambda SSE a0_fTperp"),
("Lambda_b->Lambda::a_1_perp^T@DM2016", "Lambdab->Lambda SSE a1_fTperp"),
("Lambda_b->Lambda::a_2_perp^T@DM2016", "Lambdab->Lambda SSE a2_fTperp"),
("Lambda_b->Lambda::a_0_long^T5@DM2016", "Lambdab->Lambda SSE a0_fT50"),
("Lambda_b->Lambda::a_1_long^T5@DM2016", "Lambdab->Lambda SSE a1_fT50"),
("Lambda_b->Lambda::a_2_long^T5@DM2016", "Lambdab->Lambda SSE a2_fT50"),
("Lambda_b->Lambda::a_1_perp^T5@DM2016", "Lambdab->Lambda SSE a1_fT5perp"),
("Lambda_b->Lambda::a_2_perp^T5@DM2016", "Lambdab->Lambda SSE a2_fT5perp"),
# ("B->K^*::alpha^A0_0@BSZ2015", "B->K* BSZ a0_A0"),
("B->K^*::alpha^A0_1@BSZ2015", "B->K* BSZ a1_A0"),
("B->K^*::alpha^A0_2@BSZ2015", "B->K* BSZ a2_A0"),
("B->K^*::alpha^A1_0@BSZ2015", "B->K* BSZ a0_A1"),
("B->K^*::alpha^A1_1@BSZ2015", "B->K* BSZ a1_A1"),
("B->K^*::alpha^A1_2@BSZ2015", "B->K* BSZ a2_A1"),
("B->K^*::alpha^A12_1@BSZ2015", "B->K* BSZ a1_A12"),
("B->K^*::alpha^A12_2@BSZ2015", "B->K* BSZ a2_A12"),
("B->K^*::alpha^V_0@BSZ2015", "B->K* BSZ a0_V"),
("B->K^*::alpha^V_1@BSZ2015", "B->K* BSZ a1_V"),
("B->K^*::alpha^V_2@BSZ2015", "B->K* BSZ a2_V"),
("B->K^*::alpha^T1_0@BSZ2015", "B->K* BSZ a0_T1"),
("B->K^*::alpha^T1_1@BSZ2015", "B->K* BSZ a2_T1"),
("B->K^*::alpha^T1_2@BSZ2015", "B->K* BSZ a2_T1"),
("B->K^*::alpha^T2_1@BSZ2015", "B->K* BSZ a1_T2"),
("B->K^*::alpha^T2_2@BSZ2015", "B->K* BSZ a2_T2"),
("B->K^*::alpha^T23_0@BSZ2015", "B->K* BSZ a0_T23"),
("B->K^*::alpha^T23_1@BSZ2015", "B->K* BSZ a1_T23"),
("B->K^*::alpha^T23_2@BSZ2015", "B->K* BSZ a2_T23"),
("B->K^*::f_Kstar_par", "f_K*0"),
("B->K^*::f_Kstar_perp@2GeV", "f_perp_K*0"),
("B->K^*::f_Kstar_par", "f_K*+"),
("B->K^*::f_Kstar_perp@2GeV", "f_perp_K*+"),
("exp::BR(B->X_clnu)", "BR(B->Xcenu)_exp"),
("exp::C(B->X_clnu, B->X_ulnu)", "C_BXlnu"),
]
# dictionaries
dict_eos2flavio = {e: f for e, f in eos_flavio_parameters}
dict_flavio2eos = {f: e for e, f in eos_flavio_parameters}
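# e.g. dict_eos2flavio["mass::e"] == "m_e" and dict_flavio2eos["m_e"] == "mass::e"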
| 48.980583
| 79
| 0.582359
|
794888539fdcbdb32b68c8470c276961419ec0ed
| 11,344
|
py
|
Python
|
sdk/python/pulumi_azure_native/network/v20181001/get_public_ip_address.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20181001/get_public_ip_address.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20181001/get_public_ip_address.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetPublicIPAddressResult',
'AwaitableGetPublicIPAddressResult',
'get_public_ip_address',
]
@pulumi.output_type
class GetPublicIPAddressResult:
"""
Public IP address resource.
"""
def __init__(__self__, dns_settings=None, etag=None, id=None, idle_timeout_in_minutes=None, ip_address=None, ip_configuration=None, ip_tags=None, location=None, name=None, provisioning_state=None, public_ip_address_version=None, public_ip_allocation_method=None, public_ip_prefix=None, resource_guid=None, sku=None, tags=None, type=None, zones=None):
if dns_settings and not isinstance(dns_settings, dict):
raise TypeError("Expected argument 'dns_settings' to be a dict")
pulumi.set(__self__, "dns_settings", dns_settings)
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if idle_timeout_in_minutes and not isinstance(idle_timeout_in_minutes, int):
raise TypeError("Expected argument 'idle_timeout_in_minutes' to be a int")
pulumi.set(__self__, "idle_timeout_in_minutes", idle_timeout_in_minutes)
if ip_address and not isinstance(ip_address, str):
raise TypeError("Expected argument 'ip_address' to be a str")
pulumi.set(__self__, "ip_address", ip_address)
if ip_configuration and not isinstance(ip_configuration, dict):
raise TypeError("Expected argument 'ip_configuration' to be a dict")
pulumi.set(__self__, "ip_configuration", ip_configuration)
if ip_tags and not isinstance(ip_tags, list):
raise TypeError("Expected argument 'ip_tags' to be a list")
pulumi.set(__self__, "ip_tags", ip_tags)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if public_ip_address_version and not isinstance(public_ip_address_version, str):
raise TypeError("Expected argument 'public_ip_address_version' to be a str")
pulumi.set(__self__, "public_ip_address_version", public_ip_address_version)
if public_ip_allocation_method and not isinstance(public_ip_allocation_method, str):
raise TypeError("Expected argument 'public_ip_allocation_method' to be a str")
pulumi.set(__self__, "public_ip_allocation_method", public_ip_allocation_method)
if public_ip_prefix and not isinstance(public_ip_prefix, dict):
raise TypeError("Expected argument 'public_ip_prefix' to be a dict")
pulumi.set(__self__, "public_ip_prefix", public_ip_prefix)
if resource_guid and not isinstance(resource_guid, str):
raise TypeError("Expected argument 'resource_guid' to be a str")
pulumi.set(__self__, "resource_guid", resource_guid)
if sku and not isinstance(sku, dict):
raise TypeError("Expected argument 'sku' to be a dict")
pulumi.set(__self__, "sku", sku)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if zones and not isinstance(zones, list):
raise TypeError("Expected argument 'zones' to be a list")
pulumi.set(__self__, "zones", zones)
@property
@pulumi.getter(name="dnsSettings")
def dns_settings(self) -> Optional['outputs.PublicIPAddressDnsSettingsResponse']:
"""
The FQDN of the DNS record associated with the public IP address.
"""
return pulumi.get(self, "dns_settings")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="idleTimeoutInMinutes")
def idle_timeout_in_minutes(self) -> Optional[int]:
"""
The idle timeout of the public IP address.
"""
return pulumi.get(self, "idle_timeout_in_minutes")
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[str]:
"""
The IP address associated with the public IP address resource.
"""
return pulumi.get(self, "ip_address")
@property
@pulumi.getter(name="ipConfiguration")
def ip_configuration(self) -> 'outputs.IPConfigurationResponse':
"""
The IP configuration associated with the public IP address.
"""
return pulumi.get(self, "ip_configuration")
@property
@pulumi.getter(name="ipTags")
def ip_tags(self) -> Optional[Sequence['outputs.IpTagResponse']]:
"""
The list of tags associated with the public IP address.
"""
return pulumi.get(self, "ip_tags")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
The provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="publicIPAddressVersion")
def public_ip_address_version(self) -> Optional[str]:
"""
The public IP address version. Possible values are: 'IPv4' and 'IPv6'.
"""
return pulumi.get(self, "public_ip_address_version")
@property
@pulumi.getter(name="publicIPAllocationMethod")
def public_ip_allocation_method(self) -> Optional[str]:
"""
The public IP allocation method. Possible values are: 'Static' and 'Dynamic'.
"""
return pulumi.get(self, "public_ip_allocation_method")
@property
@pulumi.getter(name="publicIPPrefix")
def public_ip_prefix(self) -> Optional['outputs.SubResourceResponse']:
"""
The Public IP Prefix this Public IP Address should be allocated from.
"""
return pulumi.get(self, "public_ip_prefix")
@property
@pulumi.getter(name="resourceGuid")
def resource_guid(self) -> Optional[str]:
"""
The resource GUID property of the public IP resource.
"""
return pulumi.get(self, "resource_guid")
@property
@pulumi.getter
def sku(self) -> Optional['outputs.PublicIPAddressSkuResponse']:
"""
The public IP address SKU.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def zones(self) -> Optional[Sequence[str]]:
"""
A list of availability zones denoting the IP allocated for the resource needs to come from.
"""
return pulumi.get(self, "zones")
class AwaitableGetPublicIPAddressResult(GetPublicIPAddressResult):
# pylint: disable=using-constant-test
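    # The unreachable "yield" below turns __await__ into a generator function,
    # which lets this result object be awaited even though its value is already
    # available synchronously.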
def __await__(self):
if False:
yield self
return GetPublicIPAddressResult(
dns_settings=self.dns_settings,
etag=self.etag,
id=self.id,
idle_timeout_in_minutes=self.idle_timeout_in_minutes,
ip_address=self.ip_address,
ip_configuration=self.ip_configuration,
ip_tags=self.ip_tags,
location=self.location,
name=self.name,
provisioning_state=self.provisioning_state,
public_ip_address_version=self.public_ip_address_version,
public_ip_allocation_method=self.public_ip_allocation_method,
public_ip_prefix=self.public_ip_prefix,
resource_guid=self.resource_guid,
sku=self.sku,
tags=self.tags,
type=self.type,
zones=self.zones)
def get_public_ip_address(expand: Optional[str] = None,
public_ip_address_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetPublicIPAddressResult:
"""
    Gets the specified public IP address in a specified resource group.
:param str expand: Expands referenced resources.
    :param str public_ip_address_name: The name of the public IP address.
:param str resource_group_name: The name of the resource group.
"""
__args__ = dict()
__args__['expand'] = expand
__args__['publicIpAddressName'] = public_ip_address_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:network/v20181001:getPublicIPAddress', __args__, opts=opts, typ=GetPublicIPAddressResult).value
return AwaitableGetPublicIPAddressResult(
dns_settings=__ret__.dns_settings,
etag=__ret__.etag,
id=__ret__.id,
idle_timeout_in_minutes=__ret__.idle_timeout_in_minutes,
ip_address=__ret__.ip_address,
ip_configuration=__ret__.ip_configuration,
ip_tags=__ret__.ip_tags,
location=__ret__.location,
name=__ret__.name,
provisioning_state=__ret__.provisioning_state,
public_ip_address_version=__ret__.public_ip_address_version,
public_ip_allocation_method=__ret__.public_ip_allocation_method,
public_ip_prefix=__ret__.public_ip_prefix,
resource_guid=__ret__.resource_guid,
sku=__ret__.sku,
tags=__ret__.tags,
type=__ret__.type,
zones=__ret__.zones)
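
# Example usage (a sketch only; the resource group and address names are
# hypothetical placeholders, not values defined by this module):
#
#     result = get_public_ip_address(
#         public_ip_address_name="example-ip",
#         resource_group_name="example-rg")
#     pulumi.export("ipAddress", result.ip_address)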
| 38.982818
| 354
| 0.657616
|
79488855d312791738451399c8be216398dba2ff
| 24,668
|
py
|
Python
|
contrib/xclasses/xclasses.py
|
forthinspired/ficl
|
5e8b8c233302a5c02b8c2d77ebcabc95c1833bea
|
[
"BSD-2-Clause"
] | 6
|
2017-04-27T10:52:30.000Z
|
2021-07-10T22:45:01.000Z
|
contrib/xclasses/xclasses.py
|
forthinspired/ficl
|
5e8b8c233302a5c02b8c2d77ebcabc95c1833bea
|
[
"BSD-2-Clause"
] | null | null | null |
contrib/xclasses/xclasses.py
|
forthinspired/ficl
|
5e8b8c233302a5c02b8c2d77ebcabc95c1833bea
|
[
"BSD-2-Clause"
] | null | null | null |
import copy
import sys
import time
import types
def capitalize(s):
	return s[0].upper() + s[1:]
def fprint(f, s):
print >> f, s
def fprintHeader(f, comment = "//"):
fprint(f, comment)
fprint(f, comment + " Generated by xclasses.py at " + time.strftime("%Y/%m/%d %H:%M:%S"))
fprint(f, comment)
fprint(f, comment)
fprint(f, "")
def fprintFooter(f, comment = "//"):
fprint(f, "")
fprint(f, "")
fprint(f, comment + " end of file")
fprint(f, "")
multicallCallTypeFunction = 0
multicallCallTypeMethod = 1
multicallCallTypeVirtualMethod = 2
multicallReturnTypeVoid = 0
multicallReturnTypeInteger = 16
multicallReturnTypeCstring = 32
multicallReturnTypeFloat = 48
multicallExplicitVtable = 512
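# The flags word passed to multicall is built by OR-ing one call-type constant
# with one return-type constant; for example (mirroring xMethod.printF below),
# a virtual method returning a float uses
# multicallCallTypeVirtualMethod | multicallReturnTypeFloat, i.e. 2 | 48 == 50.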
ficlVmName = "ficlVm"
h_headers = []
def xAddHHeader(line):
h_headers.append(line)
h_footers = []
def xAddHFooter(line):
h_footers.append(line)
ficl_headers = []
def xAddFiclHeader(line):
ficl_headers.append(line)
ficl_footers = []
def xAddFiclFooter(line):
ficl_footers.append(line)
c_headers = []
def xAddCHeader(line):
c_headers.append(line)
c_footers = []
def xAddCFooter(line):
c_footers.append(line)
classes = []
class xVariable:
def __init__(self, name, typeCPP = None, cells = None, count = None, defaultValue = None, cstring = None):
self.comments = []
self.setName(name)
self.setCells(cells)
self.setCount(count)
self.setDefaultValue(defaultValue)
self.setCString(cstring)
self.setTypeCPP(typeCPP)
def setName(self, name):
self.name = name
return self
def setTypeCPP(self, typeCPP):
self.typeCPP = typeCPP
if (typeCPP == "char *"):
self.setCString(1)
return self
def setCells(self, cells):
if cells == None:
self.cells = 1
else:
self.cells = cells
return self
def setCString(self, cstring):
self.cstring = cstring
return self
def isCString(self):
return self.cstring
def getTotalSize(self):
return self.cells * self.count
def setCount(self, count):
if type(count) != types.IntType:
count = 1
self.count = count
return self
def setDefaultValue(self, defaultValue):
if (defaultValue != None) and (type(defaultValue) != types.StringType):
defaultValue = str(defaultValue)
self.defaultValue = defaultValue
return self
def addComment(self, c):
self.comments.append(c)
return self
def isFloat(self):
return self.typeCPP == "float"
def stringCPP(self, wantDefaultValues=1):
if (type(self.typeCPP) != types.StringType):
sys.exit("didn't set a CPP type on variable " + self.name + "!")
output = self.typeCPP
if (self.typeCPP[-1] != "*") and (self.typeCPP[-1] != "&"):
output += " "
output += self.name
if self.count > 1:
output += "[" + str(self.count) + "]"
if self.count == 0:
output += "[]"
if wantDefaultValues and (self.defaultValue != None):
output += " = " + self.defaultValue
return output
def printH(self, f):
if len(self.comments):
for comment in self.comments:
fprint(f, "\t" + "// " + comment)
fprint(f, "\t" + self.stringCPP() + ";")
def printF(self, f):
totalCells = self.count * self.cells
if (totalCells <= 1):
typeF = "cell:"
else:
typeF = str(totalCells) + " cells:"
if len(self.comments):
for comment in self.comments:
fprint(f, "\t" + "// " + comment)
fprint(f, "\t" + "S\" " + typeF + " ." + self.name + " \" evaluate")
class xMethod:
def __init__(self, name, returnType = None, virtual = None, static = None, body = None):
self.arguments = []
self.comments = []
self.setName(name)
self.setReturnType(returnType)
self.setVirtual(virtual)
self.setStatic(static)
self.setBody(body)
self.setThunkVariable(None)
self.vtableOffset = 0
	def copy(self):
		# note: arguments and comments are shared with the original, not deep-copied
		clone = xMethod(self.name, self.returnType, self.virtual, self.static)
		clone.arguments = self.arguments
		clone.comments = self.comments
		return clone
def setName(self, name):
self.name = name
return self
def setReturnType(self, returnType):
if returnType.__class__ == xVariable:
self.returnType = returnType
elif type(returnType) == types.StringType:
self.returnType = xVariable("ignored", returnType)
else:
self.returnType = None
return self
def returnTypeIsVoid(self):
return(self.returnType == None) or (self.returnType.typeCPP == None) or (self.returnType.typeCPP == "") or (self.returnType.typeCPP == "void")
def setVirtual(self, virtual):
self.virtual = virtual
return self
def isVirtual(self):
return self.virtual > 0
def isPureVirtual(self):
return self.virtual > 1
def setStatic(self, static):
self.static = static
return self
def setThunkVariable(self, thunkVariable):
self.thunkVariable = thunkVariable
return self
def isStatic(self):
return self.static
# a constructor or a destructor
def isClassSpecial(self):
return (self.returnType == None) or (self.returnType.typeCPP == None) or (self.returnType.typeCPP == "")
def setBody(self, body):
self.body = body
return self
def addArgument(self, argument):
self.arguments.append(argument)
return self
def addComment(self, c):
self.comments.append(c)
return self
def prototype(self, isDefinition=None):
arguments = ""
for a in self.arguments:
if len(arguments):
arguments += ", "
arguments += a.stringCPP(not isDefinition)
if len(arguments) == 0:
arguments = "void"
className = ""
if (isDefinition):
className = self.memberOf.name + "::"
modifiers = ""
if self.virtual and (not isDefinition):
modifiers += "virtual "
if self.static and (not isDefinition):
modifiers += "static "
returnType = ""
name = self.name
if (name == "") or (name == "~"):
name += self.memberOf.name
if (self.returnType != None) and (len(self.returnType.typeCPP) > 0):
returnType = self.returnType.typeCPP + " "
return modifiers + returnType + className + name + "(" + arguments + ")"
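	# Illustrative prototype() output: a virtual method "foo" returning int
	# with one float argument renders as "virtual int foo(float x)" for a
	# declaration, and as "int ClassName::foo(float x)" for a definition.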
def printH(self, f):
pureVirtual = ""
if (self.virtual > 1):
pureVirtual = " = 0"
suffix = ";"
modifiers = ""
if self.body != None:
modifiers = "inline "
suffix = " " + self.body
fprint(f, "\t" + modifiers + self.prototype() + pureVirtual + suffix)
def printF(self, f):
if not self.isVirtual():
return
if len(self.comments):
for comment in self.comments:
fprint(f, "\t" + "// " + comment)
flags = multicallReturnTypeInteger
if self.returnTypeIsVoid():
flags = multicallReturnTypeVoid
elif (self.returnType.isCString()):
			flags = multicallReturnTypeCstring
elif (self.returnType.typeCPP == "float"):
flags = multicallReturnTypeFloat
flags |= multicallCallTypeVirtualMethod
# move floating-point arguments from float stack
floatArgumentsBitfield = 0
cstringArgumentsBitfield = 0
argumentNumber = 0
cstrings = 0
name = self.name
if (self.memberOf.pureVirtual):
vtable = ""
else:
vtable = " drop [ " + self.memberOf.name + "-vtable literal ] "
flags |= multicallExplicitVtable
if (name == "") or (name == "~"):
name += self.memberOf.name
for a in self.arguments:
if a.isFloat():
floatArgumentsBitfield |= (1 << argumentNumber)
elif a.isCString():
cstringArgumentsBitfield |= (1 << argumentNumber)
cstrings += 1
argumentNumber += 1
fprint(f, "\tS\" : " + name + vtable + str(len(self.arguments) + cstrings) + " " + str(floatArgumentsBitfield) + " " + str(cstringArgumentsBitfield) + " " + str(self.vtableOffset) + " " + str(flags) + " multicall ; \" evaluate ")
def printCPP(self, f):
if (self.thunkVariable != None):
if (self.returnType != None) and (self.returnType.isCString()):
sys.exit("Can't thunk char * return values, sorry.")
fprint(f, "")
fprint(f, self.prototype(1))
fprint(f, "\t{")
fprint(f, "\tif (" + self.thunkVariable.name + " == NULL)")
if self.isClassSpecial() or self.returnTypeIsVoid():
fprint(f, "\t\treturn;")
elif (self.returnType.isFloat()):
fprint(f, "\t\treturn 0.0f;")
else:
fprint(f, "\t\treturn (" + self.returnType.typeCPP + ")0;")
fprint(f, "")
ficlVmName = self.memberOf.getFiclVmName()
## output stack-checking code! how cool is that? --lch
dataStackPush = 2 # why 2? we always push this and ficlClass.
dataStackPop = 0
floatStackPush = 0
floatStackPop = 0
for a in self.arguments:
if (a.isCString()):
dataStackPush = dataStackPush + 2
elif (a.isFloat()):
floatStackPush = floatStackPush + 1
else:
dataStackPush = dataStackPush + 1
if (not self.returnTypeIsVoid()):
if (self.returnType.isFloat()):
floatStackPop = 1
else:
dataStackPop = 1
if (dataStackPush or dataStackPop or floatStackPush or floatStackPop):
fprint(f, "#ifdef _DEBUG")
if (dataStackPush or dataStackPop):
fprint(f, "\tficlStackCheck(" + ficlVmName + "->dataStack, " + str(dataStackPush) + ", " + str(dataStackPop) + ");")
if (floatStackPush or floatStackPop):
fprint(f, "\tficlStackCheck(" + ficlVmName + "->floatStack, " + str(floatStackPush) + ", " + str(floatStackPop) + ");")
fprint(f, "#endif // _DEBUG")
reversedArguments = copy.copy(self.arguments)
reversedArguments.reverse()
for a in reversedArguments:
if (a.isCString()):
fprint(f, "\tficlStackPushPointer(" + ficlVmName + "->dataStack, " + a.name + ");")
fprint(f, "\tficlStackPushInteger(" + ficlVmName + "->dataStack, strlen(" + a.name + "));")
elif (a.isFloat()):
fprint(f, "\tficlStackPushFloat(" + ficlVmName + "->floatStack, " + a.name + ");")
else:
fprint(f, "\tficlStackPushInteger(" + ficlVmName + "->dataStack, (int)" + a.name + ");")
fprint(f, "\tficlStackPushPointer(" + ficlVmName + "->dataStack, this);")
fprint(f, "\tficlStackPushPointer(" + ficlVmName + "->dataStack, ficlClass);")
fprint(f, "\tficlVmExecuteXT(" + ficlVmName + ", " + self.thunkVariable.name + ");")
if (not self.returnTypeIsVoid()):
if (self.returnType.isFloat()):
fprint(f, "\treturn ficlStackPopFloat(" + ficlVmName + "->floatStack);")
else:
fprint(f, "\treturn (" + self.returnType.typeCPP + ")ficlStackPopInteger(" + ficlVmName + "->dataStack);")
fprint(f, "\t}")
fprint(f, "")
# don't do virtual functions
if self.isVirtual() or self.isClassSpecial():
return
name = self.name
if (name == "") or (name == "~"):
name += self.memberOf.name
fprint(f, "// " + self.memberOf.name + "::" + name)
if len(self.comments):
fprint(f, "\t" + "//")
for comment in self.comments:
fprint(f, "\t" + "// " + comment)
arguments = ""
for a in self.arguments:
if len(arguments):
arguments += ", "
arguments += a.stringCPP()
if len(arguments) == 0:
arguments = "void"
classModifier = self.memberOf.name + "::"
calltype = "FICL_MULTICALL_CALLTYPE_METHOD"
if self.isStatic():
classModifier = ""
calltype = "FICL_MULTICALL_CALLTYPE_FUNCTION"
returnString = "FICL_MULTICALL_RETURNTYPE_INTEGER"
if self.returnTypeIsVoid():
returnString = "FICL_MULTICALL_RETURNTYPE_VOID"
elif (self.returnType.typeCPP == "float"):
returnString = "FICL_MULTICALL_RETURNTYPE_FLOAT"
elif (self.returnType.isCString()):
returnString = "FICL_MULTICALL_RETURNTYPE_CSTRING"
# set bits in argumentFlags
floatArgumentsBitfield = 0
cstringArgumentsBitfield = 0
argumentNumber = 0
cstrings = 0
for a in self.arguments:
if a.isFloat():
floatArgumentsBitfield |= (1 << argumentNumber)
elif a.isCString():
cstringArgumentsBitfield |= (1 << argumentNumber)
cstrings += 1
argumentNumber += 1
uniqueSuffix = "_" + self.memberOf.name + "_" + name
# constructor is blank!
if (self.name == ""):
uniqueSuffix = "_" + self.memberOf.name + "_constructor"
# destructor is just a squiggle!
elif (self.name == "~"):
uniqueSuffix = "_" + self.memberOf.name + "_destructor"
printingHash = {}
printingHash["classname"] = "xMethod" + uniqueSuffix
printingHash["variablename"] = "instance" + uniqueSuffix
printingHash["address"] = self.returnType.typeCPP + " (" + classModifier + "*address)(" + arguments + ")"
printingHash["function"] = self.memberOf.name + "::" + name
printingHash["methodname"] = name
printingHash["argumentCount"] = str(len(self.arguments) + cstrings)
printingHash["floatArgumentsBitfield"] = str(floatArgumentsBitfield)
printingHash["cstringArgumentsBitfield"] = str(cstringArgumentsBitfield)
printingHash["flags"] = calltype + " | " + returnString
fprint(f, """
struct %(classname)s
{
char *name;
int argumentCount;
int floatArgumentBitfield;
int cstringArgumentBitfield;
int flags;
%(address)s;
int zero;
};
static %(classname)s %(variablename)s = { "%(methodname)s", %(argumentCount)s, %(floatArgumentsBitfield)s, %(cstringArgumentsBitfield)s, %(flags)s, %(function)s, 0 };
""" % printingHash)
class xClass:
def __init__(self, name):
self.members = []
self.methods = []
self.verbatim = []
self.name = name
self.superclass = None
self.superclassName = None
self.containsVtable = 0
self.vtableEntries = 0
self.firstMember = None
self.memberCellsTotal = 0
self.thunkedSubclass = None
self.pureVirtual = 0
self.setFiclVmName(None)
classes.append(self)
def subclassOf(self, superclass):
if type(superclass) == types.StringType:
self.superclassName = superclass
else:
self.superclass = superclass
self.superclassName = superclass.name
if superclass.containsVtable:
self.containsVtable = 2
self.pureVirtual = superclass.pureVirtual
self.vtableEntries = superclass.vtableEntries
else:
self.containsVtable = 0
return self
def thunkedSubclassOf(self, superclass):
self.subclassOf(superclass)
self.addMember(xVariable("ficlClass", "void *"))
for method in superclass.methods:
if not method.isClassSpecial() or method.isPureVirtual():
method = copy.deepcopy(method)
if method.isPureVirtual():
method.setVirtual(1)
self.addThunkedMethod(method)
self.constructor = xMethod("")
self.addMethod(self.constructor)
self.thunkedSubclass = 1
return self
def forwardDeclare(self):
xAddHHeader("class " + self.name + ";")
def addVerbatim(self, v):
self.verbatim.append(v)
return self
def addMember(self, variable):
self.members.append(variable)
self.memberCellsTotal += variable.getTotalSize()
if (self.firstMember == None):
self.firstMember = variable
return self
def removeMember(self, variable):
self.members.remove(variable)
self.memberCellsTotal -= variable.getTotalSize()
if (self.firstMember == variable):
self.firstMember = self.members[0]
return self
def addMemberArray(self, array):
map(self.addMember, copy.deepcopy(array))
def findPreviousInstanceOfVirtualMethod(self, name):
for method in self.methods:
if method.name == name:
return method
if (self.superclass != None) and (type(self.superclass) != types.StringType):
return self.superclass.findPreviousInstanceOfVirtualMethod(name)
return None
def setFiclVmName(self, name):
self.ficlVmName = name
return self
def getFiclVmName(self):
if self.ficlVmName != None:
return self.ficlVmName
global ficlVmName
return ficlVmName
def addMethod(self, method):
method.memberOf = self
if method.virtual:
previousInstance = self.findPreviousInstanceOfVirtualMethod(method.name)
if (previousInstance != None):
method.vtableOffset = previousInstance.vtableOffset
if previousInstance.isPureVirtual() and (not method.isPureVirtual()):
self.pureVirtual -= 1
else:
method.vtableOffset = self.vtableEntries
self.vtableEntries = self.vtableEntries + 1
if (not self.containsVtable):
self.containsVtable = 1
if method.isPureVirtual():
self.pureVirtual += 1
self.methods.append(method)
return self
def lookupMethod(self, methodName):
for m in self.methods:
if (m.name == methodName):
return m
return None
def removeMethod(self, method):
if (type(method) == types.StringType):
method = self.lookupMethod(method)
if method == None:
return None
method.memberOf = None
self.methods.remove(method)
		if method.virtual:
			previousInstance = self.findPreviousInstanceOfVirtualMethod(method.name)
			if (previousInstance == None):
				# no other definition remains, so compact the vtable
				for m in self.methods:
					if (m.vtableOffset >= method.vtableOffset):
						m.vtableOffset = m.vtableOffset - 1
				self.vtableEntries = self.vtableEntries - 1
				if (self.vtableEntries == 0):
					self.containsVtable = 0
				if method.isPureVirtual():
					self.pureVirtual -= 1
			elif previousInstance.isPureVirtual() and (not method.isPureVirtual()):
				# removing this override re-exposes the inherited pure virtual
				self.pureVirtual += 1
if method.thunkVariable != None:
self.removeMember(method.thunkVariable)
return self
def addThunkedMethod(self, method):
method = copy.deepcopy(method)
self.addMethod(method)
name = capitalize(method.name)
if (method.isClassSpecial()):
if (name == ""):
name = "Constructor"
else:
name = "Destructor"
thunkVariable = xVariable("xt" + name, "ficlWord *")
self.addMember(thunkVariable)
method.setThunkVariable(thunkVariable)
return self
def addNoopConstructor(self):
self.addVerbatim(self.name + "() { }")
return self
def addConstructor(self, virtual = 0):
method = xMethod("")
method.setVirtual(virtual)
self.addMethod(method)
return method
def addDestructor(self, virtual = 0):
method = xMethod("~")
method.setVirtual(virtual)
self.addMethod(method)
return method
def addMemberWithAccessors(self, variable, writeBodiesToo = 1):
self.addMember(variable)
capitalizedName = capitalize(variable.name)
m = xMethod("set" + capitalizedName, "void").addArgument(variable)
if writeBodiesToo:
m.setBody("\t{ this->" + variable.name + " = " + variable.name + "; }")
self.addMethod(m)
m = xMethod("get" + capitalizedName, variable.typeCPP)
if writeBodiesToo:
m.setBody("\t{ return this->" + variable.name + "; }")
self.addMethod(m)
def addMethodArray(self, array):
map(self.addMethod, copy.deepcopy(array))
def addThunkedMethodArray(self, array):
map(self.addThunkedMethod, copy.deepcopy(array))
def printHforward(self, f):
fprint(f, "class " + self.name + ";")
def printH(self, f):
if (self.thunkedSubclass):
body = "\n\t\t{\n"
for m in self.methods:
if m.thunkVariable != None:
body += "\t\t" + m.thunkVariable.name + " = NULL;\n"
body += "\t\t}\n"
self.constructor.setBody(body)
s = ""
if self.superclassName != None:
s = " : public " + self.superclassName
fprint(f, "class " + self.name + s)
fprint(f, "\t" + "{")
fprint(f, "\t" + "public:")
fprint(f, "")
for member in self.members:
member.printH(f)
fprint(f, "")
for method in self.methods:
method.printH(f)
for v in self.verbatim:
fprint(f, "\t" + v + "\n")
fprint(f, "\t" + "};\n\n")
def printF(self, f):
s = self.superclassName
if s == None:
s = "object"
fprint(f, "")
fprint(f, "//")
fprint(f, "// " + self.name)
fprint(f, "//")
fprint(f, ": declare-" + self.name)
fprint(f, "\t" + "S\" " + s + " subclass " + self.name + " \" evaluate")
fprint(f, "")
if self.containsVtable == 1:
fprint(f, "\t" + "S\" cell: .vtable\" evaluate")
for member in self.members:
member.printF(f)
fprint(f, "")
if (self.firstMember == None):
fprint(f, "\t" + "S\" : default-init 2drop ; \" evaluate // no members!")
else:
storeFiclClass = ""
if (self.thunkedSubclass != None):
storeFiclClass = "this this my=> .ficlClass ! drop "
setVtable = ""
if self.containsVtable and (not self.pureVirtual):
setVtable = self.name + "-vtable this my=> .vtable ! "
fprint(f, "\t" + "S\" : default-init { 2:this -- } this my=> super my=> init this my=> ." + self.firstMember.name + " " + str(self.memberCellsTotal) + " cells 0 fill " + setVtable + storeFiclClass + "; \" evaluate")
fprint(f, "\t// " + self.name + " methods:")
fprint(f, "\t" + self.name + "-declare-methods")
for method in self.methods:
method.printF(f)
fprint(f, "\t;")
fprint(f, "")
fprint(f, ": end-" + self.name)
fprint(f, "\t" + "S\" end-class \" evaluate")
fprint(f, "\t" + "S\" " + self.name + " 2constant " + self.name + ".constant \" evaluate")
fprint(f, "\t;")
fprint(f, "")
def printCPP(self, f):
fprint(f, "//")
fprint(f, "// " + self.name)
fprint(f, "//")
for method in self.methods:
method.printCPP(f)
fprint(f, "")
fprint(f, "// " + self.name + " final structure")
fprint(f, "static xMethod *" + self.name + "_methods[] =")
fprint(f, "\t" + "{")
for method in self.methods:
if (method.isVirtual() or method.isClassSpecial()):
continue
fprint(f, "\t" + "(xMethod *)(&instance_" + self.name + "_" + method.name + "),")
fprint(f, "\t" + "NULL")
fprint(f, "\t" + "};")
if self.containsVtable and (not self.pureVirtual):
fprint(f, "")
fprint(f, "// " + self.name + " instance, so we can get the vtable")
fprint(f, "static " + self.name + " " + self.name + "_instance;" )
fprint(f, "")
def xclassesFooter():
f = open("xclasses.h", "wt")
fprintHeader(f)
fprint(f, "#ifndef __XCLASSES_H")
fprint(f, "#define __XCLASSES_H")
fprint(f, "")
fprint(f, "extern void xclassesDefineMethods(ficlVm *vm);")
fprint(f, "")
fprint(f, "enum xtype");
fprint(f, "\t{");
fprint(f, "\txtypeInvalid = 0,");
for c in classes:
fprint(f, "\txtype_" + c.name + ",");
fprint(f, "\txtypeLast,");
fprint(f, "\t};");
fprint(f, "");
for line in h_headers:
fprint(f, line)
fprint(f, "")
fprint(f, "")
for c in classes:
c.printH(f)
for line in h_footers:
fprint(f, line)
fprint(f, "")
fprint(f, "#endif // __XCLASSES_H")
fprintFooter(f)
f.close()
f = open("xclasses.f", "wt")
fprintHeader(f)
fprint(f, ": use-default-init S\" : init { 2:this } this my=> super my=> init this my=> default-init ; \" evaluate ;");
for line in ficl_headers:
fprint(f, line)
fprint(f, "")
for c in classes:
c.printF(f)
for line in ficl_footers:
fprint(f, line)
fprint(f, "")
fprintFooter(f)
f.close()
f = open("xclasses.cpp", "wt")
fprintHeader(f)
for line in c_headers:
fprint(f, line)
fprint(f, "")
fprint(f, "#include \"xclasses.h\"")
fprint(f, """
struct xMethod
{
char *name;
int argumentCount;
int floatArgumentBitfield;
int cstringArgumentBitfield;
int flags;
void *address;
int zero;
};
struct xClass
{
char *name;
xMethod **methods;
void **instance;
};
""")
for c in classes:
c.printCPP(f)
fprint(f, """
static xClass classes[] =
{
""")
for c in classes:
vtableVariable = "NULL"
if c.containsVtable and (not c.pureVirtual):
vtableVariable = "(void **)&" + c.name + "_instance"
fprint(f, "\t" + "{ \"" + c.name + "\", " + c.name + "_methods, " + vtableVariable + " },")
fprint(f, """
{ NULL, NULL }
};
void xclassesDefineMethods(ficlVm *vm)
{
char buffer[1024];
xClass *c;
xMethod **m;
for (c = classes; c->name != NULL; c++)
{
sprintf(buffer, " : %s-declare-methods ", c->name);
ficlVmEvaluate(vm, buffer);
for (m = c->methods; *m != NULL; m++)
{
xMethod *method = *m;
/* why is this here? I dunno, but MSVC seems to be packing my struct. So if address is zero, the next dword has the address. --lch */
if (method->address == NULL)
method->address = (void *)method->zero;
sprintf(buffer, " S\\" : %s drop %d %d %d %d %d multicall ; \\" evaluate ",
method->name,
method->argumentCount,
method->floatArgumentBitfield,
method->cstringArgumentBitfield,
method->address,
method->flags
);
ficlVmEvaluate(vm, buffer);
}
ficlVmEvaluate(vm, " ; ");
if (c->instance != NULL)
{
sprintf(buffer, "%s-vtable", c->name);
ficlDictionarySetConstantPointer(ficlVmGetDictionary(vm), buffer, *(c->instance));
}
}
}
""")
for line in c_footers:
fprint(f, line)
fprint(f, "")
fprintFooter(f)
f.close()
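# A typical driver script for this generator might look like the following
# (a sketch; the class and member names are hypothetical):
#     point = xClass("Point")
#     point.addMemberWithAccessors(xVariable("x", "int"))
#     point.addMemberWithAccessors(xVariable("y", "int"))
#     xclassesFooter()   # writes xclasses.h, xclasses.f and xclasses.cpp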
| 28.32147
| 232
| 0.634871
|
7948886a1c03efbf449794ff9b3fb7745751acd9
| 3,832
|
py
|
Python
|
pybridair/objects.py
|
netmanchris/pybrid
|
3f38b3047c3e1d8f4e5e57a603110f645ac78a0d
|
[
"Apache-2.0"
] | null | null | null |
pybridair/objects.py
|
netmanchris/pybrid
|
3f38b3047c3e1d8f4e5e57a603110f645ac78a0d
|
[
"Apache-2.0"
] | 1
|
2018-10-23T19:32:04.000Z
|
2018-10-23T19:32:04.000Z
|
pybridair/objects.py
|
netmanchris/pybrid
|
3f38b3047c3e1d8f4e5e57a603110f645ac78a0d
|
[
"Apache-2.0"
] | null | null | null |
# !/usr/bin/env python3
# coding=utf-8
# author: @netmanchris
# -*- coding: utf-8 -*-
import pybrid.data
import pybrid.device  # used by BridDev.__init__ for get_device_info()
from pybrid.auth import BridAuth
import datetime
class BridDev:
def __init__(self, ip_address: str, auth: BridAuth, cache_time: float = 15,
aggregate_type='15-minute'):
"""
Initialise AwairDev object.
:param device_name: The name of the device a can be found in the Awair app. Careful, case sensitive.
:param auth: The authentication object as created by the BridAuth function.
:param cache_time: The time in minutes that the state values should be cached. When this time has expired, new values
will be requested. Keep in mind that the API has daily limits so setting this too low might
cause problems.
:param aggregate_type: Type can be 'current', '5-minute' or '15-minute' referring to the aggregation. Keep in mind that
not all tiers have access to all of them.
"""
self._auth = auth
self._cache_time = cache_time
if aggregate_type in ['current', '5-minute', '15-minute']:
self._aggregate_type = aggregate_type
else:
raise ValueError("The argument aggregate_type cannot have this value.")
self._last_update = datetime.datetime.now() # records the last update
self._ip_address = ip_address
# Get device type and ID from name
device = pybrid.device.get_device_info(self._auth)
self._type = device['Model'] # get the device type
self._serial = device['Serial Number']
# Initiate data fields
self._data = {}
self._last_update = None
self.refresh()
def get_state(self, indicator: str) -> float:
"""
Function to get the state of a specific indicator.
The values are cached, in accordance with the cache time that is set for the object.
:param indicator: A string containing one of the values from: score, temp, humid, co2, voc or dust.
:return: The value of the specific indicator.
"""
now = datetime.datetime.now()
delta_min = (now - self._last_update).total_seconds() / 60
if delta_min > self._cache_time:
self.refresh()
return self._data[indicator]
    def name(self) -> str:
        """
        Function to get the name of the device. No separate device name is
        stored, so the IP address the object was initialised with is returned.
        :return: The IP address identifying the device.
        """
        return self._ip_address
def type(self) -> str:
"""
        Function to get the type of the device.
:return: The type of the device.
"""
return self._type
def serial(self) -> str:
"""
        Function to get the serial number of the device.
        :return: The serial number of the device.
"""
return self._serial
def refresh(self):
"""
Function to refresh the state of the objects.
The values are cached internally for the period equal to the cache
time value. The refresh function refreshed these values, independent of the time that has past since the last
refresh.
"""
        # All aggregation types currently query the same status endpoint.
        data: dict = pybrid.data.get_status(self._auth)
self._data['score'] = data['Sensors']['Quality']
self._data['temp'] = data['Sensors']['Temperature']
self._data['humid'] = data['Sensors']['Humidity']
self._data['co'] = data['Sensors']['CO']
self._last_update = datetime.datetime.now() # records the time of the last update
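
# Example usage (a sketch; the address and the BridAuth arguments are
# placeholders, not values defined by this module):
#     auth = BridAuth(...)
#     device = BridDev('192.168.0.10', auth, cache_time=10)
#     print(device.get_state('temp'))   # served from cache for up to 10 minutes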
| 36.150943
| 127
| 0.612213
|
794888adea59a6543f953931d32c64d80ec8f9db
| 3,663
|
py
|
Python
|
ev3sim/robots/demo.py
|
scottwedge/ev3sim
|
751c9902e7615d27d52e4b45b34e6acb47c06d24
|
[
"BSD-3-Clause"
] | null | null | null |
ev3sim/robots/demo.py
|
scottwedge/ev3sim
|
751c9902e7615d27d52e4b45b34e6acb47c06d24
|
[
"BSD-3-Clause"
] | null | null | null |
ev3sim/robots/demo.py
|
scottwedge/ev3sim
|
751c9902e7615d27d52e4b45b34e6acb47c06d24
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Some demo code for the ev3dev simulator.
This code will:
* print sensor values
* randomly move the motors every few seconds
* correct itself if it goes over the white line.
"""
from ev3dev2.motor import LargeMotor
from ev3dev2.sensor.lego import ColorSensor, UltrasonicSensor
from ev3dev2.sensor import Sensor
from ev3sim.code_helpers import is_sim, wait_for_tick
if is_sim:
print("Hello from the simulator!!!")
else:
print("Hello from the brick!!!")
import random
import time
from collections import deque
# Some behavioural constants
STEP_LENGTH = (1, 3) # Move in a new direction every 1-3 seconds
MOTOR_SPEEDS = (-100, 100) # Motor values are anything between -100 and 100
PRINT_TIME = 5 # Print sensor values every 5 seconds
def random_between(a, b):
# Returns a random float between a and b:
return a + random.random() * (b - a)
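# e.g. random_between(-100, 100) returns a float uniformly drawn from [-100, 100).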
# Initialise all sensors.
lm1 = LargeMotor(address="outB")
lm2 = LargeMotor(address="outC")
cs = ColorSensor(address="in2")
us = UltrasonicSensor(address="in3")
ir = Sensor(address="in1", driver_name="ht-nxt-ir-seek-v2")
compass = Sensor(address="in4", driver_name="ht-nxt-compass")
compass.command = "BEGIN-CAL"
compass.command = "END-CAL"
# This code moves in random directions, and stores the movements in a circular queue.
movement_queue = deque([], maxlen=5)
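# With maxlen=5 the deque keeps only the five most recent movements: appending a
# sixth silently discards the oldest, so e.g. deque([1, 2, 3, 4, 5], maxlen=5)
# becomes deque([2, 3, 4, 5, 6], maxlen=5) after append(6).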
last_step_time = time.time()
last_print_time = time.time()
current_step_wait = 0
solving_white = False
while True:
if time.time() - last_step_time > current_step_wait:
# Set some new motor speeds, and a wait time.
last_step_time = time.time()
m1Speed, m2Speed = random_between(*MOTOR_SPEEDS), random_between(*MOTOR_SPEEDS)
current_step_wait = random_between(*STEP_LENGTH)
lm1.on_for_seconds(m1Speed, current_step_wait, block=False)
lm2.on_for_seconds(m2Speed, current_step_wait, block=False)
movement_queue.append(
{
"motor1Speed": m1Speed,
"motor2Speed": m2Speed,
"wait_time": current_step_wait,
}
)
solving_white = False
if time.time() - last_print_time > PRINT_TIME:
# Print sensor values.
last_print_time = time.time()
# We add each line to a string so that we can print the lines all at
# once, instead of one line at a time
message = "Sensor Values\n"
message += "=============\n"
message += "Colour Sensor\n"
message += f"RGB: {cs.rgb}\n"
message += "Ultrasonic\n"
message += f"Distance: {us.distance_centimeters}cm\n"
message += "Infrared\n"
message += f"Values: {[ir.value(x) for x in range(7)]}\n"
message += "Compass\n"
message += f"Bearing: {compass.value()}\n"
message += "============="
print(message)
# If we hit the white line, then reverse this ongoing action
# This white detection is bad, you should replace with something better (and more stable).
if sum(cs.rgb) > 600 and not solving_white:
# Reverse motor speeds, for the amount so far elapsed.
elapsed = time.time() - last_step_time
if len(movement_queue) > 0:
movement = movement_queue.pop()
# Set the last_step_time to now, and make sure we wait `elapsed` seconds.
last_step_time = time.time()
current_step_wait = elapsed
lm1.on_for_seconds(-movement["motor1Speed"], elapsed, block=False)
lm2.on_for_seconds(-movement["motor2Speed"], elapsed, block=False)
# Set this so we don't infinitely back up.
solving_white = True
wait_for_tick()
| 35.563107
| 94
| 0.657112
|